| lang (stringclasses, 2 values) | license (stringclasses, 13 values) | stderr (stringlengths 0-343) | commit (stringlengths 40-40) | returncode (int64, 0-128) | repos (stringlengths 6-87.7k) | new_contents (stringlengths 0-6.23M) | new_file (stringlengths 3-311) | old_contents (stringlengths 0-6.23M) | message (stringlengths 6-9.1k) | old_file (stringlengths 3-311) | subject (stringlengths 0-4k) | git_diff (stringlengths 0-6.31M) |
---|---|---|---|---|---|---|---|---|---|---|---|---|
Java
|
apache-2.0
|
d9343cd8db4c43f170e6f5e5a24770e8ac6c8201
| 0 |
nmoghadam/BPIM,OnePaaS/jbpm,ibek/jbpm,livthomas/jbpm,MetSystem/jbpm,psakar/jbpm,psakar/jbpm,mrietveld/jbpm,droolsjbpm/jbpm,psakar/jbpm,nmoghadam/jbpm,jakubschwan/jbpm,selrahal/jbpm,Multi-Support/jbpm,Multi-Support/jbpm,nmoghadam/BPIM,livthomas/jbpm,karthikprabhu17/jbpm,ifu-lobuntu/jbpm,sutaakar/jbpm,jomarko/jbpm,bxf12315/jbpm,Aaron2000/jbpm,DuncanDoyle/jbpm,pleacu/jbpm,rabbitcount/jbpm,akoskm/jbpm,ifu-lobuntu/jbpm,Salaboy/jbpm,DuncanDoyle/jbpm,jakubschwan/jbpm,DuncanDoyle/jbpm,ibek/jbpm,rabbitcount/jbpm,jakubschwan/jbpm,OnePaaS/jbpm,Multi-Support/jbpm,jesuino/jbpm,romartin/jbpm,selrahal/jbpm,ifu-lobuntu/jbpm,lukenjmcd/jbpm,akoskm/jbpm,jesuino/jbpm,winklerm/jbpm,ibek/jbpm,romartin/jbpm,pleacu/jbpm,domhanak/jbpm,domhanak/jbpm,sutaakar/jbpm,jgoldsmith613/jbpm,mrietveld/jbpm,jgoldsmith613/jbpm,Salaboy/jbpm,romartin/jbpm,OnePaaS/jbpm,mrietveld/jbpm,MetSystem/jbpm,jomarko/jbpm,lukenjmcd/jbpm,jomarko/jbpm,nmoghadam/BPIM,bxf12315/jbpm,karthikprabhu17/jbpm,bxf12315/jbpm,jesuino/jbpm,nmoghadam/jbpm,Aaron2000/jbpm,xingguang2013/jbpm-1,MetSystem/jbpm,sutaakar/jbpm,xingguang2013/jbpm-1,jesuino/jbpm,akoskm/jbpm,pleacu/jbpm,domhanak/jbpm,droolsjbpm/jbpm,nmoghadam/jbpm,lukenjmcd/jbpm,jomarko/jbpm,selrahal/jbpm,rabbitcount/jbpm,xingguang2013/jbpm-1,romartin/jbpm,livthomas/jbpm,jgoldsmith613/jbpm,droolsjbpm/jbpm,Aaron2000/jbpm,karthikprabhu17/jbpm,winklerm/jbpm,winklerm/jbpm,Salaboy/jbpm
|
/**
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.process.instance.timer;
import java.io.IOException;
import java.util.Collection;
import java.util.Date;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import org.drools.common.InternalKnowledgeRuntime;
import org.drools.marshalling.impl.InputMarshaller;
import org.drools.marshalling.impl.MarshallerReaderContext;
import org.drools.marshalling.impl.MarshallerWriteContext;
import org.drools.marshalling.impl.OutputMarshaller;
import org.drools.marshalling.impl.PersisterEnums;
import org.drools.marshalling.impl.ProtobufInputMarshaller;
import org.drools.marshalling.impl.ProtobufMessages;
import org.drools.marshalling.impl.ProtobufMessages.Timers.Timer;
import org.drools.marshalling.impl.ProtobufOutputMarshaller;
import org.drools.marshalling.impl.TimersInputMarshaller;
import org.drools.marshalling.impl.TimersOutputMarshaller;
import org.drools.time.Job;
import org.drools.time.JobContext;
import org.drools.time.JobHandle;
import org.kie.time.SessionClock;
import org.drools.time.TimerService;
import org.drools.time.Trigger;
import org.drools.time.impl.IntervalTrigger;
import org.drools.time.impl.JDKTimerService.JDKJobHandle;
import org.jbpm.marshalling.impl.JBPMMessages;
import org.jbpm.marshalling.impl.ProcessMarshallerImpl;
import org.jbpm.marshalling.impl.ProtobufProcessMarshaller;
import org.jbpm.process.instance.InternalProcessRuntime;
import org.jbpm.process.instance.ProcessInstance;
import org.jbpm.workflow.instance.impl.WorkflowProcessInstanceImpl;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
* @author <a href="mailto:[email protected]">Kris Verlaenen</a>
*/
public class TimerManager {
private static final Logger logger = LoggerFactory.getLogger(TimerManager.class);
private long timerId = 0;
private InternalKnowledgeRuntime kruntime;
private TimerService timerService;
private Map<Long, TimerInstance> timers = new ConcurrentHashMap<Long, TimerInstance>();
public static final Job processJob = new ProcessJob();
public TimerManager(InternalKnowledgeRuntime kruntime,
TimerService timerService) {
this.kruntime = kruntime;
this.timerService = timerService;
}
public void registerTimer(final TimerInstance timer,
ProcessInstance processInstance) {
try {
kruntime.startOperation();
if ( !kruntime.getActionQueue().isEmpty() ) {
kruntime.executeQueuedActions();
}
timer.setId( ++timerId );
timer.setProcessInstanceId( processInstance.getId() );
timer.setActivated( new Date() );
Trigger trigger = new IntervalTrigger( timerService.getCurrentTime(),
null,
null,
timer.getRepeatLimit(),
timer.getDelay(),
timer.getPeriod(),
null,
null );
ProcessJobContext ctx = new ProcessJobContext( timer,
trigger,
processInstance.getId(),
this.kruntime );
JobHandle jobHandle = this.timerService.scheduleJob( processJob,
ctx,
trigger );
timer.setJobHandle( jobHandle );
timers.put( timer.getId(),
timer );
} finally {
kruntime.endOperation();
}
}
public void internalAddTimer(final TimerInstance timer) {
long delay;
Date lastTriggered = timer.getLastTriggered();
if ( lastTriggered == null ) {
Date activated = timer.getActivated();
Date now = new Date();
long timespan = now.getTime() - activated.getTime();
delay = timer.getDelay() - timespan;
if ( delay < 0 ) {
delay = 0;
}
} else {
Date now = new Date();
long timespan = now.getTime() - lastTriggered.getTime();
delay = timespan - timer.getPeriod();
if ( delay < 0 ) {
delay = 0;
}
}
Trigger trigger = new IntervalTrigger( timerService.getCurrentTime(),
null,
null,
-1,
delay,
timer.getPeriod(),
null,
null ) ;
ProcessJobContext ctx = new ProcessJobContext( timer,
trigger,
timer.getProcessInstanceId(),
this.kruntime );
JobHandle jobHandle = this.timerService.scheduleJob( processJob,
ctx,
trigger );
timer.setJobHandle( jobHandle );
timers.put( timer.getId(),
timer );
}
public void cancelTimer(long timerId) {
TimerInstance timer = timers.remove( timerId );
if ( timer != null ) {
timerService.removeJob( timer.getJobHandle() );
}
}
public void dispose() {
// for ( TimerInstance timer : timers.values() ) {
// System.out.println( timer );
// timerService.removeJob( timer.getJobHandle() );
// }
for ( Iterator<TimerInstance> it = timers.values().iterator(); it.hasNext(); ) {
TimerInstance timer = it.next();
timerService.removeJob( timer.getJobHandle() );
it.remove();
}
timerService.shutdown();
}
public TimerService getTimerService() {
return this.timerService;
}
public Collection<TimerInstance> getTimers() {
return timers.values();
}
public Map<Long, TimerInstance> getTimerMap() {
return this.timers;
}
public long internalGetTimerId() {
return timerId;
}
public void internalSetTimerId(long timerId) {
this.timerId = timerId;
}
public void setTimerService(TimerService timerService) {
this.timerService = timerService;
}
public static class ProcessTimerOutputMarshaller implements TimersOutputMarshaller {
public void write(JobContext ctx, MarshallerWriteContext outCtx) throws IOException {
outCtx.writeShort( PersisterEnums.PROCESS_TIMER );
ProcessJobContext pctx = ( ProcessJobContext ) ctx;
outCtx.writeLong( pctx.getProcessInstanceId() );
OutputMarshaller.writeTrigger( pctx.getTrigger(), outCtx );
ProcessMarshallerImpl.writeTimer( outCtx, pctx.getTimer() );
}
public Timer serialize(JobContext jobCtx,
MarshallerWriteContext outputCtx) {
ProcessJobContext pctx = ( ProcessJobContext ) jobCtx;
return ProtobufMessages.Timers.Timer.newBuilder()
.setType( ProtobufMessages.Timers.TimerType.PROCESS )
.setExtension( JBPMMessages.procTimer,
JBPMMessages.ProcessTimer.newBuilder()
.setTimer( ProtobufProcessMarshaller.writeTimer( outputCtx, pctx.getTimer() ) )
.setTrigger( ProtobufOutputMarshaller.writeTrigger( pctx.getTrigger(), outputCtx ) )
.build() )
.build();
}
}
public static class ProcessTimerInputMarshaller implements TimersInputMarshaller {
public void read(MarshallerReaderContext inCtx) throws IOException, ClassNotFoundException {
TimerService ts = inCtx.wm.getTimerService();
long processInstanceId = inCtx.readLong();
Trigger trigger = InputMarshaller.readTrigger( inCtx );
TimerInstance timerInstance = ProcessMarshallerImpl.readTimer( inCtx );
TimerManager tm = ((InternalProcessRuntime)inCtx.wm.getProcessRuntime()).getTimerManager();
// check if the timer instance is not already registered to avoid duplicated timers
if (!tm.getTimerMap().containsKey(timerInstance.getId())) {
ProcessJobContext pctx = new ProcessJobContext(timerInstance, trigger, processInstanceId, inCtx.wm.getKnowledgeRuntime());
Date date = trigger.hasNextFireTime();
if (date != null) {
long then = date.getTime();
long now = pctx.getKnowledgeRuntime().getSessionClock().getCurrentTime();
// overdue timer
if (then < now) {
trigger = new OverdueTrigger(trigger, pctx.getKnowledgeRuntime());
}
}
JobHandle jobHandle = ts.scheduleJob( processJob,
pctx,
trigger );
timerInstance.setJobHandle( jobHandle );
pctx.setJobHandle( jobHandle );
tm.getTimerMap().put( timerInstance.getId(),
timerInstance );
}
}
public void deserialize(MarshallerReaderContext inCtx,
Timer _timer) throws ClassNotFoundException {
JBPMMessages.ProcessTimer _ptimer = _timer.getExtension( JBPMMessages.procTimer );
TimerService ts = inCtx.wm.getTimerService();
long processInstanceId = _ptimer.getTimer().getProcessInstanceId();
Trigger trigger = ProtobufInputMarshaller.readTrigger( inCtx, _ptimer.getTrigger() );
TimerInstance timerInstance = ProtobufProcessMarshaller.readTimer( inCtx, _ptimer.getTimer() );
TimerManager tm = ((InternalProcessRuntime)inCtx.wm.getProcessRuntime()).getTimerManager();
// check if the timer instance is not already registered to avoid duplicated timers
if (!tm.getTimerMap().containsKey(timerInstance.getId())) {
ProcessJobContext pctx = new ProcessJobContext(timerInstance, trigger, processInstanceId, inCtx.wm.getKnowledgeRuntime());
Date date = trigger.hasNextFireTime();
if (date != null) {
long then = date.getTime();
long now = pctx.getKnowledgeRuntime().getSessionClock().getCurrentTime();
// overdue timer
if (then < now) {
trigger = new OverdueTrigger(trigger, pctx.getKnowledgeRuntime());
}
}
JobHandle jobHandle = ts.scheduleJob( processJob,
pctx,
trigger );
timerInstance.setJobHandle( jobHandle );
pctx.setJobHandle( jobHandle );
tm.getTimerMap().put( timerInstance.getId(),
timerInstance );
}
}
}
public static class ProcessJob
implements
Job {
public void execute(JobContext c) {
ProcessJobContext ctx = (ProcessJobContext) c;
Long processInstanceId = ctx.getProcessInstanceId();
InternalKnowledgeRuntime kruntime = ctx.getKnowledgeRuntime();
try {
if ( processInstanceId == null ) {
throw new IllegalArgumentException( "Could not find process instance for timer " );
}
ctx.getTimer().setLastTriggered( new Date( ctx.getKnowledgeRuntime().<SessionClock>getSessionClock().getCurrentTime() ) );
// if there is no more trigger reset period on timer so its node instance can be removed
if (ctx.getTrigger().hasNextFireTime() == null) {
ctx.getTimer().setPeriod(0);
}
((InternalProcessRuntime) kruntime.getProcessRuntime())
.getSignalManager().signalEvent( processInstanceId,
"timerTriggered",
ctx.getTimer() );
TimerManager tm = ((InternalProcessRuntime)ctx.getKnowledgeRuntime().getProcessRuntime()).getTimerManager();
if ( ctx.getTimer().getPeriod() == 0 ) {
tm.getTimerMap().remove( ctx.getTimer().getId() );
}
} catch (Throwable e) {
logger.error("Error when executing timer job", e);
WorkflowProcessInstanceImpl processInstance = (WorkflowProcessInstanceImpl) kruntime.getProcessInstance(processInstanceId);
if (processInstance != null && ctx.getTimer().getPeriod() == 0) {
processInstance.setState(ProcessInstance.STATE_ABORTED);
}
}
}
}
public static class ProcessJobContext
implements
JobContext {
private static final long serialVersionUID = 476843895176221627L;
private Long processInstanceId;
private InternalKnowledgeRuntime kruntime;
private TimerInstance timer;
private Trigger trigger;
private JobHandle jobHandle;
public ProcessJobContext(final TimerInstance timer,
final Trigger trigger,
final Long processInstanceId,
final InternalKnowledgeRuntime kruntime) {
this.timer = timer;
this.trigger = trigger;
this.processInstanceId = processInstanceId;
this.kruntime = kruntime;
}
public Long getProcessInstanceId() {
return processInstanceId;
}
public InternalKnowledgeRuntime getKnowledgeRuntime() {
return kruntime;
}
public Trigger getTrigger() {
return trigger;
}
public JobHandle getJobHandle() {
return this.jobHandle;
}
public void setJobHandle(JobHandle jobHandle) {
this.jobHandle = jobHandle;
}
public TimerInstance getTimer() {
return timer;
}
}
/**
* Overdue-aware trigger that introduces a fixed delay to allow completion of session initialization
*
*/
public static class OverdueTrigger implements Trigger {
private static final long serialVersionUID = -2368476147776308013L;
public static final long OVERDUE_DELAY = Long.parseLong(System.getProperty("jbpm.overdue.timer.delay", "2000"));
private Trigger orig;
private InternalKnowledgeRuntime kruntime;
public OverdueTrigger(Trigger orig, InternalKnowledgeRuntime kruntime) {
this.orig = orig;
this.kruntime = kruntime;
}
public Date hasNextFireTime() {
Date date = orig.hasNextFireTime();
if (date == null) {
return null;
}
long then = date.getTime();
long now = kruntime.getSessionClock().getCurrentTime();
// overdue timer
if (then < now) {
return new Date((now + OVERDUE_DELAY));
} else {
return orig.hasNextFireTime();
}
}
public Date nextFireTime() {
return orig.nextFireTime();
}
}
}
|
jbpm-flow/src/main/java/org/jbpm/process/instance/timer/TimerManager.java
|
/**
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.process.instance.timer;
import java.io.IOException;
import java.util.Collection;
import java.util.Date;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import org.drools.common.InternalKnowledgeRuntime;
import org.drools.marshalling.impl.InputMarshaller;
import org.drools.marshalling.impl.MarshallerReaderContext;
import org.drools.marshalling.impl.MarshallerWriteContext;
import org.drools.marshalling.impl.OutputMarshaller;
import org.drools.marshalling.impl.PersisterEnums;
import org.drools.marshalling.impl.ProtobufInputMarshaller;
import org.drools.marshalling.impl.ProtobufMessages;
import org.drools.marshalling.impl.ProtobufMessages.Timers.Timer;
import org.drools.marshalling.impl.ProtobufOutputMarshaller;
import org.drools.marshalling.impl.TimersInputMarshaller;
import org.drools.marshalling.impl.TimersOutputMarshaller;
import org.drools.time.Job;
import org.drools.time.JobContext;
import org.drools.time.JobHandle;
import org.kie.time.SessionClock;
import org.drools.time.TimerService;
import org.drools.time.Trigger;
import org.drools.time.impl.IntervalTrigger;
import org.drools.time.impl.JDKTimerService.JDKJobHandle;
import org.jbpm.marshalling.impl.JBPMMessages;
import org.jbpm.marshalling.impl.ProcessMarshallerImpl;
import org.jbpm.marshalling.impl.ProtobufProcessMarshaller;
import org.jbpm.process.instance.InternalProcessRuntime;
import org.jbpm.process.instance.ProcessInstance;
import org.jbpm.workflow.instance.impl.WorkflowProcessInstanceImpl;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
* @author <a href="mailto:[email protected]">Kris Verlaenen</a>
*/
public class TimerManager {
private static final Logger logger = LoggerFactory.getLogger(TimerManager.class);
private long timerId = 0;
private InternalKnowledgeRuntime kruntime;
private TimerService timerService;
private Map<Long, TimerInstance> timers = new ConcurrentHashMap<Long, TimerInstance>();
public static final Job processJob = new ProcessJob();
public TimerManager(InternalKnowledgeRuntime kruntime,
TimerService timerService) {
this.kruntime = kruntime;
this.timerService = timerService;
}
public void registerTimer(final TimerInstance timer,
ProcessInstance processInstance) {
try {
kruntime.startOperation();
if ( !kruntime.getActionQueue().isEmpty() ) {
kruntime.executeQueuedActions();
}
timer.setId( ++timerId );
timer.setProcessInstanceId( processInstance.getId() );
timer.setActivated( new Date() );
Trigger trigger = new IntervalTrigger( timerService.getCurrentTime(),
null,
null,
timer.getRepeatLimit(),
timer.getDelay(),
timer.getPeriod(),
null,
null );
ProcessJobContext ctx = new ProcessJobContext( timer,
trigger,
processInstance.getId(),
this.kruntime );
JobHandle jobHandle = this.timerService.scheduleJob( processJob,
ctx,
trigger );
timer.setJobHandle( jobHandle );
timers.put( timer.getId(),
timer );
} finally {
kruntime.endOperation();
}
}
public void internalAddTimer(final TimerInstance timer) {
long delay;
Date lastTriggered = timer.getLastTriggered();
if ( lastTriggered == null ) {
Date activated = timer.getActivated();
Date now = new Date();
long timespan = now.getTime() - activated.getTime();
delay = timer.getDelay() - timespan;
if ( delay < 0 ) {
delay = 0;
}
} else {
Date now = new Date();
long timespan = now.getTime() - lastTriggered.getTime();
delay = timespan - timer.getPeriod();
if ( delay < 0 ) {
delay = 0;
}
}
Trigger trigger = new IntervalTrigger( timerService.getCurrentTime(),
null,
null,
-1,
delay,
timer.getPeriod(),
null,
null ) ;
ProcessJobContext ctx = new ProcessJobContext( timer,
trigger,
timer.getProcessInstanceId(),
this.kruntime );
JobHandle jobHandle = this.timerService.scheduleJob( processJob,
ctx,
trigger );
timer.setJobHandle( jobHandle );
timers.put( timer.getId(),
timer );
}
public void cancelTimer(long timerId) {
TimerInstance timer = timers.remove( timerId );
if ( timer != null ) {
timerService.removeJob( timer.getJobHandle() );
}
}
public void dispose() {
// for ( TimerInstance timer : timers.values() ) {
// System.out.println( timer );
// timerService.removeJob( timer.getJobHandle() );
// }
for ( Iterator<TimerInstance> it = timers.values().iterator(); it.hasNext(); ) {
TimerInstance timer = it.next();
timerService.removeJob( timer.getJobHandle() );
it.remove();
}
timerService.shutdown();
}
public TimerService getTimerService() {
return this.timerService;
}
public Collection<TimerInstance> getTimers() {
return timers.values();
}
public Map<Long, TimerInstance> getTimerMap() {
return this.timers;
}
public long internalGetTimerId() {
return timerId;
}
public void internalSetTimerId(long timerId) {
this.timerId = timerId;
}
public void setTimerService(TimerService timerService) {
this.timerService = timerService;
}
public static class ProcessTimerOutputMarshaller implements TimersOutputMarshaller {
public void write(JobContext ctx, MarshallerWriteContext outCtx) throws IOException {
outCtx.writeShort( PersisterEnums.PROCESS_TIMER );
ProcessJobContext pctx = ( ProcessJobContext ) ctx;
outCtx.writeLong( pctx.getProcessInstanceId() );
OutputMarshaller.writeTrigger( pctx.getTrigger(), outCtx );
ProcessMarshallerImpl.writeTimer( outCtx, pctx.getTimer() );
}
public Timer serialize(JobContext jobCtx,
MarshallerWriteContext outputCtx) {
ProcessJobContext pctx = ( ProcessJobContext ) jobCtx;
return ProtobufMessages.Timers.Timer.newBuilder()
.setType( ProtobufMessages.Timers.TimerType.PROCESS )
.setExtension( JBPMMessages.procTimer,
JBPMMessages.ProcessTimer.newBuilder()
.setTimer( ProtobufProcessMarshaller.writeTimer( outputCtx, pctx.getTimer() ) )
.setTrigger( ProtobufOutputMarshaller.writeTrigger( pctx.getTrigger(), outputCtx ) )
.build() )
.build();
}
}
public static class ProcessTimerInputMarshaller implements TimersInputMarshaller {
public void read(MarshallerReaderContext inCtx) throws IOException, ClassNotFoundException {
TimerService ts = inCtx.wm.getTimerService();
long processInstanceId = inCtx.readLong();
Trigger trigger = InputMarshaller.readTrigger( inCtx );
TimerInstance timerInstance = ProcessMarshallerImpl.readTimer( inCtx );
TimerManager tm = ((InternalProcessRuntime)inCtx.wm.getProcessRuntime()).getTimerManager();
// check if the timer instance is not already registered to avoid duplicated timers
if (!tm.getTimerMap().containsKey(timerInstance.getId())) {
ProcessJobContext pctx = new ProcessJobContext(timerInstance, trigger, processInstanceId, inCtx.wm.getKnowledgeRuntime());
Date date = trigger.hasNextFireTime();
if (date != null) {
long then = date.getTime();
long now = pctx.getKnowledgeRuntime().getSessionClock().getCurrentTime();
// overdue timer
if (then < now) {
trigger = new OverdueTrigger(trigger, pctx.getKnowledgeRuntime());
}
}
JobHandle jobHandle = ts.scheduleJob( processJob,
pctx,
trigger );
timerInstance.setJobHandle( jobHandle );
pctx.setJobHandle( jobHandle );
tm.getTimerMap().put( timerInstance.getId(),
timerInstance );
}
}
public void deserialize(MarshallerReaderContext inCtx,
Timer _timer) throws ClassNotFoundException {
JBPMMessages.ProcessTimer _ptimer = _timer.getExtension( JBPMMessages.procTimer );
TimerService ts = inCtx.wm.getTimerService();
long processInstanceId = _ptimer.getTimer().getProcessInstanceId();
Trigger trigger = ProtobufInputMarshaller.readTrigger( inCtx, _ptimer.getTrigger() );
TimerInstance timerInstance = ProtobufProcessMarshaller.readTimer( inCtx, _ptimer.getTimer() );
TimerManager tm = ((InternalProcessRuntime)inCtx.wm.getProcessRuntime()).getTimerManager();
// check if the timer instance is not already registered to avoid duplicated timers
if (!tm.getTimerMap().containsKey(timerInstance.getId())) {
ProcessJobContext pctx = new ProcessJobContext(timerInstance, trigger, processInstanceId, inCtx.wm.getKnowledgeRuntime());
Date date = trigger.hasNextFireTime();
if (date != null) {
long then = date.getTime();
long now = pctx.getKnowledgeRuntime().getSessionClock().getCurrentTime();
// overdue timer
if (then < now) {
trigger = new OverdueTrigger(trigger, pctx.getKnowledgeRuntime());
}
}
JobHandle jobHandle = ts.scheduleJob( processJob,
pctx,
trigger );
timerInstance.setJobHandle( jobHandle );
pctx.setJobHandle( jobHandle );
tm.getTimerMap().put( timerInstance.getId(),
timerInstance );
}
}
}
public static class ProcessJob
implements
Job {
public void execute(JobContext c) {
ProcessJobContext ctx = (ProcessJobContext) c;
Long processInstanceId = ctx.getProcessInstanceId();
InternalKnowledgeRuntime kruntime = ctx.getKnowledgeRuntime();
try {
if ( processInstanceId == null ) {
throw new IllegalArgumentException( "Could not find process instance for timer " );
}
ctx.getTimer().setLastTriggered( new Date( ctx.getKnowledgeRuntime().<SessionClock>getSessionClock().getCurrentTime() ) );
((InternalProcessRuntime) kruntime.getProcessRuntime())
.getSignalManager().signalEvent( processInstanceId,
"timerTriggered",
ctx.getTimer() );
TimerManager tm = ((InternalProcessRuntime)ctx.getKnowledgeRuntime().getProcessRuntime()).getTimerManager();
if ( ctx.getTimer().getPeriod() == 0 ) {
tm.getTimerMap().remove( ctx.getTimer().getId() );
}
} catch (Throwable e) {
logger.error("Error when executing timer job", e);
WorkflowProcessInstanceImpl processInstance = (WorkflowProcessInstanceImpl) kruntime.getProcessInstance(processInstanceId);
if (processInstance != null && ctx.getTimer().getPeriod() == 0) {
processInstance.setState(ProcessInstance.STATE_ABORTED);
}
}
}
}
public static class ProcessJobContext
implements
JobContext {
private static final long serialVersionUID = 476843895176221627L;
private Long processInstanceId;
private InternalKnowledgeRuntime kruntime;
private TimerInstance timer;
private Trigger trigger;
private JobHandle jobHandle;
public ProcessJobContext(final TimerInstance timer,
final Trigger trigger,
final Long processInstanceId,
final InternalKnowledgeRuntime kruntime) {
this.timer = timer;
this.trigger = trigger;
this.processInstanceId = processInstanceId;
this.kruntime = kruntime;
}
public Long getProcessInstanceId() {
return processInstanceId;
}
public InternalKnowledgeRuntime getKnowledgeRuntime() {
return kruntime;
}
public Trigger getTrigger() {
return trigger;
}
public JobHandle getJobHandle() {
return this.jobHandle;
}
public void setJobHandle(JobHandle jobHandle) {
this.jobHandle = jobHandle;
}
public TimerInstance getTimer() {
return timer;
}
}
/**
* Overdue-aware trigger that introduces a fixed delay to allow completion of session initialization
*
*/
public static class OverdueTrigger implements Trigger {
private static final long serialVersionUID = -2368476147776308013L;
public static final long OVERDUE_DELAY = Long.parseLong(System.getProperty("jbpm.overdue.timer.delay", "2000"));
private Trigger orig;
private InternalKnowledgeRuntime kruntime;
public OverdueTrigger(Trigger orig, InternalKnowledgeRuntime kruntime) {
this.orig = orig;
this.kruntime = kruntime;
}
public Date hasNextFireTime() {
Date date = orig.hasNextFireTime();
if (date == null) {
return null;
}
long then = date.getTime();
long now = kruntime.getSessionClock().getCurrentTime();
// overdue timer
if (then < now) {
return new Date((now + OVERDUE_DELAY));
} else {
return orig.hasNextFireTime();
}
}
public Date nextFireTime() {
return orig.nextFireTime();
}
}
}
|
- remove timer in case no more triggers are available so node instance can be removed
|
jbpm-flow/src/main/java/org/jbpm/process/instance/timer/TimerManager.java
|
- remove timer in case no more triggers are available so node instance can be removed
|
<ide><path>jbpm-flow/src/main/java/org/jbpm/process/instance/timer/TimerManager.java
<ide> }
<ide>
<ide> ctx.getTimer().setLastTriggered( new Date( ctx.getKnowledgeRuntime().<SessionClock>getSessionClock().getCurrentTime() ) );
<del>
<add>
<add> // if there is no more trigger reset period on timer so its node instance can be removed
<add> if (ctx.getTrigger().hasNextFireTime() == null) {
<add> ctx.getTimer().setPeriod(0);
<add> }
<ide> ((InternalProcessRuntime) kruntime.getProcessRuntime())
<ide> .getSignalManager().signalEvent( processInstanceId,
<ide> "timerTriggered",
<ide> if ( ctx.getTimer().getPeriod() == 0 ) {
<ide> tm.getTimerMap().remove( ctx.getTimer().getId() );
<ide> }
<add>
<ide> } catch (Throwable e) {
<ide> logger.error("Error when executing timer job", e);
<ide> WorkflowProcessInstanceImpl processInstance = (WorkflowProcessInstanceImpl) kruntime.getProcessInstance(processInstanceId);
|
|
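The commit in the row above marks a one-shot timer for removal by resetting its period to zero once the trigger reports no further fire times; the existing `getPeriod() == 0` branch in `ProcessJob.execute` then drops it from the timer map. Below is a minimal, self-contained sketch of that cleanup pattern; `SimpleTimer`, `SimpleTrigger`, and `TimerCleanupSketch` are illustrative stand-ins, not the real Drools/jBPM types.

```java
import java.util.Date;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Illustrative stand-ins for the jBPM/Drools types; field and method names are assumptions.
class SimpleTimer {
    final long id;
    long period; // 0 means "no repeat", i.e. safe to remove once it has fired

    SimpleTimer(long id, long period) {
        this.id = id;
        this.period = period;
    }
}

interface SimpleTrigger {
    Date hasNextFireTime(); // null when the trigger will never fire again
}

class TimerCleanupSketch {
    private final Map<Long, SimpleTimer> timers = new ConcurrentHashMap<>();

    void register(SimpleTimer timer) {
        timers.put(timer.id, timer);
    }

    void onTimerFired(SimpleTimer timer, SimpleTrigger trigger) {
        // If the trigger has no further fire times, reset the period so the
        // generic cleanup below treats this as a finished one-shot timer.
        if (trigger.hasNextFireTime() == null) {
            timer.period = 0;
        }

        // ... signal "timerTriggered" to the owning process instance here ...

        // Shared removal path for one-shot timers and exhausted repeating timers.
        if (timer.period == 0) {
            timers.remove(timer.id);
        }
    }
}
```

Routing everything through `period == 0` means one-shot timers and exhausted repeating timers share a single removal path, which is the point of the change in the diff above.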
Java
|
unlicense
|
error: pathspec 'Trabalho3/Netgifx/src/br/com/fiap/utils/ConversorImagensUtil.java' did not match any file(s) known to git
|
c6dc667462b4b7247964cc3480dbcea4469915a4
| 1 |
pedrohnog/Trabalhos-FIAP,pedrohnog/Trabalhos-FIAP
|
package br.com.fiap.utils;
import java.io.File;
import java.io.IOException;
import javax.imageio.ImageIO;
import br.com.fiap.entity.Gif;
public class ConversorImagensUtil {
public static void converterGifParaPng(Gif gif) throws IOException {
File entrada = new File(gif.getCaminho() + ".gif");
File saida = new File(gif.getCaminho() + ".png");
ImageIO.write(ImageIO.read(entrada), "png", saida);
}
}
|
Trabalho3/Netgifx/src/br/com/fiap/utils/ConversorImagensUtil.java
|
GIF to static image converter
|
Trabalho3/Netgifx/src/br/com/fiap/utils/ConversorImagensUtil.java
|
GIF to static image converter
|
<ide><path>Trabalho3/Netgifx/src/br/com/fiap/utils/ConversorImagensUtil.java
<add>package br.com.fiap.utils;
<add>
<add>import java.io.File;
<add>import java.io.IOException;
<add>
<add>import javax.imageio.ImageIO;
<add>
<add>import br.com.fiap.entity.Gif;
<add>
<add>public class ConversorImagensUtil {
<add>
<add> public static void converterGifParaPng(Gif gif) throws IOException {
<add> File entrada = new File(gif.getCaminho() + ".gif");
<add> File saida = new File(gif.getCaminho() + ".png");
<add>
<add> ImageIO.write(ImageIO.read(entrada), "png", saida);
<add> }
<add>
<add>}
|
|
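For context on the row above: the converter reads `<caminho>.gif` and writes `<caminho>.png` via `ImageIO`, which decodes only the first frame of an animated GIF, hence the "static" result. A hypothetical call site is sketched below; the `Gif` entity's `setCaminho` setter and the extension-less path convention are assumptions, not part of the original code.

```java
import java.io.IOException;

import br.com.fiap.entity.Gif;
import br.com.fiap.utils.ConversorImagensUtil;

public class ConversorImagensUtilDemo {

    public static void main(String[] args) {
        Gif gif = new Gif();
        // Assumption: the Gif entity exposes setCaminho and stores the path
        // without an extension, since converterGifParaPng appends ".gif"/".png".
        gif.setCaminho("/tmp/exemplo");

        try {
            // Reads /tmp/exemplo.gif and writes /tmp/exemplo.png
            // (only the first frame, since ImageIO.read returns a single image).
            ConversorImagensUtil.converterGifParaPng(gif);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
```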
Java
|
apache-2.0
|
823842394808b72996c159e7a60cb121d910b1bf
| 0 |
minyans/optimize
|
package edu.jhu.hlt.optimize;
import static org.junit.Assert.*;
import org.junit.Test;
import edu.jhu.hlt.optimize.BottouSchedule.BottouSchedulePrm;
import edu.jhu.hlt.optimize.SGD.SGDPrm;
import edu.jhu.hlt.optimize.function.AbstractDifferentiableBatchFunction;
import edu.jhu.hlt.optimize.function.DifferentiableBatchFunction;
import edu.jhu.hlt.optimize.function.ValueGradient;
import edu.jhu.hlt.optimize.functions.SumSquares;
import edu.jhu.hlt.util.JUnitUtils;
import edu.jhu.hlt.util.math.Vectors;
import edu.jhu.prim.util.random.Prng;
import edu.jhu.prim.vector.IntDoubleDenseVector;
import edu.jhu.prim.vector.IntDoubleVector;
public class SGDTest extends AbstractBatchOptimizerTest {
@Override
protected Optimizer<DifferentiableBatchFunction> getOptimizer() {
SGDPrm prm = new SGDPrm();
prm.sched.setEta0(0.1 * 10);
prm.numPasses = 100;
prm.batchSize = 1;
prm.autoSelectLr = false;
return new SGD(prm);
}
@Test
public void testSgdAutoSelectLr() {
{
// Test with the initial learning rate too small
BottouSchedulePrm sched = new BottouSchedulePrm();
sched.initialLr = 0.005;
sched.lambda = 0.1;
runSgdAutoSelectLr(new BottouSchedule(sched));
}
{
// Test with the initial learning rate too large
BottouSchedulePrm sched = new BottouSchedulePrm();
sched.initialLr = 10;
sched.lambda = 0.01;
runSgdAutoSelectLr(new BottouSchedule(sched));
}
}
public static void runSgdAutoSelectLr(GainSchedule sched) {
SGDPrm prm = new SGDPrm();
prm.sched = sched;
prm.numPasses = 7;
prm.batchSize = 1;
prm.autoSelectLr = true;
SGD opt = new SGD(prm);
double[] initial = new double[] { 9, 2, -7};
double[] offsets = new double[] { 3, -5, 11};
opt.maximize(negate(bf(new SumSquares(offsets))), new IntDoubleDenseVector(initial));
double[] max = initial;
Vectors.scale(offsets, -1.0);
JUnitUtils.assertArrayEquals(offsets, max, 1e-1);
}
private static class MyFnForAvg extends AbstractDifferentiableBatchFunction implements DifferentiableBatchFunction {
@Override
public ValueGradient getValueGradient(IntDoubleVector point, int[] batch) {
IntDoubleDenseVector g = new IntDoubleDenseVector();
double val = 0;
for (int i=0; i<batch.length; i++) {
int c = (batch[i] % 2 == 0) ? 1 : -1;
val += c;
g.set(0, c);
g.set(1, -c);
}
return new ValueGradient(val, g);
}
@Override
public IntDoubleVector getGradient(IntDoubleVector point, int[] batch) {
return getValueGradient(point, batch).getGradient();
}
@Override
public double getValue(IntDoubleVector point, int[] batch) {
return getValueGradient(point, batch).getValue();
}
@Override
public int getNumDimensions() {
return 2;
}
@Override
public int getNumExamples() {
return 5;
}
}
@Test
public void testAveraging() {
Prng.seed(123456789101112l);
SGDPrm prm = new SGDPrm();
prm.sched.setEta0(0.1 * 10);
prm.numPasses = 1;
prm.batchSize = 1;
prm.autoSelectLr = false;
// Use a schedule which always has learning rate 1.0.
BottouSchedulePrm sPrm = new BottouSchedulePrm();
sPrm.initialLr = 1;
sPrm.lambda = 0;
sPrm.power = 1;
prm.sched = new BottouSchedule(sPrm);
prm.averaging = false;
{
SGD sgd = new SGD(prm);
IntDoubleDenseVector point = new IntDoubleDenseVector(2);
sgd.optimize(new MyFnForAvg(), point, true, null);
System.out.println(point);
assertEquals(1.0, point.get(0), 1e-13);
assertEquals(-1.0, point.get(1), 1e-13);
}
prm.passToStartAvg = 0;
prm.averaging = true;
{
SGD sgd = new SGD(prm);
IntDoubleDenseVector point = new IntDoubleDenseVector(2);
sgd.optimize(new MyFnForAvg(), point, true, null);
System.out.println(point);
assertEquals(-(1 + 0 + 1 + 0 + 1) / 5.0, point.get(0), 1e-13);
assertEquals(-(-1 + -2 + -1 + 0 + 1) / 5.0, point.get(1), 1e-13);
}
}
// Below, L1 regularization with SGD doesn't land at the same spot as SGDFobos.
@Test
public void testL1RegularizedOffsetNegSumSquaresMax() {
Optimizer<DifferentiableBatchFunction> opt = getRegularizedOptimizer(1.0, 0.0);
double[] initial = new double[] { 9, 2, -7};
double[] offsets = new double[] { 0.4, -5, 11};
double[] expected = new double[]{-0.00130530530530, 4.5, -10.5};
DifferentiableBatchFunction f = negate(bf(new SumSquares(offsets)));
JUnitUtils.assertArrayEquals(new double[]{-0.797, 1.0, -1.0},
f.getGradient(new IntDoubleDenseVector(expected)).toNativeArray(),
1e-3);
opt.maximize(f, new IntDoubleDenseVector(initial));
double[] max = initial;
Vectors.scale(offsets, -1.0);
JUnitUtils.assertArrayEquals(expected, max, 1e-10);
}
@Test
public void testL1RegularizedOffsetNegSumSquaresMin() {
Optimizer<DifferentiableBatchFunction> opt = getRegularizedOptimizer(1.0, 0.0);
double[] initial = new double[] { 9, 2, -7};
double[] offsets = new double[] { 0.4, -5, 11};
double[] expected = new double[]{-0.00130530530530, 4.5, -10.5};
DifferentiableBatchFunction f = bf(new SumSquares(offsets));
JUnitUtils.assertArrayEquals(new double[]{0.797, -1.0, 1.0},
f.getGradient(new IntDoubleDenseVector(expected)).toNativeArray(),
1e-3);
opt.minimize(f, new IntDoubleDenseVector(initial));
double[] max = initial;
Vectors.scale(offsets, -1.0);
JUnitUtils.assertArrayEquals(expected, max, 1e-10);
}
}
|
src/test/java/edu/jhu/hlt/optimize/SGDTest.java
|
package edu.jhu.hlt.optimize;
import static org.junit.Assert.*;
import org.junit.Test;
import edu.jhu.hlt.optimize.BottouSchedule.BottouSchedulePrm;
import edu.jhu.hlt.optimize.SGD.SGDPrm;
import edu.jhu.hlt.optimize.function.AbstractDifferentiableBatchFunction;
import edu.jhu.hlt.optimize.function.DifferentiableBatchFunction;
import edu.jhu.hlt.optimize.function.ValueGradient;
import edu.jhu.hlt.optimize.functions.SumSquares;
import edu.jhu.hlt.util.JUnitUtils;
import edu.jhu.hlt.util.math.Vectors;
import edu.jhu.prim.vector.IntDoubleDenseVector;
import edu.jhu.prim.vector.IntDoubleVector;
public class SGDTest extends AbstractBatchOptimizerTest {
@Override
protected Optimizer<DifferentiableBatchFunction> getOptimizer() {
SGDPrm prm = new SGDPrm();
prm.sched.setEta0(0.1 * 10);
prm.numPasses = 100;
prm.batchSize = 1;
prm.autoSelectLr = false;
return new SGD(prm);
}
@Test
public void testSgdAutoSelectLr() {
{
// Test with the initial learning rate too small
BottouSchedulePrm sched = new BottouSchedulePrm();
sched.initialLr = 0.005;
sched.lambda = 0.1;
runSgdAutoSelectLr(new BottouSchedule(sched));
}
{
// Test with the initial learning rate too large
BottouSchedulePrm sched = new BottouSchedulePrm();
sched.initialLr = 10;
sched.lambda = 0.01;
runSgdAutoSelectLr(new BottouSchedule(sched));
}
}
public static void runSgdAutoSelectLr(GainSchedule sched) {
SGDPrm prm = new SGDPrm();
prm.sched = sched;
prm.numPasses = 7;
prm.batchSize = 1;
prm.autoSelectLr = true;
SGD opt = new SGD(prm);
double[] initial = new double[] { 9, 2, -7};
double[] offsets = new double[] { 3, -5, 11};
opt.maximize(negate(bf(new SumSquares(offsets))), new IntDoubleDenseVector(initial));
double[] max = initial;
Vectors.scale(offsets, -1.0);
JUnitUtils.assertArrayEquals(offsets, max, 1e-1);
}
private static class MyFnForAvg extends AbstractDifferentiableBatchFunction implements DifferentiableBatchFunction {
@Override
public ValueGradient getValueGradient(IntDoubleVector point, int[] batch) {
IntDoubleDenseVector g = new IntDoubleDenseVector();
double val = 0;
for (int i=0; i<batch.length; i++) {
int c = (batch[i] % 2 == 0) ? 1 : -1;
val += c;
g.set(0, c);
g.set(1, -c);
}
return new ValueGradient(val, g);
}
@Override
public IntDoubleVector getGradient(IntDoubleVector point, int[] batch) {
return getValueGradient(point, batch).getGradient();
}
@Override
public double getValue(IntDoubleVector point, int[] batch) {
return getValueGradient(point, batch).getValue();
}
@Override
public int getNumDimensions() {
return 2;
}
@Override
public int getNumExamples() {
return 5;
}
}
@Test
public void testAveraging() {
SGDPrm prm = new SGDPrm();
prm.sched.setEta0(0.1 * 10);
prm.numPasses = 1;
prm.batchSize = 1;
prm.autoSelectLr = false;
// Use a schedule which always has learning rate 1.0.
BottouSchedulePrm sPrm = new BottouSchedulePrm();
sPrm.initialLr = 1;
sPrm.lambda = 0;
sPrm.power = 1;
prm.sched = new BottouSchedule(sPrm);
prm.averaging = false;
{
SGD sgd = new SGD(prm);
IntDoubleDenseVector point = new IntDoubleDenseVector(2);
sgd.optimize(new MyFnForAvg(), point, true, null);
System.out.println(point);
assertEquals(1.0, point.get(0), 1e-13);
assertEquals(-1.0, point.get(1), 1e-13);
}
prm.passToStartAvg = 0;
prm.averaging = true;
{
SGD sgd = new SGD(prm);
IntDoubleDenseVector point = new IntDoubleDenseVector(2);
sgd.optimize(new MyFnForAvg(), point, true, null);
System.out.println(point);
assertEquals(-(1 + 0 + 1 + 0 + 1) / 5.0, point.get(0), 1e-13);
assertEquals(-(-1 + -2 + -1 + 0 + 1) / 5.0, point.get(1), 1e-13);
}
}
// Below, L1 regularization with SGD doesn't land at the same spot as SGDFobos.
@Test
public void testL1RegularizedOffsetNegSumSquaresMax() {
Optimizer<DifferentiableBatchFunction> opt = getRegularizedOptimizer(1.0, 0.0);
double[] initial = new double[] { 9, 2, -7};
double[] offsets = new double[] { 0.4, -5, 11};
double[] expected = new double[]{-0.00130530530530, 4.5, -10.5};
DifferentiableBatchFunction f = negate(bf(new SumSquares(offsets)));
JUnitUtils.assertArrayEquals(new double[]{-0.797, 1.0, -1.0},
f.getGradient(new IntDoubleDenseVector(expected)).toNativeArray(),
1e-3);
opt.maximize(f, new IntDoubleDenseVector(initial));
double[] max = initial;
Vectors.scale(offsets, -1.0);
JUnitUtils.assertArrayEquals(expected, max, 1e-10);
}
@Test
public void testL1RegularizedOffsetNegSumSquaresMin() {
Optimizer<DifferentiableBatchFunction> opt = getRegularizedOptimizer(1.0, 0.0);
double[] initial = new double[] { 9, 2, -7};
double[] offsets = new double[] { 0.4, -5, 11};
double[] expected = new double[]{-0.00130530530530, 4.5, -10.5};
DifferentiableBatchFunction f = bf(new SumSquares(offsets));
JUnitUtils.assertArrayEquals(new double[]{0.797, -1.0, 1.0},
f.getGradient(new IntDoubleDenseVector(expected)).toNativeArray(),
1e-3);
opt.minimize(f, new IntDoubleDenseVector(initial));
double[] max = initial;
Vectors.scale(offsets, -1.0);
JUnitUtils.assertArrayEquals(expected, max, 1e-10);
}
}
|
Fixing broken unit test
|
src/test/java/edu/jhu/hlt/optimize/SGDTest.java
|
Fixing broken unit test
|
<ide><path>src/test/java/edu/jhu/hlt/optimize/SGDTest.java
<ide> import edu.jhu.hlt.optimize.functions.SumSquares;
<ide> import edu.jhu.hlt.util.JUnitUtils;
<ide> import edu.jhu.hlt.util.math.Vectors;
<add>import edu.jhu.prim.util.random.Prng;
<ide> import edu.jhu.prim.vector.IntDoubleDenseVector;
<ide> import edu.jhu.prim.vector.IntDoubleVector;
<ide>
<ide>
<ide> @Test
<ide> public void testAveraging() {
<add> Prng.seed(123456789101112l);
<ide> SGDPrm prm = new SGDPrm();
<ide> prm.sched.setEta0(0.1 * 10);
<ide> prm.numPasses = 1;
|
|
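The fix in the row above makes `testAveraging` deterministic by seeding the shared `Prng` before the optimizer draws its batch order. The sketch below shows the same idea with a plain `java.util.Random` standing in for `Prng`; the class and method names are illustrative only.

```java
import static org.junit.Assert.assertEquals;

import java.util.Random;

import org.junit.Test;

public class SeededShuffleTest {

    // Stand-in for an RNG-dependent computation; the real test's randomness
    // comes from edu.jhu.prim.util.random.Prng inside the SGD batch sampler.
    private static double noisyObjective(Random rng) {
        double sum = 0;
        for (int i = 0; i < 5; i++) {
            sum += rng.nextBoolean() ? 1 : -1; // value depends on the RNG stream
        }
        return sum;
    }

    @Test
    public void testDeterministicWithFixedSeed() {
        // Seeding pins the pseudo-random stream, so the "random" result is
        // reproducible across runs, which is what Prng.seed(...) buys the real test.
        assertEquals(noisyObjective(new Random(123456789101112L)),
                     noisyObjective(new Random(123456789101112L)),
                     0.0);
    }
}
```

Fixing the seed pins the whole pseudo-random stream, so any batch ordering derived from it is reproducible across test runs.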
Java
|
apache-2.0
|
242bfdaa9c8fb8cb4c7cac01c1403d0b89c41f5d
| 0 |
murador/ignite,agura/incubator-ignite,SharplEr/ignite,dlnufox/ignite,dream-x/ignite,SharplEr/ignite,VladimirErshov/ignite,endian675/ignite,voipp/ignite,akuznetsov-gridgain/ignite,irudyak/ignite,irudyak/ignite,ntikhonov/ignite,NSAmelchev/ignite,vldpyatkov/ignite,wmz7year/ignite,agoncharuk/ignite,samaitra/ignite,amirakhmedov/ignite,apacheignite/ignite,apache/ignite,ptupitsyn/ignite,apache/ignite,andrey-kuznetsov/ignite,akuznetsov-gridgain/ignite,dlnufox/ignite,tkpanther/ignite,vadopolski/ignite,ptupitsyn/ignite,apacheignite/ignite,agoncharuk/ignite,amirakhmedov/ignite,arijitt/incubator-ignite,agoncharuk/ignite,chandresh-pancholi/ignite,ryanzz/ignite,amirakhmedov/ignite,StalkXT/ignite,SharplEr/ignite,leveyj/ignite,WilliamDo/ignite,kromulan/ignite,dmagda/incubator-ignite,nizhikov/ignite,svladykin/ignite,daradurvs/ignite,BiryukovVA/ignite,louishust/incubator-ignite,ilantukh/ignite,ntikhonov/ignite,agura/incubator-ignite,dream-x/ignite,ashutakGG/incubator-ignite,ryanzz/ignite,louishust/incubator-ignite,vldpyatkov/ignite,agoncharuk/ignite,thuTom/ignite,agura/incubator-ignite,shurun19851206/ignite,f7753/ignite,andrey-kuznetsov/ignite,xtern/ignite,VladimirErshov/ignite,andrey-kuznetsov/ignite,dream-x/ignite,ptupitsyn/ignite,StalkXT/ignite,ntikhonov/ignite,f7753/ignite,SomeFire/ignite,akuznetsov-gridgain/ignite,kromulan/ignite,f7753/ignite,nivanov/ignite,afinka77/ignite,vldpyatkov/ignite,vladisav/ignite,xtern/ignite,iveselovskiy/ignite,amirakhmedov/ignite,sk0x50/ignite,shroman/ignite,shroman/ignite,vsisko/incubator-ignite,SomeFire/ignite,leveyj/ignite,vsuslov/incubator-ignite,WilliamDo/ignite,gargvish/ignite,adeelmahmood/ignite,voipp/ignite,xtern/ignite,ilantukh/ignite,gargvish/ignite,svladykin/ignite,rfqu/ignite,wmz7year/ignite,apache/ignite,agura/incubator-ignite,psadusumilli/ignite,apacheignite/ignite,gargvish/ignite,apache/ignite,ryanzz/ignite,dmagda/incubator-ignite,abhishek-ch/incubator-ignite,avinogradovgg/ignite,gridgain/apache-ignite,pperalta/ignite,kromulan/ignite,xtern/ignite,rfqu/ignite,DoudTechData/ignite,chandresh-pancholi/ignite,mcherkasov/ignite,StalkXT/ignite,andrey-kuznetsov/ignite,StalkXT/ignite,leveyj/ignite,tkpanther/ignite,apacheignite/ignite,apache/ignite,ntikhonov/ignite,voipp/ignite,apacheignite/ignite,NSAmelchev/ignite,a1vanov/ignite,sylentprayer/ignite,ptupitsyn/ignite,kromulan/ignite,irudyak/ignite,thuTom/ignite,sk0x50/ignite,rfqu/ignite,afinka77/ignite,ascherbakoff/ignite,irudyak/ignite,ptupitsyn/ignite,zzcclp/ignite,shroman/ignite,thuTom/ignite,kidaa/incubator-ignite,vadopolski/ignite,avinogradovgg/ignite,samaitra/ignite,alexzaitzev/ignite,pperalta/ignite,apache/ignite,alexzaitzev/ignite,NSAmelchev/ignite,murador/ignite,xtern/ignite,louishust/incubator-ignite,leveyj/ignite,agura/incubator-ignite,xtern/ignite,DoudTechData/ignite,DoudTechData/ignite,ptupitsyn/ignite,gargvish/ignite,endian675/ignite,kromulan/ignite,vsuslov/incubator-ignite,vsisko/incubator-ignite,ascherbakoff/ignite,apacheignite/ignite,SharplEr/ignite,f7753/ignite,ryanzz/ignite,ascherbakoff/ignite,samaitra/ignite,ntikhonov/ignite,gridgain/apache-ignite,sk0x50/ignite,ilantukh/ignite,f7753/ignite,sylentprayer/ignite,nizhikov/ignite,vsuslov/incubator-ignite,xtern/ignite,vldpyatkov/ignite,zzcclp/ignite,BiryukovVA/ignite,thuTom/ignite,kidaa/incubator-ignite,sk0x50/ignite,adeelmahmood/ignite,nizhikov/ignite,pperalta/ignite,nizhikov/ignite,louishust/incubator-ignite,a1vanov/ignite,murador/ignite,nizhikov/ignite,tkpanther/ignite,ntikhonov/ignite,alexzaitzev/ignite,ilantukh/ignite,VladimirErshov/ignite,ryanzz/ignite
,apacheignite/ignite,dlnufox/ignite,ptupitsyn/ignite,murador/ignite,ashutakGG/incubator-ignite,wmz7year/ignite,ilantukh/ignite,vsisko/incubator-ignite,shroman/ignite,arijitt/incubator-ignite,chandresh-pancholi/ignite,thuTom/ignite,SharplEr/ignite,agoncharuk/ignite,sylentprayer/ignite,StalkXT/ignite,abhishek-ch/incubator-ignite,samaitra/ignite,rfqu/ignite,ashutakGG/incubator-ignite,ascherbakoff/ignite,chandresh-pancholi/ignite,VladimirErshov/ignite,SharplEr/ignite,ilantukh/ignite,nizhikov/ignite,amirakhmedov/ignite,ashutakGG/incubator-ignite,kidaa/incubator-ignite,alexzaitzev/ignite,wmz7year/ignite,psadusumilli/ignite,chandresh-pancholi/ignite,wmz7year/ignite,a1vanov/ignite,daradurvs/ignite,wmz7year/ignite,ascherbakoff/ignite,mcherkasov/ignite,DoudTechData/ignite,pperalta/ignite,pperalta/ignite,ryanzz/ignite,gridgain/apache-ignite,kromulan/ignite,sylentprayer/ignite,agoncharuk/ignite,gridgain/apache-ignite,zzcclp/ignite,SharplEr/ignite,thuTom/ignite,a1vanov/ignite,NSAmelchev/ignite,BiryukovVA/ignite,BiryukovVA/ignite,DoudTechData/ignite,SharplEr/ignite,pperalta/ignite,irudyak/ignite,andrey-kuznetsov/ignite,shroman/ignite,daradurvs/ignite,dream-x/ignite,andrey-kuznetsov/ignite,voipp/ignite,svladykin/ignite,vladisav/ignite,psadusumilli/ignite,daradurvs/ignite,shroman/ignite,kidaa/incubator-ignite,voipp/ignite,gridgain/apache-ignite,afinka77/ignite,DoudTechData/ignite,BiryukovVA/ignite,abhishek-ch/incubator-ignite,mcherkasov/ignite,daradurvs/ignite,daradurvs/ignite,NSAmelchev/ignite,vsuslov/incubator-ignite,murador/ignite,tkpanther/ignite,vldpyatkov/ignite,andrey-kuznetsov/ignite,apache/ignite,andrey-kuznetsov/ignite,zzcclp/ignite,vsisko/incubator-ignite,samaitra/ignite,VladimirErshov/ignite,voipp/ignite,rfqu/ignite,samaitra/ignite,SomeFire/ignite,rfqu/ignite,dlnufox/ignite,pperalta/ignite,avinogradovgg/ignite,rfqu/ignite,thuTom/ignite,StalkXT/ignite,kromulan/ignite,ptupitsyn/ignite,nivanov/ignite,SomeFire/ignite,svladykin/ignite,wmz7year/ignite,abhishek-ch/incubator-ignite,akuznetsov-gridgain/ignite,svladykin/ignite,iveselovskiy/ignite,dmagda/incubator-ignite,mcherkasov/ignite,amirakhmedov/ignite,vsisko/incubator-ignite,BiryukovVA/ignite,dream-x/ignite,ntikhonov/ignite,chandresh-pancholi/ignite,iveselovskiy/ignite,ptupitsyn/ignite,dream-x/ignite,adeelmahmood/ignite,daradurvs/ignite,agura/incubator-ignite,nivanov/ignite,dlnufox/ignite,amirakhmedov/ignite,afinka77/ignite,SomeFire/ignite,StalkXT/ignite,zzcclp/ignite,alexzaitzev/ignite,vsisko/incubator-ignite,daradurvs/ignite,louishust/incubator-ignite,sk0x50/ignite,amirakhmedov/ignite,akuznetsov-gridgain/ignite,murador/ignite,nizhikov/ignite,shurun19851206/ignite,psadusumilli/ignite,apacheignite/ignite,SomeFire/ignite,sylentprayer/ignite,NSAmelchev/ignite,avinogradovgg/ignite,WilliamDo/ignite,avinogradovgg/ignite,endian675/ignite,samaitra/ignite,SharplEr/ignite,tkpanther/ignite,ryanzz/ignite,vldpyatkov/ignite,ntikhonov/ignite,nizhikov/ignite,StalkXT/ignite,vadopolski/ignite,ashutakGG/incubator-ignite,vadopolski/ignite,nizhikov/ignite,dmagda/incubator-ignite,WilliamDo/ignite,VladimirErshov/ignite,VladimirErshov/ignite,agura/incubator-ignite,akuznetsov-gridgain/ignite,abhishek-ch/incubator-ignite,BiryukovVA/ignite,irudyak/ignite,ryanzz/ignite,vladisav/ignite,endian675/ignite,WilliamDo/ignite,louishust/incubator-ignite,irudyak/ignite,voipp/ignite,svladykin/ignite,mcherkasov/ignite,alexzaitzev/ignite,adeelmahmood/ignite,arijitt/incubator-ignite,f7753/ignite,nivanov/ignite,a1vanov/ignite,pperalta/ignite,apache/ignite,iveselovskiy/ignite,gargvish/ignit
e,NSAmelchev/ignite,kidaa/incubator-ignite,alexzaitzev/ignite,murador/ignite,f7753/ignite,vldpyatkov/ignite,alexzaitzev/ignite,sk0x50/ignite,SomeFire/ignite,mcherkasov/ignite,daradurvs/ignite,SomeFire/ignite,leveyj/ignite,psadusumilli/ignite,agoncharuk/ignite,sk0x50/ignite,WilliamDo/ignite,adeelmahmood/ignite,sylentprayer/ignite,vsuslov/incubator-ignite,arijitt/incubator-ignite,arijitt/incubator-ignite,vladisav/ignite,a1vanov/ignite,vadopolski/ignite,afinka77/ignite,psadusumilli/ignite,vadopolski/ignite,SomeFire/ignite,thuTom/ignite,arijitt/incubator-ignite,mcherkasov/ignite,daradurvs/ignite,kidaa/incubator-ignite,psadusumilli/ignite,endian675/ignite,chandresh-pancholi/ignite,VladimirErshov/ignite,nivanov/ignite,shroman/ignite,shurun19851206/ignite,DoudTechData/ignite,shroman/ignite,shroman/ignite,voipp/ignite,iveselovskiy/ignite,vsuslov/incubator-ignite,WilliamDo/ignite,adeelmahmood/ignite,irudyak/ignite,tkpanther/ignite,dream-x/ignite,ilantukh/ignite,endian675/ignite,andrey-kuznetsov/ignite,ptupitsyn/ignite,zzcclp/ignite,afinka77/ignite,BiryukovVA/ignite,DoudTechData/ignite,samaitra/ignite,ilantukh/ignite,sylentprayer/ignite,shroman/ignite,abhishek-ch/incubator-ignite,samaitra/ignite,vadopolski/ignite,BiryukovVA/ignite,dlnufox/ignite,ascherbakoff/ignite,amirakhmedov/ignite,irudyak/ignite,psadusumilli/ignite,svladykin/ignite,iveselovskiy/ignite,tkpanther/ignite,agoncharuk/ignite,dlnufox/ignite,avinogradovgg/ignite,nivanov/ignite,vladisav/ignite,sk0x50/ignite,vsisko/incubator-ignite,a1vanov/ignite,rfqu/ignite,NSAmelchev/ignite,vsisko/incubator-ignite,dlnufox/ignite,agura/incubator-ignite,dream-x/ignite,SomeFire/ignite,alexzaitzev/ignite,zzcclp/ignite,sylentprayer/ignite,WilliamDo/ignite,ascherbakoff/ignite,chandresh-pancholi/ignite,adeelmahmood/ignite,endian675/ignite,xtern/ignite,dmagda/incubator-ignite,samaitra/ignite,shurun19851206/ignite,kromulan/ignite,a1vanov/ignite,gargvish/ignite,vldpyatkov/ignite,voipp/ignite,gargvish/ignite,NSAmelchev/ignite,StalkXT/ignite,vladisav/ignite,ilantukh/ignite,murador/ignite,vladisav/ignite,apache/ignite,gargvish/ignite,vadopolski/ignite,BiryukovVA/ignite,dmagda/incubator-ignite,shurun19851206/ignite,nivanov/ignite,andrey-kuznetsov/ignite,ashutakGG/incubator-ignite,sk0x50/ignite,avinogradovgg/ignite,leveyj/ignite,f7753/ignite,endian675/ignite,dmagda/incubator-ignite,afinka77/ignite,leveyj/ignite,ascherbakoff/ignite,leveyj/ignite,wmz7year/ignite,vladisav/ignite,gridgain/apache-ignite,adeelmahmood/ignite,zzcclp/ignite,shurun19851206/ignite,mcherkasov/ignite,chandresh-pancholi/ignite,shurun19851206/ignite,tkpanther/ignite,nivanov/ignite,shurun19851206/ignite,gridgain/apache-ignite,ascherbakoff/ignite,dmagda/incubator-ignite,xtern/ignite,afinka77/ignite,ilantukh/ignite
|
/* @java.file.header */
/* _________ _____ __________________ _____
* __ ____/___________(_)______ /__ ____/______ ____(_)_______
* _ / __ __ ___/__ / _ __ / _ / __ _ __ `/__ / __ __ \
* / /_/ / _ / _ / / /_/ / / /_/ / / /_/ / _ / _ / / /
* \____/ /_/ /_/ \_,__/ \____/ \__,_/ /_/ /_/ /_/
*/
package org.gridgain.grid.kernal.processors.dataload;
import org.gridgain.grid.*;
import org.gridgain.grid.cache.*;
import org.gridgain.grid.dataload.*;
import org.gridgain.grid.marshaller.*;
import org.gridgain.grid.marshaller.optimized.*;
import org.gridgain.grid.portables.*;
import org.gridgain.grid.spi.discovery.tcp.*;
import org.gridgain.grid.spi.discovery.tcp.ipfinder.*;
import org.gridgain.grid.spi.discovery.tcp.ipfinder.vm.*;
import org.gridgain.grid.util.typedef.*;
import org.gridgain.grid.util.typedef.internal.*;
import org.gridgain.testframework.junits.common.*;
import java.io.*;
import java.util.*;
import java.util.concurrent.*;
import static org.gridgain.grid.cache.GridCacheMode.*;
import static org.gridgain.grid.cache.GridCacheWriteSynchronizationMode.*;
/**
* Tests for {@code GridDataLoaderImpl}.
*/
public class GridDataLoaderImplSelfTest extends GridCommonAbstractTest {
/** IP finder. */
private static final GridTcpDiscoveryIpFinder IP_FINDER = new GridTcpDiscoveryVmIpFinder(true);
/** Number of keys to load via data loader. */
private static final int KEYS_COUNT = 1000;
/** Started grid counter. */
private static int cnt;
/** Flag indicating whether the cache should be configured with portables or not. */
private static boolean portables;
/** {@inheritDoc} */
@Override protected GridConfiguration getConfiguration(String gridName) throws Exception {
GridConfiguration cfg = super.getConfiguration(gridName);
GridTcpDiscoverySpi discoSpi = new GridTcpDiscoverySpi();
discoSpi.setIpFinder(IP_FINDER);
cfg.setDiscoverySpi(discoSpi);
if (portables) {
GridPortableConfiguration portableCfg = new GridPortableConfiguration();
portableCfg.setTypeConfigurations(Arrays.asList(
new GridPortableTypeConfiguration(TestObject.class.getName())));
cfg.setPortableConfiguration(portableCfg);
}
// Fourth node goes without cache.
if (cnt < 4)
cfg.setCacheConfiguration(cacheConfiguration());
cnt++;
return cfg;
}
/**
* @throws Exception If failed.
*/
public void testNullPointerExceptionUponDataLoaderClosing() throws Exception {
try {
startGrids(5);
final CyclicBarrier barrier = new CyclicBarrier(2);
multithreadedAsync(new Callable<Object>() {
@Override public Object call() throws Exception {
U.awaitQuiet(barrier);
G.stopAll(true);
return null;
}
}, 1);
Grid g4 = grid(4);
GridDataLoader<Object, Object> dataLdr = g4.dataLoader(null);
dataLdr.perNodeBufferSize(32);
for (int i = 0; i < 100000; i += 2) {
dataLdr.addData(i, i);
dataLdr.removeData(i + 1);
}
U.awaitQuiet(barrier);
info("Closing data loader.");
try {
dataLdr.close(true);
}
catch (IllegalStateException ignore) {
// It is ok to ignore this exception as the test is racy by its nature -
// grid is stopping in different thread.
}
}
finally {
G.stopAll(true);
}
}
/**
* Data loader should correctly load entries from HashMap in case of grids with more than one node
* and with GridOptimizedMarshaller that requires serializable.
*
* @throws Exception If failed.
*/
public void testAddDataFromMap() throws Exception {
try {
cnt = 0;
portables = false;
startGrids(2);
Grid g0 = grid(0);
GridMarshaller marsh = g0.configuration().getMarshaller();
if (marsh instanceof GridOptimizedMarshaller)
assertTrue(((GridOptimizedMarshaller)marsh).isRequireSerializable());
else
fail("Expected GridOptimizedMarshaller, but found: " + marsh.getClass().getName());
GridDataLoader<Integer, String> dataLdr = g0.dataLoader(null);
Map<Integer, String> map = U.newHashMap(KEYS_COUNT);
for (int i = 0; i < KEYS_COUNT; i ++)
map.put(i, String.valueOf(i));
dataLdr.addData(map);
dataLdr.close();
Random rnd = new Random();
GridCache<Integer, String> c = g0.cache(null);
for (int i = 0; i < KEYS_COUNT; i ++) {
Integer k = rnd.nextInt(KEYS_COUNT);
String v = c.get(k);
assertEquals(k.toString(), v);
}
}
finally {
G.stopAll(true);
}
}
/**
* Data loader should correctly load portable entries from HashMap in case of grids with more than one node
* and with GridOptimizedMarshaller that requires serializable.
*
* @throws Exception If failed.
*/
public void testAddPortableDataFromMap() throws Exception {
try {
cnt = 0;
portables = true;
startGrids(2);
Grid g0 = grid(0);
GridMarshaller marsh = g0.configuration().getMarshaller();
if (marsh instanceof GridOptimizedMarshaller)
assertTrue(((GridOptimizedMarshaller)marsh).isRequireSerializable());
else
fail("Expected GridOptimizedMarshaller, but found: " + marsh.getClass().getName());
GridDataLoader<Integer, TestObject> dataLdr = g0.dataLoader(null);
Map<Integer, TestObject> map = U.newHashMap(KEYS_COUNT);
for (int i = 0; i < KEYS_COUNT; i ++)
map.put(i, new TestObject(i));
dataLdr.addData(map);
dataLdr.close();
Random rnd = new Random();
GridCache<Integer, TestObject> c = g0.cache(null);
for (int i = 0; i < 100; i ++) {
Integer k = rnd.nextInt(KEYS_COUNT);
TestObject v = c.get(k);
assertEquals(k, v.val());
}
GridCacheProjection<Integer, TestObject> c2 = c.keepPortable();
for (int i = 0; i < 100; i ++) {
Integer k = rnd.nextInt(KEYS_COUNT);
TestObject v = c2.get(k);
assertEquals(k, v.val());
}
}
finally {
G.stopAll(true);
}
}
/**
* Gets cache configuration.
*
* @return Cache configuration.
*/
private GridCacheConfiguration cacheConfiguration() {
GridCacheConfiguration cacheCfg = defaultCacheConfiguration();
cacheCfg.setCacheMode(PARTITIONED);
cacheCfg.setBackups(1);
cacheCfg.setWriteSynchronizationMode(FULL_SYNC);
if (portables)
cacheCfg.setPortableEnabled(true);
return cacheCfg;
}
/**
*
*/
private static class TestObject implements GridPortableMarshalAware, Serializable {
/** */
private int val;
/**
*/
private TestObject() {
// No-op.
}
/**
* @param val Value.
*/
private TestObject(int val) {
this.val = val;
}
public Integer val() {
return val;
}
/** {@inheritDoc} */
@Override public int hashCode() {
return val;
}
/** {@inheritDoc} */
@Override public boolean equals(Object obj) {
return obj instanceof TestObject && ((TestObject)obj).val == val;
}
/** {@inheritDoc} */
@Override public void writePortable(GridPortableWriter writer) throws GridPortableException {
writer.writeInt("val", val);
}
/** {@inheritDoc} */
@Override public void readPortable(GridPortableReader reader) throws GridPortableException {
val = reader.readInt("val");
}
}
}
|
modules/core/src/test/java/org/gridgain/grid/kernal/processors/dataload/GridDataLoaderImplSelfTest.java
|
/* @java.file.header */
/* _________ _____ __________________ _____
* __ ____/___________(_)______ /__ ____/______ ____(_)_______
* _ / __ __ ___/__ / _ __ / _ / __ _ __ `/__ / __ __ \
* / /_/ / _ / _ / / /_/ / / /_/ / / /_/ / _ / _ / / /
* \____/ /_/ /_/ \_,__/ \____/ \__,_/ /_/ /_/ /_/
*/
package org.gridgain.grid.kernal.processors.dataload;
import org.gridgain.grid.*;
import org.gridgain.grid.cache.*;
import org.gridgain.grid.dataload.*;
import org.gridgain.grid.marshaller.*;
import org.gridgain.grid.marshaller.optimized.*;
import org.gridgain.grid.portables.*;
import org.gridgain.grid.spi.discovery.tcp.*;
import org.gridgain.grid.spi.discovery.tcp.ipfinder.*;
import org.gridgain.grid.spi.discovery.tcp.ipfinder.vm.*;
import org.gridgain.grid.util.typedef.*;
import org.gridgain.grid.util.typedef.internal.*;
import org.gridgain.testframework.junits.common.*;
import java.io.*;
import java.util.*;
import java.util.concurrent.*;
import static org.gridgain.grid.cache.GridCacheMode.*;
import static org.gridgain.grid.cache.GridCacheWriteSynchronizationMode.*;
/**
* Tests for {@code GridDataLoaderImpl}.
*/
public class GridDataLoaderImplSelfTest extends GridCommonAbstractTest {
/** IP finder. */
private static final GridTcpDiscoveryIpFinder IP_FINDER = new GridTcpDiscoveryVmIpFinder(true);
/** Number of keys to load via data loader. */
private static final int KEYS_COUNT = 1000;
/** Started grid counter. */
private static int cnt;
/** Flag indicating should be cache configured with portables or not. */
private static boolean portables;
/** {@inheritDoc} */
@Override protected GridConfiguration getConfiguration(String gridName) throws Exception {
GridConfiguration cfg = super.getConfiguration(gridName);
GridTcpDiscoverySpi discoSpi = new GridTcpDiscoverySpi();
discoSpi.setIpFinder(IP_FINDER);
cfg.setDiscoverySpi(discoSpi);
if (portables) {
GridPortableConfiguration portableCfg = new GridPortableConfiguration();
portableCfg.setTypeConfigurations(Arrays.asList(
new GridPortableTypeConfiguration(TestObject.class.getName())));
cfg.setPortableConfiguration(portableCfg);
}
        // Fourth node goes without cache.
if (cnt < 4)
cfg.setCacheConfiguration(cacheConfiguration());
cnt++;
return cfg;
}
/**
* @throws Exception If failed.
*/
public void testNullPointerExceptionUponDataLoaderClosing() throws Exception {
try {
startGrids(5);
final CyclicBarrier barrier = new CyclicBarrier(2);
multithreadedAsync(new Callable<Object>() {
@Override public Object call() throws Exception {
U.awaitQuiet(barrier);
G.stopAll(true);
return null;
}
}, 1);
Grid g4 = grid(4);
GridDataLoader<Object, Object> dataLdr = g4.dataLoader(null);
dataLdr.perNodeBufferSize(32);
for (int i = 0; i < 100000; i += 2) {
dataLdr.addData(i, i);
dataLdr.removeData(i + 1);
}
U.awaitQuiet(barrier);
info("Closing data loader.");
try {
dataLdr.close(true);
}
catch (IllegalStateException ignore) {
                // It is ok to ignore this exception as the test is racy by its nature -
                // the grid is stopping in a different thread.
}
}
finally {
G.stopAll(true);
}
}
/**
* Data loader should correctly load entries from HashMap in case of grids with more than one node
* and with GridOptimizedMarshaller that requires serializable.
*
* @throws Exception If failed.
*/
public void testAddDataFromMap() throws Exception {
try {
portables = false;
startGrids(2);
Grid g0 = grid(0);
GridMarshaller marsh = g0.configuration().getMarshaller();
if (marsh instanceof GridOptimizedMarshaller)
assertTrue(((GridOptimizedMarshaller)marsh).isRequireSerializable());
else
fail("Expected GridOptimizedMarshaller, but found: " + marsh.getClass().getName());
GridDataLoader<Integer, String> dataLdr = g0.dataLoader(null);
Map<Integer, String> map = U.newHashMap(KEYS_COUNT);
for (int i = 0; i < KEYS_COUNT; i ++)
map.put(i, String.valueOf(i));
dataLdr.addData(map);
dataLdr.close();
Random rnd = new Random();
GridCache<Integer, String> c = g0.cache(null);
for (int i = 0; i < KEYS_COUNT; i ++) {
Integer k = rnd.nextInt(KEYS_COUNT);
String v = c.get(k);
assertEquals(k.toString(), v);
}
}
finally {
G.stopAll(true);
}
}
/**
* Data loader should correctly load portable entries from HashMap in case of grids with more than one node
* and with GridOptimizedMarshaller that requires serializable.
*
* @throws Exception If failed.
*/
public void testAddPortableDataFromMap() throws Exception {
try {
portables = true;
startGrids(2);
Grid g0 = grid(0);
GridMarshaller marsh = g0.configuration().getMarshaller();
if (marsh instanceof GridOptimizedMarshaller)
assertTrue(((GridOptimizedMarshaller)marsh).isRequireSerializable());
else
fail("Expected GridOptimizedMarshaller, but found: " + marsh.getClass().getName());
GridDataLoader<Integer, TestObject> dataLdr = g0.dataLoader(null);
Map<Integer, TestObject> map = U.newHashMap(KEYS_COUNT);
for (int i = 0; i < KEYS_COUNT; i ++)
map.put(i, new TestObject(i));
dataLdr.addData(map);
dataLdr.close();
Random rnd = new Random();
GridCache<Integer, TestObject> c = g0.cache(null);
for (int i = 0; i < 100; i ++) {
Integer k = rnd.nextInt(KEYS_COUNT);
TestObject v = c.get(k);
assertEquals(k, v.val());
}
GridCacheProjection<Integer, TestObject> c2 = c.keepPortable();
for (int i = 0; i < 100; i ++) {
Integer k = rnd.nextInt(KEYS_COUNT);
TestObject v = c2.get(k);
assertEquals(k, v.val());
}
}
finally {
G.stopAll(true);
}
}
/**
* Gets cache configuration.
*
* @return Cache configuration.
*/
private GridCacheConfiguration cacheConfiguration() {
GridCacheConfiguration cacheCfg = defaultCacheConfiguration();
cacheCfg.setCacheMode(PARTITIONED);
cacheCfg.setBackups(1);
cacheCfg.setWriteSynchronizationMode(FULL_SYNC);
if (portables)
cacheCfg.setPortableEnabled(true);
return cacheCfg;
}
/**
*
*/
private static class TestObject implements GridPortableMarshalAware, Serializable {
/** */
private int val;
/**
*/
private TestObject() {
// No-op.
}
/**
* @param val Value.
*/
private TestObject(int val) {
this.val = val;
}
public Integer val() {
return val;
}
/** {@inheritDoc} */
@Override public int hashCode() {
return val;
}
/** {@inheritDoc} */
@Override public boolean equals(Object obj) {
return obj instanceof TestObject && ((TestObject)obj).val == val;
}
/** {@inheritDoc} */
@Override public void writePortable(GridPortableWriter writer) throws GridPortableException {
writer.writeInt("val", val);
}
/** {@inheritDoc} */
@Override public void readPortable(GridPortableReader reader) throws GridPortableException {
val = reader.readInt("val");
}
}
}
|
# GG-9514 Fixed tests.
|
modules/core/src/test/java/org/gridgain/grid/kernal/processors/dataload/GridDataLoaderImplSelfTest.java
|
# GG-9514 Fixed tests.
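The diff below simply resets the static started-grid counter (cnt = 0) at the top of each test, because getConfiguration() uses that counter to decide which started node gets no cache configuration. A hedged, plain-Java sketch of the leak being fixed (illustration only; no GridGain types, all names are invented for the example):

public class StaticCounterLeakSketch {
    // stands in for the test class's static "started grid counter"
    private static int cnt;

    // mirrors "if (cnt < 4) cfg.setCacheConfiguration(...)" in getConfiguration()
    private static boolean configuresCache() {
        return cnt++ < 4;
    }

    public static void main(String[] args) {
        for (int i = 0; i < 5; i++) {
            configuresCache();                     // a previous test started five grids
        }
        System.out.println(configuresCache());     // false: the stale counter leaks into the next test
        cnt = 0;                                   // the fix: reset before the next test starts its grids
        System.out.println(configuresCache());     // true: node 0 is configured with a cache again
    }
}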
|
<ide><path>odules/core/src/test/java/org/gridgain/grid/kernal/processors/dataload/GridDataLoaderImplSelfTest.java
<ide> */
<ide> public void testAddDataFromMap() throws Exception {
<ide> try {
<add> cnt = 0;
<ide> portables = false;
<ide>
<ide> startGrids(2);
<ide> */
<ide> public void testAddPortableDataFromMap() throws Exception {
<ide> try {
<add> cnt = 0;
<ide> portables = true;
<ide>
<ide> startGrids(2);
|
|
Java
|
apache-2.0
|
369ce6e917be9da1da96b02775e5e03d9a7b4a32
| 0 |
GiapIT/TuDienAnhViet
|
package nguyengiap.vietitpro.tudienanhviet.com.fragment;
import android.content.Intent;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v4.content.ContextCompat;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.StaggeredGridLayoutManager;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;
import java.util.ArrayList;
import java.util.List;
import nguyengiap.vietitpro.tudienanhviet.com.IClickListener;
import nguyengiap.vietitpro.tudienanhviet.com.R;
import nguyengiap.vietitpro.tudienanhviet.com.activity.VerbActivity;
import nguyengiap.vietitpro.tudienanhviet.com.adapter.AdaptetListSearch;
import nguyengiap.vietitpro.tudienanhviet.com.common.Common;
import nguyengiap.vietitpro.tudienanhviet.com.model.DictEntity;
import nguyengiap.vietitpro.tudienanhviet.com.ui.materialsearch.searchview.SearchAdapter;
import nguyengiap.vietitpro.tudienanhviet.com.ui.materialsearch.searchview.SearchHistoryTable;
import nguyengiap.vietitpro.tudienanhviet.com.ui.materialsearch.searchview.SearchItem;
import nguyengiap.vietitpro.tudienanhviet.com.ui.materialsearch.searchview.SearchView;
/**
* A simple {@link Fragment} subclass.
*/
public class FragmentMain extends Fragment implements IClickListener, View.OnClickListener {
RecyclerView lvListSearch;
AdaptetListSearch mListSearchAdapter;
View mMainLayout;
LinearLayout layout_verb;
LinearLayout layout_windown;
LinearLayout layout_face;
LinearLayout layout_rate;
LinearLayout layout_feedback;
protected SearchView mSearchView = null;
private SearchHistoryTable mHistoryDatabase;
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
// Inflate the layout for this fragment
mMainLayout = inflater.inflate(R.layout.fragment_fragment_main, container, false);
initView();
initListSearch();
setSearchView();
return mMainLayout;
}
private void initView() {
lvListSearch = (RecyclerView) mMainLayout.findViewById(R.id.lvWord);
lvListSearch.setHasFixedSize(true);
lvListSearch.setLayoutManager(new StaggeredGridLayoutManager(1,
StaggeredGridLayoutManager.VERTICAL));
layout_verb = (LinearLayout) mMainLayout.findViewById(R.id.layout_verb);
layout_windown = (LinearLayout) mMainLayout.findViewById(R.id.layout_windown);
layout_face = (LinearLayout) mMainLayout.findViewById(R.id.layout_face);
layout_rate = (LinearLayout) mMainLayout.findViewById(R.id.layout_rate);
layout_feedback = (LinearLayout) mMainLayout.findViewById(R.id.layout_feedback);
layout_verb.setOnClickListener(this);
layout_windown.setOnClickListener(this);
layout_face.setOnClickListener(this);
layout_rate.setOnClickListener(this);
layout_feedback.setOnClickListener(this);
mSearchView = (SearchView)mMainLayout.findViewById(R.id.searchView);
}
@Override
public void onActivityCreated(@Nullable Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
}
private void initListSearch() {
ArrayList<DictEntity> list = new ArrayList<>();
for (int i = 0; i < 100; i++) {
list.add(new DictEntity(i, "Love", "lav", "tinh yêu,tinh thương"));
}
mListSearchAdapter = new AdaptetListSearch(getActivity(), list);
lvListSearch.setAdapter(mListSearchAdapter);
mListSearchAdapter.notifyDataSetChanged();
}
@Override
public void onItemClick(View view, DictEntity engVietDict) {
}
@Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.layout_verb:
Intent intent=new Intent(getActivity(), VerbActivity.class);
startActivity(intent);
break;
case R.id.layout_windown:
break;
case R.id.layout_face:
Common.shareFacebook(getActivity());
break;
case R.id.layout_rate:
Common.showApp(getActivity(),getActivity().getPackageName());
break;
case R.id.layout_feedback:
Common.sendFeedBack(getActivity());
break;
}
}
protected void setSearchView() {
mHistoryDatabase = new SearchHistoryTable(getContext());
if (mSearchView != null) {
mSearchView.setVersion(SearchView.VERSION_TOOLBAR);
mSearchView.setVersionMargins(SearchView.VERSION_MARGINS_TOOLBAR_BIG);
mSearchView.setHint(R.string.search);
mSearchView.setTextSize(16);
mSearchView.setHint("Nhập từ cần tra");
mSearchView.setDivider(false);
mSearchView.setVoice(true);
mSearchView.setVoiceText("Set permission on Android 6+ !");
mSearchView.setAnimationDuration(SearchView.ANIMATION_DURATION);
mSearchView.setShadowColor(ContextCompat.getColor(getContext(), R.color.search_shadow_layout));
mSearchView.setOnQueryTextListener(new SearchView.OnQueryTextListener() {
@Override
public boolean onQueryTextSubmit(String query) {
mSearchView.close(false);
return true;
}
@Override
public boolean onQueryTextChange(String newText) {
return false;
}
});
mSearchView.setOnOpenCloseListener(new SearchView.OnOpenCloseListener() {
@Override
public void onOpen() {
}
@Override
public void onClose() {
}
});
List<SearchItem> suggestionsList = new ArrayList<>();
suggestionsList.add(new SearchItem("search1"));
suggestionsList.add(new SearchItem("search2"));
suggestionsList.add(new SearchItem("search3"));
SearchAdapter searchAdapter = new SearchAdapter(getContext(), suggestionsList);
searchAdapter.setOnItemClickListener(new SearchAdapter.OnItemClickListener() {
@Override
public void onItemClick(View view, int position) {
mSearchView.close(false);
TextView textView = (TextView) view.findViewById(R.id.textView_item_text);
String query = textView.getText().toString();
}
});
mSearchView.setAdapter(searchAdapter);
}
}
}
|
app/src/main/java/nguyengiap/vietitpro/tudienanhviet/com/fragment/FragmentMain.java
|
package nguyengiap.vietitpro.tudienanhviet.com.fragment;
import android.content.Intent;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v4.content.ContextCompat;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.StaggeredGridLayoutManager;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;
import java.util.ArrayList;
import java.util.List;
import nguyengiap.vietitpro.tudienanhviet.com.IClickListener;
import nguyengiap.vietitpro.tudienanhviet.com.R;
import nguyengiap.vietitpro.tudienanhviet.com.activity.VerbActivity;
import nguyengiap.vietitpro.tudienanhviet.com.adapter.AdaptetListSearch;
import nguyengiap.vietitpro.tudienanhviet.com.common.Common;
import nguyengiap.vietitpro.tudienanhviet.com.model.DictEntity;
import nguyengiap.vietitpro.tudienanhviet.com.ui.materialsearch.searchview.SearchAdapter;
import nguyengiap.vietitpro.tudienanhviet.com.ui.materialsearch.searchview.SearchHistoryTable;
import nguyengiap.vietitpro.tudienanhviet.com.ui.materialsearch.searchview.SearchItem;
import nguyengiap.vietitpro.tudienanhviet.com.ui.materialsearch.searchview.SearchView;
/**
* A simple {@link Fragment} subclass.
*/
public class FragmentMain extends Fragment implements IClickListener, View.OnClickListener {
RecyclerView lvListSearch;
AdaptetListSearch mListSearchAdapter;
View mMainLayout;
LinearLayout layout_verb;
LinearLayout layout_windown;
LinearLayout layout_face;
LinearLayout layout_rate;
LinearLayout layout_feedback;
protected SearchView mSearchView = null;
private SearchHistoryTable mHistoryDatabase;
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
// Inflate the layout for this fragment
mMainLayout = inflater.inflate(R.layout.fragment_fragment_main, container, false);
initView();
initListSearch();
setSearchView();
return mMainLayout;
}
private void initView() {
lvListSearch = (RecyclerView) mMainLayout.findViewById(R.id.lvWord);
lvListSearch.setHasFixedSize(true);
lvListSearch.setLayoutManager(new StaggeredGridLayoutManager(1,
StaggeredGridLayoutManager.VERTICAL));
layout_verb = (LinearLayout) mMainLayout.findViewById(R.id.layout_verb);
layout_windown = (LinearLayout) mMainLayout.findViewById(R.id.layout_windown);
layout_face = (LinearLayout) mMainLayout.findViewById(R.id.layout_face);
layout_rate = (LinearLayout) mMainLayout.findViewById(R.id.layout_rate);
layout_feedback = (LinearLayout) mMainLayout.findViewById(R.id.layout_feedback);
layout_verb.setOnClickListener(this);
layout_windown.setOnClickListener(this);
layout_face.setOnClickListener(this);
layout_rate.setOnClickListener(this);
layout_feedback.setOnClickListener(this);
mSearchView = (SearchView)mMainLayout.findViewById(R.id.searchView);
}
@Override
public void onActivityCreated(@Nullable Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
}
private void initListSearch() {
ArrayList<DictEntity> list = new ArrayList<>();
for (int i = 0; i < 100; i++) {
list.add(new DictEntity(i, "Love", "lav", "tinh yêu,tinh thương"));
}
mListSearchAdapter = new AdaptetListSearch(getActivity(), list);
lvListSearch.setAdapter(mListSearchAdapter);
mListSearchAdapter.notifyDataSetChanged();
}
@Override
public void onItemClick(View view, DictEntity engVietDict) {
}
@Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.layout_verb:
Intent intent=new Intent(getActivity(), VerbActivity.class);
startActivity(intent);
break;
case R.id.layout_windown:
break;
case R.id.layout_face:
Common.shareFacebook(getActivity());
break;
case R.id.layout_rate:
Common.showApp(getActivity(),getActivity().getPackageName());
break;
case R.id.layout_feedback:
Common.sendFeedBack(getActivity());
break;
}
}
protected void setSearchView() {
mHistoryDatabase = new SearchHistoryTable(getContext());
if (mSearchView != null) {
mSearchView.setVersion(SearchView.VERSION_TOOLBAR);
mSearchView.setVersionMargins(SearchView.VERSION_MARGINS_TOOLBAR_BIG);
mSearchView.setHint(R.string.search);
mSearchView.setTextSize(16);
mSearchView.setHint("Nhập từ cần tra");
mSearchView.setDivider(false);
mSearchView.setVoice(true);
mSearchView.setVoiceText("Set permission on Android 6+ !");
mSearchView.setAnimationDuration(SearchView.ANIMATION_DURATION);
mSearchView.setShadowColor(ContextCompat.getColor(getContext(), R.color.search_shadow_layout));
mSearchView.setOnQueryTextListener(new SearchView.OnQueryTextListener() {
@Override
public boolean onQueryTextSubmit(String query) {
mSearchView.close(false);
return true;
}
@Override
public boolean onQueryTextChange(String newText) {
return false;
}
});
mSearchView.setOnOpenCloseListener(new SearchView.OnOpenCloseListener() {
@Override
public void onOpen() {
}
@Override
public void onClose() {
}
});
List<SearchItem> suggestionsList = new ArrayList<>();
suggestionsList.add(new SearchItem("search1"));
suggestionsList.add(new SearchItem("search2"));
suggestionsList.add(new SearchItem("search3"));
SearchAdapter searchAdapter = new SearchAdapter(getContext(), suggestionsList);
searchAdapter.setOnItemClickListener(new SearchAdapter.OnItemClickListener() {
@Override
public void onItemClick(View view, int position) {
mSearchView.close(false);
TextView textView = (TextView) view.findViewById(R.id.textView_item_text);
String query = textView.getText().toString();
}
});
mSearchView.setAdapter(searchAdapter);
}
}
}
|
update
|
app/src/main/java/nguyengiap/vietitpro/tudienanhviet/com/fragment/FragmentMain.java
|
update
|
<ide><path>pp/src/main/java/nguyengiap/vietitpro/tudienanhviet/com/fragment/FragmentMain.java
<ide> public void onItemClick(View view, int position) {
<ide> mSearchView.close(false);
<ide> TextView textView = (TextView) view.findViewById(R.id.textView_item_text);
<add>
<ide> String query = textView.getText().toString();
<ide> }
<ide> });
|
|
Java
|
apache-2.0
|
5117d7d04cf6e70401162a948b1a601ca71f2e50
| 0 |
Commonjava/indy,Commonjava/indy,yma88/indy,jdcasey/indy,pkocandr/indy,ligangty/indy,pkocandr/indy,ligangty/indy,ligangty/indy,jdcasey/indy,jdcasey/indy,ruhan1/indy,ruhan1/indy,jdcasey/indy,pkocandr/indy,pkocandr/indy,jdcasey/indy,pkocandr/indy,ligangty/indy,ligangty/indy,Commonjava/indy,pkocandr/indy,yma88/indy,ruhan1/indy,Commonjava/indy,yma88/indy,Commonjava/indy,yma88/indy,ligangty/indy,yma88/indy,Commonjava/indy,yma88/indy,ruhan1/indy,jdcasey/indy,ruhan1/indy,ruhan1/indy
|
/**
* Copyright (C) 2011-2020 Red Hat, Inc. (https://github.com/Commonjava/indy)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.commonjava.indy.httprox.util;
import org.commonjava.indy.sli.metrics.GoldenSignalsMetricSet;
import org.slf4j.Logger;
import org.commonjava.indy.metrics.RequestContextHelper;
import java.net.SocketAddress;
import static java.lang.Integer.parseInt;
import static org.commonjava.indy.metrics.RequestContextHelper.HTTP_METHOD;
import static org.commonjava.indy.metrics.RequestContextHelper.HTTP_STATUS;
import static org.commonjava.indy.metrics.RequestContextHelper.REQUEST_LATENCY_NS;
import static org.commonjava.indy.metrics.RequestContextHelper.REQUEST_PHASE;
import static org.commonjava.indy.metrics.RequestContextHelper.REQUEST_PHASE_END;
import static org.commonjava.indy.metrics.RequestContextHelper.getContext;
import static org.commonjava.indy.metrics.RequestContextHelper.setContext;
import static org.commonjava.indy.metrics.TrafficClassifierConstants.FN_CONTENT_GENERIC;
public class ProxyMeter
{
private boolean summaryReported;
private final String method;
private final String requestLine;
private final long startNanos;
private final GoldenSignalsMetricSet sliMetricSet;
private final Logger restLogger;
private final SocketAddress peerAddress;
public ProxyMeter( final String method, final String requestLine, final long startNanos, final GoldenSignalsMetricSet sliMetricSet, final Logger restLogger,
final SocketAddress peerAddress )
{
this.method = method;
this.requestLine = requestLine;
this.startNanos = startNanos;
this.sliMetricSet = sliMetricSet;
this.restLogger = restLogger;
this.peerAddress = peerAddress;
}
public void reportResponseSummary()
{
/*
Here, we make this call idempotent to make the logic easier in the doHandleEvent method.
This way, for content-transfer requests we will call this JUST BEFORE the transfer begins,
while for all other requests we will handle it in the finally block of the doHandleEvent() method.
NOTE: This will probably result in incorrect latency measurements for any client using HTTPS via the
CONNECT method.
*/
if ( !summaryReported )
{
summaryReported = true;
long latency = System.nanoTime() - startNanos;
RequestContextHelper.setContext( REQUEST_LATENCY_NS, String.valueOf( latency ) );
setContext( HTTP_METHOD, method );
// log SLI metrics
if ( sliMetricSet != null )
{
sliMetricSet.function( FN_CONTENT_GENERIC ).ifPresent( ms ->{
ms.latency( latency ).call();
if ( getContext( HTTP_STATUS, 200 ) > 499 )
{
ms.error();
}
} );
}
RequestContextHelper.setContext( REQUEST_PHASE, REQUEST_PHASE_END );
restLogger.info( "END {} (from: {})", requestLine, peerAddress );
RequestContextHelper.clearContext( REQUEST_PHASE );
}
}
public ProxyMeter copy( final long startNanos, final String method, final String requestLine )
{
return new ProxyMeter( method, requestLine, startNanos, sliMetricSet, restLogger, peerAddress );
}
}
|
addons/httprox/common/src/main/java/org/commonjava/indy/httprox/util/ProxyMeter.java
|
/**
* Copyright (C) 2011-2020 Red Hat, Inc. (https://github.com/Commonjava/indy)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.commonjava.indy.httprox.util;
import org.commonjava.indy.sli.metrics.GoldenSignalsMetricSet;
import org.slf4j.Logger;
import org.commonjava.indy.metrics.RequestContextHelper;
import java.net.SocketAddress;
import static java.lang.Integer.parseInt;
import static org.commonjava.indy.metrics.RequestContextHelper.HTTP_METHOD;
import static org.commonjava.indy.metrics.RequestContextHelper.HTTP_STATUS;
import static org.commonjava.indy.metrics.RequestContextHelper.REQUEST_LATENCY_NS;
import static org.commonjava.indy.metrics.RequestContextHelper.REQUEST_PHASE;
import static org.commonjava.indy.metrics.RequestContextHelper.REQUEST_PHASE_END;
import static org.commonjava.indy.metrics.RequestContextHelper.getContext;
import static org.commonjava.indy.metrics.RequestContextHelper.setContext;
public class ProxyMeter
{
private boolean summaryReported;
private final String method;
private final String requestLine;
private final long startNanos;
private final GoldenSignalsMetricSet sliMetricSet;
private final Logger restLogger;
private final SocketAddress peerAddress;
public ProxyMeter( final String method, final String requestLine, final long startNanos, final GoldenSignalsMetricSet sliMetricSet, final Logger restLogger,
final SocketAddress peerAddress )
{
this.method = method;
this.requestLine = requestLine;
this.startNanos = startNanos;
this.sliMetricSet = sliMetricSet;
this.restLogger = restLogger;
this.peerAddress = peerAddress;
}
public void reportResponseSummary()
{
/*
Here, we make this call idempotent to make the logic easier in the doHandleEvent method.
This way, for content-transfer requests we will call this JUST BEFORE the transfer begins,
while for all other requests we will handle it in the finally block of the doHandleEvent() method.
NOTE: This will probably result in incorrect latency measurements for any client using HTTPS via the
CONNECT method.
*/
if ( !summaryReported )
{
summaryReported = true;
long latency = System.nanoTime() - startNanos;
RequestContextHelper.setContext( REQUEST_LATENCY_NS, String.valueOf( latency ) );
setContext( HTTP_METHOD, method );
// log SLI metrics
if ( sliMetricSet != null )
{
sliMetricSet.function( GoldenSignalsMetricSet.FN_CONTENT_GENERIC ).ifPresent( ms ->{
ms.latency( latency ).call();
if ( getContext( HTTP_STATUS, 200 ) > 499 )
{
ms.error();
}
} );
}
RequestContextHelper.setContext( REQUEST_PHASE, REQUEST_PHASE_END );
restLogger.info( "END {} (from: {})", requestLine, peerAddress );
RequestContextHelper.clearContext( REQUEST_PHASE );
}
}
public ProxyMeter copy( final long startNanos, final String method, final String requestLine )
{
return new ProxyMeter( method, requestLine, startNanos, sliMetricSet, restLogger, peerAddress );
}
}
|
fix compilation error
|
addons/httprox/common/src/main/java/org/commonjava/indy/httprox/util/ProxyMeter.java
|
fix compilation error
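The compile fix in the diff below only swaps a constant reference for a static import; the surrounding reportResponseSummary() logic is unchanged. For orientation, a hedged, standalone sketch of that report-once latency pattern (illustration only; the class and method names are invented and no Indy types are used):

public class ReportOnceSketch {
    private final long startNanos = System.nanoTime();
    private boolean summaryReported;

    public void reportResponseSummary(int httpStatus) {
        if (summaryReported) {
            return;                                   // idempotent: later calls are no-ops
        }
        summaryReported = true;
        long latencyNs = System.nanoTime() - startNanos;
        boolean error = httpStatus > 499;             // 5xx responses count against the error signal
        System.out.println("latency(ns)=" + latencyNs + " error=" + error);
    }

    public static void main(String[] args) throws InterruptedException {
        ReportOnceSketch meter = new ReportOnceSketch();
        Thread.sleep(5);
        meter.reportResponseSummary(200);             // reported once
        meter.reportResponseSummary(503);             // ignored: summary already reported
    }
}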
|
<ide><path>ddons/httprox/common/src/main/java/org/commonjava/indy/httprox/util/ProxyMeter.java
<ide> import static org.commonjava.indy.metrics.RequestContextHelper.REQUEST_PHASE_END;
<ide> import static org.commonjava.indy.metrics.RequestContextHelper.getContext;
<ide> import static org.commonjava.indy.metrics.RequestContextHelper.setContext;
<add>import static org.commonjava.indy.metrics.TrafficClassifierConstants.FN_CONTENT_GENERIC;
<ide>
<ide> public class ProxyMeter
<ide> {
<ide> // log SLI metrics
<ide> if ( sliMetricSet != null )
<ide> {
<del> sliMetricSet.function( GoldenSignalsMetricSet.FN_CONTENT_GENERIC ).ifPresent( ms ->{
<add> sliMetricSet.function( FN_CONTENT_GENERIC ).ifPresent( ms ->{
<ide> ms.latency( latency ).call();
<ide>
<ide> if ( getContext( HTTP_STATUS, 200 ) > 499 )
|
|
Java
|
mpl-2.0
|
5a4536fb4a90693d70ffafc1623516239293972d
| 0 |
hyperrail/hyperrail-for-android
|
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
package be.hyperrail.android.irail.implementation;
import org.joda.time.DateTime;
import java.util.Arrays;
import be.hyperrail.android.irail.contracts.IRailErrorResponseListener;
import be.hyperrail.android.irail.contracts.IRailSuccessResponseListener;
import be.hyperrail.android.irail.contracts.IrailDataProvider;
import be.hyperrail.android.irail.contracts.RouteTimeDefinition;
import be.hyperrail.android.irail.factories.IrailFactory;
import be.hyperrail.android.util.ArrayUtils;
public class LiveboardAppendHelper implements IRailSuccessResponseListener<LiveBoard>, IRailErrorResponseListener<LiveBoard> {
private final int TAG_APPEND = 0;
private final int TAG_PREPEND = 1;
private int attempt = 0;
private DateTime lastSearchTime;
private LiveBoard originalLiveboard;
private IRailSuccessResponseListener<LiveBoard> successResponseListener;
private IRailErrorResponseListener<LiveBoard> errorResponseListener;
IrailDataProvider api = IrailFactory.getDataProviderInstance();
public void appendLiveboard(final LiveBoard liveBoard, final IRailSuccessResponseListener<LiveBoard> successResponseListener,
final IRailErrorResponseListener<LiveBoard> errorResponseListener) {
this.successResponseListener = successResponseListener;
this.errorResponseListener = errorResponseListener;
this.originalLiveboard = liveBoard;
if (liveBoard.getStops().length > 0) {
this.lastSearchTime = liveBoard.getStops()[liveBoard.getStops().length - 1].getDepartureTime().plusMinutes(1);
} else {
this.lastSearchTime = liveBoard.getSearchTime().plusHours(1);
}
api.getLiveboard(liveBoard, lastSearchTime, RouteTimeDefinition.DEPART, this, this, TAG_APPEND);
}
public void prependLiveboard(final LiveBoard liveBoard, final IRailSuccessResponseListener<LiveBoard> successResponseListener,
final IRailErrorResponseListener<LiveBoard> errorResponseListener) {
this.successResponseListener = successResponseListener;
this.errorResponseListener = errorResponseListener;
this.originalLiveboard = liveBoard;
if (liveBoard.getStops().length > 0) {
this.lastSearchTime = liveBoard.getStops()[0].getDepartureTime();
} else {
this.lastSearchTime = liveBoard.getSearchTime();
}
api.getLiveboardBefore(liveBoard, lastSearchTime, RouteTimeDefinition.DEPART, this, this, TAG_PREPEND);
}
@Override
public void onSuccessResponse(LiveBoard data, Object tag) {
switch ((int) tag) {
case TAG_APPEND:
TrainStop[] newStops = data.getStops();
if (newStops.length > 0) {
// It can happen that a scheduled departure was before the search time.
// In this case, prevent duplicates by searching the first stop which isn't before
                    // the search date, and removing all earlier stops.
int i = 0;
while (i < newStops.length && newStops[i].getDepartureTime().isBefore(data.getSearchTime())) {
i++;
}
if (i > 0) {
if (i <= data.getStops().length - 1) {
newStops = Arrays.copyOfRange(data.getStops(), i, data.getStops().length - 1);
} else {
newStops = new TrainStop[0];
}
}
}
if (newStops.length > 0) {
TrainStop[] mergedStops = ArrayUtils.concatenate(originalLiveboard.getStops(), newStops);
LiveBoard merged = new LiveBoard(originalLiveboard, mergedStops, originalLiveboard.getSearchTime());
this.successResponseListener.onSuccessResponse(merged, tag);
} else {
// No results, search two hours further in case this day doesn't have results.
// Skip 2 hours at once, possible due to large API pages.
attempt++;
lastSearchTime = lastSearchTime.plusHours(2);
if (attempt < 12) {
api.getLiveboard(originalLiveboard, lastSearchTime, RouteTimeDefinition.DEPART, this, this, tag);
} else {
if (this.successResponseListener != null) {
this.successResponseListener.onSuccessResponse(originalLiveboard, this);
}
}
}
break;
case TAG_PREPEND:
if (data.getStops().length > 0) {
// TODO: prevent duplicates by checking arrival time
TrainStop[] mergedStops = ArrayUtils.concatenate(data.getStops(), originalLiveboard.getStops());
LiveBoard merged = new LiveBoard(originalLiveboard, mergedStops, originalLiveboard.getSearchTime());
this.successResponseListener.onSuccessResponse(merged, tag);
} else {
attempt++;
lastSearchTime = lastSearchTime.minusHours(1);
if (attempt < 12) {
api.getLiveboardBefore(originalLiveboard, lastSearchTime, RouteTimeDefinition.DEPART, this, this, tag);
} else {
if (this.successResponseListener != null) {
this.successResponseListener.onSuccessResponse(originalLiveboard, this);
}
}
}
break;
}
}
@Override
public void onErrorResponse(Exception e, Object tag) {
if (this.errorResponseListener != null) {
this.errorResponseListener.onErrorResponse(e, this);
}
}
}
|
Hyperrail/src/main/java/be/hyperrail/android/irail/implementation/LiveboardAppendHelper.java
|
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
package be.hyperrail.android.irail.implementation;
import org.joda.time.DateTime;
import java.util.Arrays;
import be.hyperrail.android.irail.contracts.IRailErrorResponseListener;
import be.hyperrail.android.irail.contracts.IRailSuccessResponseListener;
import be.hyperrail.android.irail.contracts.IrailDataProvider;
import be.hyperrail.android.irail.contracts.RouteTimeDefinition;
import be.hyperrail.android.irail.factories.IrailFactory;
import be.hyperrail.android.util.ArrayUtils;
public class LiveboardAppendHelper implements IRailSuccessResponseListener<LiveBoard>, IRailErrorResponseListener<LiveBoard> {
private final int TAG_APPEND = 0;
private final int TAG_PREPEND = 1;
private int attempt = 0;
private DateTime lastSearchTime;
private LiveBoard originalLiveboard;
private IRailSuccessResponseListener<LiveBoard> successResponseListener;
private IRailErrorResponseListener<LiveBoard> errorResponseListener;
IrailDataProvider api = IrailFactory.getDataProviderInstance();
public void appendLiveboard(final LiveBoard liveBoard, final IRailSuccessResponseListener<LiveBoard> successResponseListener,
final IRailErrorResponseListener<LiveBoard> errorResponseListener) {
this.successResponseListener = successResponseListener;
this.errorResponseListener = errorResponseListener;
this.originalLiveboard = liveBoard;
if (liveBoard.getStops().length > 0) {
this.lastSearchTime = liveBoard.getStops()[liveBoard.getStops().length - 1].getDepartureTime().plusMinutes(1);
} else {
this.lastSearchTime = liveBoard.getSearchTime().plusHours(1);
}
api.getLiveboard(liveBoard, lastSearchTime, RouteTimeDefinition.DEPART, this, this, TAG_APPEND);
}
public void prependLiveboard(final LiveBoard liveBoard, final IRailSuccessResponseListener<LiveBoard> successResponseListener,
final IRailErrorResponseListener<LiveBoard> errorResponseListener) {
this.successResponseListener = successResponseListener;
this.errorResponseListener = errorResponseListener;
this.originalLiveboard = liveBoard;
if (liveBoard.getStops().length > 0) {
this.lastSearchTime = liveBoard.getStops()[0].getDepartureTime();
} else {
this.lastSearchTime = liveBoard.getSearchTime();
}
api.getLiveboardBefore(liveBoard, lastSearchTime, RouteTimeDefinition.DEPART, this, this, TAG_PREPEND);
}
@Override
public void onSuccessResponse(LiveBoard data, Object tag) {
switch ((int) tag) {
case TAG_APPEND:
if (data.getStops().length > 0) {
TrainStop[] newStops = data.getStops();
// It can happen that a scheduled departure was before the search time.
// In this case, prevent duplicates by searching the first stop which isn't before
                    // the search date, and removing all earlier stops.
int i = 0;
while (i < newStops.length && newStops[i].getDepartureTime().isBefore(data.getSearchTime())) {
i++;
}
if (i > 0) {
newStops = Arrays.copyOfRange(data.getStops(), i, data.getStops().length - 1);
}
TrainStop[] mergedStops = ArrayUtils.concatenate(originalLiveboard.getStops(), newStops);
LiveBoard merged = new LiveBoard(originalLiveboard, mergedStops, originalLiveboard.getSearchTime());
this.successResponseListener.onSuccessResponse(merged, tag);
} else {
// No results, search two hours further in case this day doesn't have results.
// Skip 2 hours at once, possible due to large API pages.
attempt++;
lastSearchTime = lastSearchTime.plusHours(2);
if (attempt < 12) {
api.getLiveboard(originalLiveboard, lastSearchTime, RouteTimeDefinition.DEPART, this, this, tag);
} else {
if (this.successResponseListener != null) {
this.successResponseListener.onSuccessResponse(originalLiveboard, this);
}
}
}
break;
case TAG_PREPEND:
if (data.getStops().length > 0) {
// TODO: prevent duplicates by checking arrival time
TrainStop[] mergedStops = ArrayUtils.concatenate(data.getStops(), originalLiveboard.getStops());
LiveBoard merged = new LiveBoard(originalLiveboard, mergedStops, originalLiveboard.getSearchTime());
this.successResponseListener.onSuccessResponse(merged, tag);
} else {
attempt++;
lastSearchTime = lastSearchTime.minusHours(1);
if (attempt < 12) {
api.getLiveboardBefore(originalLiveboard, lastSearchTime, RouteTimeDefinition.DEPART, this, this, tag);
} else {
if (this.successResponseListener != null) {
this.successResponseListener.onSuccessResponse(originalLiveboard, this);
}
}
}
break;
}
}
@Override
public void onErrorResponse(Exception e, Object tag) {
if (this.errorResponseListener != null) {
this.errorResponseListener.onErrorResponse(e, this);
}
}
}
|
Fix a crash when there are no new departures during an attempt to load the next departures
|
Hyperrail/src/main/java/be/hyperrail/android/irail/implementation/LiveboardAppendHelper.java
|
Fix a crash when there are no new departures during an attempt to load the next departures
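The guard added in the diff below avoids Arrays.copyOfRange being called with a from-index past the to-index when every newly loaded stop departs before the search time. A hedged, minimal reproduction of that boundary case on a plain int array (illustration only, not app code):

import java.util.Arrays;

public class CopyRangeGuardSketch {
    public static void main(String[] args) {
        int[] stops = {10, 20, 30};
        int firstUsable = 3;                          // every stop was before the search time
        int[] trimmed;
        if (firstUsable <= stops.length - 1) {
            trimmed = Arrays.copyOfRange(stops, firstUsable, stops.length - 1);
        } else {
            trimmed = new int[0];                     // the fix: fall back to an empty batch instead of throwing
        }
        System.out.println(Arrays.toString(trimmed)); // prints [] rather than crashing
    }
}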
|
<ide><path>yperrail/src/main/java/be/hyperrail/android/irail/implementation/LiveboardAppendHelper.java
<ide> public void onSuccessResponse(LiveBoard data, Object tag) {
<ide> switch ((int) tag) {
<ide> case TAG_APPEND:
<del> if (data.getStops().length > 0) {
<del> TrainStop[] newStops = data.getStops();
<ide>
<add> TrainStop[] newStops = data.getStops();
<add>
<add> if (newStops.length > 0) {
<ide> // It can happen that a scheduled departure was before the search time.
<ide> // In this case, prevent duplicates by searching the first stop which isn't before
<ide> // the searchdate, and removing all earlier stops.
<ide> i++;
<ide> }
<ide> if (i > 0) {
<del> newStops = Arrays.copyOfRange(data.getStops(), i, data.getStops().length - 1);
<add> if (i <= data.getStops().length - 1) {
<add> newStops = Arrays.copyOfRange(data.getStops(), i, data.getStops().length - 1);
<add> } else {
<add> newStops = new TrainStop[0];
<add> }
<ide> }
<add> }
<add>
<add> if (newStops.length > 0) {
<add>
<ide> TrainStop[] mergedStops = ArrayUtils.concatenate(originalLiveboard.getStops(), newStops);
<ide> LiveBoard merged = new LiveBoard(originalLiveboard, mergedStops, originalLiveboard.getSearchTime());
<ide> this.successResponseListener.onSuccessResponse(merged, tag);
|
|
Java
|
lgpl-2.1
|
3963600e27b7bbe3efb71db64f75d4168f5a5774
| 0 |
CreativeMD/LittleTiles
|
package com.creativemd.littletiles.common.gui;
import java.awt.Toolkit;
import java.awt.datatransfer.Clipboard;
import java.awt.datatransfer.StringSelection;
import java.util.ArrayList;
import java.util.List;
import com.creativemd.creativecore.common.gui.container.SubGui;
import com.creativemd.creativecore.common.gui.controls.gui.GuiButton;
import com.creativemd.creativecore.common.gui.controls.gui.GuiComboBox;
import com.creativemd.creativecore.common.gui.controls.gui.GuiTextfield;
import com.creativemd.creativecore.common.gui.event.container.SlotChangeEvent;
import com.creativemd.creativecore.common.gui.event.gui.GuiControlChangedEvent;
import com.creativemd.littletiles.common.container.SubContainerExport;
import com.creativemd.littletiles.common.items.ItemRecipe;
import com.creativemd.littletiles.common.utils.converting.StructureStringUtils;
import com.creativemd.littletiles.common.utils.placing.PlacementHelper;
import com.n247s.api.eventapi.eventsystem.CustomEventSubscribe;
import net.minecraft.item.ItemStack;
public class SubGuiExport extends SubGui {
public GuiTextfield textfield;
@Override
public void createControls() {
textfield = new GuiTextfield("export", "", 10, 30, 150, 14);
textfield.maxLength = Integer.MAX_VALUE;
controls.add(textfield);
controls.add(new GuiButton("Copy", 10, 52) {
@Override
public void onClicked(int x, int y, int button) {
StringSelection stringSelection = new StringSelection(textfield.text);
Clipboard clpbrd = Toolkit.getDefaultToolkit().getSystemClipboard();
clpbrd.setContents(stringSelection, null);
}
});
List<String> lines = new ArrayList<>();
lines.add("structure");
lines.add("model");
controls.add(new GuiComboBox("type", 43, 52, 100, lines));
}
public void updateTextfield() {
ItemStack stack = ((SubContainerExport) container).slot.getStackInSlot(0);
if (stack != null && (PlacementHelper.isLittleBlock(stack) || stack.getItem() instanceof ItemRecipe)) {
GuiComboBox box = (GuiComboBox) get("type");
if (box.index == 0)
textfield.text = StructureStringUtils.exportStructure(stack);
else
textfield.text = StructureStringUtils.exportModel(stack);
;
} else
textfield.text = "";
}
@CustomEventSubscribe
public void onSelectionChanged(GuiControlChangedEvent event) {
updateTextfield();
}
@CustomEventSubscribe
public void onSlotChange(SlotChangeEvent event) {
updateTextfield();
}
}
|
src/main/java/com/creativemd/littletiles/common/gui/SubGuiExport.java
|
package com.creativemd.littletiles.common.gui;
import java.awt.Toolkit;
import java.awt.datatransfer.Clipboard;
import java.awt.datatransfer.StringSelection;
import com.creativemd.creativecore.common.gui.container.SubGui;
import com.creativemd.creativecore.common.gui.controls.gui.GuiButton;
import com.creativemd.creativecore.common.gui.controls.gui.GuiTextfield;
import com.creativemd.creativecore.common.gui.event.container.SlotChangeEvent;
import com.creativemd.littletiles.common.container.SubContainerExport;
import com.creativemd.littletiles.common.items.ItemRecipe;
import com.creativemd.littletiles.common.utils.converting.StructureStringUtils;
import com.creativemd.littletiles.common.utils.placing.PlacementHelper;
import com.n247s.api.eventapi.eventsystem.CustomEventSubscribe;
import net.minecraft.item.ItemStack;
public class SubGuiExport extends SubGui {
public GuiTextfield textfield;
@Override
public void createControls() {
textfield = new GuiTextfield("export", "", 10, 30, 150, 14);
textfield.maxLength = Integer.MAX_VALUE;
controls.add(textfield);
controls.add(new GuiButton("Copy", 10, 52) {
@Override
public void onClicked(int x, int y, int button) {
StringSelection stringSelection = new StringSelection(textfield.text);
Clipboard clpbrd = Toolkit.getDefaultToolkit().getSystemClipboard();
clpbrd.setContents(stringSelection, null);
}
});
controls.add(new GuiButton("Export model", 43, 52, 100) {
@Override
public void onClicked(int x, int y, int button) {
if (this.caption.equals("Export model")) {
ItemStack stack = ((SubContainerExport) container).slot.getStackInSlot(0);
if (stack != null && (PlacementHelper.isLittleBlock(stack) || stack.getItem() instanceof ItemRecipe)) {
textfield.text = StructureStringUtils.exportModel(stack);
this.caption = "Export structure";
this.customTooltip.clear();
this.customTooltip.add("Export structure instead,");
this.customTooltip.add("can be imported again!");
} else
textfield.text = "";
} else {
ItemStack stack = ((SubContainerExport) container).slot.getStackInSlot(0);
if (stack != null && (PlacementHelper.isLittleBlock(stack) || stack.getItem() instanceof ItemRecipe)) {
textfield.text = StructureStringUtils.exportStructure(stack);
this.caption = "Export model";
this.customTooltip.clear();
this.customTooltip.add("Export minecraft model instead,");
this.customTooltip.add("cannot be imported again!");
} else
textfield.text = "";
}
}
}.setCustomTooltip("Export minecraft model instead.", "CANNOT be imported again!"));
}
@CustomEventSubscribe
public void onSlotChange(SlotChangeEvent event) {
ItemStack stack = ((SubContainerExport) container).slot.getStackInSlot(0);
if (stack != null && (PlacementHelper.isLittleBlock(stack) || stack.getItem() instanceof ItemRecipe)) {
textfield.text = StructureStringUtils.exportStructure(stack);
} else
textfield.text = "";
}
}
|
Changed button to combobox in export gui
|
src/main/java/com/creativemd/littletiles/common/gui/SubGuiExport.java
|
Changed button to combobox in export gui
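The rework in the diff below replaces a self-mutating toggle button with a combo box whose index picks the exporter inside updateTextfield(). A hedged sketch of that index-to-exporter dispatch in plain Java (illustration only; the functional style and names are invented, the mod itself calls StructureStringUtils directly):

import java.util.Arrays;
import java.util.List;
import java.util.function.UnaryOperator;

public class ExportDispatchSketch {
    public static void main(String[] args) {
        // index 0 = structure export, index 1 = model export, matching the combo box entries
        List<UnaryOperator<String>> exporters = Arrays.asList(
                stack -> "structure<" + stack + ">",
                stack -> "model<" + stack + ">");
        int selectedIndex = 1;                        // would come from GuiComboBox.index
        System.out.println(exporters.get(selectedIndex).apply("little_tile"));
    }
}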
|
<ide><path>rc/main/java/com/creativemd/littletiles/common/gui/SubGuiExport.java
<ide> import java.awt.Toolkit;
<ide> import java.awt.datatransfer.Clipboard;
<ide> import java.awt.datatransfer.StringSelection;
<add>import java.util.ArrayList;
<add>import java.util.List;
<ide>
<ide> import com.creativemd.creativecore.common.gui.container.SubGui;
<ide> import com.creativemd.creativecore.common.gui.controls.gui.GuiButton;
<add>import com.creativemd.creativecore.common.gui.controls.gui.GuiComboBox;
<ide> import com.creativemd.creativecore.common.gui.controls.gui.GuiTextfield;
<ide> import com.creativemd.creativecore.common.gui.event.container.SlotChangeEvent;
<add>import com.creativemd.creativecore.common.gui.event.gui.GuiControlChangedEvent;
<ide> import com.creativemd.littletiles.common.container.SubContainerExport;
<ide> import com.creativemd.littletiles.common.items.ItemRecipe;
<ide> import com.creativemd.littletiles.common.utils.converting.StructureStringUtils;
<ide> }
<ide> });
<ide>
<del> controls.add(new GuiButton("Export model", 43, 52, 100) {
<del>
<del> @Override
<del> public void onClicked(int x, int y, int button) {
<del> if (this.caption.equals("Export model")) {
<del> ItemStack stack = ((SubContainerExport) container).slot.getStackInSlot(0);
<del> if (stack != null && (PlacementHelper.isLittleBlock(stack) || stack.getItem() instanceof ItemRecipe)) {
<del> textfield.text = StructureStringUtils.exportModel(stack);
<del> this.caption = "Export structure";
<del> this.customTooltip.clear();
<del> this.customTooltip.add("Export structure instead,");
<del> this.customTooltip.add("can be imported again!");
<del> } else
<del> textfield.text = "";
<del>
<del> } else {
<del> ItemStack stack = ((SubContainerExport) container).slot.getStackInSlot(0);
<del> if (stack != null && (PlacementHelper.isLittleBlock(stack) || stack.getItem() instanceof ItemRecipe)) {
<del> textfield.text = StructureStringUtils.exportStructure(stack);
<del> this.caption = "Export model";
<del> this.customTooltip.clear();
<del> this.customTooltip.add("Export minecraft model instead,");
<del> this.customTooltip.add("cannot be imported again!");
<del> } else
<del> textfield.text = "";
<del> }
<del> }
<del> }.setCustomTooltip("Export minecraft model instead.", "CANNOT be imported again!"));
<add> List<String> lines = new ArrayList<>();
<add> lines.add("structure");
<add> lines.add("model");
<add> controls.add(new GuiComboBox("type", 43, 52, 100, lines));
<add> }
<add>
<add> public void updateTextfield() {
<add> ItemStack stack = ((SubContainerExport) container).slot.getStackInSlot(0);
<add> if (stack != null && (PlacementHelper.isLittleBlock(stack) || stack.getItem() instanceof ItemRecipe)) {
<add> GuiComboBox box = (GuiComboBox) get("type");
<add> if (box.index == 0)
<add> textfield.text = StructureStringUtils.exportStructure(stack);
<add> else
<add> textfield.text = StructureStringUtils.exportModel(stack);
<add> ;
<add> } else
<add> textfield.text = "";
<add> }
<add>
<add> @CustomEventSubscribe
<add> public void onSelectionChanged(GuiControlChangedEvent event) {
<add> updateTextfield();
<ide> }
<ide>
<ide> @CustomEventSubscribe
<ide> public void onSlotChange(SlotChangeEvent event) {
<del> ItemStack stack = ((SubContainerExport) container).slot.getStackInSlot(0);
<del> if (stack != null && (PlacementHelper.isLittleBlock(stack) || stack.getItem() instanceof ItemRecipe)) {
<del> textfield.text = StructureStringUtils.exportStructure(stack);
<del> } else
<del> textfield.text = "";
<add> updateTextfield();
<ide> }
<ide>
<ide> }
|
|
Java
|
apache-2.0
|
0889919a08bb577a2b7e1d4ca5ce345b125a43a7
| 0 |
bearprada/cwac-camera,doo/cwac-camera,commonsguy/cwac-camera,kolipass/cwac-camera,Heart2009/cwac-camera,newtonker/cwac-camera,paulpv/cwac-camera,cookbrite/cwac-camera,scana/cwac-camera
|
/***
Copyright (c) 2013 CommonsWare, LLC
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.commonsware.cwac.camera;
import android.hardware.Camera;
import android.hardware.Camera.Size;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
public class CameraUtils {
// based on ApiDemos
private static final double ASPECT_TOLERANCE=0.1;
public static Camera.Size getOptimalPreviewSize(int displayOrientation,
int width,
int height,
Camera.Parameters parameters) {
double targetRatio=(double)width / height;
List<Camera.Size> sizes=parameters.getSupportedPreviewSizes();
Camera.Size optimalSize=null;
double minDiff=Double.MAX_VALUE;
int targetHeight=height;
if (displayOrientation == 90 || displayOrientation == 270) {
targetRatio=(double)height / width;
}
        // Try to find a size that matches both the aspect ratio and the target size
for (Size size : sizes) {
double ratio=(double)size.width / size.height;
if (Math.abs(ratio - targetRatio) <= ASPECT_TOLERANCE) {
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize=size;
minDiff=Math.abs(size.height - targetHeight);
}
}
}
        // Cannot find a size that matches the aspect ratio; ignore
        // the requirement
if (optimalSize == null) {
minDiff=Double.MAX_VALUE;
for (Size size : sizes) {
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize=size;
minDiff=Math.abs(size.height - targetHeight);
}
}
}
return(optimalSize);
}
public static Camera.Size getBestAspectPreviewSize(int displayOrientation,
int width,
int height,
Camera.Parameters parameters) {
return(getBestAspectPreviewSize(displayOrientation, width, height,
parameters, 0.0d));
}
public static Camera.Size getBestAspectPreviewSize(int displayOrientation,
int width,
int height,
Camera.Parameters parameters,
double closeEnough) {
double targetRatio=(double)width / height;
Camera.Size optimalSize=null;
double minDiff=Double.MAX_VALUE;
if (displayOrientation == 90 || displayOrientation == 270) {
targetRatio=(double)height / width;
}
List<Size> sizes=parameters.getSupportedPreviewSizes();
Collections.sort(sizes,
Collections.reverseOrder(new SizeComparator()));
for (Size size : sizes) {
double ratio=(double)size.width / size.height;
if (Math.abs(ratio - targetRatio) < minDiff) {
optimalSize=size;
minDiff=Math.abs(ratio - targetRatio);
}
if (minDiff < closeEnough) {
break;
}
}
return(optimalSize);
}
public static Camera.Size getLargestPictureSize(CameraHost host,
Camera.Parameters parameters) {
return(getLargestPictureSize(host, parameters, true));
}
public static Camera.Size getLargestPictureSize(CameraHost host,
Camera.Parameters parameters,
boolean enforceProfile) {
Camera.Size result=null;
for (Camera.Size size : parameters.getSupportedPictureSizes()) {
// android.util.Log.d("CWAC-Camera",
// String.format("%d x %d", size.width, size.height));
if (!enforceProfile
|| (size.height <= host.getDeviceProfile()
.getMaxPictureHeight() && size.height >= host.getDeviceProfile()
.getMinPictureHeight())) {
if (result == null) {
result=size;
}
else {
int resultArea=result.width * result.height;
int newArea=size.width * size.height;
if (newArea > resultArea) {
result=size;
}
}
}
}
if (result == null && enforceProfile) {
result=getLargestPictureSize(host, parameters, false);
}
return(result);
}
public static Camera.Size getSmallestPictureSize(Camera.Parameters parameters) {
Camera.Size result=null;
for (Camera.Size size : parameters.getSupportedPictureSizes()) {
if (result == null) {
result=size;
}
else {
int resultArea=result.width * result.height;
int newArea=size.width * size.height;
if (newArea < resultArea) {
result=size;
}
}
}
return(result);
}
public static String findBestFlashModeMatch(Camera.Parameters params,
String... modes) {
String match=null;
List<String> flashModes=params.getSupportedFlashModes();
if (flashModes != null) {
for (String mode : modes) {
if (flashModes.contains(mode)) {
match=mode;
break;
}
}
}
return(match);
}
private static class SizeComparator implements
Comparator<Camera.Size> {
@Override
public int compare(Size lhs, Size rhs) {
int left=lhs.width * lhs.height;
int right=rhs.width * rhs.height;
if (left < right) {
return(-1);
}
else if (left > right) {
return(1);
}
return(0);
}
}
}
|
camera/src/com/commonsware/cwac/camera/CameraUtils.java
|
/***
Copyright (c) 2013 CommonsWare, LLC
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.commonsware.cwac.camera;
import android.hardware.Camera;
import android.hardware.Camera.Size;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
public class CameraUtils {
// based on ApiDemos
private static final double ASPECT_TOLERANCE=0.1;
public static Camera.Size getOptimalPreviewSize(int displayOrientation,
int width,
int height,
Camera.Parameters parameters) {
double targetRatio=(double)width / height;
List<Camera.Size> sizes=parameters.getSupportedPreviewSizes();
Camera.Size optimalSize=null;
double minDiff=Double.MAX_VALUE;
int targetHeight=height;
if (displayOrientation == 90 || displayOrientation == 270) {
targetRatio=(double)height / width;
}
        // Try to find a size that matches both the aspect ratio and the target size
for (Size size : sizes) {
double ratio=(double)size.width / size.height;
if (Math.abs(ratio - targetRatio) <= ASPECT_TOLERANCE) {
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize=size;
minDiff=Math.abs(size.height - targetHeight);
}
}
}
        // Cannot find a size that matches the aspect ratio; ignore
        // the requirement
if (optimalSize == null) {
minDiff=Double.MAX_VALUE;
for (Size size : sizes) {
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize=size;
minDiff=Math.abs(size.height - targetHeight);
}
}
}
return(optimalSize);
}
public static Camera.Size getBestAspectPreviewSize(int displayOrientation,
int width,
int height,
Camera.Parameters parameters) {
return(getBestAspectPreviewSize(displayOrientation, width, height,
parameters, 0.0d));
}
public static Camera.Size getBestAspectPreviewSize(int displayOrientation,
int width,
int height,
Camera.Parameters parameters,
double closeEnough) {
double targetRatio=(double)width / height;
Camera.Size optimalSize=null;
double minDiff=Double.MAX_VALUE;
if (displayOrientation == 90 || displayOrientation == 270) {
targetRatio=(double)height / width;
}
List<Size> sizes=parameters.getSupportedPreviewSizes();
Collections.sort(sizes,
Collections.reverseOrder(new SizeComparator()));
for (Size size : sizes) {
double ratio=(double)size.width / size.height;
if (Math.abs(ratio - targetRatio) < minDiff) {
optimalSize=size;
minDiff=Math.abs(ratio - targetRatio);
}
if (minDiff < closeEnough) {
break;
}
}
return(optimalSize);
}
public static Camera.Size getLargestPictureSize(CameraHost host,
Camera.Parameters parameters) {
Camera.Size result=null;
for (Camera.Size size : parameters.getSupportedPictureSizes()) {
// android.util.Log.d("CWAC-Camera",
// String.format("%d x %d", size.width, size.height));
if (size.height <= host.getDeviceProfile().getMaxPictureHeight()
&& size.height >= host.getDeviceProfile()
.getMinPictureHeight()) {
if (result == null) {
result=size;
}
else {
int resultArea=result.width * result.height;
int newArea=size.width * size.height;
if (newArea > resultArea) {
result=size;
}
}
}
}
return(result);
}
public static Camera.Size getSmallestPictureSize(Camera.Parameters parameters) {
Camera.Size result=null;
for (Camera.Size size : parameters.getSupportedPictureSizes()) {
if (result == null) {
result=size;
}
else {
int resultArea=result.width * result.height;
int newArea=size.width * size.height;
if (newArea < resultArea) {
result=size;
}
}
}
return(result);
}
public static String findBestFlashModeMatch(Camera.Parameters params,
String... modes) {
String match=null;
List<String> flashModes=params.getSupportedFlashModes();
if (flashModes != null) {
for (String mode : modes) {
if (flashModes.contains(mode)) {
match=mode;
break;
}
}
}
return(match);
}
private static class SizeComparator implements
Comparator<Camera.Size> {
@Override
public int compare(Size lhs, Size rhs) {
int left=lhs.width * lhs.height;
int right=rhs.width * rhs.height;
if (left < right) {
return(-1);
}
else if (left > right) {
return(1);
}
return(0);
}
}
}
|
workaround for issue #159
|
camera/src/com/commonsware/cwac/camera/CameraUtils.java
|
workaround for issue #159
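The issue-159 workaround in the diff below is a "filter by device profile, then retry without the filter" fallback, so getLargestPictureSize() never returns null just because the profile excluded every supported size. A hedged sketch of the same fallback shape on plain integers (illustration only; no Android camera types):

import java.util.Arrays;
import java.util.List;

public class ProfileFallbackSketch {
    static Integer largest(List<Integer> heights, int min, int max, boolean enforceProfile) {
        Integer best = null;
        for (int h : heights) {
            if (!enforceProfile || (h >= min && h <= max)) {
                if (best == null || h > best) {
                    best = h;
                }
            }
        }
        if (best == null && enforceProfile) {
            best = largest(heights, min, max, false);   // nothing matched the profile: retry unconstrained
        }
        return best;
    }

    public static void main(String[] args) {
        List<Integer> supported = Arrays.asList(480, 720, 1080);
        System.out.println(largest(supported, 2000, 3000, true)); // falls back to 1080
    }
}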
|
<ide><path>amera/src/com/commonsware/cwac/camera/CameraUtils.java
<ide>
<ide> public static Camera.Size getLargestPictureSize(CameraHost host,
<ide> Camera.Parameters parameters) {
<add> return(getLargestPictureSize(host, parameters, true));
<add> }
<add>
<add> public static Camera.Size getLargestPictureSize(CameraHost host,
<add> Camera.Parameters parameters,
<add> boolean enforceProfile) {
<ide> Camera.Size result=null;
<ide>
<ide> for (Camera.Size size : parameters.getSupportedPictureSizes()) {
<ide> // android.util.Log.d("CWAC-Camera",
<ide> // String.format("%d x %d", size.width, size.height));
<ide>
<del> if (size.height <= host.getDeviceProfile().getMaxPictureHeight()
<del> && size.height >= host.getDeviceProfile()
<del> .getMinPictureHeight()) {
<add> if (!enforceProfile
<add> || (size.height <= host.getDeviceProfile()
<add> .getMaxPictureHeight() && size.height >= host.getDeviceProfile()
<add> .getMinPictureHeight())) {
<ide> if (result == null) {
<ide> result=size;
<ide> }
<ide> }
<ide> }
<ide> }
<add> }
<add>
<add> if (result == null && enforceProfile) {
<add> result=getLargestPictureSize(host, parameters, false);
<ide> }
<ide>
<ide> return(result);
|
|
Java
|
mit
|
4a4044484ead4563fcb74f07284bae0b7b7966a4
| 0 |
c19354837/ninty
|
package com.ninty.runtime.heap;
import com.ninty.classfile.AnnotationAttr;
import com.ninty.classfile.AttributeInfo;
import com.ninty.classfile.ClassFile;
import com.ninty.classfile.MemberInfo;
import com.ninty.runtime.LocalVars;
import com.ninty.runtime.NiFrame;
import com.ninty.runtime.NiThread;
import com.ninty.runtime.heap.constantpool.NiConstantPool;
import com.ninty.utils.VMUtils;
/**
* Created by ninty on 2017/7/23.
*/
public class NiClass {
int accessFlags; //u2
String className;
String superClassName;
String[] interfaceNames;
NiClass superClass;
NiClass[] interfaces;
private NiConstantPool cps;
private NiField[] fields;
private NiMethod[] methods;
private AttributeInfo[] attributeInfos;
private byte[] annotationDatas;
private String sourceFile;
NiClassLoader loader;
LocalVars staticSlots;
int instantceSlotCount;
int staticSlotCount;
private boolean clinit;
private NiObject jClass; // class's class, className=java/lang/Class
public NiClass() {
}
public NiClass(ClassFile classFile) {
accessFlags = classFile.getAccessFlags();
className = classFile.getClassName();
superClassName = classFile.getSuperClassName();
interfaceNames = classFile.getInterfaceNames();
initCP(classFile);
initFiled(classFile);
initMethod(classFile);
initSourceFile(classFile);
initAnnotation(classFile);
}
public NiClass(int accessFlags, String className, String superClassName, String[] interfaceNames) {
this.accessFlags = accessFlags;
this.className = className;
this.superClassName = superClassName;
this.interfaceNames = interfaceNames;
methods = new NiMethod[0];
}
private void initCP(ClassFile classFile) {
cps = new NiConstantPool(this, classFile.getCps());
}
private void initFiled(ClassFile classFile) {
MemberInfo[] fieldInfos = classFile.getFieldInfos();
fields = new NiField[fieldInfos.length];
for (int i = 0; i < fieldInfos.length; i++) {
fields[i] = new NiField(this, fieldInfos[i]);
}
}
private void initMethod(ClassFile classFile) {
MemberInfo[] methodInfos = classFile.getMethodInfos();
methods = new NiMethod[methodInfos.length];
for (int i = 0; i < methodInfos.length; i++) {
methods[i] = new NiMethod(this, methodInfos[i]);
}
}
private void initSourceFile(ClassFile classFile) {
AttributeInfo[] attributeInfos = classFile.getAttributeInfos();
for (int i = 0; i < attributeInfos.length; i++) {
if (attributeInfos[i] instanceof AttributeInfo.AttrSourceFile) {
sourceFile = ((AttributeInfo.AttrSourceFile) attributeInfos[i]).sourceFile;
return;
}
}
sourceFile = "unknown";
}
private void initAnnotation(ClassFile classFile) {
attributeInfos = classFile.getAttributeInfos();
for (int i = 0; i < attributeInfos.length; i++) {
if (attributeInfos[i] instanceof AnnotationAttr.RuntimeVisibleAnnotations) {
annotationDatas = ((AnnotationAttr.RuntimeVisibleAnnotations) attributeInfos[i]).annotationDatas;
}
}
}
public NiMethod getMainMethod() {
return getMethod("main", "([Ljava/lang/String;)V");
}
public NiMethod getClinitMethod() {
return getMethod("<clinit>", "()V");
}
public NiMethod getDefaultInitMethod() {
return getInitMethod("()V");
}
public NiMethod getInitMethod(String desc) {
return getMethod("<init>", desc);
}
public NiMethod getToStringMethod() {
return getMethod("toString", "()Ljava/lang/String;");
}
public NiMethod getMethod(String name, String desc) {
for (NiMethod method : methods) {
if (method.getName().equals(name) && method.getDesc().equals(desc)) {
return method;
}
}
return null;
}
public NiField findField(String name, String desc) {
for (NiField field : fields) {
if (field.getName().equals(name) && field.getDesc().equals(desc)) {
return field;
}
}
return null;
}
public boolean isAssignableFrom(NiClass clz) {
NiClass s = clz;
NiClass t = this;
if (s == t) {
return true;
}
if (s.isArray()) {
if (t.isArray()) {
NiClass sc = s.componentClass();
NiClass tc = t.componentClass();
return sc == tc || tc.isAssignableFrom(sc);
} else {
if (t.isInterface()) {
return t.className.equals("java/lang/Cloneable") || t.className.equals("java/io/Serializable");
} else {
return t.className.equals("java/lang/Object");
}
}
} else {
if (s.isInterface()) {
if (t.isInterface()) {
return s.isSubClass(t);
} else {
return t.className.equals("java/lang/Object");
}
} else {
if (t.isInterface()) {
return s.isImplements(t); // s implements t
} else {
return t.isSubClass(s); // s extends t
}
}
}
}
public boolean isSame(NiClass clz) {
if (clz == null) {
return false;
}
return clz.className.equals(className) && clz.loader == loader;
}
/**
* this implements clz
*/
public boolean isImplements(NiClass clz) {
for (NiClass c = this; c != null; c = c.superClass) {
if (c == clz || c.isSubInterfaceOf(clz)) {
return true;
}
}
return false;
}
public boolean isSubInterfaceOf(NiClass c) {
for (NiClass interf : this.interfaces) {
if (interf == c || interf.isSubInterfaceOf(c)) {
return true;
}
}
return false;
}
public NiObject newObject() {
return new NiObject(this, instantceSlotCount);
}
public boolean clinit(NiThread thread) {
clinit = true;
boolean result1 = scheduleClinit(thread);
boolean result2 = initSuperClass(thread);
return result1 || result2;
}
private boolean scheduleClinit(NiThread thread) {
NiMethod clinitMethod = getClinitMethod();
if (clinitMethod != null) {
thread.pushFrame(new NiFrame(clinitMethod));
return true;
}
return false;
}
private boolean initSuperClass(NiThread thread) {
if (!isInterface() && superClass != null && !superClass.isClinit()) {
return superClass.clinit(thread);
}
return false;
}
public NiObject newArray(int count) {
if (!isArray()) {
throw new IllegalAccessError("Current class is not an array:" + this);
}
switch (className.substring(0, 2)) {
case "[Z":
case "[B":
return new NiObject(this, new byte[count]);
case "[C":
return new NiObject(this, new char[count]);
case "[S":
return new NiObject(this, new short[count]);
case "[I":
return new NiObject(this, new int[count]);
case "[J":
return new NiObject(this, new long[count]);
case "[F":
return new NiObject(this, new float[count]);
case "[D":
return new NiObject(this, new double[count]);
default:
return new NiObject(this, new NiObject[count]);
}
}
public boolean isArray() {
return className.charAt(0) == '[';
}
/**
* convert classname to Array class
*/
public NiClass getArrayClass() {
String className = this.className;
if (!isArray()) {
String type = VMUtils.primitiveTypes.get(className);
if (type == null) {
className = "L" + className + ";";
} else {
className = type;
}
}
className = "[" + className;
return getLoader().loadClass(className);
}
public NiClass componentClass() {
if (isArray()) {
return loader.loadClass(VMUtils.toClassname(className.substring(1)));
}
throw new IllegalAccessError("Current class is not an array:" + this);
}
public String javaName() {
return className.replace('/', '.');
}
public boolean isPublic() {
return (accessFlags & ClassConstant.ACC_PUBLIC) != 0;
}
/**
* clz extend this
*/
public boolean isSubClass(NiClass clz) {
return clz != null && (clz.superClassName != null && clz.superClassName.equals(className) || this.isSubClass
(clz.getSuperClass()));
}
/**
* clz extend this
*/
public boolean isSuperClass(NiClass clz) {
return !isSubClass(clz);
}
public boolean isAbstract() {
return (accessFlags & ClassConstant.ACC_ABSTRACT) != 0;
}
public boolean isInterface() {
return (accessFlags & ClassConstant.ACC_INTERFACE) != 0;
}
private String packageName() {
int indexOf = className.lastIndexOf('/');
return indexOf > -1 ? className.substring(0, indexOf) : "";
}
public boolean isSamePackge(NiClass clz) {
return packageName().equals(clz.packageName());
}
public boolean canAccess(NiClass clz) {
return clz.isPublic() || clz.packageName().equals(this.packageName());
}
public AttributeInfo.BootstrapMethodInfo getBootstrapMethodInfo(int index) {
AttributeInfo.AttrBootstrapMethods bootstrapMethods = null;
for (int i = 0; i < attributeInfos.length; i++) {
if (attributeInfos[i] instanceof AttributeInfo.AttrBootstrapMethods) {
bootstrapMethods = (AttributeInfo.AttrBootstrapMethods) attributeInfos[i];
}
}
if (bootstrapMethods == null) {
throw new ClassFormatError("can not find BootstrapMethods in: " + index + ", className: " + className);
}
return bootstrapMethods.bootstarpMethods[index];
}
public int getStaticInt(String name) {
NiField field = findField(name, "I");
return staticSlots.getInt(field.getSlotId());
}
public NiObject getStaticRef(String name, String desc) {
NiField field = findField(name, desc);
return staticSlots.getRef(field.getSlotId());
}
public void setStaticRef(String name, String desc, NiObject ref) {
NiField field = findField(name, desc);
staticSlots.setRef(field.getSlotId(), ref);
}
public int getAccessFlags() {
return accessFlags;
}
public String getClassName() {
return className;
}
public NiClass getSuperClass() {
return superClass;
}
public NiClass[] getInterfaces() {
return interfaces;
}
public NiField[] getFields() {
return fields;
}
public NiMethod[] getMethods() {
return methods;
}
public NiClassLoader getLoader() {
return loader;
}
public NiConstantPool getCps() {
return cps;
}
public LocalVars getStaticSlots() {
return staticSlots;
}
public NiObject getjClass() {
return jClass;
}
void setjClass(NiObject jClass) {
this.jClass = jClass;
}
public boolean isClinit() {
return clinit;
}
public String getSourceFile() {
return sourceFile;
}
public byte[] getAnnotationDatas() {
return annotationDatas;
}
@Override
public String toString() {
return className;
}
}
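The branches in isAssignableFrom above mirror the assignability rules that java.lang.Class#isAssignableFrom applies to loaded classes, so the plain JDK is a convenient reference point. A standalone check of the same rules (ordinary JDK code, not part of this runtime):

import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

public class AssignabilityDemo {
    public static void main(String[] args) {
        // Interface <- implementing class (the "s implements t" branch).
        System.out.println(List.class.isAssignableFrom(ArrayList.class));     // true
        // Superclass <- subclass (the "s extends t" branch).
        System.out.println(Object.class.isAssignableFrom(String.class));      // true
        // Arrays are covariant for reference component types.
        System.out.println(Object[].class.isAssignableFrom(String[].class));  // true
        // Every array type is assignable to Cloneable and Serializable.
        System.out.println(Cloneable.class.isAssignableFrom(int[].class));    // true
        System.out.println(Serializable.class.isAssignableFrom(int[].class)); // true
        // Primitive arrays are not Object[] instances.
        System.out.println(Object[].class.isAssignableFrom(int[].class));     // false
    }
}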
|
src/main/java/com/ninty/runtime/heap/NiClass.java
|
package com.ninty.runtime.heap;
import com.ninty.classfile.AnnotationAttr;
import com.ninty.classfile.AttributeInfo;
import com.ninty.classfile.ClassFile;
import com.ninty.classfile.MemberInfo;
import com.ninty.runtime.LocalVars;
import com.ninty.runtime.NiFrame;
import com.ninty.runtime.NiThread;
import com.ninty.runtime.heap.constantpool.NiConstantPool;
import com.ninty.utils.VMUtils;
/**
* Created by ninty on 2017/7/23.
*/
public class NiClass {
int accessFlags; //u2
String className;
String superClassName;
String[] interfaceNames;
NiClass superClass;
NiClass[] interfaces;
private NiConstantPool cps;
private NiField[] fields;
private NiMethod[] methods;
private AttributeInfo[] attributeInfos;
private byte[] annotationDatas;
private String sourceFile;
NiClassLoader loader;
LocalVars staticSlots;
int instantceSlotCount;
int staticSlotCount;
private boolean clinit;
private NiObject jClass; // class's class, className=java/lang/Class
public NiClass() {
}
public NiClass(ClassFile classFile) {
accessFlags = classFile.getAccessFlags();
className = classFile.getClassName();
superClassName = classFile.getSuperClassName();
interfaceNames = classFile.getInterfaceNames();
initCP(classFile);
initFiled(classFile);
initMethod(classFile);
initSourceFile(classFile);
initAnnotation(classFile);
}
public NiClass(int accessFlags, String className, String superClassName, String[] interfaceNames) {
this.accessFlags = accessFlags;
this.className = className;
this.superClassName = superClassName;
this.interfaceNames = interfaceNames;
methods = new NiMethod[0];
}
private void initCP(ClassFile classFile) {
cps = new NiConstantPool(this, classFile.getCps());
}
private void initFiled(ClassFile classFile) {
MemberInfo[] fieldInfos = classFile.getFieldInfos();
fields = new NiField[fieldInfos.length];
for (int i = 0; i < fieldInfos.length; i++) {
fields[i] = new NiField(this, fieldInfos[i]);
}
}
private void initMethod(ClassFile classFile) {
MemberInfo[] methodInfos = classFile.getMethodInfos();
methods = new NiMethod[methodInfos.length];
for (int i = 0; i < methodInfos.length; i++) {
methods[i] = new NiMethod(this, methodInfos[i]);
}
}
private void initSourceFile(ClassFile classFile) {
AttributeInfo[] attributeInfos = classFile.getAttributeInfos();
for (int i = 0; i < attributeInfos.length; i++) {
if (attributeInfos[i] instanceof AttributeInfo.AttrSourceFile) {
sourceFile = ((AttributeInfo.AttrSourceFile) attributeInfos[i]).sourceFile;
return;
}
}
sourceFile = "unknown";
}
private void initAnnotation(ClassFile classFile) {
attributeInfos = classFile.getAttributeInfos();
for (int i = 0; i < attributeInfos.length; i++) {
if (attributeInfos[i] instanceof AnnotationAttr.RuntimeVisibleAnnotations) {
annotationDatas = ((AnnotationAttr.RuntimeVisibleAnnotations) attributeInfos[i]).annotationDatas;
}
}
}
public NiMethod getMainMethod() {
return getMethod("main", "([Ljava/lang/String;)V");
}
public NiMethod getClinitMethod() {
return getMethod("<clinit>", "()V");
}
public NiMethod getDefaultInitMethod() {
return getInitMethod("()V");
}
public NiMethod getInitMethod(String desc) {
return getMethod("<init>", desc);
}
public NiMethod getToStringMethod() {
return getMethod("toString", "()Ljava/lang/String;");
}
public NiMethod getMethod(String name, String desc) {
for (NiMethod method : methods) {
if (method.getName().equals(name) && method.getDesc().equals(desc)) {
return method;
}
}
return null;
}
public NiField findField(String name, String desc) {
for (NiField field : fields) {
if (field.getName().equals(name) && field.getDesc().equals(desc)) {
return field;
}
}
return null;
}
public boolean isAssignableFrom(NiClass clz) {
NiClass s = clz;
NiClass t = this;
if (s == t) {
return true;
}
if (s.isArray()) {
if (t.isArray()) {
NiClass sc = s.componentClass();
NiClass tc = t.componentClass();
return sc == tc || tc.isAssignableFrom(sc);
} else {
if (t.isInterface()) {
return t.className.equals("java/lang/Cloneable") || t.className.equals("java/io/Serializable");
} else {
return t.className.equals("java/lang/Object");
}
}
} else {
if (s.isInterface()) {
if (t.isInterface()) {
return s.isSubClass(t);
} else {
return t.className.equals("java/lang/Object");
}
} else {
if (t.isInterface()) {
return s.isImplements(t); // s implements t
} else {
return t.isSubClass(s); // s extends t
}
}
}
}
public boolean isSame(NiClass clz) {
if (clz == null) {
return false;
}
return clz.className.equals(className) && clz.loader == loader;
}
/**
* this implements clz
*/
public boolean isImplements(NiClass clz) {
for (NiClass c = this; c != null; c = c.superClass) {
if (c == clz || c.isSubInterfaceOf(clz)) {
return true;
}
}
return false;
}
public boolean isSubInterfaceOf(NiClass c) {
for (NiClass interf : this.interfaces) {
if (interf == c || interf.isSubInterfaceOf(c)) {
return true;
}
}
return false;
}
public NiObject newObject() {
return new NiObject(this, instantceSlotCount);
}
public boolean clinit(NiThread thread) {
clinit = true;
boolean result1 = scheduleClinit(thread);
boolean result2 = initSuperClass(thread);
return result1 || result2;
}
private boolean scheduleClinit(NiThread thread) {
NiMethod clinitMethod = getClinitMethod();
if (clinitMethod != null) {
thread.pushFrame(new NiFrame(clinitMethod));
return true;
}
return false;
}
private boolean initSuperClass(NiThread thread) {
if (!isInterface() && superClass != null && !superClass.isClinit()) {
return superClass.clinit(thread);
}
return false;
}
public NiObject newArray(int count) {
if (!isArray()) {
throw new IllegalAccessError("Current class is not an array:" + this);
}
switch (className.substring(0, 2)) {
case "[Z":
return new NiObject(this, new byte[count]);
case "[B":
return new NiObject(this, new byte[count]);
case "[C":
return new NiObject(this, new char[count]);
case "[S":
return new NiObject(this, new short[count]);
case "[I":
return new NiObject(this, new int[count]);
case "[J":
return new NiObject(this, new long[count]);
case "[F":
return new NiObject(this, new float[count]);
case "[D":
return new NiObject(this, new double[count]);
default:
return new NiObject(this, new NiObject[count]);
}
}
public boolean isArray() {
return className.charAt(0) == '[';
}
/**
* convert classname to Array class
*/
public NiClass getArrayClass() {
String className = this.className;
if (!isArray()) {
String type = VMUtils.primitiveTypes.get(className);
if (type == null) {
className = "L" + className + ";";
} else {
className = type;
}
}
className = "[" + className;
return getLoader().loadClass(className);
}
public NiClass componentClass() {
if (isArray()) {
return loader.loadClass(VMUtils.toClassname(className.substring(1)));
}
throw new IllegalAccessError("Current class is not an array:" + this);
}
public String javaName() {
return className.replace('/', '.');
}
public boolean isPublic() {
return (accessFlags & ClassConstant.ACC_PUBLIC) != 0;
}
/**
* clz extend this
*/
public boolean isSubClass(NiClass clz) {
return clz != null && (clz.superClassName != null && clz.superClassName.equals(className) || this.isSubClass
(clz.getSuperClass()));
}
/**
* clz extend this
*/
public boolean isSuperClass(NiClass clz) {
return !isSubClass(clz);
}
public boolean isAbstract() {
return (accessFlags & ClassConstant.ACC_ABSTRACT) != 0;
}
public boolean isInterface() {
return (accessFlags & ClassConstant.ACC_INTERFACE) != 0;
}
private String packageName() {
int indexOf = className.lastIndexOf('/');
return indexOf > -1 ? className.substring(0, indexOf) : "";
}
public boolean isSamePackge(NiClass clz) {
return packageName().equals(clz.packageName());
}
public boolean canAccess(NiClass clz) {
return clz.isPublic() || clz.packageName().equals(this.packageName());
}
public AttributeInfo.BootstrapMethodInfo getBootstrapMethodInfo(int index) {
AttributeInfo.AttrBootstrapMethods bootstrapMethods = null;
for (int i = 0; i < attributeInfos.length; i++) {
if (attributeInfos[i] instanceof AttributeInfo.AttrBootstrapMethods) {
bootstrapMethods = (AttributeInfo.AttrBootstrapMethods) attributeInfos[i];
}
}
if (bootstrapMethods == null) {
throw new ClassFormatError("can not find BootstrapMethods in: " + index + ", className: " + className);
}
return bootstrapMethods.bootstarpMethods[index];
}
public int getStaticInt(String name) {
NiField field = findField(name, "I");
return staticSlots.getInt(field.getSlotId());
}
public NiObject getStaticRef(String name, String desc) {
NiField field = findField(name, desc);
return staticSlots.getRef(field.getSlotId());
}
public void setStaticRef(String name, String desc, NiObject ref) {
NiField field = findField(name, desc);
staticSlots.setRef(field.getSlotId(), ref);
}
public int getAccessFlags() {
return accessFlags;
}
public String getClassName() {
return className;
}
public NiClass getSuperClass() {
return superClass;
}
public NiClass[] getInterfaces() {
return interfaces;
}
public NiField[] getFields() {
return fields;
}
public NiMethod[] getMethods() {
return methods;
}
public NiClassLoader getLoader() {
return loader;
}
public NiConstantPool getCps() {
return cps;
}
public LocalVars getStaticSlots() {
return staticSlots;
}
public NiObject getjClass() {
return jClass;
}
void setjClass(NiObject jClass) {
this.jClass = jClass;
}
public boolean isClinit() {
return clinit;
}
public String getSourceFile() {
return sourceFile;
}
public byte[] getAnnotationDatas() {
return annotationDatas;
}
@Override
public String toString() {
return className;
}
}
|
delete duplicate code
|
src/main/java/com/ninty/runtime/heap/NiClass.java
|
delete duplicate code
|
<ide><path>rc/main/java/com/ninty/runtime/heap/NiClass.java
<ide> }
<ide> switch (className.substring(0, 2)) {
<ide> case "[Z":
<del> return new NiObject(this, new byte[count]);
<ide> case "[B":
<ide> return new NiObject(this, new byte[count]);
<ide> case "[C":
|
|
Java
|
mit
|
93ab1979846e5f2ceb26dbe14c14c44679f8f4b1
| 0 |
jiguoling/Android-Plugin-Framework,limpoxe/Android-Plugin-Framework,GKerison/Android-Plugin-Framework,tempbottle/Android-Plugin-Framework,AronXue/Android-Plugin-Framework,shper/Android-Plugin-Framework,limpoxe/Android-Plugin-Framework
|
package com.plugin.core;
import java.io.File;
import java.lang.reflect.Method;
import android.app.Activity;
import android.app.Application;
import android.content.Context;
import android.content.res.AssetManager;
import android.content.res.Resources;
import android.os.Build;
import android.util.Log;
import dalvik.system.DexClassLoader;
public class PluginCreator {
private static final String LOG_TAG = PluginCreator.class.getSimpleName();
private PluginCreator() {
}
/**
* Create the plugin dex ClassLoader from the plugin apk file
*
* @param absolutePluginApkPath
* path of the plugin apk file
* @return
*/
public static DexClassLoader createPluginClassLoader(String absolutePluginApkPath) {
return new DexClassLoader(absolutePluginApkPath, new File(absolutePluginApkPath).getParent(), null,
PluginLoader.class.getClassLoader());
}
/**
* Create the plugin Resources from the plugin apk file and bind the host application's resources at the same time, so the plugin can use the host application's resources.
*
* @param application
* the host application's Application
* @param absolutePluginApkPath
* path of the plugin apk file
* @return
*/
public static Resources createPluginResource(Application application, String absolutePluginApkPath) {
try {
AssetManager assetMgr = AssetManager.class.newInstance();
Method addAssetPaths = AssetManager.class.getDeclaredMethod("addAssetPaths", String[].class);
String[] assetPaths = new String[2];
// Do not change this order, otherwise it breaks compatibility with Android 4.x
assetPaths[0] = application.getApplicationInfo().sourceDir;
assetPaths[1] = absolutePluginApkPath;
addAssetPaths.invoke(assetMgr, new Object[] { assetPaths });
Resources mainRes = application.getResources();
Resources pluginRes = new Resources(assetMgr, mainRes.getDisplayMetrics(), mainRes.getConfiguration());
Log.e(LOG_TAG, "create Plugin Resource from: " + assetPaths[0] + ", " + assetPaths[1]);
return pluginRes;
} catch (Exception e) {
e.printStackTrace();
}
return null;
}
/**
* Create the Context of the plugin apk.
* If the plugin runs inside an ordinary Activity, every place in the plugin that needs a Context must use the Context returned by this method
*
* @param application
* @param pluginRes
* @param pluginClassLoader
* @return
*/
public static Context createPluginApplicationContext(Application application, Resources pluginRes,
DexClassLoader pluginClassLoader) {
return new PluginContextTheme(application, pluginRes, pluginClassLoader);
}
/**
* Create the Activity context of the plugin apk.
* If this Context replaces the baseContext of the host application's Activity, Context usage inside the plugin is exactly the same as in non-plugin development,
* i.e. plugin code looks no different from ordinary application code. Because an Activity is passed in, never keep a long-lived reference to the returned value, to avoid leaks
*
* @param activity
* @param pluginContext
* @return
*/
public static Context createPluginActivityContext(Activity activity, Context pluginContext) {
return new PluginContextTheme(activity, pluginContext.getResources(), pluginContext.getClassLoader());
}
}
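A sketch of how these factory methods might be combined when a plugin apk is loaded. The apk location, the host Application and the host Activity below are placeholders supplied by the surrounding framework; error handling is omitted.

// Hypothetical wiring: hostApplication and hostActivity are assumptions, not part of this file.
String apkPath = hostApplication.getFileStreamPath("plugin.apk").getAbsolutePath();
DexClassLoader pluginClassLoader = PluginCreator.createPluginClassLoader(apkPath);
Resources pluginRes = PluginCreator.createPluginResource(hostApplication, apkPath);

// Context for plugin code that runs outside an Activity.
Context pluginAppContext =
    PluginCreator.createPluginApplicationContext(hostApplication, pluginRes, pluginClassLoader);

// Context bound to a real host Activity; do not keep a long-lived reference to it.
Context pluginActivityContext =
    PluginCreator.createPluginActivityContext(hostActivity, pluginAppContext);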
|
PluginCore/src/com/plugin/core/PluginCreator.java
|
package com.plugin.core;
import java.io.File;
import java.lang.reflect.Method;
import android.app.Activity;
import android.app.Application;
import android.content.Context;
import android.content.res.AssetManager;
import android.content.res.Resources;
import android.os.Build;
import android.util.Log;
import dalvik.system.DexClassLoader;
public class PluginCreator {
private static final String LOG_TAG = PluginCreator.class.getSimpleName();
private PluginCreator() {
}
/**
* Create the plugin dex ClassLoader from the plugin apk file
*
* @param absolutePluginApkPath
* path of the plugin apk file
* @return
*/
public static DexClassLoader createPluginClassLoader(String absolutePluginApkPath) {
return new DexClassLoader(absolutePluginApkPath, new File(absolutePluginApkPath).getParent(), null,
PluginLoader.class.getClassLoader());
}
/**
* Create the plugin Resources from the plugin apk file and bind the host application's resources at the same time, so the plugin can use the host application's resources.
*
* @param application
* the host application's Application
* @param absolutePluginApkPath
* path of the plugin apk file
* @return
*/
public static Resources createPluginResource(Application application, String absolutePluginApkPath) {
try {
AssetManager assetMgr = AssetManager.class.newInstance();
Method addAssetPaths = AssetManager.class.getDeclaredMethod("addAssetPaths", String[].class);
String[] assetPaths = new String[2];
// 5.x or 2.x
if (Build.VERSION.SDK_INT > 20 || Build.VERSION.SDK_INT < 14) {
assetPaths[0] = absolutePluginApkPath;
assetPaths[1] = application.getApplicationInfo().sourceDir;
} else {
// 4.x
assetPaths[0] = application.getApplicationInfo().sourceDir;
assetPaths[1] = absolutePluginApkPath;
}
addAssetPaths.invoke(assetMgr, new Object[] { assetPaths });
Resources mainRes = application.getResources();
Resources pluginRes = new Resources(assetMgr, mainRes.getDisplayMetrics(), mainRes.getConfiguration());
Log.e(LOG_TAG, "create Plugin Resource from: " + assetPaths[0] + ", " + assetPaths[1]);
return pluginRes;
} catch (Exception e) {
e.printStackTrace();
}
return null;
}
/**
* Create the Context of the plugin apk.
* If the plugin runs inside an ordinary Activity, every place in the plugin that needs a Context must use the Context returned by this method
*
* @param application
* @param pluginRes
* @param pluginClassLoader
* @return
*/
public static Context createPluginApplicationContext(Application application, Resources pluginRes,
DexClassLoader pluginClassLoader) {
return new PluginContextTheme(application, pluginRes, pluginClassLoader);
}
/**
* Create the Activity context of the plugin apk.
* If this Context replaces the baseContext of the host application's Activity, Context usage inside the plugin is exactly the same as in non-plugin development,
* i.e. plugin code looks no different from ordinary application code. Because an Activity is passed in, never keep a long-lived reference to the returned value, to avoid leaks
*
* @param activity
* @param pluginContext
* @return
*/
public static Context createPluginActivityContext(Activity activity, Context pluginContext) {
return new PluginContextTheme(activity, pluginContext.getResources(), pluginContext.getClassLoader());
}
}
|
update
Signed-off-by: Cai Liming <[email protected]>
|
PluginCore/src/com/plugin/core/PluginCreator.java
|
update
|
<ide><path>luginCore/src/com/plugin/core/PluginCreator.java
<ide>
<ide> String[] assetPaths = new String[2];
<ide>
<del> // 5.x or 2.x
<del> if (Build.VERSION.SDK_INT > 20 || Build.VERSION.SDK_INT < 14) {
<del> assetPaths[0] = absolutePluginApkPath;
<del> assetPaths[1] = application.getApplicationInfo().sourceDir;
<del> } else {
<del> // 4.x
<del> assetPaths[0] = application.getApplicationInfo().sourceDir;
<del> assetPaths[1] = absolutePluginApkPath;
<del> }
<add> // Do not change this order, otherwise it breaks compatibility with Android 4.x
<add> assetPaths[0] = application.getApplicationInfo().sourceDir;
<add> assetPaths[1] = absolutePluginApkPath;
<ide>
<ide> addAssetPaths.invoke(assetMgr, new Object[] { assetPaths });
<ide>
|
|
Java
|
mit
|
error: pathspec 'Rectangle_Area.java' did not match any file(s) known to git
|
b08de64b61db902dfb3181f6b74b4ed1a51bbe72
| 1 |
antonio081014/LeetCode-CodeBase,antonio081014/LeetCode-CodeBase,antonio081014/LeetCode-CodeBase,antonio081014/LeeCode-CodeBase
|
public class Solution {
public int computeArea(int A, int B, int C, int D, int E, int F, int G, int H) {
if(C<E||G<A )
return (G-E)*(H-F) + (C-A)*(D-B);
if(D<F || H<B)
return (G-E)*(H-F) + (C-A)*(D-B);
int right = Math.min(C,G);
int left = Math.max(A,E);
int top = Math.min(H,D);
int bottom = Math.max(F,B);
return (G-E)*(H-F) + (C-A)*(D-B) - (right-left)*(top-bottom);
}
}
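A worked check of the formula total = areaA + areaB - overlap, where the overlap edges are the inner bounds of the two rectangles. For A=(-3,0)..(3,4) and B=(0,-1)..(9,2) the areas are 24 and 27 and the overlap is (3-0)*(2-0)=6, giving 45; disjoint rectangles simply add up. A small harness against the Solution class above:

public class RectangleAreaDemo {
    public static void main(String[] args) {
        Solution s = new Solution();
        // Overlapping rectangles: 24 + 27 - 6 = 45.
        System.out.println(s.computeArea(-3, 0, 3, 4, 0, -1, 9, 2)); // 45
        // Disjoint rectangles: 1 + 1 = 2.
        System.out.println(s.computeArea(0, 0, 1, 1, 2, 2, 3, 3));   // 2
    }
}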
|
Rectangle_Area.java
|
Create Rectangle_Area.java
|
Rectangle_Area.java
|
Create Rectangle_Area.java
|
<ide><path>ectangle_Area.java
<add>public class Solution {
<add> public int computeArea(int A, int B, int C, int D, int E, int F, int G, int H) {
<add> if(C<E||G<A )
<add> return (G-E)*(H-F) + (C-A)*(D-B);
<add>
<add> if(D<F || H<B)
<add> return (G-E)*(H-F) + (C-A)*(D-B);
<add>
<add> int right = Math.min(C,G);
<add> int left = Math.max(A,E);
<add> int top = Math.min(H,D);
<add> int bottom = Math.max(F,B);
<add>
<add> return (G-E)*(H-F) + (C-A)*(D-B) - (right-left)*(top-bottom);
<add> }
<add>}
|
|
Java
|
mit
|
bc213d680b174c90a7a536be11aae6359f165c42
| 0 |
CCI-MIT/XCoLab,CCI-MIT/XCoLab,CCI-MIT/XCoLab,CCI-MIT/XCoLab
|
package org.xcolab.view.pages.contestmanagement.utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xcolab.client.activities.pojo.ActivityEntry;
import org.xcolab.client.members.MembersClient;
import org.xcolab.client.members.exceptions.MemberNotFoundException;
import org.xcolab.client.members.pojo.Member;
import org.xcolab.util.enums.activity.ActivityEntryType;
import org.xcolab.view.activityentry.ActivityEntryHelper;
import org.xcolab.view.util.CsvResponseWriter;
import java.io.IOException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import javax.servlet.http.HttpServletResponse;
public class ActivityCsvWriter extends CsvResponseWriter {
private static final Logger _log = LoggerFactory.getLogger(ActivityCsvWriter.class);
private static final String MEMBER_NOT_FOUND_MESSAGE = "Member not found";
private static final DateFormat DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss");
private static final String FILE_NAME = "activityReport";
private static final List<String> COLUMN_NAMES = Arrays.asList(
"User Id",
"screenName",
"firstName",
"lastName",
"activityType",
"activityCreateDate",
"activityBody"
);
private final ActivityEntryHelper activityEntryHelper;
public ActivityCsvWriter(HttpServletResponse response,
ActivityEntryHelper activityEntryHelper) throws IOException {
super(FILE_NAME, COLUMN_NAMES, response);
this.activityEntryHelper = activityEntryHelper;
}
public void writeActivities(Collection<ActivityEntry> activityEntries) {
activityEntries.forEach(this::writeActivity);
}
public void writeActivity(ActivityEntry activityEntry) {
ActivityEntryType activityType = ActivityEntryType
.getActivityEntryTypeByPrimaryType(activityEntry.getPrimaryType());
if (activityType != null) {
Member member = getMemberOrNull(activityEntry);
List<String> row = new ArrayList<>();
addValue(row, member != null ? member.getId_() : MEMBER_NOT_FOUND_MESSAGE);
addValue(row, member != null ? member.getScreenName() : MEMBER_NOT_FOUND_MESSAGE);
addValue(row, member != null ? member.getFirstName() : MEMBER_NOT_FOUND_MESSAGE);
addValue(row, member != null ? member.getLastName() : MEMBER_NOT_FOUND_MESSAGE);
addValue(row, activityType.name());
addValue(row, DATE_FORMAT.format(activityEntry.getCreateDate()));
addValue(row, activityEntryHelper.getActivityBody(activityEntry));
writeRow(row);
} else {
_log.warn("Unknown ActivityEntryType {} found when generating report",
activityEntry.getPrimaryType());
}
}
private Member getMemberOrNull(ActivityEntry activityEntry) {
try {
return MembersClient.getMember(activityEntry.getMemberId());
} catch (MemberNotFoundException e) {
_log.warn("Member {} not found when generating report", activityEntry.getMemberId());
return null;
}
}
private void addValue(List<String> list, Object value) {
list.add(String.valueOf(value));
}
}
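A hedged sketch of driving the writer from a request handler. The response, the ActivityEntryHelper and the activity collection are assumed to come from the surrounding controller, and the enclosing method is assumed to declare IOException for the constructor call.

// Hypothetical caller; only the constructor and writeActivities shown above are used.
ActivityCsvWriter csvWriter = new ActivityCsvWriter(response, activityEntryHelper);
csvWriter.writeActivities(activityEntries);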
|
view/src/main/java/org/xcolab/view/pages/contestmanagement/utils/ActivityCsvWriter.java
|
package org.xcolab.view.pages.contestmanagement.utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xcolab.client.activities.pojo.ActivityEntry;
import org.xcolab.client.members.MembersClient;
import org.xcolab.client.members.exceptions.MemberNotFoundException;
import org.xcolab.client.members.pojo.Member;
import org.xcolab.util.enums.activity.ActivityEntryType;
import org.xcolab.view.activityentry.ActivityEntryHelper;
import org.xcolab.view.util.CsvResponseWriter;
import java.io.IOException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import javax.servlet.http.HttpServletResponse;
public class ActivityCsvWriter extends CsvResponseWriter {
private static final Logger _log = LoggerFactory.getLogger(ActivityCsvWriter.class);
private static final DateFormat DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss");
private static final String FILE_NAME = "activityReport";
private static final List<String> COLUMN_NAMES = Arrays.asList(
"User Id",
"screenName",
"firstName",
"lastName",
"activityType",
"activityCreateDate",
"activityBody"
);
private final ActivityEntryHelper activityEntryHelper;
public ActivityCsvWriter(HttpServletResponse response,
ActivityEntryHelper activityEntryHelper) throws IOException {
super(FILE_NAME, COLUMN_NAMES, response);
this.activityEntryHelper = activityEntryHelper;
}
public void writeActivities(Collection<ActivityEntry> activityEntries) {
activityEntries.forEach(this::writeActivity);
}
public void writeActivity(ActivityEntry activityEntry) {
ActivityEntryType activityType = ActivityEntryType
.getActivityEntryTypeByPrimaryType(activityEntry.getPrimaryType());
if (activityType != null) {
Member member = getMemberOrNull(activityEntry);
List<String> row = new ArrayList<>();
addValue(row, member != null ? member.getId_() : "Member not found");
addValue(row, member != null ? member.getScreenName() : "Member not found");
addValue(row, member != null ? member.getFirstName() : "Member not found");
addValue(row, member != null ? member.getLastName() : "Member not found");
addValue(row, activityType.name());
addValue(row, DATE_FORMAT.format(activityEntry.getCreateDate()));
addValue(row, activityEntryHelper.getActivityBody(activityEntry));
writeRow(row);
} else {
_log.warn("Unknown ActivityEntryType {} found when generating report",
activityEntry.getPrimaryType());
}
}
private Member getMemberOrNull(ActivityEntry activityEntry) {
try {
return MembersClient.getMember(activityEntry.getMemberId());
} catch (MemberNotFoundException e) {
_log.warn("Member {} not found when generating report", activityEntry.getMemberId());
return null;
}
}
private void addValue(List<String> list, Object value) {
list.add(String.valueOf(value));
}
}
|
[COLAB-2344] Extract constant from repeated String literal
|
view/src/main/java/org/xcolab/view/pages/contestmanagement/utils/ActivityCsvWriter.java
|
[COLAB-2344] Extract constant from repeated String literal
|
<ide><path>iew/src/main/java/org/xcolab/view/pages/contestmanagement/utils/ActivityCsvWriter.java
<ide>
<ide> private static final Logger _log = LoggerFactory.getLogger(ActivityCsvWriter.class);
<ide>
<add> private static final String MEMBER_NOT_FOUND_MESSAGE = "Member not found";
<ide> private static final DateFormat DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss");
<ide> private static final String FILE_NAME = "activityReport";
<ide>
<ide> Member member = getMemberOrNull(activityEntry);
<ide>
<ide> List<String> row = new ArrayList<>();
<del> addValue(row, member != null ? member.getId_() : "Member not found");
<del> addValue(row, member != null ? member.getScreenName() : "Member not found");
<del> addValue(row, member != null ? member.getFirstName() : "Member not found");
<del> addValue(row, member != null ? member.getLastName() : "Member not found");
<add> addValue(row, member != null ? member.getId_() : MEMBER_NOT_FOUND_MESSAGE);
<add> addValue(row, member != null ? member.getScreenName() : MEMBER_NOT_FOUND_MESSAGE);
<add> addValue(row, member != null ? member.getFirstName() : MEMBER_NOT_FOUND_MESSAGE);
<add> addValue(row, member != null ? member.getLastName() : MEMBER_NOT_FOUND_MESSAGE);
<ide> addValue(row, activityType.name());
<ide> addValue(row, DATE_FORMAT.format(activityEntry.getCreateDate()));
<ide> addValue(row, activityEntryHelper.getActivityBody(activityEntry));
|
|
Java
|
apache-2.0
|
54e522195620f2b726da2d3bb40a766cfeff95b9
| 0 |
quarian/dataverse,leeper/dataverse-1,majorseitan/dataverse,majorseitan/dataverse,bmckinney/dataverse-canonical,majorseitan/dataverse,leeper/dataverse-1,ekoi/DANS-DVN-4.6.1,majorseitan/dataverse,JayanthyChengan/dataverse,bmckinney/dataverse-canonical,ekoi/DANS-DVN-4.6.1,JayanthyChengan/dataverse,bmckinney/dataverse-canonical,bmckinney/dataverse-canonical,bmckinney/dataverse-canonical,bmckinney/dataverse-canonical,ekoi/DANS-DVN-4.6.1,jacksonokuhn/dataverse,quarian/dataverse,leeper/dataverse-1,jacksonokuhn/dataverse,quarian/dataverse,bmckinney/dataverse-canonical,jacksonokuhn/dataverse,majorseitan/dataverse,quarian/dataverse,jacksonokuhn/dataverse,JayanthyChengan/dataverse,JayanthyChengan/dataverse,jacksonokuhn/dataverse,leeper/dataverse-1,quarian/dataverse,leeper/dataverse-1,leeper/dataverse-1,majorseitan/dataverse,JayanthyChengan/dataverse,quarian/dataverse,JayanthyChengan/dataverse,leeper/dataverse-1,quarian/dataverse,ekoi/DANS-DVN-4.6.1,majorseitan/dataverse,ekoi/DANS-DVN-4.6.1,jacksonokuhn/dataverse,ekoi/DANS-DVN-4.6.1,JayanthyChengan/dataverse,ekoi/DANS-DVN-4.6.1,jacksonokuhn/dataverse,majorseitan/dataverse,bmckinney/dataverse-canonical,quarian/dataverse,ekoi/DANS-DVN-4.6.1,JayanthyChengan/dataverse,leeper/dataverse-1,jacksonokuhn/dataverse
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package edu.harvard.iq.dataverse.mydata;
import static java.lang.Math.max;
import static java.lang.Math.min;
/**
*
* @author rmp553
*/
public class Pager {
/* inputs */
public int numResults;
public int docsPerPage = 10;
public int selectedPageNumber = 1;
/* calculated */
public int pageCount;
public int[] pageNumberList;
public int previousPageNumber;
public int nextPageNumber;
public int startCardNumber;
public int endCardNumber;
public Pager(int numResults, int docsPerPage, int selectedPageNumber) {
if (numResults < 0){
throw new IllegalArgumentException("numResults must be 0 or higher");
}
if (docsPerPage < 1){
throw new IllegalArgumentException("docsPerPage must be 1 or higher");
}
if (selectedPageNumber < 1){
throw new IllegalArgumentException("selectedPageNumber must be 1 or higher");
}
this.numResults = numResults;
this.docsPerPage = docsPerPage;
this.selectedPageNumber = selectedPageNumber;
makePageStats();
}
private void makePageStats(){
// page count
this.pageCount = numResults / docsPerPage;
if ((this.numResults % this.docsPerPage) > 0){
this.pageCount += 1;
}
// Sanity check for the selected page
if (this.selectedPageNumber > this.pageCount){
this.selectedPageNumber = 1;
}
// page number list
pageNumberList = new int[this.pageCount];
for(int i=1; i<this.pageCount; i++){
pageNumberList[i] = i + 1;
}
// prev/next page numbers
this.previousPageNumber = max(this.selectedPageNumber-1, 1); // must be at least 1
this.nextPageNumber = min(this.selectedPageNumber+1, this.pageCount); // must be at least 1
// start/end card numbers
this.startCardNumber = (this.docsPerPage * (this.selectedPageNumber - 1)) + 1;
this.endCardNumber = min(this.startCardNumber + (this.docsPerPage-1), this.numResults );
}
/**
* get numResults
*/
public int getNumResults(){
return this.numResults;
}
/**
* set numResults
*/
public void setNumResults(int numResults){
this.numResults = numResults;
}
/**
* get docsPerPage
*/
public int getDocsPerPage(){
return this.docsPerPage;
}
/**
* set docsPerPage
*/
public void setDocsPerPage(int docsPerPage){
this.docsPerPage = docsPerPage;
}
/**
* get selectedPageNumber
*/
public int getSelectedPageNumber(){
return this.selectedPageNumber;
}
/**
* set selectedPageNumber
*/
public void setSelectedPageNumber(int selectedPageNumber){
this.selectedPageNumber = selectedPageNumber;
}
/**
* get pageCount
*/
public int getPageCount(){
return this.pageCount;
}
/**
* set pageCount
*/
public void setPageCount(int pageCount){
this.pageCount = pageCount;
}
/**
* get pageNumberList
*/
public int[] getPageNumberList(){
return this.pageNumberList;
}
/**
* set pageNumberList
*/
public void setPageNumberList(int[] pageNumberList){
this.pageNumberList = pageNumberList;
}
/**
* get previousPageNumber
*/
public int getPreviousPageNumber(){
return this.previousPageNumber;
}
/**
* set previousPageNumber
*/
public void setPreviousPageNumber(int previousPageNumber){
this.previousPageNumber = previousPageNumber;
}
/**
* get nextPageNumber
*/
public int getNextPageNumber(){
return this.nextPageNumber;
}
/**
* set nextPageNumber
*/
public void setNextPageNumber(int nextPageNumber){
this.nextPageNumber = nextPageNumber;
}
/**
* get startCardNumber
*/
public int getStartCardNumber(){
return this.startCardNumber;
}
/**
* set startCardNumber
*/
public void setStartCardNumber(int startCardNumber){
this.startCardNumber = startCardNumber;
}
/**
* get endCardNumber
*/
public int getEndCardNumber(){
return this.endCardNumber;
}
/**
* set endCardNumber
*/
public void setEndCardNumber(int endCardNumber){
this.endCardNumber = endCardNumber;
}
}
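A worked example of the derived values, assuming the constructor stores its arguments in the fields as above: for 25 results at 10 per page with page 3 selected, pageCount is 25/10 rounded up to 3, the previous and next pages clamp to 2 and 3, and the visible cards run from 21 to 25.

// Illustrative check of the page math.
Pager pager = new Pager(25, 10, 3);
System.out.println(pager.getPageCount());          // 3
System.out.println(pager.getPreviousPageNumber()); // 2
System.out.println(pager.getNextPageNumber());     // 3
System.out.println(pager.getStartCardNumber());    // 21
System.out.println(pager.getEndCardNumber());      // 25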
|
src/main/java/edu/harvard/iq/dataverse/mydata/Pager.java
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package edu.harvard.iq.dataverse.mydata;
/**
*
* @author rmp553
*/
public class Pager {
}
|
added logic to pager
|
src/main/java/edu/harvard/iq/dataverse/mydata/Pager.java
|
added logic to pager
|
<ide><path>rc/main/java/edu/harvard/iq/dataverse/mydata/Pager.java
<ide> */
<ide> package edu.harvard.iq.dataverse.mydata;
<ide>
<add>import static java.lang.Math.max;
<add>import static java.lang.Math.min;
<add>
<ide> /**
<ide> *
<ide> * @author rmp553
<ide> */
<ide> public class Pager {
<ide>
<del>}
<add> /* inputs */
<add> public int numResults;
<add> public int docsPerPage = 10;
<add> public int selectedPageNumber = 1;
<add>
<add> /* calculated */
<add> public int pageCount;
<add> public int[] pageNumberList;
<add>
<add> public int previousPageNumber;
<add> public int nextPageNumber;
<add>
<add> public int startCardNumber;
<add> public int endCardNumber;
<add>
<add>
<add> public Pager(int numResults, int docsPerPage, int selectedPageNumber) {
<add>
<add> if (numResults < 0){
<add> throw new IllegalArgumentException("numResults must be 0 or higher");
<add> }
<add> if (docsPerPage < 1){
<add> throw new IllegalArgumentException("docsPerPage must be 1 or higher");
<add> }
<add> if (selectedPageNumber < 1){
<add> throw new IllegalArgumentException("selectedPageNumber must be 1 or higher");
<add> }
<add> this.numResults = numResults;
<add> this.docsPerPage = docsPerPage;
<add> this.selectedPageNumber = selectedPageNumber;
<add> makePageStats();
<add> }
<add>
<add> private void makePageStats(){
<add> // page count
<add> this.pageCount = numResults / docsPerPage;
<add> if ((this.numResults % this.docsPerPage) > 0){
<add> this.pageCount += 1;
<add> }
<add>
<add> // Sanity check for the selected page
<add> if (this.selectedPageNumber > this.pageCount){
<add> this.selectedPageNumber = 1;
<add> }
<add>
<add> // page number list
<add> pageNumberList = new int[this.pageCount];
<add> for(int i=1; i<this.pageCount; i++){
<add> pageNumberList[i] = i + 1;
<add> }
<add>
<add> // prev/next page numbers
<add> this.previousPageNumber = max(this.selectedPageNumber-1, 1); // must be at least 1
<add> this.nextPageNumber = min(this.selectedPageNumber+1, this.pageCount); // must be at least 1
<add>
<add> // start/end card numbers
<add> this.startCardNumber = (this.docsPerPage * (this.selectedPageNumber - 1)) + 1;
<add> this.endCardNumber = min(this.startCardNumber + (this.docsPerPage-1), this.numResults );
<add>
<add>
<add> }
<add>
<add>
<add> /**
<add> * get numResults
<add> */
<add> public int getNumResults(){
<add> return this.numResults;
<add> }
<add>
<add>
<add> /**
<add> * set numResults
<add> */
<add> public void setNumResults(int numResults){
<add> this.numResults = numResults;
<add> }
<add>
<add>
<add> /**
<add> * get docsPerPage
<add> */
<add> public int getDocsPerPage(){
<add> return this.docsPerPage;
<add> }
<add>
<add>
<add> /**
<add> * set docsPerPage
<add> */
<add> public void setDocsPerPage(int docsPerPage){
<add> this.docsPerPage = docsPerPage;
<add> }
<add>
<add>
<add> /**
<add> * get selectedPageNumber
<add> */
<add> public int getSelectedPageNumber(){
<add> return this.selectedPageNumber;
<add> }
<add>
<add>
<add> /**
<add> * set selectedPageNumber
<add> */
<add> public void setSelectedPageNumber(int selectedPageNumber){
<add> this.selectedPageNumber = selectedPageNumber;
<add> }
<add>
<add>
<add> /**
<add> * get pageCount
<add> */
<add> public int getPageCount(){
<add> return this.pageCount;
<add> }
<add>
<add>
<add> /**
<add> * set pageCount
<add> */
<add> public void setPageCount(int pageCount){
<add> this.pageCount = pageCount;
<add> }
<add>
<add>
<add> /**
<add> * get pageNumberList
<add> */
<add> public int[] getPageNumberList(){
<add> return this.pageNumberList;
<add> }
<add>
<add>
<add> /**
<add> * set pageNumberList
<add> */
<add> public void setPageNumberList(int[] pageNumberList){
<add> this.pageNumberList = pageNumberList;
<add> }
<add>
<add>
<add> /**
<add> * get previousPageNumber
<add> */
<add> public int getPreviousPageNumber(){
<add> return this.previousPageNumber;
<add> }
<add>
<add>
<add> /**
<add> * set previousPageNumber
<add> */
<add> public void setPreviousPageNumber(int previousPageNumber){
<add> this.previousPageNumber = previousPageNumber;
<add> }
<add>
<add>
<add> /**
<add> * get nextPageNumber
<add> */
<add> public int getNextPageNumber(){
<add> return this.nextPageNumber;
<add> }
<add>
<add>
<add> /**
<add> * set nextPageNumber
<add> */
<add> public void setNextPageNumber(int nextPageNumber){
<add> this.nextPageNumber = nextPageNumber;
<add> }
<add>
<add>
<add> /**
<add> * get startCardNumber
<add> */
<add> public int getStartCardNumber(){
<add> return this.startCardNumber;
<add> }
<add>
<add>
<add> /**
<add> * set startCardNumber
<add> */
<add> public void setStartCardNumber(int startCardNumber){
<add> this.startCardNumber = startCardNumber;
<add> }
<add>
<add>
<add> /**
<add> * get endCardNumber
<add> */
<add> public int getEndCardNumber(){
<add> return this.endCardNumber;
<add> }
<add>
<add>
<add> /**
<add> * set endCardNumber
<add> */
<add> public void setEndCardNumber(int endCardNumber){
<add> this.endCardNumber = endCardNumber;
<add> }
<add>
<add>
<add>}
|
|
Java
|
apache-2.0
|
6fb84448847dad59a4dd37ffc24caa158c317588
| 0 |
corbel-platform/lib-ws,bq/lib-ws,corbel-platform/lib-ws
|
/*
* Copyright (C) 2013 StarTIC
*/
package com.bqreaders.silkroad.common.auth.ioc;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.data.redis.connection.jedis.JedisConnectionFactory;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.serializer.StringRedisSerializer;
import redis.clients.jedis.JedisPoolConfig;
import com.bqreaders.lib.token.ioc.TokenIoc;
import com.bqreaders.lib.token.parser.TokenParser;
import com.bqreaders.silkroad.common.auth.AuthorizationInfo;
import com.bqreaders.silkroad.common.auth.AuthorizationRequestFilter;
import com.bqreaders.silkroad.common.auth.AuthorizationRulesService;
import com.bqreaders.silkroad.common.auth.BearerTokenAuthenticator;
import com.bqreaders.silkroad.common.auth.DefaultAuthorizationRulesService;
import com.bqreaders.silkroad.common.auth.repository.AuthorizationRulesRepository;
import com.bqreaders.silkroad.common.auth.repository.RedisAuthorizationRulesRepository;
import com.bqreaders.silkroad.common.health.AuthorizationRedisHealthCheck;
import com.bqreaders.silkroad.common.redis.GsonRedisSerializer;
import com.google.gson.JsonObject;
import com.sun.jersey.spi.container.ContainerRequestFilter;
import com.yammer.dropwizard.auth.Authenticator;
import com.yammer.dropwizard.auth.oauth.OAuthProvider;
/**
* @author Alexander De Leon
*
*/
@Configuration
@Import(TokenIoc.class)
public class AuthorizationIoc {
private static final Logger LOG = LoggerFactory.getLogger(AuthorizationIoc.class);
@Bean
public AuthorizationRulesRepository getAuthorizationRulesRepository(RedisTemplate<String, JsonObject> redisTemplate) {
return new RedisAuthorizationRulesRepository(redisTemplate);
}
@Bean
public RedisTemplate<String, JsonObject> redisTemplate(JedisConnectionFactory jedisConnectionFactory) {
final RedisTemplate<String, JsonObject> template = new RedisTemplate<>();
template.setConnectionFactory(jedisConnectionFactory);
template.setKeySerializer(new StringRedisSerializer());
template.setValueSerializer(new GsonRedisSerializer<JsonObject>());
return template;
}
@Bean
public JedisConnectionFactory jedisConnectionFactory(JedisPoolConfig jedisPoolConfig,
@Value("${auth.redis.host:@null}") String host, @Value("${auth.redis.port:@null}") Integer port,
@Value("${auth.redis.password:}") String password) {
JedisConnectionFactory connFactory = new JedisConnectionFactory(jedisPoolConfig);
connFactory.setPassword(password);
if (host != null) {
connFactory.setHostName(host);
}
if (port != null) {
connFactory.setPort(port);
}
return connFactory;
}
@Bean
public JedisPoolConfig jedisPoolConfig(@Value("${auth.redis.maxIdle:@null}") Integer maxIdle,
@Value("${auth.redis.maxTotal:@null}") Integer maxTotal,
@Value("${auth.redis.minIdle:@null}") Integer minIdle,
@Value("${auth.redis.testOnBorrow:@null}") Boolean testOnBorrow,
@Value("${auth.redis.testOnReturn:@null}") Boolean testOnReturn,
@Value("${auth.redis.testWhileIdle:@null}") Boolean testWhileIdle,
@Value("${auth.redis.numTestsPerEvictionRun:@null}") Integer numTestsPerEvictionRun,
@Value("${auth.redis.maxWaitMillis:@null}") Long maxWaitMillis,
@Value("${auth.redis.timeBetweenEvictionRunsMillis:@null}") Long timeBetweenEvictionRunsMillis,
@Value("${auth.redis.blockWhenExhausted:@null}") Boolean blockWhenExhausted) {
JedisPoolConfig config = new JedisPoolConfig();
if (maxIdle != null) {
config.setMaxIdle(maxIdle);
}
if (maxTotal != null) {
config.setMaxTotal(maxTotal);
}
if (minIdle != null) {
config.setMinIdle(minIdle);
}
if (testOnBorrow != null) {
config.setTestOnBorrow(testOnBorrow);
}
if (testOnReturn != null) {
config.setTestOnReturn(testOnReturn);
}
if (testWhileIdle != null) {
config.setTestWhileIdle(testWhileIdle);
}
if (numTestsPerEvictionRun != null) {
config.setNumTestsPerEvictionRun(numTestsPerEvictionRun);
}
if (timeBetweenEvictionRunsMillis != null) {
config.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRunsMillis);
}
if (maxWaitMillis != null) {
config.setMaxWaitMillis(maxWaitMillis);
}
if (blockWhenExhausted != null) {
config.setBlockWhenExhausted(blockWhenExhausted);
}
return config;
}
@Bean
public AuthorizationRulesService authorizationRulesService(AuthorizationRulesRepository authorizationRulesRepository) {
return new DefaultAuthorizationRulesService(authorizationRulesRepository);
}
@Bean
public Authenticator<String, AuthorizationInfo> authenticator(@Value("${auth.audience}") String audience,
TokenParser tokenParser, AuthorizationRulesService authorizationRulesService) {
return new BearerTokenAuthenticator(audience, authorizationRulesService, tokenParser);
}
@Bean(name = "authProvider")
public OAuthProvider<AuthorizationInfo> getOAuthProvider(Authenticator<String, AuthorizationInfo> authenticator,
@Value("${auth.realm}") String realm) {
return new OAuthProvider<>(authenticator, realm);
}
@Bean
public ContainerRequestFilter getAuthorizationRequestFileter(OAuthProvider<AuthorizationInfo> oauthProvider,
@Value("${auth.enabled}") boolean authEnabled, @Value("${auth.securePath}") String securePath) {
if (authEnabled) {
return new AuthorizationRequestFilter(oauthProvider, securePath);
} else {
LOG.warn("Authorization validation is disabled. The system is in an INSECURE mode");
return emptyFilter();
}
}
@Bean
public AuthorizationRedisHealthCheck getAuthorizationRedisHealthCheck(
RedisTemplate<String, JsonObject> redisTemplate) {
return new AuthorizationRedisHealthCheck(redisTemplate);
}
private ContainerRequestFilter emptyFilter() {
return request -> request;
}
}
|
src/main/java/com/bqreaders/silkroad/common/auth/ioc/AuthorizationIoc.java
|
/*
* Copyright (C) 2013 StarTIC
*/
package com.bqreaders.silkroad.common.auth.ioc;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.data.redis.connection.jedis.JedisConnectionFactory;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.serializer.StringRedisSerializer;
import redis.clients.jedis.JedisPoolConfig;
import com.bqreaders.lib.token.ioc.TokenIoc;
import com.bqreaders.lib.token.parser.TokenParser;
import com.bqreaders.silkroad.common.auth.AuthorizationInfo;
import com.bqreaders.silkroad.common.auth.AuthorizationRequestFilter;
import com.bqreaders.silkroad.common.auth.AuthorizationRulesService;
import com.bqreaders.silkroad.common.auth.BearerTokenAuthenticator;
import com.bqreaders.silkroad.common.auth.DefaultAuthorizationRulesService;
import com.bqreaders.silkroad.common.auth.repository.AuthorizationRulesRepository;
import com.bqreaders.silkroad.common.auth.repository.RedisAuthorizationRulesRepository;
import com.bqreaders.silkroad.common.health.AuthorizationRedisHealthCheck;
import com.bqreaders.silkroad.common.redis.GsonRedisSerializer;
import com.google.gson.JsonObject;
import com.sun.jersey.spi.container.ContainerRequestFilter;
import com.yammer.dropwizard.auth.Authenticator;
import com.yammer.dropwizard.auth.oauth.OAuthProvider;
/**
* @author Alexander De Leon
*
*/
@Configuration
@Import(TokenIoc.class)
public class AuthorizationIoc {
private static final Logger LOG = LoggerFactory.getLogger(AuthorizationIoc.class);
@Bean
public AuthorizationRulesRepository getAuthorizationRulesRepository(RedisTemplate<String, JsonObject> redisTemplate) {
return new RedisAuthorizationRulesRepository(redisTemplate);
}
@Bean
public RedisTemplate<String, JsonObject> redisTemplate(JedisConnectionFactory jedisConnectionFactory) {
final RedisTemplate<String, JsonObject> template = new RedisTemplate<>();
template.setConnectionFactory(jedisConnectionFactory);
template.setKeySerializer(new StringRedisSerializer());
template.setValueSerializer(new GsonRedisSerializer<JsonObject>());
return template;
}
@Bean
public JedisConnectionFactory jedisConnectionFactory(JedisPoolConfig jedisPoolConfig,
@Value("${auth.redis.host:@null}") String host, @Value("${auth.redis.port:@null}") Integer port,
@Value("${auth.redis.password:}") String password) {
JedisConnectionFactory connFactory = new JedisConnectionFactory(jedisPoolConfig);
connFactory.setPassword(password);
if (host != null) {
connFactory.setHostName(host);
}
if (port != null) {
connFactory.setPort(port);
}
return connFactory;
}
@Bean
public JedisPoolConfig jedisPoolConfig(@Value("${auth.redis.maxIdle:@null}") Integer maxIdle,
@Value("${auth.redis.maxTotal:@null}") Integer maxTotal,
@Value("${auth.redis.minIdle:@null}") Integer minIdle,
@Value("${auth.redis.testOnBorrow:@null}") Boolean testOnBorrow,
@Value("${auth.redis.testOnReturn:@null}") Boolean testOnReturn,
@Value("${auth.redis.testWhileIdle:@null}") Boolean testWhileIdle,
@Value("${auth.redis.numTestsPerEvictionRun:@null}") Integer numTestsPerEvictionRun,
@Value("${auth.redis.maxWaitMillis:@null}") Long maxWaitMillis,
@Value("${auth.redis.timeBetweenEvictionRunsMillis:@null}") Long timeBetweenEvictionRunsMillis,
@Value("${auth.redis.blockWhenExhausted:@null}") Boolean blockWhenExhausted) {
JedisPoolConfig config = new JedisPoolConfig();
if (maxIdle != null) {
config.setMaxIdle(maxIdle);
}
if (maxTotal != null) {
config.setMaxTotal(maxTotal);
}
if (minIdle != null) {
config.setMinIdle(minIdle);
}
if (testOnBorrow != null) {
config.setTestOnBorrow(testOnBorrow);
}
if (testOnReturn != null) {
config.setTestOnReturn(testOnReturn);
}
if (testWhileIdle != null) {
config.setTestWhileIdle(testWhileIdle);
}
if (numTestsPerEvictionRun != null) {
config.setNumTestsPerEvictionRun(numTestsPerEvictionRun);
}
if (timeBetweenEvictionRunsMillis != null) {
config.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRunsMillis);
}
if (maxWaitMillis != null) {
config.setMaxWaitMillis(maxWaitMillis);
}
if (blockWhenExhausted != null) {
config.setBlockWhenExhausted(blockWhenExhausted);
}
return config;
}
@Bean
public AuthorizationRulesService authorizationRulesService(AuthorizationRulesRepository authorizationRulesRepository) {
return new DefaultAuthorizationRulesService(authorizationRulesRepository);
}
@Bean
public Authenticator<String, AuthorizationInfo> authenticator(@Value("${auth.audience}") String audience,
TokenParser tokenParser, AuthorizationRulesService authorizationRulesService) {
return new BearerTokenAuthenticator(audience, authorizationRulesService, tokenParser);
}
@Bean(name = "authProvider")
public OAuthProvider<AuthorizationInfo> getOAuthProvider(Authenticator<String, AuthorizationInfo> authenticator,
@Value("${auth.realm}") String realm) {
return new OAuthProvider<>(authenticator, realm);
}
@Bean
public ContainerRequestFilter getAuthorizationRequestFileter(OAuthProvider<AuthorizationInfo> oauthProvider,
@Value("${auth.enabled}") boolean authEnabled, @Value("${auth.securePath}") String securePath) {
if (authEnabled) {
return new AuthorizationRequestFilter(oauthProvider, securePath);
} else {
LOG.warn("Authorization validation is disabled. The system is in an INSECURE mode");
return emptyFilter();
}
}
@Bean
public AuthorizationRedisHealthCheck getAuthorizationRedisHealthCheck() {
return new AuthorizationRedisHealthCheck(redisTemplate());
}
private ContainerRequestFilter emptyFilter() {
return request -> request;
}
}
|
solved compilation problem
Change-Id: Id1f5af8e7e8863601aeee4ce2d63f4f1908838cd
|
src/main/java/com/bqreaders/silkroad/common/auth/ioc/AuthorizationIoc.java
|
solved compilation problem
|
<ide><path>rc/main/java/com/bqreaders/silkroad/common/auth/ioc/AuthorizationIoc.java
<ide> @Value("${auth.redis.host:@null}") String host, @Value("${auth.redis.port:@null}") Integer port,
<ide> @Value("${auth.redis.password:}") String password) {
<ide> JedisConnectionFactory connFactory = new JedisConnectionFactory(jedisPoolConfig);
<del> connFactory.setPassword(password);
<add> connFactory.setPassword(password);
<ide> if (host != null) {
<ide> connFactory.setHostName(host);
<ide> }
<ide> }
<ide>
<ide> @Bean
<del> public AuthorizationRedisHealthCheck getAuthorizationRedisHealthCheck() {
<del> return new AuthorizationRedisHealthCheck(redisTemplate());
<add> public AuthorizationRedisHealthCheck getAuthorizationRedisHealthCheck(
<add> RedisTemplate<String, JsonObject> redisTemplate) {
<add> return new AuthorizationRedisHealthCheck(redisTemplate);
<ide> }
<ide>
<ide> private ContainerRequestFilter emptyFilter() {
|
|
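The jedisPoolConfig(...) bean above applies each pool property only when it was actually supplied, leaving the Jedis/commons-pool defaults in place otherwise. A minimal standalone sketch of that null-guarded override pattern (the literal values are illustrative assumptions, not taken from this record) could look like:
import redis.clients.jedis.JedisPoolConfig;
// Standalone sketch of the null-guarded override pattern used in jedisPoolConfig(...):
// only properties that were actually supplied are applied; everything else keeps the
// library defaults. The values below are illustrative, not from the record.
public class JedisPoolConfigSketch {
    public static void main(String[] args) {
        Integer maxTotal = 64;        // e.g. auth.redis.maxTotal
        Integer maxIdle = null;       // unset -> keep the default
        Boolean testOnBorrow = true;  // e.g. auth.redis.testOnBorrow
        JedisPoolConfig config = new JedisPoolConfig();
        if (maxTotal != null) {
            config.setMaxTotal(maxTotal);
        }
        if (maxIdle != null) {
            config.setMaxIdle(maxIdle);
        }
        if (testOnBorrow != null) {
            config.setTestOnBorrow(testOnBorrow);
        }
        System.out.println("maxTotal=" + config.getMaxTotal()
                + " maxIdle=" + config.getMaxIdle()
                + " testOnBorrow=" + config.getTestOnBorrow());
    }
}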
Java
|
lgpl-2.1
|
f8954e49a26fe22743b3bda660e579844767d356
| 0 |
xwiki/xwiki-platform,xwiki/xwiki-platform,xwiki/xwiki-platform,xwiki/xwiki-platform,xwiki/xwiki-platform
|
/*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.xwiki.refactoring.internal.job;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import org.xwiki.bridge.event.DocumentsDeletingEvent;
import org.xwiki.model.reference.DocumentReference;
import org.xwiki.model.reference.EntityReference;
import org.xwiki.model.reference.SpaceReference;
import org.xwiki.refactoring.job.EntityJobStatus;
import org.xwiki.refactoring.job.EntityRequest;
import org.xwiki.refactoring.job.question.EntitySelection;
import org.xwiki.stability.Unstable;
/**
 * Abstract job that creates the list of pages to delete in order to do some checks and ask the user for confirmation.
*
* @param <R> the request type
* @param <S> the job status type
*
* @version $Id$
* @since 9.1RC1
*/
@Unstable
public abstract class AbstractEntityJobWithChecks<R extends EntityRequest, S extends EntityJobStatus<? super R>>
extends AbstractEntityJob<R, S>
{
/**
* Map that will contain all entities that are concerned by the refactoring.
*/
protected Map<EntityReference, EntitySelection> concernedEntities = new HashMap<>();
/**
* Specify if the job needs to check that the entities can be deleted.
*/
private boolean skipChecks;
/**
* @param skipChecks set true to skip the deletion check.
* @since 10.11RC1
*/
protected void setSkipChecks(boolean skipChecks)
{
this.skipChecks = skipChecks;
}
/**
* @return true means that the deletion check will be skipped.
* @since 10.11RC1
*/
protected boolean isSkipChecks()
{
return this.skipChecks;
}
@Override
protected void runInternal() throws Exception
{
progressManager.pushLevelProgress(2, this);
try {
Collection<EntityReference> entityReferences = this.request.getEntityReferences();
if (entityReferences != null) {
// Get the list of concerned entities
progressManager.startStep(this);
getEntities(entityReferences);
if (!skipChecks) {
// Send the event
DocumentsDeletingEvent event = new DocumentsDeletingEvent();
observationManager.notify(event, this, concernedEntities);
// Stop the job if some listener has canceled the action
if (event.isCanceled()) {
getStatus().cancel();
return;
}
}
// Process
progressManager.startStep(this);
setContextUser();
process(entityReferences);
}
} finally {
progressManager.popLevelProgress(this);
}
}
protected void getEntities(Collection<EntityReference> entityReferences)
{
this.progressManager.pushLevelProgress(entityReferences.size(), this);
try {
for (EntityReference entityReference : entityReferences) {
if (this.status.isCanceled()) {
break;
} else {
this.progressManager.startStep(this);
getEntities(entityReference);
this.progressManager.endStep(this);
}
}
} finally {
this.progressManager.popLevelProgress(this);
}
}
protected void getEntities(EntityReference entityReference)
{
// Dispatch the check operation based on the entity type.
switch (entityReference.getType()) {
case DOCUMENT:
getEntities(new DocumentReference(entityReference));
break;
case SPACE:
getEntities(new SpaceReference(entityReference));
break;
default:
this.logger.error("Unsupported entity type [{}].", entityReference.getType());
}
}
private void getEntities(DocumentReference documentReference)
{
if (this.request.isDeep() && isSpaceHomeReference(documentReference)) {
getEntities(documentReference.getLastSpaceReference());
} else {
this.concernedEntities.put(documentReference, new EntitySelection(documentReference));
}
}
private void getEntities(SpaceReference spaceReference)
{
visitDocuments(spaceReference, documentReference ->
concernedEntities.put(documentReference, new EntitySelection(documentReference)));
}
}
|
xwiki-platform-core/xwiki-platform-refactoring/xwiki-platform-refactoring-api/src/main/java/org/xwiki/refactoring/internal/job/AbstractEntityJobWithChecks.java
|
/*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.xwiki.refactoring.internal.job;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import org.xwiki.bridge.event.DocumentsDeletingEvent;
import org.xwiki.model.reference.DocumentReference;
import org.xwiki.model.reference.EntityReference;
import org.xwiki.model.reference.SpaceReference;
import org.xwiki.refactoring.job.EntityJobStatus;
import org.xwiki.refactoring.job.EntityRequest;
import org.xwiki.refactoring.job.question.EntitySelection;
import org.xwiki.stability.Unstable;
/**
 * Abstract job that creates the list of pages to delete in order to do some checks and ask the user for confirmation.
*
* @param <R> the request type
* @param <S> the job status type
*
* @version $Id$
* @since 9.1RC1
*/
@Unstable
public abstract class AbstractEntityJobWithChecks<R extends EntityRequest, S extends EntityJobStatus<? super R>>
extends AbstractEntityJob<R, S>
{
/**
* Map that will contain all entities that are concerned by the refactoring.
*/
protected Map<EntityReference, EntitySelection> concernedEntities = new HashMap<>();
/**
* Specify if the job needs to check that the entities can be deleted.
*/
protected boolean skipChecks;
/**
* @param skipChecks set true to skip the deletion check.
*/
protected void setSkipChecks(boolean skipChecks)
{
this.skipChecks = skipChecks;
}
/**
* @return true means that the deletion check will be skipped.
*/
protected boolean isSkipChecks()
{
return this.skipChecks;
}
@Override
protected void runInternal() throws Exception
{
progressManager.pushLevelProgress(2, this);
try {
Collection<EntityReference> entityReferences = this.request.getEntityReferences();
if (entityReferences != null) {
// Get the list of concerned entities
progressManager.startStep(this);
getEntities(entityReferences);
if (!skipChecks) {
// Send the event
DocumentsDeletingEvent event = new DocumentsDeletingEvent();
observationManager.notify(event, this, concernedEntities);
// Stop the job if some listener has canceled the action
if (event.isCanceled()) {
getStatus().cancel();
return;
}
}
// Process
progressManager.startStep(this);
setContextUser();
process(entityReferences);
}
} finally {
progressManager.popLevelProgress(this);
}
}
protected void getEntities(Collection<EntityReference> entityReferences)
{
this.progressManager.pushLevelProgress(entityReferences.size(), this);
try {
for (EntityReference entityReference : entityReferences) {
if (this.status.isCanceled()) {
break;
} else {
this.progressManager.startStep(this);
getEntities(entityReference);
this.progressManager.endStep(this);
}
}
} finally {
this.progressManager.popLevelProgress(this);
}
}
protected void getEntities(EntityReference entityReference)
{
// Dispatch the check operation based on the entity type.
switch (entityReference.getType()) {
case DOCUMENT:
getEntities(new DocumentReference(entityReference));
break;
case SPACE:
getEntities(new SpaceReference(entityReference));
break;
default:
this.logger.error("Unsupported entity type [{}].", entityReference.getType());
}
}
private void getEntities(DocumentReference documentReference)
{
if (this.request.isDeep() && isSpaceHomeReference(documentReference)) {
getEntities(documentReference.getLastSpaceReference());
} else {
this.concernedEntities.put(documentReference, new EntitySelection(documentReference));
}
}
private void getEntities(SpaceReference spaceReference)
{
visitDocuments(spaceReference, documentReference ->
concernedEntities.put(documentReference, new EntitySelection(documentReference)));
}
}
|
XWIKI-15870: Warning message when copying a page
* Add the missing since
|
xwiki-platform-core/xwiki-platform-refactoring/xwiki-platform-refactoring-api/src/main/java/org/xwiki/refactoring/internal/job/AbstractEntityJobWithChecks.java
|
XWIKI-15870: Warning message when copying a page
|
<ide><path>xwiki-platform-core/xwiki-platform-refactoring/xwiki-platform-refactoring-api/src/main/java/org/xwiki/refactoring/internal/job/AbstractEntityJobWithChecks.java
<ide> /**
<ide> * Specify if the job needs to check that the entities can be deleted.
<ide> */
<del> protected boolean skipChecks;
<add> private boolean skipChecks;
<ide>
<ide> /**
<ide> * @param skipChecks set true to skip the deletion check.
<add> * @since 10.11RC1
<ide> */
<ide> protected void setSkipChecks(boolean skipChecks)
<ide> {
<ide>
<ide> /**
<ide> * @return true means that the deletion check will be skipped.
<add> * @since 10.11RC1
<ide> */
<ide> protected boolean isSkipChecks()
<ide> {
|
|
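runInternal() above notifies listeners with a cancelable DocumentsDeletingEvent before any processing and aborts when a listener cancels it. A self-contained sketch of that veto pattern, using plain stand-in types rather than the XWiki classes, might look like:
import java.util.ArrayList;
import java.util.List;
// Sketch of the cancellation pattern used in runInternal(): notify listeners with a
// cancelable event before deleting anything, and stop if any listener vetoes.
// DeletingEvent and Listener are illustrative stand-ins, not XWiki types.
public class CancelableDeletionSketch {
    static class DeletingEvent {
        private boolean canceled;
        void cancel() { this.canceled = true; }
        boolean isCanceled() { return this.canceled; }
    }
    interface Listener {
        void onDeleting(DeletingEvent event, List<String> documents);
    }
    public static void main(String[] args) {
        List<String> documents = List.of("Space.Page1", "Space.Page2");
        List<Listener> listeners = new ArrayList<>();
        // A listener can veto the whole operation, e.g. because a page is protected.
        listeners.add((evt, docs) -> {
            if (docs.contains("Space.Page2")) {
                evt.cancel();
            }
        });
        DeletingEvent event = new DeletingEvent();
        listeners.forEach(l -> l.onDeleting(event, documents));
        if (event.isCanceled()) {
            System.out.println("Deletion canceled by a listener");
            return; // mirrors getStatus().cancel(); return;
        }
        System.out.println("Deleting " + documents);
    }
}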
JavaScript
|
mit
|
51f02cf3d6a768f746a5f9eda4ead9d4618139ab
| 0 |
ethz-nus/nan-codefest,ethz-nus/nan-codefest,ethz-nus/nan-codefest
|
angular.module('starter.controllers',['ionic'])
.controller('MapCtrl', function($scope, $ionicLoading, $compile) {
function initialize() {
console.log("load map");
var myLatlng = new google.maps.LatLng(43.07493,-89.381388);
var mapOptions = {
center: myLatlng,
zoom: 16,
mapTypeId: google.maps.MapTypeId.ROADMAP
};
var map = new google.maps.Map(document.getElementById("map"),
mapOptions);
//Marker + infowindow + angularjs compiled ng-click
var contentString = "<div><a ng-click='clickTest()'>Click me!</a></div>";
var compiled = $compile(contentString)($scope);
var infowindow = new google.maps.InfoWindow({
content: compiled[0]
});
var marker = new google.maps.Marker({
position: myLatlng,
map: map,
title: 'Uluru (Ayers Rock)'
});
google.maps.event.addListener(marker, 'click', function() {
infowindow.open(map,marker);
});
$scope.map = map;
}
$( document ).ready(function() {
initialize();
});
// google.maps.event.addDomListener(window, 'load', initialize);
$scope.centerOnMe = function() {
if(!$scope.map) {
return;
}
$scope.loading = $ionicLoading.show({
content: 'Getting current location...',
showBackdrop: false
});
navigator.geolocation.getCurrentPosition(function(pos) {
$scope.map.setCenter(new google.maps.LatLng(pos.coords.latitude, pos.coords.longitude));
$scope.loading.hide();
}, function(error) {
alert('Unable to get location: ' + error.message);
});
};
$scope.clickTest = function() {
alert('Example of infowindow with ng-click')
};
// $scope.centerOnMe();
})
.controller('SearchCtrl', function($scope, Events){
$scope.events = Events.all();
$scope.search = {
date: new Date(),
location: ''
};
$scope.search = function(){
console.log("search");
$scope.filteredEvents = $scope.events.filter( function(event){
var dateTemp = null;
if ( Object.prototype.toString.call($scope.search.date) === "[object Date]" ) {
// it is a date
if ( !isNaN( $scope.search.date.getTime() ) ) { // d.valueOf() could also work
// date is valid
dateTemp = new Date(Date.parse($scope.search.date) - Date.parse(event.time));
}
}
if( dateTemp != null && dateTemp.getDate() - 1 != 0)
return false;
else if (!isNaN($scope.location) && (!$scope.search.location.indexOf(event.location) || !event.location.indexOf($scope.search.location)) )
return false;
else if ( !isNaN($scope.category) && ($scope.search.catalog != event.catalog) )
return false;
else return true;
});
window.location.href = "#/tab/results";
console.log($scope.filteredEvents);
}
})
.controller('ResultDetailCtrl', function($scope, $stateParams, Events, AccountManager) {
$scope.event = Events.get($stateParams.eventId);
$scope.selectedGroupIndex;
$scope.print = function(array){
var str = ' ';
array.forEach(function(element, index, array){
str += element;
if(index != array.length - 1){
str+=", "
}
});
return str;
}
$scope.register = function(groupID){
$scope.selectedGroupIndex = groupID;
}
$scope.deregister = function(groupID){
console.log("deregister");
$scope.selectedGroupIndex = -1;
}
$scope.createGroup = function(isPrivate){
userId = AccountManager.getUserId();
Events.createGroup($scope.event, userId, isPrivate)
}
$scope.deleteGroup = function(){
userId = AccountManager.getUserId();
Events.deleteGroup($scope.event, userId);
}
$scope.goSolo = function(){
userId = AccountManager.getUserId();
}
})
.controller('EventsCtrl', function($scope, AttendingEvents) {
$scope.events = AttendingEvents.all();
$scope.remove = function(event) {
AttendingEvents.remove(event);
};
$scope.sortByTime = function(){
$scope.events.sort(function(eventA, eventB){
return eventA.time - eventB.time;
})
};
$scope.sortByDistance = function(eventA, eventB){
return eventA.distance - eventB.distance;
}
$scope.sortByTime();
})
.controller('EventDetailCtrl', function($scope, $stateParams, Events) {
$scope.event = Events.get($stateParams.eventId);
})
.controller('PreferenceCtrl', function($scope) {
$scope.settings = {
enableFriends: true
};
});
|
www/js/controllers.js
|
angular.module('starter.controllers',['ionic'])
.controller('MapCtrl', function($scope, $ionicLoading, $compile) {
function initialize() {
console.log("load map");
var myLatlng = new google.maps.LatLng(43.07493,-89.381388);
var mapOptions = {
center: myLatlng,
zoom: 16,
mapTypeId: google.maps.MapTypeId.ROADMAP
};
var map = new google.maps.Map(document.getElementById("map"),
mapOptions);
//Marker + infowindow + angularjs compiled ng-click
var contentString = "<div><a ng-click='clickTest()'>Click me!</a></div>";
var compiled = $compile(contentString)($scope);
var infowindow = new google.maps.InfoWindow({
content: compiled[0]
});
var marker = new google.maps.Marker({
position: myLatlng,
map: map,
title: 'Uluru (Ayers Rock)'
});
google.maps.event.addListener(marker, 'click', function() {
infowindow.open(map,marker);
});
$scope.map = map;
}
$( document ).ready(function() {
initialize();
});
// google.maps.event.addDomListener(window, 'load', initialize);
$scope.centerOnMe = function() {
if(!$scope.map) {
return;
}
$scope.loading = $ionicLoading.show({
content: 'Getting current location...',
showBackdrop: false
});
navigator.geolocation.getCurrentPosition(function(pos) {
$scope.map.setCenter(new google.maps.LatLng(pos.coords.latitude, pos.coords.longitude));
$scope.loading.hide();
}, function(error) {
alert('Unable to get location: ' + error.message);
});
};
$scope.clickTest = function() {
alert('Example of infowindow with ng-click')
};
// $scope.centerOnMe();
})
.controller('SearchCtrl', function($scope, Events){
$scope.events = Events.all();
$scope.search = {
date: new Date(),
location: ''
};
$scope.search = function(){
console.log("search");
$scope.filteredEvents = $scope.events.filter( function(event){
var dateTemp = null;
if ( Object.prototype.toString.call($scope.search.date) === "[object Date]" ) {
// it is a date
if ( !isNaN( $scope.search.date.getTime() ) ) { // d.valueOf() could also work
// date is valid
dateTemp = new Date(Date.parse($scope.search.date) - Date.parse(event.time));
}
}
if( dateTemp != null && dateTemp.getDate() - 1 != 0)
return false;
else if (!isNaN($scope.location) && (!$scope.search.location.indexOf(event.location) || !event.location.indexOf($scope.search.location)) )
return false;
else if ( !isNaN($scope.category) && ($scope.search.catalog != event.catalog) )
return false;
else return true;
});
window.location.href = "#/tab/results";
console.log($scope.filteredEvents);
}
})
.controller('ResultDetailCtrl', function($scope, $stateParams, Events, AccountManager) {
$scope.event = Events.get($stateParams.eventId);
$scope.selectedGroupIndex;
$scope.print = function(array){
var str = ' ';
array.forEach(function(element, index, array){
str += element;
if(index != array.length - 1){
str+=", "
}
});
return str;
}
$scope.register = function(groupID){
$scope.selectedGroupIndex = groupID;
}
$scope.deregister = function(groupID){
console.log("deregister");
$scope.selectedGroupIndex = -1;
}
$scope.createGroup = function(isPrivate){
userId = AccountManager.getUserId();
Events.createGroup($scope.event, userId, isPrivate)
}
$scope.goSolo = function(){
userId = AccountManager.getUserId();
}
})
.controller('EventsCtrl', function($scope, AttendingEvents) {
$scope.events = AttendingEvents.all();
$scope.remove = function(event) {
AttendingEvents.remove(event);
};
$scope.sortByTime = function(){
$scope.events.sort(function(eventA, eventB){
return eventA.time - eventB.time;
})
};
$scope.sortByDistance = function(eventA, eventB){
return eventA.distance - eventB.distance;
}
$scope.sortByTime();
})
.controller('EventDetailCtrl', function($scope, $stateParams, Events) {
$scope.event = Events.get($stateParams.eventId);
})
.controller('PreferenceCtrl', function($scope) {
$scope.settings = {
enableFriends: true
};
});
|
one more
|
www/js/controllers.js
|
one more
|
<ide><path>www/js/controllers.js
<ide> Events.createGroup($scope.event, userId, isPrivate)
<ide> }
<ide>
<add> $scope.deleteGroup = function(){
<add> userId = AccountManager.getUserId();
<add> Events.deleteGroup($scope.event, userId);
<add> }
<add>
<ide> $scope.goSolo = function(){
<ide> userId = AccountManager.getUserId();
<ide> }
|
|
JavaScript
|
apache-2.0
|
8b3f0ffe00c871fe9884393208a34e7af4809974
| 0 |
TanayParikh/foam2,foam-framework/foam2,TanayParikh/foam2,foam-framework/foam2,jacksonic/vjlofvhjfgm,foam-framework/foam2,jacksonic/vjlofvhjfgm,foam-framework/foam2,TanayParikh/foam2,TanayParikh/foam2,jacksonic/vjlofvhjfgm,foam-framework/foam2
|
/*
* @license
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// TODO: This should probably be in core.
foam.CLASS({
package: 'foam.dao',
name: 'Sink',
methods: [
{
name: 'put',
args: [
'obj',
'fc'
],
code: function () {}
},
{
name: 'remove',
args: [
'obj',
'fc'
],
code: function() {}
},
{
name: 'eof',
args: [],
code: function() {}
},
{
name: 'error',
args: [],
code: function() {}
},
{
name: 'reset',
args: [],
code: function() {}
}
]
});
foam.CLASS({
package: 'foam.dao',
name: 'ProxySink',
properties: [
{
class: 'Proxy',
of: 'foam.dao.Sink',
name: 'delegate'
}
]
});
foam.CLASS({
package: 'foam.dao',
name: 'DAO',
// TODO: make an interface or abstract, then remove NOP code:'s
// documentation: 'DAO Interface',
methods: [
{
name: 'put',
code: function() { },
returns: 'Promise'
},
{
name: 'remove',
code: function() { },
returns: 'Promise'
},
{
name: 'find',
code: function() { },
returns: 'Promise'
},
{
name: 'select',
code: function() { },
returns: 'Promise'
},
{
name: 'removeAll',
code: function() { },
returns: 'Promise'
},
{
name: 'pipe', // TODO: return a promise? don't put pipe and listen here?
code: function() { },
},
{
name: 'where',
code: function() { },
},
{
name: 'orderBy',
code: function() { },
},
{
name: 'skip',
code: function() { },
},
{
name: 'limit',
code: function() { },
}
]
});
foam.CLASS({
package: 'foam.dao',
name: 'AbstractSink',
implements: [ 'foam.dao.Sink' ]
});
foam.CLASS({
package: 'foam.dao',
name: 'QuickSink',
extends: 'foam.dao.AbstractSink',
properties: [
{
class: 'Function',
name: 'putFn'
},
{
class: 'Function',
name: 'removeFn'
},
{
class: 'Function',
name: 'eofFn'
},
{
class: 'Function',
name: 'errorFn'
},
{
class: 'Function',
name: 'resetFn'
},
],
methods: [
function put() {
return this.putFn && this.putFn.apply(this, arguments);
},
function remove() {
return this.removeFn && this.removeFn.apply(this, arguments);
},
function eof() {
return this.eofFn && this.eofFn.apply(this, arguments);
},
function error() {
return this.errorFn && this.errorFn.apply(this, arguments);
},
function reset() {
return this.resetFn && this.resetFn.apply(this, arguments);
},
]
});
foam.CLASS({
package: 'foam.dao',
name: 'PredicatedSink',
extends: 'foam.dao.ProxySink',
properties: [
{
name: 'predicate'
}
],
methods: [
function put(obj, fc) {
if ( this.predicate.f(obj) ) this.delegate.put(obj, fc);
},
function remove(obj, fc) {
if ( this.predicate.f(obj) ) this.delegate.remove(obj, fc);
}
]
});
foam.CLASS({
package: 'foam.dao',
name: 'LimitedSink',
extends: 'foam.dao.ProxySink',
properties: [
{
name: 'limit'
},
{
name: 'count',
class: 'Int',
value: 0
}
],
methods: [
function put(obj, fc) {
if ( this.count++ >= this.limit && fc ) {
fc.stop();
} else {
this.delegate.put(obj, fc);
}
},
function remove(obj, fc) {
if ( this.count++ >= this.limit && fc ) {
fc.stop();
} else {
this.delegate.remove(obj, fc);
}
}
]
});
foam.CLASS({
package: 'foam.dao',
name: 'SkipSink',
extends: 'foam.dao.ProxySink',
properties: [
{
name: 'skip'
},
{
name: 'count',
class: 'Int',
value: 0
}
],
methods: [
function put(obj, fc) {
if ( this.count < this.skip ) {
this.count++;
return;
}
this.delegate.put(obj, fc);
},
function remove(obj, fc) {
if ( this.count < this.skip ) {
this.count++;
return;
}
this.delegate.remove(obj, fc);
}
]
});
foam.CLASS({
package: 'foam.dao',
name: 'OrderedSink',
extends: 'foam.dao.ProxySink',
properties: [
{
name: 'comparator'
},
{
name: 'arr',
factory: function() { return []; }
}
],
methods: [
function put(obj, fc) {
this.arr.push(obj);
},
function eof() {
this.arr.sort(this.comparator.compare || this.comparator);
for ( var i = 0 ; i < this.arr.length ; i++ ) {
this.delegate.put(this.arr[i]);
}
},
function remove(obj, fc) {
// TODO
}
]
});
foam.CLASS({
package: 'foam.dao',
name: 'FlowControl',
properties: [
'stopped',
'errorEvt'
],
methods: [
function stop() { this.stopped = true; },
function error(e) { this.errorEvt = e; }
]
});
foam.CLASS({
package: 'foam.core',
name: 'Exception',
properties: [
'message'
]
});
foam.CLASS({
package: 'foam.dao',
name: 'InternalException',
extends: 'Exception'
});
foam.CLASS({
package: 'foam.dao',
name: 'ExternalException',
extends: 'Exception'
})
foam.CLASS({
package: 'foam.dao',
name: 'ObjectNotFoundException',
extends: 'foam.dao.ExternalException',
properties: [
'id',
{
name: 'message',
expression: function(id) { return "No record found for id: " + id; }
}
]
});
foam.CLASS({
package: 'foam.dao',
name: 'AbstractDAO',
implements: ['foam.dao.DAO'],
requires: [
'foam.dao.ExternalException',
'foam.dao.InternalException',
'foam.dao.ObjectNotFoundException',
'foam.dao.FlowControl',
'foam.dao.LimitedSink',
'foam.dao.SkipSink',
'foam.dao.OrderedSink',
'foam.dao.PredicatedSink',
'foam.dao.FilteredDAO',
'foam.dao.OrderedDAO',
'foam.dao.SkipDAO',
'foam.dao.LimitedDAO'
],
topics: [
{
name: 'on',
topics: [
'put',
'remove',
'reset'
]
}
],
properties: [
{
class: 'Class2',
name: 'of'
}
],
methods: [
{
name: 'where',
code: function where(p) {
return this.FilteredDAO.create({
delegate: this,
predicate: p
});
}
},
{
name: 'orderBy',
code: function orderBy(o) {
return this.OrderedDAO.create({
delegate: this,
comparator: o
});
}
},
{
name: 'skip',
code: function skip(s) {
return this.SkipDAO.create({
delegate: this,
skip_: s
});
}
},
{
name: 'limit',
code: function limit(l) {
return this.LimitedDAO.create({
delegate: this,
limit_: l
});
}
},
{
name: 'pipe',
code: function pipe(sink, skip, limit, order, predicate) {
var mySink = this.decorateSink_(sink, skip, limit, order, predicate, true);
var fc = this.FlowControl.create();
var sub;
fc.propertyChange.sub(function(s, _, p) {
if ( p.name == "stopped") {
if ( sub ) sub.destroy();
} else if ( p.name === "errorEvt" ) {
if ( sub ) sub.destroy();
mySink.error(fc.errorEvt);
}
});
this.select(sink, skip, limit, order, predicate).then(function() {
this.on.sub(function(s, on, e, obj) {
sub = s;
switch(e) {
case 'put':
sink.put(obj, fc);
break;
case 'remove':
sink.remove(obj, fc);
break;
case 'reset':
sink.reset();
break;
}
});
}.bind(this));
}
},
function update() {},
function decorateSink_(sink, skip, limit, order, predicate, isListener, disableLimit) {
if ( ! disableLimit ) {
if ( limit != undefined ) {
sink = this.LimitedSink.create({
limit: limit,
delegate: sink
});
}
if ( skip != undefined ) {
sink = this.SkipSink.create({
skip: skip,
delegate: sink
});
}
}
if ( order != undefined && ! isListener ) {
sink = this.OrderedSink.create({
comparator: order,
delegate: sink
});
}
if ( predicate != undefined ) {
sink = this.PredicatedSink.create({
predicate: predicate.partialEval ?
predicate.partialEval() :
predicate,
delegate: sink
});
}
return sink;
},
function eof() {
// Do nothing by default, but can be overridden.
// This allows DAOs to be used as a Sink.
}
]
});
foam.CLASS({
package: 'foam.dao',
name: 'ProxyDAO',
extends: 'foam.dao.AbstractDAO',
properties: [
{
class: 'Proxy',
of: 'foam.dao.DAO',
name: 'delegate',
topics: [ 'on' ],
forwards: [ 'put', 'remove', 'find', 'select', 'removeAll' ],
postSet: function(old, nu) {
// Only fire a 'reset' when the delegate is actually changing, not being
// set for the first time.
if ( old ) {
this.on.reset.pub();
}
}
}
]
});
foam.CLASS({
package: 'foam.dao',
name: 'FilteredDAO',
extends: 'foam.dao.ProxyDAO',
requires: [
'foam.mlang.predicate.And'
],
properties: [
{
name: 'predicate'
}
],
methods: [
function select(sink, skip, limit, order, predicate) {
return this.delegate.select(
sink, skip, limit, order,
predicate ?
this.And.create({ args: [this.predicate, predicate] }) :
this.predicate);
},
function removeAll(skip, limit, order, predicate) {
return this.delegate.removeAll(
skip, limit, order,
predicate ?
this.And.create({ args: [this.predicate, predicate] }) :
this.predicate);
}
]
});
foam.CLASS({
package: 'foam.dao',
name: 'OrderedDAO',
extends: 'foam.dao.ProxyDAO',
properties: [
{
name: 'comparator'
}
],
methods: [
function select(sink, skip, limit, order, predicate) {
return this.delegate.select(sink, skip, limit, this.comparator, predicate);
},
function removeAll(skip, limit, order, predicate) {
return this.delegate.removeAll(skip, limit, this.comparator, predicate);
}
]
});
foam.CLASS({
package: 'foam.dao',
name: 'SkipDAO',
extends: 'foam.dao.ProxyDAO',
properties: [
{
name: 'skip_'
}
],
methods: [
function select(sink, skip, limit, order, predicate) {
return this.delegate.select(sink, this.skip_, limit, order, predicate);
},
function removeAll(skip, limit, order, predicate) {
return this.delegate.removeAll(this.skip_, limit, order, predicate);
}
]
});
foam.CLASS({
package: 'foam.dao',
name: 'LimitedDAO',
extends: 'foam.dao.ProxyDAO',
properties: [
{
name: 'limit_'
}
],
methods: [
function select(sink, skip, limit, order, predicate) {
return this.delegate.select(
sink, skip,
limit !== undefined ? Math.min(this.limit_, limit) : this.limit_,
order, predicate);
},
function removeAll(skip, limit, order, predicate) {
return this.delegate.removeAll(
skip,
limit !== undefined ? Math.min(this.limit_, limit) : this.limit_,
order, predicate);
}
]
});
foam.CLASS({
package: 'foam.dao',
name: 'ArraySink',
implements: ['foam.dao.Sink'],
properties: [
{
name: 'a',
factory: function() { return []; }
}
],
methods: [
function put(o) {
this.a.push(o);
}
]
});
foam.CLASS({
package: 'foam.dao',
name: 'ArrayDAO',
extends: 'foam.dao.AbstractDAO',
requires: [
'foam.dao.ArraySink',
'foam.mlang.predicate.True'
],
properties: [
{
name: 'array',
factory: function() { return []; }
}
],
methods: [
function put(obj) {
for ( var i = 0 ; i < this.array.length ; i++ ) {
if ( foam.util.equals(obj.id, this.array[i].id) ) {
this.array[i] = obj;
break;
}
}
if ( i == this.array.length ) this.array.push(obj);
this.on.put.pub(obj);
return Promise.resolve(obj);
},
function remove(obj) {
for ( var i = 0 ; i < this.array.length ; i++ ) {
if ( foam.util.equals(obj.id, this.array[i].id) ) {
var o2 = this.array.splice(i, 1)[0];
this.on.remove.pub(o2);
break;
}
}
return Promise.resolve();
},
function select(sink, skip, limit, order, predicate) {
var resultSink = sink || this.ArraySink.create();
sink = this.decorateSink_(resultSink, skip, limit, order, predicate);
var fc = this.FlowControl.create();
for ( var i = 0 ; i < this.array.length ; i++ ) {
if ( fc.stopped ) break;
if ( fc.errorEvt ) {
sink.error(fc.errorEvt);
return Promise.reject(fc.errorEvt);
}
sink.put(this.array[i], fc);
}
sink.eof();
return Promise.resolve(resultSink);
},
function removeAll(skip, limit, order, predicate) {
predicate = predicate || this.True.create();
for ( var i = 0 ; i < this.array.length ; i++ ) {
if ( predicate.f(this.array[i]) ) {
var obj = this.array.splice(i, 1)[0];
i--;
this.on.remove.pub(obj);
}
}
return Promise.resolve();
},
function find(id) {
for ( var i = 0 ; i < this.array.length ; i++ ) {
if ( foam.util.equals(id, this.array[i].id) ) {
return Promise.resolve(this.array[i]);
}
}
return Promise.reject(this.ObjectNotFoundException.create({ id: id }));
}
]
});
foam.CLASS({
package: 'foam.dao',
name: 'PromisedDAO',
extends: 'foam.dao.AbstractDAO',
properties: [
{
class: 'Promised',
of: 'foam.dao.DAO',
methods: [ 'put', 'remove', 'find', 'select', 'removeAll' ],
topics: [ 'on' ],
name: 'promise'
}
]
});
foam.CLASS({
package: 'foam.dao',
name: 'LocalStorageDAO',
extends: 'foam.dao.ArrayDAO',
properties: [
{
name: 'name',
label: 'Store Name',
class: 'foam.core.String'
}
],
methods: [
function init() {
var objs = localStorage.getItem(this.name);
if ( objs ) this.array = foam.json.parse(foam.json.parseString(objs));
this.on.put.sub(this.updated);
this.on.remove.sub(this.updated);
// TODO: base on an indexed DAO
}
],
listeners: [
{
name: 'updated',
isMerged: 100,
code: function() {
localStorage.setItem(this.name, foam.json.stringify(this.array));
}
}
]
});
foam.LIB({
name: 'foam.String',
methods: [
{
name: 'daoize',
code: foam.Function.memoize1(function(str) {
// Turns SomeClassName into someClassNameDAO.
return str.substring(0, 1).toLowerCase() + str.substring(1) + 'DAO';
})
}
]
});
/*
TODO:
-Context oriented ?
-enforcement of interfaces
-anonymous sinks ?
*/
|
src/foam/dao/dao.js
|
/*
* @license
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// TODO: This should probably be in core.
foam.CLASS({
package: 'foam.dao',
name: 'Sink',
methods: [
{
name: 'put',
args: [
'obj',
'fc'
],
code: function () {}
},
{
name: 'remove',
args: [
'obj',
'fc'
],
code: function() {}
},
{
name: 'eof',
args: [],
code: function() {}
},
{
name: 'error',
args: [],
code: function() {}
},
{
name: 'reset',
args: [],
code: function() {}
}
]
});
foam.CLASS({
package: 'foam.dao',
name: 'ProxySink',
properties: [
{
class: 'Proxy',
of: 'foam.dao.Sink',
name: 'delegate'
}
]
});
foam.CLASS({
package: 'foam.dao',
name: 'DAO',
// TODO: make an interface or abstract, then remove NOP code:'s
// documentation: 'DAO Interface',
methods: [
{
name: 'put',
code: function() { },
returns: 'Promise'
},
{
name: 'remove',
code: function() { },
returns: 'Promise'
},
{
name: 'find',
code: function() { },
returns: 'Promise'
},
{
name: 'select',
code: function() { },
returns: 'Promise'
},
{
name: 'removeAll',
code: function() { },
returns: 'Promise'
},
{
name: 'pipe', // TODO: return a promise? don't put pipe and listen here?
code: function() { },
},
{
name: 'where',
code: function() { },
},
{
name: 'orderBy',
code: function() { },
},
{
name: 'skip',
code: function() { },
},
{
name: 'limit',
code: function() { },
}
]
});
foam.CLASS({
package: 'foam.dao',
name: 'QuickSink',
extends: 'foam.dao.Sink',
properties: [
{
class: 'Function',
name: 'putFn'
},
{
class: 'Function',
name: 'removeFn'
},
{
class: 'Function',
name: 'eofFn'
},
{
class: 'Function',
name: 'errorFn'
},
{
class: 'Function',
name: 'resetFn'
},
],
methods: [
function put() {
return this.putFn && this.putFn.apply(this, arguments);
},
function remove() {
return this.removeFn && this.removeFn.apply(this, arguments);
},
function eof() {
return this.eofFn && this.eofFn.apply(this, arguments);
},
function error() {
return this.errorFn && this.errorFn.apply(this, arguments);
},
function reset() {
return this.resetFn && this.resetFn.apply(this, arguments);
},
]
});
foam.CLASS({
package: 'foam.dao',
name: 'PredicatedSink',
extends: 'foam.dao.ProxySink',
properties: [
{
name: 'predicate'
}
],
methods: [
function put(obj, fc) {
if ( this.predicate.f(obj) ) this.delegate.put(obj, fc);
},
function remove(obj, fc) {
if ( this.predicate.f(obj) ) this.delegate.remove(obj, fc);
}
]
});
foam.CLASS({
package: 'foam.dao',
name: 'LimitedSink',
extends: 'foam.dao.ProxySink',
properties: [
{
name: 'limit'
},
{
name: 'count',
class: 'Int',
value: 0
}
],
methods: [
function put(obj, fc) {
if ( this.count++ >= this.limit && fc ) {
fc.stop();
} else {
this.delegate.put(obj, fc);
}
},
function remove(obj, fc) {
if ( this.count++ >= this.limit && fc ) {
fc.stop();
} else {
this.delegate.remove(obj, fc);
}
}
]
});
foam.CLASS({
package: 'foam.dao',
name: 'SkipSink',
extends: 'foam.dao.ProxySink',
properties: [
{
name: 'skip'
},
{
name: 'count',
class: 'Int',
value: 0
}
],
methods: [
function put(obj, fc) {
if ( this.count < this.skip ) {
this.count++;
return;
}
this.delegate.put(obj, fc);
},
function remove(obj, fc) {
if ( this.count < this.skip ) {
this.count++;
return;
}
this.delegate.remove(obj, fc);
}
]
});
foam.CLASS({
package: 'foam.dao',
name: 'OrderedSink',
extends: 'foam.dao.ProxySink',
properties: [
{
name: 'comparator'
},
{
name: 'arr',
factory: function() { return []; }
}
],
methods: [
function put(obj, fc) {
this.arr.push(obj);
},
function eof() {
this.arr.sort(this.comparator.compare || this.comparator);
for ( var i = 0 ; i < this.arr.length ; i++ ) {
this.delegate.put(this.arr[i]);
}
},
function remove(obj, fc) {
// TODO
}
]
});
foam.CLASS({
package: 'foam.dao',
name: 'FlowControl',
properties: [
'stopped',
'errorEvt'
],
methods: [
function stop() { this.stopped = true; },
function error(e) { this.errorEvt = e; }
]
});
foam.CLASS({
package: 'foam.core',
name: 'Exception',
properties: [
'message'
]
});
foam.CLASS({
package: 'foam.dao',
name: 'InternalException',
extends: 'Exception'
});
foam.CLASS({
package: 'foam.dao',
name: 'ExternalException',
extends: 'Exception'
})
foam.CLASS({
package: 'foam.dao',
name: 'ObjectNotFoundException',
extends: 'foam.dao.ExternalException',
properties: [
'id',
{
name: 'message',
expression: function(id) { return "No record found for id: " + id; }
}
]
});
foam.CLASS({
package: 'foam.dao',
name: 'AbstractDAO',
implements: ['foam.dao.DAO'],
requires: [
'foam.dao.ExternalException',
'foam.dao.InternalException',
'foam.dao.ObjectNotFoundException',
'foam.dao.FlowControl',
'foam.dao.LimitedSink',
'foam.dao.SkipSink',
'foam.dao.OrderedSink',
'foam.dao.PredicatedSink',
'foam.dao.FilteredDAO',
'foam.dao.OrderedDAO',
'foam.dao.SkipDAO',
'foam.dao.LimitedDAO'
],
topics: [
{
name: 'on',
topics: [
'put',
'remove',
'reset'
]
}
],
properties: [
{
class: 'Class2',
name: 'of'
}
],
methods: [
{
name: 'where',
code: function where(p) {
return this.FilteredDAO.create({
delegate: this,
predicate: p
});
}
},
{
name: 'orderBy',
code: function orderBy(o) {
return this.OrderedDAO.create({
delegate: this,
comparator: o
});
}
},
{
name: 'skip',
code: function skip(s) {
return this.SkipDAO.create({
delegate: this,
skip_: s
});
}
},
{
name: 'limit',
code: function limit(l) {
return this.LimitedDAO.create({
delegate: this,
limit_: l
});
}
},
{
name: 'pipe',
code: function pipe(sink, skip, limit, order, predicate) {
var mySink = this.decorateSink_(sink, skip, limit, order, predicate, true);
var fc = this.FlowControl.create();
var sub;
fc.propertyChange.sub(function(s, _, p) {
if ( p.name == "stopped") {
if ( sub ) sub.destroy();
} else if ( p.name === "errorEvt" ) {
if ( sub ) sub.destroy();
mySink.error(fc.errorEvt);
}
});
this.select(sink, skip, limit, order, predicate).then(function() {
this.on.sub(function(s, on, e, obj) {
sub = s;
switch(e) {
case 'put':
sink.put(obj, fc);
break;
case 'remove':
sink.remove(obj, fc);
break;
case 'reset':
sink.reset();
break;
}
});
}.bind(this));
}
},
function update() {},
function decorateSink_(sink, skip, limit, order, predicate, isListener, disableLimit) {
if ( ! disableLimit ) {
if ( limit != undefined ) {
sink = this.LimitedSink.create({
limit: limit,
delegate: sink
});
}
if ( skip != undefined ) {
sink = this.SkipSink.create({
skip: skip,
delegate: sink
});
}
}
if ( order != undefined && ! isListener ) {
sink = this.OrderedSink.create({
comparator: order,
delegate: sink
});
}
if ( predicate != undefined ) {
sink = this.PredicatedSink.create({
predicate: predicate.partialEval ?
predicate.partialEval() :
predicate,
delegate: sink
});
}
return sink;
},
function eof() {
// Do nothing by default, but can be overridden.
// This allows DAOs to be used as a Sink.
}
]
});
foam.CLASS({
package: 'foam.dao',
name: 'ProxyDAO',
extends: 'foam.dao.AbstractDAO',
properties: [
{
class: 'Proxy',
of: 'foam.dao.DAO',
name: 'delegate',
topics: [ 'on' ],
forwards: [ 'put', 'remove', 'find', 'select', 'removeAll' ],
postSet: function(old, nu) {
// Only fire a 'reset' when the delegate is actually changing, not being
// set for the first time.
if ( old ) {
this.on.reset.pub();
}
}
}
]
});
foam.CLASS({
package: 'foam.dao',
name: 'FilteredDAO',
extends: 'foam.dao.ProxyDAO',
requires: [
'foam.mlang.predicate.And'
],
properties: [
{
name: 'predicate'
}
],
methods: [
function select(sink, skip, limit, order, predicate) {
return this.delegate.select(
sink, skip, limit, order,
predicate ?
this.And.create({ args: [this.predicate, predicate] }) :
this.predicate);
},
function removeAll(skip, limit, order, predicate) {
return this.delegate.removeAll(
skip, limit, order,
predicate ?
this.And.create({ args: [this.predicate, predicate] }) :
this.predicate);
}
]
});
foam.CLASS({
package: 'foam.dao',
name: 'OrderedDAO',
extends: 'foam.dao.ProxyDAO',
properties: [
{
name: 'comparator'
}
],
methods: [
function select(sink, skip, limit, order, predicate) {
return this.delegate.select(sink, skip, limit, this.comparator, predicate);
},
function removeAll(skip, limit, order, predicate) {
return this.delegate.removeAll(skip, limit, this.comparator, predicate);
}
]
});
foam.CLASS({
package: 'foam.dao',
name: 'SkipDAO',
extends: 'foam.dao.ProxyDAO',
properties: [
{
name: 'skip_'
}
],
methods: [
function select(sink, skip, limit, order, predicate) {
return this.delegate.select(sink, this.skip_, limit, order, predicate);
},
function removeAll(skip, limit, order, predicate) {
return this.delegate.removeAll(this.skip_, limit, order, predicate);
}
]
});
foam.CLASS({
package: 'foam.dao',
name: 'LimitedDAO',
extends: 'foam.dao.ProxyDAO',
properties: [
{
name: 'limit_'
}
],
methods: [
function select(sink, skip, limit, order, predicate) {
return this.delegate.select(
sink, skip,
limit !== undefined ? Math.min(this.limit_, limit) : this.limit_,
order, predicate);
},
function removeAll(skip, limit, order, predicate) {
return this.delegate.removeAll(
skip,
limit !== undefined ? Math.min(this.limit_, limit) : this.limit_,
order, predicate);
}
]
});
foam.CLASS({
package: 'foam.dao',
name: 'ArraySink',
implements: ['foam.dao.Sink'],
properties: [
{
name: 'a',
factory: function() { return []; }
}
],
methods: [
function put(o) {
this.a.push(o);
}
]
});
foam.CLASS({
package: 'foam.dao',
name: 'ArrayDAO',
extends: 'foam.dao.AbstractDAO',
requires: [
'foam.dao.ArraySink',
'foam.mlang.predicate.True'
],
properties: [
{
name: 'array',
factory: function() { return []; }
}
],
methods: [
function put(obj) {
for ( var i = 0 ; i < this.array.length ; i++ ) {
if ( foam.util.equals(obj.id, this.array[i].id) ) {
this.array[i] = obj;
break;
}
}
if ( i == this.array.length ) this.array.push(obj);
this.on.put.pub(obj);
return Promise.resolve(obj);
},
function remove(obj) {
for ( var i = 0 ; i < this.array.length ; i++ ) {
if ( foam.util.equals(obj.id, this.array[i].id) ) {
var o2 = this.array.splice(i, 1)[0];
this.on.remove.pub(o2);
break;
}
}
return Promise.resolve();
},
function select(sink, skip, limit, order, predicate) {
var resultSink = sink || this.ArraySink.create();
sink = this.decorateSink_(resultSink, skip, limit, order, predicate);
var fc = this.FlowControl.create();
for ( var i = 0 ; i < this.array.length ; i++ ) {
if ( fc.stopped ) break;
if ( fc.errorEvt ) {
sink.error(fc.errorEvt);
return Promise.reject(fc.errorEvt);
}
sink.put(this.array[i], fc);
}
sink.eof();
return Promise.resolve(resultSink);
},
function removeAll(skip, limit, order, predicate) {
predicate = predicate || this.True.create();
for ( var i = 0 ; i < this.array.length ; i++ ) {
if ( predicate.f(this.array[i]) ) {
var obj = this.array.splice(i, 1)[0];
i--;
this.on.remove.pub(obj);
}
}
return Promise.resolve();
},
function find(id) {
for ( var i = 0 ; i < this.array.length ; i++ ) {
if ( foam.util.equals(id, this.array[i].id) ) {
return Promise.resolve(this.array[i]);
}
}
return Promise.reject(this.ObjectNotFoundException.create({ id: id }));
}
]
});
foam.CLASS({
package: 'foam.dao',
name: 'PromisedDAO',
extends: 'foam.dao.AbstractDAO',
properties: [
{
class: 'Promised',
of: 'foam.dao.DAO',
methods: [ 'put', 'remove', 'find', 'select', 'removeAll' ],
topics: [ 'on' ],
name: 'promise'
}
]
});
foam.CLASS({
package: 'foam.dao',
name: 'LocalStorageDAO',
extends: 'foam.dao.ArrayDAO',
properties: [
{
name: 'name',
label: 'Store Name',
class: 'foam.core.String'
}
],
methods: [
function init() {
var objs = localStorage.getItem(this.name);
if ( objs ) this.array = foam.json.parse(foam.json.parseString(objs));
this.on.put.sub(this.updated);
this.on.remove.sub(this.updated);
// TODO: base on an indexed DAO
}
],
listeners: [
{
name: 'updated',
isMerged: 100,
code: function() {
localStorage.setItem(this.name, foam.json.stringify(this.array));
}
}
]
});
foam.LIB({
name: 'foam.String',
methods: [
{
name: 'daoize',
code: foam.Function.memoize1(function(str) {
// Turns SomeClassName into someClassNameDAO.
return str.substring(0, 1).toLowerCase() + str.substring(1) + 'DAO';
})
}
]
});
/*
TODO:
-Context oriented ?
-enforcement of interfaces
-anonymous sinks ?
*/
|
Added AbstractSink as base class, implementing Sink interface. Closes #65
|
src/foam/dao/dao.js
|
Added AbstractSink as base class, implementing Sink interface. Closes #65
|
<ide><path>src/foam/dao/dao.js
<ide>
<ide> foam.CLASS({
<ide> package: 'foam.dao',
<add> name: 'AbstractSink',
<add>
<add> implements: [ 'foam.dao.Sink' ]
<add>
<add>});
<add>
<add>foam.CLASS({
<add> package: 'foam.dao',
<ide> name: 'QuickSink',
<ide>
<del> extends: 'foam.dao.Sink',
<add> extends: 'foam.dao.AbstractSink',
<ide>
<ide> properties: [
<ide> {
|
|
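decorateSink_ in the foam AbstractDAO above chains decorators such as LimitedSink and SkipSink around the target sink before a select. A rough Java analogue of that decorator chain (the Sink type and helper methods here are illustrative stand-ins, not part of foam) could be:
import java.util.function.Consumer;
// Rough Java analogue of the sink decorators chained by decorateSink_ above:
// each decorator forwards put() (here accept()) to a delegate, applying skip/limit first.
// These types are illustrative stand-ins, not part of foam.
public class SinkDecoratorSketch {
    interface Sink<T> extends Consumer<T> {}
    static <T> Sink<T> limited(int limit, Sink<T> delegate) {
        return new Sink<T>() {
            private int count;
            @Override
            public void accept(T obj) {
                if (count++ < limit) {
                    delegate.accept(obj);
                }
            }
        };
    }
    static <T> Sink<T> skipping(int skip, Sink<T> delegate) {
        return new Sink<T>() {
            private int count;
            @Override
            public void accept(T obj) {
                if (count++ >= skip) {
                    delegate.accept(obj);
                }
            }
        };
    }
    public static void main(String[] args) {
        Sink<String> print = System.out::println;
        // Equivalent of decorating with skip=1 and limit=2: drop one row, then take two.
        Sink<String> decorated = skipping(1, limited(2, print));
        for (String row : new String[] {"a", "b", "c", "d"}) {
            decorated.accept(row);
        }
        // prints: b, c
    }
}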
Java
|
apache-2.0
|
6473bfb69bb3e91b28c0e0b440ddf8244a6c1c10
| 0 |
noboomu/proteus,noboomu/proteus
|
package io.sinistral.proteus.services;
import java.io.File;
import java.io.InputStream;
import java.net.URL;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Supplier;
import java.util.jar.JarFile;
import java.util.stream.Collectors;
import javax.ws.rs.HttpMethod;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.fasterxml.jackson.datatype.jdk8.Jdk8Module;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import com.google.inject.name.Named;
import com.typesafe.config.Config;
import io.sinistral.proteus.server.endpoints.EndpointInfo;
import io.sinistral.proteus.server.tools.oas.Reader;
import io.sinistral.proteus.server.tools.oas.ServerModelResolver;
import io.sinistral.proteus.server.tools.oas.ServerParameterExtension;
import io.swagger.v3.core.util.Json;
import io.swagger.v3.core.util.Yaml;
import io.swagger.v3.jaxrs2.ext.OpenAPIExtensions;
import io.swagger.v3.jaxrs2.integration.JaxrsApplicationAndAnnotationScanner;
import io.swagger.v3.oas.integration.GenericOpenApiContext;
import io.swagger.v3.oas.integration.SwaggerConfiguration;
import io.swagger.v3.oas.integration.api.OpenApiContext;
import io.swagger.v3.oas.models.Components;
import io.swagger.v3.oas.models.OpenAPI;
import io.swagger.v3.oas.models.info.Info;
import io.swagger.v3.oas.models.security.SecurityScheme;
import io.swagger.v3.oas.models.servers.Server;
import io.undertow.server.HandlerWrapper;
import io.undertow.server.HttpHandler;
import io.undertow.server.HttpServerExchange;
import io.undertow.server.RoutingHandler;
import io.undertow.util.Headers;
import io.undertow.util.Methods;
@Singleton
public class OpenAPIService extends BaseService implements Supplier<RoutingHandler>
{
private static Logger log = LoggerFactory.getLogger(OpenAPIService.class.getCanonicalName());
protected final String resourcePathPrefix = "oas";
@Inject
@Named("openapi.resourcePrefix")
protected String resourcePrefix;
@Inject
@Named("openapi.basePath")
protected String basePath;
@Inject
@Named("openapi.specFilename")
protected String specFilename;
@Inject
@Named("openapi")
protected Config openAPIConfig;
@Inject
@Named("application.name")
protected String applicationName;
@Inject
@Named("openapi.port")
protected Integer port;
@Inject
@Named("application.path")
protected String applicationPath;
@Inject
protected RoutingHandler router;
@Inject
@Named("registeredEndpoints")
protected Set<EndpointInfo> registeredEndpoints;
@Inject
@Named("registeredControllers")
protected Set<Class<?>> registeredControllers;
@Inject
@Named("registeredHandlerWrappers")
protected Map<String,HandlerWrapper> registeredHandlerWrappers;
protected ObjectMapper mapper = null;
protected ObjectWriter writer = null;
protected ObjectMapper yamlMapper = null;
protected Path resourcePath = null;
protected ClassLoader serviceClassLoader = null;
protected OpenAPI openApi = null;
protected String spec = null;
protected String indexHTML = null;
public OpenAPIService( )
{
mapper = Json.mapper();
mapper.registerModule(new Jdk8Module());
yamlMapper = Yaml.mapper();
writer = Yaml.pretty();
}
@SuppressWarnings("rawtypes")
public void generateSpec() throws Exception
{
Set<Class<?>> classes = this.registeredControllers;
OpenAPIExtensions.setExtensions(Collections.singletonList(new ServerParameterExtension()));
OpenAPI openApi = new OpenAPI();
Info info = mapper.convertValue(openAPIConfig.getValue("info").unwrapped(), Info.class);
openApi.setInfo(info);
Map<String,SecurityScheme> securitySchemes = mapper.convertValue(openAPIConfig.getValue("securitySchemes").unwrapped(), new TypeReference<Map<String,SecurityScheme>>(){});
if(openApi.getComponents() == null)
{
openApi.setComponents(new Components());
}
openApi.getComponents().setSecuritySchemes(securitySchemes);
List<Server> servers = mapper.convertValue(openAPIConfig.getValue("servers").unwrapped(), new TypeReference<List<Server>>(){});
openApi.setServers(servers);
SwaggerConfiguration config = new SwaggerConfiguration()
.resourceClasses(classes.stream().map( c -> c.getName()).collect(Collectors.toSet()))
.openAPI(openApi);
config.setModelConverterClassess(Collections.singleton(ServerModelResolver.class.getName()));
OpenApiContext ctx = new GenericOpenApiContext()
.openApiConfiguration(config)
.openApiReader(new Reader(config))
.openApiScanner(new JaxrsApplicationAndAnnotationScanner().openApiConfiguration(config))
.init();
openApi = ctx.read();
this.openApi = openApi;
this.spec = writer.writeValueAsString(openApi);
}
public void generateHTML()
{
try
{
try(InputStream templateInputStream = this.getClass().getClassLoader().getResourceAsStream(resourcePrefix + "/index.html"))
{
byte[] templateBytes = IOUtils.toByteArray(templateInputStream);
String templateString = new String(templateBytes,Charset.defaultCharset());
templateString = templateString.replaceAll("\\{\\{ basePath \\}\\}", basePath);
templateString = templateString.replaceAll("\\{\\{ title \\}\\}",applicationName + " Swagger UI");
templateString = templateString.replaceAll("\\{\\{ filePath \\}\\}", basePath + ".yaml");
this.indexHTML = templateString;
}
URL url = this.getClass().getClassLoader().getResource(resourcePrefix);
if( url.toExternalForm().contains("!") )
{
log.debug("Copying OpenAPI resources...");
String jarPathString = url.toExternalForm().substring(0, url.toExternalForm().indexOf("!") ).replaceAll("file:", "").replaceAll("jar:", "");
File srcFile = new File(jarPathString);
try(JarFile jarFile = new JarFile(srcFile, false))
{
String appName = config.getString("application.name").replaceAll(" ", "_");
Path tmpDirParent = Files.createTempDirectory(appName);
Path tmpDir = tmpDirParent.resolve("oas/");
if(tmpDir.toFile().exists())
{
log.debug("Deleting existing OpenAPI directory at " + tmpDir);
try
{
FileUtils.deleteDirectory(tmpDir.toFile());
} catch (java.lang.IllegalArgumentException e)
{
log.debug("Tmp directory is not a directory...");
tmpDir.toFile().delete();
}
}
java.nio.file.Files.createDirectory( tmpDir );
this.resourcePath = tmpDir;
jarFile.stream().filter( ze -> ze.getName().endsWith("js") || ze.getName().endsWith("css") || ze.getName().endsWith("map") || ze.getName().endsWith("html") ).forEach( ze -> {
try
{
final InputStream entryInputStream = jarFile.getInputStream(ze);
String filename = ze.getName().substring(resourcePrefix.length() + 1);
Path entryFilePath = tmpDir.resolve(filename);
java.nio.file.Files.createDirectories(entryFilePath.getParent());
java.nio.file.Files.copy(entryInputStream, entryFilePath,StandardCopyOption.REPLACE_EXISTING);
} catch (Exception e)
{
log.error(e.getMessage() + " for entry " + ze.getName());
}
});
}
}
else
{
this.resourcePath = Paths.get(this.getClass().getClassLoader().getResource(this.resourcePrefix).toURI());
this.serviceClassLoader = this.getClass().getClassLoader();
}
} catch (Exception e)
{
log.error(e.getMessage(),e);
}
}
public RoutingHandler get()
{
RoutingHandler router = new RoutingHandler();
/*
* YAML path
*/
String pathTemplate = this.applicationPath + File.separator + this.specFilename ;
//FileResourceManager resourceManager = new FileResourceManager(this.resourcePath.toFile(),1024);
router.add(HttpMethod.GET, pathTemplate, new HttpHandler(){
@Override
public void handleRequest(HttpServerExchange exchange) throws Exception
{
exchange.getResponseHeaders().put(Headers.CONTENT_TYPE, io.sinistral.proteus.server.MediaType.TEXT_YAML.contentType());
try
{
// swaggerCopy.setHost(exchange.getHostAndPort());
//
// spec = writer.writeValueAsString(swaggerCopy);
} catch (Exception e)
{
log.error(e.getMessage(),e);
}
exchange.getResponseSender().send(spec);
}
});
this.registeredEndpoints.add(EndpointInfo.builder().withConsumes("*/*").withPathTemplate(pathTemplate).withControllerName(this.getClass().getSimpleName()).withMethod(Methods.GET).withProduces(io.sinistral.proteus.server.MediaType.TEXT_YAML.contentType()).build());
/*
pathTemplate = this.basePath;
router.add(HttpMethod.GET, pathTemplate , new HttpHandler(){
@Override
public void handleRequest(HttpServerExchange exchange) throws Exception
{
exchange.getResponseHeaders().put(Headers.CONTENT_TYPE, MediaType.TEXT_HTML);
exchange.getResponseSender().send(indexHTML);
}
});
this.registeredEndpoints.add(EndpointInfo.builder().withConsumes(MediaType.WILDCARD).withProduces(MediaType.TEXT_HTML).withPathTemplate(pathTemplate).withControllerName(this.getClass().getSimpleName()).withMethod(Methods.GET).build());
try
{
pathTemplate = this.basePath + "/*";
router.add(HttpMethod.GET, pathTemplate, new ResourceHandler(resourceManager){
@Override
public void handleRequest(HttpServerExchange exchange) throws Exception
{
String canonicalPath = CanonicalPathUtils.canonicalize((exchange.getRelativePath()));
canonicalPath = canonicalPath.split(basePath)[1];
exchange.setRelativePath(canonicalPath);
if(serviceClassLoader == null)
{
super.handleRequest(exchange);
}
else
{
canonicalPath = resourcePrefix + canonicalPath;
try(final InputStream resourceInputStream = serviceClassLoader.getResourceAsStream( canonicalPath))
{
if(resourceInputStream == null)
{
ResponseCodeHandler.HANDLE_404.handleRequest(exchange);
return;
}
byte[] resourceBytes = IOUtils.toByteArray(resourceInputStream);
io.sinistral.proteus.server.MediaType mediaType = io.sinistral.proteus.server.MediaType.getByFileName(canonicalPath);
exchange.getResponseHeaders().put(Headers.CONTENT_TYPE, mediaType.toString());
exchange.getResponseSender().send(ByteBuffer.wrap(resourceBytes));
}
}
}
});
this.registeredEndpoints.add(EndpointInfo.builder().withConsumes(MediaType.WILDCARD).withProduces(MediaType.WILDCARD).withPathTemplate(pathTemplate).withControllerName(this.getClass().getSimpleName()).withMethod(Methods.GET).build());
} catch (Exception e)
{
log.error(e.getMessage(),e);
}
*/
return router;
}
/* (non-Javadoc)
* @see com.google.common.util.concurrent.AbstractIdleService#startUp()
*/
@Override
protected void startUp() throws Exception
{
// TODO Auto-generated method stub
this.generateSpec();
//this.generateHTML();
log.debug("\nOpenAPI Spec:\n" + writer.writeValueAsString(this.openApi));
router.addAll(this.get());
}
/* (non-Javadoc)
* @see com.google.common.util.concurrent.AbstractIdleService#shutDown()
*/
@Override
protected void shutDown() throws Exception
{
// TODO Auto-generated method stub
}
}
|
src/main/java/io/sinistral/proteus/services/OpenAPIService.java
|
package io.sinistral.proteus.services;
import java.io.File;
import java.io.InputStream;
import java.net.URL;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Supplier;
import java.util.jar.JarFile;
import java.util.stream.Collectors;
import javax.ws.rs.HttpMethod;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.dataformat.yaml.YAMLMapper;
import com.fasterxml.jackson.datatype.jdk8.Jdk8Module;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import com.google.inject.name.Named;
import com.typesafe.config.Config;
import io.sinistral.proteus.server.endpoints.EndpointInfo;
import io.sinistral.proteus.server.tools.oas.Reader;
import io.sinistral.proteus.server.tools.oas.ServerModelResolver;
import io.sinistral.proteus.server.tools.oas.ServerParameterExtension;
import io.swagger.util.Yaml;
import io.swagger.v3.core.util.Json;
import io.swagger.v3.jaxrs2.ext.OpenAPIExtensions;
import io.swagger.v3.jaxrs2.integration.JaxrsApplicationAndAnnotationScanner;
import io.swagger.v3.oas.integration.GenericOpenApiContext;
import io.swagger.v3.oas.integration.SwaggerConfiguration;
import io.swagger.v3.oas.integration.api.OpenApiContext;
import io.swagger.v3.oas.models.Components;
import io.swagger.v3.oas.models.OpenAPI;
import io.swagger.v3.oas.models.info.Info;
import io.swagger.v3.oas.models.security.SecurityScheme;
import io.swagger.v3.oas.models.servers.Server;
import io.undertow.server.HandlerWrapper;
import io.undertow.server.HttpHandler;
import io.undertow.server.HttpServerExchange;
import io.undertow.server.RoutingHandler;
import io.undertow.util.Headers;
import io.undertow.util.Methods;
@Singleton
public class OpenAPIService extends BaseService implements Supplier<RoutingHandler>
{
private static Logger log = LoggerFactory.getLogger(OpenAPIService.class.getCanonicalName());
protected final String resourcePathPrefix = "oas";
@Inject
@Named("openapi.resourcePrefix")
protected String resourcePrefix;
@Inject
@Named("openapi.basePath")
protected String basePath;
@Inject
@Named("openapi.specFilename")
protected String specFilename;
@Inject
@Named("openapi")
protected Config openAPIConfig;
@Inject
@Named("application.name")
protected String applicationName;
@Inject
@Named("openapi.port")
protected Integer port;
@Inject
@Named("application.path")
protected String applicationPath;
@Inject
protected RoutingHandler router;
@Inject
@Named("registeredEndpoints")
protected Set<EndpointInfo> registeredEndpoints;
@Inject
@Named("registeredControllers")
protected Set<Class<?>> registeredControllers;
@Inject
@Named("registeredHandlerWrappers")
protected Map<String,HandlerWrapper> registeredHandlerWrappers;
protected ObjectMapper mapper = null;
protected ObjectWriter writer = null;
protected ObjectMapper yamlMapper = null;
protected Path resourcePath = null;
protected ClassLoader serviceClassLoader = null;
protected OpenAPI openApi = null;
protected String spec = null;
protected String indexHTML = null;
public OpenAPIService( )
{
mapper = Json.mapper();
mapper.registerModule(new Jdk8Module());
yamlMapper = Yaml.mapper();
writer = Yaml.pretty();
}
@SuppressWarnings("rawtypes")
public void generateSpec() throws Exception
{
Set<Class<?>> classes = this.registeredControllers;
OpenAPIExtensions.setExtensions(Collections.singletonList(new ServerParameterExtension()));
OpenAPI openApi = new OpenAPI();
Info info = mapper.convertValue(openAPIConfig.getValue("info").unwrapped(), Info.class);
openApi.setInfo(info);
Map<String,SecurityScheme> securitySchemes = mapper.convertValue(openAPIConfig.getValue("securitySchemes").unwrapped(), new TypeReference<Map<String,SecurityScheme>>(){});
if(openApi.getComponents() == null)
{
openApi.setComponents(new Components());
}
openApi.getComponents().setSecuritySchemes(securitySchemes);
List<Server> servers = mapper.convertValue(openAPIConfig.getValue("servers").unwrapped(), new TypeReference<List<Server>>(){});
openApi.setServers(servers);
SwaggerConfiguration config = new SwaggerConfiguration()
.resourceClasses(classes.stream().map( c -> c.getName()).collect(Collectors.toSet()))
.openAPI(openApi);
config.setModelConverterClassess(Collections.singleton(ServerModelResolver.class.getName()));
OpenApiContext ctx = new GenericOpenApiContext()
.openApiConfiguration(config)
.openApiReader(new Reader(config))
.openApiScanner(new JaxrsApplicationAndAnnotationScanner().openApiConfiguration(config))
.init();
openApi = ctx.read();
this.openApi = openApi;
this.spec = writer.writeValueAsString(openApi);
}
public void generateHTML()
{
try
{
try(InputStream templateInputStream = this.getClass().getClassLoader().getResourceAsStream(resourcePrefix + "/index.html"))
{
byte[] templateBytes = IOUtils.toByteArray(templateInputStream);
String templateString = new String(templateBytes,Charset.defaultCharset());
templateString = templateString.replaceAll("\\{\\{ basePath \\}\\}", basePath);
templateString = templateString.replaceAll("\\{\\{ title \\}\\}",applicationName + " Swagger UI");
templateString = templateString.replaceAll("\\{\\{ filePath \\}\\}", basePath + ".yaml");
this.indexHTML = templateString;
}
URL url = this.getClass().getClassLoader().getResource(resourcePrefix);
if( url.toExternalForm().contains("!") )
{
log.debug("Copying OpenAPI resources...");
String jarPathString = url.toExternalForm().substring(0, url.toExternalForm().indexOf("!") ).replaceAll("file:", "").replaceAll("jar:", "");
File srcFile = new File(jarPathString);
try(JarFile jarFile = new JarFile(srcFile, false))
{
String appName = config.getString("application.name").replaceAll(" ", "_");
Path tmpDirParent = Files.createTempDirectory(appName);
Path tmpDir = tmpDirParent.resolve("oas/");
if(tmpDir.toFile().exists())
{
log.debug("Deleting existing OpenAPI directory at " + tmpDir);
try
{
FileUtils.deleteDirectory(tmpDir.toFile());
} catch (java.lang.IllegalArgumentException e)
{
log.debug("Tmp directory is not a directory...");
tmpDir.toFile().delete();
}
}
java.nio.file.Files.createDirectory( tmpDir );
this.resourcePath = tmpDir;
jarFile.stream().filter( ze -> ze.getName().endsWith("js") || ze.getName().endsWith("css") || ze.getName().endsWith("map") || ze.getName().endsWith("html") ).forEach( ze -> {
try
{
final InputStream entryInputStream = jarFile.getInputStream(ze);
String filename = ze.getName().substring(resourcePrefix.length() + 1);
Path entryFilePath = tmpDir.resolve(filename);
java.nio.file.Files.createDirectories(entryFilePath.getParent());
java.nio.file.Files.copy(entryInputStream, entryFilePath,StandardCopyOption.REPLACE_EXISTING);
} catch (Exception e)
{
log.error(e.getMessage() + " for entry " + ze.getName());
}
});
}
}
else
{
this.resourcePath = Paths.get(this.getClass().getClassLoader().getResource(this.resourcePrefix).toURI());
this.serviceClassLoader = this.getClass().getClassLoader();
}
} catch (Exception e)
{
log.error(e.getMessage(),e);
}
}
public RoutingHandler get()
{
RoutingHandler router = new RoutingHandler();
/*
* YAML path
*/
String pathTemplate = this.applicationPath + File.separator + this.specFilename ;
//FileResourceManager resourceManager = new FileResourceManager(this.resourcePath.toFile(),1024);
router.add(HttpMethod.GET, pathTemplate, new HttpHandler(){
@Override
public void handleRequest(HttpServerExchange exchange) throws Exception
{
exchange.getResponseHeaders().put(Headers.CONTENT_TYPE, io.sinistral.proteus.server.MediaType.TEXT_YAML.contentType());
try
{
// swaggerCopy.setHost(exchange.getHostAndPort());
//
// spec = writer.writeValueAsString(swaggerCopy);
} catch (Exception e)
{
log.error(e.getMessage(),e);
}
exchange.getResponseSender().send(spec);
}
});
this.registeredEndpoints.add(EndpointInfo.builder().withConsumes("*/*").withPathTemplate(pathTemplate).withControllerName(this.getClass().getSimpleName()).withMethod(Methods.GET).withProduces(io.sinistral.proteus.server.MediaType.TEXT_YAML.contentType()).build());
/*
pathTemplate = this.basePath;
router.add(HttpMethod.GET, pathTemplate , new HttpHandler(){
@Override
public void handleRequest(HttpServerExchange exchange) throws Exception
{
exchange.getResponseHeaders().put(Headers.CONTENT_TYPE, MediaType.TEXT_HTML);
exchange.getResponseSender().send(indexHTML);
}
});
this.registeredEndpoints.add(EndpointInfo.builder().withConsumes(MediaType.WILDCARD).withProduces(MediaType.TEXT_HTML).withPathTemplate(pathTemplate).withControllerName(this.getClass().getSimpleName()).withMethod(Methods.GET).build());
try
{
pathTemplate = this.basePath + "/*";
router.add(HttpMethod.GET, pathTemplate, new ResourceHandler(resourceManager){
@Override
public void handleRequest(HttpServerExchange exchange) throws Exception
{
String canonicalPath = CanonicalPathUtils.canonicalize((exchange.getRelativePath()));
canonicalPath = canonicalPath.split(basePath)[1];
exchange.setRelativePath(canonicalPath);
if(serviceClassLoader == null)
{
super.handleRequest(exchange);
}
else
{
canonicalPath = resourcePrefix + canonicalPath;
try(final InputStream resourceInputStream = serviceClassLoader.getResourceAsStream( canonicalPath))
{
if(resourceInputStream == null)
{
ResponseCodeHandler.HANDLE_404.handleRequest(exchange);
return;
}
byte[] resourceBytes = IOUtils.toByteArray(resourceInputStream);
io.sinistral.proteus.server.MediaType mediaType = io.sinistral.proteus.server.MediaType.getByFileName(canonicalPath);
exchange.getResponseHeaders().put(Headers.CONTENT_TYPE, mediaType.toString());
exchange.getResponseSender().send(ByteBuffer.wrap(resourceBytes));
}
}
}
});
this.registeredEndpoints.add(EndpointInfo.builder().withConsumes(MediaType.WILDCARD).withProduces(MediaType.WILDCARD).withPathTemplate(pathTemplate).withControllerName(this.getClass().getSimpleName()).withMethod(Methods.GET).build());
} catch (Exception e)
{
log.error(e.getMessage(),e);
}
*/
return router;
}
/* (non-Javadoc)
* @see com.google.common.util.concurrent.AbstractIdleService#startUp()
*/
@Override
protected void startUp() throws Exception
{
// TODO Auto-generated method stub
this.generateSpec();
//this.generateHTML();
log.debug("\nOpenAPI Spec:\n" + writer.writeValueAsString(this.openApi));
router.addAll(this.get());
}
/* (non-Javadoc)
* @see com.google.common.util.concurrent.AbstractIdleService#shutDown()
*/
@Override
protected void shutDown() throws Exception
{
// TODO Auto-generated method stub
}
}
|
Fix for wrong YAML mapper.
|
src/main/java/io/sinistral/proteus/services/OpenAPIService.java
|
Fix for wrong YAML mapper.
|
<ide><path>src/main/java/io/sinistral/proteus/services/OpenAPIService.java
<ide> import org.slf4j.Logger;
<ide> import org.slf4j.LoggerFactory;
<ide>
<del>import com.fasterxml.jackson.annotation.JsonInclude.Include;
<ide> import com.fasterxml.jackson.core.type.TypeReference;
<del>import com.fasterxml.jackson.databind.DeserializationFeature;
<ide> import com.fasterxml.jackson.databind.ObjectMapper;
<ide> import com.fasterxml.jackson.databind.ObjectWriter;
<del>import com.fasterxml.jackson.databind.SerializationFeature;
<del>import com.fasterxml.jackson.dataformat.yaml.YAMLMapper;
<ide> import com.fasterxml.jackson.datatype.jdk8.Jdk8Module;
<ide> import com.google.inject.Inject;
<ide> import com.google.inject.Singleton;
<ide> import io.sinistral.proteus.server.tools.oas.Reader;
<ide> import io.sinistral.proteus.server.tools.oas.ServerModelResolver;
<ide> import io.sinistral.proteus.server.tools.oas.ServerParameterExtension;
<del>import io.swagger.util.Yaml;
<ide> import io.swagger.v3.core.util.Json;
<add>import io.swagger.v3.core.util.Yaml;
<ide> import io.swagger.v3.jaxrs2.ext.OpenAPIExtensions;
<ide> import io.swagger.v3.jaxrs2.integration.JaxrsApplicationAndAnnotationScanner;
<ide> import io.swagger.v3.oas.integration.GenericOpenApiContext;
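The fix above replaces the Swagger 2.x YAML helper (io.swagger.util.Yaml) with the OpenAPI 3 one (io.swagger.v3.core.util.Yaml), which wraps a Jackson mapper configured for the v3 model classes that OpenAPIService builds. As a hedged, self-contained sketch of that serialization path (not taken from Proteus; the title and version values are invented):

import io.swagger.v3.core.util.Json;
import io.swagger.v3.core.util.Yaml;
import io.swagger.v3.oas.models.OpenAPI;
import io.swagger.v3.oas.models.info.Info;

public class OpenApiYamlSketch
{
    public static void main(String[] args) throws Exception
    {
        // Build a tiny OpenAPI 3 model, the same model type the service assembles.
        OpenAPI openApi = new OpenAPI().info(new Info().title("Example API").version("1.0.0"));

        // The v3 helpers understand io.swagger.v3.oas.models.*; the older
        // io.swagger.util.Yaml targets the Swagger 2.x model and is the wrong mapper here.
        String yaml = Yaml.pretty().writeValueAsString(openApi);
        String json = Json.mapper().writeValueAsString(openApi);

        System.out.println(yaml);
        System.out.println(json);
    }
}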
|
|
JavaScript
|
mit
|
66af7c881dcee2badc8834eeeeed07be3c2da2b7
| 0 |
jessiejea/Flotr2,alanponce/Flotr2,alanponce/Flotr2,nicgallardo/Flotr2,alanponce/Flotr2,nicgallardo/Flotr2,HumbleSoftware/Flotr2,beastlike/Flotr2,RobertoMalatesta/Flotr2,beastlike/Flotr2,beastlike/Flotr2,jessiejea/Flotr2,jessiejea/Flotr2,RobertoMalatesta/Flotr2,HumbleSoftware/Flotr2
|
/** Bubbles **/
Flotr.addType('bubbles', {
options: {
show: false, // => setting to true will show bubbles, false will hide
lineWidth: 2, // => line width in pixels
fill: true, // => true to fill the area from the line to the x axis, false for (transparent) no fill
fillOpacity: 0.4, // => opacity of the fill color, set to 1 for a solid fill, 0 hides the fill
baseRadius: 2 // => ratio of the bubble radius, against the plot size
},
draw : function (options) {
var
context = options.context,
shadowSize = options.shadowSize;
context.save();
context.translate(options.offsetLeft, options.offsetTop);
context.lineWidth = options.lineWidth;
// Shadows
context.fillStyle = 'rgba(0,0,0,0.05)';
context.strokeStyle = 'rgba(0,0,0,0.05)';
this.plot(options, shadowSize / 2);
context.strokeStyle = 'rgba(0,0,0,0.1)';
this.plot(options, shadowSize / 4);
// Chart
context.strokeStyle = options.color;
context.fillStyle = options.fillStyle;
this.plot(options);
context.restore();
},
plot : function (options, offset) {
var
data = options.data,
context = options.context,
i, x, y, z;
offset = offset || 0;
for (i = 0; i < data.length; ++i){
x = options.xScale(data[i][0]) + offset,
y = options.yScale(data[i][1]) + offset,
z = data[i][2] * options.baseRadius;
context.beginPath();
context.arc(x, y, z, 0, Math.PI*2, true);
context.stroke();
if (options.fill) context.fill();
context.closePath();
}
},
drawHit : function (options) {
var
data = options.data,
args = options.args,
context = options.context,
x = options.xScale(args.x),
y = options.yScale(args.y),
z = data[args.index][2] * options.baseRadius;
context.save();
context.lineWidth = options.lineWidth;
context.fillStyle = options.fillStyle;
context.strokeStyle = options.color;
context.translate(options.offsetLeft, options.offsetTop);
context.beginPath();
context.arc(x, y, z, 0, 2 * Math.PI, true);
context.fill();
context.stroke();
context.closePath();
context.restore();
},
clearHit : function (options) {
var
data = options.data,
args = options.args,
context = options.context,
x = options.xScale(args.x),
y = options.yScale(args.y),
z = data[args.index][2] * options.baseRadius,
offset = z + options.lineWidth;
context.save();
context.translate(options.offsetLeft, options.offsetTop);
context.clearRect(
x - offset,
y - offset,
2 * offset,
2 * offset
);
context.restore();
}
/*,
extendXRange: function(axis){
if(axis.options.max == null){
var newmin = axis.min,
newmax = axis.max,
i, j, c, r, data, d;
for(i = 0; i < this.series.length; ++i){
c = this.series[i].bubbles;
if(c.show && this.series[i].xaxis == axis) {
data = this.series[i].data;
if (data)
for(j = 0; j < data.length; j++) {
d = data[j];
r = d[2] * c.baseRadius * (this.plotWidth / (axis.datamax - axis.datamin));
newmax = Math.max(d[0] + r, newmax);
newmin = Math.min(d[0] - r, newmin);
}
}
}
axis.max = newmax;
axis.min = newmin;
}
},
extendYRange: function(axis){
if(axis.options.max == null){
var newmin = axis.min,
newmax = axis.max,
i, j, c, r, data, d;
for(i = 0; i < this.series.length; ++i){
c = this.series[i].bubbles;
if(c.show && this.series[i].yaxis == axis) {
data = this.series[i].data;
if (data)
for(j = 0; j < data.length; j++) {
d = data[j];
r = d[2] * c.baseRadius;
newmax = Math.max(d[1] + r, newmax);
newmin = Math.min(d[1] - r, newmin);
}
}
}
axis.max = newmax;
axis.min = newmin;
}
}*/
});
|
js/types/bubbles.js
|
/** Bubbles **/
Flotr.addType('bubbles', {
options: {
show: false, // => setting to true will show bubbles, false will hide
lineWidth: 2, // => line width in pixels
fill: true, // => true to fill the area from the line to the x axis, false for (transparent) no fill
fillOpacity: 0.4, // => opacity of the fill color, set to 1 for a solid fill, 0 hides the fill
baseRadius: 2 // => ratio of the bubble radius, against the plot size
},
draw : function (options) {
var
context = options.context,
shadowSize = options.shadowSize;
context.save();
context.translate(options.offsetLeft, options.offsetTop);
context.lineWidth = options.lineWidth;
// Shadows
context.fillStyle = 'rgba(0,0,0,0.05)';
context.strokeStyle = 'rgba(0,0,0,0.05)';
this.plot(options, shadowSize / 2);
context.strokeStyle = 'rgba(0,0,0,0.1)';
this.plot(options, shadowSize / 4);
// Chart
context.strokeStyle = options.color;
context.fillStyle = options.fillStyle;
this.plot(options);
context.restore();
},
plot : function (options, offset) {
var
data = options.data,
context = options.context,
i, x, y, z;
offset = offset || 0;
for (i = 0; i < data.length; ++i){
x = options.xScale(data[i][0]) + offset,
y = options.yScale(data[i][1]) + offset,
z = data[i][2] * options.baseRadius;
context.beginPath();
context.arc(x, y, z, 0, Math.PI*2, true);
context.stroke();
if (options.fill) context.fill();
context.closePath();
}
},
drawHit : function (options) {
var
data = options.data,
args = options.args,
context = options.context,
x = options.xScale(args.x),
y = options.yScale(args.y),
z = data[args.index][2] * options.baseRadius;
context.save();
context.lineWidth = options.lineWidth;
context.fillStyle = options.fillStyle;
context.strokeStyle = options.color;
context.translate(options.offsetLeft, options.offsetTop);
context.beginPath();
context.arc(x, y, z, 0, 2 * Math.PI, true);
context.fill();
context.stroke();
context.closePath();
context.restore();
},
clearHit: function(){
var prevHit = this.prevHit,
plotOffset = this.plotOffset,
s = prevHit.series,
lw = s.bars.lineWidth,
xa = prevHit.xaxis,
ya = prevHit.yaxis,
z = s.data[0][2],
r = this.options.bubbles.baseRadius,
offset = z*r+lw;
this.context.clearRect(
plotOffset.left + xa.d2p(prevHit.x) - offset,
plotOffset.top + ya.d2p(prevHit.y) - offset,
offset*2,
offset*2
);
}
/*,
extendXRange: function(axis){
if(axis.options.max == null){
var newmin = axis.min,
newmax = axis.max,
i, j, c, r, data, d;
for(i = 0; i < this.series.length; ++i){
c = this.series[i].bubbles;
if(c.show && this.series[i].xaxis == axis) {
data = this.series[i].data;
if (data)
for(j = 0; j < data.length; j++) {
d = data[j];
r = d[2] * c.baseRadius * (this.plotWidth / (axis.datamax - axis.datamin));
newmax = Math.max(d[0] + r, newmax);
newmin = Math.min(d[0] - r, newmin);
}
}
}
axis.max = newmax;
axis.min = newmin;
}
},
extendYRange: function(axis){
if(axis.options.max == null){
var newmin = axis.min,
newmax = axis.max,
i, j, c, r, data, d;
for(i = 0; i < this.series.length; ++i){
c = this.series[i].bubbles;
if(c.show && this.series[i].yaxis == axis) {
data = this.series[i].data;
if (data)
for(j = 0; j < data.length; j++) {
d = data[j];
r = d[2] * c.baseRadius;
newmax = Math.max(d[1] + r, newmax);
newmin = Math.min(d[1] - r, newmin);
}
}
}
axis.max = newmax;
axis.min = newmin;
}
}*/
});
|
Fixed coupling in bubbles clearHit.
|
js/types/bubbles.js
|
Fixed coupling in bubbles clearHit.
|
<ide><path>js/types/bubbles.js
<ide> context.closePath();
<ide> context.restore();
<ide> },
<del> clearHit: function(){
<del> var prevHit = this.prevHit,
<del> plotOffset = this.plotOffset,
<del> s = prevHit.series,
<del> lw = s.bars.lineWidth,
<del> xa = prevHit.xaxis,
<del> ya = prevHit.yaxis,
<del> z = s.data[0][2],
<del> r = this.options.bubbles.baseRadius,
<del> offset = z*r+lw;
<add> clearHit : function (options) {
<ide>
<del> this.context.clearRect(
<del> plotOffset.left + xa.d2p(prevHit.x) - offset,
<del> plotOffset.top + ya.d2p(prevHit.y) - offset,
<del> offset*2,
<del> offset*2
<add> var
<add> data = options.data,
<add> args = options.args,
<add> context = options.context,
<add> x = options.xScale(args.x),
<add> y = options.yScale(args.y),
<add> z = data[args.index][2] * options.baseRadius,
<add> offset = z + options.lineWidth;
<add>
<add> context.save();
<add> context.translate(options.offsetLeft, options.offsetTop);
<add> context.clearRect(
<add> x - offset,
<add> y - offset,
<add> 2 * offset,
<add> 2 * offset
<ide> );
<add> context.restore();
<ide> }
<ide>
<ide> /*,
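The reworked clearHit above takes everything from the options argument (scaled x/y, the data value z, baseRadius, lineWidth) instead of reaching into chart state such as this.prevHit, and clears a square of side 2 * (z * baseRadius + lineWidth) centred on the bubble. That bounding-box arithmetic is sketched below in Java (the language of most records in this dump); names and values are illustrative only.

public final class BubbleClearBounds
{
    /** Plain value holder: left, top, width, height of the cleared square. */
    public record Rect(double left, double top, double width, double height) {}

    // Mirrors the offset computation in clearHit: bubble radius (z * baseRadius)
    // padded by the stroke width, then doubled for the square's side.
    public static Rect clearBounds(double x, double y, double z, double baseRadius, double lineWidth)
    {
        double offset = z * baseRadius + lineWidth;
        return new Rect(x - offset, y - offset, 2 * offset, 2 * offset);
    }

    public static void main(String[] args)
    {
        // A bubble at (100, 50) with z = 3, baseRadius = 2 and a 2px line clears a 16x16 square.
        System.out.println(clearBounds(100, 50, 3, 2, 2));
    }
}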
|
|
Java
|
apache-2.0
|
a40c5237665feee5f1324aed03862b53686c192b
| 0 |
ilscipio/scipio-erp,ilscipio/scipio-erp,ilscipio/scipio-erp,ilscipio/scipio-erp,ilscipio/scipio-erp
|
/*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*******************************************************************************/
package com.ilscipio.scipio.ce.webapp.ftl.template;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import com.ilscipio.scipio.ce.webapp.ftl.CommonFtlUtil;
import com.ilscipio.scipio.ce.webapp.ftl.lang.LangFtlUtil;
import com.ilscipio.scipio.ce.webapp.ftl.lang.OfbizFtlObjectType;
import freemarker.core.Environment;
import freemarker.template.ObjectWrapper;
import freemarker.template.SimpleHash;
import freemarker.template.TemplateHashModel;
import freemarker.template.TemplateHashModelEx;
import freemarker.template.TemplateMethodModelEx;
import freemarker.template.TemplateModel;
import freemarker.template.TemplateModelException;
/**
* SCIPIO: MakeAttribMapFromArgMapMethod - Freemarker Method for getting an attribs map from an args map.
*/
public class MakeAttribMapFromArgMapMethod implements TemplateMethodModelEx {
public static final String module = MakeAttribMapFromArgMapMethod.class.getName();
/*
* @see freemarker.template.TemplateMethodModel#exec(java.util.List)
*/
@SuppressWarnings("unchecked")
@Override
public Object exec(List args) throws TemplateModelException {
if (args == null || args.size() < 1 || args.size() > 2 ) {
throw new TemplateModelException("Invalid number of arguments (expected: 1-2)");
}
ObjectWrapper objectWrapper = CommonFtlUtil.getCurrentEnvironment().getObjectWrapper();
TemplateHashModelEx argsMap = (TemplateHashModelEx) args.get(0);
// caller-supplied excludes
TemplateModel excludesModel = (args.size() >=2) ? (TemplateModel) args.get(1) : null;
Set<String> excludes;
if (excludesModel != null) {
excludes = LangFtlUtil.getAsStringSet(excludesModel);
} else {
excludes = new HashSet<>();
}
SimpleHash res = null;
final Boolean useExclude = Boolean.FALSE;
// put attribs from explicit attribs map first, if any
TemplateModel attribsModel = argsMap.get("attribs");
if (attribsModel != null && OfbizFtlObjectType.isObjectType(OfbizFtlObjectType.MAP, attribsModel)) {
if (OfbizFtlObjectType.isObjectType(OfbizFtlObjectType.COMPLEXMAP, attribsModel)) {
attribsModel = LangFtlUtil.toSimpleMap(attribsModel, false, objectWrapper);
}
res = LangFtlUtil.copyMapToSimple((TemplateHashModel) attribsModel, excludes, useExclude, objectWrapper);
}
// to get inline attribs, add list of all arg names to excludes as well as the lists themselves
TemplateModel allArgNamesModel = argsMap.get("allArgNames");
if (allArgNamesModel != null) {
excludes.addAll(LangFtlUtil.getAsStringSet(allArgNamesModel));
}
excludes.add("allArgNames");
excludes.add("localArgNames");
// add the inline attribs over the attribs map (if any)
if (res == null) {
res = LangFtlUtil.copyMapToSimple(argsMap, excludes, useExclude, objectWrapper);
} else {
LangFtlUtil.putAll(res, argsMap, excludes, useExclude, objectWrapper);
}
return res;
}
}
|
framework/webapp/src/com/ilscipio/scipio/ce/webapp/ftl/template/MakeAttribMapFromArgMapMethod.java
|
/*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*******************************************************************************/
package com.ilscipio.scipio.ce.webapp.ftl.template;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import com.ilscipio.scipio.ce.webapp.ftl.CommonFtlUtil;
import com.ilscipio.scipio.ce.webapp.ftl.lang.LangFtlUtil;
import com.ilscipio.scipio.ce.webapp.ftl.lang.OfbizFtlObjectType;
import freemarker.core.Environment;
import freemarker.template.ObjectWrapper;
import freemarker.template.SimpleHash;
import freemarker.template.TemplateHashModel;
import freemarker.template.TemplateHashModelEx;
import freemarker.template.TemplateMethodModelEx;
import freemarker.template.TemplateModel;
import freemarker.template.TemplateModelException;
/**
* SCIPIO: MakeAttribMapFromArgMapMethod - Freemarker Method for getting an attribs map from an args map.
*/
public class MakeAttribMapFromArgMapMethod implements TemplateMethodModelEx {
public static final String module = MakeAttribMapFromArgMapMethod.class.getName();
/*
* @see freemarker.template.TemplateMethodModel#exec(java.util.List)
*/
@SuppressWarnings("unchecked")
@Override
public Object exec(List args) throws TemplateModelException {
if (args == null || args.size() < 1 || args.size() > 2 ) {
throw new TemplateModelException("Invalid number of arguments (expected: 1-2)");
}
ObjectWrapper objectWrapper = CommonFtlUtil.getCurrentEnvironment().getObjectWrapper();
TemplateHashModelEx argsMap = (TemplateHashModelEx) args.get(0);
// caller-supplied excludes
TemplateModel excludesModel = (args.size() >=2) ? (TemplateModel) args.get(1) : null;
Set<String> excludes;
if (excludesModel != null) {
excludes = LangFtlUtil.getAsStringSet(excludesModel);
} else {
excludes = new HashSet<>();
}
SimpleHash res = null;
final Boolean useExclude = Boolean.FALSE;
// put attribs from explicit attribs map first, if any
TemplateModel attribsModel = argsMap.get("attribs");
if (attribsModel != null && OfbizFtlObjectType.isObjectType(OfbizFtlObjectType.MAP, attribsModel)) {
res = LangFtlUtil.copyMapToSimple((TemplateHashModel) attribsModel, excludes, useExclude, objectWrapper);
}
// to get inline attribs, add list of all arg names to excludes as well as the lists themselves
TemplateModel allArgNamesModel = argsMap.get("allArgNames");
if (allArgNamesModel != null) {
excludes.addAll(LangFtlUtil.getAsStringSet(allArgNamesModel));
}
excludes.add("allArgNames");
excludes.add("localArgNames");
// add the inline attribs over the attribs map (if any)
if (res == null) {
res = LangFtlUtil.copyMapToSimple(argsMap, excludes, useExclude, objectWrapper);
} else {
LangFtlUtil.putAll(res, argsMap, excludes, useExclude, objectWrapper);
}
return res;
}
}
|
NoRef utilities.ftl: #makeAttribMapFromArgMap: bugfix: fix explicit "attribs" map not being cast to simple map before iteration (DEV NOTE: this partial fixes JStree problem in cms)
git-svn-id: 6c0edb9fdd085beb7f3b78cf385b6ddede550bd9@13737 55bbc10b-e964-4c8f-a844-a62c6f7d3c80
|
framework/webapp/src/com/ilscipio/scipio/ce/webapp/ftl/template/MakeAttribMapFromArgMapMethod.java
|
NoRef utilities.ftl: #makeAttribMapFromArgMap: bugfix: fix explicit "attribs" map not being cast to simple map before iteration (DEV NOTE: this partial fixes JStree problem in cms)
|
<ide><path>framework/webapp/src/com/ilscipio/scipio/ce/webapp/ftl/template/MakeAttribMapFromArgMapMethod.java
<ide> // put attribs from explicit attribs map first, if any
<ide> TemplateModel attribsModel = argsMap.get("attribs");
<ide> if (attribsModel != null && OfbizFtlObjectType.isObjectType(OfbizFtlObjectType.MAP, attribsModel)) {
<add> if (OfbizFtlObjectType.isObjectType(OfbizFtlObjectType.COMPLEXMAP, attribsModel)) {
<add> attribsModel = LangFtlUtil.toSimpleMap(attribsModel, false, objectWrapper);
<add> }
<ide> res = LangFtlUtil.copyMapToSimple((TemplateHashModel) attribsModel, excludes, useExclude, objectWrapper);
<ide> }
<ide>
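The added lines normalise a "complex" FreeMarker map to a simple one before it is copied key-by-key. The helpers involved (LangFtlUtil.toSimpleMap, copyMapToSimple) are Scipio-specific, so the sketch below shows only the generic copy step using the stock FreeMarker 2.3 API with a caller-supplied exclude set; it illustrates the idea rather than the actual implementation.

import java.util.Set;

import freemarker.template.ObjectWrapper;
import freemarker.template.SimpleHash;
import freemarker.template.TemplateHashModelEx;
import freemarker.template.TemplateModelException;
import freemarker.template.TemplateModelIterator;
import freemarker.template.TemplateScalarModel;

public final class FtlMapCopySketch {

    /** Copies source into a new SimpleHash, skipping any key listed in excludes. */
    public static SimpleHash copyExcluding(TemplateHashModelEx source, Set<String> excludes,
            ObjectWrapper wrapper) throws TemplateModelException {
        SimpleHash target = new SimpleHash(wrapper);
        TemplateModelIterator keys = source.keys().iterator();
        while (keys.hasNext()) {
            // Hash keys are scalar models; unwrap each one to a plain String.
            String key = ((TemplateScalarModel) keys.next()).getAsString();
            if (!excludes.contains(key)) {
                target.put(key, source.get(key));
            }
        }
        return target;
    }
}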
|
|
Java
|
apache-2.0
|
cadb0a3a95ff52360ffb782df24915d7aa1d39fc
| 0 |
francescomari/jackrabbit-oak,catholicon/jackrabbit-oak,chetanmeh/jackrabbit-oak,yesil/jackrabbit-oak,alexkli/jackrabbit-oak,code-distillery/jackrabbit-oak,FlakyTestDetection/jackrabbit-oak,anchela/jackrabbit-oak,meggermo/jackrabbit-oak,stillalex/jackrabbit-oak,alexparvulescu/jackrabbit-oak,anchela/jackrabbit-oak,alexkli/jackrabbit-oak,catholicon/jackrabbit-oak,kwin/jackrabbit-oak,alexparvulescu/jackrabbit-oak,alexparvulescu/jackrabbit-oak,stillalex/jackrabbit-oak,kwin/jackrabbit-oak,FlakyTestDetection/jackrabbit-oak,alexkli/jackrabbit-oak,alexparvulescu/jackrabbit-oak,tripodsan/jackrabbit-oak,anchela/jackrabbit-oak,code-distillery/jackrabbit-oak,meggermo/jackrabbit-oak,meggermo/jackrabbit-oak,code-distillery/jackrabbit-oak,anchela/jackrabbit-oak,yesil/jackrabbit-oak,meggermo/jackrabbit-oak,FlakyTestDetection/jackrabbit-oak,yesil/jackrabbit-oak,tripodsan/jackrabbit-oak,kwin/jackrabbit-oak,alexparvulescu/jackrabbit-oak,code-distillery/jackrabbit-oak,francescomari/jackrabbit-oak,alexkli/jackrabbit-oak,francescomari/jackrabbit-oak,catholicon/jackrabbit-oak,catholicon/jackrabbit-oak,kwin/jackrabbit-oak,tripodsan/jackrabbit-oak,meggermo/jackrabbit-oak,kwin/jackrabbit-oak,stillalex/jackrabbit-oak,anchela/jackrabbit-oak,francescomari/jackrabbit-oak,tripodsan/jackrabbit-oak,stillalex/jackrabbit-oak,code-distillery/jackrabbit-oak,chetanmeh/jackrabbit-oak,FlakyTestDetection/jackrabbit-oak,alexkli/jackrabbit-oak,FlakyTestDetection/jackrabbit-oak,chetanmeh/jackrabbit-oak,chetanmeh/jackrabbit-oak,catholicon/jackrabbit-oak,stillalex/jackrabbit-oak,yesil/jackrabbit-oak,chetanmeh/jackrabbit-oak,francescomari/jackrabbit-oak
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.security.principal;
import org.apache.jackrabbit.api.security.principal.PrincipalManager;
import org.apache.jackrabbit.oak.spi.security.principal.PrincipalProvider;
import org.apache.jackrabbit.test.AbstractJCRTest;
/**
* <pre>
* Module: Principal Management
* =============================================================================
*
* Title: Introduction to Principal Management
* -----------------------------------------------------------------------------
*
* Goal:
* Understand the usage of principal management in Oak and become familiar with
* the difference between the Jackrabbit {@link PrincipalManager} and the
* {@link PrincipalProvider} exposed by Oak SPI.
*
* Exercises:
*
* - Overview and Usages of Principal Management
* Search for usage of principal management API (e.g. the {@link org.apache.jackrabbit.api.security.principal.PrincipalManager}
* and {@link org.apache.jackrabbit.oak.spi.security.principal.PrincipalProvider}
* interfaces) in Oak _and_ Jackrabbit JCR Commons. List your findings and discuss the impact.
*
* Question: Where is the principal manager|provider being used?
* Question: Who is the expected API consumer?
* Question: What are the characteristics of these areas?
* Question: What can you say about the relation of principal management and authentication?
* Question: What can you say about the relation of principal management and authorization?
*
*
* - Configuration
* Look at the default implementation of the {@link org.apache.jackrabbit.oak.spi.security.principal.PrincipalConfiguration}
* and try to identify the configurable parts. Compare your results with the
* Oak documentation.
*
* Question: Can you provide a list of configuration options?
*
* - Pluggability
* Become familiar with the pluggable nature of the principal management
*
* Question: What means does Oak provide to change or extend the set of principals exposed?
* Question: What interfaces do you need to implement?
* Question: Is it possible to combine different principal implementations? How does that work?
*
*
* Additional Exercises:
* -----------------------------------------------------------------------------
*
* - Discuss why principal management API is read only.
*
* Question: How are principals exposed by the {@link PrincipalManager} collected?
* Question: What does the default implementation look like?
*
*
* Advanced Exercises:
* -----------------------------------------------------------------------------
*
* If you want to dig deeper into the principal management implementation details
* you may want to play around with plugging your custom principal provider instance
* or replacing the default setup altogether.
*
* - Write your custom implementation of the principal provider and deploy it
* in an OSGi based repository setup. Observe the effect it has on principal
* management, authentication and authorization.
*
*
* Related Exercises
* -----------------------------------------------------------------------------
*
* - {@link L2_PrincipalManagerTest}
* - {@link L4_PrincipalProviderTest}
* - {@link L3_EveryoneTest}
*
* </pre>
*
* @see org.apache.jackrabbit.api.security.principal.PrincipalManager
* @see org.apache.jackrabbit.oak.spi.security.principal.PrincipalProvider
*/
public class L1_IntroductionTest extends AbstractJCRTest {
}
|
oak-exercise/src/test/java/org/apache/jackrabbit/oak/security/principal/L1_IntroductionTest.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.security.principal;
import org.apache.jackrabbit.api.security.principal.PrincipalManager;
import org.apache.jackrabbit.test.AbstractJCRTest;
/**
* <pre>
* Module: Principal Management
* =============================================================================
*
* Title: Introduction to Principal Management
* -----------------------------------------------------------------------------
*
* Goal:
* Understand the usage of principal management in Oak and become familiar with
* the difference between the Jackrabbit {@link PrincipalManager} and the
* {@link PrincipalProvider} exposed by Oak SPI.
*
* Exercises:
*
* - Overview and Usages of Principal Management
* Search for usage of principal management API (e.g. the {@link org.apache.jackrabbit.api.security.principal.PrincipalManager}
* and {@link org.apache.jackrabbit.oak.spi.security.principal.PrincipalProvider}
* interfaces) in Oak _and_ Jackrabbit JCR Commons. List your findings and discuss the impact.
*
* Question: Where is the principal manager|provider being used?
* Question: Who is the expected API consumer?
* Question: What are the characteristics of these areas?
* Question: What can you say about the relation of principal management and authentication?
* Question: What can you say about the relation of principal management and authorization?
*
*
* - Configuration
* Look at the default implementation of the {@link org.apache.jackrabbit.oak.spi.security.principal.PrincipalConfigurationConfiguration}
* and try to identify the configurable parts. Compare your results with the
* Oak documentation.
*
* Question: Can you provide a list of configuration options?
*
* - Pluggability
* Become familiar with the pluggable nature of the principal management
*
* Question: What means does Oak provide to change or extend the set of principals exposed?
* Question: What interfaces do you need to implement?
* Question: Is it possible to combine different principal implementations? How does that work?
*
*
* Additional Exercises:
* -----------------------------------------------------------------------------
*
* - Discuss why principal management API is read only.
*
* Question: How are principals exposed by the {@link PrincipalManager} collected?
* Question: What does the default implementation look like?
*
*
* Advanced Exercises:
* -----------------------------------------------------------------------------
*
* If you want to dig deeper into the principal management implementation details
* you may want to play around with plugging your custom principal provider instance
* or replacing the default setup altogether.
*
* - Write your custom implementation of the principal provider and deploy it
* in an OSGi based repository setup. Observe the effect it has on principal
* management, authentication and authorization.
*
*
* Related Exercises
* -----------------------------------------------------------------------------
*
* - {@link L2_PrincipalManagerTest}
* - {@link L4_PrincipalProviderTest}
* - {@link L3_EveryoneTest}
*
* </pre>
*
* @see org.apache.jackrabbit.api.security.principal.PrincipalManager
* @see org.apache.jackrabbit.oak.spi.security.principal.PrincipalProvider
*/
public class L1_IntroductionTest extends AbstractJCRTest {
}
|
OAK-3008: Training material for Oak security (WIP)
git-svn-id: 67138be12999c61558c3dd34328380c8e4523e73@1731622 13f79535-47bb-0310-9956-ffa450edef68
|
oak-exercise/src/test/java/org/apache/jackrabbit/oak/security/principal/L1_IntroductionTest.java
|
OAK-3008: Training material for Oak security (WIP)
|
<ide><path>oak-exercise/src/test/java/org/apache/jackrabbit/oak/security/principal/L1_IntroductionTest.java
<ide> package org.apache.jackrabbit.oak.security.principal;
<ide>
<ide> import org.apache.jackrabbit.api.security.principal.PrincipalManager;
<add>import org.apache.jackrabbit.oak.spi.security.principal.PrincipalProvider;
<ide> import org.apache.jackrabbit.test.AbstractJCRTest;
<ide>
<ide> /**
<ide> *
<ide> *
<ide> * - Configuration
<del> * Look at the default implementation of the {@link org.apache.jackrabbit.oak.spi.security.principal.PrincipalConfigurationConfiguration}
<add> * Look at the default implementation of the {@link org.apache.jackrabbit.oak.spi.security.principal.PrincipalConfiguration}
<ide> * and try to identify the configurable parts. Compare your results with the
<ide> * Oak documentation.
<ide> *
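The exercise above contrasts the JCR-facing PrincipalManager with Oak's PrincipalProvider SPI. For reference while working through the questions, here is a small hedged sketch of the read-only manager API; it assumes an already-authenticated session against a Jackrabbit/Oak repository, and obtaining that session is out of scope here.

import javax.jcr.Session;

import org.apache.jackrabbit.api.JackrabbitSession;
import org.apache.jackrabbit.api.security.principal.PrincipalIterator;
import org.apache.jackrabbit.api.security.principal.PrincipalManager;

public final class PrincipalListingSketch {

    public static void listPrincipals(Session session) throws Exception {
        // The principal manager is a Jackrabbit extension, exposed on JackrabbitSession
        // rather than on the plain JCR Session interface.
        PrincipalManager principalManager = ((JackrabbitSession) session).getPrincipalManager();

        PrincipalIterator principals = principalManager.getPrincipals(PrincipalManager.SEARCH_TYPE_ALL);
        while (principals.hasNext()) {
            System.out.println(principals.nextPrincipal().getName());
        }
    }
}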
|
|
Java
|
apache-2.0
|
e66ef0aa000b0c42820b07fc4218656684d8d85b
| 0 |
hmusavi/jpo-ode,hmusavi/jpo-ode,hmusavi/jpo-ode,hmusavi/jpo-ode,hmusavi/jpo-ode
|
package us.dot.its.jpo.ode.services.vsd;
import java.util.PriorityQueue;
import java.util.Queue;
import java.util.concurrent.ConcurrentHashMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.oss.asn1.EncodeFailedException;
import com.oss.asn1.EncodeNotSupportedException;
import com.oss.asn1.PERUnalignedCoder;
import us.dot.its.jpo.ode.OdeProperties;
import us.dot.its.jpo.ode.j2735.J2735;
import us.dot.its.jpo.ode.j2735.dsrc.MsgCRC;
import us.dot.its.jpo.ode.j2735.dsrc.TemporaryID;
import us.dot.its.jpo.ode.j2735.semi.GroupID;
import us.dot.its.jpo.ode.j2735.semi.SemiDialogID;
import us.dot.its.jpo.ode.j2735.semi.SemiSequenceID;
import us.dot.its.jpo.ode.j2735.semi.VehSitDataMessage;
import us.dot.its.jpo.ode.j2735.semi.VehSitDataMessage.Bundle;
import us.dot.its.jpo.ode.j2735.semi.VehSitRecord;
import us.dot.its.jpo.ode.j2735.semi.VsmType;
import us.dot.its.jpo.ode.plugin.j2735.J2735Bsm;
import us.dot.its.jpo.ode.plugin.j2735.oss.OssVehicleSituationRecord;
import us.dot.its.jpo.ode.udp.bsm.BsmComparator;
import us.dot.its.jpo.ode.util.CodecUtils;
import us.dot.its.jpo.ode.util.JsonUtils;
import us.dot.its.jpo.ode.wrapper.AbstractSubPubTransformer;
import us.dot.its.jpo.ode.wrapper.MessageProducer;
/**
* Kafka consumer/publisher that creates VSDs from BSMs.
*
* Input stream: j2735FilteredBsm (JSON) Output stream: topic.asnVsd (byte)
*
* dialogID = SemiDialogID.vehSitData seqID = SemiSequenceID.data groupID =
* "jode".bytes requestID from BSMs
*
* VehSitDataMessage ::= SEQUENCE { dialogID SemiDialogID, -- 0x9A Vehicle
* Situation Data Deposit seqID SemiSequenceID, -- 0x05 Data Content groupID
* GroupID, -- unique ID used to identify an organization requestID
* DSRC.TemporaryID, -- random 4 byte ID generated following trust establishment
* type VsmType, -- the type of vehicle situation data included bundle SEQUENCE
* (SIZE (1..10)) OF VehSitRecord, -- sets of situation data records crc
* DSRC.MsgCRC }
*/
public class BsmToVsdPackager extends AbstractSubPubTransformer<String, String, byte[]> {
private static final Logger logger = LoggerFactory.getLogger(BsmToVsdPackager.class);
private static final int VSD_PACKAGE_SIZE = 10;
private final PERUnalignedCoder coder;
private ConcurrentHashMap<String, Queue<J2735Bsm>> bsmQueueMap;
public BsmToVsdPackager(MessageProducer<String, byte[]> producer, String outputTopic) {
super(producer, (java.lang.String) outputTopic);
this.coder = J2735.getPERUnalignedCoder();
this.bsmQueueMap = new ConcurrentHashMap<>();
}
@Override
protected byte[] transform(String consumedData) {
if (null == consumedData) {
return new byte[0];
}
logger.debug("VsdDepositor received data: {}", consumedData);
J2735Bsm bsmData = (J2735Bsm) JsonUtils.fromJson(consumedData, J2735Bsm.class);
byte[] encodedVsd = null;
try {
logger.debug("Consuming BSM.");
VehSitDataMessage vsd = addToVsdBundle(bsmData);
// Only full VSDs (10) will be published
// TODO - toggleable mechanism for periodically publishing not-full
// VSDs
if (vsd != null) {
logger.debug("VSD ready to send: (pojo) {}", vsd); // if encoding
// fails, look at
// this for
// mistakes
encodedVsd = coder.encode(vsd).array();
logger.debug("VSD ready to send: {}", encodedVsd);
}
} catch (EncodeFailedException | EncodeNotSupportedException e) {
logger.error("Error Sending VSD to SDC", e);
}
return encodedVsd;
}
private VehSitDataMessage addToVsdBundle(J2735Bsm j2735Bsm) {
String tempId = j2735Bsm.getCoreData().getId();
if (!bsmQueueMap.containsKey(tempId)) {
logger.info("Creating new VSD package queue for BSMs with tempID {} to VSD package queue", tempId);
Queue<J2735Bsm> bsmQueue = new PriorityQueue<>(VSD_PACKAGE_SIZE, new BsmComparator());
bsmQueueMap.put(tempId, bsmQueue);
}
bsmQueueMap.get(tempId).add(j2735Bsm);
// After receiving enough messages, craft the VSD and return it
if (bsmQueueMap.get(tempId).size() == VSD_PACKAGE_SIZE) {
logger.info("BSM queue ID {} full, crafting VSD", tempId);
// convert the BSMs in the priority queue to VSRs to craft VSD bundle
Bundle vsrBundle = new Bundle();
Queue<J2735Bsm> bsmArray = bsmQueueMap.get(tempId);
for (J2735Bsm entry : bsmArray) {
VehSitRecord vsr = OssVehicleSituationRecord.convertBsmToVsr(entry);
vsrBundle.add(vsr);
}
VehSitDataMessage vsd = new VehSitDataMessage();
vsd.bundle = vsrBundle;
vsd.dialogID = SemiDialogID.vehSitData;
vsd.seqID = SemiSequenceID.data;
vsd.groupID = new GroupID(OdeProperties.getJpoOdeGroupId());
vsd.requestID = new TemporaryID(CodecUtils.fromHex(tempId));
vsd.crc = new MsgCRC(new byte[] { 0, 0 });
vsd.type = new VsmType(new byte[] { 1 }); // 1=fundamental sit. status
bsmQueueMap.remove(tempId); // prevent duplicates
return vsd;
} else {
logger.info("Added BSM with tempID {} to existing VSD package queue ({}/{})", tempId,
bsmQueueMap.get(tempId).size(), VSD_PACKAGE_SIZE);
return null;
}
}
}
|
jpo-ode-svcs/src/main/java/us/dot/its/jpo/ode/services/vsd/BsmToVsdPackager.java
|
package us.dot.its.jpo.ode.services.vsd;
import java.util.PriorityQueue;
import java.util.Queue;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.tomcat.util.buf.HexUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.oss.asn1.EncodeFailedException;
import com.oss.asn1.EncodeNotSupportedException;
import com.oss.asn1.PERUnalignedCoder;
import us.dot.its.jpo.ode.OdeProperties;
import us.dot.its.jpo.ode.j2735.J2735;
import us.dot.its.jpo.ode.j2735.dsrc.MsgCRC;
import us.dot.its.jpo.ode.j2735.dsrc.TemporaryID;
import us.dot.its.jpo.ode.j2735.semi.GroupID;
import us.dot.its.jpo.ode.j2735.semi.SemiDialogID;
import us.dot.its.jpo.ode.j2735.semi.SemiSequenceID;
import us.dot.its.jpo.ode.j2735.semi.VehSitDataMessage;
import us.dot.its.jpo.ode.j2735.semi.VehSitDataMessage.Bundle;
import us.dot.its.jpo.ode.j2735.semi.VehSitRecord;
import us.dot.its.jpo.ode.j2735.semi.VsmType;
import us.dot.its.jpo.ode.plugin.j2735.J2735Bsm;
import us.dot.its.jpo.ode.plugin.j2735.oss.OssVehicleSituationRecord;
import us.dot.its.jpo.ode.udp.bsm.BsmComparator;
import us.dot.its.jpo.ode.util.CodecUtils;
import us.dot.its.jpo.ode.util.JsonUtils;
import us.dot.its.jpo.ode.wrapper.AbstractSubPubTransformer;
import us.dot.its.jpo.ode.wrapper.MessageProducer;
/**
* Kafka consumer/publisher that creates VSDs from BSMs.
*
* Input stream: j2735FilteredBsm (JSON) Output stream: topic.asnVsd (byte)
*
* dialogID = SemiDialogID.vehSitData seqID = SemiSequenceID.data groupID =
* "jode".bytes requestID from BSMs
*
* VehSitDataMessage ::= SEQUENCE { dialogID SemiDialogID, -- 0x9A Vehicle
* Situation Data Deposit seqID SemiSequenceID, -- 0x05 Data Content groupID
* GroupID, -- unique ID used to identify an organization requestID
* DSRC.TemporaryID, -- random 4 byte ID generated following trust establishment
* type VsmType, -- the type of vehicle situation data included bundle SEQUENCE
* (SIZE (1..10)) OF VehSitRecord, -- sets of situation data records crc
* DSRC.MsgCRC }
*/
public class BsmToVsdPackager extends AbstractSubPubTransformer<String, String, byte[]> {
private static final Logger logger = LoggerFactory.getLogger(BsmToVsdPackager.class);
private static final int VSD_PACKAGE_SIZE = 10;
private final PERUnalignedCoder coder;
private ConcurrentHashMap<String, Queue<J2735Bsm>> bsmQueueMap;
public BsmToVsdPackager(MessageProducer<String, byte[]> producer, String outputTopic) {
super(producer, (java.lang.String) outputTopic);
this.coder = J2735.getPERUnalignedCoder();
this.bsmQueueMap = new ConcurrentHashMap<>();
}
@Override
protected byte[] transform(String consumedData) {
if (null == consumedData) {
return new byte[0];
}
logger.debug("VsdDepositor received data: {}", consumedData);
J2735Bsm bsmData = (J2735Bsm) JsonUtils.fromJson(consumedData, J2735Bsm.class);
byte[] encodedVsd = null;
try {
logger.debug("Consuming BSM.");
VehSitDataMessage vsd = addToVsdBundle(bsmData);
// Only full VSDs (10) will be published
// TODO - toggleable mechanism for periodically publishing not-full
// VSDs
if (vsd != null) {
logger.debug("VSD ready to send: (pojo) {}", vsd); // if encoding
// fails, look at
// this for
// mistakes
encodedVsd = coder.encode(vsd).array();
logger.debug("VSD ready to send: {}", encodedVsd);
}
} catch (EncodeFailedException | EncodeNotSupportedException e) {
logger.error("Error Sending VSD to SDC", e);
}
return encodedVsd;
}
private VehSitDataMessage addToVsdBundle(J2735Bsm j2735Bsm) {
String tempId = j2735Bsm.getCoreData().getId();
if (!bsmQueueMap.containsKey(tempId)) {
logger.info("Creating new VSD package queue for BSMs with tempID {} to VSD package queue", tempId);
Queue<J2735Bsm> bsmQueue = new PriorityQueue<>(VSD_PACKAGE_SIZE, new BsmComparator());
bsmQueueMap.put(tempId, bsmQueue);
}
bsmQueueMap.get(tempId).add(j2735Bsm);
// After receiving enough messages, craft the VSD and return it
if (bsmQueueMap.get(tempId).size() == VSD_PACKAGE_SIZE) {
logger.info("BSM queue ID {} full, crafting VSD", tempId);
// convert the BSMs in the priority queue to VSRs to craft VSD bundle
Bundle vsrBundle = new Bundle();
Queue<J2735Bsm> bsmArray = bsmQueueMap.get(tempId);
for (J2735Bsm entry : bsmArray) {
VehSitRecord vsr = OssVehicleSituationRecord.convertBsmToVsr(entry);
vsrBundle.add(vsr);
}
VehSitDataMessage vsd = new VehSitDataMessage();
vsd.bundle = vsrBundle;
vsd.dialogID = SemiDialogID.vehSitData;
vsd.seqID = SemiSequenceID.data;
vsd.groupID = new GroupID(OdeProperties.getJpoOdeGroupId());
vsd.requestID = new TemporaryID(CodecUtils.fromHex(tempId));
vsd.crc = new MsgCRC(new byte[] { 0, 0 });
vsd.type = new VsmType(new byte[] { 1 }); // 1=fundamental sit. status
bsmQueueMap.remove(tempId); // prevent duplicates
return vsd;
} else {
logger.info("Added BSM with tempID {} to existing VSD package queue ({}/{})", tempId,
bsmQueueMap.get(tempId).size(), VSD_PACKAGE_SIZE);
return null;
}
}
}
|
ODE-314 removed extra import
|
jpo-ode-svcs/src/main/java/us/dot/its/jpo/ode/services/vsd/BsmToVsdPackager.java
|
ODE-314 removed extra import
|
<ide><path>jpo-ode-svcs/src/main/java/us/dot/its/jpo/ode/services/vsd/BsmToVsdPackager.java
<ide> import java.util.Queue;
<ide> import java.util.concurrent.ConcurrentHashMap;
<ide>
<del>import org.apache.tomcat.util.buf.HexUtils;
<ide> import org.slf4j.Logger;
<ide> import org.slf4j.LoggerFactory;
<ide>
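Stripped of the Kafka and ASN.1 specifics, BsmToVsdPackager implements a simple pattern: queue incoming messages per TemporaryID in a ConcurrentHashMap and emit a bundle once ten have accumulated, removing the queue so the same bundle is not sent twice. A generic sketch of that pattern follows (illustrative names; it uses a plain FIFO queue where the real class uses a PriorityQueue ordered by BsmComparator).

import java.util.ArrayList;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;

public final class PerKeyBundler<T> {

    private final int bundleSize;
    private final ConcurrentHashMap<String, Queue<T>> queues = new ConcurrentHashMap<>();

    public PerKeyBundler(int bundleSize) {
        this.bundleSize = bundleSize;
    }

    /** Adds an item; returns a full bundle once the key reaches bundleSize, otherwise null. */
    public List<T> add(String key, T item) {
        Queue<T> queue = queues.computeIfAbsent(key, k -> new ConcurrentLinkedQueue<>());
        queue.add(item);
        if (queue.size() >= bundleSize) {
            queues.remove(key);            // prevent duplicate bundles, as the packager does
            return new ArrayList<>(queue);
        }
        return null;
    }
}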
|
|
Java
|
mit
|
error: pathspec 'PVManager/src/test/java/org/epics/pvmanager/data/VTableAggegationTest.java' did not match any file(s) known to git
|
20afd5c6ef12cd9d70955965696ad1a632da2874
| 1 |
ControlSystemStudio/diirt,richardfearn/diirt,ControlSystemStudio/diirt,diirt/diirt,diirt/diirt,diirt/diirt,richardfearn/diirt,berryma4/diirt,ControlSystemStudio/diirt,richardfearn/diirt,diirt/diirt,berryma4/diirt,ControlSystemStudio/diirt,berryma4/diirt,berryma4/diirt
|
/*
* Copyright 2010-11 Brookhaven National Laboratory
* All rights reserved. Use is subject to license terms.
*/
package org.epics.pvmanager.data;
import java.util.Arrays;
import java.util.List;
import org.epics.pvmanager.expression.DesiredRateExpression;
import org.junit.Test;
import static org.junit.Assert.*;
import static org.hamcrest.Matchers.*;
import static org.epics.pvmanager.data.ExpressionLanguage.*;
/**
*
* @author carcassi
*/
public class VTableAggegationTest {
@Test
public void aggregateSimpleTable() throws Exception {
List<String> names = Arrays.asList("One", "Two", "Three");
List<Double> values = Arrays.asList(1.0, 2.0, 3.0);
List<Integer> counts = Arrays.asList(1, 2, 3);
DesiredRateExpression<VTable> exp = vTable(column("Names", vStringConstants(names)), column("Values", vDoubleConstants(values)), column("Counts", vIntConstants(counts)));
VTable result = exp.getFunction().getValue();
assertThat(result.getColumnCount(), equalTo(3));
assertThat(result.getColumnName(0), equalTo("Names"));
assertThat(result.getColumnType(0), equalTo((Class) String.class));
assertThat(result.getColumnArray(0), equalTo((Object) new String[]{"One", "Two", "Three"}));
assertThat(result.getColumnName(1), equalTo("Values"));
assertThat(result.getColumnType(1), equalTo((Class) Double.TYPE));
assertThat(result.getColumnArray(1), equalTo((Object) new double[]{1.0, 2.0, 3.0}));
assertThat(result.getColumnName(2), equalTo("Counts"));
assertThat(result.getColumnType(2), equalTo((Class) Integer.TYPE));
assertThat(result.getColumnArray(2), equalTo((Object) new int[]{1, 2, 3}));
}
}
|
PVManager/src/test/java/org/epics/pvmanager/data/VTableAggegationTest.java
|
Data: test for table aggregation.
|
PVManager/src/test/java/org/epics/pvmanager/data/VTableAggegationTest.java
|
Data: test for table aggregation.
|
<ide><path>PVManager/src/test/java/org/epics/pvmanager/data/VTableAggegationTest.java
<add>/*
<add> * Copyright 2010-11 Brookhaven National Laboratory
<add> * All rights reserved. Use is subject to license terms.
<add> */
<add>package org.epics.pvmanager.data;
<add>
<add>import java.util.Arrays;
<add>import java.util.List;
<add>import org.epics.pvmanager.expression.DesiredRateExpression;
<add>import org.junit.Test;
<add>import static org.junit.Assert.*;
<add>import static org.hamcrest.Matchers.*;
<add>import static org.epics.pvmanager.data.ExpressionLanguage.*;
<add>
<add>/**
<add> *
<add> * @author carcassi
<add> */
<add>public class VTableAggegationTest {
<add>
<add> @Test
<add> public void aggregateSimpleTable() throws Exception {
<add> List<String> names = Arrays.asList("One", "Two", "Three");
<add> List<Double> values = Arrays.asList(1.0, 2.0, 3.0);
<add> List<Integer> counts = Arrays.asList(1, 2, 3);
<add> DesiredRateExpression<VTable> exp = vTable(column("Names", vStringConstants(names)), column("Values", vDoubleConstants(values)), column("Counts", vIntConstants(counts)));
<add> VTable result = exp.getFunction().getValue();
<add> assertThat(result.getColumnCount(), equalTo(3));
<add> assertThat(result.getColumnName(0), equalTo("Names"));
<add> assertThat(result.getColumnType(0), equalTo((Class) String.class));
<add> assertThat(result.getColumnArray(0), equalTo((Object) new String[]{"One", "Two", "Three"}));
<add> assertThat(result.getColumnName(1), equalTo("Values"));
<add> assertThat(result.getColumnType(1), equalTo((Class) Double.TYPE));
<add> assertThat(result.getColumnArray(1), equalTo((Object) new double[]{1.0, 2.0, 3.0}));
<add> assertThat(result.getColumnName(2), equalTo("Counts"));
<add> assertThat(result.getColumnType(2), equalTo((Class) Integer.TYPE));
<add> assertThat(result.getColumnArray(2), equalTo((Object) new int[]{1, 2, 3}));
<add> }
<add>}
|
|
JavaScript
|
apache-2.0
|
1286b7fbbc9b18d5c44906e403e1c045a379299c
| 0 |
SAP/openui5,SAP/openui5,SAP/openui5,SAP/openui5
|
/*!
* ${copyright}
*/
// Provides control sap.m.MessagePopover.
sap.ui.define([
"./ResponsivePopover",
"./Button",
"./Toolbar",
"./Bar",
"sap/ui/core/Control",
"sap/ui/core/IconPool",
"./semantic/SemanticPage",
"./Popover",
"./MessageView",
"sap/ui/Device",
"./MessagePopoverRenderer",
"sap/base/Log",
"sap/ui/thirdparty/jquery"
],
function(
ResponsivePopover,
Button,
Toolbar,
Bar,
Control,
IconPool,
SemanticPage,
Popover,
MessageView,
Device,
MessagePopoverRenderer,
Log,
jQuery
) {
"use strict";
/**
* Constructor for a new MessagePopover.
*
* @param {string} [sId] ID for the new control, generated automatically if no ID is given
* @param {object} [mSettings] Initial settings for the new control
*
* @class
* A summarized list of different types of messages.
* <h3>Overview</h3>
* A message popover is used to display a summarized list of different types of messages (errors, warnings, success and information).
* It provides a handy and systemized way to navigate and explore details for every message. It also exposes an event {@link sap.m.MessagePopover#activeTitlePress}, which can be used for navigation from a message to the source of the issue.
* <h4>Notes:</h4>
* <ul>
* <li> Messages can have descriptions pre-formatted with HTML markup. In this case, the <code>markupDescription</code> has to be set to <code>true</code>.</li>
* <li> If the message cannot be fully displayed or includes a long description, the message popover provides navigation to the detailed description.</li>
* </ul>
* <h3>Structure</h3>
* The message popover stores all messages in an aggregation of type {@link sap.m.MessageItem} named <code>items</code>.
*
* A set of properties determines how the items are rendered:
* <ul>
* <li> counter - An integer that is used to indicate the number of errors for each type </li>
* <li> type - The type of message </li>
* <li> title/subtitle - The title and subtitle of the message</li>
* <li> description - The long text description of the message</li>
* <li> activeTitle - Determines whether the title of the item is interactive</li>
* </ul>
* <h3>Usage</h3>
* With the message concept, MessagePopover provides a way to centrally manage messages and show them to the user without additional work for the developer.
* The message popover is triggered from a messaging button in the footer toolbar. If an error has occurred at any validation point,
* the total number of messages should be incremented, but the user's work shouldn't be interrupted.
* Navigation between the message item and the source of the error can be created, if needed by the application.
* This can be done by setting the <code>activeTitle</code> property to true and providing a handler for the <code>activeTitlePress</code> event.
* In addition, you can achieve the same functionality inside a different container using the sap.m.MessageView control.
* <h3>Responsive Behavior</h3>
* On mobile phones, the message popover is automatically shown in full screen mode.<br>
* On desktop and tablet, the message popover opens in a popover.<br>
* On desktop the opened popover is resizable if it is placed in a {@link sap.m.Toolbar}, {@link sap.m.Bar} or used in {@link sap.f.semantic.SemanticPage}
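*
* <h3>Example</h3>
* A minimal usage sketch (the item texts and the messaging button below are illustrative only):
* <pre>
*   sap.ui.require(["sap/m/MessagePopover", "sap/m/MessageItem", "sap/m/Button"],
*     function (MessagePopover, MessageItem, Button) {
*       var oMessagePopover = new MessagePopover({
*         items: [
*           new MessageItem({
*             type: "Error",
*             title: "Price is missing",
*             activeTitle: true,
*             description: "The price field of the product is mandatory."
*           })
*         ],
*         activeTitlePress: function () {
*           // navigate to the source of the issue, e.g. focus the related input field
*         }
*       });
*       // typically placed as a messaging button in the footer toolbar
*       var oMessagesButton = new Button({
*         icon: "sap-icon://message-popup",
*         press: function (oEvent) {
*           oMessagePopover.toggle(oEvent.getSource());
*         }
*       });
*     });
* </pre>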
* @extends sap.ui.core.Control
*
* @author SAP SE
* @version ${version}
*
* @constructor
* @public
* @since 1.28
* @alias sap.m.MessagePopover
* @see {@link fiori:https://experience.sap.com/fiori-design-web/message-popover/ Message Popover}
* @ui5-metamodel This control/element also will be described in the UI5 (legacy) designtime metamodel
*/
var MessagePopover = Control.extend("sap.m.MessagePopover", /** @lends sap.m.MessagePopover.prototype */ {
metadata: {
library: "sap.m",
properties: {
/**
* Callback function for resolving a promise after description has been asynchronously loaded inside this function
* @callback sap.m.MessagePopover~asyncDescriptionHandler
* @param {object} config A single parameter object
* @param {MessagePopoverItem} config.item Reference to respective MessagePopoverItem instance
* @param {object} config.promise Object grouping a promise's reject and resolve methods
* @param {function} config.promise.resolve Method to resolve promise
* @param {function} config.promise.reject Method to reject promise
*/
asyncDescriptionHandler: {type: "any", group: "Behavior", defaultValue: null},
/**
* Callback function for resolving a promise after a link has been asynchronously validated inside this function
* @callback sap.m.MessagePopover~asyncURLHandler
* @param {object} config A single parameter object
* @param {string} config.url URL to validate
* @param {string|Int} config.id ID of the validation job
* @param {object} config.promise Object grouping a promise's reject and resolve methods
* @param {function} config.promise.resolve Method to resolve promise
* @param {function} config.promise.reject Method to reject promise
*/
asyncURLHandler: {type: "any", group: "Behavior", defaultValue: null},
/**
* Determines the position where the control will appear on the screen. Possible values are sap.m.VerticalPlacementType.Top, sap.m.VerticalPlacementType.Bottom and sap.m.VerticalPlacementType.Vertical.
* The default value is sap.m.VerticalPlacementType.Vertical. Setting this property while the control is open will not cause any re-rendering or repositioning; changes are only applied with the next interaction.
*/
placement: {type: "sap.m.VerticalPlacementType", group: "Behavior", defaultValue: "Vertical"},
/**
* Sets the initial state of the control - expanded or collapsed. By default the control opens as expanded.
*/
initiallyExpanded: {type: "boolean", group: "Behavior", defaultValue: true}
},
defaultAggregation: "items",
aggregations: {
/**
* A list with message items
*/
items: {type: "sap.m.MessageItem", altTypes: ["sap.m.MessagePopoverItem"], multiple: true, singularName: "item"},
/**
* A custom header button
*/
headerButton: {type: "sap.m.Button", multiple: false, forwarding: {idSuffix: "-messageView", aggregation: "headerButton"}}
},
events: {
/**
* This event will be fired after the popover is opened
*/
afterOpen: {
parameters: {
/**
* This refers to the control which opens the popover
*/
openBy: {type: "sap.ui.core.Control"}
}
},
/**
* This event will be fired after the popover is closed
*/
afterClose: {
parameters: {
/**
* Refers to the control which opens the popover
*/
openBy: {type: "sap.ui.core.Control"}
}
},
/**
* This event will be fired before the popover is opened
*/
beforeOpen: {
parameters: {
/**
* Refers to the control which opens the popover
*/
openBy: {type: "sap.ui.core.Control"}
}
},
/**
* This event will be fired before the popover is closed
*/
beforeClose: {
parameters: {
/**
* Refers to the control which opens the popover
*/
openBy: {type: "sap.ui.core.Control"}
}
},
/**
* This event will be fired when an item's description is shown
*/
itemSelect: {
parameters: {
/**
* Refers to the message popover item that is being presented
*/
item: {type: "sap.m.MessagePopoverItem"},
/**
* Refers to the type of messages being shown
* See sap.ui.core.MessageType values for types
*/
messageTypeFilter: {type: "sap.ui.core.MessageType"}
}
},
/**
* This event will be fired when one of the lists is shown, whether or not it is filtered by type
*/
listSelect: {
parameters: {
/**
* This parameter refers to the type of messages being shown.
*/
messageTypeFilter: {type: "sap.ui.core.MessageType"}
}
},
/**
* This event will be fired when the long text description data from a remote URL is loaded
*/
longtextLoaded: {},
/**
* This event will be fired when a validation of a URL from long text description is ready
*/
urlValidated: {},
/**
* This event will be fired when an active title of a MessageItem is clicked
* @since 1.58
*/
activeTitlePress: {
parameters: {
/**
* Refers to the message item that contains the active Title
*/
item: { type: "sap.m.MessageItem" }
}
}
}
}
});
function capitalize(sName) {
return sName.charAt(0).toUpperCase() + sName.slice(1);
}
var CSS_CLASS = "sapMMsgPopover",
DEFAULT_CONTENT_HEIGHT = "320px",
DEFAULT_CONTENT_WIDTH = "440px",
ICONS = {
back: IconPool.getIconURI("nav-back"),
close: IconPool.getIconURI("decline"),
information: IconPool.getIconURI("message-information"),
warning: IconPool.getIconURI("message-warning"),
error: IconPool.getIconURI("message-error"),
success: IconPool.getIconURI("message-success")
},
// Property names array
ASYNC_HANDLER_NAMES = ["asyncDescriptionHandler", "asyncURLHandler"],
// Private class variable used for static method below that sets default async handlers
DEFAULT_ASYNC_HANDLERS = {
asyncDescriptionHandler: function (config) {
var sLongTextUrl = config.item.getLongtextUrl();
if (sLongTextUrl) {
jQuery.ajax({
type: "GET",
url: sLongTextUrl,
success: function (data) {
config.item.setDescription(data);
config.promise.resolve();
},
error: function() {
var sError = "A request has failed for long text data. URL: " + sLongTextUrl;
Log.error(sError);
config.promise.reject(sError);
}
});
}
}
};
/**
* Setter for default description and URL validation callbacks across all instances of MessagePopover
* @static
* @protected
* @param {object} mDefaultHandlers An object setting default callbacks
* @param {function} mDefaultHandlers.asyncDescriptionHandler The description handler
* @param {function} mDefaultHandlers.asyncURLHandler The URL handler
*/
MessagePopover.setDefaultHandlers = function (mDefaultHandlers) {
ASYNC_HANDLER_NAMES.forEach(function (sFuncName) {
if (mDefaultHandlers.hasOwnProperty(sFuncName)) {
DEFAULT_ASYNC_HANDLERS[sFuncName] = mDefaultHandlers[sFuncName];
}
});
};
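/*
 * A minimal, hypothetical sketch of providing an application-wide description handler
 * (the service URL below is illustrative only; the callback contract matches the
 * asyncDescriptionHandler documentation above):
 *
 *   MessagePopover.setDefaultHandlers({
 *     asyncDescriptionHandler: function (config) {
 *       jQuery.ajax({
 *         url: "/myapp/longtext?code=" + encodeURIComponent(config.item.getTitle()),
 *         success: function (data) {
 *           config.item.setDescription(data);
 *           config.promise.resolve();
 *         },
 *         error: function () {
 *           config.promise.reject("Long text could not be loaded");
 *         }
 *       });
 *     }
 *   });
 */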
/*
* =========================================
* Lifecycle methods
* =========================================
*/
/**
* Initializes the control
*
* @override
* @private
*/
MessagePopover.prototype.init = function () {
var that = this;
var oPopupControl;
this._oResourceBundle = sap.ui.getCore().getLibraryResourceBundle("sap.m");
this._oMessageView = this._initMessageView();
this._oMessageView.addEventDelegate({
onBeforeRendering: function () {
var bSegmentedButtonVisibleInMV = that._oMessageView._oSegmentedButton.getVisible(),
bShowHeader = !that.getInitiallyExpanded() || bSegmentedButtonVisibleInMV;
that._oMessageView._oSegmentedButton.setVisible(bShowHeader);
that._oMessageView._listPage.setShowHeader(true);
}
});
// insert the close buttons in both the list and the details page, as the MessageView
// doesn't know it is being created inside a Popover
this._insertCloseBtn(this._oMessageView._oListHeader);
this._insertCloseBtn(this._oMessageView._oDetailsHeader);
this._oMessageView._oSegmentedButton.attachEvent("select", this._onSegButtonSelect, this);
this._oPopover = new ResponsivePopover(this.getId() + "-messagePopover", {
showHeader: false,
contentWidth: DEFAULT_CONTENT_WIDTH,
contentHeight: DEFAULT_CONTENT_HEIGHT,
placement: this.getPlacement(),
showCloseButton: false,
verticalScrolling: false,
horizontalScrolling: false,
modal: false,
afterOpen: function (oEvent) {
that.fireAfterOpen({openBy: oEvent.getParameter("openBy")});
},
afterClose: function (oEvent) {
that._oMessageView._navContainer.backToTop();
that.fireAfterClose({openBy: oEvent.getParameter("openBy")});
},
beforeOpen: function (oEvent) {
that.fireBeforeOpen({openBy: oEvent.getParameter("openBy")});
},
beforeClose: function (oEvent) {
that.fireBeforeClose({openBy: oEvent.getParameter("openBy")});
}
}).addStyleClass(CSS_CLASS);
this._oPopover.addContent(this._oMessageView);
this._oPopover.addAssociation("ariaLabelledBy", this.getId() + "-messageView-HeadingDescr", true);
oPopupControl = this._oPopover.getAggregation("_popup");
oPopupControl.oPopup.setAutoClose(false);
oPopupControl.addEventDelegate({
onBeforeRendering: this.onBeforeRenderingPopover,
onAfterRendering: this.onAfterRenderingPopover
}, this);
if (Device.system.phone) {
this._oPopover.setBeginButton(new Button({
text: this._oResourceBundle.getText("MESSAGEPOPOVER_CLOSE"),
press: this.close.bind(this)
}));
}
// Check for default async handlers and set them appropriately
ASYNC_HANDLER_NAMES.forEach(function (sFuncName) {
if (DEFAULT_ASYNC_HANDLERS.hasOwnProperty(sFuncName)) {
this['set' + capitalize(sFuncName)](DEFAULT_ASYNC_HANDLERS[sFuncName]);
}
}, this);
};
MessagePopover.prototype.onBeforeRendering = function () {
if (this.getDependents().indexOf(this._oPopover) === -1) {
this.addDependent(this._oPopover);
}
};
/**
* Required adaptations before rendering MessagePopover
*
* @private
*/
MessagePopover.prototype.onBeforeRenderingPopover = function () {
// If there is no item's binding given - it should happen automatically in the MessageView
// However for backwards compatibility we need to have the same binding on the MessagePopover
// TODO: Decide what to do in this case
/*if (!this.getBinding("items") && this._oMessageView.getBinding("items")) {
this.bindAggregation("items", this._oMessageView.getBindingInfo("items"));
}*/
// Update MV only if 'items' aggregation is changed
if (this._bItemsChanged) {
var items = this.getItems();
var that = this;
this._oMessageView.destroyItems();
items.forEach(function (item) {
// we need to know if the MessagePopover's item was changed so to
// update the MessageView's items as well
item._updateProperties(function () {
that._bItemsChanged = true;
});
// we need to clone the item along with its bindings and aggregations
this._oMessageView.addItem(item.clone("", "", {
cloneChildren: true,
cloneBinding: true
}));
}, this);
this._bItemsChanged = false;
}
this._setInitialFocus();
};
/**
* Required adaptations after rendering MessagePopover
*
* @private
*/
MessagePopover.prototype.onAfterRenderingPopover = function () {
// Because we remove the items from the MessageView and fill it in with new items
// every time something is changed - we need to update the id of the element which
// will receive the focus given by the Popover control.
// First we need to check if such id is stored in the MessagePopover -> ResponsivePopover -> Popover control
if (this._oPopover._oControl._sFocusControlId) {
// then we remove any stored item id because it no longer exists after the re-rendering.
this._oPopover._oControl._sFocusControlId = null;
}
};
/**
* Called when the control is destroyed
*
* @private
*/
MessagePopover.prototype.exit = function () {
this._oResourceBundle = null;
if (this._oMessageView) {
this._oMessageView.destroy();
this._oMessageView = null;
}
// Destroys ResponsivePopover control that is used by MessagePopover
// This will walk through all aggregations in the Popover and destroy them (in our case this is NavContainer)
// Next this will walk through all aggregations in the NavContainer, etc.
if (this._oPopover) {
this._oPopover.destroy();
this._oPopover = null;
}
};
/**
* Opens the MessagePopover
*
* @param {sap.ui.core.Control} oControl Control which opens the MessagePopover
* @returns {sap.m.MessagePopover} Reference to the 'this' for chaining purposes
* @public
* @ui5-metamodel
*/
MessagePopover.prototype.openBy = function (oControl) {
var oResponsivePopoverControl = this._oPopover.getAggregation("_popup"),
oParent = oControl.getParent();
// If the MessagePopover is rendered as a sap.m.Popover and is opened from a sap.m.Toolbar, sap.m.Bar or sap.f.semantic.SemanticPage, remove the arrow and make the popover resizable
if (oResponsivePopoverControl instanceof Popover) {
if ((oParent instanceof Toolbar || oParent instanceof Bar || oParent instanceof SemanticPage)) {
oResponsivePopoverControl.setShowArrow(false);
oResponsivePopoverControl.setResizable(true);
} else {
oResponsivePopoverControl.setShowArrow(true);
}
}
if (this._oPopover) {
this._restoreExpansionDefaults();
this._oPopover.openBy(oControl);
}
return this;
};
/**
* Closes the MessagePopover
*
* @returns {sap.m.MessagePopover} Reference to the 'this' for chaining purposes
* @public
*/
MessagePopover.prototype.close = function () {
if (this._oPopover) {
this._oPopover.close();
}
return this;
};
/**
* The method checks if the MessagePopover is open. It returns true when the MessagePopover is currently open
* (this includes opening and closing animations), otherwise it returns false
*
* @public
* @returns {boolean} Whether the MessagePopover is open
*/
MessagePopover.prototype.isOpen = function () {
return this._oPopover.isOpen();
};
/**
* This method toggles between open and closed state of the MessagePopover instance.
* oControl parameter is mandatory in the same way as in 'openBy' method
*
* @param {sap.ui.core.Control} oControl Control which opens the MessagePopover
* @returns {sap.m.MessagePopover} Reference to the 'this' for chaining purposes
* @public
*/
MessagePopover.prototype.toggle = function (oControl) {
if (this.isOpen()) {
this.close();
} else {
this.openBy(oControl);
}
return this;
};
/**
* The method sets the placement position of the MessagePopover. Only accepted Values are:
* sap.m.PlacementType.Top, sap.m.PlacementType.Bottom and sap.m.PlacementType.Vertical
*
* @param {sap.m.PlacementType} sPlacement Placement type
* @returns {sap.m.MessagePopover} Reference to the 'this' for chaining purposes
* @public
*/
MessagePopover.prototype.setPlacement = function (sPlacement) {
this.setProperty("placement", sPlacement, true);
this._oPopover.setPlacement(sPlacement);
return this;
};
MessagePopover.prototype.getDomRef = function (sSuffix) {
return this._oPopover && this._oPopover.getAggregation("_popup").getDomRef(sSuffix);
};
/*
* =========================================
* Internal methods
* =========================================
*/
/**
* Creates new internal MessageView control
*
* @returns {sap.m.MessageView} The newly instantiated message view control
* @private
*/
MessagePopover.prototype._initMessageView = function () {
var that = this,
oMessageView;
oMessageView = new MessageView(this.getId() + "-messageView", {
activeTitlePress: function (oEvent) {
that.fireActiveTitlePress({ item: oEvent.getParameter("item")});
},
listSelect: function(oEvent) {
that.fireListSelect({messageTypeFilter: oEvent.getParameter('messageTypeFilter')});
},
itemSelect: function(oEvent) {
that.fireItemSelect({
messageTypeFilter: oEvent.getParameter('messageTypeFilter'),
item: oEvent.getParameter('item')
});
},
longtextLoaded: function() {
that.fireLongtextLoaded();
},
urlValidated: function() {
that.fireUrlValidated();
}
});
return oMessageView;
};
MessagePopover.prototype._onSegButtonSelect = function () {
// expanding the message popover if it is still collapsed
if (this.isOpen() && !this.getInitiallyExpanded() && this._oPopover.hasStyleClass(CSS_CLASS + "-init")) {
this._expandMsgPopover();
}
};
/**
* Restores the state defined by the initiallyExpanded property of the MessagePopover
* @private
*/
MessagePopover.prototype._restoreExpansionDefaults = function () {
if (!this.getInitiallyExpanded()) {
this._collapseMsgPopover();
this._oMessageView._oSegmentedButton.setSelectedButton("none");
} else {
this._expandMsgPopover();
}
};
/**
* Expands the MessagePopover so that its width and height are set to their default values
* @private
*/
MessagePopover.prototype._expandMsgPopover = function () {
var sHeight = DEFAULT_CONTENT_HEIGHT,
sDomHeight = this._oPopover.$("cont").css("height");
if (this.getInitiallyExpanded() && sDomHeight !== "0px") {
sHeight = parseFloat(sDomHeight) ? sDomHeight : sHeight;
}
this._oPopover
.setContentHeight(sHeight)
.removeStyleClass(CSS_CLASS + "-init");
};
/**
* Sets the height of the MessagePopover to auto so that only the header with
* the SegmentedButton is visible
* @private
*/
MessagePopover.prototype._collapseMsgPopover = function () {
this._oPopover
.addStyleClass(CSS_CLASS + "-init")
.setContentHeight("auto");
};
/**
* Inserts a close button in the provided location
*
* @param {sap.ui.core.Control} oInsertCloseBtnHere The object in which we want to insert the control
* @private
*/
MessagePopover.prototype._insertCloseBtn = function (oInsertCloseBtnHere) {
var sCloseBtnDescr = this._oResourceBundle.getText("MESSAGEPOPOVER_CLOSE"),
oCloseBtn = new Button({
icon: ICONS["close"],
visible: !Device.system.phone,
tooltip: sCloseBtnDescr,
press: this.close.bind(this)
}).addStyleClass(CSS_CLASS + "CloseBtn");
oInsertCloseBtnHere.insertContent(oCloseBtn, 3, true);
};
/**
* Sets initial focus of the control
*
* @private
*/
MessagePopover.prototype._setInitialFocus = function () {
if (this._oMessageView._isListPage() && this.getInitiallyExpanded()) {
// if the control's state is "initiallyExpanded: true" and
// the current page is the list page, set the initial focus to the list;
// otherwise use the default functionality built into the popover
this._oPopover.setInitialFocus(this._oMessageView._oLists[this._sCurrentList || 'all']);
}
};
/*
* =========================================
* MessagePopover async handlers
* proxy methods
* =========================================
*/
MessagePopover.prototype.setAsyncDescriptionHandler = function (asyncDescriptionHandler) {
// MessagePopover is just a proxy to the MessageView
this.setProperty('asyncDescriptionHandler', asyncDescriptionHandler, true);
this._oMessageView.setProperty('asyncDescriptionHandler', asyncDescriptionHandler, true);
return this;
};
MessagePopover.prototype.setAsyncURLHandler = function (asyncURLHandler) {
// MessagePopover is just a proxy to the MessageView
this.setProperty('asyncURLHandler', asyncURLHandler, true);
this._oMessageView.setProperty('asyncURLHandler', asyncURLHandler, true);
return this;
};
MessagePopover.prototype.setModel = function(oModel, sName) {
/* When a model is set on the MessagePopover it is propagated to all of its aggregations.
Unfortunately the MessageView is not an aggregation of the MessagePopover (due to some rendering issues)
but is actually a child of the ResponsivePopover.
Therefore, once the developer sets a model on the MessagePopover, we need to forward it to the internal MessageView. */
this._oMessageView.setModel(oModel, sName);
return Control.prototype.setModel.apply(this, arguments);
};
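// A minimal, hypothetical sketch (the model name and data are illustrative only;
// assumes sap/ui/model/json/JSONModel is available as JSONModel):
//   oMessagePopover.setModel(new JSONModel({ messages: [ /* ... */ ] }), "msg");
//   The named model is then also available for bindings inside the internal MessageView.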
["invalidate", "addStyleClass", "removeStyleClass", "toggleStyleClass", "hasStyleClass", "getBusyIndicatorDelay",
"setBusyIndicatorDelay", "getVisible", "setVisible", "getBusy", "setBusy"].forEach(function(sName){
MessagePopover.prototype[sName] = function() {
if (this._oPopover && this._oPopover[sName]) {
var oPopover = this._oPopover;
var res = oPopover[sName].apply(oPopover, arguments);
return res === oPopover ? this : res;
}
};
});
// The following inherited methods of this control are extended because this control uses ResponsivePopover for rendering
["setModel", "bindAggregation", "setAggregation", "insertAggregation", "addAggregation",
"removeAggregation", "removeAllAggregation", "destroyAggregation"].forEach(function (sFuncName) {
// First, they are saved for later reference
MessagePopover.prototype["_" + sFuncName + "Old"] = MessagePopover.prototype[sFuncName];
// Once they are called
MessagePopover.prototype[sFuncName] = function () {
// We immediately call the saved method first
var result = MessagePopover.prototype["_" + sFuncName + "Old"].apply(this, arguments);
// Then there is additional logic
// Mark items aggregation as changed and invalidate popover to trigger rendering
// See 'MessagePopover.prototype.onBeforeRenderingPopover'
this._bItemsChanged = true;
// If Popover dependency has already been instantiated ...
if (this._oPopover) {
// ... invalidate it
this._oPopover.invalidate();
}
// If the called method is 'removeAggregation' or 'removeAllAggregation' ...
if (["removeAggregation", "removeAllAggregation"].indexOf(sFuncName) !== -1) {
// ... return the result of the operation
return result;
}
return this;
};
});
return MessagePopover;
});
|
src/sap.m/src/sap/m/MessagePopover.js
|
/*!
* ${copyright}
*/
// Provides control sap.m.MessagePopover.
sap.ui.define([
"./ResponsivePopover",
"./Button",
"./Toolbar",
"./Bar",
"sap/ui/core/Control",
"sap/ui/core/IconPool",
"./semantic/SemanticPage",
"./Popover",
"./MessageView",
"sap/ui/Device",
"./MessagePopoverRenderer",
"sap/base/Log",
"sap/ui/thirdparty/jquery"
],
function(
ResponsivePopover,
Button,
Toolbar,
Bar,
Control,
IconPool,
SemanticPage,
Popover,
MessageView,
Device,
MessagePopoverRenderer,
Log,
jQuery
) {
"use strict";
/**
* Constructor for a new MessagePopover.
*
* @param {string} [sId] ID for the new control, generated automatically if no ID is given
* @param {object} [mSettings] Initial settings for the new control
*
* @class
* A summarized list of different types of messages.
* <h3>Overview</h3>
* A message popover is used to display a summarized list of different types of messages (errors, warnings, success and information).
* It provides a handy and systemized way to navigate and explore details for every message. It also exposes an event {@link sap.m.MessagePopover#activeTitlePress}, which can be used for navigation from a message to the source of the issue.
* <h4>Notes:</h4>
* <ul>
* <li> Messages can have descriptions pre-formatted with HTML markup. In this case, the <code>markupDescription</code> has to be set to <code>true</code>.</li>
* <li> If the message cannot be fully displayed or includes a long description, the message popover provides navigation to the detailed description.</li>
* </ul>
* <h3>Structure</h3>
* The message popover stores all messages in an aggregation of type {@link sap.m.MessageItem} named <code>items</code>.
*
* A set of properties determines how the items are rendered:
* <ul>
* <li> counter - An integer that is used to indicate the number of errors for each type </li>
* <li> type - The type of message </li>
* <li> title/subtitle - The title and subtitle of the message</li>
* <li> description - The long text description of the message</li>
* <li> activeTitle - Determines whether the title of the item is interactive</li>
* </ul>
* <h3>Usage</h3>
* With the message concept, MessagePopover provides a way to centrally manage messages and show them to the user without additional work for the developer.
* The message popover is triggered from a messaging button in the footer toolbar. If an error has occurred at any validation point,
* the total number of messages should be incremented, but the user's work shouldn't be interrupted.
* Navigation between the message item and the source of the error can be created, if needed by the application.
* This can be done by setting the <code>activeTitle</code> property to true and providing a handler for the <code>activeTitlePress</code> event.
* <h3>Responsive Behavior</h3>
* On mobile phones, the message popover is automatically shown in full screen mode.<br>
* On desktop and tablet, the message popover opens in a popover.<br>
* On desktop the opened popover is resizable if it is placed in a {@link sap.m.Toolbar}, {@link sap.m.Bar} or used in {@link sap.f.semantic.SemanticPage}
* @extends sap.ui.core.Control
*
* @author SAP SE
* @version ${version}
*
* @constructor
* @public
* @since 1.28
* @alias sap.m.MessagePopover
* @see {@link fiori:https://experience.sap.com/fiori-design-web/message-popover/ Message Popover}
* @ui5-metamodel This control/element also will be described in the UI5 (legacy) designtime metamodel
*/
var MessagePopover = Control.extend("sap.m.MessagePopover", /** @lends sap.m.MessagePopover.prototype */ {
metadata: {
library: "sap.m",
properties: {
/**
* Callback function for resolving a promise after description has been asynchronously loaded inside this function
* @callback sap.m.MessagePopover~asyncDescriptionHandler
* @param {object} config A single parameter object
* @param {MessagePopoverItem} config.item Reference to respective MessagePopoverItem instance
* @param {object} config.promise Object grouping a promise's reject and resolve methods
* @param {function} config.promise.resolve Method to resolve promise
* @param {function} config.promise.reject Method to reject promise
*/
asyncDescriptionHandler: {type: "any", group: "Behavior", defaultValue: null},
/**
* Callback function for resolving a promise after a link has been asynchronously validated inside this function
* @callback sap.m.MessagePopover~asyncURLHandler
* @param {object} config A single parameter object
* @param {string} config.url URL to validate
* @param {string|Int} config.id ID of the validation job
* @param {object} config.promise Object grouping a promise's reject and resolve methods
* @param {function} config.promise.resolve Method to resolve promise
* @param {function} config.promise.reject Method to reject promise
*/
asyncURLHandler: {type: "any", group: "Behavior", defaultValue: null},
/**
* Determines the position, where the control will appear on the screen. Possible values are: sap.m.VerticalPlacementType.Top, sap.m.VerticalPlacementType.Bottom and sap.m.VerticalPlacementType.Vertical.
* The default value is sap.m.VerticalPlacementType.Vertical. Setting this property while the control is open, will not cause any re-rendering and changing of the position. Changes will only be applied with the next interaction.
*/
placement: {type: "sap.m.VerticalPlacementType", group: "Behavior", defaultValue: "Vertical"},
/**
* Sets the initial state of the control - expanded or collapsed. By default the control opens as expanded.
*/
initiallyExpanded: {type: "boolean", group: "Behavior", defaultValue: true}
},
defaultAggregation: "items",
aggregations: {
/**
* A list with message items
*/
items: {type: "sap.m.MessageItem", altTypes: ["sap.m.MessagePopoverItem"], multiple: true, singularName: "item"},
/**
* A custom header button
*/
headerButton: {type: "sap.m.Button", multiple: false, forwarding: {idSuffix: "-messageView", aggregation: "headerButton"}}
},
events: {
/**
* This event will be fired after the popover is opened
*/
afterOpen: {
parameters: {
/**
* This refers to the control which opens the popover
*/
openBy: {type: "sap.ui.core.Control"}
}
},
/**
* This event will be fired after the popover is closed
*/
afterClose: {
parameters: {
/**
* Refers to the control which opens the popover
*/
openBy: {type: "sap.ui.core.Control"}
}
},
/**
* This event will be fired before the popover is opened
*/
beforeOpen: {
parameters: {
/**
* Refers to the control which opens the popover
*/
openBy: {type: "sap.ui.core.Control"}
}
},
/**
* This event will be fired before the popover is closed
*/
beforeClose: {
parameters: {
/**
* Refers to the control which opens the popover
* See sap.ui.core.MessageType enum values for types
*/
openBy: {type: "sap.ui.core.Control"}
}
},
/**
* This event will be fired when description is shown
*/
itemSelect: {
parameters: {
/**
* Refers to the message popover item that is being presented
*/
item: {type: "sap.m.MessagePopoverItem"},
/**
* Refers to the type of messages being shown
* See sap.ui.core.MessageType values for types
*/
messageTypeFilter: {type: "sap.ui.core.MessageType"}
}
},
/**
* This event will be fired when one of the lists is shown when (not) filtered by type
*/
listSelect: {
parameters: {
/**
* This parameter refers to the type of messages being shown.
*/
messageTypeFilter: {type: "sap.ui.core.MessageType"}
}
},
/**
* This event will be fired when the long text description data from a remote URL is loaded
*/
longtextLoaded: {},
/**
* This event will be fired when a validation of a URL from long text description is ready
*/
urlValidated: {},
/**
* This event will be fired when an active title of a MessageItem is clicked
* @since 1.58
*/
activeTitlePress: {
parameters: {
/**
* Refers to the message item that contains the active Title
*/
item: { type: "sap.m.MessageItem" }
}
}
}
}
});
function capitalize(sName) {
return sName.charAt(0).toUpperCase() + sName.slice(1);
}
var CSS_CLASS = "sapMMsgPopover",
DEFAULT_CONTENT_HEIGHT = "320px",
DEFAULT_CONTENT_WIDTH = "440px",
ICONS = {
back: IconPool.getIconURI("nav-back"),
close: IconPool.getIconURI("decline"),
information: IconPool.getIconURI("message-information"),
warning: IconPool.getIconURI("message-warning"),
error: IconPool.getIconURI("message-error"),
success: IconPool.getIconURI("message-success")
},
// Property names array
ASYNC_HANDLER_NAMES = ["asyncDescriptionHandler", "asyncURLHandler"],
// Private class variable used for static method below that sets default async handlers
DEFAULT_ASYNC_HANDLERS = {
asyncDescriptionHandler: function (config) {
var sLongTextUrl = config.item.getLongtextUrl();
if (sLongTextUrl) {
jQuery.ajax({
type: "GET",
url: sLongTextUrl,
success: function (data) {
config.item.setDescription(data);
config.promise.resolve();
},
error: function() {
var sError = "A request has failed for long text data. URL: " + sLongTextUrl;
Log.error(sError);
config.promise.reject(sError);
}
});
}
}
};
/**
* Setter for default description and URL validation callbacks across all instances of MessagePopover
* @static
* @protected
* @param {object} mDefaultHandlers An object setting default callbacks
* @param {function} mDefaultHandlers.asyncDescriptionHandler The description handler
* @param {function} mDefaultHandlers.asyncURLHandler The URL handler
*/
MessagePopover.setDefaultHandlers = function (mDefaultHandlers) {
ASYNC_HANDLER_NAMES.forEach(function (sFuncName) {
if (mDefaultHandlers.hasOwnProperty(sFuncName)) {
DEFAULT_ASYNC_HANDLERS[sFuncName] = mDefaultHandlers[sFuncName];
}
});
};
/*
* =========================================
* Lifecycle methods
* =========================================
*/
/**
* Initializes the control
*
* @override
* @private
*/
MessagePopover.prototype.init = function () {
var that = this;
var oPopupControl;
this._oResourceBundle = sap.ui.getCore().getLibraryResourceBundle("sap.m");
this._oMessageView = this._initMessageView();
this._oMessageView.addEventDelegate({
onBeforeRendering: function () {
var bSegmentedButtonVisibleInMV = that._oMessageView._oSegmentedButton.getVisible(),
bShowHeader = !that.getInitiallyExpanded() || bSegmentedButtonVisibleInMV;
that._oMessageView._oSegmentedButton.setVisible(bShowHeader);
that._oMessageView._listPage.setShowHeader(true);
}
});
// insert the close buttons in both list and details pages as the MessageView
// doesn't know it is being created in Popover
this._insertCloseBtn(this._oMessageView._oListHeader);
this._insertCloseBtn(this._oMessageView._oDetailsHeader);
this._oMessageView._oSegmentedButton.attachEvent("select", this._onSegButtonSelect, this);
this._oPopover = new ResponsivePopover(this.getId() + "-messagePopover", {
showHeader: false,
contentWidth: DEFAULT_CONTENT_WIDTH,
contentHeight: DEFAULT_CONTENT_HEIGHT,
placement: this.getPlacement(),
showCloseButton: false,
verticalScrolling: false,
horizontalScrolling: false,
modal: false,
afterOpen: function (oEvent) {
that.fireAfterOpen({openBy: oEvent.getParameter("openBy")});
},
afterClose: function (oEvent) {
that._oMessageView._navContainer.backToTop();
that.fireAfterClose({openBy: oEvent.getParameter("openBy")});
},
beforeOpen: function (oEvent) {
that.fireBeforeOpen({openBy: oEvent.getParameter("openBy")});
},
beforeClose: function (oEvent) {
that.fireBeforeClose({openBy: oEvent.getParameter("openBy")});
}
}).addStyleClass(CSS_CLASS);
this._oPopover.addContent(this._oMessageView);
this._oPopover.addAssociation("ariaLabelledBy", this.getId() + "-messageView-HeadingDescr", true);
oPopupControl = this._oPopover.getAggregation("_popup");
oPopupControl.oPopup.setAutoClose(false);
oPopupControl.addEventDelegate({
onBeforeRendering: this.onBeforeRenderingPopover,
onAfterRendering: this.onAfterRenderingPopover
}, this);
if (Device.system.phone) {
this._oPopover.setBeginButton(new Button({
text: this._oResourceBundle.getText("MESSAGEPOPOVER_CLOSE"),
press: this.close.bind(this)
}));
}
// Check for default async handlers and set them appropriately
ASYNC_HANDLER_NAMES.forEach(function (sFuncName) {
if (DEFAULT_ASYNC_HANDLERS.hasOwnProperty(sFuncName)) {
this['set' + capitalize(sFuncName)](DEFAULT_ASYNC_HANDLERS[sFuncName]);
}
}, this);
};
MessagePopover.prototype.onBeforeRendering = function () {
if (this.getDependents().indexOf(this._oPopover) === -1) {
this.addDependent(this._oPopover);
}
};
/**
* Required adaptations before rendering MessagePopover
*
* @private
*/
MessagePopover.prototype.onBeforeRenderingPopover = function () {
// If there is no item's binding given - it should happen automatically in the MessageView
// However for backwards compatibility we need to have the same binding on the MessagePopover
// TODO: Decide what to do in this case
/*if (!this.getBinding("items") && this._oMessageView.getBinding("items")) {
this.bindAggregation("items", this._oMessageView.getBindingInfo("items"));
}*/
// Update MV only if 'items' aggregation is changed
if (this._bItemsChanged) {
var items = this.getItems();
var that = this;
this._oMessageView.destroyItems();
items.forEach(function (item) {
// we need to know if the MessagePopover's item was changed so to
// update the MessageView's items as well
item._updateProperties(function () {
that._bItemsChanged = true;
});
// we need to clone the item along with its bindings and aggregations
this._oMessageView.addItem(item.clone("", "", {
cloneChildren: true,
cloneBinding: true
}));
}, this);
this._bItemsChanged = false;
}
this._setInitialFocus();
};
/**
* Required adaptations after rendering MessagePopover
*
* @private
*/
MessagePopover.prototype.onAfterRenderingPopover = function () {
// Because we remove the items from the MessageView and fill it in with new items
// every time something is changed - we need to update the id of the element which
// will receive the focus given by the Popover control.
// First we need to check if such id is stored in the MessagePopover -> ResponsivePopover -> Popover control
if (this._oPopover._oControl._sFocusControlId) {
// then we remove any stored item id because it no longer exists after the re-rendering.
this._oPopover._oControl._sFocusControlId = null;
}
};
/**
* Called when the control is destroyed
*
* @private
*/
MessagePopover.prototype.exit = function () {
this._oResourceBundle = null;
if (this._oMessageView) {
this._oMessageView.destroy();
this._oMessageView = null;
}
// Destroys ResponsivePopover control that is used by MessagePopover
// This will walk through all aggregations in the Popover and destroy them (in our case this is NavContainer)
// Next this will walk through all aggregations in the NavContainer, etc.
if (this._oPopover) {
this._oPopover.destroy();
this._oPopover = null;
}
};
/**
* Opens the MessagePopover
*
* @param {sap.ui.core.Control} oControl Control which opens the MessagePopover
* @returns {sap.m.MessagePopover} Reference to the 'this' for chaining purposes
* @public
* @ui5-metamodel
*/
MessagePopover.prototype.openBy = function (oControl) {
var oResponsivePopoverControl = this._oPopover.getAggregation("_popup"),
oParent = oControl.getParent();
// If MessagePopover is opened from an instance of sap.m.Toolbar and is instance of sap.m.Popover remove the Arrow
if (oResponsivePopoverControl instanceof Popover) {
if ((oParent instanceof Toolbar || oParent instanceof Bar || oParent instanceof SemanticPage)) {
oResponsivePopoverControl.setShowArrow(false);
oResponsivePopoverControl.setResizable(true);
} else {
oResponsivePopoverControl.setShowArrow(true);
}
}
if (this._oPopover) {
this._restoreExpansionDefaults();
this._oPopover.openBy(oControl);
}
return this;
};
/**
* Closes the MessagePopover
*
* @returns {sap.m.MessagePopover} Reference to the 'this' for chaining purposes
* @public
*/
MessagePopover.prototype.close = function () {
if (this._oPopover) {
this._oPopover.close();
}
return this;
};
/**
* The method checks if the MessagePopover is open. It returns true when the MessagePopover is currently open
* (this includes opening and closing animations), otherwise it returns false
*
* @public
* @returns {boolean} Whether the MessagePopover is open
*/
MessagePopover.prototype.isOpen = function () {
return this._oPopover.isOpen();
};
/**
* This method toggles between open and closed state of the MessagePopover instance.
* oControl parameter is mandatory in the same way as in 'openBy' method
*
* @param {sap.ui.core.Control} oControl Control which opens the MessagePopover
* @returns {sap.m.MessagePopover} Reference to the 'this' for chaining purposes
* @public
*/
MessagePopover.prototype.toggle = function (oControl) {
if (this.isOpen()) {
this.close();
} else {
this.openBy(oControl);
}
return this;
};
/**
* The method sets the placement position of the MessagePopover. Only accepted Values are:
* sap.m.PlacementType.Top, sap.m.PlacementType.Bottom and sap.m.PlacementType.Vertical
*
* @param {sap.m.PlacementType} sPlacement Placement type
* @returns {sap.m.MessagePopover} Reference to the 'this' for chaining purposes
* @public
*/
MessagePopover.prototype.setPlacement = function (sPlacement) {
this.setProperty("placement", sPlacement, true);
this._oPopover.setPlacement(sPlacement);
return this;
};
MessagePopover.prototype.getDomRef = function (sSuffix) {
return this._oPopover && this._oPopover.getAggregation("_popup").getDomRef(sSuffix);
};
/*
* =========================================
* Internal methods
* =========================================
*/
/**
* Creates new internal MessageView control
*
* @returns {sap.m.MessageView} The newly instantiated message view control
* @private
*/
MessagePopover.prototype._initMessageView = function () {
var that = this,
oMessageView;
oMessageView = new MessageView(this.getId() + "-messageView", {
activeTitlePress: function (oEvent) {
that.fireActiveTitlePress({ item: oEvent.getParameter("item")});
},
listSelect: function(oEvent) {
that.fireListSelect({messageTypeFilter: oEvent.getParameter('messageTypeFilter')});
},
itemSelect: function(oEvent) {
that.fireItemSelect({
messageTypeFilter: oEvent.getParameter('messageTypeFilter'),
item: oEvent.getParameter('item')
});
},
longtextLoaded: function() {
that.fireLongtextLoaded();
},
urlValidated: function() {
that.fireUrlValidated();
}
});
return oMessageView;
};
MessagePopover.prototype._onSegButtonSelect = function () {
// expanding the message popover if it is still collapsed
if (this.isOpen() && !this.getInitiallyExpanded() && this._oPopover.hasStyleClass(CSS_CLASS + "-init")) {
this._expandMsgPopover();
}
};
/**
* Restores the state defined by the initiallyExpanded property of the MessagePopover
* @private
*/
MessagePopover.prototype._restoreExpansionDefaults = function () {
if (!this.getInitiallyExpanded()) {
this._collapseMsgPopover();
this._oMessageView._oSegmentedButton.setSelectedButton("none");
} else {
this._expandMsgPopover();
}
};
/**
* Expands the MessagePopover so that the width and height are with their default values
* @private
*/
MessagePopover.prototype._expandMsgPopover = function () {
var sDomHeight,
sHeight = DEFAULT_CONTENT_HEIGHT,
sDomHeight = this._oPopover.$("cont").css("height");
if (this.getInitiallyExpanded() && sDomHeight !== "0px") {
sHeight = parseFloat(sDomHeight) ? sDomHeight : sHeight;
}
this._oPopover
.setContentHeight(sHeight)
.removeStyleClass(CSS_CLASS + "-init");
};
/**
* Sets the height of the MessagePopover to auto so that only the header with
* the SegmentedButton is visible
* @private
*/
MessagePopover.prototype._collapseMsgPopover = function () {
this._oPopover
.addStyleClass(CSS_CLASS + "-init")
.setContentHeight("auto");
};
/**
* Inserts close button in the in the provided location
*
* @param {sap.ui.core.Control} oInsertCloseBtnHere The object in which we want to insert the control
* @private
*/
MessagePopover.prototype._insertCloseBtn = function (oInsertCloseBtnHere) {
var sCloseBtnDescr = this._oResourceBundle.getText("MESSAGEPOPOVER_CLOSE"),
oCloseBtn = new Button({
icon: ICONS["close"],
visible: !Device.system.phone,
tooltip: sCloseBtnDescr,
press: this.close.bind(this)
}).addStyleClass(CSS_CLASS + "CloseBtn");
oInsertCloseBtnHere.insertContent(oCloseBtn, 3, true);
};
/**
* Sets initial focus of the control
*
* @private
*/
MessagePopover.prototype._setInitialFocus = function () {
if (this._oMessageView._isListPage() && this.getInitiallyExpanded()) {
// if the controls state is "InitiallyExpanded: true" and
// if current page is the list page - set initial focus to the list.
// otherwise use default functionality built-in the popover
this._oPopover.setInitialFocus(this._oMessageView._oLists[this._sCurrentList || 'all']);
}
};
/*
* =========================================
* MessagePopover async handlers
* proxy methods
* =========================================
*/
MessagePopover.prototype.setAsyncDescriptionHandler = function (asyncDescriptionHandler) {
// MessagePopover is just a proxy to the MessageView
this.setProperty('asyncDescriptionHandler', asyncDescriptionHandler, true);
this._oMessageView.setProperty('asyncDescriptionHandler', asyncDescriptionHandler, true);
return this;
};
MessagePopover.prototype.setAsyncURLHandler = function (asyncURLHandler) {
// MessagePopover is just a proxy to the MessageView
this.setProperty('asyncURLHandler', asyncURLHandler, true);
this._oMessageView.setProperty('asyncURLHandler', asyncURLHandler, true);
return this;
};
MessagePopover.prototype.setModel = function(oModel, sName) {
/* When a model is set to the MessagePopover it is propagated to all its aggregation
Unfortunately the MessageView is not an aggregation of the MessagePopover (due to some rendering issues)
Furthermore the MessageView is actually child of a ResponsivePopover
Therefore once the developer set a model to the MessagePopover we need to forward it to the internal MessageView */
this._oMessageView.setModel(oModel, sName);
return Control.prototype.setModel.apply(this, arguments);
};
["invalidate", "addStyleClass", "removeStyleClass", "toggleStyleClass", "hasStyleClass", "getBusyIndicatorDelay",
"setBusyIndicatorDelay", "getVisible", "setVisible", "getBusy", "setBusy"].forEach(function(sName){
MessagePopover.prototype[sName] = function() {
if (this._oPopover && this._oPopover[sName]) {
var oPopover = this._oPopover;
var res = oPopover[sName].apply(oPopover, arguments);
return res === oPopover ? this : res;
}
};
});
// The following inherited methods of this control are extended because this control uses ResponsivePopover for rendering
["setModel", "bindAggregation", "setAggregation", "insertAggregation", "addAggregation",
"removeAggregation", "removeAllAggregation", "destroyAggregation"].forEach(function (sFuncName) {
// First, they are saved for later reference
MessagePopover.prototype["_" + sFuncName + "Old"] = MessagePopover.prototype[sFuncName];
// Once they are called
MessagePopover.prototype[sFuncName] = function () {
// We immediately call the saved method first
var result = MessagePopover.prototype["_" + sFuncName + "Old"].apply(this, arguments);
// Then there is additional logic
// Mark items aggregation as changed and invalidate popover to trigger rendering
// See 'MessagePopover.prototype.onBeforeRenderingPopover'
this._bItemsChanged = true;
// If Popover dependency has already been instantiated ...
if (this._oPopover) {
// ... invalidate it
this._oPopover.invalidate();
}
// If the called method is 'removeAggregation' or 'removeAllAggregation' ...
if (["removeAggregation", "removeAllAggregation"].indexOf(sFuncName) !== -1) {
// ... return the result of the operation
return result;
}
return this;
};
});
return MessagePopover;
});
|
[INTERNAL] sap.m.MessagePopover: Documentation update
The documentation update is a result of users' feedback in DemoKit.
JIRA: BGSOFUIRILA-2152
Change-Id: If6f267f08703901b8f48e1f10ea054345844f551
|
src/sap.m/src/sap/m/MessagePopover.js
|
[INTERNAL] sap.m.MessagePopover: Documentation update
|
<ide><path>src/sap.m/src/sap/m/MessagePopover.js
<ide> * the total number of messages should be incremented, but the user's work shouldn't be interrupted.
<ide> * Navigation between the message item and the source of the error can be created, if needed by the application.
<ide> * This can be done by setting the <code>activeTitle</code> property to true and providing a handler for the <code>activeTitlePress</code> event.
<add> * In addition, you can achieve the same functionality inside a different container using the sap.m.MessageView control.
<ide> * <h3>Responsive Behavior</h3>
<ide> * On mobile phones, the message popover is automatically shown in full screen mode.<br>
<ide> * On desktop and tablet, the message popover opens in a popover.<br>
|
|
Java
|
artistic-2.0
|
2a986602a6b14cea8164a56e96d46e1d58f78453
| 0 |
jdownloader-mirror/appwork-utils
|
package org.appwork.utils.net.httpconnection;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PushbackInputStream;
import java.io.UnsupportedEncodingException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.URL;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.zip.GZIPInputStream;
import org.appwork.utils.LowerCaseHashMap;
import org.appwork.utils.Regex;
import org.appwork.utils.net.Base64InputStream;
import org.appwork.utils.net.ChunkedInputStream;
import org.appwork.utils.net.CountingOutputStream;
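/**
 * Plain-socket based HTTP(S) connection: resolves all IPs of the target host, optionally
 * binds to a local interface for direct proxies, sends the request headers itself and
 * parses the response status line, headers, chunked transfer encoding and
 * gzip/deflate/base64 content encodings.
 */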
public class HTTPConnectionImpl implements HTTPConnection {
protected LinkedHashMap<String, String> requestProperties = null;
protected long[] ranges;
protected String customcharset = null;
protected Socket httpSocket = null;
protected URL httpURL = null;
protected HTTPProxy proxy = null;
protected String httpPath = null;
protected RequestMethod httpMethod = RequestMethod.GET;
protected LowerCaseHashMap<List<String>> headers = null;
protected int httpResponseCode = -1;
protected String httpResponseMessage = "";
protected int readTimeout = 30000;
protected int connectTimeout = 30000;
protected long requestTime = -1;
protected OutputStream outputStream = null;
protected InputStream inputStream = null;
protected InputStream convertedInputStream = null;
protected boolean inputStreamConnected = false;
protected String httpHeader = null;
protected boolean outputClosed = false;
private boolean contentDecoded = true;
protected long postTodoLength = -1;
private int[] allowedResponseCodes = new int[0];
private InetSocketAddress proxyInetSocketAddress = null;
protected InetSocketAddress connectedInetSocketAddress = null;
public HTTPConnectionImpl(final URL url) {
this(url, null);
}
public HTTPConnectionImpl(final URL url, final HTTPProxy p) {
this.httpURL = url;
this.proxy = p;
this.requestProperties = new LinkedHashMap<String, String>();
this.headers = new LowerCaseHashMap<List<String>>();
}
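/*
 * A minimal, hypothetical usage sketch (the URL is illustrative only):
 *
 *   HTTPConnectionImpl con = new HTTPConnectionImpl(new URL("http://example.org/index.html"));
 *   con.connect();
 *   InputStream in = con.getInputStream();
 *   // ... read the response ...
 *   con.disconnect();
 */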
/* this will add Host header at the beginning */
protected void addHostHeader() {
final int defaultPort = this.httpURL.getDefaultPort();
final int usedPort = this.httpURL.getPort();
String port = "";
if (usedPort != -1 && defaultPort != -1 && usedPort != defaultPort) {
port = ":" + usedPort;
}
this.requestProperties.put("Host", this.httpURL.getHost() + port);
}
public void connect() throws IOException {
if (this.isConnected()) { return;/* or throw an error */
}
InetAddress hosts[] = null;
try {
/* resolve all possible IPs */
hosts = InetAddress.getAllByName(this.httpURL.getHost());
} catch (final UnknownHostException e) {
throw e;
}
/* try all resolved IPs until one is valid and connectable */
IOException ee = null;
for (final InetAddress host : hosts) {
if (this.httpURL.getProtocol().startsWith("https")) {
/* https */
this.httpSocket = TrustALLSSLFactory.getSSLFactoryTrustALL().createSocket();
} else {
/* http */
this.httpSocket = new Socket();
}
this.httpSocket.setSoTimeout(this.readTimeout);
this.httpResponseCode = -1;
int port = this.httpURL.getPort();
if (port == -1) {
port = this.httpURL.getDefaultPort();
}
final long startTime = System.currentTimeMillis();
if (this.proxy != null && this.proxy.isDirect()) {
/* bind socket to given interface */
try {
if (this.proxy.getLocalIP() == null) { throw new IOException("Invalid localIP"); }
this.httpSocket.bind(this.proxyInetSocketAddress = new InetSocketAddress(this.proxy.getLocalIP(), 0));
} catch (final IOException e) {
this.proxyInetSocketAddress = null;
throw new ProxyConnectException(e, this.proxy);
}
} else if (this.proxy != null && this.proxy.isNone()) {
/* none is also allowed here */
} else if (this.proxy != null) { throw new RuntimeException("Invalid Direct Proxy"); }
try {
/* try to connect to given host now */
this.httpSocket.connect(this.connectedInetSocketAddress = new InetSocketAddress(host, port), this.connectTimeout);
this.requestTime = System.currentTimeMillis() - startTime;
ee = null;
break;
} catch (final IOException e) {
this.connectedInetSocketAddress = null;
try {
this.httpSocket.close();
} catch (final Throwable nothing) {
}
ee = e;
}
}
if (ee != null) { throw ee; }
this.httpPath = new org.appwork.utils.Regex(this.httpURL.toString(), "https?://.*?(/.+)").getMatch(0);
if (this.httpPath == null) {
this.httpPath = "/";
}
/* now send Request */
this.sendRequest();
}
protected synchronized void connectInputStream() throws IOException {
if (this.httpMethod == RequestMethod.POST) {
final long done = ((CountingOutputStream) this.outputStream).transferedBytes();
if (done != this.postTodoLength) { throw new IOException("Content-Length " + this.postTodoLength + " does not match sent " + done + " bytes"); }
}
if (this.inputStreamConnected) { return; }
if (this.httpMethod == RequestMethod.POST) {
/* flush outputstream in case some buffers are not flushed yet */
this.outputStream.flush();
}
this.inputStreamConnected = true;
/* first read http header */
ByteBuffer header = HTTPConnectionUtils.readheader(this.httpSocket.getInputStream(), true);
byte[] bytes = new byte[header.limit()];
header.get(bytes);
this.httpHeader = new String(bytes, "ISO-8859-1").trim();
/* parse response code/message */
if (this.httpHeader.startsWith("HTTP")) {
final String code = new Regex(this.httpHeader, "HTTP.*? (\\d+)").getMatch(0);
if (code != null) {
this.httpResponseCode = Integer.parseInt(code);
}
this.httpResponseMessage = new Regex(this.httpHeader, "HTTP.*? \\d+ (.+)").getMatch(0);
if (this.httpResponseMessage == null) {
this.httpResponseMessage = "";
}
} else {
this.httpHeader = "unknown HTTP response";
this.httpResponseCode = 200;
this.httpResponseMessage = "unknown HTTP response";
if (bytes.length > 0) {
this.inputStream = new PushbackInputStream(this.httpSocket.getInputStream(), bytes.length);
/*
* push back the data that got read because no http header
* exists
*/
((PushbackInputStream) this.inputStream).unread(bytes);
} else {
/* nothing to push back */
this.inputStream = this.httpSocket.getInputStream();
}
return;
}
/* read rest of http headers */
header = HTTPConnectionUtils.readheader(this.httpSocket.getInputStream(), false);
bytes = new byte[header.limit()];
header.get(bytes);
String temp = new String(bytes, "UTF-8");
/* split the header into single lines; accept \r\n as well as bare \n (some servers are not RFC-compliant) */
String[] headerStrings = temp.split("(\r\n)|(\n)");
temp = null;
for (final String line : headerStrings) {
String key = null;
String value = null;
int index = 0;
if ((index = line.indexOf(": ")) > 0) {
key = line.substring(0, index);
value = line.substring(index + 2);
} else if ((index = line.indexOf(":")) > 0) {
/* buggy servers that send ':' without the following space */
key = line.substring(0, index);
value = line.substring(index + 1);
} else {
key = null;
value = line;
}
List<String> list = this.headers.get(key);
if (list == null) {
list = new ArrayList<String>();
this.headers.put(key, list);
}
list.add(value);
}
headerStrings = null;
final List<String> chunked = this.headers.get("Transfer-Encoding");
if (chunked != null && chunked.size() > 0 && "chunked".equalsIgnoreCase(chunked.get(0))) {
this.inputStream = new ChunkedInputStream(this.httpSocket.getInputStream());
} else {
this.inputStream = this.httpSocket.getInputStream();
}
}
public void disconnect() {
if (this.isConnected()) {
try {
this.httpSocket.close();
} catch (final Throwable e) {
}
}
}
/*
* (non-Javadoc)
*
* @see
* org.appwork.utils.net.httpconnection.HTTPConnection#finalizeConnect()
*/
@Override
public void finalizeConnect() throws IOException {
this.connect();
this.connectInputStream();
}
@Override
public int[] getAllowedResponseCodes() {
return this.allowedResponseCodes;
}
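/* Returns the custom charset if one was set, otherwise extracts the charset parameter from the Content-Type header (e.g. "text/html; charset=utf-8"). */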
public String getCharset() {
int i;
if (this.customcharset != null) { return this.customcharset; }
return this.getContentType() != null && (i = this.getContentType().toLowerCase().indexOf("charset=")) > 0 ? this.getContentType().substring(i + 8).trim() : null;
}
@Override
public long getCompleteContentLength() {
this.getRange();
if (this.ranges != null) { return this.ranges[2]; }
return this.getContentLength();
}
public long getContentLength() {
final String length = this.getHeaderField("Content-Length");
if (length != null) { return Long.parseLong(length); }
return -1;
}
public String getContentType() {
final String type = this.getHeaderField("Content-Type");
if (type == null) { return "unknown"; }
return type;
}
public String getHeaderField(final String string) {
final List<String> ret = this.headers.get(string);
if (ret == null || ret.size() == 0) { return null; }
return ret.get(0);
}
public Map<String, List<String>> getHeaderFields() {
return this.headers;
}
public List<String> getHeaderFields(final String string) {
final List<String> ret = this.headers.get(string);
if (ret == null || ret.size() == 0) { return null; }
return ret;
}
public InputStream getInputStream() throws IOException {
this.connect();
this.connectInputStream();
final int code = this.getResponseCode();
if (code >= 200 && code <= 400 || code == 404 || code == 403 || code == 416 || this.isResponseCodeAllowed(code)) {
if (this.convertedInputStream != null) { return this.convertedInputStream; }
if (this.contentDecoded) {
final String encodingTransfer = this.getHeaderField("Content-Transfer-Encoding");
if ("base64".equalsIgnoreCase(encodingTransfer)) {
/* base64 encoded content */
this.inputStream = new Base64InputStream(this.inputStream);
}
/* we convert different content-encodings to normal inputstream */
final String encoding = this.getHeaderField("Content-Encoding");
if (encoding == null || encoding.length() == 0 || "none".equalsIgnoreCase(encoding)) {
/* no encoding */
this.convertedInputStream = this.inputStream;
} else if ("gzip".equalsIgnoreCase(encoding)) {
/* gzip encoding */
this.convertedInputStream = new GZIPInputStream(this.inputStream);
} else if ("deflate".equalsIgnoreCase(encoding)) {
/* deflate encoding */
this.convertedInputStream = new java.util.zip.InflaterInputStream(this.inputStream);
} else {
/* unsupported */
throw new UnsupportedOperationException("Encoding " + encoding + " not supported!");
}
} else {
/* use original inputstream */
this.convertedInputStream = this.inputStream;
}
return this.convertedInputStream;
} else {
throw new IOException(this.getResponseCode() + " " + this.getResponseMessage());
}
}
public OutputStream getOutputStream() throws IOException {
this.connect();
if (this.outputClosed) { throw new IOException("OutputStream no longer available"); }
return this.outputStream;
}
public long[] getRange() {
String range;
if (this.ranges != null) { return this.ranges; }
if ((range = this.getHeaderField("Content-Range")) == null) { return null; }
// bytes 174239-735270911/735270912
final String[] ranges = new Regex(range, ".*?(\\d+).*?-.*?(\\d+).*?/.*?(\\d+)").getRow(0);
if (ranges == null) {
System.err.print(this + "");
return null;
}
this.ranges = new long[] { Long.parseLong(ranges[0]), Long.parseLong(ranges[1]), Long.parseLong(ranges[2]) };
return this.ranges;
}
protected String getRequestInfo() {
final StringBuilder sb = new StringBuilder();
sb.append("-->Host:").append(this.getURL().getHost()).append("\r\n");
if (this.connectedInetSocketAddress != null && this.connectedInetSocketAddress.getAddress() != null) {
sb.append("-->HostIP:").append(this.connectedInetSocketAddress.getAddress().getHostAddress()).append("\r\n");
}
if (this.proxyInetSocketAddress != null && this.proxyInetSocketAddress.getAddress() != null) {
sb.append("-->LocalIP:").append(this.proxyInetSocketAddress.getAddress().getHostAddress()).append("\r\n");
}
sb.append("----------------Request-------------------------\r\n");
sb.append(this.httpMethod.toString()).append(' ').append(this.httpPath).append(" HTTP/1.1\r\n");
for (final String key : this.getRequestProperties().keySet()) {
final String v = this.getRequestProperties().get(key);
if (v == null) {
continue;
}
sb.append(key);
sb.append(": ");
sb.append(v);
sb.append("\r\n");
}
return sb.toString();
}
public RequestMethod getRequestMethod() {
return this.httpMethod;
}
public Map<String, String> getRequestProperties() {
return this.requestProperties;
}
public String getRequestProperty(final String string) {
return this.requestProperties.get(string);
}
public long getRequestTime() {
return this.requestTime;
}
public int getResponseCode() {
return this.httpResponseCode;
}
protected String getResponseInfo() {
final StringBuilder sb = new StringBuilder();
sb.append("----------------Response------------------------\r\n");
try {
if (this.isConnected()) {
this.connectInputStream();
sb.append(this.httpHeader).append("\r\n");
for (final Entry<String, List<String>> next : this.getHeaderFields().entrySet()) {
                    // Attention: cookie order is important!!!
for (int i = next.getValue().size() - 1; i >= 0; i--) {
if (next.getKey() == null) {
sb.append(next.getValue().get(i));
sb.append("\r\n");
} else {
sb.append(next.getKey());
sb.append(": ");
sb.append(next.getValue().get(i));
sb.append("\r\n");
}
}
}
sb.append("------------------------------------------------\r\n");
} else {
sb.append("-------------not connected yet------------------");
}
} catch (final IOException nothing) {
sb.append("----------no InputStream available--------------");
}
sb.append("\r\n");
return sb.toString();
}
public String getResponseMessage() {
return this.httpResponseMessage;
}
public URL getURL() {
return this.httpURL;
}
public boolean isConnected() {
if (this.httpSocket != null && this.httpSocket.isConnected()) { return true; }
return false;
}
@Override
public boolean isContentDecoded() {
return this.contentDecoded;
}
public boolean isContentDisposition() {
return this.getHeaderField("Content-Disposition") != null;
}
public boolean isOK() {
if (this.getResponseCode() > -2 && this.getResponseCode() < 400) { return true; }
return false;
}
protected boolean isResponseCodeAllowed(final int code) {
for (final int c : this.allowedResponseCodes) {
if (c == code) { return true; }
}
return false;
}
protected void sendRequest() throws UnsupportedEncodingException, IOException {
/* now send Request */
final StringBuilder sb = new StringBuilder();
sb.append(this.httpMethod.name()).append(' ').append(this.httpPath).append(" HTTP/1.1\r\n");
boolean hostSet = false;
/* check if host entry does exist */
for (final String key : this.requestProperties.keySet()) {
if ("Host".equalsIgnoreCase(key)) {
hostSet = true;
break;
}
}
if (hostSet == false) {
/* host entry does not exist, lets add it as first entry */
this.addHostHeader();
}
for (final String key : this.requestProperties.keySet()) {
if (this.requestProperties.get(key) == null) {
continue;
}
if ("Content-Length".equalsIgnoreCase(key)) {
/* content length to check if we send out all data */
this.postTodoLength = Long.parseLong(this.requestProperties.get(key));
}
sb.append(key).append(": ").append(this.requestProperties.get(key)).append("\r\n");
}
sb.append("\r\n");
this.httpSocket.getOutputStream().write(sb.toString().getBytes("UTF-8"));
this.httpSocket.getOutputStream().flush();
if (this.httpMethod != RequestMethod.POST) {
this.outputStream = this.httpSocket.getOutputStream();
this.outputClosed = true;
this.connectInputStream();
} else {
this.outputStream = new CountingOutputStream(this.httpSocket.getOutputStream());
}
}
@Override
public void setAllowedResponseCodes(final int[] codes) {
if (codes == null) { throw new IllegalArgumentException("codes==null"); }
this.allowedResponseCodes = codes;
}
public void setCharset(final String Charset) {
this.customcharset = Charset;
}
public void setConnectTimeout(final int connectTimeout) {
this.connectTimeout = connectTimeout;
}
@Override
public void setContentDecoded(final boolean b) {
if (this.convertedInputStream != null) { throw new IllegalStateException("InputStream already in use!"); }
this.contentDecoded = b;
}
public void setReadTimeout(final int readTimeout) {
try {
if (this.isConnected()) {
this.httpSocket.setSoTimeout(readTimeout);
}
this.readTimeout = readTimeout;
} catch (final Throwable e) {
e.printStackTrace();
}
}
public void setRequestMethod(final RequestMethod method) {
this.httpMethod = method;
}
public void setRequestProperty(final String key, final String value) {
this.requestProperties.put(key, value);
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder();
sb.append(this.getRequestInfo());
sb.append(this.getResponseInfo());
return sb.toString();
}
}
|
src/org/appwork/utils/net/httpconnection/HTTPConnectionImpl.java
|
package org.appwork.utils.net.httpconnection;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PushbackInputStream;
import java.io.UnsupportedEncodingException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.URL;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.zip.GZIPInputStream;
import org.appwork.utils.LowerCaseHashMap;
import org.appwork.utils.Regex;
import org.appwork.utils.net.Base64InputStream;
import org.appwork.utils.net.ChunkedInputStream;
import org.appwork.utils.net.CountingOutputStream;
public class HTTPConnectionImpl implements HTTPConnection {
protected LinkedHashMap<String, String> requestProperties = null;
protected long[] ranges;
protected String customcharset = null;
protected Socket httpSocket = null;
protected URL httpURL = null;
protected HTTPProxy proxy = null;
protected String httpPath = null;
protected RequestMethod httpMethod = RequestMethod.GET;
protected LowerCaseHashMap<List<String>> headers = null;
protected int httpResponseCode = -1;
protected String httpResponseMessage = "";
protected int readTimeout = 30000;
protected int connectTimeout = 30000;
protected long requestTime = -1;
protected OutputStream outputStream = null;
protected InputStream inputStream = null;
protected InputStream convertedInputStream = null;
protected boolean inputStreamConnected = false;
protected String httpHeader = null;
protected boolean outputClosed = false;
private boolean contentDecoded = true;
protected long postTodoLength = -1;
private int[] allowedResponseCodes = new int[0];
private InetSocketAddress proxyInetSocketAddress = null;
protected InetSocketAddress connectedInetSocketAddress = null;
public HTTPConnectionImpl(final URL url) {
this(url, null);
}
public HTTPConnectionImpl(final URL url, final HTTPProxy p) {
this.httpURL = url;
this.proxy = p;
this.requestProperties = new LinkedHashMap<String, String>();
this.headers = new LowerCaseHashMap<List<String>>();
}
/* this will add Host header at the beginning */
protected void addHostHeader() {
final int defaultPort = this.httpURL.getDefaultPort();
final int usedPort = this.httpURL.getPort();
String port = "";
if (usedPort != -1 && defaultPort != -1 && usedPort != defaultPort) {
port = ":" + usedPort;
}
this.requestProperties.put("Host", this.httpURL.getHost() + port);
}
public void connect() throws IOException {
        if (this.isConnected()) { return;/* or throw an error */
}
InetAddress hosts[] = null;
try {
/* resolv all possible ip's */
hosts = InetAddress.getAllByName(this.httpURL.getHost());
} catch (final UnknownHostException e) {
throw e;
}
/* try all different ip's until one is valid and connectable */
IOException ee = null;
for (final InetAddress host : hosts) {
if (this.httpURL.getProtocol().startsWith("https")) {
/* https */
this.httpSocket = TrustALLSSLFactory.getSSLFactoryTrustALL().createSocket();
} else {
/* http */
this.httpSocket = new Socket();
}
this.httpSocket.setSoTimeout(this.readTimeout);
this.httpResponseCode = -1;
int port = this.httpURL.getPort();
if (port == -1) {
port = this.httpURL.getDefaultPort();
}
final long startTime = System.currentTimeMillis();
if (this.proxy != null && this.proxy.isDirect()) {
/* bind socket to given interface */
try {
if (this.proxy.getLocalIP() == null) { throw new IOException("Invalid localIP"); }
this.httpSocket.bind(this.proxyInetSocketAddress = new InetSocketAddress(this.proxy.getLocalIP(), 0));
} catch (final IOException e) {
this.proxyInetSocketAddress = null;
throw new ProxyConnectException(e, this.proxy);
}
} else if (this.proxy != null && this.proxy.isNone()) {
/* none is also allowed here */
} else if (this.proxy != null) { throw new RuntimeException("Invalid Direct Proxy"); }
try {
/* try to connect to given host now */
this.httpSocket.connect(this.connectedInetSocketAddress = new InetSocketAddress(host, port), this.connectTimeout);
this.requestTime = System.currentTimeMillis() - startTime;
ee = null;
break;
} catch (final IOException e) {
this.connectedInetSocketAddress = null;
try {
this.httpSocket.close();
} catch (final Throwable nothing) {
}
ee = e;
}
}
if (ee != null) { throw ee; }
this.httpPath = new org.appwork.utils.Regex(this.httpURL.toString(), "https?://.*?(/.+)").getMatch(0);
if (this.httpPath == null) {
this.httpPath = "/";
}
/* now send Request */
this.sendRequest();
}
protected synchronized void connectInputStream() throws IOException {
if (this.httpMethod == RequestMethod.POST) {
final long done = ((CountingOutputStream) this.outputStream).transferedBytes();
            if (done != this.postTodoLength) { throw new IOException("Content-Length " + this.postTodoLength + " does not match sent " + done + " bytes"); }
}
if (this.inputStreamConnected) { return; }
if (this.httpMethod == RequestMethod.POST) {
/* flush outputstream in case some buffers are not flushed yet */
this.outputStream.flush();
}
this.inputStreamConnected = true;
/* first read http header */
ByteBuffer header = HTTPConnectionUtils.readheader(this.httpSocket.getInputStream(), true);
byte[] bytes = new byte[header.limit()];
header.get(bytes);
this.httpHeader = new String(bytes, "ISO-8859-1").trim();
/* parse response code/message */
if (this.httpHeader.startsWith("HTTP")) {
final String code = new Regex(this.httpHeader, "HTTP.*? (\\d+)").getMatch(0);
if (code != null) {
this.httpResponseCode = Integer.parseInt(code);
}
this.httpResponseMessage = new Regex(this.httpHeader, "HTTP.*? \\d+ (.+)").getMatch(0);
if (this.httpResponseMessage == null) {
this.httpResponseMessage = "";
}
} else {
this.httpHeader = "unknown HTTP response";
this.httpResponseCode = 200;
this.httpResponseMessage = "unknown HTTP response";
if (bytes.length > 0) {
this.inputStream = new PushbackInputStream(this.httpSocket.getInputStream(), bytes.length);
/*
* push back the data that got read because no http header
* exists
*/
((PushbackInputStream) this.inputStream).unread(bytes);
} else {
/* nothing to push back */
this.inputStream = this.httpSocket.getInputStream();
}
return;
}
/* read rest of http headers */
header = HTTPConnectionUtils.readheader(this.httpSocket.getInputStream(), false);
bytes = new byte[header.limit()];
header.get(bytes);
String temp = new String(bytes, "UTF-8");
        /* split the header into single lines; accept either \r\n or a bare \n (sent by buggy, non-RFC-compliant servers) */
String[] headerStrings = temp.split("(\r\n)|(\n)");
temp = null;
for (final String line : headerStrings) {
String key = null;
String value = null;
int index = 0;
if ((index = line.indexOf(": ")) > 0) {
key = line.substring(0, index);
value = line.substring(index + 2);
} else if ((index = line.indexOf(":")) > 0) {
/* buggy servers that don't have :space ARG */
key = line.substring(0, index);
value = line.substring(index + 1);
} else {
key = null;
value = line;
}
List<String> list = this.headers.get(key);
if (list == null) {
list = new ArrayList<String>();
this.headers.put(key, list);
}
list.add(value);
}
headerStrings = null;
final List<String> chunked = this.headers.get("Transfer-Encoding");
if (chunked != null && chunked.size() > 0 && "chunked".equalsIgnoreCase(chunked.get(0))) {
this.inputStream = new ChunkedInputStream(this.httpSocket.getInputStream());
} else {
this.inputStream = this.httpSocket.getInputStream();
}
}
public void disconnect() {
if (this.isConnected()) {
try {
this.httpSocket.close();
} catch (final Throwable e) {
}
}
}
/*
* (non-Javadoc)
*
* @see
* org.appwork.utils.net.httpconnection.HTTPConnection#finalizeConnect()
*/
@Override
public void finalizeConnect() throws IOException {
this.connect();
this.connectInputStream();
}
@Override
public int[] getAllowedResponseCodes() {
return this.allowedResponseCodes;
}
public String getCharset() {
int i;
if (this.customcharset != null) { return this.customcharset; }
return this.getContentType() != null && (i = this.getContentType().toLowerCase().indexOf("charset=")) > 0 ? this.getContentType().substring(i + 8).trim() : null;
}
@Override
public long getCompleteContentLength() {
this.getRange();
if (this.ranges != null) { return this.ranges[2]; }
return this.getContentLength();
}
public long getContentLength() {
final String length = this.getHeaderField("Content-Length");
if (length != null) { return Long.parseLong(length); }
return -1;
}
public String getContentType() {
final String type = this.getHeaderField("Content-Type");
if (type == null) { return "unknown"; }
return type;
}
public String getHeaderField(final String string) {
final List<String> ret = this.headers.get(string);
if (ret == null || ret.size() == 0) { return null; }
return ret.get(0);
}
public Map<String, List<String>> getHeaderFields() {
return this.headers;
}
public List<String> getHeaderFields(final String string) {
final List<String> ret = this.headers.get(string);
if (ret == null || ret.size() == 0) { return null; }
return ret;
}
public InputStream getInputStream() throws IOException {
this.connect();
this.connectInputStream();
final int code = this.getResponseCode();
if (code >= 200 && code <= 400 || code == 404 || code == 403 || this.isResponseCodeAllowed(code)) {
if (this.convertedInputStream != null) { return this.convertedInputStream; }
if (this.contentDecoded) {
final String encodingTransfer = this.getHeaderField("Content-Transfer-Encoding");
if ("base64".equalsIgnoreCase(encodingTransfer)) {
/* base64 encoded content */
this.inputStream = new Base64InputStream(this.inputStream);
}
/* we convert different content-encodings to normal inputstream */
final String encoding = this.getHeaderField("Content-Encoding");
if (encoding == null || encoding.length() == 0 || "none".equalsIgnoreCase(encoding)) {
/* no encoding */
this.convertedInputStream = this.inputStream;
} else if ("gzip".equalsIgnoreCase(encoding)) {
/* gzip encoding */
this.convertedInputStream = new GZIPInputStream(this.inputStream);
} else if ("deflate".equalsIgnoreCase(encoding)) {
/* deflate encoding */
this.convertedInputStream = new java.util.zip.DeflaterInputStream(this.inputStream);
} else {
/* unsupported */
throw new UnsupportedOperationException("Encoding " + encoding + " not supported!");
}
} else {
/* use original inputstream */
this.convertedInputStream = this.inputStream;
}
return this.convertedInputStream;
} else {
throw new IOException(this.getResponseCode() + " " + this.getResponseMessage());
}
}
public OutputStream getOutputStream() throws IOException {
this.connect();
if (this.outputClosed) { throw new IOException("OutputStream no longer available"); }
return this.outputStream;
}
public long[] getRange() {
String range;
if (this.ranges != null) { return this.ranges; }
if ((range = this.getHeaderField("Content-Range")) == null) { return null; }
// bytes 174239-735270911/735270912
final String[] ranges = new Regex(range, ".*?(\\d+).*?-.*?(\\d+).*?/.*?(\\d+)").getRow(0);
if (ranges == null) {
System.err.print(this + "");
return null;
}
this.ranges = new long[] { Long.parseLong(ranges[0]), Long.parseLong(ranges[1]), Long.parseLong(ranges[2]) };
return this.ranges;
}
protected String getRequestInfo() {
final StringBuilder sb = new StringBuilder();
sb.append("-->Host:").append(this.getURL().getHost()).append("\r\n");
if (this.connectedInetSocketAddress != null && this.connectedInetSocketAddress.getAddress() != null) {
sb.append("-->HostIP:").append(this.connectedInetSocketAddress.getAddress().getHostAddress()).append("\r\n");
}
if (this.proxyInetSocketAddress != null && this.proxyInetSocketAddress.getAddress() != null) {
sb.append("-->LocalIP:").append(this.proxyInetSocketAddress.getAddress().getHostAddress()).append("\r\n");
}
sb.append("----------------Request-------------------------\r\n");
sb.append(this.httpMethod.toString()).append(' ').append(this.httpPath).append(" HTTP/1.1\r\n");
for (final String key : this.getRequestProperties().keySet()) {
final String v = this.getRequestProperties().get(key);
if (v == null) {
continue;
}
sb.append(key);
sb.append(": ");
sb.append(v);
sb.append("\r\n");
}
return sb.toString();
}
public RequestMethod getRequestMethod() {
return this.httpMethod;
}
public Map<String, String> getRequestProperties() {
return this.requestProperties;
}
public String getRequestProperty(final String string) {
return this.requestProperties.get(string);
}
public long getRequestTime() {
return this.requestTime;
}
public int getResponseCode() {
return this.httpResponseCode;
}
protected String getResponseInfo() {
final StringBuilder sb = new StringBuilder();
sb.append("----------------Response------------------------\r\n");
try {
if (this.isConnected()) {
this.connectInputStream();
sb.append(this.httpHeader).append("\r\n");
for (final Entry<String, List<String>> next : this.getHeaderFields().entrySet()) {
                    // Attention: cookie order is important!!!
for (int i = next.getValue().size() - 1; i >= 0; i--) {
if (next.getKey() == null) {
sb.append(next.getValue().get(i));
sb.append("\r\n");
} else {
sb.append(next.getKey());
sb.append(": ");
sb.append(next.getValue().get(i));
sb.append("\r\n");
}
}
}
sb.append("------------------------------------------------\r\n");
} else {
sb.append("-------------not connected yet------------------");
}
} catch (final IOException nothing) {
sb.append("----------no InputStream available--------------");
}
sb.append("\r\n");
return sb.toString();
}
public String getResponseMessage() {
return this.httpResponseMessage;
}
public URL getURL() {
return this.httpURL;
}
public boolean isConnected() {
if (this.httpSocket != null && this.httpSocket.isConnected()) { return true; }
return false;
}
@Override
public boolean isContentDecoded() {
return this.contentDecoded;
}
public boolean isContentDisposition() {
return this.getHeaderField("Content-Disposition") != null;
}
public boolean isOK() {
if (this.getResponseCode() > -2 && this.getResponseCode() < 400) { return true; }
return false;
}
protected boolean isResponseCodeAllowed(final int code) {
for (final int c : this.allowedResponseCodes) {
if (c == code) { return true; }
}
return false;
}
protected void sendRequest() throws UnsupportedEncodingException, IOException {
/* now send Request */
final StringBuilder sb = new StringBuilder();
sb.append(this.httpMethod.name()).append(' ').append(this.httpPath).append(" HTTP/1.1\r\n");
boolean hostSet = false;
/* check if host entry does exist */
for (final String key : this.requestProperties.keySet()) {
if ("Host".equalsIgnoreCase(key)) {
hostSet = true;
break;
}
}
if (hostSet == false) {
/* host entry does not exist, lets add it as first entry */
this.addHostHeader();
}
for (final String key : this.requestProperties.keySet()) {
if (this.requestProperties.get(key) == null) {
continue;
}
if ("Content-Length".equalsIgnoreCase(key)) {
/* content length to check if we send out all data */
this.postTodoLength = Long.parseLong(this.requestProperties.get(key));
}
sb.append(key).append(": ").append(this.requestProperties.get(key)).append("\r\n");
}
sb.append("\r\n");
this.httpSocket.getOutputStream().write(sb.toString().getBytes("UTF-8"));
this.httpSocket.getOutputStream().flush();
if (this.httpMethod != RequestMethod.POST) {
this.outputStream = this.httpSocket.getOutputStream();
this.outputClosed = true;
this.connectInputStream();
} else {
this.outputStream = new CountingOutputStream(this.httpSocket.getOutputStream());
}
}
@Override
public void setAllowedResponseCodes(final int[] codes) {
if (codes == null) { throw new IllegalArgumentException("codes==null"); }
this.allowedResponseCodes = codes;
}
public void setCharset(final String Charset) {
this.customcharset = Charset;
}
public void setConnectTimeout(final int connectTimeout) {
this.connectTimeout = connectTimeout;
}
@Override
public void setContentDecoded(final boolean b) {
if (this.convertedInputStream != null) { throw new IllegalStateException("InputStream already in use!"); }
this.contentDecoded = b;
}
public void setReadTimeout(final int readTimeout) {
try {
if (this.isConnected()) {
this.httpSocket.setSoTimeout(readTimeout);
}
this.readTimeout = readTimeout;
} catch (final Throwable e) {
e.printStackTrace();
}
}
public void setRequestMethod(final RequestMethod method) {
this.httpMethod = method;
}
public void setRequestProperty(final String key, final String value) {
this.requestProperties.put(key, value);
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder();
sb.append(this.getRequestInfo());
sb.append(this.getResponseInfo());
return sb.toString();
}
}
|
HTTPConnectionImpl: made http 416 readable
|
src/org/appwork/utils/net/httpconnection/HTTPConnectionImpl.java
|
HTTPConnectionImpl: made http 416 readable
|
<ide><path>rc/org/appwork/utils/net/httpconnection/HTTPConnectionImpl.java
<ide> this.connect();
<ide> this.connectInputStream();
<ide> final int code = this.getResponseCode();
<del> if (code >= 200 && code <= 400 || code == 404 || code == 403 || this.isResponseCodeAllowed(code)) {
<add> if (code >= 200 && code <= 400 || code == 404 || code == 403 || code == 416 || this.isResponseCodeAllowed(code)) {
<ide> if (this.convertedInputStream != null) { return this.convertedInputStream; }
<ide> if (this.contentDecoded) {
<ide> final String encodingTransfer = this.getHeaderField("Content-Transfer-Encoding");
|
|
Java
|
apache-2.0
|
ac3d989069ea50f1f44f4f7cd85d48ca8070a49b
| 0 |
Wajihulhassan/Hadoop-2.7.0,Wajihulhassan/Hadoop-2.7.0,Wajihulhassan/Hadoop-2.7.0,Wajihulhassan/Hadoop-2.7.0,Wajihulhassan/Hadoop-2.7.0,Wajihulhassan/Hadoop-2.7.0,Wajihulhassan/Hadoop-2.7.0,Wajihulhassan/Hadoop-2.7.0
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.QueueACL;
import org.apache.hadoop.yarn.api.records.QueueInfo;
import org.apache.hadoop.yarn.api.records.QueueUserACLInfo;
import org.apache.hadoop.yarn.api.records.ReservationId;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceOption;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore.RMState;
import org.apache.hadoop.yarn.server.resourcemanager.reservation.ReservationConstants;
import org.apache.hadoop.yarn.server.resourcemanager.resource.ResourceWeights;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppEventType;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppRejectedEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptEventType;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerEventType;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerState;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.UpdatedContainerInfo;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.AbstractYarnScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ActiveUsersManager;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.Allocation;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.QueueMetrics;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerApplication;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerApplicationAttempt.ContainersAndNMTokensAllocation;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerUtils;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.QueueEntitlement;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAddedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAttemptAddedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAttemptRemovedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppRemovedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.ContainerExpiredSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeAddedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeRemovedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeResourceUpdateSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeUpdateSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.security.RMContainerTokenSecretManager;
import org.apache.hadoop.yarn.util.Clock;
import org.apache.hadoop.yarn.util.SystemClock;
import org.apache.hadoop.yarn.util.resource.DefaultResourceCalculator;
import org.apache.hadoop.yarn.util.resource.DominantResourceCalculator;
import org.apache.hadoop.yarn.util.resource.ResourceCalculator;
import org.apache.hadoop.yarn.util.resource.Resources;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
/**
* A scheduler that schedules resources between a set of queues. The scheduler
* keeps track of the resources used by each queue, and attempts to maintain
* fairness by scheduling tasks at queues whose allocations are farthest below
* an ideal fair distribution.
*
* The fair scheduler supports hierarchical queues. All queues descend from a
* queue named "root". Available resources are distributed among the children
* of the root queue in the typical fair scheduling fashion. Then, the children
* distribute the resources assigned to them to their children in the same
* fashion. Applications may only be scheduled on leaf queues. Queues can be
* specified as children of other queues by placing them as sub-elements of their
* parents in the fair scheduler configuration file.
*
* A queue's name starts with the names of its parents, with periods as
* separators. So a queue named "queue1" under the root named, would be
* referred to as "root.queue1", and a queue named "queue2" under a queue
* named "parent1" would be referred to as "root.parent1.queue2".
*/
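// Illustrative sketch (assumed allocation-file fragment, not from the original
// source) matching the naming scheme described above:
//
//   <allocations>
//     <queue name="parent1">
//       <queue name="queue2"/>   <!-- addressed as root.parent1.queue2 -->
//     </queue>
//     <queue name="queue1"/>     <!-- addressed as root.queue1 -->
//   </allocations>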
@LimitedPrivate("yarn")
@Unstable
@SuppressWarnings("unchecked")
public class FairScheduler extends
AbstractYarnScheduler<FSAppAttempt, FSSchedulerNode> {
private FairSchedulerConfiguration conf;
private Resource incrAllocation;
private QueueManager queueMgr;
private volatile Clock clock;
private boolean usePortForNodeName;
private static final Log LOG = LogFactory.getLog(FairScheduler.class);
private static final ResourceCalculator RESOURCE_CALCULATOR =
new DefaultResourceCalculator();
private static final ResourceCalculator DOMINANT_RESOURCE_CALCULATOR =
new DominantResourceCalculator();
// Value that container assignment methods return when a container is
// reserved
public static final Resource CONTAINER_RESERVED = Resources.createResource(-1);
// How often fair shares are re-calculated (ms)
protected long updateInterval;
private final int UPDATE_DEBUG_FREQUENCY = 5;
private int updatesToSkipForDebug = UPDATE_DEBUG_FREQUENCY;
@VisibleForTesting
Thread updateThread;
@VisibleForTesting
Thread schedulingThread;
// timeout to join when we stop this service
protected final long THREAD_JOIN_TIMEOUT_MS = 1000;
// Aggregate metrics
FSQueueMetrics rootMetrics;
FSOpDurations fsOpDurations;
// Time when we last updated preemption vars
protected long lastPreemptionUpdateTime;
// Time we last ran preemptTasksIfNecessary
private long lastPreemptCheckTime;
// Preemption related variables
protected boolean preemptionEnabled;
protected float preemptionUtilizationThreshold;
// How often tasks are preempted
protected long preemptionInterval;
// ms to wait before force killing stuff (must be longer than a couple
// of heartbeats to give task-kill commands a chance to act).
protected long waitTimeBeforeKill;
// Containers whose AMs have been warned that they will be preempted soon.
private List<RMContainer> warnedContainers = new ArrayList<RMContainer>();
protected boolean sizeBasedWeight; // Give larger weights to larger jobs
protected WeightAdjuster weightAdjuster; // Can be null for no weight adjuster
protected boolean continuousSchedulingEnabled; // Continuous Scheduling enabled or not
protected int continuousSchedulingSleepMs; // Sleep time for each pass in continuous scheduling
private Comparator<NodeId> nodeAvailableResourceComparator =
new NodeAvailableResourceComparator(); // Node available resource comparator
protected double nodeLocalityThreshold; // Cluster threshold for node locality
protected double rackLocalityThreshold; // Cluster threshold for rack locality
protected long nodeLocalityDelayMs; // Delay for node locality
protected long rackLocalityDelayMs; // Delay for rack locality
private FairSchedulerEventLog eventLog; // Machine-readable event log
protected boolean assignMultiple; // Allocate multiple containers per
// heartbeat
protected int maxAssign; // Max containers to assign per heartbeat
/* Start -Wajih Measuring decision timings*/
public int dec_array_size=10000;
public int[] decision_time;
public long no_of_decisions;
/* End - Wajih*/
@VisibleForTesting
final MaxRunningAppsEnforcer maxRunningEnforcer;
private AllocationFileLoaderService allocsLoader;
@VisibleForTesting
AllocationConfiguration allocConf;
public FairScheduler() {
super(FairScheduler.class.getName());
clock = new SystemClock();
allocsLoader = new AllocationFileLoaderService();
queueMgr = new QueueManager(this);
maxRunningEnforcer = new MaxRunningAppsEnforcer(this);
/*Start Wajih Measuring decision timings*/
decision_time = new int[dec_array_size];
/* End Wajih */
}
private void validateConf(Configuration conf) {
// validate scheduler memory allocation setting
int minMem = conf.getInt(
YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB,
YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB);
int maxMem = conf.getInt(
YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_MB,
YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB);
if (minMem < 0 || minMem > maxMem) {
throw new YarnRuntimeException("Invalid resource scheduler memory"
+ " allocation configuration"
+ ", " + YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB
+ "=" + minMem
+ ", " + YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_MB
+ "=" + maxMem + ", min should equal greater than 0"
+ ", max should be no smaller than min.");
}
// validate scheduler vcores allocation setting
int minVcores = conf.getInt(
YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES,
YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES);
int maxVcores = conf.getInt(
YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES,
YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES);
if (minVcores < 0 || minVcores > maxVcores) {
throw new YarnRuntimeException("Invalid resource scheduler vcores"
+ " allocation configuration"
+ ", " + YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES
+ "=" + minVcores
+ ", " + YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES
+ "=" + maxVcores + ", min should equal greater than 0"
+ ", max should be no smaller than min.");
}
}
public FairSchedulerConfiguration getConf() {
return conf;
}
public QueueManager getQueueManager() {
return queueMgr;
}
/**
* Thread which calls {@link FairScheduler#update()} every
* <code>updateInterval</code> milliseconds.
*/
private class UpdateThread extends Thread {
@Override
public void run() {
while (!Thread.currentThread().isInterrupted()) {
try {
Thread.sleep(updateInterval);
long start = getClock().getTime();
update();
preemptTasksIfNecessary();
long duration = getClock().getTime() - start;
fsOpDurations.addUpdateThreadRunDuration(duration);
} catch (InterruptedException ie) {
LOG.warn("Update thread interrupted. Exiting.");
return;
} catch (Exception e) {
LOG.error("Exception in fair scheduler UpdateThread", e);
}
}
}
}
/**
* Thread which attempts scheduling resources continuously,
* asynchronous to the node heartbeats.
*/
private class ContinuousSchedulingThread extends Thread {
@Override
public void run() {
while (!Thread.currentThread().isInterrupted()) {
try {
continuousSchedulingAttempt();
Thread.sleep(getContinuousSchedulingSleepMs());
} catch (InterruptedException e) {
LOG.warn("Continuous scheduling thread interrupted. Exiting.", e);
return;
}
}
}
}
/**
* Recompute the internal variables used by the scheduler - per-job weights,
* fair shares, deficits, minimum slot allocations, and amount of used and
* required resources per job.
*/
protected synchronized void update() {
long start = getClock().getTime();
updateStarvationStats(); // Determine if any queues merit preemption
FSQueue rootQueue = queueMgr.getRootQueue();
// Recursively update demands for all queues
rootQueue.updateDemand();
rootQueue.setFairShare(clusterResource);
// Recursively compute fair shares for all queues
// and update metrics
rootQueue.recomputeShares();
updateRootQueueMetrics();
if (LOG.isDebugEnabled()) {
if (--updatesToSkipForDebug < 0) {
updatesToSkipForDebug = UPDATE_DEBUG_FREQUENCY;
LOG.debug("Cluster Capacity: " + clusterResource +
" Allocations: " + rootMetrics.getAllocatedResources() +
" Availability: " + Resource.newInstance(
rootMetrics.getAvailableMB(),
rootMetrics.getAvailableVirtualCores()) +
" Demand: " + rootQueue.getDemand());
}
}
long duration = getClock().getTime() - start;
fsOpDurations.addUpdateCallDuration(duration);
}
/**
* Update the preemption fields for all QueueScheduables, i.e. the times since
* each queue last was at its guaranteed share and over its fair share
* threshold for each type of task.
*/
private void updateStarvationStats() {
lastPreemptionUpdateTime = clock.getTime();
for (FSLeafQueue sched : queueMgr.getLeafQueues()) {
sched.updateStarvationStats();
}
}
/**
* Check for queues that need tasks preempted, either because they have been
* below their guaranteed share for minSharePreemptionTimeout or they have
* been below their fair share threshold for the fairSharePreemptionTimeout. If
* such queues exist, compute how many tasks of each type need to be preempted
* and then select the right ones using preemptTasks.
*/
protected synchronized void preemptTasksIfNecessary() {
if (!shouldAttemptPreemption()) {
return;
}
long curTime = getClock().getTime();
if (curTime - lastPreemptCheckTime < preemptionInterval) {
return;
}
lastPreemptCheckTime = curTime;
Resource resToPreempt = Resources.clone(Resources.none());
for (FSLeafQueue sched : queueMgr.getLeafQueues()) {
Resources.addTo(resToPreempt, resToPreempt(sched, curTime));
}
if (Resources.greaterThan(RESOURCE_CALCULATOR, clusterResource, resToPreempt,
Resources.none())) {
preemptResources(resToPreempt);
}
}
/**
* Preempt a quantity of resources. Each round, we start from the root queue,
* level-by-level, until choosing a candidate application.
* The policy for prioritizing preemption for each queue depends on its
* SchedulingPolicy: (1) fairshare/DRF, choose the ChildSchedulable that is
* most over its fair share; (2) FIFO, choose the childSchedulable that is
* latest launched.
* Inside each application, we further prioritize preemption by choosing
* containers with lowest priority to preempt.
* We make sure that no queue is placed below its fair share in the process.
*/
protected void preemptResources(Resource toPreempt) {
long start = getClock().getTime();
if (Resources.equals(toPreempt, Resources.none())) {
return;
}
// Scan down the list of containers we've already warned and kill them
// if we need to. Remove any containers from the list that we don't need
// or that are no longer running.
Iterator<RMContainer> warnedIter = warnedContainers.iterator();
while (warnedIter.hasNext()) {
RMContainer container = warnedIter.next();
if ((container.getState() == RMContainerState.RUNNING ||
container.getState() == RMContainerState.ALLOCATED) &&
Resources.greaterThan(RESOURCE_CALCULATOR, clusterResource,
toPreempt, Resources.none())) {
warnOrKillContainer(container);
Resources.subtractFrom(toPreempt, container.getContainer().getResource());
} else {
warnedIter.remove();
}
}
try {
// Reset preemptedResource for each app
for (FSLeafQueue queue : getQueueManager().getLeafQueues()) {
queue.resetPreemptedResources();
}
while (Resources.greaterThan(RESOURCE_CALCULATOR, clusterResource,
toPreempt, Resources.none())) {
RMContainer container =
getQueueManager().getRootQueue().preemptContainer();
if (container == null) {
break;
} else {
warnOrKillContainer(container);
warnedContainers.add(container);
Resources.subtractFrom(
toPreempt, container.getContainer().getResource());
}
}
} finally {
// Clear preemptedResources for each app
for (FSLeafQueue queue : getQueueManager().getLeafQueues()) {
queue.clearPreemptedResources();
}
}
long duration = getClock().getTime() - start;
fsOpDurations.addPreemptCallDuration(duration);
}
protected void warnOrKillContainer(RMContainer container) {
ApplicationAttemptId appAttemptId = container.getApplicationAttemptId();
FSAppAttempt app = getSchedulerApp(appAttemptId);
FSLeafQueue queue = app.getQueue();
LOG.info("Preempting container (prio=" + container.getContainer().getPriority() +
"res=" + container.getContainer().getResource() +
") from queue " + queue.getName());
Long time = app.getContainerPreemptionTime(container);
if (time != null) {
      // if we asked for preemption more than waitTimeBeforeKill ms ago,
      // proceed with the kill
if (time + waitTimeBeforeKill < getClock().getTime()) {
ContainerStatus status =
SchedulerUtils.createPreemptedContainerStatus(
container.getContainerId(), SchedulerUtils.PREEMPTED_CONTAINER);
recoverResourceRequestForContainer(container);
// TODO: Not sure if this ever actually adds this to the list of cleanup
// containers on the RMNode (see SchedulerNode.releaseContainer()).
completedContainer(container, status, RMContainerEventType.KILL);
LOG.info("Killing container" + container +
" (after waiting for premption for " +
(getClock().getTime() - time) + "ms)");
}
} else {
// track the request in the FSAppAttempt itself
app.addPreemption(container, getClock().getTime());
}
}
/**
* Return the resource amount that this queue is allowed to preempt, if any.
* If the queue has been below its min share for at least its preemption
* timeout, it should preempt the difference between its current share and
* this min share. If it has been below its fair share preemption threshold
* for at least the fairSharePreemptionTimeout, it should preempt enough tasks
* to get up to its full fair share. If both conditions hold, we preempt the
* max of the two amounts (this shouldn't happen unless someone sets the
* timeouts to be identical for some reason).
*/
protected Resource resToPreempt(FSLeafQueue sched, long curTime) {
long minShareTimeout = sched.getMinSharePreemptionTimeout();
long fairShareTimeout = sched.getFairSharePreemptionTimeout();
Resource resDueToMinShare = Resources.none();
Resource resDueToFairShare = Resources.none();
if (curTime - sched.getLastTimeAtMinShare() > minShareTimeout) {
Resource target = Resources.min(RESOURCE_CALCULATOR, clusterResource,
sched.getMinShare(), sched.getDemand());
resDueToMinShare = Resources.max(RESOURCE_CALCULATOR, clusterResource,
Resources.none(), Resources.subtract(target, sched.getResourceUsage()));
}
if (curTime - sched.getLastTimeAtFairShareThreshold() > fairShareTimeout) {
Resource target = Resources.min(RESOURCE_CALCULATOR, clusterResource,
sched.getFairShare(), sched.getDemand());
resDueToFairShare = Resources.max(RESOURCE_CALCULATOR, clusterResource,
Resources.none(), Resources.subtract(target, sched.getResourceUsage()));
}
Resource resToPreempt = Resources.max(RESOURCE_CALCULATOR, clusterResource,
resDueToMinShare, resDueToFairShare);
if (Resources.greaterThan(RESOURCE_CALCULATOR, clusterResource,
resToPreempt, Resources.none())) {
String message = "Should preempt " + resToPreempt + " res for queue "
+ sched.getName() + ": resDueToMinShare = " + resDueToMinShare
+ ", resDueToFairShare = " + resDueToFairShare;
LOG.info(message);
}
return resToPreempt;
}
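  // Worked example (illustrative numbers, not from the original source), assuming
  // both preemption timeouts have already expired for a queue with
  //   minShare = 4096 MB, fairShare = 8192 MB, demand = 10240 MB, usage = 2048 MB:
  //   resDueToMinShare  = max(0, min(4096, 10240) - 2048) = 2048 MB
  //   resDueToFairShare = max(0, min(8192, 10240) - 2048) = 6144 MB
  //   resToPreempt      = max(2048, 6144)                 = 6144 MB
  // i.e. the queue is allowed to preempt enough to reach its full fair share.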
public synchronized RMContainerTokenSecretManager
getContainerTokenSecretManager() {
return rmContext.getContainerTokenSecretManager();
}
// synchronized for sizeBasedWeight
public synchronized ResourceWeights getAppWeight(FSAppAttempt app) {
double weight = 1.0;
if (sizeBasedWeight) {
// Set weight based on current memory demand
weight = Math.log1p(app.getDemand().getMemory()) / Math.log(2);
}
weight *= app.getPriority().getPriority();
if (weightAdjuster != null) {
// Run weight through the user-supplied weightAdjuster
weight = weightAdjuster.adjustWeight(app, weight);
}
ResourceWeights resourceWeights = app.getResourceWeights();
resourceWeights.setWeight((float)weight);
return resourceWeights;
}
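  // Worked example (illustrative numbers, not from the original source), assuming
  // sizeBasedWeight is enabled and no weightAdjuster is configured: an app with a
  // current memory demand of 8192 MB and priority 1 gets
  //   weight = log1p(8192) / log(2) * 1 ≈ 13.0
  // so weights grow roughly with log2 of the demand rather than linearly.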
public Resource getIncrementResourceCapability() {
return incrAllocation;
}
private FSSchedulerNode getFSSchedulerNode(NodeId nodeId) {
return nodes.get(nodeId);
}
public double getNodeLocalityThreshold() {
return nodeLocalityThreshold;
}
public double getRackLocalityThreshold() {
return rackLocalityThreshold;
}
public long getNodeLocalityDelayMs() {
return nodeLocalityDelayMs;
}
public long getRackLocalityDelayMs() {
return rackLocalityDelayMs;
}
public boolean isContinuousSchedulingEnabled() {
return continuousSchedulingEnabled;
}
public synchronized int getContinuousSchedulingSleepMs() {
return continuousSchedulingSleepMs;
}
public Clock getClock() {
return clock;
}
@VisibleForTesting
void setClock(Clock clock) {
this.clock = clock;
}
public FairSchedulerEventLog getEventLog() {
return eventLog;
}
/**
* Add a new application to the scheduler, with a given id, queue name, and
* user. This will accept a new app even if the user or queue is above
* configured limits, but the app will not be marked as runnable.
*/
protected synchronized void addApplication(ApplicationId applicationId,
String queueName, String user, boolean isAppRecovering) {
if (queueName == null || queueName.isEmpty()) {
String message = "Reject application " + applicationId +
" submitted by user " + user + " with an empty queue name.";
LOG.info(message);
rmContext.getDispatcher().getEventHandler()
.handle(new RMAppRejectedEvent(applicationId, message));
return;
}
if (queueName.startsWith(".") || queueName.endsWith(".")) {
String message = "Reject application " + applicationId
+ " submitted by user " + user + " with an illegal queue name "
+ queueName + ". "
+ "The queue name cannot start/end with period.";
LOG.info(message);
rmContext.getDispatcher().getEventHandler()
.handle(new RMAppRejectedEvent(applicationId, message));
return;
}
RMApp rmApp = rmContext.getRMApps().get(applicationId);
FSLeafQueue queue = assignToQueue(rmApp, queueName, user);
if (queue == null) {
return;
}
// Enforce ACLs
UserGroupInformation userUgi = UserGroupInformation.createRemoteUser(user);
if (!queue.hasAccess(QueueACL.SUBMIT_APPLICATIONS, userUgi)
&& !queue.hasAccess(QueueACL.ADMINISTER_QUEUE, userUgi)) {
String msg = "User " + userUgi.getUserName() +
" cannot submit applications to queue " + queue.getName();
LOG.info(msg);
rmContext.getDispatcher().getEventHandler()
.handle(new RMAppRejectedEvent(applicationId, msg));
return;
}
SchedulerApplication<FSAppAttempt> application =
new SchedulerApplication<FSAppAttempt>(queue, user);
applications.put(applicationId, application);
queue.getMetrics().submitApp(user);
LOG.info("Accepted application " + applicationId + " from user: " + user
+ ", in queue: " + queueName + ", currently num of applications: "
+ applications.size());
if (isAppRecovering) {
if (LOG.isDebugEnabled()) {
LOG.debug(applicationId + " is recovering. Skip notifying APP_ACCEPTED");
}
} else {
rmContext.getDispatcher().getEventHandler()
.handle(new RMAppEvent(applicationId, RMAppEventType.APP_ACCEPTED));
}
}
/**
* Add a new application attempt to the scheduler.
*/
protected synchronized void addApplicationAttempt(
ApplicationAttemptId applicationAttemptId,
boolean transferStateFromPreviousAttempt,
boolean isAttemptRecovering) {
SchedulerApplication<FSAppAttempt> application =
applications.get(applicationAttemptId.getApplicationId());
String user = application.getUser();
FSLeafQueue queue = (FSLeafQueue) application.getQueue();
FSAppAttempt attempt =
new FSAppAttempt(this, applicationAttemptId, user,
queue, new ActiveUsersManager(getRootQueueMetrics()),
rmContext);
if (transferStateFromPreviousAttempt) {
attempt.transferStateFromPreviousAttempt(application
.getCurrentAppAttempt());
}
application.setCurrentAppAttempt(attempt);
boolean runnable = maxRunningEnforcer.canAppBeRunnable(queue, user);
queue.addApp(attempt, runnable);
if (runnable) {
maxRunningEnforcer.trackRunnableApp(attempt);
} else {
maxRunningEnforcer.trackNonRunnableApp(attempt);
}
queue.getMetrics().submitAppAttempt(user);
LOG.info("Added Application Attempt " + applicationAttemptId
+ " to scheduler from user: " + user);
if (isAttemptRecovering) {
if (LOG.isDebugEnabled()) {
LOG.debug(applicationAttemptId
+ " is recovering. Skipping notifying ATTEMPT_ADDED");
}
} else {
rmContext.getDispatcher().getEventHandler().handle(
new RMAppAttemptEvent(applicationAttemptId,
RMAppAttemptEventType.ATTEMPT_ADDED));
}
}
/**
* Helper method that attempts to assign the app to a queue. The method is
* responsible to call the appropriate event-handler if the app is rejected.
*/
@VisibleForTesting
FSLeafQueue assignToQueue(RMApp rmApp, String queueName, String user) {
FSLeafQueue queue = null;
String appRejectMsg = null;
try {
QueuePlacementPolicy placementPolicy = allocConf.getPlacementPolicy();
queueName = placementPolicy.assignAppToQueue(queueName, user);
if (queueName == null) {
appRejectMsg = "Application rejected by queue placement policy";
} else {
queue = queueMgr.getLeafQueue(queueName, true);
if (queue == null) {
appRejectMsg = queueName + " is not a leaf queue";
}
}
} catch (IOException ioe) {
appRejectMsg = "Error assigning app to queue " + queueName;
}
if (appRejectMsg != null && rmApp != null) {
LOG.error(appRejectMsg);
rmContext.getDispatcher().getEventHandler().handle(
new RMAppRejectedEvent(rmApp.getApplicationId(), appRejectMsg));
return null;
}
if (rmApp != null) {
rmApp.setQueue(queue.getName());
} else {
LOG.error("Couldn't find RM app to set queue name on");
}
return queue;
}
private synchronized void removeApplication(ApplicationId applicationId,
RMAppState finalState) {
SchedulerApplication<FSAppAttempt> application =
applications.get(applicationId);
if (application == null){
LOG.warn("Couldn't find application " + applicationId);
return;
}
application.stop(finalState);
applications.remove(applicationId);
}
private synchronized void removeApplicationAttempt(
ApplicationAttemptId applicationAttemptId,
RMAppAttemptState rmAppAttemptFinalState, boolean keepContainers) {
LOG.info("Application " + applicationAttemptId + " is done." +
" finalState=" + rmAppAttemptFinalState);
SchedulerApplication<FSAppAttempt> application =
applications.get(applicationAttemptId.getApplicationId());
FSAppAttempt attempt = getSchedulerApp(applicationAttemptId);
if (attempt == null || application == null) {
LOG.info("Unknown application " + applicationAttemptId + " has completed!");
return;
}
// Release all the running containers
for (RMContainer rmContainer : attempt.getLiveContainers()) {
if (keepContainers
&& rmContainer.getState().equals(RMContainerState.RUNNING)) {
// do not kill the running container in the case of work-preserving AM
// restart.
LOG.info("Skip killing " + rmContainer.getContainerId());
continue;
}
completedContainer(rmContainer,
SchedulerUtils.createAbnormalContainerStatus(
rmContainer.getContainerId(),
SchedulerUtils.COMPLETED_APPLICATION),
RMContainerEventType.KILL);
}
// Release all reserved containers
for (RMContainer rmContainer : attempt.getReservedContainers()) {
completedContainer(rmContainer,
SchedulerUtils.createAbnormalContainerStatus(
rmContainer.getContainerId(),
"Application Complete"),
RMContainerEventType.KILL);
}
// Clean up pending requests, metrics etc.
attempt.stop(rmAppAttemptFinalState);
// Inform the queue
FSLeafQueue queue = queueMgr.getLeafQueue(attempt.getQueue()
.getQueueName(), false);
boolean wasRunnable = queue.removeApp(attempt);
if (wasRunnable) {
maxRunningEnforcer.untrackRunnableApp(attempt);
maxRunningEnforcer.updateRunnabilityOnAppRemoval(attempt,
attempt.getQueue());
} else {
maxRunningEnforcer.untrackNonRunnableApp(attempt);
}
}
/**
* Clean up a completed container.
*/
@Override
protected synchronized void completedContainer(RMContainer rmContainer,
ContainerStatus containerStatus, RMContainerEventType event) {
if (rmContainer == null) {
LOG.info("Null container completed...");
return;
}
Container container = rmContainer.getContainer();
// Get the application for the finished container
FSAppAttempt application =
getCurrentAttemptForContainer(container.getId());
ApplicationId appId =
container.getId().getApplicationAttemptId().getApplicationId();
if (application == null) {
LOG.info("Container " + container + " of" +
" unknown application attempt " + appId +
" completed with event " + event);
return;
}
// Get the node on which the container was allocated
FSSchedulerNode node = getFSSchedulerNode(container.getNodeId());
if (rmContainer.getState() == RMContainerState.RESERVED) {
application.unreserve(rmContainer.getReservedPriority(), node);
} else {
application.containerCompleted(rmContainer, containerStatus, event);
node.releaseContainer(container);
updateRootQueueMetrics();
}
LOG.info("Application attempt " + application.getApplicationAttemptId()
+ " released container " + container.getId() + " on node: " + node
+ " with event: " + event);
}
private synchronized void addNode(RMNode node) {
FSSchedulerNode schedulerNode = new FSSchedulerNode(node, usePortForNodeName);
nodes.put(node.getNodeID(), schedulerNode);
Resources.addTo(clusterResource, node.getTotalCapability());
updateRootQueueMetrics();
updateMaximumAllocation(schedulerNode, true);
queueMgr.getRootQueue().setSteadyFairShare(clusterResource);
queueMgr.getRootQueue().recomputeSteadyShares();
LOG.info("Added node " + node.getNodeAddress() +
" cluster capacity: " + clusterResource);
}
private synchronized void removeNode(RMNode rmNode) {
FSSchedulerNode node = getFSSchedulerNode(rmNode.getNodeID());
// This can occur when an UNHEALTHY node reconnects
if (node == null) {
return;
}
Resources.subtractFrom(clusterResource, rmNode.getTotalCapability());
updateRootQueueMetrics();
// Remove running containers
List<RMContainer> runningContainers = node.getRunningContainers();
for (RMContainer container : runningContainers) {
completedContainer(container,
SchedulerUtils.createAbnormalContainerStatus(
container.getContainerId(),
SchedulerUtils.LOST_CONTAINER),
RMContainerEventType.KILL);
}
// Remove reservations, if any
RMContainer reservedContainer = node.getReservedContainer();
if (reservedContainer != null) {
completedContainer(reservedContainer,
SchedulerUtils.createAbnormalContainerStatus(
reservedContainer.getContainerId(),
SchedulerUtils.LOST_CONTAINER),
RMContainerEventType.KILL);
}
nodes.remove(rmNode.getNodeID());
queueMgr.getRootQueue().setSteadyFairShare(clusterResource);
queueMgr.getRootQueue().recomputeSteadyShares();
updateMaximumAllocation(node, false);
LOG.info("Removed node " + rmNode.getNodeAddress() +
" cluster capacity: " + clusterResource);
}
@Override
public Allocation allocate(ApplicationAttemptId appAttemptId,
List<ResourceRequest> ask, List<ContainerId> release,
List<String> blacklistAdditions, List<String> blacklistRemovals) {
// Make sure this application exists
FSAppAttempt application = getSchedulerApp(appAttemptId);
if (application == null) {
LOG.info("Calling allocate on removed " +
"or non existant application " + appAttemptId);
return EMPTY_ALLOCATION;
}
// Sanity check
SchedulerUtils.normalizeRequests(ask, DOMINANT_RESOURCE_CALCULATOR,
clusterResource, minimumAllocation, getMaximumResourceCapability(),
incrAllocation);
// Set amResource for this app
if (!application.getUnmanagedAM() && ask.size() == 1
&& application.getLiveContainers().isEmpty()) {
application.setAMResource(ask.get(0).getCapability());
}
// Release containers
releaseContainers(release, application);
synchronized (application) {
if (!ask.isEmpty()) {
if (LOG.isDebugEnabled()) {
LOG.debug("allocate: pre-update" +
" applicationAttemptId=" + appAttemptId +
" application=" + application.getApplicationId());
}
application.showRequests();
// Update application requests
application.updateResourceRequests(ask);
application.showRequests();
}
if (LOG.isDebugEnabled()) {
LOG.debug("allocate: post-update" +
" applicationAttemptId=" + appAttemptId +
" #ask=" + ask.size() +
" reservation= " + application.getCurrentReservation());
LOG.debug("Preempting " + application.getPreemptionContainers().size()
+ " container(s)");
}
Set<ContainerId> preemptionContainerIds = new HashSet<ContainerId>();
for (RMContainer container : application.getPreemptionContainers()) {
preemptionContainerIds.add(container.getContainerId());
}
application.updateBlacklist(blacklistAdditions, blacklistRemovals);
ContainersAndNMTokensAllocation allocation =
application.pullNewlyAllocatedContainersAndNMTokens();
Resource headroom = application.getHeadroom();
application.setApplicationHeadroomForMetrics(headroom);
return new Allocation(allocation.getContainerList(), headroom,
preemptionContainerIds, null, null, allocation.getNMTokenList());
}
}
/**
* Process a heartbeat update from a node.
*/
private synchronized void nodeUpdate(RMNode nm) {
long start = getClock().getTime();
if (LOG.isDebugEnabled()) {
LOG.debug("nodeUpdate: " + nm + " cluster capacity: " + clusterResource);
}
eventLog.log("HEARTBEAT", nm.getHostName());
FSSchedulerNode node = getFSSchedulerNode(nm.getNodeID());
List<UpdatedContainerInfo> containerInfoList = nm.pullContainerUpdates();
List<ContainerStatus> newlyLaunchedContainers = new ArrayList<ContainerStatus>();
List<ContainerStatus> completedContainers = new ArrayList<ContainerStatus>();
for(UpdatedContainerInfo containerInfo : containerInfoList) {
newlyLaunchedContainers.addAll(containerInfo.getNewlyLaunchedContainers());
completedContainers.addAll(containerInfo.getCompletedContainers());
}
// Processing the newly launched containers
for (ContainerStatus launchedContainer : newlyLaunchedContainers) {
containerLaunchedOnNode(launchedContainer.getContainerId(), node);
}
// Process completed containers
for (ContainerStatus completedContainer : completedContainers) {
ContainerId containerId = completedContainer.getContainerId();
LOG.debug("Container FINISHED: " + containerId);
completedContainer(getRMContainer(containerId),
completedContainer, RMContainerEventType.FINISHED);
}
/* Start Wajih
Adding Timers to check decision delays*/
no_of_decisions++;
/* End */
if (continuousSchedulingEnabled) {
if (!completedContainers.isEmpty()) {
attemptScheduling(node);
}
} else {
/* Start Wajih
Adding Timers to check decision delays*/
long beforeTime = System.currentTimeMillis();
/* End */
attemptScheduling(node);
/* Start Wajih
Adding Timers to check decision delays*/
long afterTime = System.currentTimeMillis();
      int dec_time = (int)(afterTime - beforeTime);
      // Clamp to the last bucket so decisions longer than the histogram covers
      // do not index past the end of the array
      if (dec_time >= decision_time.length) {
        dec_time = decision_time.length - 1;
      }
      decision_time[dec_time]++;
/* End */
}
long duration = getClock().getTime() - start;
fsOpDurations.addNodeUpdateDuration(duration);
}
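  /**
   * One pass of continuous scheduling: snapshot the node list, sort it so the
   * emptiest nodes are offered containers first, and attempt scheduling on
   * every node that can still fit at least the minimum allocation.
   */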
void continuousSchedulingAttempt() throws InterruptedException {
long start = getClock().getTime();
List<NodeId> nodeIdList = new ArrayList<NodeId>(nodes.keySet());
// Sort the nodes by space available on them, so that we offer
// containers on emptier nodes first, facilitating an even spread. This
// requires holding the scheduler lock, so that the space available on a
// node doesn't change during the sort.
synchronized (this) {
Collections.sort(nodeIdList, nodeAvailableResourceComparator);
}
// iterate all nodes
for (NodeId nodeId : nodeIdList) {
FSSchedulerNode node = getFSSchedulerNode(nodeId);
try {
if (node != null && Resources.fitsIn(minimumAllocation,
node.getAvailableResource())) {
attemptScheduling(node);
}
} catch (Throwable ex) {
LOG.error("Error while attempting scheduling for node " + node +
": " + ex.toString(), ex);
}
}
long duration = getClock().getTime() - start;
fsOpDurations.addContinuousSchedulingRunDuration(duration);
}
/** Sort nodes by available resource */
private class NodeAvailableResourceComparator implements Comparator<NodeId> {
@Override
public int compare(NodeId n1, NodeId n2) {
if (!nodes.containsKey(n1)) {
return 1;
}
if (!nodes.containsKey(n2)) {
return -1;
}
return RESOURCE_CALCULATOR.compare(clusterResource,
nodes.get(n2).getAvailableResource(),
nodes.get(n1).getAvailableResource());
}
}
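  /**
   * Try to assign containers on the given node. An existing reservation on the
   * node is honoured (or released if it can no longer be satisfied) before any
   * new containers are assigned from the queue hierarchy, and multiple
   * containers are assigned per heartbeat only when assignMultiple is enabled.
   */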
private synchronized void attemptScheduling(FSSchedulerNode node) {
if (rmContext.isWorkPreservingRecoveryEnabled()
&& !rmContext.isSchedulerReadyForAllocatingContainers()) {
return;
}
// Assign new containers...
// 1. Check for reserved applications
// 2. Schedule if there are no reservations
FSAppAttempt reservedAppSchedulable = node.getReservedAppSchedulable();
if (reservedAppSchedulable != null) {
Priority reservedPriority = node.getReservedContainer().getReservedPriority();
FSQueue queue = reservedAppSchedulable.getQueue();
if (!reservedAppSchedulable.hasContainerForNode(reservedPriority, node)
|| !fitsInMaxShare(queue,
node.getReservedContainer().getReservedResource())) {
// Don't hold the reservation if app can no longer use it
LOG.info("Releasing reservation that cannot be satisfied for application "
+ reservedAppSchedulable.getApplicationAttemptId()
+ " on node " + node);
reservedAppSchedulable.unreserve(reservedPriority, node);
reservedAppSchedulable = null;
} else {
// Reservation exists; try to fulfill the reservation
if (LOG.isDebugEnabled()) {
LOG.debug("Trying to fulfill reservation for application "
+ reservedAppSchedulable.getApplicationAttemptId()
+ " on node: " + node);
}
node.getReservedAppSchedulable().assignReservedContainer(node);
}
}
if (reservedAppSchedulable == null) {
// No reservation, schedule at queue which is farthest below fair share
int assignedContainers = 0;
while (node.getReservedContainer() == null) {
boolean assignedContainer = false;
if (!queueMgr.getRootQueue().assignContainer(node).equals(
Resources.none())) {
assignedContainers++;
assignedContainer = true;
}
if (!assignedContainer) { break; }
if (!assignMultiple) { break; }
if ((assignedContainers >= maxAssign) && (maxAssign > 0)) { break; }
}
}
updateRootQueueMetrics();
}
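  /**
   * Returns true if adding additionalResource to the queue's current usage
   * stays within the maxShare of the queue and of every ancestor queue,
   * checked recursively up to the root.
   */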
static boolean fitsInMaxShare(FSQueue queue, Resource
additionalResource) {
Resource usagePlusAddition =
Resources.add(queue.getResourceUsage(), additionalResource);
if (!Resources.fitsIn(usagePlusAddition, queue.getMaxShare())) {
return false;
}
FSQueue parentQueue = queue.getParent();
if (parentQueue != null) {
return fitsInMaxShare(parentQueue, additionalResource);
}
return true;
}
public FSAppAttempt getSchedulerApp(ApplicationAttemptId appAttemptId) {
return super.getApplicationAttempt(appAttemptId);
}
@Override
public ResourceCalculator getResourceCalculator() {
return RESOURCE_CALCULATOR;
}
/**
* Subqueue metrics might be a little out of date because fair shares are
* recalculated at the update interval, but the root queue metrics needs to
* be updated synchronously with allocations and completions so that cluster
* metrics will be consistent.
*/
private void updateRootQueueMetrics() {
rootMetrics.setAvailableResourcesToQueue(
Resources.subtract(
clusterResource, rootMetrics.getAllocatedResources()));
}
/**
* Check if preemption is enabled and the utilization threshold for
* preemption is met.
*
* @return true if preemption should be attempted, false otherwise.
*/
private boolean shouldAttemptPreemption() {
if (preemptionEnabled) {
return (preemptionUtilizationThreshold < Math.max(
(float) rootMetrics.getAllocatedMB() / clusterResource.getMemory(),
(float) rootMetrics.getAllocatedVirtualCores() /
clusterResource.getVirtualCores()));
}
return false;
}
@Override
public QueueMetrics getRootQueueMetrics() {
return rootMetrics;
}
@Override
public void handle(SchedulerEvent event) {
switch (event.getType()) {
case NODE_ADDED:
if (!(event instanceof NodeAddedSchedulerEvent)) {
throw new RuntimeException("Unexpected event type: " + event);
}
NodeAddedSchedulerEvent nodeAddedEvent = (NodeAddedSchedulerEvent)event;
addNode(nodeAddedEvent.getAddedRMNode());
recoverContainersOnNode(nodeAddedEvent.getContainerReports(),
nodeAddedEvent.getAddedRMNode());
break;
case NODE_REMOVED:
if (!(event instanceof NodeRemovedSchedulerEvent)) {
throw new RuntimeException("Unexpected event type: " + event);
}
NodeRemovedSchedulerEvent nodeRemovedEvent = (NodeRemovedSchedulerEvent)event;
removeNode(nodeRemovedEvent.getRemovedRMNode());
break;
case NODE_UPDATE:
if (!(event instanceof NodeUpdateSchedulerEvent)) {
throw new RuntimeException("Unexpected event type: " + event);
}
NodeUpdateSchedulerEvent nodeUpdatedEvent = (NodeUpdateSchedulerEvent)event;
nodeUpdate(nodeUpdatedEvent.getRMNode());
break;
case APP_ADDED:
if (!(event instanceof AppAddedSchedulerEvent)) {
throw new RuntimeException("Unexpected event type: " + event);
}
AppAddedSchedulerEvent appAddedEvent = (AppAddedSchedulerEvent) event;
String queueName =
resolveReservationQueueName(appAddedEvent.getQueue(),
appAddedEvent.getApplicationId(),
appAddedEvent.getReservationID());
if (queueName != null) {
addApplication(appAddedEvent.getApplicationId(),
queueName, appAddedEvent.getUser(),
appAddedEvent.getIsAppRecovering());
}
break;
case APP_REMOVED:
if (!(event instanceof AppRemovedSchedulerEvent)) {
throw new RuntimeException("Unexpected event type: " + event);
}
AppRemovedSchedulerEvent appRemovedEvent = (AppRemovedSchedulerEvent)event;
removeApplication(appRemovedEvent.getApplicationID(),
appRemovedEvent.getFinalState());
break;
case NODE_RESOURCE_UPDATE:
if (!(event instanceof NodeResourceUpdateSchedulerEvent)) {
throw new RuntimeException("Unexpected event type: " + event);
}
NodeResourceUpdateSchedulerEvent nodeResourceUpdatedEvent =
(NodeResourceUpdateSchedulerEvent)event;
updateNodeResource(nodeResourceUpdatedEvent.getRMNode(),
nodeResourceUpdatedEvent.getResourceOption());
break;
case APP_ATTEMPT_ADDED:
if (!(event instanceof AppAttemptAddedSchedulerEvent)) {
throw new RuntimeException("Unexpected event type: " + event);
}
AppAttemptAddedSchedulerEvent appAttemptAddedEvent =
(AppAttemptAddedSchedulerEvent) event;
addApplicationAttempt(appAttemptAddedEvent.getApplicationAttemptId(),
appAttemptAddedEvent.getTransferStateFromPreviousAttempt(),
appAttemptAddedEvent.getIsAttemptRecovering());
break;
case APP_ATTEMPT_REMOVED:
if (!(event instanceof AppAttemptRemovedSchedulerEvent)) {
throw new RuntimeException("Unexpected event type: " + event);
}
AppAttemptRemovedSchedulerEvent appAttemptRemovedEvent =
(AppAttemptRemovedSchedulerEvent) event;
removeApplicationAttempt(
appAttemptRemovedEvent.getApplicationAttemptID(),
appAttemptRemovedEvent.getFinalAttemptState(),
appAttemptRemovedEvent.getKeepContainersAcrossAppAttempts());
break;
case CONTAINER_EXPIRED:
if (!(event instanceof ContainerExpiredSchedulerEvent)) {
throw new RuntimeException("Unexpected event type: " + event);
}
ContainerExpiredSchedulerEvent containerExpiredEvent =
(ContainerExpiredSchedulerEvent)event;
ContainerId containerId = containerExpiredEvent.getContainerId();
completedContainer(getRMContainer(containerId),
SchedulerUtils.createAbnormalContainerStatus(
containerId,
SchedulerUtils.EXPIRED_CONTAINER),
RMContainerEventType.EXPIRE);
break;
default:
LOG.error("Unknown event arrived at FairScheduler: " + event.toString());
}
}
private synchronized String resolveReservationQueueName(String queueName,
ApplicationId applicationId, ReservationId reservationID) {
FSQueue queue = queueMgr.getQueue(queueName);
if ((queue == null) || !allocConf.isReservable(queue.getQueueName())) {
return queueName;
}
// Use fully specified name from now on (including root. prefix)
queueName = queue.getQueueName();
if (reservationID != null) {
String resQName = queueName + "." + reservationID.toString();
queue = queueMgr.getQueue(resQName);
if (queue == null) {
String message =
"Application "
+ applicationId
+ " submitted to a reservation which is not yet currently active: "
+ resQName;
this.rmContext.getDispatcher().getEventHandler()
.handle(new RMAppRejectedEvent(applicationId, message));
return null;
}
if (!queue.getParent().getQueueName().equals(queueName)) {
String message =
"Application: " + applicationId + " submitted to a reservation "
+ resQName + " which does not belong to the specified queue: "
+ queueName;
this.rmContext.getDispatcher().getEventHandler()
.handle(new RMAppRejectedEvent(applicationId, message));
return null;
}
// use the reservation queue to run the app
queueName = resQName;
} else {
// use the default child queue of the plan for unreserved apps
queueName = getDefaultQueueForPlanQueue(queueName);
}
return queueName;
}
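  /**
   * Derive the default child queue of a reservable (plan) queue by appending
   * the plan's own name plus ReservationConstants.DEFAULT_QUEUE_SUFFIX,
   * e.g. "root.plan1" maps to "root.plan1.plan1" + DEFAULT_QUEUE_SUFFIX.
   */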
private String getDefaultQueueForPlanQueue(String queueName) {
String planName = queueName.substring(queueName.lastIndexOf(".") + 1);
queueName = queueName + "." + planName + ReservationConstants.DEFAULT_QUEUE_SUFFIX;
return queueName;
}
@Override
public void recover(RMState state) throws Exception {
// NOT IMPLEMENTED
}
public synchronized void setRMContext(RMContext rmContext) {
this.rmContext = rmContext;
}
private void initScheduler(Configuration conf) throws IOException {
synchronized (this) {
this.conf = new FairSchedulerConfiguration(conf);
validateConf(this.conf);
minimumAllocation = this.conf.getMinimumAllocation();
initMaximumResourceCapability(this.conf.getMaximumAllocation());
incrAllocation = this.conf.getIncrementAllocation();
continuousSchedulingEnabled = this.conf.isContinuousSchedulingEnabled();
continuousSchedulingSleepMs =
this.conf.getContinuousSchedulingSleepMs();
nodeLocalityThreshold = this.conf.getLocalityThresholdNode();
rackLocalityThreshold = this.conf.getLocalityThresholdRack();
nodeLocalityDelayMs = this.conf.getLocalityDelayNodeMs();
rackLocalityDelayMs = this.conf.getLocalityDelayRackMs();
preemptionEnabled = this.conf.getPreemptionEnabled();
preemptionUtilizationThreshold =
this.conf.getPreemptionUtilizationThreshold();
assignMultiple = this.conf.getAssignMultiple();
maxAssign = this.conf.getMaxAssign();
sizeBasedWeight = this.conf.getSizeBasedWeight();
preemptionInterval = this.conf.getPreemptionInterval();
waitTimeBeforeKill = this.conf.getWaitTimeBeforeKill();
usePortForNodeName = this.conf.getUsePortForNodeName();
updateInterval = this.conf.getUpdateInterval();
if (updateInterval < 0) {
updateInterval = FairSchedulerConfiguration.DEFAULT_UPDATE_INTERVAL_MS;
        LOG.warn(FairSchedulerConfiguration.UPDATE_INTERVAL_MS
            + " is invalid, so using default value "
            + FairSchedulerConfiguration.DEFAULT_UPDATE_INTERVAL_MS
            + " ms instead");
}
rootMetrics = FSQueueMetrics.forQueue("root", null, true, conf);
fsOpDurations = FSOpDurations.getInstance(true);
// This stores per-application scheduling information
this.applications = new ConcurrentHashMap<
ApplicationId, SchedulerApplication<FSAppAttempt>>();
this.eventLog = new FairSchedulerEventLog();
eventLog.init(this.conf);
allocConf = new AllocationConfiguration(conf);
try {
queueMgr.initialize(conf);
} catch (Exception e) {
throw new IOException("Failed to start FairScheduler", e);
}
updateThread = new UpdateThread();
updateThread.setName("FairSchedulerUpdateThread");
updateThread.setDaemon(true);
if (continuousSchedulingEnabled) {
// start continuous scheduling thread
schedulingThread = new ContinuousSchedulingThread();
schedulingThread.setName("FairSchedulerContinuousScheduling");
schedulingThread.setDaemon(true);
}
}
allocsLoader.init(conf);
allocsLoader.setReloadListener(new AllocationReloadListener());
// If we fail to load allocations file on initialize, we want to fail
// immediately. After a successful load, exceptions on future reloads
// will just result in leaving things as they are.
try {
allocsLoader.reloadAllocations();
} catch (Exception e) {
throw new IOException("Failed to initialize FairScheduler", e);
}
}
private synchronized void startSchedulerThreads() {
Preconditions.checkNotNull(updateThread, "updateThread is null");
Preconditions.checkNotNull(allocsLoader, "allocsLoader is null");
updateThread.start();
if (continuousSchedulingEnabled) {
Preconditions.checkNotNull(schedulingThread, "schedulingThread is null");
schedulingThread.start();
}
allocsLoader.start();
}
@Override
public void serviceInit(Configuration conf) throws Exception {
initScheduler(conf);
super.serviceInit(conf);
}
@Override
public void serviceStart() throws Exception {
startSchedulerThreads();
super.serviceStart();
}
@Override
public void serviceStop() throws Exception {
synchronized (this) {
if (updateThread != null) {
updateThread.interrupt();
updateThread.join(THREAD_JOIN_TIMEOUT_MS);
}
if (continuousSchedulingEnabled) {
if (schedulingThread != null) {
schedulingThread.interrupt();
schedulingThread.join(THREAD_JOIN_TIMEOUT_MS);
}
}
if (allocsLoader != null) {
allocsLoader.stop();
}
}
super.serviceStop();
}
@Override
public void reinitialize(Configuration conf, RMContext rmContext)
throws IOException {
try {
allocsLoader.reloadAllocations();
} catch (Exception e) {
LOG.error("Failed to reload allocations file", e);
}
}
@Override
public QueueInfo getQueueInfo(String queueName, boolean includeChildQueues,
boolean recursive) throws IOException {
if (!queueMgr.exists(queueName)) {
throw new IOException("queue " + queueName + " does not exist");
}
return queueMgr.getQueue(queueName).getQueueInfo(includeChildQueues,
recursive);
}
@Override
public List<QueueUserACLInfo> getQueueUserAclInfo() {
UserGroupInformation user;
try {
user = UserGroupInformation.getCurrentUser();
} catch (IOException ioe) {
return new ArrayList<QueueUserACLInfo>();
}
return queueMgr.getRootQueue().getQueueUserAclInfo(user);
}
@Override
public int getNumClusterNodes() {
return nodes.size();
}
@Override
public synchronized boolean checkAccess(UserGroupInformation callerUGI,
QueueACL acl, String queueName) {
FSQueue queue = getQueueManager().getQueue(queueName);
if (queue == null) {
if (LOG.isDebugEnabled()) {
LOG.debug("ACL not found for queue access-type " + acl
+ " for queue " + queueName);
}
return false;
}
return queue.hasAccess(acl, callerUGI);
}
public AllocationConfiguration getAllocationConfiguration() {
return allocConf;
}
private class AllocationReloadListener implements
AllocationFileLoaderService.Listener {
@Override
public void onReload(AllocationConfiguration queueInfo) {
// Commit the reload; also create any queue defined in the alloc file
// if it does not already exist, so it can be displayed on the web UI.
synchronized (FairScheduler.this) {
allocConf = queueInfo;
allocConf.getDefaultSchedulingPolicy().initialize(clusterResource);
queueMgr.updateAllocationConfiguration(allocConf);
maxRunningEnforcer.updateRunnabilityOnReload();
}
}
}
@Override
public List<ApplicationAttemptId> getAppsInQueue(String queueName) {
FSQueue queue = queueMgr.getQueue(queueName);
if (queue == null) {
return null;
}
List<ApplicationAttemptId> apps = new ArrayList<ApplicationAttemptId>();
queue.collectSchedulerApplications(apps);
return apps;
}
@Override
public synchronized String moveApplication(ApplicationId appId,
String queueName) throws YarnException {
SchedulerApplication<FSAppAttempt> app = applications.get(appId);
if (app == null) {
throw new YarnException("App to be moved " + appId + " not found.");
}
FSAppAttempt attempt = (FSAppAttempt) app.getCurrentAppAttempt();
// To serialize with FairScheduler#allocate, synchronize on app attempt
synchronized (attempt) {
FSLeafQueue oldQueue = (FSLeafQueue) app.getQueue();
String destQueueName = handleMoveToPlanQueue(queueName);
FSLeafQueue targetQueue = queueMgr.getLeafQueue(destQueueName, false);
if (targetQueue == null) {
throw new YarnException("Target queue " + queueName
+ " not found or is not a leaf queue.");
}
if (targetQueue == oldQueue) {
return oldQueue.getQueueName();
}
if (oldQueue.isRunnableApp(attempt)) {
verifyMoveDoesNotViolateConstraints(attempt, oldQueue, targetQueue);
}
executeMove(app, attempt, oldQueue, targetQueue);
return targetQueue.getQueueName();
}
}
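  /**
   * Verify that moving the given runnable app into targetQueue would not push
   * any queue between the target and the lowest common ancestor over its
   * maxRunningApps or maxShare limits; throws YarnException if it would.
   */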
private void verifyMoveDoesNotViolateConstraints(FSAppAttempt app,
FSLeafQueue oldQueue, FSLeafQueue targetQueue) throws YarnException {
String queueName = targetQueue.getQueueName();
ApplicationAttemptId appAttId = app.getApplicationAttemptId();
// When checking maxResources and maxRunningApps, only need to consider
// queues before the lowest common ancestor of the two queues because the
// total running apps in queues above will not be changed.
FSQueue lowestCommonAncestor = findLowestCommonAncestorQueue(oldQueue,
targetQueue);
Resource consumption = app.getCurrentConsumption();
// Check whether the move would go over maxRunningApps or maxShare
FSQueue cur = targetQueue;
while (cur != lowestCommonAncestor) {
// maxRunningApps
if (cur.getNumRunnableApps() == allocConf.getQueueMaxApps(cur.getQueueName())) {
throw new YarnException("Moving app attempt " + appAttId + " to queue "
+ queueName + " would violate queue maxRunningApps constraints on"
+ " queue " + cur.getQueueName());
}
// maxShare
if (!Resources.fitsIn(Resources.add(cur.getResourceUsage(), consumption),
cur.getMaxShare())) {
throw new YarnException("Moving app attempt " + appAttId + " to queue "
+ queueName + " would violate queue maxShare constraints on"
+ " queue " + cur.getQueueName());
}
cur = cur.getParent();
}
}
/**
* Helper for moveApplication, which has appropriate synchronization, so all
* operations will be atomic.
*/
private void executeMove(SchedulerApplication<FSAppAttempt> app,
FSAppAttempt attempt, FSLeafQueue oldQueue, FSLeafQueue newQueue) {
boolean wasRunnable = oldQueue.removeApp(attempt);
// if app was not runnable before, it may be runnable now
boolean nowRunnable = maxRunningEnforcer.canAppBeRunnable(newQueue,
attempt.getUser());
if (wasRunnable && !nowRunnable) {
throw new IllegalStateException("Should have already verified that app "
+ attempt.getApplicationId() + " would be runnable in new queue");
}
if (wasRunnable) {
maxRunningEnforcer.untrackRunnableApp(attempt);
} else if (nowRunnable) {
// App has changed from non-runnable to runnable
maxRunningEnforcer.untrackNonRunnableApp(attempt);
}
attempt.move(newQueue); // This updates all the metrics
app.setQueue(newQueue);
newQueue.addApp(attempt, nowRunnable);
if (nowRunnable) {
maxRunningEnforcer.trackRunnableApp(attempt);
}
if (wasRunnable) {
maxRunningEnforcer.updateRunnabilityOnAppRemoval(attempt, oldQueue);
}
}
@VisibleForTesting
FSQueue findLowestCommonAncestorQueue(FSQueue queue1, FSQueue queue2) {
// Because queue names include ancestors, separated by periods, we can find
// the lowest common ancestors by going from the start of the names until
// there's a character that doesn't match.
String name1 = queue1.getName();
String name2 = queue2.getName();
// We keep track of the last period we encounter to avoid returning root.apple
// when the queues are root.applepie and root.appletart
int lastPeriodIndex = -1;
for (int i = 0; i < Math.max(name1.length(), name2.length()); i++) {
if (name1.length() <= i || name2.length() <= i ||
name1.charAt(i) != name2.charAt(i)) {
return queueMgr.getQueue(name1.substring(0, lastPeriodIndex));
} else if (name1.charAt(i) == '.') {
lastPeriodIndex = i;
}
}
return queue1; // names are identical
}
/**
* Process resource update on a node and update Queue.
*/
@Override
public synchronized void updateNodeResource(RMNode nm,
ResourceOption resourceOption) {
super.updateNodeResource(nm, resourceOption);
updateRootQueueMetrics();
queueMgr.getRootQueue().setSteadyFairShare(clusterResource);
queueMgr.getRootQueue().recomputeSteadyShares();
}
/** {@inheritDoc} */
@Override
public EnumSet<SchedulerResourceTypes> getSchedulingResourceTypes() {
return EnumSet
.of(SchedulerResourceTypes.MEMORY, SchedulerResourceTypes.CPU);
}
@Override
public Set<String> getPlanQueues() throws YarnException {
Set<String> planQueues = new HashSet<String>();
for (FSQueue fsQueue : queueMgr.getQueues()) {
String queueName = fsQueue.getName();
if (allocConf.isReservable(queueName)) {
planQueues.add(queueName);
}
}
return planQueues;
}
@Override
public void setEntitlement(String queueName,
QueueEntitlement entitlement) throws YarnException {
FSLeafQueue reservationQueue = queueMgr.getLeafQueue(queueName, false);
if (reservationQueue == null) {
throw new YarnException("Target queue " + queueName
+ " not found or is not a leaf queue.");
}
reservationQueue.setWeights(entitlement.getCapacity());
// TODO Does MaxCapacity need to be set for fairScheduler ?
}
/**
* Only supports removing empty leaf queues
* @param queueName name of queue to remove
   * @throws YarnException if the queue to remove is either not a leaf or is
   *         not empty
*/
@Override
public void removeQueue(String queueName) throws YarnException {
FSLeafQueue reservationQueue = queueMgr.getLeafQueue(queueName, false);
if (reservationQueue != null) {
if (!queueMgr.removeLeafQueue(queueName)) {
throw new YarnException("Could not remove queue " + queueName + " as " +
"its either not a leaf queue or its not empty");
}
}
}
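  /**
   * If the move target is a reservable (plan) queue, redirect the move to the
   * plan's default child queue; otherwise return the target name unchanged.
   */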
private String handleMoveToPlanQueue(String targetQueueName) {
FSQueue dest = queueMgr.getQueue(targetQueueName);
if (dest != null && allocConf.isReservable(dest.getQueueName())) {
// use the default child reservation queue of the plan
targetQueueName = getDefaultQueueForPlanQueue(targetQueueName);
}
return targetQueueName;
}
/* Start-Wajih Get decision stats */
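  /**
   * Summarize the per-decision scheduling latency histogram collected in
   * nodeUpdate(): reports the minimum and maximum observed decision time and
   * the percentage of decisions that fell into the 0-5, 5-10, 10-25 and >25
   * millisecond buckets.
   */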
@Override
public String getDecisionTimeStats() {
    int max_time = 0;
    int min_time = 0;
    // Break the decision-time histogram down into four buckets
    long part_0_5 = 0;
    long part_5_10 = 0;
    long part_10_25 = 0;
    long part_25_inf = 0;
    String dec_string = " ";
    boolean flag = true;
    for (int i = 0; i < dec_array_size; i++) {
      if (i > 0 && i <= 5)
        part_0_5 += decision_time[i];
      if (i > 5 && i <= 10)
        part_5_10 += decision_time[i];
      if (i > 10 && i <= 25)
        part_10_25 += decision_time[i];
      if (i > 25)
        part_25_inf += decision_time[i];
      // The first non-empty bucket is the minimum decision time ...
      if (flag && decision_time[i] >= 1) {
        min_time = i;
        flag = false;
      }
      // ... and the last non-empty bucket is the maximum decision time
      if (decision_time[i] >= 1) {
        max_time = i;
      }
    }
    // Build the report once, after all buckets have been accumulated
    dec_string += "Max Time : ";
    dec_string += max_time;
    dec_string += " ---- ";
    dec_string += "Min Time : ";
    dec_string += min_time;
    dec_string += "\n";
    dec_string += "Percentage of decision timings in between 0-5 Millisecond = ";
    dec_string += ((part_0_5 * 1.0) / no_of_decisions) * 100;
    dec_string += "\n";
    dec_string += "Percentage of decision timings in between 5-10 Millisecond = ";
    dec_string += ((part_5_10 * 1.0) / no_of_decisions) * 100;
    dec_string += "\n";
    dec_string += "Percentage of decision timings in between 10-25 Millisecond = ";
    dec_string += ((part_10_25 * 1.0) / no_of_decisions) * 100;
    dec_string += "\n";
    dec_string += "Percentage of decision timings >25 Millisecond = ";
    dec_string += ((part_25_inf * 1.0) / no_of_decisions) * 100;
    return dec_string;
/*
String decision_str="";
long tmp_sum=0;
long tmp_sum2=0;
for(int i=0;i<=5;i++)
tmp_sum+=decisionArray[i];
decision_str+=" 0-5: "+tmp_sum+" "+((int)((tmp_sum*1.0/totalDec)*10000))/100.0+"%";
tmp_sum2+=tmp_sum;
tmp_sum=0;
for(int i=6;i<=10;i++)
tmp_sum+=decisionArray[i];
decision_str+=" 5-10: "+tmp_sum+" "+((int)((tmp_sum*1.0/totalDec)*10000))/100.0+"%";
tmp_sum2+=tmp_sum;
tmp_sum=0;
for(int i=11;i<=25;i++)
tmp_sum+=decisionArray[i];
decision_str+=" 10-25: "+tmp_sum+" "+((int)((tmp_sum*1.0/totalDec)*10000))/100.0+"%";
tmp_sum2+=tmp_sum;
tmp_sum=0;
for(int i=26;i<=50;i++)
tmp_sum+=decisionArray[i];
decision_str+=" 25-50: "+tmp_sum+" "+((int)((tmp_sum*1.0/totalDec)*10000))/100.0+"%";
tmp_sum2+=tmp_sum;
return decision_str;
*/
}
/* END -wajih*/
}
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FairScheduler.java
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.QueueACL;
import org.apache.hadoop.yarn.api.records.QueueInfo;
import org.apache.hadoop.yarn.api.records.QueueUserACLInfo;
import org.apache.hadoop.yarn.api.records.ReservationId;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceOption;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore.RMState;
import org.apache.hadoop.yarn.server.resourcemanager.reservation.ReservationConstants;
import org.apache.hadoop.yarn.server.resourcemanager.resource.ResourceWeights;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppEventType;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppRejectedEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptEventType;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerEventType;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerState;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.UpdatedContainerInfo;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.AbstractYarnScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ActiveUsersManager;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.Allocation;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.QueueMetrics;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerApplication;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerApplicationAttempt.ContainersAndNMTokensAllocation;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerUtils;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.QueueEntitlement;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAddedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAttemptAddedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAttemptRemovedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppRemovedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.ContainerExpiredSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeAddedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeRemovedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeResourceUpdateSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeUpdateSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.security.RMContainerTokenSecretManager;
import org.apache.hadoop.yarn.util.Clock;
import org.apache.hadoop.yarn.util.SystemClock;
import org.apache.hadoop.yarn.util.resource.DefaultResourceCalculator;
import org.apache.hadoop.yarn.util.resource.DominantResourceCalculator;
import org.apache.hadoop.yarn.util.resource.ResourceCalculator;
import org.apache.hadoop.yarn.util.resource.Resources;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
/**
* A scheduler that schedules resources between a set of queues. The scheduler
* keeps track of the resources used by each queue, and attempts to maintain
* fairness by scheduling tasks at queues whose allocations are farthest below
* an ideal fair distribution.
*
* The fair scheduler supports hierarchical queues. All queues descend from a
* queue named "root". Available resources are distributed among the children
* of the root queue in the typical fair scheduling fashion. Then, the children
* distribute the resources assigned to them to their children in the same
* fashion. Applications may only be scheduled on leaf queues. Queues can be
* specified as children of other queues by placing them as sub-elements of their
* parents in the fair scheduler configuration file.
*
* A queue's name starts with the names of its parents, with periods as
 * separators. So a queue named "queue1" under the root queue would be
* referred to as "root.queue1", and a queue named "queue2" under a queue
* named "parent1" would be referred to as "root.parent1.queue2".
*/
@LimitedPrivate("yarn")
@Unstable
@SuppressWarnings("unchecked")
public class FairScheduler extends
AbstractYarnScheduler<FSAppAttempt, FSSchedulerNode> {
private FairSchedulerConfiguration conf;
private Resource incrAllocation;
private QueueManager queueMgr;
private volatile Clock clock;
private boolean usePortForNodeName;
private static final Log LOG = LogFactory.getLog(FairScheduler.class);
private static final ResourceCalculator RESOURCE_CALCULATOR =
new DefaultResourceCalculator();
private static final ResourceCalculator DOMINANT_RESOURCE_CALCULATOR =
new DominantResourceCalculator();
// Value that container assignment methods return when a container is
// reserved
public static final Resource CONTAINER_RESERVED = Resources.createResource(-1);
// How often fair shares are re-calculated (ms)
protected long updateInterval;
private final int UPDATE_DEBUG_FREQUENCY = 5;
private int updatesToSkipForDebug = UPDATE_DEBUG_FREQUENCY;
@VisibleForTesting
Thread updateThread;
@VisibleForTesting
Thread schedulingThread;
// timeout to join when we stop this service
protected final long THREAD_JOIN_TIMEOUT_MS = 1000;
// Aggregate metrics
FSQueueMetrics rootMetrics;
FSOpDurations fsOpDurations;
// Time when we last updated preemption vars
protected long lastPreemptionUpdateTime;
// Time we last ran preemptTasksIfNecessary
private long lastPreemptCheckTime;
// Preemption related variables
protected boolean preemptionEnabled;
protected float preemptionUtilizationThreshold;
// How often tasks are preempted
protected long preemptionInterval;
// ms to wait before force killing stuff (must be longer than a couple
// of heartbeats to give task-kill commands a chance to act).
protected long waitTimeBeforeKill;
// Containers whose AMs have been warned that they will be preempted soon.
private List<RMContainer> warnedContainers = new ArrayList<RMContainer>();
protected boolean sizeBasedWeight; // Give larger weights to larger jobs
protected WeightAdjuster weightAdjuster; // Can be null for no weight adjuster
protected boolean continuousSchedulingEnabled; // Continuous Scheduling enabled or not
protected int continuousSchedulingSleepMs; // Sleep time for each pass in continuous scheduling
private Comparator<NodeId> nodeAvailableResourceComparator =
new NodeAvailableResourceComparator(); // Node available resource comparator
protected double nodeLocalityThreshold; // Cluster threshold for node locality
protected double rackLocalityThreshold; // Cluster threshold for rack locality
protected long nodeLocalityDelayMs; // Delay for node locality
protected long rackLocalityDelayMs; // Delay for rack locality
private FairSchedulerEventLog eventLog; // Machine-readable event log
protected boolean assignMultiple; // Allocate multiple containers per
// heartbeat
protected int maxAssign; // Max containers to assign per heartbeat
/* Start -Wajih Measuring decision timings*/
public int[] decision_time;
/* End - Wajih*/
@VisibleForTesting
final MaxRunningAppsEnforcer maxRunningEnforcer;
private AllocationFileLoaderService allocsLoader;
@VisibleForTesting
AllocationConfiguration allocConf;
public FairScheduler() {
super(FairScheduler.class.getName());
clock = new SystemClock();
allocsLoader = new AllocationFileLoaderService();
queueMgr = new QueueManager(this);
maxRunningEnforcer = new MaxRunningAppsEnforcer(this);
/*Start Wajih Measuring decision timings*/
decision_time = new int[10000];
/* End Wajih */
}
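  /**
   * Sanity-check the scheduler's minimum/maximum memory and vcore allocation
   * settings: the minimum must be non-negative and must not exceed the
   * configured maximum.
   */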
private void validateConf(Configuration conf) {
// validate scheduler memory allocation setting
int minMem = conf.getInt(
YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB,
YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB);
int maxMem = conf.getInt(
YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_MB,
YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB);
if (minMem < 0 || minMem > maxMem) {
throw new YarnRuntimeException("Invalid resource scheduler memory"
+ " allocation configuration"
+ ", " + YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB
+ "=" + minMem
+ ", " + YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_MB
+ "=" + maxMem + ", min should equal greater than 0"
+ ", max should be no smaller than min.");
}
// validate scheduler vcores allocation setting
int minVcores = conf.getInt(
YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES,
YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES);
int maxVcores = conf.getInt(
YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES,
YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES);
if (minVcores < 0 || minVcores > maxVcores) {
throw new YarnRuntimeException("Invalid resource scheduler vcores"
+ " allocation configuration"
+ ", " + YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES
+ "=" + minVcores
+ ", " + YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES
+ "=" + maxVcores + ", min should equal greater than 0"
+ ", max should be no smaller than min.");
}
}
public FairSchedulerConfiguration getConf() {
return conf;
}
public QueueManager getQueueManager() {
return queueMgr;
}
/**
* Thread which calls {@link FairScheduler#update()} every
* <code>updateInterval</code> milliseconds.
*/
private class UpdateThread extends Thread {
@Override
public void run() {
while (!Thread.currentThread().isInterrupted()) {
try {
Thread.sleep(updateInterval);
long start = getClock().getTime();
update();
preemptTasksIfNecessary();
long duration = getClock().getTime() - start;
fsOpDurations.addUpdateThreadRunDuration(duration);
} catch (InterruptedException ie) {
LOG.warn("Update thread interrupted. Exiting.");
return;
} catch (Exception e) {
LOG.error("Exception in fair scheduler UpdateThread", e);
}
}
}
}
/**
* Thread which attempts scheduling resources continuously,
* asynchronous to the node heartbeats.
*/
private class ContinuousSchedulingThread extends Thread {
@Override
public void run() {
while (!Thread.currentThread().isInterrupted()) {
try {
continuousSchedulingAttempt();
Thread.sleep(getContinuousSchedulingSleepMs());
} catch (InterruptedException e) {
LOG.warn("Continuous scheduling thread interrupted. Exiting.", e);
return;
}
}
}
}
/**
* Recompute the internal variables used by the scheduler - per-job weights,
* fair shares, deficits, minimum slot allocations, and amount of used and
* required resources per job.
*/
protected synchronized void update() {
long start = getClock().getTime();
updateStarvationStats(); // Determine if any queues merit preemption
FSQueue rootQueue = queueMgr.getRootQueue();
// Recursively update demands for all queues
rootQueue.updateDemand();
rootQueue.setFairShare(clusterResource);
// Recursively compute fair shares for all queues
// and update metrics
rootQueue.recomputeShares();
updateRootQueueMetrics();
if (LOG.isDebugEnabled()) {
if (--updatesToSkipForDebug < 0) {
updatesToSkipForDebug = UPDATE_DEBUG_FREQUENCY;
LOG.debug("Cluster Capacity: " + clusterResource +
" Allocations: " + rootMetrics.getAllocatedResources() +
" Availability: " + Resource.newInstance(
rootMetrics.getAvailableMB(),
rootMetrics.getAvailableVirtualCores()) +
" Demand: " + rootQueue.getDemand());
}
}
long duration = getClock().getTime() - start;
fsOpDurations.addUpdateCallDuration(duration);
}
/**
* Update the preemption fields for all QueueScheduables, i.e. the times since
* each queue last was at its guaranteed share and over its fair share
* threshold for each type of task.
*/
private void updateStarvationStats() {
lastPreemptionUpdateTime = clock.getTime();
for (FSLeafQueue sched : queueMgr.getLeafQueues()) {
sched.updateStarvationStats();
}
}
/**
* Check for queues that need tasks preempted, either because they have been
* below their guaranteed share for minSharePreemptionTimeout or they have
* been below their fair share threshold for the fairSharePreemptionTimeout. If
* such queues exist, compute how many tasks of each type need to be preempted
* and then select the right ones using preemptTasks.
*/
protected synchronized void preemptTasksIfNecessary() {
if (!shouldAttemptPreemption()) {
return;
}
long curTime = getClock().getTime();
if (curTime - lastPreemptCheckTime < preemptionInterval) {
return;
}
lastPreemptCheckTime = curTime;
Resource resToPreempt = Resources.clone(Resources.none());
for (FSLeafQueue sched : queueMgr.getLeafQueues()) {
Resources.addTo(resToPreempt, resToPreempt(sched, curTime));
}
if (Resources.greaterThan(RESOURCE_CALCULATOR, clusterResource, resToPreempt,
Resources.none())) {
preemptResources(resToPreempt);
}
}
/**
* Preempt a quantity of resources. Each round, we start from the root queue,
* level-by-level, until choosing a candidate application.
* The policy for prioritizing preemption for each queue depends on its
* SchedulingPolicy: (1) fairshare/DRF, choose the ChildSchedulable that is
* most over its fair share; (2) FIFO, choose the childSchedulable that is
* latest launched.
* Inside each application, we further prioritize preemption by choosing
* containers with lowest priority to preempt.
* We make sure that no queue is placed below its fair share in the process.
*/
protected void preemptResources(Resource toPreempt) {
long start = getClock().getTime();
if (Resources.equals(toPreempt, Resources.none())) {
return;
}
// Scan down the list of containers we've already warned and kill them
// if we need to. Remove any containers from the list that we don't need
// or that are no longer running.
Iterator<RMContainer> warnedIter = warnedContainers.iterator();
while (warnedIter.hasNext()) {
RMContainer container = warnedIter.next();
if ((container.getState() == RMContainerState.RUNNING ||
container.getState() == RMContainerState.ALLOCATED) &&
Resources.greaterThan(RESOURCE_CALCULATOR, clusterResource,
toPreempt, Resources.none())) {
warnOrKillContainer(container);
Resources.subtractFrom(toPreempt, container.getContainer().getResource());
} else {
warnedIter.remove();
}
}
try {
// Reset preemptedResource for each app
for (FSLeafQueue queue : getQueueManager().getLeafQueues()) {
queue.resetPreemptedResources();
}
while (Resources.greaterThan(RESOURCE_CALCULATOR, clusterResource,
toPreempt, Resources.none())) {
RMContainer container =
getQueueManager().getRootQueue().preemptContainer();
if (container == null) {
break;
} else {
warnOrKillContainer(container);
warnedContainers.add(container);
Resources.subtractFrom(
toPreempt, container.getContainer().getResource());
}
}
} finally {
// Clear preemptedResources for each app
for (FSLeafQueue queue : getQueueManager().getLeafQueues()) {
queue.clearPreemptedResources();
}
}
long duration = getClock().getTime() - start;
fsOpDurations.addPreemptCallDuration(duration);
}
protected void warnOrKillContainer(RMContainer container) {
ApplicationAttemptId appAttemptId = container.getApplicationAttemptId();
FSAppAttempt app = getSchedulerApp(appAttemptId);
FSLeafQueue queue = app.getQueue();
LOG.info("Preempting container (prio=" + container.getContainer().getPriority() +
"res=" + container.getContainer().getResource() +
") from queue " + queue.getName());
Long time = app.getContainerPreemptionTime(container);
if (time != null) {
// if we asked for preemption more than maxWaitTimeBeforeKill ms ago,
// proceed with kill
if (time + waitTimeBeforeKill < getClock().getTime()) {
ContainerStatus status =
SchedulerUtils.createPreemptedContainerStatus(
container.getContainerId(), SchedulerUtils.PREEMPTED_CONTAINER);
recoverResourceRequestForContainer(container);
// TODO: Not sure if this ever actually adds this to the list of cleanup
// containers on the RMNode (see SchedulerNode.releaseContainer()).
completedContainer(container, status, RMContainerEventType.KILL);
LOG.info("Killing container" + container +
" (after waiting for premption for " +
(getClock().getTime() - time) + "ms)");
}
} else {
// track the request in the FSAppAttempt itself
app.addPreemption(container, getClock().getTime());
}
}
/**
* Return the resource amount that this queue is allowed to preempt, if any.
* If the queue has been below its min share for at least its preemption
* timeout, it should preempt the difference between its current share and
* this min share. If it has been below its fair share preemption threshold
* for at least the fairSharePreemptionTimeout, it should preempt enough tasks
* to get up to its full fair share. If both conditions hold, we preempt the
* max of the two amounts (this shouldn't happen unless someone sets the
* timeouts to be identical for some reason).
*/
protected Resource resToPreempt(FSLeafQueue sched, long curTime) {
long minShareTimeout = sched.getMinSharePreemptionTimeout();
long fairShareTimeout = sched.getFairSharePreemptionTimeout();
Resource resDueToMinShare = Resources.none();
Resource resDueToFairShare = Resources.none();
if (curTime - sched.getLastTimeAtMinShare() > minShareTimeout) {
Resource target = Resources.min(RESOURCE_CALCULATOR, clusterResource,
sched.getMinShare(), sched.getDemand());
resDueToMinShare = Resources.max(RESOURCE_CALCULATOR, clusterResource,
Resources.none(), Resources.subtract(target, sched.getResourceUsage()));
}
if (curTime - sched.getLastTimeAtFairShareThreshold() > fairShareTimeout) {
Resource target = Resources.min(RESOURCE_CALCULATOR, clusterResource,
sched.getFairShare(), sched.getDemand());
resDueToFairShare = Resources.max(RESOURCE_CALCULATOR, clusterResource,
Resources.none(), Resources.subtract(target, sched.getResourceUsage()));
}
Resource resToPreempt = Resources.max(RESOURCE_CALCULATOR, clusterResource,
resDueToMinShare, resDueToFairShare);
if (Resources.greaterThan(RESOURCE_CALCULATOR, clusterResource,
resToPreempt, Resources.none())) {
String message = "Should preempt " + resToPreempt + " res for queue "
+ sched.getName() + ": resDueToMinShare = " + resDueToMinShare
+ ", resDueToFairShare = " + resDueToFairShare;
LOG.info(message);
}
return resToPreempt;
}
public synchronized RMContainerTokenSecretManager
getContainerTokenSecretManager() {
return rmContext.getContainerTokenSecretManager();
}
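  // Illustrative example (not part of the original code): with sizeBasedWeight
  // enabled, an app demanding 4096 MB gets a base weight of roughly
  // log1p(4096) / log(2) ~= 12, which is then scaled by the app's priority and
  // optionally adjusted by a user-supplied WeightAdjuster.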
// synchronized for sizeBasedWeight
public synchronized ResourceWeights getAppWeight(FSAppAttempt app) {
double weight = 1.0;
if (sizeBasedWeight) {
// Set weight based on current memory demand
weight = Math.log1p(app.getDemand().getMemory()) / Math.log(2);
}
weight *= app.getPriority().getPriority();
if (weightAdjuster != null) {
// Run weight through the user-supplied weightAdjuster
weight = weightAdjuster.adjustWeight(app, weight);
}
ResourceWeights resourceWeights = app.getResourceWeights();
resourceWeights.setWeight((float)weight);
return resourceWeights;
}
public Resource getIncrementResourceCapability() {
return incrAllocation;
}
private FSSchedulerNode getFSSchedulerNode(NodeId nodeId) {
return nodes.get(nodeId);
}
public double getNodeLocalityThreshold() {
return nodeLocalityThreshold;
}
public double getRackLocalityThreshold() {
return rackLocalityThreshold;
}
public long getNodeLocalityDelayMs() {
return nodeLocalityDelayMs;
}
public long getRackLocalityDelayMs() {
return rackLocalityDelayMs;
}
public boolean isContinuousSchedulingEnabled() {
return continuousSchedulingEnabled;
}
public synchronized int getContinuousSchedulingSleepMs() {
return continuousSchedulingSleepMs;
}
public Clock getClock() {
return clock;
}
@VisibleForTesting
void setClock(Clock clock) {
this.clock = clock;
}
public FairSchedulerEventLog getEventLog() {
return eventLog;
}
/**
* Add a new application to the scheduler, with a given id, queue name, and
* user. This will accept a new app even if the user or queue is above
* configured limits, but the app will not be marked as runnable.
*/
protected synchronized void addApplication(ApplicationId applicationId,
String queueName, String user, boolean isAppRecovering) {
if (queueName == null || queueName.isEmpty()) {
String message = "Reject application " + applicationId +
" submitted by user " + user + " with an empty queue name.";
LOG.info(message);
rmContext.getDispatcher().getEventHandler()
.handle(new RMAppRejectedEvent(applicationId, message));
return;
}
if (queueName.startsWith(".") || queueName.endsWith(".")) {
String message = "Reject application " + applicationId
+ " submitted by user " + user + " with an illegal queue name "
+ queueName + ". "
+ "The queue name cannot start/end with period.";
LOG.info(message);
rmContext.getDispatcher().getEventHandler()
.handle(new RMAppRejectedEvent(applicationId, message));
return;
}
RMApp rmApp = rmContext.getRMApps().get(applicationId);
FSLeafQueue queue = assignToQueue(rmApp, queueName, user);
if (queue == null) {
return;
}
// Enforce ACLs
UserGroupInformation userUgi = UserGroupInformation.createRemoteUser(user);
if (!queue.hasAccess(QueueACL.SUBMIT_APPLICATIONS, userUgi)
&& !queue.hasAccess(QueueACL.ADMINISTER_QUEUE, userUgi)) {
String msg = "User " + userUgi.getUserName() +
" cannot submit applications to queue " + queue.getName();
LOG.info(msg);
rmContext.getDispatcher().getEventHandler()
.handle(new RMAppRejectedEvent(applicationId, msg));
return;
}
SchedulerApplication<FSAppAttempt> application =
new SchedulerApplication<FSAppAttempt>(queue, user);
applications.put(applicationId, application);
queue.getMetrics().submitApp(user);
LOG.info("Accepted application " + applicationId + " from user: " + user
+ ", in queue: " + queueName + ", currently num of applications: "
+ applications.size());
if (isAppRecovering) {
if (LOG.isDebugEnabled()) {
LOG.debug(applicationId + " is recovering. Skip notifying APP_ACCEPTED");
}
} else {
rmContext.getDispatcher().getEventHandler()
.handle(new RMAppEvent(applicationId, RMAppEventType.APP_ACCEPTED));
}
}
/**
* Add a new application attempt to the scheduler.
*/
protected synchronized void addApplicationAttempt(
ApplicationAttemptId applicationAttemptId,
boolean transferStateFromPreviousAttempt,
boolean isAttemptRecovering) {
SchedulerApplication<FSAppAttempt> application =
applications.get(applicationAttemptId.getApplicationId());
String user = application.getUser();
FSLeafQueue queue = (FSLeafQueue) application.getQueue();
FSAppAttempt attempt =
new FSAppAttempt(this, applicationAttemptId, user,
queue, new ActiveUsersManager(getRootQueueMetrics()),
rmContext);
if (transferStateFromPreviousAttempt) {
attempt.transferStateFromPreviousAttempt(application
.getCurrentAppAttempt());
}
application.setCurrentAppAttempt(attempt);
boolean runnable = maxRunningEnforcer.canAppBeRunnable(queue, user);
queue.addApp(attempt, runnable);
if (runnable) {
maxRunningEnforcer.trackRunnableApp(attempt);
} else {
maxRunningEnforcer.trackNonRunnableApp(attempt);
}
queue.getMetrics().submitAppAttempt(user);
LOG.info("Added Application Attempt " + applicationAttemptId
+ " to scheduler from user: " + user);
if (isAttemptRecovering) {
if (LOG.isDebugEnabled()) {
LOG.debug(applicationAttemptId
+ " is recovering. Skipping notifying ATTEMPT_ADDED");
}
} else {
rmContext.getDispatcher().getEventHandler().handle(
new RMAppAttemptEvent(applicationAttemptId,
RMAppAttemptEventType.ATTEMPT_ADDED));
}
}
/**
* Helper method that attempts to assign the app to a queue. The method is
* responsible to call the appropriate event-handler if the app is rejected.
*/
@VisibleForTesting
FSLeafQueue assignToQueue(RMApp rmApp, String queueName, String user) {
FSLeafQueue queue = null;
String appRejectMsg = null;
try {
QueuePlacementPolicy placementPolicy = allocConf.getPlacementPolicy();
queueName = placementPolicy.assignAppToQueue(queueName, user);
if (queueName == null) {
appRejectMsg = "Application rejected by queue placement policy";
} else {
queue = queueMgr.getLeafQueue(queueName, true);
if (queue == null) {
appRejectMsg = queueName + " is not a leaf queue";
}
}
} catch (IOException ioe) {
appRejectMsg = "Error assigning app to queue " + queueName;
}
if (appRejectMsg != null && rmApp != null) {
LOG.error(appRejectMsg);
rmContext.getDispatcher().getEventHandler().handle(
new RMAppRejectedEvent(rmApp.getApplicationId(), appRejectMsg));
return null;
}
if (rmApp != null) {
rmApp.setQueue(queue.getName());
} else {
LOG.error("Couldn't find RM app to set queue name on");
}
return queue;
}
private synchronized void removeApplication(ApplicationId applicationId,
RMAppState finalState) {
SchedulerApplication<FSAppAttempt> application =
applications.get(applicationId);
if (application == null){
LOG.warn("Couldn't find application " + applicationId);
return;
}
application.stop(finalState);
applications.remove(applicationId);
}
private synchronized void removeApplicationAttempt(
ApplicationAttemptId applicationAttemptId,
RMAppAttemptState rmAppAttemptFinalState, boolean keepContainers) {
LOG.info("Application " + applicationAttemptId + " is done." +
" finalState=" + rmAppAttemptFinalState);
SchedulerApplication<FSAppAttempt> application =
applications.get(applicationAttemptId.getApplicationId());
FSAppAttempt attempt = getSchedulerApp(applicationAttemptId);
if (attempt == null || application == null) {
LOG.info("Unknown application " + applicationAttemptId + " has completed!");
return;
}
// Release all the running containers
for (RMContainer rmContainer : attempt.getLiveContainers()) {
if (keepContainers
&& rmContainer.getState().equals(RMContainerState.RUNNING)) {
// do not kill the running container in the case of work-preserving AM
// restart.
LOG.info("Skip killing " + rmContainer.getContainerId());
continue;
}
completedContainer(rmContainer,
SchedulerUtils.createAbnormalContainerStatus(
rmContainer.getContainerId(),
SchedulerUtils.COMPLETED_APPLICATION),
RMContainerEventType.KILL);
}
// Release all reserved containers
for (RMContainer rmContainer : attempt.getReservedContainers()) {
completedContainer(rmContainer,
SchedulerUtils.createAbnormalContainerStatus(
rmContainer.getContainerId(),
"Application Complete"),
RMContainerEventType.KILL);
}
// Clean up pending requests, metrics etc.
attempt.stop(rmAppAttemptFinalState);
// Inform the queue
FSLeafQueue queue = queueMgr.getLeafQueue(attempt.getQueue()
.getQueueName(), false);
boolean wasRunnable = queue.removeApp(attempt);
if (wasRunnable) {
maxRunningEnforcer.untrackRunnableApp(attempt);
maxRunningEnforcer.updateRunnabilityOnAppRemoval(attempt,
attempt.getQueue());
} else {
maxRunningEnforcer.untrackNonRunnableApp(attempt);
}
}
/**
* Clean up a completed container.
*/
@Override
protected synchronized void completedContainer(RMContainer rmContainer,
ContainerStatus containerStatus, RMContainerEventType event) {
if (rmContainer == null) {
LOG.info("Null container completed...");
return;
}
Container container = rmContainer.getContainer();
// Get the application for the finished container
FSAppAttempt application =
getCurrentAttemptForContainer(container.getId());
ApplicationId appId =
container.getId().getApplicationAttemptId().getApplicationId();
if (application == null) {
LOG.info("Container " + container + " of" +
" unknown application attempt " + appId +
" completed with event " + event);
return;
}
// Get the node on which the container was allocated
FSSchedulerNode node = getFSSchedulerNode(container.getNodeId());
if (rmContainer.getState() == RMContainerState.RESERVED) {
application.unreserve(rmContainer.getReservedPriority(), node);
} else {
application.containerCompleted(rmContainer, containerStatus, event);
node.releaseContainer(container);
updateRootQueueMetrics();
}
LOG.info("Application attempt " + application.getApplicationAttemptId()
+ " released container " + container.getId() + " on node: " + node
+ " with event: " + event);
}
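  /**
   * Register a newly added node: track it, grow the cluster resource, refresh
   * root queue metrics and the maximum allocation, and recompute steady fair
   * shares for the new capacity.
   */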
private synchronized void addNode(RMNode node) {
FSSchedulerNode schedulerNode = new FSSchedulerNode(node, usePortForNodeName);
nodes.put(node.getNodeID(), schedulerNode);
Resources.addTo(clusterResource, node.getTotalCapability());
updateRootQueueMetrics();
updateMaximumAllocation(schedulerNode, true);
queueMgr.getRootQueue().setSteadyFairShare(clusterResource);
queueMgr.getRootQueue().recomputeSteadyShares();
LOG.info("Added node " + node.getNodeAddress() +
" cluster capacity: " + clusterResource);
}
private synchronized void removeNode(RMNode rmNode) {
FSSchedulerNode node = getFSSchedulerNode(rmNode.getNodeID());
// This can occur when an UNHEALTHY node reconnects
if (node == null) {
return;
}
Resources.subtractFrom(clusterResource, rmNode.getTotalCapability());
updateRootQueueMetrics();
// Remove running containers
List<RMContainer> runningContainers = node.getRunningContainers();
for (RMContainer container : runningContainers) {
completedContainer(container,
SchedulerUtils.createAbnormalContainerStatus(
container.getContainerId(),
SchedulerUtils.LOST_CONTAINER),
RMContainerEventType.KILL);
}
// Remove reservations, if any
RMContainer reservedContainer = node.getReservedContainer();
if (reservedContainer != null) {
completedContainer(reservedContainer,
SchedulerUtils.createAbnormalContainerStatus(
reservedContainer.getContainerId(),
SchedulerUtils.LOST_CONTAINER),
RMContainerEventType.KILL);
}
nodes.remove(rmNode.getNodeID());
queueMgr.getRootQueue().setSteadyFairShare(clusterResource);
queueMgr.getRootQueue().recomputeSteadyShares();
updateMaximumAllocation(node, false);
LOG.info("Removed node " + rmNode.getNodeAddress() +
" cluster capacity: " + clusterResource);
}
@Override
public Allocation allocate(ApplicationAttemptId appAttemptId,
List<ResourceRequest> ask, List<ContainerId> release,
List<String> blacklistAdditions, List<String> blacklistRemovals) {
// Make sure this application exists
FSAppAttempt application = getSchedulerApp(appAttemptId);
if (application == null) {
LOG.info("Calling allocate on removed " +
"or non existant application " + appAttemptId);
return EMPTY_ALLOCATION;
}
// Sanity check
SchedulerUtils.normalizeRequests(ask, DOMINANT_RESOURCE_CALCULATOR,
clusterResource, minimumAllocation, getMaximumResourceCapability(),
incrAllocation);
// Set amResource for this app
if (!application.getUnmanagedAM() && ask.size() == 1
&& application.getLiveContainers().isEmpty()) {
application.setAMResource(ask.get(0).getCapability());
}
// Release containers
releaseContainers(release, application);
synchronized (application) {
if (!ask.isEmpty()) {
if (LOG.isDebugEnabled()) {
LOG.debug("allocate: pre-update" +
" applicationAttemptId=" + appAttemptId +
" application=" + application.getApplicationId());
}
application.showRequests();
// Update application requests
application.updateResourceRequests(ask);
application.showRequests();
}
if (LOG.isDebugEnabled()) {
LOG.debug("allocate: post-update" +
" applicationAttemptId=" + appAttemptId +
" #ask=" + ask.size() +
" reservation= " + application.getCurrentReservation());
LOG.debug("Preempting " + application.getPreemptionContainers().size()
+ " container(s)");
}
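// Collect the IDs of containers marked for preemption so they can be returned to the application in the Allocation response.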
Set<ContainerId> preemptionContainerIds = new HashSet<ContainerId>();
for (RMContainer container : application.getPreemptionContainers()) {
preemptionContainerIds.add(container.getContainerId());
}
application.updateBlacklist(blacklistAdditions, blacklistRemovals);
ContainersAndNMTokensAllocation allocation =
application.pullNewlyAllocatedContainersAndNMTokens();
Resource headroom = application.getHeadroom();
application.setApplicationHeadroomForMetrics(headroom);
return new Allocation(allocation.getContainerList(), headroom,
preemptionContainerIds, null, null, allocation.getNMTokenList());
}
}
/**
* Process a heartbeat update from a node.
*/
private synchronized void nodeUpdate(RMNode nm) {
long start = getClock().getTime();
if (LOG.isDebugEnabled()) {
LOG.debug("nodeUpdate: " + nm + " cluster capacity: " + clusterResource);
}
eventLog.log("HEARTBEAT", nm.getHostName());
FSSchedulerNode node = getFSSchedulerNode(nm.getNodeID());
List<UpdatedContainerInfo> containerInfoList = nm.pullContainerUpdates();
List<ContainerStatus> newlyLaunchedContainers = new ArrayList<ContainerStatus>();
List<ContainerStatus> completedContainers = new ArrayList<ContainerStatus>();
for(UpdatedContainerInfo containerInfo : containerInfoList) {
newlyLaunchedContainers.addAll(containerInfo.getNewlyLaunchedContainers());
completedContainers.addAll(containerInfo.getCompletedContainers());
}
// Processing the newly launched containers
for (ContainerStatus launchedContainer : newlyLaunchedContainers) {
containerLaunchedOnNode(launchedContainer.getContainerId(), node);
}
// Process completed containers
for (ContainerStatus completedContainer : completedContainers) {
ContainerId containerId = completedContainer.getContainerId();
LOG.debug("Container FINISHED: " + containerId);
completedContainer(getRMContainer(containerId),
completedContainer, RMContainerEventType.FINISHED);
}
if (continuousSchedulingEnabled) {
if (!completedContainers.isEmpty()) {
attemptScheduling(node);
}
} else {
/* Start Wajih
Adding Timers to check decision delays*/
long beforeTime = System.currentTimeMillis();
/* End */
attemptScheduling(node);
/* Start Wajih
Adding Timers to check decision delays*/
long afterTime = System.currentTimeMillis();
int dec_time = (int)(afterTime-beforeTime);
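// Record this heartbeat's scheduling latency in the per-millisecond histogram; assumes a decision never takes longer than the histogram covers.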
decision_time[dec_time]++;
/* End */
}
long duration = getClock().getTime() - start;
fsOpDurations.addNodeUpdateDuration(duration);
}
void continuousSchedulingAttempt() throws InterruptedException {
long start = getClock().getTime();
List<NodeId> nodeIdList = new ArrayList<NodeId>(nodes.keySet());
// Sort the nodes by space available on them, so that we offer
// containers on emptier nodes first, facilitating an even spread. This
// requires holding the scheduler lock, so that the space available on a
// node doesn't change during the sort.
synchronized (this) {
Collections.sort(nodeIdList, nodeAvailableResourceComparator);
}
// iterate all nodes
for (NodeId nodeId : nodeIdList) {
FSSchedulerNode node = getFSSchedulerNode(nodeId);
try {
if (node != null && Resources.fitsIn(minimumAllocation,
node.getAvailableResource())) {
attemptScheduling(node);
}
} catch (Throwable ex) {
LOG.error("Error while attempting scheduling for node " + node +
": " + ex.toString(), ex);
}
}
long duration = getClock().getTime() - start;
fsOpDurations.addContinuousSchedulingRunDuration(duration);
}
/** Sort nodes by available resource */
private class NodeAvailableResourceComparator implements Comparator<NodeId> {
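// Orders nodes so that those with more available resources come first; nodes no longer tracked by the scheduler sort last.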
@Override
public int compare(NodeId n1, NodeId n2) {
if (!nodes.containsKey(n1)) {
return 1;
}
if (!nodes.containsKey(n2)) {
return -1;
}
return RESOURCE_CALCULATOR.compare(clusterResource,
nodes.get(n2).getAvailableResource(),
nodes.get(n1).getAvailableResource());
}
}
private synchronized void attemptScheduling(FSSchedulerNode node) {
if (rmContext.isWorkPreservingRecoveryEnabled()
&& !rmContext.isSchedulerReadyForAllocatingContainers()) {
return;
}
// Assign new containers...
// 1. Check for reserved applications
// 2. Schedule if there are no reservations
FSAppAttempt reservedAppSchedulable = node.getReservedAppSchedulable();
if (reservedAppSchedulable != null) {
Priority reservedPriority = node.getReservedContainer().getReservedPriority();
FSQueue queue = reservedAppSchedulable.getQueue();
if (!reservedAppSchedulable.hasContainerForNode(reservedPriority, node)
|| !fitsInMaxShare(queue,
node.getReservedContainer().getReservedResource())) {
// Don't hold the reservation if app can no longer use it
LOG.info("Releasing reservation that cannot be satisfied for application "
+ reservedAppSchedulable.getApplicationAttemptId()
+ " on node " + node);
reservedAppSchedulable.unreserve(reservedPriority, node);
reservedAppSchedulable = null;
} else {
// Reservation exists; try to fulfill the reservation
if (LOG.isDebugEnabled()) {
LOG.debug("Trying to fulfill reservation for application "
+ reservedAppSchedulable.getApplicationAttemptId()
+ " on node: " + node);
}
node.getReservedAppSchedulable().assignReservedContainer(node);
}
}
if (reservedAppSchedulable == null) {
// No reservation, schedule at queue which is farthest below fair share
int assignedContainers = 0;
while (node.getReservedContainer() == null) {
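// Keep offering this node to the root queue until nothing more is assigned, a reservation appears on the node, or the assignMultiple/maxAssign limits are hit.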
boolean assignedContainer = false;
if (!queueMgr.getRootQueue().assignContainer(node).equals(
Resources.none())) {
assignedContainers++;
assignedContainer = true;
}
if (!assignedContainer) { break; }
if (!assignMultiple) { break; }
if ((assignedContainers >= maxAssign) && (maxAssign > 0)) { break; }
}
}
updateRootQueueMetrics();
}
static boolean fitsInMaxShare(FSQueue queue, Resource
additionalResource) {
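// The queue's current usage plus the additional resource must fit within its max share and, recursively, within every ancestor's max share.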
Resource usagePlusAddition =
Resources.add(queue.getResourceUsage(), additionalResource);
if (!Resources.fitsIn(usagePlusAddition, queue.getMaxShare())) {
return false;
}
FSQueue parentQueue = queue.getParent();
if (parentQueue != null) {
return fitsInMaxShare(parentQueue, additionalResource);
}
return true;
}
public FSAppAttempt getSchedulerApp(ApplicationAttemptId appAttemptId) {
return super.getApplicationAttempt(appAttemptId);
}
@Override
public ResourceCalculator getResourceCalculator() {
return RESOURCE_CALCULATOR;
}
/**
* Subqueue metrics might be a little out of date because fair shares are
 * recalculated at the update interval, but the root queue metrics need to
* be updated synchronously with allocations and completions so that cluster
* metrics will be consistent.
*/
private void updateRootQueueMetrics() {
rootMetrics.setAvailableResourcesToQueue(
Resources.subtract(
clusterResource, rootMetrics.getAllocatedResources()));
}
/**
* Check if preemption is enabled and the utilization threshold for
* preemption is met.
*
* @return true if preemption should be attempted, false otherwise.
*/
private boolean shouldAttemptPreemption() {
if (preemptionEnabled) {
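// Preempt only when the busier of memory or vcore utilization exceeds the configured threshold.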
return (preemptionUtilizationThreshold < Math.max(
(float) rootMetrics.getAllocatedMB() / clusterResource.getMemory(),
(float) rootMetrics.getAllocatedVirtualCores() /
clusterResource.getVirtualCores()));
}
return false;
}
@Override
public QueueMetrics getRootQueueMetrics() {
return rootMetrics;
}
@Override
public void handle(SchedulerEvent event) {
switch (event.getType()) {
case NODE_ADDED:
if (!(event instanceof NodeAddedSchedulerEvent)) {
throw new RuntimeException("Unexpected event type: " + event);
}
NodeAddedSchedulerEvent nodeAddedEvent = (NodeAddedSchedulerEvent)event;
addNode(nodeAddedEvent.getAddedRMNode());
recoverContainersOnNode(nodeAddedEvent.getContainerReports(),
nodeAddedEvent.getAddedRMNode());
break;
case NODE_REMOVED:
if (!(event instanceof NodeRemovedSchedulerEvent)) {
throw new RuntimeException("Unexpected event type: " + event);
}
NodeRemovedSchedulerEvent nodeRemovedEvent = (NodeRemovedSchedulerEvent)event;
removeNode(nodeRemovedEvent.getRemovedRMNode());
break;
case NODE_UPDATE:
if (!(event instanceof NodeUpdateSchedulerEvent)) {
throw new RuntimeException("Unexpected event type: " + event);
}
NodeUpdateSchedulerEvent nodeUpdatedEvent = (NodeUpdateSchedulerEvent)event;
nodeUpdate(nodeUpdatedEvent.getRMNode());
break;
case APP_ADDED:
if (!(event instanceof AppAddedSchedulerEvent)) {
throw new RuntimeException("Unexpected event type: " + event);
}
AppAddedSchedulerEvent appAddedEvent = (AppAddedSchedulerEvent) event;
String queueName =
resolveReservationQueueName(appAddedEvent.getQueue(),
appAddedEvent.getApplicationId(),
appAddedEvent.getReservationID());
if (queueName != null) {
addApplication(appAddedEvent.getApplicationId(),
queueName, appAddedEvent.getUser(),
appAddedEvent.getIsAppRecovering());
}
break;
case APP_REMOVED:
if (!(event instanceof AppRemovedSchedulerEvent)) {
throw new RuntimeException("Unexpected event type: " + event);
}
AppRemovedSchedulerEvent appRemovedEvent = (AppRemovedSchedulerEvent)event;
removeApplication(appRemovedEvent.getApplicationID(),
appRemovedEvent.getFinalState());
break;
case NODE_RESOURCE_UPDATE:
if (!(event instanceof NodeResourceUpdateSchedulerEvent)) {
throw new RuntimeException("Unexpected event type: " + event);
}
NodeResourceUpdateSchedulerEvent nodeResourceUpdatedEvent =
(NodeResourceUpdateSchedulerEvent)event;
updateNodeResource(nodeResourceUpdatedEvent.getRMNode(),
nodeResourceUpdatedEvent.getResourceOption());
break;
case APP_ATTEMPT_ADDED:
if (!(event instanceof AppAttemptAddedSchedulerEvent)) {
throw new RuntimeException("Unexpected event type: " + event);
}
AppAttemptAddedSchedulerEvent appAttemptAddedEvent =
(AppAttemptAddedSchedulerEvent) event;
addApplicationAttempt(appAttemptAddedEvent.getApplicationAttemptId(),
appAttemptAddedEvent.getTransferStateFromPreviousAttempt(),
appAttemptAddedEvent.getIsAttemptRecovering());
break;
case APP_ATTEMPT_REMOVED:
if (!(event instanceof AppAttemptRemovedSchedulerEvent)) {
throw new RuntimeException("Unexpected event type: " + event);
}
AppAttemptRemovedSchedulerEvent appAttemptRemovedEvent =
(AppAttemptRemovedSchedulerEvent) event;
removeApplicationAttempt(
appAttemptRemovedEvent.getApplicationAttemptID(),
appAttemptRemovedEvent.getFinalAttemptState(),
appAttemptRemovedEvent.getKeepContainersAcrossAppAttempts());
break;
case CONTAINER_EXPIRED:
if (!(event instanceof ContainerExpiredSchedulerEvent)) {
throw new RuntimeException("Unexpected event type: " + event);
}
ContainerExpiredSchedulerEvent containerExpiredEvent =
(ContainerExpiredSchedulerEvent)event;
ContainerId containerId = containerExpiredEvent.getContainerId();
completedContainer(getRMContainer(containerId),
SchedulerUtils.createAbnormalContainerStatus(
containerId,
SchedulerUtils.EXPIRED_CONTAINER),
RMContainerEventType.EXPIRE);
break;
default:
LOG.error("Unknown event arrived at FairScheduler: " + event.toString());
}
}
private synchronized String resolveReservationQueueName(String queueName,
ApplicationId applicationId, ReservationId reservationID) {
FSQueue queue = queueMgr.getQueue(queueName);
if ((queue == null) || !allocConf.isReservable(queue.getQueueName())) {
return queueName;
}
// Use fully specified name from now on (including root. prefix)
queueName = queue.getQueueName();
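// This is a reservable (plan) queue: route the app to the matching reservation child queue, or to the plan's default child queue when no reservation is given.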
if (reservationID != null) {
String resQName = queueName + "." + reservationID.toString();
queue = queueMgr.getQueue(resQName);
if (queue == null) {
String message =
"Application "
+ applicationId
+ " submitted to a reservation which is not yet currently active: "
+ resQName;
this.rmContext.getDispatcher().getEventHandler()
.handle(new RMAppRejectedEvent(applicationId, message));
return null;
}
if (!queue.getParent().getQueueName().equals(queueName)) {
String message =
"Application: " + applicationId + " submitted to a reservation "
+ resQName + " which does not belong to the specified queue: "
+ queueName;
this.rmContext.getDispatcher().getEventHandler()
.handle(new RMAppRejectedEvent(applicationId, message));
return null;
}
// use the reservation queue to run the app
queueName = resQName;
} else {
// use the default child queue of the plan for unreserved apps
queueName = getDefaultQueueForPlanQueue(queueName);
}
return queueName;
}
private String getDefaultQueueForPlanQueue(String queueName) {
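// Builds the plan's default child queue name by appending "<planName>" + DEFAULT_QUEUE_SUFFIX under the plan queue.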
String planName = queueName.substring(queueName.lastIndexOf(".") + 1);
queueName = queueName + "." + planName + ReservationConstants.DEFAULT_QUEUE_SUFFIX;
return queueName;
}
@Override
public void recover(RMState state) throws Exception {
// NOT IMPLEMENTED
}
public synchronized void setRMContext(RMContext rmContext) {
this.rmContext = rmContext;
}
private void initScheduler(Configuration conf) throws IOException {
synchronized (this) {
this.conf = new FairSchedulerConfiguration(conf);
validateConf(this.conf);
minimumAllocation = this.conf.getMinimumAllocation();
initMaximumResourceCapability(this.conf.getMaximumAllocation());
incrAllocation = this.conf.getIncrementAllocation();
continuousSchedulingEnabled = this.conf.isContinuousSchedulingEnabled();
continuousSchedulingSleepMs =
this.conf.getContinuousSchedulingSleepMs();
nodeLocalityThreshold = this.conf.getLocalityThresholdNode();
rackLocalityThreshold = this.conf.getLocalityThresholdRack();
nodeLocalityDelayMs = this.conf.getLocalityDelayNodeMs();
rackLocalityDelayMs = this.conf.getLocalityDelayRackMs();
preemptionEnabled = this.conf.getPreemptionEnabled();
preemptionUtilizationThreshold =
this.conf.getPreemptionUtilizationThreshold();
assignMultiple = this.conf.getAssignMultiple();
maxAssign = this.conf.getMaxAssign();
sizeBasedWeight = this.conf.getSizeBasedWeight();
preemptionInterval = this.conf.getPreemptionInterval();
waitTimeBeforeKill = this.conf.getWaitTimeBeforeKill();
usePortForNodeName = this.conf.getUsePortForNodeName();
updateInterval = this.conf.getUpdateInterval();
if (updateInterval < 0) {
updateInterval = FairSchedulerConfiguration.DEFAULT_UPDATE_INTERVAL_MS;
LOG.warn(FairSchedulerConfiguration.UPDATE_INTERVAL_MS
+ " is invalid, so using default value " +
FairSchedulerConfiguration.DEFAULT_UPDATE_INTERVAL_MS
+ " ms instead");
}
rootMetrics = FSQueueMetrics.forQueue("root", null, true, conf);
fsOpDurations = FSOpDurations.getInstance(true);
// This stores per-application scheduling information
this.applications = new ConcurrentHashMap<
ApplicationId, SchedulerApplication<FSAppAttempt>>();
this.eventLog = new FairSchedulerEventLog();
eventLog.init(this.conf);
allocConf = new AllocationConfiguration(conf);
try {
queueMgr.initialize(conf);
} catch (Exception e) {
throw new IOException("Failed to start FairScheduler", e);
}
updateThread = new UpdateThread();
updateThread.setName("FairSchedulerUpdateThread");
updateThread.setDaemon(true);
if (continuousSchedulingEnabled) {
// start continuous scheduling thread
schedulingThread = new ContinuousSchedulingThread();
schedulingThread.setName("FairSchedulerContinuousScheduling");
schedulingThread.setDaemon(true);
}
}
allocsLoader.init(conf);
allocsLoader.setReloadListener(new AllocationReloadListener());
// If we fail to load allocations file on initialize, we want to fail
// immediately. After a successful load, exceptions on future reloads
// will just result in leaving things as they are.
try {
allocsLoader.reloadAllocations();
} catch (Exception e) {
throw new IOException("Failed to initialize FairScheduler", e);
}
}
private synchronized void startSchedulerThreads() {
Preconditions.checkNotNull(updateThread, "updateThread is null");
Preconditions.checkNotNull(allocsLoader, "allocsLoader is null");
updateThread.start();
if (continuousSchedulingEnabled) {
Preconditions.checkNotNull(schedulingThread, "schedulingThread is null");
schedulingThread.start();
}
allocsLoader.start();
}
@Override
public void serviceInit(Configuration conf) throws Exception {
initScheduler(conf);
super.serviceInit(conf);
}
@Override
public void serviceStart() throws Exception {
startSchedulerThreads();
super.serviceStart();
}
@Override
public void serviceStop() throws Exception {
synchronized (this) {
if (updateThread != null) {
updateThread.interrupt();
updateThread.join(THREAD_JOIN_TIMEOUT_MS);
}
if (continuousSchedulingEnabled) {
if (schedulingThread != null) {
schedulingThread.interrupt();
schedulingThread.join(THREAD_JOIN_TIMEOUT_MS);
}
}
if (allocsLoader != null) {
allocsLoader.stop();
}
}
super.serviceStop();
}
@Override
public void reinitialize(Configuration conf, RMContext rmContext)
throws IOException {
try {
allocsLoader.reloadAllocations();
} catch (Exception e) {
LOG.error("Failed to reload allocations file", e);
}
}
@Override
public QueueInfo getQueueInfo(String queueName, boolean includeChildQueues,
boolean recursive) throws IOException {
if (!queueMgr.exists(queueName)) {
throw new IOException("queue " + queueName + " does not exist");
}
return queueMgr.getQueue(queueName).getQueueInfo(includeChildQueues,
recursive);
}
@Override
public List<QueueUserACLInfo> getQueueUserAclInfo() {
UserGroupInformation user;
try {
user = UserGroupInformation.getCurrentUser();
} catch (IOException ioe) {
return new ArrayList<QueueUserACLInfo>();
}
return queueMgr.getRootQueue().getQueueUserAclInfo(user);
}
@Override
public int getNumClusterNodes() {
return nodes.size();
}
@Override
public synchronized boolean checkAccess(UserGroupInformation callerUGI,
QueueACL acl, String queueName) {
FSQueue queue = getQueueManager().getQueue(queueName);
if (queue == null) {
if (LOG.isDebugEnabled()) {
LOG.debug("ACL not found for queue access-type " + acl
+ " for queue " + queueName);
}
return false;
}
return queue.hasAccess(acl, callerUGI);
}
public AllocationConfiguration getAllocationConfiguration() {
return allocConf;
}
private class AllocationReloadListener implements
AllocationFileLoaderService.Listener {
@Override
public void onReload(AllocationConfiguration queueInfo) {
// Commit the reload; also create any queue defined in the alloc file
// if it does not already exist, so it can be displayed on the web UI.
synchronized (FairScheduler.this) {
allocConf = queueInfo;
allocConf.getDefaultSchedulingPolicy().initialize(clusterResource);
queueMgr.updateAllocationConfiguration(allocConf);
maxRunningEnforcer.updateRunnabilityOnReload();
}
}
}
@Override
public List<ApplicationAttemptId> getAppsInQueue(String queueName) {
FSQueue queue = queueMgr.getQueue(queueName);
if (queue == null) {
return null;
}
List<ApplicationAttemptId> apps = new ArrayList<ApplicationAttemptId>();
queue.collectSchedulerApplications(apps);
return apps;
}
@Override
public synchronized String moveApplication(ApplicationId appId,
String queueName) throws YarnException {
SchedulerApplication<FSAppAttempt> app = applications.get(appId);
if (app == null) {
throw new YarnException("App to be moved " + appId + " not found.");
}
FSAppAttempt attempt = (FSAppAttempt) app.getCurrentAppAttempt();
// To serialize with FairScheduler#allocate, synchronize on app attempt
synchronized (attempt) {
FSLeafQueue oldQueue = (FSLeafQueue) app.getQueue();
String destQueueName = handleMoveToPlanQueue(queueName);
FSLeafQueue targetQueue = queueMgr.getLeafQueue(destQueueName, false);
if (targetQueue == null) {
throw new YarnException("Target queue " + queueName
+ " not found or is not a leaf queue.");
}
if (targetQueue == oldQueue) {
return oldQueue.getQueueName();
}
if (oldQueue.isRunnableApp(attempt)) {
verifyMoveDoesNotViolateConstraints(attempt, oldQueue, targetQueue);
}
executeMove(app, attempt, oldQueue, targetQueue);
return targetQueue.getQueueName();
}
}
private void verifyMoveDoesNotViolateConstraints(FSAppAttempt app,
FSLeafQueue oldQueue, FSLeafQueue targetQueue) throws YarnException {
String queueName = targetQueue.getQueueName();
ApplicationAttemptId appAttId = app.getApplicationAttemptId();
// When checking maxResources and maxRunningApps, only need to consider
// queues before the lowest common ancestor of the two queues because the
// total running apps in queues above will not be changed.
FSQueue lowestCommonAncestor = findLowestCommonAncestorQueue(oldQueue,
targetQueue);
Resource consumption = app.getCurrentConsumption();
// Check whether the move would go over maxRunningApps or maxShare
FSQueue cur = targetQueue;
while (cur != lowestCommonAncestor) {
// maxRunningApps
if (cur.getNumRunnableApps() == allocConf.getQueueMaxApps(cur.getQueueName())) {
throw new YarnException("Moving app attempt " + appAttId + " to queue "
+ queueName + " would violate queue maxRunningApps constraints on"
+ " queue " + cur.getQueueName());
}
// maxShare
if (!Resources.fitsIn(Resources.add(cur.getResourceUsage(), consumption),
cur.getMaxShare())) {
throw new YarnException("Moving app attempt " + appAttId + " to queue "
+ queueName + " would violate queue maxShare constraints on"
+ " queue " + cur.getQueueName());
}
cur = cur.getParent();
}
}
/**
* Helper for moveApplication, which has appropriate synchronization, so all
* operations will be atomic.
*/
private void executeMove(SchedulerApplication<FSAppAttempt> app,
FSAppAttempt attempt, FSLeafQueue oldQueue, FSLeafQueue newQueue) {
boolean wasRunnable = oldQueue.removeApp(attempt);
// if app was not runnable before, it may be runnable now
boolean nowRunnable = maxRunningEnforcer.canAppBeRunnable(newQueue,
attempt.getUser());
if (wasRunnable && !nowRunnable) {
throw new IllegalStateException("Should have already verified that app "
+ attempt.getApplicationId() + " would be runnable in new queue");
}
if (wasRunnable) {
maxRunningEnforcer.untrackRunnableApp(attempt);
} else if (nowRunnable) {
// App has changed from non-runnable to runnable
maxRunningEnforcer.untrackNonRunnableApp(attempt);
}
attempt.move(newQueue); // This updates all the metrics
app.setQueue(newQueue);
newQueue.addApp(attempt, nowRunnable);
if (nowRunnable) {
maxRunningEnforcer.trackRunnableApp(attempt);
}
if (wasRunnable) {
maxRunningEnforcer.updateRunnabilityOnAppRemoval(attempt, oldQueue);
}
}
@VisibleForTesting
FSQueue findLowestCommonAncestorQueue(FSQueue queue1, FSQueue queue2) {
// Because queue names include ancestors, separated by periods, we can find
// the lowest common ancestor by going from the start of the names until
// there's a character that doesn't match.
String name1 = queue1.getName();
String name2 = queue2.getName();
// We keep track of the last period we encounter to avoid returning root.apple
// when the queues are root.applepie and root.appletart
int lastPeriodIndex = -1;
for (int i = 0; i < Math.max(name1.length(), name2.length()); i++) {
if (name1.length() <= i || name2.length() <= i ||
name1.charAt(i) != name2.charAt(i)) {
return queueMgr.getQueue(name1.substring(0, lastPeriodIndex));
} else if (name1.charAt(i) == '.') {
lastPeriodIndex = i;
}
}
return queue1; // names are identical
}
/**
* Process resource update on a node and update Queue.
*/
@Override
public synchronized void updateNodeResource(RMNode nm,
ResourceOption resourceOption) {
super.updateNodeResource(nm, resourceOption);
updateRootQueueMetrics();
queueMgr.getRootQueue().setSteadyFairShare(clusterResource);
queueMgr.getRootQueue().recomputeSteadyShares();
}
/** {@inheritDoc} */
@Override
public EnumSet<SchedulerResourceTypes> getSchedulingResourceTypes() {
return EnumSet
.of(SchedulerResourceTypes.MEMORY, SchedulerResourceTypes.CPU);
}
@Override
public Set<String> getPlanQueues() throws YarnException {
Set<String> planQueues = new HashSet<String>();
for (FSQueue fsQueue : queueMgr.getQueues()) {
String queueName = fsQueue.getName();
if (allocConf.isReservable(queueName)) {
planQueues.add(queueName);
}
}
return planQueues;
}
@Override
public void setEntitlement(String queueName,
QueueEntitlement entitlement) throws YarnException {
FSLeafQueue reservationQueue = queueMgr.getLeafQueue(queueName, false);
if (reservationQueue == null) {
throw new YarnException("Target queue " + queueName
+ " not found or is not a leaf queue.");
}
reservationQueue.setWeights(entitlement.getCapacity());
// TODO Does MaxCapacity need to be set for fairScheduler ?
}
/**
* Only supports removing empty leaf queues
* @param queueName name of queue to remove
 * @throws YarnException if the queue to remove is either not a leaf queue or
 * is not empty
*/
@Override
public void removeQueue(String queueName) throws YarnException {
FSLeafQueue reservationQueue = queueMgr.getLeafQueue(queueName, false);
if (reservationQueue != null) {
if (!queueMgr.removeLeafQueue(queueName)) {
throw new YarnException("Could not remove queue " + queueName + " as " +
"its either not a leaf queue or its not empty");
}
}
}
private String handleMoveToPlanQueue(String targetQueueName) {
FSQueue dest = queueMgr.getQueue(targetQueueName);
if (dest != null && allocConf.isReservable(dest.getQueueName())) {
// use the default child reservation queue of the plan
targetQueueName = getDefaultQueueForPlanQueue(targetQueueName);
}
return targetQueueName;
}
/* Start-Wajih Get decision stats */
@Override
public String getDecisionTimeStats() {
String my_string = Arrays.toString(decision_time);
return my_string ;
/*
String decision_str="";
long tmp_sum=0;
long tmp_sum2=0;
for(int i=0;i<=5;i++)
tmp_sum+=decisionArray[i];
decision_str+=" 0-5: "+tmp_sum+" "+((int)((tmp_sum*1.0/totalDec)*10000))/100.0+"%";
tmp_sum2+=tmp_sum;
tmp_sum=0;
for(int i=6;i<=10;i++)
tmp_sum+=decisionArray[i];
decision_str+=" 5-10: "+tmp_sum+" "+((int)((tmp_sum*1.0/totalDec)*10000))/100.0+"%";
tmp_sum2+=tmp_sum;
tmp_sum=0;
for(int i=11;i<=25;i++)
tmp_sum+=decisionArray[i];
decision_str+=" 10-25: "+tmp_sum+" "+((int)((tmp_sum*1.0/totalDec)*10000))/100.0+"%";
tmp_sum2+=tmp_sum;
tmp_sum=0;
for(int i=26;i<=50;i++)
tmp_sum+=decisionArray[i];
decision_str+=" 25-50: "+tmp_sum+" "+((int)((tmp_sum*1.0/totalDec)*10000))/100.0+"%";
tmp_sum2+=tmp_sum;
return decision_str;
*/
}
/* END -wajih*/
}
|
Calculating Scheduling Decision timings
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FairScheduler.java
|
Calculating Scheduling Decision timings
|
<ide><path>hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FairScheduler.java
<ide> protected int maxAssign; // Max containers to assign per heartbeat
<ide>
<ide>
<add>
<add>
<ide> /* Start -Wajih Measuring decision timings*/
<add> public int dec_array_size=10000;
<ide> public int[] decision_time;
<add> public long no_of_decisions;
<add>
<ide> /* End - Wajih*/
<ide>
<ide> @VisibleForTesting
<ide> queueMgr = new QueueManager(this);
<ide> maxRunningEnforcer = new MaxRunningAppsEnforcer(this);
<ide> /*Start Wajih Measuring decision timings*/
<del> decision_time = new int[10000];
<add> decision_time = new int[dec_array_size];
<ide> /* End Wajih */
<ide> }
<ide>
<ide> }
<ide> }
<ide>
<add>
<add>
<ide> /**
<ide> * Process a heartbeat update from a node.
<ide> */
<ide> completedContainer, RMContainerEventType.FINISHED);
<ide> }
<ide>
<add> /* Start Wajih
<add> Adding Timers to check decision delays*/
<add> no_of_decisions++;
<add> /* End */
<add>
<add>
<ide> if (continuousSchedulingEnabled) {
<ide> if (!completedContainers.isEmpty()) {
<ide> attemptScheduling(node);
<ide> }
<ide> } else {
<del>
<add>
<add>
<add>
<ide> /* Start Wajih
<ide> Adding Timers to check decision delays*/
<ide> long beforeTime = System.currentTimeMillis();
<ide> @Override
<ide> public String getDecisionTimeStats() {
<ide>
<del> String my_string = Arrays.toString(decision_time);
<del> return my_string ;
<add> // Track the fastest and slowest observed decision times (in ms).
<add> int max_time = 0;
<add> int min_time = 0;
<add>
<add> // Break the decision times down into 4 buckets.
<add> long part_0_5 = 0;
<add> long part_5_10 = 0;
<add> long part_10_25 = 0;
<add> long part_25_inf = 0;
<add> String dec_string = " ";
<add> boolean flag = true;
<add>
<add> for (int i = 0; i < dec_array_size; i++) {
<add>   if (i > 0 && i <= 5)
<add>     part_0_5 += decision_time[i];
<add>   if (i > 5 && i <= 10)
<add>     part_5_10 += decision_time[i];
<add>   if (i > 10 && i <= 25)
<add>     part_10_25 += decision_time[i];
<add>   if (i > 25)
<add>     part_25_inf += decision_time[i];
<add>   if (flag && decision_time[i] >= 1) {
<add>     min_time = i;
<add>     flag = false;
<add>   }
<add>   if (decision_time[i] >= 1) {
<add>     max_time = i;
<add>   }
<add> }
<add>
<add> // Build the report once, after the whole histogram has been scanned.
<add> dec_string += "Max Time : ";
<add> dec_string += max_time;
<add> dec_string += " ---- ";
<add> dec_string += "Min Time : ";
<add> dec_string += min_time;
<add> dec_string += "\n";
<add> dec_string += "Percentage of decision timings in between 0-5 Millisecond = ";
<add> dec_string += ((part_0_5 * 1.0) / no_of_decisions) * 100;
<add> dec_string += "\n";
<add> dec_string += "Percentage of decision timings in between 5-10 Millisecond = ";
<add> dec_string += ((part_5_10 * 1.0) / no_of_decisions) * 100;
<add> dec_string += "\n";
<add> dec_string += "Percentage of decision timings in between 10-25 Millisecond = ";
<add> dec_string += ((part_10_25 * 1.0) / no_of_decisions) * 100;
<add> dec_string += "\n";
<add> dec_string += "Percentage of decision timings >25 Millisecond = ";
<add> dec_string += ((part_25_inf * 1.0) / no_of_decisions) * 100;
<add>
<add> return dec_string;
<ide> /*
<ide> String decision_str="";
<ide> long tmp_sum=0;
|
|
Java
|
apache-2.0
|
d4f958f4386e2ce451e0f008eec723cf807071bd
| 0 |
kevinpeterson/drools,rajashekharmunthakewill/drools,romartin/drools,romartin/drools,kevinpeterson/drools,HHzzhz/drools,kevinpeterson/drools,reynoldsm88/drools,kedzie/drools-android,ChallenHB/drools,sotty/drools,droolsjbpm/drools,psiroky/drools,HHzzhz/drools,OnePaaS/drools,ChallenHB/drools,iambic69/drools,jomarko/drools,ngs-mtech/drools,ThiagoGarciaAlves/drools,TonnyFeng/drools,sutaakar/drools,292388900/drools,kedzie/drools-android,winklerm/drools,ThomasLau/drools,amckee23/drools,iambic69/drools,lanceleverich/drools,jomarko/drools,manstis/drools,amckee23/drools,iambic69/drools,liupugong/drools,lanceleverich/drools,liupugong/drools,psiroky/drools,droolsjbpm/drools,OnePaaS/drools,psiroky/drools,mrietveld/drools,ChallenHB/drools,jiripetrlik/drools,sotty/drools,rajashekharmunthakewill/drools,sutaakar/drools,vinodkiran/drools,sutaakar/drools,TonnyFeng/drools,ngs-mtech/drools,yurloc/drools,jiripetrlik/drools,ngs-mtech/drools,vinodkiran/drools,ChallenHB/drools,winklerm/drools,jomarko/drools,ThiagoGarciaAlves/drools,yurloc/drools,ngs-mtech/drools,292388900/drools,ThomasLau/drools,sutaakar/drools,winklerm/drools,ThiagoGarciaAlves/drools,droolsjbpm/drools,prabasn/drools,ThiagoGarciaAlves/drools,mrietveld/drools,ngs-mtech/drools,sotty/drools,droolsjbpm/drools,292388900/drools,lanceleverich/drools,pperboires/PocDrools,mrrodriguez/drools,OnePaaS/drools,lanceleverich/drools,manstis/drools,vinodkiran/drools,liupugong/drools,TonnyFeng/drools,liupugong/drools,prabasn/drools,kevinpeterson/drools,mrietveld/drools,reynoldsm88/drools,Buble1981/MyDroolsFork,droolsjbpm/drools,rajashekharmunthakewill/drools,vinodkiran/drools,iambic69/drools,vinodkiran/drools,OnePaaS/drools,jomarko/drools,amckee23/drools,iambic69/drools,jomarko/drools,pperboires/PocDrools,ChallenHB/drools,kedzie/drools-android,pwachira/droolsexamples,mrietveld/drools,rajashekharmunthakewill/drools,romartin/drools,amckee23/drools,292388900/drools,prabasn/drools,mrrodriguez/drools,yurloc/drools,HHzzhz/drools,jiripetrlik/drools,HHzzhz/drools,liupugong/drools,OnePaaS/drools,sotty/drools,kevinpeterson/drools,lanceleverich/drools,mrrodriguez/drools,kedzie/drools-android,mrietveld/drools,winklerm/drools,Buble1981/MyDroolsFork,kedzie/drools-android,reynoldsm88/drools,rajashekharmunthakewill/drools,mrrodriguez/drools,reynoldsm88/drools,manstis/drools,TonnyFeng/drools,yurloc/drools,ThomasLau/drools,amckee23/drools,pperboires/PocDrools,ThomasLau/drools,jiripetrlik/drools,manstis/drools,Buble1981/MyDroolsFork,romartin/drools,HHzzhz/drools,psiroky/drools,prabasn/drools,sotty/drools,TonnyFeng/drools,mrrodriguez/drools,ThomasLau/drools,sutaakar/drools,292388900/drools,jiripetrlik/drools,romartin/drools,Buble1981/MyDroolsFork,pperboires/PocDrools,prabasn/drools,winklerm/drools,manstis/drools,reynoldsm88/drools,ThiagoGarciaAlves/drools
|
/*
* Copyright 2005 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.integrationtests;
import static org.hamcrest.CoreMatchers.is;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.io.Reader;
import java.io.Serializable;
import java.io.StringReader;
import java.lang.reflect.InvocationTargetException;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.jar.JarEntry;
import java.util.jar.JarInputStream;
import org.acme.insurance.Driver;
import org.acme.insurance.Policy;
import org.drools.ActivationListenerFactory;
import org.drools.Address;
import org.drools.Attribute;
import org.drools.Bar;
import org.drools.Cat;
import org.drools.Cell;
import org.drools.Cheese;
import org.drools.CheeseEqual;
import org.drools.Cheesery;
import org.drools.Cheesery.Maturity;
import org.drools.Child;
import org.drools.ClassObjectFilter;
import org.drools.CommonTestMethodBase;
import org.drools.DomainObjectHolder;
import org.drools.FactA;
import org.drools.FactB;
import org.drools.FactC;
import org.drools.FactHandle;
import org.drools.FirstClass;
import org.drools.FromTestClass;
import org.drools.Guess;
import org.drools.IndexedNumber;
import org.drools.KnowledgeBase;
import org.drools.KnowledgeBaseConfiguration;
import org.drools.KnowledgeBaseFactory;
import org.drools.LongAddress;
import org.drools.Message;
import org.drools.MockPersistentSet;
import org.drools.Move;
import org.drools.ObjectWithSet;
import org.drools.Order;
import org.drools.OrderItem;
import org.drools.OuterClass;
import org.drools.Person;
import org.drools.PersonFinal;
import org.drools.PersonInterface;
import org.drools.PersonWithEquals;
import org.drools.Pet;
import org.drools.PolymorphicFact;
import org.drools.Primitives;
import org.drools.RandomNumber;
import org.drools.RuleBase;
import org.drools.RuleBaseConfiguration;
import org.drools.RuleBaseFactory;
import org.drools.SecondClass;
import org.drools.Sensor;
import org.drools.SpecialString;
import org.drools.State;
import org.drools.StatefulSession;
import org.drools.StatelessSession;
import org.drools.StockTick;
import org.drools.TestParam;
import org.drools.Triangle;
import org.drools.Win;
import org.drools.WorkingMemory;
import org.drools.audit.WorkingMemoryConsoleLogger;
import org.drools.base.RuleNameEndsWithAgendaFilter;
import org.drools.base.RuleNameEqualsAgendaFilter;
import org.drools.base.RuleNameMatchesAgendaFilter;
import org.drools.base.RuleNameStartsWithAgendaFilter;
import org.drools.builder.KnowledgeBuilder;
import org.drools.builder.KnowledgeBuilderConfiguration;
import org.drools.builder.KnowledgeBuilderError;
import org.drools.builder.KnowledgeBuilderErrors;
import org.drools.builder.KnowledgeBuilderFactory;
import org.drools.builder.ResourceType;
import org.drools.builder.conf.DefaultPackageNameOption;
import org.drools.command.CommandFactory;
import org.drools.command.Setter;
import org.drools.common.AbstractWorkingMemory;
import org.drools.common.DefaultAgenda;
import org.drools.common.DefaultFactHandle;
import org.drools.common.InternalFactHandle;
import org.drools.common.InternalWorkingMemory;
import org.drools.compiler.DescrBuildError;
import org.drools.compiler.DrlParser;
import org.drools.compiler.DroolsError;
import org.drools.compiler.PackageBuilder;
import org.drools.compiler.PackageBuilder.PackageMergeException;
import org.drools.compiler.PackageBuilderConfiguration;
import org.drools.compiler.ParserError;
import org.drools.compiler.xml.XmlDumper;
import org.drools.conf.AssertBehaviorOption;
import org.drools.definition.KnowledgePackage;
import org.drools.definition.rule.Rule;
import org.drools.definition.type.FactType;
import org.drools.event.ActivationCancelledEvent;
import org.drools.event.ActivationCreatedEvent;
import org.drools.event.AfterActivationFiredEvent;
import org.drools.event.AgendaEventListener;
import org.drools.event.AgendaGroupPoppedEvent;
import org.drools.event.AgendaGroupPushedEvent;
import org.drools.event.BeforeActivationFiredEvent;
import org.drools.event.DefaultWorkingMemoryEventListener;
import org.drools.event.ObjectInsertedEvent;
import org.drools.event.ObjectRetractedEvent;
import org.drools.event.ObjectUpdatedEvent;
import org.drools.event.RuleFlowGroupActivatedEvent;
import org.drools.event.RuleFlowGroupDeactivatedEvent;
import org.drools.event.WorkingMemoryEventListener;
import org.drools.impl.EnvironmentFactory;
import org.drools.impl.StatefulKnowledgeSessionImpl;
import org.drools.io.ResourceFactory;
import org.drools.lang.DrlDumper;
import org.drools.lang.descr.AttributeDescr;
import org.drools.lang.descr.PackageDescr;
import org.drools.lang.descr.RuleDescr;
import org.drools.marshalling.ObjectMarshallingStrategy;
import org.drools.marshalling.impl.ClassObjectMarshallingStrategyAcceptor;
import org.drools.marshalling.impl.IdentityPlaceholderResolverStrategy;
import org.drools.reteoo.LeftTuple;
import org.drools.reteoo.LeftTupleSource;
import org.drools.reteoo.ReteooWorkingMemory;
import org.drools.reteoo.RuleTerminalNode;
import org.drools.reteoo.TerminalNode;
import org.drools.reteoo.builder.BuildContext;
import org.drools.rule.GroupElement;
import org.drools.rule.InvalidRulePackage;
import org.drools.rule.MapBackedClassLoader;
import org.drools.rule.Package;
import org.drools.rule.builder.dialect.java.JavaDialectConfiguration;
import org.drools.rule.builder.dialect.mvel.MVELDialectConfiguration;
import org.drools.runtime.Environment;
import org.drools.runtime.EnvironmentName;
import org.drools.runtime.Globals;
import org.drools.runtime.StatefulKnowledgeSession;
import org.drools.runtime.conf.ClockTypeOption;
import org.drools.runtime.rule.WorkingMemoryEntryPoint;
import org.drools.runtime.rule.impl.AgendaImpl;
import org.drools.spi.ConsequenceExceptionHandler;
import org.drools.spi.GlobalResolver;
import org.drools.spi.PropagationContext;
import org.drools.time.SessionPseudoClock;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Mockito;
import org.mvel2.MVEL;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Run all the tests with the ReteOO engine implementation
*/
public class MiscTest extends CommonTestMethodBase {
private static Logger logger = LoggerFactory.getLogger(MiscTest.class);
@Test
public void testImportFunctions() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_ImportFunctions.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
final Cheese cheese = new Cheese( "stilton",
15 );
session.insert( cheese );
List list = new ArrayList();
session.setGlobal( "list",
list );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
int fired = session.fireAllRules();
list = (List) session.getGlobal( "list" );
assertEquals( 4,
fired );
assertEquals( 4,
list.size() );
assertEquals( "rule1",
list.get( 0 ) );
assertEquals( "rule2",
list.get( 1 ) );
assertEquals( "rule3",
list.get( 2 ) );
assertEquals( "rule4",
list.get( 3 ) );
}
@Test
public void testStaticFieldReference() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_StaticField.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
// will test serialisation of int and typesafe enums
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
List list = new ArrayList();
session.setGlobal( "list",
list );
final Cheesery cheesery1 = new Cheesery();
cheesery1.setStatus( Cheesery.SELLING_CHEESE );
cheesery1.setMaturity( Maturity.OLD );
session.insert( cheesery1 );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
final Cheesery cheesery2 = new Cheesery();
cheesery2.setStatus( Cheesery.MAKING_CHEESE );
cheesery2.setMaturity( Maturity.YOUNG );
session.insert( cheesery2 );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
session.fireAllRules();
assertEquals( 2,
list.size() );
assertEquals( cheesery1,
list.get( 0 ) );
assertEquals( cheesery2,
list.get( 1 ) );
}
@Test
public void testMetaConsequence() throws Exception {
final Package pkg = loadPackage( "test_MetaConsequence.drl" );
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
List results = new ArrayList();
session.setGlobal( "results",
results );
session.insert( new Person( "Michael" ) );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
results = (List) session.getGlobal( "results" );
session.fireAllRules();
assertEquals( 2,
results.size() );
assertEquals( "bar",
(results.get( 0 )) );
assertEquals( "bar2",
(results.get( 1 )) );
}
@Test
public void testEnabledExpression() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_enabledExpression.drl" ) ) );
final Package pkg = builder.getPackage();
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
List results = new ArrayList();
session.setGlobal( "results",
results );
session.insert( new Person( "Michael" ) );
// session = SerializationHelper.getSerialisedStatefulSession( session,
// ruleBase );
results = (List) session.getGlobal( "results" );
session.fireAllRules();
assertEquals( 3,
results.size() );
assertTrue( results.contains( "1" ) );
assertTrue( results.contains( "2" ) );
assertTrue( results.contains( "3" ) );
}
@Test
public void testGetStatefulKnowledgeSessions() throws Exception {
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "empty.drl",
getClass() ),
ResourceType.DRL );
assertFalse( kbuilder.hasErrors() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession_1 = createKnowledgeSession(kbase);
String expected_1 = "expected_1";
String expected_2 = "expected_2";
org.drools.runtime.rule.FactHandle handle_1 = ksession_1.insert( expected_1 );
org.drools.runtime.rule.FactHandle handle_2 = ksession_1.insert( expected_2 );
ksession_1.fireAllRules();
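// The knowledge base tracks its live sessions; the collection below should contain ksession_1 until it is disposed.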
Collection<StatefulKnowledgeSession> coll_1 = kbase.getStatefulKnowledgeSessions();
assertTrue( coll_1.size() == 1 );
StatefulKnowledgeSession ksession_2 = coll_1.iterator().next();
Object actual_1 = ksession_2.getObject( handle_1 );
Object actual_2 = ksession_2.getObject( handle_2 );
assertEquals( expected_1,
actual_1 );
assertEquals( expected_2,
actual_2 );
ksession_1.dispose();
Collection<StatefulKnowledgeSession> coll_2 = kbase.getStatefulKnowledgeSessions();
assertTrue( coll_2.size() == 0 );
// here to make sure it's safe to call dispose() twice
ksession_1.dispose();
Collection<StatefulKnowledgeSession> coll_3 = kbase.getStatefulKnowledgeSessions();
assertTrue( coll_3.size() == 0 );
}
@Test
public void testGetFactHandle() throws Exception {
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "empty.drl",
getClass() ),
ResourceType.DRL );
assertFalse( kbuilder.hasErrors() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
for ( int i = 0; i < 20; i++ ) {
Object object = new Object();
ksession.insert( object );
org.drools.runtime.rule.FactHandle factHandle = ksession.getFactHandle( object );
assertNotNull( factHandle );
assertEquals( object,
ksession.getObject( factHandle ) );
}
ksession.dispose();
}
@Test
public void testPrimitiveArray() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_primitiveArray.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
List result = new ArrayList();
session.setGlobal( "result",
result );
final Primitives p1 = new Primitives();
p1.setPrimitiveIntArray( new int[]{1, 2, 3} );
p1.setArrayAttribute( new String[]{"a", "b"} );
session.insert( p1 );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
result = (List) session.getGlobal( "result" );
session.fireAllRules();
assertEquals( 3,
result.size() );
assertEquals( 3,
((Integer) result.get( 0 )).intValue() );
assertEquals( 2,
((Integer) result.get( 1 )).intValue() );
assertEquals( 3,
((Integer) result.get( 2 )).intValue() );
}
@Test
public void testMVELSoundex() throws Exception {
// read in the source
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "MVEL_soundex.drl",
getClass() ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
fail( kbuilder.getErrors().toString() );
}
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
kbase = SerializationHelper.serializeObject( kbase );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession( ksession,
true );
Cheese c = new Cheese( "fubar",
2 );
ksession.insert( c );
ksession.fireAllRules();
assertEquals( 42,
c.getPrice() );
}
@Test
public void testMVELSoundexNoCharParam() throws Exception {
// read in the source
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "MVEL_soundexNPE2500.drl",
getClass() ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
fail( kbuilder.getErrors().toString() );
}
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
kbase = SerializationHelper.serializeObject( kbase );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession( ksession,
true );
Cheese foobarCheese = new Cheese( "foobar",
2 );
Cheese nullCheese = new Cheese( null,
2 );
Cheese starCheese = new Cheese( "*",
2 );
ksession.insert( foobarCheese );
ksession.insert( nullCheese );
ksession.insert( starCheese );
ksession.fireAllRules();
assertEquals( 42,
foobarCheese.getPrice() );
assertEquals( 2,
nullCheese.getPrice() );
assertEquals( 2,
starCheese.getPrice() );
}
@Test
public void testMVELRewrite() throws Exception {
// read in the source
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "test_MVELrewrite.drl",
getClass() ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
fail( kbuilder.getErrors().toString() );
}
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession( ksession,
true );
List results = new ArrayList();
ksession.setGlobal( "results",
results );
Cheese brie = new Cheese( "brie",
2 );
Cheese stilton = new Cheese( "stilton",
2 );
Cheesery cheesery = new Cheesery();
cheesery.addCheese( brie );
cheesery.addCheese( stilton );
ksession.insert( cheesery );
ksession.fireAllRules();
assertEquals( 1,
results.size() );
assertEquals( cheesery,
results.get( 0 ) );
}
@Test
public void testVariableDeclaration() throws Exception {
String str = "rule KickOff\n" +
"dialect \"mvel\"\n" +
"when\n" +
"then\n" +
"int i;\n" +
"end";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
fail( kbuilder.getErrors().toString() );
}
}
@Test
public void testMissingImport() throws Exception {
String str = "";
str += "package org.drools \n";
str += "import org.drools.Person\n";
str += "global java.util.List list \n";
str += "rule rule1 \n";
str += "when \n";
str += " $i : Cheese() \n";
str += " MissingClass( fieldName == $i ) \n";
str += "then \n";
str += " list.add( $i ); \n";
str += "end \n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
logger.warn( kbuilder.getErrors().toString() );
}
assertTrue( kbuilder.hasErrors() );
}
@Test
public void testInvalidModify1() throws Exception {
String str = "";
str += "package org.drools \n";
str += "import org.drools.Person\n";
str += "global java.util.List list \n";
str += "rule rule1 \n";
str += " no-loop \n";
str += "when \n";
str += " $i : Cheese() \n";
str += "then \n";
str += " modify( $i ); ";
str += " list.add( $i ); \n";
str += "end \n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
logger.warn( kbuilder.getErrors().toString() );
}
assertTrue( kbuilder.hasErrors() );
}
@Test
public void testInvalidModify2() throws Exception {
String str = "";
str += "package org.drools \n";
str += "import org.drools.Person\n";
str += "global java.util.List list \n";
str += "rule rule1 \n";
str += " no-loop \n";
str += "when \n";
str += " $i : Cheese() \n";
str += "then \n";
str += " modify( $i ) { setType( \"stilton\" ); setType( \"stilton\" );}; ";
str += " list.add( $i ); \n";
str += "end \n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
logger.warn( kbuilder.getErrors().toString() );
}
assertTrue( kbuilder.hasErrors() );
}
@Test
public void testIncrementOperator() throws Exception {
String str = "";
str += "package org.drools \n";
str += "global java.util.List list \n";
str += "rule rule1 \n";
str += " dialect \"java\" \n";
str += "when \n";
str += " $I : Integer() \n";
str += "then \n";
str += " int i = $I.intValue(); \n";
str += " i += 5; \n";
str += " list.add( i ); \n";
str += "end \n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
logger.warn( kbuilder.getErrors().toString() );
}
assertFalse( kbuilder.hasErrors() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List list = new ArrayList();
ksession.setGlobal( "list",
list );
ksession.insert( 5 );
ksession.fireAllRules();
assertEquals( 1,
list.size() );
assertEquals( 10,
list.get( 0 ) );
}
@Test
public void testKnowledgeRuntimeAccess() throws Exception {
String str = "";
str += "package org.test\n";
str += "import org.drools.Message\n";
str += "rule \"Hello World\"\n";
str += "when\n";
str += " Message( )\n";
str += "then\n";
str += " System.out.println( drools.getKnowledgeRuntime() );\n";
str += "end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
kbase = SerializationHelper.serializeObject( kbase );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.insert( new Message( "help" ) );
ksession.fireAllRules();
ksession.dispose();
}
@Test
public void testEvalWithBigDecimal() throws Exception {
String str = "";
str += "package org.drools \n";
str += "import java.math.BigDecimal; \n";
str += "global java.util.List list \n";
str += "rule rule1 \n";
str += " dialect \"java\" \n";
str += "when \n";
str += " $bd : BigDecimal() \n";
str += " eval( $bd.compareTo( BigDecimal.ZERO ) > 0 ) \n";
str += "then \n";
str += " list.add( $bd ); \n";
str += "end \n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
logger.warn( kbuilder.getErrors().toString() );
}
assertFalse( kbuilder.hasErrors() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List list = new ArrayList();
ksession.setGlobal( "list",
list );
ksession.insert( new BigDecimal( 1.5 ) );
ksession.fireAllRules();
assertEquals( 1,
list.size() );
assertEquals( new BigDecimal( 1.5 ),
list.get( 0 ) );
}
@Test
public void testCustomGlobalResolver() throws Exception {
final Package pkg = loadPackage( "test_globalCustomResolver.drl" );
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
WorkingMemory workingMemory = ruleBase.newStatefulSession();
final Map map = new HashMap();
List list = new ArrayList();
String string = "stilton";
map.put( "list",
list );
map.put( "string",
string );
workingMemory.setGlobalResolver( new GlobalResolver() {
public void readExternal( ObjectInput in ) throws IOException,
ClassNotFoundException {
}
public void writeExternal( ObjectOutput out ) throws IOException {
}
public Object resolveGlobal( String identifier ) {
return map.get( identifier );
}
public void setGlobal( String identifier,
Object value ) {
map.put( identifier,
value );
}
public Object get( String identifier ) {
return resolveGlobal( identifier );
}
public void set( String identifier,
Object value ) {
setGlobal( identifier,
value );
}
public void setDelegate( Globals delegate ) {
}
} );
workingMemory.fireAllRules();
assertEquals( 1,
list.size() );
assertEquals( 1,
list.size() );
assertEquals( new Integer( 5 ),
list.get( 0 ) );
}
@Test
public void testCustomGlobalResolverWithWorkingMemoryObject() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_globalCustomResolver.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final Map map = new HashMap();
List list = new ArrayList();
String string = "stilton";
map.put( "list",
list );
map.put( "string",
string );
workingMemory.setGlobalResolver( new GlobalResolver() {
public Object resolveGlobal( String identifier ) {
return map.get( identifier );
}
public void setGlobal( String identifier,
Object value ) {
map.put( identifier,
value );
}
public void readExternal( ObjectInput in ) throws IOException,
ClassNotFoundException {
}
public void writeExternal( ObjectOutput out ) throws IOException {
}
public Object get( String identifier ) {
return resolveGlobal( identifier );
}
public void set( String identifier,
Object value ) {
setGlobal( identifier,
value );
}
public void setDelegate( Globals delegate ) {
}
} );
Cheese bree = new Cheese();
bree.setPrice( 100 );
workingMemory.insert( bree );
workingMemory.fireAllRules();
assertEquals( 2,
list.size() );
assertEquals( 5, list.get( 0 ) );
assertEquals( 6, list.get( 1 ) );
}
@Test
public void testFieldBindingsAndEvalSharing() throws Exception {
final String drl = "test_FieldBindingsAndEvalSharing.drl";
evalSharingTest( drl );
}
@Test
public void testFieldBindingsAndPredicateSharing() throws Exception {
final String drl = "test_FieldBindingsAndPredicateSharing.drl";
evalSharingTest( drl );
}
private void evalSharingTest( final String drl ) throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( drl ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
List list = new ArrayList();
session.setGlobal( "list",
list );
final TestParam tp1 = new TestParam();
tp1.setValue2( "boo" );
session.insert( tp1 );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
session.fireAllRules();
assertEquals( 1,
((List) session.getGlobal( "list" )).size() );
}
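// exercises declared (generated) bean types: creating instances, reading/writing fields via FactType, and matching them in rules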
@Test
public void testGeneratedBeans1() throws Exception {
final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newInputStreamResource( getClass().getResourceAsStream( "test_GeneratedBeans.drl" ) ),
ResourceType.DRL );
assertFalse( kbuilder.getErrors().toString(),
kbuilder.hasErrors() );
KnowledgePackage kpkg = kbuilder.getKnowledgePackages().toArray( new KnowledgePackage[1] )[0];
assertEquals( 2,
kpkg.getRules().size() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
// test kbase serialization
kbase = SerializationHelper.serializeObject( kbase );
// Retrieve the generated fact type
FactType cheeseFact = kbase.getFactType( "org.drools.generatedbeans",
"Cheese" );
// Create a new Fact instance
Object cheese = cheeseFact.newInstance();
// Set a field value using the more verbose method chain...
// should we add short cuts?
// cheeseFact.getField( "type" ).set( cheese,
// "stilton" );
cheeseFact.set( cheese,
"type",
"stilton" );
assertEquals( "stilton",
cheeseFact.get( cheese,
"type" ) );
FactType personType = kbase.getFactType( "org.drools.generatedbeans",
"Person" );
Object ps = personType.newInstance();
personType.set( ps,
"age",
42 );
Map<String, Object> personMap = personType.getAsMap( ps );
assertEquals( 42,
personMap.get( "age" ) );
personMap.put( "age",
43 );
personType.setFromMap( ps,
personMap );
assertEquals( 43,
personType.get( ps,
"age" ) );
// just documenting toString() result:
// assertEquals( "Cheese( type=stilton )",
// cheese.toString() );
// reading the field attribute, using the method chain
assertEquals( "stilton",
cheeseFact.getField( "type" ).get( cheese ) );
// creating a stateful session
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
Object cg = cheeseFact.newInstance();
ksession.setGlobal( "cg",
cg );
List<Object> result = new ArrayList<Object>();
ksession.setGlobal( "list",
result );
// inserting fact
ksession.insert( cheese );
// firing rules
ksession.fireAllRules();
// checking results
assertEquals( 1,
result.size() );
assertEquals( new Integer( 5 ),
result.get( 0 ) );
// creating a person that likes the cheese:
// Retrieve the generated fact type
FactType personFact = kbase.getFactType( "org.drools.generatedbeans",
"Person" );
// Create a new Fact instance
Object person = personFact.newInstance();
// Set a field value using the more verbose method chain...
// should we add short cuts?
personFact.getField( "likes" ).set( person,
cheese );
// demonstrating primitive type support
personFact.getField( "age" ).set( person,
7 );
// just documenting toString() result:
// assertEquals( "Person( age=7, likes=Cheese( type=stilton ) )",
// person.toString() );
// inserting fact
ksession.insert( person );
// firing rules
ksession.fireAllRules();
// checking results
assertEquals( 2,
result.size() );
assertEquals( person,
result.get( 1 ) );
}
@Test
public void testGeneratedBeansMVEL() throws Exception {
final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newInputStreamResource( getClass().getResourceAsStream( "test_GeneratedBeansMVEL.drl" ) ),
ResourceType.DRL );
assertFalse( kbuilder.getErrors().toString(),
kbuilder.hasErrors() );
KnowledgePackage kpkg = kbuilder.getKnowledgePackages().toArray( new KnowledgePackage[1] )[0];
assertEquals( 1,
kpkg.getRules().size() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
// test kbase serialization
kbase = SerializationHelper.serializeObject( kbase );
// Retrieve the generated fact type
FactType pf = kbase.getFactType( "mortgages",
"Applicant" );
FactType af = kbase.getFactType( "mortgages",
"LoanApplication" );
Object person = pf.newInstance();
pf.set( person,
"creditRating",
"OK" );
Object application = af.newInstance();
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.insert( person );
ksession.insert( application );
ksession.fireAllRules();
}
@Test
public void testGeneratedBeans2() throws Exception {
final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newInputStreamResource( getClass().getResourceAsStream( "test_GeneratedBeans2.drl" ) ),
ResourceType.DRL );
assertFalse( kbuilder.getErrors().toString(),
kbuilder.hasErrors() );
KnowledgePackage kpkg = kbuilder.getKnowledgePackages().toArray( new KnowledgePackage[1] )[0];
assertEquals( 2,
kpkg.getRules().size() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
// test kbase serialization
kbase = SerializationHelper.serializeObject( kbase );
// Retrieve the generated fact type
FactType cheeseFact = kbase.getFactType( "org.drools.generatedbeans",
"Cheese" );
// Create a new Fact instance
Object cheese = cheeseFact.newInstance();
cheeseFact.set( cheese,
"type",
"stilton" );
assertEquals( "stilton",
cheeseFact.get( cheese,
"type" ) );
// testing equals method
Object cheese2 = cheeseFact.newInstance();
cheeseFact.set( cheese2,
"type",
"stilton" );
assertEquals( cheese,
cheese2 );
FactType personType = kbase.getFactType( "org.drools.generatedbeans",
"Person" );
Object ps = personType.newInstance();
personType.set( ps,
"name",
"mark" );
personType.set( ps,
"last",
"proctor" );
personType.set( ps,
"age",
42 );
Object ps2 = personType.newInstance();
personType.set( ps2,
"name",
"mark" );
personType.set( ps2,
"last",
"proctor" );
personType.set( ps2,
"age",
30 );
assertEquals( ps,
ps2 );
personType.set( ps2,
"last",
"little" );
assertFalse( ps.equals( ps2 ) );
// creating a stateful session
StatefulKnowledgeSession wm = createKnowledgeSession(kbase);
Object cg = cheeseFact.newInstance();
wm.setGlobal( "cg",
cg );
List result = new ArrayList();
wm.setGlobal( "list",
result );
// inserting fact
wm.insert( cheese );
// firing rules
wm.fireAllRules();
// checking results
assertEquals( 1,
result.size() );
assertEquals( new Integer( 5 ),
result.get( 0 ) );
// creating a person that likes the cheese:
// Retrieve the generated fact type
FactType personFact = kbase.getFactType( "org.drools.generatedbeans",
"Person" );
// Create a new Fact instance
Object person = personFact.newInstance();
// Set a field value using the more verbose method chain...
// should we add short cuts?
personFact.getField( "likes" ).set( person,
cheese );
// demonstrating primitive type support
personFact.getField( "age" ).set( person,
7 );
// just documenting toString() result:
// assertEquals( "Person( age=7, likes=Cheese( type=stilton ) )",
// person.toString() );
// inserting fact
wm.insert( person );
// firing rules
wm.fireAllRules();
// checking results
assertEquals( 2,
result.size() );
assertEquals( person,
result.get( 1 ) );
}
@Test
public void testDeclaredFactAndFunction() throws Exception {
String rule = "package com.jboss.qa;\n";
rule += "global java.util.List list\n";
rule += "declare Address\n";
rule += " street: String\n";
rule += "end\n";
rule += "function void myFunction() {\n";
rule += "}\n";
rule += "rule \"r1\"\n";
rule += " dialect \"mvel\"\n";
rule += "when\n";
rule += " Address()\n";
rule += "then\n";
rule += " list.add(\"r1\");\n";
rule += "end\n";
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new StringReader( rule ) );
final Package pkg = builder.getPackage();
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
StatefulSession session = ruleBase.newStatefulSession();
List list = new ArrayList();
session.setGlobal( "list",
list );
FactType addressFact = ruleBase.getFactType( "com.jboss.qa.Address" );
Object address = addressFact.newInstance();
session.insert( address );
session.fireAllRules();
list = (List) session.getGlobal( "list" );
assertEquals( 1,
list.size() );
assertEquals( "r1",
list.get( 0 ) );
}
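// type declarations and rules split across separate resources in the same package should compile together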
@Test
public void testTypeDeclarationOnSeparateResource() throws Exception {
System.setProperty( "drools.dump.dir", "target" );
String file1 = "package a.b.c\n" +
"declare SomePerson\n" +
" weight : double\n" +
" height : double\n" +
"end\n";
String file2 = "package a.b.c\n" +
"import org.drools.*\n" +
"declare Holder\n" +
" person : Person\n" +
"end\n" +
"rule \"create holder\"\n" +
" when\n" +
" person : Person( )\n" +
" not (\n" +
" Holder( person; )\n" +
" )\n" +
" then\n" +
" insert(new Holder(person));\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( file1 , file2 );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
assertEquals( 0,
ksession.fireAllRules() );
ksession.insert( new org.drools.Person("Bob") );
assertEquals( 1,
ksession.fireAllRules() );
assertEquals( 0,
ksession.fireAllRules() );
}
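// declared types whose field names start with an uppercase letter should still be usable in patterns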
@Test
public void testUppercaseField() throws Exception {
String rule = "package org.drools.test;\n";
rule += "global java.util.List list\n";
rule += "declare Address\n";
rule += " Street: String\n";
rule += "end\n";
rule += "rule \"r1\"\n";
rule += "when\n";
rule += " Address($street: Street)\n";
rule += "then\n";
rule += " list.add($street);\n";
rule += "end\n";
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new StringReader( rule ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
KnowledgeBase kbase = loadKnowledgeBaseFromString( rule );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.setGlobal( "list",
new ArrayList<String>() );
FactType addressType = kbase.getFactType( "org.drools.test",
"Address" );
Object address = addressType.newInstance();
addressType.set( address,
"Street",
"5th Avenue" );
ksession.insert( address );
ksession.fireAllRules();
List list = (List) ksession.getGlobal( "list" );
assertEquals( 1,
list.size() );
assertEquals( "5th Avenue",
list.get( 0 ) );
ksession.dispose();
}
@Test
public void testUppercaseField2() throws Exception {
String rule = "package org.drools\n" +
"declare SomeFact\n" +
" Field : String\n" +
" aField : String\n" +
"end\n" +
"rule X\n" +
"when\n" +
" SomeFact( Field == \"foo\", aField == \"bar\" )\n" +
"then\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( rule );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
FactType factType = kbase.getFactType( "org.drools",
"SomeFact" );
Object fact = factType.newInstance();
factType.set( fact,
"Field",
"foo" );
factType.set( fact,
"aField",
"bar" );
ksession.insert( fact );
int rules = ksession.fireAllRules();
assertEquals( 1,
rules );
ksession.dispose();
}
@Test
public void testNullHandling() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_NullHandling.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
// add the package to a rulebase
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
final List list = new ArrayList();
session.setGlobal( "list",
list );
final Cheese nullCheese = new Cheese( null,
2 );
session.insert( nullCheese );
final Person notNullPerson = new Person( "shoes butt back" );
notNullPerson.setBigDecimal( new BigDecimal( "42.42" ) );
session.insert( notNullPerson );
Person nullPerson = new Person( "whee" );
nullPerson.setBigDecimal( null );
session.insert( nullPerson );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
session.fireAllRules();
//System.out.println(((List) session.getGlobal("list")).get(0));
assertEquals( 3,
((List) session.getGlobal( "list" )).size() );
nullPerson = new Person( null );
session.insert( nullPerson );
session.fireAllRules();
assertEquals( 4,
((List) session.getGlobal( "list" )).size() );
}
@Test
public void testNullFieldOnCompositeSink() throws Exception {
final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "test_NullFieldOnCompositeSink.drl",
getClass() ),
ResourceType.DRL );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
kbase = SerializationHelper.serializeObject( kbase );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List list = new ArrayList();
ksession.setGlobal( "list",
list );
ksession.insert( new Attribute() );
ksession.insert( new Message() );
ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession( ksession,
true );
ksession.fireAllRules();
assertEquals( 1,
((List) ksession.getGlobal( "list" )).size() );
assertEquals( "X",
((List) ksession.getGlobal( "list" )).get( 0 ) );
}
@Test
public void testEmptyPattern() throws Exception {
// pre build the package
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_EmptyPattern.drl" ) ) );
final Package pkg = builder.getPackage();
// add the package to a rulebase
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
List list = new ArrayList();
session.setGlobal( "list",
list );
final Cheese stilton = new Cheese( "stilton",
5 );
session.insert( stilton );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
session.fireAllRules();
assertEquals( 5,
((List) session.getGlobal( "list" )).get( 0 ) );
}
@Test
public void testExplicitAnd() throws Exception {
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "test_ExplicitAnd.drl",
getClass() ),
ResourceType.DRL );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
final List list = new ArrayList();
ksession.setGlobal( "list",
list );
ksession.insert( new Message( "hola" ) );
ksession.fireAllRules();
assertEquals( 0,
list.size() );
ksession.insert( new Cheese( "brie",
33 ) );
ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession( ksession,
true );
ksession.fireAllRules();
assertEquals( 1,
((List) ksession.getGlobal( "list" )).size() );
}
@Test
public void testHelloWorld() throws Exception {
// read in the source
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "HelloWorld.drl",
getClass() ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
fail( kbuilder.getErrors().toString() );
}
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
final List list = new ArrayList();
ksession.setGlobal( "list",
list );
// go !
final Message message = new Message( "hola" );
message.addToList( "hello" );
message.setNumber( 42 );
ksession.insert( message );
ksession.insert( "boo" );
// workingMemory = SerializationHelper.serializeObject(workingMemory);
ksession.fireAllRules();
assertTrue( message.isFired() );
assertEquals( message,
((List) ksession.getGlobal( "list" )).get( 0 ) );
}
@Test
public void testExtends() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "extend_rule_test.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
//ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
//Test 2 levels of inheritance, and basic rule
List list = new ArrayList();
session.setGlobal( "list",
list );
final Cheese mycheese = new Cheese( "cheddar",
4 );
FactHandle handle = session.insert( mycheese );
session.fireAllRules();
assertEquals( 2,
list.size() );
assertEquals( "rule 4",
list.get( 0 ) );
assertEquals( "rule 2b",
list.get( 1 ) );
//Test 2nd level (parent) to make sure rule honors the extend rule
list = new ArrayList();
session.setGlobal( "list",
list );
session.retract( handle );
final Cheese mycheese2 = new Cheese( "notcheddar",
4 );
FactHandle handle2 = session.insert( mycheese2 );
session.fireAllRules();
assertEquals( "rule 4",
list.get( 0 ) );
assertEquals( 1,
list.size() );
//Test 3 levels of inheritance, all levels
list = new ArrayList();
session.setGlobal( "list",
list );
session.retract( handle2 );
final Cheese mycheese3 = new Cheese( "stilton",
6 );
FactHandle handle3 = session.insert( mycheese3 );
session.fireAllRules();
//System.out.println(list.toString());
assertEquals( "rule 3",
list.get( 0 ) );
assertEquals( 1,
list.size() );
//Test 3 levels of inheritance, third only
list = new ArrayList();
session.setGlobal( "list",
list );
session.retract( handle3 );
final Cheese mycheese4 = new Cheese( "notstilton",
6 );
FactHandle handle4 = session.insert( mycheese4 );
session.fireAllRules();
//System.out.println(((List) session.getGlobal( "list" )).toString());
assertTrue( ((List) session.getGlobal( "list" )).size() == 0 );
//Test 3 levels of inheritance, 2nd only
list = new ArrayList();
session.setGlobal( "list",
list );
session.retract( handle4 );
final Cheese mycheese5 = new Cheese( "stilton",
7 );
FactHandle handle5 = session.insert( mycheese5 );
session.fireAllRules();
//System.out.println(((List) session.getGlobal( "list" )).toString());
assertEquals( 0,
list.size() );
}
@Test
public void testExtends2() {
final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
try {
kbuilder.add( ResourceFactory.newInputStreamResource( getClass().getResourceAsStream( "test_RuleExtend.drl" ) ),
ResourceType.DRL );
assertFalse( kbuilder.getErrors().toString(),
kbuilder.hasErrors() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
final StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
final List results = new ArrayList();
ksession.setGlobal( "results",
results );
final Cheese stilton = new Cheese( "stilton",
5 );
final Cheese cheddar = new Cheese( "cheddar",
7 );
final Cheese brie = new Cheese( "brie",
5 );
ksession.insert( stilton );
ksession.insert( cheddar );
ksession.insert( brie );
ksession.fireAllRules();
assertEquals( 2,
results.size() );
assertEquals( "stilton",
results.get( 0 ) );
assertEquals( "brie",
results.get( 1 ) );
} catch ( Exception e ) {
e.printStackTrace();
if ( kbuilder.hasErrors() ) logger.info( kbuilder.getErrors().toString() );
fail( "Unexpected exception: " + e.getMessage() );
}
}
@Test
public void testLatinLocale() throws Exception {
Locale defaultLoc = Locale.getDefault();
try {
// setting a locale that uses COMMA as decimal separator
Locale.setDefault( new Locale( "pt",
"BR" ) );
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newInputStreamResource( getClass().getResourceAsStream( "test_LatinLocale.drl" ) ),
ResourceType.DRL );
assertFalse( kbuilder.getErrors().toString(),
kbuilder.hasErrors() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
final List<String> results = new ArrayList<String>();
ksession.setGlobal( "results",
results );
final Cheese mycheese = new Cheese( "cheddar",
4 );
org.drools.runtime.rule.FactHandle handle = ksession.insert( mycheese );
ksession.fireAllRules();
assertEquals( 1,
results.size() );
assertEquals( "1",
results.get( 0 ) );
mycheese.setPrice( 8 );
mycheese.setDoublePrice( 8.50 );
ksession.update( handle,
mycheese );
ksession.fireAllRules();
assertEquals( 2,
results.size() );
assertEquals( "3",
results.get( 1 ) );
} finally {
Locale.setDefault( defaultLoc );
}
}
@Test
public void testLiteral() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "literal_rule_test.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
final List list = new ArrayList();
session.setGlobal( "list",
list );
final Cheese stilton = new Cheese( "stilton",
5 );
session.insert( stilton );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
session.fireAllRules();
assertEquals( "stilton",
((List) session.getGlobal( "list" )).get( 0 ) );
}
@Test
public void testLiteralWithEscapes() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_literal_with_escapes.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
final List list = new ArrayList();
session.setGlobal( "list",
list );
String expected = "s\tti\"lto\nn";
final Cheese stilton = new Cheese( expected,
5 );
session.insert( stilton );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
int fired = session.fireAllRules();
assertEquals( 1,
fired );
assertEquals( expected,
((List) session.getGlobal( "list" )).get( 0 ) );
}
@Test
public void testLiteralWithBoolean() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "literal_with_boolean.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
final List list = new ArrayList();
session.setGlobal( "list",
list );
final PersonInterface bill = new Person( "bill",
null,
12 );
bill.setAlive( true );
session.insert( bill );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
session.fireAllRules();
assertEquals( bill,
((List) session.getGlobal( "list" )).get( 0 ) );
}
@Test
public void testFactBindings() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_FactBindings.drl" ) ) );
final Package pkg = builder.getPackage();
// add the package to a rulebase
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List events = new ArrayList();
final WorkingMemoryEventListener listener = new DefaultWorkingMemoryEventListener() {
public void objectUpdated( ObjectUpdatedEvent event ) {
events.add( event );
}
};
workingMemory.addEventListener( listener );
final Person bigCheese = new Person( "big cheese" );
final Cheese cheddar = new Cheese( "cheddar",
15 );
bigCheese.setCheese( cheddar );
final FactHandle bigCheeseHandle = workingMemory.insert( bigCheese );
final FactHandle cheddarHandle = workingMemory.insert( cheddar );
workingMemory.fireAllRules();
ObjectUpdatedEvent event = (ObjectUpdatedEvent) events.get( 0 );
assertSame( cheddarHandle,
event.getFactHandle() );
assertSame( cheddar,
event.getOldObject() );
assertSame( cheddar,
event.getObject() );
event = (ObjectUpdatedEvent) events.get( 1 );
assertSame( bigCheeseHandle,
event.getFactHandle() );
assertSame( bigCheese,
event.getOldObject() );
assertSame( bigCheese,
event.getObject() );
}
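// facts inserted as dynamic (property-change aware) should re-activate rules when a bean property changes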
@Test
public void testPropertyChangeSupportOldAPI() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_PropertyChange.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
Environment env = EnvironmentFactory.newEnvironment();
env.set( EnvironmentName.OBJECT_MARSHALLING_STRATEGIES,
new ObjectMarshallingStrategy[]{
new IdentityPlaceholderResolverStrategy( ClassObjectMarshallingStrategyAcceptor.DEFAULT )} );
StatefulSession session = ruleBase.newStatefulSession( null,
env );
final List list = new ArrayList();
session.setGlobal( "list",
list );
final State state = new State( "initial" );
session.insert( state,
true );
session.fireAllRules();
assertEquals( 1,
((List) session.getGlobal( "list" )).size() );
state.setFlag( true );
assertEquals( 1,
((List) session.getGlobal( "list" )).size() );
session.fireAllRules();
assertEquals( 2,
((List) session.getGlobal( "list" )).size() );
state.setState( "finished" );
StatefulKnowledgeSession ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession( new StatefulKnowledgeSessionImpl( (ReteooWorkingMemory) session ),
// MarshallerFactory.newIdentityMarshallingStrategy(),
false );
ksession.fireAllRules();
assertEquals( 3,
((List) session.getGlobal( "list" )).size() );
session.dispose();
// checks that the session removed itself from the bean listeners list
assertEquals( 0,
state.getPropertyChangeListeners().length );
}
@Test
public void testPropertyChangeSupportNewAPI() throws Exception {
final KnowledgeBuilder builder = KnowledgeBuilderFactory.newKnowledgeBuilder();
builder.add( ResourceFactory.newInputStreamResource( getClass().getResourceAsStream( "test_PropertyChangeTypeDecl.drl" ) ),
ResourceType.DRL );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Collection<KnowledgePackage> pkgs = builder.getKnowledgePackages();
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( pkgs );
kbase = SerializationHelper.serializeObject( kbase );
StatefulKnowledgeSession session = createKnowledgeSession(kbase);
final List list = new ArrayList();
session.setGlobal( "list",
list );
final State state = new State( "initial" );
session.insert( state );
session.fireAllRules();
assertEquals( 1,
((List) session.getGlobal( "list" )).size() );
state.setFlag( true );
assertEquals( 1,
((List) session.getGlobal( "list" )).size() );
session.fireAllRules();
assertEquals( 2,
((List) session.getGlobal( "list" )).size() );
state.setState( "finished" );
session.dispose();
// checks that the session removed itself from the bean listeners list
assertEquals( 0,
state.getPropertyChangeListeners().length );
}
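// a DefaultFactHandle rebuilt from its external form should still resolve to the original inserted object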
@Test
public void testDisconnectedFactHandle() {
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
DefaultFactHandle helloHandle = (DefaultFactHandle) ksession.insert( "hello" );
DefaultFactHandle goodbyeHandle = (DefaultFactHandle) ksession.insert( "goodbye" );
org.drools.runtime.rule.FactHandle key = new DefaultFactHandle( helloHandle.toExternalForm() );
assertEquals( "hello",
ksession.getObject( key ) );
key = new DefaultFactHandle( goodbyeHandle.toExternalForm() );
assertEquals( "goodbye",
ksession.getObject( key ) );
}
@Test
public void testBigDecimal() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "big_decimal_and_comparable.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
final List list = new ArrayList();
session.setGlobal( "list",
list );
final PersonInterface bill = new Person( "bill",
null,
42 );
bill.setBigDecimal( new BigDecimal( "42" ) );
final PersonInterface ben = new Person( "ben",
null,
43 );
ben.setBigDecimal( new BigDecimal( "43" ) );
session.insert( bill );
session.insert(new Cheese("gorgonzola", 43));
session.insert( ben );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
session.fireAllRules();
assertEquals( 1,
((List) session.getGlobal( "list" )).size() );
}
@Test
public void testBigDecimalIntegerLiteral() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "big_decimal_and_literal.drl" ) ) );
final Package pkg = builder.getPackage();
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
StatefulSession session = ruleBase.newStatefulSession();
final List list = new ArrayList();
session.setGlobal( "list",
list );
final PersonInterface bill = new Person( "bill",
null,
12 );
bill.setBigDecimal( new BigDecimal( "42" ) );
bill.setBigInteger( new BigInteger( "42" ) );
session.insert( bill );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
session.fireAllRules();
assertEquals( 6,
((List) session.getGlobal( "list" )).size() );
}
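// combines 'from' over a BigDecimal constant with an eval() in the LHS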
@Test
public void testBigDecimalWithFromAndEval() throws Exception {
String rule = "package org.test;\n";
rule += "rule \"Test Rule\"\n";
rule += "when\n";
rule += " $dec : java.math.BigDecimal() from java.math.BigDecimal.TEN;\n";
rule += " eval( $dec.compareTo(java.math.BigDecimal.ONE) > 0 )\n";
rule += "then\n";
rule += " System.out.println(\"OK!\");\n";
rule += "end";
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new StringReader( rule ) );
assertFalse( builder.getErrors().toString(),
builder.hasErrors() );
final Package pkg = builder.getPackage();
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
StatefulSession session = ruleBase.newStatefulSession();
session.fireAllRules();
}
@Test()
public void testImport() throws Exception {
// Same package as this test
String rule = "";
rule += "package org.drools.integrationtests;\n";
rule += "import java.lang.Math;\n";
rule += "rule \"Test Rule\"\n";
rule += " dialect \"mvel\"\n";
rule += " when\n";
rule += " then\n";
// Can't handle the TestFact.TEST
rule += " new TestFact(TestFact.TEST);\n";
rule += "end";
KnowledgeBuilder builder = KnowledgeBuilderFactory.newKnowledgeBuilder();
builder.add( ResourceFactory.newByteArrayResource( rule.getBytes() ),
ResourceType.DRL );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
try {
kbase.addKnowledgePackages( builder.getKnowledgePackages() );
} catch ( Exception e ) {
e.printStackTrace();
fail( "Should execute with out exceptions" );
}
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.fireAllRules();
}
@Test
public void testMVELConsequenceWithMapsAndArrays() throws Exception {
String rule = "package org.test;\n";
rule += "import java.util.ArrayList\n";
rule += "import java.util.HashMap\n";
rule += "global java.util.List list\n";
rule += "rule \"Test Rule\"\n";
rule += " dialect \"mvel\"";
rule += "when\n";
rule += "then\n";
rule += " m = new HashMap();\n";
rule += " l = new ArrayList();\n";
rule += " l.add(\"first\");\n";
rule += " m.put(\"content\", l);\n";
rule += " System.out.println(((ArrayList)m[\"content\"])[0]);\n";
rule += " list.add(((ArrayList)m[\"content\"])[0]);\n";
rule += "end";
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new StringReader( rule ) );
final Package pkg = builder.getPackage();
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
StatefulSession session = ruleBase.newStatefulSession();
List list = new ArrayList();
session.setGlobal( "list",
list );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
session.fireAllRules();
assertEquals( 1,
((List) session.getGlobal( "list" )).size() );
assertEquals( "first",
((List) session.getGlobal( "list" )).get( 0 ) );
}
@Test
public void testCell() throws Exception {
final Cell cell1 = new Cell( 9 );
final Cell cell = new Cell( 0 );
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "evalmodify.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
RuleBase ruleBase = getSinglethreadRuleBase();
Package pkg = builder.getPackage();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
Environment env = EnvironmentFactory.newEnvironment();
env.set( EnvironmentName.OBJECT_MARSHALLING_STRATEGIES,
new ObjectMarshallingStrategy[]{
new IdentityPlaceholderResolverStrategy( ClassObjectMarshallingStrategyAcceptor.DEFAULT )} );
StatefulSession session = ruleBase.newStatefulSession( null,
env );
session.insert( cell1 );
FactHandle cellHandle = session.insert( cell );
StatefulKnowledgeSession ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession( new StatefulKnowledgeSessionImpl( (ReteooWorkingMemory) session ),
// MarshallerFactory.newIdentityMarshallingStrategy(),
false );
session.fireAllRules();
assertEquals( 9,
cell.getValue() );
}
@Test
public void testNesting() throws Exception {
Person p = new Person();
p.setName( "Michael" );
Address add1 = new Address();
add1.setStreet( "High" );
Address add2 = new Address();
add2.setStreet( "Low" );
List l = new ArrayList();
l.add( add1 );
l.add( add2 );
p.setAddresses( l );
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "nested_fields.drl" ) ) );
assertFalse( builder.getErrors().toString(),
builder.hasErrors() );
DrlParser parser = new DrlParser();
PackageDescr desc = parser.parse( new InputStreamReader( getClass().getResourceAsStream( "nested_fields.drl" ) ) );
List packageAttrs = desc.getAttributes();
assertEquals( 1,
desc.getRules().size() );
assertEquals( 1,
packageAttrs.size() );
RuleDescr rule = (RuleDescr) desc.getRules().get( 0 );
Map<String, AttributeDescr> ruleAttrs = rule.getAttributes();
assertEquals( 1,
ruleAttrs.size() );
assertEquals( "mvel",
((AttributeDescr) ruleAttrs.get( "dialect" )).getValue() );
assertEquals( "dialect",
((AttributeDescr) ruleAttrs.get( "dialect" )).getName() );
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( builder.getPackage() );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
session.insert( p );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
session.fireAllRules();
}
@Test
public void testOr() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "or_test.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
final List list = new ArrayList();
session.setGlobal( "list",
list );
final Cheese cheddar = new Cheese( "cheddar",
5 );
final FactHandle h = session.insert( cheddar );
session.fireAllRules();
// just one added
assertEquals( "got cheese",
list.get( 0 ) );
assertEquals( 1,
list.size() );
session.retract( h );
session.fireAllRules();
// still just one
assertEquals( 1,
list.size() );
session.insert( new Cheese( "stilton",
5 ) );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
session.fireAllRules();
// now have one more
assertEquals( 2,
((List) session.getGlobal( "list" )).size() );
}
@Test
public void testEval() throws Exception {
KnowledgeBase kbase = loadKnowledgeBase( "eval_rule_test.drl" );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
ksession.setGlobal( "five",
new Integer( 5 ) );
final List list = new ArrayList();
ksession.setGlobal( "list",
list );
final Cheese stilton = new Cheese( "stilton",
5 );
ksession.insert( stilton );
ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession( ksession,
true );
ksession.fireAllRules();
assertEquals( stilton,
((List) ksession.getGlobal( "list" )).get( 0 ) );
}
@Test
public void testJaninoEval() throws Exception {
KnowledgeBuilderConfiguration kbconf = KnowledgeBuilderFactory.newKnowledgeBuilderConfiguration();
kbconf.setProperty( JavaDialectConfiguration.JAVA_COMPILER_PROPERTY, "JANINO" );
KnowledgeBase kbase = loadKnowledgeBase( kbconf, "eval_rule_test.drl" );
kbase = SerializationHelper.serializeObject( kbase );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
ksession.setGlobal( "five",
new Integer( 5 ) );
final List list = new ArrayList();
ksession.setGlobal( "list",
list );
final Cheese stilton = new Cheese( "stilton",
5 );
ksession.insert( stilton );
ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession( ksession,
true );
ksession.fireAllRules();
assertEquals( stilton,
((List) ksession.getGlobal( "list" )).get( 0 ) );
}
@Test
public void testEvalMore() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "eval_rule_test_more.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
final List list = new ArrayList();
session.setGlobal( "list",
list );
final Person foo = new Person( "foo" );
session.insert( foo );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
session.fireAllRules();
assertEquals( foo,
((List) session.getGlobal( "list" )).get( 0 ) );
}
@Test
public void testReturnValue() throws Exception {
KnowledgeBase kbase = loadKnowledgeBase( "returnvalue_rule_test.drl" );
kbase = SerializationHelper.serializeObject( kbase );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
ksession.setGlobal( "two",
new Integer( 2 ) );
final List list = new ArrayList();
ksession.setGlobal( "list",
list );
final PersonInterface peter = new Person( "peter",
null,
12 );
ksession.insert( peter );
final PersonInterface jane = new Person( "jane",
null,
10 );
ksession.insert( jane );
ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession( ksession, true );
ksession.fireAllRules();
assertEquals( jane,
((List) ksession.getGlobal( "list" )).get( 0 ) );
assertEquals( peter,
((List) ksession.getGlobal( "list" )).get( 1 ) );
}
@Test
public void testPredicate() throws Exception {
KnowledgeBase kbase = loadKnowledgeBase( "predicate_rule_test.drl" );
kbase = SerializationHelper.serializeObject( kbase );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
ksession.setGlobal( "two",
new Integer( 2 ) );
final List list = new ArrayList();
ksession.setGlobal( "list",
list );
final PersonInterface peter = new Person( "peter",
null,
12 );
ksession.insert( peter );
final PersonInterface jane = new Person( "jane",
null,
10 );
ksession.insert( jane );
ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession( ksession,
true );
ksession.fireAllRules();
assertEquals( jane,
((List) ksession.getGlobal( "list" )).get( 0 ) );
assertEquals( peter,
((List) ksession.getGlobal( "list" )).get( 1 ) );
}
@Test
public void testNullBehaviour() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "null_behaviour.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
final PersonInterface p1 = new Person( "michael",
"food",
40 );
final PersonInterface p2 = new Person( null,
"drink",
30 );
session.insert( p1 );
session.insert( p2 );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
session.fireAllRules();
}
@Test
public void testNullConstraint() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "null_constraint.drl" ) ) );
if ( builder.hasErrors() ) {
for ( DroolsError error : builder.getErrors().getErrors() ) {
logger.warn( error.toString() );
}
}
assertFalse( builder.getErrors().toString(),
builder.hasErrors() );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
final List foo = new ArrayList();
session.setGlobal( "messages",
foo );
final PersonInterface p1 = new Person( null,
"food",
40 );
final Primitives p2 = new Primitives();
p2.setArrayAttribute( null );
session.insert( p1 );
session.insert( p2 );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
session.fireAllRules();
assertEquals( 2,
((List) session.getGlobal( "messages" )).size() );
}
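// 'from' should accept globals and bound declarations as the source of facts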
@Test
public void testBasicFrom() throws Exception {
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "test_From.drl",
getClass() ),
ResourceType.DRL );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
kbase = SerializationHelper.serializeObject( kbase );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
final List list1 = new ArrayList();
ksession.setGlobal( "list1",
list1 );
final List list2 = new ArrayList();
ksession.setGlobal( "list2",
list2 );
final List list3 = new ArrayList();
ksession.setGlobal( "list3",
list3 );
final Cheesery cheesery = new Cheesery();
final Cheese stilton = new Cheese( "stilton",
12 );
final Cheese cheddar = new Cheese( "cheddar",
15 );
cheesery.addCheese( stilton );
cheesery.addCheese( cheddar );
ksession.setGlobal( "cheesery",
cheesery );
ksession.insert( cheesery );
Person p = new Person( "stilton" );
ksession.insert( p );
ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession( ksession,
true );
ksession.fireAllRules();
ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession( ksession,
true );
ksession.fireAllRules();
// from using a global
assertEquals( 2,
((List) ksession.getGlobal( "list1" )).size() );
assertEquals( cheddar,
((List) ksession.getGlobal( "list1" )).get( 0 ) );
assertEquals( stilton,
((List) ksession.getGlobal( "list1" )).get( 1 ) );
// from using a declaration
assertEquals( 2,
((List) ksession.getGlobal( "list2" )).size() );
assertEquals( cheddar,
((List) ksession.getGlobal( "list2" )).get( 0 ) );
assertEquals( stilton,
((List) ksession.getGlobal( "list2" )).get( 1 ) );
// from using a declaration
assertEquals( 1,
((List) ksession.getGlobal( "list3" )).size() );
assertEquals( stilton,
((List) ksession.getGlobal( "list3" )).get( 0 ) );
}
@Test
public void testFromWithParams() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_FromWithParams.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
WorkingMemory workingMemory = ruleBase.newStatefulSession();
List list = new ArrayList();
final Object globalObject = new Object();
workingMemory.setGlobal( "list",
list );
workingMemory.setGlobal( "testObject",
new FromTestClass() );
workingMemory.setGlobal( "globalObject",
globalObject );
final Person bob = new Person( "bob" );
workingMemory.insert( bob );
// TODO java.io.NotSerializableException: org.mvel.util.FastList
// workingMemory = SerializationHelper.serializeObject(workingMemory);
workingMemory.fireAllRules();
assertEquals( 6,
((List) workingMemory.getGlobal( "list" )).size() );
final List array = (List) ((List) workingMemory.getGlobal( "list" )).get( 0 );
assertEquals( 3,
array.size() );
final Person p = (Person) array.get( 0 );
assertEquals( p,
bob );
assertEquals( new Integer( 42 ),
array.get( 1 ) );
final List nested = (List) array.get( 2 );
assertEquals( "x",
nested.get( 0 ) );
assertEquals( "y",
nested.get( 1 ) );
final Map map = (Map) ((List) workingMemory.getGlobal( "list" )).get( 1 );
assertEquals( 2,
map.keySet().size() );
assertTrue( map.keySet().contains( bob ) );
assertEquals( globalObject,
map.get( bob ) );
assertTrue( map.keySet().contains( "key1" ) );
final Map nestedMap = (Map) map.get( "key1" );
assertEquals( 1,
nestedMap.keySet().size() );
assertTrue( nestedMap.keySet().contains( "key2" ) );
assertEquals( "value2",
nestedMap.get( "key2" ) );
assertEquals( new Integer( 42 ),
((List) workingMemory.getGlobal( "list" )).get( 2 ) );
assertEquals( "literal",
((List) workingMemory.getGlobal( "list" )).get( 3 ) );
assertEquals( bob,
((List) workingMemory.getGlobal( "list" )).get( 4 ) );
assertEquals( globalObject,
((List) workingMemory.getGlobal( "list" )).get( 5 ) );
}
@Test
public void testFromWithNewConstructor() throws Exception {
DrlParser parser = new DrlParser();
PackageDescr descr = parser.parse( new InputStreamReader( getClass().getResourceAsStream( "test_FromWithNewConstructor.drl" ) ) );
PackageBuilder builder = new PackageBuilder();
builder.addPackage( descr );
Package pkg = builder.getPackage();
pkg.checkValidity();
pkg = SerializationHelper.serializeObject( pkg );
}
/**
* @see JBRULES-1415 Certain uses of from causes NullPointerException in WorkingMemoryLogger
*/
@Test
public void testFromDeclarationWithWorkingMemoryLogger() throws Exception {
String rule = "package org.test;\n";
rule += "import org.drools.Cheesery\n";
rule += "import org.drools.Cheese\n";
rule += "global java.util.List list\n";
rule += "rule \"Test Rule\"\n";
rule += "when\n";
rule += " $cheesery : Cheesery()\n";
rule += " Cheese( $type : type) from $cheesery.cheeses\n";
rule += "then\n";
rule += " list.add( $type );\n";
rule += "end";
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new StringReader( rule ) );
final Package pkg = builder.getPackage();
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
StatefulSession session = ruleBase.newStatefulSession();
List list = new ArrayList();
session.setGlobal( "list",
list );
Cheesery cheesery = new Cheesery();
cheesery.addCheese( new Cheese( "stilton",
22 ) );
session.insert( cheesery );
// TODO java.io.EOFException
// session = SerializationHelper.serializeObject(session);
session.fireAllRules();
assertEquals( 1,
((List) session.getGlobal( "list" )).size() );
assertEquals( "stilton",
((List) session.getGlobal( "list" )).get( 0 ) );
}
@Test
public void testWithInvalidRule() throws Exception {
final PackageBuilderConfiguration conf = new PackageBuilderConfiguration();
final JavaDialectConfiguration jconf = (JavaDialectConfiguration) conf.getDialectConfiguration( "java" );
jconf.setCompiler( JavaDialectConfiguration.ECLIPSE );
final PackageBuilder builder = new PackageBuilder( conf );
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "invalid_rule.drl" ) ) );
final Package pkg = builder.getPackage();
// Mark: please check whether the consequence should/shouldn't be built
// Rule badBoy = pkg.getRules()[0];
// assertFalse(badBoy.isValid());
RuntimeException runtime = null;
// adding this package should fail loudly.
RuleBase ruleBase = getSinglethreadRuleBase();
try {
ruleBase.addPackage( pkg );
fail( "Should have thrown an exception as the rule is NOT VALID." );
} catch ( final RuntimeException e ) {
assertNotNull( e.getMessage() );
runtime = e;
}
ruleBase = SerializationHelper.serializeObject( ruleBase );
assertTrue( builder.getErrors().getErrors().length > 0 );
final String pretty = builder.getErrors().toString();
assertFalse( pretty.equals( "" ) );
assertEquals( pretty,
runtime.getMessage() );
}
@Test
public void testWithInvalidRule2() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "invalid_rule2.drl" ) ) );
assertTrue( builder.hasErrors() );
String err = builder.getErrors().toString();
logger.info( err );
}
@Test
public void testErrorLineNumbers() throws Exception {
// this test aims to test semantic errors
// parser errors are another test case
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "errors_in_rule.drl" ) ) );
final Package pkg = builder.getPackage();
DroolsError[] errors = builder.getErrors().getErrors();
assertEquals( 3,
errors.length );
final DescrBuildError stiltonError = (DescrBuildError) errors[0];
assertTrue( stiltonError.getMessage().contains( "Stilton" ) );
assertNotNull( stiltonError.getDescr() );
assertTrue( stiltonError.getLine() != -1 );
// check that it's getting it from the ruleDescr
assertEquals( stiltonError.getLine(),
stiltonError.getDescr().getLine() );
// check the absolute error line number (there are more).
assertEquals( 11,
stiltonError.getLine() );
final DescrBuildError poisonError = (DescrBuildError) errors[1];
assertTrue( poisonError.getMessage().contains( "Poison" ) );
assertEquals( 13,
poisonError.getLine() );
assertTrue( errors[2].getMessage().contains( "add" ) );
// now check the RHS, not being too specific yet, as long as it has the
// rule's line number, not zero
final DescrBuildError rhsError = (DescrBuildError) errors[2];
assertTrue( rhsError.getLine() >= 8 && rhsError.getLine() <= 17 ); // TODO this should be 16
}
@Test
public void testErrorsParser() throws Exception {
final DrlParser parser = new DrlParser();
assertEquals( 0,
parser.getErrors().size() );
parser.parse( new InputStreamReader( getClass().getResourceAsStream( "errors_parser_multiple.drl" ) ) );
assertTrue( parser.hasErrors() );
assertTrue( parser.getErrors().size() > 0 );
assertTrue( parser.getErrors().get( 0 ) instanceof ParserError );
final ParserError first = ((ParserError) parser.getErrors().get( 0 ));
assertTrue( first.getMessage() != null );
assertFalse( first.getMessage().equals( "" ) );
}
@Test
public void testAssertRetract() throws Exception {
// postponed while I sort out KnowledgeHelperFixer
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "assert_retract.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "list",
list );
final PersonInterface person = new Person( "michael",
"cheese" );
person.setStatus( "start" );
workingMemory.insert( person );
// TODO org.drools.spi.ConsequenceException: org.drools.FactException: Update error: handle not found for object:
// workingMemory = SerializationHelper.serializeObject(workingMemory);
workingMemory.fireAllRules();
List<String> results = (List<String>) workingMemory.getGlobal( "list" );
for( String result : results ) {
logger.info( result );
}
assertEquals( 5,
results.size() );
assertTrue( results.contains( "first" ) );
assertTrue( results.contains( "second" ) );
assertTrue( results.contains( "third" ) );
assertTrue( results.contains( "fourth" ) );
assertTrue( results.contains( "fifth" ) );
}
@Test
public void testPredicateAsFirstPattern() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "predicate_as_first_pattern.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final Cheese mussarela = new Cheese( "Mussarela",
35 );
workingMemory.insert( mussarela );
final Cheese provolone = new Cheese( "Provolone",
20 );
workingMemory.insert( provolone );
workingMemory.fireAllRules();
assertEquals( "The rule is being incorrectly fired",
35,
mussarela.getPrice() );
assertEquals( "Rule is incorrectly being fired",
20,
provolone.getPrice() );
}
@Test
public void testConsequenceException() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_ConsequenceException.drl" ) ) );
final Package pkg = builder.getPackage();
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final Cheese brie = new Cheese( "brie",
12 );
workingMemory.insert( brie );
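        // firing should surface the RHS exception as a ConsequenceException
        // carrying the rule name and the original cause message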
try {
workingMemory.fireAllRules();
fail( "Should throw an Exception from the Consequence" );
} catch ( final org.drools.runtime.rule.ConsequenceException e ) {
assertEquals( "Throw Consequence Exception",
e.getActivation().getRule().getName() );
assertEquals( "this should throw an exception",
e.getCause().getMessage() );
}
}
@Test
public void testCustomConsequenceException() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_ConsequenceException.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBaseConfiguration conf = new RuleBaseConfiguration();
conf.setConsequenceExceptionHandler( CustomConsequenceExceptionHandler.class.getName() );
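        // register a custom handler so RHS exceptions are routed to it
        // instead of being rethrown by fireAllRules()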
RuleBase ruleBase = getRuleBase( conf );
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final Cheese brie = new Cheese( "brie",
12 );
workingMemory.insert( brie );
workingMemory.fireAllRules();
assertTrue( ((CustomConsequenceExceptionHandler) ((DefaultAgenda) workingMemory.getAgenda()).getConsequenceExceptionHandler()).isCalled() );
}
public static class CustomConsequenceExceptionHandler
implements
ConsequenceExceptionHandler {
private boolean called;
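        // set by handleException(); the test queries it through isCalled()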
public void handleException( org.drools.spi.Activation activation,
org.drools.WorkingMemory workingMemory,
Exception exception ) {
this.called = true;
}
public boolean isCalled() {
return this.called;
}
public void readExternal( ObjectInput in ) throws IOException,
ClassNotFoundException {
called = in.readBoolean();
}
public void writeExternal( ObjectOutput out ) throws IOException {
out.writeBoolean( called );
}
}
@Test
public void testEvalException() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_EvalException.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final Cheese brie = new Cheese( "brie",
12 );
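        // the rule's eval throws, so fireAllRules() is expected to fail with that cause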
try {
workingMemory.insert( brie );
workingMemory.fireAllRules();
fail( "Should throw an Exception from the Eval" );
} catch ( final Exception e ) {
assertEquals( "this should throw an exception",
e.getCause().getMessage() );
}
}
@Test
public void testPredicateException() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_PredicateException.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final Cheese brie = new Cheese( "brie",
12 );
try {
workingMemory.insert( brie );
workingMemory.fireAllRules();
fail( "Should throw an Exception from the Predicate" );
} catch ( final Exception e ) {
Throwable cause = e.getCause();
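            // the predicate may be invoked reflectively, so unwrap any
            // InvocationTargetException before checking the cause message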
if (cause instanceof InvocationTargetException) {
cause = ((InvocationTargetException)cause).getTargetException();
}
assertTrue( cause.getMessage().contains( "this should throw an exception" ) );
}
}
@Test
public void testReturnValueException() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_ReturnValueException.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final Cheese brie = new Cheese( "brie",
12 );
try {
workingMemory.insert( brie );
workingMemory.fireAllRules();
fail( "Should throw an Exception from the ReturnValue" );
} catch ( final Exception e ) {
            Throwable root = e;
            while (root.getCause() != null) root = root.getCause();
            assertTrue( root.getMessage().contains( "this should throw an exception" ) );
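            // walk down to the root cause before checking its message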
}
}
@Test
public void testMultiRestrictionFieldConstraint() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_MultiRestrictionFieldConstraint.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list1 = new ArrayList();
workingMemory.setGlobal( "list1",
list1 );
final List list2 = new ArrayList();
workingMemory.setGlobal( "list2",
list2 );
final List list3 = new ArrayList();
workingMemory.setGlobal( "list3",
list3 );
final List list4 = new ArrayList();
workingMemory.setGlobal( "list4",
list4 );
final Person youngChili1 = new Person( "young chili1" );
youngChili1.setAge( 12 );
youngChili1.setHair( "blue" );
final Person youngChili2 = new Person( "young chili2" );
youngChili2.setAge( 25 );
youngChili2.setHair( "purple" );
final Person chili1 = new Person( "chili1" );
chili1.setAge( 35 );
chili1.setHair( "red" );
final Person chili2 = new Person( "chili2" );
chili2.setAge( 38 );
chili2.setHair( "indigigo" );
final Person oldChili1 = new Person( "old chili1" );
oldChili1.setAge( 45 );
oldChili1.setHair( "green" );
final Person oldChili2 = new Person( "old chili2" );
oldChili2.setAge( 48 );
oldChili2.setHair( "blue" );
workingMemory.insert( youngChili1 );
workingMemory.insert( youngChili2 );
workingMemory.insert( chili1 );
workingMemory.insert( chili2 );
workingMemory.insert( oldChili1 );
workingMemory.insert( oldChili2 );
workingMemory.fireAllRules();
assertEquals( 1,
list1.size() );
assertTrue( list1.contains( chili1 ) );
assertEquals( 2,
list2.size() );
assertTrue( list2.contains( chili1 ) );
assertTrue( list2.contains( chili2 ) );
assertEquals( 2,
list3.size() );
assertTrue( list3.contains( youngChili1 ) );
assertTrue( list3.contains( youngChili2 ) );
assertEquals( 2,
list4.size() );
assertTrue( list4.contains( youngChili1 ) );
assertTrue( list4.contains( chili1 ) );
}
@Test
@Ignore
public void testDumpers() throws Exception {
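        // round-trips the parsed package through the DRL and XML dumpers and
        // checks that the rebuilt rule bases produce the same results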
final DrlParser parser = new DrlParser();
final PackageDescr pkg = parser.parse( new InputStreamReader( getClass().getResourceAsStream( "test_Dumpers.drl" ) ) );
if ( parser.hasErrors() ) {
for ( DroolsError error : parser.getErrors() ) {
logger.warn( error.toString() );
}
fail( parser.getErrors().toString() );
}
PackageBuilder builder = new PackageBuilder();
builder.addPackage( pkg );
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( builder.getPackage() );
ruleBase = SerializationHelper.serializeObject( ruleBase );
WorkingMemory workingMemory = ruleBase.newStatefulSession();
List list = new ArrayList();
workingMemory.setGlobal( "list",
list );
final Cheese brie = new Cheese( "brie",
12 );
workingMemory.insert( brie );
workingMemory.fireAllRules();
assertEquals( 3,
list.size() );
assertEquals( "3 1",
list.get( 0 ) );
assertEquals( "MAIN",
list.get( 1 ) );
assertEquals( "1 1",
list.get( 2 ) );
final DrlDumper drlDumper = new DrlDumper();
final String drlResult = drlDumper.dump( pkg );
builder = new PackageBuilder();
builder.addPackageFromDrl( new StringReader( drlResult ) );
if ( builder.hasErrors() ) {
for ( DroolsError error : builder.getErrors().getErrors() ) {
logger.warn( error.toString() );
}
fail( parser.getErrors().toString() );
}
ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( builder.getPackage() );
workingMemory = ruleBase.newStatefulSession();
list = new ArrayList();
workingMemory.setGlobal( "list",
list );
workingMemory.insert( brie );
workingMemory.fireAllRules();
assertEquals( 3,
list.size() );
assertEquals( "3 1",
list.get( 0 ) );
assertEquals( "MAIN",
list.get( 1 ) );
assertEquals( "1 1",
list.get( 2 ) );
final XmlDumper xmlDumper = new XmlDumper();
final String xmlResult = xmlDumper.dump( pkg );
// System.out.println( xmlResult );
builder = new PackageBuilder();
builder.addPackageFromXml( new StringReader( xmlResult ) );
if ( builder.hasErrors() ) {
for ( DroolsError error : builder.getErrors().getErrors() ) {
logger.warn( error.toString() );
}
fail( parser.getErrors().toString() );
}
ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( builder.getPackage() );
workingMemory = ruleBase.newStatefulSession();
list = new ArrayList();
workingMemory.setGlobal( "list",
list );
workingMemory.insert( brie );
workingMemory.fireAllRules();
assertEquals( 3,
list.size() );
assertEquals( "3 1",
list.get( 0 ) );
assertEquals( "MAIN",
list.get( 1 ) );
assertEquals( "1 1",
list.get( 2 ) );
}
@Test
public void testContainsCheese() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_ContainsCheese.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "list",
list );
final Cheese stilton = new Cheese( "stilton",
12 );
workingMemory.insert( stilton );
final Cheese brie = new Cheese( "brie",
10 );
workingMemory.insert( brie );
final Cheesery cheesery = new Cheesery();
cheesery.getCheeses().add( stilton );
workingMemory.insert( cheesery );
workingMemory.fireAllRules();
assertEquals( 2,
list.size() );
assertEquals( stilton,
list.get( 0 ) );
assertEquals( brie,
list.get( 1 ) );
}
@Test
public void testDuplicateRuleNames() throws Exception {
PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_DuplicateRuleName1.drl" ) ) );
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( builder.getPackage() );
ruleBase = SerializationHelper.serializeObject( ruleBase );
builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_DuplicateRuleName2.drl" ) ) );
ruleBase.addPackage( builder.getPackage() );
// @todo: this is from JBRULES-394 - maybe we should test more stuff
// here?
}
@Test
public void testNullValuesIndexing() throws Exception {
final Reader reader = new InputStreamReader( getClass().getResourceAsStream( "test_NullValuesIndexing.drl" ) );
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( reader );
final Package pkg1 = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg1 );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
        // add two persons with null name and likes attributes
        final PersonInterface bob = new Person( null,
                                                null );
        bob.setStatus( "P1" );
        final PersonInterface pete = new Person( null,
                                                 null );
        pete.setStatus( "P2" );
workingMemory.insert( bob );
workingMemory.insert( pete );
workingMemory.fireAllRules();
assertEquals( "Indexing with null values is not working correctly.",
"OK",
bob.getStatus() );
assertEquals( "Indexing with null values is not working correctly.",
"OK",
pete.getStatus() );
}
@Test
public void testEmptyRule() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_EmptyRule.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "list",
list );
workingMemory.fireAllRules();
assertTrue( list.contains( "fired1" ) );
assertTrue( list.contains( "fired2" ) );
}
@Test
    public void testJustEval() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_NoPatterns.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "list",
list );
workingMemory.fireAllRules();
assertTrue( list.contains( "fired1" ) );
assertTrue( list.contains( "fired3" ) );
}
@Test
public void testOrWithBinding() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_OrWithBindings.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
final Person hola = new Person( "hola" );
workingMemory.insert( hola );
workingMemory.fireAllRules();
assertEquals( 0,
list.size() );
Cheese brie = new Cheese( "brie" );
workingMemory.insert( brie );
workingMemory.fireAllRules();
assertEquals( 2,
list.size() );
assertTrue( list.contains( hola ) );
assertTrue( list.contains( brie ) );
}
@Test
public void testJoinNodeModifyObject() throws Exception {
final Reader reader = new InputStreamReader( getClass().getResourceAsStream( "test_JoinNodeModifyObject.drl" ) );
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( reader );
final Package pkg1 = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg1 );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
try {
final List orderedFacts = new ArrayList();
final List errors = new ArrayList();
workingMemory.setGlobal( "orderedNumbers",
orderedFacts );
workingMemory.setGlobal( "errors",
errors );
final int MAX = 2;
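            // insert the numbers with reversed indexes; the rules are expected
            // to put them back into ascending order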
for ( int i = 1; i <= MAX; i++ ) {
final IndexedNumber n = new IndexedNumber( i,
MAX - i + 1 );
workingMemory.insert( n );
}
workingMemory.fireAllRules();
assertTrue( "Processing generated errors: " + errors.toString(),
errors.isEmpty() );
for ( int i = 1; i <= MAX; i++ ) {
final IndexedNumber n = (IndexedNumber) orderedFacts.get( i - 1 );
assertEquals( "Fact is out of order",
i,
n.getIndex() );
}
} finally {
}
}
@Test
public void testInsurancePricingExample() throws Exception {
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "insurance_pricing_example.drl",
getClass() ),
ResourceType.DRL );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
// now create some test data
final Driver driver = new Driver();
final Policy policy = new Policy();
ksession.insert( driver );
ksession.insert( policy );
ksession.fireAllRules();
assertEquals( 120,
policy.getBasePrice() );
}
@Test
public void testLLR() throws Exception {
// read in the source
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "test_JoinNodeModifyTuple.drl",
getClass() ),
ResourceType.DRL );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession( ksession,
true );
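        // insert four successive position reports for two targets, firing the rules after each pair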
// 1st time
org.drools.Target tgt = new org.drools.Target();
tgt.setLabel( "Santa-Anna" );
tgt.setLat( new Float( 60.26544f ) );
tgt.setLon( new Float( 28.952137f ) );
tgt.setCourse( new Float( 145.0f ) );
tgt.setSpeed( new Float( 12.0f ) );
tgt.setTime( new Float( 1.8666667f ) );
ksession.insert( tgt );
tgt = new org.drools.Target();
tgt.setLabel( "Santa-Maria" );
tgt.setLat( new Float( 60.236874f ) );
tgt.setLon( new Float( 28.992579f ) );
tgt.setCourse( new Float( 325.0f ) );
tgt.setSpeed( new Float( 8.0f ) );
tgt.setTime( new Float( 1.8666667f ) );
ksession.insert( tgt );
ksession.fireAllRules();
// 2nd time
tgt = new org.drools.Target();
tgt.setLabel( "Santa-Anna" );
tgt.setLat( new Float( 60.265343f ) );
tgt.setLon( new Float( 28.952267f ) );
tgt.setCourse( new Float( 145.0f ) );
tgt.setSpeed( new Float( 12.0f ) );
tgt.setTime( new Float( 1.9f ) );
ksession.insert( tgt );
tgt = new org.drools.Target();
tgt.setLabel( "Santa-Maria" );
tgt.setLat( new Float( 60.236935f ) );
tgt.setLon( new Float( 28.992493f ) );
tgt.setCourse( new Float( 325.0f ) );
tgt.setSpeed( new Float( 8.0f ) );
tgt.setTime( new Float( 1.9f ) );
ksession.insert( tgt );
ksession.fireAllRules();
// 3d time
tgt = new org.drools.Target();
tgt.setLabel( "Santa-Anna" );
tgt.setLat( new Float( 60.26525f ) );
tgt.setLon( new Float( 28.952396f ) );
tgt.setCourse( new Float( 145.0f ) );
tgt.setSpeed( new Float( 12.0f ) );
tgt.setTime( new Float( 1.9333333f ) );
ksession.insert( tgt );
tgt = new org.drools.Target();
tgt.setLabel( "Santa-Maria" );
tgt.setLat( new Float( 60.236996f ) );
tgt.setLon( new Float( 28.992405f ) );
tgt.setCourse( new Float( 325.0f ) );
tgt.setSpeed( new Float( 8.0f ) );
tgt.setTime( new Float( 1.9333333f ) );
ksession.insert( tgt );
ksession.fireAllRules();
// 4th time
tgt = new org.drools.Target();
tgt.setLabel( "Santa-Anna" );
tgt.setLat( new Float( 60.265163f ) );
tgt.setLon( new Float( 28.952526f ) );
tgt.setCourse( new Float( 145.0f ) );
tgt.setSpeed( new Float( 12.0f ) );
tgt.setTime( new Float( 1.9666667f ) );
ksession.insert( tgt );
tgt = new org.drools.Target();
tgt.setLabel( "Santa-Maria" );
tgt.setLat( new Float( 60.237057f ) );
tgt.setLon( new Float( 28.99232f ) );
tgt.setCourse( new Float( 325.0f ) );
tgt.setSpeed( new Float( 8.0f ) );
tgt.setTime( new Float( 1.9666667f ) );
ksession.insert( tgt );
ksession.fireAllRules();
}
@Test
public void testReturnValueAndGlobal() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_ReturnValueAndGlobal.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List matchlist = new ArrayList();
workingMemory.setGlobal( "matchingList",
matchlist );
final List nonmatchlist = new ArrayList();
workingMemory.setGlobal( "nonMatchingList",
nonmatchlist );
workingMemory.setGlobal( "cheeseType",
"stilton" );
final Cheese stilton1 = new Cheese( "stilton",
5 );
final Cheese stilton2 = new Cheese( "stilton",
7 );
final Cheese brie = new Cheese( "brie",
4 );
workingMemory.insert( stilton1 );
workingMemory.insert( stilton2 );
workingMemory.insert( brie );
workingMemory.fireAllRules();
assertEquals( 2,
matchlist.size() );
assertEquals( 1,
nonmatchlist.size() );
}
@Test
public void testDeclaringAndUsingBindsInSamePattern() throws Exception {
final RuleBaseConfiguration config = new RuleBaseConfiguration();
config.setRemoveIdentities( true );
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_DeclaringAndUsingBindsInSamePattern.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
RuleBase ruleBase = getRuleBase( config );
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List sensors = new ArrayList();
workingMemory.setGlobal( "sensors",
sensors );
final Sensor sensor1 = new Sensor( 100,
150 );
workingMemory.insert( sensor1 );
workingMemory.fireAllRules();
assertEquals( 0,
sensors.size() );
final Sensor sensor2 = new Sensor( 200,
150 );
workingMemory.insert( sensor2 );
workingMemory.fireAllRules();
assertEquals( 3,
sensors.size() );
}
@Test
public void testMissingImports() {
try {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_missing_import.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
fail( "Should have thrown an InvalidRulePackage" );
} catch ( final InvalidRulePackage e ) {
// everything fine
} catch ( final Exception e ) {
e.printStackTrace();
fail( "Should have thrown an InvalidRulePackage Exception instead of " + e.getMessage() );
}
}
@Test
public void testNestedConditionalElements() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_NestedConditionalElements.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
final State state = new State( "SP" );
workingMemory.insert( state );
final Person bob = new Person( "Bob" );
bob.setStatus( state.getState() );
bob.setLikes( "stilton" );
workingMemory.insert( bob );
workingMemory.fireAllRules();
assertEquals( 0,
list.size() );
workingMemory.insert( new Cheese( bob.getLikes(),
10 ) );
workingMemory.fireAllRules();
assertEquals( 1,
list.size() );
}
@Test
public void testDeclarationUsage() throws Exception {
try {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_DeclarationUsage.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
            fail( "Should have thrown an exception" );
} catch ( final InvalidRulePackage e ) {
// success ... correct exception thrown
} catch ( final Exception e ) {
e.printStackTrace();
fail( "Wrong exception raised: " + e.getMessage() );
}
}
@Test
public void testDeclareAndFrom() throws Exception {
KnowledgeBase kbase = loadKnowledgeBase( "test_DeclareWithFrom.drl" );
FactType profileType = kbase.getFactType( "org.drools",
"Profile" );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
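        // Profile is declared in the DRL, so it is instantiated and populated
        // reflectively through the FactType API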
Object profile = profileType.newInstance();
Map<String, Integer> map = new HashMap<String, Integer>();
map.put( "internet",
Integer.valueOf( 2 ) );
profileType.set( profile,
"pageFreq",
map );
ksession.insert( profile );
ksession.fireAllRules();
ksession.dispose();
}
@Test
public void testDeclarationNonExistingField() throws Exception {
try {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_DeclarationOfNonExistingField.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
            fail( "Should have thrown an exception" );
} catch ( final InvalidRulePackage e ) {
// success ... correct exception thrown
} catch ( final Exception e ) {
e.printStackTrace();
fail( "Wrong exception raised: " + e.getMessage() );
}
}
@Test
public void testUnbalancedTrees() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_UnbalancedTrees.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory wm = ruleBase.newStatefulSession();
wm.insert( new Cheese( "a",
10 ) );
wm.insert( new Cheese( "b",
10 ) );
wm.insert( new Cheese( "c",
10 ) );
wm.insert( new Cheese( "d",
10 ) );
final Cheese e = new Cheese( "e",
10 );
wm.insert( e );
wm.fireAllRules();
        assertEquals( "Rule should have fired twice, setting the price to 30",
30,
e.getPrice() );
// success
}
@Test
public void testImportConflict() throws Exception {
RuleBase ruleBase = getSinglethreadRuleBase();
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_ImportConflict.drl" ) ) );
final Package pkg = builder.getPackage();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
}
@Test
public void testEmptyIdentifier() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_emptyIdentifier.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List result = new ArrayList();
workingMemory.setGlobal( "results",
result );
final Person person = new Person( "bob" );
final Cheese cheese = new Cheese( "brie",
10 );
workingMemory.insert( person );
workingMemory.insert( cheese );
workingMemory.fireAllRules();
assertEquals( 4,
result.size() );
}
@Test
public void testDuplicateVariableBinding() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_duplicateVariableBinding.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final Map result = new HashMap();
workingMemory.setGlobal( "results",
result );
final Cheese stilton = new Cheese( "stilton",
20 );
final Cheese brie = new Cheese( "brie",
10 );
workingMemory.insert( stilton );
workingMemory.insert( brie );
workingMemory.fireAllRules();
assertEquals( 5,
result.size() );
assertEquals( stilton.getPrice(),
((Integer) result.get( stilton.getType() )).intValue() );
assertEquals( brie.getPrice(),
((Integer) result.get( brie.getType() )).intValue() );
assertEquals( stilton.getPrice(),
((Integer) result.get( stilton )).intValue() );
assertEquals( brie.getPrice(),
((Integer) result.get( brie )).intValue() );
assertEquals( stilton.getPrice(),
((Integer) result.get( "test3" + stilton.getType() )).intValue() );
workingMemory.insert( new Person( "bob",
brie.getType() ) );
workingMemory.fireAllRules();
assertEquals( 6,
result.size() );
assertEquals( brie.getPrice(),
((Integer) result.get( "test3" + brie.getType() )).intValue() );
}
@Test
public void testShadowProxyInHirarchies() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_ShadowProxyInHirarchies.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
workingMemory.insert( new Child( "gp" ) );
workingMemory.fireAllRules();
}
@Test
public void testSelfReference() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_SelfReference.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
final Order order = new Order( 10,
"Bob" );
final OrderItem item1 = new OrderItem( order,
1 );
final OrderItem item2 = new OrderItem( order,
2 );
final OrderItem anotherItem1 = new OrderItem( null,
3 );
final OrderItem anotherItem2 = new OrderItem( null,
4 );
workingMemory.insert( order );
workingMemory.insert( item1 );
workingMemory.insert( item2 );
workingMemory.insert( anotherItem1 );
workingMemory.insert( anotherItem2 );
workingMemory.fireAllRules();
assertEquals( 2,
results.size() );
assertTrue( results.contains( item1 ) );
assertTrue( results.contains( item2 ) );
}
@Test
public void testNumberComparisons() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_NumberComparisons.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
// asserting the sensor object
final RandomNumber rn = new RandomNumber();
rn.setValue( 10 );
workingMemory.insert( rn );
final Guess guess = new Guess();
guess.setValue( new Integer( 5 ) );
final FactHandle handle = workingMemory.insert( guess );
workingMemory.fireAllRules();
// HIGHER
assertEquals( 1,
list.size() );
assertEquals( "HIGHER",
list.get( 0 ) );
guess.setValue( new Integer( 15 ) );
workingMemory.update( handle,
guess );
workingMemory.fireAllRules();
// LOWER
assertEquals( 2,
list.size() );
assertEquals( "LOWER",
list.get( 1 ) );
guess.setValue( new Integer( 10 ) );
workingMemory.update( handle,
guess );
workingMemory.fireAllRules();
// CORRECT
assertEquals( 3,
list.size() );
assertEquals( "CORRECT",
list.get( 2 ) );
}
@Test
public void testEventModel() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_EventModel.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory wm = ruleBase.newStatefulSession();
final List agendaList = new ArrayList();
final AgendaEventListener agendaEventListener = new AgendaEventListener() {
public void activationCancelled( ActivationCancelledEvent event,
WorkingMemory workingMemory ) {
agendaList.add( event );
}
public void activationCreated( ActivationCreatedEvent event,
WorkingMemory workingMemory ) {
agendaList.add( event );
}
public void afterActivationFired( AfterActivationFiredEvent event,
WorkingMemory workingMemory ) {
agendaList.add( event );
}
public void agendaGroupPopped( AgendaGroupPoppedEvent event,
WorkingMemory workingMemory ) {
agendaList.add( event );
}
public void agendaGroupPushed( AgendaGroupPushedEvent event,
WorkingMemory workingMemory ) {
agendaList.add( event );
}
public void beforeActivationFired( BeforeActivationFiredEvent event,
WorkingMemory workingMemory ) {
agendaList.add( event );
}
public void afterRuleFlowGroupActivated(
RuleFlowGroupActivatedEvent event,
WorkingMemory workingMemory ) {
                // not used in this test
}
public void afterRuleFlowGroupDeactivated(
RuleFlowGroupDeactivatedEvent event,
WorkingMemory workingMemory ) {
                // not used in this test
}
public void beforeRuleFlowGroupActivated(
RuleFlowGroupActivatedEvent event,
WorkingMemory workingMemory ) {
                // not used in this test
}
public void beforeRuleFlowGroupDeactivated(
RuleFlowGroupDeactivatedEvent event,
WorkingMemory workingMemory ) {
                // not used in this test
}
};
final List wmList = new ArrayList();
final WorkingMemoryEventListener workingMemoryListener = new WorkingMemoryEventListener() {
public void objectInserted( ObjectInsertedEvent event ) {
wmList.add( event );
}
public void objectUpdated( ObjectUpdatedEvent event ) {
wmList.add( event );
}
public void objectRetracted( ObjectRetractedEvent event ) {
wmList.add( event );
}
};
wm.addEventListener( workingMemoryListener );
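        // each working memory operation below should be reported to the listener
        // with the same fact handle that was returned by insert()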
final Cheese stilton = new Cheese( "stilton",
15 );
final Cheese cheddar = new Cheese( "cheddar",
17 );
final FactHandle stiltonHandle = wm.insert( stilton );
final ObjectInsertedEvent oae = (ObjectInsertedEvent) wmList.get( 0 );
assertSame( stiltonHandle,
oae.getFactHandle() );
wm.update( stiltonHandle,
stilton );
final ObjectUpdatedEvent ome = (ObjectUpdatedEvent) wmList.get( 1 );
assertSame( stiltonHandle,
ome.getFactHandle() );
wm.retract( stiltonHandle );
final ObjectRetractedEvent ore = (ObjectRetractedEvent) wmList.get( 2 );
assertSame( stiltonHandle,
ore.getFactHandle() );
wm.insert( cheddar );
}
@Test
public void testImplicitDeclarations() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_implicitDeclarations.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
workingMemory.setGlobal( "factor",
new Double( 1.2 ) );
final Cheese cheese = new Cheese( "stilton",
10 );
workingMemory.insert( cheese );
workingMemory.fireAllRules();
assertEquals( 1,
results.size() );
}
@Test
public void testMVELImplicitWithFrom() {
String str = "" +
"package org.test \n" +
"import java.util.List \n" +
"global java.util.List list \n" +
"global java.util.List list2 \n" +
"rule \"show\" dialect \"mvel\" \n" +
"when \n" +
" $m : List( eval( size == 0 ) ) from [list] \n" +
"then \n" +
" list2.add('r1'); \n" +
"end \n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
fail( kbuilder.getErrors().toString() );
}
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List list = new ArrayList();
ksession.setGlobal( "list",
list );
ksession.setGlobal( "list2",
list );
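        // both globals reference the same list: the rule matches it via 'from [list]'
        // and appends "r1" to it through list2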
ksession.fireAllRules();
assertEquals( "r1",
list.get( 0 ) );
}
@Test
public void testJavaImplicitWithFrom() {
String str = "" +
"package org.test \n" +
"import java.util.List \n" +
"global java.util.List list \n" +
"global java.util.List list2 \n" +
"rule \"show\" dialect \"java\" \n" +
"when \n" +
" $m : List( eval( size == 0 ) ) from [list] \n" +
"then \n" +
" list2.add('r1'); \n" +
"end \n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
fail( kbuilder.getErrors().toString() );
}
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List list = new ArrayList();
ksession.setGlobal( "list",
list );
ksession.setGlobal( "list2",
list );
ksession.fireAllRules();
assertEquals( "r1",
list.get( 0 ) );
}
@Test
public void testCastingInsideEvals() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_castsInsideEval.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
workingMemory.setGlobal( "value",
new Integer( 20 ) );
workingMemory.fireAllRules();
}
@Test
public void testMemberOfAndNotMemberOf() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_memberOf.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "list",
list );
final Cheese stilton = new Cheese( "stilton",
12 );
final Cheese muzzarela = new Cheese( "muzzarela",
10 );
final Cheese brie = new Cheese( "brie",
15 );
workingMemory.insert( stilton );
workingMemory.insert( muzzarela );
final Cheesery cheesery = new Cheesery();
cheesery.getCheeses().add( stilton.getType() );
cheesery.getCheeses().add( brie.getType() );
workingMemory.insert( cheesery );
workingMemory.fireAllRules();
assertEquals( 2,
list.size() );
assertEquals( stilton,
list.get( 0 ) );
assertEquals( muzzarela,
list.get( 1 ) );
}
@Test
public void testContainsInArray() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_contains_in_array.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "list",
list );
final Primitives p = new Primitives();
p.setStringArray( new String[]{"test1", "test3"} );
workingMemory.insert( p );
workingMemory.fireAllRules();
assertEquals( 2,
list.size() );
assertEquals( "ok1",
list.get( 0 ) );
assertEquals( "ok2",
list.get( 1 ) );
}
@Test
public void testNodeSharingNotExists() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_nodeSharingNotExists.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
workingMemory.fireAllRules();
assertEquals( 1,
list.size() );
assertEquals( "rule1",
list.get( 0 ) );
workingMemory.insert( new Cheese( "stilton",
10 ) );
workingMemory.fireAllRules();
assertEquals( 2,
list.size() );
assertEquals( "rule2",
list.get( 1 ) );
}
@Test
public void testNullBinding() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_nullBindings.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
workingMemory.insert( new Person( "bob" ) );
workingMemory.insert( new Person( null ) );
workingMemory.fireAllRules();
assertEquals( 1,
list.size() );
assertEquals( "OK",
list.get( 0 ) );
}
@Test
public void testModifyRetractWithFunction() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_RetractModifyWithFunction.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final AbstractWorkingMemory workingMemory = (AbstractWorkingMemory) ruleBase.newStatefulSession();
final Cheese stilton = new Cheese( "stilton",
7 );
final Cheese muzzarella = new Cheese( "muzzarella",
9 );
final int sum = stilton.getPrice() + muzzarella.getPrice();
final FactHandle stiltonHandle = workingMemory.insert( stilton );
final FactHandle muzzarellaHandle = workingMemory.insert( muzzarella );
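        // the rule is expected to add the muzzarella price to the stilton and
        // retract the muzzarella, leaving a single fact in the object store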
workingMemory.fireAllRules();
assertEquals( sum,
stilton.getPrice() );
assertEquals( 1,
workingMemory.getObjectStore().size() );
assertNotNull( workingMemory.getObject( stiltonHandle ) );
assertNotNull( workingMemory.getFactHandle( stilton ) );
assertNull( workingMemory.getObject( muzzarellaHandle ) );
assertNull( workingMemory.getFactHandle( muzzarella ) );
}
@Test
public void testConstraintConnectors() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_ConstraintConnectors.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
final Person youngChili1 = new Person( "young chili1" );
youngChili1.setAge( 12 );
youngChili1.setHair( "blue" );
final Person youngChili2 = new Person( "young chili2" );
youngChili2.setAge( 25 );
youngChili2.setHair( "purple" );
final Person chili1 = new Person( "chili1" );
chili1.setAge( 35 );
chili1.setHair( "red" );
final Person chili2 = new Person( "chili2" );
chili2.setAge( 38 );
chili2.setHair( "indigigo" );
final Person oldChili1 = new Person( "old chili1" );
oldChili1.setAge( 45 );
oldChili1.setHair( "green" );
final Person oldChili2 = new Person( "old chili2" );
oldChili2.setAge( 48 );
oldChili2.setHair( "blue" );
final Person veryold = new Person( "very old" );
veryold.setAge( 99 );
veryold.setHair( "gray" );
workingMemory.insert( youngChili1 );
workingMemory.insert( youngChili2 );
workingMemory.insert( chili1 );
workingMemory.insert( chili2 );
workingMemory.insert( oldChili1 );
workingMemory.insert( oldChili2 );
workingMemory.insert( veryold );
workingMemory.fireAllRules();
assertEquals( 4,
results.size() );
assertEquals( chili1,
results.get( 0 ) );
assertEquals( oldChili1,
results.get( 1 ) );
assertEquals( youngChili1,
results.get( 2 ) );
assertEquals( veryold,
results.get( 3 ) );
}
@Test
public void testConnectorsAndOperators() throws Exception {
final KnowledgeBase kbase = SerializationHelper.serializeObject( loadKnowledgeBase( "test_ConstraintConnectorsAndOperators.drl" ) );
final StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.insert( new StockTick( 1,
"RHT",
10,
1000 ) );
ksession.insert( new StockTick( 2,
"IBM",
10,
1100 ) );
final int fired = ksession.fireAllRules();
assertEquals( 1,
fired );
}
@Test
public void testConstraintConnectorOr() throws Exception {
final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newInputStreamResource( getClass().getResourceAsStream( "test_ConstraintConnectorOr.drl" ) ),
ResourceType.DRL );
assertFalse( kbuilder.getErrors().toString(),
kbuilder.hasErrors() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List<Person> results = new ArrayList<Person>();
ksession.setGlobal( "results",
results );
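        // the OR'ed constraints should match anyone who is alive or happy,
        // so nero (neither alive nor happy) is the only person excluded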
final Person mark = new Person( "Mark" );
mark.setAlive( true );
mark.setHappy( true );
final Person bush = new Person( "Bush" );
bush.setAlive( true );
bush.setHappy( false );
final Person conan = new Person( "Conan" );
conan.setAlive( false );
conan.setHappy( true );
final Person nero = new Person( "Nero" );
nero.setAlive( false );
nero.setHappy( false );
ksession.insert( mark );
ksession.insert( bush );
ksession.insert( conan );
ksession.insert( nero );
ksession.fireAllRules();
assertEquals( 3,
results.size() );
assertTrue( results.contains( mark ) );
assertTrue( results.contains( bush ) );
assertTrue( results.contains( conan ) );
}
@Test
public void testMatchesNotMatchesCheese() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_MatchesNotMatches.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "list",
list );
final Cheese stilton = new Cheese( "stilton",
12 );
final Cheese stilton2 = new Cheese( "stilton2",
12 );
final Cheese agedStilton = new Cheese( "aged stilton",
12 );
final Cheese brie = new Cheese( "brie",
10 );
final Cheese brie2 = new Cheese( "brie2",
10 );
final Cheese muzzarella = new Cheese( "muzzarella",
10 );
final Cheese muzzarella2 = new Cheese( "muzzarella2",
10 );
final Cheese provolone = new Cheese( "provolone",
10 );
final Cheese provolone2 = new Cheese( "another cheese (provolone)",
10 );
workingMemory.insert( stilton );
workingMemory.insert( stilton2 );
workingMemory.insert( agedStilton );
workingMemory.insert( brie );
workingMemory.insert( brie2 );
workingMemory.insert( muzzarella );
workingMemory.insert( muzzarella2 );
workingMemory.insert( provolone );
workingMemory.insert( provolone2 );
workingMemory.fireAllRules();
logger.info( list.toString() );
assertEquals( 4,
list.size() );
assertEquals( stilton,
list.get( 0 ) );
assertEquals( brie,
list.get( 1 ) );
assertEquals( agedStilton,
list.get( 2 ) );
assertEquals( provolone,
list.get( 3 ) );
}
@Test
public void testAutomaticBindings() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_AutoBindings.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
final Person bob = new Person( "bob",
"stilton" );
final Cheese stilton = new Cheese( "stilton",
12 );
workingMemory.insert( bob );
workingMemory.insert( stilton );
workingMemory.fireAllRules();
assertEquals( 1,
list.size() );
assertEquals( bob,
list.get( 0 ) );
}
@Test
public void testMatchesMVEL() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_MatchesMVEL.drl" ) ) );
final Package pkg = builder.getPackage();
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
final StatefulSession session = ruleBase.newStatefulSession();
final List results = new ArrayList();
session.setGlobal( "results",
results );
Map map = new HashMap();
map.put( "content",
"hello ;=" );
session.insert( map );
session.fireAllRules();
assertEquals( 1,
results.size() );
}
@Test
public void testMatchesMVEL2() throws Exception {
KnowledgeBase kbase = loadKnowledgeBase( "test_MatchesMVEL2.drl" );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
Map map = new HashMap();
map.put( "content",
"String with . and (routine)" );
ksession.insert( map );
int fired = ksession.fireAllRules();
assertEquals( 2,
fired );
}
@Test
public void testMatchesMVEL3() throws Exception {
KnowledgeBase kbase = loadKnowledgeBase( "test_MatchesMVEL2.drl" );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
Map map = new HashMap();
map.put( "content",
"String with . and ()" );
ksession.insert( map );
int fired = ksession.fireAllRules();
assertEquals( 1,
fired );
}
@Test
public void testQualifiedFieldReference() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_QualifiedFieldReference.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
final Person bob = new Person( "bob",
"stilton" );
final Cheese stilton = new Cheese( "stilton",
12 );
workingMemory.insert( bob );
workingMemory.insert( stilton );
workingMemory.fireAllRules();
assertEquals( 1,
list.size() );
assertEquals( bob,
list.get( 0 ) );
}
@Test
public void testEvalInline() throws Exception {
final String text = "package org.drools\n" +
"rule \"inline eval\"\n" +
"when\n" +
" $str : String()\n" +
" Person( eval( name.startsWith($str) && age == 18) )\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( text );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.insert( "b" );
ksession.insert( new Person( "mark",
50 ) );
int rules = ksession.fireAllRules();
assertEquals( 0,
rules );
ksession.insert( new Person( "bob",
18 ) );
rules = ksession.fireAllRules();
assertEquals( 1,
rules );
}
@Test
public void testMethodCalls() throws Exception {
final String text = "package org.drools\n" +
"rule \"method calls\"\n" +
"when\n" +
" Person( getName().substring(2) == 'b' )\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( text );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.insert( new Person( "mark",
50 ) );
int rules = ksession.fireAllRules();
assertEquals( 0,
rules );
ksession.insert( new Person( "bob",
18 ) );
rules = ksession.fireAllRules();
assertEquals( 1,
rules );
}
@Test
public void testAlphaExpression() throws Exception {
final String text = "package org.drools\n" +
"rule \"alpha\"\n" +
"when\n" +
" Person( 5 < 6 )\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( text );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.insert( new Person( "mark",
50 ) );
int rules = ksession.fireAllRules();
assertEquals( 1,
rules );
}
@Test
public void testEvalCE() throws Exception {
final String text = "package org.drools\n" +
"rule \"inline eval\"\n" +
"when\n" +
" $str : String()\n" +
" $p : Person()\n" +
" eval( $p.getName().startsWith($str) && $p.getName().endsWith($str) )" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( text );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.insert( "b" );
ksession.insert( new Person( "mark",
50 ) );
int rules = ksession.fireAllRules();
assertEquals( 0,
rules );
ksession.insert( new Person( "bob",
18 ) );
rules = ksession.fireAllRules();
assertEquals( 1,
rules );
}
@Test
public void testEvalRewrite() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_EvalRewrite.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
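        // four orders with two items each; the rewritten eval constraints are
        // expected to select a mix of individual items and whole orders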
final Order order1 = new Order( 10,
"Bob" );
final OrderItem item11 = new OrderItem( order1,
1 );
final OrderItem item12 = new OrderItem( order1,
2 );
order1.addItem( item11 );
order1.addItem( item12 );
final Order order2 = new Order( 11,
"Bob" );
final OrderItem item21 = new OrderItem( order2,
1 );
final OrderItem item22 = new OrderItem( order2,
2 );
order2.addItem( item21 );
order2.addItem( item22 );
final Order order3 = new Order( 12,
"Bob" );
final OrderItem item31 = new OrderItem( order3,
1 );
final OrderItem item32 = new OrderItem( order3,
2 );
order3.addItem( item31 );
order3.addItem( item32 );
final Order order4 = new Order( 13,
"Bob" );
final OrderItem item41 = new OrderItem( order4,
1 );
final OrderItem item42 = new OrderItem( order4,
2 );
order4.addItem( item41 );
order4.addItem( item42 );
workingMemory.insert( order1 );
workingMemory.insert( item11 );
workingMemory.insert( item12 );
workingMemory.insert( order2 );
workingMemory.insert( item21 );
workingMemory.insert( item22 );
workingMemory.insert( order3 );
workingMemory.insert( item31 );
workingMemory.insert( item32 );
workingMemory.insert( order4 );
workingMemory.insert( item41 );
workingMemory.insert( item42 );
workingMemory.fireAllRules();
assertEquals( 5,
list.size() );
assertTrue( list.contains( item11 ) );
assertTrue( list.contains( item12 ) );
assertTrue( list.contains( item22 ) );
assertTrue( list.contains( order3 ) );
assertTrue( list.contains( order4 ) );
}
@Test
public void testMapAccess() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_MapAccess.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
Map map = new HashMap();
map.put( "name",
"Edson" );
map.put( "surname",
"Tirelli" );
map.put( "age",
"28" );
workingMemory.insert( map );
workingMemory.fireAllRules();
assertEquals( 1,
list.size() );
assertTrue( list.contains( map ) );
}
@Test
public void testMapNullConstraint() throws Exception {
KnowledgeBase kbase = loadKnowledgeBase( "test_mapNullConstraints.drl" );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
org.drools.event.rule.AgendaEventListener ael = mock( org.drools.event.rule.AgendaEventListener.class );
ksession.addEventListener( ael );
new WorkingMemoryConsoleLogger( ksession );
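        // only the "home" address is populated, so exactly the four null-check
        // rules asserted below should fire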
Map addresses = new HashMap();
addresses.put( "home",
new Address( "home street" ) );
Person bob = new Person( "Bob" );
bob.setNamedAddresses( addresses );
ksession.insert( bob );
ksession.fireAllRules();
ArgumentCaptor<org.drools.event.rule.AfterActivationFiredEvent> arg = ArgumentCaptor.forClass( org.drools.event.rule.AfterActivationFiredEvent.class );
verify( ael,
times( 4 ) ).afterActivationFired( arg.capture() );
org.drools.event.rule.AfterActivationFiredEvent aaf = arg.getAllValues().get( 0 );
assertThat( aaf.getActivation().getRule().getName(),
is( "1. home != null" ) );
aaf = arg.getAllValues().get( 1 );
assertThat( aaf.getActivation().getRule().getName(),
is( "2. not home == null" ) );
aaf = arg.getAllValues().get( 2 );
assertThat( aaf.getActivation().getRule().getName(),
is( "7. work == null" ) );
aaf = arg.getAllValues().get( 3 );
assertThat( aaf.getActivation().getRule().getName(),
is( "8. not work != null" ) );
}
@Test
public void testNoneTypeSafeDeclarations() {
// same namespace
String str = "package org.drools\n" +
"global java.util.List list\n" +
"declare Person\n" +
" @typesafe(false)\n" +
"end\n" +
"rule testTypeSafe\n dialect \"mvel\" when\n" +
" $p : Person( object.street == 's1' )\n" +
"then\n" +
" list.add( $p );\n" +
"end\n";
executeTypeSafeDeclarations( str,
true );
// different namespace with import
str = "package org.drools.test\n" +
"import org.drools.Person\n" +
"global java.util.List list\n" +
"declare Person\n" +
" @typesafe(false)\n" +
"end\n" +
"rule testTypeSafe\n dialect \"mvel\" when\n" +
" $p : Person( object.street == 's1' )\n" +
"then\n" +
" list.add( $p );\n" +
"end\n";
executeTypeSafeDeclarations( str,
true );
// different namespace without import using qualified name
str = "package org.drools.test\n" +
"global java.util.List list\n" +
"declare org.drools.Person\n" +
" @typesafe(false)\n" +
"end\n" +
"rule testTypeSafe\n dialect \"mvel\" when\n" +
" $p : org.drools.Person( object.street == 's1' )\n" +
"then\n" +
" list.add( $p );\n" +
"end\n";
executeTypeSafeDeclarations( str,
true );
// this should fail because the type is not declared non-typesafe (it is @typesafe(true)), so object.street cannot be resolved
str = "package org.drools.test\n" +
"global java.util.List list\n" +
"declare org.drools.Person\n" +
" @typesafe(true)\n" +
"end\n" +
"rule testTypeSafe\n dialect \"mvel\" when\n" +
" $p : org.drools.Person( object.street == 's1' )\n" +
"then\n" +
" list.add( $p );\n" +
"end\n";
executeTypeSafeDeclarations( str,
false );
}
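// helper: compiles the given DRL; when compilation is expected to succeed, it also verifies that
// the non-typesafe object.street constraint matches the inserted Person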
private void executeTypeSafeDeclarations( String str,
boolean mustSucceed ) {
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
if ( mustSucceed ) {
fail( kbuilder.getErrors().toString() );
} else {
return;
}
}
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List list = new ArrayList();
ksession.setGlobal( "list",
list );
Address a = new Address( "s1" );
Person p = new Person( "yoda" );
p.setObject( a );
ksession.insert( p );
ksession.fireAllRules();
assertEquals( p,
list.get( 0 ) );
}
@Test
public void testMapAccessWithVariable() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_MapAccessWithVariable.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
Map map = new HashMap();
map.put( "name",
"Edson" );
map.put( "surname",
"Tirelli" );
map.put( "age",
"28" );
workingMemory.insert( map );
workingMemory.insert( "name" );
workingMemory.fireAllRules();
assertEquals( 1,
list.size() );
assertTrue( list.contains( map ) );
}
// Drools does not support variables inside bindings yet... but we should...
@Test
public void testMapAccessWithVariable2() {
String str = "package org.drools;\n" +
"import java.util.Map;\n" +
"rule \"map access with variable\"\n" +
" when\n" +
" $key : String( )\n" +
" $p1 : Person( name == 'Bob', namedAddresses[$key] != null, $na : namedAddresses[$key] )\n" +
" $p2 : Person( name == 'Mark', namedAddresses[$key] == $na )\n" +
" then\n" +
"end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
Assert.assertTrue( kbuilder.hasErrors() );
}
@Test
public void testHalt() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_halt.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
workingMemory.insert( new Integer( 0 ) );
workingMemory.fireAllRules();
assertEquals( 10,
results.size() );
for ( int i = 0; i < 10; i++ ) {
assertEquals( new Integer( i ),
results.get( i ) );
}
}
@Test
public void testFireLimit() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_fireLimit.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
workingMemory.insert( new Integer( 0 ) );
workingMemory.fireAllRules();
assertEquals( 20,
results.size() );
for ( int i = 0; i < 20; i++ ) {
assertEquals( new Integer( i ),
results.get( i ) );
}
results.clear();
workingMemory.insert( new Integer( 0 ) );
workingMemory.fireAllRules( 10 );
assertEquals( 10,
results.size() );
for ( int i = 0; i < 10; i++ ) {
assertEquals( new Integer( i ),
results.get( i ) );
}
results.clear();
workingMemory.insert( new Integer( 0 ) );
workingMemory.fireAllRules( -1 );
assertEquals( 20,
results.size() );
for ( int i = 0; i < 20; i++ ) {
assertEquals( new Integer( i ),
results.get( i ) );
}
results.clear();
}
@Test
public void testEqualitySupport() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_equalitySupport.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBaseConfiguration conf = new RuleBaseConfiguration();
conf.setAssertBehaviour( RuleBaseConfiguration.AssertBehaviour.EQUALITY );
RuleBase ruleBase = getRuleBase( conf );
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
PersonWithEquals person = new PersonWithEquals( "bob",
30 );
workingMemory.insert( person );
workingMemory.fireAllRules();
assertEquals( 1,
results.size() );
assertEquals( "mark",
results.get( 0 ) );
}
@Test
public void testCharComparisons() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_charComparisons.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
Primitives p1 = new Primitives();
p1.setCharPrimitive( 'a' );
p1.setStringAttribute( "b" );
Primitives p2 = new Primitives();
p2.setCharPrimitive( 'b' );
p2.setStringAttribute( "a" );
workingMemory.insert( p1 );
workingMemory.insert( p2 );
workingMemory.fireAllRules();
assertEquals( 3,
results.size() );
assertEquals( "1",
results.get( 0 ) );
assertEquals( "2",
results.get( 1 ) );
assertEquals( "3",
results.get( 2 ) );
}
@Test
public void testAlphaNodeSharing() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_alphaNodeSharing.drl" ) ) );
final Package pkg = builder.getPackage();
final RuleBaseConfiguration conf = new RuleBaseConfiguration();
conf.setShareAlphaNodes( false );
RuleBase ruleBase = getRuleBase( conf );
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
Person p1 = new Person( "bob",
5 );
workingMemory.insert( p1 );
workingMemory.fireAllRules();
assertEquals( 2,
results.size() );
assertEquals( "1",
results.get( 0 ) );
assertEquals( "2",
results.get( 1 ) );
}
@Test
public void testSelfReference2() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_SelfReference2.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
workingMemory.insert( new Cheese() );
workingMemory.fireAllRules();
assertEquals( 0,
results.size() );
}
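// self-join on the indexed name field: updating p1's name before firing should cancel the
// pending activation, so the rule must not fire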
@Test
public void testSelfJoinWithIndex() {
String drl = "";
drl += "package org.test\n";
drl += "import org.drools.Person\n";
drl += "global java.util.List list\n";
drl += "rule test1\n";
drl += "when\n";
drl += " $p1 : Person( $name : name, $age : age )\n";
drl += " $p2 : Person( name == $name, age < $age)\n";
drl += "then\n";
drl += " list.add( $p1 );\n";
drl += "end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newReaderResource( new StringReader( drl ) ),
ResourceType.DRL );
KnowledgeBuilderErrors errors = kbuilder.getErrors();
if ( errors.size() > 0 ) {
for ( KnowledgeBuilderError error : errors ) {
logger.warn( error.toString() );
}
throw new IllegalArgumentException( "Could not parse knowledge." );
}
assertFalse( kbuilder.hasErrors() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List list = new ArrayList();
ksession.setGlobal( "list",
list );
Person p1 = new Person( "darth",
30 );
org.drools.runtime.rule.FactHandle fh1 = ksession.insert( p1 );
Person p2 = new Person( "darth",
25 );
org.drools.runtime.rule.FactHandle fh2 = ksession.insert( p2 ); // creates activation.
p1.setName( "yoda" );
ksession.update( fh1,
p1 ); // creates activation
ksession.fireAllRules();
assertEquals( 0,
list.size() );
}
@Test
public void testMergingDifferentPackages() throws Exception {
// using the same builder
try {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_RuleNameClashes1.drl" ) ) );
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_RuleNameClashes2.drl" ) ) );
assertEquals( 2,
builder.getPackages().length );
Package pkg1 = builder.getPackageRegistry( "org.drools.package1" ).getPackage();
assertEquals( "rule 1",
pkg1.getRules()[0].getName() );
Package pkg2 = builder.getPackageRegistry( "org.drools.package2" ).getPackage();
assertEquals( "rule 1",
pkg2.getRules()[0].getName() );
} catch ( PackageMergeException e ) {
fail( "unexpected exception: " + e.getMessage() );
} catch ( RuntimeException e ) {
e.printStackTrace();
fail( "unexpected exception: " + e.getMessage() );
}
}
@Test
public void testSelfJoinAndNotWithIndex() {
String drl = "";
drl += "package org.test\n";
drl += "import org.drools.Person\n";
drl += "global java.util.List list\n";
drl += "rule test1\n";
drl += "when\n";
drl += " $p1 : Person( )\n";
drl += " not Person( name == $p1.name, age < $p1.age )\n";
drl += " $p2 : Person( name == $p1.name, likes != $p1.likes, age > $p1.age)\n";
drl += " not Person( name == $p1.name, likes == $p2.likes, age < $p2.age )\n";
drl += "then\n";
drl += " System.out.println( $p1 + \":\" + $p2 );\n";
drl += " list.add( $p1 );\n";
drl += " list.add( $p2 );\n";
drl += "end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newReaderResource( new StringReader( drl ) ),
ResourceType.DRL );
KnowledgeBuilderErrors errors = kbuilder.getErrors();
if ( kbuilder.hasErrors() ) {
fail( kbuilder.getErrors().toString() );
}
assertFalse( kbuilder.hasErrors() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List list = new ArrayList();
ksession.setGlobal( "list",
list );
Person p0 = new Person( "yoda",
0 );
p0.setLikes( "cheddar" );
org.drools.runtime.rule.FactHandle fh0 = ksession.insert( p0 );
Person p1 = new Person( "darth",
15 );
p1.setLikes( "cheddar" );
org.drools.runtime.rule.FactHandle fh1 = ksession.insert( p1 );
Person p2 = new Person( "darth",
25 );
p2.setLikes( "cheddar" );
org.drools.runtime.rule.FactHandle fh2 = ksession.insert( p2 ); // creates activation.
Person p3 = new Person( "darth",
30 );
p3.setLikes( "brie" );
org.drools.runtime.rule.FactHandle fh3 = ksession.insert( p3 );
ksession.fireAllRules();
assertEquals( 2,
list.size() );
assertSame( p1,
list.get( 0 ) );
assertSame( p3,
list.get( 1 ) );
p1.setName( "yoda" );
ksession.update( fh1,
p1 ); // creates activation
ksession.fireAllRules();
assertEquals( 4,
list.size() );
assertSame( p2,
list.get( 2 ) );
assertSame( p3,
list.get( 3 ) );
}
@Test
public void testMergingDifferentPackages2() throws Exception {
// using different builders
try {
final PackageBuilder builder1 = new PackageBuilder();
builder1.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_RuleNameClashes1.drl" ) ) );
final Package pkg1 = builder1.getPackage();
assertEquals( 1,
pkg1.getRules().length );
final PackageBuilder builder2 = new PackageBuilder();
builder2.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_RuleNameClashes2.drl" ) ) );
final Package pkg2 = builder2.getPackage();
assertEquals( 1,
pkg2.getRules().length );
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg1 );
ruleBase.addPackage( pkg2 );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
workingMemory.insert( new Cheese( "stilton",
10 ) );
workingMemory.insert( new Cheese( "brie",
5 ) );
workingMemory.fireAllRules();
assertEquals( results.toString(),
2,
results.size() );
assertTrue( results.contains( "p1.r1" ) );
assertTrue( results.contains( "p2.r1" ) );
} catch ( PackageMergeException e ) {
fail( "Should not raise exception when merging different packages into the same rulebase: " + e.getMessage() );
} catch ( Exception e ) {
e.printStackTrace();
fail( "unexpected exception: " + e.getMessage() );
}
}
@Test
public void testMergePackageWithSameRuleNames() throws Exception {
PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_MergePackageWithSameRuleNames1.drl" ) ) );
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( builder.getPackage() );
builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_MergePackageWithSameRuleNames2.drl" ) ) );
ruleBase.addPackage( builder.getPackage() );
StatefulSession session = ruleBase.newStatefulSession();
final List results = new ArrayList();
session.setGlobal( "results",
results );
session.fireAllRules();
assertEquals( 1,
results.size() );
assertEquals( "rule1 for the package2",
results.get( 0 ) );
}
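// removing rule2 must leave the shared String root pattern with a single left tuple (rule1's) and no sibling tuple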
@Test
public void testRuleRemovalWithJoinedRootPattern() {
String str = "";
str += "package org.drools \n";
str += "rule rule1 \n";
str += "when \n";
str += " String() \n";
str += " Person() \n";
str += "then \n";
str += "end \n";
str += "rule rule2 \n";
str += "when \n";
str += " String() \n";
str += " Cheese() \n";
str += "then \n";
str += "end \n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
DefaultFactHandle handle = (DefaultFactHandle) ksession.insert( "hello" );
LeftTuple leftTuple = handle.getFirstLeftTuple();
assertNotNull( leftTuple );
assertNotNull( leftTuple.getLeftParentNext() );
kbase.removeRule( "org.drools",
"rule2" );
leftTuple = handle.getFirstLeftTuple();
assertNotNull( leftTuple );
assertNull( leftTuple.getLeftParentNext() );
}
// JBRULES-1808
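// update(...) text inside string literals must not be rewritten by the KnowledgeHelperFixer;
// only the real update($fact) call is processed (no-loop stops refiring)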
@Test
public void testKnowledgeHelperFixerInStrings() {
String str = "";
str += "package org.simple \n";
str += "global java.util.List list \n";
str += "rule xxx \n";
str += " no-loop true ";
str += "when \n";
str += " $fact : String() \n";
str += "then \n";
str += " list.add(\"This is an update()\"); \n";
str += " list.add(\"This is an update($fact)\"); \n";
str += " update($fact); \n";
str += "end \n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List list = new ArrayList();
ksession.setGlobal( "list",
list );
ksession.insert( "hello" );
ksession.fireAllRules();
ksession.dispose();
assertEquals( 2,
list.size() );
assertEquals( "This is an update()",
list.get( 0 ) );
assertEquals( "This is an update($fact)",
list.get( 1 ) );
}
@Test
public void testEmptyAfterRetractInIndexedMemory() {
String str = "";
str += "package org.simple \n";
str += "import org.drools.Person\n";
str += "global java.util.List list \n";
str += "rule xxx dialect 'mvel' \n";
str += "when \n";
str += " Person( $name : name ) \n";
str += " $s : String( this == $name) \n";
str += "then \n";
str += " list.add($s); \n";
str += "end \n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List list = new ArrayList();
ksession.setGlobal( "list",
list );
Person p = new Person( "ackbar" );
org.drools.runtime.rule.FactHandle ph = ksession.insert( p );
org.drools.runtime.rule.FactHandle sh = ksession.insert( "ackbar" );
ksession.fireAllRules();
ksession.dispose();
assertEquals( 1,
list.size() );
assertEquals( "ackbar",
list.get( 0 ) );
}
@Test
public void testRuleReplacement() throws Exception {
// test rule replacement
final PackageBuilder builder1 = new PackageBuilder();
builder1.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_RuleNameClashes1.drl" ) ) );
builder1.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_RuleNameClashes3.drl" ) ) );
final Package pkg1 = builder1.getPackage();
assertEquals( 1,
pkg1.getRules().length );
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg1 );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
workingMemory.insert( new Cheese( "stilton",
10 ) );
workingMemory.insert( new Cheese( "brie",
5 ) );
workingMemory.fireAllRules();
assertEquals( results.toString(),
0,
results.size() );
workingMemory.insert( new Cheese( "muzzarella",
7 ) );
workingMemory.fireAllRules();
assertEquals( results.toString(),
1,
results.size() );
assertTrue( results.contains( "p1.r3" ) );
}
@Test
public void testBindingsOnConnectiveExpressions() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_bindings.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
workingMemory.insert( new Cheese( "stilton",
15 ) );
workingMemory.fireAllRules();
assertEquals( 2,
results.size() );
assertEquals( "stilton",
results.get( 0 ) );
assertEquals( new Integer( 15 ),
results.get( 1 ) );
}
@Test
public void testMultipleFroms() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_multipleFroms.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
final Cheesery cheesery = new Cheesery();
cheesery.addCheese( new Cheese( "stilton",
15 ) );
cheesery.addCheese( new Cheese( "brie",
10 ) );
workingMemory.setGlobal( "cheesery",
cheesery );
workingMemory.fireAllRules();
assertEquals( 2,
results.size() );
assertEquals( 2,
((List) results.get( 0 )).size() );
assertEquals( 2,
((List) results.get( 1 )).size() );
}
@Test
public void testNullHashing() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_NullHashing.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
workingMemory.insert( new Cheese( "stilton",
15 ) );
workingMemory.insert( new Cheese( "",
10 ) );
workingMemory.insert( new Cheese( null,
8 ) );
workingMemory.fireAllRules();
assertEquals( 3,
results.size() );
}
@Test
public void testDefaultBetaConstrains() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_DefaultBetaConstraint.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
final FirstClass first = new FirstClass( "1",
"2",
"3",
"4",
"5" );
final FactHandle handle = workingMemory.insert( first );
workingMemory.fireAllRules();
assertEquals( 1,
results.size() );
assertEquals( "NOT",
results.get( 0 ) );
workingMemory.insert( new SecondClass() );
workingMemory.update( handle,
first );
workingMemory.fireAllRules();
assertEquals( 2,
results.size() );
assertEquals( "NOT",
results.get( 1 ) );
workingMemory.update( handle,
first );
workingMemory.insert( new SecondClass( null,
"2",
"3",
"4",
"5" ) );
workingMemory.fireAllRules();
assertEquals( 3,
results.size() );
assertEquals( "NOT",
results.get( 2 ) );
workingMemory.update( handle,
first );
workingMemory.insert( new SecondClass( "1",
null,
"3",
"4",
"5" ) );
workingMemory.fireAllRules();
assertEquals( 4,
results.size() );
assertEquals( "NOT",
results.get( 3 ) );
workingMemory.update( handle,
first );
workingMemory.insert( new SecondClass( "1",
"2",
null,
"4",
"5" ) );
workingMemory.fireAllRules();
assertEquals( 5,
results.size() );
assertEquals( "NOT",
results.get( 4 ) );
workingMemory.update( handle,
first );
workingMemory.insert( new SecondClass( "1",
"2",
"3",
null,
"5" ) );
workingMemory.fireAllRules();
assertEquals( 6,
results.size() );
assertEquals( "NOT",
results.get( 5 ) );
workingMemory.update( handle,
first );
workingMemory.insert( new SecondClass( "1",
"2",
"3",
"4",
null ) );
workingMemory.fireAllRules();
assertEquals( 7,
results.size() );
assertEquals( "NOT",
results.get( 6 ) );
workingMemory.insert( new SecondClass( "1",
"2",
"3",
"4",
"5" ) );
workingMemory.update( handle,
first );
workingMemory.fireAllRules();
assertEquals( 8,
results.size() );
assertEquals( "EQUALS",
results.get( 7 ) );
}
@Test
public void testBooleanWrapper() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_BooleanWrapper.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
Primitives p1 = new Primitives();
workingMemory.insert( p1 );
workingMemory.fireAllRules();
assertEquals( 0,
results.size() );
Primitives p2 = new Primitives();
p2.setBooleanWrapper( Boolean.FALSE );
workingMemory.insert( p2 );
workingMemory.fireAllRules();
assertEquals( 0,
results.size() );
Primitives p3 = new Primitives();
p3.setBooleanWrapper( Boolean.TRUE );
workingMemory.insert( p3 );
workingMemory.fireAllRules();
assertEquals( 1,
results.size() );
}
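// cross product over three facts: each result list should contain the six ordered pairs,
// with the identity-equal (self) pairs removed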
@Test
public void testCrossProductRemovingIdentityEquals() throws Exception {
PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( MiscTest.class.getResourceAsStream( "test_CrossProductRemovingIdentityEquals.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
RuleBaseConfiguration conf = new RuleBaseConfiguration();
RuleBase rb = RuleBaseFactory.newRuleBase( conf );
rb.addPackage( builder.getPackage() );
rb = SerializationHelper.serializeObject( rb );
StatefulSession session = rb.newStatefulSession();
List list1 = new ArrayList();
List list2 = new ArrayList();
session.setGlobal( "list1",
list1 );
session.setGlobal( "list2",
list2 );
SpecialString first42 = new SpecialString( "42" );
SpecialString second43 = new SpecialString( "43" );
SpecialString world = new SpecialString( "World" );
session.insert( world );
session.insert( first42 );
session.insert( second43 );
//System.out.println( "Firing rules ..." );
session.fireAllRules();
assertEquals( 6,
list1.size() );
assertEquals( 6,
list2.size() );
assertEquals( first42,
list1.get( 0 ) );
assertEquals( world,
list1.get( 1 ) );
assertEquals( second43,
list1.get( 2 ) );
assertEquals( second43,
list1.get( 3 ) );
assertEquals( world,
list1.get( 4 ) );
assertEquals( first42,
list1.get( 5 ) );
assertEquals( second43,
list2.get( 0 ) );
assertEquals( second43,
list2.get( 1 ) );
assertEquals( first42,
list2.get( 2 ) );
assertEquals( world,
list2.get( 3 ) );
assertEquals( first42,
list2.get( 4 ) );
assertEquals( world,
list2.get( 5 ) );
}
@Test
public void testIterateObjects() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_IterateObjects.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
workingMemory.insert( new Cheese( "stilton",
10 ) );
workingMemory.fireAllRules();
Iterator events = workingMemory.iterateObjects( new ClassObjectFilter( PersonInterface.class ) );
assertTrue( events.hasNext() );
assertEquals( 1,
results.size() );
assertEquals( results.get( 0 ),
events.next() );
}
@Test
public void testNotInStatelessSession() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_NotInStatelessSession.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBaseConfiguration conf = new RuleBaseConfiguration();
conf.setSequential( true );
RuleBase ruleBase = getRuleBase( conf );
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatelessSession session = ruleBase.newStatelessSession();
List list = new ArrayList();
session.setGlobal( "list",
list );
session.execute( "not integer" );
assertEquals( "not integer",
list.get( 0 ) );
}
@Test
public void testDynamicallyAddInitialFactRule() throws Exception {
PackageBuilder builder = new PackageBuilder();
String rule = "package org.drools.test\n" +
"global java.util.List list\n" +
"rule xxx when\n" +
" i:Integer()\n" +
"then\n" +
" list.add(i);\n" +
"end";
builder.addPackageFromDrl( new StringReader( rule ) );
Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
List list = new ArrayList();
session.setGlobal( "list",
list );
session.insert( new Integer( 5 ) );
session.fireAllRules();
assertEquals( new Integer( 5 ),
list.get( 0 ) );
builder = new PackageBuilder();
rule = "package org.drools.test\n" +
"global java.util.List list\n" +
"rule xxx when\n" +
"then\n" +
" list.add(\"x\");\n" +
"end";
builder.addPackageFromDrl( new StringReader( rule ) );
pkg = builder.getPackage();
// Make sure this rule fires once the new Package is added; it also checks that InitialFactImpl is still in the network
// even though the first rule didn't use it.
ruleBase.addPackage( pkg );
session.fireAllRules();
assertEquals( "x",
list.get( 1 ) );
}
@Test
public void testEvalRewriteWithSpecialOperators() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_EvalRewriteWithSpecialOperators.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
final Order order1 = new Order( 10,
"Bob" );
final OrderItem item11 = new OrderItem( order1,
1 );
final OrderItem item12 = new OrderItem( order1,
2 );
order1.addItem( item11 );
order1.addItem( item12 );
final Order order2 = new Order( 11,
"Bob" );
final OrderItem item21 = new OrderItem( order2,
1 );
final OrderItem item22 = new OrderItem( order2,
2 );
order2.addItem( item21 );
order2.addItem( item22 );
final Order order3 = new Order( 12,
"Bob" );
final OrderItem item31 = new OrderItem( order3,
1 );
final OrderItem item32 = new OrderItem( order3,
2 );
final OrderItem item33 = new OrderItem( order3,
3 );
order3.addItem( item31 );
order3.addItem( item32 );
order3.addItem( item33 );
final Order order4 = new Order( 13,
"Bob" );
final OrderItem item41 = new OrderItem( order4,
1 );
final OrderItem item42 = new OrderItem( order4,
2 );
order4.addItem( item41 );
order4.addItem( item42 );
final Order order5 = new Order( 14,
"Mark" );
final OrderItem item51 = new OrderItem( order5,
1 );
final OrderItem item52 = new OrderItem( order5,
2 );
order5.addItem( item51 );
order5.addItem( item52 );
workingMemory.insert( order1 );
workingMemory.insert( item11 );
workingMemory.insert( item12 );
workingMemory.insert( order2 );
workingMemory.insert( item21 );
workingMemory.insert( item22 );
workingMemory.insert( order3 );
workingMemory.insert( item31 );
workingMemory.insert( item32 );
workingMemory.insert( item33 );
workingMemory.insert( order4 );
workingMemory.insert( item41 );
workingMemory.insert( item42 );
workingMemory.insert( order5 );
workingMemory.insert( item51 );
workingMemory.insert( item52 );
workingMemory.fireAllRules();
assertEquals( 9,
list.size() );
int index = 0;
assertEquals( item11,
list.get( index++ ) );
assertEquals( item12,
list.get( index++ ) );
assertEquals( item21,
list.get( index++ ) );
assertEquals( item22,
list.get( index++ ) );
assertEquals( item31,
list.get( index++ ) );
assertEquals( item33,
list.get( index++ ) );
assertEquals( item41,
list.get( index++ ) );
assertEquals( order5,
list.get( index++ ) );
assertEquals( order5,
list.get( index++ ) );
}
@Test
public void testImportColision() throws Exception {
final PackageBuilder builder = new PackageBuilder();
final PackageBuilder builder2 = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "nested1.drl" ) ) );
builder2.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "nested2.drl" ) ) );
final Package pkg = builder.getPackage();
final Package pkg2 = builder2.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase.addPackage( pkg2 );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
workingMemory.insert( new FirstClass() );
workingMemory.insert( new SecondClass() );
workingMemory.insert( new FirstClass.AlternativeKey() );
workingMemory.insert( new SecondClass.AlternativeKey() );
workingMemory.fireAllRules();
}
@Test
public void testAutovivificationOfVariableRestrictions() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_AutoVivificationVR.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
workingMemory.insert( new Cheese( "stilton",
10,
8 ) );
workingMemory.fireAllRules();
assertEquals( 1,
results.size() );
}
@Test
public void testShadowProxyOnCollections() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_ShadowProxyOnCollections.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
final Cheesery cheesery = new Cheesery();
workingMemory.insert( cheesery );
workingMemory.fireAllRules();
assertEquals( 1,
results.size() );
assertEquals( 1,
cheesery.getCheeses().size() );
assertEquals( results.get( 0 ),
cheesery.getCheeses().get( 0 ) );
}
@Test
public void testShadowProxyOnCollections2() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_ShadowProxyOnCollections2.drl" ) ) );
final Package pkg = builder.getPackage();
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
final StatefulSession workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
List list = new ArrayList();
list.add( "example1" );
list.add( "example2" );
MockPersistentSet mockPersistentSet = new MockPersistentSet( false );
mockPersistentSet.addAll( list );
org.drools.ObjectWithSet objectWithSet = new ObjectWithSet();
objectWithSet.setSet( mockPersistentSet );
workingMemory.insert( objectWithSet );
workingMemory.fireAllRules();
assertEquals( 1,
results.size() );
assertEquals( "show",
objectWithSet.getMessage() );
}
@Test
public void testNestedAccessors() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_NestedAccessors.drl" ) ) );
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( builder.getPackage() );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
final Order order1 = new Order( 11,
"Bob" );
final OrderItem item11 = new OrderItem( order1,
1 );
final OrderItem item12 = new OrderItem( order1,
2 );
order1.addItem( item11 );
order1.addItem( item12 );
workingMemory.insert( order1 );
workingMemory.insert( item11 );
workingMemory.insert( item12 );
workingMemory.fireAllRules();
assertEquals( 0,
list.size() );
final Order order2 = new Order( 12,
"Mark" );
Order.OrderStatus status = new Order.OrderStatus();
status.setActive( true );
order2.setStatus( status );
final OrderItem item21 = new OrderItem( order2,
1 );
final OrderItem item22 = new OrderItem( order2,
2 );
order1.addItem( item21 );
order1.addItem( item22 );
workingMemory.insert( order2 );
workingMemory.insert( item21 );
workingMemory.insert( item22 );
workingMemory.fireAllRules();
assertEquals( 2,
list.size() );
assertSame( item21,
list.get( 0 ) );
assertSame( item22,
list.get( 1 ) );
}
@Test
public void testWorkingMemoryLoggerWithUnbalancedBranches() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_Logger.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory wm = ruleBase.newStatefulSession();
try {
wm.fireAllRules();
wm.insert( new Cheese( "a",
10 ) );
wm.insert( new Cheese( "b",
11 ) );
wm.fireAllRules();
} catch ( Exception e ) {
e.printStackTrace();
fail( "No exception should be raised " );
}
}
@Test
public void testFromNestedAccessors() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_FromNestedAccessors.drl" ) ) );
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( builder.getPackage() );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
final Order order1 = new Order( 11,
"Bob" );
final OrderItem item11 = new OrderItem( order1,
1 );
final OrderItem item12 = new OrderItem( order1,
2 );
order1.addItem( item11 );
order1.addItem( item12 );
workingMemory.insert( order1 );
workingMemory.insert( item11 );
workingMemory.insert( item12 );
workingMemory.fireAllRules();
assertEquals( 1,
list.size() );
assertSame( order1.getStatus(),
list.get( 0 ) );
}
@Test
public void testFromArrayIteration() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_FromArrayIteration.drl" ) ) );
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( builder.getPackage() );
final WorkingMemory session = ruleBase.newStatefulSession();
List list = new ArrayList();
session.setGlobal( "list",
list );
session.insert( new DomainObjectHolder() );
session.fireAllRules();
assertEquals( 3,
list.size() );
assertEquals( "Message3",
list.get( 0 ) );
assertEquals( "Message2",
list.get( 1 ) );
assertEquals( "Message1",
list.get( 2 ) );
}
@Test
public void testSubNetworks() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_SubNetworks.drl" ) ) );
RuleBase ruleBase = getSinglethreadRuleBase();
try {
ruleBase.addPackage( builder.getPackage() );
ruleBase = SerializationHelper.serializeObject( ruleBase );
} catch ( Exception e ) {
e.printStackTrace();
fail( "Should not raise any exception!" );
}
}
@Test
public void testFinalClass() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_FinalClass.drl" ) ) );
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( builder.getPackage() );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
final PersonFinal bob = new PersonFinal();
bob.setName( "bob" );
bob.setStatus( null );
workingMemory.insert( bob );
workingMemory.fireAllRules();
assertEquals( 1,
list.size() );
// Dynamic addition of rules that use the final class is not supported yet
// final PackageBuilder builder2 = new PackageBuilder();
// builder2.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_FinalClass2.drl" ) ) );
// ruleBase.addPackage( builder2.getPackage() );
//
// // it will automatically fire the rule
// assertEquals( 2,
// list.size() );
}
@Test
public void testEvalRewriteMatches() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_EvalRewriteMatches.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
final Order order1 = new Order( 14,
"Mark" );
final OrderItem item11 = new OrderItem( order1,
1 );
final OrderItem item12 = new OrderItem( order1,
2 );
order1.addItem( item11 );
order1.addItem( item12 );
workingMemory.insert( order1 );
workingMemory.insert( item11 );
workingMemory.insert( item12 );
workingMemory.fireAllRules();
assertEquals( 2,
list.size() );
assertTrue( list.contains( item11 ) );
assertTrue( list.contains( item12 ) );
}
@Test
public void testConsequenceBuilderException() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_ConsequenceBuilderException.drl" ) ) );
assertTrue( builder.hasErrors() );
}
@Test
public void testRuntimeTypeCoercion() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_RuntimeTypeCoercion.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
final PolymorphicFact fact = new PolymorphicFact( new Integer( 10 ) );
final FactHandle handle = workingMemory.insert( fact );
workingMemory.fireAllRules();
assertEquals( 1,
list.size() );
assertEquals( fact.getData(),
list.get( 0 ) );
fact.setData( "10" );
workingMemory.update( handle,
fact );
workingMemory.fireAllRules();
assertEquals( 2,
list.size() );
assertEquals( fact.getData(),
list.get( 1 ) );
try {
fact.setData( new Boolean( true ) );
workingMemory.update( handle,
fact );
assertEquals( 2,
list.size() );
} catch ( ClassCastException cce ) { } // coercing the Boolean value may fail; if it does not, the rule must not have fired again
}
@Test
public void testRuntimeTypeCoercion2() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_RuntimeTypeCoercion2.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
final Primitives fact = new Primitives();
fact.setBooleanPrimitive( true );
fact.setBooleanWrapper( new Boolean( true ) );
fact.setObject( new Boolean( true ) );
fact.setCharPrimitive( 'X' );
final FactHandle handle = workingMemory.insert( fact );
workingMemory.fireAllRules();
int index = 0;
assertEquals( list.toString(),
4,
list.size() );
assertEquals( "boolean",
list.get( index++ ) );
assertEquals( "boolean wrapper",
list.get( index++ ) );
assertEquals( "boolean object",
list.get( index++ ) );
assertEquals( "char",
list.get( index++ ) );
fact.setBooleanPrimitive( false );
fact.setBooleanWrapper( null );
fact.setCharPrimitive( '\0' );
fact.setObject( new Character( 'X' ) );
workingMemory.update( handle,
fact );
workingMemory.fireAllRules();
assertEquals( 5,
list.size() );
assertEquals( "char object",
list.get( index++ ) );
fact.setObject( null );
workingMemory.update( handle,
fact );
workingMemory.fireAllRules();
assertEquals( 6,
list.size() );
assertEquals( "null object",
list.get( index++ ) );
}
@Test
public void testAlphaEvalWithOrCE() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_AlphaEvalWithOrCE.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
FactA a = new FactA();
a.setField1( "a value" );
workingMemory.insert( a );
workingMemory.insert( new FactB() );
workingMemory.insert( new FactC() );
workingMemory.fireAllRules();
assertEquals( "should not have fired",
0,
list.size() );
}
@Test
public void testModifyRetractAndModifyInsert() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_ModifyRetractInsert.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
Person bob = new Person( "Bob" );
bob.setStatus( "hungry" );
workingMemory.insert( bob );
workingMemory.insert( new Cheese() );
workingMemory.insert( new Cheese() );
workingMemory.fireAllRules( 2 );
assertEquals( "should have fired only once",
1,
list.size() );
}
@Test
public void testAlphaCompositeConstraints() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_AlphaCompositeConstraints.drl" ) ) );
final Package pkg = builder.getPackage();
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
Person bob = new Person( "bob",
30 );
workingMemory.insert( bob );
workingMemory.fireAllRules();
assertEquals( 1,
list.size() );
}
@Test
public void testModifyBlock() throws Exception {
doModifyTest( "test_ModifyBlock.drl" );
}
@Test
public void testModifyBlockWithPolymorphism() throws Exception {
doModifyTest( "test_ModifyBlockWithPolymorphism.drl" );
}
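// shared body for the modify-block tests: after firing, the cheese price and Bob's status must
// reflect the modifications made in the rule consequences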
private void doModifyTest(String drlResource) throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( drlResource ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
Person bob = new Person( "Bob" );
bob.setStatus( "hungry" );
Cheese c = new Cheese();
workingMemory.insert( bob );
workingMemory.insert( c );
workingMemory.fireAllRules();
assertEquals( 10,
c.getPrice() );
assertEquals( "fine",
bob.getStatus() );
}
@Test
public void testModifyBlockWithFrom() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_ModifyBlockWithFrom.drl" ) ) );
final Package pkg = builder.getPackage();
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
Person bob = new Person( "Bob" );
Address addr = new Address( "abc" );
bob.addAddress( addr );
workingMemory.insert( bob );
workingMemory.insert( addr );
workingMemory.fireAllRules();
// modify worked
assertEquals( "12345",
addr.getZipCode() );
// chaining worked
assertEquals( 1,
results.size() );
assertEquals( addr,
results.get( 0 ) );
}
// this test requires mvel 1.2.19 or later (it was originally commented out until that mvel release was available)
@Test
public void testJavaModifyBlock() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_JavaModifyBlock.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
Person bob = new Person( "Bob",
30 );
bob.setStatus( "hungry" );
workingMemory.insert( bob );
workingMemory.insert( new Cheese() );
workingMemory.insert( new Cheese() );
workingMemory.insert( new OuterClass.InnerClass( 1 ) );
workingMemory.fireAllRules();
assertEquals( 2,
list.size() );
assertEquals( "full",
bob.getStatus() );
assertEquals( 31,
bob.getAge() );
assertEquals( 2,
((OuterClass.InnerClass) list.get( 1 )).getIntAttr() );
}
@Test
public void testOrCE() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_OrCE.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
Package pkg = builder.getPackage();
pkg = SerializationHelper.serializeObject( pkg );
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
workingMemory.insert( new Cheese( "brie",
10 ) );
workingMemory.insert( new Person( "bob" ) );
workingMemory.fireAllRules();
assertEquals( "should have fired once",
1,
list.size() );
}
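// the same binding ($b) declared in every branch of the 'or' must resolve correctly in both the mvel and java dialects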
@Test
public void testOrWithAndUsingNestedBindings() {
String str = "";
str += "package org.drools\n";
str += "import org.drools.Person\n";
str += "global java.util.List mlist\n";
str += "global java.util.List jlist\n";
str += "rule rule1 dialect \"mvel\" \n";
str += "when\n";
str += "$a : Person( name == \"a\" )\n";
str += " (or $b : Person( name == \"b1\" )\n";
str += " (and $p : Person( name == \"p2\" )\n";
str += " $b : Person( name == \"b2\" ) )\n";
str += " (and $p : Person( name == \"p3\" )\n";
str += " $b : Person( name == \"b3\" ) )\n";
str += " )\n ";
str += "then\n";
str += " mlist.add( $b );\n";
str += "end\n";
str += "rule rule2 dialect \"java\" \n";
str += "when\n";
str += "$a : Person( name == \"a\" )\n";
str += " (or $b : Person( name == \"b1\" )\n";
str += " (and $p : Person( name == \"p2\" )\n";
str += " $b : Person( name == \"b2\" ) )\n";
str += " (and $p : Person( name == \"p3\" )\n";
str += " $b : Person( name == \"b3\" ) )\n";
str += " )\n ";
str += "then\n";
str += " jlist.add( $b );\n";
str += "end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
fail( kbuilder.getErrors().toString() );
}
Person a = new Person( "a" );
Person b1 = new Person( "b1" );
Person p2 = new Person( "p2" );
Person b2 = new Person( "b2" );
Person p3 = new Person( "p3" );
Person b3 = new Person( "b3" );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
List mlist = new ArrayList();
List jlist = new ArrayList();
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.setGlobal( "mlist",
mlist );
ksession.setGlobal( "jlist",
jlist );
ksession.insert( a );
ksession.insert( b1 );
ksession.fireAllRules();
assertEquals( b1,
mlist.get( 0 ) );
assertEquals( b1,
jlist.get( 0 ) );
ksession.dispose();
ksession = createKnowledgeSession(kbase);
ksession.setGlobal( "mlist",
mlist );
ksession.setGlobal( "jlist",
jlist );
ksession.insert( a );
ksession.insert( b2 );
ksession.insert( p2 );
ksession.fireAllRules();
assertEquals( b2,
mlist.get( 1 ) );
assertEquals( b2,
jlist.get( 1 ) );
ksession.dispose();
ksession = createKnowledgeSession(kbase);
ksession.setGlobal( "mlist",
mlist );
ksession.setGlobal( "jlist",
jlist );
ksession.insert( a );
ksession.insert( b3 );
ksession.insert( p3 );
ksession.fireAllRules();
assertEquals( b3,
mlist.get( 2 ) );
assertEquals( b3,
jlist.get( 2 ) );
}
@Test
public void testFieldBindingOnWrongFieldName() {
//JBRULES-2527
String str = "";
str += "package org.drools\n";
str += "import org.drools.Person\n";
str += "global java.util.List mlist\n";
str += "rule rule1 \n";
str += "when\n";
str += " Person( $f : invalidFieldName, eval( $f != null ) )\n";
str += "then\n";
str += "end\n";
try {
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
if ( !kbuilder.hasErrors() ) {
fail( "KnowledgeBuilder should have errors" );
}
} catch ( Exception e ) {
fail( "Exception should not be thrown " );
}
str = "";
str += "package org.drools\n";
str += "import org.drools.Person\n";
str += "global java.util.List mlist\n";
str += "rule rule1 \n";
str += "when\n";
str += " Person( $f : invalidFieldName, name == ( $f ) )\n";
str += "then\n";
str += "end\n";
try {
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
if ( !kbuilder.hasErrors() ) {
fail( "KnowledgeBuilder should have errors" );
}
} catch ( Exception e ) {
e.printStackTrace();
fail( "Exception should not be thrown " );
}
}
@Test
public void testDeepNestedConstraints() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_DeepNestedConstraints.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
workingMemory.insert( new Person( "bob",
"muzzarela" ) );
workingMemory.insert( new Cheese( "brie",
10 ) );
workingMemory.insert( new Cheese( "muzzarela",
80 ) );
workingMemory.fireAllRules();
assertEquals( "should have fired twice",
2,
list.size() );
}
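// with EQUALITY assert behaviour, getFactHandle() resolves the handle from an equal but distinct instance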
@Test
public void testGetFactHandleEqualityBehavior() throws Exception {
final RuleBaseConfiguration conf = new RuleBaseConfiguration();
conf.setAssertBehaviour( RuleBaseConfiguration.AssertBehaviour.EQUALITY );
RuleBase ruleBase = RuleBaseFactory.newRuleBase( conf );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final StatefulSession session = ruleBase.newStatefulSession();
CheeseEqual cheese = new CheeseEqual( "stilton",
10 );
session.insert( cheese );
FactHandle fh = session.getFactHandle( new CheeseEqual( "stilton",
10 ) );
assertNotNull( fh );
}
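// with IDENTITY assert behaviour, only the exact instance that was inserted resolves to a fact handle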
@Test
public void testGetFactHandleIdentityBehavior() throws Exception {
final RuleBaseConfiguration conf = new RuleBaseConfiguration();
conf.setAssertBehaviour( RuleBaseConfiguration.AssertBehaviour.IDENTITY );
RuleBase ruleBase = RuleBaseFactory.newRuleBase( conf );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final StatefulSession session = ruleBase.newStatefulSession();
CheeseEqual cheese = new CheeseEqual( "stilton",
10 );
session.insert( cheese );
FactHandle fh1 = session.getFactHandle( new Cheese( "stilton",
10 ) );
assertNull( fh1 );
FactHandle fh2 = session.getFactHandle( cheese );
assertNotNull( fh2 );
}
@Test
public void testOrCEFollowedByEval() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_OrCEFollowedByEval.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
workingMemory.insert( new FactA( "X" ) );
InternalFactHandle b = (InternalFactHandle) workingMemory.insert( new FactB( "X" ) );
workingMemory.fireAllRules();
assertEquals( "should have fired",
2,
list.size() );
assertTrue( list.contains( b.getObject() ) );
}
@Test
public void testNPEOnMVELAlphaPredicates() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_NPEOnMVELPredicate.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final StatefulSession session = ruleBase.newStatefulSession();
final List list = new ArrayList();
session.setGlobal( "results",
list );
Cheese cheese = new Cheese( "stilton",
10 );
Cheesery cheesery = new Cheesery();
cheesery.addCheese( cheese );
Person bob = new Person( "bob",
"stilton" );
Cheese cheese2 = new Cheese();
bob.setCheese( cheese2 );
FactHandle p = session.insert( bob );
FactHandle c = session.insert( cheesery );
session.fireAllRules();
assertEquals( "should not have fired",
0,
list.size() );
cheese2.setType( "stilton" );
session.update( p,
bob );
session.fireAllRules();
assertEquals( 1,
list.size() );
}
@Test
public void testModifyWithLockOnActive() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_ModifyWithLockOnActive.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
final List results = new ArrayList();
session.setGlobal( "results",
results );
final Person bob = new Person( "Bob",
15 );
final Person mark = new Person( "Mark",
16 );
final Person michael = new Person( "Michael",
14 );
session.insert( bob );
session.insert( mark );
session.insert( michael );
session.setFocus( "feeding" );
session.fireAllRules( 5 );
assertEquals( 2,
((List) session.getGlobal( "results" )).size() );
}
@Test
public void testNPEOnParenthesis() throws Exception {
final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newInputStreamResource( getClass().getResourceAsStream( "test_ParenthesisUsage.drl" ) ),
ResourceType.DRL );
final KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
final List<Person> results = new ArrayList<Person>();
final StatefulKnowledgeSession session = createKnowledgeSession(kbase);
session.setGlobal( "results",
results );
Person bob = new Person( "Bob",
20 );
bob.setAlive( true );
Person foo = new Person( "Foo",
0 );
foo.setAlive( false );
session.insert( bob );
session.fireAllRules();
assertEquals( 1,
results.size() );
assertEquals( bob,
results.get( 0 ) );
session.insert( foo );
session.fireAllRules();
assertEquals( 2,
results.size() );
assertEquals( foo,
results.get( 1 ) );
}
@Test
public void testEvalWithLineBreaks() throws Exception {
final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newInputStreamResource( getClass().getResourceAsStream( "test_EvalWithLineBreaks.drl" ) ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
fail( kbuilder.getErrors().toString() );
}
final KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
final List<Person> results = new ArrayList<Person>();
final StatefulKnowledgeSession session = createKnowledgeSession(kbase);
session.setGlobal( "results",
results );
session.insert( Integer.valueOf( 10 ) );
session.fireAllRules();
assertEquals( 1,
results.size() );
assertEquals( Integer.valueOf( 10 ),
results.get( 0 ) );
}
@Test
public void testDRLWithoutPackageDeclaration() throws Exception {
final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newInputStreamResource( getClass().getResourceAsStream( "test_NoPackageDeclaration.drl" ) ),
ResourceType.DRL );
final KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
// no package defined, so it is set to the default
final FactType factType = kbase.getFactType( "defaultpkg",
"Person" );
assertNotNull( factType );
final Object bob = factType.newInstance();
factType.set( bob,
"name",
"Bob" );
factType.set( bob,
"age",
Integer.valueOf( 30 ) );
final StatefulKnowledgeSession session = createKnowledgeSession(kbase);
final List results = new ArrayList();
session.setGlobal( "results",
results );
session.insert( bob );
session.fireAllRules();
assertEquals( 1,
results.size() );
assertEquals( bob,
results.get( 0 ) );
}
@Test
public void testKnowledgeContextJava() {
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "test_KnowledgeContextJava.drl",
getClass() ),
ResourceType.DRL );
KnowledgeBuilderErrors errors = kbuilder.getErrors();
if ( errors.size() > 0 ) {
for ( KnowledgeBuilderError error : errors ) {
logger.warn( error.toString() );
}
throw new IllegalArgumentException( "Could not parse knowledge." );
}
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List<String> list = new ArrayList<String>();
ksession.setGlobal( "list",
list );
ksession.insert( new Message() );
ksession.fireAllRules();
assertEquals( 1,
list.size() );
assertEquals( "Hello World",
list.get( 0 ) );
}
@Test
public void testListOfMaps() {
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "test_TestMapVariableRef.drl",
getClass() ),
ResourceType.DRL );
KnowledgeBuilderErrors errors = kbuilder.getErrors();
if ( errors.size() > 0 ) {
for ( KnowledgeBuilderError error : errors ) {
logger.warn( error.toString() );
}
throw new IllegalArgumentException( "Could not parse knowledge." );
}
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List<Map<String, Object>> list = new ArrayList<Map<String, Object>>();
Map<String, Object> mapOne = new HashMap<String, Object>();
Map<String, Object> mapTwo = new HashMap<String, Object>();
mapOne.put( "MSG",
"testMessage" );
mapTwo.put( "MSGTWO",
"testMessage" );
list.add( mapOne );
list.add( mapTwo );
ksession.insert( list );
ksession.fireAllRules();
assertEquals( 3,
list.size() );
}
@Test
public void testKnowledgeContextMVEL() {
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "test_KnowledgeContextMVEL.drl",
getClass() ),
ResourceType.DRL );
KnowledgeBuilderErrors errors = kbuilder.getErrors();
if ( errors.size() > 0 ) {
for ( KnowledgeBuilderError error : errors ) {
logger.warn( error.toString() );
}
throw new IllegalArgumentException( "Could not parse knowledge." );
}
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List<String> list = new ArrayList<String>();
ksession.setGlobal( "list",
list );
ksession.insert( new Message() );
ksession.fireAllRules();
assertEquals( 1,
list.size() );
assertEquals( "Hello World",
list.get( 0 ) );
}
@Test
public void testJBRules2055() {
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "test_JBRules2055.drl",
getClass() ),
ResourceType.DRL );
KnowledgeBuilderErrors errors = kbuilder.getErrors();
if ( errors.size() > 0 ) {
for ( KnowledgeBuilderError error : errors ) {
logger.warn( error.toString() );
}
throw new IllegalArgumentException( "Could not parse knowledge." );
}
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List<String> results = new ArrayList<String>();
ksession.setGlobal( "results",
results );
ksession.insert( new Cheese( "stilton" ) );
ksession.insert( new Cheese( "brie" ) );
ksession.insert( new Cheese( "muzzarella" ) );
ksession.insert( new Person( "bob",
"stilton" ) );
ksession.fireAllRules();
assertEquals( 2,
results.size() );
assertEquals( "stilton",
results.get( 0 ) );
assertEquals( "brie",
results.get( 1 ) );
}
@Test
public void testJBRules2369() {
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "test_JBRules2369.drl",
getClass() ),
ResourceType.DRL );
KnowledgeBuilderErrors errors = kbuilder.getErrors();
if ( errors.size() > 0 ) {
for ( KnowledgeBuilderError error : errors ) {
logger.warn( error.toString() );
}
fail( "Error loading test_JBRules2369" );
}
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List<String> results = new ArrayList<String>();
ksession.setGlobal( "results",
results );
FactA a = new FactA();
FactB b = new FactB( Integer.valueOf( 0 ) );
org.drools.runtime.rule.FactHandle aHandle = ksession.insert( a );
org.drools.runtime.rule.FactHandle bHandle = ksession.insert( b );
ksession.fireAllRules();
assertEquals( 1,
results.size() );
ksession.update( aHandle,
a );
ksession.fireAllRules();
assertEquals( 2,
results.size() );
}
@Test
public void testInsertionOrder() {
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "test_InsertionOrder.drl",
getClass() ),
ResourceType.DRL );
KnowledgeBuilderErrors errors = kbuilder.getErrors();
if ( errors.size() > 0 ) {
for ( KnowledgeBuilderError error : errors ) {
logger.warn( error.toString() );
}
throw new IllegalArgumentException( "Could not parse knowledge." );
}
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List<String> results = new ArrayList<String>();
ksession.setGlobal( "results",
results );
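// insert the moves in ascending order first; the same wins should be produced regardless of ordering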
ksession.insert( new Move( 1,
2 ) );
ksession.insert( new Move( 2,
3 ) );
Win win2 = new Win( 2 );
Win win3 = new Win( 3 );
ksession.fireAllRules();
assertEquals( 2,
results.size() );
assertTrue( results.contains( win2 ) );
assertTrue( results.contains( win3 ) );
ksession.dispose();
ksession = createKnowledgeSession(kbase);
results = new ArrayList<String>();
ksession.setGlobal( "results",
results );
// reverse the order of the inserts
ksession.insert( new Move( 2,
3 ) );
ksession.insert( new Move( 1,
2 ) );
ksession.fireAllRules();
assertEquals( 2,
results.size() );
assertTrue( results.contains( win2 ) );
assertTrue( results.contains( win3 ) );
}
@Test
public void testFireAllWhenFiringUntilHalt() {
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
final StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
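// fireAllRules() should complete on its own thread while another thread is blocked in fireUntilHalt(), and halt() must stop the fireUntilHalt thread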
Runnable fireUntilHalt = new Runnable() {
public void run() {
ksession.fireUntilHalt();
}
};
Runnable fireAllRules = new Runnable() {
public void run() {
ksession.fireAllRules();
}
};
Thread t1 = new Thread( fireUntilHalt );
Thread t2 = new Thread( fireAllRules );
t1.start();
try {
Thread.sleep( 500 );
} catch ( InterruptedException e ) {
}
t2.start();
// give t2 a chance to finish
try {
Thread.sleep( 1000 );
} catch ( InterruptedException e ) {
}
boolean aliveT2 = t2.isAlive();
ksession.halt();
try {
Thread.sleep( 1000 );
} catch ( InterruptedException e ) {
}
boolean aliveT1 = t1.isAlive();
if ( t2.isAlive() ) {
t2.interrupt();
}
if ( t1.isAlive() ) {
t1.interrupt();
}
assertFalse( "T2 should have finished",
aliveT2 );
assertFalse( "T1 should have finished",
aliveT1 );
}
@Test @Ignore
public void testFireUntilHaltFailingAcrossEntryPoints() throws Exception {
String rule1 = "package org.drools\n";
rule1 += "global java.util.List list\n";
rule1 += "rule testFireUntilHalt\n";
rule1 += "when\n";
rule1 += " Cheese()\n";
rule1 += " $p : Person() from entry-point \"testep2\"\n";
rule1 += "then \n";
rule1 += " list.add( $p ) ;\n";
rule1 += "end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( rule1.getBytes() ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
logger.info( kbuilder.getErrors().toString() );
throw new RuntimeException( kbuilder.getErrors().toString() );
}
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
final StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
final WorkingMemoryEntryPoint ep = ksession.getWorkingMemoryEntryPoint( "testep2" );
List list = new ArrayList();
ksession.setGlobal( "list",
list );
ksession.insert( new Cheese( "cheddar" ) );
ksession.fireAllRules();
Runnable fireUntilHalt = new Runnable() {
public void run() {
ksession.fireUntilHalt();
}
};
Thread t1 = new Thread( fireUntilHalt );
t1.start();
Thread.sleep( 500 );
ep.insert( new Person( "darth" ) );
Thread.sleep( 500 );
ksession.halt();
t1.join( 5000 );
boolean alive = t1.isAlive();
if ( alive ) {
t1.interrupt();
}
assertFalse( "Thread should have died!",
alive );
assertEquals( 1,
list.size() );
}
@Test
public void testNetworkBuildErrorAcrossEntryPointsAndFroms() throws Exception {
String rule1 = "package org.drools\n";
rule1 += "global java.util.List list\n";
rule1 += "rule rule1\n";
rule1 += "when\n";
rule1 += " Cheese() from entry-point \"testep\"\n";
rule1 += " $p : Person() from list\n";
rule1 += "then \n";
rule1 += " list.add( \"rule1\" ) ;\n";
rule1 += " insert( $p );\n";
rule1 += "end\n";
rule1 += "rule rule2\n";
rule1 += "when\n";
rule1 += " $p : Person() \n";
rule1 += "then \n";
rule1 += " list.add( \"rule2\" ) ;\n";
rule1 += "end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( rule1.getBytes() ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
logger.info( kbuilder.getErrors().toString() );
throw new RuntimeException( kbuilder.getErrors().toString() );
}
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
final StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
final WorkingMemoryEntryPoint ep = ksession.getWorkingMemoryEntryPoint( "testep" );
List list = new ArrayList();
ksession.setGlobal( "list",
list );
list.add( new Person( "darth" ) );
ep.insert( new Cheese( "cheddar" ) );
ksession.fireAllRules();
assertEquals( 3,
list.size() );
}
@Test
public void testBindingToMissingField() throws Exception {
// JBRULES-3047
String rule1 = "package org.drools\n";
rule1 += "rule rule1\n";
rule1 += "when\n";
rule1 += " Integer( $i : noSuchField ) \n";
rule1 += " eval( $i > 0 )\n";
rule1 += "then \n";
rule1 += "end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( rule1.getBytes() ),
ResourceType.DRL );
if ( !kbuilder.hasErrors() ) {
fail( "this should have errors" );
}
}
@Test
public void testJBRules2140() {
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "test_JBRules2140.drl",
getClass() ),
ResourceType.DRL );
KnowledgeBuilderErrors errors = kbuilder.getErrors();
if ( errors.size() > 0 ) {
for ( KnowledgeBuilderError error : errors ) {
logger.warn( error.toString() );
}
throw new IllegalArgumentException( "Could not parse knowledge." );
}
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List<String> results = new ArrayList<String>();
ksession.setGlobal( "results",
results );
ksession.fireAllRules();
assertEquals( 2,
results.size() );
assertTrue( results.contains( "java" ) );
assertTrue( results.contains( "mvel" ) );
}
@Test
public void testGeneratedBeansSerializable() throws Exception {
final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newInputStreamResource( getClass().getResourceAsStream( "test_GeneratedBeansSerializable.drl" ) ),
ResourceType.DRL );
assertFalse( kbuilder.getErrors().toString(),
kbuilder.hasErrors() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
// test kbase serialization
kbase = SerializationHelper.serializeObject( kbase );
// Retrieve the generated fact type
FactType cheeseFact = kbase.getFactType( "org.drools.generatedbeans",
"Cheese" );
assertTrue( "Generated beans must be serializable",
Serializable.class.isAssignableFrom( cheeseFact.getFactClass() ) );
// Create a new Fact instance
Object cheese = cheeseFact.newInstance();
cheeseFact.set( cheese,
"type",
"stilton" );
// another instance
Object cheese2 = cheeseFact.newInstance();
cheeseFact.set( cheese2,
"type",
"brie" );
// creating a stateful session
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List<Number> results = new ArrayList<Number>();
ksession.setGlobal( "results",
results );
// inserting fact
ksession.insert( cheese );
ksession.insert( cheese2 );
// firing rules
ksession.fireAllRules();
// checking results
assertEquals( 1,
results.size() );
assertEquals( 2,
results.get( 0 ).intValue() );
}
@Test
public void testAddRemoveListeners() throws Exception {
final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newInputStreamResource( getClass().getResourceAsStream( "test_AddRemoveListeners.drl" ) ),
ResourceType.DRL );
assertFalse( kbuilder.getErrors().toString(),
kbuilder.hasErrors() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
// creating the listener as a Mockito mock
final org.drools.event.rule.WorkingMemoryEventListener wmeListener = mock( org.drools.event.rule.WorkingMemoryEventListener.class );
ksession.addEventListener( wmeListener );
// the listener will be notified of both fact insertions
ksession.insert( new Cheese( "stilton" ) );
ksession.insert( wmeListener );
// firing rules will remove listener
ksession.fireAllRules();
// inserting another object into the working memory; the listener should NOT be notified,
// since it is no longer listening.
ksession.insert( new Cheese( "brie" ) );
verify( wmeListener,
times( 2 ) ).objectInserted( any( org.drools.event.rule.ObjectInsertedEvent.class ) );
}
@Test
public void testInsert() throws Exception {
String drl = "";
drl += "package test\n";
drl += "import org.drools.Person\n";
drl += "import org.drools.Pet\n";
drl += "import java.util.ArrayList\n";
drl += "rule test\n";
drl += "when\n";
drl += "$person:Person()\n";
drl += "$pets : ArrayList()\n";
drl += " from collect( \n";
drl += " Pet(\n";
drl += " ownerName == $person.name\n";
drl += " )\n";
drl += " )\n";
drl += "then\n";
drl += "end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newReaderResource( new StringReader( drl ) ),
ResourceType.DRL );
KnowledgeBuilderErrors errors = kbuilder.getErrors();
if ( errors.size() > 0 ) {
fail( errors.toString() );
}
assertFalse( kbuilder.hasErrors() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.insert( new Person( "Toni" ) );
ksession.insert( new Pet( "Toni" ) );
}
@Test
public void testMemberOfNotWorkingWithOr() throws Exception {
String rule = "";
rule += "package org.drools;\n";
rule += "import java.util.ArrayList;\n";
rule += "import org.drools.Person;\n";
rule += "rule \"Test Rule\"\n";
rule += "when\n";
rule += " $list: ArrayList() \n";
rule += " ArrayList() \n";
rule += " from collect( \n";
rule += " Person( \n";
rule += " ( \n";
rule += " pet memberOf $list \n";
rule += " ) || ( \n";
rule += " pet == null \n";
rule += " ) \n";
rule += " ) \n";
rule += " )\n";
rule += "then\n";
rule += " System.out.println(\"hello person\");\n";
rule += "end";
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new StringReader( rule ) );
final org.drools.rule.Package pkg = builder.getPackage();
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
StatefulSession session = ruleBase.newStatefulSession();
Person toni = new Person( "Toni",
12 );
toni.setPet( new Pet( "Mittens" ) );
session.insert( new ArrayList() );
session.insert( toni );
session.fireAllRules();
}
@Test
public void testUnNamed() throws Exception {
String rule = "";
rule += "package org.drools;\n";
rule += "import java.util.ArrayList;\n";
rule += "import org.drools.Person;\n";
rule += "rule \"Test Rule\"\n";
rule += "when\n";
rule += " $list: ArrayList() \n";
rule += " ArrayList() \n";
rule += " from collect( \n";
rule += " Person( \n";
rule += " ( \n";
rule += " pet memberOf $list \n";
rule += " ) || ( \n";
rule += " pet == null \n";
rule += " ) \n";
rule += " ) \n";
rule += " )\n";
rule += "then\n";
rule += " System.out.println(\"hello person\");\n";
rule += "end";
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new StringReader( rule ) );
final org.drools.rule.Package pkg = builder.getPackage();
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
StatefulSession session = ruleBase.newStatefulSession();
Person toni = new Person( "Toni",
12 );
toni.setPet( new Pet( "Mittens" ) );
session.insert( new ArrayList() );
session.insert( toni );
session.fireAllRules();
}
@Test
// this isn't possible: we can only narrow with type safety, not widen,
// unless typesafe=false is used
public void testAccessFieldsFromSubClass() throws Exception {
// Exception in ClassFieldAccessorStore line: 116
String rule = "";
rule += "package org.drools;\n";
rule += "import org.drools.Person;\n";
rule += "import org.drools.Pet;\n";
rule += "import org.drools.Cat;\n";
rule += "declare Person @typesafe(false) end\n";
rule += "rule \"Test Rule\"\n";
rule += "when\n";
rule += " Person(\n";
rule += " pet.breed == \"Siamise\"\n";
rule += " )\n";
rule += "then\n";
rule += "System.out.println(\"hello person\");\n";
rule += "end";
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new StringReader( rule ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final org.drools.rule.Package pkg = builder.getPackage();
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
StatefulSession session = ruleBase.newStatefulSession();
Person person = new Person();
person.setPet( new Cat( "Mittens" ) );
session.insert( person );
session.fireAllRules();
}
@Test
public void testGenericsInRHS() throws Exception {
String rule = "";
rule += "package org.drools;\n";
rule += "import java.util.Map;\n";
rule += "import java.util.HashMap;\n";
rule += "rule \"Test Rule\"\n";
rule += " when\n";
rule += " then\n";
rule += " Map<String,String> map = new HashMap<String,String>();\n";
rule += "end";
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new StringReader( rule ) );
final org.drools.rule.Package pkg = builder.getPackage();
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
StatefulSession session = ruleBase.newStatefulSession();
assertNotNull( session );
}
@Test
public void testActivationListener() throws Exception {
String rule = "";
rule += "package org.drools;\n";
rule += "import java.util.Map;\n";
rule += "import java.util.HashMap;\n";
rule += "rule \"Test Rule\" @activationListener('blah')\n";
rule += " when\n";
rule += " String( this == \"xxx\" )\n ";
rule += " then\n";
rule += "end";
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new StringReader( rule ) );
final org.drools.rule.Package pkg = builder.getPackage();
RuleBaseConfiguration conf = new RuleBaseConfiguration();
final List list = new ArrayList();
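// the custom activation listener registered under 'blah' records tuple inserts, updates and retracts in this list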
conf.addActivationListener( "blah",
new ActivationListenerFactory() {
public TerminalNode createActivationListener( int id,
LeftTupleSource source,
org.drools.rule.Rule rule,
GroupElement subrule,
int subruleIndex,
BuildContext context,
Object... args ) {
return new RuleTerminalNode( id,
source,
rule,
subrule,
0,
context ) {
@Override
public void assertLeftTuple( LeftTuple tuple,
PropagationContext context,
InternalWorkingMemory workingMemory ) {
list.add( "inserted" );
}
@Override
public void modifyLeftTuple( LeftTuple leftTuple,
PropagationContext context,
InternalWorkingMemory workingMemory ) {
list.add( "updated" );
}
@Override
public void retractLeftTuple( LeftTuple leftTuple,
PropagationContext context,
InternalWorkingMemory workingMemory ) {
list.add( "retracted" );
}
};
}
} );
final RuleBase ruleBase = getRuleBase( conf );
ruleBase.addPackage( pkg );
StatefulSession session = ruleBase.newStatefulSession();
FactHandle fh = session.insert( "xxx" );
session.update( fh,
"xxx" );
session.retract( fh );
assertEquals( "inserted",
list.get( 0 ) );
assertEquals( "updated",
list.get( 1 ) );
assertEquals( "retracted",
list.get( 2 ) );
assertNotNull( session );
}
@Test
public void testAccessingMapValues() throws Exception {
String rule = "";
rule += "package org.drools;\n";
rule += "import org.drools.Pet;\n";
rule += "rule \"Test Rule\"\n";
rule += " when\n";
rule += " $pet: Pet()\n";
rule += " Pet( \n";
rule += " ownerName == $pet.attributes[\"key\"] \n";
rule += " )\n";
rule += " then\n";
rule += " System.out.println(\"hi pet\");\n";
rule += "end";
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new StringReader( rule ) );
final org.drools.rule.Package pkg = builder.getPackage();
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
StatefulSession session = ruleBase.newStatefulSession();
assertNotNull( session );
Pet pet1 = new Pet( "Toni" );
pet1.getAttributes().put( "key",
"value" );
Pet pet2 = new Pet( "Toni" );
session.insert( pet1 );
session.insert( pet2 );
session.fireAllRules();
}
@Test
public void testClassLoaderHits() throws Exception {
final KnowledgeBuilderConfiguration conf = KnowledgeBuilderFactory.newKnowledgeBuilderConfiguration();
//conf.setOption( ClassLoaderCacheOption.DISABLED );
final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder( conf );
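// compiling several DRL resources with one builder exercises the class loader cache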
kbuilder.add( ResourceFactory.newInputStreamResource( getClass().getResourceAsStream( "test_GeneratedBeansMVEL.drl" ) ),
ResourceType.DRL );
kbuilder.add( ResourceFactory.newInputStreamResource( getClass().getResourceAsStream( "test_GeneratedBeans.drl" ) ),
ResourceType.DRL );
kbuilder.add( ResourceFactory.newInputStreamResource( getClass().getResourceAsStream( "test_NullFieldOnCompositeSink.drl" ) ),
ResourceType.DRL );
assertFalse( kbuilder.getErrors().toString(),
kbuilder.hasErrors() );
//((CompositeClassLoader)((PackageBuilderConfiguration)conf).getClassLoader()).dumpStats();
}
@Test
public void testMVELConsequenceWithoutSemiColon1() throws Exception {
String drl = "";
drl += "package test\n";
drl += "import org.drools.Person\n";
drl += "import org.drools.Pet\n";
drl += "rule test dialect 'mvel'\n";
drl += "when\n";
drl += "$person:Person()\n";
drl += "$pet:Pet()\n";
drl += "then\n";
drl += " retract($person) // some comment\n";
drl += " retract($pet) // another comment\n";
drl += "end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newReaderResource( new StringReader( drl ) ),
ResourceType.DRL );
KnowledgeBuilderErrors errors = kbuilder.getErrors();
if ( errors.size() > 0 ) {
for ( KnowledgeBuilderError error : errors ) {
logger.warn( error.toString() );
}
throw new IllegalArgumentException( "Could not parse knowledge." );
}
assertFalse( kbuilder.hasErrors() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
// create working memory mock listener
org.drools.event.rule.WorkingMemoryEventListener wml = Mockito.mock( org.drools.event.rule.WorkingMemoryEventListener.class );
ksession.addEventListener( wml );
org.drools.runtime.rule.FactHandle personFH = ksession.insert( new Person( "Toni" ) );
org.drools.runtime.rule.FactHandle petFH = ksession.insert( new Pet( "Toni" ) );
int fired = ksession.fireAllRules();
assertEquals( 1,
fired );
// capture the arguments and check that the retracts happened
ArgumentCaptor<org.drools.event.rule.ObjectRetractedEvent> retracts = ArgumentCaptor.forClass( org.drools.event.rule.ObjectRetractedEvent.class );
verify( wml,
times( 2 ) ).objectRetracted( retracts.capture() );
List<org.drools.event.rule.ObjectRetractedEvent> values = retracts.getAllValues();
assertThat( values.get( 0 ).getFactHandle(),
is( personFH ) );
assertThat( values.get( 1 ).getFactHandle(),
is( petFH ) );
}
@Test
public void testRuleMetaAttributes() throws Exception {
String drl = "";
drl += "package test\n";
drl += "rule \"test meta attributes\"\n";
drl += " @id(1234 ) @author( john_doe ) @text(\"It's an escaped\\\" string\" )\n";
drl += "when\n";
drl += "then\n";
drl += " // some comment\n";
drl += "end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newReaderResource( new StringReader( drl ) ),
ResourceType.DRL );
KnowledgeBuilderErrors errors = kbuilder.getErrors();
if ( errors.size() > 0 ) {
for ( KnowledgeBuilderError error : errors ) {
logger.warn( error.toString() );
}
throw new IllegalArgumentException( "Could not parse knowledge." );
}
assertFalse( kbuilder.hasErrors() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
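// the parsed rule should expose its meta attributes, including the escaped quote in the @text value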
Rule rule = kbase.getRule( "test",
"test meta attributes" );
assertNotNull( rule );
assertThat( rule.getMetaAttribute( "id" ),
is( "1234" ) );
assertThat( rule.getMetaAttribute( "author" ),
is( "john_doe" ) );
assertThat( rule.getMetaAttribute( "text" ),
is( "It's an escaped\" string" ) );
}
// following test depends on MVEL: http://jira.codehaus.org/browse/MVEL-212
@Test
public void testMVELConsequenceUsingFactConstructors() throws Exception {
String drl = "";
drl += "package test\n";
drl += "import org.drools.Person\n";
drl += "global org.drools.runtime.StatefulKnowledgeSession ksession\n";
drl += "rule test dialect 'mvel'\n";
drl += "when\n";
drl += " $person:Person( name == 'mark' )\n";
drl += "then\n";
drl += " // below constructor for Person does not exist\n";
drl += " Person p = new Person( 'bob', 30, 555 )\n";
drl += " ksession.update(ksession.getFactHandle($person), new Person('bob', 30, 999, 453, 534, 534, 32))\n";
drl += "end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newReaderResource( new StringReader( drl ) ),
ResourceType.DRL );
KnowledgeBuilderErrors errors = kbuilder.getErrors();
assertTrue( kbuilder.hasErrors() );
}
@Test
public void testRuleChainingWithLogicalInserts() throws Exception {
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "test_RuleChaining.drl",
getClass() ),
ResourceType.DRL );
KnowledgeBuilderErrors errors = kbuilder.getErrors();
if ( errors.size() > 0 ) {
for ( KnowledgeBuilderError error : errors ) {
logger.warn( error.toString() );
}
throw new IllegalArgumentException( "Could not parse knowledge." );
}
assertFalse( kbuilder.hasErrors() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
// create working memory mock listener
org.drools.event.rule.WorkingMemoryEventListener wml = Mockito.mock( org.drools.event.rule.WorkingMemoryEventListener.class );
org.drools.event.rule.AgendaEventListener ael = Mockito.mock( org.drools.event.rule.AgendaEventListener.class );
ksession.addEventListener( wml );
ksession.addEventListener( ael );
int fired = ksession.fireAllRules();
assertEquals( 3,
fired );
// capture the arguments and check that the rules fired in the proper sequence
ArgumentCaptor<org.drools.event.rule.AfterActivationFiredEvent> actvs = ArgumentCaptor.forClass( org.drools.event.rule.AfterActivationFiredEvent.class );
verify( ael,
times( 3 ) ).afterActivationFired( actvs.capture() );
List<org.drools.event.rule.AfterActivationFiredEvent> values = actvs.getAllValues();
assertThat( values.get( 0 ).getActivation().getRule().getName(),
is( "init" ) );
assertThat( values.get( 1 ).getActivation().getRule().getName(),
is( "r1" ) );
assertThat( values.get( 2 ).getActivation().getRule().getName(),
is( "r2" ) );
verify( ael,
never() ).activationCancelled( any( org.drools.event.rule.ActivationCancelledEvent.class ) );
verify( wml,
times( 2 ) ).objectInserted( any( org.drools.event.rule.ObjectInsertedEvent.class ) );
verify( wml,
never() ).objectRetracted( any( org.drools.event.rule.ObjectRetractedEvent.class ) );
}
@Test
public void testOrWithReturnValueRestriction() throws Exception {
String fileName = "test_OrWithReturnValue.drl";
KnowledgeBase kbase = loadKnowledgeBase( fileName );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.insert( new Cheese( "brie",
18 ) );
ksession.insert( new Cheese( "stilton",
8 ) );
ksession.insert( new Cheese( "brie",
28 ) );
int fired = ksession.fireAllRules();
assertEquals( 2,
fired );
}
@Test
public void testFromExprFollowedByNot() {
String rule = "";
rule += "package org.drools\n";
rule += "rule \"Rule 1\"\n";
rule += " when\n";
rule += " Person ($var: pet )\n";
rule += " Pet () from $var\n";
rule += " not Pet ()\n";
rule += " then\n";
rule += " System.out.println(\"Fire in the hole\");\n";
rule += "end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newReaderResource( new StringReader( rule ) ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
Iterator<KnowledgeBuilderError> errors = kbuilder.getErrors().iterator();
while ( errors.hasNext() ) {
logger.info( "kbuilder error: " + errors.next().getMessage() );
}
}
assertFalse( kbuilder.hasErrors() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
}
@Test
public void testLastMemoryEntryNotBug() {
// JBRULES-2809
// This occurs when a blocker is the last entry in the node's memory, or when there is only one fact in the node,
// and it gets no opportunity to rematch with itself
String str = "";
str += "package org.simple \n";
str += "import " + A.class.getCanonicalName() + "\n";
str += "global java.util.List list \n";
str += "rule x1 \n";
str += "when \n";
str += " $s : String( this == 'x1' ) \n";
str += " not A( this != null ) \n";
str += "then \n";
str += " list.add(\"fired x1\"); \n";
str += "end \n";
str += "rule x2 \n";
str += "when \n";
str += " $s : String( this == 'x2' ) \n";
str += " not A( field1 == $s, this != null ) \n"; // this ensures an index bucket
str += "then \n";
str += " list.add(\"fired x2\"); \n";
str += "end \n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List list = new ArrayList();
ksession.setGlobal( "list",
list );
ksession.insert( "x1" );
ksession.insert( "x2" );
A a1 = new A( "x1",
null );
A a2 = new A( "x2",
null );
FactHandle fa1 = (FactHandle) ksession.insert( a1 );
FactHandle fa2 = (FactHandle) ksession.insert( a2 );
// make sure the 'not' is obeyed when the fact is cycled, causing add/remove in the node memory
ksession.update( fa1,
a1 );
ksession.update( fa2,
a2 );
ksession.fireAllRules();
assertEquals( 0,
list.size() );
ksession.dispose();
}
@Test
public void testLastMemoryEntryExistsBug() {
// JBRULES-2809
// This occurs when a blocker is the last entry in the node's memory, or when there is only one fact in the node,
// and it gets no opportunity to rematch with itself
String str = "";
str += "package org.simple \n";
str += "import " + A.class.getCanonicalName() + "\n";
str += "global java.util.List list \n";
str += "rule x1 \n";
str += "when \n";
str += " $s : String( this == 'x1' ) \n";
str += " exists A( this != null ) \n";
str += "then \n";
str += " list.add(\"fired x1\"); \n";
str += "end \n";
str += "rule x2 \n";
str += "when \n";
str += " $s : String( this == 'x2' ) \n";
str += " exists A( field1 == $s, this != null ) \n"; // this ensures an index bucket
str += "then \n";
str += " list.add(\"fired x2\"); \n";
str += "end \n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List list = new ArrayList();
ksession.setGlobal( "list",
list );
ksession.insert( "x1" );
ksession.insert( "x2" );
A a1 = new A( "x1",
null );
A a2 = new A( "x2",
null );
FactHandle fa1 = (FactHandle) ksession.insert( a1 );
FactHandle fa2 = (FactHandle) ksession.insert( a2 );
// make sure the 'exists' is obeyed when the fact is cycled, causing add/remove in the node memory
ksession.update( fa1,
a1 );
ksession.update( fa2,
a2 );
ksession.fireAllRules();
assertEquals( 2,
list.size() );
ksession.dispose();
}
@Test
public void testNotIterativeModifyBug() {
// JBRULES-2809
// This bug occurs when a tuple is modified: the remove/add moves it to the end of the node memory.
// However, before that happened it would attempt to find the next tuple, starting from itself.
// This meant it would just re-add itself as the blocker, but then be moved to the end of the memory.
// If this tuple was then removed or changed, the blocked tuple was unable to check previous tuples.
String str = "";
str += "package org.simple \n";
str += "import " + A.class.getCanonicalName() + "\n";
str += "global java.util.List list \n";
str += "rule xxx \n";
str += "when \n";
str += " $f1 : A() \n";
str += " not A(this != $f1, eval(field2 == $f1.getField2())) \n";
str += " eval( !$f1.getField1().equals(\"1\") ) \n";
str += "then \n";
str += " list.add($f1); \n";
str += "end \n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List list = new ArrayList();
ksession.setGlobal( "list",
list );
A a1 = new A( "2",
"2" );
A a2 = new A( "1",
"2" );
A a3 = new A( "1",
"2" );
FactHandle fa1 = (FactHandle) ksession.insert( a1 );
FactHandle fa2 = (FactHandle) ksession.insert( a2 );
FactHandle fa3 = (FactHandle) ksession.insert( a3 );
ksession.fireAllRules();
// a1 is blocked by a2
assertEquals( 0,
list.size() );
// modify a2, so that a1 is now blocked by a3
a2.setField2( "1" ); // Do
ksession.update( fa2,
a2 );
a2.setField2( "2" ); // Undo
ksession.update( fa2,
a2 );
// modify a3 to cycle it, so that it moves to the end of the memory; in a previous bug it still blocked a1
ksession.update( fa3,
a3 );
a3.setField2( "1" ); // Do
ksession.update( fa3,
a3 );
ksession.fireAllRules();
assertEquals( 0,
list.size() ); // a1 should still be blocked by a2, but a bug in the previous update left it hanging onto the old blocker
ksession.dispose();
}
@Test
public void testModifyWithLiaToEval() {
String str = "";
str += "package org.simple \n";
str += "import " + Person.class.getCanonicalName() + "\n";
str += "global java.util.List list \n";
str += "rule xxx \n";
str += "when \n";
str += " $p : Person() \n";
str += " eval( $p.getAge() > 30 ) \n";
str += "then \n";
str += " list.add($p); \n";
str += "end \n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List list = new ArrayList();
ksession.setGlobal( "list",
list );
Person p1 = new Person("darth", 25);
org.drools.runtime.rule.FactHandle fh = ksession.insert( p1 );
ksession.fireAllRules();
assertEquals( 0, list.size() );
p1.setAge( 35 );
ksession.update( fh, p1 );
ksession.fireAllRules();
assertEquals( 1, list.size() );
ksession.dispose();
}
@Test
public void testExistsIterativeModifyBug() {
// JBRULES-2809
// This bug occurs when a tuple is modified: the remove/add moves it to the end of the node memory.
// However, before that happened it would attempt to find the next tuple, starting from itself.
// This meant it would just re-add itself as the blocker, but then be moved to the end of the memory.
// If this tuple was then removed or changed, the blocked tuple was unable to check previous tuples.
String str = "";
str += "package org.simple \n";
str += "import " + A.class.getCanonicalName() + "\n";
str += "global java.util.List list \n";
str += "rule xxx \n";
str += "when \n";
str += " $f1 : A() \n";
str += " exists A(this != $f1, eval(field2 == $f1.getField2())) \n";
str += " eval( !$f1.getField1().equals(\"1\") ) \n";
str += "then \n";
str += " list.add($f1); \n";
str += "end \n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List list = new ArrayList();
ksession.setGlobal( "list",
list );
A a1 = new A( "2",
"2" );
A a2 = new A( "1",
"2" );
A a3 = new A( "1",
"2" );
FactHandle fa1 = (FactHandle) ksession.insert( a1 );
FactHandle fa2 = (FactHandle) ksession.insert( a2 );
FactHandle fa3 = (FactHandle) ksession.insert( a3 );
// a2, a3 are blocked by a1
// modify a1, so that a1,a3 are now blocked by a2
a1.setField2( "1" ); // Do
ksession.update( fa1,
a1 );
a1.setField2( "2" ); // Undo
ksession.update( fa1,
a1 );
// modify a2, so that a1,a2 are now blocked by a3
a2.setField2( "1" ); // Do
ksession.update( fa2,
a2 );
a2.setField2( "2" ); // Undo
ksession.update( fa2,
a2 );
// modify a3 to cycle it, so that it moves to the end of the memory; in a previous bug it still blocked a1
ksession.update( fa3,
a3 );
a3.setField2( "1" ); // Do
ksession.update( fa3,
a3 );
ksession.fireAllRules();
assertEquals( 1,
list.size() ); // a2 should still be blocked by a1, but a bug in the previous update left it hanging onto the old blocker
ksession.dispose();
}
@Test
public void testBindingsWithOr() throws InstantiationException,
IllegalAccessException {
// JBRULES-2917: matching of field==v1 || field==v2 breaks when variable binding is added
String str = "package org.drools\n" +
"declare Assignment\n" +
" source : int\n" +
" target : int\n" +
"end\n" +
"rule ValueIsTheSame1\n" +
"when\n" +
" Assignment( $t: target == 10 || target == source )\n" +
"then\n" +
"end\n" +
"rule ValueIsTheSame2\n" +
"when\n" +
" Assignment( $t: target == source || target == 10 )\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
FactType asgType = kbase.getFactType( "org.drools",
"Assignment" );
Object asg = asgType.newInstance();
asgType.set( asg,
"source",
10 );
asgType.set( asg,
"target",
10 );
ksession.insert( asg );
int rules = ksession.fireAllRules();
ksession.dispose();
assertEquals( 2,
rules );
}
@Test
public void testMVELClassReferences() throws InstantiationException,
IllegalAccessException {
String str = "package org.drools\n" +
"declare Assignment\n" +
" source : Class\n" +
" target : Class\n" +
"end\n" +
"rule ObjectIsAssignable1\n" +
"when\n" +
" Assignment( $t: target == java.lang.Object.class || target == source )\n" +
"then\n" +
"end\n" +
"rule ObjectIsAssignable2\n" +
"when\n" +
" Assignment( $t: target == source || target == java.lang.Object.class )\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
FactType asgType = kbase.getFactType( "org.drools",
"Assignment" );
Object asg = asgType.newInstance();
asgType.set( asg,
"source",
Object.class );
asgType.set( asg,
"target",
Object.class );
ksession.insert( asg );
int rules = ksession.fireAllRules();
ksession.dispose();
assertEquals( 2,
rules );
}
@Test
public void testNotMatchesSucceeds() throws InstantiationException,
IllegalAccessException {
// JBRULES-2914: Rule misfires due to "not matches" not working
String str = "package org.drools\n" +
"rule NotMatches\n" +
"when\n" +
" Person( name == null || (name != null && name not matches \"-.{2}x.*\" ) )\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
Person p = new Person( "-..x..xrwx" );
ksession.insert( p );
int rules = ksession.fireAllRules();
ksession.dispose();
assertEquals( 0,
rules );
}
@Test
public void testNotMatchesFails() throws InstantiationException,
IllegalAccessException {
// JBRULES-2914: Rule misfires due to "not matches" not working
String str = "package org.drools\n" +
"rule NotMatches\n" +
"when\n" +
" Person( name == null || (name != null && name not matches \"-.{2}x.*\" ) )\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
Person p = new Person( "d..x..xrwx" );
ksession.insert( p );
int rules = ksession.fireAllRules();
ksession.dispose();
assertEquals( 1,
rules );
}
@Test
public void testNotEqualsOperator() {
// JBRULES-3003: restriction evaluation returns 'false' for "trueField != falseField"
String str = "package org.drools\n" +
"rule NotEquals\n" +
"when\n" +
" Primitives( booleanPrimitive != booleanWrapper )\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
Primitives p = new Primitives();
p.setBooleanPrimitive( true );
p.setBooleanWrapper( Boolean.FALSE );
ksession.insert( p );
int rules = ksession.fireAllRules();
ksession.dispose();
assertEquals( 1,
rules );
}
@Test
public void testNotContainsOperator() {
// JBRULES-2404: "not contains" operator doesn't work on nested fields
String str = "package org.drools\n" +
"rule NotContains\n" +
"when\n" +
" $oi : OrderItem( )\n" +
" $o : Order( items.values() not contains $oi )" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
Order order1 = new Order( 1,
"XYZ" );
Order order2 = new Order( 2,
"ABC" );
OrderItem item11 = new OrderItem( order1,
1 );
order1.addItem( item11 );
OrderItem item21 = new OrderItem( order2,
1 );
order2.addItem( item21 );
ksession.insert( order1 );
ksession.insert( item11 );
// should not fire, as item11 is contained in order1.items
int rules = ksession.fireAllRules();
assertEquals( 0,
rules );
// should fire as item21 is not contained in order1.items
ksession.insert( item21 );
rules = ksession.fireAllRules();
assertEquals( 1,
rules );
}
@Test
public void testOrWithFrom() {
// JBRULES-2274: Rule does not fire as expected using deep object model and nested 'or' clause
String str = "package org.drools\n" +
"rule NotContains\n" +
"when\n" +
" $oi1 : OrderItem( )\n" +
" $o1 : Order(number == 1) from $oi1.order; \n" +
" ( eval(true) or eval(true) )\n" +
" $oi2 : OrderItem( )\n" +
" $o2 : Order(number == 2) from $oi2.order; \n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
Order order1 = new Order( 1,
"XYZ" );
Order order2 = new Order( 2,
"ABC" );
OrderItem item11 = new OrderItem( order1,
1 );
order1.addItem( item11 );
OrderItem item21 = new OrderItem( order2,
1 );
order2.addItem( item21 );
ksession.insert( order1 );
ksession.insert( order2 );
ksession.insert( item11 );
ksession.insert( item21 );
int rules = ksession.fireAllRules();
assertEquals( 2,
rules );
}
@Test
public void testSoundsLike() {
// JBRULES-2991: Operator soundslike is broken
String str = "package org.drools\n" +
"rule SoundsLike\n" +
"when\n" +
" Person( name soundslike \"Bob\" )\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.insert( new Person( "Bob" ) );
ksession.insert( new Person( "Mark" ) );
int rules = ksession.fireAllRules();
assertEquals( 1,
rules );
}
@Test
public void testAgendaFilter1() {
String str = "package org.drools\n" +
"rule Aaa when then end\n" +
"rule Bbb when then end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
org.drools.event.rule.AgendaEventListener ael = mock( org.drools.event.rule.AgendaEventListener.class );
ksession.addEventListener( ael );
RuleNameStartsWithAgendaFilter af = new RuleNameStartsWithAgendaFilter( "B" );
int rules = ksession.fireAllRules( af );
assertEquals( 1,
rules );
ArgumentCaptor<org.drools.event.rule.AfterActivationFiredEvent> arg = ArgumentCaptor.forClass( org.drools.event.rule.AfterActivationFiredEvent.class );
verify( ael ).afterActivationFired( arg.capture() );
assertThat( arg.getValue().getActivation().getRule().getName(),
is( "Bbb" ) );
}
@Test
public void testAgendaFilter2() {
String str = "package org.drools\n" +
"rule Aaa when then end\n" +
"rule Bbb when then end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
org.drools.event.rule.AgendaEventListener ael = mock( org.drools.event.rule.AgendaEventListener.class );
ksession.addEventListener( ael );
RuleNameEndsWithAgendaFilter af = new RuleNameEndsWithAgendaFilter( "a" );
int rules = ksession.fireAllRules( af );
assertEquals( 1,
rules );
ArgumentCaptor<org.drools.event.rule.AfterActivationFiredEvent> arg = ArgumentCaptor.forClass( org.drools.event.rule.AfterActivationFiredEvent.class );
verify( ael ).afterActivationFired( arg.capture() );
assertThat( arg.getValue().getActivation().getRule().getName(),
is( "Aaa" ) );
}
@Test
public void testAgendaFilter3() {
String str = "package org.drools\n" +
"rule Aaa when then end\n" +
"rule Bbb when then end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
org.drools.event.rule.AgendaEventListener ael = mock( org.drools.event.rule.AgendaEventListener.class );
ksession.addEventListener( ael );
RuleNameMatchesAgendaFilter af = new RuleNameMatchesAgendaFilter( ".*b." );
int rules = ksession.fireAllRules( af );
assertEquals( 1,
rules );
ArgumentCaptor<org.drools.event.rule.AfterActivationFiredEvent> arg = ArgumentCaptor.forClass( org.drools.event.rule.AfterActivationFiredEvent.class );
verify( ael ).afterActivationFired( arg.capture() );
assertThat( arg.getValue().getActivation().getRule().getName(),
is( "Bbb" ) );
}
@Test
public void testAgendaFilter4() {
String str = "package org.drools\n" +
"rule Aaa when then end\n" +
"rule Bbb when then end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
org.drools.event.rule.AgendaEventListener ael = mock( org.drools.event.rule.AgendaEventListener.class );
ksession.addEventListener( ael );
RuleNameEqualsAgendaFilter af = new RuleNameEqualsAgendaFilter( "Aaa" );
int rules = ksession.fireAllRules( af );
assertEquals( 1,
rules );
ArgumentCaptor<org.drools.event.rule.AfterActivationFiredEvent> arg = ArgumentCaptor.forClass( org.drools.event.rule.AfterActivationFiredEvent.class );
verify( ael ).afterActivationFired( arg.capture() );
assertThat( arg.getValue().getActivation().getRule().getName(),
is( "Aaa" ) );
}
@Test
public void testRestrictionsWithOr() {
// JBRULES-2203: NullPointerException When Using Conditional Element "or" in LHS Together with a Return Value Restriction
String str = "package org.drools\n" +
"rule \"test\"\n" +
"when\n" +
" Cheese( price == (1 + 1) );\n" +
" (or eval(true);\n" +
" eval(true);\n" +
" )\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.insert( new Cheese( "Stilton",
2 ) );
int rules = ksession.fireAllRules();
assertEquals( 2,
rules );
}
@Test
public void testMapModel() {
String str = "package org.drools\n" +
"import java.util.Map\n" +
"rule \"test\"\n" +
"when\n" +
" Map( type == \"Person\", name == \"Bob\" );\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
Map<String, String> mark = new HashMap<String, String>();
mark.put( "type",
"Person" );
mark.put( "name",
"Mark" );
ksession.insert( mark );
int rules = ksession.fireAllRules();
assertEquals( 0,
rules );
Map<String, String> bob = new HashMap<String, String>();
bob.put( "type",
"Person" );
bob.put( "name",
"Bob" );
ksession.insert( bob );
rules = ksession.fireAllRules();
assertEquals( 1,
rules );
}
@Test
public void testConstraintExpression() {
String str = "package org.drools\n" +
"rule \"test\"\n" +
"when\n" +
" Person( 5*2 > 3 );\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.insert( new Person( "Bob" ) );
int rules = ksession.fireAllRules();
assertEquals( 1,
rules );
}
@Test
public void testMethodConstraint() {
String str = "package org.drools\n" +
"rule \"test\"\n" +
"when\n" +
" Person( isAlive() );\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
Person person = new Person( "Bob" );
person.setAlive( true );
ksession.insert( person );
int rules = ksession.fireAllRules();
assertEquals( 1,
rules );
}
@Test
public void testComplexOperator() {
String str = "package org.drools\n" +
"rule \"test in\"\n" +
"when\n" +
" Person( $name : name in (\"bob\", \"mark\") )\n" +
"then\n" +
" boolean test = $name != null;" +
"end\n" +
"rule \"test not in\"\n" +
"when\n" +
" Person( $name : name not in (\"joe\", \"doe\") )\n" +
"then\n" +
" boolean test = $name != null;" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
Person person = new Person( "bob" );
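// 'bob' satisfies both the 'in' and the 'not in' restriction, so both rules fire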
ksession.insert( person );
int rules = ksession.fireAllRules();
assertEquals( 2,
rules );
}
@Test
public void testEventsInDifferentPackages() {
String str = "package org.drools.test\n" +
"import org.drools.*\n" +
"declare StockTick\n" +
" @role( event )\n" +
"end\n" +
"rule r1\n" +
"when\n" +
"then\n" +
" StockTick st = new StockTick();\n" +
" st.setCompany(\"RHT\");\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
int rules = ksession.fireAllRules();
assertEquals( 1,
rules );
}
@Test
public void testClassTypeAttributes() {
String str = "package org.drools\n" +
"rule r1\n" +
"when\n" +
" Primitives( classAttr == null )" +
"then\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.insert( new Primitives() );
int rules = ksession.fireAllRules();
assertEquals( 1,
rules );
}
@Test
public void testFreeFormExpressions() {
String str = "package org.drools\n" +
"rule r1\n" +
"when\n" +
" $p1 : Person( age > 2*10, 10 < age )\n" +
" $p2 : Person( age > 2*$p1.age )\n" +
"then\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
Person bob = new Person( "bob",
30 );
Person mark = new Person( "mark",
61 );
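// only mark (61) is more than twice bob's age (30), so the rule fires exactly once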
ksession.insert( bob );
ksession.insert( mark );
int rules = ksession.fireAllRules();
assertEquals( 1,
rules );
}
@Test
public void testFreeFormExpressions2() {
String str = "package org.drools\n" +
"rule r1\n" +
"when\n" +
" $p1 : Cell( row == 2 )\n" +
" $p2 : Cell( row == $p1.row + 1, row == ($p1.row + 1), row == 1 + $p1.row, row == (1 + $p1.row) )\n" +
"then\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
Cell c1 = new Cell(1, 2, 0 );
Cell c2 = new Cell(1, 3, 0 );
ksession.insert( c1 );
ksession.insert( c2 );
int rules = ksession.fireAllRules();
assertEquals( 1,
rules );
}
@Test
public void testAddMissingResourceToPackageBuilder() throws Exception {
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
try {
kbuilder.add( ResourceFactory.newClassPathResource( "some.rf" ),
ResourceType.DRL );
fail( "adding a missing resource should fail" );
} catch ( RuntimeException e ) {
}
try {
kbuilder.add( ResourceFactory.newClassPathResource( "some.rf" ),
ResourceType.DRF );
fail( "adding a missing resource should fail" );
} catch ( RuntimeException e ) {
}
}
@Test
public void testJBRULES_2995() {
String str = "package org.drools\n" +
"rule r1\n" +
"when\n" +
" Primitives( classAttr == java.lang.String.class, \n" +
" eval(classAttr.equals( java.lang.String.class ) ),\n" +
" classAttr == String.class )\n" +
"then\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
Primitives primitives = new Primitives();
primitives.setClassAttr( String.class );
ksession.insert( primitives );
int rules = ksession.fireAllRules();
assertEquals( 1,
rules );
}
@Test
public void testJBRULES2872() {
String str = "package org.drools.test\n" +
"import org.drools.FactA\n" +
"rule X\n" +
"when\n" +
" FactA( enumVal == TestEnum.ONE || == TestEnum.TWO )\n" +
"then\n" +
"end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
assertTrue( kbuilder.hasErrors() );
KnowledgeBuilderErrors errors = kbuilder.getErrors();
logger.info( errors.toString() );
assertEquals( 1,
errors.size() );
KnowledgeBuilderError error = errors.iterator().next();
assertEquals( 5,
error.getLines()[0] );
}
@Test
public void testJBRULES3030() {
String str = "package org.drools\n" +
"rule X\n" +
"when\n" +
" $gp : GrandParent()" +
" $ch : ChildHolder( child == $gp )\n" +
"then\n" +
"end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
assertFalse( kbuilder.hasErrors() );
}
@Test
public void testJBRULES3111() {
String str = "package org.drools\n" +
"declare Bool123\n" +
" bool1 : boolean\n" +
" bool2 : boolean\n" +
" bool3 : boolean\n" +
"end\n" +
"declare Thing\n" +
" name : String\n" +
" bool123 : Bool123\n" +
"end\n" +
"rule kickOff\n" +
"when\n" +
"then\n" +
" insert( new Thing( \"one\", new Bool123( true, false, false ) ) );\n" +
" insert( new Thing( \"two\", new Bool123( false, false, false ) ) );\n" +
" insert( new Thing( \"three\", new Bool123( false, false, false ) ) );\n" +
"end\n" +
"rule r1\n" +
"when\n" +
" $t: Thing( bool123.bool1 == true )\n" +
"then\n" +
"end\n" +
"rule r2\n" +
"when\n" +
" $t: Thing( bool123.bool2 == true )\n" +
"then\n" +
"end\n" +
"rule r3\n" +
"when\n" +
" $t: Thing( bool123.bool3 == true )\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
org.drools.event.rule.AgendaEventListener ael = mock( org.drools.event.rule.AgendaEventListener.class );
ksession.addEventListener( ael );
int rulesFired = ksession.fireAllRules();
assertEquals( 2,
rulesFired );
ArgumentCaptor<org.drools.event.rule.AfterActivationFiredEvent> captor = ArgumentCaptor.forClass( org.drools.event.rule.AfterActivationFiredEvent.class );
verify( ael,
times( 2 ) ).afterActivationFired( captor.capture() );
List<org.drools.event.rule.AfterActivationFiredEvent> aafe = captor.getAllValues();
Assert.assertThat( aafe.get( 0 ).getActivation().getRule().getName(),
is( "kickOff" ) );
Assert.assertThat( aafe.get( 1 ).getActivation().getRule().getName(),
is( "r1" ) );
Object value = aafe.get( 1 ).getActivation().getDeclarationValue( "$t" );
String name = (String) MVEL.eval( "$t.name",
Collections.singletonMap( "$t",
value ) );
Assert.assertThat( name,
is( "one" ) );
}
@Test
public void testBigLiterals() {
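// DRL numeric literal suffixes: "I" denotes a BigInteger literal, "B" a BigDecimal literal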
String str = "package org.drools\n" +
"rule X\n" +
"when\n" +
" Primitives( bigInteger == 10I, bigInteger < (50I), bigDecimal == 10B, bigDecimal < (50B) )\n" +
"then\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
Primitives p = new Primitives();
p.setBigDecimal( BigDecimal.valueOf( 10 ) );
p.setBigInteger( BigInteger.valueOf( 10 ) );
ksession.insert( p );
int rulesFired = ksession.fireAllRules();
assertEquals( 1,
rulesFired );
}
@Test
public void testNonBooleanConstraint() {
String str = "package org.drools\n" +
"import java.util.List\n" +
"rule \"test\"\n" +
"when\n" +
" $p1: Person( name + name )\n" +
"then\n" +
"end";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
assertTrue(kbuilder.hasErrors());
logger.info(kbuilder.getErrors().toString());
}
@Test
public void testModifyJava() {
String str = "package org.drools\n" +
"import java.util.List\n" +
"rule \"test\"\n" +
"when\n" +
" $l : List() from collect ( Person( alive == false ) );\n" +
"then\n" +
" for(Object p : $l ) {\n" +
" Person p2 = (Person) p;\n" +
" modify(p2) { setAlive(true) }\n" +
" }\n" +
"end";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
Assert.assertFalse( kbuilder.getErrors().toString(),
kbuilder.hasErrors() );
}
@Test
public void testModifyMVEL() {
String str = "package org.drools\n" +
"import java.util.List\n" +
"rule \"test\"\n" +
" dialect \"mvel\"\n" +
"when\n" +
" $l : List() from collect ( Person( alive == false ) );\n" +
"then\n" +
" for(Object p : $l ) {\n" +
" Person p2 = (Person) p;\n" +
" modify(p2) { setAlive(true) }\n" +
" }\n" +
"end";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
Assert.assertFalse( kbuilder.getErrors().toString(),
kbuilder.hasErrors() );
}
@Test
public void testPackageNameOfTheBeast() throws Exception {
// JBRULES-2749 Various rules stop firing when they are in an unlucky package name and a function is declared
String ruleFileContent1 = "package org.drools.integrationtests;\n" +
"function void myFunction() {\n" +
"}\n" +
"declare MyDeclaredType\n" +
" someProperty: boolean\n" +
"end";
String ruleFileContent2 = "package de.something;\n" + // FAILS
// String ruleFileContent2 = "package de.somethinga;\n" + // PASSES
// String ruleFileContent2 = "package de.somethingb;\n" + // PASSES
// String ruleFileContent2 = "package de.somethingc;\n" + // PASSES
// String ruleFileContent2 = "package de.somethingd;\n" + // PASSES
// String ruleFileContent2 = "package de.somethinge;\n" + // FAILS
// String ruleFileContent2 = "package de.somethingf;\n" + // FAILS
// String ruleFileContent2 = "package de.somethingg;\n" + // FAILS
"import org.drools.integrationtests.*;\n" +
"rule \"CheckMyDeclaredType\"\n" +
" when\n" +
" MyDeclaredType()\n" +
" then\n" +
" insertLogical(\"THIS-IS-MY-MARKER-STRING\");\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( ruleFileContent1,
ruleFileContent2 );
StatefulKnowledgeSession knowledgeSession = createKnowledgeSession(kbase);
final FactType myDeclaredFactType = kbase.getFactType( "org.drools.integrationtests",
"MyDeclaredType" );
Object myDeclaredFactInstance = myDeclaredFactType.newInstance();
knowledgeSession.insert( myDeclaredFactInstance );
int rulesFired = knowledgeSession.fireAllRules();
assertEquals( 1,
rulesFired );
knowledgeSession.dispose();
}
@Test
public void testGUVNOR578_2() throws Exception {
MapBackedClassLoader loader = new MapBackedClassLoader( this.getClass().getClassLoader() );
JarInputStream jis = new JarInputStream( this.getClass().getResourceAsStream( "/primespoc.jar" ) );
JarEntry entry = null;
byte[] buf = new byte[1024];
int len = 0;
while ( (entry = jis.getNextJarEntry()) != null ) {
if ( !entry.isDirectory() ) {
ByteArrayOutputStream out = new ByteArrayOutputStream();
while ( (len = jis.read( buf )) >= 0 ) {
out.write( buf,
0,
len );
}
loader.addResource( entry.getName(),
out.toByteArray() );
}
}
List<JarInputStream> jarInputStreams = new ArrayList<JarInputStream>();
jarInputStreams.add( jis );
Properties properties = new Properties();
properties.setProperty( DefaultPackageNameOption.PROPERTY_NAME,
"foo.bar" );
PackageBuilder builder = new PackageBuilder( new PackageBuilderConfiguration( properties,
loader ) );
PackageDescr pc = new PackageDescr( "foo.bar" );
builder.addPackage( pc );
String header = "import fr.gouv.agriculture.dag.agorha.business.primes.SousPeriodePrimeAgent\n";
builder.addPackageFromDrl( new StringReader( header ) );
assertFalse( builder.hasErrors() );
String passingRule = "rule \"rule1\"\n"
+ "dialect \"mvel\"\n"
+ "when\n"
+ "SousPeriodePrimeAgent( echelle == \"abc\" )"
+ "then\n"
+ "end\n";
String failingRule = "rule \"rule2\"\n"
+ "dialect \"mvel\"\n"
+ "when\n"
+ "SousPeriodePrimeAgent( quotiteRemuneration == 123 , echelle == \"abc\" )"
+ "then\n"
+ "end\n";
builder.addPackageFromDrl( new StringReader( passingRule ) );
if ( builder.hasErrors() ) {
logger.warn( builder.getErrors().getErrors()[0].getMessage() );
}
assertFalse( builder.hasErrors() );
builder.addPackageFromDrl( new StringReader( failingRule ) );
if ( builder.hasErrors() ) {
logger.warn( builder.getErrors().getErrors()[0].getMessage() );
}
assertFalse( builder.hasErrors() );
}
@Test
public void testJBRULES3323() throws Exception {
//adding rules. it is important to add both since they reciprocate
StringBuilder rule = new StringBuilder();
rule.append("package de.orbitx.accumulatetesettest;\n");
rule.append("import java.util.Set;\n");
rule.append("import java.util.HashSet;\n");
rule.append("import org.drools.Foo;\n");
rule.append("import org.drools.Bar;\n");
rule.append("rule \"Sub optimal foo parallelism - this rule is causing NPE upon reverse\"\n");
rule.append("when\n");
rule.append("$foo : Foo($leftId : id, $leftBar : bar != null)\n");
rule.append("$fooSet : Set()\n");
rule.append("from accumulate ( Foo(id > $leftId, bar != null && != $leftBar, $bar : bar),\n");
rule.append("collectSet( $bar ) )\n");
rule.append("then\n");
rule.append("//System.out.println(\"ok\");\n");
rule.append("end\n");
//building stuff
KnowledgeBase kbase = loadKnowledgeBaseFromString( rule.toString() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
//adding test data
Bar[] barList = new Bar[3];
for (int i = 0; i < barList.length; i++) {
barList[i] = new Bar( String.valueOf( i ) );
}
org.drools.Foo[] fooList = new org.drools.Foo[4];
for (int i = 0; i < fooList.length; i++) {
fooList[i] = new org.drools.Foo( String.valueOf( i ), i == 3 ? barList[2] : barList[i] );
}
for (org.drools.Foo foo : fooList) {
ksession.insert(foo);
}
//the NPE is caused by exactly this sequence. Of course there are more such sequences, but this
//appears to be the shortest one
int[] magicFoos = new int[] { 3, 3, 1, 1, 0, 0, 2, 2, 1, 1, 0, 0, 3, 3, 2, 2, 3, 1, 1 };
int[] magicBars = new int[] { 1, 2, 0, 1, 1, 0, 1, 2, 2, 1, 2, 0, 0, 2, 0, 2, 0, 0, 1 };
//upon final rule firing an NPE will be thrown in org.drools.rule.Accumulate
for (int i = 0; i < magicFoos.length; i++) {
org.drools.Foo tehFoo = fooList[magicFoos[i]];
org.drools.runtime.rule.FactHandle fooFactHandle = ksession.getFactHandle(tehFoo);
tehFoo.setBar(barList[magicBars[i]]);
ksession.update(fooFactHandle, tehFoo);
ksession.fireAllRules();
}
ksession.dispose();
}
@Test
public void testJBRULES3326() throws Exception {
StringBuilder rule = new StringBuilder();
rule.append("package org.drools\n");
rule.append("rule X\n");
rule.append("when\n");
rule.append(" Message(!!!false)\n");
rule.append("then\n");
rule.append("end\n");
//building stuff
KnowledgeBase kbase = loadKnowledgeBaseFromString( rule.toString() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.insert(new Message("test"));
int rules = ksession.fireAllRules();
assertEquals( 1, rules );
ksession.dispose();
}
@Test
public void testDispose() throws Exception {
StringBuilder rule = new StringBuilder();
rule.append("package org.drools\n");
rule.append("rule X\n");
rule.append("when\n");
rule.append(" Message()\n");
rule.append("then\n");
rule.append("end\n");
//building stuff
KnowledgeBase kbase = loadKnowledgeBaseFromString( rule.toString() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.insert(new Message("test"));
int rules = ksession.fireAllRules();
assertEquals( 1, rules );
ksession.dispose();
try {
// the following should raise an IllegalStateException as the session was already disposed
ksession.fireAllRules();
fail("An IllegalStateException should have been raised as the session was disposed before the method call.");
} catch (IllegalStateException ise ) {
// success
}
}
@Test
public void testInnerEnum() throws Exception {
StringBuilder rule = new StringBuilder();
rule.append("package org.drools\n");
rule.append("rule X\n");
rule.append("when\n");
rule.append(" Triangle( type == Triangle.Type.UNCLASSIFIED )\n");
rule.append("then\n");
rule.append("end\n");
//building stuff
KnowledgeBase kbase = loadKnowledgeBaseFromString( rule.toString() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.insert(new Triangle());
int rules = ksession.fireAllRules();
assertEquals( 1, rules );
ksession.dispose();
}
@Test
public void testNestedAccessors2() throws Exception {
String rule = "package org.drools\n" +
"rule 'rule1'" +
" salience 10\n" +
"when\n" +
" Cheesery( typedCheeses[0].type == 'stilton' );\n" +
"then\n" +
"end\n" +
"rule 'rule2'\n" +
"when\n" +
" Cheesery( typedCheeses[0].price == 10 );\n" +
"then\n" +
"end";
//building stuff
KnowledgeBase kbase = loadKnowledgeBaseFromString( rule.toString() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
org.drools.event.rule.AgendaEventListener ael = mock( org.drools.event.rule.AgendaEventListener.class );
ksession.addEventListener( ael );
Cheesery c1 = new Cheesery();
c1.addCheese( new Cheese("stilton", 20) );
Cheesery c2 = new Cheesery();
c2.addCheese( new Cheese("brie", 10) );
Cheesery c3 = new Cheesery();
c3.addCheese( new Cheese("muzzarella", 30) );
ksession.insert( c1 );
ksession.insert( c2 );
ksession.insert( c3 );
ksession.fireAllRules();
ArgumentCaptor<org.drools.event.rule.AfterActivationFiredEvent> captor = ArgumentCaptor.forClass( org.drools.event.rule.AfterActivationFiredEvent.class );
verify( ael, times(2) ).afterActivationFired( captor.capture() );
List<org.drools.event.rule.AfterActivationFiredEvent> values = captor.getAllValues();
assertThat( (Cheesery) values.get( 0 ).getActivation().getObjects().get( 0 ), is( c1 ) );
assertThat( (Cheesery) values.get( 1 ).getActivation().getObjects().get( 0 ), is( c2 ) );
ksession.dispose();
}
@Test
public void testMVELConstraintsWithFloatingPointNumbersInScientificNotation() {
String rule = "package test; \n" +
"dialect \"mvel\"\n" +
"global java.util.List list;" +
"\n" +
"declare Bean \n" +
" field : double \n" +
"end \n" +
"\n" +
"rule \"Init\" \n" +
"when \n" +
"then \n" +
"\t insert( new Bean( 1.0E-2 ) ); \n" +
"end \n" +
"\n" +
"rule \"Check\" \n" +
"when \n" +
"\t Bean( field < 1.0E-1 ) \n" +
"then \n" +
"\t list.add( \"OK\" ); \n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( rule );
StatefulKnowledgeSession kSession = kbase.newStatefulKnowledgeSession();
List<String> list = new ArrayList<String>();
kSession.setGlobal( "list", list );
kSession.fireAllRules();
assertEquals( 1 , list.size() );
}
public static class A {
private String field1;
private String field2;
public A(String field1,
String field2) {
this.field1 = field1;
this.field2 = field2;
}
public String getField1() {
return field1;
}
public void setField1( String field1 ) {
this.field1 = field1;
}
public String getField2() {
return field2;
}
public void setField2( String field2 ) {
this.field2 = field2;
}
public String toString() {
return "A) " + field1 + ":" + field2;
}
}
@Test
public void testMvelDoubleInvocation() throws Exception {
String rule = "package org.drools\n" +
"import org.drools.integrationtests.MiscTest.TestUtility;\n" +
"import org.drools.integrationtests.MiscTest.TestFact;\n" +
"rule \"First Rule\"\n" +
" when\n" +
" $tf : TestFact(TestUtility.utilMethod(s, \"Value1\") == true\n" +
" && i > 0\n" +
" )\n" +
" then\n" +
" System.out.println(\"First Rule Fires\");\n" +
"end\n" +
"\n" +
"rule \"Second Rule\"\n" +
" when\n" +
" $tf : TestFact(TestUtility.utilMethod(s, \"Value2\") == true\n" +
" && i > 0\n" +
" )\n" +
" then\n" +
" System.out.println(\"Second Rule Fires\");\n" +
"end\n" +
"\n" +
"rule \"Third Rule\"\n" +
" when\n" +
" $tf : TestFact(TestUtility.utilMethod(s, \"Value3\") == true\n" +
" && i > 0\n" +
" )\n" +
" then\n" +
" System.out.println(\"Third Rule Fires\");\n" +
"end ";
KnowledgeBase kbase = loadKnowledgeBaseFromString( rule.toString() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
TestFact fact = new TestFact();
fact.setS("asdf");
fact.setI(10);
ksession.insert(fact);
ksession.fireAllRules();
ksession.dispose();
}
public static class TestUtility {
public static Boolean utilMethod(String s1, String s2) {
Boolean result = null;
if (s1 != null) {
result = s1.equals(s2);
}
logger.info("in utilMethod >" + s1 + "< >" + s2 + "< returns " + result);
return result;
}
}
public static class TestFact {
private int i;
private String s;
public int getI() {
return i;
}
public void setI(int i) {
this.i = i;
}
public String getS() {
return s;
}
public void setS(String s) {
this.s = s;
}
}
@Test
public void testUnwantedCoersion() throws Exception {
String rule = "package org.drools\n" +
"import org.drools.integrationtests.MiscTest.InnerBean;\n" +
"import org.drools.integrationtests.MiscTest.OuterBean;\n" +
"rule \"Test.Code One\"\n" +
"when\n" +
" OuterBean($code : inner.code in (\"1.50\", \"2.50\"))\n" +
"then\n" +
" System.out.println(\"Code compared values: 1.50, 2.50 - actual code value: \" + $code);\n" +
"end\n" +
"rule \"Test.Code Two\"\n" +
"when\n" +
" OuterBean($code : inner.code in (\"1.5\", \"2.5\"))\n" +
"then\n" +
" System.out.println(\"Code compared values: 1.5, 2.5 - actual code value: \" + $code);\n" +
"end\n" +
"rule \"Big Test ID One\"\n" +
"when\n" +
" OuterBean($id : id in (\"3.5\", \"4.5\"))\n" +
"then\n" +
" System.out.println(\"ID compared values: 3.5, 4.5 - actual ID value: \" + $id);\n" +
"end\n" +
"rule \"Big Test ID Two\"\n" +
"when\n" +
" OuterBean($id : id in ( \"3.0\", \"4.0\"))\n" +
"then\n" +
" System.out.println(\"ID compared values: 3.0, 4.0 - actual ID value: \" + $id);\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( rule );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
InnerBean innerTest = new InnerBean();
innerTest.setCode("1.500");
ksession.insert(innerTest);
OuterBean outerTest = new OuterBean();
outerTest.setId("3");
outerTest.setInner(innerTest);
ksession.insert(outerTest);
OuterBean outerTest2 = new OuterBean();
outerTest2.setId("3.0");
outerTest2.setInner(innerTest);
ksession.insert(outerTest2);
int rules = ksession.fireAllRules();
assertEquals(1, rules);
}
public static class InnerBean {
private String code;
public String getCode() {
return code;
}
public void setCode(String code) {
this.code = code;
}
}
public static class OuterBean {
private InnerBean inner;
private String id;
public InnerBean getInner() {
return inner;
}
public void setInner(InnerBean inner) {
this.inner = inner;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
}
@Test
public void testShiftOperator() throws Exception {
String rule = "dialect \"mvel\"\n" +
"rule kickOff\n" +
"when\n" +
"then\n" +
" insert( Integer.valueOf( 1 ) );\n" +
" insert( Long.valueOf( 1 ) );\n" +
" insert( Integer.valueOf( 65552 ) ); // 0x10010\n" +
" insert( Long.valueOf( 65552 ) );\n" +
" insert( Integer.valueOf( 65568 ) ); // 0x10020\n" +
" insert( Long.valueOf( 65568 ) );\n" +
" insert( Integer.valueOf( 65536 ) ); // 0x10000\n" +
" insert( Long.valueOf( 65536L ) );\n" +
" insert( Long.valueOf( 4294967296L ) ); // 0x100000000L\n" +
"end\n" +
"rule test1\n" +
" salience -1\n" +
"when\n" +
" $a: Integer( $one: intValue == 1 )\n" +
" $b: Integer( $shift: intValue )\n" +
" $c: Integer( $i: intValue, intValue == ($one << $shift ) )\n" +
"then\n" +
" System.out.println( \"test1 \" + $a + \" << \" + $b + \" = \" + Integer.toHexString( $c ) );\n" +
"end\n" +
"rule test2\n" +
" salience -2\n" +
"when\n" +
" $a: Integer( $one: intValue == 1 )\n" +
" $b: Long ( $shift: longValue )\n" +
" $c: Integer( $i: intValue, intValue == ($one << $shift ) )\n" +
"then\n" +
" System.out.println( \"test2 \" + $a + \" << \" + $b + \" = \" + Integer.toHexString( $c ) );\n" +
"end\n" +
"rule test3\n" +
" salience -3\n" +
"when\n" +
" $a: Long ( $one: longValue == 1 )\n" +
" $b: Long ( $shift: longValue )\n" +
" $c: Integer( $i: intValue, intValue == ($one << $shift ) )\n" +
"then\n" +
" System.out.println( \"test3 \" + $a + \" << \" + $b + \" = \" + Integer.toHexString( $c ) );\n" +
"end\n" +
"rule test4\n" +
" salience -4\n" +
"when\n" +
" $a: Long ( $one: longValue == 1 )\n" +
" $b: Integer( $shift: intValue )\n" +
" $c: Integer( $i: intValue, intValue == ($one << $shift ) )\n" +
"then\n" +
" System.out.println( \"test4 \" + $a + \" << \" + $b + \" = \" + Integer.toHexString( $c ) );\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( rule );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
int rules = ksession.fireAllRules();
assertEquals(13, rules);
}
@Test
public void testRecursiveDeclaration() throws Exception {
String rule = "package org.drools\n" +
"declare Node\n" +
" value: String\n" +
" parent: Node\n" +
"end\n" +
"rule R1 when\n" +
" $parent: Node( value == \"parent\" )\n" +
" $child: Node( $value : value, parent == $parent )\n" +
"then\n" +
" System.out.println( $value );\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( rule );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
FactType nodeType = kbase.getFactType( "org.drools", "Node" );
Object parent = nodeType.newInstance();
nodeType.set( parent, "value", "parent" );
ksession.insert( parent );
Object child = nodeType.newInstance();
nodeType.set( child, "value", "child" );
nodeType.set( child, "parent", parent );
ksession.insert( child );
int rules = ksession.fireAllRules();
assertEquals(1, rules);
}
@Test
public void testCircularDeclaration() throws Exception {
String rule = "package org.drools.test\n" +
"declare FactA\n" +
" fieldB: FactB\n" +
"end\n" +
"declare FactB\n" +
" fieldA: FactA\n" +
"end\n" +
"rule R1 when\n" +
" $fieldA : FactA( $fieldB : fieldB )\n" +
" FactB( this == $fieldB, fieldA == $fieldA )\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( rule );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
FactType aType = kbase.getFactType( "org.drools.test", "FactA" );
Object a = aType.newInstance();
FactType bType = kbase.getFactType( "org.drools.test", "FactB" );
Object b = bType.newInstance();
aType.set( a, "fieldB", b );
bType.set( b, "fieldA", a );
ksession.insert( a );
ksession.insert( b );
int rules = ksession.fireAllRules();
assertEquals(1, rules);
}
@Test
public void testPatternMatchingOnThis() throws Exception {
String rule = "package org.drools\n" +
"rule R1 when\n" +
" $i1: Integer()\n" +
" $i2: Integer( this > $i1 )\n" +
"then\n" +
" System.out.println( $i2 + \" > \" + $i1 );\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( rule );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
ksession.insert( new Integer(1) );
ksession.insert( new Integer(2) );
int rules = ksession.fireAllRules();
assertEquals(1, rules);
}
@Test
public void testArrayUsage() {
String str = "import org.drools.base.DroolsQuery;\n" +
"\n" +
"global java.util.List list;\n" +
"\n" +
"query extract( String s )\n" +
" Object() \n" +
"end\n" +
"\n" +
"rule \"Intercept\"\n" +
"when\n" +
" DroolsQuery( name == \"extract\", $args : elements )\n" +
" $s : String( this == $args[$s.length() - $s.length()] )\n" +
" $s1 : String( this == $args[0] )\n" +
" $s2 : String( this == $args[$args.length - $args.length] )\n" +
"then\n" +
" retract( $s ); \n" +
" list.add( $s );\n" +
"end\n" +
"\n" +
"rule \"Exec\"\n" +
"when\n" +
" $s : String()\n" +
" ?extract( $s ; )\n" +
"then\n" +
" \n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
java.util.List list = new java.util.ArrayList();
ksession.setGlobal( "list", list );
int N = 2;
for ( int j = 0; j < N; j++ ) {
ksession.insert( "x" + j );
ksession.fireAllRules();
}
assertEquals( N, list.size() );
ksession.dispose();
}
@Test(timeout = 5000)
public void testEfficientBetaNodeNetworkUpdate() {
// [JBRULES-3372]
String str =
"declare SimpleMembership\n" +
" listId : String\n" +
" patientId : String\n" +
"end\n" +
"declare SimplePatientFact\n" +
" value : int\n" +
" patientId : String\n" +
"end\n" +
"rule \"A\"\n" +
"when\n" +
"$slm : SimpleMembership($pid : patientId, listId == \"5072\" )\n" +
"and not (\n" +
" (\n" +
" (\n" +
" SimplePatientFact(value == 1, patientId == $pid)\n" +
" ) or (\n" +
" SimplePatientFact(value == 2, patientId == $pid)\n" +
" )\n" +
" ) and (\n" +
" (\n" +
" SimplePatientFact(value == 6, patientId == $pid)\n" +
" ) or (\n" +
" SimplePatientFact(value == 7, patientId == $pid)\n" +
" ) or (\n" +
" SimplePatientFact(value == 8, patientId == $pid)\n" +
" )\n" +
" ) and (\n" +
" (\n" +
" SimplePatientFact(value == 9, patientId == $pid)\n" +
" ) or (\n" +
" SimplePatientFact(value == 10, patientId == $pid)\n" +
" ) or (\n" +
" SimplePatientFact(value == 11, patientId == $pid)\n" +
" ) or (\n" +
" SimplePatientFact(value == 12, patientId == $pid)\n" +
" ) or (\n" +
" SimplePatientFact(value == 13, patientId == $pid)\n" +
" )\n" +
" )\n" +
")\n" +
"then\n" +
" System.out.println(\"activated\");\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
}
@Test
public void testModifyCommand() {
String str =
"rule \"sample rule\"\n" +
" when\n" +
" then\n" +
" System.out.println(\"\\\"Hello world!\\\"\");\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
Person p1 = new Person("John", "nobody", 25);
ksession.execute(CommandFactory.newInsert(p1));
org.drools.runtime.rule.FactHandle fh = ksession.getFactHandle(p1);
Person p = new Person("Frank", "nobody", 30);
List<Setter> setterList = new ArrayList<Setter>();
setterList.add(CommandFactory.newSetter("age", String.valueOf(p.getAge())));
setterList.add(CommandFactory.newSetter("name", p.getName()));
setterList.add(CommandFactory.newSetter("likes", p.getLikes()));
ksession.execute(CommandFactory.newModify(fh, setterList));
}
@Test
public void testMVELTypeCoercion() {
String str = "package org.drools.test; \n" +
"\n" +
"global java.util.List list;" +
"\n" +
"declare Bean\n" +
// NOTICE: THIS WORKS WHEN THE FIELD IS "LIST", BUT USED TO WORK WITH ARRAYLIST TOO
" field : java.util.ArrayList\n" +
"end\n" +
"\n" +
"\n" +
"rule \"Init\"\n" +
"when \n" +
"then\n" +
" insert( new Bean( new java.util.ArrayList( java.util.Arrays.asList( \"x\" ) ) ) );\n" +
"end\n" +
"\n" +
"rule \"Check\"\n" +
"when\n" +
" $b : Bean( $fld : field == [\"x\"] )\n" +
"then\n" +
" System.out.println( $fld );\n" +
" list.add( \"OK\" ); \n" +
"end";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
fail( kbuilder.getErrors().toString() );
}
KnowledgeBaseConfiguration kbConf = KnowledgeBaseFactory.newKnowledgeBaseConfiguration();
kbConf.setOption(AssertBehaviorOption.EQUALITY);
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase( kbConf );
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
java.util.List list = new java.util.ArrayList();
ksession.setGlobal( "list", list );
ksession.fireAllRules();
assertTrue( list.contains( "OK" ) );
ksession.dispose();
}
@Test
public void testPatternOnClass() throws Exception {
String rule = "import org.drools.reteoo.InitialFactImpl\n" +
"import org.drools.FactB\n" +
"rule \"Clear\" when\n" +
" $f: Object(class != FactB.class)\n" +
"then\n" +
" if( ! ($f instanceof InitialFactImpl) ){\n" +
" retract( $f );\n" +
" }\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( rule );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
ksession.insert(new FactA());
ksession.insert(new FactA());
ksession.insert(new FactB());
ksession.insert(new FactB());
ksession.insert(new FactC());
ksession.insert(new FactC());
ksession.fireAllRules();
for (org.drools.runtime.rule.FactHandle fact : ksession.getFactHandles()) {
InternalFactHandle internalFact = (InternalFactHandle)fact;
assertTrue(internalFact.getObject() instanceof FactB);
}
}
@Test
public void testPatternOffset() throws Exception {
// JBRULES-3427
String str = "package org.drools.test; \n" +
"declare A\n" +
"end\n" +
"declare B\n" +
" field : int\n" +
"end\n" +
"declare C\n" +
" field : int\n" +
"end\n" +
"rule R when\n" +
"( " +
" A( ) or ( A( ) and B( ) ) " +
") and (\n" +
" A( ) or ( B( $bField : field ) and C( field != $bField ) )\n" +
")\n" +
"then\n" +
" System.out.println(\"rule fired\");\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
FactType typeA = kbase.getFactType( "org.drools.test", "A" );
FactType typeB = kbase.getFactType( "org.drools.test", "B" );
FactType typeC = kbase.getFactType( "org.drools.test", "C" );
Object a = typeA.newInstance();
ksession.insert( a );
Object b = typeB.newInstance();
typeB.set( b, "field", 1 );
ksession.insert( b );
Object c = typeC.newInstance();
typeC.set( c, "field", 1 );
ksession.insert( c );
ksession.fireAllRules();
}
@Test
public void testCommentDelimiterInString() throws Exception {
// JBRULES-3401
String str = "rule x\n" +
"dialect \"mvel\"\n" +
"when\n" +
"then\n" +
"System.out.println( \"/*\" );\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
}
public interface InterfaceA {
InterfaceB getB();
}
public interface InterfaceB { }
public static class ClassA implements InterfaceA {
private ClassB b = null;
public ClassB getB() {
return b;
}
public void setB(InterfaceB b) {
this.b = (ClassB)b;
}
}
public static class ClassB implements InterfaceB {
private String id = "123";
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ClassB classB = (ClassB) o;
if (id != null ? !id.equals(classB.id) : classB.id != null) return false;
return true;
}
@Override
public int hashCode() {
return Integer.valueOf( id );
}
}
@Test
public void testCovariance() throws Exception {
// JBRULES-3392
String str =
"import org.drools.integrationtests.MiscTest.*\n" +
"rule x\n" +
"when\n" +
" $b : ClassB( )\n" +
" $a : ClassA( b.id == $b.id )\n" +
"then\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
ClassA a = new ClassA();
ClassB b = new ClassB();
a.setB(b);
ksession.insert(a);
ksession.insert(b);
assertEquals(1, ksession.fireAllRules());
}
@Test
public void testRetractLeftTuple() throws Exception {
// JBRULES-3420
String str = "import org.drools.integrationtests.MiscTest.*\n" +
"rule R1 salience 3\n" +
"when\n" +
" $b : InterfaceB( )\n" +
" $a : ClassA( b == null )\n" +
"then\n" +
" $a.setB( $b );\n" +
" update( $a );\n" +
"end\n" +
"rule R2 salience 2\n" +
"when\n" +
" $b : ClassB( id == \"123\" )\n" +
" $a : ClassA( b != null && b.id == $b.id )\n" +
"then\n" +
" $b.setId( \"456\" );\n" +
" update( $b );\n" +
"end\n" +
"rule R3 salience 1\n" +
"when\n" +
" InterfaceA( $b : b )\n" +
"then\n" +
" retract( $b );\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
ksession.insert(new ClassA());
ksession.insert(new ClassB());
assertEquals(3, ksession.fireAllRules());
}
@Test
public void testVariableBindingWithOR() throws Exception {
// JBRULES-3390
String str1 = "package org.drools.test; \n" +
"declare A\n" +
"end\n" +
"declare B\n" +
" field : int\n" +
"end\n" +
"declare C\n" +
" field : int\n" +
"end\n" +
"rule R when\n" +
"( " +
" A( ) and ( B( $bField : field ) or C( $cField : field ) ) " +
")\n" +
"then\n" +
" System.out.println($bField);\n" +
"end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource(str1.getBytes()), ResourceType.DRL );
assertTrue(kbuilder.hasErrors());
String str2 = "package org.drools.test; \n" +
"declare A\n" +
"end\n" +
"declare B\n" +
" field : int\n" +
"end\n" +
"declare C\n" +
" field : int\n" +
"end\n" +
"rule R when\n" +
"( " +
" A( ) and ( B( $field : field ) or C( $field : field ) ) " +
")\n" +
"then\n" +
" System.out.println($field);\n" +
"end\n";
KnowledgeBuilder kbuilder2 = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder2.add( ResourceFactory.newByteArrayResource(str2.getBytes()), ResourceType.DRL );
assertFalse(kbuilder2.hasErrors());
}
@Test
public void testModifySimple() {
String str ="package org.drools;\n" +
"\n" +
"rule \"test modify block\"\n" +
"when\n" +
" $p: Person( name == \"hungry\" )\n" +
"then\n" +
" modify( $p ) { setName(\"fine\") }\n" +
"end\n" +
"\n" +
"rule \"Log\"\n" +
"when\n" +
" $o: Object()\n" +
"then\n" +
" System.out.println( $o );\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
Person p = new Person();
p.setName( "hungry" );
ksession.insert( p );
ksession.fireAllRules();
ksession.dispose();
}
@Test
public void testDeclaresWithArrayFields() throws Exception {
String rule = "package org.drools.test; \n" +
"import org.drools.test.Person;" +
"\n" +
" global java.util.List list;" +
"\n" +
"declare Cheese\n" +
" name : String = \"ched\" \n" +
"end \n" +
"" +
"declare X\n" +
" fld \t: String = \"xx\" @key \n" +
" achz\t: Cheese[] \n" +
" astr\t: String[] \n" + "\t= new String[] {\"x\", \"y11\" } \n" +
" aint\t: int[] \n" +
" sint\t: short[] \n" +
" bint\t: byte[] \n" +
" lint\t: long[] \n" +
" dint\t: double[] \n" +
" fint\t: float[] \n" +
" zint\t: Integer[] \n" + "\t= new Integer[] {2,3} @key \n" +
" aaaa\t: String[][] \n" +
" bbbb\t: int[][] \n" +
" aprs\t: Person[] \n" + "\t= new org.drools.test.Person[] { new org.drools.test.Man() }" +
"end\n" +
"\n" +
"rule \"Init\"\n" +
"when\n" +
"\n" +
"then\n" +
" X x = new X( \"xx\", " +
" new Cheese[0], " +
" new String[] { \"x\", \"y22\" }, " +
" new int[] { 7, 9 }, " +
" new short[] { 3, 4 }, " +
" new byte[] { 1, 2 }, " +
" new long[] { 100L, 200L }, " +
" new double[] { 3.2, 4.4 }, " +
" new float[] { 3.2f, 4.4f }, " +
" new Integer[] { 2, 3 }," +
" new String[2][3]," +
" new int[5][3]," +
" null " +
" ); \n" +
" insert( x );\n" +
" " +
" X x2 = new X(); \n" +
" x2.setAint( new int[2] ); \n " +
" x2.getAint()[0] = 7; \n" +
" insert( x2 );\n" +
" " +
" if ( x.hashCode() == x2.hashCode() ) list.add( \"hash\" ); \n" +
" " +
" if( x.equals( x2 ) ) list.add( \"equals\" ); \n" +
" " +
" list.add( x.getAint( )[0] ); \n" +
"end \n" +
"\n" +
"rule \"Check\"\n" +
"when\n" +
" X( astr.length > 0, \n" +
" astr[0] == \"x\", \n" +
" $x : astr[1], \n" +
" aint[0] == 7 ) \n" +
"then\n" +
" list.add( $x );\n" +
"end \n" +
"";
KnowledgeBase kbase = loadKnowledgeBaseFromString( rule );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
List list = new ArrayList();
ksession.setGlobal( "list", list );
ksession.fireAllRules();
assertTrue( list.contains( "hash" ) );
assertTrue( list.contains( "equals" ) );
assertTrue( list.contains( 7 ) );
assertTrue( list.contains( "y11" ) );
assertTrue( list.contains( "y22" ) );
}
public static class Parent { }
public static class ChildA extends Parent {
private final int x;
public ChildA(int x) {
this.x = x;
}
public int getX() {
return x;
}
}
public static class ChildB extends Parent {
private final int x;
public ChildB(int x) {
this.x = x;
}
public int getX() {
return x;
}
}
@Test
public void testTypeUnsafe() throws Exception {
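// @typesafe(false) disables type-safe constraint compilation, so the constraint on "x" (declared only on the subclasses) is resolved dynamically at runtime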
String str = "import org.drools.integrationtests.MiscTest.*\n" +
"declare\n" +
" Parent @typesafe(false)\n" +
"end\n" +
"rule R1\n" +
"when\n" +
" $a : Parent( x == 1 )\n" +
"then\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
for (int i = 0; i < 20; i++) {
ksession.insert(new ChildA(i % 10));
ksession.insert(new ChildB(i % 10));
}
assertEquals(4, ksession.fireAllRules());
// give time to async jitting to complete
Thread.sleep(100);
ksession.insert(new ChildA(1));
ksession.insert(new ChildB(1));
assertEquals(2, ksession.fireAllRules());
}
@Test
public void testConstructorWithOtherDefaults() {
String str = "" +
"\n" +
"global java.util.List list;\n" +
"\n" +
"declare Bean\n" +
" kField : String @key\n" +
" sField : String = \"a\"\n" +
" iField : int = 10\n" +
" dField : double = 4.32\n" +
" aField : Long[] = new Long[] { 100L, 1000L }\n" +
"end" +
"\n" +
"rule \"Trig\"\n" +
"when\n" +
" Bean( kField == \"key\", sField == \"a\", iField == 10, dField == 4.32, aField[1] == 1000L ) \n" +
"then\n" +
" list.add( \"OK\" );\n" +
"end\n" +
"\n" +
"rule \"Exec\"\n" +
"when\n" +
"then\n" +
" insert( new Bean( \"key\") ); \n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
java.util.List list = new java.util.ArrayList();
ksession.setGlobal( "list", list );
ksession.fireAllRules();
assertTrue( list.contains( "OK" ) );
ksession.dispose();
}
@Test
public void testBindingToNullFieldWithEquality() {
// JBRULES-3396
String str = "package org.drools.test; \n" +
"\n" +
"global java.util.List list;" +
"\n" +
"declare Bean\n" +
" id : String @key\n" +
" field : String\n" +
"end\n" +
"\n" +
"\n" +
"rule \"Init\"\n" +
"when \n" +
"then\n" +
" insert( new Bean( \"x\" ) );\n" +
"end\n" +
"\n" +
"rule \"Check\"\n" +
"when\n" +
" $b : Bean( $fld : field )\n" +
"then\n" +
" System.out.println( $fld );\n" +
" list.add( \"OK\" ); \n" +
"end";
KnowledgeBaseConfiguration kbConf = KnowledgeBaseFactory.newKnowledgeBaseConfiguration();
kbConf.setOption(AssertBehaviorOption.EQUALITY);
KnowledgeBase kbase = loadKnowledgeBaseFromString(kbConf, str);
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
java.util.List list = new java.util.ArrayList();
ksession.setGlobal( "list", list );
ksession.fireAllRules();
assertTrue(list.contains("OK"));
ksession.dispose();
}
@Test
public void testCoercionOfStringValueWithoutQuotes() throws Exception {
// JBRULES-3080
String str = "package org.drools.test; \n" +
"declare A\n" +
" field : String\n" +
"end\n" +
"rule R when\n" +
" A( field == 12 )\n" +
"then\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
FactType typeA = kbase.getFactType( "org.drools.test", "A" );
Object a = typeA.newInstance();
typeA.set( a, "field", "12" );
ksession.insert( a );
assertEquals(1, ksession.fireAllRules());
}
@Test
public void testVarargConstraint() throws Exception {
// JBRULES-3268
String str = "package org.drools.test;\n" +
"import org.drools.integrationtests.MiscTest.VarargBean;\n" +
" global java.util.List list;\n" +
"\n" +
"rule R1 when\n" +
" VarargBean( isOddArgsNr(1, 2, 3) )\n" +
"then\n" +
" list.add(\"odd\");\n" +
"end\n" +
"rule R2 when\n" +
" VarargBean( isOddArgsNr(1, 2, 3, 4) )\n" +
"then\n" +
" list.add(\"even\");\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
List list = new ArrayList();
ksession.setGlobal( "list", list );
ksession.insert(new VarargBean());
ksession.fireAllRules();
assertEquals(1, list.size());
assertTrue(list.contains("odd"));
}
public static class VarargBean {
public boolean isOddArgsNr(int... args) {
return args.length % 2 == 1;
}
}
@Test
public void testPackageImportWithMvelDialect() throws Exception {
// JBRULES-2244
String str = "package org.drools.test;\n" +
"import org.drools.*\n" +
"dialect \"mvel\"\n" +
"rule R1 no-loop when\n" +
" $p : Person( )" +
" $c : Cheese( )" +
"then\n" +
" modify($p) { setCheese($c) };\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
Person p = new Person("Mario", 38);
ksession.insert(p);
Cheese c = new Cheese("Gorgonzola");
ksession.insert(c);
assertEquals(1, ksession.fireAllRules());
assertSame(c, p.getCheese());
}
@Test
public void testNoMvelSyntaxInFunctions() throws Exception {
// JBRULES-3433
String str = "import java.util.*;\n" +
"dialect \"mvel\"\n" +
"function Integer englishToInt(String englishNumber) { \n" +
" Map m = [\"one\":1, \"two\":2, \"three\":3, \"four\":4, \"five\":5]; \n" +
" Object obj = m.get(englishNumber.toLowerCase()); \n" +
" return Integer.parseInt(obj.toString()); \n" +
"}\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL );
assertTrue(kbuilder.hasErrors());
}
@Test
public void testMissingClosingBraceOnModify() throws Exception {
// JBRULES-3436
String str = "package org.drools.test;\n" +
"import org.drools.*\n" +
"rule R1 when\n" +
" $p : Person( )" +
" $c : Cheese( )" +
"then\n" +
" modify($p) { setCheese($c) ;\n" +
"end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL );
assertTrue( kbuilder.hasErrors() );
}
@Test
public void testPrimitiveToBoxedCoercionInMethodArgument() throws Exception {
String str = "package org.drools.test;\n" +
"import org.drools.integrationtests.MiscTest\n" +
"import org.drools.*\n" +
"rule R1 when\n" +
" Person( $ag1 : age )" +
" $p2 : Person( name == MiscTest.integer2String($ag1) )" +
"then\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
Person p = new Person("42", 42);
ksession.insert(p);
assertEquals(1, ksession.fireAllRules());
}
public static String integer2String(Integer value) {
return "" + value;
}
@Test
public void testKeyedInterfaceField() {
//JBRULES-3441
String str = "package org.drools.integrationtest; \n" +
"\n" +
"import org.drools.integrationtests.MiscTest.*; \n" +
"" +
"global java.util.List list;" +
"" +
"declare Bean\n" +
" id : InterfaceB @key\n" +
"end\n" +
"\n" +
"\n" +
"rule \"Init\"\n" +
"when \n" +
"then\n" +
" insert( new Bean( new ClassB() ) );\n" +
"end\n" +
"\n" +
"rule \"Check\"\n" +
"when\n" +
" $b : Bean( )\n" +
"then\n" +
" list.add( $b.hashCode() ); \n" +
" list.add( $b.equals( new Bean( new ClassB() ) ) ); \n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
java.util.List list = new java.util.ArrayList();
ksession.setGlobal( "list", list );
ksession.fireAllRules();
assertTrue( list.contains( 31 + 123 ) );
assertTrue( list.contains( true ) );
ksession.dispose();
}
@Test
public void testDeclaredTypeAsFieldForAnotherDeclaredType() {
// JBRULES-3468
String str = "package com.sample\n" +
"\n" +
"import com.sample.*;\n" +
"\n" +
"declare Item\n" +
" id : int;\n" +
"end\n" +
"\n" +
"declare Priority\n" +
" name : String;\n" +
" priority : int;\n" +
"end\n" +
"\n" +
"declare Cap\n" +
" item : Item;\n" +
" name : String\n" +
"end\n" +
"\n" +
"rule \"split cart into items\"\n" +
"when\n" +
"then\n" +
" insert(new Item(1));\n" +
" insert(new Item(2));\n" +
" insert(new Item(3));\n" +
"end\n" +
"\n" +
"rule \"Priorities\"\n" +
"when\n" +
"then\n" +
" insert(new Priority(\"A\", 3));\n" +
" insert(new Priority(\"B\", 2));\n" +
" insert(new Priority(\"C\", 5));\n" +
"end\n" +
"\n" +
"rule \"Caps\"\n" +
"when\n" +
" $i : Item()\n" +
" $p : Priority($name : name)\n" +
"then\n" +
" insert(new Cap($i, $name));\n" +
"end\n" +
"\n" +
"rule \"test\"\n" +
"when\n" +
" $i : Item()\n" +
" Cap(item.id == $i.id)\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
assertEquals(20, ksession.fireAllRules());
ksession.dispose();
}
@Test
public void testCheckDuplicateVariables() throws Exception {
// JBRULES-3035
String str = "package com.sample\n" +
"import org.drools.*\n" +
"rule R1 when\n" +
" Person( $a: age, $a: name ) // this should cause a compile-time error\n" +
"then\n" +
"end";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL );
assertTrue(kbuilder.hasErrors());
str = "package com.sample\n" +
"rule R1 when\n" +
" accumulate( Object(), $c: count(1), $c: max(1) ) // this should cause a compile-time error\n" +
"then\n" +
"end";
kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL );
assertTrue(kbuilder.hasErrors());
str = "package com.sample\n" +
"rule R1 when\n" +
" Number($i: intValue) from accumulate( Object(), $i: count(1) ) // this should cause a compile-time error\n" +
"then\n" +
"end";
kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL );
assertTrue(kbuilder.hasErrors());
}
@Test
public void testDeclaredTypesDefaultHashCode() {
// JBRULES-3481
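// declared types generate equals()/hashCode() from @key fields only; without keys they keep identity semantics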
String str = "package com.sample\n" +
"\n" +
"global java.util.List list; \n" +
"" +
"declare Bean\n" +
" id : int \n" +
"end\n" +
"\n" +
"declare KeyedBean\n" +
" id : int @key \n" +
"end\n" +
"\n" +
"\n" +
"rule Create\n" +
"when\n" +
"then\n" +
" list.add( new Bean(1) ); \n" +
" list.add( new Bean(2) ); \n" +
" list.add( new KeyedBean(1) ); \n" +
" list.add( new KeyedBean(1) ); \n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
List list = new ArrayList();
ksession.setGlobal( "list", list );
ksession.fireAllRules();
ksession.dispose();
assertFalse( list.get( 0 ).hashCode() == 34 );
assertFalse( list.get( 1 ).hashCode() == 34 );
assertFalse( list.get( 0 ).hashCode() == list.get( 1 ).hashCode() );
assertNotSame( list.get( 0 ), list.get( 1 ) );
assertFalse( list.get( 0 ).equals( list.get( 1 ) ) );
assertTrue( list.get( 2 ).hashCode() == 32 );
assertTrue( list.get( 3 ).hashCode() == 32 );
assertNotSame( list.get( 2 ), list.get( 3 ) );
assertTrue( list.get( 2 ).equals( list.get( 3 ) ) );
}
@Test
public void testJittingConstraintWithInvocationOnLiteral() {
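// verifies that constraint jitting copes with method invocations on a String literal ("mark") without errors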
String str = "package com.sample\n" +
"import org.drools.Person\n" +
"rule XXX when\n" +
" Person( name.toString().toLowerCase().contains( \"mark\".toString().toLowerCase() ) )\n" +
"then\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
ksession.insert(new Person("mark", 37));
ksession.insert(new Person("mario", 38));
ksession.fireAllRules();
ksession.dispose();
}
@Test
public void testJittingMethodWithCharSequenceArg() {
String str = "package com.sample\n" +
"import org.drools.Person\n" +
"rule XXX when\n" +
" Person( $n : name, $n.contains( \"mark\" ) )\n" +
"then\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
ksession.insert(new Person("mark", 37));
ksession.insert(new Person("mario", 38));
ksession.fireAllRules();
ksession.dispose();
}
@Test
public void testMapAccessorWithPrimitiveKey() {
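// exercises MVEL map access with a literal key, an arithmetic expression, a method-call key, and a missing key returning null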
String str = "package com.sample\n" +
"import org.drools.integrationtests.MiscTest.MapContainerBean\n" +
"rule R1 when\n" +
" MapContainerBean( map[1] == \"one\" )\n" +
"then\n" +
"end\n" +
"rule R2 when\n" +
" MapContainerBean( map[1+1] == \"two\" )\n" +
"then\n" +
"end\n" +
"rule R3 when\n" +
" MapContainerBean( map[this.get3()] == \"three\" )\n" +
"then\n" +
"end\n" +
"rule R4 when\n" +
" MapContainerBean( map[4] == null )\n" +
"then\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
ksession.insert(new MapContainerBean());
assertEquals(4, ksession.fireAllRules());
ksession.dispose();
}
public static class MapContainerBean {
private final Map<Integer, String> map = new HashMap<Integer, String>();
MapContainerBean() {
map.put(1, "one");
map.put(2, "two");
map.put(3, "three");
}
public Map<Integer, String> getMap() {
return map;
}
public int get3() {
return 3;
}
}
@Test
public void testFromWithStrictModeOff() {
// JBRULES-3533
String str =
"import java.util.Map;\n" +
"dialect \"mvel\"\n" +
"rule \"LowerCaseFrom\"\n" +
"when\n"+
" Map($valOne : this['keyOne'] !=null)\n" +
" $lowerValue : String() from $valOne.toLowerCase()\n" +
"then\n" +
" System.out.println( $valOne.toLowerCase() );\n" +
"end\n";
PackageBuilderConfiguration pkgBuilderCfg = new PackageBuilderConfiguration();
MVELDialectConfiguration mvelConf = (MVELDialectConfiguration) pkgBuilderCfg.getDialectConfiguration( "mvel" );
mvelConf.setStrict(false);
mvelConf.setLangLevel(5);
KnowledgeBase kbase = loadKnowledgeBaseFromString(pkgBuilderCfg, str);
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
Map<String,String> testMap = new HashMap<String,String>();
testMap.put("keyOne", "valone");
testMap.put("valTwo", "valTwo");
ksession.insert(testMap);
assertEquals(1, ksession.fireAllRules());
ksession.dispose();
}
@Test
public void testFromWithStrictModeOn() {
// JBRULES-3533
String str =
"import java.util.Map;\n" +
"dialect \"mvel\"\n" +
"rule \"LowerCaseFrom\"\n" +
"when\n"+
" Map($valOne : this['keyOne'] !=null)\n" +
" $lowerValue : String() from $valOne.toLowerCase()\n" +
"then\n" +
" System.out.println( $valOne.toLowerCase() );\n" +
"end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL );
assertTrue( kbuilder.hasErrors() );
}
@Test
public void testEntryPointWithVarIN() {
String str = "package org.drools.test;\n" +
"\n" +
"global java.util.List list;\n" +
"\n" +
"rule \"In\"\n" +
"when\n" +
" $x : Integer()\n " +
"then\n" +
" drools.getEntryPoint(\"inX\").insert( $x );\n" +
"end\n" +
"\n" +
"rule \"Out\"\n" +
"when\n" +
" $i : Integer() from entry-point \"inX\"\n" +
"then\n" +
" list.add( $i );\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
ksession.insert( 10 );
List res = new ArrayList();
ksession.setGlobal( "list", res );
ksession.fireAllRules();
ksession.dispose();
assertTrue( res.contains( 10 ) );
}
@Test
public void testArithmeticExpressionWithNull() {
// JBRULES-3568
String str = "import org.drools.integrationtests.MiscTest.PrimitiveBean;\n" +
"rule R when\n" +
" PrimitiveBean(primitive/typed > 0.7)\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
ksession.insert( new PrimitiveBean(0.9, 1.1) );
ksession.insert( new PrimitiveBean(0.9, null) );
assertEquals(1, ksession.fireAllRules());
ksession.dispose();
}
public static class PrimitiveBean {
public final double primitive;
public final Double typed;
public PrimitiveBean(double primitive, Double typed) {
this.primitive = primitive;
this.typed = typed;
}
public double getPrimitive() {
return primitive;
}
public Double getTyped() {
return typed;
}
}
@Test
public void testMvelMatches() {
String str = "package com.sample\n" +
"import org.drools.Person\n" +
"global java.util.List results;" +
"rule XXX when\n" +
" Person( $n : name ~= \"\\\\D.*\" )\n" +
"then\n" +
" results.add( $n ); \n " +
"end \n" +
"rule YY when\n" +
" Person( $a : age, $n : name ~= \"\\\\d\\\\D.*\" )\n" +
"then\n" +
" results.add( $a ); \n " +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
List res = new ArrayList();
ksession.setGlobal( "results", res );
ksession.insert(new Person("mark", 37));
ksession.insert(new Person("mario", 38));
ksession.insert(new Person("1mike", 44));
ksession.insert(new Person("52matt", 44));
ksession.fireAllRules();
ksession.dispose();
assertEquals( 3, res.size() );
assertTrue( res.contains( "mark" ) );
assertTrue( res.contains( "mario" ) );
assertTrue( res.contains( 44 ) );
}
@Test
public void testRuleFlowGroupWithLockOnActivate() {
// JBRULES-3590
String str = "import org.drools.Person;\n" +
"import org.drools.Cheese;\n" +
"rule R1\n" +
"ruleflow-group \"group1\"\n" +
"lock-on-active true\n" +
"when\n" +
" $p : Person()\n" +
"then\n" +
" $p.setName(\"John\");\n" +
" update ($p);\n" +
"end\n" +
"rule R2\n" +
"ruleflow-group \"group1\"\n" +
"lock-on-active true\n" +
"when\n" +
" $p : Person( name == null )\n" +
" forall ( Cheese ( type == \"cheddar\" ))\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
ksession.insert( new Person() );
ksession.insert( new Cheese("gorgonzola") );
((AgendaImpl)ksession.getAgenda()).activateRuleFlowGroup( "group1" );
assertEquals(1, ksession.fireAllRules());
ksession.dispose();
}
@Test
public void testInstanceof() throws Exception {
// JBRULES-3591
String str = "import org.drools.*;\n" +
"rule R1 when\n" +
" Person( address instanceof LongAddress )\n" +
"then\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
Person mark = new Person("mark");
mark.setAddress(new LongAddress("uk"));
ksession.insert(mark);
assertEquals(1, ksession.fireAllRules());
ksession.dispose();
}
@Test
public void testFromNodeWithMultipleBetas() throws Exception {
String str = "import org.drools.*;\n" +
"rule R1 when\n" +
" $p : Person( $name : name, $addresses : addresses )\n" +
" $c : Cheese( $type: type == $name )\n" +
" $a : Address( street == $type, suburb == $name ) from $addresses\n" +
"then\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
Person p = new Person("x");
p.addAddress(new Address("x", "x", "x"));
p.addAddress(new Address("y", "y", "y"));
ksession.insert(p);
ksession.insert(new Cheese("x"));
ksession.fireAllRules();
ksession.dispose();
}
@Test
public void testMvelFunctionWithDeclaredTypeArg() {
// JBRULES-3562
String rule = "package test; \n" +
"dialect \"mvel\"\n" +
"global java.lang.StringBuilder value;\n" +
"function String getFieldValue(Bean bean) {" +
" return bean.getField();" +
"}" +
"declare Bean \n" +
" field : String \n" +
"end \n" +
"\n" +
"rule R1 \n" +
"when \n" +
"then \n" +
" insert( new Bean( \"mario\" ) ); \n" +
"end \n" +
"\n" +
"rule R2 \n" +
"when \n" +
" $bean : Bean( ) \n" +
"then \n" +
" value.append( getFieldValue($bean) ); \n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( rule );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
StringBuilder sb = new StringBuilder();
ksession.setGlobal("value", sb);
ksession.fireAllRules();
assertEquals("mario", sb.toString());
ksession.dispose();
}
@Test
public void testGenericsList() throws Exception {
String str = "import org.drools.*;\n" +
"rule R1 when\n" +
" $c : Cheese( $type: type )\n" +
" $p : Person( $name : name, addresses.get(0).street == $type )\n" +
"then\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
Person p = new Person("x");
p.addAddress(new Address("x", "x", "x"));
p.addAddress(new Address("y", "y", "y"));
ksession.insert(p);
ksession.insert(new Cheese("x"));
assertEquals(1, ksession.fireAllRules());
ksession.dispose();
}
@Test
public void testGenericsOption() throws Exception {
// JBRULES-3579
String str = "import org.drools.*;\n" +
"rule R1 when\n" +
" $c : Cheese( $type: type )\n" +
" $p : Person( $name : name, addressOption.get.street == $type )\n" +
"then\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
Person p = new Person("x");
p.setAddress(new Address("x", "x", "x"));
ksession.insert(p);
ksession.insert(new Cheese("x"));
assertEquals(1, ksession.fireAllRules());
ksession.dispose();
}
@Test
public void testRHSClone() {
// JBRULES-3539
String str = "import java.util.Map;\n"+
"dialect \"mvel\"\n"+
"rule \"RHSClone\"\n"+
"when\n"+
" Map($valOne : this['keyOne'] !=null)\n"+
"then\n"+
" System.out.println( $valOne.clone() );\n"+
"end\n";
PackageBuilderConfiguration pkgBuilderCfg = new PackageBuilderConfiguration();
MVELDialectConfiguration mvelConf = (MVELDialectConfiguration) pkgBuilderCfg.getDialectConfiguration( "mvel" );
mvelConf.setStrict(false);
mvelConf.setLangLevel(5);
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(pkgBuilderCfg);
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),ResourceType.DRL );
KnowledgeBuilderErrors errors = kbuilder.getErrors();
if (errors.size() > 0) {
for (KnowledgeBuilderError error: errors) {
System.err.println(error);
}
fail("Could not parse knowledge");
}
}
}
|
drools-compiler/src/test/java/org/drools/integrationtests/MiscTest.java
|
/*
* Copyright 2005 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.integrationtests;
import static org.hamcrest.CoreMatchers.is;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.io.Reader;
import java.io.Serializable;
import java.io.StringReader;
import java.lang.reflect.InvocationTargetException;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.jar.JarEntry;
import java.util.jar.JarInputStream;
import org.acme.insurance.Driver;
import org.acme.insurance.Policy;
import org.drools.ActivationListenerFactory;
import org.drools.Address;
import org.drools.Attribute;
import org.drools.Bar;
import org.drools.Cat;
import org.drools.Cell;
import org.drools.Cheese;
import org.drools.CheeseEqual;
import org.drools.Cheesery;
import org.drools.Cheesery.Maturity;
import org.drools.Child;
import org.drools.ClassObjectFilter;
import org.drools.CommonTestMethodBase;
import org.drools.DomainObjectHolder;
import org.drools.FactA;
import org.drools.FactB;
import org.drools.FactC;
import org.drools.FactHandle;
import org.drools.FirstClass;
import org.drools.FromTestClass;
import org.drools.Guess;
import org.drools.IndexedNumber;
import org.drools.KnowledgeBase;
import org.drools.KnowledgeBaseConfiguration;
import org.drools.KnowledgeBaseFactory;
import org.drools.LongAddress;
import org.drools.Message;
import org.drools.MockPersistentSet;
import org.drools.Move;
import org.drools.ObjectWithSet;
import org.drools.Order;
import org.drools.OrderItem;
import org.drools.OuterClass;
import org.drools.Person;
import org.drools.PersonFinal;
import org.drools.PersonInterface;
import org.drools.PersonWithEquals;
import org.drools.Pet;
import org.drools.PolymorphicFact;
import org.drools.Primitives;
import org.drools.RandomNumber;
import org.drools.RuleBase;
import org.drools.RuleBaseConfiguration;
import org.drools.RuleBaseFactory;
import org.drools.SecondClass;
import org.drools.Sensor;
import org.drools.SpecialString;
import org.drools.State;
import org.drools.StatefulSession;
import org.drools.StatelessSession;
import org.drools.StockTick;
import org.drools.TestParam;
import org.drools.Triangle;
import org.drools.Win;
import org.drools.WorkingMemory;
import org.drools.audit.WorkingMemoryConsoleLogger;
import org.drools.base.RuleNameEndsWithAgendaFilter;
import org.drools.base.RuleNameEqualsAgendaFilter;
import org.drools.base.RuleNameMatchesAgendaFilter;
import org.drools.base.RuleNameStartsWithAgendaFilter;
import org.drools.builder.KnowledgeBuilder;
import org.drools.builder.KnowledgeBuilderConfiguration;
import org.drools.builder.KnowledgeBuilderError;
import org.drools.builder.KnowledgeBuilderErrors;
import org.drools.builder.KnowledgeBuilderFactory;
import org.drools.builder.ResourceType;
import org.drools.builder.conf.DefaultPackageNameOption;
import org.drools.command.CommandFactory;
import org.drools.command.Setter;
import org.drools.common.AbstractWorkingMemory;
import org.drools.common.DefaultAgenda;
import org.drools.common.DefaultFactHandle;
import org.drools.common.InternalFactHandle;
import org.drools.common.InternalWorkingMemory;
import org.drools.compiler.DescrBuildError;
import org.drools.compiler.DrlParser;
import org.drools.compiler.DroolsError;
import org.drools.compiler.PackageBuilder;
import org.drools.compiler.PackageBuilder.PackageMergeException;
import org.drools.compiler.PackageBuilderConfiguration;
import org.drools.compiler.ParserError;
import org.drools.compiler.xml.XmlDumper;
import org.drools.conf.AssertBehaviorOption;
import org.drools.definition.KnowledgePackage;
import org.drools.definition.rule.Rule;
import org.drools.definition.type.FactType;
import org.drools.event.ActivationCancelledEvent;
import org.drools.event.ActivationCreatedEvent;
import org.drools.event.AfterActivationFiredEvent;
import org.drools.event.AgendaEventListener;
import org.drools.event.AgendaGroupPoppedEvent;
import org.drools.event.AgendaGroupPushedEvent;
import org.drools.event.BeforeActivationFiredEvent;
import org.drools.event.DefaultWorkingMemoryEventListener;
import org.drools.event.ObjectInsertedEvent;
import org.drools.event.ObjectRetractedEvent;
import org.drools.event.ObjectUpdatedEvent;
import org.drools.event.RuleFlowGroupActivatedEvent;
import org.drools.event.RuleFlowGroupDeactivatedEvent;
import org.drools.event.WorkingMemoryEventListener;
import org.drools.impl.EnvironmentFactory;
import org.drools.impl.StatefulKnowledgeSessionImpl;
import org.drools.io.ResourceFactory;
import org.drools.lang.DrlDumper;
import org.drools.lang.descr.AttributeDescr;
import org.drools.lang.descr.PackageDescr;
import org.drools.lang.descr.RuleDescr;
import org.drools.marshalling.ObjectMarshallingStrategy;
import org.drools.marshalling.impl.ClassObjectMarshallingStrategyAcceptor;
import org.drools.marshalling.impl.IdentityPlaceholderResolverStrategy;
import org.drools.reteoo.LeftTuple;
import org.drools.reteoo.LeftTupleSource;
import org.drools.reteoo.ReteooWorkingMemory;
import org.drools.reteoo.RuleTerminalNode;
import org.drools.reteoo.TerminalNode;
import org.drools.reteoo.builder.BuildContext;
import org.drools.rule.GroupElement;
import org.drools.rule.InvalidRulePackage;
import org.drools.rule.MapBackedClassLoader;
import org.drools.rule.Package;
import org.drools.rule.builder.dialect.java.JavaDialectConfiguration;
import org.drools.rule.builder.dialect.mvel.MVELDialectConfiguration;
import org.drools.runtime.Environment;
import org.drools.runtime.EnvironmentName;
import org.drools.runtime.Globals;
import org.drools.runtime.StatefulKnowledgeSession;
import org.drools.runtime.conf.ClockTypeOption;
import org.drools.runtime.rule.WorkingMemoryEntryPoint;
import org.drools.runtime.rule.impl.AgendaImpl;
import org.drools.spi.ConsequenceExceptionHandler;
import org.drools.spi.GlobalResolver;
import org.drools.spi.PropagationContext;
import org.drools.time.SessionPseudoClock;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Mockito;
import org.mvel2.MVEL;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Run all the tests with the ReteOO engine implementation
*/
public class MiscTest extends CommonTestMethodBase {
private static Logger logger = LoggerFactory.getLogger(MiscTest.class);
@Test
public void testImportFunctions() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_ImportFunctions.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
final Cheese cheese = new Cheese( "stilton",
15 );
session.insert( cheese );
List list = new ArrayList();
session.setGlobal( "list",
list );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
int fired = session.fireAllRules();
list = (List) session.getGlobal( "list" );
assertEquals( 4,
fired );
assertEquals( 4,
list.size() );
assertEquals( "rule1",
list.get( 0 ) );
assertEquals( "rule2",
list.get( 1 ) );
assertEquals( "rule3",
list.get( 2 ) );
assertEquals( "rule4",
list.get( 3 ) );
}
@Test
public void testStaticFieldReference() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_StaticField.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
// will test serialisation of int and typesafe enums tests
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
List list = new ArrayList();
session.setGlobal( "list",
list );
final Cheesery cheesery1 = new Cheesery();
cheesery1.setStatus( Cheesery.SELLING_CHEESE );
cheesery1.setMaturity( Maturity.OLD );
session.insert( cheesery1 );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
final Cheesery cheesery2 = new Cheesery();
cheesery2.setStatus( Cheesery.MAKING_CHEESE );
cheesery2.setMaturity( Maturity.YOUNG );
session.insert( cheesery2 );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
session.fireAllRules();
assertEquals( 2,
list.size() );
assertEquals( cheesery1,
list.get( 0 ) );
assertEquals( cheesery2,
list.get( 1 ) );
}
@Test
public void testMetaConsequence() throws Exception {
final Package pkg = loadPackage( "test_MetaConsequence.drl" );
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
List results = new ArrayList();
session.setGlobal( "results",
results );
session.insert( new Person( "Michael" ) );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
results = (List) session.getGlobal( "results" );
session.fireAllRules();
assertEquals( 2,
results.size() );
assertEquals( "bar",
(results.get( 0 )) );
assertEquals( "bar2",
(results.get( 1 )) );
}
@Test
public void testEnabledExpression() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_enabledExpression.drl" ) ) );
final Package pkg = builder.getPackage();
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
List results = new ArrayList();
session.setGlobal( "results",
results );
session.insert( new Person( "Michael" ) );
// session = SerializationHelper.getSerialisedStatefulSession( session,
// ruleBase );
results = (List) session.getGlobal( "results" );
session.fireAllRules();
assertEquals( 3,
results.size() );
assertTrue( results.contains( "1" ) );
assertTrue( results.contains( "2" ) );
assertTrue( results.contains( "3" ) );
}
@Test
public void testGetStatefulKnowledgeSessions() throws Exception {
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "empty.drl",
getClass() ),
ResourceType.DRL );
assertFalse( kbuilder.hasErrors() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession_1 = createKnowledgeSession(kbase);
String expected_1 = "expected_1";
String expected_2 = "expected_2";
org.drools.runtime.rule.FactHandle handle_1 = ksession_1.insert( expected_1 );
org.drools.runtime.rule.FactHandle handle_2 = ksession_1.insert( expected_2 );
ksession_1.fireAllRules();
Collection<StatefulKnowledgeSession> coll_1 = kbase.getStatefulKnowledgeSessions();
assertTrue( coll_1.size() == 1 );
StatefulKnowledgeSession ksession_2 = coll_1.iterator().next();
Object actual_1 = ksession_2.getObject( handle_1 );
Object actual_2 = ksession_2.getObject( handle_2 );
assertEquals( expected_1,
actual_1 );
assertEquals( expected_2,
actual_2 );
ksession_1.dispose();
Collection<StatefulKnowledgeSession> coll_2 = kbase.getStatefulKnowledgeSessions();
assertTrue( coll_2.size() == 0 );
// here to make sure it's safe to call dispose() twice
ksession_1.dispose();
Collection<StatefulKnowledgeSession> coll_3 = kbase.getStatefulKnowledgeSessions();
assertTrue( coll_3.size() == 0 );
}
@Test
public void testGetFactHandle() throws Exception {
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "empty.drl",
getClass() ),
ResourceType.DRL );
assertFalse( kbuilder.hasErrors() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
for ( int i = 0; i < 20; i++ ) {
Object object = new Object();
ksession.insert( object );
org.drools.runtime.rule.FactHandle factHandle = ksession.getFactHandle( object );
assertNotNull( factHandle );
assertEquals( object,
ksession.getObject( factHandle ) );
}
ksession.dispose();
}
@Test
public void testPrimitiveArray() throws Exception {
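// checks that primitive and object array fields can be used in constraints and survive session serialisation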
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_primitiveArray.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
List result = new ArrayList();
session.setGlobal( "result",
result );
final Primitives p1 = new Primitives();
p1.setPrimitiveIntArray( new int[]{1, 2, 3} );
p1.setArrayAttribute( new String[]{"a", "b"} );
session.insert( p1 );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
result = (List) session.getGlobal( "result" );
session.fireAllRules();
assertEquals( 3,
result.size() );
assertEquals( 3,
((Integer) result.get( 0 )).intValue() );
assertEquals( 2,
((Integer) result.get( 1 )).intValue() );
assertEquals( 3,
((Integer) result.get( 2 )).intValue() );
}
@Test
public void testMVELSoundex() throws Exception {
// read in the source
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "MVEL_soundex.drl",
getClass() ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
fail( kbuilder.getErrors().toString() );
}
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
kbase = SerializationHelper.serializeObject( kbase );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession( ksession,
true );
Cheese c = new Cheese( "fubar",
2 );
ksession.insert( c );
ksession.fireAllRules();
assertEquals( 42,
c.getPrice() );
}
@Test
public void testMVELSoundexNoCharParam() throws Exception {
// read in the source
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "MVEL_soundexNPE2500.drl",
getClass() ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
fail( kbuilder.getErrors().toString() );
}
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
kbase = SerializationHelper.serializeObject( kbase );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession( ksession,
true );
Cheese foobarCheese = new Cheese( "foobar",
2 );
Cheese nullCheese = new Cheese( null,
2 );
Cheese starCheese = new Cheese( "*",
2 );
ksession.insert( foobarCheese );
ksession.insert( nullCheese );
ksession.insert( starCheese );
ksession.fireAllRules();
assertEquals( 42,
foobarCheese.getPrice() );
assertEquals( 2,
nullCheese.getPrice() );
assertEquals( 2,
starCheese.getPrice() );
}
@Test
public void testMVELRewrite() throws Exception {
// read in the source
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "test_MVELrewrite.drl",
getClass() ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
fail( kbuilder.getErrors().toString() );
}
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession( ksession,
true );
List results = new ArrayList();
ksession.setGlobal( "results",
results );
Cheese brie = new Cheese( "brie",
2 );
Cheese stilton = new Cheese( "stilton",
2 );
Cheesery cheesery = new Cheesery();
cheesery.addCheese( brie );
cheesery.addCheese( stilton );
ksession.insert( cheesery );
ksession.fireAllRules();
assertEquals( 1,
results.size() );
assertEquals( cheesery,
results.get( 0 ) );
}
@Test
public void testVariableDeclaration() throws Exception {
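// checks that a plain local variable declaration compiles inside an MVEL consequence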
String str = "rule KickOff\n" +
"dialect \"mvel\"\n" +
"when\n" +
"then\n" +
"int i;\n" +
"end";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
fail( kbuilder.getErrors().toString() );
}
}
@Test
public void testMissingImport() throws Exception {
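// a reference to a class that was never imported must produce a build error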
String str = "";
str += "package org.drools \n";
str += "import org.drools.Person\n";
str += "global java.util.List list \n";
str += "rule rule1 \n";
str += "when \n";
str += " $i : Cheese() \n";
str += " MissingClass( fieldName == $i ) \n";
str += "then \n";
str += " list.add( $i ); \n";
str += "end \n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
logger.warn( kbuilder.getErrors().toString() );
}
assertTrue( kbuilder.hasErrors() );
}
@Test
public void testInvalidModify1() throws Exception {
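// modify() without a block of setters is invalid and must produce a build error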
String str = "";
str += "package org.drools \n";
str += "import org.drools.Person\n";
str += "global java.util.List list \n";
str += "rule rule1 \n";
str += " no-loop \n";
str += "when \n";
str += " $i : Cheese() \n";
str += "then \n";
str += " modify( $i ); ";
str += " list.add( $i ); \n";
str += "end \n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
logger.warn( kbuilder.getErrors().toString() );
}
assertTrue( kbuilder.hasErrors() );
}
@Test
public void testInvalidModify2() throws Exception {
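// a malformed modify() block must also be rejected by the builder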
String str = "";
str += "package org.drools \n";
str += "import org.drools.Person\n";
str += "global java.util.List list \n";
str += "rule rule1 \n";
str += " no-loop \n";
str += "when \n";
str += " $i : Cheese() \n";
str += "then \n";
str += " modify( $i ) { setType( \"stilton\" ); setType( \"stilton\" );}; ";
str += " list.add( $i ); \n";
str += "end \n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
logger.warn( kbuilder.getErrors().toString() );
}
assertTrue( kbuilder.hasErrors() );
}
@Test
public void testIncrementOperator() throws Exception {
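// checks that compound assignment (i += 5) works in a java dialect consequence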
String str = "";
str += "package org.drools \n";
str += "global java.util.List list \n";
str += "rule rule1 \n";
str += " dialect \"java\" \n";
str += "when \n";
str += " $I : Integer() \n";
str += "then \n";
str += " int i = $I.intValue(); \n";
str += " i += 5; \n";
str += " list.add( i ); \n";
str += "end \n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
logger.warn( kbuilder.getErrors().toString() );
}
assertFalse( kbuilder.hasErrors() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List list = new ArrayList();
ksession.setGlobal( "list",
list );
ksession.insert( 5 );
ksession.fireAllRules();
assertEquals( 1,
list.size() );
assertEquals( 10,
list.get( 0 ) );
}
@Test
public void testKnowledgeRuntimeAccess() throws Exception {
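// checks that a consequence can reach the knowledge runtime through the 'drools' helper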
String str = "";
str += "package org.test\n";
str += "import org.drools.Message\n";
str += "rule \"Hello World\"\n";
str += "when\n";
str += " Message( )\n";
str += "then\n";
str += " System.out.println( drools.getKnowledgeRuntime() );\n";
str += "end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
kbase = SerializationHelper.serializeObject( kbase );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.insert( new Message( "help" ) );
ksession.fireAllRules();
ksession.dispose();
}
@Test
public void testEvalWithBigDecimal() throws Exception {
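// checks that eval() can call BigDecimal methods on a bound fact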
String str = "";
str += "package org.drools \n";
str += "import java.math.BigDecimal; \n";
str += "global java.util.List list \n";
str += "rule rule1 \n";
str += " dialect \"java\" \n";
str += "when \n";
str += " $bd : BigDecimal() \n";
str += " eval( $bd.compareTo( BigDecimal.ZERO ) > 0 ) \n";
str += "then \n";
str += " list.add( $bd ); \n";
str += "end \n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
logger.warn( kbuilder.getErrors().toString() );
}
assertFalse( kbuilder.hasErrors() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List list = new ArrayList();
ksession.setGlobal( "list",
list );
ksession.insert( new BigDecimal( 1.5 ) );
ksession.fireAllRules();
assertEquals( 1,
list.size() );
assertEquals( new BigDecimal( 1.5 ),
list.get( 0 ) );
}
@Test
public void testCustomGlobalResolver() throws Exception {
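// checks that globals can be resolved through a custom GlobalResolver backed by a Map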
final Package pkg = loadPackage( "test_globalCustomResolver.drl" );
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
WorkingMemory workingMemory = ruleBase.newStatefulSession();
final Map map = new HashMap();
List list = new ArrayList();
String string = "stilton";
map.put( "list",
list );
map.put( "string",
string );
workingMemory.setGlobalResolver( new GlobalResolver() {
public void readExternal( ObjectInput in ) throws IOException,
ClassNotFoundException {
}
public void writeExternal( ObjectOutput out ) throws IOException {
}
public Object resolveGlobal( String identifier ) {
return map.get( identifier );
}
public void setGlobal( String identifier,
Object value ) {
map.put( identifier,
value );
}
public Object get( String identifier ) {
return resolveGlobal( identifier );
}
public void set( String identifier,
Object value ) {
setGlobal( identifier,
value );
}
public void setDelegate( Globals delegate ) {
}
} );
workingMemory.fireAllRules();
assertEquals( 1,
list.size() );
assertEquals( 1,
list.size() );
assertEquals( new Integer( 5 ),
list.get( 0 ) );
}
@Test
public void testCustomGlobalResolverWithWorkingMemoryObject() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_globalCustomResolver.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final Map map = new HashMap();
List list = new ArrayList();
String string = "stilton";
map.put( "list",
list );
map.put( "string",
string );
workingMemory.setGlobalResolver( new GlobalResolver() {
public Object resolveGlobal( String identifier ) {
return map.get( identifier );
}
public void setGlobal( String identifier,
Object value ) {
map.put( identifier,
value );
}
public void readExternal( ObjectInput in ) throws IOException,
ClassNotFoundException {
}
public void writeExternal( ObjectOutput out ) throws IOException {
}
public Object get( String identifier ) {
return resolveGlobal( identifier );
}
public void set( String identifier,
Object value ) {
setGlobal( identifier,
value );
}
public void setDelegate( Globals delegate ) {
}
} );
Cheese brie = new Cheese();
brie.setPrice( 100 );
workingMemory.insert( brie );
workingMemory.fireAllRules();
assertEquals( 2,
list.size() );
assertEquals( 5, list.get( 0 ) );
assertEquals( 6, list.get( 1 ) );
}
@Test
public void testFieldBindingsAndEvalSharing() throws Exception {
final String drl = "test_FieldBindingsAndEvalSharing.drl";
evalSharingTest( drl );
}
@Test
public void testFieldBindingsAndPredicateSharing() throws Exception {
final String drl = "test_FieldBindingsAndPredicateSharing.drl";
evalSharingTest( drl );
}
private void evalSharingTest( final String drl ) throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( drl ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
List list = new ArrayList();
session.setGlobal( "list",
list );
final TestParam tp1 = new TestParam();
tp1.setValue2( "boo" );
session.insert( tp1 );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
session.fireAllRules();
assertEquals( 1,
((List) session.getGlobal( "list" )).size() );
}
@Test
public void testGeneratedBeans1() throws Exception {
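// exercises the declared (generated) bean API: creating instances, reading and writing fields, and using them as facts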
final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newInputStreamResource( getClass().getResourceAsStream( "test_GeneratedBeans.drl" ) ),
ResourceType.DRL );
assertFalse( kbuilder.getErrors().toString(),
kbuilder.hasErrors() );
KnowledgePackage kpkg = kbuilder.getKnowledgePackages().toArray( new KnowledgePackage[1] )[0];
assertEquals( 2,
kpkg.getRules().size() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
// test kbase serialization
kbase = SerializationHelper.serializeObject( kbase );
// Retrieve the generated fact type
FactType cheeseFact = kbase.getFactType( "org.drools.generatedbeans",
"Cheese" );
// Create a new Fact instance
Object cheese = cheeseFact.newInstance();
// Set a field value using the more verbose method chain...
// should we add short cuts?
// cheeseFact.getField( "type" ).set( cheese,
// "stilton" );
cheeseFact.set( cheese,
"type",
"stilton" );
assertEquals( "stilton",
cheeseFact.get( cheese,
"type" ) );
FactType personType = kbase.getFactType( "org.drools.generatedbeans",
"Person" );
Object ps = personType.newInstance();
personType.set( ps,
"age",
42 );
Map<String, Object> personMap = personType.getAsMap( ps );
assertEquals( 42,
personMap.get( "age" ) );
personMap.put( "age",
43 );
personType.setFromMap( ps,
personMap );
assertEquals( 43,
personType.get( ps,
"age" ) );
// just documenting toString() result:
// assertEquals( "Cheese( type=stilton )",
// cheese.toString() );
// reading the field attribute, using the method chain
assertEquals( "stilton",
cheeseFact.getField( "type" ).get( cheese ) );
// creating a stateful session
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
Object cg = cheeseFact.newInstance();
ksession.setGlobal( "cg",
cg );
List<Object> result = new ArrayList<Object>();
ksession.setGlobal( "list",
result );
// inserting fact
ksession.insert( cheese );
// firing rules
ksession.fireAllRules();
// checking results
assertEquals( 1,
result.size() );
assertEquals( new Integer( 5 ),
result.get( 0 ) );
// creating a person that likes the cheese:
// Retrieve the generated fact type
FactType personFact = kbase.getFactType( "org.drools.generatedbeans",
"Person" );
// Create a new Fact instance
Object person = personFact.newInstance();
// Set a field value using the more verbose method chain...
// should we add short cuts?
personFact.getField( "likes" ).set( person,
cheese );
// demonstrating primitive type support
personFact.getField( "age" ).set( person,
7 );
// just documenting toString() result:
// assertEquals( "Person( age=7, likes=Cheese( type=stilton ) )",
// person.toString() );
// inserting fact
ksession.insert( person );
// firing rules
ksession.fireAllRules();
// checking results
assertEquals( 2,
result.size() );
assertEquals( person,
result.get( 1 ) );
}
@Test
public void testGeneratedBeansMVEL() throws Exception {
final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newInputStreamResource( getClass().getResourceAsStream( "test_GeneratedBeansMVEL.drl" ) ),
ResourceType.DRL );
assertFalse( kbuilder.getErrors().toString(),
kbuilder.hasErrors() );
KnowledgePackage kpkg = kbuilder.getKnowledgePackages().toArray( new KnowledgePackage[1] )[0];
assertEquals( 1,
kpkg.getRules().size() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
// test kbase serialization
kbase = SerializationHelper.serializeObject( kbase );
// Retrieve the generated fact type
FactType pf = kbase.getFactType( "mortgages",
"Applicant" );
FactType af = kbase.getFactType( "mortgages",
"LoanApplication" );
Object person = pf.newInstance();
pf.set( person,
"creditRating",
"OK" );
Object application = af.newInstance();
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.insert( person );
ksession.insert( application );
ksession.fireAllRules();
}
@Test
public void testGeneratedBeans2() throws Exception {
final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newInputStreamResource( getClass().getResourceAsStream( "test_GeneratedBeans2.drl" ) ),
ResourceType.DRL );
assertFalse( kbuilder.getErrors().toString(),
kbuilder.hasErrors() );
KnowledgePackage kpkg = kbuilder.getKnowledgePackages().toArray( new KnowledgePackage[1] )[0];
assertEquals( 2,
kpkg.getRules().size() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
// test kbase serialization
kbase = SerializationHelper.serializeObject( kbase );
// Retrieve the generated fact type
FactType cheeseFact = kbase.getFactType( "org.drools.generatedbeans",
"Cheese" );
// Create a new Fact instance
Object cheese = cheeseFact.newInstance();
cheeseFact.set( cheese,
"type",
"stilton" );
assertEquals( "stilton",
cheeseFact.get( cheese,
"type" ) );
// testing equals method
Object cheese2 = cheeseFact.newInstance();
cheeseFact.set( cheese2,
"type",
"stilton" );
assertEquals( cheese,
cheese2 );
FactType personType = kbase.getFactType( "org.drools.generatedbeans",
"Person" );
Object ps = personType.newInstance();
personType.set( ps,
"name",
"mark" );
personType.set( ps,
"last",
"proctor" );
personType.set( ps,
"age",
42 );
Object ps2 = personType.newInstance();
personType.set( ps2,
"name",
"mark" );
personType.set( ps2,
"last",
"proctor" );
personType.set( ps2,
"age",
30 );
assertEquals( ps,
ps2 );
personType.set( ps2,
"last",
"little" );
assertFalse( ps.equals( ps2 ) );
// creating a stateful session
StatefulKnowledgeSession wm = createKnowledgeSession(kbase);
Object cg = cheeseFact.newInstance();
wm.setGlobal( "cg",
cg );
List result = new ArrayList();
wm.setGlobal( "list",
result );
// inserting fact
wm.insert( cheese );
// firing rules
wm.fireAllRules();
// checking results
assertEquals( 1,
result.size() );
assertEquals( new Integer( 5 ),
result.get( 0 ) );
// creating a person that likes the cheese:
// Retrieve the generated fact type
FactType personFact = kbase.getFactType( "org.drools.generatedbeans",
"Person" );
// Create a new Fact instance
Object person = personFact.newInstance();
// Set a field value using the more verbose method chain...
// should we add short cuts?
personFact.getField( "likes" ).set( person,
cheese );
// demonstrating primitive type support
personFact.getField( "age" ).set( person,
7 );
// just documenting toString() result:
// assertEquals( "Person( age=7, likes=Cheese( type=stilton ) )",
// person.toString() );
// inserting fact
wm.insert( person );
// firing rules
wm.fireAllRules();
// checking results
assertEquals( 2,
result.size() );
assertEquals( person,
result.get( 1 ) );
}
@Test
public void testDeclaredFactAndFunction() throws Exception {
String rule = "package com.jboss.qa;\n";
rule += "global java.util.List list\n";
rule += "declare Address\n";
rule += " street: String\n";
rule += "end\n";
rule += "function void myFunction() {\n";
rule += "}\n";
rule += "rule \"r1\"\n";
rule += " dialect \"mvel\"\n";
rule += "when\n";
rule += " Address()\n";
rule += "then\n";
rule += " list.add(\"r1\");\n";
rule += "end\n";
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new StringReader( rule ) );
final Package pkg = builder.getPackage();
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
StatefulSession session = ruleBase.newStatefulSession();
List list = new ArrayList();
session.setGlobal( "list",
list );
FactType addressFact = ruleBase.getFactType( "com.jboss.qa.Address" );
Object address = addressFact.newInstance();
session.insert( address );
session.fireAllRules();
list = (List) session.getGlobal( "list" );
assertEquals( 1,
list.size() );
assertEquals( "r1",
list.get( 0 ) );
}
@Test
public void testTypeDeclarationOnSeparateResource() throws Exception {
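// checks that a type declared in one resource is visible to rules compiled from another resource in the same package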
System.setProperty( "drools.dump.dir", "target" );
String file1 = "package a.b.c\n" +
"declare SomePerson\n" +
" weight : double\n" +
" height : double\n" +
"end\n";
String file2 = "package a.b.c\n" +
"import org.drools.*\n" +
"declare Holder\n" +
" person : Person\n" +
"end\n" +
"rule \"create holder\"\n" +
" when\n" +
" person : Person( )\n" +
" not (\n" +
" Holder( person; )\n" +
" )\n" +
" then\n" +
" insert(new Holder(person));\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( file1 , file2 );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
assertEquals( 0,
ksession.fireAllRules() );
ksession.insert( new org.drools.Person("Bob") );
assertEquals( 1,
ksession.fireAllRules() );
assertEquals( 0,
ksession.fireAllRules() );
}
@Test
public void testUppercaseField() throws Exception {
String rule = "package org.drools.test;\n";
rule += "global java.util.List list\n";
rule += "declare Address\n";
rule += " Street: String\n";
rule += "end\n";
rule += "rule \"r1\"\n";
rule += "when\n";
rule += " Address($street: Street)\n";
rule += "then\n";
rule += " list.add($street);\n";
rule += "end\n";
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new StringReader( rule ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
KnowledgeBase kbase = loadKnowledgeBaseFromString( rule );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.setGlobal( "list",
new ArrayList<String>() );
FactType addressType = kbase.getFactType( "org.drools.test",
"Address" );
Object address = addressType.newInstance();
addressType.set( address,
"Street",
"5th Avenue" );
ksession.insert( address );
ksession.fireAllRules();
List list = (List) ksession.getGlobal( "list" );
assertEquals( 1,
list.size() );
assertEquals( "5th Avenue",
list.get( 0 ) );
ksession.dispose();
}
@Test
public void testUppercaseField2() throws Exception {
String rule = "package org.drools\n" +
"declare SomeFact\n" +
" Field : String\n" +
" aField : String\n" +
"end\n" +
"rule X\n" +
"when\n" +
" SomeFact( Field == \"foo\", aField == \"bar\" )\n" +
"then\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( rule );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
FactType factType = kbase.getFactType( "org.drools",
"SomeFact" );
Object fact = factType.newInstance();
factType.set( fact,
"Field",
"foo" );
factType.set( fact,
"aField",
"bar" );
ksession.insert( fact );
int rules = ksession.fireAllRules();
assertEquals( 1,
rules );
ksession.dispose();
}
@Test
public void testNullHandling() throws Exception {
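// checks that null field values in facts are handled safely by constraints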
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_NullHandling.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
// add the package to a rulebase
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
final List list = new ArrayList();
session.setGlobal( "list",
list );
final Cheese nullCheese = new Cheese( null,
2 );
session.insert( nullCheese );
final Person notNullPerson = new Person( "shoes butt back" );
notNullPerson.setBigDecimal( new BigDecimal( "42.42" ) );
session.insert( notNullPerson );
Person nullPerson = new Person( "whee" );
nullPerson.setBigDecimal( null );
session.insert( nullPerson );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
session.fireAllRules();
//System.out.println(((List) session.getGlobal("list")).get(0));
assertEquals( 3,
((List) session.getGlobal( "list" )).size() );
nullPerson = new Person( null );
session.insert( nullPerson );
session.fireAllRules();
assertEquals( 4,
((List) session.getGlobal( "list" )).size() );
}
@Test
public void testNullFieldOnCompositeSink() throws Exception {
final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "test_NullFieldOnCompositeSink.drl",
getClass() ),
ResourceType.DRL );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
kbase = SerializationHelper.serializeObject( kbase );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List list = new ArrayList();
ksession.setGlobal( "list",
list );
ksession.insert( new Attribute() );
ksession.insert( new Message() );
ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession( ksession,
true );
ksession.fireAllRules();
assertEquals( 1,
((List) ksession.getGlobal( "list" )).size() );
assertEquals( "X",
((List) ksession.getGlobal( "list" )).get( 0 ) );
}
@Test
public void testEmptyPattern() throws Exception {
// pre build the package
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_EmptyPattern.drl" ) ) );
final Package pkg = builder.getPackage();
// add the package to a rulebase
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
List list = new ArrayList();
session.setGlobal( "list",
list );
final Cheese stilton = new Cheese( "stilton",
5 );
session.insert( stilton );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
session.fireAllRules();
assertEquals( 5,
((List) session.getGlobal( "list" )).get( 0 ) );
}
@Test
public void testExplicitAnd() throws Exception {
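// checks that an explicit 'and' between patterns fires only once both facts are present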
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "test_ExplicitAnd.drl",
getClass() ),
ResourceType.DRL );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
final List list = new ArrayList();
ksession.setGlobal( "list",
list );
ksession.insert( new Message( "hola" ) );
ksession.fireAllRules();
assertEquals( 0,
list.size() );
ksession.insert( new Cheese( "brie",
33 ) );
ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession( ksession,
true );
ksession.fireAllRules();
assertEquals( 1,
((List) ksession.getGlobal( "list" )).size() );
}
@Test
public void testHelloWorld() throws Exception {
// read in the source
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "HelloWorld.drl",
getClass() ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
fail( kbuilder.getErrors().toString() );
}
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
final List list = new ArrayList();
ksession.setGlobal( "list",
list );
// go !
final Message message = new Message( "hola" );
message.addToList( "hello" );
message.setNumber( 42 );
ksession.insert( message );
ksession.insert( "boo" );
// workingMemory = SerializationHelper.serializeObject(workingMemory);
ksession.fireAllRules();
assertTrue( message.isFired() );
assertEquals( message,
((List) ksession.getGlobal( "list" )).get( 0 ) );
}
@Test
public void testExtends() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "extend_rule_test.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
//ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
//Test 2 levels of inheritance, and basic rule
List list = new ArrayList();
session.setGlobal( "list",
list );
final Cheese mycheese = new Cheese( "cheddar",
4 );
FactHandle handle = session.insert( mycheese );
session.fireAllRules();
assertEquals( 2,
list.size() );
assertEquals( "rule 4",
list.get( 0 ) );
assertEquals( "rule 2b",
list.get( 1 ) );
//Test 2nd level (parent) to make sure rule honors the extend rule
list = new ArrayList();
session.setGlobal( "list",
list );
session.retract( handle );
final Cheese mycheese2 = new Cheese( "notcheddar",
4 );
FactHandle handle2 = session.insert( mycheese2 );
session.fireAllRules();
assertEquals( "rule 4",
list.get( 0 ) );
assertEquals( 1,
list.size() );
//Test 3 levels of inheritance, all levels
list = new ArrayList();
session.setGlobal( "list",
list );
session.retract( handle2 );
final Cheese mycheese3 = new Cheese( "stilton",
6 );
FactHandle handle3 = session.insert( mycheese3 );
session.fireAllRules();
//System.out.println(list.toString());
assertEquals( "rule 3",
list.get( 0 ) );
assertEquals( 1,
list.size() );
//Test 3 levels of inheritance, third only
list = new ArrayList();
session.setGlobal( "list",
list );
session.retract( handle3 );
final Cheese mycheese4 = new Cheese( "notstilton",
6 );
FactHandle handle4 = session.insert( mycheese4 );
session.fireAllRules();
//System.out.println(((List) session.getGlobal( "list" )).toString());
assertTrue( ((List) session.getGlobal( "list" )).size() == 0 );
//Test 3 levels of inheritance, 2nd only
list = new ArrayList();
session.setGlobal( "list",
list );
session.retract( handle4 );
final Cheese mycheese5 = new Cheese( "stilton",
7 );
FactHandle handle5 = session.insert( mycheese5 );
session.fireAllRules();
//System.out.println(((List) session.getGlobal( "list" )).toString());
assertEquals( 0,
list.size() );
}
@Test
public void testExtends2() {
final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
try {
kbuilder.add( ResourceFactory.newInputStreamResource( getClass().getResourceAsStream( "test_RuleExtend.drl" ) ),
ResourceType.DRL );
assertFalse( kbuilder.getErrors().toString(),
kbuilder.hasErrors() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
final StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
final List results = new ArrayList();
ksession.setGlobal( "results",
results );
final Cheese stilton = new Cheese( "stilton",
5 );
final Cheese cheddar = new Cheese( "cheddar",
7 );
final Cheese brie = new Cheese( "brie",
5 );
ksession.insert( stilton );
ksession.insert( cheddar );
ksession.insert( brie );
ksession.fireAllRules();
assertEquals( 2,
results.size() );
assertEquals( "stilton",
results.get( 0 ) );
assertEquals( "brie",
results.get( 1 ) );
} catch ( Exception e ) {
e.printStackTrace();
if ( kbuilder.hasErrors() ) logger.info( kbuilder.getErrors().toString() );
fail( "Unexpected exception: " + e.getMessage() );
}
}
@Test
public void testLatinLocale() throws Exception {
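// checks that numeric literals in DRL parse correctly under a locale that uses comma as the decimal separator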
Locale defaultLoc = Locale.getDefault();
try {
// setting a locale that uses COMMA as decimal separator
Locale.setDefault( new Locale( "pt",
"BR" ) );
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newInputStreamResource( getClass().getResourceAsStream( "test_LatinLocale.drl" ) ),
ResourceType.DRL );
assertFalse( kbuilder.getErrors().toString(),
kbuilder.hasErrors() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
final List<String> results = new ArrayList<String>();
ksession.setGlobal( "results",
results );
final Cheese mycheese = new Cheese( "cheddar",
4 );
org.drools.runtime.rule.FactHandle handle = ksession.insert( mycheese );
ksession.fireAllRules();
assertEquals( 1,
results.size() );
assertEquals( "1",
results.get( 0 ) );
mycheese.setPrice( 8 );
mycheese.setDoublePrice( 8.50 );
ksession.update( handle,
mycheese );
ksession.fireAllRules();
assertEquals( 2,
results.size() );
assertEquals( "3",
results.get( 1 ) );
} finally {
Locale.setDefault( defaultLoc );
}
}
@Test
public void testLiteral() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "literal_rule_test.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
final List list = new ArrayList();
session.setGlobal( "list",
list );
final Cheese stilton = new Cheese( "stilton",
5 );
session.insert( stilton );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
session.fireAllRules();
assertEquals( "stilton",
((List) session.getGlobal( "list" )).get( 0 ) );
}
@Test
public void testLiteralWithEscapes() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_literal_with_escapes.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
final List list = new ArrayList();
session.setGlobal( "list",
list );
String expected = "s\tti\"lto\nn";
final Cheese stilton = new Cheese( expected,
5 );
session.insert( stilton );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
int fired = session.fireAllRules();
assertEquals( 1,
fired );
assertEquals( expected,
((List) session.getGlobal( "list" )).get( 0 ) );
}
@Test
public void testLiteralWithBoolean() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "literal_with_boolean.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
final List list = new ArrayList();
session.setGlobal( "list",
list );
final PersonInterface bill = new Person( "bill",
null,
12 );
bill.setAlive( true );
session.insert( bill );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
session.fireAllRules();
assertEquals( bill,
((List) session.getGlobal( "list" )).get( 0 ) );
}
@Test
public void testFactBindings() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_FactBindings.drl" ) ) );
final Package pkg = builder.getPackage();
// add the package to a rulebase
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List events = new ArrayList();
final WorkingMemoryEventListener listener = new DefaultWorkingMemoryEventListener() {
public void objectUpdated( ObjectUpdatedEvent event ) {
events.add( event );
}
};
workingMemory.addEventListener( listener );
final Person bigCheese = new Person( "big cheese" );
final Cheese cheddar = new Cheese( "cheddar",
15 );
bigCheese.setCheese( cheddar );
final FactHandle bigCheeseHandle = workingMemory.insert( bigCheese );
final FactHandle cheddarHandle = workingMemory.insert( cheddar );
workingMemory.fireAllRules();
ObjectUpdatedEvent event = (ObjectUpdatedEvent) events.get( 0 );
assertSame( cheddarHandle,
event.getFactHandle() );
assertSame( cheddar,
event.getOldObject() );
assertSame( cheddar,
event.getObject() );
event = (ObjectUpdatedEvent) events.get( 1 );
assertSame( bigCheeseHandle,
event.getFactHandle() );
assertSame( bigCheese,
event.getOldObject() );
assertSame( bigCheese,
event.getObject() );
}
@Test
public void testPropertyChangeSupportOldAPI() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_PropertyChange.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
Environment env = EnvironmentFactory.newEnvironment();
env.set( EnvironmentName.OBJECT_MARSHALLING_STRATEGIES,
new ObjectMarshallingStrategy[]{
new IdentityPlaceholderResolverStrategy( ClassObjectMarshallingStrategyAcceptor.DEFAULT )} );
StatefulSession session = ruleBase.newStatefulSession( null,
env );
final List list = new ArrayList();
session.setGlobal( "list",
list );
final State state = new State( "initial" );
session.insert( state,
true );
session.fireAllRules();
assertEquals( 1,
((List) session.getGlobal( "list" )).size() );
state.setFlag( true );
assertEquals( 1,
((List) session.getGlobal( "list" )).size() );
session.fireAllRules();
assertEquals( 2,
((List) session.getGlobal( "list" )).size() );
state.setState( "finished" );
StatefulKnowledgeSession ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession( new StatefulKnowledgeSessionImpl( (ReteooWorkingMemory) session ),
// MarshallerFactory.newIdentityMarshallingStrategy(),
false );
ksession.fireAllRules();
assertEquals( 3,
((List) session.getGlobal( "list" )).size() );
session.dispose();
// checks that the session removed itself from the bean listeners list
assertEquals( 0,
state.getPropertyChangeListeners().length );
}
@Test
public void testPropertyChangeSupportNewAPI() throws Exception {
final KnowledgeBuilder builder = KnowledgeBuilderFactory.newKnowledgeBuilder();
builder.add( ResourceFactory.newInputStreamResource( getClass().getResourceAsStream( "test_PropertyChangeTypeDecl.drl" ) ),
ResourceType.DRL );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Collection<KnowledgePackage> pkgs = builder.getKnowledgePackages();
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( pkgs );
kbase = SerializationHelper.serializeObject( kbase );
StatefulKnowledgeSession session = createKnowledgeSession(kbase);
final List list = new ArrayList();
session.setGlobal( "list",
list );
final State state = new State( "initial" );
session.insert( state );
session.fireAllRules();
assertEquals( 1,
((List) session.getGlobal( "list" )).size() );
state.setFlag( true );
assertEquals( 1,
((List) session.getGlobal( "list" )).size() );
session.fireAllRules();
assertEquals( 2,
((List) session.getGlobal( "list" )).size() );
state.setState( "finished" );
session.dispose();
// checks that the session removed itself from the bean listeners list
assertEquals( 0,
state.getPropertyChangeListeners().length );
}
@Test
public void testDisconnectedFactHandle() {
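// checks that a handle reconstructed from its external form still resolves to the original object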
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
DefaultFactHandle helloHandle = (DefaultFactHandle) ksession.insert( "hello" );
DefaultFactHandle goodbyeHandle = (DefaultFactHandle) ksession.insert( "goodbye" );
org.drools.runtime.rule.FactHandle key = new DefaultFactHandle( helloHandle.toExternalForm() );
assertEquals( "hello",
ksession.getObject( key ) );
key = new DefaultFactHandle( goodbyeHandle.toExternalForm() );
assertEquals( "goodbye",
ksession.getObject( key ) );
}
@Test
public void testBigDecimal() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "big_decimal_and_comparable.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
final List list = new ArrayList();
session.setGlobal( "list",
list );
final PersonInterface bill = new Person( "bill",
null,
42 );
bill.setBigDecimal( new BigDecimal( "42" ) );
final PersonInterface ben = new Person( "ben",
null,
43 );
ben.setBigDecimal( new BigDecimal( "43" ) );
session.insert( bill );
session.insert(new Cheese("gorgonzola", 43));
session.insert( ben );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
session.fireAllRules();
assertEquals( 1,
((List) session.getGlobal( "list" )).size() );
}
@Test
public void testBigDecimalIntegerLiteral() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "big_decimal_and_literal.drl" ) ) );
final Package pkg = builder.getPackage();
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
StatefulSession session = ruleBase.newStatefulSession();
final List list = new ArrayList();
session.setGlobal( "list",
list );
final PersonInterface bill = new Person( "bill",
null,
12 );
bill.setBigDecimal( new BigDecimal( "42" ) );
bill.setBigInteger( new BigInteger( "42" ) );
session.insert( bill );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
session.fireAllRules();
assertEquals( 6,
((List) session.getGlobal( "list" )).size() );
}
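// Checks that a BigDecimal constant can feed a 'from' source and be compared inside an eval() without compilation errors.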
@Test
public void testBigDecimalWithFromAndEval() throws Exception {
String rule = "package org.test;\n";
rule += "rule \"Test Rule\"\n";
rule += "when\n";
rule += " $dec : java.math.BigDecimal() from java.math.BigDecimal.TEN;\n";
rule += " eval( $dec.compareTo(java.math.BigDecimal.ONE) > 0 )\n";
rule += "then\n";
rule += " System.out.println(\"OK!\");\n";
rule += "end";
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new StringReader( rule ) );
assertFalse( builder.getErrors().toString(),
builder.hasErrors() );
final Package pkg = builder.getPackage();
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
StatefulSession session = ruleBase.newStatefulSession();
session.fireAllRules();
}
@Test
public void testImport() throws Exception {
// Same package as this test
String rule = "";
rule += "package org.drools.integrationtests;\n";
rule += "import java.lang.Math;\n";
rule += "rule \"Test Rule\"\n";
rule += " dialect \"mvel\"\n";
rule += " when\n";
rule += " then\n";
// Can't handle the TestFact.TEST
rule += " new TestFact(TestFact.TEST);\n";
rule += "end";
KnowledgeBuilder builder = KnowledgeBuilderFactory.newKnowledgeBuilder();
builder.add( ResourceFactory.newByteArrayResource( rule.getBytes() ),
ResourceType.DRL );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
try {
kbase.addKnowledgePackages( builder.getKnowledgePackages() );
} catch ( Exception e ) {
e.printStackTrace();
fail( "Should execute with out exceptions" );
}
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.fireAllRules();
}
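// Exercises MVEL map/array indexing (m["content"], indexed list access) inside a rule consequence and checks the value reaches the global list.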
@Test
public void testMVELConsequenceWithMapsAndArrays() throws Exception {
String rule = "package org.test;\n";
rule += "import java.util.ArrayList\n";
rule += "import java.util.HashMap\n";
rule += "global java.util.List list\n";
rule += "rule \"Test Rule\"\n";
rule += " dialect \"mvel\"";
rule += "when\n";
rule += "then\n";
rule += " m = new HashMap();\n";
rule += " l = new ArrayList();\n";
rule += " l.add(\"first\");\n";
rule += " m.put(\"content\", l);\n";
rule += " System.out.println(((ArrayList)m[\"content\"])[0]);\n";
rule += " list.add(((ArrayList)m[\"content\"])[0]);\n";
rule += "end";
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new StringReader( rule ) );
final Package pkg = builder.getPackage();
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
StatefulSession session = ruleBase.newStatefulSession();
List list = new ArrayList();
session.setGlobal( "list",
list );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
session.fireAllRules();
assertEquals( 1,
((List) session.getGlobal( "list" )).size() );
assertEquals( "first",
((List) session.getGlobal( "list" )).get( 0 ) );
}
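// evalmodify.drl should copy the value 9 from the first cell into the second via modify; the identity marshalling strategy keeps object references stable across session serialisation.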
@Test
public void testCell() throws Exception {
final Cell cell1 = new Cell( 9 );
final Cell cell = new Cell( 0 );
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "evalmodify.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
RuleBase ruleBase = getSinglethreadRuleBase();
Package pkg = builder.getPackage();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
Environment env = EnvironmentFactory.newEnvironment();
env.set( EnvironmentName.OBJECT_MARSHALLING_STRATEGIES,
new ObjectMarshallingStrategy[]{
new IdentityPlaceholderResolverStrategy( ClassObjectMarshallingStrategyAcceptor.DEFAULT )} );
StatefulSession session = ruleBase.newStatefulSession( null,
env );
session.insert( cell1 );
FactHandle cellHandle = session.insert( cell );
StatefulKnowledgeSession ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession( new StatefulKnowledgeSessionImpl( (ReteooWorkingMemory) session ),
// MarshallerFactory.newIdentityMarshallingStrategy(),
false );
session.fireAllRules();
assertEquals( 9,
cell.getValue() );
}
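// Builds nested_fields.drl and also re-parses it with DrlParser to assert the package and rule level 'dialect' attributes are captured.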
@Test
public void testNesting() throws Exception {
Person p = new Person();
p.setName( "Michael" );
Address add1 = new Address();
add1.setStreet( "High" );
Address add2 = new Address();
add2.setStreet( "Low" );
List l = new ArrayList();
l.add( add1 );
l.add( add2 );
p.setAddresses( l );
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "nested_fields.drl" ) ) );
assertFalse( builder.getErrors().toString(),
builder.hasErrors() );
DrlParser parser = new DrlParser();
PackageDescr desc = parser.parse( new InputStreamReader( getClass().getResourceAsStream( "nested_fields.drl" ) ) );
List packageAttrs = desc.getAttributes();
assertEquals( 1,
desc.getRules().size() );
assertEquals( 1,
packageAttrs.size() );
RuleDescr rule = (RuleDescr) desc.getRules().get( 0 );
Map<String, AttributeDescr> ruleAttrs = rule.getAttributes();
assertEquals( 1,
ruleAttrs.size() );
assertEquals( "mvel",
((AttributeDescr) ruleAttrs.get( "dialect" )).getValue() );
assertEquals( "dialect",
((AttributeDescr) ruleAttrs.get( "dialect" )).getName() );
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( builder.getPackage() );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
session.insert( p );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
session.fireAllRules();
}
@Test
public void testOr() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "or_test.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
final List list = new ArrayList();
session.setGlobal( "list",
list );
final Cheese cheddar = new Cheese( "cheddar",
5 );
final FactHandle h = session.insert( cheddar );
session.fireAllRules();
// just one added
assertEquals( "got cheese",
list.get( 0 ) );
assertEquals( 1,
list.size() );
session.retract( h );
session.fireAllRules();
// still just one
assertEquals( 1,
list.size() );
session.insert( new Cheese( "stilton",
5 ) );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
session.fireAllRules();
// now have one more
assertEquals( 2,
((List) session.getGlobal( "list" )).size() );
}
@Test
public void testEval() throws Exception {
KnowledgeBase kbase = loadKnowledgeBase( "eval_rule_test.drl" );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
ksession.setGlobal( "five",
new Integer( 5 ) );
final List list = new ArrayList();
ksession.setGlobal( "list",
list );
final Cheese stilton = new Cheese( "stilton",
5 );
ksession.insert( stilton );
ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession( ksession,
true );
ksession.fireAllRules();
assertEquals( stilton,
((List) ksession.getGlobal( "list" )).get( 0 ) );
}
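// Same scenario as testEval, but forces the JANINO compiler via the knowledge builder configuration.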
@Test
public void testJaninoEval() throws Exception {
KnowledgeBuilderConfiguration kbconf = KnowledgeBuilderFactory.newKnowledgeBuilderConfiguration();
kbconf.setProperty( JavaDialectConfiguration.JAVA_COMPILER_PROPERTY, "JANINO" );
KnowledgeBase kbase = loadKnowledgeBase( kbconf, "eval_rule_test.drl" );
kbase = SerializationHelper.serializeObject( kbase );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
ksession.setGlobal( "five",
new Integer( 5 ) );
final List list = new ArrayList();
ksession.setGlobal( "list",
list );
final Cheese stilton = new Cheese( "stilton",
5 );
ksession.insert( stilton );
ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession( ksession,
true );
ksession.fireAllRules();
assertEquals( stilton,
((List) ksession.getGlobal( "list" )).get( 0 ) );
}
@Test
public void testEvalMore() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "eval_rule_test_more.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
final List list = new ArrayList();
session.setGlobal( "list",
list );
final Person foo = new Person( "foo" );
session.insert( foo );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
session.fireAllRules();
assertEquals( foo,
((List) session.getGlobal( "list" )).get( 0 ) );
}
@Test
public void testReturnValue() throws Exception {
KnowledgeBase kbase = loadKnowledgeBase( "returnvalue_rule_test.drl" );
kbase = SerializationHelper.serializeObject( kbase );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
ksession.setGlobal( "two",
new Integer( 2 ) );
final List list = new ArrayList();
ksession.setGlobal( "list",
list );
final PersonInterface peter = new Person( "peter",
null,
12 );
ksession.insert( peter );
final PersonInterface jane = new Person( "jane",
null,
10 );
ksession.insert( jane );
ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession( ksession, true );
ksession.fireAllRules();
assertEquals( jane,
((List) ksession.getGlobal( "list" )).get( 0 ) );
assertEquals( peter,
((List) ksession.getGlobal( "list" )).get( 1 ) );
}
@Test
public void testPredicate() throws Exception {
KnowledgeBase kbase = loadKnowledgeBase( "predicate_rule_test.drl" );
kbase = SerializationHelper.serializeObject( kbase );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
ksession.setGlobal( "two",
new Integer( 2 ) );
final List list = new ArrayList();
ksession.setGlobal( "list",
list );
final PersonInterface peter = new Person( "peter",
null,
12 );
ksession.insert( peter );
final PersonInterface jane = new Person( "jane",
null,
10 );
ksession.insert( jane );
ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession( ksession,
true );
ksession.fireAllRules();
assertEquals( jane,
((List) ksession.getGlobal( "list" )).get( 0 ) );
assertEquals( peter,
((List) ksession.getGlobal( "list" )).get( 1 ) );
}
@Test
public void testNullBehaviour() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "null_behaviour.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
final PersonInterface p1 = new Person( "michael",
"food",
40 );
final PersonInterface p2 = new Person( null,
"drink",
30 );
session.insert( p1 );
session.insert( p2 );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
session.fireAllRules();
}
@Test
public void testNullConstraint() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "null_constraint.drl" ) ) );
if ( builder.hasErrors() ) {
for ( DroolsError error : builder.getErrors().getErrors() ) {
logger.warn( error.toString() );
}
}
assertFalse( builder.getErrors().toString(),
builder.hasErrors() );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
final List foo = new ArrayList();
session.setGlobal( "messages",
foo );
final PersonInterface p1 = new Person( null,
"food",
40 );
final Primitives p2 = new Primitives();
p2.setArrayAttribute( null );
session.insert( p1 );
session.insert( p2 );
session = SerializationHelper.getSerialisedStatefulSession( session,
ruleBase );
session.fireAllRules();
assertEquals( 2,
((List) session.getGlobal( "messages" )).size() );
}
@Test
public void testBasicFrom() throws Exception {
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "test_From.drl",
getClass() ),
ResourceType.DRL );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
kbase = SerializationHelper.serializeObject( kbase );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
final List list1 = new ArrayList();
ksession.setGlobal( "list1",
list1 );
final List list2 = new ArrayList();
ksession.setGlobal( "list2",
list2 );
final List list3 = new ArrayList();
ksession.setGlobal( "list3",
list3 );
final Cheesery cheesery = new Cheesery();
final Cheese stilton = new Cheese( "stilton",
12 );
final Cheese cheddar = new Cheese( "cheddar",
15 );
cheesery.addCheese( stilton );
cheesery.addCheese( cheddar );
ksession.setGlobal( "cheesery",
cheesery );
ksession.insert( cheesery );
Person p = new Person( "stilton" );
ksession.insert( p );
ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession( ksession,
true );
ksession.fireAllRules();
ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession( ksession,
true );
ksession.fireAllRules();
// from using a global
assertEquals( 2,
((List) ksession.getGlobal( "list1" )).size() );
assertEquals( cheddar,
((List) ksession.getGlobal( "list1" )).get( 0 ) );
assertEquals( stilton,
((List) ksession.getGlobal( "list1" )).get( 1 ) );
// from using a declaration
assertEquals( 2,
((List) ksession.getGlobal( "list2" )).size() );
assertEquals( cheddar,
((List) ksession.getGlobal( "list2" )).get( 0 ) );
assertEquals( stilton,
((List) ksession.getGlobal( "list2" )).get( 1 ) );
// from using a declaration
assertEquals( 1,
((List) ksession.getGlobal( "list3" )).size() );
assertEquals( stilton,
((List) ksession.getGlobal( "list3" )).get( 0 ) );
}
@Test
public void testFromWithParams() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_FromWithParams.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
WorkingMemory workingMemory = ruleBase.newStatefulSession();
List list = new ArrayList();
final Object globalObject = new Object();
workingMemory.setGlobal( "list",
list );
workingMemory.setGlobal( "testObject",
new FromTestClass() );
workingMemory.setGlobal( "globalObject",
globalObject );
final Person bob = new Person( "bob" );
workingMemory.insert( bob );
// TODO java.io.NotSerializableException: org.mvel.util.FastList
// workingMemory = SerializationHelper.serializeObject(workingMemory);
workingMemory.fireAllRules();
assertEquals( 6,
((List) workingMemory.getGlobal( "list" )).size() );
final List array = (List) ((List) workingMemory.getGlobal( "list" )).get( 0 );
assertEquals( 3,
array.size() );
final Person p = (Person) array.get( 0 );
assertEquals( p,
bob );
assertEquals( new Integer( 42 ),
array.get( 1 ) );
final List nested = (List) array.get( 2 );
assertEquals( "x",
nested.get( 0 ) );
assertEquals( "y",
nested.get( 1 ) );
final Map map = (Map) ((List) workingMemory.getGlobal( "list" )).get( 1 );
assertEquals( 2,
map.keySet().size() );
assertTrue( map.keySet().contains( bob ) );
assertEquals( globalObject,
map.get( bob ) );
assertTrue( map.keySet().contains( "key1" ) );
final Map nestedMap = (Map) map.get( "key1" );
assertEquals( 1,
nestedMap.keySet().size() );
assertTrue( nestedMap.keySet().contains( "key2" ) );
assertEquals( "value2",
nestedMap.get( "key2" ) );
assertEquals( new Integer( 42 ),
((List) workingMemory.getGlobal( "list" )).get( 2 ) );
assertEquals( "literal",
((List) workingMemory.getGlobal( "list" )).get( 3 ) );
assertEquals( bob,
((List) workingMemory.getGlobal( "list" )).get( 4 ) );
assertEquals( globalObject,
((List) workingMemory.getGlobal( "list" )).get( 5 ) );
}
@Test
public void testFromWithNewConstructor() throws Exception {
DrlParser parser = new DrlParser();
PackageDescr descr = parser.parse( new InputStreamReader( getClass().getResourceAsStream( "test_FromWithNewConstructor.drl" ) ) );
PackageBuilder builder = new PackageBuilder();
builder.addPackage( descr );
Package pkg = builder.getPackage();
pkg.checkValidity();
pkg = SerializationHelper.serializeObject( pkg );
}
/**
* @see JBRULES-1415 Certain uses of from causes NullPointerException in WorkingMemoryLogger
*/
@Test
public void testFromDeclarationWithWorkingMemoryLogger() throws Exception {
String rule = "package org.test;\n";
rule += "import org.drools.Cheesery\n";
rule += "import org.drools.Cheese\n";
rule += "global java.util.List list\n";
rule += "rule \"Test Rule\"\n";
rule += "when\n";
rule += " $cheesery : Cheesery()\n";
rule += " Cheese( $type : type) from $cheesery.cheeses\n";
rule += "then\n";
rule += " list.add( $type );\n";
rule += "end";
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new StringReader( rule ) );
final Package pkg = builder.getPackage();
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
StatefulSession session = ruleBase.newStatefulSession();
List list = new ArrayList();
session.setGlobal( "list",
list );
Cheesery cheesery = new Cheesery();
cheesery.addCheese( new Cheese( "stilton",
22 ) );
session.insert( cheesery );
// TODO java.io.EOFException
// session = SerializationHelper.serializeObject(session);
session.fireAllRules();
assertEquals( 1,
((List) session.getGlobal( "list" )).size() );
assertEquals( "stilton",
((List) session.getGlobal( "list" )).get( 0 ) );
}
@Test
public void testWithInvalidRule() throws Exception {
final PackageBuilderConfiguration conf = new PackageBuilderConfiguration();
final JavaDialectConfiguration jconf = (JavaDialectConfiguration) conf.getDialectConfiguration( "java" );
jconf.setCompiler( JavaDialectConfiguration.ECLIPSE );
final PackageBuilder builder = new PackageBuilder( conf );
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "invalid_rule.drl" ) ) );
final Package pkg = builder.getPackage();
// Mark: please check if the consequence should/shouldn't be built
// Rule badBoy = pkg.getRules()[0];
// assertFalse(badBoy.isValid());
RuntimeException runtime = null;
// adding this invalid package should blow up all over the place.
RuleBase ruleBase = getSinglethreadRuleBase();
try {
ruleBase.addPackage( pkg );
fail( "Should have thrown an exception as the rule is NOT VALID." );
} catch ( final RuntimeException e ) {
assertNotNull( e.getMessage() );
runtime = e;
}
ruleBase = SerializationHelper.serializeObject( ruleBase );
assertTrue( builder.getErrors().getErrors().length > 0 );
final String pretty = builder.getErrors().toString();
assertFalse( pretty.equals( "" ) );
assertEquals( pretty,
runtime.getMessage() );
}
@Test
public void testWithInvalidRule2() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "invalid_rule2.drl" ) ) );
assertTrue( builder.hasErrors() );
String err = builder.getErrors().toString();
logger.info( err );
}
@Test
public void testErrorLineNumbers() throws Exception {
// this test aims to test semantic errors
// parser errors are another test case
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "errors_in_rule.drl" ) ) );
final Package pkg = builder.getPackage();
DroolsError[] errors = builder.getErrors().getErrors();
assertEquals( 3,
errors.length );
final DescrBuildError stiltonError = (DescrBuildError) errors[0];
assertTrue( stiltonError.getMessage().contains( "Stilton" ) );
assertNotNull( stiltonError.getDescr() );
assertTrue( stiltonError.getLine() != -1 );
// check that it's getting it from the ruleDescr
assertEquals( stiltonError.getLine(),
stiltonError.getDescr().getLine() );
// check the absolute error line number (there are more).
assertEquals( 11,
stiltonError.getLine() );
final DescrBuildError poisonError = (DescrBuildError) errors[1];
assertTrue( poisonError.getMessage().contains( "Poison" ) );
assertEquals( 13,
poisonError.getLine() );
assertTrue( errors[2].getMessage().contains( "add" ) );
// now check the RHS, not being too specific yet, as long as it has the
// rule's line number, not zero
final DescrBuildError rhsError = (DescrBuildError) errors[2];
assertTrue( rhsError.getLine() >= 8 && rhsError.getLine() <= 17 ); // TODO this should be 16
}
@Test
public void testErrorsParser() throws Exception {
final DrlParser parser = new DrlParser();
assertEquals( 0,
parser.getErrors().size() );
parser.parse( new InputStreamReader( getClass().getResourceAsStream( "errors_parser_multiple.drl" ) ) );
assertTrue( parser.hasErrors() );
assertTrue( parser.getErrors().size() > 0 );
assertTrue( parser.getErrors().get( 0 ) instanceof ParserError );
final ParserError first = ((ParserError) parser.getErrors().get( 0 ));
assertTrue( first.getMessage() != null );
assertFalse( first.getMessage().equals( "" ) );
}
@Test
public void testAssertRetract() throws Exception {
// postponed while I sort out KnowledgeHelperFixer
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "assert_retract.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "list",
list );
final PersonInterface person = new Person( "michael",
"cheese" );
person.setStatus( "start" );
workingMemory.insert( person );
// TODO org.drools.spi.ConsequenceException: org.drools.FactException: Update error: handle not found for object:
// workingMemory = SerializationHelper.serializeObject(workingMemory);
workingMemory.fireAllRules();
List<String> results = (List<String>) workingMemory.getGlobal( "list" );
for( String result : results ) {
logger.info( result );
}
assertEquals( 5,
results.size() );
assertTrue( results.contains( "first" ) );
assertTrue( results.contains( "second" ) );
assertTrue( results.contains( "third" ) );
assertTrue( results.contains( "fourth" ) );
assertTrue( results.contains( "fifth" ) );
}
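// A predicate constraint in the first pattern must not fire against the wrong facts: both cheese prices should remain unchanged.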
@Test
public void testPredicateAsFirstPattern() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "predicate_as_first_pattern.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final Cheese mussarela = new Cheese( "Mussarela",
35 );
workingMemory.insert( mussarela );
final Cheese provolone = new Cheese( "Provolone",
20 );
workingMemory.insert( provolone );
workingMemory.fireAllRules();
assertEquals( "The rule is being incorrectly fired",
35,
mussarela.getPrice() );
assertEquals( "Rule is incorrectly being fired",
20,
provolone.getPrice() );
}
@Test
public void testConsequenceException() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_ConsequenceException.drl" ) ) );
final Package pkg = builder.getPackage();
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final Cheese brie = new Cheese( "brie",
12 );
workingMemory.insert( brie );
try {
workingMemory.fireAllRules();
fail( "Should throw an Exception from the Consequence" );
} catch ( final org.drools.runtime.rule.ConsequenceException e ) {
assertEquals( "Throw Consequence Exception",
e.getActivation().getRule().getName() );
assertEquals( "this should throw an exception",
e.getCause().getMessage() );
}
}
@Test
public void testCustomConsequenceException() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_ConsequenceException.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBaseConfiguration conf = new RuleBaseConfiguration();
conf.setConsequenceExceptionHandler( CustomConsequenceExceptionHandler.class.getName() );
RuleBase ruleBase = getRuleBase( conf );
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final Cheese brie = new Cheese( "brie",
12 );
workingMemory.insert( brie );
workingMemory.fireAllRules();
assertTrue( ((CustomConsequenceExceptionHandler) ((DefaultAgenda) workingMemory.getAgenda()).getConsequenceExceptionHandler()).isCalled() );
}
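// Handler installed by testCustomConsequenceException: it only records that it was invoked instead of rethrowing the consequence exception.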
public static class CustomConsequenceExceptionHandler
implements
ConsequenceExceptionHandler {
private boolean called;
public void handleException( org.drools.spi.Activation activation,
org.drools.WorkingMemory workingMemory,
Exception exception ) {
this.called = true;
}
public boolean isCalled() {
return this.called;
}
public void readExternal( ObjectInput in ) throws IOException,
ClassNotFoundException {
called = in.readBoolean();
}
public void writeExternal( ObjectOutput out ) throws IOException {
out.writeBoolean( called );
}
}
@Test
public void testEvalException() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_EvalException.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final Cheese brie = new Cheese( "brie",
12 );
try {
workingMemory.insert( brie );
workingMemory.fireAllRules();
fail( "Should throw an Exception from the Eval" );
} catch ( final Exception e ) {
assertEquals( "this should throw an exception",
e.getCause().getMessage() );
}
}
@Test
public void testPredicateException() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_PredicateException.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final Cheese brie = new Cheese( "brie",
12 );
try {
workingMemory.insert( brie );
workingMemory.fireAllRules();
fail( "Should throw an Exception from the Predicate" );
} catch ( final Exception e ) {
Throwable cause = e.getCause();
if (cause instanceof InvocationTargetException) {
cause = ((InvocationTargetException)cause).getTargetException();
}
assertTrue( cause.getMessage().contains( "this should throw an exception" ) );
}
}
@Test
public void testReturnValueException() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_ReturnValueException.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final Cheese brie = new Cheese( "brie",
12 );
try {
workingMemory.insert( brie );
workingMemory.fireAllRules();
fail( "Should throw an Exception from the ReturnValue" );
} catch ( final Exception e ) {
Throwable root = e;
while (root.getCause() != null) root = root.getCause();
assertTrue( root.getMessage().contains( "this should throw an exception" ) );
}
}
@Test
public void testMultiRestrictionFieldConstraint() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_MultiRestrictionFieldConstraint.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list1 = new ArrayList();
workingMemory.setGlobal( "list1",
list1 );
final List list2 = new ArrayList();
workingMemory.setGlobal( "list2",
list2 );
final List list3 = new ArrayList();
workingMemory.setGlobal( "list3",
list3 );
final List list4 = new ArrayList();
workingMemory.setGlobal( "list4",
list4 );
final Person youngChili1 = new Person( "young chili1" );
youngChili1.setAge( 12 );
youngChili1.setHair( "blue" );
final Person youngChili2 = new Person( "young chili2" );
youngChili2.setAge( 25 );
youngChili2.setHair( "purple" );
final Person chili1 = new Person( "chili1" );
chili1.setAge( 35 );
chili1.setHair( "red" );
final Person chili2 = new Person( "chili2" );
chili2.setAge( 38 );
chili2.setHair( "indigigo" );
final Person oldChili1 = new Person( "old chili1" );
oldChili1.setAge( 45 );
oldChili1.setHair( "green" );
final Person oldChili2 = new Person( "old chili2" );
oldChili2.setAge( 48 );
oldChili2.setHair( "blue" );
workingMemory.insert( youngChili1 );
workingMemory.insert( youngChili2 );
workingMemory.insert( chili1 );
workingMemory.insert( chili2 );
workingMemory.insert( oldChili1 );
workingMemory.insert( oldChili2 );
workingMemory.fireAllRules();
assertEquals( 1,
list1.size() );
assertTrue( list1.contains( chili1 ) );
assertEquals( 2,
list2.size() );
assertTrue( list2.contains( chili1 ) );
assertTrue( list2.contains( chili2 ) );
assertEquals( 2,
list3.size() );
assertTrue( list3.contains( youngChili1 ) );
assertTrue( list3.contains( youngChili2 ) );
assertEquals( 2,
list4.size() );
assertTrue( list4.contains( youngChili1 ) );
assertTrue( list4.contains( chili1 ) );
}
@Test
@Ignore
public void testDumpers() throws Exception {
final DrlParser parser = new DrlParser();
final PackageDescr pkg = parser.parse( new InputStreamReader( getClass().getResourceAsStream( "test_Dumpers.drl" ) ) );
if ( parser.hasErrors() ) {
for ( DroolsError error : parser.getErrors() ) {
logger.warn( error.toString() );
}
fail( parser.getErrors().toString() );
}
PackageBuilder builder = new PackageBuilder();
builder.addPackage( pkg );
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( builder.getPackage() );
ruleBase = SerializationHelper.serializeObject( ruleBase );
WorkingMemory workingMemory = ruleBase.newStatefulSession();
List list = new ArrayList();
workingMemory.setGlobal( "list",
list );
final Cheese brie = new Cheese( "brie",
12 );
workingMemory.insert( brie );
workingMemory.fireAllRules();
assertEquals( 3,
list.size() );
assertEquals( "3 1",
list.get( 0 ) );
assertEquals( "MAIN",
list.get( 1 ) );
assertEquals( "1 1",
list.get( 2 ) );
final DrlDumper drlDumper = new DrlDumper();
final String drlResult = drlDumper.dump( pkg );
builder = new PackageBuilder();
builder.addPackageFromDrl( new StringReader( drlResult ) );
if ( builder.hasErrors() ) {
for ( DroolsError error : builder.getErrors().getErrors() ) {
logger.warn( error.toString() );
}
fail( parser.getErrors().toString() );
}
ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( builder.getPackage() );
workingMemory = ruleBase.newStatefulSession();
list = new ArrayList();
workingMemory.setGlobal( "list",
list );
workingMemory.insert( brie );
workingMemory.fireAllRules();
assertEquals( 3,
list.size() );
assertEquals( "3 1",
list.get( 0 ) );
assertEquals( "MAIN",
list.get( 1 ) );
assertEquals( "1 1",
list.get( 2 ) );
final XmlDumper xmlDumper = new XmlDumper();
final String xmlResult = xmlDumper.dump( pkg );
// System.out.println( xmlResult );
builder = new PackageBuilder();
builder.addPackageFromXml( new StringReader( xmlResult ) );
if ( builder.hasErrors() ) {
for ( DroolsError error : builder.getErrors().getErrors() ) {
logger.warn( error.toString() );
}
fail( parser.getErrors().toString() );
}
ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( builder.getPackage() );
workingMemory = ruleBase.newStatefulSession();
list = new ArrayList();
workingMemory.setGlobal( "list",
list );
workingMemory.insert( brie );
workingMemory.fireAllRules();
assertEquals( 3,
list.size() );
assertEquals( "3 1",
list.get( 0 ) );
assertEquals( "MAIN",
list.get( 1 ) );
assertEquals( "1 1",
list.get( 2 ) );
}
@Test
public void testContainsCheese() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_ContainsCheese.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "list",
list );
final Cheese stilton = new Cheese( "stilton",
12 );
workingMemory.insert( stilton );
final Cheese brie = new Cheese( "brie",
10 );
workingMemory.insert( brie );
final Cheesery cheesery = new Cheesery();
cheesery.getCheeses().add( stilton );
workingMemory.insert( cheesery );
workingMemory.fireAllRules();
assertEquals( 2,
list.size() );
assertEquals( stilton,
list.get( 0 ) );
assertEquals( brie,
list.get( 1 ) );
}
@Test
public void testDuplicateRuleNames() throws Exception {
PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_DuplicateRuleName1.drl" ) ) );
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( builder.getPackage() );
ruleBase = SerializationHelper.serializeObject( ruleBase );
builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_DuplicateRuleName2.drl" ) ) );
ruleBase.addPackage( builder.getPackage() );
// @todo: this is from JBRULES-394 - maybe we should test more stuff
// here?
}
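// Facts with null field values must still be indexed and matched: both persons are expected to end up with status "OK".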
@Test
public void testNullValuesIndexing() throws Exception {
final Reader reader = new InputStreamReader( getClass().getResourceAsStream( "test_NullValuesIndexing.drl" ) );
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( reader );
final Package pkg1 = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg1 );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
// Adding persons with null name and likes attributes
final PersonInterface bob = new Person( null,
null );
bob.setStatus( "P1" );
final PersonInterface pete = new Person( null,
null );
pete.setStatus( "P2" );
workingMemory.insert( bob );
workingMemory.insert( pete );
workingMemory.fireAllRules();
assertEquals( "Indexing with null values is not working correctly.",
"OK",
bob.getStatus() );
assertEquals( "Indexing with null values is not working correctly.",
"OK",
pete.getStatus() );
}
@Test
public void testEmptyRule() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_EmptyRule.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "list",
list );
workingMemory.fireAllRules();
assertTrue( list.contains( "fired1" ) );
assertTrue( list.contains( "fired2" ) );
}
@Test
public void testjustEval() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_NoPatterns.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "list",
list );
workingMemory.fireAllRules();
assertTrue( list.contains( "fired1" ) );
assertTrue( list.contains( "fired3" ) );
}
@Test
public void testOrWithBinding() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_OrWithBindings.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
final Person hola = new Person( "hola" );
workingMemory.insert( hola );
workingMemory.fireAllRules();
assertEquals( 0,
list.size() );
Cheese brie = new Cheese( "brie" );
workingMemory.insert( brie );
workingMemory.fireAllRules();
assertEquals( 2,
list.size() );
assertTrue( list.contains( hola ) );
assertTrue( list.contains( brie ) );
}
@Test
public void testJoinNodeModifyObject() throws Exception {
final Reader reader = new InputStreamReader( getClass().getResourceAsStream( "test_JoinNodeModifyObject.drl" ) );
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( reader );
final Package pkg1 = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg1 );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
try {
final List orderedFacts = new ArrayList();
final List errors = new ArrayList();
workingMemory.setGlobal( "orderedNumbers",
orderedFacts );
workingMemory.setGlobal( "errors",
errors );
final int MAX = 2;
for ( int i = 1; i <= MAX; i++ ) {
final IndexedNumber n = new IndexedNumber( i,
MAX - i + 1 );
workingMemory.insert( n );
}
workingMemory.fireAllRules();
assertTrue( "Processing generated errors: " + errors.toString(),
errors.isEmpty() );
for ( int i = 1; i <= MAX; i++ ) {
final IndexedNumber n = (IndexedNumber) orderedFacts.get( i - 1 );
assertEquals( "Fact is out of order",
i,
n.getIndex() );
}
} finally {
}
}
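// Runs the insurance pricing example rules against a default Driver and Policy and expects a base price of 120.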
@Test
public void testInsurancePricingExample() throws Exception {
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "insurance_pricing_example.drl",
getClass() ),
ResourceType.DRL );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
// now create some test data
final Driver driver = new Driver();
final Policy policy = new Policy();
ksession.insert( driver );
ksession.insert( policy );
ksession.fireAllRules();
assertEquals( 120,
policy.getBasePrice() );
}
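// Inserts four successive rounds of paired Target facts and fires the rules after each round, exercising join node tuple handling in test_JoinNodeModifyTuple.drl.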
@Test
public void testLLR() throws Exception {
// read in the source
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "test_JoinNodeModifyTuple.drl",
getClass() ),
ResourceType.DRL );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession( ksession,
true );
// 1st time
org.drools.Target tgt = new org.drools.Target();
tgt.setLabel( "Santa-Anna" );
tgt.setLat( new Float( 60.26544f ) );
tgt.setLon( new Float( 28.952137f ) );
tgt.setCourse( new Float( 145.0f ) );
tgt.setSpeed( new Float( 12.0f ) );
tgt.setTime( new Float( 1.8666667f ) );
ksession.insert( tgt );
tgt = new org.drools.Target();
tgt.setLabel( "Santa-Maria" );
tgt.setLat( new Float( 60.236874f ) );
tgt.setLon( new Float( 28.992579f ) );
tgt.setCourse( new Float( 325.0f ) );
tgt.setSpeed( new Float( 8.0f ) );
tgt.setTime( new Float( 1.8666667f ) );
ksession.insert( tgt );
ksession.fireAllRules();
// 2nd time
tgt = new org.drools.Target();
tgt.setLabel( "Santa-Anna" );
tgt.setLat( new Float( 60.265343f ) );
tgt.setLon( new Float( 28.952267f ) );
tgt.setCourse( new Float( 145.0f ) );
tgt.setSpeed( new Float( 12.0f ) );
tgt.setTime( new Float( 1.9f ) );
ksession.insert( tgt );
tgt = new org.drools.Target();
tgt.setLabel( "Santa-Maria" );
tgt.setLat( new Float( 60.236935f ) );
tgt.setLon( new Float( 28.992493f ) );
tgt.setCourse( new Float( 325.0f ) );
tgt.setSpeed( new Float( 8.0f ) );
tgt.setTime( new Float( 1.9f ) );
ksession.insert( tgt );
ksession.fireAllRules();
// 3rd time
tgt = new org.drools.Target();
tgt.setLabel( "Santa-Anna" );
tgt.setLat( new Float( 60.26525f ) );
tgt.setLon( new Float( 28.952396f ) );
tgt.setCourse( new Float( 145.0f ) );
tgt.setSpeed( new Float( 12.0f ) );
tgt.setTime( new Float( 1.9333333f ) );
ksession.insert( tgt );
tgt = new org.drools.Target();
tgt.setLabel( "Santa-Maria" );
tgt.setLat( new Float( 60.236996f ) );
tgt.setLon( new Float( 28.992405f ) );
tgt.setCourse( new Float( 325.0f ) );
tgt.setSpeed( new Float( 8.0f ) );
tgt.setTime( new Float( 1.9333333f ) );
ksession.insert( tgt );
ksession.fireAllRules();
// 4th time
tgt = new org.drools.Target();
tgt.setLabel( "Santa-Anna" );
tgt.setLat( new Float( 60.265163f ) );
tgt.setLon( new Float( 28.952526f ) );
tgt.setCourse( new Float( 145.0f ) );
tgt.setSpeed( new Float( 12.0f ) );
tgt.setTime( new Float( 1.9666667f ) );
ksession.insert( tgt );
tgt = new org.drools.Target();
tgt.setLabel( "Santa-Maria" );
tgt.setLat( new Float( 60.237057f ) );
tgt.setLon( new Float( 28.99232f ) );
tgt.setCourse( new Float( 325.0f ) );
tgt.setSpeed( new Float( 8.0f ) );
tgt.setTime( new Float( 1.9666667f ) );
ksession.insert( tgt );
ksession.fireAllRules();
}
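// Return-value constraints that read the 'cheeseType' global: the two stiltons should match and the brie should not.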
@Test
public void testReturnValueAndGlobal() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_ReturnValueAndGlobal.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List matchlist = new ArrayList();
workingMemory.setGlobal( "matchingList",
matchlist );
final List nonmatchlist = new ArrayList();
workingMemory.setGlobal( "nonMatchingList",
nonmatchlist );
workingMemory.setGlobal( "cheeseType",
"stilton" );
final Cheese stilton1 = new Cheese( "stilton",
5 );
final Cheese stilton2 = new Cheese( "stilton",
7 );
final Cheese brie = new Cheese( "brie",
4 );
workingMemory.insert( stilton1 );
workingMemory.insert( stilton2 );
workingMemory.insert( brie );
workingMemory.fireAllRules();
assertEquals( 2,
matchlist.size() );
assertEquals( 1,
nonmatchlist.size() );
}
@Test
public void testDeclaringAndUsingBindsInSamePattern() throws Exception {
final RuleBaseConfiguration config = new RuleBaseConfiguration();
config.setRemoveIdentities( true );
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_DeclaringAndUsingBindsInSamePattern.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
RuleBase ruleBase = getRuleBase( config );
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List sensors = new ArrayList();
workingMemory.setGlobal( "sensors",
sensors );
final Sensor sensor1 = new Sensor( 100,
150 );
workingMemory.insert( sensor1 );
workingMemory.fireAllRules();
assertEquals( 0,
sensors.size() );
final Sensor sensor2 = new Sensor( 200,
150 );
workingMemory.insert( sensor2 );
workingMemory.fireAllRules();
assertEquals( 3,
sensors.size() );
}
@Test
public void testMissingImports() {
try {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_missing_import.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
fail( "Should have thrown an InvalidRulePackage" );
} catch ( final InvalidRulePackage e ) {
// everything fine
} catch ( final Exception e ) {
e.printStackTrace();
fail( "Should have thrown an InvalidRulePackage Exception instead of " + e.getMessage() );
}
}
@Test
public void testNestedConditionalElements() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_NestedConditionalElements.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
final State state = new State( "SP" );
workingMemory.insert( state );
final Person bob = new Person( "Bob" );
bob.setStatus( state.getState() );
bob.setLikes( "stilton" );
workingMemory.insert( bob );
workingMemory.fireAllRules();
assertEquals( 0,
list.size() );
workingMemory.insert( new Cheese( bob.getLikes(),
10 ) );
workingMemory.fireAllRules();
assertEquals( 1,
list.size() );
}
@Test
public void testDeclarationUsage() throws Exception {
try {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_DeclarationUsage.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
fail( "Should have trown an exception" );
} catch ( final InvalidRulePackage e ) {
// success ... correct exception thrown
} catch ( final Exception e ) {
e.printStackTrace();
fail( "Wrong exception raised: " + e.getMessage() );
}
}
@Test
public void testDeclareAndFrom() throws Exception {
KnowledgeBase kbase = loadKnowledgeBase( "test_DeclareWithFrom.drl" );
FactType profileType = kbase.getFactType( "org.drools",
"Profile" );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
Object profile = profileType.newInstance();
Map<String, Integer> map = new HashMap<String, Integer>();
map.put( "internet",
Integer.valueOf( 2 ) );
profileType.set( profile,
"pageFreq",
map );
ksession.insert( profile );
ksession.fireAllRules();
ksession.dispose();
}
@Test
public void testDeclarationNonExistingField() throws Exception {
try {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_DeclarationOfNonExistingField.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
fail( "Should have trown an exception" );
} catch ( final InvalidRulePackage e ) {
// success ... correct exception thrown
} catch ( final Exception e ) {
e.printStackTrace();
fail( "Wrong exception raised: " + e.getMessage() );
}
}
@Test
public void testUnbalancedTrees() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_UnbalancedTrees.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory wm = ruleBase.newStatefulSession();
wm.insert( new Cheese( "a",
10 ) );
wm.insert( new Cheese( "b",
10 ) );
wm.insert( new Cheese( "c",
10 ) );
wm.insert( new Cheese( "d",
10 ) );
final Cheese e = new Cheese( "e",
10 );
wm.insert( e );
wm.fireAllRules();
assertEquals( "Rule should have fired twice, seting the price to 30",
30,
e.getPrice() );
// success
}
@Test
public void testImportConflict() throws Exception {
RuleBase ruleBase = getSinglethreadRuleBase();
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_ImportConflict.drl" ) ) );
final Package pkg = builder.getPackage();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
}
@Test
public void testEmptyIdentifier() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_emptyIdentifier.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List result = new ArrayList();
workingMemory.setGlobal( "results",
result );
final Person person = new Person( "bob" );
final Cheese cheese = new Cheese( "brie",
10 );
workingMemory.insert( person );
workingMemory.insert( cheese );
workingMemory.fireAllRules();
assertEquals( 4,
result.size() );
}
@Test
public void testDuplicateVariableBinding() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_duplicateVariableBinding.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final Map result = new HashMap();
workingMemory.setGlobal( "results",
result );
final Cheese stilton = new Cheese( "stilton",
20 );
final Cheese brie = new Cheese( "brie",
10 );
workingMemory.insert( stilton );
workingMemory.insert( brie );
workingMemory.fireAllRules();
assertEquals( 5,
result.size() );
assertEquals( stilton.getPrice(),
((Integer) result.get( stilton.getType() )).intValue() );
assertEquals( brie.getPrice(),
((Integer) result.get( brie.getType() )).intValue() );
assertEquals( stilton.getPrice(),
((Integer) result.get( stilton )).intValue() );
assertEquals( brie.getPrice(),
((Integer) result.get( brie )).intValue() );
assertEquals( stilton.getPrice(),
((Integer) result.get( "test3" + stilton.getType() )).intValue() );
workingMemory.insert( new Person( "bob",
brie.getType() ) );
workingMemory.fireAllRules();
assertEquals( 6,
result.size() );
assertEquals( brie.getPrice(),
((Integer) result.get( "test3" + brie.getType() )).intValue() );
}
@Test
public void testShadowProxyInHirarchies() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_ShadowProxyInHirarchies.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
workingMemory.insert( new Child( "gp" ) );
workingMemory.fireAllRules();
}
@Test
public void testSelfReference() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_SelfReference.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
final Order order = new Order( 10,
"Bob" );
final OrderItem item1 = new OrderItem( order,
1 );
final OrderItem item2 = new OrderItem( order,
2 );
final OrderItem anotherItem1 = new OrderItem( null,
3 );
final OrderItem anotherItem2 = new OrderItem( null,
4 );
workingMemory.insert( order );
workingMemory.insert( item1 );
workingMemory.insert( item2 );
workingMemory.insert( anotherItem1 );
workingMemory.insert( anotherItem2 );
workingMemory.fireAllRules();
assertEquals( 2,
results.size() );
assertTrue( results.contains( item1 ) );
assertTrue( results.contains( item2 ) );
}
@Test
public void testNumberComparisons() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_NumberComparisons.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
// inserting the random number fact
final RandomNumber rn = new RandomNumber();
rn.setValue( 10 );
workingMemory.insert( rn );
final Guess guess = new Guess();
guess.setValue( new Integer( 5 ) );
final FactHandle handle = workingMemory.insert( guess );
workingMemory.fireAllRules();
// HIGHER
assertEquals( 1,
list.size() );
assertEquals( "HIGHER",
list.get( 0 ) );
guess.setValue( new Integer( 15 ) );
workingMemory.update( handle,
guess );
workingMemory.fireAllRules();
// LOWER
assertEquals( 2,
list.size() );
assertEquals( "LOWER",
list.get( 1 ) );
guess.setValue( new Integer( 10 ) );
workingMemory.update( handle,
guess );
workingMemory.fireAllRules();
// CORRECT
assertEquals( 3,
list.size() );
assertEquals( "CORRECT",
list.get( 2 ) );
}
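// Registers a working memory event listener and asserts that insert, update and retract events all carry the original fact handle.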
@Test
public void testEventModel() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_EventModel.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory wm = ruleBase.newStatefulSession();
final List agendaList = new ArrayList();
final AgendaEventListener agendaEventListener = new AgendaEventListener() {
public void activationCancelled( ActivationCancelledEvent event,
WorkingMemory workingMemory ) {
agendaList.add( event );
}
public void activationCreated( ActivationCreatedEvent event,
WorkingMemory workingMemory ) {
agendaList.add( event );
}
public void afterActivationFired( AfterActivationFiredEvent event,
WorkingMemory workingMemory ) {
agendaList.add( event );
}
public void agendaGroupPopped( AgendaGroupPoppedEvent event,
WorkingMemory workingMemory ) {
agendaList.add( event );
}
public void agendaGroupPushed( AgendaGroupPushedEvent event,
WorkingMemory workingMemory ) {
agendaList.add( event );
}
public void beforeActivationFired( BeforeActivationFiredEvent event,
WorkingMemory workingMemory ) {
agendaList.add( event );
}
public void afterRuleFlowGroupActivated(
RuleFlowGroupActivatedEvent event,
WorkingMemory workingMemory ) {
// rule flow group events are not relevant to this test
}
public void afterRuleFlowGroupDeactivated(
RuleFlowGroupDeactivatedEvent event,
WorkingMemory workingMemory ) {
// not relevant to this test
}
public void beforeRuleFlowGroupActivated(
RuleFlowGroupActivatedEvent event,
WorkingMemory workingMemory ) {
// not relevant to this test
}
public void beforeRuleFlowGroupDeactivated(
RuleFlowGroupDeactivatedEvent event,
WorkingMemory workingMemory ) {
// not relevant to this test
}
};
final List wmList = new ArrayList();
final WorkingMemoryEventListener workingMemoryListener = new WorkingMemoryEventListener() {
public void objectInserted( ObjectInsertedEvent event ) {
wmList.add( event );
}
public void objectUpdated( ObjectUpdatedEvent event ) {
wmList.add( event );
}
public void objectRetracted( ObjectRetractedEvent event ) {
wmList.add( event );
}
};
wm.addEventListener( workingMemoryListener );
final Cheese stilton = new Cheese( "stilton",
15 );
final Cheese cheddar = new Cheese( "cheddar",
17 );
final FactHandle stiltonHandle = wm.insert( stilton );
final ObjectInsertedEvent oae = (ObjectInsertedEvent) wmList.get( 0 );
assertSame( stiltonHandle,
oae.getFactHandle() );
wm.update( stiltonHandle,
stilton );
final ObjectUpdatedEvent ome = (ObjectUpdatedEvent) wmList.get( 1 );
assertSame( stiltonHandle,
ome.getFactHandle() );
wm.retract( stiltonHandle );
final ObjectRetractedEvent ore = (ObjectRetractedEvent) wmList.get( 2 );
assertSame( stiltonHandle,
ore.getFactHandle() );
wm.insert( cheddar );
}
@Test
public void testImplicitDeclarations() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_implicitDeclarations.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
workingMemory.setGlobal( "factor",
new Double( 1.2 ) );
final Cheese cheese = new Cheese( "stilton",
10 );
workingMemory.insert( cheese );
workingMemory.fireAllRules();
assertEquals( 1,
results.size() );
}
@Test
public void testMVELImplicitWithFrom() {
String str = "" +
"package org.test \n" +
"import java.util.List \n" +
"global java.util.List list \n" +
"global java.util.List list2 \n" +
"rule \"show\" dialect \"mvel\" \n" +
"when \n" +
" $m : List( eval( size == 0 ) ) from [list] \n" +
"then \n" +
" list2.add('r1'); \n" +
"end \n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
fail( kbuilder.getErrors().toString() );
}
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List list = new ArrayList();
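// the same collection backs both globals, so the rule's list2.add( 'r1' ) is observable through 'list'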
ksession.setGlobal( "list",
list );
ksession.setGlobal( "list2",
list );
ksession.fireAllRules();
assertEquals( "r1",
list.get( 0 ) );
}
@Test
public void testJavaImplicitWithFrom() {
String str = "" +
"package org.test \n" +
"import java.util.List \n" +
"global java.util.List list \n" +
"global java.util.List list2 \n" +
"rule \"show\" dialect \"java\" \n" +
"when \n" +
" $m : List( eval( size == 0 ) ) from [list] \n" +
"then \n" +
" list2.add('r1'); \n" +
"end \n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
fail( kbuilder.getErrors().toString() );
}
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List list = new ArrayList();
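// as in the MVEL variant above, both globals deliberately point at the same collection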
ksession.setGlobal( "list",
list );
ksession.setGlobal( "list2",
list );
ksession.fireAllRules();
assertEquals( "r1",
list.get( 0 ) );
}
@Test
public void testCastingInsideEvals() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_castsInsideEval.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
workingMemory.setGlobal( "value",
new Integer( 20 ) );
workingMemory.fireAllRules();
}
@Test
public void testMemberOfAndNotMemberOf() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_memberOf.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "list",
list );
final Cheese stilton = new Cheese( "stilton",
12 );
final Cheese muzzarela = new Cheese( "muzzarela",
10 );
final Cheese brie = new Cheese( "brie",
15 );
workingMemory.insert( stilton );
workingMemory.insert( muzzarela );
final Cheesery cheesery = new Cheesery();
cheesery.getCheeses().add( stilton.getType() );
cheesery.getCheeses().add( brie.getType() );
workingMemory.insert( cheesery );
workingMemory.fireAllRules();
assertEquals( 2,
list.size() );
assertEquals( stilton,
list.get( 0 ) );
assertEquals( muzzarela,
list.get( 1 ) );
}
@Test
public void testContainsInArray() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_contains_in_array.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "list",
list );
final Primitives p = new Primitives();
p.setStringArray( new String[]{"test1", "test3"} );
workingMemory.insert( p );
workingMemory.fireAllRules();
assertEquals( 2,
list.size() );
assertEquals( "ok1",
list.get( 0 ) );
assertEquals( "ok2",
list.get( 1 ) );
}
@Test
public void testNodeSharingNotExists() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_nodeSharingNotExists.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
workingMemory.fireAllRules();
assertEquals( 1,
list.size() );
assertEquals( "rule1",
list.get( 0 ) );
workingMemory.insert( new Cheese( "stilton",
10 ) );
workingMemory.fireAllRules();
assertEquals( 2,
list.size() );
assertEquals( "rule2",
list.get( 1 ) );
}
@Test
public void testNullBinding() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_nullBindings.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
workingMemory.insert( new Person( "bob" ) );
workingMemory.insert( new Person( null ) );
workingMemory.fireAllRules();
assertEquals( 1,
list.size() );
assertEquals( "OK",
list.get( 0 ) );
}
@Test
public void testModifyRetractWithFunction() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_RetractModifyWithFunction.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final AbstractWorkingMemory workingMemory = (AbstractWorkingMemory) ruleBase.newStatefulSession();
final Cheese stilton = new Cheese( "stilton",
7 );
final Cheese muzzarella = new Cheese( "muzzarella",
9 );
final int sum = stilton.getPrice() + muzzarella.getPrice();
final FactHandle stiltonHandle = workingMemory.insert( stilton );
final FactHandle muzzarellaHandle = workingMemory.insert( muzzarella );
workingMemory.fireAllRules();
assertEquals( sum,
stilton.getPrice() );
assertEquals( 1,
workingMemory.getObjectStore().size() );
assertNotNull( workingMemory.getObject( stiltonHandle ) );
assertNotNull( workingMemory.getFactHandle( stilton ) );
assertNull( workingMemory.getObject( muzzarellaHandle ) );
assertNull( workingMemory.getFactHandle( muzzarella ) );
}
@Test
public void testConstraintConnectors() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_ConstraintConnectors.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
final Person youngChili1 = new Person( "young chili1" );
youngChili1.setAge( 12 );
youngChili1.setHair( "blue" );
final Person youngChili2 = new Person( "young chili2" );
youngChili2.setAge( 25 );
youngChili2.setHair( "purple" );
final Person chili1 = new Person( "chili1" );
chili1.setAge( 35 );
chili1.setHair( "red" );
final Person chili2 = new Person( "chili2" );
chili2.setAge( 38 );
chili2.setHair( "indigigo" );
final Person oldChili1 = new Person( "old chili1" );
oldChili1.setAge( 45 );
oldChili1.setHair( "green" );
final Person oldChili2 = new Person( "old chili2" );
oldChili2.setAge( 48 );
oldChili2.setHair( "blue" );
final Person veryold = new Person( "very old" );
veryold.setAge( 99 );
veryold.setHair( "gray" );
workingMemory.insert( youngChili1 );
workingMemory.insert( youngChili2 );
workingMemory.insert( chili1 );
workingMemory.insert( chili2 );
workingMemory.insert( oldChili1 );
workingMemory.insert( oldChili2 );
workingMemory.insert( veryold );
workingMemory.fireAllRules();
assertEquals( 4,
results.size() );
assertEquals( chili1,
results.get( 0 ) );
assertEquals( oldChili1,
results.get( 1 ) );
assertEquals( youngChili1,
results.get( 2 ) );
assertEquals( veryold,
results.get( 3 ) );
}
@Test
public void testConnectorsAndOperators() throws Exception {
final KnowledgeBase kbase = SerializationHelper.serializeObject( loadKnowledgeBase( "test_ConstraintConnectorsAndOperators.drl" ) );
final StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.insert( new StockTick( 1,
"RHT",
10,
1000 ) );
ksession.insert( new StockTick( 2,
"IBM",
10,
1100 ) );
final int fired = ksession.fireAllRules();
assertEquals( 1,
fired );
}
@Test
public void testConstraintConnectorOr() throws Exception {
final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newInputStreamResource( getClass().getResourceAsStream( "test_ConstraintConnectorOr.drl" ) ),
ResourceType.DRL );
assertFalse( kbuilder.getErrors().toString(),
kbuilder.hasErrors() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List<Person> results = new ArrayList<Person>();
ksession.setGlobal( "results",
results );
final Person mark = new Person( "Mark" );
mark.setAlive( true );
mark.setHappy( true );
final Person bush = new Person( "Bush" );
bush.setAlive( true );
bush.setHappy( false );
final Person conan = new Person( "Conan" );
conan.setAlive( false );
conan.setHappy( true );
final Person nero = new Person( "Nero" );
nero.setAlive( false );
nero.setHappy( false );
ksession.insert( mark );
ksession.insert( bush );
ksession.insert( conan );
ksession.insert( nero );
ksession.fireAllRules();
assertEquals( 3,
results.size() );
assertTrue( results.contains( mark ) );
assertTrue( results.contains( bush ) );
assertTrue( results.contains( conan ) );
}
@Test
public void testMatchesNotMatchesCheese() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_MatchesNotMatches.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "list",
list );
final Cheese stilton = new Cheese( "stilton",
12 );
final Cheese stilton2 = new Cheese( "stilton2",
12 );
final Cheese agedStilton = new Cheese( "aged stilton",
12 );
final Cheese brie = new Cheese( "brie",
10 );
final Cheese brie2 = new Cheese( "brie2",
10 );
final Cheese muzzarella = new Cheese( "muzzarella",
10 );
final Cheese muzzarella2 = new Cheese( "muzzarella2",
10 );
final Cheese provolone = new Cheese( "provolone",
10 );
final Cheese provolone2 = new Cheese( "another cheese (provolone)",
10 );
workingMemory.insert( stilton );
workingMemory.insert( stilton2 );
workingMemory.insert( agedStilton );
workingMemory.insert( brie );
workingMemory.insert( brie2 );
workingMemory.insert( muzzarella );
workingMemory.insert( muzzarella2 );
workingMemory.insert( provolone );
workingMemory.insert( provolone2 );
workingMemory.fireAllRules();
logger.info( list.toString() );
assertEquals( 4,
list.size() );
assertEquals( stilton,
list.get( 0 ) );
assertEquals( brie,
list.get( 1 ) );
assertEquals( agedStilton,
list.get( 2 ) );
assertEquals( provolone,
list.get( 3 ) );
}
@Test
public void testAutomaticBindings() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_AutoBindings.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
final Person bob = new Person( "bob",
"stilton" );
final Cheese stilton = new Cheese( "stilton",
12 );
workingMemory.insert( bob );
workingMemory.insert( stilton );
workingMemory.fireAllRules();
assertEquals( 1,
list.size() );
assertEquals( bob,
list.get( 0 ) );
}
@Test
public void testMatchesMVEL() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_MatchesMVEL.drl" ) ) );
final Package pkg = builder.getPackage();
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
final StatefulSession session = ruleBase.newStatefulSession();
final List results = new ArrayList();
session.setGlobal( "results",
results );
Map map = new HashMap();
map.put( "content",
"hello ;=" );
session.insert( map );
session.fireAllRules();
assertEquals( 1,
results.size() );
}
@Test
public void testMatchesMVEL2() throws Exception {
KnowledgeBase kbase = loadKnowledgeBase( "test_MatchesMVEL2.drl" );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
Map map = new HashMap();
map.put( "content",
"String with . and (routine)" );
ksession.insert( map );
int fired = ksession.fireAllRules();
assertEquals( 2,
fired );
}
@Test
public void testMatchesMVEL3() throws Exception {
KnowledgeBase kbase = loadKnowledgeBase( "test_MatchesMVEL2.drl" );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
Map map = new HashMap();
map.put( "content",
"String with . and ()" );
ksession.insert( map );
int fired = ksession.fireAllRules();
assertEquals( 1,
fired );
}
@Test
public void testQualifiedFieldReference() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_QualifiedFieldReference.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
final Person bob = new Person( "bob",
"stilton" );
final Cheese stilton = new Cheese( "stilton",
12 );
workingMemory.insert( bob );
workingMemory.insert( stilton );
workingMemory.fireAllRules();
assertEquals( 1,
list.size() );
assertEquals( bob,
list.get( 0 ) );
}
@Test
public void testEvalInline() throws Exception {
final String text = "package org.drools\n" +
"rule \"inline eval\"\n" +
"when\n" +
" $str : String()\n" +
" Person( eval( name.startsWith($str) && age == 18) )\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( text );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.insert( "b" );
ksession.insert( new Person( "mark",
50 ) );
int rules = ksession.fireAllRules();
assertEquals( 0,
rules );
ksession.insert( new Person( "bob",
18 ) );
rules = ksession.fireAllRules();
assertEquals( 1,
rules );
}
@Test
public void testMethodCalls() throws Exception {
final String text = "package org.drools\n" +
"rule \"method calls\"\n" +
"when\n" +
" Person( getName().substring(2) == 'b' )\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( text );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.insert( new Person( "mark",
50 ) );
int rules = ksession.fireAllRules();
assertEquals( 0,
rules );
ksession.insert( new Person( "bob",
18 ) );
rules = ksession.fireAllRules();
assertEquals( 1,
rules );
}
@Test
public void testAlphaExpression() throws Exception {
final String text = "package org.drools\n" +
"rule \"alpha\"\n" +
"when\n" +
" Person( 5 < 6 )\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( text );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.insert( new Person( "mark",
50 ) );
int rules = ksession.fireAllRules();
assertEquals( 1,
rules );
}
@Test
public void testEvalCE() throws Exception {
final String text = "package org.drools\n" +
"rule \"inline eval\"\n" +
"when\n" +
" $str : String()\n" +
" $p : Person()\n" +
" eval( $p.getName().startsWith($str) && $p.getName().endsWith($str) )" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( text );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.insert( "b" );
ksession.insert( new Person( "mark",
50 ) );
int rules = ksession.fireAllRules();
assertEquals( 0,
rules );
ksession.insert( new Person( "bob",
18 ) );
rules = ksession.fireAllRules();
assertEquals( 1,
rules );
}
@Test
public void testEvalRewrite() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_EvalRewrite.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
final Order order1 = new Order( 10,
"Bob" );
final OrderItem item11 = new OrderItem( order1,
1 );
final OrderItem item12 = new OrderItem( order1,
2 );
order1.addItem( item11 );
order1.addItem( item12 );
final Order order2 = new Order( 11,
"Bob" );
final OrderItem item21 = new OrderItem( order2,
1 );
final OrderItem item22 = new OrderItem( order2,
2 );
order2.addItem( item21 );
order2.addItem( item22 );
final Order order3 = new Order( 12,
"Bob" );
final OrderItem item31 = new OrderItem( order3,
1 );
final OrderItem item32 = new OrderItem( order3,
2 );
order3.addItem( item31 );
order3.addItem( item32 );
final Order order4 = new Order( 13,
"Bob" );
final OrderItem item41 = new OrderItem( order4,
1 );
final OrderItem item42 = new OrderItem( order4,
2 );
order4.addItem( item41 );
order4.addItem( item42 );
workingMemory.insert( order1 );
workingMemory.insert( item11 );
workingMemory.insert( item12 );
workingMemory.insert( order2 );
workingMemory.insert( item21 );
workingMemory.insert( item22 );
workingMemory.insert( order3 );
workingMemory.insert( item31 );
workingMemory.insert( item32 );
workingMemory.insert( order4 );
workingMemory.insert( item41 );
workingMemory.insert( item42 );
workingMemory.fireAllRules();
assertEquals( 5,
list.size() );
assertTrue( list.contains( item11 ) );
assertTrue( list.contains( item12 ) );
assertTrue( list.contains( item22 ) );
assertTrue( list.contains( order3 ) );
assertTrue( list.contains( order4 ) );
}
@Test
public void testMapAccess() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_MapAccess.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
Map map = new HashMap();
map.put( "name",
"Edson" );
map.put( "surname",
"Tirelli" );
map.put( "age",
"28" );
workingMemory.insert( map );
workingMemory.fireAllRules();
assertEquals( 1,
list.size() );
assertTrue( list.contains( map ) );
}
@Test
public void testMapNullConstraint() throws Exception {
KnowledgeBase kbase = loadKnowledgeBase( "test_mapNullConstraints.drl" );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
org.drools.event.rule.AgendaEventListener ael = mock( org.drools.event.rule.AgendaEventListener.class );
ksession.addEventListener( ael );
new WorkingMemoryConsoleLogger( ksession );
Map addresses = new HashMap();
addresses.put( "home",
new Address( "home street" ) );
Person bob = new Person( "Bob" );
bob.setNamedAddresses( addresses );
ksession.insert( bob );
ksession.fireAllRules();
ArgumentCaptor<org.drools.event.rule.AfterActivationFiredEvent> arg = ArgumentCaptor.forClass( org.drools.event.rule.AfterActivationFiredEvent.class );
verify( ael,
times( 4 ) ).afterActivationFired( arg.capture() );
org.drools.event.rule.AfterActivationFiredEvent aaf = arg.getAllValues().get( 0 );
assertThat( aaf.getActivation().getRule().getName(),
is( "1. home != null" ) );
aaf = arg.getAllValues().get( 1 );
assertThat( aaf.getActivation().getRule().getName(),
is( "2. not home == null" ) );
aaf = arg.getAllValues().get( 2 );
assertThat( aaf.getActivation().getRule().getName(),
is( "7. work == null" ) );
aaf = arg.getAllValues().get( 3 );
assertThat( aaf.getActivation().getRule().getName(),
is( "8. not work != null" ) );
}
@Test
public void testNonTypeSafeDeclarations() {
// same namespace
String str = "package org.drools\n" +
"global java.util.List list\n" +
"declare Person\n" +
" @typesafe(false)\n" +
"end\n" +
"rule testTypeSafe\n dialect \"mvel\" when\n" +
" $p : Person( object.street == 's1' )\n" +
"then\n" +
" list.add( $p );\n" +
"end\n";
executeTypeSafeDeclarations( str,
true );
// different namespace with import
str = "package org.drools.test\n" +
"import org.drools.Person\n" +
"global java.util.List list\n" +
"declare Person\n" +
" @typesafe(false)\n" +
"end\n" +
"rule testTypeSafe\n dialect \"mvel\" when\n" +
" $p : Person( object.street == 's1' )\n" +
"then\n" +
" list.add( $p );\n" +
"end\n";
executeTypeSafeDeclarations( str,
true );
// different namespace without import using qualified name
str = "package org.drools.test\n" +
"global java.util.List list\n" +
"declare org.drools.Person\n" +
" @typesafe(false)\n" +
"end\n" +
"rule testTypeSafe\n dialect \"mvel\" when\n" +
" $p : org.drools.Person( object.street == 's1' )\n" +
"then\n" +
" list.add( $p );\n" +
"end\n";
executeTypeSafeDeclarations( str,
true );
// this should fail because the type is declared typesafe here
str = "package org.drools.test\n" +
"global java.util.List list\n" +
"declare org.drools.Person\n" +
" @typesafe(true)\n" +
"end\n" +
"rule testTypeSafe\n dialect \"mvel\" when\n" +
" $p : org.drools.Person( object.street == 's1' )\n" +
"then\n" +
" list.add( $p );\n" +
"end\n";
executeTypeSafeDeclarations( str,
false );
}
private void executeTypeSafeDeclarations( String str,
boolean mustSucceed ) {
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
if ( mustSucceed ) {
fail( kbuilder.getErrors().toString() );
} else {
return;
}
}
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List list = new ArrayList();
ksession.setGlobal( "list",
list );
Address a = new Address( "s1" );
Person p = new Person( "yoda" );
p.setObject( a );
ksession.insert( p );
ksession.fireAllRules();
assertEquals( p,
list.get( 0 ) );
}
@Test
public void testMapAccessWithVariable() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_MapAccessWithVariable.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
Map map = new HashMap();
map.put( "name",
"Edson" );
map.put( "surname",
"Tirelli" );
map.put( "age",
"28" );
workingMemory.insert( map );
workingMemory.insert( "name" );
workingMemory.fireAllRules();
assertEquals( 1,
list.size() );
assertTrue( list.contains( map ) );
}
// Drools does not support variables inside bindings yet... but we should...
@Test
public void testMapAccessWithVariable2() {
String str = "package org.drools;\n" +
"import java.util.Map;\n" +
"rule \"map access with variable\"\n" +
" when\n" +
" $key : String( )\n" +
" $p1 : Person( name == 'Bob', namedAddresses[$key] != null, $na : namedAddresses[$key] )\n" +
" $p2 : Person( name == 'Mark', namedAddresses[$key] == $na )\n" +
" then\n" +
"end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
Assert.assertTrue( kbuilder.hasErrors() );
}
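// per the assertions, the rules stop (halt) once ten Integers (0..9) have been collected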
@Test
public void testHalt() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_halt.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
workingMemory.insert( new Integer( 0 ) );
workingMemory.fireAllRules();
assertEquals( 10,
results.size() );
for ( int i = 0; i < 10; i++ ) {
assertEquals( new Integer( i ),
results.get( i ) );
}
}
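// exercises fireAllRules() with no limit (20 results), an explicit limit of 10, and -1, which behaves as unlimited (20 results again)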
@Test
public void testFireLimit() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_fireLimit.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
workingMemory.insert( new Integer( 0 ) );
workingMemory.fireAllRules();
assertEquals( 20,
results.size() );
for ( int i = 0; i < 20; i++ ) {
assertEquals( new Integer( i ),
results.get( i ) );
}
results.clear();
workingMemory.insert( new Integer( 0 ) );
workingMemory.fireAllRules( 10 );
assertEquals( 10,
results.size() );
for ( int i = 0; i < 10; i++ ) {
assertEquals( new Integer( i ),
results.get( i ) );
}
results.clear();
workingMemory.insert( new Integer( 0 ) );
workingMemory.fireAllRules( -1 );
assertEquals( 20,
results.size() );
for ( int i = 0; i < 20; i++ ) {
assertEquals( new Integer( i ),
results.get( i ) );
}
results.clear();
}
@Test
public void testEqualitySupport() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_equalitySupport.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBaseConfiguration conf = new RuleBaseConfiguration();
conf.setAssertBehaviour( RuleBaseConfiguration.AssertBehaviour.EQUALITY );
RuleBase ruleBase = getRuleBase( conf );
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
PersonWithEquals person = new PersonWithEquals( "bob",
30 );
workingMemory.insert( person );
workingMemory.fireAllRules();
assertEquals( 1,
results.size() );
assertEquals( "mark",
results.get( 0 ) );
}
@Test
public void testCharComparisons() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_charComparisons.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
Primitives p1 = new Primitives();
p1.setCharPrimitive( 'a' );
p1.setStringAttribute( "b" );
Primitives p2 = new Primitives();
p2.setCharPrimitive( 'b' );
p2.setStringAttribute( "a" );
workingMemory.insert( p1 );
workingMemory.insert( p2 );
workingMemory.fireAllRules();
assertEquals( 3,
results.size() );
assertEquals( "1",
results.get( 0 ) );
assertEquals( "2",
results.get( 1 ) );
assertEquals( "3",
results.get( 2 ) );
}
@Test
public void testAlphaNodeSharing() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_alphaNodeSharing.drl" ) ) );
final Package pkg = builder.getPackage();
final RuleBaseConfiguration conf = new RuleBaseConfiguration();
conf.setShareAlphaNodes( false );
RuleBase ruleBase = getRuleBase( conf );
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
Person p1 = new Person( "bob",
5 );
workingMemory.insert( p1 );
workingMemory.fireAllRules();
assertEquals( 2,
results.size() );
assertEquals( "1",
results.get( 0 ) );
assertEquals( "2",
results.get( 1 ) );
}
@Test
public void testSelfReference2() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_SelfReference2.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
workingMemory.insert( new Cheese() );
workingMemory.fireAllRules();
assertEquals( 0,
results.size() );
}
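// renaming p1 to "yoda" before firing invalidates the name-indexed self join, so no rule fires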
@Test
public void testSelfJoinWithIndex() {
String drl = "";
drl += "package org.test\n";
drl += "import org.drools.Person\n";
drl += "global java.util.List list\n";
drl += "rule test1\n";
drl += "when\n";
drl += " $p1 : Person( $name : name, $age : age )\n";
drl += " $p2 : Person( name == $name, age < $age)\n";
drl += "then\n";
drl += " list.add( $p1 );\n";
drl += "end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newReaderResource( new StringReader( drl ) ),
ResourceType.DRL );
KnowledgeBuilderErrors errors = kbuilder.getErrors();
if ( errors.size() > 0 ) {
for ( KnowledgeBuilderError error : errors ) {
logger.warn( error.toString() );
}
throw new IllegalArgumentException( "Could not parse knowledge." );
}
assertFalse( kbuilder.hasErrors() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List list = new ArrayList();
ksession.setGlobal( "list",
list );
Person p1 = new Person( "darth",
30 );
org.drools.runtime.rule.FactHandle fh1 = ksession.insert( p1 );
Person p2 = new Person( "darth",
25 );
org.drools.runtime.rule.FactHandle fh2 = ksession.insert( p2 ); // creates activation.
p1.setName( "yoda" );
ksession.update( fh1,
p1 ); // creates activation
ksession.fireAllRules();
assertEquals( 0,
list.size() );
}
@Test
public void testMergingDifferentPackages() throws Exception {
// using the same builder
try {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_RuleNameClashes1.drl" ) ) );
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_RuleNameClashes2.drl" ) ) );
assertEquals( 2,
builder.getPackages().length );
Package pkg1 = builder.getPackageRegistry( "org.drools.package1" ).getPackage();
assertEquals( "rule 1",
pkg1.getRules()[0].getName() );
Package pkg2 = builder.getPackageRegistry( "org.drools.package2" ).getPackage();
assertEquals( "rule 1",
pkg2.getRules()[0].getName() );
} catch ( PackageMergeException e ) {
fail( "unexpected exception: " + e.getMessage() );
} catch ( RuntimeException e ) {
e.printStackTrace();
fail( "unexpected exception: " + e.getMessage() );
}
}
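// per the assertions, the first firing matches (p1, p3); after p1 is renamed, (p2, p3) also matches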
@Test
public void testSelfJoinAndNotWithIndex() {
String drl = "";
drl += "package org.test\n";
drl += "import org.drools.Person\n";
drl += "global java.util.List list\n";
drl += "rule test1\n";
drl += "when\n";
drl += " $p1 : Person( )\n";
drl += " not Person( name == $p1.name, age < $p1.age )\n";
drl += " $p2 : Person( name == $p1.name, likes != $p1.likes, age > $p1.age)\n";
drl += " not Person( name == $p1.name, likes == $p2.likes, age < $p2.age )\n";
drl += "then\n";
drl += " System.out.println( $p1 + \":\" + $p2 );\n";
drl += " list.add( $p1 );\n";
drl += " list.add( $p2 );\n";
drl += "end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newReaderResource( new StringReader( drl ) ),
ResourceType.DRL );
KnowledgeBuilderErrors errors = kbuilder.getErrors();
if ( kbuilder.hasErrors() ) {
fail( kbuilder.getErrors().toString() );
}
assertFalse( kbuilder.hasErrors() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List list = new ArrayList();
ksession.setGlobal( "list",
list );
Person p0 = new Person( "yoda",
0 );
p0.setLikes( "cheddar" );
org.drools.runtime.rule.FactHandle fh0 = ksession.insert( p0 );
Person p1 = new Person( "darth",
15 );
p1.setLikes( "cheddar" );
org.drools.runtime.rule.FactHandle fh1 = ksession.insert( p1 );
Person p2 = new Person( "darth",
25 );
p2.setLikes( "cheddar" );
org.drools.runtime.rule.FactHandle fh2 = ksession.insert( p2 ); // creates activation.
Person p3 = new Person( "darth",
30 );
p3.setLikes( "brie" );
org.drools.runtime.rule.FactHandle fh3 = ksession.insert( p3 );
ksession.fireAllRules();
assertEquals( 2,
list.size() );
assertSame( p1,
list.get( 0 ) );
assertSame( p3,
list.get( 1 ) );
p1.setName( "yoda" );
ksession.update( fh1,
p1 ); // creates activation
ksession.fireAllRules();
assertEquals( 4,
list.size() );
assertSame( p2,
list.get( 2 ) );
assertSame( p3,
list.get( 3 ) );
}
@Test
public void testMergingDifferentPackages2() throws Exception {
// using different builders
try {
final PackageBuilder builder1 = new PackageBuilder();
builder1.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_RuleNameClashes1.drl" ) ) );
final Package pkg1 = builder1.getPackage();
assertEquals( 1,
pkg1.getRules().length );
final PackageBuilder builder2 = new PackageBuilder();
builder2.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_RuleNameClashes2.drl" ) ) );
final Package pkg2 = builder2.getPackage();
assertEquals( 1,
pkg2.getRules().length );
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg1 );
ruleBase.addPackage( pkg2 );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
workingMemory.insert( new Cheese( "stilton",
10 ) );
workingMemory.insert( new Cheese( "brie",
5 ) );
workingMemory.fireAllRules();
assertEquals( results.toString(),
2,
results.size() );
assertTrue( results.contains( "p1.r1" ) );
assertTrue( results.contains( "p2.r1" ) );
} catch ( PackageMergeException e ) {
fail( "Should not raise exception when merging different packages into the same rulebase: " + e.getMessage() );
} catch ( Exception e ) {
e.printStackTrace();
fail( "unexpected exception: " + e.getMessage() );
}
}
@Test
public void testMergePackageWithSameRuleNames() throws Exception {
PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_MergePackageWithSameRuleNames1.drl" ) ) );
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( builder.getPackage() );
builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_MergePackageWithSameRuleNames2.drl" ) ) );
ruleBase.addPackage( builder.getPackage() );
StatefulSession session = ruleBase.newStatefulSession();
final List results = new ArrayList();
session.setGlobal( "results",
results );
session.fireAllRules();
assertEquals( 1,
results.size() );
assertEquals( "rule1 for the package2",
results.get( 0 ) );
}
@Test
public void testRuleRemovalWithJoinedRootPattern() {
String str = "";
str += "package org.drools \n";
str += "rule rule1 \n";
str += "when \n";
str += " String() \n";
str += " Person() \n";
str += "then \n";
str += "end \n";
str += "rule rule2 \n";
str += "when \n";
str += " String() \n";
str += " Cheese() \n";
str += "then \n";
str += "end \n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
DefaultFactHandle handle = (DefaultFactHandle) ksession.insert( "hello" );
LeftTuple leftTuple = handle.getFirstLeftTuple();
assertNotNull( leftTuple );
assertNotNull( leftTuple.getLeftParentNext() );
kbase.removeRule( "org.drools",
"rule2" );
leftTuple = handle.getFirstLeftTuple();
assertNotNull( leftTuple );
assertNull( leftTuple.getLeftParentNext() );
}
// JBRULES-1808
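// the consequence rewriter must leave "update(...)" occurrences inside string literals untouched
// while still honouring the real update($fact) call (no-loop guards against refiring)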
@Test
public void testKnowledgeHelperFixerInStrings() {
String str = "";
str += "package org.simple \n";
str += "global java.util.List list \n";
str += "rule xxx \n";
str += " no-loop true ";
str += "when \n";
str += " $fact : String() \n";
str += "then \n";
str += " list.add(\"This is an update()\"); \n";
str += " list.add(\"This is an update($fact)\"); \n";
str += " update($fact); \n";
str += "end \n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List list = new ArrayList();
ksession.setGlobal( "list",
list );
ksession.insert( "hello" );
ksession.fireAllRules();
ksession.dispose();
assertEquals( 2,
list.size() );
assertEquals( "This is an update()",
list.get( 0 ) );
assertEquals( "This is an update($fact)",
list.get( 1 ) );
}
@Test
public void testEmptyAfterRetractInIndexedMemory() {
String str = "";
str += "package org.simple \n";
str += "import org.drools.Person\n";
str += "global java.util.List list \n";
str += "rule xxx dialect 'mvel' \n";
str += "when \n";
str += " Person( $name : name ) \n";
str += " $s : String( this == $name) \n";
str += "then \n";
str += " list.add($s); \n";
str += "end \n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List list = new ArrayList();
ksession.setGlobal( "list",
list );
Person p = new Person( "ackbar" );
org.drools.runtime.rule.FactHandle ph = ksession.insert( p );
org.drools.runtime.rule.FactHandle sh = ksession.insert( "ackbar" );
ksession.fireAllRules();
ksession.dispose();
assertEquals( 1,
list.size() );
assertEquals( "ackbar",
list.get( 0 ) );
}
@Test
public void testRuleReplacement() throws Exception {
// test rule replacement
final PackageBuilder builder1 = new PackageBuilder();
builder1.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_RuleNameClashes1.drl" ) ) );
builder1.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_RuleNameClashes3.drl" ) ) );
final Package pkg1 = builder1.getPackage();
assertEquals( 1,
pkg1.getRules().length );
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg1 );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
workingMemory.insert( new Cheese( "stilton",
10 ) );
workingMemory.insert( new Cheese( "brie",
5 ) );
workingMemory.fireAllRules();
assertEquals( results.toString(),
0,
results.size() );
workingMemory.insert( new Cheese( "muzzarella",
7 ) );
workingMemory.fireAllRules();
assertEquals( results.toString(),
1,
results.size() );
assertTrue( results.contains( "p1.r3" ) );
}
@Test
public void testBindingsOnConnectiveExpressions() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_bindings.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
workingMemory.insert( new Cheese( "stilton",
15 ) );
workingMemory.fireAllRules();
assertEquals( 2,
results.size() );
assertEquals( "stilton",
results.get( 0 ) );
assertEquals( new Integer( 15 ),
results.get( 1 ) );
}
@Test
public void testMultipleFroms() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_multipleFroms.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
final Cheesery cheesery = new Cheesery();
cheesery.addCheese( new Cheese( "stilton",
15 ) );
cheesery.addCheese( new Cheese( "brie",
10 ) );
workingMemory.setGlobal( "cheesery",
cheesery );
workingMemory.fireAllRules();
assertEquals( 2,
results.size() );
assertEquals( 2,
((List) results.get( 0 )).size() );
assertEquals( 2,
((List) results.get( 1 )).size() );
}
@Test
public void testNullHashing() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_NullHashing.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
workingMemory.insert( new Cheese( "stilton",
15 ) );
workingMemory.insert( new Cheese( "",
10 ) );
workingMemory.insert( new Cheese( null,
8 ) );
workingMemory.fireAllRules();
assertEquals( 3,
results.size() );
}
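// each SecondClass with one null field should only match the "NOT" branch; the fully populated instance finally matches "EQUALS"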
@Test
public void testDefaultBetaConstraints() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_DefaultBetaConstraint.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
final FirstClass first = new FirstClass( "1",
"2",
"3",
"4",
"5" );
final FactHandle handle = workingMemory.insert( first );
workingMemory.fireAllRules();
assertEquals( 1,
results.size() );
assertEquals( "NOT",
results.get( 0 ) );
workingMemory.insert( new SecondClass() );
workingMemory.update( handle,
first );
workingMemory.fireAllRules();
assertEquals( 2,
results.size() );
assertEquals( "NOT",
results.get( 1 ) );
workingMemory.update( handle,
first );
workingMemory.insert( new SecondClass( null,
"2",
"3",
"4",
"5" ) );
workingMemory.fireAllRules();
assertEquals( 3,
results.size() );
assertEquals( "NOT",
results.get( 2 ) );
workingMemory.update( handle,
first );
workingMemory.insert( new SecondClass( "1",
null,
"3",
"4",
"5" ) );
workingMemory.fireAllRules();
assertEquals( 4,
results.size() );
assertEquals( "NOT",
results.get( 3 ) );
workingMemory.update( handle,
first );
workingMemory.insert( new SecondClass( "1",
"2",
null,
"4",
"5" ) );
workingMemory.fireAllRules();
assertEquals( 5,
results.size() );
assertEquals( "NOT",
results.get( 4 ) );
workingMemory.update( handle,
first );
workingMemory.insert( new SecondClass( "1",
"2",
"3",
null,
"5" ) );
workingMemory.fireAllRules();
assertEquals( 6,
results.size() );
assertEquals( "NOT",
results.get( 5 ) );
workingMemory.update( handle,
first );
workingMemory.insert( new SecondClass( "1",
"2",
"3",
"4",
null ) );
workingMemory.fireAllRules();
assertEquals( 7,
results.size() );
assertEquals( "NOT",
results.get( 6 ) );
workingMemory.insert( new SecondClass( "1",
"2",
"3",
"4",
"5" ) );
workingMemory.update( handle,
first );
workingMemory.fireAllRules();
assertEquals( 8,
results.size() );
assertEquals( "EQUALS",
results.get( 7 ) );
}
@Test
public void testBooleanWrapper() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_BooleanWrapper.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
Primitives p1 = new Primitives();
workingMemory.insert( p1 );
workingMemory.fireAllRules();
assertEquals( 0,
results.size() );
Primitives p2 = new Primitives();
p2.setBooleanWrapper( Boolean.FALSE );
workingMemory.insert( p2 );
workingMemory.fireAllRules();
assertEquals( 0,
results.size() );
Primitives p3 = new Primitives();
p3.setBooleanWrapper( Boolean.TRUE );
workingMemory.insert( p3 );
workingMemory.fireAllRules();
assertEquals( 1,
results.size() );
}
@Test
public void testCrossProductRemovingIdentityEquals() throws Exception {
PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( MiscTest.class.getResourceAsStream( "test_CrossProductRemovingIdentityEquals.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
RuleBaseConfiguration conf = new RuleBaseConfiguration();
RuleBase rb = RuleBaseFactory.newRuleBase( conf );
rb.addPackage( builder.getPackage() );
rb = SerializationHelper.serializeObject( rb );
StatefulSession session = rb.newStatefulSession();
List list1 = new ArrayList();
List list2 = new ArrayList();
session.setGlobal( "list1",
list1 );
session.setGlobal( "list2",
list2 );
SpecialString first42 = new SpecialString( "42" );
SpecialString second43 = new SpecialString( "43" );
SpecialString world = new SpecialString( "World" );
session.insert( world );
session.insert( first42 );
session.insert( second43 );
session.fireAllRules();
assertEquals( 6,
list1.size() );
assertEquals( 6,
list2.size() );
assertEquals( first42,
list1.get( 0 ) );
assertEquals( world,
list1.get( 1 ) );
assertEquals( second43,
list1.get( 2 ) );
assertEquals( second43,
list1.get( 3 ) );
assertEquals( world,
list1.get( 4 ) );
assertEquals( first42,
list1.get( 5 ) );
assertEquals( second43,
list2.get( 0 ) );
assertEquals( second43,
list2.get( 1 ) );
assertEquals( first42,
list2.get( 2 ) );
assertEquals( world,
list2.get( 3 ) );
assertEquals( first42,
list2.get( 4 ) );
assertEquals( world,
list2.get( 5 ) );
}
@Test
public void testIterateObjects() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_IterateObjects.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
workingMemory.insert( new Cheese( "stilton",
10 ) );
workingMemory.fireAllRules();
Iterator events = workingMemory.iterateObjects( new ClassObjectFilter( PersonInterface.class ) );
assertTrue( events.hasNext() );
assertEquals( 1,
results.size() );
assertEquals( results.get( 0 ),
events.next() );
}
@Test
public void testNotInStatelessSession() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_NotInStatelessSession.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBaseConfiguration conf = new RuleBaseConfiguration();
conf.setSequential( true );
RuleBase ruleBase = getRuleBase( conf );
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatelessSession session = ruleBase.newStatelessSession();
List list = new ArrayList();
session.setGlobal( "list",
list );
session.execute( "not integer" );
assertEquals( "not integer",
list.get( 0 ) );
}
@Test
public void testDynamicallyAddInitialFactRule() throws Exception {
PackageBuilder builder = new PackageBuilder();
String rule = "package org.drools.test\n" +
"global java.util.List list\n" +
"rule xxx when\n" +
" i:Integer()\n" +
"then\n" +
" list.add(i);\n" +
"end";
builder.addPackageFromDrl( new StringReader( rule ) );
Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
List list = new ArrayList();
session.setGlobal( "list",
list );
session.insert( new Integer( 5 ) );
session.fireAllRules();
assertEquals( new Integer( 5 ),
list.get( 0 ) );
builder = new PackageBuilder();
rule = "package org.drools.test\n" +
"global java.util.List list\n" +
"rule xxx when\n" +
"then\n" +
" list.add(\"x\");\n" +
"end";
builder.addPackageFromDrl( new StringReader( rule ) );
pkg = builder.getPackage();
// Make sure that this rule is fired as the Package is updated, it also tests that InitialFactImpl is still in the network
// even though the first rule didn't use it.
ruleBase.addPackage( pkg );
session.fireAllRules();
assertEquals( "x",
list.get( 1 ) );
}
@Test
public void testEvalRewriteWithSpecialOperators() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_EvalRewriteWithSpecialOperators.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
final Order order1 = new Order( 10,
"Bob" );
final OrderItem item11 = new OrderItem( order1,
1 );
final OrderItem item12 = new OrderItem( order1,
2 );
order1.addItem( item11 );
order1.addItem( item12 );
final Order order2 = new Order( 11,
"Bob" );
final OrderItem item21 = new OrderItem( order2,
1 );
final OrderItem item22 = new OrderItem( order2,
2 );
order2.addItem( item21 );
order2.addItem( item22 );
final Order order3 = new Order( 12,
"Bob" );
final OrderItem item31 = new OrderItem( order3,
1 );
final OrderItem item32 = new OrderItem( order3,
2 );
final OrderItem item33 = new OrderItem( order3,
3 );
order3.addItem( item31 );
order3.addItem( item32 );
order3.addItem( item33 );
final Order order4 = new Order( 13,
"Bob" );
final OrderItem item41 = new OrderItem( order4,
1 );
final OrderItem item42 = new OrderItem( order4,
2 );
order4.addItem( item41 );
order4.addItem( item42 );
final Order order5 = new Order( 14,
"Mark" );
final OrderItem item51 = new OrderItem( order5,
1 );
final OrderItem item52 = new OrderItem( order5,
2 );
order5.addItem( item51 );
order5.addItem( item52 );
workingMemory.insert( order1 );
workingMemory.insert( item11 );
workingMemory.insert( item12 );
workingMemory.insert( order2 );
workingMemory.insert( item21 );
workingMemory.insert( item22 );
workingMemory.insert( order3 );
workingMemory.insert( item31 );
workingMemory.insert( item32 );
workingMemory.insert( item33 );
workingMemory.insert( order4 );
workingMemory.insert( item41 );
workingMemory.insert( item42 );
workingMemory.insert( order5 );
workingMemory.insert( item51 );
workingMemory.insert( item52 );
workingMemory.fireAllRules();
assertEquals( 9,
list.size() );
int index = 0;
assertEquals( item11,
list.get( index++ ) );
assertEquals( item12,
list.get( index++ ) );
assertEquals( item21,
list.get( index++ ) );
assertEquals( item22,
list.get( index++ ) );
assertEquals( item31,
list.get( index++ ) );
assertEquals( item33,
list.get( index++ ) );
assertEquals( item41,
list.get( index++ ) );
assertEquals( order5,
list.get( index++ ) );
assertEquals( order5,
list.get( index++ ) );
}
@Test
public void testImportCollision() throws Exception {
final PackageBuilder builder = new PackageBuilder();
final PackageBuilder builder2 = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "nested1.drl" ) ) );
builder2.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "nested2.drl" ) ) );
final Package pkg = builder.getPackage();
final Package pkg2 = builder2.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase.addPackage( pkg2 );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
workingMemory.insert( new FirstClass() );
workingMemory.insert( new SecondClass() );
workingMemory.insert( new FirstClass.AlternativeKey() );
workingMemory.insert( new SecondClass.AlternativeKey() );
workingMemory.fireAllRules();
}
@Test
public void testAutovivificationOfVariableRestrictions() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_AutoVivificationVR.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
workingMemory.insert( new Cheese( "stilton",
10,
8 ) );
workingMemory.fireAllRules();
assertEquals( 1,
results.size() );
}
@Test
public void testShadowProxyOnCollections() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_ShadowProxyOnCollections.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
final Cheesery cheesery = new Cheesery();
workingMemory.insert( cheesery );
workingMemory.fireAllRules();
assertEquals( 1,
results.size() );
assertEquals( 1,
cheesery.getCheeses().size() );
assertEquals( results.get( 0 ),
cheesery.getCheeses().get( 0 ) );
}
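// Variation of the previous test using a custom Set implementation (MockPersistentSet);
// the rule is expected to match on the set contents and set the object's message to "show".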
@Test
public void testShadowProxyOnCollections2() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_ShadowProxyOnCollections2.drl" ) ) );
final Package pkg = builder.getPackage();
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
final StatefulSession workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
List list = new ArrayList();
list.add( "example1" );
list.add( "example2" );
MockPersistentSet mockPersistentSet = new MockPersistentSet( false );
mockPersistentSet.addAll( list );
org.drools.ObjectWithSet objectWithSet = new ObjectWithSet();
objectWithSet.setSet( mockPersistentSet );
workingMemory.insert( objectWithSet );
workingMemory.fireAllRules();
assertEquals( 1,
results.size() );
assertEquals( "show",
objectWithSet.getMessage() );
}
@Test
public void testNestedAccessors() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_NestedAccessors.drl" ) ) );
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( builder.getPackage() );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
final Order order1 = new Order( 11,
"Bob" );
final OrderItem item11 = new OrderItem( order1,
1 );
final OrderItem item12 = new OrderItem( order1,
2 );
order1.addItem( item11 );
order1.addItem( item12 );
workingMemory.insert( order1 );
workingMemory.insert( item11 );
workingMemory.insert( item12 );
workingMemory.fireAllRules();
assertEquals( 0,
list.size() );
final Order order2 = new Order( 12,
"Mark" );
Order.OrderStatus status = new Order.OrderStatus();
status.setActive( true );
order2.setStatus( status );
final OrderItem item21 = new OrderItem( order2,
1 );
final OrderItem item22 = new OrderItem( order2,
2 );
order2.addItem( item21 );
order2.addItem( item22 );
workingMemory.insert( order2 );
workingMemory.insert( item21 );
workingMemory.insert( item22 );
workingMemory.fireAllRules();
assertEquals( 2,
list.size() );
assertSame( item21,
list.get( 0 ) );
assertSame( item22,
list.get( 1 ) );
}
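// Regression test: inserting facts and firing the rules in test_Logger.drl (a network with
// unbalanced branches) must not raise an exception.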
@Test
public void testWorkingMemoryLoggerWithUnbalancedBranches() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_Logger.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory wm = ruleBase.newStatefulSession();
try {
wm.fireAllRules();
wm.insert( new Cheese( "a",
10 ) );
wm.insert( new Cheese( "b",
11 ) );
wm.fireAllRules();
} catch ( Exception e ) {
e.printStackTrace();
fail( "No exception should be raised " );
}
}
@Test
public void testFromNestedAccessors() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_FromNestedAccessors.drl" ) ) );
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( builder.getPackage() );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
final Order order1 = new Order( 11,
"Bob" );
final OrderItem item11 = new OrderItem( order1,
1 );
final OrderItem item12 = new OrderItem( order1,
2 );
order1.addItem( item11 );
order1.addItem( item12 );
workingMemory.insert( order1 );
workingMemory.insert( item11 );
workingMemory.insert( item12 );
workingMemory.fireAllRules();
assertEquals( 1,
list.size() );
assertSame( order1.getStatus(),
list.get( 0 ) );
}
@Test
public void testFromArrayIteration() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_FromArrayIteration.drl" ) ) );
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( builder.getPackage() );
final WorkingMemory session = ruleBase.newStatefulSession();
List list = new ArrayList();
session.setGlobal( "list",
list );
session.insert( new DomainObjectHolder() );
session.fireAllRules();
assertEquals( 3,
list.size() );
assertEquals( "Message3",
list.get( 0 ) );
assertEquals( "Message2",
list.get( 1 ) );
assertEquals( "Message1",
list.get( 2 ) );
}
@Test
public void testSubNetworks() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_SubNetworks.drl" ) ) );
RuleBase ruleBase = getSinglethreadRuleBase();
try {
ruleBase.addPackage( builder.getPackage() );
ruleBase = SerializationHelper.serializeObject( ruleBase );
} catch ( Exception e ) {
e.printStackTrace();
fail( "Should not raise any exception!" );
}
}
@Test
public void testFinalClass() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_FinalClass.drl" ) ) );
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( builder.getPackage() );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
final PersonFinal bob = new PersonFinal();
bob.setName( "bob" );
bob.setStatus( null );
workingMemory.insert( bob );
workingMemory.fireAllRules();
assertEquals( 1,
list.size() );
// Dynamic addition of rules that use the final class is not supported yet
// final PackageBuilder builder2 = new PackageBuilder();
// builder2.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_FinalClass2.drl" ) ) );
// ruleBase.addPackage( builder2.getPackage() );
//
// // it will automatically fire the rule
// assertEquals( 2,
// list.size() );
}
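// Checks that the 'matches' operator keeps working when it has to be rewritten into an eval()
// (test_EvalRewriteMatches.drl); both order items are expected to end up in the results.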
@Test
public void testEvalRewriteMatches() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_EvalRewriteMatches.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
final Order order1 = new Order( 14,
"Mark" );
final OrderItem item11 = new OrderItem( order1,
1 );
final OrderItem item12 = new OrderItem( order1,
2 );
order1.addItem( item11 );
order1.addItem( item12 );
workingMemory.insert( order1 );
workingMemory.insert( item11 );
workingMemory.insert( item12 );
workingMemory.fireAllRules();
assertEquals( 2,
list.size() );
assertTrue( list.contains( item11 ) );
assertTrue( list.contains( item12 ) );
}
@Test
public void testConsequenceBuilderException() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_ConsequenceBuilderException.drl" ) ) );
assertTrue( builder.hasErrors() );
}
@Test
public void testRuntimeTypeCoercion() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_RuntimeTypeCoercion.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
final PolymorphicFact fact = new PolymorphicFact( new Integer( 10 ) );
final FactHandle handle = workingMemory.insert( fact );
workingMemory.fireAllRules();
assertEquals( 1,
list.size() );
assertEquals( fact.getData(),
list.get( 0 ) );
fact.setData( "10" );
workingMemory.update( handle,
fact );
workingMemory.fireAllRules();
assertEquals( 2,
list.size() );
assertEquals( fact.getData(),
list.get( 1 ) );
try {
fact.setData( new Boolean( true ) );
workingMemory.update( handle,
fact );
assertEquals( 2,
list.size() );
} catch ( ClassCastException cce ) {
// acceptable: instead of simply not firing, the engine may throw a ClassCastException
// while evaluating the constraint against the Boolean value
}
}
@Test
public void testRuntimeTypeCoercion2() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_RuntimeTypeCoercion2.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
final Primitives fact = new Primitives();
fact.setBooleanPrimitive( true );
fact.setBooleanWrapper( new Boolean( true ) );
fact.setObject( new Boolean( true ) );
fact.setCharPrimitive( 'X' );
final FactHandle handle = workingMemory.insert( fact );
workingMemory.fireAllRules();
int index = 0;
assertEquals( list.toString(),
4,
list.size() );
assertEquals( "boolean",
list.get( index++ ) );
assertEquals( "boolean wrapper",
list.get( index++ ) );
assertEquals( "boolean object",
list.get( index++ ) );
assertEquals( "char",
list.get( index++ ) );
fact.setBooleanPrimitive( false );
fact.setBooleanWrapper( null );
fact.setCharPrimitive( '\0' );
fact.setObject( new Character( 'X' ) );
workingMemory.update( handle,
fact );
workingMemory.fireAllRules();
assertEquals( 5,
list.size() );
assertEquals( "char object",
list.get( index++ ) );
fact.setObject( null );
workingMemory.update( handle,
fact );
workingMemory.fireAllRules();
assertEquals( 6,
list.size() );
assertEquals( "null object",
list.get( index++ ) );
}
@Test
public void testAlphaEvalWithOrCE() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_AlphaEvalWithOrCE.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
FactA a = new FactA();
a.setField1( "a value" );
workingMemory.insert( a );
workingMemory.insert( new FactB() );
workingMemory.insert( new FactC() );
workingMemory.fireAllRules();
assertEquals( "should not have fired",
0,
list.size() );
}
@Test
public void testModifyRetractAndModifyInsert() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_ModifyRetractInsert.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
Person bob = new Person( "Bob" );
bob.setStatus( "hungry" );
workingMemory.insert( bob );
workingMemory.insert( new Cheese() );
workingMemory.insert( new Cheese() );
workingMemory.fireAllRules( 2 );
assertEquals( "should have fired only once",
1,
list.size() );
}
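// A single Person fact should satisfy the composite alpha constraints defined in
// test_AlphaCompositeConstraints.drl and fire exactly once.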
@Test
public void testAlphaCompositeConstraints() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_AlphaCompositeConstraints.drl" ) ) );
final Package pkg = builder.getPackage();
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
Person bob = new Person( "bob",
30 );
workingMemory.insert( bob );
workingMemory.fireAllRules();
assertEquals( 1,
list.size() );
}
@Test
public void testModifyBlock() throws Exception {
doModifyTest( "test_ModifyBlock.drl" );
}
@Test
public void testModifyBlockWithPolymorphism() throws Exception {
doModifyTest( "test_ModifyBlockWithPolymorphism.drl" );
}
private void doModifyTest(String drlResource) throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( drlResource ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final Package pkg = builder.getPackage();
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
Person bob = new Person( "Bob" );
bob.setStatus( "hungry" );
Cheese c = new Cheese();
workingMemory.insert( bob );
workingMemory.insert( c );
workingMemory.fireAllRules();
assertEquals( 10,
c.getPrice() );
assertEquals( "fine",
bob.getStatus() );
}
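// A modify block applied to a fact bound via 'from' should both update the fact (the zip code
// check below) and let other rules chain on the modified value (the results check).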
@Test
public void testModifyBlockWithFrom() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_ModifyBlockWithFrom.drl" ) ) );
final Package pkg = builder.getPackage();
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List results = new ArrayList();
workingMemory.setGlobal( "results",
results );
Person bob = new Person( "Bob" );
Address addr = new Address( "abc" );
bob.addAddress( addr );
workingMemory.insert( bob );
workingMemory.insert( addr );
workingMemory.fireAllRules();
// modify worked
assertEquals( "12345",
addr.getZipCode() );
// chaining worked
assertEquals( 1,
results.size() );
assertEquals( addr,
results.get( 0 ) );
}
// this test requires mvel 1.2.19; it was originally commented out until that mvel release was available.
@Test
public void testJavaModifyBlock() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_JavaModifyBlock.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
Person bob = new Person( "Bob",
30 );
bob.setStatus( "hungry" );
workingMemory.insert( bob );
workingMemory.insert( new Cheese() );
workingMemory.insert( new Cheese() );
workingMemory.insert( new OuterClass.InnerClass( 1 ) );
workingMemory.fireAllRules();
assertEquals( 2,
list.size() );
assertEquals( "full",
bob.getStatus() );
assertEquals( 31,
bob.getAge() );
assertEquals( 2,
((OuterClass.InnerClass) list.get( 1 )).getIntAttr() );
}
@Test
public void testOrCE() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_OrCE.drl" ) ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
Package pkg = builder.getPackage();
pkg = SerializationHelper.serializeObject( pkg );
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
workingMemory.insert( new Cheese( "brie",
10 ) );
workingMemory.insert( new Person( "bob" ) );
workingMemory.fireAllRules();
assertEquals( "should have fired once",
1,
list.size() );
}
@Test
public void testOrWithAndUsingNestedBindings() {
String str = "";
str += "package org.drools\n";
str += "import org.drools.Person\n";
str += "global java.util.List mlist\n";
str += "global java.util.List jlist\n";
str += "rule rule1 dialect \"mvel\" \n";
str += "when\n";
str += "$a : Person( name == \"a\" )\n";
str += " (or $b : Person( name == \"b1\" )\n";
str += " (and $p : Person( name == \"p2\" )\n";
str += " $b : Person( name == \"b2\" ) )\n";
str += " (and $p : Person( name == \"p3\" )\n";
str += " $b : Person( name == \"b3\" ) )\n";
str += " )\n ";
str += "then\n";
str += " mlist.add( $b );\n";
str += "end\n";
str += "rule rule2 dialect \"java\" \n";
str += "when\n";
str += "$a : Person( name == \"a\" )\n";
str += " (or $b : Person( name == \"b1\" )\n";
str += " (and $p : Person( name == \"p2\" )\n";
str += " $b : Person( name == \"b2\" ) )\n";
str += " (and $p : Person( name == \"p3\" )\n";
str += " $b : Person( name == \"b3\" ) )\n";
str += " )\n ";
str += "then\n";
str += " jlist.add( $b );\n";
str += "end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
fail( kbuilder.getErrors().toString() );
}
Person a = new Person( "a" );
Person b1 = new Person( "b1" );
Person p2 = new Person( "p2" );
Person b2 = new Person( "b2" );
Person p3 = new Person( "p3" );
Person b3 = new Person( "b3" );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
List mlist = new ArrayList();
List jlist = new ArrayList();
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.setGlobal( "mlist",
mlist );
ksession.setGlobal( "jlist",
jlist );
ksession.insert( a );
ksession.insert( b1 );
ksession.fireAllRules();
assertEquals( b1,
mlist.get( 0 ) );
assertEquals( b1,
jlist.get( 0 ) );
ksession.dispose();
ksession = createKnowledgeSession(kbase);
ksession.setGlobal( "mlist",
mlist );
ksession.setGlobal( "jlist",
jlist );
ksession.insert( a );
ksession.insert( b2 );
ksession.insert( p2 );
ksession.fireAllRules();
assertEquals( b2,
mlist.get( 1 ) );
assertEquals( b2,
jlist.get( 1 ) );
ksession.dispose();
ksession = createKnowledgeSession(kbase);
ksession.setGlobal( "mlist",
mlist );
ksession.setGlobal( "jlist",
jlist );
ksession.insert( a );
ksession.insert( b3 );
ksession.insert( p3 );
ksession.fireAllRules();
assertEquals( b3,
mlist.get( 2 ) );
assertEquals( b3,
jlist.get( 2 ) );
}
@Test
public void testFieldBindingOnWrongFieldName() {
//JBRULES-2527
String str = "";
str += "package org.drools\n";
str += "import org.drools.Person\n";
str += "global java.util.List mlist\n";
str += "rule rule1 \n";
str += "when\n";
str += " Person( $f : invalidFieldName, eval( $f != null ) )\n";
str += "then\n";
str += "end\n";
try {
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
if ( !kbuilder.hasErrors() ) {
fail( "KnowledgeBuilder should have errors" );
}
} catch ( Exception e ) {
fail( "Exception should not be thrown " );
}
str = "";
str += "package org.drools\n";
str += "import org.drools.Person\n";
str += "global java.util.List mlist\n";
str += "rule rule1 \n";
str += "when\n";
str += " Person( $f : invalidFieldName, name == ( $f ) )\n";
str += "then\n";
str += "end\n";
try {
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
if ( !kbuilder.hasErrors() ) {
fail( "KnowledgeBuilder should have errors" );
}
} catch ( Exception e ) {
e.printStackTrace();
fail( "Exception should not be thrown " );
}
}
@Test
public void testDeepNestedConstraints() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_DeepNestedConstraints.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
workingMemory.insert( new Person( "bob",
"muzzarela" ) );
workingMemory.insert( new Cheese( "brie",
10 ) );
workingMemory.insert( new Cheese( "muzzarela",
80 ) );
workingMemory.fireAllRules();
assertEquals( "should have fired twice",
2,
list.size() );
}
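// With EQUALITY assert behaviour, getFactHandle() must resolve the handle using equals(),
// so an equal-but-not-identical CheeseEqual instance still finds the inserted fact.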
@Test
public void testGetFactHandleEqualityBehavior() throws Exception {
final RuleBaseConfiguration conf = new RuleBaseConfiguration();
conf.setAssertBehaviour( RuleBaseConfiguration.AssertBehaviour.EQUALITY );
RuleBase ruleBase = RuleBaseFactory.newRuleBase( conf );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final StatefulSession session = ruleBase.newStatefulSession();
CheeseEqual cheese = new CheeseEqual( "stilton",
10 );
session.insert( cheese );
FactHandle fh = session.getFactHandle( new CheeseEqual( "stilton",
10 ) );
assertNotNull( fh );
}
@Test
public void testGetFactHandleIdentityBehavior() throws Exception {
final RuleBaseConfiguration conf = new RuleBaseConfiguration();
conf.setAssertBehaviour( RuleBaseConfiguration.AssertBehaviour.IDENTITY );
RuleBase ruleBase = RuleBaseFactory.newRuleBase( conf );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final StatefulSession session = ruleBase.newStatefulSession();
CheeseEqual cheese = new CheeseEqual( "stilton",
10 );
session.insert( cheese );
FactHandle fh1 = session.getFactHandle( new Cheese( "stilton",
10 ) );
assertNull( fh1 );
FactHandle fh2 = session.getFactHandle( cheese );
assertNotNull( fh2 );
}
@Test
public void testOrCEFollowedByEval() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_OrCEFollowedByEval.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
workingMemory.setGlobal( "results",
list );
workingMemory.insert( new FactA( "X" ) );
InternalFactHandle b = (InternalFactHandle) workingMemory.insert( new FactB( "X" ) );
workingMemory.fireAllRules();
assertEquals( "should have fired",
2,
list.size() );
assertTrue( list.contains( b.getObject() ) );
}
@Test
public void testNPEOnMVELAlphaPredicates() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_NPEOnMVELPredicate.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
final StatefulSession session = ruleBase.newStatefulSession();
final List list = new ArrayList();
session.setGlobal( "results",
list );
Cheese cheese = new Cheese( "stilton",
10 );
Cheesery cheesery = new Cheesery();
cheesery.addCheese( cheese );
Person bob = new Person( "bob",
"stilton" );
Cheese cheese2 = new Cheese();
bob.setCheese( cheese2 );
FactHandle p = session.insert( bob );
FactHandle c = session.insert( cheesery );
session.fireAllRules();
assertEquals( "should not have fired",
0,
list.size() );
cheese2.setType( "stilton" );
session.update( p,
bob );
session.fireAllRules();
assertEquals( 1,
list.size() );
}
@Test
public void testModifyWithLockOnActive() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_ModifyWithLockOnActive.drl" ) ) );
final Package pkg = builder.getPackage();
RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
ruleBase = SerializationHelper.serializeObject( ruleBase );
StatefulSession session = ruleBase.newStatefulSession();
final List results = new ArrayList();
session.setGlobal( "results",
results );
final Person bob = new Person( "Bob",
15 );
final Person mark = new Person( "Mark",
16 );
final Person michael = new Person( "Michael",
14 );
session.insert( bob );
session.insert( mark );
session.insert( michael );
session.setFocus( "feeding" );
session.fireAllRules( 5 );
assertEquals( 2,
((List) session.getGlobal( "results" )).size() );
}
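// Regression test for an NPE when parentheses are used in constraints (test_ParenthesisUsage.drl);
// each inserted Person is expected to be collected exactly once.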
@Test
public void testNPEOnParenthesis() throws Exception {
final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newInputStreamResource( getClass().getResourceAsStream( "test_ParenthesisUsage.drl" ) ),
ResourceType.DRL );
final KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
final List<Person> results = new ArrayList<Person>();
final StatefulKnowledgeSession session = createKnowledgeSession(kbase);
session.setGlobal( "results",
results );
Person bob = new Person( "Bob",
20 );
bob.setAlive( true );
Person foo = new Person( "Foo",
0 );
foo.setAlive( false );
session.insert( bob );
session.fireAllRules();
assertEquals( 1,
results.size() );
assertEquals( bob,
results.get( 0 ) );
session.insert( foo );
session.fireAllRules();
assertEquals( 2,
results.size() );
assertEquals( foo,
results.get( 1 ) );
}
@Test
public void testEvalWithLineBreaks() throws Exception {
final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newInputStreamResource( getClass().getResourceAsStream( "test_EvalWithLineBreaks.drl" ) ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
fail( kbuilder.getErrors().toString() );
}
final KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
final List<Person> results = new ArrayList<Person>();
final StatefulKnowledgeSession session = createKnowledgeSession(kbase);
session.setGlobal( "results",
results );
session.insert( Integer.valueOf( 10 ) );
session.fireAllRules();
assertEquals( 1,
results.size() );
assertEquals( Integer.valueOf( 10 ),
results.get( 0 ) );
}
@Test
public void testDRLWithoutPackageDeclaration() throws Exception {
final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newInputStreamResource( getClass().getResourceAsStream( "test_NoPackageDeclaration.drl" ) ),
ResourceType.DRL );
final KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
// no package defined, so it is set to the default
final FactType factType = kbase.getFactType( "defaultpkg",
"Person" );
assertNotNull( factType );
final Object bob = factType.newInstance();
factType.set( bob,
"name",
"Bob" );
factType.set( bob,
"age",
Integer.valueOf( 30 ) );
final StatefulKnowledgeSession session = createKnowledgeSession(kbase);
final List results = new ArrayList();
session.setGlobal( "results",
results );
session.insert( bob );
session.fireAllRules();
assertEquals( 1,
results.size() );
assertEquals( bob,
results.get( 0 ) );
}
@Test
public void testKnowledgeContextJava() {
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "test_KnowledgeContextJava.drl",
getClass() ),
ResourceType.DRL );
KnowledgeBuilderErrors errors = kbuilder.getErrors();
if ( errors.size() > 0 ) {
for ( KnowledgeBuilderError error : errors ) {
logger.warn( error.toString() );
}
throw new IllegalArgumentException( "Could not parse knowledge." );
}
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List<String> list = new ArrayList<String>();
ksession.setGlobal( "list",
list );
ksession.insert( new Message() );
ksession.fireAllRules();
assertEquals( 1,
list.size() );
assertEquals( "Hello World",
list.get( 0 ) );
}
@Test
public void testListOfMaps() {
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "test_TestMapVariableRef.drl",
getClass() ),
ResourceType.DRL );
KnowledgeBuilderErrors errors = kbuilder.getErrors();
if ( errors.size() > 0 ) {
for ( KnowledgeBuilderError error : errors ) {
logger.warn( error.toString() );
}
throw new IllegalArgumentException( "Could not parse knowledge." );
}
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List<Map<String, Object>> list = new ArrayList<Map<String, Object>>();
Map mapOne = new HashMap<String, Object>();
Map mapTwo = new HashMap<String, Object>();
mapOne.put( "MSG",
"testMessage" );
mapTwo.put( "MSGTWO",
"testMessage" );
list.add( mapOne );
list.add( mapTwo );
ksession.insert( list );
ksession.fireAllRules();
assertEquals( 3,
list.size() );
}
@Test
public void testKnowledgeContextMVEL() {
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "test_KnowledgeContextMVEL.drl",
getClass() ),
ResourceType.DRL );
KnowledgeBuilderErrors errors = kbuilder.getErrors();
if ( errors.size() > 0 ) {
for ( KnowledgeBuilderError error : errors ) {
logger.warn( error.toString() );
}
throw new IllegalArgumentException( "Could not parse knowledge." );
}
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List<String> list = new ArrayList<String>();
ksession.setGlobal( "list",
list );
ksession.insert( new Message() );
ksession.fireAllRules();
assertEquals( 1,
list.size() );
assertEquals( "Hello World",
list.get( 0 ) );
}
@Test
public void testJBRules2055() {
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "test_JBRules2055.drl",
getClass() ),
ResourceType.DRL );
KnowledgeBuilderErrors errors = kbuilder.getErrors();
if ( errors.size() > 0 ) {
for ( KnowledgeBuilderError error : errors ) {
logger.warn( error.toString() );
}
throw new IllegalArgumentException( "Could not parse knowledge." );
}
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List<String> results = new ArrayList<String>();
ksession.setGlobal( "results",
results );
ksession.insert( new Cheese( "stilton" ) );
ksession.insert( new Cheese( "brie" ) );
ksession.insert( new Cheese( "muzzarella" ) );
ksession.insert( new Person( "bob",
"stilton" ) );
ksession.fireAllRules();
assertEquals( 2,
results.size() );
assertEquals( "stilton",
results.get( 0 ) );
assertEquals( "brie",
results.get( 1 ) );
}
@Test
public void testJBRules2369() {
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "test_JBRules2369.drl",
getClass() ),
ResourceType.DRL );
KnowledgeBuilderErrors errors = kbuilder.getErrors();
if ( errors.size() > 0 ) {
for ( KnowledgeBuilderError error : errors ) {
logger.warn( error.toString() );
}
fail( "Error loading test_JBRules2369" );
}
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List<String> results = new ArrayList<String>();
ksession.setGlobal( "results",
results );
FactA a = new FactA();
FactB b = new FactB( Integer.valueOf( 0 ) );
org.drools.runtime.rule.FactHandle aHandle = ksession.insert( a );
org.drools.runtime.rule.FactHandle bHandle = ksession.insert( b );
ksession.fireAllRules();
assertEquals( 1,
results.size() );
ksession.update( aHandle,
a );
ksession.fireAllRules();
assertEquals( 2,
results.size() );
}
@Test
public void testInsertionOrder() {
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "test_InsertionOrder.drl",
getClass() ),
ResourceType.DRL );
KnowledgeBuilderErrors errors = kbuilder.getErrors();
if ( errors.size() > 0 ) {
for ( KnowledgeBuilderError error : errors ) {
logger.warn( error.toString() );
}
throw new IllegalArgumentException( "Could not parse knowledge." );
}
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List<String> results = new ArrayList<String>();
ksession.setGlobal( "results",
results );
ksession.insert( new Move( 1,
2 ) );
ksession.insert( new Move( 2,
3 ) );
Win win2 = new Win( 2 );
Win win3 = new Win( 3 );
ksession.fireAllRules();
assertEquals( 2,
results.size() );
assertTrue( results.contains( win2 ) );
assertTrue( results.contains( win3 ) );
ksession.dispose();
ksession = createKnowledgeSession(kbase);
results = new ArrayList<String>();
ksession.setGlobal( "results",
results );
// reverse the order of the inserts
ksession.insert( new Move( 2,
3 ) );
ksession.insert( new Move( 1,
2 ) );
ksession.fireAllRules();
assertEquals( 2,
results.size() );
assertTrue( results.contains( win2 ) );
assertTrue( results.contains( win3 ) );
}
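// Calls fireAllRules() from one thread while another thread is blocked inside fireUntilHalt();
// fireAllRules() must return on its own, and the fireUntilHalt() thread must stop once halt()
// is called.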
@Test
public void testFireAllWhenFiringUntilHalt() {
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
final StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
Runnable fireUntilHalt = new Runnable() {
public void run() {
ksession.fireUntilHalt();
}
};
Runnable fireAllRules = new Runnable() {
public void run() {
ksession.fireAllRules();
}
};
Thread t1 = new Thread( fireUntilHalt );
Thread t2 = new Thread( fireAllRules );
t1.start();
try {
Thread.sleep( 500 );
} catch ( InterruptedException e ) {
}
t2.start();
// give t2 a chance to finish
try {
Thread.sleep( 1000 );
} catch ( InterruptedException e ) {
}
boolean aliveT2 = t2.isAlive();
ksession.halt();
try {
Thread.sleep( 1000 );
} catch ( InterruptedException e ) {
}
boolean aliveT1 = t1.isAlive();
if ( t2.isAlive() ) {
t2.interrupt();
}
if ( t1.isAlive() ) {
t1.interrupt();
}
assertFalse( "T2 should have finished",
aliveT2 );
assertFalse( "T1 should have finished",
aliveT1 );
}
@Test @Ignore
public void testFireUntilHaltFailingAcrossEntryPoints() throws Exception {
String rule1 = "package org.drools\n";
rule1 += "global java.util.List list\n";
rule1 += "rule testFireUntilHalt\n";
rule1 += "when\n";
rule1 += " Cheese()\n";
rule1 += " $p : Person() from entry-point \"testep2\"\n";
rule1 += "then \n";
rule1 += " list.add( $p ) ;\n";
rule1 += "end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( rule1.getBytes() ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
logger.info( kbuilder.getErrors().toString() );
throw new RuntimeException( kbuilder.getErrors().toString() );
}
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
final StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
final WorkingMemoryEntryPoint ep = ksession.getWorkingMemoryEntryPoint( "testep2" );
List list = new ArrayList();
ksession.setGlobal( "list",
list );
ksession.insert( new Cheese( "cheddar" ) );
ksession.fireAllRules();
Runnable fireUntilHalt = new Runnable() {
public void run() {
ksession.fireUntilHalt();
}
};
Thread t1 = new Thread( fireUntilHalt );
t1.start();
Thread.sleep( 500 );
ep.insert( new Person( "darth" ) );
Thread.sleep( 500 );
ksession.halt();
t1.join( 5000 );
boolean alive = t1.isAlive();
if ( alive ) {
t1.interrupt();
}
assertFalse( "Thread should have died!",
alive );
assertEquals( 1,
list.size() );
}
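// Regression test for a network build error when mixing entry points and 'from': rule1 fires
// for the person taken from the global list and inserts it, which in turn lets rule2 fire,
// leaving three entries in the list (the Person plus the two rule names).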
@Test
public void testNetworkBuildErrorAcrossEntryPointsAndFroms() throws Exception {
String rule1 = "package org.drools\n";
rule1 += "global java.util.List list\n";
rule1 += "rule rule1\n";
rule1 += "when\n";
rule1 += " Cheese() from entry-point \"testep\"\n";
rule1 += " $p : Person() from list\n";
rule1 += "then \n";
rule1 += " list.add( \"rule1\" ) ;\n";
rule1 += " insert( $p );\n";
rule1 += "end\n";
rule1 += "rule rule2\n";
rule1 += "when\n";
rule1 += " $p : Person() \n";
rule1 += "then \n";
rule1 += " list.add( \"rule2\" ) ;\n";
rule1 += "end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( rule1.getBytes() ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
logger.info( kbuilder.getErrors().toString() );
throw new RuntimeException( kbuilder.getErrors().toString() );
}
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
final StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
final WorkingMemoryEntryPoint ep = ksession.getWorkingMemoryEntryPoint( "testep" );
List list = new ArrayList();
ksession.setGlobal( "list",
list );
list.add( new Person( "darth" ) );
ep.insert( new Cheese( "cheddar" ) );
ksession.fireAllRules();
assertEquals( 3,
list.size() );
}
@Test
public void testBindingToMissingField() throws Exception {
// JBRULES-3047
String rule1 = "package org.drools\n";
rule1 += "rule rule1\n";
rule1 += "when\n";
rule1 += " Integer( $i : noSuchField ) \n";
rule1 += " eval( $i > 0 )\n";
rule1 += "then \n";
rule1 += "end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( rule1.getBytes() ),
ResourceType.DRL );
if ( !kbuilder.hasErrors() ) {
fail( "this should have errors" );
}
}
@Test
public void testJBRules2140() {
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "test_JBRules2140.drl",
getClass() ),
ResourceType.DRL );
KnowledgeBuilderErrors errors = kbuilder.getErrors();
if ( errors.size() > 0 ) {
for ( KnowledgeBuilderError error : errors ) {
logger.warn( error.toString() );
}
throw new IllegalArgumentException( "Could not parse knowledge." );
}
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List<String> results = new ArrayList<String>();
ksession.setGlobal( "results",
results );
ksession.fireAllRules();
assertEquals( 2,
results.size() );
assertTrue( results.contains( "java" ) );
assertTrue( results.contains( "mvel" ) );
}
@Test
public void testGeneratedBeansSerializable() throws Exception {
final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newInputStreamResource( getClass().getResourceAsStream( "test_GeneratedBeansSerializable.drl" ) ),
ResourceType.DRL );
assertFalse( kbuilder.getErrors().toString(),
kbuilder.hasErrors() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
// test kbase serialization
kbase = SerializationHelper.serializeObject( kbase );
// Retrieve the generated fact type
FactType cheeseFact = kbase.getFactType( "org.drools.generatedbeans",
"Cheese" );
assertTrue( "Generated beans must be serializable",
Serializable.class.isAssignableFrom( cheeseFact.getFactClass() ) );
// Create a new Fact instance
Object cheese = cheeseFact.newInstance();
cheeseFact.set( cheese,
"type",
"stilton" );
// another instance
Object cheese2 = cheeseFact.newInstance();
cheeseFact.set( cheese2,
"type",
"brie" );
// creating a stateful session
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List<Number> results = new ArrayList<Number>();
ksession.setGlobal( "results",
results );
// inserting fact
ksession.insert( cheese );
ksession.insert( cheese2 );
// firing rules
ksession.fireAllRules();
// checking results
assertEquals( 1,
results.size() );
assertEquals( 2,
results.get( 0 ).intValue() );
}
@Test
public void testAddRemoveListeners() throws Exception {
final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newInputStreamResource( getClass().getResourceAsStream( "test_AddRemoveListeners.drl" ) ),
ResourceType.DRL );
assertFalse( kbuilder.getErrors().toString(),
kbuilder.hasErrors() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
// creating listener as a mockito mock
final org.drools.event.rule.WorkingMemoryEventListener wmeListener = mock( org.drools.event.rule.WorkingMemoryEventListener.class );
ksession.addEventListener( wmeListener );
// listener will be notified of both facts insertion
ksession.insert( new Cheese( "stilton" ) );
ksession.insert( wmeListener );
// firing rules will remove listener
ksession.fireAllRules();
// inserting another object into the working memory; the listener should NOT be notified,
// since it is no longer listening.
ksession.insert( new Cheese( "brie" ) );
verify( wmeListener,
times( 2 ) ).objectInserted( any( org.drools.event.rule.ObjectInsertedEvent.class ) );
}
@Test
public void testInsert() throws Exception {
String drl = "";
drl += "package test\n";
drl += "import org.drools.Person\n";
drl += "import org.drools.Pet\n";
drl += "import java.util.ArrayList\n";
drl += "rule test\n";
drl += "when\n";
drl += "$person:Person()\n";
drl += "$pets : ArrayList()\n";
drl += " from collect( \n";
drl += " Pet(\n";
drl += " ownerName == $person.name\n";
drl += " )\n";
drl += " )\n";
drl += "then\n";
drl += "end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newReaderResource( new StringReader( drl ) ),
ResourceType.DRL );
KnowledgeBuilderErrors errors = kbuilder.getErrors();
if ( errors.size() > 0 ) {
fail( errors.toString() );
}
assertFalse( kbuilder.hasErrors() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.insert( new Person( "Toni" ) );
ksession.insert( new Pet( "Toni" ) );
}
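// Regression test: 'memberOf' combined with '||' inside a collect pattern used to fail;
// here the rule only needs to build and fire without errors, so there are no assertions.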
@Test
public void testMemberOfNotWorkingWithOr() throws Exception {
String rule = "";
rule += "package org.drools;\n";
rule += "import java.util.ArrayList;\n";
rule += "import org.drools.Person;\n";
rule += "rule \"Test Rule\"\n";
rule += "when\n";
rule += " $list: ArrayList() \n";
rule += " ArrayList() \n";
rule += " from collect( \n";
rule += " Person( \n";
rule += " ( \n";
rule += " pet memberOf $list \n";
rule += " ) || ( \n";
rule += " pet == null \n";
rule += " ) \n";
rule += " ) \n";
rule += " )\n";
rule += "then\n";
rule += " System.out.println(\"hello person\");\n";
rule += "end";
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new StringReader( rule ) );
final org.drools.rule.Package pkg = builder.getPackage();
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
StatefulSession session = ruleBase.newStatefulSession();
Person toni = new Person( "Toni",
12 );
toni.setPet( new Pet( "Mittens" ) );
session.insert( new ArrayList() );
session.insert( toni );
session.fireAllRules();
}
@Test
public void testUnNamed() throws Exception {
String rule = "";
rule += "package org.drools;\n";
rule += "import java.util.ArrayList;\n";
rule += "import org.drools.Person;\n";
rule += "rule \"Test Rule\"\n";
rule += "when\n";
rule += " $list: ArrayList() \n";
rule += " ArrayList() \n";
rule += " from collect( \n";
rule += " Person( \n";
rule += " ( \n";
rule += " pet memberOf $list \n";
rule += " ) || ( \n";
rule += " pet == null \n";
rule += " ) \n";
rule += " ) \n";
rule += " )\n";
rule += "then\n";
rule += " System.out.println(\"hello person\");\n";
rule += "end";
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new StringReader( rule ) );
final org.drools.rule.Package pkg = builder.getPackage();
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
StatefulSession session = ruleBase.newStatefulSession();
Person toni = new Person( "Toni",
12 );
toni.setPet( new Pet( "Mittens" ) );
session.insert( new ArrayList() );
session.insert( toni );
session.fireAllRules();
}
@Test
// this isn't possible: we can only narrow with type safety, not widen,
// unless typesafe=false is used (as this test does via the declare statement)
public void testAccessFieldsFromSubClass() throws Exception {
// Exception in ClassFieldAccessorStore line: 116
String rule = "";
rule += "package org.drools;\n";
rule += "import org.drools.Person;\n";
rule += "import org.drools.Pet;\n";
rule += "import org.drools.Cat;\n";
rule += "declare Person @typesafe(false) end\n";
rule += "rule \"Test Rule\"\n";
rule += "when\n";
rule += " Person(\n";
rule += " pet.breed == \"Siamise\"\n";
rule += " )\n";
rule += "then\n";
rule += "System.out.println(\"hello person\");\n";
rule += "end";
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new StringReader( rule ) );
if ( builder.hasErrors() ) {
fail( builder.getErrors().toString() );
}
final org.drools.rule.Package pkg = builder.getPackage();
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
StatefulSession session = ruleBase.newStatefulSession();
Person person = new Person();
person.setPet( new Cat( "Mittens" ) );
session.insert( person );
session.fireAllRules();
}
@Test
public void testGenericsInRHS() throws Exception {
String rule = "";
rule += "package org.drools;\n";
rule += "import java.util.Map;\n";
rule += "import java.util.HashMap;\n";
rule += "rule \"Test Rule\"\n";
rule += " when\n";
rule += " then\n";
rule += " Map<String,String> map = new HashMap<String,String>();\n";
rule += "end";
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new StringReader( rule ) );
final org.drools.rule.Package pkg = builder.getPackage();
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
StatefulSession session = ruleBase.newStatefulSession();
assertNotNull( session );
}
@Test
public void testActivationListener() throws Exception {
String rule = "";
rule += "package org.drools;\n";
rule += "import java.util.Map;\n";
rule += "import java.util.HashMap;\n";
rule += "rule \"Test Rule\" @activationListener('blah')\n";
rule += " when\n";
rule += " String( this == \"xxx\" )\n ";
rule += " then\n";
rule += "end";
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new StringReader( rule ) );
final org.drools.rule.Package pkg = builder.getPackage();
RuleBaseConfiguration conf = new RuleBaseConfiguration();
final List list = new ArrayList();
conf.addActivationListener( "blah",
new ActivationListenerFactory() {
public TerminalNode createActivationListener( int id,
LeftTupleSource source,
org.drools.rule.Rule rule,
GroupElement subrule,
int subruleIndex,
BuildContext context,
Object... args ) {
return new RuleTerminalNode( id,
source,
rule,
subrule,
0,
context ) {
@Override
public void assertLeftTuple( LeftTuple tuple,
PropagationContext context,
InternalWorkingMemory workingMemory ) {
list.add( "inserted" );
}
@Override
public void modifyLeftTuple( LeftTuple leftTuple,
PropagationContext context,
InternalWorkingMemory workingMemory ) {
list.add( "updated" );
}
@Override
public void retractLeftTuple( LeftTuple leftTuple,
PropagationContext context,
InternalWorkingMemory workingMemory ) {
list.add( "retracted" );
}
};
}
} );
final RuleBase ruleBase = getRuleBase( conf );
ruleBase.addPackage( pkg );
StatefulSession session = ruleBase.newStatefulSession();
FactHandle fh = session.insert( "xxx" );
session.update( fh,
"xxx" );
session.retract( fh );
assertEquals( "inserted",
list.get( 0 ) );
assertEquals( "updated",
list.get( 1 ) );
assertEquals( "retracted",
list.get( 2 ) );
assertNotNull( session );
}
@Test
public void testAccessingMapValues() throws Exception {
String rule = "";
rule += "package org.drools;\n";
rule += "import org.drools.Pet;\n";
rule += "rule \"Test Rule\"\n";
rule += " when\n";
rule += " $pet: Pet()\n";
rule += " Pet( \n";
rule += " ownerName == $pet.attributes[\"key\"] \n";
rule += " )\n";
rule += " then\n";
rule += " System.out.println(\"hi pet\");\n";
rule += "end";
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new StringReader( rule ) );
final org.drools.rule.Package pkg = builder.getPackage();
final RuleBase ruleBase = getSinglethreadRuleBase();
ruleBase.addPackage( pkg );
StatefulSession session = ruleBase.newStatefulSession();
assertNotNull( session );
Pet pet1 = new Pet( "Toni" );
pet1.getAttributes().put( "key",
"value" );
Pet pet2 = new Pet( "Toni" );
session.insert( pet1 );
session.insert( pet2 );
session.fireAllRules();
}
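// Compiles several DRL files with a single KnowledgeBuilder; originally used to inspect class
// loader cache hits (see the commented-out ClassLoaderCacheOption and dumpStats() lines), now
// it only asserts that everything compiles cleanly.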
@Test
public void testClassLoaderHits() throws Exception {
final KnowledgeBuilderConfiguration conf = KnowledgeBuilderFactory.newKnowledgeBuilderConfiguration();
//conf.setOption( ClassLoaderCacheOption.DISABLED );
final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder( conf );
kbuilder.add( ResourceFactory.newInputStreamResource( getClass().getResourceAsStream( "test_GeneratedBeansMVEL.drl" ) ),
ResourceType.DRL );
kbuilder.add( ResourceFactory.newInputStreamResource( getClass().getResourceAsStream( "test_GeneratedBeans.drl" ) ),
ResourceType.DRL );
kbuilder.add( ResourceFactory.newInputStreamResource( getClass().getResourceAsStream( "test_NullFieldOnCompositeSink.drl" ) ),
ResourceType.DRL );
assertFalse( kbuilder.getErrors().toString(),
kbuilder.hasErrors() );
//((CompositeClassLoader)((PackageBuilderConfiguration)conf).getClassLoader()).dumpStats();
}
@Test
public void testMVELConsequenceWithoutSemiColon1() throws Exception {
String drl = "";
drl += "package test\n";
drl += "import org.drools.Person\n";
drl += "import org.drools.Pet\n";
drl += "rule test dialect 'mvel'\n";
drl += "when\n";
drl += "$person:Person()\n";
drl += "$pet:Pet()\n";
drl += "then\n";
drl += " retract($person) // some comment\n";
drl += " retract($pet) // another comment\n";
drl += "end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newReaderResource( new StringReader( drl ) ),
ResourceType.DRL );
KnowledgeBuilderErrors errors = kbuilder.getErrors();
if ( errors.size() > 0 ) {
for ( KnowledgeBuilderError error : errors ) {
logger.warn( error.toString() );
}
throw new IllegalArgumentException( "Could not parse knowledge." );
}
assertFalse( kbuilder.hasErrors() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
// create working memory mock listener
org.drools.event.rule.WorkingMemoryEventListener wml = Mockito.mock( org.drools.event.rule.WorkingMemoryEventListener.class );
ksession.addEventListener( wml );
org.drools.runtime.rule.FactHandle personFH = ksession.insert( new Person( "Toni" ) );
org.drools.runtime.rule.FactHandle petFH = ksession.insert( new Pet( "Toni" ) );
int fired = ksession.fireAllRules();
assertEquals( 1,
fired );
// capture the arguments and check that the retracts happened
ArgumentCaptor<org.drools.event.rule.ObjectRetractedEvent> retracts = ArgumentCaptor.forClass( org.drools.event.rule.ObjectRetractedEvent.class );
verify( wml,
times( 2 ) ).objectRetracted( retracts.capture() );
List<org.drools.event.rule.ObjectRetractedEvent> values = retracts.getAllValues();
assertThat( values.get( 0 ).getFactHandle(),
is( personFH ) );
assertThat( values.get( 1 ).getFactHandle(),
is( petFH ) );
}
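// Rule metadata (@id, @author, @text) declared in the DRL should be readable through the
// Rule's meta attributes, including the escaped quote inside the @text value.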
@Test
public void testRuleMetaAttributes() throws Exception {
String drl = "";
drl += "package test\n";
drl += "rule \"test meta attributes\"\n";
drl += " @id(1234 ) @author( john_doe ) @text(\"It's an escaped\\\" string\" )\n";
drl += "when\n";
drl += "then\n";
drl += " // some comment\n";
drl += "end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newReaderResource( new StringReader( drl ) ),
ResourceType.DRL );
KnowledgeBuilderErrors errors = kbuilder.getErrors();
if ( errors.size() > 0 ) {
for ( KnowledgeBuilderError error : errors ) {
logger.warn( error.toString() );
}
throw new IllegalArgumentException( "Could not parse knowledge." );
}
assertFalse( kbuilder.hasErrors() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
Rule rule = kbase.getRule( "test",
"test meta attributes" );
assertNotNull( rule );
assertThat( rule.getMetaAttribute( "id" ),
is( "1234" ) );
assertThat( rule.getMetaAttribute( "author" ),
is( "john_doe" ) );
assertThat( rule.getMetaAttribute( "text" ),
is( "It's an escaped\" string" ) );
}
// following test depends on MVEL: http://jira.codehaus.org/browse/MVEL-212
@Test
public void testMVELConsequenceUsingFactConstructors() throws Exception {
String drl = "";
drl += "package test\n";
drl += "import org.drools.Person\n";
drl += "global org.drools.runtime.StatefulKnowledgeSession ksession\n";
drl += "rule test dialect 'mvel'\n";
drl += "when\n";
drl += " $person:Person( name == 'mark' )\n";
drl += "then\n";
drl += " // below constructor for Person does not exist\n";
drl += " Person p = new Person( 'bob', 30, 555 )\n";
drl += " ksession.update(ksession.getFactHandle($person), new Person('bob', 30, 999, 453, 534, 534, 32))\n";
drl += "end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newReaderResource( new StringReader( drl ) ),
ResourceType.DRL );
KnowledgeBuilderErrors errors = kbuilder.getErrors();
assertTrue( kbuilder.hasErrors() );
}
@Test
public void testRuleChainingWithLogicalInserts() throws Exception {
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newClassPathResource( "test_RuleChaining.drl",
getClass() ),
ResourceType.DRL );
KnowledgeBuilderErrors errors = kbuilder.getErrors();
if ( errors.size() > 0 ) {
for ( KnowledgeBuilderError error : errors ) {
logger.warn( error.toString() );
}
throw new IllegalArgumentException( "Could not parse knowledge." );
}
assertFalse( kbuilder.hasErrors() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
// create working memory mock listener
org.drools.event.rule.WorkingMemoryEventListener wml = Mockito.mock( org.drools.event.rule.WorkingMemoryEventListener.class );
org.drools.event.rule.AgendaEventListener ael = Mockito.mock( org.drools.event.rule.AgendaEventListener.class );
ksession.addEventListener( wml );
ksession.addEventListener( ael );
int fired = ksession.fireAllRules();
assertEquals( 3,
fired );
// capture the arguments and check that the rules fired in the proper sequence
ArgumentCaptor<org.drools.event.rule.AfterActivationFiredEvent> actvs = ArgumentCaptor.forClass( org.drools.event.rule.AfterActivationFiredEvent.class );
verify( ael,
times( 3 ) ).afterActivationFired( actvs.capture() );
List<org.drools.event.rule.AfterActivationFiredEvent> values = actvs.getAllValues();
assertThat( values.get( 0 ).getActivation().getRule().getName(),
is( "init" ) );
assertThat( values.get( 1 ).getActivation().getRule().getName(),
is( "r1" ) );
assertThat( values.get( 2 ).getActivation().getRule().getName(),
is( "r2" ) );
verify( ael,
never() ).activationCancelled( any( org.drools.event.rule.ActivationCancelledEvent.class ) );
verify( wml,
times( 2 ) ).objectInserted( any( org.drools.event.rule.ObjectInsertedEvent.class ) );
verify( wml,
never() ).objectRetracted( any( org.drools.event.rule.ObjectRetractedEvent.class ) );
}
@Test
public void testOrWithReturnValueRestriction() throws Exception {
String fileName = "test_OrWithReturnValue.drl";
KnowledgeBase kbase = loadKnowledgeBase( fileName );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.insert( new Cheese( "brie",
18 ) );
ksession.insert( new Cheese( "stilton",
8 ) );
ksession.insert( new Cheese( "brie",
28 ) );
int fired = ksession.fireAllRules();
assertEquals( 2,
fired );
}
@Test
public void testFromExprFollowedByNot() {
String rule = "";
rule += "package org.drools\n";
rule += "rule \"Rule 1\"\n";
rule += " when\n";
rule += " Person ($var: pet )\n";
rule += " Pet () from $var\n";
rule += " not Pet ()\n";
rule += " then\n";
rule += " System.out.println(\"Fire in the hole\");\n";
rule += "end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newReaderResource( new StringReader( rule ) ),
ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
Iterator<KnowledgeBuilderError> errors = kbuilder.getErrors().iterator();
while ( errors.hasNext() ) {
logger.info( "kbuilder error: " + errors.next().getMessage() );
}
}
assertFalse( kbuilder.hasErrors() );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
}
@Test
public void testLastMemoryEntryNotBug() {
// JBRULES-2809
        // This occurs when a blocker is the last in the node's memory, or when there is only one fact in the node,
        // and it gets no opportunity to rematch with itself
String str = "";
str += "package org.simple \n";
str += "import " + A.class.getCanonicalName() + "\n";
str += "global java.util.List list \n";
str += "rule x1 \n";
str += "when \n";
str += " $s : String( this == 'x1' ) \n";
str += " not A( this != null ) \n";
str += "then \n";
str += " list.add(\"fired x1\"); \n";
str += "end \n";
str += "rule x2 \n";
str += "when \n";
str += " $s : String( this == 'x2' ) \n";
str += " not A( field1 == $s, this != null ) \n"; // this ensures an index bucket
str += "then \n";
str += " list.add(\"fired x2\"); \n";
str += "end \n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List list = new ArrayList();
ksession.setGlobal( "list",
list );
ksession.insert( "x1" );
ksession.insert( "x2" );
A a1 = new A( "x1",
null );
A a2 = new A( "x2",
null );
FactHandle fa1 = (FactHandle) ksession.insert( a1 );
FactHandle fa2 = (FactHandle) ksession.insert( a2 );
        // make sure the 'not' is obeyed when the fact is cycled, causing an add/remove in the node memory
ksession.update( fa1,
a1 );
ksession.update( fa2,
a2 );
ksession.fireAllRules();
assertEquals( 0,
list.size() );
ksession.dispose();
}
@Test
public void testLastMemoryEntryExistsBug() {
// JBRULES-2809
        // This occurs when a blocker is the last in the node's memory, or when there is only one fact in the node,
        // and it gets no opportunity to rematch with itself
String str = "";
str += "package org.simple \n";
str += "import " + A.class.getCanonicalName() + "\n";
str += "global java.util.List list \n";
str += "rule x1 \n";
str += "when \n";
str += " $s : String( this == 'x1' ) \n";
str += " exists A( this != null ) \n";
str += "then \n";
str += " list.add(\"fired x1\"); \n";
str += "end \n";
str += "rule x2 \n";
str += "when \n";
str += " $s : String( this == 'x2' ) \n";
str += " exists A( field1 == $s, this != null ) \n"; // this ensures an index bucket
str += "then \n";
str += " list.add(\"fired x2\"); \n";
str += "end \n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List list = new ArrayList();
ksession.setGlobal( "list",
list );
ksession.insert( "x1" );
ksession.insert( "x2" );
A a1 = new A( "x1",
null );
A a2 = new A( "x2",
null );
FactHandle fa1 = (FactHandle) ksession.insert( a1 );
FactHandle fa2 = (FactHandle) ksession.insert( a2 );
        // make sure the 'exists' is obeyed when the fact is cycled, causing an add/remove in the node memory
ksession.update( fa1,
a1 );
ksession.update( fa2,
a2 );
ksession.fireAllRules();
assertEquals( 2,
list.size() );
ksession.dispose();
}
@Test
public void testNotIterativeModifyBug() {
// JBRULES-2809
        // This bug occurs when a tuple is modified: the remove/add puts it at the end of the node memory.
        // However, before this was done the node would attempt to find the next tuple, starting from itself.
        // This meant it would just re-add itself as the blocker, and then be moved to the end of the memory.
        // If this tuple was then removed or changed, the blocked tuple was unable to check previous tuples.
        // (A short summary of the Do/Undo update pattern used here follows this test method.)
String str = "";
str += "package org.simple \n";
str += "import " + A.class.getCanonicalName() + "\n";
str += "global java.util.List list \n";
str += "rule xxx \n";
str += "when \n";
str += " $f1 : A() \n";
str += " not A(this != $f1, eval(field2 == $f1.getField2())) \n";
str += " eval( !$f1.getField1().equals(\"1\") ) \n";
str += "then \n";
str += " list.add($f1); \n";
str += "end \n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List list = new ArrayList();
ksession.setGlobal( "list",
list );
A a1 = new A( "2",
"2" );
A a2 = new A( "1",
"2" );
A a3 = new A( "1",
"2" );
FactHandle fa1 = (FactHandle) ksession.insert( a1 );
FactHandle fa2 = (FactHandle) ksession.insert( a2 );
FactHandle fa3 = (FactHandle) ksession.insert( a3 );
ksession.fireAllRules();
// a1 is blocked by a2
assertEquals( 0,
list.size() );
// modify a2, so that a1 is now blocked by a3
a2.setField2( "1" ); // Do
ksession.update( fa2,
a2 );
a2.setField2( "2" ); // Undo
ksession.update( fa2,
a2 );
        // modify a3 to cycle, so that it goes to the end of the memory; in a previous bug it still blocked a1
ksession.update( fa3,
a3 );
a3.setField2( "1" ); // Do
ksession.update( fa3,
a3 );
ksession.fireAllRules();
assertEquals( 0,
                      list.size() ); // a1 should still be blocked by a2, but a bug from the previous update kept a stale reference to the blocked tuple
ksession.dispose();
}
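    /*
     * Added explanation (inferred from the test above, not from Drools documentation): the "Do"/"Undo"
     * pairs of setField2() + ksession.update() force the 'not' node to drop and re-add the tuple in its
     * memory. A minimal sketch of the pattern, reusing the session and handles from the test above:
     *
     *   a2.setField2( "1" );        // change the fact so a different tuple becomes the blocker
     *   ksession.update( fa2, a2 ); // cycle the tuple through the node memory
     *   a2.setField2( "2" );        // restore the original value
     *   ksession.update( fa2, a2 ); // cycle it again; no stale blocker reference may survive
     *
     * The final assertion that the list stays empty verifies that a1 is still correctly blocked afterwards.
     */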
@Test
public void testModifyWithLiaToEval() {
String str = "";
str += "package org.simple \n";
str += "import " + Person.class.getCanonicalName() + "\n";
str += "global java.util.List list \n";
str += "rule xxx \n";
str += "when \n";
str += " $p : Person() \n";
str += " eval( $p.getAge() > 30 ) \n";
str += "then \n";
str += " list.add($p); \n";
str += "end \n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List list = new ArrayList();
ksession.setGlobal( "list",
list );
Person p1 = new Person("darth", 25);
org.drools.runtime.rule.FactHandle fh = ksession.insert( p1 );
ksession.fireAllRules();
assertEquals( 0, list.size() );
p1.setAge( 35 );
ksession.update( fh, p1 );
ksession.fireAllRules();
assertEquals( 1, list.size() );
ksession.dispose();
}
@Test
public void testExistsIterativeModifyBug() {
// JBRULES-2809
        // This bug occurs when a tuple is modified: the remove/add puts it at the end of the node memory.
        // However, before this was done the node would attempt to find the next tuple, starting from itself.
        // This meant it would just re-add itself as the blocker, and then be moved to the end of the memory.
        // If this tuple was then removed or changed, the blocked tuple was unable to check previous tuples.
String str = "";
str += "package org.simple \n";
str += "import " + A.class.getCanonicalName() + "\n";
str += "global java.util.List list \n";
str += "rule xxx \n";
str += "when \n";
str += " $f1 : A() \n";
str += " exists A(this != $f1, eval(field2 == $f1.getField2())) \n";
str += " eval( !$f1.getField1().equals(\"1\") ) \n";
str += "then \n";
str += " list.add($f1); \n";
str += "end \n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
List list = new ArrayList();
ksession.setGlobal( "list",
list );
A a1 = new A( "2",
"2" );
A a2 = new A( "1",
"2" );
A a3 = new A( "1",
"2" );
FactHandle fa1 = (FactHandle) ksession.insert( a1 );
FactHandle fa2 = (FactHandle) ksession.insert( a2 );
FactHandle fa3 = (FactHandle) ksession.insert( a3 );
// a2, a3 are blocked by a1
// modify a1, so that a1,a3 are now blocked by a2
a1.setField2( "1" ); // Do
ksession.update( fa1,
a1 );
a1.setField2( "2" ); // Undo
ksession.update( fa1,
a1 );
// modify a2, so that a1,a2 are now blocked by a3
a2.setField2( "1" ); // Do
ksession.update( fa2,
a2 );
a2.setField2( "2" ); // Undo
ksession.update( fa2,
a2 );
        // modify a3 to cycle, so that it goes to the end of the memory; in a previous bug it still blocked a1
ksession.update( fa3,
a3 );
a3.setField2( "1" ); // Do
ksession.update( fa3,
a3 );
ksession.fireAllRules();
assertEquals( 1,
                      list.size() ); // a2 should still be blocked by a1, but a bug from the previous update kept a stale reference to the blocked tuple
ksession.dispose();
}
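    /*
     * Added summary (inferred from the assertions in the JBRULES-2809 tests above): the 'not' variants
     * expect the cycled facts to keep blocking the matching tuples, so their result lists stay empty,
     * while the 'exists' variants expect the corresponding rules to still fire after the same update
     * sequence. All of these tests exercise the same node-memory code path.
     */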
@Test
public void testBindingsWithOr() throws InstantiationException,
IllegalAccessException {
// JBRULES-2917: matching of field==v1 || field==v2 breaks when variable binding is added
String str = "package org.drools\n" +
"declare Assignment\n" +
" source : int\n" +
" target : int\n" +
"end\n" +
"rule ValueIsTheSame1\n" +
"when\n" +
" Assignment( $t: target == 10 || target == source )\n" +
"then\n" +
"end\n" +
"rule ValueIsTheSame2\n" +
"when\n" +
" Assignment( $t: target == source || target == 10 )\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
FactType asgType = kbase.getFactType( "org.drools",
"Assignment" );
Object asg = asgType.newInstance();
asgType.set( asg,
"source",
10 );
asgType.set( asg,
"target",
10 );
ksession.insert( asg );
int rules = ksession.fireAllRules();
ksession.dispose();
assertEquals( 2,
rules );
}
@Test
public void testMVELClassReferences() throws InstantiationException,
IllegalAccessException {
String str = "package org.drools\n" +
"declare Assignment\n" +
" source : Class\n" +
" target : Class\n" +
"end\n" +
"rule ObjectIsAssignable1\n" +
"when\n" +
" Assignment( $t: target == java.lang.Object.class || target == source )\n" +
"then\n" +
"end\n" +
"rule ObjectIsAssignable2\n" +
"when\n" +
" Assignment( $t: target == source || target == java.lang.Object.class )\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
FactType asgType = kbase.getFactType( "org.drools",
"Assignment" );
Object asg = asgType.newInstance();
asgType.set( asg,
"source",
Object.class );
asgType.set( asg,
"target",
Object.class );
ksession.insert( asg );
int rules = ksession.fireAllRules();
ksession.dispose();
assertEquals( 2,
rules );
}
@Test
public void testNotMatchesSucceeds() throws InstantiationException,
IllegalAccessException {
// JBRULES-2914: Rule misfires due to "not matches" not working
String str = "package org.drools\n" +
"rule NotMatches\n" +
"when\n" +
" Person( name == null || (name != null && name not matches \"-.{2}x.*\" ) )\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
Person p = new Person( "-..x..xrwx" );
ksession.insert( p );
int rules = ksession.fireAllRules();
ksession.dispose();
assertEquals( 0,
rules );
}
@Test
public void testNotMatchesFails() throws InstantiationException,
IllegalAccessException {
// JBRULES-2914: Rule misfires due to "not matches" not working
String str = "package org.drools\n" +
"rule NotMatches\n" +
"when\n" +
" Person( name == null || (name != null && name not matches \"-.{2}x.*\" ) )\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
Person p = new Person( "d..x..xrwx" );
ksession.insert( p );
int rules = ksession.fireAllRules();
ksession.dispose();
assertEquals( 1,
rules );
}
@Test
public void testNotEqualsOperator() {
// JBRULES-3003: restriction evaluation returns 'false' for "trueField != falseField"
String str = "package org.drools\n" +
"rule NotEquals\n" +
"when\n" +
" Primitives( booleanPrimitive != booleanWrapper )\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
Primitives p = new Primitives();
p.setBooleanPrimitive( true );
p.setBooleanWrapper( Boolean.FALSE );
ksession.insert( p );
int rules = ksession.fireAllRules();
ksession.dispose();
assertEquals( 1,
rules );
}
@Test
public void testNotContainsOperator() {
// JBRULES-2404: "not contains" operator doesn't work on nested fields
String str = "package org.drools\n" +
"rule NotContains\n" +
"when\n" +
" $oi : OrderItem( )\n" +
" $o : Order( items.values() not contains $oi )" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
Order order1 = new Order( 1,
"XYZ" );
Order order2 = new Order( 2,
"ABC" );
OrderItem item11 = new OrderItem( order1,
1 );
order1.addItem( item11 );
OrderItem item21 = new OrderItem( order2,
1 );
order2.addItem( item21 );
ksession.insert( order1 );
ksession.insert( item11 );
// should not fire, as item11 is contained in order1.items
int rules = ksession.fireAllRules();
assertEquals( 0,
rules );
// should fire as item21 is not contained in order1.items
ksession.insert( item21 );
rules = ksession.fireAllRules();
assertEquals( 1,
rules );
}
@Test
public void testOrWithFrom() {
// JBRULES-2274: Rule does not fire as expected using deep object model and nested 'or' clause
String str = "package org.drools\n" +
"rule NotContains\n" +
"when\n" +
" $oi1 : OrderItem( )\n" +
" $o1 : Order(number == 1) from $oi1.order; \n" +
" ( eval(true) or eval(true) )\n" +
" $oi2 : OrderItem( )\n" +
" $o2 : Order(number == 2) from $oi2.order; \n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
Order order1 = new Order( 1,
"XYZ" );
Order order2 = new Order( 2,
"ABC" );
OrderItem item11 = new OrderItem( order1,
1 );
order1.addItem( item11 );
OrderItem item21 = new OrderItem( order2,
1 );
order2.addItem( item21 );
ksession.insert( order1 );
ksession.insert( order2 );
ksession.insert( item11 );
ksession.insert( item21 );
int rules = ksession.fireAllRules();
assertEquals( 2,
rules );
}
@Test
public void testSoundsLike() {
// JBRULES-2991: Operator soundslike is broken
String str = "package org.drools\n" +
"rule SoundsLike\n" +
"when\n" +
" Person( name soundslike \"Bob\" )\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.insert( new Person( "Bob" ) );
ksession.insert( new Person( "Mark" ) );
int rules = ksession.fireAllRules();
assertEquals( 1,
rules );
}
@Test
public void testAgendaFilter1() {
String str = "package org.drools\n" +
"rule Aaa when then end\n" +
"rule Bbb when then end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
org.drools.event.rule.AgendaEventListener ael = mock( org.drools.event.rule.AgendaEventListener.class );
ksession.addEventListener( ael );
RuleNameStartsWithAgendaFilter af = new RuleNameStartsWithAgendaFilter( "B" );
int rules = ksession.fireAllRules( af );
assertEquals( 1,
rules );
ArgumentCaptor<org.drools.event.rule.AfterActivationFiredEvent> arg = ArgumentCaptor.forClass( org.drools.event.rule.AfterActivationFiredEvent.class );
verify( ael ).afterActivationFired( arg.capture() );
assertThat( arg.getValue().getActivation().getRule().getName(),
is( "Bbb" ) );
}
@Test
public void testAgendaFilter2() {
String str = "package org.drools\n" +
"rule Aaa when then end\n" +
"rule Bbb when then end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
org.drools.event.rule.AgendaEventListener ael = mock( org.drools.event.rule.AgendaEventListener.class );
ksession.addEventListener( ael );
RuleNameEndsWithAgendaFilter af = new RuleNameEndsWithAgendaFilter( "a" );
int rules = ksession.fireAllRules( af );
assertEquals( 1,
rules );
ArgumentCaptor<org.drools.event.rule.AfterActivationFiredEvent> arg = ArgumentCaptor.forClass( org.drools.event.rule.AfterActivationFiredEvent.class );
verify( ael ).afterActivationFired( arg.capture() );
assertThat( arg.getValue().getActivation().getRule().getName(),
is( "Aaa" ) );
}
@Test
public void testAgendaFilter3() {
String str = "package org.drools\n" +
"rule Aaa when then end\n" +
"rule Bbb when then end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
org.drools.event.rule.AgendaEventListener ael = mock( org.drools.event.rule.AgendaEventListener.class );
ksession.addEventListener( ael );
RuleNameMatchesAgendaFilter af = new RuleNameMatchesAgendaFilter( ".*b." );
int rules = ksession.fireAllRules( af );
assertEquals( 1,
rules );
ArgumentCaptor<org.drools.event.rule.AfterActivationFiredEvent> arg = ArgumentCaptor.forClass( org.drools.event.rule.AfterActivationFiredEvent.class );
verify( ael ).afterActivationFired( arg.capture() );
assertThat( arg.getValue().getActivation().getRule().getName(),
is( "Bbb" ) );
}
@Test
public void testAgendaFilter4() {
String str = "package org.drools\n" +
"rule Aaa when then end\n" +
"rule Bbb when then end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
org.drools.event.rule.AgendaEventListener ael = mock( org.drools.event.rule.AgendaEventListener.class );
ksession.addEventListener( ael );
RuleNameEqualsAgendaFilter af = new RuleNameEqualsAgendaFilter( "Aaa" );
int rules = ksession.fireAllRules( af );
assertEquals( 1,
rules );
ArgumentCaptor<org.drools.event.rule.AfterActivationFiredEvent> arg = ArgumentCaptor.forClass( org.drools.event.rule.AfterActivationFiredEvent.class );
verify( ael ).afterActivationFired( arg.capture() );
assertThat( arg.getValue().getActivation().getRule().getName(),
is( "Aaa" ) );
}
@Test
public void testRestrictionsWithOr() {
// JBRULES-2203: NullPointerException When Using Conditional Element "or" in LHS Together with a Return Value Restriction
String str = "package org.drools\n" +
"rule \"test\"\n" +
"when\n" +
" Cheese( price == (1 + 1) );\n" +
" (or eval(true);\n" +
" eval(true);\n" +
" )\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.insert( new Cheese( "Stilton",
2 ) );
int rules = ksession.fireAllRules();
assertEquals( 2,
rules );
}
@Test
public void testMapModel() {
String str = "package org.drools\n" +
"import java.util.Map\n" +
"rule \"test\"\n" +
"when\n" +
" Map( type == \"Person\", name == \"Bob\" );\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
Map<String, String> mark = new HashMap<String, String>();
mark.put( "type",
"Person" );
mark.put( "name",
"Mark" );
ksession.insert( mark );
int rules = ksession.fireAllRules();
assertEquals( 0,
rules );
Map<String, String> bob = new HashMap<String, String>();
bob.put( "type",
"Person" );
bob.put( "name",
"Bob" );
ksession.insert( bob );
rules = ksession.fireAllRules();
assertEquals( 1,
rules );
}
@Test
public void testConstraintExpression() {
String str = "package org.drools\n" +
"rule \"test\"\n" +
"when\n" +
" Person( 5*2 > 3 );\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.insert( new Person( "Bob" ) );
int rules = ksession.fireAllRules();
assertEquals( 1,
rules );
}
@Test
public void testMethodConstraint() {
String str = "package org.drools\n" +
"rule \"test\"\n" +
"when\n" +
" Person( isAlive() );\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
Person person = new Person( "Bob" );
person.setAlive( true );
ksession.insert( person );
int rules = ksession.fireAllRules();
assertEquals( 1,
rules );
}
@Test
public void testComplexOperator() {
String str = "package org.drools\n" +
"rule \"test in\"\n" +
"when\n" +
" Person( $name : name in (\"bob\", \"mark\") )\n" +
"then\n" +
" boolean test = $name != null;" +
"end\n" +
"rule \"test not in\"\n" +
"when\n" +
" Person( $name : name not in (\"joe\", \"doe\") )\n" +
"then\n" +
" boolean test = $name != null;" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
Person person = new Person( "bob" );
ksession.insert( person );
int rules = ksession.fireAllRules();
assertEquals( 2,
rules );
}
@Test
public void testEventsInDifferentPackages() {
String str = "package org.drools.test\n" +
"import org.drools.*\n" +
"declare StockTick\n" +
" @role( event )\n" +
"end\n" +
"rule r1\n" +
"when\n" +
"then\n" +
" StockTick st = new StockTick();\n" +
" st.setCompany(\"RHT\");\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
int rules = ksession.fireAllRules();
assertEquals( 1,
rules );
}
@Test
public void testClassTypeAttributes() {
String str = "package org.drools\n" +
"rule r1\n" +
"when\n" +
" Primitives( classAttr == null )" +
"then\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.insert( new Primitives() );
int rules = ksession.fireAllRules();
assertEquals( 1,
rules );
}
    @Test
    public void testFreeFormExpressions() {
String str = "package org.drools\n" +
"rule r1\n" +
"when\n" +
" $p1 : Person( age > 2*10, 10 < age )\n" +
" $p2 : Person( age > 2*$p1.age )\n" +
"then\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
Person bob = new Person( "bob",
30 );
Person mark = new Person( "mark",
61 );
ksession.insert( bob );
ksession.insert( mark );
int rules = ksession.fireAllRules();
assertEquals( 1,
rules );
}
@Test
public void testFreeFormExpressions2() {
String str = "package org.drools\n" +
"rule r1\n" +
"when\n" +
" $p1 : Cell( row == 2 )\n" +
" $p2 : Cell( row == $p1.row + 1, row == ($p1.row + 1), row == 1 + $p1.row, row == (1 + $p1.row) )\n" +
"then\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
Cell c1 = new Cell(1, 2, 0 );
Cell c2 = new Cell(1, 3, 0 );
ksession.insert( c1 );
ksession.insert( c2 );
int rules = ksession.fireAllRules();
assertEquals( 1,
rules );
}
@Test
public void testAddMissingResourceToPackageBuilder() throws Exception {
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
try {
kbuilder.add( ResourceFactory.newClassPathResource( "some.rf" ),
ResourceType.DRL );
fail( "adding a missing resource should fail" );
} catch ( RuntimeException e ) {
}
try {
kbuilder.add( ResourceFactory.newClassPathResource( "some.rf" ),
ResourceType.DRF );
fail( "adding a missing resource should fail" );
} catch ( RuntimeException e ) {
}
}
@Test
public void testJBRULES_2995() {
String str = "package org.drools\n" +
"rule r1\n" +
"when\n" +
" Primitives( classAttr == java.lang.String.class, \n" +
" eval(classAttr.equals( java.lang.String.class ) ),\n" +
" classAttr == String.class )\n" +
"then\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
Primitives primitives = new Primitives();
primitives.setClassAttr( String.class );
ksession.insert( primitives );
int rules = ksession.fireAllRules();
assertEquals( 1,
rules );
}
@Test
public void testJBRULES2872() {
String str = "package org.drools.test\n" +
"import org.drools.FactA\n" +
"rule X\n" +
"when\n" +
" FactA( enumVal == TestEnum.ONE || == TestEnum.TWO )\n" +
"then\n" +
"end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
assertTrue( kbuilder.hasErrors() );
KnowledgeBuilderErrors errors = kbuilder.getErrors();
logger.info( errors.toString() );
assertEquals( 1,
errors.size() );
KnowledgeBuilderError error = errors.iterator().next();
assertEquals( 5,
error.getLines()[0] );
}
@Test
public void testJBRULES3030() {
String str = "package org.drools\n" +
"rule X\n" +
"when\n" +
" $gp : GrandParent()" +
" $ch : ChildHolder( child == $gp )\n" +
"then\n" +
"end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
assertFalse( kbuilder.hasErrors() );
}
@Test
public void testJBRULES3111() {
String str = "package org.drools\n" +
"declare Bool123\n" +
" bool1 : boolean\n" +
" bool2 : boolean\n" +
" bool3 : boolean\n" +
"end\n" +
"declare Thing\n" +
" name : String\n" +
" bool123 : Bool123\n" +
"end\n" +
"rule kickOff\n" +
"when\n" +
"then\n" +
" insert( new Thing( \"one\", new Bool123( true, false, false ) ) );\n" +
" insert( new Thing( \"two\", new Bool123( false, false, false ) ) );\n" +
" insert( new Thing( \"three\", new Bool123( false, false, false ) ) );\n" +
"end\n" +
"rule r1\n" +
"when\n" +
" $t: Thing( bool123.bool1 == true )\n" +
"then\n" +
"end\n" +
"rule r2\n" +
"when\n" +
" $t: Thing( bool123.bool2 == true )\n" +
"then\n" +
"end\n" +
"rule r3\n" +
"when\n" +
" $t: Thing( bool123.bool3 == true )\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
org.drools.event.rule.AgendaEventListener ael = mock( org.drools.event.rule.AgendaEventListener.class );
ksession.addEventListener( ael );
int rulesFired = ksession.fireAllRules();
assertEquals( 2,
rulesFired );
ArgumentCaptor<org.drools.event.rule.AfterActivationFiredEvent> captor = ArgumentCaptor.forClass( org.drools.event.rule.AfterActivationFiredEvent.class );
verify( ael,
times( 2 ) ).afterActivationFired( captor.capture() );
List<org.drools.event.rule.AfterActivationFiredEvent> aafe = captor.getAllValues();
Assert.assertThat( aafe.get( 0 ).getActivation().getRule().getName(),
is( "kickOff" ) );
Assert.assertThat( aafe.get( 1 ).getActivation().getRule().getName(),
is( "r1" ) );
Object value = aafe.get( 1 ).getActivation().getDeclarationValue( "$t" );
String name = (String) MVEL.eval( "$t.name",
Collections.singletonMap( "$t",
value ) );
Assert.assertThat( name,
is( "one" ) );
}
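    /*
     * Added note (inferred from the code above): getDeclarationValue( "$t" ) returns the instance bound
     * to the declaration in the activation. Because Thing is declared in DRL and has no compile-time
     * class here, MVEL.eval( "$t.name", ... ) is used to read its "name" field reflectively instead of
     * casting to a concrete type.
     */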
@Test
public void testBigLiterals() {
String str = "package org.drools\n" +
"rule X\n" +
"when\n" +
" Primitives( bigInteger == 10I, bigInteger < (50I), bigDecimal == 10B, bigDecimal < (50B) )\n" +
"then\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
Primitives p = new Primitives();
p.setBigDecimal( BigDecimal.valueOf( 10 ) );
p.setBigInteger( BigInteger.valueOf( 10 ) );
ksession.insert( p );
int rulesFired = ksession.fireAllRules();
assertEquals( 1,
rulesFired );
}
@Test
public void testNonBooleanConstraint() {
String str = "package org.drools\n" +
"import java.util.List\n" +
"rule \"test\"\n" +
"when\n" +
" $p1: Person( name + name )\n" +
"then\n" +
"end";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
assertTrue(kbuilder.hasErrors());
logger.info(kbuilder.getErrors().toString());
}
@Test
public void testModifyJava() {
String str = "package org.drools\n" +
"import java.util.List\n" +
"rule \"test\"\n" +
"when\n" +
" $l : List() from collect ( Person( alive == false ) );\n" +
"then\n" +
" for(Object p : $l ) {\n" +
" Person p2 = (Person) p;\n" +
" modify(p2) { setAlive(true) }\n" +
" }\n" +
"end";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
Assert.assertFalse( kbuilder.getErrors().toString(),
kbuilder.hasErrors() );
}
@Test
public void testModifyMVEL() {
String str = "package org.drools\n" +
"import java.util.List\n" +
"rule \"test\"\n" +
" dialect \"mvel\"\n" +
"when\n" +
" $l : List() from collect ( Person( alive == false ) );\n" +
"then\n" +
" for(Object p : $l ) {\n" +
" Person p2 = (Person) p;\n" +
" modify(p2) { setAlive(true) }\n" +
" }\n" +
"end";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
ResourceType.DRL );
Assert.assertFalse( kbuilder.getErrors().toString(),
kbuilder.hasErrors() );
}
@Test
public void testPackageNameOfTheBeast() throws Exception {
        // JBRULES-2749: various rules stop firing when they are in an unlucky package name and a function is declared
String ruleFileContent1 = "package org.drools.integrationtests;\n" +
"function void myFunction() {\n" +
"}\n" +
"declare MyDeclaredType\n" +
" someProperty: boolean\n" +
"end";
String ruleFileContent2 = "package de.something;\n" + // FAILS
// String ruleFileContent2 = "package de.somethinga;\n" + // PASSES
// String ruleFileContent2 = "package de.somethingb;\n" + // PASSES
// String ruleFileContent2 = "package de.somethingc;\n" + // PASSES
// String ruleFileContent2 = "package de.somethingd;\n" + // PASSES
// String ruleFileContent2 = "package de.somethinge;\n" + // FAILS
// String ruleFileContent2 = "package de.somethingf;\n" + // FAILS
// String ruleFileContent2 = "package de.somethingg;\n" + // FAILS
"import org.drools.integrationtests.*;\n" +
"rule \"CheckMyDeclaredType\"\n" +
" when\n" +
" MyDeclaredType()\n" +
" then\n" +
" insertLogical(\"THIS-IS-MY-MARKER-STRING\");\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( ruleFileContent1,
ruleFileContent2 );
StatefulKnowledgeSession knowledgeSession = createKnowledgeSession(kbase);
final FactType myDeclaredFactType = kbase.getFactType( "org.drools.integrationtests",
"MyDeclaredType" );
Object myDeclaredFactInstance = myDeclaredFactType.newInstance();
knowledgeSession.insert( myDeclaredFactInstance );
int rulesFired = knowledgeSession.fireAllRules();
assertEquals( 1,
rulesFired );
knowledgeSession.dispose();
}
@Test
public void testGUVNOR578_2() throws Exception {
MapBackedClassLoader loader = new MapBackedClassLoader( this.getClass().getClassLoader() );
JarInputStream jis = new JarInputStream( this.getClass().getResourceAsStream( "/primespoc.jar" ) );
JarEntry entry = null;
byte[] buf = new byte[1024];
int len = 0;
while ( (entry = jis.getNextJarEntry()) != null ) {
if ( !entry.isDirectory() ) {
ByteArrayOutputStream out = new ByteArrayOutputStream();
while ( (len = jis.read( buf )) >= 0 ) {
out.write( buf,
0,
len );
}
loader.addResource( entry.getName(),
out.toByteArray() );
}
}
List<JarInputStream> jarInputStreams = new ArrayList<JarInputStream>();
jarInputStreams.add( jis );
Properties properties = new Properties();
properties.setProperty( DefaultPackageNameOption.PROPERTY_NAME,
"foo.bar" );
PackageBuilder builder = new PackageBuilder( new PackageBuilderConfiguration( properties,
loader ) );
PackageDescr pc = new PackageDescr( "foo.bar" );
builder.addPackage( pc );
String header = "import fr.gouv.agriculture.dag.agorha.business.primes.SousPeriodePrimeAgent\n";
builder.addPackageFromDrl( new StringReader( header ) );
assertFalse( builder.hasErrors() );
String passingRule = "rule \"rule1\"\n"
+ "dialect \"mvel\"\n"
+ "when\n"
+ "SousPeriodePrimeAgent( echelle == \"abc\" )"
+ "then\n"
+ "end\n";
String failingRule = "rule \"rule2\"\n"
+ "dialect \"mvel\"\n"
+ "when\n"
+ "SousPeriodePrimeAgent( quotiteRemuneration == 123 , echelle == \"abc\" )"
+ "then\n"
+ "end\n";
builder.addPackageFromDrl( new StringReader( passingRule ) );
if ( builder.hasErrors() ) {
logger.warn( builder.getErrors().getErrors()[0].getMessage() );
}
assertFalse( builder.hasErrors() );
builder.addPackageFromDrl( new StringReader( failingRule ) );
if ( builder.hasErrors() ) {
logger.warn( builder.getErrors().getErrors()[0].getMessage() );
}
assertFalse( builder.hasErrors() );
}
@Test
public void testJBRULES3323() throws Exception {
//adding rules. it is important to add both since they reciprocate
StringBuilder rule = new StringBuilder();
rule.append("package de.orbitx.accumulatetesettest;\n");
rule.append("import java.util.Set;\n");
rule.append("import java.util.HashSet;\n");
rule.append("import org.drools.Foo;\n");
rule.append("import org.drools.Bar;\n");
rule.append("rule \"Sub optimal foo parallelism - this rule is causing NPE upon reverse\"\n");
rule.append("when\n");
rule.append("$foo : Foo($leftId : id, $leftBar : bar != null)\n");
rule.append("$fooSet : Set()\n");
rule.append("from accumulate ( Foo(id > $leftId, bar != null && != $leftBar, $bar : bar),\n");
rule.append("collectSet( $bar ) )\n");
rule.append("then\n");
rule.append("//System.out.println(\"ok\");\n");
rule.append("end\n");
//building stuff
KnowledgeBase kbase = loadKnowledgeBaseFromString( rule.toString() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
//adding test data
Bar[] barList = new Bar[3];
for (int i = 0; i < barList.length; i++) {
barList[i] = new Bar( String.valueOf( i ) );
}
org.drools.Foo[] fooList = new org.drools.Foo[4];
for (int i = 0; i < fooList.length; i++) {
fooList[i] = new org.drools.Foo( String.valueOf( i ), i == 3 ? barList[2] : barList[i] );
}
for (org.drools.Foo foo : fooList) {
ksession.insert(foo);
}
        //the NPE is caused by exactly this sequence; of course there are more sequences, but this
        //appears to be the shortest one
int[] magicFoos = new int[] { 3, 3, 1, 1, 0, 0, 2, 2, 1, 1, 0, 0, 3, 3, 2, 2, 3, 1, 1 };
int[] magicBars = new int[] { 1, 2, 0, 1, 1, 0, 1, 2, 2, 1, 2, 0, 0, 2, 0, 2, 0, 0, 1 };
//upon final rule firing an NPE will be thrown in org.drools.rule.Accumulate
for (int i = 0; i < magicFoos.length; i++) {
org.drools.Foo tehFoo = fooList[magicFoos[i]];
org.drools.runtime.rule.FactHandle fooFactHandle = ksession.getFactHandle(tehFoo);
tehFoo.setBar(barList[magicBars[i]]);
ksession.update(fooFactHandle, tehFoo);
ksession.fireAllRules();
}
ksession.dispose();
}
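    /*
     * Added explanation (inferred from the loop above): each iteration re-points one Foo at a different
     * Bar and calls ksession.update(), which forces the accumulate node to reverse the previously
     * collected value and re-accumulate the new one. The magicFoos/magicBars indices encode the exact
     * update sequence that used to end in a NullPointerException inside org.drools.rule.Accumulate on
     * the final fireAllRules() call; the test passes as long as no exception is thrown.
     */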
@Test
public void testJBRULES3326() throws Exception {
StringBuilder rule = new StringBuilder();
rule.append("package org.drools\n");
rule.append("rule X\n");
rule.append("when\n");
rule.append(" Message(!!!false)\n");
rule.append("then\n");
rule.append("end\n");
//building stuff
KnowledgeBase kbase = loadKnowledgeBaseFromString( rule.toString() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.insert(new Message("test"));
int rules = ksession.fireAllRules();
assertEquals( 1, rules );
ksession.dispose();
}
@Test
public void testDispose() throws Exception {
StringBuilder rule = new StringBuilder();
rule.append("package org.drools\n");
rule.append("rule X\n");
rule.append("when\n");
rule.append(" Message()\n");
rule.append("then\n");
rule.append("end\n");
//building stuff
KnowledgeBase kbase = loadKnowledgeBaseFromString( rule.toString() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.insert(new Message("test"));
int rules = ksession.fireAllRules();
assertEquals( 1, rules );
ksession.dispose();
try {
// the following should raise an IllegalStateException as the session was already disposed
ksession.fireAllRules();
fail("An IllegallStateException should have been raised as the session was disposed before the method call.");
} catch (IllegalStateException ise ) {
// success
}
}
@Test
public void testInnerEnum() throws Exception {
StringBuilder rule = new StringBuilder();
rule.append("package org.drools\n");
rule.append("rule X\n");
rule.append("when\n");
rule.append(" Triangle( type == Triangle.Type.UNCLASSIFIED )\n");
rule.append("then\n");
rule.append("end\n");
//building stuff
KnowledgeBase kbase = loadKnowledgeBaseFromString( rule.toString() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
ksession.insert(new Triangle());
int rules = ksession.fireAllRules();
assertEquals( 1, rules );
ksession.dispose();
}
@Test
public void testNestedAccessors2() throws Exception {
String rule = "package org.drools\n" +
"rule 'rule1'" +
" salience 10\n" +
"when\n" +
" Cheesery( typedCheeses[0].type == 'stilton' );\n" +
"then\n" +
"end\n" +
"rule 'rule2'\n" +
"when\n" +
" Cheesery( typedCheeses[0].price == 10 );\n" +
"then\n" +
"end";
//building stuff
KnowledgeBase kbase = loadKnowledgeBaseFromString( rule.toString() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
org.drools.event.rule.AgendaEventListener ael = mock( org.drools.event.rule.AgendaEventListener.class );
ksession.addEventListener( ael );
Cheesery c1 = new Cheesery();
c1.addCheese( new Cheese("stilton", 20) );
Cheesery c2 = new Cheesery();
c2.addCheese( new Cheese("brie", 10) );
Cheesery c3 = new Cheesery();
c3.addCheese( new Cheese("muzzarella", 30) );
ksession.insert( c1 );
ksession.insert( c2 );
ksession.insert( c3 );
ksession.fireAllRules();
ArgumentCaptor<org.drools.event.rule.AfterActivationFiredEvent> captor = ArgumentCaptor.forClass( org.drools.event.rule.AfterActivationFiredEvent.class );
verify( ael, times(2) ).afterActivationFired( captor.capture() );
List<org.drools.event.rule.AfterActivationFiredEvent> values = captor.getAllValues();
assertThat( (Cheesery) values.get( 0 ).getActivation().getObjects().get( 0 ), is( c1 ) );
assertThat( (Cheesery) values.get( 1 ).getActivation().getObjects().get( 0 ), is( c2 ) );
ksession.dispose();
}
@Test
public void testMVELConstraintsWithFloatingPointNumbersInScientificNotation() {
String rule = "package test; \n" +
"dialect \"mvel\"\n" +
"global java.util.List list;" +
"\n" +
"declare Bean \n" +
" field : double \n" +
"end \n" +
"\n" +
"rule \"Init\" \n" +
"when \n" +
"then \n" +
"\t insert( new Bean( 1.0E-2 ) ); \n" +
"end \n" +
"\n" +
"rule \"Check\" \n" +
"when \n" +
"\t Bean( field < 1.0E-1 ) \n" +
"then \n" +
"\t list.add( \"OK\" ); \n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( rule );
StatefulKnowledgeSession kSession = kbase.newStatefulKnowledgeSession();
List<String> list = new ArrayList<String>();
kSession.setGlobal( "list", list );
kSession.fireAllRules();
assertEquals( 1 , list.size() );
}
public static class A {
private String field1;
private String field2;
public A(String field1,
String field2) {
this.field1 = field1;
this.field2 = field2;
}
public String getField1() {
return field1;
}
public void setField1( String field1 ) {
this.field1 = field1;
}
public String getField2() {
return field2;
}
public void setField2( String field2 ) {
this.field2 = field2;
}
public String toString() {
return "A) " + field1 + ":" + field2;
}
}
@Test
public void testMvelDoubleInvocation() throws Exception {
String rule = "package org.drools\n" +
"import org.drools.integrationtests.MiscTest.TestUtility;\n" +
"import org.drools.integrationtests.MiscTest.TestFact;\n" +
"rule \"First Rule\"\n" +
" when\n" +
" $tf : TestFact(TestUtility.utilMethod(s, \"Value1\") == true\n" +
" && i > 0\n" +
" )\n" +
" then\n" +
" System.out.println(\"First Rule Fires\");\n" +
"end\n" +
"\n" +
"rule \"Second Rule\"\n" +
" when\n" +
" $tf : TestFact(TestUtility.utilMethod(s, \"Value2\") == true\n" +
" && i > 0\n" +
" )\n" +
" then\n" +
" System.out.println(\"Second Rule Fires\");\n" +
"end\n" +
"\n" +
"rule \"Third Rule\"\n" +
" when\n" +
" $tf : TestFact(TestUtility.utilMethod(s, \"Value3\") == true\n" +
" && i > 0\n" +
" )\n" +
" then\n" +
" System.out.println(\"Third Rule Fires\");\n" +
"end ";
KnowledgeBase kbase = loadKnowledgeBaseFromString( rule.toString() );
StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
TestFact fact = new TestFact();
fact.setS("asdf");
fact.setI(10);
ksession.insert(fact);
ksession.fireAllRules();
ksession.dispose();
}
public static class TestUtility {
public static Boolean utilMethod(String s1, String s2) {
Boolean result = null;
if (s1 != null) {
result = s1.equals(s2);
}
logger.info("in utilMethod >" + s1 + "< >" + s2 + "< returns " + result);
return result;
}
}
public static class TestFact {
private int i;
private String s;
public int getI() {
return i;
}
public void setI(int i) {
this.i = i;
}
public String getS() {
return s;
}
public void setS(String s) {
this.s = s;
}
}
@Test
public void testUnwantedCoersion() throws Exception {
String rule = "package org.drools\n" +
"import org.drools.integrationtests.MiscTest.InnerBean;\n" +
"import org.drools.integrationtests.MiscTest.OuterBean;\n" +
"rule \"Test.Code One\"\n" +
"when\n" +
" OuterBean($code : inner.code in (\"1.50\", \"2.50\"))\n" +
"then\n" +
" System.out.println(\"Code compared values: 1.50, 2.50 - actual code value: \" + $code);\n" +
"end\n" +
"rule \"Test.Code Two\"\n" +
"when\n" +
" OuterBean($code : inner.code in (\"1.5\", \"2.5\"))\n" +
"then\n" +
" System.out.println(\"Code compared values: 1.5, 2.5 - actual code value: \" + $code);\n" +
"end\n" +
"rule \"Big Test ID One\"\n" +
"when\n" +
" OuterBean($id : id in (\"3.5\", \"4.5\"))\n" +
"then\n" +
" System.out.println(\"ID compared values: 3.5, 4.5 - actual ID value: \" + $id);\n" +
"end\n" +
"rule \"Big Test ID Two\"\n" +
"when\n" +
" OuterBean($id : id in ( \"3.0\", \"4.0\"))\n" +
"then\n" +
" System.out.println(\"ID compared values: 3.0, 4.0 - actual ID value: \" + $id);\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( rule );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
InnerBean innerTest = new InnerBean();
innerTest.setCode("1.500");
ksession.insert(innerTest);
OuterBean outerTest = new OuterBean();
outerTest.setId("3");
outerTest.setInner(innerTest);
ksession.insert(outerTest);
OuterBean outerTest2 = new OuterBean();
outerTest2.setId("3.0");
outerTest2.setInner(innerTest);
ksession.insert(outerTest2);
int rules = ksession.fireAllRules();
assertEquals(1, rules);
}
public static class InnerBean {
private String code;
public String getCode() {
return code;
}
public void setCode(String code) {
this.code = code;
}
}
public static class OuterBean {
private InnerBean inner;
private String id;
public InnerBean getInner() {
return inner;
}
public void setInner(InnerBean inner) {
this.inner = inner;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
}
@Test
public void testShiftOperator() throws Exception {
String rule = "dialect \"mvel\"\n" +
"rule kickOff\n" +
"when\n" +
"then\n" +
" insert( Integer.valueOf( 1 ) );\n" +
" insert( Long.valueOf( 1 ) );\n" +
" insert( Integer.valueOf( 65552 ) ); // 0x10010\n" +
" insert( Long.valueOf( 65552 ) );\n" +
" insert( Integer.valueOf( 65568 ) ); // 0x10020\n" +
" insert( Long.valueOf( 65568 ) );\n" +
" insert( Integer.valueOf( 65536 ) ); // 0x10000\n" +
" insert( Long.valueOf( 65536L ) );\n" +
" insert( Long.valueOf( 4294967296L ) ); // 0x100000000L\n" +
"end\n" +
"rule test1\n" +
" salience -1\n" +
"when\n" +
" $a: Integer( $one: intValue == 1 )\n" +
" $b: Integer( $shift: intValue )\n" +
" $c: Integer( $i: intValue, intValue == ($one << $shift ) )\n" +
"then\n" +
" System.out.println( \"test1 \" + $a + \" << \" + $b + \" = \" + Integer.toHexString( $c ) );\n" +
"end\n" +
"rule test2\n" +
" salience -2\n" +
"when\n" +
" $a: Integer( $one: intValue == 1 )\n" +
" $b: Long ( $shift: longValue )\n" +
" $c: Integer( $i: intValue, intValue == ($one << $shift ) )\n" +
"then\n" +
" System.out.println( \"test2 \" + $a + \" << \" + $b + \" = \" + Integer.toHexString( $c ) );\n" +
"end\n" +
"rule test3\n" +
" salience -3\n" +
"when\n" +
" $a: Long ( $one: longValue == 1 )\n" +
" $b: Long ( $shift: longValue )\n" +
" $c: Integer( $i: intValue, intValue == ($one << $shift ) )\n" +
"then\n" +
" System.out.println( \"test3 \" + $a + \" << \" + $b + \" = \" + Integer.toHexString( $c ) );\n" +
"end\n" +
"rule test4\n" +
" salience -4\n" +
"when\n" +
" $a: Long ( $one: longValue == 1 )\n" +
" $b: Integer( $shift: intValue )\n" +
" $c: Integer( $i: intValue, intValue == ($one << $shift ) )\n" +
"then\n" +
" System.out.println( \"test4 \" + $a + \" << \" + $b + \" = \" + Integer.toHexString( $c ) );\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( rule );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
int rules = ksession.fireAllRules();
assertEquals(13, rules);
}
@Test
public void testRecursiveDeclaration() throws Exception {
String rule = "package org.drools\n" +
"declare Node\n" +
" value: String\n" +
" parent: Node\n" +
"end\n" +
"rule R1 when\n" +
" $parent: Node( value == \"parent\" )\n" +
" $child: Node( $value : value, parent == $parent )\n" +
"then\n" +
" System.out.println( $value );\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( rule );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
FactType nodeType = kbase.getFactType( "org.drools", "Node" );
Object parent = nodeType.newInstance();
nodeType.set( parent, "value", "parent" );
ksession.insert( parent );
Object child = nodeType.newInstance();
nodeType.set( child, "value", "child" );
nodeType.set( child, "parent", parent );
ksession.insert( child );
int rules = ksession.fireAllRules();
assertEquals(1, rules);
}
@Test
public void testCircularDeclaration() throws Exception {
String rule = "package org.drools.test\n" +
"declare FactA\n" +
" fieldB: FactB\n" +
"end\n" +
"declare FactB\n" +
" fieldA: FactA\n" +
"end\n" +
"rule R1 when\n" +
" $fieldA : FactA( $fieldB : fieldB )\n" +
" FactB( this == $fieldB, fieldA == $fieldA )\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( rule );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
FactType aType = kbase.getFactType( "org.drools.test", "FactA" );
Object a = aType.newInstance();
FactType bType = kbase.getFactType( "org.drools.test", "FactB" );
Object b = bType.newInstance();
aType.set( a, "fieldB", b );
bType.set( b, "fieldA", a );
ksession.insert( a );
ksession.insert( b );
int rules = ksession.fireAllRules();
assertEquals(1, rules);
}
@Test
public void testPatternMatchingOnThis() throws Exception {
String rule = "package org.drools\n" +
"rule R1 when\n" +
" $i1: Integer()\n" +
" $i2: Integer( this > $i1 )\n" +
"then\n" +
" System.out.println( $i2 + \" > \" + $i1 );\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( rule );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
ksession.insert( new Integer(1) );
ksession.insert( new Integer(2) );
int rules = ksession.fireAllRules();
assertEquals(1, rules);
}
@Test
public void testArrayUsage() {
String str = "import org.drools.base.DroolsQuery;\n" +
"\n" +
"global java.util.List list;\n" +
"\n" +
"query extract( String s )\n" +
" Object() \n" +
"end\n" +
"\n" +
"rule \"Intercept\"\n" +
"when\n" +
" DroolsQuery( name == \"extract\", $args : elements )\n" +
" $s : String( this == $args[$s.length() - $s.length()] )\n" +
" $s1 : String( this == $args[0] )\n" +
" $s2 : String( this == $args[$args.length - $args.length] )\n" +
"then\n" +
" retract( $s ); \n" +
" list.add( $s );\n" +
"end\n" +
"\n" +
"rule \"Exec\"\n" +
"when\n" +
" $s : String()\n" +
" ?extract( $s ; )\n" +
"then\n" +
" \n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
java.util.List list = new java.util.ArrayList();
ksession.setGlobal( "list", list );
int N = 2;
for ( int j = 0; j < N; j++ ) {
ksession.insert( "x" + j );
ksession.fireAllRules();
}
assertEquals( N, list.size() );
ksession.dispose();
}
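    /*
     * Added note (inferred from the DRL above): while a query is being evaluated, a DroolsQuery fact
     * carrying the query name and its argument array ("elements") is visible to the rules, which is why
     * the "Intercept" rule can match it with name == "extract" and index into $args. The three String
     * patterns simply access the same argument through different index expressions.
     */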
@Test(timeout = 5000)
public void testEfficientBetaNodeNetworkUpdate() {
// [JBRULES-3372]
String str =
"declare SimpleMembership\n" +
" listId : String\n" +
" patientId : String\n" +
"end\n" +
"declare SimplePatientFact\n" +
" value : int\n" +
" patientId : String\n" +
"end\n" +
"rule \"A\"\n" +
"when\n" +
"$slm : SimpleMembership($pid : patientId, listId == \"5072\" )\n" +
"and not (\n" +
" (\n" +
" (\n" +
" SimplePatientFact(value == 1, patientId == $pid)\n" +
" ) or (\n" +
" SimplePatientFact(value == 2, patientId == $pid)\n" +
" )\n" +
" ) and (\n" +
" (\n" +
" SimplePatientFact(value == 6, patientId == $pid)\n" +
" ) or (\n" +
" SimplePatientFact(value == 7, patientId == $pid)\n" +
" ) or (\n" +
" SimplePatientFact(value == 8, patientId == $pid)\n" +
" )\n" +
" ) and (\n" +
" (\n" +
" SimplePatientFact(value == 9, patientId == $pid)\n" +
" ) or (\n" +
" SimplePatientFact(value == 10, patientId == $pid)\n" +
" ) or (\n" +
" SimplePatientFact(value == 11, patientId == $pid)\n" +
" ) or (\n" +
" SimplePatientFact(value == 12, patientId == $pid)\n" +
" ) or (\n" +
" SimplePatientFact(value == 13, patientId == $pid)\n" +
" )\n" +
" )\n" +
")\n" +
"then\n" +
" System.out.println(\"activated\");\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
}
@Test
public void testModifyCommand() {
String str =
"rule \"sample rule\"\n" +
" when\n" +
" then\n" +
" System.out.println(\"\\\"Hello world!\\\"\");\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
Person p1 = new Person("John", "nobody", 25);
ksession.execute(CommandFactory.newInsert(p1));
org.drools.runtime.rule.FactHandle fh = ksession.getFactHandle(p1);
Person p = new Person("Frank", "nobody", 30);
List<Setter> setterList = new ArrayList<Setter>();
setterList.add(CommandFactory.newSetter("age", String.valueOf(p.getAge())));
setterList.add(CommandFactory.newSetter("name", p.getName()));
setterList.add(CommandFactory.newSetter("likes", p.getLikes()));
ksession.execute(CommandFactory.newModify(fh, setterList));
}
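    /*
     * Added note (inferred from the API usage above): ModifyCommand setters are name/value pairs of
     * Strings (hence String.valueOf(p.getAge())), and the values are converted to the target field types
     * when the command is executed against the fact identified by the FactHandle. A minimal sketch,
     * assuming the same session and handle as above and that java.util.Arrays is available:
     *
     *   ksession.execute( CommandFactory.newModify( fh,
     *           Arrays.asList( CommandFactory.newSetter( "age", "30" ) ) ) );
     */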
@Test
public void testMVELTypeCoercion() {
String str = "package org.drools.test; \n" +
"\n" +
"global java.util.List list;" +
"\n" +
"declare Bean\n" +
// NOTICE: THIS WORKS WHEN THE FIELD IS "LIST", BUT USED TO WORK WITH ARRAYLIST TOO
" field : java.util.ArrayList\n" +
"end\n" +
"\n" +
"\n" +
"rule \"Init\"\n" +
"when \n" +
"then\n" +
" insert( new Bean( new java.util.ArrayList( java.util.Arrays.asList( \"x\" ) ) ) );\n" +
"end\n" +
"\n" +
"rule \"Check\"\n" +
"when\n" +
" $b : Bean( $fld : field == [\"x\"] )\n" +
"then\n" +
" System.out.println( $fld );\n" +
" list.add( \"OK\" ); \n" +
"end";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL );
if ( kbuilder.hasErrors() ) {
fail( kbuilder.getErrors().toString() );
}
KnowledgeBaseConfiguration kbConf = KnowledgeBaseFactory.newKnowledgeBaseConfiguration();
kbConf.setOption(AssertBehaviorOption.EQUALITY);
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase( kbConf );
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
java.util.List list = new java.util.ArrayList();
ksession.setGlobal( "list", list );
ksession.fireAllRules();
assertTrue( list.contains( "OK" ) );
ksession.dispose();
}
@Test
public void testPatternOnClass() throws Exception {
String rule = "import org.drools.reteoo.InitialFactImpl\n" +
"import org.drools.FactB\n" +
"rule \"Clear\" when\n" +
" $f: Object(class != FactB.class)\n" +
"then\n" +
" if( ! ($f instanceof InitialFactImpl) ){\n" +
" retract( $f );\n" +
" }\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( rule );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
ksession.insert(new FactA());
ksession.insert(new FactA());
ksession.insert(new FactB());
ksession.insert(new FactB());
ksession.insert(new FactC());
ksession.insert(new FactC());
ksession.fireAllRules();
for (org.drools.runtime.rule.FactHandle fact : ksession.getFactHandles()) {
InternalFactHandle internalFact = (InternalFactHandle)fact;
assertTrue(internalFact.getObject() instanceof FactB);
}
}
@Test
public void testPatternOffset() throws Exception {
// JBRULES-3427
String str = "package org.drools.test; \n" +
"declare A\n" +
"end\n" +
"declare B\n" +
" field : int\n" +
"end\n" +
"declare C\n" +
" field : int\n" +
"end\n" +
"rule R when\n" +
"( " +
" A( ) or ( A( ) and B( ) ) " +
") and (\n" +
" A( ) or ( B( $bField : field ) and C( field != $bField ) )\n" +
")\n" +
"then\n" +
" System.out.println(\"rule fired\");\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
FactType typeA = kbase.getFactType( "org.drools.test", "A" );
FactType typeB = kbase.getFactType( "org.drools.test", "B" );
FactType typeC = kbase.getFactType( "org.drools.test", "C" );
Object a = typeA.newInstance();
ksession.insert( a );
Object b = typeB.newInstance();
typeB.set( b, "field", 1 );
ksession.insert( b );
Object c = typeC.newInstance();
typeC.set( c, "field", 1 );
ksession.insert( c );
ksession.fireAllRules();
}
@Test
public void testCommentDelimiterInString() throws Exception {
// JBRULES-3401
String str = "rule x\n" +
"dialect \"mvel\"\n" +
"when\n" +
"then\n" +
"System.out.println( \"/*\" );\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
}
public interface InterfaceA {
InterfaceB getB();
}
public interface InterfaceB { }
public static class ClassA implements InterfaceA {
private ClassB b = null;
public ClassB getB() {
return b;
}
public void setB(InterfaceB b) {
this.b = (ClassB)b;
}
}
public static class ClassB implements InterfaceB {
private String id = "123";
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ClassB classB = (ClassB) o;
if (id != null ? !id.equals(classB.id) : classB.id != null) return false;
return true;
}
@Override
public int hashCode() {
return Integer.valueOf( id );
}
}
@Test
public void testCovariance() throws Exception {
// JBRULES-3392
String str =
"import org.drools.integrationtests.MiscTest.*\n" +
"rule x\n" +
"when\n" +
" $b : ClassB( )\n" +
" $a : ClassA( b.id == $b.id )\n" +
"then\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
ClassA a = new ClassA();
ClassB b = new ClassB();
a.setB(b);
ksession.insert(a);
ksession.insert(b);
assertEquals(1, ksession.fireAllRules());
}
@Test
public void testRetractLeftTuple() throws Exception {
// JBRULES-3420
String str = "import org.drools.integrationtests.MiscTest.*\n" +
"rule R1 salience 3\n" +
"when\n" +
" $b : InterfaceB( )\n" +
" $a : ClassA( b == null )\n" +
"then\n" +
" $a.setB( $b );\n" +
" update( $a );\n" +
"end\n" +
"rule R2 salience 2\n" +
"when\n" +
" $b : ClassB( id == \"123\" )\n" +
" $a : ClassA( b != null && b.id == $b.id )\n" +
"then\n" +
" $b.setId( \"456\" );\n" +
" update( $b );\n" +
"end\n" +
"rule R3 salience 1\n" +
"when\n" +
" InterfaceA( $b : b )\n" +
"then\n" +
" retract( $b );\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
ksession.insert(new ClassA());
ksession.insert(new ClassB());
assertEquals(3, ksession.fireAllRules());
}
@Test
public void testVariableBindingWithOR() throws Exception {
// JBRULES-3390
String str1 = "package org.drools.test; \n" +
"declare A\n" +
"end\n" +
"declare B\n" +
" field : int\n" +
"end\n" +
"declare C\n" +
" field : int\n" +
"end\n" +
"rule R when\n" +
"( " +
" A( ) and ( B( $bField : field ) or C( $cField : field ) ) " +
")\n" +
"then\n" +
" System.out.println($bField);\n" +
"end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource(str1.getBytes()), ResourceType.DRL );
assertTrue(kbuilder.hasErrors());
String str2 = "package org.drools.test; \n" +
"declare A\n" +
"end\n" +
"declare B\n" +
" field : int\n" +
"end\n" +
"declare C\n" +
" field : int\n" +
"end\n" +
"rule R when\n" +
"( " +
" A( ) and ( B( $field : field ) or C( $field : field ) ) " +
")\n" +
"then\n" +
" System.out.println($field);\n" +
"end\n";
KnowledgeBuilder kbuilder2 = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder2.add( ResourceFactory.newByteArrayResource(str2.getBytes()), ResourceType.DRL );
assertFalse(kbuilder2.hasErrors());
}
@Test
public void testModifySimple() {
String str ="package org.drools;\n" +
"\n" +
"rule \"test modify block\"\n" +
"when\n" +
" $p: Person( name == \"hungry\" )\n" +
"then\n" +
" modify( $p ) { setName(\"fine\") }\n" +
"end\n" +
"\n" +
"rule \"Log\"\n" +
"when\n" +
" $o: Object()\n" +
"then\n" +
" System.out.println( $o );\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
Person p = new Person();
p.setName( "hungry" );
ksession.insert( p );
ksession.fireAllRules();
ksession.dispose();
}
@Test
public void testDeclaresWithArrayFields() throws Exception {
String rule = "package org.drools.test; \n" +
"import org.drools.test.Person;" +
"\n" +
" global java.util.List list;" +
"\n" +
"declare Cheese\n" +
" name : String = \"ched\" \n" +
"end \n" +
"" +
"declare X\n" +
" fld \t: String = \"xx\" @key \n" +
" achz\t: Cheese[] \n" +
" astr\t: String[] \n" + "\t= new String[] {\"x\", \"y11\" } \n" +
" aint\t: int[] \n" +
" sint\t: short[] \n" +
" bint\t: byte[] \n" +
" lint\t: long[] \n" +
" dint\t: double[] \n" +
" fint\t: float[] \n" +
" zint\t: Integer[] \n" + "\t= new Integer[] {2,3} @key \n" +
" aaaa\t: String[][] \n" +
" bbbb\t: int[][] \n" +
" aprs\t: Person[] \n" + "\t= new org.drools.test.Person[] { new org.drools.test.Man() }" +
"end\n" +
"\n" +
"rule \"Init\"\n" +
"when\n" +
"\n" +
"then\n" +
" X x = new X( \"xx\", " +
" new Cheese[0], " +
" new String[] { \"x\", \"y22\" }, " +
" new int[] { 7, 9 }, " +
" new short[] { 3, 4 }, " +
" new byte[] { 1, 2 }, " +
" new long[] { 100L, 200L }, " +
" new double[] { 3.2, 4.4 }, " +
" new float[] { 3.2f, 4.4f }, " +
" new Integer[] { 2, 3 }," +
" new String[2][3]," +
" new int[5][3]," +
" null " +
" ); \n" +
" insert( x );\n" +
" " +
" X x2 = new X(); \n" +
" x2.setAint( new int[2] ); \n " +
" x2.getAint()[0] = 7; \n" +
" insert( x2 );\n" +
" " +
" if ( x.hashCode() == x2.hashCode() ) list.add( \"hash\" ); \n" +
" " +
" if( x.equals( x2 ) ) list.add( \"equals\" ); \n" +
" " +
" list.add( x.getAint( )[0] ); \n" +
"end \n" +
"\n" +
"rule \"Check\"\n" +
"when\n" +
" X( astr.length > 0, \n" +
" astr[0] == \"x\", \n" +
" $x : astr[1], \n" +
" aint[0] == 7 ) \n" +
"then\n" +
" list.add( $x );\n" +
"end \n" +
"";
KnowledgeBase kbase = loadKnowledgeBaseFromString( rule );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
List list = new ArrayList();
ksession.setGlobal( "list", list );
ksession.fireAllRules();
assertTrue( list.contains( "hash" ) );
assertTrue( list.contains( "equals" ) );
assertTrue( list.contains( 7 ) );
assertTrue( list.contains( "y11" ) );
assertTrue( list.contains( "y22" ) );
}
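// Fixtures for testTypeUnsafe: two unrelated Parent subclasses that each declare their own int x property.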
public static class Parent { }
public static class ChildA extends Parent {
private final int x;
public ChildA(int x) {
this.x = x;
}
public int getX() {
return x;
}
}
public static class ChildB extends Parent {
private final int x;
public ChildB(int x) {
this.x = x;
}
public int getX() {
return x;
}
}
@Test
public void testTypeUnsafe() throws Exception {
String str = "import org.drools.integrationtests.MiscTest.*\n" +
"declare\n" +
" Parent @typesafe(false)\n" +
"end\n" +
"rule R1\n" +
"when\n" +
" $a : Parent( x == 1 )\n" +
"then\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
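// i % 10 == 1 only for i = 1 and i = 11, so two ChildA and two ChildB facts match Parent( x == 1 ) -> 4 activations.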
for (int i = 0; i < 20; i++) {
ksession.insert(new ChildA(i % 10));
ksession.insert(new ChildB(i % 10));
}
assertEquals(4, ksession.fireAllRules());
// give time to async jitting to complete
Thread.sleep(100);
ksession.insert(new ChildA(1));
ksession.insert(new ChildB(1));
assertEquals(2, ksession.fireAllRules());
}
@Test
public void testConstructorWithOtherDefaults() {
String str = "" +
"\n" +
"global java.util.List list;\n" +
"\n" +
"declare Bean\n" +
" kField : String @key\n" +
" sField : String = \"a\"\n" +
" iField : int = 10\n" +
" dField : double = 4.32\n" +
" aField : Long[] = new Long[] { 100L, 1000L }\n" +
"end" +
"\n" +
"rule \"Trig\"\n" +
"when\n" +
" Bean( kField == \"key\", sField == \"a\", iField == 10, dField == 4.32, aField[1] == 1000L ) \n" +
"then\n" +
" list.add( \"OK\" );\n" +
"end\n" +
"\n" +
"rule \"Exec\"\n" +
"when\n" +
"then\n" +
" insert( new Bean( \"key\") ); \n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
java.util.List list = new java.util.ArrayList();
ksession.setGlobal( "list", list );
ksession.fireAllRules();
assertTrue( list.contains( "OK" ) );
ksession.dispose();
}
@Test
public void testBindingToNullFieldWithEquality() {
// JBRULES-3396
String str = "package org.drools.test; \n" +
"\n" +
"global java.util.List list;" +
"\n" +
"declare Bean\n" +
" id : String @key\n" +
" field : String\n" +
"end\n" +
"\n" +
"\n" +
"rule \"Init\"\n" +
"when \n" +
"then\n" +
" insert( new Bean( \"x\" ) );\n" +
"end\n" +
"\n" +
"rule \"Check\"\n" +
"when\n" +
" $b : Bean( $fld : field )\n" +
"then\n" +
" System.out.println( $fld );\n" +
" list.add( \"OK\" ); \n" +
"end";
KnowledgeBaseConfiguration kbConf = KnowledgeBaseFactory.newKnowledgeBaseConfiguration();
kbConf.setOption(AssertBehaviorOption.EQUALITY);
KnowledgeBase kbase = loadKnowledgeBaseFromString(kbConf, str);
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
java.util.List list = new java.util.ArrayList();
ksession.setGlobal( "list", list );
ksession.fireAllRules();
assertTrue(list.contains("OK"));
ksession.dispose();
}
@Test
public void testCoercionOfStringValueWithoutQuotes() throws Exception {
// JBRULES-3080
String str = "package org.drools.test; \n" +
"declare A\n" +
" field : String\n" +
"end\n" +
"rule R when\n" +
" A( field == 12 )\n" +
"then\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
FactType typeA = kbase.getFactType( "org.drools.test", "A" );
Object a = typeA.newInstance();
typeA.set( a, "field", "12" );
ksession.insert( a );
assertEquals(1, ksession.fireAllRules());
}
@Test
public void testVarargConstraint() throws Exception {
// JBRULES-3268
String str = "package org.drools.test;\n" +
"import org.drools.integrationtests.MiscTest.VarargBean;\n" +
" global java.util.List list;\n" +
"\n" +
"rule R1 when\n" +
" VarargBean( isOddArgsNr(1, 2, 3) )\n" +
"then\n" +
" list.add(\"odd\");\n" +
"end\n" +
"rule R2 when\n" +
" VarargBean( isOddArgsNr(1, 2, 3, 4) )\n" +
"then\n" +
" list.add(\"even\");\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
List list = new ArrayList();
ksession.setGlobal( "list", list );
ksession.insert(new VarargBean());
ksession.fireAllRules();
assertEquals(1, list.size());
assertTrue(list.contains("odd"));
}
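// Fixture for testVarargConstraint: reports whether it was invoked with an odd number of vararg values.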
public static class VarargBean {
public boolean isOddArgsNr(int... args) {
return args.length % 2 == 1;
}
}
@Test
public void testPackageImportWithMvelDialect() throws Exception {
// JBRULES-2244
String str = "package org.drools.test;\n" +
"import org.drools.*\n" +
"dialect \"mvel\"\n" +
"rule R1 no-loop when\n" +
" $p : Person( )" +
" $c : Cheese( )" +
"then\n" +
" modify($p) { setCheese($c) };\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
Person p = new Person("Mario", 38);
ksession.insert(p);
Cheese c = new Cheese("Gorgonzola");
ksession.insert(c);
assertEquals(1, ksession.fireAllRules());
assertSame(c, p.getCheese());
}
@Test
public void testNoMvelSyntaxInFunctions() throws Exception {
// JBRULES-3433
String str = "import java.util.*;\n" +
"dialect \"mvel\"\n" +
"function Integer englishToInt(String englishNumber) { \n" +
" Map m = [\"one\":1, \"two\":2, \"three\":3, \"four\":4, \"five\":5]; \n" +
" Object obj = m.get(englishNumber.toLowerCase()); \n" +
" return Integer.parseInt(obj.toString()); \n" +
"}\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL );
assertTrue(kbuilder.hasErrors());
}
@Test
public void testMissingClosingBraceOnModify() throws Exception {
// JBRULES-3436
String str = "package org.drools.test;\n" +
"import org.drools.*\n" +
"rule R1 when\n" +
" $p : Person( )" +
" $c : Cheese( )" +
"then\n" +
" modify($p) { setCheese($c) ;\n" +
"end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL );
assertTrue( kbuilder.hasErrors() );
}
@Test
public void testPrimitiveToBoxedCoercionInMethodArgument() throws Exception {
String str = "package org.drools.test;\n" +
"import org.drools.integrationtests.MiscTest\n" +
"import org.drools.*\n" +
"rule R1 when\n" +
" Person( $ag1 : age )" +
" $p2 : Person( name == MiscTest.integer2String($ag1) )" +
"then\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
Person p = new Person("42", 42);
ksession.insert(p);
assertEquals(1, ksession.fireAllRules());
}
public static String integer2String(Integer value) {
return "" + value;
}
@Test
public void testKeyedInterfaceField() {
//JBRULES-3441
String str = "package org.drools.integrationtest; \n" +
"\n" +
"import org.drools.integrationtests.MiscTest.*; \n" +
"" +
"global java.util.List list;" +
"" +
"declare Bean\n" +
" id : InterfaceB @key\n" +
"end\n" +
"\n" +
"\n" +
"rule \"Init\"\n" +
"when \n" +
"then\n" +
" insert( new Bean( new ClassB() ) );\n" +
"end\n" +
"\n" +
"rule \"Check\"\n" +
"when\n" +
" $b : Bean( )\n" +
"then\n" +
" list.add( $b.hashCode() ); \n" +
" list.add( $b.equals( new Bean( new ClassB() ) ) ); \n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
java.util.List list = new java.util.ArrayList();
ksession.setGlobal( "list", list );
ksession.fireAllRules();
assertTrue( list.contains( 31 + 123 ) );
assertTrue( list.contains( true ) );
ksession.dispose();
}
@Test
public void testDeclaredTypeAsFieldForAnotherDeclaredType() {
// JBRULES-3468
String str = "package com.sample\n" +
"\n" +
"import com.sample.*;\n" +
"\n" +
"declare Item\n" +
" id : int;\n" +
"end\n" +
"\n" +
"declare Priority\n" +
" name : String;\n" +
" priority : int;\n" +
"end\n" +
"\n" +
"declare Cap\n" +
" item : Item;\n" +
" name : String\n" +
"end\n" +
"\n" +
"rule \"split cart into items\"\n" +
"when\n" +
"then\n" +
" insert(new Item(1));\n" +
" insert(new Item(2));\n" +
" insert(new Item(3));\n" +
"end\n" +
"\n" +
"rule \"Priorities\"\n" +
"when\n" +
"then\n" +
" insert(new Priority(\"A\", 3));\n" +
" insert(new Priority(\"B\", 2));\n" +
" insert(new Priority(\"C\", 5));\n" +
"end\n" +
"\n" +
"rule \"Caps\"\n" +
"when\n" +
" $i : Item()\n" +
" $p : Priority($name : name)\n" +
"then\n" +
" insert(new Cap($i, $name));\n" +
"end\n" +
"\n" +
"rule \"test\"\n" +
"when\n" +
" $i : Item()\n" +
" Cap(item.id == $i.id)\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
assertEquals(20, ksession.fireAllRules());
ksession.dispose();
}
@Test
public void testCheckDuplicateVariables() throws Exception {
// JBRULES-3035
String str = "package com.sample\n" +
"import org.drools.*\n" +
"rule R1 when\n" +
" Person( $a: age, $a: name ) // this should cause a compile-time error\n" +
"then\n" +
"end";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL );
assertTrue(kbuilder.hasErrors());
str = "package com.sample\n" +
"rule R1 when\n" +
" accumulate( Object(), $c: count(1), $c: max(1) ) // this should cause a compile-time error\n" +
"then\n" +
"end";
kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL );
assertTrue(kbuilder.hasErrors());
str = "package com.sample\n" +
"rule R1 when\n" +
" Number($i: intValue) from accumulate( Object(), $i: count(1) ) // this should cause a compile-time error\n" +
"then\n" +
"end";
kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL );
assertTrue(kbuilder.hasErrors());
}
@Test
public void testDeclaredTypesDefaultHashCode() {
// JBRULES-3481
String str = "package com.sample\n" +
"\n" +
"global java.util.List list; \n" +
"" +
"declare Bean\n" +
" id : int \n" +
"end\n" +
"\n" +
"declare KeyedBean\n" +
" id : int @key \n" +
"end\n" +
"\n" +
"\n" +
"rule Create\n" +
"when\n" +
"then\n" +
" list.add( new Bean(1) ); \n" +
" list.add( new Bean(2) ); \n" +
" list.add( new KeyedBean(1) ); \n" +
" list.add( new KeyedBean(1) ); \n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
List list = new ArrayList();
ksession.setGlobal( "list", list );
ksession.fireAllRules();
ksession.dispose();
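// Bean has no @key fields so it keeps identity-based equals()/hashCode(); KeyedBean's generated methods
// are derived from its @key field, which makes the two KeyedBean(1) instances equal.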
assertFalse( list.get( 0 ).hashCode() == 34 );
assertFalse( list.get( 1 ).hashCode() == 34 );
assertFalse( list.get( 0 ).hashCode() == list.get( 1 ).hashCode() );
assertNotSame( list.get( 0 ), list.get( 1 ) );
assertFalse( list.get( 0 ).equals( list.get( 1 ) ) );
assertTrue( list.get( 2 ).hashCode() == 32 );
assertTrue( list.get( 3 ).hashCode() == 32 );
assertNotSame( list.get( 2 ), list.get( 3 ) );
assertTrue( list.get( 2 ).equals( list.get( 3 ) ) );
}
@Test
public void testJittingConstraintWithInvocationOnLiteral() {
String str = "package com.sample\n" +
"import org.drools.Person\n" +
"rule XXX when\n" +
" Person( name.toString().toLowerCase().contains( \"mark\".toString().toLowerCase() ) )\n" +
"then\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
ksession.insert(new Person("mark", 37));
ksession.insert(new Person("mario", 38));
ksession.fireAllRules();
ksession.dispose();
}
@Test
public void testJittingMethodWithCharSequenceArg() {
String str = "package com.sample\n" +
"import org.drools.Person\n" +
"rule XXX when\n" +
" Person( $n : name, $n.contains( \"mark\" ) )\n" +
"then\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
ksession.insert(new Person("mark", 37));
ksession.insert(new Person("mario", 38));
ksession.fireAllRules();
ksession.dispose();
}
@Test
public void testMapAccessorWithPrimitiveKey() {
String str = "package com.sample\n" +
"import org.drools.integrationtests.MiscTest.MapContainerBean\n" +
"rule R1 when\n" +
" MapContainerBean( map[1] == \"one\" )\n" +
"then\n" +
"end\n" +
"rule R2 when\n" +
" MapContainerBean( map[1+1] == \"two\" )\n" +
"then\n" +
"end\n" +
"rule R3 when\n" +
" MapContainerBean( map[this.get3()] == \"three\" )\n" +
"then\n" +
"end\n" +
"rule R4 when\n" +
" MapContainerBean( map[4] == null )\n" +
"then\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
ksession.insert(new MapContainerBean());
assertEquals(4, ksession.fireAllRules());
ksession.dispose();
}
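// Fixture for testMapAccessorWithPrimitiveKey: a pre-populated Integer-keyed map plus a helper method used as a map index.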
public static class MapContainerBean {
private final Map<Integer, String> map = new HashMap<Integer, String>();
MapContainerBean() {
map.put(1, "one");
map.put(2, "two");
map.put(3, "three");
}
public Map<Integer, String> getMap() {
return map;
}
public int get3() {
return 3;
}
}
@Test
public void testFromWithStrictModeOff() {
// JBRULES-3533
String str =
"import java.util.Map;\n" +
"dialect \"mvel\"\n" +
"rule \"LowerCaseFrom\"\n" +
"when\n"+
" Map($valOne : this['keyOne'] !=null)\n" +
" $lowerValue : String() from $valOne.toLowerCase()\n" +
"then\n" +
" System.out.println( $valOne.toLowerCase() );\n" +
"end\n";
PackageBuilderConfiguration pkgBuilderCfg = new PackageBuilderConfiguration();
MVELDialectConfiguration mvelConf = (MVELDialectConfiguration) pkgBuilderCfg.getDialectConfiguration( "mvel" );
mvelConf.setStrict(false);
mvelConf.setLangLevel(5);
KnowledgeBase kbase = loadKnowledgeBaseFromString(pkgBuilderCfg, str);
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
Map<String,String> testMap = new HashMap<String,String>();
testMap.put("keyOne", "valone");
testMap.put("valTwo", "valTwo");
ksession.insert(testMap);
assertEquals(1, ksession.fireAllRules());
ksession.dispose();
}
@Test
public void testFromWithStrictModeOn() {
// JBRULES-3533
String str =
"import java.util.Map;\n" +
"dialect \"mvel\"\n" +
"rule \"LowerCaseFrom\"\n" +
"when\n"+
" Map($valOne : this['keyOne'] !=null)\n" +
" $lowerValue : String() from $valOne.toLowerCase()\n" +
"then\n" +
" System.out.println( $valOne.toLowerCase() );\n" +
"end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL );
assertTrue( kbuilder.hasErrors() );
}
@Test
public void testEntryPointWithVarIN() {
String str = "package org.drools.test;\n" +
"\n" +
"global java.util.List list;\n" +
"\n" +
"rule \"In\"\n" +
"when\n" +
" $x : Integer()\n " +
"then\n" +
" drools.getEntryPoint(\"inX\").insert( $x );\n" +
"end\n" +
"\n" +
"rule \"Out\"\n" +
"when\n" +
" $i : Integer() from entry-point \"inX\"\n" +
"then\n" +
" list.add( $i );\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
ksession.insert( 10 );
List res = new ArrayList();
ksession.setGlobal( "list", res );
ksession.fireAllRules();
ksession.dispose();
assertTrue( res.contains( 10 ) );
}
@Test
public void testArithmeticExpressionWithNull() {
// JBRULES-3568
String str = "import org.drools.integrationtests.MiscTest.PrimitiveBean;\n" +
"rule R when\n" +
" PrimitiveBean(primitive/typed > 0.7)\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
ksession.insert( new PrimitiveBean(0.9, 1.1) );
ksession.insert( new PrimitiveBean(0.9, null) );
assertEquals(1, ksession.fireAllRules());
ksession.dispose();
}
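// Fixture for testArithmeticExpressionWithNull: pairs a primitive double with a possibly-null boxed Double divisor.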
public static class PrimitiveBean {
public final double primitive;
public final Double typed;
public PrimitiveBean(double primitive, Double typed) {
this.primitive = primitive;
this.typed = typed;
}
public double getPrimitive() {
return primitive;
}
public Double getTyped() {
return typed;
}
}
@Test
public void testMvelMatches() {
String str = "package com.sample\n" +
"import org.drools.Person\n" +
"global java.util.List results;" +
"rule XXX when\n" +
" Person( $n : name ~= \"\\\\D.*\" )\n" +
"then\n" +
" results.add( $n ); \n " +
"end \n" +
"rule YY when\n" +
" Person( $a : age, $n : name ~= \"\\\\d\\\\D.*\" )\n" +
"then\n" +
" results.add( $a ); \n " +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
List res = new ArrayList();
ksession.setGlobal( "results", res );
ksession.insert(new Person("mark", 37));
ksession.insert(new Person("mario", 38));
ksession.insert(new Person("1mike", 44));
ksession.insert(new Person("52matt", 44));
ksession.fireAllRules();
ksession.dispose();
assertEquals( 3, res.size() );
assertTrue( res.contains( "mark" ) );
assertTrue( res.contains( "mario" ) );
assertTrue( res.contains( 44 ) );
}
@Test
public void testRuleFlowGroupWithLockOnActivate() {
// JBRULES-3590
String str = "import org.drools.Person;\n" +
"import org.drools.Cheese;\n" +
"rule R1\n" +
"ruleflow-group \"group1\"\n" +
"lock-on-active true\n" +
"when\n" +
" $p : Person()\n" +
"then\n" +
" $p.setName(\"John\");\n" +
" update ($p);\n" +
"end\n" +
"rule R2\n" +
"ruleflow-group \"group1\"\n" +
"lock-on-active true\n" +
"when\n" +
" $p : Person( name == null )\n" +
" forall ( Cheese ( type == \"cheddar\" ))\n" +
"then\n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
ksession.insert( new Person() );
ksession.insert( new Cheese("gorgonzola") );
((AgendaImpl)ksession.getAgenda()).activateRuleFlowGroup( "group1" );
assertEquals(1, ksession.fireAllRules());
ksession.dispose();
}
@Test
public void testInstanceof() throws Exception {
// JBRULES-3591
String str = "import org.drools.*;\n" +
"rule R1 when\n" +
" Person( address instanceof LongAddress )\n" +
"then\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
Person mark = new Person("mark");
mark.setAddress(new LongAddress("uk"));
ksession.insert(mark);
assertEquals(1, ksession.fireAllRules());
ksession.dispose();
}
@Test
public void testFromNodeWithMultipleBetas() throws Exception {
String str = "import org.drools.*;\n" +
"rule R1 when\n" +
" $p : Person( $name : name, $addresses : addresses )\n" +
" $c : Cheese( $type: type == $name )\n" +
" $a : Address( street == $type, suburb == $name ) from $addresses\n" +
"then\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
Person p = new Person("x");
p.addAddress(new Address("x", "x", "x"));
p.addAddress(new Address("y", "y", "y"));
ksession.insert(p);
ksession.insert(new Cheese("x"));
ksession.fireAllRules();
ksession.dispose();
}
@Test
public void testMvelFunctionWithDeclaredTypeArg() {
// JBRULES-3562
String rule = "package test; \n" +
"dialect \"mvel\"\n" +
"global java.lang.StringBuilder value;\n" +
"function String getFieldValue(Bean bean) {" +
" return bean.getField();" +
"}" +
"declare Bean \n" +
" field : String \n" +
"end \n" +
"\n" +
"rule R1 \n" +
"when \n" +
"then \n" +
" insert( new Bean( \"mario\" ) ); \n" +
"end \n" +
"\n" +
"rule R2 \n" +
"when \n" +
" $bean : Bean( ) \n" +
"then \n" +
" value.append( getFieldValue($bean) ); \n" +
"end";
KnowledgeBase kbase = loadKnowledgeBaseFromString( rule );
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
StringBuilder sb = new StringBuilder();
ksession.setGlobal("value", sb);
ksession.fireAllRules();
assertEquals("mario", sb.toString());
ksession.dispose();
}
@Test
public void testGenericsList() throws Exception {
String str = "import org.drools.*;\n" +
"rule R1 when\n" +
" $c : Cheese( $type: type )\n" +
" $p : Person( $name : name, addresses.get(0).street == $type )\n" +
"then\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
Person p = new Person("x");
p.addAddress(new Address("x", "x", "x"));
p.addAddress(new Address("y", "y", "y"));
ksession.insert(p);
ksession.insert(new Cheese("x"));
assertEquals(1, ksession.fireAllRules());
ksession.dispose();
}
@Test
public void testGenericsOption() throws Exception {
// JBRULES-3579
String str = "import org.drools.*;\n" +
"rule R1 when\n" +
" $c : Cheese( $type: type )\n" +
" $p : Person( $name : name, addressOption.get.street == $type )\n" +
"then\n" +
"end\n";
KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
Person p = new Person("x");
p.setAddress(new Address("x", "x", "x"));
ksession.insert(p);
ksession.insert(new Cheese("x"));
assertEquals(1, ksession.fireAllRules());
ksession.dispose();
}
}
|
[JBRULES-3539] add unit test to reproduce compilation error when calling clone() on an untyped object in the RHS
|
drools-compiler/src/test/java/org/drools/integrationtests/MiscTest.java
|
[JBRULES-3539] add unit test to reproduce compilation error when calling clone() on an untyped object in the RHS
|
<ide><path>drools-compiler/src/test/java/org/drools/integrationtests/MiscTest.java
<ide> assertEquals(1, ksession.fireAllRules());
<ide> ksession.dispose();
<ide> }
<add>
<add> @Test
<add> public void testRHSClone() {
<add> // JBRULES-3539
<add> String str = "import java.util.Map;\n"+
<add> "dialect \"mvel\"\n"+
<add> "rule \"RHSClone\"\n"+
<add> "when\n"+
<add> " Map($valOne : this['keyOne'] !=null)\n"+
<add> "then\n"+
<add> " System.out.println( $valOne.clone() );\n"+
<add> "end\n";
<add>
<add> PackageBuilderConfiguration pkgBuilderCfg = new PackageBuilderConfiguration();
<add> MVELDialectConfiguration mvelConf = (MVELDialectConfiguration) pkgBuilderCfg.getDialectConfiguration( "mvel" );
<add> mvelConf.setStrict(false);
<add> mvelConf.setLangLevel(5);
<add> KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(pkgBuilderCfg);
<add> kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),ResourceType.DRL );
<add> KnowledgeBuilderErrors errors = kbuilder.getErrors();
<add> if (errors.size() > 0) {
<add> for (KnowledgeBuilderError error: errors) {
<add> System.err.println(error);
<add> }
<add> fail("Could not parse knowledge");
<add> }
<add> }
<ide> }
|
|
Java
|
lgpl-2.1
|
9dff26e8472af5b5a1da468020bf12a6257254a5
| 0 |
joshkh/intermine,kimrutherford/intermine,tomck/intermine,JoeCarlson/intermine,elsiklab/intermine,joshkh/intermine,kimrutherford/intermine,kimrutherford/intermine,zebrafishmine/intermine,joshkh/intermine,justincc/intermine,tomck/intermine,elsiklab/intermine,tomck/intermine,justincc/intermine,elsiklab/intermine,drhee/toxoMine,JoeCarlson/intermine,joshkh/intermine,justincc/intermine,Arabidopsis-Information-Portal/intermine,joshkh/intermine,Arabidopsis-Information-Portal/intermine,drhee/toxoMine,elsiklab/intermine,justincc/intermine,kimrutherford/intermine,joshkh/intermine,justincc/intermine,zebrafishmine/intermine,JoeCarlson/intermine,tomck/intermine,kimrutherford/intermine,zebrafishmine/intermine,Arabidopsis-Information-Portal/intermine,drhee/toxoMine,JoeCarlson/intermine,Arabidopsis-Information-Portal/intermine,Arabidopsis-Information-Portal/intermine,joshkh/intermine,JoeCarlson/intermine,tomck/intermine,zebrafishmine/intermine,joshkh/intermine,justincc/intermine,zebrafishmine/intermine,JoeCarlson/intermine,tomck/intermine,JoeCarlson/intermine,drhee/toxoMine,tomck/intermine,JoeCarlson/intermine,elsiklab/intermine,justincc/intermine,drhee/toxoMine,justincc/intermine,drhee/toxoMine,elsiklab/intermine,Arabidopsis-Information-Portal/intermine,tomck/intermine,justincc/intermine,drhee/toxoMine,kimrutherford/intermine,drhee/toxoMine,Arabidopsis-Information-Portal/intermine,elsiklab/intermine,JoeCarlson/intermine,tomck/intermine,Arabidopsis-Information-Portal/intermine,zebrafishmine/intermine,zebrafishmine/intermine,Arabidopsis-Information-Portal/intermine,zebrafishmine/intermine,elsiklab/intermine,zebrafishmine/intermine,kimrutherford/intermine,drhee/toxoMine,elsiklab/intermine,joshkh/intermine,kimrutherford/intermine,kimrutherford/intermine
|
bio/webapp/src/org/intermine/bio/web/struts/CytoscapeNetworkController.java
|
package org.intermine.bio.web.struts;
/*
* Copyright (C) 2002-2011 FlyMine
*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. See the LICENSE file for more
* information or http://www.gnu.org/copyleft/lesser.html.
*
*/
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.apache.log4j.Logger;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.tiles.ComponentContext;
import org.apache.struts.tiles.actions.TilesAction;
import org.intermine.api.InterMineAPI;
import org.intermine.api.profile.InterMineBag;
import org.intermine.api.profile.Profile;
import org.intermine.api.query.PathQueryExecutor;
import org.intermine.bio.web.logic.CytoscapeNetworkDBQueryRunner;
import org.intermine.bio.web.logic.CytoscapeNetworkUtil;
import org.intermine.metadata.Model;
import org.intermine.model.InterMineObject;
import org.intermine.model.bio.Gene;
import org.intermine.model.bio.Protein;
import org.intermine.objectstore.ObjectStore;
import org.intermine.util.StringUtil;
import org.intermine.web.logic.session.SessionMethods;
/**
* This class contains the logic for interaction validation.
*
* @author Julie Sullivan
* @author Fengyuan Hu
*
*/
public class CytoscapeNetworkController extends TilesAction
{
@SuppressWarnings("unused")
private static final Logger LOG = Logger.getLogger(CytoscapeNetworkController.class);
/**
* {@inheritDoc}
*/
public ActionForward execute(ComponentContext context,
ActionMapping mapping, ActionForm form, HttpServletRequest request,
HttpServletResponse response) throws Exception {
HttpSession session = request.getSession(); // Get HttpSession
final InterMineAPI im = SessionMethods.getInterMineAPI(session); // Get InterMineAPI
ObjectStore os = im.getObjectStore(); // Get OS
Model model = im.getModel(); // Get Model
Profile profile = SessionMethods.getProfile(session); // Get Profile
PathQueryExecutor executor = im.getPathQueryExecutor(profile); // Get PathQueryExecutor
Set<Integer> startingFeatureSet = new HashSet<Integer>(); // feature: gene or protein
String featureType = "";
//=== Get Interaction information ===
Map<String, Set<String>> interactionInfoMap = CytoscapeNetworkUtil
.getInteractionInfo(model, executor);
//=== Handle object ===
// From gene report page
InterMineObject object = (InterMineObject) request.getAttribute("object");
// From list analysis page
InterMineBag bag = (InterMineBag) request.getAttribute("bag"); // OrthologueLinkController
if (bag != null) {
startingFeatureSet.addAll(bag.getContentsAsIds());
if ("Gene".equals(bag.getType())) {
featureType = "Gene";
} else if ("Protein".equals(bag.getType())) {
featureType = "Protein";
}
} else {
startingFeatureSet.add(object.getId());
if (object instanceof Gene) {
featureType = "Gene";
} else if (object instanceof Protein) {
featureType = "Protein";
}
}
//=== Query a full set of interacting genes ===
CytoscapeNetworkDBQueryRunner queryRunner = new CytoscapeNetworkDBQueryRunner();
Set<Integer> fullInteractingGeneSet = queryRunner.getInteractingGenes(
featureType, startingFeatureSet, model, executor);
request.setAttribute("fullInteractingGeneSet",
StringUtil.join(fullInteractingGeneSet, ","));
//=== Validation ===
if (interactionInfoMap == null) {
String dataNotIncludedMessage = "Interaction data is not included.";
request.setAttribute("dataNotIncludedMessage", dataNotIncludedMessage);
return null;
}
// Check if interaction data available for the organism
Gene aTestGene = (Gene) os.getObjectById(fullInteractingGeneSet.iterator().next());
String orgName = aTestGene.getOrganism().getName();
if (!interactionInfoMap.containsKey(orgName)) {
String orgWithNoDataMessage = "No interaction data found for "
+ orgName + " genes";
request.setAttribute("orgWithNoDataMessage", orgWithNoDataMessage);
return null;
}
return null;
}
}
|
CytoscapeNetwork displayer - removed CytoscapeNetworkController
|
bio/webapp/src/org/intermine/bio/web/struts/CytoscapeNetworkController.java
|
CytoscapeNetwork displayer - removed CytoscapeNetworkController
|
<ide><path>bio/webapp/src/org/intermine/bio/web/struts/CytoscapeNetworkController.java
<del>package org.intermine.bio.web.struts;
<del>
<del>/*
<del> * Copyright (C) 2002-2011 FlyMine
<del> *
<del> * This code may be freely distributed and modified under the
<del> * terms of the GNU Lesser General Public Licence. This should
<del> * be distributed with the code. See the LICENSE file for more
<del> * information or http://www.gnu.org/copyleft/lesser.html.
<del> *
<del> */
<del>
<del>import java.util.HashSet;
<del>import java.util.Map;
<del>import java.util.Set;
<del>
<del>import javax.servlet.http.HttpServletRequest;
<del>import javax.servlet.http.HttpServletResponse;
<del>import javax.servlet.http.HttpSession;
<del>
<del>import org.apache.log4j.Logger;
<del>import org.apache.struts.action.ActionForm;
<del>import org.apache.struts.action.ActionForward;
<del>import org.apache.struts.action.ActionMapping;
<del>import org.apache.struts.tiles.ComponentContext;
<del>import org.apache.struts.tiles.actions.TilesAction;
<del>import org.intermine.api.InterMineAPI;
<del>import org.intermine.api.profile.InterMineBag;
<del>import org.intermine.api.profile.Profile;
<del>import org.intermine.api.query.PathQueryExecutor;
<del>import org.intermine.bio.web.logic.CytoscapeNetworkDBQueryRunner;
<del>import org.intermine.bio.web.logic.CytoscapeNetworkUtil;
<del>import org.intermine.metadata.Model;
<del>import org.intermine.model.InterMineObject;
<del>import org.intermine.model.bio.Gene;
<del>import org.intermine.model.bio.Protein;
<del>import org.intermine.objectstore.ObjectStore;
<del>import org.intermine.util.StringUtil;
<del>import org.intermine.web.logic.session.SessionMethods;
<del>
<del>/**
<del> * This class contains the logic for interaction validation.
<del> *
<del> * @author Julie Sullivan
<del> * @author Fengyuan Hu
<del> *
<del> */
<del>public class CytoscapeNetworkController extends TilesAction
<del>{
<del> @SuppressWarnings("unused")
<del> private static final Logger LOG = Logger.getLogger(CytoscapeNetworkController.class);
<del>
<del> /**
<del> * {@inheritDoc}
<del> */
<del> public ActionForward execute(ComponentContext context,
<del> ActionMapping mapping, ActionForm form, HttpServletRequest request,
<del> HttpServletResponse response) throws Exception {
<del>
<del> HttpSession session = request.getSession(); // Get HttpSession
<del> final InterMineAPI im = SessionMethods.getInterMineAPI(session); // Get InterMineAPI
<del> ObjectStore os = im.getObjectStore(); // Get OS
<del> Model model = im.getModel(); // Get Model
<del> Profile profile = SessionMethods.getProfile(session); // Get Profile
<del> PathQueryExecutor executor = im.getPathQueryExecutor(profile); // Get PathQueryExecutor
<del>
<del> Set<Integer> startingFeatureSet = new HashSet<Integer>(); // feature: gene or protein
<del> String featureType = "";
<del>
<del> //=== Get Interaction information ===
<del> Map<String, Set<String>> interactionInfoMap = CytoscapeNetworkUtil
<del> .getInteractionInfo(model, executor);
<del>
<del> //=== Handle object ===
<del> // From gene report page
<del> InterMineObject object = (InterMineObject) request.getAttribute("object");
<del> // From list analysis page
<del> InterMineBag bag = (InterMineBag) request.getAttribute("bag"); // OrthologueLinkController
<del>
<del> if (bag != null) {
<del> startingFeatureSet.addAll(bag.getContentsAsIds());
<del> if ("Gene".equals(bag.getType())) {
<del> featureType = "Gene";
<del> } else if ("Protein".equals(bag.getType())) {
<del> featureType = "Protein";
<del> }
<del> } else {
<del> startingFeatureSet.add(object.getId());
<del> if (object instanceof Gene) {
<del> featureType = "Gene";
<del> } else if (object instanceof Protein) {
<del> featureType = "Protein";
<del> }
<del> }
<del>
<del> //=== Query a full set of interacting genes ===
<del> CytoscapeNetworkDBQueryRunner queryRunner = new CytoscapeNetworkDBQueryRunner();
<del> Set<Integer> fullInteractingGeneSet = queryRunner.getInteractingGenes(
<del> featureType, startingFeatureSet, model, executor);
<del> request.setAttribute("fullInteractingGeneSet",
<del> StringUtil.join(fullInteractingGeneSet, ","));
<del>
<del> //=== Validation ===
<del> if (interactionInfoMap == null) {
<del> String dataNotIncludedMessage = "Interaction data is not included.";
<del> request.setAttribute("dataNotIncludedMessage", dataNotIncludedMessage);
<del> return null;
<del> }
<del>
<del> // Check if interaction data available for the organism
<del> Gene aTestGene = (Gene) os.getObjectById(fullInteractingGeneSet.iterator().next());
<del> String orgName = aTestGene.getOrganism().getName();
<del> if (!interactionInfoMap.containsKey(orgName)) {
<del> String orgWithNoDataMessage = "No interaction data found for "
<del> + orgName + " genes";
<del> request.setAttribute("orgWithNoDataMessage", orgWithNoDataMessage);
<del> return null;
<del> }
<del>
<del> return null;
<del> }
<del>
<del>
<del>}
|
||
Java
|
lgpl-2.1
|
ab0a39faa1287043f568212af4e405961f369312
| 0 |
geotools/geotools,geotools/geotools,geotools/geotools,geotools/geotools
|
/*
* GeoTools - The Open Source Java GIS Toolkit
* http://geotools.org
*
* (C) 2011, Open Source Geospatial Foundation (OSGeo)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation;
* version 2.1 of the License.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*/
package org.geotools.swing.tool;
import org.geotools.swing.testutils.MockMapPane;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* Unit tests for MapToolManager that do not require a graphics environment.
*
* @author Michael Bedward
* @since 8.0
* @source $URL$
* @version $Id$
*/
@Ignore("temp to fix hudson build")
public class MapToolManagerHeadlessTest {
private MockMapPane pane;
private MapToolManager manager;
@Before
public void setup() {
pane = new MockMapPane();
manager = new MapToolManager(pane);
}
@Test
public void setAndUnsetCursorTool() {
CursorTool tool = new InfoTool();
manager.setCursorTool(tool);
assertTrue(tool == manager.getCursorTool());
manager.setNoCursorTool();
assertNull(manager.getCursorTool());
}
}
|
modules/unsupported/swing/src/test/java/org/geotools/swing/tool/MapToolManagerHeadlessTest.java
|
/*
* GeoTools - The Open Source Java GIS Toolkit
* http://geotools.org
*
* (C) 2011, Open Source Geospatial Foundation (OSGeo)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation;
* version 2.1 of the License.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*/
package org.geotools.swing.tool;
import org.geotools.swing.testutils.MockMapPane;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* Unit tests for MapToolManager that do not require a graphics environment.
*
* @author Michael Bedward
* @since 8.0
* @source $URL$
* @version $Id$
*/
public class MapToolManagerHeadlessTest {
private MockMapPane pane;
private MapToolManager manager;
@Before
public void setup() {
pane = new MockMapPane();
manager = new MapToolManager(pane);
}
@Test
public void setAndUnsetCursorTool() {
CursorTool tool = new InfoTool();
manager.setCursorTool(tool);
assertTrue(tool == manager.getCursorTool());
manager.setNoCursorTool();
assertNull(manager.getCursorTool());
}
}
|
disabling test that is breaking hudson build
git-svn-id: b0f10281c9a1a817905b9aa75a7907aa928f8a7d@37782 e5c1c795-43da-0310-a71f-fac65c449510
|
modules/unsupported/swing/src/test/java/org/geotools/swing/tool/MapToolManagerHeadlessTest.java
|
disabling test that is breaking hudson build
|
<ide><path>modules/unsupported/swing/src/test/java/org/geotools/swing/tool/MapToolManagerHeadlessTest.java
<ide> import org.geotools.swing.testutils.MockMapPane;
<ide>
<ide> import org.junit.Before;
<add>import org.junit.Ignore;
<ide> import org.junit.Test;
<ide> import static org.junit.Assert.*;
<ide>
<ide> * @source $URL$
<ide> * @version $Id$
<ide> */
<add>@Ignore("temp to fix hudson build")
<ide> public class MapToolManagerHeadlessTest {
<ide> private MockMapPane pane;
<ide> private MapToolManager manager;
|
|
Java
|
bsd-3-clause
|
592cbc642c446e675c9187085e8a0e72fd337400
| 0 |
wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy
|
/*
* Copyright (c) 2009, 2012, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.oracle.graal.compiler.hsail.test;
import java.util.*;
import com.oracle.graal.compiler.hsail.test.infra.GraalKernelTester;
import org.junit.*;
/**
* Tests the spilling of integers into memory.
*/
public class StaticIntSpillTest extends GraalKernelTester {
static final int size = 100;
private int[] in = new int[size * 400];
@Result private int[] out = new int[size * 400];
public static void run(int[] out, int[] in, int gid) {
int id = gid;
int step = 20;
int sum0;
int sum1;
int sum2;
int sum3;
int sum4;
int sum5;
int sum6;
int sum7;
int sum8;
int sum9;
sum0 = sum1 = sum2 = sum3 = sum4 = sum5 = sum6 = sum7 = sum8 = sum9 = 0;
for (int i = 0; i < size; i += step) {
sum0 += in[i + 0];
sum1 += in[i + 1];
sum2 += in[i + 2];
sum3 += in[i + 3];
sum4 += in[i + 4];
sum5 += in[i + 5];
sum6 += in[i + 6];
sum7 += in[i + 7];
sum8 += in[i + 8];
sum9 += in[i + 9];
}
out[id * step + 0] = sum0;
out[id * step + 1] = sum1;
out[id * step + 2] = sum2;
out[id * step + 3] = sum3;
out[id * step + 4] = sum4;
out[id * step + 5] = sum5;
out[id * step + 6] = sum6;
out[id * step + 7] = sum7;
out[id * step + 8] = sum8;
out[id * step + 9] = sum9;
}
@Override
public void runTest() {
/**
* Call it for a range, specifying testmethod args (but not the fields it uses or the gid
* argument).
*
*/
Arrays.fill(out, 0);
Arrays.fill(in, 0);
dispatchMethodKernel(size, out, in);
}
// Marked to only run on hardware until simulator spill bug is fixed.
@Ignore
@Test
public void test() {
testGeneratedHsail();
}
}
|
graal/com.oracle.graal.compiler.hsail.test/src/com/oracle/graal/compiler/hsail/test/StaticIntSpillTest.java
|
/*
* Copyright (c) 2009, 2012, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.oracle.graal.compiler.hsail.test;
import java.util.*;
import com.oracle.graal.compiler.hsail.test.infra.GraalKernelTester;
import org.junit.Test;
/**
* Tests the spilling of integers into memory.
*/
public class StaticIntSpillTest extends GraalKernelTester {
static final int size = 100;
private int[] in = new int[size * 400];
@Result private int[] out = new int[size * 400];
public static void run(int[] out, int[] in, int gid) {
int id = gid;
int step = 20;
int sum0;
int sum1;
int sum2;
int sum3;
int sum4;
int sum5;
int sum6;
int sum7;
int sum8;
int sum9;
sum0 = sum1 = sum2 = sum3 = sum4 = sum5 = sum6 = sum7 = sum8 = sum9 = 0;
for (int i = 0; i < size; i += step) {
sum0 += in[i + 0];
sum1 += in[i + 1];
sum2 += in[i + 2];
sum3 += in[i + 3];
sum4 += in[i + 4];
sum5 += in[i + 5];
sum6 += in[i + 6];
sum7 += in[i + 7];
sum8 += in[i + 8];
sum9 += in[i + 9];
}
out[id * step + 0] = sum0;
out[id * step + 1] = sum1;
out[id * step + 2] = sum2;
out[id * step + 3] = sum3;
out[id * step + 4] = sum4;
out[id * step + 5] = sum5;
out[id * step + 6] = sum6;
out[id * step + 7] = sum7;
out[id * step + 8] = sum8;
out[id * step + 9] = sum9;
}
@Override
public void runTest() {
/**
* Call it for a range, specifying testmethod args (but not the fields it uses or the gid
* argument).
*
*/
Arrays.fill(out, 0);
Arrays.fill(in, 0);
dispatchMethodKernel(size, out, in);
}
// Marked to only run on hardware until simulator spill bug is fixed.
@Test
public void test() {
testGeneratedHsail();
}
}
|
Disable StaticIntSpillTest until HSAIL backend problem is resolved
|
graal/com.oracle.graal.compiler.hsail.test/src/com/oracle/graal/compiler/hsail/test/StaticIntSpillTest.java
|
Disable StaticIntSpillTest until HSAIL backend problem is resolved
|
<ide><path>graal/com.oracle.graal.compiler.hsail.test/src/com/oracle/graal/compiler/hsail/test/StaticIntSpillTest.java
<ide> import java.util.*;
<ide>
<ide> import com.oracle.graal.compiler.hsail.test.infra.GraalKernelTester;
<del>import org.junit.Test;
<add>
<add>import org.junit.*;
<ide>
<ide> /**
<ide> * Tests the spilling of integers into memory.
<ide> }
<ide>
<ide> // Marked to only run on hardware until simulator spill bug is fixed.
<add> @Ignore
<ide> @Test
<ide> public void test() {
<ide> testGeneratedHsail();
|
|
Java
|
agpl-3.0
|
error: pathspec 'opennms-webapp/src/main/java/org/opennms/web/acegisecurity/UserAttributeLdapAuthoritiesPopulator.java' did not match any file(s) known to git
|
6271e719375f8c21f49855a8bd851a41f7a58a31
| 1 |
aihua/opennms,roskens/opennms-pre-github,roskens/opennms-pre-github,tdefilip/opennms,rdkgit/opennms,roskens/opennms-pre-github,roskens/opennms-pre-github,rdkgit/opennms,tdefilip/opennms,aihua/opennms,rdkgit/opennms,tdefilip/opennms,roskens/opennms-pre-github,aihua/opennms,roskens/opennms-pre-github,roskens/opennms-pre-github,roskens/opennms-pre-github,rdkgit/opennms,rdkgit/opennms,aihua/opennms,roskens/opennms-pre-github,tdefilip/opennms,tdefilip/opennms,rdkgit/opennms,roskens/opennms-pre-github,aihua/opennms,aihua/opennms,rdkgit/opennms,tdefilip/opennms,tdefilip/opennms,aihua/opennms,tdefilip/opennms,roskens/opennms-pre-github,rdkgit/opennms,rdkgit/opennms,tdefilip/opennms,aihua/opennms,aihua/opennms,rdkgit/opennms
|
package org.opennms.web.acegisecurity;
import javax.naming.NamingEnumeration;
import javax.naming.NamingException;
import javax.naming.directory.Attribute;
import javax.naming.directory.Attributes;
import org.acegisecurity.GrantedAuthority;
import org.acegisecurity.GrantedAuthorityImpl;
import org.acegisecurity.ldap.InitialDirContextFactory;
import org.acegisecurity.ldap.LdapDataAccessException;
import org.acegisecurity.providers.ldap.LdapAuthoritiesPopulator;
import org.acegisecurity.userdetails.ldap.LdapUserDetails;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.util.Assert;
public class UserAttributeLdapAuthoritiesPopulator implements LdapAuthoritiesPopulator {
private static final Log s_logger = LogFactory.getLog(UserAttributeLdapAuthoritiesPopulator.class);
private InitialDirContextFactory m_initialDirContextFactory;
private String m_userAttribute;
public UserAttributeLdapAuthoritiesPopulator(InitialDirContextFactory initialDirContextFactory, String userAttribute) {
Assert.notNull(initialDirContextFactory, "InitialDirContextFactory can not be null");
Assert.notNull(userAttribute, "UserAttribute can not be null");
m_initialDirContextFactory = initialDirContextFactory;
m_userAttribute = userAttribute;
}
public GrantedAuthority[] getGrantedAuthorities(LdapUserDetails userDetails) throws LdapDataAccessException {
GrantedAuthority[] defaultAuthorities = new GrantedAuthority[] { new GrantedAuthorityImpl("ROLE_USER") };
// new GrantedAuthority[0];
Assert.notNull(userDetails, "UserDetails can not be null");
Attributes attributes = userDetails.getAttributes();
Assert.notNull(attributes, "Attributes in the UserDetails object cannot be null");
Attribute attribute = attributes.get(m_userAttribute);
if (attribute == null) {
s_logger.info("User '" + userDetails.getDn() + "' does not have '" + m_userAttribute + "'. Returning [ROLE_USER].");
return defaultAuthorities;
}
try {
NamingEnumeration enumeration = attribute.getAll();
while (enumeration.hasMore()) {
Object o = enumeration.next();
s_logger.info("got attribute value for user '" + userDetails.getDn() + "': '" + o + "'");
}
} catch (NamingException e) {
s_logger.info("got namingexception: " + e.getMessage(), e);
}
s_logger.info("Returning default of [ROLE_USER] for '" + userDetails.getDn() + "'");
return defaultAuthorities;
}
}
|
opennms-webapp/src/main/java/org/opennms/web/acegisecurity/UserAttributeLdapAuthoritiesPopulator.java
|
Initial hack at a LdapAuthoritiesPopulator based on user attributes.
|
opennms-webapp/src/main/java/org/opennms/web/acegisecurity/UserAttributeLdapAuthoritiesPopulator.java
|
Initial hack at a LdapAuthoritiesPopulator based on user attributes.
|
<ide><path>opennms-webapp/src/main/java/org/opennms/web/acegisecurity/UserAttributeLdapAuthoritiesPopulator.java
<add>package org.opennms.web.acegisecurity;
<add>
<add>import javax.naming.NamingEnumeration;
<add>import javax.naming.NamingException;
<add>import javax.naming.directory.Attribute;
<add>import javax.naming.directory.Attributes;
<add>
<add>import org.acegisecurity.GrantedAuthority;
<add>import org.acegisecurity.GrantedAuthorityImpl;
<add>import org.acegisecurity.ldap.InitialDirContextFactory;
<add>import org.acegisecurity.ldap.LdapDataAccessException;
<add>import org.acegisecurity.providers.ldap.LdapAuthoritiesPopulator;
<add>import org.acegisecurity.userdetails.ldap.LdapUserDetails;
<add>import org.apache.commons.logging.Log;
<add>import org.apache.commons.logging.LogFactory;
<add>import org.springframework.util.Assert;
<add>
<add>public class UserAttributeLdapAuthoritiesPopulator implements LdapAuthoritiesPopulator {
<add>
<add> private static final Log s_logger = LogFactory.getLog(UserAttributeLdapAuthoritiesPopulator.class);
<add>
<add> private InitialDirContextFactory m_initialDirContextFactory;
<add> private String m_userAttribute;
<add>
<add> public UserAttributeLdapAuthoritiesPopulator(InitialDirContextFactory initialDirContextFactory, String userAttribute) {
<add> Assert.notNull(initialDirContextFactory, "InitialDirContextFactory can not be null");
<add> Assert.notNull(userAttribute, "UserAttribute can not be null");
<add> m_initialDirContextFactory = initialDirContextFactory;
<add> m_userAttribute = userAttribute;
<add> }
<add>
<add> public GrantedAuthority[] getGrantedAuthorities(LdapUserDetails userDetails) throws LdapDataAccessException {
<add> GrantedAuthority[] defaultAuthorities = new GrantedAuthority[] { new GrantedAuthorityImpl("ROLE_USER") };
<add> // new GrantedAuthority[0];
<add>
<add> Assert.notNull(userDetails, "UserDetails can not be null");
<add>
<add> Attributes attributes = userDetails.getAttributes();
<add> Assert.notNull(attributes, "Attributes in the UserDetails object cannot be null");
<add>
<add> Attribute attribute = attributes.get(m_userAttribute);
<add> if (attribute == null) {
<add> s_logger.info("User '" + userDetails.getDn() + "' does not have '" + m_userAttribute + "'. Returning [ROLE_USER].");
<add> return defaultAuthorities;
<add> }
<add>
<add> try {
<add> NamingEnumeration enumeration = attribute.getAll();
<add> while (enumeration.hasMore()) {
<add> Object o = enumeration.next();
<add> s_logger.info("got attribute value for user '" + userDetails.getDn() + "': '" + o + "'");
<add> }
<add> } catch (NamingException e) {
<add> s_logger.info("got namingexception: " + e.getMessage(), e);
<add> }
<add>
<add> s_logger.info("Returning default of [ROLE_USER] for '" + userDetails.getDn() + "'");
<add> return defaultAuthorities;
<add> }
<add>}
|
|
Java
|
apache-2.0
|
32f9f96a8861b3714bc1ef94e373e075547993f2
| 0 |
Netflix-Skunkworks/WSPerfLab,Netflix-Skunkworks/WSPerfLab,Netflix-Skunkworks/WSPerfLab
|
package perf.client;
import org.eclipse.jetty.client.api.Result;
/**
* @author Nitesh Kant
*/
public class WsClientDriver {
public static void main(String[] args) throws Exception {
validate(args);
AsyncIOClient.Builder clientBuilder = new AsyncIOClient.Builder();
String uri = args[0];
String concurrentClientsStr = args[1];
String requests = args[2];
System.out.println("Using test uri: [" + uri + "]");
System.out.println("Using concurrent clients: [" + concurrentClientsStr + "]");
System.out.println("Using total number of requests: [" + requests + "]");
int concurrentClients = Integer.parseInt(concurrentClientsStr);
try {
clientBuilder.withConcurrentClients(concurrentClients);
} catch (NumberFormatException e) {
System.err.println("Illegal concurrent clients value: " + concurrentClientsStr+ " should be an integer");
printUsageAndExit();
}
clientBuilder.withMaxConnections(Integer.MAX_VALUE);
try {
clientBuilder.withTotalRequests(Long.parseLong(requests));
} catch (NumberFormatException e) {
System.err.println("Illegal total requests: " + requests + " should be a long");
printUsageAndExit();
}
clientBuilder.withTestUrl(uri);
final AsyncIOClient client = clientBuilder.build();
client.start(new Runnable() {
@Override
public void run() {
TestResult result = client.getResult();
String resultAsJson = result.toJson();
System.out.println("****************************** Result **************************************");
System.out.println(resultAsJson);
System.out.println("****************************************************************************");
}
});
}
private static void validate(String[] args) throws IllegalArgumentException {
if (args.length < 3) {
printUsageAndExit();
}
}
private static void printUsageAndExit() {
System.err.println("Usage java perf.client.WsClientDriver <test_uri> <concurrent_clients> <requests>");
System.exit(-1);
}
}
|
ws-client/src/main/java/perf/client/WsClientDriver.java
|
package perf.client;
import org.eclipse.jetty.client.api.Result;
/**
* @author Nitesh Kant
*/
public class WsClientDriver {
public static void main(String[] args) throws Exception {
validate(args);
AsyncIOClient.Builder clientBuilder = new AsyncIOClient.Builder();
String uri = args[0];
String concurrentClientsStr = args[1];
String requests = args[2];
System.out.println("Using test uri: [" + uri + "]");
System.out.println("Using concurrent clients: [" + concurrentClientsStr + "]");
System.out.println("Using total number of requests: [" + requests + "]");
int concurrentClients = Integer.parseInt(concurrentClientsStr);
try {
clientBuilder.withConcurrentClients(concurrentClients);
} catch (NumberFormatException e) {
System.err.println("Illegal concurrent clients value: " + concurrentClientsStr+ " should be an integer");
printUsageAndExit();
}
clientBuilder.withMaxConnections(concurrentClients);
try {
clientBuilder.withTotalRequests(Long.parseLong(requests));
} catch (NumberFormatException e) {
System.err.println("Illegal total requests: " + requests + " should be a long");
printUsageAndExit();
}
clientBuilder.withTestUrl(uri);
final AsyncIOClient client = clientBuilder.build();
client.start(new Runnable() {
@Override
public void run() {
TestResult result = client.getResult();
String resultAsJson = result.toJson();
System.out.println("****************************** Result **************************************");
System.out.println(resultAsJson);
System.out.println("****************************************************************************");
}
});
}
private static void validate(String[] args) throws IllegalArgumentException {
if (args.length < 3) {
printUsageAndExit();
}
}
private static void printUsageAndExit() {
System.err.println("Usage java perf.client.WsClientDriver <test_uri> <concurrent_clients> <requests>");
System.exit(-1);
}
}
|
Setting Max connections to a high value for ws-client
|
ws-client/src/main/java/perf/client/WsClientDriver.java
|
Setting Max connections to a high value for ws-client
|
<ide><path>ws-client/src/main/java/perf/client/WsClientDriver.java
<ide> printUsageAndExit();
<ide> }
<ide>
<del> clientBuilder.withMaxConnections(concurrentClients);
<add> clientBuilder.withMaxConnections(Integer.MAX_VALUE);
<ide> try {
<ide> clientBuilder.withTotalRequests(Long.parseLong(requests));
<ide> } catch (NumberFormatException e) {
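
In the listing above, concurrentClientsStr is parsed with Integer.parseInt before the try block, so the NumberFormatException handler around withConcurrentClients can never be reached by a malformed value. A minimal sketch of validating both numeric arguments inside a guarded helper (hypothetical class, not part of the WSPerfLab sources):

// Hypothetical helper: parse the numeric command-line arguments so that a
// malformed value is reported instead of escaping as an uncaught exception.
public final class ArgumentParser {

    private ArgumentParser() {
    }

    public static int parseConcurrentClients(String raw) {
        try {
            return Integer.parseInt(raw);
        } catch (NumberFormatException e) {
            throw new IllegalArgumentException(
                    "Illegal concurrent clients value: " + raw + " should be an integer", e);
        }
    }

    public static long parseTotalRequests(String raw) {
        try {
            return Long.parseLong(raw);
        } catch (NumberFormatException e) {
            throw new IllegalArgumentException(
                    "Illegal total requests: " + raw + " should be a long", e);
        }
    }
}

The driver could then catch IllegalArgumentException once and call printUsageAndExit().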
|
|
JavaScript
|
mit
|
eb33112eae71a3444957fa10345f975e994e11f5
| 0 |
pkarw/vue-storefront,DivanteLtd/vue-storefront,DivanteLtd/vue-storefront,DivanteLtd/vue-storefront,pkarw/vue-storefront,pkarw/vue-storefront,DivanteLtd/vue-storefront,pkarw/vue-storefront
|
class LocalForageCacheDriver {
constructor (collection, useLocalCacheByDefault = true) {
const collectionName = collection._config.storeName
const dbName = collection._config.name
if (typeof global.$VS.localCache === 'undefined') {
global.$VS.localCache = {}
}
if (typeof global.$VS.localCache[dbName] === 'undefined') {
global.$VS.localCache[dbName] = {}
}
if (typeof global.$VS.localCache[dbName][collectionName] === 'undefined') {
global.$VS.localCache[dbName][collectionName] = {}
}
this._dbName = dbName
this._useLocalCacheByDefault = useLocalCacheByDefault
this._localCache = global.$VS.localCache[dbName][collectionName]
this._localForageCollection = collection
}
// Remove all keys from the datastore, effectively destroying all data in
// the app's key/value store!
clear (callback) {
return this._localForageCollection.clear(callback)
}
// Retrieve an item from the store. Unlike the original async_storage
// library in Gaia, we don't modify return values at all. If a key's value
// is `undefined`, we pass that value to the callback function.
getItem (key, callback) {
const self = this
const isCallbackCallable = (typeof callback !== 'undefined' && callback)
let isResolved = false
if (self._useLocalCacheByDefault && self._localCache[key]) {
// console.debug('Local cache fallback for GET', key)
return new Promise((resolve, reject) => {
const value = typeof self._localCache[key] !== 'undefined' ? self._localCache[key] : null
if (isCallbackCallable) callback(null, value)
resolve(value)
})
}
// console.debug('No local cache fallback for GET', key)
const promise = this._localForageCollection.getItem(key).then(result => {
if (!isResolved) {
if (isCallbackCallable) {
callback(null, result)
}
isResolved = true
} else {
console.debug('Skipping return value as it was previously resolved')
}
return result
}).catch(err => {
if (!isResolved) {
if (isCallbackCallable) callback(null, typeof self._localCache[key] !== 'undefined' ? self._localCache[key] : null)
}
isResolved = true
})
setTimeout(function () {
if (!isResolved) { // this is cache time out check
console.error('Cache not responding within 1s')
if (isCallbackCallable) callback(null, typeof self._localCache[key] !== 'undefined' ? self._localCache[key] : null)
}
}, 1000)
return promise
}
// Iterate over all items in the store.
iterate (iterator, callback) {
const self = this
const isIteratorCallable = (typeof iterator !== 'undefined' && iterator)
let globalIterationNumber = 1
if (this._useLocalCacheByDefault) {
// console.debug('Local cache iteration')
for (const localKey in self._localCache) {
if (isIteratorCallable) {
iterator(self._localCache[localKey], localKey, globalIterationNumber)
globalIterationNumber++
}
}
}
return this._localForageCollection.iterate(function (value, key, iterationNumber) {
if (isIteratorCallable) {
if (self._useLocalCacheByDefault) {
if (typeof self._localCache[key] === 'undefined') {
iterator(value, key, globalIterationNumber)
globalIterationNumber++
} else {
// console.debug('Skipping iteration key because local cache executed', key)
}
} else {
iterator(value, key, iterationNumber)
}
}
})
}
// Same as localStorage's key() method, except takes a callback.
key (n, callback) {
return this._localForageCollection.key(n, callback)
}
keys (callback) {
return this._localForageCollection.keys(callback)
}
// Supply the number of keys in the datastore to the callback function.
length (callback) {
return this._localForageCollection.length(callback)
}
// Remove an item from the store, nice and simple.
removeItem (key, callback) {
    if (typeof this._localCache[key] !== 'undefined') {
      delete this._localCache[key]
}
return this._localForageCollection.removeItem(key, callback)
}
// Set a key's value and run an optional callback once the value is set.
// Unlike Gaia's implementation, the callback function is passed the value,
// in case you want to operate on that value only after you're sure it
// saved, or something like that.
setItem (key, value, callback) {
const self = this
const isCallbackCallable = (typeof callback !== 'undefined' && callback)
self._localCache[key] = value
const promise = this._localForageCollection.setItem(key, value).then(result => {
if (isCallbackCallable) {
callback(null, result)
}
}).catch(err => {
console.debug(err)
})
return promise
}
}
// The actual localForage object that we expose as a module or via a
// global.$VS. It's extended by pulling in one of our other libraries.
export default LocalForageCacheDriver
|
core/store/lib/storage.js
|
class LocalForageCacheDriver {
constructor (collection, useLocalCacheByDefault = true) {
const collectionName = collection._config.storeName
const dbName = collection._config.name
if (typeof global.$VS.localCache === 'undefined') {
global.$VS.localCache = {}
}
if (typeof global.$VS.localCache[dbName] === 'undefined') {
global.$VS.localCache[dbName] = {}
}
if (typeof global.$VS.localCache[dbName][collectionName] === 'undefined') {
global.$VS.localCache[dbName][collectionName] = {}
}
this._dbName = dbName
this._useLocalCacheByDefault = useLocalCacheByDefault
this._localCache = global.$VS.localCache[dbName][collectionName]
this._localForageCollection = collection
}
// Remove all keys from the datastore, effectively destroying all data in
// the app's key/value store!
clear (callback) {
return this._localForageCollection.clear(callback)
}
// Retrieve an item from the store. Unlike the original async_storage
// library in Gaia, we don't modify return values at all. If a key's value
// is `undefined`, we pass that value to the callback function.
getItem (key, callback) {
const self = this
const isCallbackCallable = (typeof callback !== 'undefined' && callback)
let isResolved = false
if (self._useLocalCacheByDefault && self._localCache[key]) {
// console.debug('Local cache fallback for GET', key)
return new Promise((resolve, reject) => {
const value = typeof self._localCache[key] !== 'undefined' ? self._localCache[key] : null
if (isCallbackCallable) callback(null, value)
resolve(value)
})
}
// console.debug('No local cache fallback for GET', key)
const promise = this._localForageCollection.getItem(key).then(result => {
if (!isResolved) {
if (isCallbackCallable) {
callback(null, result)
}
isResolved = true
} else {
console.debug('Skipping return value as it was previously resolved')
}
return result
}).catch(err => {
console.debug('UniversalStorage - GET - probably in SSR mode: ' + err)
if (!isResolved) {
if (isCallbackCallable) callback(null, typeof self._localCache[key] !== 'undefined' ? self._localCache[key] : null)
}
isResolved = true
})
setTimeout(function () {
if (!isResolved) { // this is cache time out check
console.error('Cache not responding within 1s')
if (isCallbackCallable) callback(null, typeof self._localCache[key] !== 'undefined' ? self._localCache[key] : null)
}
}, 1000)
return promise
}
// Iterate over all items in the store.
iterate (iterator, callback) {
const self = this
const isIteratorCallable = (typeof iterator !== 'undefined' && iterator)
let globalIterationNumber = 1
if (this._useLocalCacheByDefault) {
// console.debug('Local cache iteration')
for (const localKey in self._localCache) {
if (isIteratorCallable) {
iterator(self._localCache[localKey], localKey, globalIterationNumber)
globalIterationNumber++
}
}
}
return this._localForageCollection.iterate(function (value, key, iterationNumber) {
if (isIteratorCallable) {
if (self._useLocalCacheByDefault) {
if (typeof self._localCache[key] === 'undefined') {
iterator(value, key, globalIterationNumber)
globalIterationNumber++
} else {
// console.debug('Skipping iteration key because local cache executed', key)
}
} else {
iterator(value, key, iterationNumber)
}
}
})
}
// Same as localStorage's key() method, except takes a callback.
key (n, callback) {
return this._localForageCollection.key(n, callback)
}
keys (callback) {
return this._localForageCollection.keys(callback)
}
// Supply the number of keys in the datastore to the callback function.
length (callback) {
return this._localForageCollection.length(callback)
}
// Remove an item from the store, nice and simple.
removeItem (key, callback) {
    if (typeof this._localCache[key] !== 'undefined') {
      delete this._localCache[key]
}
return this._localForageCollection.removeItem(key, callback)
}
// Set a key's value and run an optional callback once the value is set.
// Unlike Gaia's implementation, the callback function is passed the value,
// in case you want to operate on that value only after you're sure it
// saved, or something like that.
setItem (key, value, callback) {
const self = this
const isCallbackCallable = (typeof callback !== 'undefined' && callback)
self._localCache[key] = value
const promise = this._localForageCollection.setItem(key, value).then(result => {
if (isCallbackCallable) {
callback(null, result)
}
}).catch(err => {
console.debug('UniversalStorage - SET - probably in SSR mode: ' + err)
})
return promise
}
}
// The actual localForage object that we expose as a module or via a
// global.$VS. It's extended by pulling in one of our other libraries.
export default LocalForageCacheDriver
|
SSR error message for UniversalStorage removed
|
core/store/lib/storage.js
|
SSR error message for UniversalStorage removed
|
<ide><path>core/store/lib/storage.js
<ide> }
<ide> return result
<ide> }).catch(err => {
<del> console.debug('UniversalStorage - GET - probably in SSR mode: ' + err)
<ide> if (!isResolved) {
<ide> if (isCallbackCallable) callback(null, typeof self._localCache[key] !== 'undefined' ? self._localCache[key] : null)
<ide> }
<ide> callback(null, result)
<ide> }
<ide> }).catch(err => {
<del> console.debug('UniversalStorage - SET - probably in SSR mode: ' + err)
<add> console.debug(err)
<ide> })
<ide>
<ide> return promise
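
getItem() above answers from the in-memory map when a value is already cached, otherwise races the localForage promise against a one-second timeout and falls back to the local copy if the backing store is slow or throws. A hedged Java illustration of the same read-through-with-timeout pattern (hypothetical class; the storefront code itself is JavaScript):

import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;

// Illustrative only: keep a local copy on every write and serve it whenever
// the backing store does not answer within one second or fails outright.
public class TimeoutFallbackCache {

    private final Map<String, String> localCache = new ConcurrentHashMap<>();

    public CompletableFuture<String> getItem(String key, CompletableFuture<String> backingStoreLookup) {
        return backingStoreLookup
                .completeOnTimeout(localCache.get(key), 1, TimeUnit.SECONDS)
                .exceptionally(err -> localCache.get(key));
    }

    public void setItem(String key, String value) {
        localCache.put(key, value); // write-through: local copy stays in sync
    }
}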
|
|
Java
|
mit
|
c33ef0a71a026dce10c1cbdd9f8c0cd727b3191d
| 0 |
mcasperson/IridiumApplicationTesting,mcasperson/IridiumApplicationTesting,mcasperson/IridiumApplicationTesting
|
package au.com.agic.apptesting.utils.impl;
import au.com.agic.apptesting.constants.Constants;
import au.com.agic.apptesting.profiles.FileProfileAccess;
import au.com.agic.apptesting.profiles.configuration.*;
import au.com.agic.apptesting.profiles.dataset.DataSet;
import au.com.agic.apptesting.profiles.dataset.DatasetsFactory;
import au.com.agic.apptesting.profiles.dataset.DatasetsRootElement;
import au.com.agic.apptesting.profiles.dataset.Setting;
import au.com.agic.apptesting.utils.ApplicationUrlLoader;
import au.com.agic.apptesting.utils.SystemPropertyUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.validation.constraints.NotNull;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.util.*;
import java.util.regex.Matcher;
import java.util.stream.Collectors;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
/**
* Loads the application urls from configuration
*/
public class ApplicationUrlLoaderImpl implements ApplicationUrlLoader {
private static final Logger LOGGER = LoggerFactory.getLogger(ApplicationUrlLoaderImpl.class);
private static final SystemPropertyUtils SYSTEM_PROPERTY_UTILS = new SystemPropertyUtilsImpl();
private static final String CONFIG_FILE = SYSTEM_PROPERTY_UTILS.getProperty(
Constants.CONFIGURATION);
private static final DatasetsFactory DATASETS_FACTORY = new DatasetsFactory();
private FileProfileAccess<Configuration> profileAccess = new FileProfileAccess<>(
CONFIG_FILE,
Configuration.class);
private Optional<DatasetsRootElement> datasets;
public void initialise() {
profileAccess = new FileProfileAccess<>(
CONFIG_FILE,
Configuration.class);
final String datsetsFile = SYSTEM_PROPERTY_UTILS.getProperty(
Constants.DATA_SETS_PROFILE_SYSTEM_PROPERTY);
datasets = DATASETS_FACTORY.getDatasets(datsetsFile);
}
private String getAppUrl() {
final String appUrl = SYSTEM_PROPERTY_UTILS.getProperty(Constants.APP_URL_OVERRIDE_SYSTEM_PROPERTY);
if (StringUtils.isNotBlank(appUrl)) {
return appUrl;
}
return null;
}
@Override
public List<UrlMapping> getAppUrls(final String featureGroup) {
checkState(profileAccess != null, "initialise() must be called");
checkState(datasets != null, "initialise() must be called");
/*
Deal with the override. This system property takes precedence over
all other options.
*/
final String appUrlOverride = getAppUrl();
if (StringUtils.isNotBlank(appUrlOverride)) {
LOGGER.info("Getting URL from global system property");
return Arrays.asList(new UrlMapping(appUrlOverride));
}
/*
We can also define a collection of URLs as system properties.
*/
final List<String> normalisedKeys = SYSTEM_PROPERTY_UTILS.getNormalisedProperties();
final List<Url> systemPropValues = normalisedKeys.stream()
.map(Constants.APP_URL_OVERRIDE_SYSTEM_PROPERTY_REGEX::matcher)
.filter(Matcher::matches)
.map(x -> new Url(SYSTEM_PROPERTY_UTILS.getProperty(x.group(0)), x.group(1)))
.collect(Collectors.toList());
if (!systemPropValues.isEmpty()) {
LOGGER.info("Getting URL from specific system property");
return Arrays.asList(new UrlMapping(systemPropValues));
}
/*
		 The final option is to get the mappings from the csv or xml file
*/
final Optional<Configuration> configuration = profileAccess.getProfile();
if (configuration.isPresent()) {
LOGGER.info("Getting URL config file");
final List<UrlMapping> retValue = getUrlMappings(configuration.get(), featureGroup);
return getLimitedAppUrls(retValue);
}
/*
There are no mappings to return
*/
return new ArrayList<>();
}
private List<UrlMapping> getLimitedAppUrls(@NotNull final List<UrlMapping> completeList) {
checkNotNull(completeList);
final String limitedUrls = SYSTEM_PROPERTY_UTILS.getProperty(Constants.NUMBER_URLS_SYSTEM_PROPERTY);
if (StringUtils.isNoneBlank(limitedUrls)) {
try {
Collections.shuffle(completeList, SecureRandom.getInstance("SHA1PRNG"));
final Integer limit = Integer.parseInt(limitedUrls);
final List<UrlMapping> subList = new ArrayList<>();
for (int i = 0; i < Math.min(limit, completeList.size()); ++i) {
subList.add(completeList.get(i));
}
return subList;
} catch (final NumberFormatException | NoSuchAlgorithmException ignored) {
/*
Invalid input that we ignore
*/
}
}
return completeList;
}
@Override
public Map<Integer, Map<String, String>> getDatasets() {
checkState(profileAccess != null, "initialise() must be called");
checkState(datasets != null, "initialise() must be called");
/*
It is possible that a profile does not exist with data sets for this featureGroup
*/
if (!datasets.isPresent()) {
return new HashMap<>();
}
return getDatasets(datasets.get());
}
private Map<Integer, Map<String, String>> getDatasets(@NotNull final DatasetsRootElement profile) {
checkNotNull(profile);
final Map<String, String> commonDataSet = getCommonDataset(profile);
final Map<Integer, Map<String, String>> dataSets = new HashMap<>();
int index = 0;
for (final DataSet dataSet : profile.getDataSets().getDataSets()) {
if (!dataSets.containsKey(index)) {
final Map<String, String> newMap = new HashMap<>(commonDataSet);
dataSets.put(index, newMap);
}
for (final Setting setting : dataSet.getSettings()) {
dataSets.get(index).put(setting.getName(), setting.getValue());
}
++index;
}
return dataSets;
}
private List<UrlMapping> getUrlMappings(@NotNull final Configuration configuration, final String app) {
checkNotNull(configuration);
return Optional.ofNullable(configuration)
.map(Configuration::getUrlMappings)
.map(URLMappings::getFeatureGroups)
.map(featureGroups ->
featureGroups.stream().filter(e -> StringUtils.endsWithIgnoreCase(app, e.getName()))
.findFirst()
.map(FeatureGroup::getUrlMappings)
.orElse(new ArrayList<>())
)
.get();
}
/**
* @param profile The combined profile
* @return The common data set values to be applied to all other datasets
*/
private Map<String, String> getCommonDataset(@NotNull final DatasetsRootElement profile) {
checkNotNull(profile);
final Map<String, String> commonDataSet = new HashMap<>();
profile.getDataSets().getCommonDataSet().getSettings().stream()
/*
Ensure we add the data set to a sequential index
*/
.forEach(e -> commonDataSet.put(e.getName(), e.getValue()));
return commonDataSet;
}
}
|
src/main/java/au/com/agic/apptesting/utils/impl/ApplicationUrlLoaderImpl.java
|
package au.com.agic.apptesting.utils.impl;
import au.com.agic.apptesting.constants.Constants;
import au.com.agic.apptesting.profiles.FileProfileAccess;
import au.com.agic.apptesting.profiles.configuration.*;
import au.com.agic.apptesting.profiles.dataset.DataSet;
import au.com.agic.apptesting.profiles.dataset.DatasetsFactory;
import au.com.agic.apptesting.profiles.dataset.DatasetsRootElement;
import au.com.agic.apptesting.profiles.dataset.Setting;
import au.com.agic.apptesting.utils.ApplicationUrlLoader;
import au.com.agic.apptesting.utils.SystemPropertyUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.validation.constraints.NotNull;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.util.*;
import java.util.regex.Matcher;
import java.util.stream.Collectors;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
/**
* Loads the application urls from configuration
*/
public class ApplicationUrlLoaderImpl implements ApplicationUrlLoader {
private static final Logger LOGGER = LoggerFactory.getLogger(ApplicationUrlLoaderImpl.class);
private static final SystemPropertyUtils SYSTEM_PROPERTY_UTILS = new SystemPropertyUtilsImpl();
private static final String DATASETS_FILE = SYSTEM_PROPERTY_UTILS.getProperty(
Constants.DATA_SETS_PROFILE_SYSTEM_PROPERTY);
private static final String CONFIG_FILE = SYSTEM_PROPERTY_UTILS.getProperty(
Constants.CONFIGURATION);
private static final DatasetsFactory DATASETS_FACTORY = new DatasetsFactory();
private FileProfileAccess<Configuration> profileAccess = new FileProfileAccess<>(
CONFIG_FILE,
Configuration.class);
private Optional<DatasetsRootElement> datasets = DATASETS_FACTORY.getDatasets(DATASETS_FILE);
public void initialise() {
profileAccess = new FileProfileAccess<>(
CONFIG_FILE,
Configuration.class);
datasets = DATASETS_FACTORY.getDatasets(DATASETS_FILE);
}
private String getAppUrl() {
final String appUrl = SYSTEM_PROPERTY_UTILS.getProperty(Constants.APP_URL_OVERRIDE_SYSTEM_PROPERTY);
if (StringUtils.isNotBlank(appUrl)) {
return appUrl;
}
return null;
}
@Override
public List<UrlMapping> getAppUrls(final String featureGroup) {
checkState(profileAccess != null, "initialise() must be called");
checkState(datasets != null, "initialise() must be called");
/*
Deal with the override. This system property takes precedence over
all other options.
*/
final String appUrlOverride = getAppUrl();
if (StringUtils.isNotBlank(appUrlOverride)) {
LOGGER.info("Getting URL from global system property");
return Arrays.asList(new UrlMapping(appUrlOverride));
}
/*
We can also define a collection of URLs as system properties.
*/
final List<String> normalisedKeys = SYSTEM_PROPERTY_UTILS.getNormalisedProperties();
final List<Url> systemPropValues = normalisedKeys.stream()
.map(Constants.APP_URL_OVERRIDE_SYSTEM_PROPERTY_REGEX::matcher)
.filter(Matcher::matches)
.map(x -> new Url(SYSTEM_PROPERTY_UTILS.getProperty(x.group(0)), x.group(1)))
.collect(Collectors.toList());
if (!systemPropValues.isEmpty()) {
LOGGER.info("Getting URL from specific system property");
return Arrays.asList(new UrlMapping(systemPropValues));
}
/*
		 The final option is to get the mappings from the csv or xml file
*/
final Optional<Configuration> configuration = profileAccess.getProfile();
if (configuration.isPresent()) {
LOGGER.info("Getting URL config file");
final List<UrlMapping> retValue = getUrlMappings(configuration.get(), featureGroup);
return getLimitedAppUrls(retValue);
}
/*
There are no mappings to return
*/
return new ArrayList<>();
}
private List<UrlMapping> getLimitedAppUrls(@NotNull final List<UrlMapping> completeList) {
checkNotNull(completeList);
final String limitedUrls = SYSTEM_PROPERTY_UTILS.getProperty(Constants.NUMBER_URLS_SYSTEM_PROPERTY);
if (StringUtils.isNoneBlank(limitedUrls)) {
try {
Collections.shuffle(completeList, SecureRandom.getInstance("SHA1PRNG"));
final Integer limit = Integer.parseInt(limitedUrls);
final List<UrlMapping> subList = new ArrayList<>();
for (int i = 0; i < Math.min(limit, completeList.size()); ++i) {
subList.add(completeList.get(i));
}
return subList;
} catch (final NumberFormatException | NoSuchAlgorithmException ignored) {
/*
Invalid input that we ignore
*/
}
}
return completeList;
}
@Override
public Map<Integer, Map<String, String>> getDatasets() {
checkState(profileAccess != null, "initialise() must be called");
checkState(datasets != null, "initialise() must be called");
/*
It is possible that a profile does not exist with data sets for this featureGroup
*/
if (!datasets.isPresent()) {
return new HashMap<>();
}
return getDatasets(datasets.get());
}
private Map<Integer, Map<String, String>> getDatasets(@NotNull final DatasetsRootElement profile) {
checkNotNull(profile);
final Map<String, String> commonDataSet = getCommonDataset(profile);
final Map<Integer, Map<String, String>> dataSets = new HashMap<>();
int index = 0;
for (final DataSet dataSet : profile.getDataSets().getDataSets()) {
if (!dataSets.containsKey(index)) {
final Map<String, String> newMap = new HashMap<>(commonDataSet);
dataSets.put(index, newMap);
}
for (final Setting setting : dataSet.getSettings()) {
dataSets.get(index).put(setting.getName(), setting.getValue());
}
++index;
}
return dataSets;
}
private List<UrlMapping> getUrlMappings(@NotNull final Configuration configuration, final String app) {
checkNotNull(configuration);
return Optional.ofNullable(configuration)
.map(Configuration::getUrlMappings)
.map(URLMappings::getFeatureGroups)
.map(featureGroups ->
featureGroups.stream().filter(e -> StringUtils.endsWithIgnoreCase(app, e.getName()))
.findFirst()
.map(FeatureGroup::getUrlMappings)
.orElse(new ArrayList<>())
)
.get();
}
/**
* @param profile The combined profile
* @return The common data set values to be applied to all other datasets
*/
private Map<String, String> getCommonDataset(@NotNull final DatasetsRootElement profile) {
checkNotNull(profile);
final Map<String, String> commonDataSet = new HashMap<>();
profile.getDataSets().getCommonDataSet().getSettings().stream()
/*
Ensure we add the data set to a sequential index
*/
.forEach(e -> commonDataSet.put(e.getName(), e.getValue()));
return commonDataSet;
}
}
|
Fixed up reloading of datasets
|
src/main/java/au/com/agic/apptesting/utils/impl/ApplicationUrlLoaderImpl.java
|
Fixed up reloading of datasets
|
<ide><path>src/main/java/au/com/agic/apptesting/utils/impl/ApplicationUrlLoaderImpl.java
<ide>
<ide> private static final SystemPropertyUtils SYSTEM_PROPERTY_UTILS = new SystemPropertyUtilsImpl();
<ide>
<del> private static final String DATASETS_FILE = SYSTEM_PROPERTY_UTILS.getProperty(
<del> Constants.DATA_SETS_PROFILE_SYSTEM_PROPERTY);
<ide> private static final String CONFIG_FILE = SYSTEM_PROPERTY_UTILS.getProperty(
<ide> Constants.CONFIGURATION);
<ide>
<ide> CONFIG_FILE,
<ide> Configuration.class);
<ide>
<del> private Optional<DatasetsRootElement> datasets = DATASETS_FACTORY.getDatasets(DATASETS_FILE);
<add> private Optional<DatasetsRootElement> datasets;
<ide>
<ide> public void initialise() {
<ide> profileAccess = new FileProfileAccess<>(
<ide> CONFIG_FILE,
<ide> Configuration.class);
<ide>
<del> datasets = DATASETS_FACTORY.getDatasets(DATASETS_FILE);
<add> final String datsetsFile = SYSTEM_PROPERTY_UTILS.getProperty(
<add> Constants.DATA_SETS_PROFILE_SYSTEM_PROPERTY);
<add>
<add> datasets = DATASETS_FACTORY.getDatasets(datsetsFile);
<ide> }
<ide>
<ide> private String getAppUrl() {
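
The change above stops capturing the datasets path in a static final field and instead reads the system property each time initialise() runs, which is what lets a property changed between launches be picked up when datasets are reloaded. A standalone sketch of the same idea (hypothetical class and property key, using plain System.getProperty rather than the project's SystemPropertyUtils):

// Illustrative only: resolve the configuration lazily in initialise() so that
// a system property changed after class loading is still honoured.
public class ReloadableProfileLoader {

    private String datasetsFile;

    public void initialise() {
        // Re-read the property on every call instead of caching it statically.
        datasetsFile = System.getProperty("test.datasets.file");
    }

    public String getDatasetsFile() {
        if (datasetsFile == null) {
            throw new IllegalStateException("initialise() must be called");
        }
        return datasetsFile;
    }
}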
|
|
Java
|
apache-2.0
|
45b44e6a4027e1445bc037d5b9d1a3eb070ff5dd
| 0 |
sandeshh/incubator-apex-core,brightchen/apex-core,tweise/apex-core,brightchen/apex-core,mattqzhang/apex-core,PramodSSImmaneni/incubator-apex-core,chinmaykolhatkar/incubator-apex-core,klynchDS/incubator-apex-core,tushargosavi/apex-core,chinmaykolhatkar/incubator-apex-core,PramodSSImmaneni/apex-core,apache/incubator-apex-core,devtagare/incubator-apex-core,mt0803/incubator-apex-core,tushargosavi/incubator-apex-core,tweise/incubator-apex-core,tweise/incubator-apex-core,vrozov/apex-core,ishark/incubator-apex-core,vrozov/apex-core,ishark/incubator-apex-core,mt0803/incubator-apex-core,apache/incubator-apex-core,mattqzhang/apex-core,MalharJenkins/incubator-apex-core,tushargosavi/incubator-apex-core,deepak-narkhede/apex-core,brightchen/incubator-apex-core,klynchDS/incubator-apex-core,vrozov/incubator-apex-core,andyperlitch/incubator-apex-core,tweise/incubator-apex-core,vrozov/apex-core,apache/incubator-apex-core,simplifi-it/otterx,sandeshh/apex-core,vrozov/incubator-apex-core,deepak-narkhede/apex-core,amberarrow/incubator-apex-core,sandeshh/apex-core,tushargosavi/apex-core,simplifi-it/otterx,simplifi-it/otterx,PramodSSImmaneni/apex-core,devtagare/incubator-apex-core,sandeshh/incubator-apex-core,ishark/incubator-apex-core,PramodSSImmaneni/apex-core,chinmaykolhatkar/incubator-apex-core,andyperlitch/incubator-apex-core,vrozov/incubator-apex-core,sandeshh/incubator-apex-core,amberarrow/incubator-apex-core,MalharJenkins/incubator-apex-core,PramodSSImmaneni/incubator-apex-core,sandeshh/apex-core,tweise/apex-core,brightchen/incubator-apex-core,devtagare/incubator-apex-core,deepak-narkhede/apex-core,aniruddhas/incubator-apex-core,brightchen/apex-core,tushargosavi/incubator-apex-core,tushargosavi/apex-core,PramodSSImmaneni/incubator-apex-core,aniruddhas/incubator-apex-core,mattqzhang/apex-core,tweise/apex-core
|
/**
* Copyright (c) 2012-2013 DataTorrent, Inc. All rights reserved.
*/
package com.datatorrent.stram.cli;
import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.net.URI;
import java.text.SimpleDateFormat;
import java.util.*;
import jline.console.ConsoleReader;
import jline.console.completer.AggregateCompleter;
import jline.console.completer.ArgumentCompleter;
import jline.console.completer.Completer;
import jline.console.completer.FileNameCompleter;
import jline.console.completer.StringsCompleter;
import jline.console.history.FileHistory;
import jline.console.history.History;
import jline.console.history.MemoryHistory;
import javax.ws.rs.core.MediaType;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Sets;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import org.apache.commons.cli.BasicParser;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.log4j.Appender;
import org.apache.log4j.ConsoleAppender;
import org.apache.log4j.Level;
import org.apache.tools.ant.DirectoryScanner;
import com.datatorrent.api.DAG;
import com.datatorrent.stram.StramClient;
import com.datatorrent.stram.client.RecordingsAgent;
import com.datatorrent.stram.client.RecordingsAgent.RecordingInfo;
import com.datatorrent.stram.client.StramAgent;
import com.datatorrent.stram.client.StramAppLauncher;
import com.datatorrent.stram.client.StramAppLauncher.AppFactory;
import com.datatorrent.stram.client.StramClientUtils;
import com.datatorrent.stram.client.StramClientUtils.ClientRMHelper;
import com.datatorrent.stram.client.StramClientUtils.YarnClientHelper;
import com.datatorrent.stram.client.WebServicesVersionConversion.IncompatibleVersionException;
import com.datatorrent.stram.codec.LogicalPlanSerializer;
import com.datatorrent.stram.license.GenerateLicenseRequest;
import com.datatorrent.stram.license.License;
import com.datatorrent.stram.license.LicensingAgentClient;
import com.datatorrent.stram.license.SubLicense;
import com.datatorrent.stram.license.util.Util;
import com.datatorrent.stram.plan.logical.AddStreamSinkRequest;
import com.datatorrent.stram.plan.logical.CreateOperatorRequest;
import com.datatorrent.stram.plan.logical.CreateStreamRequest;
import com.datatorrent.stram.plan.logical.LogicalPlan;
import com.datatorrent.stram.plan.logical.LogicalPlanRequest;
import com.datatorrent.stram.plan.logical.RemoveOperatorRequest;
import com.datatorrent.stram.plan.logical.RemoveStreamRequest;
import com.datatorrent.stram.plan.logical.SetOperatorAttributeRequest;
import com.datatorrent.stram.plan.logical.SetOperatorPropertyRequest;
import com.datatorrent.stram.plan.logical.SetPortAttributeRequest;
import com.datatorrent.stram.plan.logical.SetStreamAttributeRequest;
import com.datatorrent.stram.security.StramUserLogin;
import com.datatorrent.stram.util.VersionInfo;
import com.datatorrent.stram.util.WebServicesClient;
import com.datatorrent.stram.webapp.StramWebServices;
/**
*
* Provides command line interface for a streaming application on hadoop (yarn)<p>
*
* @since 0.3.2
*/
@SuppressWarnings("UseOfSystemOutOrSystemErr")
public class DTCli
{
private static final Logger LOG = LoggerFactory.getLogger(DTCli.class);
private final Configuration conf = new YarnConfiguration();
private ClientRMHelper rmClient;
private ApplicationReport currentApp = null;
private boolean consolePresent;
private String[] commandsToExecute;
private final Map<String, CommandSpec> globalCommands = new TreeMap<String, CommandSpec>();
private final Map<String, CommandSpec> connectedCommands = new TreeMap<String, CommandSpec>();
private final Map<String, CommandSpec> logicalPlanChangeCommands = new TreeMap<String, CommandSpec>();
private final Map<String, String> aliases = new HashMap<String, String>();
private final Map<String, List<String>> macros = new HashMap<String, List<String>>();
private boolean changingLogicalPlan = false;
private final List<LogicalPlanRequest> logicalPlanRequestQueue = new ArrayList<LogicalPlanRequest>();
private FileHistory topLevelHistory;
private FileHistory changingLogicalPlanHistory;
private String jsonp;
private boolean raw = false;
private RecordingsAgent recordingsAgent;
private final ObjectMapper mapper = new ObjectMapper();
private String pagerCommand;
private Process pagerProcess;
private int verboseLevel = 0;
private static boolean lastCommandError = false;
private static class FileLineReader extends ConsoleReader
{
private final BufferedReader br;
FileLineReader(String fileName) throws IOException
{
super();
fileName = expandFileName(fileName, true);
br = new BufferedReader(new FileReader(fileName));
}
@Override
public String readLine(String prompt) throws IOException
{
return br.readLine();
}
public void close() throws IOException
{
br.close();
}
}
public static class Tokenizer
{
private static void appendToCommandBuffer(List<String> commandBuffer, StringBuffer buf, boolean potentialEmptyArg)
{
if (potentialEmptyArg || buf.length() > 0) {
commandBuffer.add(buf.toString());
buf.setLength(0);
}
}
private static List<String> startNewCommand(List<List<String>> resultBuffer)
{
List<String> newCommand = new ArrayList<String>();
resultBuffer.add(newCommand);
return newCommand;
}
public static List<String[]> tokenize(String commandLine)
{
List<List<String>> resultBuffer = new ArrayList<List<String>>();
List<String> commandBuffer = startNewCommand(resultBuffer);
if (commandLine != null) {
commandLine = ltrim(commandLine);
if (commandLine.startsWith("#")) {
return null;
}
int len = commandLine.length();
boolean insideQuotes = false;
boolean potentialEmptyArg = false;
StringBuffer buf = new StringBuffer();
for (@SuppressWarnings("AssignmentToForLoopParameter") int i = 0; i < len; ++i) {
char c = commandLine.charAt(i);
if (c == '"') {
potentialEmptyArg = true;
insideQuotes = !insideQuotes;
}
else if (c == '\\') {
if (len > i + 1) {
switch (commandLine.charAt(i + 1)) {
case 'n':
buf.append("\n");
break;
case 't':
buf.append("\t");
break;
case 'r':
buf.append("\r");
break;
case 'b':
buf.append("\b");
break;
case 'f':
buf.append("\f");
break;
default:
buf.append(commandLine.charAt(i + 1));
}
++i;
}
}
else {
if (insideQuotes) {
buf.append(c);
}
else {
if (c == ';') {
appendToCommandBuffer(commandBuffer, buf, potentialEmptyArg);
commandBuffer = startNewCommand(resultBuffer);
}
else if (Character.isWhitespace(c)) {
appendToCommandBuffer(commandBuffer, buf, potentialEmptyArg);
potentialEmptyArg = false;
if (len > i + 1 && commandLine.charAt(i + 1) == '#') {
break;
}
}
else {
buf.append(c);
}
}
}
}
appendToCommandBuffer(commandBuffer, buf, potentialEmptyArg);
}
List<String[]> result = new ArrayList<String[]>();
for (List<String> command : resultBuffer) {
String[] commandArray = new String[command.size()];
result.add(command.toArray(commandArray));
}
return result;
}
}
private interface Command
{
void execute(String[] args, ConsoleReader reader) throws Exception;
}
private static class Arg
{
final String name;
Arg(String name)
{
this.name = name;
}
@Override
public String toString()
{
return name;
}
}
private static class FileArg extends Arg
{
FileArg(String name)
{
super(name);
}
}
private static class CommandArg extends Arg
{
CommandArg(String name)
{
super(name);
}
}
private StramAppLauncher getStramAppLauncher(String jarfileUri, Configuration config) throws Exception
{
URI uri = new URI(jarfileUri);
String scheme = uri.getScheme();
StramAppLauncher appLauncher = null;
if (scheme == null || scheme.equals("file")) {
File jf = new File(uri.getPath());
appLauncher = new StramAppLauncher(jf, config);
}
else if (scheme.equals("hdfs")) {
FileSystem fs = FileSystem.get(uri, conf);
Path path = new Path(uri.getPath());
appLauncher = new StramAppLauncher(fs, path, config);
}
if (appLauncher != null) {
if (verboseLevel > 0) {
System.err.print(appLauncher.getMvnBuildClasspathOutput());
}
return appLauncher;
}
else {
throw new CliException("Scheme " + scheme + " not supported.");
}
}
private static class CommandSpec
{
Command command;
Arg[] requiredArgs;
Arg[] optionalArgs;
String description;
CommandSpec(Command command, Arg[] requiredArgs, Arg[] optionalArgs, String description)
{
this.command = command;
this.requiredArgs = requiredArgs;
this.optionalArgs = optionalArgs;
this.description = description;
}
void verifyArguments(String[] args) throws CliException
{
int minArgs = 0;
int maxArgs = 0;
if (requiredArgs != null) {
minArgs = requiredArgs.length;
maxArgs = requiredArgs.length;
}
if (optionalArgs != null) {
maxArgs += optionalArgs.length;
}
if (args.length - 1 < minArgs || args.length - 1 > maxArgs) {
throw new CliException("Command parameter error");
}
}
void printUsage(String cmd)
{
System.err.print("Usage: " + cmd);
if (requiredArgs != null) {
for (Arg arg : requiredArgs) {
System.err.print(" <" + arg + ">");
}
}
if (optionalArgs != null) {
for (Arg arg : optionalArgs) {
System.err.print(" [<" + arg + ">]");
}
}
System.err.println();
}
}
private static class OptionsCommandSpec extends CommandSpec
{
Options options;
OptionsCommandSpec(Command command, Arg[] requiredArgs, Arg[] optionalArgs, String description, Options options)
{
super(command, requiredArgs, optionalArgs, description);
this.options = options;
}
@Override
void verifyArguments(String[] args) throws CliException
{
try {
args = new PosixParser().parse(options, args).getArgs();
super.verifyArguments(args);
}
catch (Exception ex) {
throw new CliException("Command parameter error");
}
}
@Override
void printUsage(String cmd)
{
super.printUsage(cmd + ((options == null) ? "" : " [options]"));
if (options != null) {
System.out.println("Options:");
HelpFormatter formatter = new HelpFormatter();
PrintWriter pw = new PrintWriter(System.out);
formatter.printOptions(pw, 80, options, 4, 4);
pw.flush();
}
}
}
DTCli()
{
//
// Global command specification starts here
//
globalCommands.put("help", new CommandSpec(new HelpCommand(),
null,
new Arg[] {new CommandArg("command")},
"Show help"));
globalCommands.put("connect", new CommandSpec(new ConnectCommand(),
new Arg[] {new Arg("app-id")},
null,
"Connect to an app"));
globalCommands.put("launch", new OptionsCommandSpec(new LaunchCommand(),
new Arg[] {new FileArg("jar-file")},
new Arg[] {new Arg("class-name/property-file")},
"Launch an app", getLaunchCommandLineOptions()));
globalCommands.put("shutdown-app", new CommandSpec(new ShutdownAppCommand(),
new Arg[] {new Arg("app-id")},
null,
"Shutdown an app"));
globalCommands.put("list-apps", new CommandSpec(new ListAppsCommand(),
null,
new Arg[] {new Arg("pattern")},
"List applications"));
globalCommands.put("kill-app", new CommandSpec(new KillAppCommand(),
new Arg[] {new Arg("app-id")},
null,
"Kill an app"));
globalCommands.put("show-logical-plan", new OptionsCommandSpec(new ShowLogicalPlanCommand(),
new Arg[] {new FileArg("jar-file")},
new Arg[] {new Arg("class-name")},
"List apps in a jar or show logical plan of an app class",
getShowLogicalPlanCommandLineOptions()));
globalCommands.put("alias", new CommandSpec(new AliasCommand(),
new Arg[] {new Arg("alias-name"), new CommandArg("command")},
null,
"Create a command alias"));
globalCommands.put("source", new CommandSpec(new SourceCommand(),
new Arg[] {new FileArg("file")},
null,
"Execute the commands in a file"));
globalCommands.put("exit", new CommandSpec(new ExitCommand(),
null,
null,
"Exit the CLI"));
globalCommands.put("begin-macro", new CommandSpec(new BeginMacroCommand(),
new Arg[] {new Arg("name")},
null,
"Begin Macro Definition ($1...$9 to access parameters and type 'end' to end the definition)"));
globalCommands.put("dump-properties-file", new CommandSpec(new DumpPropertiesFileCommand(),
new Arg[] {new FileArg("out-file"), new FileArg("jar-file"), new Arg("class-name")},
null,
"Dump the properties file of an app class"));
globalCommands.put("get-app-info", new CommandSpec(new GetAppInfoCommand(),
new Arg[] {new Arg("app-id")},
null,
"Get the information of an app"));
globalCommands.put("set-pager", new CommandSpec(new SetPagerCommand(),
new Arg[] {new Arg("on/off")},
null,
"Set the pager program for output"));
globalCommands.put("generate-license-request", new CommandSpec(new GenerateLicenseRequestCommand(),
null,
null,
"Generate license request"));
globalCommands.put("activate-license", new CommandSpec(new ActivateLicenseCommand(),
null,
new Arg[] {new FileArg("license-file")},
"Launch the license agent"));
globalCommands.put("deactivate-license", new CommandSpec(new DeactivateLicenseCommand(),
null,
new Arg[] {new FileArg("license-file")},
"Stop the license agent"));
globalCommands.put("list-licenses", new CommandSpec(new ListLicensesCommand(),
null,
null,
"Show all IDs of all licenses"));
globalCommands.put("show-license-status", new CommandSpec(new ShowLicenseStatusCommand(),
null,
new Arg[] {new FileArg("license-file")},
"Show the status of the license"));
//
// Connected command specification starts here
//
connectedCommands.put("list-containers", new CommandSpec(new ListContainersCommand(),
null,
null,
"List containers"));
connectedCommands.put("list-operators", new CommandSpec(new ListOperatorsCommand(),
null,
new Arg[] {new Arg("pattern")},
"List operators"));
connectedCommands.put("show-physical-plan", new CommandSpec(new ShowPhysicalPlanCommand(),
null,
null,
"Show physical plan"));
connectedCommands.put("kill-container", new CommandSpec(new KillContainerCommand(),
new Arg[] {new Arg("container-id")},
null,
"Kill a container"));
connectedCommands.put("shutdown-app", new CommandSpec(new ShutdownAppCommand(),
null,
new Arg[] {new Arg("app-id")},
"Shutdown an app"));
connectedCommands.put("kill-app", new CommandSpec(new KillAppCommand(),
null,
new Arg[] {new Arg("app-id")},
"Kill an app"));
connectedCommands.put("wait", new CommandSpec(new WaitCommand(),
new Arg[] {new Arg("timeout")},
null,
"Wait for completion of current application"));
connectedCommands.put("start-recording", new CommandSpec(new StartRecordingCommand(),
new Arg[] {new Arg("operator-id")},
new Arg[] {new Arg("port-name")},
"Start recording"));
connectedCommands.put("stop-recording", new CommandSpec(new StopRecordingCommand(),
new Arg[] {new Arg("operator-id")},
new Arg[] {new Arg("port-name")},
"Stop recording"));
connectedCommands.put("get-operator-attributes", new CommandSpec(new GetOperatorAttributesCommand(),
new Arg[] {new Arg("operator-name")},
new Arg[] {new Arg("attribute-name")},
"Get attributes of an operator"));
connectedCommands.put("get-operator-properties", new CommandSpec(new GetOperatorPropertiesCommand(),
new Arg[] {new Arg("operator-name")},
new Arg[] {new Arg("property-name")},
"Get properties of an operator"));
connectedCommands.put("get-physical-operator-properties", new CommandSpec(new GetPhysicalOperatorPropertiesCommand(),
new Arg[] {new Arg("operator-name")},
new Arg[] {new Arg("property-name")},
"Get properties of an operator"));
connectedCommands.put("set-operator-property", new CommandSpec(new SetOperatorPropertyCommand(),
new Arg[] {new Arg("operator-name"), new Arg("property-name"), new Arg("property-value")},
null,
"Set a property of an operator"));
connectedCommands.put("set-physical-operator-property", new CommandSpec(new SetPhysicalOperatorPropertyCommand(),
new Arg[] {new Arg("operator-id"), new Arg("property-name"), new Arg("property-value")},
null,
"Set a property of an operator"));
connectedCommands.put("get-app-attributes", new CommandSpec(new GetAppAttributesCommand(),
null,
new Arg[] {new Arg("attribute-name")},
"Get attributes of the connected app"));
connectedCommands.put("get-port-attributes", new CommandSpec(new GetPortAttributesCommand(),
new Arg[] {new Arg("operator-name"), new Arg("port-name")},
new Arg[] {new Arg("attribute-name")},
"Get attributes of a port"));
connectedCommands.put("begin-logical-plan-change", new CommandSpec(new BeginLogicalPlanChangeCommand(),
null,
null,
"Begin Logical Plan Change"));
connectedCommands.put("show-logical-plan", new OptionsCommandSpec(new ShowLogicalPlanCommand(),
null,
new Arg[] {new FileArg("jar-file"), new Arg("class-name")},
"Show logical plan of an app class",
getShowLogicalPlanCommandLineOptions()));
connectedCommands.put("dump-properties-file", new CommandSpec(new DumpPropertiesFileCommand(),
new Arg[] {new FileArg("out-file")},
new Arg[] {new FileArg("jar-file"), new Arg("class-name")},
"Dump the properties file of an app class"));
connectedCommands.put("get-app-info", new CommandSpec(new GetAppInfoCommand(),
null,
new Arg[] {new Arg("app-id")},
"Get the information of an app"));
connectedCommands.put("create-alert", new CommandSpec(new CreateAlertCommand(),
new Arg[] {new Arg("name"), new FileArg("file")},
null,
"Create an alert with the name and the given file that contains the spec"));
connectedCommands.put("delete-alert", new CommandSpec(new DeleteAlertCommand(),
new Arg[] {new Arg("name")},
null,
"Delete an alert with the given name"));
connectedCommands.put("list-alerts", new CommandSpec(new ListAlertsCommand(),
null,
null,
"List all alerts"));
connectedCommands.put("get-recording-info", new CommandSpec(new GetRecordingInfoCommand(),
null,
new Arg[] {new Arg("operator-id"), new Arg("start-time")},
"Get tuple recording info"));
//
// Logical plan change command specification starts here
//
logicalPlanChangeCommands.put("help", new CommandSpec(new HelpCommand(),
null,
new Arg[] {new Arg("command")},
"Show help"));
logicalPlanChangeCommands.put("create-operator", new CommandSpec(new CreateOperatorCommand(),
new Arg[] {new Arg("operator-name"), new Arg("class-name")},
null,
"Create an operator"));
logicalPlanChangeCommands.put("create-stream", new CommandSpec(new CreateStreamCommand(),
new Arg[] {new Arg("stream-name"), new Arg("from-operator-name"), new Arg("from-port-name"), new Arg("to-operator-name"), new Arg("to-port-name")},
null,
"Create a stream"));
logicalPlanChangeCommands.put("add-stream-sink", new CommandSpec(new AddStreamSinkCommand(),
new Arg[] {new Arg("stream-name"), new Arg("to-operator-name"), new Arg("to-port-name")},
null,
"Add a sink to an existing stream"));
logicalPlanChangeCommands.put("remove-operator", new CommandSpec(new RemoveOperatorCommand(),
new Arg[] {new Arg("operator-name")},
null,
"Remove an operator"));
logicalPlanChangeCommands.put("remove-stream", new CommandSpec(new RemoveStreamCommand(),
new Arg[] {new Arg("stream-name")},
null,
"Remove a stream"));
logicalPlanChangeCommands.put("set-operator-property", new CommandSpec(new SetOperatorPropertyCommand(),
new Arg[] {new Arg("operator-name"), new Arg("property-name"), new Arg("property-value")},
null,
"Set a property of an operator"));
logicalPlanChangeCommands.put("set-operator-attribute", new CommandSpec(new SetOperatorAttributeCommand(),
new Arg[] {new Arg("operator-name"), new Arg("attr-name"), new Arg("attr-value")},
null,
"Set an attribute of an operator"));
logicalPlanChangeCommands.put("set-port-attribute", new CommandSpec(new SetPortAttributeCommand(),
new Arg[] {new Arg("operator-name"), new Arg("port-name"), new Arg("attr-name"), new Arg("attr-value")},
null,
"Set an attribute of a port"));
logicalPlanChangeCommands.put("set-stream-attribute", new CommandSpec(new SetStreamAttributeCommand(),
new Arg[] {new Arg("stream-name"), new Arg("attr-name"), new Arg("attr-value")},
null,
"Set an attribute of a stream"));
logicalPlanChangeCommands.put("show-queue", new CommandSpec(new ShowQueueCommand(),
null,
null,
"Show the queue of the plan change"));
logicalPlanChangeCommands.put("submit", new CommandSpec(new SubmitCommand(),
null,
null,
"Submit the plan change"));
logicalPlanChangeCommands.put("abort", new CommandSpec(new AbortCommand(),
null,
null,
"Abort the plan change"));
}
private void printJson(String json) throws IOException
{
PrintStream os = getOutputPrintStream();
if (jsonp != null) {
os.println(jsonp + "(" + json + ");");
}
else {
os.println(json);
}
os.flush();
closeOutputPrintStream(os);
}
private void printJson(JSONObject json) throws JSONException, IOException
{
printJson(raw ? json.toString() : json.toString(2));
}
private void printJson(JSONArray jsonArray, String name) throws JSONException, IOException
{
JSONObject json = new JSONObject();
json.put(name, jsonArray);
printJson(json);
}
private <K, V> void printJson(Map<K, V> map) throws IOException, JSONException
{
printJson(new JSONObject(mapper.writeValueAsString(map)));
}
private <T> void printJson(List<T> list, String name) throws IOException, JSONException
{
printJson(new JSONArray(mapper.writeValueAsString(list)), name);
}
private PrintStream getOutputPrintStream() throws IOException
{
if (pagerCommand == null) {
pagerProcess = null;
return System.out;
}
else {
pagerProcess = Runtime.getRuntime().exec(new String[] {"sh", "-c",
pagerCommand + " >/dev/tty"});
return new PrintStream(pagerProcess.getOutputStream());
}
}
private void closeOutputPrintStream(PrintStream os)
{
if (os != System.out) {
os.close();
try {
pagerProcess.waitFor();
}
catch (InterruptedException ex) {
LOG.debug("Interrupted");
}
}
}
private static String expandFileName(String fileName, boolean expandWildCard) throws IOException
{
if (fileName.matches("^[a-zA-Z]+:.*")) {
// it's a URL
return fileName;
}
// TODO: need to work with other users' home directory
if (fileName.startsWith("~" + File.separator)) {
fileName = System.getProperty("user.home") + fileName.substring(1);
}
fileName = new File(fileName).getCanonicalPath();
LOG.debug("Canonical path: {}", fileName);
if (expandWildCard) {
DirectoryScanner scanner = new DirectoryScanner();
scanner.setIncludes(new String[] {fileName});
scanner.scan();
String[] files = scanner.getIncludedFiles();
if (files.length == 0) {
throw new CliException(fileName + " does not match any file");
}
else if (files.length > 1) {
throw new CliException(fileName + " matches more than one file");
}
return files[0];
}
else {
return fileName;
}
}
private static String[] expandFileNames(String fileName) throws IOException
{
// TODO: need to work with other users
if (fileName.matches("^[a-zA-Z]+:.*")) {
// it's a URL
return new String[] {fileName};
}
if (fileName.startsWith("~" + File.separator)) {
fileName = System.getProperty("user.home") + fileName.substring(1);
}
fileName = new File(fileName).getCanonicalPath();
LOG.debug("Canonical path: {}", fileName);
DirectoryScanner scanner = new DirectoryScanner();
scanner.setIncludes(new String[] {fileName});
scanner.scan();
return scanner.getIncludedFiles();
}
private static String expandCommaSeparatedFiles(String filenames) throws IOException
{
String[] entries = filenames.split(",");
StringBuilder result = new StringBuilder();
for (String entry : entries) {
for (String file : expandFileNames(entry)) {
if (result.length() > 0) {
result.append(",");
}
result.append(file);
}
}
return result.toString();
}
protected ApplicationReport getApplication(String appId)
{
List<ApplicationReport> appList = getApplicationList();
if (StringUtils.isNumeric(appId)) {
int appSeq = Integer.parseInt(appId);
for (ApplicationReport ar : appList) {
if (ar.getApplicationId().getId() == appSeq) {
return ar;
}
}
}
else {
for (ApplicationReport ar : appList) {
if (ar.getApplicationId().toString().equals(appId)) {
return ar;
}
}
}
return null;
}
private static class CliException extends RuntimeException
{
private static final long serialVersionUID = 1L;
CliException(String msg, Throwable cause)
{
super(msg, cause);
}
CliException(String msg)
{
super(msg);
}
}
public void init(String[] args) throws IOException
{
consolePresent = (System.console() != null);
Options options = new Options();
options.addOption("e", true, "Commands are read from the argument");
options.addOption("v", false, "Verbose mode level 1");
options.addOption("vv", false, "Verbose mode level 2");
options.addOption("vvv", false, "Verbose mode level 3");
options.addOption("vvvv", false, "Verbose mode level 4");
options.addOption("r", false, "JSON Raw mode");
options.addOption("p", true, "JSONP padding function");
options.addOption("h", false, "Print this help");
CommandLineParser parser = new BasicParser();
try {
CommandLine cmd = parser.parse(options, args);
if (cmd.hasOption("v")) {
verboseLevel = 1;
}
if (cmd.hasOption("vv")) {
verboseLevel = 2;
}
if (cmd.hasOption("vvv")) {
verboseLevel = 3;
}
if (cmd.hasOption("vvvv")) {
verboseLevel = 4;
}
if (cmd.hasOption("r")) {
raw = true;
}
if (cmd.hasOption("e")) {
commandsToExecute = cmd.getOptionValues("e");
consolePresent = false;
}
if (cmd.hasOption("p")) {
jsonp = cmd.getOptionValue("p");
}
if (cmd.hasOption("h")) {
HelpFormatter formatter = new HelpFormatter();
formatter.printHelp(DTCli.class.getSimpleName(), options);
System.exit(0);
}
}
catch (ParseException ex) {
System.err.println("Invalid argument: " + ex);
System.exit(1);
}
Level logLevel;
switch (verboseLevel) {
case 0:
logLevel = Level.OFF;
break;
case 1:
logLevel = Level.ERROR;
break;
case 2:
logLevel = Level.WARN;
break;
case 3:
logLevel = Level.INFO;
break;
default:
logLevel = Level.DEBUG;
break;
}
for (org.apache.log4j.Logger logger : new org.apache.log4j.Logger[] {org.apache.log4j.Logger.getRootLogger(),
org.apache.log4j.Logger.getLogger(DTCli.class)}) {
@SuppressWarnings("unchecked")
Enumeration<Appender> allAppenders = logger.getAllAppenders();
while (allAppenders.hasMoreElements()) {
Appender appender = allAppenders.nextElement();
if (appender instanceof ConsoleAppender) {
((ConsoleAppender)appender).setThreshold(logLevel);
}
}
}
if (commandsToExecute != null) {
for (String command : commandsToExecute) {
LOG.debug("Command to be executed: {}", command);
}
}
StramClientUtils.addStramResources(conf);
StramAgent.setResourceManagerWebappAddress(conf.get(YarnConfiguration.RM_WEBAPP_ADDRESS, "localhost:8088"));
// Need to initialize security before starting RPC for the credentials to
// take effect
StramUserLogin.attemptAuthentication(conf);
YarnClientHelper yarnClient = new YarnClientHelper(conf);
rmClient = new ClientRMHelper(yarnClient);
String socks = conf.get(CommonConfigurationKeysPublic.HADOOP_SOCKS_SERVER_KEY);
if (socks != null) {
int colon = socks.indexOf(':');
if (colon > 0) {
System.setProperty("socksProxyHost", socks.substring(0, colon));
System.setProperty("socksProxyPort", socks.substring(colon + 1));
}
}
}
private void processSourceFile(String fileName, ConsoleReader reader) throws FileNotFoundException, IOException
{
boolean consolePresentSaved = consolePresent;
consolePresent = false;
FileLineReader fr = null;
String line;
try {
fr = new FileLineReader(fileName);
while ((line = fr.readLine("")) != null) {
processLine(line, fr, true);
}
}
finally {
consolePresent = consolePresentSaved;
if (fr != null) {
fr.close();
}
}
}
private final static class MyNullCompleter implements Completer
{
public static final MyNullCompleter INSTANCE = new MyNullCompleter();
@Override
public int complete(final String buffer, final int cursor, final List<CharSequence> candidates)
{
candidates.add("");
return cursor;
}
}
private final static class MyFileNameCompleter extends FileNameCompleter
{
@Override
public int complete(final String buffer, final int cursor, final List<CharSequence> candidates)
{
int result = super.complete(buffer, cursor, candidates);
if (candidates.isEmpty()) {
candidates.add("");
result = cursor;
}
return result;
}
}
private List<Completer> defaultCompleters()
{
Map<String, CommandSpec> commands = new TreeMap<String, CommandSpec>();
commands.putAll(logicalPlanChangeCommands);
commands.putAll(connectedCommands);
commands.putAll(globalCommands);
List<Completer> completers = new LinkedList<Completer>();
for (Map.Entry<String, CommandSpec> entry : commands.entrySet()) {
String command = entry.getKey();
CommandSpec cs = entry.getValue();
List<Completer> argCompleters = new LinkedList<Completer>();
argCompleters.add(new StringsCompleter(command));
Arg[] args = (Arg[])ArrayUtils.addAll(cs.requiredArgs, cs.optionalArgs);
if (args != null) {
if (cs instanceof OptionsCommandSpec) {
// ugly hack because jline cannot dynamically change completers while the user types
if (args[0] instanceof FileArg) {
for (int i = 0; i < 10; i++) {
argCompleters.add(new MyFileNameCompleter());
}
}
}
else {
for (Arg arg : args) {
if (arg instanceof FileArg) {
argCompleters.add(new MyFileNameCompleter());
}
else if (arg instanceof CommandArg) {
argCompleters.add(new StringsCompleter(commands.keySet().toArray(new String[] {})));
}
else {
argCompleters.add(MyNullCompleter.INSTANCE);
}
}
}
}
completers.add(new ArgumentCompleter(argCompleters));
}
List<Completer> argCompleters = new LinkedList<Completer>();
Set<String> set = new TreeSet<String>();
set.addAll(aliases.keySet());
set.addAll(macros.keySet());
argCompleters.add(new StringsCompleter(set.toArray(new String[] {})));
for (int i = 0; i < 10; i++) {
argCompleters.add(new MyFileNameCompleter());
}
completers.add(new ArgumentCompleter(argCompleters));
return completers;
}
private void setupCompleter(ConsoleReader reader)
{
reader.addCompleter(new AggregateCompleter(defaultCompleters()));
}
private void updateCompleter(ConsoleReader reader)
{
List<Completer> completers = new ArrayList<Completer>(reader.getCompleters());
for (Completer c : completers) {
reader.removeCompleter(c);
}
setupCompleter(reader);
}
private void setupHistory(ConsoleReader reader)
{
File historyFile = new File(StramClientUtils.getSettingsRootDir(), "cli_history");
historyFile.getParentFile().mkdirs();
try {
topLevelHistory = new FileHistory(historyFile);
reader.setHistory(topLevelHistory);
historyFile = new File(StramClientUtils.getSettingsRootDir(), "cli_history_clp");
changingLogicalPlanHistory = new FileHistory(historyFile);
}
catch (IOException ex) {
System.err.printf("Unable to open %s for writing.", historyFile);
}
}
private void setupAgents() throws IOException
{
recordingsAgent = new RecordingsAgent();
recordingsAgent.setup();
}
public void run() throws IOException
{
ConsoleReader reader = new ConsoleReader();
reader.setBellEnabled(false);
try {
processSourceFile(System.getProperty("user.home") + "/.stram/clirc_system", reader);
processSourceFile(System.getProperty("user.home") + "/.stram/clirc", reader);
}
catch (Exception ex) {
// ignore
}
if (consolePresent) {
printWelcomeMessage();
printLicenseStatus();
setupCompleter(reader);
setupHistory(reader);
}
setupAgents();
String line;
PrintWriter out = new PrintWriter(System.out);
int i = 0;
while (true) {
if (commandsToExecute != null) {
if (i >= commandsToExecute.length) {
break;
}
line = commandsToExecute[i++];
}
else {
line = readLine(reader);
if (line == null) {
break;
}
}
processLine(line, reader, true);
out.flush();
}
if (topLevelHistory != null) {
topLevelHistory.flush();
}
if (changingLogicalPlanHistory != null) {
changingLogicalPlanHistory.flush();
}
if (consolePresent) {
System.out.println("exit");
}
}
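/**
 * Expands $0..$9 placeholders in each macro line: $0 is the macro name, $1..$9 are the
 * invocation arguments; placeholders without a supplied argument expand to the empty string,
 * and anything other than $0..$9 after the dollar sign is left untouched.
 */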
private List<String> expandMacro(List<String> lines, String[] args)
{
List<String> expandedLines = new ArrayList<String>();
for (String line : lines) {
int previousIndex = 0;
StringBuilder expandedLine = new StringBuilder();
while (true) {
// Search for $0..$9 within each line and replace with the corresponding args
int currentIndex = line.indexOf('$', previousIndex);
if (currentIndex > 0 && line.length() > currentIndex + 1) {
int argIndex = line.charAt(currentIndex + 1) - '0';
if (args.length > argIndex && argIndex >= 0) {
// Replace $0 with macro name or $1..$9 with input arguments
expandedLine.append(line.substring(previousIndex, currentIndex)).append(args[argIndex]);
}
else if (argIndex >= 0 && argIndex <= 9) {
// Arguments for $1..$9 were not supplied - replace with empty strings
expandedLine.append(line.substring(previousIndex, currentIndex));
}
else {
// Outside valid arguments range - ignore and do not replace
expandedLine.append(line.substring(previousIndex, currentIndex + 2));
}
currentIndex += 2;
}
else {
expandedLine.append(line.substring(previousIndex));
expandedLines.add(expandedLine.toString());
break;
}
previousIndex = currentIndex;
}
}
return expandedLines;
}
private static String ltrim(String s)
{
int i = 0;
while (i < s.length() && Character.isWhitespace(s.charAt(i))) {
i++;
}
return s.substring(i);
}
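/**
 * Tokenizes a command line, optionally expands macros and aliases, resolves each command
 * against the current mode (global, connected, or logical-plan-change) and executes it,
 * reporting errors without terminating the CLI.
 */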
private void processLine(String line, ConsoleReader reader, boolean expandMacroAlias)
{
try {
//LOG.debug("line: \"{}\"", line);
List<String[]> commands = Tokenizer.tokenize(line);
if (commands == null) {
return;
}
for (String[] args : commands) {
if (args.length == 0 || StringUtils.isBlank(args[0])) {
continue;
}
//LOG.debug("Got: {}", mapper.writeValueAsString(args));
if (expandMacroAlias) {
if (macros.containsKey(args[0])) {
List<String> macroItems = expandMacro(macros.get(args[0]), args);
for (String macroItem : macroItems) {
if (consolePresent) {
System.out.println("expanded-macro> " + macroItem);
}
processLine(macroItem, reader, false);
}
continue;
}
if (aliases.containsKey(args[0])) {
processLine(aliases.get(args[0]), reader, false);
continue;
}
}
CommandSpec cs = null;
if (changingLogicalPlan) {
cs = logicalPlanChangeCommands.get(args[0]);
}
else {
if (currentApp != null) {
cs = connectedCommands.get(args[0]);
}
if (cs == null) {
cs = globalCommands.get(args[0]);
}
}
if (cs == null) {
if (connectedCommands.get(args[0]) != null) {
System.err.println("\"" + args[0] + "\" is valid only when connected to an application. Type \"connect <appid>\" to connect to an application.");
}
else if (logicalPlanChangeCommands.get(args[0]) != null) {
System.err.println("\"" + args[0] + "\" is valid only when changing a logical plan. Type \"begin-logical-plan-change\" to change a logical plan");
}
else {
System.err.println("Invalid command '" + args[0] + "'. Type \"help\" for list of commands");
}
}
else {
try {
cs.verifyArguments(args);
}
catch (CliException ex) {
cs.printUsage(args[0]);
throw ex;
}
cs.command.execute(args, reader);
lastCommandError = false;
}
}
}
catch (CliException e) {
System.err.println(e.getMessage());
LOG.debug("Error processing line: " + line, e);
lastCommandError = true;
}
catch (Exception e) {
System.err.println("Unexpected error: " + e);
LOG.error("Error processing line: {}", line, e);
lastCommandError = true;
}
}
private void printWelcomeMessage()
{
System.out.println("DT CLI " + VersionInfo.getVersion() + " " + VersionInfo.getDate() + " " + VersionInfo.getRevision());
}
private void printLicenseStatus()
{
try {
JSONObject licenseStatus = getLicenseStatus(null);
if (!licenseStatus.has("agentAppId")) {
System.out.println("License agent is not running. Please run the license agent first by typing \"activate-license\"");
return;
}
if (licenseStatus.has("remainingLicensedMB")) {
int remainingLicensedMB = licenseStatus.getInt("remainingLicensedMB");
if (remainingLicensedMB > 0) {
System.out.println("You have " + remainingLicensedMB + "MB remaining for the current license.");
}
else {
System.out.println("You do not have any memory allowance left for the current license. Please contact DataTorrent, Inc. <[email protected]> for help.");
}
}
}
catch (Exception ex) {
LOG.error("Caught exception when getting license info", ex);
System.out.println("Error getting license status. Please contact DataTorrent, Inc. <[email protected]> for help.");
}
}
private void printHelp(String command, CommandSpec commandSpec, PrintStream os)
{
if (consolePresent) {
os.print("\033[0;93m");
os.print(command);
os.print("\033[0m");
}
else {
os.print(command);
}
if (commandSpec instanceof OptionsCommandSpec) {
OptionsCommandSpec ocs = (OptionsCommandSpec)commandSpec;
if (ocs.options != null) {
os.print(" [options]");
}
}
if (commandSpec.requiredArgs != null) {
for (Arg arg : commandSpec.requiredArgs) {
if (consolePresent) {
os.print(" \033[3m" + arg + "\033[0m");
}
else {
os.print(" <" + arg + ">");
}
}
}
if (commandSpec.optionalArgs != null) {
for (Arg arg : commandSpec.optionalArgs) {
if (consolePresent) {
os.print(" [\033[3m" + arg + "\033[0m]");
}
else {
os.print(" [<" + arg + ">]");
}
}
}
os.println("\n\t" + commandSpec.description);
if (commandSpec instanceof OptionsCommandSpec) {
OptionsCommandSpec ocs = (OptionsCommandSpec)commandSpec;
if (ocs.options != null) {
os.println("\tOptions:");
HelpFormatter formatter = new HelpFormatter();
PrintWriter pw = new PrintWriter(os);
formatter.printOptions(pw, 80, ocs.options, 12, 4);
pw.flush();
}
}
}
private void printHelp(Map<String, CommandSpec> commandSpecs, PrintStream os)
{
for (Map.Entry<String, CommandSpec> entry : commandSpecs.entrySet()) {
printHelp(entry.getKey(), entry.getValue(), os);
}
}
private String readLine(ConsoleReader reader)
throws IOException
{
String prompt = "";
if (consolePresent) {
if (changingLogicalPlan) {
prompt = "logical-plan-change";
}
else {
prompt = "dt";
}
if (currentApp != null) {
prompt += " (";
prompt += currentApp.getApplicationId().toString();
prompt += ") ";
}
prompt += "> ";
}
String line = reader.readLine(prompt);
if (line == null) {
return null;
}
return ltrim(line);
}
private List<ApplicationReport> getApplicationList()
{
try {
GetApplicationsRequest appsReq = GetApplicationsRequest.newInstance();
appsReq.setApplicationTypes(Sets.newHashSet(StramClient.YARN_APPLICATION_TYPE));
return rmClient.clientRM.getApplications(appsReq).getApplicationList();
}
catch (Exception e) {
throw new CliException("Error getting application list from resource manager: " + e.getMessage(), e);
}
}
private List<ApplicationReport> getRunningApplicationList()
{
try {
GetApplicationsRequest appsReq = GetApplicationsRequest.newInstance();
appsReq.setApplicationTypes(Sets.newHashSet(StramClient.YARN_APPLICATION_TYPE));
appsReq.setApplicationStates(EnumSet.of(YarnApplicationState.RUNNING));
return rmClient.clientRM.getApplications(appsReq).getApplicationList();
}
catch (Exception e) {
throw new CliException("Error getting application list from resource manager: " + e.getMessage(), e);
}
}
private List<ApplicationReport> getLicenseList()
{
try {
GetApplicationsRequest appsReq = GetApplicationsRequest.newInstance();
appsReq.setApplicationTypes(Sets.newHashSet(StramClient.YARN_APPLICATION_TYPE_LICENSE));
appsReq.setApplicationStates(EnumSet.of(YarnApplicationState.RUNNING));
return rmClient.clientRM.getApplications(appsReq).getApplicationList();
}
catch (Exception e) {
throw new CliException("Error getting application list from resource manager: " + e.getMessage(), e);
}
}
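// Resolves a user-supplied container id (either the full id or its numeric short form)
// to the full container id known to the application, or null if no container matches.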
private String getContainerLongId(String containerId)
{
ClientResponse rsp = getResource(StramWebServices.PATH_PHYSICAL_PLAN_CONTAINERS, currentApp);
JSONObject json = rsp.getEntity(JSONObject.class);
int shortId = 0;
if (StringUtils.isNumeric(containerId)) {
shortId = Integer.parseInt(containerId);
}
try {
Object containersObj = json.get("containers");
JSONArray containers;
if (containersObj instanceof JSONArray) {
containers = (JSONArray)containersObj;
}
else {
containers = new JSONArray();
containers.put(containersObj);
}
if (containersObj != null) {
for (int o = containers.length(); o-- > 0;) {
JSONObject container = containers.getJSONObject(o);
String id = container.getString("id");
if (id.equals(containerId) || (shortId != 0 && (id.endsWith("_" + shortId) || id.endsWith("0" + shortId)))) {
return id;
}
}
}
}
catch (JSONException ex) {
// ignore malformed container info and fall through to return null
}
return null;
}
private ApplicationReport assertRunningApp(ApplicationReport app)
{
ApplicationReport r;
try {
r = rmClient.getApplicationReport(app.getApplicationId());
if (r.getYarnApplicationState() != YarnApplicationState.RUNNING) {
String msg = String.format("Application %s not running (status %s)",
r.getApplicationId().getId(), r.getYarnApplicationState());
throw new CliException(msg);
}
}
catch (YarnException rmExc) {
throw new CliException("Unable to determine application status.", rmExc);
}
catch (IOException rmExc) {
throw new CliException("Unable to determine application status.", rmExc);
}
return r;
}
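// Issues a GET against the given path of the selected application's STRAM web service and
// returns the raw response; fails with a CliException if the application has terminated
// or the response is not JSON.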
private ClientResponse getResource(String resourcePath, ApplicationReport appReport)
{
if (appReport == null) {
throw new CliException("No application selected");
}
if (StringUtils.isEmpty(appReport.getTrackingUrl()) || appReport.getFinalApplicationStatus() != FinalApplicationStatus.UNDEFINED) {
appReport = null;
throw new CliException("Application terminated.");
}
WebServicesClient wsClient = new WebServicesClient();
Client client = wsClient.getClient();
client.setFollowRedirects(true);
WebResource r;
try {
r = StramAgent.getStramWebResource(wsClient, appReport.getApplicationId().toString());
}
catch (IncompatibleVersionException ex) {
throw new CliException("Incompatible stram version", ex);
}
if (r == null) {
throw new CliException("Application " + appReport.getApplicationId().toString() + " has not started");
}
r = r.path(resourcePath);
try {
return wsClient.process(r, ClientResponse.class, new WebServicesClient.WebServicesHandler<ClientResponse>()
{
@Override
public ClientResponse process(WebResource webResource, Class<ClientResponse> clazz)
{
ClientResponse response = webResource.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
if (!MediaType.APPLICATION_JSON_TYPE.equals(response.getType())) {
throw new CliException("Unexpected response type " + response.getType());
}
return response;
}
});
}
catch (Exception e) {
// check the application status, as the request above may have failed due to application termination etc.
if (appReport == currentApp) {
currentApp = assertRunningApp(appReport);
}
throw new CliException("Failed to request " + r.getURI(), e);
}
}
private WebResource getStramWebResource(WebServicesClient webServicesClient, ApplicationReport appReport)
{
if (appReport == null) {
throw new CliException("No application selected");
}
// YARN-156 WebAppProxyServlet does not support POST - for now bypass it for this request
appReport = assertRunningApp(appReport); // otherwise fields such as the tracking URL might still be "N/A"
try {
return StramAgent.getStramWebResource(webServicesClient, appReport.getApplicationId().toString());
}
catch (IncompatibleVersionException ex) {
throw new CliException("Incompatible Stram version", ex);
}
}
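// Returns the app factories bundled in the jar whose names match the given string
// (treated as a regular expression fragment), all of them when matchString is null,
// or null if the jar cannot be inspected.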
private List<AppFactory> getMatchingAppFactories(StramAppLauncher submitApp, String matchString)
{
try {
List<AppFactory> cfgList = submitApp.getBundledTopologies();
if (cfgList.isEmpty()) {
return null;
}
else if (matchString == null) {
return cfgList;
}
else {
List<AppFactory> result = new ArrayList<AppFactory>();
for (AppFactory ac : cfgList) {
if (ac.getName().matches(".*" + matchString + ".*")) {
result.add(ac);
}
}
return result;
}
}
catch (Exception ex) {
return null;
}
}
/*
* Below is the implementation of all commands
*/
private class HelpCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
PrintStream os = getOutputPrintStream();
if (args.length < 2) {
os.println("GLOBAL COMMANDS EXCEPT WHEN CHANGING LOGICAL PLAN:\n");
printHelp(globalCommands, os);
os.println();
os.println("COMMANDS WHEN CONNECTED TO AN APP (via connect <appid>) EXCEPT WHEN CHANGING LOGICAL PLAN:\n");
printHelp(connectedCommands, os);
os.println();
os.println("COMMANDS WHEN CHANGING LOGICAL PLAN (via begin-logical-plan-change):\n");
printHelp(logicalPlanChangeCommands, os);
os.println();
}
else {
if (args[1].equals("help")) {
printHelp("help", globalCommands.get("help"), os);
}
else {
boolean valid = false;
CommandSpec cs = globalCommands.get(args[1]);
if (cs != null) {
os.println("This usage is valid except when changing logical plan");
printHelp(args[1], cs, os);
os.println();
valid = true;
}
cs = connectedCommands.get(args[1]);
if (cs != null) {
os.println("This usage is valid when connected to an app except when changing logical plan");
printHelp(args[1], cs, os);
os.println();
valid = true;
}
cs = logicalPlanChangeCommands.get(args[1]);
if (cs != null) {
os.println("This usage is only valid when changing logical plan (via begin-logical-plan-change)");
printHelp(args[1], cs, os);
os.println();
valid = true;
}
if (!valid) {
os.println("Help for \"" + args[1] + "\" does not exist.");
}
}
}
closeOutputPrintStream(os);
}
}
private class ConnectCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
currentApp = getApplication(args[1]);
if (currentApp == null) {
throw new CliException("Invalid application id: " + args[1]);
}
boolean connected = false;
try {
LOG.debug("Selected {} with tracking url {}", currentApp.getApplicationId(), currentApp.getTrackingUrl());
ClientResponse rsp = getResource(StramWebServices.PATH_INFO, currentApp);
rsp.getEntity(JSONObject.class);
connected = true; // set as current only upon successful connection
if (consolePresent) {
System.out.println("Connected to application " + currentApp.getApplicationId());
}
}
catch (CliException e) {
throw e; // pass on
}
finally {
if (!connected) {
//currentApp = null;
//currentDir = "/";
}
}
}
}
private class ActivateLicenseCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
byte[] licenseBytes;
if (args.length > 1) {
licenseBytes = StramClientUtils.getLicense(args[1]);
}
else {
licenseBytes = StramClientUtils.getLicense(conf);
}
String licenseId = License.getLicenseID(licenseBytes);
License.validateLicense(licenseBytes);
LogicalPlan lp = new LogicalPlan();
lp.setAttribute(DAG.APPLICATION_NAME, licenseId);
lp.setAttribute(LogicalPlan.LICENSE, Base64.encodeBase64String(licenseBytes)); // TODO: obfuscate license passing
StramClient client = new StramClient(lp);
client.setApplicationType(StramClient.YARN_APPLICATION_TYPE_LICENSE);
client.startApplication();
System.err.println("Started license agent for " + licenseId);
}
}
private class DeactivateLicenseCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
byte[] licenseBytes;
if (args.length > 1) {
licenseBytes = StramClientUtils.getLicense(args[1]);
}
else {
licenseBytes = StramClientUtils.getLicense(conf);
}
String licenseId = License.getLicenseID(licenseBytes);
License.validateLicense(licenseBytes);
// TODO: migrate CLI to use YarnClient and this here won't be needed
YarnClient clientRMService = YarnClient.createYarnClient();
try {
clientRMService.init(conf);
clientRMService.start();
ApplicationReport ar = LicensingAgentClient.getLicensingAgentAppReport(licenseId, clientRMService);
if (ar == null) {
throw new CliException("License not activated: " + licenseId);
}
rmClient.killApplication(ar.getApplicationId());
System.err.println("Stopped license agent for " + licenseId);
}
finally {
clientRMService.stop();
}
}
}
private static class LicenseInfo
{
int remainingLicensedMB;
long lastUpdate;
// add expiration date range here
}
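// Queries the /info endpoint of every running application and keeps, per license id,
// the most recently reported remaining licensed memory.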
private Map<String, LicenseInfo> getLicenseInfoMap() throws JSONException, IOException
{
List<ApplicationReport> runningApplicationList = getRunningApplicationList();
WebServicesClient webServicesClient = new WebServicesClient();
Map<String, LicenseInfo> licenseInfoMap = new HashMap<String, LicenseInfo>();
for (ApplicationReport ar : runningApplicationList) {
WebResource r = getStramWebResource(webServicesClient, ar).path(StramWebServices.PATH_INFO);
JSONObject response = webServicesClient.process(r, JSONObject.class, new WebServicesClient.WebServicesHandler<JSONObject>()
{
@Override
public JSONObject process(WebResource webResource, Class<JSONObject> clazz)
{
return webResource.accept(MediaType.APPLICATION_JSON).get(JSONObject.class);
}
});
if (!response.has("licenseInfoLastUpdate")) {
continue;
}
long lastUpdate = Long.valueOf(response.getString("licenseInfoLastUpdate"));
String licenseId = response.getString("licenseId");
int remainingLicensedMB = Integer.valueOf(response.getString("remainingLicensedMB"));
LicenseInfo licenseInfo;
if (licenseInfoMap.containsKey(licenseId)) {
licenseInfo = licenseInfoMap.get(licenseId);
if (licenseInfo.lastUpdate < lastUpdate) {
licenseInfo.remainingLicensedMB = remainingLicensedMB;
licenseInfo.lastUpdate = lastUpdate;
}
}
else {
licenseInfo = new LicenseInfo();
licenseInfo.remainingLicensedMB = remainingLicensedMB;
licenseInfo.lastUpdate = lastUpdate;
licenseInfoMap.put(licenseId, licenseInfo);
}
}
return licenseInfoMap;
}
private class ListLicensesCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
Map<String, LicenseInfo> licenseInfoMap = getLicenseInfoMap();
try {
JSONArray jsonArray = new JSONArray();
List<ApplicationReport> licList = getLicenseList();
Collections.sort(licList, new Comparator<ApplicationReport>()
{
@Override
public int compare(ApplicationReport o1, ApplicationReport o2)
{
return o1.getApplicationId().getId() - o2.getApplicationId().getId();
}
});
for (ApplicationReport ar : licList) {
JSONObject jsonObj = new JSONObject();
jsonObj.put("id", ar.getName());
jsonObj.put("agentAppId", ar.getApplicationId().getId());
if (licenseInfoMap.containsKey(ar.getName())) {
jsonObj.put("remainingLicensedMB", licenseInfoMap.get(ar.getName()).remainingLicensedMB);
}
jsonArray.put(jsonObj);
}
printJson(jsonArray, "licenses");
}
catch (Exception ex) {
throw new CliException("Failed to retrieve license list", ex);
}
}
}
private JSONObject getLicenseStatus(String licenseFile) throws Exception
{
byte[] licenseBytes;
if (licenseFile != null) {
licenseBytes = StramClientUtils.getLicense(licenseFile);
}
else {
licenseBytes = StramClientUtils.getLicense(conf);
}
String licenseID = License.getLicenseID(licenseBytes);
SubLicense[] subLicenses = License.validateGetSubLicenses(licenseBytes);
JSONObject licenseObj = new JSONObject();
licenseObj.put("id", licenseID);
JSONArray sublicArray = new JSONArray();
SimpleDateFormat sdf = new SimpleDateFormat(SubLicense.DATE_FORMAT);
for (SubLicense sublic : subLicenses) {
JSONObject sublicObj = new JSONObject();
sublicObj.put("startDate", sdf.format(sublic.getStartDate()));
sublicObj.put("endDate", sdf.format(sublic.getEndDate()));
sublicObj.put("comment", sublic.getComment());
sublicObj.put("processorList", sublic.getProcessorListAsJSONArray());
sublicObj.put("constraint", sublic.getConstraint());
sublicObj.put("url", sublic.getUrl());
sublicArray.put(sublicObj);
}
licenseObj.put("sublicenses", sublicArray);
List<ApplicationReport> licList = getLicenseList();
for (ApplicationReport ar : licList) {
if (ar.getName().equals(licenseID)) {
licenseObj.put("agentAppId", ar.getApplicationId().toString());
break;
}
}
Map<String, LicenseInfo> licenseInfoMap = getLicenseInfoMap();
if (licenseInfoMap.containsKey(licenseID)) {
licenseObj.put("remainingLicensedMB", licenseInfoMap.get(licenseID).remainingLicensedMB);
}
return licenseObj;
}
private class ShowLicenseStatusCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
JSONObject licenseObj = getLicenseStatus(args.length > 1 ? args[1] : null);
printJson(licenseObj);
}
}
private class GenerateLicenseRequestCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
String b64EncodedString = new GenerateLicenseRequest().getLicenseRequest(Util.getDefaultPublicKey());
System.out.println("-------------------------- Cut from below ------------------------------");
System.out.println(b64EncodedString);
System.out.println("------------------------------------------------------------------------");
}
}
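// Parses the launch options, resolves the application to run from the given jar (prompting
// interactively when more than one matches), verifies that a license agent is running, and
// then either submits the application to YARN or runs it locally when -local is given.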
private class LaunchCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
String[] newArgs = new String[args.length - 1];
System.arraycopy(args, 1, newArgs, 0, args.length - 1);
LaunchCommandLineInfo commandLineInfo = getLaunchCommandLineInfo(newArgs);
if (commandLineInfo.configFile != null) {
commandLineInfo.configFile = expandFileName(commandLineInfo.configFile, true);
}
Configuration config = StramAppLauncher.getConfig(commandLineInfo.configFile, commandLineInfo.overrideProperties);
if (commandLineInfo.libjars != null) {
commandLineInfo.libjars = expandCommaSeparatedFiles(commandLineInfo.libjars);
config.set(StramAppLauncher.LIBJARS_CONF_KEY_NAME, commandLineInfo.libjars);
}
if (commandLineInfo.files != null) {
commandLineInfo.files = expandCommaSeparatedFiles(commandLineInfo.files);
config.set(StramAppLauncher.FILES_CONF_KEY_NAME, commandLineInfo.files);
}
if (commandLineInfo.archives != null) {
commandLineInfo.archives = expandCommaSeparatedFiles(commandLineInfo.archives);
config.set(StramAppLauncher.ARCHIVES_CONF_KEY_NAME, commandLineInfo.archives);
}
if (commandLineInfo.licenseFile != null) {
commandLineInfo.licenseFile = expandFileName(commandLineInfo.licenseFile, true);
}
String fileName = expandFileName(commandLineInfo.args[0], true);
StramAppLauncher submitApp = getStramAppLauncher(fileName, config);
submitApp.loadDependencies();
AppFactory appFactory = null;
if (commandLineInfo.args.length >= 2) {
File file = new File(commandLineInfo.args[1]);
if (file.exists()) {
appFactory = new StramAppLauncher.PropertyFileAppFactory(file);
}
}
if (appFactory == null) {
String matchString = commandLineInfo.args.length >= 2 ? commandLineInfo.args[1] : null;
List<AppFactory> matchingAppFactories = getMatchingAppFactories(submitApp, matchString);
if (matchingAppFactories == null || matchingAppFactories.isEmpty()) {
throw new CliException("No matching applications bundled in jar.");
}
else if (matchingAppFactories.size() == 1) {
appFactory = matchingAppFactories.get(0);
}
else if (matchingAppFactories.size() > 1) {
// Display matching applications
for (int i = 0; i < matchingAppFactories.size(); i++) {
String appName = matchingAppFactories.get(i).getName();
String appAlias = submitApp.getLogicalPlanConfiguration().getAppAlias(appName);
if (appAlias != null) {
appName = appAlias;
}
System.out.printf("%3d. %s\n", i + 1, appName);
}
// Exit if not in interactive mode
if (!consolePresent) {
throw new CliException("More than one application in jar file match '" + matchString + "'");
}
else {
boolean useHistory = reader.isHistoryEnabled();
reader.setHistoryEnabled(false);
History previousHistory = reader.getHistory();
History dummyHistory = new MemoryHistory();
reader.setHistory(dummyHistory);
List<Completer> completers = new ArrayList<Completer>(reader.getCompleters());
for (Completer c : completers) {
reader.removeCompleter(c);
}
String optionLine = reader.readLine("Choose application: ");
reader.setHistoryEnabled(useHistory);
reader.setHistory(previousHistory);
for (Completer c : completers) {
reader.addCompleter(c);
}
try {
int option = Integer.parseInt(optionLine);
if (0 < option && option <= matchingAppFactories.size()) {
appFactory = matchingAppFactories.get(option - 1);
}
}
catch (Exception ex) {
// ignore
}
}
}
}
if (appFactory != null) {
if (!commandLineInfo.localMode) {
byte[] licenseBytes;
if (commandLineInfo.licenseFile != null) {
licenseBytes = StramClientUtils.getLicense(commandLineInfo.licenseFile);
}
else {
licenseBytes = StramClientUtils.getLicense(conf);
}
String licenseId = License.getLicenseID(licenseBytes);
YarnClient clientRMService = YarnClient.createYarnClient();
clientRMService.init(conf);
clientRMService.start();
ApplicationReport ar = LicensingAgentClient.getLicensingAgentAppReport(licenseId, clientRMService);
if (ar == null) {
throw new CliException("License not activated. Please run activate-license first before launching any streaming application");
}
ApplicationId appId = submitApp.launchApp(appFactory);
currentApp = rmClient.getApplicationReport(appId);
printJson("{\"appId\": \"" + appId + "\"}");
}
else {
submitApp.runLocal(appFactory);
}
}
else {
System.err.println("No application specified.");
}
}
}
private class ShutdownAppCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
ApplicationReport[] apps;
WebServicesClient webServicesClient = new WebServicesClient();
if (args.length == 1) {
if (currentApp == null) {
throw new CliException("No application selected");
}
else {
apps = new ApplicationReport[] {currentApp};
}
}
else {
apps = new ApplicationReport[args.length - 1];
for (int i = 1; i < args.length; i++) {
apps[i - 1] = getApplication(args[i]);
if (apps[i - 1] == null) {
throw new CliException("App " + args[i] + " not found!");
}
}
}
for (ApplicationReport app : apps) {
WebResource r = getStramWebResource(webServicesClient, app).path(StramWebServices.PATH_SHUTDOWN);
try {
JSONObject response = webServicesClient.process(r, JSONObject.class, new WebServicesClient.WebServicesHandler<JSONObject>()
{
@Override
public JSONObject process(WebResource webResource, Class<JSONObject> clazz)
{
return webResource.accept(MediaType.APPLICATION_JSON).post(clazz);
}
});
if (consolePresent) {
System.out.println("Shutdown requested: " + response);
}
currentApp = null;
}
catch (Exception e) {
throw new CliException("Failed to request " + r.getURI(), e);
}
}
}
}
private class ListAppsCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
try {
JSONArray jsonArray = new JSONArray();
List<ApplicationReport> appList = getApplicationList();
Collections.sort(appList, new Comparator<ApplicationReport>()
{
@Override
public int compare(ApplicationReport o1, ApplicationReport o2)
{
return o1.getApplicationId().getId() - o2.getApplicationId().getId();
}
});
int totalCnt = 0;
int runningCnt = 0;
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss Z");
for (ApplicationReport ar : appList) {
/*
 * Building a JSON object per report is inefficient, but the list is small enough to be
 * shown on a command line, so the overhead is not noticeable.
 */
JSONObject jsonObj = new JSONObject();
jsonObj.put("startTime", sdf.format(new java.util.Date(ar.getStartTime())));
jsonObj.put("id", ar.getApplicationId().getId());
jsonObj.put("name", ar.getName());
jsonObj.put("state", ar.getYarnApplicationState().name());
jsonObj.put("trackingUrl", ar.getTrackingUrl());
jsonObj.put("finalStatus", ar.getFinalApplicationStatus());
totalCnt++;
if (ar.getYarnApplicationState() == YarnApplicationState.RUNNING) {
runningCnt++;
}
if (args.length > 1) {
@SuppressWarnings("unchecked")
Iterator<String> iterator = jsonObj.keys();
while (iterator.hasNext()) {
Object value = jsonObj.get(iterator.next());
if (value.toString().matches("(?i).*" + args[1] + ".*")) {
jsonArray.put(jsonObj);
break;
}
}
}
else {
jsonArray.put(jsonObj);
}
}
printJson(jsonArray, "apps");
if (consolePresent) {
System.out.println(runningCnt + " active, total " + totalCnt + " applications.");
}
}
catch (Exception ex) {
throw new CliException("Failed to retrieve application list", ex);
}
}
}
private class KillAppCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
if (args.length == 1) {
if (currentApp == null) {
throw new CliException("No application selected");
}
else {
try {
rmClient.killApplication(currentApp.getApplicationId());
currentApp = null;
}
catch (YarnException e) {
throw new CliException("Failed to kill " + currentApp.getApplicationId(), e);
}
}
if (consolePresent) {
System.out.println("Kill app requested");
}
return;
}
ApplicationReport app = null;
int i = 0;
try {
while (++i < args.length) {
app = getApplication(args[i]);
rmClient.killApplication(app.getApplicationId());
if (app == currentApp) {
currentApp = null;
}
}
if (consolePresent) {
System.out.println("Kill app requested");
}
}
catch (YarnException e) {
throw new CliException("Failed to kill " + ((app == null || app.getApplicationId() == null) ? "unknown application" : app.getApplicationId()) + ". Aborting killing of any additional applications.", e);
}
catch (NumberFormatException nfe) {
throw new CliException("Invalid application Id " + args[i], nfe);
}
catch (NullPointerException npe) {
throw new CliException("Application with Id " + args[i] + " does not seem to be alive!", npe);
}
}
}
private class AliasCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
if (args[1].equals(args[2])) {
throw new CliException("Alias to itself!");
}
aliases.put(args[1], args[2]);
if (consolePresent) {
System.out.println("Alias " + args[1] + " created.");
}
updateCompleter(reader);
}
}
private class SourceCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
processSourceFile(args[1], reader);
if (consolePresent) {
System.out.println("File " + args[1] + " sourced.");
}
}
}
private class ExitCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
if (topLevelHistory != null) {
topLevelHistory.flush();
}
if (changingLogicalPlanHistory != null) {
changingLogicalPlanHistory.flush();
}
System.exit(0);
}
}
private class ListContainersCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
ClientResponse rsp = getResource(StramWebServices.PATH_PHYSICAL_PLAN_CONTAINERS, currentApp);
JSONObject json = rsp.getEntity(JSONObject.class);
if (args.length == 1) {
printJson(json);
}
else {
Object containersObj = json.get("containers");
JSONArray containers;
if (containersObj instanceof JSONArray) {
containers = (JSONArray)containersObj;
}
else {
containers = new JSONArray();
containers.put(containersObj);
}
if (containersObj == null) {
System.out.println("No containers found!");
}
else {
JSONArray resultContainers = new JSONArray();
for (int o = containers.length(); o-- > 0;) {
JSONObject container = containers.getJSONObject(o);
String id = container.getString("id");
if (id != null && !id.isEmpty()) {
for (int argc = args.length; argc-- > 1;) {
String s1 = "0" + args[argc];
String s2 = "_" + args[argc];
if (id.equals(args[argc]) || id.endsWith(s1) || id.endsWith(s2)) {
resultContainers.put(container);
}
}
}
}
printJson(resultContainers, "containers");
}
}
}
}
private class ListOperatorsCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
ClientResponse rsp = getResource(StramWebServices.PATH_PHYSICAL_PLAN_OPERATORS, currentApp);
JSONObject json = rsp.getEntity(JSONObject.class);
if (args.length > 1) {
String singleKey = "" + json.keys().next();
JSONArray matches = new JSONArray();
// filter operators
JSONArray arr;
Object obj = json.get(singleKey);
if (obj instanceof JSONArray) {
arr = (JSONArray)obj;
}
else {
arr = new JSONArray();
arr.put(obj);
}
for (int i = 0; i < arr.length(); i++) {
JSONObject oper = arr.getJSONObject(i);
@SuppressWarnings("unchecked")
Iterator<String> keys = oper.keys();
while (keys.hasNext()) {
if (oper.get(keys.next()).toString().matches("(?i).*" + args[1] + ".*")) {
matches.put(oper);
break;
}
}
}
json.put(singleKey, matches);
}
printJson(json);
}
}
private class ShowPhysicalPlanCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
WebServicesClient webServicesClient = new WebServicesClient();
WebResource r = getStramWebResource(webServicesClient, currentApp).path(StramWebServices.PATH_PHYSICAL_PLAN);
try {
printJson(webServicesClient.process(r, JSONObject.class, new WebServicesClient.WebServicesHandler<JSONObject>()
{
@Override
public JSONObject process(WebResource webResource, Class<JSONObject> clazz)
{
return webResource.accept(MediaType.APPLICATION_JSON).get(clazz);
}
}));
}
catch (Exception e) {
throw new CliException("Failed to request " + r.getURI(), e);
}
}
}
private class KillContainerCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
String containerLongId = getContainerLongId(args[1]);
if (containerLongId == null) {
throw new CliException("Container " + args[1] + " not found");
}
WebServicesClient webServicesClient = new WebServicesClient();
WebResource r = getStramWebResource(webServicesClient, currentApp).path(StramWebServices.PATH_PHYSICAL_PLAN_CONTAINERS).path(containerLongId).path("kill");
try {
JSONObject response = webServicesClient.process(r, JSONObject.class, new WebServicesClient.WebServicesHandler<JSONObject>()
{
@Override
public JSONObject process(WebResource webResource, Class<JSONObject> clazz)
{
return webResource.accept(MediaType.APPLICATION_JSON).post(clazz, new JSONObject());
}
});
if (consolePresent) {
System.out.println("Kill container requested: " + response);
}
}
catch (Exception e) {
throw new CliException("Failed to request " + r.getURI(), e);
}
}
}
private class WaitCommand implements Command
{
@Override
public void execute(String[] args, final ConsoleReader reader) throws Exception
{
if (currentApp == null) {
throw new CliException("No application selected");
}
int timeout = Integer.valueOf(args[1]);
ClientRMHelper.AppStatusCallback cb = new ClientRMHelper.AppStatusCallback()
{
@Override
public boolean exitLoop(ApplicationReport report)
{
System.out.println("current status is: " + report.getYarnApplicationState());
try {
if (reader.getInput().available() > 0) {
return true;
}
}
catch (IOException e) {
LOG.error("Error checking for input.", e);
}
return false;
}
};
try {
boolean result = rmClient.waitForCompletion(currentApp.getApplicationId(), cb, timeout * 1000);
if (!result) {
System.err.println("Application terminated unsucessful.");
}
}
catch (YarnException e) {
throw new CliException("Failed to kill " + currentApp.getApplicationId(), e);
}
}
}
private class StartRecordingCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
String opId = args[1];
String port = null;
if (args.length == 3) {
port = args[2];
}
printJson(recordingsAgent.startRecording(currentApp.getApplicationId().toString(), opId, port));
}
}
private class StopRecordingCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
String opId = args[1];
String port = null;
if (args.length == 3) {
port = args[2];
}
printJson(recordingsAgent.stopRecording(currentApp.getApplicationId().toString(), opId, port));
}
}
private class GetRecordingInfoCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
if (args.length <= 1) {
List<RecordingInfo> recordingInfo = recordingsAgent.getRecordingInfo(currentApp.getApplicationId().toString());
printJson(recordingInfo, "recordings");
}
else if (args.length <= 2) {
String opId = args[1];
List<RecordingInfo> recordingInfo = recordingsAgent.getRecordingInfo(currentApp.getApplicationId().toString(), opId);
printJson(recordingInfo, "recordings");
}
else {
String opId = args[1];
long startTime = Long.valueOf(args[2]);
RecordingInfo recordingInfo = recordingsAgent.getRecordingInfo(currentApp.getApplicationId().toString(), opId, startTime);
printJson(new JSONObject(mapper.writeValueAsString(recordingInfo)));
}
}
}
private class GetAppAttributesCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
if (currentApp == null) {
throw new CliException("No application selected");
}
WebServicesClient webServicesClient = new WebServicesClient();
WebResource r = getStramWebResource(webServicesClient, currentApp).path(StramWebServices.PATH_LOGICAL_PLAN).path("attributes");
if (args.length > 1) {
r = r.queryParam("attributeName", args[1]);
}
try {
JSONObject response = webServicesClient.process(r, JSONObject.class, new WebServicesClient.WebServicesHandler<JSONObject>()
{
@Override
public JSONObject process(WebResource webResource, Class<JSONObject> clazz)
{
return webResource.accept(MediaType.APPLICATION_JSON).get(JSONObject.class);
}
});
printJson(response);
}
catch (Exception e) {
throw new CliException("Failed to request " + r.getURI(), e);
}
}
}
private class GetOperatorAttributesCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
if (currentApp == null) {
throw new CliException("No application selected");
}
WebServicesClient webServicesClient = new WebServicesClient();
WebResource r = getStramWebResource(webServicesClient, currentApp).path(StramWebServices.PATH_LOGICAL_PLAN_OPERATORS).path(args[1]).path("attributes");
if (args.length > 2) {
r = r.queryParam("attributeName", args[2]);
}
try {
JSONObject response = webServicesClient.process(r, JSONObject.class, new WebServicesClient.WebServicesHandler<JSONObject>()
{
@Override
public JSONObject process(WebResource webResource, Class<JSONObject> clazz)
{
return webResource.accept(MediaType.APPLICATION_JSON).get(JSONObject.class);
}
});
printJson(response);
}
catch (Exception e) {
throw new CliException("Failed to request " + r.getURI(), e);
}
}
}
private class GetPortAttributesCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
if (currentApp == null) {
throw new CliException("No application selected");
}
WebServicesClient webServicesClient = new WebServicesClient();
WebResource r = getStramWebResource(webServicesClient, currentApp).path(StramWebServices.PATH_LOGICAL_PLAN_OPERATORS).path(args[1]).path(args[2]).path("attributes");
if (args.length > 3) {
r = r.queryParam("attributeName", args[3]);
}
try {
JSONObject response = webServicesClient.process(r, JSONObject.class, new WebServicesClient.WebServicesHandler<JSONObject>()
{
@Override
public JSONObject process(WebResource webResource, Class<JSONObject> clazz)
{
return webResource.accept(MediaType.APPLICATION_JSON).get(JSONObject.class);
}
});
printJson(response);
}
catch (Exception e) {
throw new CliException("Failed to request " + r.getURI(), e);
}
}
}
private class GetOperatorPropertiesCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
if (currentApp == null) {
throw new CliException("No application selected");
}
WebServicesClient webServicesClient = new WebServicesClient();
WebResource r = getStramWebResource(webServicesClient, currentApp).path(StramWebServices.PATH_LOGICAL_PLAN_OPERATORS).path(args[1]).path("properties");
if (args.length > 2) {
r = r.queryParam("propertyName", args[2]);
}
try {
JSONObject response = webServicesClient.process(r, JSONObject.class, new WebServicesClient.WebServicesHandler<JSONObject>()
{
@Override
public JSONObject process(WebResource webResource, Class<JSONObject> clazz)
{
return webResource.accept(MediaType.APPLICATION_JSON).get(JSONObject.class);
}
});
printJson(response);
}
catch (Exception e) {
throw new CliException("Failed to request " + r.getURI(), e);
}
}
}
private class GetPhysicalOperatorPropertiesCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
if (currentApp == null) {
throw new CliException("No application selected");
}
WebServicesClient webServicesClient = new WebServicesClient();
WebResource r = getStramWebResource(webServicesClient, currentApp).path(StramWebServices.PATH_PHYSICAL_PLAN_OPERATORS).path(args[1]).path("properties");
if (args.length > 2) {
r = r.queryParam("propertyName", args[2]);
}
try {
JSONObject response = webServicesClient.process(r, JSONObject.class, new WebServicesClient.WebServicesHandler<JSONObject>()
{
@Override
public JSONObject process(WebResource webResource, Class<JSONObject> clazz)
{
return webResource.accept(MediaType.APPLICATION_JSON).get(JSONObject.class);
}
});
printJson(response);
}
catch (Exception e) {
throw new CliException("Failed to request " + r.getURI(), e);
}
}
}
private class SetOperatorPropertyCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
if (currentApp == null) {
throw new CliException("No application selected");
}
if (changingLogicalPlan) {
String operatorName = args[1];
String propertyName = args[2];
String propertyValue = args[3];
SetOperatorPropertyRequest request = new SetOperatorPropertyRequest();
request.setOperatorName(operatorName);
request.setPropertyName(propertyName);
request.setPropertyValue(propertyValue);
logicalPlanRequestQueue.add(request);
}
else {
WebServicesClient webServicesClient = new WebServicesClient();
WebResource r = getStramWebResource(webServicesClient, currentApp).path(StramWebServices.PATH_LOGICAL_PLAN_OPERATORS).path(args[1]).path("properties");
final JSONObject request = new JSONObject();
request.put(args[2], args[3]);
JSONObject response = webServicesClient.process(r, JSONObject.class, new WebServicesClient.WebServicesHandler<JSONObject>()
{
@Override
public JSONObject process(WebResource webResource, Class<JSONObject> clazz)
{
return webResource.accept(MediaType.APPLICATION_JSON).post(JSONObject.class, request);
}
});
printJson(response);
}
}
}
private class SetPhysicalOperatorPropertyCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
if (currentApp == null) {
throw new CliException("No application selected");
}
WebServicesClient webServicesClient = new WebServicesClient();
WebResource r = getStramWebResource(webServicesClient, currentApp).path(StramWebServices.PATH_PHYSICAL_PLAN_OPERATORS).path(args[1]).path("properties");
final JSONObject request = new JSONObject();
request.put(args[2], args[3]);
JSONObject response = webServicesClient.process(r, JSONObject.class, new WebServicesClient.WebServicesHandler<JSONObject>()
{
@Override
public JSONObject process(WebResource webResource, Class<JSONObject> clazz)
{
return webResource.accept(MediaType.APPLICATION_JSON).post(JSONObject.class, request);
}
});
printJson(response);
}
}
private class BeginLogicalPlanChangeCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
changingLogicalPlan = true;
reader.setHistory(changingLogicalPlanHistory);
}
}
private class ShowLogicalPlanCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
String[] newArgs = new String[args.length - 1];
System.arraycopy(args, 1, newArgs, 0, args.length - 1);
ShowLogicalPlanCommandLineInfo commandLineInfo = getShowLogicalPlanCommandLineInfo(newArgs);
Configuration config = StramAppLauncher.getConfig(null, null);
if (commandLineInfo.libjars != null) {
commandLineInfo.libjars = expandCommaSeparatedFiles(commandLineInfo.libjars);
config.set(StramAppLauncher.LIBJARS_CONF_KEY_NAME, commandLineInfo.libjars);
}
if (commandLineInfo.args.length >= 2) {
String jarfile = expandFileName(commandLineInfo.args[0], true);
String appName = commandLineInfo.args[1];
StramAppLauncher submitApp = getStramAppLauncher(jarfile, config);
submitApp.loadDependencies();
List<AppFactory> matchingAppFactories = getMatchingAppFactories(submitApp, appName);
if (matchingAppFactories == null || matchingAppFactories.isEmpty()) {
throw new CliException("No application in jar file matches '" + appName + "'");
}
else if (matchingAppFactories.size() > 1) {
throw new CliException("More than one application in jar file match '" + appName + "'");
}
else {
AppFactory appFactory = matchingAppFactories.get(0);
LogicalPlan logicalPlan = submitApp.prepareDAG(appFactory);
Map<String, Object> map = new HashMap<String, Object>();
map.put("applicationName", appFactory.getName());
map.put("logicalPlan", LogicalPlanSerializer.convertToMap(logicalPlan));
printJson(map);
}
}
else if (commandLineInfo.args.length == 1) {
String jarfile = expandFileName(commandLineInfo.args[0], true);
StramAppLauncher submitApp = getStramAppLauncher(jarfile, config);
submitApp.loadDependencies();
List<Map<String, Object>> appList = new ArrayList<Map<String, Object>>();
List<AppFactory> appFactoryList = submitApp.getBundledTopologies();
for (AppFactory appFactory : appFactoryList) {
Map<String, Object> m = new HashMap<String, Object>();
m.put("name", appFactory.getName());
appList.add(m);
}
printJson(appList, "applications");
}
else {
if (currentApp == null) {
throw new CliException("No application selected");
}
WebServicesClient webServicesClient = new WebServicesClient();
WebResource r = getStramWebResource(webServicesClient, currentApp).path(StramWebServices.PATH_LOGICAL_PLAN);
JSONObject response = webServicesClient.process(r, JSONObject.class, new WebServicesClient.WebServicesHandler<JSONObject>()
{
@Override
public JSONObject process(WebResource webResource, Class<JSONObject> clazz)
{
return webResource.accept(MediaType.APPLICATION_JSON).get(JSONObject.class);
}
});
printJson(response);
}
}
}
private class DumpPropertiesFileCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
String outfilename = expandFileName(args[1], false);
if (args.length > 3) {
String jarfile = args[2];
String appName = args[3];
StramAppLauncher submitApp = getStramAppLauncher(jarfile, null);
submitApp.loadDependencies();
List<AppFactory> matchingAppFactories = getMatchingAppFactories(submitApp, appName);
if (matchingAppFactories == null || matchingAppFactories.isEmpty()) {
throw new CliException("No application in jar file matches '" + appName + "'");
}
else if (matchingAppFactories.size() > 1) {
throw new CliException("More than one application in jar file match '" + appName + "'");
}
else {
AppFactory appFactory = matchingAppFactories.get(0);
LogicalPlan logicalPlan = submitApp.prepareDAG(appFactory);
File file = new File(outfilename);
if (!file.exists()) {
file.createNewFile();
}
LogicalPlanSerializer.convertToProperties(logicalPlan).save(file);
}
}
else {
if (currentApp == null) {
throw new CliException("No application selected");
}
WebServicesClient webServicesClient = new WebServicesClient();
WebResource r = getStramWebResource(webServicesClient, currentApp).path(StramWebServices.PATH_LOGICAL_PLAN);
JSONObject response = webServicesClient.process(r, JSONObject.class, new WebServicesClient.WebServicesHandler<JSONObject>()
{
@Override
public JSONObject process(WebResource webResource, Class<JSONObject> clazz)
{
return webResource.accept(MediaType.APPLICATION_JSON).get(JSONObject.class);
}
});
File file = new File(outfilename);
if (!file.exists()) {
file.createNewFile();
}
LogicalPlanSerializer.convertToProperties(response).save(file);
}
System.out.println("Property file is saved at " + outfilename);
}
}
private class CreateOperatorCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
String operatorName = args[1];
String className = args[2];
CreateOperatorRequest request = new CreateOperatorRequest();
request.setOperatorName(operatorName);
request.setOperatorFQCN(className);
logicalPlanRequestQueue.add(request);
}
}
private class RemoveOperatorCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
String operatorName = args[1];
RemoveOperatorRequest request = new RemoveOperatorRequest();
request.setOperatorName(operatorName);
logicalPlanRequestQueue.add(request);
}
}
private class CreateStreamCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
String streamName = args[1];
String sourceOperatorName = args[2];
String sourcePortName = args[3];
String sinkOperatorName = args[4];
String sinkPortName = args[5];
CreateStreamRequest request = new CreateStreamRequest();
request.setStreamName(streamName);
request.setSourceOperatorName(sourceOperatorName);
request.setSinkOperatorName(sinkOperatorName);
request.setSourceOperatorPortName(sourcePortName);
request.setSinkOperatorPortName(sinkPortName);
logicalPlanRequestQueue.add(request);
}
}
private class AddStreamSinkCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
String streamName = args[1];
String sinkOperatorName = args[2];
String sinkPortName = args[3];
AddStreamSinkRequest request = new AddStreamSinkRequest();
request.setStreamName(streamName);
request.setSinkOperatorName(sinkOperatorName);
request.setSinkOperatorPortName(sinkPortName);
logicalPlanRequestQueue.add(request);
}
}
private class RemoveStreamCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
String streamName = args[1];
RemoveStreamRequest request = new RemoveStreamRequest();
request.setStreamName(streamName);
logicalPlanRequestQueue.add(request);
}
}
private class SetOperatorAttributeCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
String operatorName = args[1];
String attributeName = args[2];
String attributeValue = args[3];
SetOperatorAttributeRequest request = new SetOperatorAttributeRequest();
request.setOperatorName(operatorName);
request.setAttributeName(attributeName);
request.setAttributeValue(attributeValue);
logicalPlanRequestQueue.add(request);
}
}
private class SetStreamAttributeCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
String streamName = args[1];
String attributeName = args[2];
String attributeValue = args[3];
SetStreamAttributeRequest request = new SetStreamAttributeRequest();
request.setStreamName(streamName);
request.setAttributeName(attributeName);
request.setAttributeValue(attributeValue);
logicalPlanRequestQueue.add(request);
}
}
private class SetPortAttributeCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
String operatorName = args[1];
String attributeName = args[2];
String attributeValue = args[3];
SetPortAttributeRequest request = new SetPortAttributeRequest();
request.setOperatorName(operatorName);
request.setAttributeName(attributeName);
request.setAttributeValue(attributeValue);
logicalPlanRequestQueue.add(request);
}
}
private class AbortCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
logicalPlanRequestQueue.clear();
changingLogicalPlan = false;
reader.setHistory(topLevelHistory);
}
}
private class SubmitCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
if (logicalPlanRequestQueue.isEmpty()) {
throw new CliException("Nothing to submit. Type \"abort\" to abort change");
}
WebServicesClient webServicesClient = new WebServicesClient();
WebResource r = getStramWebResource(webServicesClient, currentApp).path(StramWebServices.PATH_LOGICAL_PLAN);
try {
final Map<String, Object> m = new HashMap<String, Object>();
ObjectMapper mapper = new ObjectMapper();
m.put("requests", logicalPlanRequestQueue);
final JSONObject jsonRequest = new JSONObject(mapper.writeValueAsString(m));
JSONObject response = webServicesClient.process(r, JSONObject.class, new WebServicesClient.WebServicesHandler<JSONObject>()
{
@Override
public JSONObject process(WebResource webResource, Class<JSONObject> clazz)
{
return webResource.accept(MediaType.APPLICATION_JSON).post(JSONObject.class, jsonRequest);
}
});
printJson(response);
}
catch (Exception e) {
throw new CliException("Failed to request " + r.getURI(), e);
}
logicalPlanRequestQueue.clear();
changingLogicalPlan = false;
reader.setHistory(topLevelHistory);
}
}
private class ShowQueueCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
printJson(logicalPlanRequestQueue, "queue");
if (consolePresent) {
System.out.println("Total operations in queue: " + logicalPlanRequestQueue.size());
}
}
}
private class BeginMacroCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
String name = args[1];
if (macros.containsKey(name) || aliases.containsKey(name)) {
System.err.println("Name '" + name + "' already exists.");
return;
}
try {
List<String> commands = new ArrayList<String>();
while (true) {
String line = reader.readLine("macro def (" + name + ") > ");
if (line.equals("end")) {
macros.put(name, commands);
updateCompleter(reader);
if (consolePresent) {
System.out.println("Macro '" + name + "' created.");
}
return;
}
else if (line.equals("abort")) {
System.err.println("Aborted");
return;
}
else {
commands.add(line);
}
}
}
catch (IOException ex) {
System.err.println("Aborted");
}
}
}
private class SetPagerCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
if (args[1].equals("off")) {
pagerCommand = null;
}
else if (args[1].equals("on")) {
pagerCommand = "less -F -X -r";
}
else {
throw new CliException("set-pager parameter is either on or off.");
}
}
}
private class GetAppInfoCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
ApplicationReport appReport;
if (args.length > 1) {
appReport = getApplication(args[1]);
}
else {
if (currentApp == null) {
throw new CliException("No application selected");
}
appReport = currentApp;
}
WebServicesClient webServicesClient = new WebServicesClient();
WebResource r = getStramWebResource(webServicesClient, appReport).path(StramWebServices.PATH_INFO);
JSONObject response = webServicesClient.process(r, JSONObject.class, new WebServicesClient.WebServicesHandler<JSONObject>()
{
@Override
public JSONObject process(WebResource webResource, Class<JSONObject> clazz)
{
return webResource.accept(MediaType.APPLICATION_JSON).get(JSONObject.class);
}
});
printJson(response);
}
}
private class CreateAlertCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
String fileName = expandFileName(args[2], true);
File f = new File(fileName);
if (!f.canRead()) {
throw new CliException("Cannot read " + fileName);
}
DataInputStream dis = new DataInputStream(new FileInputStream(f));
byte[] buffer = new byte[dis.available()];
dis.readFully(buffer);
final JSONObject json = new JSONObject(new String(buffer));
dis.close();
WebServicesClient webServicesClient = new WebServicesClient();
WebResource r = getStramWebResource(webServicesClient, currentApp).path(StramWebServices.PATH_ALERTS + "/" + args[1]);
try {
JSONObject response = webServicesClient.process(r, JSONObject.class, new WebServicesClient.WebServicesHandler<JSONObject>()
{
@Override
public JSONObject process(WebResource webResource, Class<JSONObject> clazz)
{
return webResource.accept(MediaType.APPLICATION_JSON).put(clazz, json);
}
});
printJson(response);
}
catch (Exception e) {
throw new CliException("Failed to request " + r.getURI(), e);
}
}
}
private class DeleteAlertCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
WebServicesClient webServicesClient = new WebServicesClient();
WebResource r = getStramWebResource(webServicesClient, currentApp).path(StramWebServices.PATH_ALERTS + "/" + args[1]);
try {
JSONObject response = webServicesClient.process(r, JSONObject.class, new WebServicesClient.WebServicesHandler<JSONObject>()
{
@Override
public JSONObject process(WebResource webResource, Class<JSONObject> clazz)
{
return webResource.accept(MediaType.APPLICATION_JSON).delete(clazz);
}
});
printJson(response);
}
catch (Exception e) {
throw new CliException("Failed to request " + r.getURI(), e);
}
}
}
private class ListAlertsCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
ClientResponse rsp = getResource(StramWebServices.PATH_ALERTS, currentApp);
JSONObject json = rsp.getEntity(JSONObject.class);
printJson(json);
}
}
@SuppressWarnings("static-access")
public static Options getLaunchCommandLineOptions()
{
Options options = new Options();
Option local = new Option("local", "Run application in local mode.");
Option configFile = OptionBuilder.withArgName("configuration file").hasArg().withDescription("Specify an application configuration file.").create("conf");
Option defProperty = OptionBuilder.withArgName("property=value").hasArg().withDescription("Use value for given property.").create("D");
Option libjars = OptionBuilder.withArgName("comma separated list of jars").hasArg().withDescription("Specify comma separated jar files to include in the classpath.").create("libjars");
Option files = OptionBuilder.withArgName("comma separated list of files").hasArg().withDescription("Specify comma separated files to be copied to the cluster.").create("files");
Option archives = OptionBuilder.withArgName("comma separated list of archives").hasArg().withDescription("Specify comma separated archives to be unarchived on the compute machines.").create("archives");
Option license = OptionBuilder.withArgName("license file").hasArg().withDescription("Specify the license file to launch the application").create("license");
options.addOption(local);
options.addOption(configFile);
options.addOption(defProperty);
options.addOption(libjars);
options.addOption(files);
options.addOption(archives);
options.addOption(license);
return options;
}
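  // Parsing sketch (argument values are illustrative): getLaunchCommandLineInfo() below parses
  // whatever follows the "launch" keyword, so a line such as
  //   launch -local -Dmy.prop=value -libjars lib/extra.jar myapp.jar com.example.MyApp
  // yields localMode=true, overrideProperties={my.prop=value}, libjars="lib/extra.jar" and
  // args={"myapp.jar", "com.example.MyApp"}.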
private static LaunchCommandLineInfo getLaunchCommandLineInfo(String[] args) throws ParseException
{
CommandLineParser parser = new PosixParser();
LaunchCommandLineInfo result = new LaunchCommandLineInfo();
CommandLine line = parser.parse(getLaunchCommandLineOptions(), args);
result.localMode = line.hasOption("local");
result.configFile = line.getOptionValue("conf");
String[] defs = line.getOptionValues("D");
if (defs != null) {
result.overrideProperties = new HashMap<String, String>();
for (String def : defs) {
int equal = def.indexOf('=');
if (equal < 0) {
result.overrideProperties.put(def, null);
}
else {
result.overrideProperties.put(def.substring(0, equal), def.substring(equal + 1));
}
}
}
result.libjars = line.getOptionValue("libjars");
result.files = line.getOptionValue("files");
result.archives = line.getOptionValue("archives");
result.licenseFile = line.getOptionValue("license");
result.args = line.getArgs();
return result;
}
private static class LaunchCommandLineInfo
{
boolean localMode;
String configFile;
Map<String, String> overrideProperties;
String libjars;
String files;
String archives;
String licenseFile;
String[] args;
}
@SuppressWarnings("static-access")
public static Options getShowLogicalPlanCommandLineOptions()
{
Options options = new Options();
Option libjars = OptionBuilder.withArgName("comma separated list of jars").hasArg().withDescription("Specify comma separated jar files to include in the classpath.").create("libjars");
options.addOption(libjars);
return options;
}
private static ShowLogicalPlanCommandLineInfo getShowLogicalPlanCommandLineInfo(String[] args) throws ParseException
{
CommandLineParser parser = new PosixParser();
ShowLogicalPlanCommandLineInfo result = new ShowLogicalPlanCommandLineInfo();
CommandLine line = parser.parse(getShowLogicalPlanCommandLineOptions(), args);
result.libjars = line.getOptionValue("libjars");
result.args = line.getArgs();
return result;
}
private static class ShowLogicalPlanCommandLineInfo
{
String libjars;
String[] args;
}
public static void main(String[] args) throws Exception
{
DTCli shell = new DTCli();
shell.init(args);
shell.run();
if (lastCommandError) {
System.exit(1);
}
}
}
|
engine/src/main/java/com/datatorrent/stram/cli/DTCli.java
|
/**
* Copyright (c) 2012-2013 DataTorrent, Inc. All rights reserved.
*/
package com.datatorrent.stram.cli;
import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.net.URI;
import java.text.SimpleDateFormat;
import java.util.*;
import jline.console.ConsoleReader;
import jline.console.completer.AggregateCompleter;
import jline.console.completer.ArgumentCompleter;
import jline.console.completer.Completer;
import jline.console.completer.FileNameCompleter;
import jline.console.completer.StringsCompleter;
import jline.console.history.FileHistory;
import jline.console.history.History;
import jline.console.history.MemoryHistory;
import javax.ws.rs.core.MediaType;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Sets;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import org.apache.commons.cli.BasicParser;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.log4j.Appender;
import org.apache.log4j.ConsoleAppender;
import org.apache.log4j.Level;
import org.apache.tools.ant.DirectoryScanner;
import com.datatorrent.api.DAG;
import com.datatorrent.stram.StramClient;
import com.datatorrent.stram.client.RecordingsAgent;
import com.datatorrent.stram.client.RecordingsAgent.RecordingInfo;
import com.datatorrent.stram.client.StramAgent;
import com.datatorrent.stram.client.StramAppLauncher;
import com.datatorrent.stram.client.StramAppLauncher.AppFactory;
import com.datatorrent.stram.client.StramClientUtils;
import com.datatorrent.stram.client.StramClientUtils.ClientRMHelper;
import com.datatorrent.stram.client.StramClientUtils.YarnClientHelper;
import com.datatorrent.stram.client.WebServicesVersionConversion.IncompatibleVersionException;
import com.datatorrent.stram.codec.LogicalPlanSerializer;
import com.datatorrent.stram.license.GenerateLicenseRequest;
import com.datatorrent.stram.license.License;
import com.datatorrent.stram.license.LicensingAgentClient;
import com.datatorrent.stram.license.SubLicense;
import com.datatorrent.stram.license.util.Util;
import com.datatorrent.stram.plan.logical.AddStreamSinkRequest;
import com.datatorrent.stram.plan.logical.CreateOperatorRequest;
import com.datatorrent.stram.plan.logical.CreateStreamRequest;
import com.datatorrent.stram.plan.logical.LogicalPlan;
import com.datatorrent.stram.plan.logical.LogicalPlanRequest;
import com.datatorrent.stram.plan.logical.RemoveOperatorRequest;
import com.datatorrent.stram.plan.logical.RemoveStreamRequest;
import com.datatorrent.stram.plan.logical.SetOperatorAttributeRequest;
import com.datatorrent.stram.plan.logical.SetOperatorPropertyRequest;
import com.datatorrent.stram.plan.logical.SetPortAttributeRequest;
import com.datatorrent.stram.plan.logical.SetStreamAttributeRequest;
import com.datatorrent.stram.security.StramUserLogin;
import com.datatorrent.stram.util.VersionInfo;
import com.datatorrent.stram.util.WebServicesClient;
import com.datatorrent.stram.webapp.StramWebServices;
/**
 * Provides the command line interface for managing streaming applications on Hadoop (YARN).
 *
 * @since 0.3.2
 */
@SuppressWarnings("UseOfSystemOutOrSystemErr")
public class DTCli
{
private static final Logger LOG = LoggerFactory.getLogger(DTCli.class);
private final Configuration conf = new YarnConfiguration();
private ClientRMHelper rmClient;
private ApplicationReport currentApp = null;
private boolean consolePresent;
private String[] commandsToExecute;
private final Map<String, CommandSpec> globalCommands = new TreeMap<String, CommandSpec>();
private final Map<String, CommandSpec> connectedCommands = new TreeMap<String, CommandSpec>();
private final Map<String, CommandSpec> logicalPlanChangeCommands = new TreeMap<String, CommandSpec>();
private final Map<String, String> aliases = new HashMap<String, String>();
private final Map<String, List<String>> macros = new HashMap<String, List<String>>();
private boolean changingLogicalPlan = false;
private final List<LogicalPlanRequest> logicalPlanRequestQueue = new ArrayList<LogicalPlanRequest>();
private FileHistory topLevelHistory;
private FileHistory changingLogicalPlanHistory;
private String jsonp;
private boolean raw = false;
private RecordingsAgent recordingsAgent;
private final ObjectMapper mapper = new ObjectMapper();
private String pagerCommand;
private Process pagerProcess;
private int verboseLevel = 0;
private static boolean lastCommandError = false;
private static class FileLineReader extends ConsoleReader
{
private final BufferedReader br;
FileLineReader(String fileName) throws IOException
{
super();
fileName = expandFileName(fileName, true);
br = new BufferedReader(new FileReader(fileName));
}
@Override
public String readLine(String prompt) throws IOException
{
return br.readLine();
}
public void close() throws IOException
{
br.close();
}
}
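  // The Tokenizer below splits a raw input line into one or more commands: whitespace separates
  // arguments, double quotes group an argument (and may produce an empty argument), backslash
  // escapes \n \t \r \b \f and passes any other character through, ';' separates commands, and
  // '#' starts a comment. For example (illustrative input):
  //   connect 1; list-operators "my op"   # trailing comment
  // tokenizes into two commands: {"connect", "1"} and {"list-operators", "my op"}.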
public static class Tokenizer
{
private static void appendToCommandBuffer(List<String> commandBuffer, StringBuffer buf, boolean potentialEmptyArg)
{
if (potentialEmptyArg || buf.length() > 0) {
commandBuffer.add(buf.toString());
buf.setLength(0);
}
}
private static List<String> startNewCommand(List<List<String>> resultBuffer)
{
List<String> newCommand = new ArrayList<String>();
resultBuffer.add(newCommand);
return newCommand;
}
public static List<String[]> tokenize(String commandLine)
{
List<List<String>> resultBuffer = new ArrayList<List<String>>();
List<String> commandBuffer = startNewCommand(resultBuffer);
if (commandLine != null) {
commandLine = ltrim(commandLine);
if (commandLine.startsWith("#")) {
return null;
}
int len = commandLine.length();
boolean insideQuotes = false;
boolean potentialEmptyArg = false;
StringBuffer buf = new StringBuffer();
for (@SuppressWarnings("AssignmentToForLoopParameter") int i = 0; i < len; ++i) {
char c = commandLine.charAt(i);
if (c == '"') {
potentialEmptyArg = true;
insideQuotes = !insideQuotes;
}
else if (c == '\\') {
if (len > i + 1) {
switch (commandLine.charAt(i + 1)) {
case 'n':
buf.append("\n");
break;
case 't':
buf.append("\t");
break;
case 'r':
buf.append("\r");
break;
case 'b':
buf.append("\b");
break;
case 'f':
buf.append("\f");
break;
default:
buf.append(commandLine.charAt(i + 1));
}
++i;
}
}
else {
if (insideQuotes) {
buf.append(c);
}
else {
if (c == ';') {
appendToCommandBuffer(commandBuffer, buf, potentialEmptyArg);
commandBuffer = startNewCommand(resultBuffer);
}
else if (Character.isWhitespace(c)) {
appendToCommandBuffer(commandBuffer, buf, potentialEmptyArg);
potentialEmptyArg = false;
if (len > i + 1 && commandLine.charAt(i + 1) == '#') {
break;
}
}
else {
buf.append(c);
}
}
}
}
appendToCommandBuffer(commandBuffer, buf, potentialEmptyArg);
}
List<String[]> result = new ArrayList<String[]>();
for (List<String> command : resultBuffer) {
String[] commandArray = new String[command.size()];
result.add(command.toArray(commandArray));
}
return result;
}
}
private interface Command
{
void execute(String[] args, ConsoleReader reader) throws Exception;
}
private static class Arg
{
final String name;
Arg(String name)
{
this.name = name;
}
@Override
public String toString()
{
return name;
}
}
private static class FileArg extends Arg
{
FileArg(String name)
{
super(name);
}
}
private static class CommandArg extends Arg
{
CommandArg(String name)
{
super(name);
}
}
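  // getStramAppLauncher() below accepts the application jar either as a plain local path / file://
  // URI or as an hdfs:// URI, e.g. "/home/user/myapp.jar" or "hdfs://namenode:8020/apps/myapp.jar"
  // (both paths are illustrative placeholders); any other scheme raises a CliException.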
private StramAppLauncher getStramAppLauncher(String jarfileUri, Configuration config) throws Exception
{
URI uri = new URI(jarfileUri);
String scheme = uri.getScheme();
StramAppLauncher appLauncher = null;
if (scheme == null || scheme.equals("file")) {
File jf = new File(uri.getPath());
appLauncher = new StramAppLauncher(jf, config);
}
else if (scheme.equals("hdfs")) {
FileSystem fs = FileSystem.get(uri, conf);
Path path = new Path(uri.getPath());
appLauncher = new StramAppLauncher(fs, path, config);
}
if (appLauncher != null) {
if (verboseLevel > 0) {
System.err.print(appLauncher.getMvnBuildClasspathOutput());
}
return appLauncher;
}
else {
throw new CliException("Scheme " + scheme + " not supported.");
}
}
private static class CommandSpec
{
Command command;
Arg[] requiredArgs;
Arg[] optionalArgs;
String description;
CommandSpec(Command command, Arg[] requiredArgs, Arg[] optionalArgs, String description)
{
this.command = command;
this.requiredArgs = requiredArgs;
this.optionalArgs = optionalArgs;
this.description = description;
}
void verifyArguments(String[] args) throws CliException
{
int minArgs = 0;
int maxArgs = 0;
if (requiredArgs != null) {
minArgs = requiredArgs.length;
maxArgs = requiredArgs.length;
}
if (optionalArgs != null) {
maxArgs += optionalArgs.length;
}
if (args.length - 1 < minArgs || args.length - 1 > maxArgs) {
throw new CliException("Command parameter error");
}
}
void printUsage(String cmd)
{
System.err.print("Usage: " + cmd);
if (requiredArgs != null) {
for (Arg arg : requiredArgs) {
System.err.print(" <" + arg + ">");
}
}
if (optionalArgs != null) {
for (Arg arg : optionalArgs) {
System.err.print(" [<" + arg + ">]");
}
}
System.err.println();
}
}
private static class OptionsCommandSpec extends CommandSpec
{
Options options;
OptionsCommandSpec(Command command, Arg[] requiredArgs, Arg[] optionalArgs, String description, Options options)
{
super(command, requiredArgs, optionalArgs, description);
this.options = options;
}
@Override
void verifyArguments(String[] args) throws CliException
{
try {
args = new PosixParser().parse(options, args).getArgs();
super.verifyArguments(args);
}
catch (Exception ex) {
throw new CliException("Command parameter error");
}
}
@Override
void printUsage(String cmd)
{
super.printUsage(cmd + ((options == null) ? "" : " [options]"));
if (options != null) {
System.out.println("Options:");
HelpFormatter formatter = new HelpFormatter();
PrintWriter pw = new PrintWriter(System.out);
formatter.printOptions(pw, 80, options, 4, 4);
pw.flush();
}
}
}
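  // The constructor below registers the three command tables that processLine() consults:
  // globalCommands are available whenever no logical-plan change is in progress, connectedCommands
  // additionally require "connect <appid>", and logicalPlanChangeCommands replace both while a
  // "begin-logical-plan-change" session is open.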
DTCli()
{
//
// Global command specification starts here
//
globalCommands.put("help", new CommandSpec(new HelpCommand(),
null,
new Arg[] {new CommandArg("command")},
"Show help"));
globalCommands.put("connect", new CommandSpec(new ConnectCommand(),
new Arg[] {new Arg("app-id")},
null,
"Connect to an app"));
globalCommands.put("launch", new OptionsCommandSpec(new LaunchCommand(),
new Arg[] {new FileArg("jar-file")},
new Arg[] {new Arg("class-name/property-file")},
"Launch an app", getLaunchCommandLineOptions()));
globalCommands.put("shutdown-app", new CommandSpec(new ShutdownAppCommand(),
new Arg[] {new Arg("app-id")},
null,
"Shutdown an app"));
globalCommands.put("list-apps", new CommandSpec(new ListAppsCommand(),
null,
new Arg[] {new Arg("pattern")},
"List applications"));
globalCommands.put("kill-app", new CommandSpec(new KillAppCommand(),
new Arg[] {new Arg("app-id")},
null,
"Kill an app"));
globalCommands.put("show-logical-plan", new OptionsCommandSpec(new ShowLogicalPlanCommand(),
new Arg[] {new FileArg("jar-file")},
new Arg[] {new Arg("class-name")},
"List apps in a jar or show logical plan of an app class",
getShowLogicalPlanCommandLineOptions()));
globalCommands.put("alias", new CommandSpec(new AliasCommand(),
new Arg[] {new Arg("alias-name"), new CommandArg("command")},
null,
"Create a command alias"));
globalCommands.put("source", new CommandSpec(new SourceCommand(),
new Arg[] {new FileArg("file")},
null,
"Execute the commands in a file"));
globalCommands.put("exit", new CommandSpec(new ExitCommand(),
null,
null,
"Exit the CLI"));
globalCommands.put("begin-macro", new CommandSpec(new BeginMacroCommand(),
new Arg[] {new Arg("name")},
null,
"Begin Macro Definition ($1...$9 to access parameters and type 'end' to end the definition)"));
globalCommands.put("dump-properties-file", new CommandSpec(new DumpPropertiesFileCommand(),
new Arg[] {new FileArg("out-file"), new FileArg("jar-file"), new Arg("class-name")},
null,
"Dump the properties file of an app class"));
globalCommands.put("get-app-info", new CommandSpec(new GetAppInfoCommand(),
new Arg[] {new Arg("app-id")},
null,
"Get the information of an app"));
globalCommands.put("set-pager", new CommandSpec(new SetPagerCommand(),
new Arg[] {new Arg("on/off")},
null,
"Set the pager program for output"));
globalCommands.put("generate-license-request", new CommandSpec(new GenerateLicenseRequestCommand(),
null,
null,
"Generate license request"));
globalCommands.put("activate-license", new CommandSpec(new ActivateLicenseCommand(),
null,
new Arg[] {new FileArg("license-file")},
"Launch the license agent"));
globalCommands.put("deactivate-license", new CommandSpec(new DeactivateLicenseCommand(),
null,
new Arg[] {new FileArg("license-file")},
"Stop the license agent"));
globalCommands.put("list-licenses", new CommandSpec(new ListLicensesCommand(),
null,
null,
"Show all IDs of all licenses"));
globalCommands.put("show-license-status", new CommandSpec(new ShowLicenseStatusCommand(),
null,
new Arg[] {new FileArg("license-file")},
"Show the status of the license"));
//
// Connected command specification starts here
//
connectedCommands.put("list-containers", new CommandSpec(new ListContainersCommand(),
null,
null,
"List containers"));
connectedCommands.put("list-operators", new CommandSpec(new ListOperatorsCommand(),
null,
new Arg[] {new Arg("pattern")},
"List operators"));
connectedCommands.put("show-physical-plan", new CommandSpec(new ShowPhysicalPlanCommand(),
null,
null,
"Show physical plan"));
connectedCommands.put("kill-container", new CommandSpec(new KillContainerCommand(),
new Arg[] {new Arg("container-id")},
null,
"Kill a container"));
connectedCommands.put("shutdown-app", new CommandSpec(new ShutdownAppCommand(),
null,
new Arg[] {new Arg("app-id")},
"Shutdown an app"));
connectedCommands.put("kill-app", new CommandSpec(new KillAppCommand(),
null,
new Arg[] {new Arg("app-id")},
"Kill an app"));
connectedCommands.put("wait", new CommandSpec(new WaitCommand(),
new Arg[] {new Arg("timeout")},
null,
"Wait for completion of current application"));
connectedCommands.put("start-recording", new CommandSpec(new StartRecordingCommand(),
new Arg[] {new Arg("operator-id")},
new Arg[] {new Arg("port-name")},
"Start recording"));
connectedCommands.put("stop-recording", new CommandSpec(new StopRecordingCommand(),
new Arg[] {new Arg("operator-id")},
new Arg[] {new Arg("port-name")},
"Stop recording"));
connectedCommands.put("get-operator-attributes", new CommandSpec(new GetOperatorAttributesCommand(),
new Arg[] {new Arg("operator-name")},
new Arg[] {new Arg("attribute-name")},
"Get attributes of an operator"));
connectedCommands.put("get-operator-properties", new CommandSpec(new GetOperatorPropertiesCommand(),
new Arg[] {new Arg("operator-name")},
new Arg[] {new Arg("property-name")},
"Get properties of an operator"));
connectedCommands.put("get-physical-operator-properties", new CommandSpec(new GetPhysicalOperatorPropertiesCommand(),
new Arg[] {new Arg("operator-name")},
new Arg[] {new Arg("property-name")},
"Get properties of an operator"));
connectedCommands.put("set-operator-property", new CommandSpec(new SetOperatorPropertyCommand(),
new Arg[] {new Arg("operator-name"), new Arg("property-name"), new Arg("property-value")},
null,
"Set a property of an operator"));
connectedCommands.put("set-physical-operator-property", new CommandSpec(new SetPhysicalOperatorPropertyCommand(),
new Arg[] {new Arg("operator-id"), new Arg("property-name"), new Arg("property-value")},
null,
"Set a property of an operator"));
connectedCommands.put("get-app-attributes", new CommandSpec(new GetAppAttributesCommand(),
null,
new Arg[] {new Arg("attribute-name")},
"Get attributes of the connected app"));
connectedCommands.put("get-port-attributes", new CommandSpec(new GetPortAttributesCommand(),
new Arg[] {new Arg("operator-name"), new Arg("port-name")},
new Arg[] {new Arg("attribute-name")},
"Get attributes of a port"));
connectedCommands.put("begin-logical-plan-change", new CommandSpec(new BeginLogicalPlanChangeCommand(),
null,
null,
"Begin Logical Plan Change"));
connectedCommands.put("show-logical-plan", new OptionsCommandSpec(new ShowLogicalPlanCommand(),
null,
new Arg[] {new FileArg("jar-file"), new Arg("class-name")},
"Show logical plan of an app class",
getShowLogicalPlanCommandLineOptions()));
connectedCommands.put("dump-properties-file", new CommandSpec(new DumpPropertiesFileCommand(),
new Arg[] {new FileArg("out-file")},
new Arg[] {new FileArg("jar-file"), new Arg("class-name")},
"Dump the properties file of an app class"));
connectedCommands.put("get-app-info", new CommandSpec(new GetAppInfoCommand(),
null,
new Arg[] {new Arg("app-id")},
"Get the information of an app"));
connectedCommands.put("create-alert", new CommandSpec(new CreateAlertCommand(),
new Arg[] {new Arg("name"), new FileArg("file")},
null,
"Create an alert with the name and the given file that contains the spec"));
connectedCommands.put("delete-alert", new CommandSpec(new DeleteAlertCommand(),
new Arg[] {new Arg("name")},
null,
"Delete an alert with the given name"));
connectedCommands.put("list-alerts", new CommandSpec(new ListAlertsCommand(),
null,
null,
"List all alerts"));
connectedCommands.put("get-recording-info", new CommandSpec(new GetRecordingInfoCommand(),
null,
new Arg[] {new Arg("operator-id"), new Arg("start-time")},
"Get tuple recording info"));
//
// Logical plan change command specification starts here
//
logicalPlanChangeCommands.put("help", new CommandSpec(new HelpCommand(),
null,
new Arg[] {new Arg("command")},
"Show help"));
logicalPlanChangeCommands.put("create-operator", new CommandSpec(new CreateOperatorCommand(),
new Arg[] {new Arg("operator-name"), new Arg("class-name")},
null,
"Create an operator"));
logicalPlanChangeCommands.put("create-stream", new CommandSpec(new CreateStreamCommand(),
new Arg[] {new Arg("stream-name"), new Arg("from-operator-name"), new Arg("from-port-name"), new Arg("to-operator-name"), new Arg("to-port-name")},
null,
"Create a stream"));
logicalPlanChangeCommands.put("add-stream-sink", new CommandSpec(new AddStreamSinkCommand(),
new Arg[] {new Arg("stream-name"), new Arg("to-operator-name"), new Arg("to-port-name")},
null,
"Add a sink to an existing stream"));
logicalPlanChangeCommands.put("remove-operator", new CommandSpec(new RemoveOperatorCommand(),
new Arg[] {new Arg("operator-name")},
null,
"Remove an operator"));
logicalPlanChangeCommands.put("remove-stream", new CommandSpec(new RemoveStreamCommand(),
new Arg[] {new Arg("stream-name")},
null,
"Remove a stream"));
logicalPlanChangeCommands.put("set-operator-property", new CommandSpec(new SetOperatorPropertyCommand(),
new Arg[] {new Arg("operator-name"), new Arg("property-name"), new Arg("property-value")},
null,
"Set a property of an operator"));
logicalPlanChangeCommands.put("set-operator-attribute", new CommandSpec(new SetOperatorAttributeCommand(),
new Arg[] {new Arg("operator-name"), new Arg("attr-name"), new Arg("attr-value")},
null,
"Set an attribute of an operator"));
logicalPlanChangeCommands.put("set-port-attribute", new CommandSpec(new SetPortAttributeCommand(),
new Arg[] {new Arg("operator-name"), new Arg("port-name"), new Arg("attr-name"), new Arg("attr-value")},
null,
"Set an attribute of a port"));
logicalPlanChangeCommands.put("set-stream-attribute", new CommandSpec(new SetStreamAttributeCommand(),
new Arg[] {new Arg("stream-name"), new Arg("attr-name"), new Arg("attr-value")},
null,
"Set an attribute of a stream"));
logicalPlanChangeCommands.put("show-queue", new CommandSpec(new ShowQueueCommand(),
null,
null,
"Show the queue of the plan change"));
logicalPlanChangeCommands.put("submit", new CommandSpec(new SubmitCommand(),
null,
null,
"Submit the plan change"));
logicalPlanChangeCommands.put("abort", new CommandSpec(new AbortCommand(),
null,
null,
"Abort the plan change"));
}
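  // Output sketch: when a JSONP padding function is set via the -p option (for example "-p callback",
  // where "callback" is an arbitrary caller-chosen name), printJson() wraps the payload as
  //   callback({...});
  // otherwise the JSON is printed as-is, pretty-printed unless raw mode (-r) is enabled; in either
  // case the output goes through the pager configured with "set-pager" when one is active.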
private void printJson(String json) throws IOException
{
PrintStream os = getOutputPrintStream();
if (jsonp != null) {
os.println(jsonp + "(" + json + ");");
}
else {
os.println(json);
}
os.flush();
closeOutputPrintStream(os);
}
private void printJson(JSONObject json) throws JSONException, IOException
{
printJson(raw ? json.toString() : json.toString(2));
}
private void printJson(JSONArray jsonArray, String name) throws JSONException, IOException
{
JSONObject json = new JSONObject();
json.put(name, jsonArray);
printJson(json);
}
private <K, V> void printJson(Map<K, V> map) throws IOException, JSONException
{
printJson(new JSONObject(mapper.writeValueAsString(map)));
}
private <T> void printJson(List<T> list, String name) throws IOException, JSONException
{
printJson(new JSONArray(mapper.writeValueAsString(list)), name);
}
private PrintStream getOutputPrintStream() throws IOException
{
if (pagerCommand == null) {
pagerProcess = null;
return System.out;
}
else {
pagerProcess = Runtime.getRuntime().exec(new String[] {"sh", "-c",
pagerCommand + " >/dev/tty"});
return new PrintStream(pagerProcess.getOutputStream());
}
}
private void closeOutputPrintStream(PrintStream os)
{
if (os != System.out) {
os.close();
try {
pagerProcess.waitFor();
}
catch (InterruptedException ex) {
LOG.debug("Interrupted");
}
}
}
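  // File name handling sketch (paths are illustrative): expandFileName()/expandFileNames() leave
  // URL-like names such as "hdfs://host/path" untouched, expand a leading "~/" to the current
  // user's home directory, canonicalize the path, and optionally resolve wildcards, so
  // "~/jars/*.jar" may expand to "/home/user/jars/app.jar"; expandCommaSeparatedFiles() applies
  // the same expansion to each entry of a comma separated list.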
private static String expandFileName(String fileName, boolean expandWildCard) throws IOException
{
if (fileName.matches("^[a-zA-Z]+:.*")) {
// it's a URL
return fileName;
}
// TODO: need to work with other users' home directory
if (fileName.startsWith("~" + File.separator)) {
fileName = System.getProperty("user.home") + fileName.substring(1);
}
fileName = new File(fileName).getCanonicalPath();
LOG.debug("Canonical path: {}", fileName);
if (expandWildCard) {
DirectoryScanner scanner = new DirectoryScanner();
scanner.setIncludes(new String[] {fileName});
scanner.scan();
String[] files = scanner.getIncludedFiles();
if (files.length == 0) {
throw new CliException(fileName + " does not match any file");
}
else if (files.length > 1) {
throw new CliException(fileName + " matches more than one file");
}
return files[0];
}
else {
return fileName;
}
}
private static String[] expandFileNames(String fileName) throws IOException
{
    // TODO: need to work with other users' home directories
if (fileName.matches("^[a-zA-Z]+:.*")) {
// it's a URL
return new String[] {fileName};
}
if (fileName.startsWith("~" + File.separator)) {
fileName = System.getProperty("user.home") + fileName.substring(1);
}
fileName = new File(fileName).getCanonicalPath();
LOG.debug("Canonical path: {}", fileName);
DirectoryScanner scanner = new DirectoryScanner();
scanner.setIncludes(new String[] {fileName});
scanner.scan();
return scanner.getIncludedFiles();
}
private static String expandCommaSeparatedFiles(String filenames) throws IOException
{
String[] entries = filenames.split(",");
StringBuilder result = new StringBuilder();
for (String entry : entries) {
for (String file : expandFileNames(entry)) {
if (result.length() > 0) {
result.append(",");
}
result.append(file);
}
}
return result.toString();
}
protected ApplicationReport getApplication(String appId)
{
List<ApplicationReport> appList = getApplicationList();
if (StringUtils.isNumeric(appId)) {
int appSeq = Integer.parseInt(appId);
for (ApplicationReport ar : appList) {
if (ar.getApplicationId().getId() == appSeq) {
return ar;
}
}
}
else {
for (ApplicationReport ar : appList) {
if (ar.getApplicationId().toString().equals(appId)) {
return ar;
}
}
}
return null;
}
private static class CliException extends RuntimeException
{
private static final long serialVersionUID = 1L;
CliException(String msg, Throwable cause)
{
super(msg, cause);
}
CliException(String msg)
{
super(msg);
}
}
public void init(String[] args) throws IOException
{
consolePresent = (System.console() != null);
Options options = new Options();
options.addOption("e", true, "Commands are read from the argument");
options.addOption("v", false, "Verbose mode level 1");
options.addOption("vv", false, "Verbose mode level 2");
options.addOption("vvv", false, "Verbose mode level 3");
options.addOption("vvvv", false, "Verbose mode level 4");
options.addOption("r", false, "JSON Raw mode");
options.addOption("p", true, "JSONP padding function");
options.addOption("h", false, "Print this help");
CommandLineParser parser = new BasicParser();
try {
CommandLine cmd = parser.parse(options, args);
if (cmd.hasOption("v")) {
verboseLevel = 1;
}
if (cmd.hasOption("vv")) {
verboseLevel = 2;
}
if (cmd.hasOption("vvv")) {
verboseLevel = 3;
}
if (cmd.hasOption("vvvv")) {
verboseLevel = 4;
}
if (cmd.hasOption("r")) {
raw = true;
}
if (cmd.hasOption("e")) {
commandsToExecute = cmd.getOptionValues("e");
consolePresent = false;
}
if (cmd.hasOption("p")) {
jsonp = cmd.getOptionValue("p");
}
if (cmd.hasOption("h")) {
HelpFormatter formatter = new HelpFormatter();
formatter.printHelp(DTCli.class.getSimpleName(), options);
System.exit(0);
}
}
catch (ParseException ex) {
System.err.println("Invalid argument: " + ex);
System.exit(1);
}
Level logLevel;
switch (verboseLevel) {
case 0:
logLevel = Level.OFF;
break;
case 1:
logLevel = Level.ERROR;
break;
case 2:
logLevel = Level.WARN;
break;
case 3:
logLevel = Level.INFO;
break;
default:
logLevel = Level.DEBUG;
break;
}
for (org.apache.log4j.Logger logger : new org.apache.log4j.Logger[] {org.apache.log4j.Logger.getRootLogger(),
org.apache.log4j.Logger.getLogger(DTCli.class)}) {
@SuppressWarnings("unchecked")
Enumeration<Appender> allAppenders = logger.getAllAppenders();
while (allAppenders.hasMoreElements()) {
Appender appender = allAppenders.nextElement();
if (appender instanceof ConsoleAppender) {
((ConsoleAppender)appender).setThreshold(logLevel);
}
}
}
if (commandsToExecute != null) {
for (String command : commandsToExecute) {
LOG.debug("Command to be executed: {}", command);
}
}
StramClientUtils.addStramResources(conf);
StramAgent.setResourceManagerWebappAddress(conf.get(YarnConfiguration.RM_WEBAPP_ADDRESS, "localhost:8088"));
// Need to initialize security before starting RPC for the credentials to
// take effect
StramUserLogin.attemptAuthentication(conf);
YarnClientHelper yarnClient = new YarnClientHelper(conf);
rmClient = new ClientRMHelper(yarnClient);
String socks = conf.get(CommonConfigurationKeysPublic.HADOOP_SOCKS_SERVER_KEY);
if (socks != null) {
int colon = socks.indexOf(':');
if (colon > 0) {
System.setProperty("socksProxyHost", socks.substring(0, colon));
System.setProperty("socksProxyPort", socks.substring(colon + 1));
}
}
}
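  // processSourceFile() below replays a script file through the normal command pipeline; run() uses
  // it to source ~/.stram/clirc_system and ~/.stram/clirc at startup, and the "source" command is
  // registered to expose the same behavior to the user.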
private void processSourceFile(String fileName, ConsoleReader reader) throws FileNotFoundException, IOException
{
boolean consolePresentSaved = consolePresent;
consolePresent = false;
FileLineReader fr = new FileLineReader(fileName);
String line;
try {
while ((line = fr.readLine("")) != null) {
processLine(line, fr, true);
}
}
finally {
fr.close();
consolePresent = consolePresentSaved;
}
}
private final static class MyNullCompleter implements Completer
{
public static final MyNullCompleter INSTANCE = new MyNullCompleter();
@Override
public int complete(final String buffer, final int cursor, final List<CharSequence> candidates)
{
candidates.add("");
return cursor;
}
}
private final static class MyFileNameCompleter extends FileNameCompleter
{
@Override
public int complete(final String buffer, final int cursor, final List<CharSequence> candidates)
{
int result = super.complete(buffer, cursor, candidates);
if (candidates.isEmpty()) {
candidates.add("");
result = cursor;
}
return result;
}
}
private List<Completer> defaultCompleters()
{
Map<String, CommandSpec> commands = new TreeMap<String, CommandSpec>();
commands.putAll(logicalPlanChangeCommands);
commands.putAll(connectedCommands);
commands.putAll(globalCommands);
List<Completer> completers = new LinkedList<Completer>();
for (Map.Entry<String, CommandSpec> entry : commands.entrySet()) {
String command = entry.getKey();
CommandSpec cs = entry.getValue();
List<Completer> argCompleters = new LinkedList<Completer>();
argCompleters.add(new StringsCompleter(command));
Arg[] args = (Arg[])ArrayUtils.addAll(cs.requiredArgs, cs.optionalArgs);
if (args != null) {
if (cs instanceof OptionsCommandSpec) {
          // ugly hack because jline cannot dynamically change the completer while the user types
if (args[0] instanceof FileArg) {
for (int i = 0; i < 10; i++) {
argCompleters.add(new MyFileNameCompleter());
}
}
}
else {
for (Arg arg : args) {
if (arg instanceof FileArg) {
argCompleters.add(new MyFileNameCompleter());
}
else if (arg instanceof CommandArg) {
argCompleters.add(new StringsCompleter(commands.keySet().toArray(new String[] {})));
}
else {
argCompleters.add(MyNullCompleter.INSTANCE);
}
}
}
}
completers.add(new ArgumentCompleter(argCompleters));
}
List<Completer> argCompleters = new LinkedList<Completer>();
Set<String> set = new TreeSet<String>();
set.addAll(aliases.keySet());
set.addAll(macros.keySet());
argCompleters.add(new StringsCompleter(set.toArray(new String[] {})));
for (int i = 0; i < 10; i++) {
argCompleters.add(new MyFileNameCompleter());
}
completers.add(new ArgumentCompleter(argCompleters));
return completers;
}
private void setupCompleter(ConsoleReader reader)
{
reader.addCompleter(new AggregateCompleter(defaultCompleters()));
}
private void updateCompleter(ConsoleReader reader)
{
List<Completer> completers = new ArrayList<Completer>(reader.getCompleters());
for (Completer c : completers) {
reader.removeCompleter(c);
}
setupCompleter(reader);
}
private void setupHistory(ConsoleReader reader)
{
File historyFile = new File(StramClientUtils.getSettingsRootDir(), "cli_history");
historyFile.getParentFile().mkdirs();
try {
topLevelHistory = new FileHistory(historyFile);
reader.setHistory(topLevelHistory);
historyFile = new File(StramClientUtils.getSettingsRootDir(), "cli_history_clp");
changingLogicalPlanHistory = new FileHistory(historyFile);
}
catch (IOException ex) {
System.err.printf("Unable to open %s for writing.", historyFile);
}
}
private void setupAgents() throws IOException
{
recordingsAgent = new RecordingsAgent();
recordingsAgent.setup();
}
public void run() throws IOException
{
ConsoleReader reader = new ConsoleReader();
reader.setBellEnabled(false);
try {
processSourceFile(System.getProperty("user.home") + "/.stram/clirc_system", reader);
processSourceFile(System.getProperty("user.home") + "/.stram/clirc", reader);
}
catch (Exception ex) {
// ignore
}
if (consolePresent) {
printWelcomeMessage();
printLicenseStatus();
setupCompleter(reader);
setupHistory(reader);
}
setupAgents();
String line;
PrintWriter out = new PrintWriter(System.out);
int i = 0;
while (true) {
if (commandsToExecute != null) {
if (i >= commandsToExecute.length) {
break;
}
line = commandsToExecute[i++];
}
else {
line = readLine(reader);
if (line == null) {
break;
}
}
processLine(line, reader, true);
out.flush();
}
if (topLevelHistory != null) {
topLevelHistory.flush();
}
if (changingLogicalPlanHistory != null) {
changingLogicalPlanHistory.flush();
}
if (consolePresent) {
System.out.println("exit");
}
}
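  // Macro expansion sketch (macro and argument names are illustrative): expandMacro() substitutes
  // $0..$9 inside each stored line, where $0 is the macro name and $1..$9 are the invocation
  // arguments; missing arguments become empty strings. A macro "start" containing the line
  //   launch $1
  // invoked as "start myapp.jar" therefore expands to "launch myapp.jar".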
private List<String> expandMacro(List<String> lines, String[] args)
{
List<String> expandedLines = new ArrayList<String>();
for (String line : lines) {
int previousIndex = 0;
StringBuilder expandedLine = new StringBuilder();
while (true) {
        // Search for $0..$9 within each line and replace with the corresponding args
int currentIndex = line.indexOf('$', previousIndex);
        if (currentIndex >= 0 && line.length() > currentIndex + 1) {
int argIndex = line.charAt(currentIndex + 1) - '0';
if (args.length > argIndex && argIndex >= 0) {
// Replace $0 with macro name or $1..$9 with input arguments
expandedLine.append(line.substring(previousIndex, currentIndex)).append(args[argIndex]);
}
else if (argIndex >= 0 && argIndex <= 9) {
// Arguments for $1..$9 were not supplied - replace with empty strings
expandedLine.append(line.substring(previousIndex, currentIndex));
}
else {
// Outside valid arguments range - ignore and do not replace
expandedLine.append(line.substring(previousIndex, currentIndex + 2));
}
currentIndex += 2;
}
else {
expandedLine.append(line.substring(previousIndex));
expandedLines.add(expandedLine.toString());
break;
}
previousIndex = currentIndex;
}
}
return expandedLines;
}
private static String ltrim(String s)
{
int i = 0;
while (i < s.length() && Character.isWhitespace(s.charAt(i))) {
i++;
}
return s.substring(i);
}
private void processLine(String line, ConsoleReader reader, boolean expandMacroAlias)
{
try {
//LOG.debug("line: \"{}\"", line);
List<String[]> commands = Tokenizer.tokenize(line);
if (commands == null) {
return;
}
for (String[] args : commands) {
if (args.length == 0 || StringUtils.isBlank(args[0])) {
continue;
}
//LOG.debug("Got: {}", mapper.writeValueAsString(args));
if (expandMacroAlias) {
if (macros.containsKey(args[0])) {
List<String> macroItems = expandMacro(macros.get(args[0]), args);
for (String macroItem : macroItems) {
if (consolePresent) {
System.out.println("expanded-macro> " + macroItem);
}
processLine(macroItem, reader, false);
}
continue;
}
if (aliases.containsKey(args[0])) {
processLine(aliases.get(args[0]), reader, false);
continue;
}
}
CommandSpec cs = null;
if (changingLogicalPlan) {
cs = logicalPlanChangeCommands.get(args[0]);
}
else {
if (currentApp != null) {
cs = connectedCommands.get(args[0]);
}
if (cs == null) {
cs = globalCommands.get(args[0]);
}
}
if (cs == null) {
if (connectedCommands.get(args[0]) != null) {
System.err.println("\"" + args[0] + "\" is valid only when connected to an application. Type \"connect <appid>\" to connect to an application.");
}
else if (logicalPlanChangeCommands.get(args[0]) != null) {
System.err.println("\"" + args[0] + "\" is valid only when changing a logical plan. Type \"begin-logical-plan-change\" to change a logical plan");
}
else {
System.err.println("Invalid command '" + args[0] + "'. Type \"help\" for list of commands");
}
}
else {
try {
cs.verifyArguments(args);
}
catch (CliException ex) {
cs.printUsage(args[0]);
throw ex;
}
cs.command.execute(args, reader);
lastCommandError = false;
}
}
}
catch (CliException e) {
System.err.println(e.getMessage());
LOG.debug("Error processing line: " + line, e);
lastCommandError = true;
}
catch (Exception e) {
System.err.println("Unexpected error: " + e);
LOG.error("Error processing line: {}", line, e);
lastCommandError = true;
}
}
private void printWelcomeMessage()
{
System.out.println("DT CLI " + VersionInfo.getVersion() + " " + VersionInfo.getDate() + " " + VersionInfo.getRevision());
}
private void printLicenseStatus()
{
try {
JSONObject licenseStatus = getLicenseStatus(null);
if (!licenseStatus.has("agentAppId")) {
System.out.println("License agent is not running. Please run the license agent first by typing \"activate-license\"");
return;
}
if (licenseStatus.has("remainingLicensedMB")) {
int remainingLicensedMB = licenseStatus.getInt("remainingLicensedMB");
if (remainingLicensedMB > 0) {
System.out.println("You have " + remainingLicensedMB + "MB remaining for the current license.");
}
else {
System.out.println("You do not have any memory allowance left for the current license. Please contact DataTorrent, Inc. <[email protected]> for help.");
}
}
}
catch (Exception ex) {
LOG.error("Caught exception when getting license info", ex);
System.out.println("Error getting license status. Please contact DataTorrent, Inc. <[email protected]> for help.");
}
}
private void printHelp(String command, CommandSpec commandSpec, PrintStream os)
{
if (consolePresent) {
os.print("\033[0;93m");
os.print(command);
os.print("\033[0m");
}
else {
os.print(command);
}
if (commandSpec instanceof OptionsCommandSpec) {
OptionsCommandSpec ocs = (OptionsCommandSpec)commandSpec;
if (ocs.options != null) {
os.print(" [options]");
}
}
if (commandSpec.requiredArgs != null) {
for (Arg arg : commandSpec.requiredArgs) {
if (consolePresent) {
os.print(" \033[3m" + arg + "\033[0m");
}
else {
os.print(" <" + arg + ">");
}
}
}
if (commandSpec.optionalArgs != null) {
for (Arg arg : commandSpec.optionalArgs) {
if (consolePresent) {
os.print(" [\033[3m" + arg + "\033[0m]");
}
else {
os.print(" [<" + arg + ">]");
}
}
}
os.println("\n\t" + commandSpec.description);
if (commandSpec instanceof OptionsCommandSpec) {
OptionsCommandSpec ocs = (OptionsCommandSpec)commandSpec;
if (ocs.options != null) {
os.println("\tOptions:");
HelpFormatter formatter = new HelpFormatter();
PrintWriter pw = new PrintWriter(os);
formatter.printOptions(pw, 80, ocs.options, 12, 4);
pw.flush();
}
}
}
private void printHelp(Map<String, CommandSpec> commandSpecs, PrintStream os)
{
for (Map.Entry<String, CommandSpec> entry : commandSpecs.entrySet()) {
printHelp(entry.getKey(), entry.getValue(), os);
}
}
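  // Prompt sketch (application id is illustrative): readLine() below shows "dt> " at the top level,
  // "dt (application_1384000000000_0001) > " once connected, and "logical-plan-change ... > " while
  // a logical plan change is in progress; no prompt is printed when the console is not interactive.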
private String readLine(ConsoleReader reader)
throws IOException
{
String prompt = "";
if (consolePresent) {
if (changingLogicalPlan) {
prompt = "logical-plan-change";
}
else {
prompt = "dt";
}
if (currentApp != null) {
prompt += " (";
prompt += currentApp.getApplicationId().toString();
prompt += ") ";
}
prompt += "> ";
}
String line = reader.readLine(prompt);
if (line == null) {
return null;
}
return ltrim(line);
}
private List<ApplicationReport> getApplicationList()
{
try {
GetApplicationsRequest appsReq = GetApplicationsRequest.newInstance();
appsReq.setApplicationTypes(Sets.newHashSet(StramClient.YARN_APPLICATION_TYPE));
return rmClient.clientRM.getApplications(appsReq).getApplicationList();
}
catch (Exception e) {
throw new CliException("Error getting application list from resource manager: " + e.getMessage(), e);
}
}
private List<ApplicationReport> getRunningApplicationList()
{
try {
GetApplicationsRequest appsReq = GetApplicationsRequest.newInstance();
appsReq.setApplicationTypes(Sets.newHashSet(StramClient.YARN_APPLICATION_TYPE));
appsReq.setApplicationStates(EnumSet.of(YarnApplicationState.RUNNING));
return rmClient.clientRM.getApplications(appsReq).getApplicationList();
}
catch (Exception e) {
throw new CliException("Error getting application list from resource manager: " + e.getMessage(), e);
}
}
private List<ApplicationReport> getLicenseList()
{
try {
GetApplicationsRequest appsReq = GetApplicationsRequest.newInstance();
appsReq.setApplicationTypes(Sets.newHashSet(StramClient.YARN_APPLICATION_TYPE_LICENSE));
appsReq.setApplicationStates(EnumSet.of(YarnApplicationState.RUNNING));
return rmClient.clientRM.getApplications(appsReq).getApplicationList();
}
catch (Exception e) {
throw new CliException("Error getting application list from resource manager: " + e.getMessage(), e);
}
}
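  // Container lookup sketch (ids are illustrative): getContainerLongId() resolves either a full
  // container id or its short numeric suffix against the physical plan, so "5" would match a
  // container reported as "container_1384000000000_0001_01_000005" and the full id is returned;
  // null is returned when nothing matches.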
private String getContainerLongId(String containerId)
{
ClientResponse rsp = getResource(StramWebServices.PATH_PHYSICAL_PLAN_CONTAINERS, currentApp);
JSONObject json = rsp.getEntity(JSONObject.class);
int shortId = 0;
if (StringUtils.isNumeric(containerId)) {
shortId = Integer.parseInt(containerId);
}
try {
Object containersObj = json.get("containers");
JSONArray containers;
if (containersObj instanceof JSONArray) {
containers = (JSONArray)containersObj;
}
else {
containers = new JSONArray();
containers.put(containersObj);
}
if (containersObj != null) {
for (int o = containers.length(); o-- > 0;) {
JSONObject container = containers.getJSONObject(o);
String id = container.getString("id");
if (id.equals(containerId) || (shortId != 0 && (id.endsWith("_" + shortId) || id.endsWith("0" + shortId)))) {
return id;
}
}
}
}
    catch (JSONException ex) {
      // malformed or missing container info; fall through and return null
    }
return null;
}
private ApplicationReport assertRunningApp(ApplicationReport app)
{
ApplicationReport r;
try {
r = rmClient.getApplicationReport(app.getApplicationId());
if (r.getYarnApplicationState() != YarnApplicationState.RUNNING) {
String msg = String.format("Application %s not running (status %s)",
r.getApplicationId().getId(), r.getYarnApplicationState());
throw new CliException(msg);
}
}
catch (YarnException rmExc) {
throw new CliException("Unable to determine application status.", rmExc);
}
catch (IOException rmExc) {
throw new CliException("Unable to determine application status.", rmExc);
}
return r;
}
private ClientResponse getResource(String resourcePath, ApplicationReport appReport)
{
if (appReport == null) {
throw new CliException("No application selected");
}
if (StringUtils.isEmpty(appReport.getTrackingUrl()) || appReport.getFinalApplicationStatus() != FinalApplicationStatus.UNDEFINED) {
appReport = null;
throw new CliException("Application terminated.");
}
WebServicesClient wsClient = new WebServicesClient();
Client client = wsClient.getClient();
client.setFollowRedirects(true);
WebResource r;
try {
r = StramAgent.getStramWebResource(wsClient, appReport.getApplicationId().toString());
}
catch (IncompatibleVersionException ex) {
throw new CliException("Incompatible stram version", ex);
}
if (r == null) {
throw new CliException("Application " + appReport.getApplicationId().toString() + " has not started");
}
r = r.path(resourcePath);
try {
return wsClient.process(r, ClientResponse.class, new WebServicesClient.WebServicesHandler<ClientResponse>()
{
@Override
public ClientResponse process(WebResource webResource, Class<ClientResponse> clazz)
{
ClientResponse response = webResource.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
if (!MediaType.APPLICATION_JSON_TYPE.equals(response.getType())) {
throw new CliException("Unexpected response type " + response.getType());
}
return response;
}
});
}
catch (Exception e) {
// check the application status as above may have failed due application termination etc.
if (appReport == currentApp) {
currentApp = assertRunningApp(appReport);
}
throw new CliException("Failed to request " + r.getURI(), e);
}
}
private WebResource getStramWebResource(WebServicesClient webServicesClient, ApplicationReport appReport)
{
if (appReport == null) {
throw new CliException("No application selected");
}
// YARN-156 WebAppProxyServlet does not support POST - for now bypass it for this request
    appReport = assertRunningApp(appReport); // or else the tracking URL might still be "N/A"
try {
return StramAgent.getStramWebResource(webServicesClient, appReport.getApplicationId().toString());
}
catch (IncompatibleVersionException ex) {
throw new CliException("Incompatible Stram version", ex);
}
}
private List<AppFactory> getMatchingAppFactories(StramAppLauncher submitApp, String matchString)
{
try {
List<AppFactory> cfgList = submitApp.getBundledTopologies();
if (cfgList.isEmpty()) {
return null;
}
else if (matchString == null) {
return cfgList;
}
else {
List<AppFactory> result = new ArrayList<AppFactory>();
for (AppFactory ac : cfgList) {
if (ac.getName().matches(".*" + matchString + ".*")) {
result.add(ac);
}
}
return result;
}
}
catch (Exception ex) {
return null;
}
}
/*
* Below is the implementation of all commands
*/
private class HelpCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
PrintStream os = getOutputPrintStream();
if (args.length < 2) {
os.println("GLOBAL COMMANDS EXCEPT WHEN CHANGING LOGICAL PLAN:\n");
printHelp(globalCommands, os);
os.println();
os.println("COMMANDS WHEN CONNECTED TO AN APP (via connect <appid>) EXCEPT WHEN CHANGING LOGICAL PLAN:\n");
printHelp(connectedCommands, os);
os.println();
os.println("COMMANDS WHEN CHANGING LOGICAL PLAN (via begin-logical-plan-change):\n");
printHelp(logicalPlanChangeCommands, os);
os.println();
}
else {
if (args[1].equals("help")) {
printHelp("help", globalCommands.get("help"), os);
}
else {
boolean valid = false;
CommandSpec cs = globalCommands.get(args[1]);
if (cs != null) {
os.println("This usage is valid except when changing logical plan");
printHelp(args[1], cs, os);
os.println();
valid = true;
}
cs = connectedCommands.get(args[1]);
if (cs != null) {
os.println("This usage is valid when connected to an app except when changing logical plan");
printHelp(args[1], cs, os);
os.println();
valid = true;
}
cs = logicalPlanChangeCommands.get(args[1]);
if (cs != null) {
os.println("This usage is only valid when changing logical plan (via begin-logical-plan-change)");
printHelp(args[1], cs, os);
os.println();
valid = true;
}
if (!valid) {
os.println("Help for \"" + args[1] + "\" does not exist.");
}
}
}
closeOutputPrintStream(os);
}
}
private class ConnectCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
currentApp = getApplication(args[1]);
if (currentApp == null) {
throw new CliException("Invalid application id: " + args[1]);
}
boolean connected = false;
try {
LOG.debug("Selected {} with tracking url {}", currentApp.getApplicationId(), currentApp.getTrackingUrl());
ClientResponse rsp = getResource(StramWebServices.PATH_INFO, currentApp);
rsp.getEntity(JSONObject.class);
connected = true; // set as current only upon successful connection
if (consolePresent) {
System.out.println("Connected to application " + currentApp.getApplicationId());
}
}
catch (CliException e) {
throw e; // pass on
}
finally {
if (!connected) {
//currentApp = null;
//currentDir = "/";
}
}
}
}
private class ActivateLicenseCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
byte[] licenseBytes;
if (args.length > 1) {
licenseBytes = StramClientUtils.getLicense(args[1]);
}
else {
licenseBytes = StramClientUtils.getLicense(conf);
}
String licenseId = License.getLicenseID(licenseBytes);
License.validateLicense(licenseBytes);
LogicalPlan lp = new LogicalPlan();
lp.setAttribute(DAG.APPLICATION_NAME, licenseId);
lp.setAttribute(LogicalPlan.LICENSE, Base64.encodeBase64String(licenseBytes)); // TODO: obfuscate license passing
StramClient client = new StramClient(lp);
client.setApplicationType(StramClient.YARN_APPLICATION_TYPE_LICENSE);
client.startApplication();
System.err.println("Started license agent for " + licenseId);
}
}
private class DeactivateLicenseCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
byte[] licenseBytes;
if (args.length > 1) {
licenseBytes = StramClientUtils.getLicense(args[1]);
}
else {
licenseBytes = StramClientUtils.getLicense(conf);
}
String licenseId = License.getLicenseID(licenseBytes);
License.validateLicense(licenseBytes);
// TODO: migrate CLI to use YarnClient and this here won't be needed
YarnClient clientRMService = YarnClient.createYarnClient();
try {
clientRMService.init(conf);
clientRMService.start();
ApplicationReport ar = LicensingAgentClient.getLicensingAgentAppReport(licenseId, clientRMService);
if (ar == null) {
throw new CliException("License not activated: " + licenseId);
}
rmClient.killApplication(ar.getApplicationId());
System.err.println("Stopped license agent for " + licenseId);
}
finally {
clientRMService.stop();
}
}
}
private static class LicenseInfo
{
int remainingLicensedMB;
long lastUpdate;
// add expiration date range here
}
private Map<String, LicenseInfo> getLicenseInfoMap() throws JSONException, IOException
{
List<ApplicationReport> runningApplicationList = getRunningApplicationList();
WebServicesClient webServicesClient = new WebServicesClient();
Map<String, LicenseInfo> licenseInfoMap = new HashMap<String, LicenseInfo>();
for (ApplicationReport ar : runningApplicationList) {
WebResource r = getStramWebResource(webServicesClient, ar).path(StramWebServices.PATH_INFO);
JSONObject response = webServicesClient.process(r, JSONObject.class, new WebServicesClient.WebServicesHandler<JSONObject>()
{
@Override
public JSONObject process(WebResource webResource, Class<JSONObject> clazz)
{
return webResource.accept(MediaType.APPLICATION_JSON).get(JSONObject.class);
}
});
if (!response.has("licenseInfoLastUpdate")) {
continue;
}
long lastUpdate = Long.valueOf(response.getString("licenseInfoLastUpdate"));
String licenseId = response.getString("licenseId");
int remainingLicensedMB = Integer.valueOf(response.getString("remainingLicensedMB"));
LicenseInfo licenseInfo;
if (licenseInfoMap.containsKey(licenseId)) {
licenseInfo = licenseInfoMap.get(licenseId);
if (licenseInfo.lastUpdate < lastUpdate) {
licenseInfo.remainingLicensedMB = remainingLicensedMB;
licenseInfo.lastUpdate = lastUpdate;
}
}
else {
licenseInfo = new LicenseInfo();
licenseInfo.remainingLicensedMB = remainingLicensedMB;
licenseInfo.lastUpdate = lastUpdate;
licenseInfoMap.put(licenseId, licenseInfo);
}
}
return licenseInfoMap;
}
private class ListLicensesCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
Map<String, LicenseInfo> licenseInfoMap = getLicenseInfoMap();
try {
JSONArray jsonArray = new JSONArray();
List<ApplicationReport> licList = getLicenseList();
Collections.sort(licList, new Comparator<ApplicationReport>()
{
@Override
public int compare(ApplicationReport o1, ApplicationReport o2)
{
return o1.getApplicationId().getId() - o2.getApplicationId().getId();
}
});
for (ApplicationReport ar : licList) {
JSONObject jsonObj = new JSONObject();
jsonObj.put("id", ar.getName());
jsonObj.put("agentAppId", ar.getApplicationId().getId());
if (licenseInfoMap.containsKey(ar.getName())) {
jsonObj.put("remainingLicensedMB", licenseInfoMap.get(ar.getName()).remainingLicensedMB);
}
jsonArray.put(jsonObj);
}
printJson(jsonArray, "licenses");
}
catch (Exception ex) {
throw new CliException("Failed to retrieve license list", ex);
}
}
}
private JSONObject getLicenseStatus(String licenseFile) throws Exception
{
byte[] licenseBytes;
if (licenseFile != null) {
licenseBytes = StramClientUtils.getLicense(licenseFile);
}
else {
licenseBytes = StramClientUtils.getLicense(conf);
}
String licenseID = License.getLicenseID(licenseBytes);
SubLicense[] subLicenses = License.validateGetSubLicenses(licenseBytes);
JSONObject licenseObj = new JSONObject();
licenseObj.put("id", licenseID);
JSONArray sublicArray = new JSONArray();
SimpleDateFormat sdf = new SimpleDateFormat(SubLicense.DATE_FORMAT);
for (SubLicense sublic : subLicenses) {
JSONObject sublicObj = new JSONObject();
sublicObj.put("startDate", sdf.format(sublic.getStartDate()));
sublicObj.put("endDate", sdf.format(sublic.getEndDate()));
sublicObj.put("comment", sublic.getComment());
sublicObj.put("processorList", sublic.getProcessorListAsJSONArray());
sublicObj.put("constraint", sublic.getConstraint());
sublicObj.put("url", sublic.getUrl());
sublicArray.put(sublicObj);
}
licenseObj.put("sublicenses", sublicArray);
List<ApplicationReport> licList = getLicenseList();
for (ApplicationReport ar : licList) {
if (ar.getName().equals(licenseID)) {
licenseObj.put("agentAppId", ar.getApplicationId().toString());
break;
}
}
Map<String, LicenseInfo> licenseInfoMap = getLicenseInfoMap();
if (licenseInfoMap.containsKey(licenseID)) {
licenseObj.put("remainingLicensedMB", licenseInfoMap.get(licenseID).remainingLicensedMB);
}
return licenseObj;
}
private class ShowLicenseStatusCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
JSONObject licenseObj = getLicenseStatus(args.length > 1 ? args[1] : null);
printJson(licenseObj);
}
}
private class GenerateLicenseRequestCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
String b64EncodedString = new GenerateLicenseRequest().getLicenseRequest(Util.getDefaultPublicKey());
System.out.println("-------------------------- Cut from below ------------------------------");
System.out.println(b64EncodedString);
System.out.println("------------------------------------------------------------------------");
}
}
private class LaunchCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
String[] newArgs = new String[args.length - 1];
System.arraycopy(args, 1, newArgs, 0, args.length - 1);
LaunchCommandLineInfo commandLineInfo = getLaunchCommandLineInfo(newArgs);
if (commandLineInfo.configFile != null) {
commandLineInfo.configFile = expandFileName(commandLineInfo.configFile, true);
}
Configuration config = StramAppLauncher.getConfig(commandLineInfo.configFile, commandLineInfo.overrideProperties);
if (commandLineInfo.libjars != null) {
commandLineInfo.libjars = expandCommaSeparatedFiles(commandLineInfo.libjars);
config.set(StramAppLauncher.LIBJARS_CONF_KEY_NAME, commandLineInfo.libjars);
}
if (commandLineInfo.files != null) {
commandLineInfo.files = expandCommaSeparatedFiles(commandLineInfo.files);
config.set(StramAppLauncher.FILES_CONF_KEY_NAME, commandLineInfo.files);
}
if (commandLineInfo.archives != null) {
commandLineInfo.archives = expandCommaSeparatedFiles(commandLineInfo.archives);
config.set(StramAppLauncher.ARCHIVES_CONF_KEY_NAME, commandLineInfo.archives);
}
if (commandLineInfo.licenseFile != null) {
commandLineInfo.licenseFile = expandFileName(commandLineInfo.licenseFile, true);
}
String fileName = expandFileName(commandLineInfo.args[0], true);
StramAppLauncher submitApp = getStramAppLauncher(fileName, config);
submitApp.loadDependencies();
AppFactory appFactory = null;
if (commandLineInfo.args.length >= 2) {
File file = new File(commandLineInfo.args[1]);
if (file.exists()) {
appFactory = new StramAppLauncher.PropertyFileAppFactory(file);
}
}
if (appFactory == null) {
String matchString = commandLineInfo.args.length >= 2 ? commandLineInfo.args[1] : null;
List<AppFactory> matchingAppFactories = getMatchingAppFactories(submitApp, matchString);
if (matchingAppFactories == null || matchingAppFactories.isEmpty()) {
throw new CliException("No matching applications bundled in jar.");
}
else if (matchingAppFactories.size() == 1) {
appFactory = matchingAppFactories.get(0);
}
else if (matchingAppFactories.size() > 1) {
// Display matching applications
for (int i = 0; i < matchingAppFactories.size(); i++) {
String appName = matchingAppFactories.get(i).getName();
String appAlias = submitApp.getLogicalPlanConfiguration().getAppAlias(appName);
if (appAlias != null) {
appName = appAlias;
}
System.out.printf("%3d. %s\n", i + 1, appName);
}
// Exit if not in interactive mode
if (!consolePresent) {
throw new CliException("More than one application in jar file match '" + matchString + "'");
}
else {
boolean useHistory = reader.isHistoryEnabled();
reader.setHistoryEnabled(false);
History previousHistory = reader.getHistory();
History dummyHistory = new MemoryHistory();
reader.setHistory(dummyHistory);
List<Completer> completers = new ArrayList<Completer>(reader.getCompleters());
for (Completer c : completers) {
reader.removeCompleter(c);
}
String optionLine = reader.readLine("Choose application: ");
reader.setHistoryEnabled(useHistory);
reader.setHistory(previousHistory);
for (Completer c : completers) {
reader.addCompleter(c);
}
try {
int option = Integer.parseInt(optionLine);
if (0 < option && option <= matchingAppFactories.size()) {
appFactory = matchingAppFactories.get(option - 1);
}
}
catch (Exception ex) {
// ignore
}
}
}
}
if (appFactory != null) {
if (!commandLineInfo.localMode) {
byte[] licenseBytes;
if (commandLineInfo.licenseFile != null) {
licenseBytes = StramClientUtils.getLicense(commandLineInfo.licenseFile);
}
else {
licenseBytes = StramClientUtils.getLicense(conf);
}
String licenseId = License.getLicenseID(licenseBytes);
YarnClient clientRMService = YarnClient.createYarnClient();
clientRMService.init(conf);
clientRMService.start();
ApplicationReport ar = LicensingAgentClient.getLicensingAgentAppReport(licenseId, clientRMService);
if (ar == null) {
throw new CliException("License not activated. Please run activate-license first before launching any streaming application");
}
ApplicationId appId = submitApp.launchApp(appFactory);
currentApp = rmClient.getApplicationReport(appId);
printJson("{\"appId\": \"" + appId + "\"}");
}
else {
submitApp.runLocal(appFactory);
}
}
else {
System.err.println("No application specified.");
}
}
}
private class ShutdownAppCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
ApplicationReport[] apps;
WebServicesClient webServicesClient = new WebServicesClient();
if (args.length == 1) {
if (currentApp == null) {
throw new CliException("No application selected");
}
else {
apps = new ApplicationReport[] {currentApp};
}
}
else {
apps = new ApplicationReport[args.length - 1];
for (int i = 1; i < args.length; i++) {
apps[i - 1] = getApplication(args[i]);
if (apps[i - 1] == null) {
throw new CliException("App " + args[i] + " not found!");
}
}
}
for (ApplicationReport app : apps) {
WebResource r = getStramWebResource(webServicesClient, app).path(StramWebServices.PATH_SHUTDOWN);
try {
JSONObject response = webServicesClient.process(r, JSONObject.class, new WebServicesClient.WebServicesHandler<JSONObject>()
{
@Override
public JSONObject process(WebResource webResource, Class<JSONObject> clazz)
{
return webResource.accept(MediaType.APPLICATION_JSON).post(clazz);
}
});
if (consolePresent) {
System.out.println("Shutdown requested: " + response);
}
currentApp = null;
}
catch (Exception e) {
throw new CliException("Failed to request " + r.getURI(), e);
}
}
}
}
private class ListAppsCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
try {
JSONArray jsonArray = new JSONArray();
List<ApplicationReport> appList = getApplicationList();
Collections.sort(appList, new Comparator<ApplicationReport>()
{
@Override
public int compare(ApplicationReport o1, ApplicationReport o2)
{
return o1.getApplicationId().getId() - o2.getApplicationId().getId();
}
});
int totalCnt = 0;
int runningCnt = 0;
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss Z");
for (ApplicationReport ar : appList) {
/*
* This is inefficient, but the list is small enough to pass through the command line, so the extra work is not noticeable.
*/
JSONObject jsonObj = new JSONObject();
jsonObj.put("startTime", sdf.format(new java.util.Date(ar.getStartTime())));
jsonObj.put("id", ar.getApplicationId().getId());
jsonObj.put("name", ar.getName());
jsonObj.put("state", ar.getYarnApplicationState().name());
jsonObj.put("trackingUrl", ar.getTrackingUrl());
jsonObj.put("finalStatus", ar.getFinalApplicationStatus());
totalCnt++;
if (ar.getYarnApplicationState() == YarnApplicationState.RUNNING) {
runningCnt++;
}
if (args.length > 1) {
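// Optional filter: the argument is treated as a case-insensitive substring and
// matched against every field of the app record, so e.g. "RUNNING" would match
// on state and "2014-01" on startTime (illustrative values, not from the source).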
@SuppressWarnings("unchecked")
Iterator<String> iterator = jsonObj.keys();
while (iterator.hasNext()) {
Object value = jsonObj.get(iterator.next());
if (value.toString().matches("(?i).*" + args[1] + ".*")) {
jsonArray.put(jsonObj);
break;
}
}
}
else {
jsonArray.put(jsonObj);
}
}
printJson(jsonArray, "apps");
if (consolePresent) {
System.out.println(runningCnt + " active, total " + totalCnt + " applications.");
}
}
catch (Exception ex) {
throw new CliException("Failed to retrieve application list", ex);
}
}
}
private class KillAppCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
if (args.length == 1) {
if (currentApp == null) {
throw new CliException("No application selected");
}
else {
try {
rmClient.killApplication(currentApp.getApplicationId());
currentApp = null;
}
catch (YarnException e) {
throw new CliException("Failed to kill " + currentApp.getApplicationId(), e);
}
}
if (consolePresent) {
System.out.println("Kill app requested");
}
return;
}
ApplicationReport app = null;
int i = 0;
try {
while (++i < args.length) {
app = getApplication(args[i]);
rmClient.killApplication(app.getApplicationId());
if (app == currentApp) {
currentApp = null;
}
}
if (consolePresent) {
System.out.println("Kill app requested");
}
}
catch (YarnException e) {
throw new CliException("Failed to kill " + ((app == null || app.getApplicationId() == null) ? "unknown application" : app.getApplicationId()) + ". Aborting killing of any additional applications.", e);
}
catch (NumberFormatException nfe) {
throw new CliException("Invalid application Id " + args[i], nfe);
}
catch (NullPointerException npe) {
throw new CliException("Application with Id " + args[i] + " does not seem to be alive!", npe);
}
}
}
private class AliasCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
if (args[1].equals(args[2])) {
throw new CliException("Alias to itself!");
}
aliases.put(args[1], args[2]);
if (consolePresent) {
System.out.println("Alias " + args[1] + " created.");
}
updateCompleter(reader);
}
}
private class SourceCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
processSourceFile(args[1], reader);
if (consolePresent) {
System.out.println("File " + args[1] + " sourced.");
}
}
}
private class ExitCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
if (topLevelHistory != null) {
topLevelHistory.flush();
}
if (changingLogicalPlanHistory != null) {
changingLogicalPlanHistory.flush();
}
System.exit(0);
}
}
private class ListContainersCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
ClientResponse rsp = getResource(StramWebServices.PATH_PHYSICAL_PLAN_CONTAINERS, currentApp);
JSONObject json = rsp.getEntity(JSONObject.class);
if (args.length == 1) {
printJson(json);
}
else {
Object containersObj = json.get("containers");
JSONArray containers;
if (containersObj instanceof JSONArray) {
containers = (JSONArray)containersObj;
}
else {
containers = new JSONArray();
containers.put(containersObj);
}
if (containersObj == null) {
System.out.println("No containers found!");
}
else {
JSONArray resultContainers = new JSONArray();
for (int o = containers.length(); o-- > 0;) {
JSONObject container = containers.getJSONObject(o);
String id = container.getString("id");
if (id != null && !id.isEmpty()) {
for (int argc = args.length; argc-- > 1;) {
String s1 = "0" + args[argc];
String s2 = "_" + args[argc];
if (id.equals(args[argc]) || id.endsWith(s1) || id.endsWith(s2)) {
resultContainers.put(container);
}
}
}
}
printJson(resultContainers, "containers");
}
}
}
}
private class ListOperatorsCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
ClientResponse rsp = getResource(StramWebServices.PATH_PHYSICAL_PLAN_OPERATORS, currentApp);
JSONObject json = rsp.getEntity(JSONObject.class);
if (args.length > 1) {
String singleKey = "" + json.keys().next();
JSONArray matches = new JSONArray();
// filter operators
JSONArray arr;
Object obj = json.get(singleKey);
if (obj instanceof JSONArray) {
arr = (JSONArray)obj;
}
else {
arr = new JSONArray();
arr.put(obj);
}
for (int i = 0; i < arr.length(); i++) {
JSONObject oper = arr.getJSONObject(i);
@SuppressWarnings("unchecked")
Iterator<String> keys = oper.keys();
while (keys.hasNext()) {
if (oper.get(keys.next()).toString().matches("(?i).*" + args[1] + ".*")) {
matches.put(oper);
break;
}
}
}
json.put(singleKey, matches);
}
printJson(json);
}
}
private class ShowPhysicalPlanCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
WebServicesClient webServicesClient = new WebServicesClient();
WebResource r = getStramWebResource(webServicesClient, currentApp).path(StramWebServices.PATH_PHYSICAL_PLAN);
try {
printJson(webServicesClient.process(r, JSONObject.class, new WebServicesClient.WebServicesHandler<JSONObject>()
{
@Override
public JSONObject process(WebResource webResource, Class<JSONObject> clazz)
{
return webResource.accept(MediaType.APPLICATION_JSON).get(clazz);
}
}));
}
catch (Exception e) {
throw new CliException("Failed to request " + r.getURI(), e);
}
}
}
private class KillContainerCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
String containerLongId = getContainerLongId(args[1]);
if (containerLongId == null) {
throw new CliException("Container " + args[1] + " not found");
}
WebServicesClient webServicesClient = new WebServicesClient();
WebResource r = getStramWebResource(webServicesClient, currentApp).path(StramWebServices.PATH_PHYSICAL_PLAN_CONTAINERS).path(containerLongId).path("kill");
try {
JSONObject response = webServicesClient.process(r, JSONObject.class, new WebServicesClient.WebServicesHandler<JSONObject>()
{
@Override
public JSONObject process(WebResource webResource, Class<JSONObject> clazz)
{
return webResource.accept(MediaType.APPLICATION_JSON).post(clazz, new JSONObject());
}
});
if (consolePresent) {
System.out.println("Kill container requested: " + response);
}
}
catch (Exception e) {
throw new CliException("Failed to request " + r.getURI(), e);
}
}
}
private class WaitCommand implements Command
{
@Override
public void execute(String[] args, final ConsoleReader reader) throws Exception
{
if (currentApp == null) {
throw new CliException("No application selected");
}
int timeout = Integer.valueOf(args[1]);
ClientRMHelper.AppStatusCallback cb = new ClientRMHelper.AppStatusCallback()
{
@Override
public boolean exitLoop(ApplicationReport report)
{
System.out.println("current status is: " + report.getYarnApplicationState());
try {
if (reader.getInput().available() > 0) {
return true;
}
}
catch (IOException e) {
LOG.error("Error checking for input.", e);
}
return false;
}
};
try {
boolean result = rmClient.waitForCompletion(currentApp.getApplicationId(), cb, timeout * 1000);
if (!result) {
System.err.println("Application terminated unsucessful.");
}
}
catch (YarnException e) {
throw new CliException("Failed to kill " + currentApp.getApplicationId(), e);
}
}
}
private class StartRecordingCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
String opId = args[1];
String port = null;
if (args.length == 3) {
port = args[2];
}
printJson(recordingsAgent.startRecording(currentApp.getApplicationId().toString(), opId, port));
}
}
private class StopRecordingCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
String opId = args[1];
String port = null;
if (args.length == 3) {
port = args[2];
}
printJson(recordingsAgent.stopRecording(currentApp.getApplicationId().toString(), opId, port));
}
}
private class GetRecordingInfoCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
if (args.length <= 1) {
List<RecordingInfo> recordingInfo = recordingsAgent.getRecordingInfo(currentApp.getApplicationId().toString());
printJson(recordingInfo, "recordings");
}
else if (args.length <= 2) {
String opId = args[1];
List<RecordingInfo> recordingInfo = recordingsAgent.getRecordingInfo(currentApp.getApplicationId().toString(), opId);
printJson(recordingInfo, "recordings");
}
else {
String opId = args[1];
long startTime = Long.valueOf(args[2]);
RecordingInfo recordingInfo = recordingsAgent.getRecordingInfo(currentApp.getApplicationId().toString(), opId, startTime);
printJson(new JSONObject(mapper.writeValueAsString(recordingInfo)));
}
}
}
private class GetAppAttributesCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
if (currentApp == null) {
throw new CliException("No application selected");
}
WebServicesClient webServicesClient = new WebServicesClient();
WebResource r = getStramWebResource(webServicesClient, currentApp).path(StramWebServices.PATH_LOGICAL_PLAN).path("attributes");
if (args.length > 1) {
r = r.queryParam("attributeName", args[1]);
}
try {
JSONObject response = webServicesClient.process(r, JSONObject.class, new WebServicesClient.WebServicesHandler<JSONObject>()
{
@Override
public JSONObject process(WebResource webResource, Class<JSONObject> clazz)
{
return webResource.accept(MediaType.APPLICATION_JSON).get(JSONObject.class);
}
});
printJson(response);
}
catch (Exception e) {
throw new CliException("Failed to request " + r.getURI(), e);
}
}
}
private class GetOperatorAttributesCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
if (currentApp == null) {
throw new CliException("No application selected");
}
WebServicesClient webServicesClient = new WebServicesClient();
WebResource r = getStramWebResource(webServicesClient, currentApp).path(StramWebServices.PATH_LOGICAL_PLAN_OPERATORS).path(args[1]).path("attributes");
if (args.length > 2) {
r = r.queryParam("attributeName", args[2]);
}
try {
JSONObject response = webServicesClient.process(r, JSONObject.class, new WebServicesClient.WebServicesHandler<JSONObject>()
{
@Override
public JSONObject process(WebResource webResource, Class<JSONObject> clazz)
{
return webResource.accept(MediaType.APPLICATION_JSON).get(JSONObject.class);
}
});
printJson(response);
}
catch (Exception e) {
throw new CliException("Failed to request " + r.getURI(), e);
}
}
}
private class GetPortAttributesCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
if (currentApp == null) {
throw new CliException("No application selected");
}
WebServicesClient webServicesClient = new WebServicesClient();
WebResource r = getStramWebResource(webServicesClient, currentApp).path(StramWebServices.PATH_LOGICAL_PLAN_OPERATORS).path(args[1]).path(args[2]).path("attributes");
if (args.length > 3) {
r = r.queryParam("attributeName", args[3]);
}
try {
JSONObject response = webServicesClient.process(r, JSONObject.class, new WebServicesClient.WebServicesHandler<JSONObject>()
{
@Override
public JSONObject process(WebResource webResource, Class<JSONObject> clazz)
{
return webResource.accept(MediaType.APPLICATION_JSON).get(JSONObject.class);
}
});
printJson(response);
}
catch (Exception e) {
throw new CliException("Failed to request " + r.getURI(), e);
}
}
}
private class GetOperatorPropertiesCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
if (currentApp == null) {
throw new CliException("No application selected");
}
WebServicesClient webServicesClient = new WebServicesClient();
WebResource r = getStramWebResource(webServicesClient, currentApp).path(StramWebServices.PATH_LOGICAL_PLAN_OPERATORS).path(args[1]).path("properties");
if (args.length > 2) {
r = r.queryParam("propertyName", args[2]);
}
try {
JSONObject response = webServicesClient.process(r, JSONObject.class, new WebServicesClient.WebServicesHandler<JSONObject>()
{
@Override
public JSONObject process(WebResource webResource, Class<JSONObject> clazz)
{
return webResource.accept(MediaType.APPLICATION_JSON).get(JSONObject.class);
}
});
printJson(response);
}
catch (Exception e) {
throw new CliException("Failed to request " + r.getURI(), e);
}
}
}
private class GetPhysicalOperatorPropertiesCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
if (currentApp == null) {
throw new CliException("No application selected");
}
WebServicesClient webServicesClient = new WebServicesClient();
WebResource r = getStramWebResource(webServicesClient, currentApp).path(StramWebServices.PATH_PHYSICAL_PLAN_OPERATORS).path(args[1]).path("properties");
if (args.length > 2) {
r = r.queryParam("propertyName", args[2]);
}
try {
JSONObject response = webServicesClient.process(r, JSONObject.class, new WebServicesClient.WebServicesHandler<JSONObject>()
{
@Override
public JSONObject process(WebResource webResource, Class<JSONObject> clazz)
{
return webResource.accept(MediaType.APPLICATION_JSON).get(JSONObject.class);
}
});
printJson(response);
}
catch (Exception e) {
throw new CliException("Failed to request " + r.getURI(), e);
}
}
}
private class SetOperatorPropertyCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
if (currentApp == null) {
throw new CliException("No application selected");
}
if (changingLogicalPlan) {
String operatorName = args[1];
String propertyName = args[2];
String propertyValue = args[3];
SetOperatorPropertyRequest request = new SetOperatorPropertyRequest();
request.setOperatorName(operatorName);
request.setPropertyName(propertyName);
request.setPropertyValue(propertyValue);
logicalPlanRequestQueue.add(request);
}
else {
WebServicesClient webServicesClient = new WebServicesClient();
WebResource r = getStramWebResource(webServicesClient, currentApp).path(StramWebServices.PATH_LOGICAL_PLAN_OPERATORS).path(args[1]).path("properties");
final JSONObject request = new JSONObject();
request.put(args[2], args[3]);
JSONObject response = webServicesClient.process(r, JSONObject.class, new WebServicesClient.WebServicesHandler<JSONObject>()
{
@Override
public JSONObject process(WebResource webResource, Class<JSONObject> clazz)
{
return webResource.accept(MediaType.APPLICATION_JSON).post(JSONObject.class, request);
}
});
printJson(response);
}
}
}
private class SetPhysicalOperatorPropertyCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
if (currentApp == null) {
throw new CliException("No application selected");
}
WebServicesClient webServicesClient = new WebServicesClient();
WebResource r = getStramWebResource(webServicesClient, currentApp).path(StramWebServices.PATH_PHYSICAL_PLAN_OPERATORS).path(args[1]).path("properties");
final JSONObject request = new JSONObject();
request.put(args[2], args[3]);
JSONObject response = webServicesClient.process(r, JSONObject.class, new WebServicesClient.WebServicesHandler<JSONObject>()
{
@Override
public JSONObject process(WebResource webResource, Class<JSONObject> clazz)
{
return webResource.accept(MediaType.APPLICATION_JSON).post(JSONObject.class, request);
}
});
printJson(response);
}
}
private class BeginLogicalPlanChangeCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
changingLogicalPlan = true;
reader.setHistory(changingLogicalPlanHistory);
}
}
private class ShowLogicalPlanCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
String[] newArgs = new String[args.length - 1];
System.arraycopy(args, 1, newArgs, 0, args.length - 1);
ShowLogicalPlanCommandLineInfo commandLineInfo = getShowLogicalPlanCommandLineInfo(newArgs);
Configuration config = StramAppLauncher.getConfig(null, null);
if (commandLineInfo.libjars != null) {
commandLineInfo.libjars = expandCommaSeparatedFiles(commandLineInfo.libjars);
config.set(StramAppLauncher.LIBJARS_CONF_KEY_NAME, commandLineInfo.libjars);
}
if (commandLineInfo.args.length >= 2) {
String jarfile = expandFileName(commandLineInfo.args[0], true);
String appName = commandLineInfo.args[1];
StramAppLauncher submitApp = getStramAppLauncher(jarfile, config);
submitApp.loadDependencies();
List<AppFactory> matchingAppFactories = getMatchingAppFactories(submitApp, appName);
if (matchingAppFactories == null || matchingAppFactories.isEmpty()) {
throw new CliException("No application in jar file matches '" + appName + "'");
}
else if (matchingAppFactories.size() > 1) {
throw new CliException("More than one application in jar file match '" + appName + "'");
}
else {
AppFactory appFactory = matchingAppFactories.get(0);
LogicalPlan logicalPlan = submitApp.prepareDAG(appFactory);
Map<String, Object> map = new HashMap<String, Object>();
map.put("applicationName", appFactory.getName());
map.put("logicalPlan", LogicalPlanSerializer.convertToMap(logicalPlan));
printJson(map);
}
}
else if (commandLineInfo.args.length == 1) {
String jarfile = expandFileName(commandLineInfo.args[0], true);
StramAppLauncher submitApp = getStramAppLauncher(jarfile, config);
submitApp.loadDependencies();
List<Map<String, Object>> appList = new ArrayList<Map<String, Object>>();
List<AppFactory> appFactoryList = submitApp.getBundledTopologies();
for (AppFactory appFactory : appFactoryList) {
Map<String, Object> m = new HashMap<String, Object>();
m.put("name", appFactory.getName());
appList.add(m);
}
printJson(appList, "applications");
}
else {
if (currentApp == null) {
throw new CliException("No application selected");
}
WebServicesClient webServicesClient = new WebServicesClient();
WebResource r = getStramWebResource(webServicesClient, currentApp).path(StramWebServices.PATH_LOGICAL_PLAN);
JSONObject response = webServicesClient.process(r, JSONObject.class, new WebServicesClient.WebServicesHandler<JSONObject>()
{
@Override
public JSONObject process(WebResource webResource, Class<JSONObject> clazz)
{
return webResource.accept(MediaType.APPLICATION_JSON).get(JSONObject.class);
}
});
printJson(response);
}
}
}
private class DumpPropertiesFileCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
String outfilename = expandFileName(args[1], false);
if (args.length > 3) {
String jarfile = args[2];
String appName = args[3];
StramAppLauncher submitApp = getStramAppLauncher(jarfile, null);
submitApp.loadDependencies();
List<AppFactory> matchingAppFactories = getMatchingAppFactories(submitApp, appName);
if (matchingAppFactories == null || matchingAppFactories.isEmpty()) {
throw new CliException("No application in jar file matches '" + appName + "'");
}
else if (matchingAppFactories.size() > 1) {
throw new CliException("More than one application in jar file match '" + appName + "'");
}
else {
AppFactory appFactory = matchingAppFactories.get(0);
LogicalPlan logicalPlan = submitApp.prepareDAG(appFactory);
File file = new File(outfilename);
if (!file.exists()) {
file.createNewFile();
}
LogicalPlanSerializer.convertToProperties(logicalPlan).save(file);
}
}
else {
if (currentApp == null) {
throw new CliException("No application selected");
}
WebServicesClient webServicesClient = new WebServicesClient();
WebResource r = getStramWebResource(webServicesClient, currentApp).path(StramWebServices.PATH_LOGICAL_PLAN);
JSONObject response = webServicesClient.process(r, JSONObject.class, new WebServicesClient.WebServicesHandler<JSONObject>()
{
@Override
public JSONObject process(WebResource webResource, Class<JSONObject> clazz)
{
return webResource.accept(MediaType.APPLICATION_JSON).get(JSONObject.class);
}
});
File file = new File(outfilename);
if (!file.exists()) {
file.createNewFile();
}
LogicalPlanSerializer.convertToProperties(response).save(file);
}
System.out.println("Property file is saved at " + outfilename);
}
}
private class CreateOperatorCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
String operatorName = args[1];
String className = args[2];
CreateOperatorRequest request = new CreateOperatorRequest();
request.setOperatorName(operatorName);
request.setOperatorFQCN(className);
logicalPlanRequestQueue.add(request);
}
}
private class RemoveOperatorCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
String operatorName = args[1];
RemoveOperatorRequest request = new RemoveOperatorRequest();
request.setOperatorName(operatorName);
logicalPlanRequestQueue.add(request);
}
}
private class CreateStreamCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
String streamName = args[1];
String sourceOperatorName = args[2];
String sourcePortName = args[3];
String sinkOperatorName = args[4];
String sinkPortName = args[5];
CreateStreamRequest request = new CreateStreamRequest();
request.setStreamName(streamName);
request.setSourceOperatorName(sourceOperatorName);
request.setSinkOperatorName(sinkOperatorName);
request.setSourceOperatorPortName(sourcePortName);
request.setSinkOperatorPortName(sinkPortName);
logicalPlanRequestQueue.add(request);
}
}
private class AddStreamSinkCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
String streamName = args[1];
String sinkOperatorName = args[2];
String sinkPortName = args[3];
AddStreamSinkRequest request = new AddStreamSinkRequest();
request.setStreamName(streamName);
request.setSinkOperatorName(sinkOperatorName);
request.setSinkOperatorPortName(sinkPortName);
logicalPlanRequestQueue.add(request);
}
}
private class RemoveStreamCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
String streamName = args[1];
RemoveStreamRequest request = new RemoveStreamRequest();
request.setStreamName(streamName);
logicalPlanRequestQueue.add(request);
}
}
private class SetOperatorAttributeCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
String operatorName = args[1];
String attributeName = args[2];
String attributeValue = args[3];
SetOperatorAttributeRequest request = new SetOperatorAttributeRequest();
request.setOperatorName(operatorName);
request.setAttributeName(attributeName);
request.setAttributeValue(attributeValue);
logicalPlanRequestQueue.add(request);
}
}
private class SetStreamAttributeCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
String streamName = args[1];
String attributeName = args[2];
String attributeValue = args[3];
SetStreamAttributeRequest request = new SetStreamAttributeRequest();
request.setStreamName(streamName);
request.setAttributeName(attributeName);
request.setAttributeValue(attributeValue);
logicalPlanRequestQueue.add(request);
}
}
private class SetPortAttributeCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
String operatorName = args[1];
String attributeName = args[2];
String attributeValue = args[3];
SetPortAttributeRequest request = new SetPortAttributeRequest();
request.setOperatorName(operatorName);
request.setAttributeName(attributeName);
request.setAttributeValue(attributeValue);
logicalPlanRequestQueue.add(request);
}
}
private class AbortCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
logicalPlanRequestQueue.clear();
changingLogicalPlan = false;
reader.setHistory(topLevelHistory);
}
}
private class SubmitCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
if (logicalPlanRequestQueue.isEmpty()) {
throw new CliException("Nothing to submit. Type \"abort\" to abort change");
}
WebServicesClient webServicesClient = new WebServicesClient();
WebResource r = getStramWebResource(webServicesClient, currentApp).path(StramWebServices.PATH_LOGICAL_PLAN);
try {
final Map<String, Object> m = new HashMap<String, Object>();
ObjectMapper mapper = new ObjectMapper();
m.put("requests", logicalPlanRequestQueue);
final JSONObject jsonRequest = new JSONObject(mapper.writeValueAsString(m));
JSONObject response = webServicesClient.process(r, JSONObject.class, new WebServicesClient.WebServicesHandler<JSONObject>()
{
@Override
public JSONObject process(WebResource webResource, Class<JSONObject> clazz)
{
return webResource.accept(MediaType.APPLICATION_JSON).post(JSONObject.class, jsonRequest);
}
});
printJson(response);
}
catch (Exception e) {
throw new CliException("Failed to request " + r.getURI(), e);
}
logicalPlanRequestQueue.clear();
changingLogicalPlan = false;
reader.setHistory(topLevelHistory);
}
}
private class ShowQueueCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
printJson(logicalPlanRequestQueue, "queue");
if (consolePresent) {
System.out.println("Total operations in queue: " + logicalPlanRequestQueue.size());
}
}
}
private class BeginMacroCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
String name = args[1];
if (macros.containsKey(name) || aliases.containsKey(name)) {
System.err.println("Name '" + name + "' already exists.");
return;
}
try {
List<String> commands = new ArrayList<String>();
while (true) {
String line = reader.readLine("macro def (" + name + ") > ");
if (line.equals("end")) {
macros.put(name, commands);
updateCompleter(reader);
if (consolePresent) {
System.out.println("Macro '" + name + "' created.");
}
return;
}
else if (line.equals("abort")) {
System.err.println("Aborted");
return;
}
else {
commands.add(line);
}
}
}
catch (IOException ex) {
System.err.println("Aborted");
}
}
}
private class SetPagerCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
if (args[1].equals("off")) {
pagerCommand = null;
}
else if (args[1].equals("on")) {
pagerCommand = "less -F -X -r";
}
else {
throw new CliException("set-pager parameter is either on or off.");
}
}
}
private class GetAppInfoCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
ApplicationReport appReport;
if (args.length > 1) {
appReport = getApplication(args[1]);
}
else {
if (currentApp == null) {
throw new CliException("No application selected");
}
appReport = currentApp;
}
WebServicesClient webServicesClient = new WebServicesClient();
WebResource r = getStramWebResource(webServicesClient, appReport).path(StramWebServices.PATH_INFO);
JSONObject response = webServicesClient.process(r, JSONObject.class, new WebServicesClient.WebServicesHandler<JSONObject>()
{
@Override
public JSONObject process(WebResource webResource, Class<JSONObject> clazz)
{
return webResource.accept(MediaType.APPLICATION_JSON).get(JSONObject.class);
}
});
printJson(response);
}
}
private class CreateAlertCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
String fileName = expandFileName(args[2], true);
File f = new File(fileName);
if (!f.canRead()) {
throw new CliException("Cannot read " + fileName);
}
DataInputStream dis = new DataInputStream(new FileInputStream(f));
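// Read the whole alert definition file as raw JSON. This assumes available()
// reports the full remaining size, which generally holds for local FileInputStreams.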
byte[] buffer = new byte[dis.available()];
dis.readFully(buffer);
final JSONObject json = new JSONObject(new String(buffer));
dis.close();
WebServicesClient webServicesClient = new WebServicesClient();
WebResource r = getStramWebResource(webServicesClient, currentApp).path(StramWebServices.PATH_ALERTS + "/" + args[1]);
try {
JSONObject response = webServicesClient.process(r, JSONObject.class, new WebServicesClient.WebServicesHandler<JSONObject>()
{
@Override
public JSONObject process(WebResource webResource, Class<JSONObject> clazz)
{
return webResource.accept(MediaType.APPLICATION_JSON).put(clazz, json);
}
});
printJson(response);
}
catch (Exception e) {
throw new CliException("Failed to request " + r.getURI(), e);
}
}
}
private class DeleteAlertCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
WebServicesClient webServicesClient = new WebServicesClient();
WebResource r = getStramWebResource(webServicesClient, currentApp).path(StramWebServices.PATH_ALERTS + "/" + args[1]);
try {
JSONObject response = webServicesClient.process(r, JSONObject.class, new WebServicesClient.WebServicesHandler<JSONObject>()
{
@Override
public JSONObject process(WebResource webResource, Class<JSONObject> clazz)
{
return webResource.accept(MediaType.APPLICATION_JSON).delete(clazz);
}
});
printJson(response);
}
catch (Exception e) {
throw new CliException("Failed to request " + r.getURI(), e);
}
}
}
private class ListAlertsCommand implements Command
{
@Override
public void execute(String[] args, ConsoleReader reader) throws Exception
{
ClientResponse rsp = getResource(StramWebServices.PATH_ALERTS, currentApp);
JSONObject json = rsp.getEntity(JSONObject.class);
printJson(json);
}
}
@SuppressWarnings("static-access")
public static Options getLaunchCommandLineOptions()
{
Options options = new Options();
Option local = new Option("local", "Run application in local mode.");
Option configFile = OptionBuilder.withArgName("configuration file").hasArg().withDescription("Specify an application configuration file.").create("conf");
Option defProperty = OptionBuilder.withArgName("property=value").hasArg().withDescription("Use value for given property.").create("D");
Option libjars = OptionBuilder.withArgName("comma separated list of jars").hasArg().withDescription("Specify comma separated jar files to include in the classpath.").create("libjars");
Option files = OptionBuilder.withArgName("comma separated list of files").hasArg().withDescription("Specify comma separated files to be copied to the cluster.").create("files");
Option archives = OptionBuilder.withArgName("comma separated list of archives").hasArg().withDescription("Specify comma separated archives to be unarchived on the compute machines.").create("archives");
Option license = OptionBuilder.withArgName("license file").hasArg().withDescription("Specify the license file to launch the application").create("license");
options.addOption(local);
options.addOption(configFile);
options.addOption(defProperty);
options.addOption(libjars);
options.addOption(files);
options.addOption(archives);
options.addOption(license);
return options;
}
private static LaunchCommandLineInfo getLaunchCommandLineInfo(String[] args) throws ParseException
{
CommandLineParser parser = new PosixParser();
LaunchCommandLineInfo result = new LaunchCommandLineInfo();
CommandLine line = parser.parse(getLaunchCommandLineOptions(), args);
result.localMode = line.hasOption("local");
result.configFile = line.getOptionValue("conf");
String[] defs = line.getOptionValues("D");
if (defs != null) {
result.overrideProperties = new HashMap<String, String>();
for (String def : defs) {
int equal = def.indexOf('=');
if (equal < 0) {
result.overrideProperties.put(def, null);
}
else {
result.overrideProperties.put(def.substring(0, equal), def.substring(equal + 1));
}
}
}
result.libjars = line.getOptionValue("libjars");
result.files = line.getOptionValue("files");
result.archives = line.getOptionValue("archives");
result.licenseFile = line.getOptionValue("license");
result.args = line.getArgs();
return result;
}
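// Illustrative parse (names are hypothetical, not from the original source):
// arguments "-local -Dfoo=bar -libjars util.jar myapp.jar MyApplication"
// yield localMode=true, overrideProperties={foo=bar}, libjars="util.jar"
// and args=["myapp.jar", "MyApplication"].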
private static class LaunchCommandLineInfo
{
boolean localMode;
String configFile;
Map<String, String> overrideProperties;
String libjars;
String files;
String archives;
String licenseFile;
String[] args;
}
@SuppressWarnings("static-access")
public static Options getShowLogicalPlanCommandLineOptions()
{
Options options = new Options();
Option libjars = OptionBuilder.withArgName("comma separated list of jars").hasArg().withDescription("Specify comma separated jar files to include in the classpath.").create("libjars");
options.addOption(libjars);
return options;
}
private static ShowLogicalPlanCommandLineInfo getShowLogicalPlanCommandLineInfo(String[] args) throws ParseException
{
CommandLineParser parser = new PosixParser();
ShowLogicalPlanCommandLineInfo result = new ShowLogicalPlanCommandLineInfo();
CommandLine line = parser.parse(getShowLogicalPlanCommandLineOptions(), args);
result.libjars = line.getOptionValue("libjars");
result.args = line.getArgs();
return result;
}
private static class ShowLogicalPlanCommandLineInfo
{
String libjars;
String[] args;
}
public static void main(String[] args) throws Exception
{
DTCli shell = new DTCli();
shell.init(args);
shell.run();
if (lastCommandError) {
System.exit(1);
}
}
}
|
Close the source file properly
|
engine/src/main/java/com/datatorrent/stram/cli/DTCli.java
|
Close the source file properly
|
<ide><path>engine/src/main/java/com/datatorrent/stram/cli/DTCli.java
<ide> {
<ide> boolean consolePresentSaved = consolePresent;
<ide> consolePresent = false;
<del> FileLineReader fr = new FileLineReader(fileName);
<add> FileLineReader fr = null;
<ide> String line;
<ide> try {
<add> fr = new FileLineReader(fileName);
<ide> while ((line = fr.readLine("")) != null) {
<ide> processLine(line, fr, true);
<ide> }
<ide> }
<ide> finally {
<del> fr.close();
<ide> consolePresent = consolePresentSaved;
<add> if (fr != null) {
<add> fr.close();
<add> }
<ide> }
<ide> }
<ide>
|
|
JavaScript
|
mit
|
41b33d67561c20be8ea1fc8c159acb79572407a9
| 0 |
timoxley/slacker
|
"use strict"
var fork = require('child_process').fork
var fs = require('fs')
var path = require('path')
var domain = require('domain')
module.exports = Slacker
/**
* Create a Slacker instance for `service`.
*
* @param {String} service commandline path and arguments to run.
* @return {Slacker}
* @api public
*/
function Slacker(service) {
if (!(this instanceof Slacker)) return new Slacker(service)
this._service = service
this._timeout = 10000
}
/**
* Close down the service after `timeout` if
* there are no active connections.
*
* @param {Number} value timeout in milliseconds.
* @return {Slacker}
* @api public
*/
Slacker.prototype.timeout = function(value) {
this._timeout = parseInt(value, 10)
return this
}
/**
* Start listening on `port`. Calls `fn` when listening.
*
* @param {Number} port
* @param {Function} fn
* @return {Slacker}
* @api public
*/
Slacker.prototype.listen = function(port, fn) {
if (typeof port === 'function') {
fn = port
port = null
}
this._port = port = port || 0
fn = fn || function() {}
// here we go
spawn(this, fn)
return this
}
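// Example usage (illustrative; "node server.js" is a hypothetical service command):
//   Slacker('node server.js')
//     .timeout(30000)
//     .listen(3000, function (err, port) {
//       // the service is reachable on `port` once this callback fires
//     })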
/**
* Kill the slacker. Disconnects everything.
*
* @api public
*/
Slacker.prototype.end =
Slacker.prototype.close = function() {
this._isClosed = true
this._child && this._child.disconnect()
}
// TODO: tidy this
function spawn(parent, fn) {
var port = parent._port
var timeout = parent._timeout
var args = parent._service
parent._child = undefined
domain
.create()
.on('error', function onError(err) {
console.error(err)
parent._child && parent._child.kill()
process.exit(1)
})
.run(function spawnProcess() {
var cmd = args.split(' ')[0]
cmd = path.normalize(cmd)
fs.exists(cmd, function(exists) {
if (parent._isClosed) return
if (!exists) return fn(new Error('command not found: ' + cmd))
parent._child = fork(__dirname + '/bin/spawn', [port, timeout].concat(args), {env: process.env})
.on('message', function onMessage(msg) {
if (parent._isClosed) return
if (port && msg != port) return
this.removeListener('listening', onMessage)
fn(null, parseInt(msg, 10))
})
process.once('exit', function() {
parent.close()
})
})
})
}
|
index.js
|
"use strict"
var fork = require('child_process').fork
var fs = require('fs')
var domain = require('domain')
module.exports = Slacker
/**
* Create a Slacker instance for `service`.
*
* @param {String} service commandline path and arguments to run.
* @return {Slacker}
* @api public
*/
function Slacker(service) {
if (!(this instanceof Slacker)) return new Slacker(service)
this._service = service
this._timeout = 10000
}
/**
* Close down the service after `timeout` if
* there are no active connections.
*
* @param {Number} value timeout in milliseconds.
* @return {Slacker}
* @api public
*/
Slacker.prototype.timeout = function(value) {
this._timeout = parseInt(value, 10)
return this
}
/**
* Start listening on `port`. Calls `fn` when listening.
*
* @param {Number} port
* @param {Function} fn
* @return {Slacker}
* @api public
*/
Slacker.prototype.listen = function(port, fn) {
if (typeof port === 'function') {
fn = port
port = null
}
this._port = port = port || 0
fn = fn || function() {}
// here we go
spawn(this, fn)
return this
}
/**
* Kill the slacker. Disconnects everything.
*
* @api public
*/
Slacker.prototype.end =
Slacker.prototype.close = function() {
this._isClosed = true
this._child && this._child.disconnect()
}
// TODO: tidy this
function spawn(parent, fn) {
var port = parent._port
var timeout = parent._timeout
var args = parent._service
parent._child = undefined
domain
.create()
.on('error', function onError(err) {
console.error(err)
parent._child && parent._child.kill()
process.exit(1)
})
.run(function spawnProcess() {
var cmd = args.split(' ')[0]
fs.exists(cmd, function(exists) {
if (parent._isClosed) return
if (!exists) return fn(new Error('command not found: ' + cmd))
parent._child = fork(__dirname + '/bin/spawn', [port, timeout].concat(args), {env: process.env})
.on('message', function onMessage(msg) {
if (parent._isClosed) return
if (port && msg != port) return
this.removeListener('listening', onMessage)
fn(null, parseInt(msg, 10))
})
process.once('exit', function() {
parent.close()
})
})
})
}
|
Normalize command path before checking existence.
|
index.js
|
Normalize command path before checking existence.
|
<ide><path>index.js
<ide>
<ide> var fork = require('child_process').fork
<ide> var fs = require('fs')
<add>var path = require('path')
<ide> var domain = require('domain')
<ide>
<ide> module.exports = Slacker
<ide> })
<ide> .run(function spawnProcess() {
<ide> var cmd = args.split(' ')[0]
<add> cmd = path.normalize(cmd)
<ide> fs.exists(cmd, function(exists) {
<ide> if (parent._isClosed) return
<ide> if (!exists) return fn(new Error('command not found: ' + cmd))
|
|
Java
|
mit
|
2c8d177d074a0cb3671e10b20f927f461ea8c302
| 0 |
RedbackThomson/OsuCelebrity,OsuCelebrity/OsuCelebrity
|
package me.reddev.osucelebrity.osu;
import static org.bytedeco.javacpp.opencv_core.cvSize;
import static org.bytedeco.javacpp.opencv_imgproc.CV_TM_CCOEFF_NORMED;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import me.reddev.osucelebrity.osu.OsuApplication.OsuApplicationSettings;
import org.bytedeco.javacpp.DoublePointer;
import org.bytedeco.javacpp.opencv_core;
import org.bytedeco.javacpp.opencv_core.CvPoint;
import org.bytedeco.javacpp.opencv_core.CvSize;
import org.bytedeco.javacpp.opencv_core.IplImage;
import org.bytedeco.javacpp.opencv_imgproc;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.Java2DFrameConverter;
import org.bytedeco.javacv.OpenCVFrameConverter.ToIplImage;
import java.awt.Rectangle;
import java.awt.Robot;
import java.awt.color.ColorSpace;
import java.awt.event.InputEvent;
import java.awt.image.BufferedImage;
import java.awt.image.ColorConvertOp;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import javax.imageio.ImageIO;
import javax.inject.Inject;
@Slf4j
@RequiredArgsConstructor(onConstructor = @__(@Inject))
public class OsuRobot {
private IplImage downloadButton;
private IplImage downloadButton2;
private IplImage downloadButton3;
private IplImage redownloadButton;
private IplImage statusWindowMarker;
private Robot robot;
private final OsuApplicationSettings settings;
List<Object> doNotGarbageCollect = new ArrayList<>();
boolean loaded = false;
{
try {
downloadButton = convertAndKeepInMemory(toGrayScale(ImageIO.read(new File("download.png"))));
downloadButton2 =
convertAndKeepInMemory(toGrayScale(ImageIO.read(new File("download2.png"))));
downloadButton3 =
convertAndKeepInMemory(toGrayScale(ImageIO.read(new File("download3.png"))));
redownloadButton =
convertAndKeepInMemory(toGrayScale(ImageIO.read(new File("redownload.png"))));
statusWindowMarker =
convertAndKeepInMemory(toGrayScale(ImageIO.read(ClassLoader
.getSystemResourceAsStream("statusWindowMarker.png"))));
robot = new Robot();
robot.setAutoDelay(0);
loaded = true;
} catch (Exception e) {
log.warn("Error while initializing image processing", e);
}
}
private IplImage convertAndKeepInMemory(BufferedImage original) {
Java2DFrameConverter converter1 = new Java2DFrameConverter();
ToIplImage converter2 = new ToIplImage();
// we need to keep these from being garbage collected
Frame frame = converter1.convert(original);
doNotGarbageCollect.add(frame);
return converter2.convert(frame);
}
static final ColorConvertOp CONVERT_TO_GRAYSCALE = new ColorConvertOp(
ColorSpace.getInstance(ColorSpace.CS_GRAY), null);
static BufferedImage toGrayScale(BufferedImage colored) {
BufferedImage gray =
new BufferedImage(colored.getWidth(), colored.getHeight(), BufferedImage.TYPE_BYTE_GRAY);
CONVERT_TO_GRAYSCALE.filter(colored, gray);
return gray;
}
boolean findAndClick(IplImage input1, IplImage target1) {
int iwidth = input1.width() - target1.width() + 1;
int iheight = input1.height() - target1.height() + 1;
double detectionScore;
try (DoublePointer min = new DoublePointer(1);
DoublePointer max = min;
CvPoint minLoc = new CvPoint(2);
CvPoint detectionLoc = new CvPoint(2);
CvSize cvSize = cvSize(iwidth, iheight);) {
IplImage map = IplImage.create(cvSize, 32, 1);
opencv_imgproc.cvMatchTemplate(input1, target1, map, CV_TM_CCOEFF_NORMED);
opencv_core.cvMinMaxLoc(map, min, max, minLoc, detectionLoc, null);
map.release();
detectionScore = max.get(0);
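// CV_TM_CCOEFF_NORMED produces a normalized correlation score in [-1, 1];
// 0.9 is treated here as the "template found" threshold (presumably tuned empirically).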
if (detectionScore < 0.9) {
return false;
}
int clickX = detectionLoc.x() + target1.width() / 2 + settings.getOsuClientXOffset();
int clickY = detectionLoc.y() + target1.height() / 2 + settings.getOsuClientYOffset();
log.debug("Score {} x {} y {}", detectionScore, clickX, clickY);
robot.mouseMove(clickX, clickY);
Thread.sleep(1);
robot.mouseMove(clickX, clickY);
robot.mousePress(InputEvent.BUTTON1_MASK);
robot.mouseRelease(InputEvent.BUTTON1_MASK);
Thread.sleep(100);
robot.mouseMove(clickX + 100, clickY + 100);
return true;
} catch (RuntimeException e) {
throw e;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
/**
* Takes a screenshot and tries to find all images.
*/
public void findImages() {
if (!loaded) {
return;
}
try {
Rectangle region =
new Rectangle(settings.getOsuClientXOffset(), settings.getOsuClientYOffset(),
settings.getOsuClientWidth(), settings.getOsuClientHeight());
IplImage screenshot = getScreenshot(region);
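// For each known button template: click it if it appears on screen, wait,
// re-capture, and click once more in case a second confirmation is shown.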
for (IplImage needle : new IplImage[] {downloadButton, downloadButton2, downloadButton3,
redownloadButton, statusWindowMarker}) {
if (findAndClick(screenshot, needle)) {
Thread.sleep(1000);
screenshot = getScreenshot(region);
if (findAndClick(screenshot, needle)) {
Thread.sleep(1000);
screenshot = getScreenshot(region);
}
}
}
screenshot.release();
} catch (Exception e) {
log.error("exception while trying to find", e);
}
}
private IplImage getScreenshot(Rectangle region) throws IOException {
BufferedImage bufferedScreenshot = toGrayScale(robot.createScreenCapture(region));
// writing this image is very fast, so I'd like to keep it for adjusting offsets
try {
ImageIO.write(bufferedScreenshot, "BMP", new File("screenshot.bmp"));
} catch (Exception e) {
log.debug("error writing screenshot", e);
}
Java2DFrameConverter converter1 = new Java2DFrameConverter();
ToIplImage converter2 = new ToIplImage();
Frame frame = converter1.convert(bufferedScreenshot);
IplImage screenshot = converter2.convert(frame);
return screenshot;
}
}
|
osuCelebrity-osu/src/main/java/me/reddev/osucelebrity/osu/OsuRobot.java
|
package me.reddev.osucelebrity.osu;
import static org.bytedeco.javacpp.opencv_core.cvSize;
import static org.bytedeco.javacpp.opencv_imgproc.CV_TM_CCOEFF_NORMED;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import me.reddev.osucelebrity.osu.OsuApplication.OsuApplicationSettings;
import org.bytedeco.javacpp.DoublePointer;
import org.bytedeco.javacpp.opencv_core;
import org.bytedeco.javacpp.opencv_core.CvPoint;
import org.bytedeco.javacpp.opencv_core.CvSize;
import org.bytedeco.javacpp.opencv_core.IplImage;
import org.bytedeco.javacpp.opencv_imgproc;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.Java2DFrameConverter;
import org.bytedeco.javacv.OpenCVFrameConverter.ToIplImage;
import java.awt.Rectangle;
import java.awt.Robot;
import java.awt.color.ColorSpace;
import java.awt.event.InputEvent;
import java.awt.image.BufferedImage;
import java.awt.image.ColorConvertOp;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import javax.imageio.ImageIO;
import javax.inject.Inject;
@Slf4j
@RequiredArgsConstructor(onConstructor = @__(@Inject))
public class OsuRobot {
private IplImage downloadButton;
private IplImage downloadButton2;
private IplImage downloadButton3;
private IplImage redownloadButton;
private IplImage statusWindowMarker;
private Robot robot;
private final OsuApplicationSettings settings;
List<Object> doNotGarbageCollect = new ArrayList<>();
boolean loaded = false;
{
try {
downloadButton = convertAndKeepInMemory(toGrayScale(ImageIO.read(new File("download.png"))));
downloadButton2 =
convertAndKeepInMemory(toGrayScale(ImageIO.read(new File("download2.png"))));
downloadButton3 =
convertAndKeepInMemory(toGrayScale(ImageIO.read(new File("download3.png"))));
redownloadButton =
convertAndKeepInMemory(toGrayScale(ImageIO.read(new File("redownload.png"))));
statusWindowMarker =
convertAndKeepInMemory(toGrayScale(ImageIO.read(ClassLoader
.getSystemResourceAsStream("statusWindowMarker.png"))));
robot = new Robot();
robot.setAutoDelay(0);
loaded = true;
} catch (Exception e) {
log.warn("Error while initializing image processing", e);
}
}
private IplImage convertAndKeepInMemory(BufferedImage original) {
Java2DFrameConverter converter1 = new Java2DFrameConverter();
ToIplImage converter2 = new ToIplImage();
// we need to keep these from being garbage collected
Frame frame = converter1.convert(original);
doNotGarbageCollect.add(frame);
return converter2.convert(frame);
}
static final ColorConvertOp CONVERT_TO_GRAYSCALE = new ColorConvertOp(
ColorSpace.getInstance(ColorSpace.CS_GRAY), null);
static BufferedImage toGrayScale(BufferedImage colored) {
BufferedImage gray =
new BufferedImage(colored.getWidth(), colored.getHeight(), BufferedImage.TYPE_BYTE_GRAY);
CONVERT_TO_GRAYSCALE.filter(colored, gray);
return gray;
}
boolean findAndClick(IplImage input1, IplImage target1) {
int iwidth = input1.width() - target1.width() + 1;
int iheight = input1.height() - target1.height() + 1;
double detectionScore;
try (DoublePointer min = new DoublePointer(1);
DoublePointer max = min;
CvPoint minLoc = new CvPoint(2);
CvPoint detectionLoc = new CvPoint(2);
CvSize cvSize = cvSize(iwidth, iheight);) {
IplImage map = IplImage.create(cvSize, 32, 1);
opencv_imgproc.cvMatchTemplate(input1, target1, map, CV_TM_CCOEFF_NORMED);
opencv_core.cvMinMaxLoc(map, min, max, minLoc, detectionLoc, null);
map.release();
detectionScore = max.get(0);
if (detectionScore < 0.9) {
return false;
}
int clickX = detectionLoc.x() + target1.width() / 2 + settings.getOsuClientXOffset();
int clickY = detectionLoc.y() + target1.height() / 2 + settings.getOsuClientYOffset();
log.debug("Score {} x {} y {}", detectionScore, clickX, clickY);
robot.mouseMove(clickX, clickY);
Thread.sleep(1);
robot.mouseMove(clickX, clickY);
robot.mousePress(InputEvent.BUTTON1_MASK);
robot.mouseRelease(InputEvent.BUTTON1_MASK);
return true;
} catch (RuntimeException e) {
throw e;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
/**
* Takes a screenshot and tries to find all images.
*/
public void findImages() {
if (!loaded) {
return;
}
try {
Rectangle region =
new Rectangle(settings.getOsuClientXOffset(), settings.getOsuClientYOffset(),
settings.getOsuClientWidth(), settings.getOsuClientHeight());
IplImage screenshot = getScreenshot(region);
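// Check each button template in turn; after a successful click, wait a second for the UI to
// update, re-capture the screen, and check the same template once more.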
for (IplImage needle : new IplImage[] {downloadButton, downloadButton2, downloadButton3,
redownloadButton, statusWindowMarker}) {
if (findAndClick(screenshot, needle)) {
Thread.sleep(1000);
screenshot = getScreenshot(region);
if (findAndClick(screenshot, needle)) {
Thread.sleep(1000);
screenshot = getScreenshot(region);
}
}
}
screenshot.release();
} catch (Exception e) {
log.error("exception while trying to find", e);
}
}
private IplImage getScreenshot(Rectangle region) throws IOException {
BufferedImage bufferedScreenshot = toGrayScale(robot.createScreenCapture(region));
// writing this image is very fast, so I'd like to keep it for adjusting offsets
try {
ImageIO.write(bufferedScreenshot, "BMP", new File("screenshot.bmp"));
} catch (Exception e) {
log.debug("error writing screenshot", e);
}
Java2DFrameConverter converter1 = new Java2DFrameConverter();
ToIplImage converter2 = new ToIplImage();
Frame frame = converter1.convert(bufferedScreenshot);
IplImage screenshot = converter2.convert(frame);
return screenshot;
}
}
|
OsuRobot: Move cursor away after clicking.
For clicks which need to be executed twice, we need to be able
to see what's underneath after we click.
|
osuCelebrity-osu/src/main/java/me/reddev/osucelebrity/osu/OsuRobot.java
|
OsuRobot: Move cursor away after clicking.
|
<ide><path>suCelebrity-osu/src/main/java/me/reddev/osucelebrity/osu/OsuRobot.java
<ide>
<ide> robot.mousePress(InputEvent.BUTTON1_MASK);
<ide> robot.mouseRelease(InputEvent.BUTTON1_MASK);
<add>
<add> Thread.sleep(100);
<add> robot.mouseMove(clickX + 100, clickY + 100);
<ide> return true;
<ide> } catch (RuntimeException e) {
<ide> throw e;
|
|
Java
|
apache-2.0
|
c5236e0da96eef709763988540f2931b15ab7684
| 0 |
googlemaps/android-maps-utils,googlemaps/android-maps-utils,googlemaps/android-maps-utils
|
/*
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.maps.android.clustering.view;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.TimeInterpolator;
import android.animation.ValueAnimator;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.Color;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.LayerDrawable;
import android.graphics.drawable.ShapeDrawable;
import android.graphics.drawable.shapes.OvalShape;
import android.os.Build;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.os.MessageQueue;
import android.util.SparseArray;
import android.view.ViewGroup;
import android.view.animation.DecelerateInterpolator;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.Projection;
import com.google.android.gms.maps.model.BitmapDescriptor;
import com.google.android.gms.maps.model.BitmapDescriptorFactory;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.LatLngBounds;
import com.google.android.gms.maps.model.Marker;
import com.google.android.gms.maps.model.MarkerOptions;
import com.google.maps.android.MarkerManager;
import com.google.maps.android.R;
import com.google.maps.android.clustering.Cluster;
import com.google.maps.android.clustering.ClusterItem;
import com.google.maps.android.clustering.ClusterManager;
import com.google.maps.android.geometry.Point;
import com.google.maps.android.projection.SphericalMercatorProjection;
import com.google.maps.android.ui.IconGenerator;
import com.google.maps.android.ui.SquareTextView;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
/**
* The default view for a ClusterManager. Markers are animated in and out of clusters.
*/
public class DefaultClusterRenderer<T extends ClusterItem> implements ClusterRenderer<T> {
private static final boolean SHOULD_ANIMATE = Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB;
private final GoogleMap mMap;
private final IconGenerator mIconGenerator;
private final ClusterManager<T> mClusterManager;
private final float mDensity;
private boolean mAnimate;
private static final int[] BUCKETS = {10, 20, 50, 100, 200, 500, 1000};
private ShapeDrawable mColoredCircleBackground;
/**
* Markers that are currently on the map.
*/
private Set<MarkerWithPosition> mMarkers = Collections.newSetFromMap(
new ConcurrentHashMap<MarkerWithPosition, Boolean>());
/**
* Icons for each bucket.
*/
private SparseArray<BitmapDescriptor> mIcons = new SparseArray<BitmapDescriptor>();
/**
* Markers for single ClusterItems.
*/
private MarkerCache<T> mMarkerCache = new MarkerCache<T>();
/**
* If cluster size is less than this size, display individual markers.
*/
private int mMinClusterSize = 4;
/**
* The currently displayed set of clusters.
*/
private Set<? extends Cluster<T>> mClusters;
/**
* Lookup between markers and the associated cluster.
*/
private Map<Marker, Cluster<T>> mMarkerToCluster = new HashMap<Marker, Cluster<T>>();
private Map<Cluster<T>, Marker> mClusterToMarker = new HashMap<Cluster<T>, Marker>();
/**
* The target zoom level for the current set of clusters.
*/
private float mZoom;
private final ViewModifier mViewModifier = new ViewModifier();
private ClusterManager.OnClusterClickListener<T> mClickListener;
private ClusterManager.OnClusterInfoWindowClickListener<T> mInfoWindowClickListener;
private ClusterManager.OnClusterItemClickListener<T> mItemClickListener;
private ClusterManager.OnClusterItemInfoWindowClickListener<T> mItemInfoWindowClickListener;
public DefaultClusterRenderer(Context context, GoogleMap map, ClusterManager<T> clusterManager) {
mMap = map;
mAnimate = true;
mDensity = context.getResources().getDisplayMetrics().density;
mIconGenerator = new IconGenerator(context);
mIconGenerator.setContentView(makeSquareTextView(context));
mIconGenerator.setTextAppearance(R.style.amu_ClusterIcon_TextAppearance);
mIconGenerator.setBackground(makeClusterBackground());
mClusterManager = clusterManager;
}
@Override
public void onAdd() {
mClusterManager.getMarkerCollection().setOnMarkerClickListener(new GoogleMap.OnMarkerClickListener() {
@Override
public boolean onMarkerClick(Marker marker) {
return mItemClickListener != null && mItemClickListener.onClusterItemClick(mMarkerCache.get(marker));
}
});
mClusterManager.getMarkerCollection().setOnInfoWindowClickListener(new GoogleMap.OnInfoWindowClickListener() {
@Override
public void onInfoWindowClick(Marker marker) {
if (mItemInfoWindowClickListener != null) {
mItemInfoWindowClickListener.onClusterItemInfoWindowClick(mMarkerCache.get(marker));
}
}
});
mClusterManager.getClusterMarkerCollection().setOnMarkerClickListener(new GoogleMap.OnMarkerClickListener() {
@Override
public boolean onMarkerClick(Marker marker) {
return mClickListener != null && mClickListener.onClusterClick(mMarkerToCluster.get(marker));
}
});
mClusterManager.getClusterMarkerCollection().setOnInfoWindowClickListener(new GoogleMap.OnInfoWindowClickListener() {
@Override
public void onInfoWindowClick(Marker marker) {
if (mInfoWindowClickListener != null) {
mInfoWindowClickListener.onClusterInfoWindowClick(mMarkerToCluster.get(marker));
}
}
});
}
@Override
public void onRemove() {
mClusterManager.getMarkerCollection().setOnMarkerClickListener(null);
mClusterManager.getMarkerCollection().setOnInfoWindowClickListener(null);
mClusterManager.getClusterMarkerCollection().setOnMarkerClickListener(null);
mClusterManager.getClusterMarkerCollection().setOnInfoWindowClickListener(null);
}
private LayerDrawable makeClusterBackground() {
mColoredCircleBackground = new ShapeDrawable(new OvalShape());
ShapeDrawable outline = new ShapeDrawable(new OvalShape());
outline.getPaint().setColor(0x80ffffff); // Transparent white.
LayerDrawable background = new LayerDrawable(new Drawable[]{outline, mColoredCircleBackground});
int strokeWidth = (int) (mDensity * 3);
background.setLayerInset(1, strokeWidth, strokeWidth, strokeWidth, strokeWidth);
return background;
}
private SquareTextView makeSquareTextView(Context context) {
SquareTextView squareTextView = new SquareTextView(context);
ViewGroup.LayoutParams layoutParams = new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT);
squareTextView.setLayoutParams(layoutParams);
squareTextView.setId(R.id.amu_text);
int twelveDpi = (int) (12 * mDensity);
squareTextView.setPadding(twelveDpi, twelveDpi, twelveDpi, twelveDpi);
return squareTextView;
}
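/**
 * Maps a cluster size to a marker colour: the hue starts near 220 (blue) for small clusters
 * and falls off quadratically towards 0 (red), saturating once the size reaches 300.
 */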
protected int getColor(int clusterSize) {
final float hueRange = 220;
final float sizeRange = 300;
final float size = Math.min(clusterSize, sizeRange);
final float hue = (sizeRange - size) * (sizeRange - size) / (sizeRange * sizeRange) * hueRange;
return Color.HSVToColor(new float[]{
hue, 1f, .6f
});
}
protected String getClusterText(int bucket) {
if (bucket < BUCKETS[0]) {
return String.valueOf(bucket);
}
return String.valueOf(bucket) + "+";
}
/**
* Gets the "bucket" for a particular cluster. By default, uses the number of points within the
* cluster, bucketed to some set points.
*/
protected int getBucket(Cluster<T> cluster) {
int size = cluster.getSize();
if (size <= BUCKETS[0]) {
return size;
}
for (int i = 0; i < BUCKETS.length - 1; i++) {
if (size < BUCKETS[i + 1]) {
return BUCKETS[i];
}
}
return BUCKETS[BUCKETS.length - 1];
}
public int getMinClusterSize() {
return mMinClusterSize;
}
public void setMinClusterSize(int minClusterSize) {
mMinClusterSize = minClusterSize;
}
/**
* ViewModifier ensures only one re-rendering of the view occurs at a time, and schedules
* re-rendering, which is performed by the RenderTask.
*/
@SuppressLint("HandlerLeak")
private class ViewModifier extends Handler {
private static final int RUN_TASK = 0;
private static final int TASK_FINISHED = 1;
private boolean mViewModificationInProgress = false;
private RenderTask mNextClusters = null;
@Override
public void handleMessage(Message msg) {
if (msg.what == TASK_FINISHED) {
mViewModificationInProgress = false;
if (mNextClusters != null) {
// Run the task that was queued up.
sendEmptyMessage(RUN_TASK);
}
return;
}
removeMessages(RUN_TASK);
if (mViewModificationInProgress) {
// Busy - wait for the callback.
return;
}
if (mNextClusters == null) {
// Nothing to do.
return;
}
Projection projection = mMap.getProjection();
RenderTask renderTask;
synchronized (this) {
renderTask = mNextClusters;
mNextClusters = null;
mViewModificationInProgress = true;
}
renderTask.setCallback(new Runnable() {
@Override
public void run() {
sendEmptyMessage(TASK_FINISHED);
}
});
renderTask.setProjection(projection);
renderTask.setMapZoom(mMap.getCameraPosition().zoom);
new Thread(renderTask).start();
}
public void queue(Set<? extends Cluster<T>> clusters) {
synchronized (this) {
// Overwrite any pending cluster tasks - we don't care about intermediate states.
mNextClusters = new RenderTask(clusters);
}
sendEmptyMessage(RUN_TASK);
}
}
/**
* Determine whether the cluster should be rendered as individual markers or a cluster.
*/
protected boolean shouldRenderAsCluster(Cluster<T> cluster) {
return cluster.getSize() > mMinClusterSize;
}
/**
* Transforms the current view (represented by DefaultClusterRenderer.mClusters and DefaultClusterRenderer.mZoom) to a
* new zoom level and set of clusters.
* <p/>
* This must be run off the UI thread. Work is coordinated in the RenderTask, then queued up to
* be executed by a MarkerModifier.
* <p/>
* There are three stages for the render:
* <p/>
* 1. Markers are added to the map
* <p/>
* 2. Markers are animated to their final position
* <p/>
* 3. Any old markers are removed from the map
* <p/>
* When zooming in, markers are animated out from the nearest existing cluster. When zooming
* out, existing clusters are animated to the nearest new cluster.
*/
private class RenderTask implements Runnable {
final Set<? extends Cluster<T>> clusters;
private Runnable mCallback;
private Projection mProjection;
private SphericalMercatorProjection mSphericalMercatorProjection;
private float mMapZoom;
private RenderTask(Set<? extends Cluster<T>> clusters) {
this.clusters = clusters;
}
/**
* A callback to be run when all work has been completed.
*
* @param callback
*/
public void setCallback(Runnable callback) {
mCallback = callback;
}
public void setProjection(Projection projection) {
this.mProjection = projection;
}
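// Builds the projection at the smaller (coarser) of the current and target zoom levels; every
// cluster position in this render pass is converted with this single projection.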
public void setMapZoom(float zoom) {
this.mMapZoom = zoom;
this.mSphericalMercatorProjection = new SphericalMercatorProjection(256 * Math.pow(2, Math.min(zoom, mZoom)));
}
@SuppressLint("NewApi")
public void run() {
if (clusters.equals(DefaultClusterRenderer.this.mClusters)) {
mCallback.run();
return;
}
final MarkerModifier markerModifier = new MarkerModifier();
final float zoom = mMapZoom;
final boolean zoomingIn = zoom > mZoom;
final float zoomDelta = zoom - mZoom;
final Set<MarkerWithPosition> markersToRemove = mMarkers;
// Prevent crashes: https://issuetracker.google.com/issues/35827242
LatLngBounds visibleBounds;
try {
visibleBounds = mProjection.getVisibleRegion().latLngBounds;
} catch (Exception e) {
e.printStackTrace();
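// Fall back to a degenerate bounds containing only (0, 0); rendering still completes, but
// essentially nothing is treated as on-screen for this pass.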
visibleBounds = LatLngBounds.builder()
.include(new LatLng(0, 0))
.build();
}
// TODO: Add some padding, so that markers can animate in from off-screen.
// Find all of the existing clusters that are on-screen. These are candidates for
// markers to animate from.
List<Point> existingClustersOnScreen = null;
if (DefaultClusterRenderer.this.mClusters != null && SHOULD_ANIMATE && mAnimate) {
existingClustersOnScreen = new ArrayList<Point>();
for (Cluster<T> c : DefaultClusterRenderer.this.mClusters) {
if (shouldRenderAsCluster(c) && visibleBounds.contains(c.getPosition())) {
Point point = mSphericalMercatorProjection.toPoint(c.getPosition());
existingClustersOnScreen.add(point);
}
}
}
// Create the new markers and animate them to their new positions.
final Set<MarkerWithPosition> newMarkers = Collections.newSetFromMap(
new ConcurrentHashMap<MarkerWithPosition, Boolean>());
for (Cluster<T> c : clusters) {
boolean onScreen = visibleBounds.contains(c.getPosition());
if (zoomingIn && onScreen && SHOULD_ANIMATE && mAnimate) {
Point point = mSphericalMercatorProjection.toPoint(c.getPosition());
Point closest = findClosestCluster(existingClustersOnScreen, point);
if (closest != null) {
LatLng animateTo = mSphericalMercatorProjection.toLatLng(closest);
markerModifier.add(true, new CreateMarkerTask(c, newMarkers, animateTo));
} else {
markerModifier.add(true, new CreateMarkerTask(c, newMarkers, null));
}
} else {
markerModifier.add(onScreen, new CreateMarkerTask(c, newMarkers, null));
}
}
// Wait for all markers to be added.
markerModifier.waitUntilFree();
// Don't remove any markers that were just added. This is basically anything that had
// a hit in the MarkerCache.
markersToRemove.removeAll(newMarkers);
// Find all of the new clusters that were added on-screen. These are candidates for
// markers to animate from.
List<Point> newClustersOnScreen = null;
if (SHOULD_ANIMATE && mAnimate) {
newClustersOnScreen = new ArrayList<Point>();
for (Cluster<T> c : clusters) {
if (shouldRenderAsCluster(c) && visibleBounds.contains(c.getPosition())) {
Point p = mSphericalMercatorProjection.toPoint(c.getPosition());
newClustersOnScreen.add(p);
}
}
}
// Remove the old markers, animating them into clusters if zooming out.
for (final MarkerWithPosition marker : markersToRemove) {
boolean onScreen = visibleBounds.contains(marker.position);
// Don't animate when zooming out more than 3 zoom levels.
// TODO: drop animation based on speed of device & number of markers to animate.
if (!zoomingIn && zoomDelta > -3 && onScreen && SHOULD_ANIMATE && mAnimate) {
final Point point = mSphericalMercatorProjection.toPoint(marker.position);
final Point closest = findClosestCluster(newClustersOnScreen, point);
if (closest != null) {
LatLng animateTo = mSphericalMercatorProjection.toLatLng(closest);
markerModifier.animateThenRemove(marker, marker.position, animateTo);
} else {
markerModifier.remove(true, marker.marker);
}
} else {
markerModifier.remove(onScreen, marker.marker);
}
}
markerModifier.waitUntilFree();
mMarkers = newMarkers;
DefaultClusterRenderer.this.mClusters = clusters;
mZoom = zoom;
mCallback.run();
}
}
@Override
public void onClustersChanged(Set<? extends Cluster<T>> clusters) {
mViewModifier.queue(clusters);
}
@Override
public void setOnClusterClickListener(ClusterManager.OnClusterClickListener<T> listener) {
mClickListener = listener;
}
@Override
public void setOnClusterInfoWindowClickListener(ClusterManager.OnClusterInfoWindowClickListener<T> listener) {
mInfoWindowClickListener = listener;
}
@Override
public void setOnClusterItemClickListener(ClusterManager.OnClusterItemClickListener<T> listener) {
mItemClickListener = listener;
}
@Override
public void setOnClusterItemInfoWindowClickListener(ClusterManager.OnClusterItemInfoWindowClickListener<T> listener) {
mItemInfoWindowClickListener = listener;
}
@Override
public void setAnimation(boolean animate) {
mAnimate = animate;
}
private static double distanceSquared(Point a, Point b) {
return (a.x - b.x) * (a.x - b.x) + (a.y - b.y) * (a.y - b.y);
}
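// Returns the candidate point closest to 'point', but only if it lies within the clustering
// algorithm's maximum cluster distance; returns null otherwise.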
private Point findClosestCluster(List<Point> markers, Point point) {
if (markers == null || markers.isEmpty()) return null;
int maxDistance = mClusterManager.getAlgorithm().getMaxDistanceBetweenClusteredItems();
double minDistSquared = maxDistance * maxDistance;
Point closest = null;
for (Point candidate : markers) {
double dist = distanceSquared(candidate, point);
if (dist < minDistSquared) {
closest = candidate;
minDistSquared = dist;
}
}
return closest;
}
/**
* Handles all markerWithPosition manipulations on the map. Work (such as adding, removing, or
* animating a markerWithPosition) is performed while trying not to block the rest of the app's
* UI.
*/
@SuppressLint("HandlerLeak")
private class MarkerModifier extends Handler implements MessageQueue.IdleHandler {
private static final int BLANK = 0;
private final Lock lock = new ReentrantLock();
private final Condition busyCondition = lock.newCondition();
private Queue<CreateMarkerTask> mCreateMarkerTasks = new LinkedList<CreateMarkerTask>();
private Queue<CreateMarkerTask> mOnScreenCreateMarkerTasks = new LinkedList<CreateMarkerTask>();
private Queue<Marker> mRemoveMarkerTasks = new LinkedList<Marker>();
private Queue<Marker> mOnScreenRemoveMarkerTasks = new LinkedList<Marker>();
private Queue<AnimationTask> mAnimationTasks = new LinkedList<AnimationTask>();
/**
* Whether the idle listener has been added to the UI thread's MessageQueue.
*/
private boolean mListenerAdded;
private MarkerModifier() {
super(Looper.getMainLooper());
}
/**
* Creates markers for a cluster some time in the future.
*
* @param priority whether this operation should have priority.
*/
public void add(boolean priority, CreateMarkerTask c) {
lock.lock();
sendEmptyMessage(BLANK);
if (priority) {
mOnScreenCreateMarkerTasks.add(c);
} else {
mCreateMarkerTasks.add(c);
}
lock.unlock();
}
/**
* Removes a markerWithPosition some time in the future.
*
* @param priority whether this operation should have priority.
* @param m the markerWithPosition to remove.
*/
public void remove(boolean priority, Marker m) {
lock.lock();
sendEmptyMessage(BLANK);
if (priority) {
mOnScreenRemoveMarkerTasks.add(m);
} else {
mRemoveMarkerTasks.add(m);
}
lock.unlock();
}
/**
* Animates a markerWithPosition some time in the future.
*
* @param marker the markerWithPosition to animate.
* @param from the position to animate from.
* @param to the position to animate to.
*/
public void animate(MarkerWithPosition marker, LatLng from, LatLng to) {
lock.lock();
mAnimationTasks.add(new AnimationTask(marker, from, to));
lock.unlock();
}
/**
* Animates a markerWithPosition some time in the future, and removes it when the animation
* is complete.
*
* @param marker the markerWithPosition to animate.
* @param from the position to animate from.
* @param to the position to animate to.
*/
@TargetApi(Build.VERSION_CODES.HONEYCOMB)
public void animateThenRemove(MarkerWithPosition marker, LatLng from, LatLng to) {
lock.lock();
AnimationTask animationTask = new AnimationTask(marker, from, to);
animationTask.removeOnAnimationComplete(mClusterManager.getMarkerManager());
mAnimationTasks.add(animationTask);
lock.unlock();
}
@Override
public void handleMessage(Message msg) {
if (!mListenerAdded) {
Looper.myQueue().addIdleHandler(this);
mListenerAdded = true;
}
removeMessages(BLANK);
lock.lock();
try {
// Perform up to 10 tasks at once.
// Consider only performing 10 remove tasks, not adds and animations.
// Removes are relatively slow and are much better when batched.
for (int i = 0; i < 10; i++) {
performNextTask();
}
if (!isBusy()) {
mListenerAdded = false;
Looper.myQueue().removeIdleHandler(this);
// Signal any other threads that are waiting.
busyCondition.signalAll();
} else {
// Sometimes the idle queue may not be called - schedule up some work regardless
// of whether the UI thread is busy or not.
// TODO: try to remove this.
sendEmptyMessageDelayed(BLANK, 10);
}
} finally {
lock.unlock();
}
}
/**
* Perform the next task. Prioritise any on-screen work.
*/
@TargetApi(Build.VERSION_CODES.HONEYCOMB)
private void performNextTask() {
if (!mOnScreenRemoveMarkerTasks.isEmpty()) {
removeMarker(mOnScreenRemoveMarkerTasks.poll());
} else if (!mAnimationTasks.isEmpty()) {
mAnimationTasks.poll().perform();
} else if (!mOnScreenCreateMarkerTasks.isEmpty()) {
mOnScreenCreateMarkerTasks.poll().perform(this);
} else if (!mCreateMarkerTasks.isEmpty()) {
mCreateMarkerTasks.poll().perform(this);
} else if (!mRemoveMarkerTasks.isEmpty()) {
removeMarker(mRemoveMarkerTasks.poll());
}
}
private void removeMarker(Marker m) {
Cluster<T> cluster = mMarkerToCluster.get(m);
mClusterToMarker.remove(cluster);
mMarkerCache.remove(m);
mMarkerToCluster.remove(m);
mClusterManager.getMarkerManager().remove(m);
}
/**
* @return true if there is still work to be processed.
*/
public boolean isBusy() {
try {
lock.lock();
return !(mCreateMarkerTasks.isEmpty() && mOnScreenCreateMarkerTasks.isEmpty() &&
mOnScreenRemoveMarkerTasks.isEmpty() && mRemoveMarkerTasks.isEmpty() &&
mAnimationTasks.isEmpty()
);
} finally {
lock.unlock();
}
}
/**
* Blocks the calling thread until all work has been processed.
*/
public void waitUntilFree() {
while (isBusy()) {
// Sometimes the idle queue may not be called - schedule up some work regardless
// of whether the UI thread is busy or not.
// TODO: try to remove this.
sendEmptyMessage(BLANK);
lock.lock();
try {
if (isBusy()) {
busyCondition.await();
}
} catch (InterruptedException e) {
throw new RuntimeException(e);
} finally {
lock.unlock();
}
}
}
@Override
public boolean queueIdle() {
// When the UI is not busy, schedule some work.
sendEmptyMessage(BLANK);
return true;
}
}
/**
* A cache of markers representing individual ClusterItems.
*/
private static class MarkerCache<T> {
private Map<T, Marker> mCache = new HashMap<T, Marker>();
private Map<Marker, T> mCacheReverse = new HashMap<Marker, T>();
public Marker get(T item) {
return mCache.get(item);
}
public T get(Marker m) {
return mCacheReverse.get(m);
}
public void put(T item, Marker m) {
mCache.put(item, m);
mCacheReverse.put(m, item);
}
public void remove(Marker m) {
T item = mCacheReverse.get(m);
mCacheReverse.remove(m);
mCache.remove(item);
}
}
/**
* Called before the marker for a ClusterItem is added to the map.
*/
protected void onBeforeClusterItemRendered(T item, MarkerOptions markerOptions) {
}
/**
* Called before the marker for a Cluster is added to the map.
* The default implementation draws a circle with a rough count of the number of items.
*/
protected void onBeforeClusterRendered(Cluster<T> cluster, MarkerOptions markerOptions) {
int bucket = getBucket(cluster);
BitmapDescriptor descriptor = mIcons.get(bucket);
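// Icons are generated lazily and cached per bucket, so each cluster-size range is only rendered once.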
if (descriptor == null) {
mColoredCircleBackground.getPaint().setColor(getColor(bucket));
descriptor = BitmapDescriptorFactory.fromBitmap(mIconGenerator.makeIcon(getClusterText(bucket)));
mIcons.put(bucket, descriptor);
}
// TODO: consider adding anchor(.5, .5) (Individual markers will overlap more often)
markerOptions.icon(descriptor);
}
/**
* Called after the marker for a Cluster has been added to the map.
*/
protected void onClusterRendered(Cluster<T> cluster, Marker marker) {
}
/**
* Called after the marker for a ClusterItem has been added to the map.
*/
protected void onClusterItemRendered(T clusterItem, Marker marker) {
}
/**
* Get the marker from a ClusterItem
* @param clusterItem the ClusterItem whose marker you want to obtain
* @return the marker for the given ClusterItem, or null if it does not exist
*/
public Marker getMarker(T clusterItem) {
return mMarkerCache.get(clusterItem);
}
/**
* Get the ClusterItem from a marker
* @param marker the marker whose ClusterItem you want to obtain
* @return the ClusterItem for the given marker, or null if it does not exist
*/
public T getClusterItem(Marker marker) {
return mMarkerCache.get(marker);
}
/**
* Get the marker from a Cluster
* @param cluster the Cluster whose marker you want to obtain
* @return the marker for the given cluster, or null if it does not exist
*/
public Marker getMarker(Cluster<T> cluster) {
return mClusterToMarker.get(cluster);
}
/**
* Get the Cluster from a marker
* @param marker the marker whose Cluster you want to obtain
* @return the Cluster for the given marker, or null if it does not exist
*/
public Cluster<T> getCluster(Marker marker) {
return mMarkerToCluster.get(marker);
}
/**
* Creates markerWithPosition(s) for a particular cluster, animating it if necessary.
*/
private class CreateMarkerTask {
private final Cluster<T> cluster;
private final Set<MarkerWithPosition> newMarkers;
private final LatLng animateFrom;
/**
* @param c the cluster to render.
* @param markersAdded a collection of markers to append any created markers.
* @param animateFrom the location to animate the markerWithPosition from, or null if no
* animation is required.
*/
public CreateMarkerTask(Cluster<T> c, Set<MarkerWithPosition> markersAdded, LatLng animateFrom) {
this.cluster = c;
this.newMarkers = markersAdded;
this.animateFrom = animateFrom;
}
private void perform(MarkerModifier markerModifier) {
// Don't show small clusters. Render the markers inside, instead.
if (!shouldRenderAsCluster(cluster)) {
for (T item : cluster.getItems()) {
Marker marker = mMarkerCache.get(item);
MarkerWithPosition markerWithPosition;
if (marker == null) {
MarkerOptions markerOptions = new MarkerOptions();
if (animateFrom != null) {
markerOptions.position(animateFrom);
} else {
markerOptions.position(item.getPosition());
}
if (item.getTitle() != null && item.getSnippet() != null) {
markerOptions.title(item.getTitle());
markerOptions.snippet(item.getSnippet());
} else if (item.getSnippet() != null) {
markerOptions.title(item.getSnippet());
} else if (item.getTitle() != null) {
markerOptions.title(item.getTitle());
}
onBeforeClusterItemRendered(item, markerOptions);
marker = mClusterManager.getMarkerCollection().addMarker(markerOptions);
markerWithPosition = new MarkerWithPosition(marker);
mMarkerCache.put(item, marker);
if (animateFrom != null) {
markerModifier.animate(markerWithPosition, animateFrom, item.getPosition());
}
} else {
markerWithPosition = new MarkerWithPosition(marker);
}
onClusterItemRendered(item, marker);
newMarkers.add(markerWithPosition);
}
return;
}
Marker marker = mClusterToMarker.get(cluster);
MarkerWithPosition markerWithPosition;
if (marker == null) {
MarkerOptions markerOptions = new MarkerOptions().
position(animateFrom == null ? cluster.getPosition() : animateFrom);
onBeforeClusterRendered(cluster, markerOptions);
marker = mClusterManager.getClusterMarkerCollection().addMarker(markerOptions);
mMarkerToCluster.put(marker, cluster);
mClusterToMarker.put(cluster, marker);
markerWithPosition = new MarkerWithPosition(marker);
if (animateFrom != null) {
markerModifier.animate(markerWithPosition, animateFrom, cluster.getPosition());
}
} else {
markerWithPosition = new MarkerWithPosition(marker);
}
onClusterRendered(cluster, marker);
newMarkers.add(markerWithPosition);
}
}
/**
* A Marker and its position. Marker.getPosition() must be called from the UI thread, so this
* object allows lookup from other threads.
*/
private static class MarkerWithPosition {
private final Marker marker;
private LatLng position;
private MarkerWithPosition(Marker marker) {
this.marker = marker;
position = marker.getPosition();
}
@Override
public boolean equals(Object other) {
if (other instanceof MarkerWithPosition) {
return marker.equals(((MarkerWithPosition) other).marker);
}
return false;
}
@Override
public int hashCode() {
return marker.hashCode();
}
}
private static final TimeInterpolator ANIMATION_INTERP = new DecelerateInterpolator();
/**
* Animates a markerWithPosition from one position to another. TODO: improve performance for
* slow devices (e.g. Nexus S).
*/
@TargetApi(Build.VERSION_CODES.HONEYCOMB_MR1)
private class AnimationTask extends AnimatorListenerAdapter implements ValueAnimator.AnimatorUpdateListener {
private final MarkerWithPosition markerWithPosition;
private final Marker marker;
private final LatLng from;
private final LatLng to;
private boolean mRemoveOnComplete;
private MarkerManager mMarkerManager;
private AnimationTask(MarkerWithPosition markerWithPosition, LatLng from, LatLng to) {
this.markerWithPosition = markerWithPosition;
this.marker = markerWithPosition.marker;
this.from = from;
this.to = to;
}
public void perform() {
ValueAnimator valueAnimator = ValueAnimator.ofFloat(0.0f, 1.0f);
valueAnimator.setInterpolator(ANIMATION_INTERP);
valueAnimator.addUpdateListener(this);
valueAnimator.addListener(this);
valueAnimator.start();
}
@Override
public void onAnimationEnd(Animator animation) {
if (mRemoveOnComplete) {
Cluster<T> cluster = mMarkerToCluster.get(marker);
mClusterToMarker.remove(cluster);
mMarkerCache.remove(marker);
mMarkerToCluster.remove(marker);
mMarkerManager.remove(marker);
}
markerWithPosition.position = to;
}
public void removeOnAnimationComplete(MarkerManager markerManager) {
mMarkerManager = markerManager;
mRemoveOnComplete = true;
}
@Override
public void onAnimationUpdate(ValueAnimator valueAnimator) {
float fraction = valueAnimator.getAnimatedFraction();
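// The fraction is already eased by the decelerate interpolator; lat/lng are interpolated linearly in it.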
double lat = (to.latitude - from.latitude) * fraction + from.latitude;
double lngDelta = to.longitude - from.longitude;
// Take the shortest path across the 180th meridian.
if (Math.abs(lngDelta) > 180) {
lngDelta -= Math.signum(lngDelta) * 360;
}
double lng = lngDelta * fraction + from.longitude;
LatLng position = new LatLng(lat, lng);
marker.setPosition(position);
}
}
}
|
library/src/com/google/maps/android/clustering/view/DefaultClusterRenderer.java
|
/*
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.maps.android.clustering.view;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.TimeInterpolator;
import android.animation.ValueAnimator;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.Color;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.LayerDrawable;
import android.graphics.drawable.ShapeDrawable;
import android.graphics.drawable.shapes.OvalShape;
import android.os.Build;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.os.MessageQueue;
import android.util.SparseArray;
import android.view.ViewGroup;
import android.view.animation.DecelerateInterpolator;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.Projection;
import com.google.android.gms.maps.model.BitmapDescriptor;
import com.google.android.gms.maps.model.BitmapDescriptorFactory;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.LatLngBounds;
import com.google.android.gms.maps.model.Marker;
import com.google.android.gms.maps.model.MarkerOptions;
import com.google.maps.android.MarkerManager;
import com.google.maps.android.R;
import com.google.maps.android.clustering.Cluster;
import com.google.maps.android.clustering.ClusterItem;
import com.google.maps.android.clustering.ClusterManager;
import com.google.maps.android.geometry.Point;
import com.google.maps.android.projection.SphericalMercatorProjection;
import com.google.maps.android.ui.SquareTextView;
import com.google.maps.android.ui.IconGenerator;
import java.util.ArrayList;
import java.util.Collections;
import java.util.concurrent.ConcurrentHashMap;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
/**
* The default view for a ClusterManager. Markers are animated in and out of clusters.
*/
public class DefaultClusterRenderer<T extends ClusterItem> implements ClusterRenderer<T> {
private static final boolean SHOULD_ANIMATE = Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB;
private final GoogleMap mMap;
private final IconGenerator mIconGenerator;
private final ClusterManager<T> mClusterManager;
private final float mDensity;
private boolean mAnimate;
private static final int[] BUCKETS = {10, 20, 50, 100, 200, 500, 1000};
private ShapeDrawable mColoredCircleBackground;
/**
* Markers that are currently on the map.
*/
private Set<MarkerWithPosition> mMarkers = Collections.newSetFromMap(
new ConcurrentHashMap<MarkerWithPosition, Boolean>());
/**
* Icons for each bucket.
*/
private SparseArray<BitmapDescriptor> mIcons = new SparseArray<BitmapDescriptor>();
/**
* Markers for single ClusterItems.
*/
private MarkerCache<T> mMarkerCache = new MarkerCache<T>();
/**
* If cluster size is less than this size, display individual markers.
*/
private int mMinClusterSize = 4;
/**
* The currently displayed set of clusters.
*/
private Set<? extends Cluster<T>> mClusters;
/**
* Lookup between markers and the associated cluster.
*/
private Map<Marker, Cluster<T>> mMarkerToCluster = new HashMap<Marker, Cluster<T>>();
private Map<Cluster<T>, Marker> mClusterToMarker = new HashMap<Cluster<T>, Marker>();
/**
* The target zoom level for the current set of clusters.
*/
private float mZoom;
private final ViewModifier mViewModifier = new ViewModifier();
private ClusterManager.OnClusterClickListener<T> mClickListener;
private ClusterManager.OnClusterInfoWindowClickListener<T> mInfoWindowClickListener;
private ClusterManager.OnClusterItemClickListener<T> mItemClickListener;
private ClusterManager.OnClusterItemInfoWindowClickListener<T> mItemInfoWindowClickListener;
public DefaultClusterRenderer(Context context, GoogleMap map, ClusterManager<T> clusterManager) {
mMap = map;
mAnimate = true;
mDensity = context.getResources().getDisplayMetrics().density;
mIconGenerator = new IconGenerator(context);
mIconGenerator.setContentView(makeSquareTextView(context));
mIconGenerator.setTextAppearance(R.style.amu_ClusterIcon_TextAppearance);
mIconGenerator.setBackground(makeClusterBackground());
mClusterManager = clusterManager;
}
@Override
public void onAdd() {
mClusterManager.getMarkerCollection().setOnMarkerClickListener(new GoogleMap.OnMarkerClickListener() {
@Override
public boolean onMarkerClick(Marker marker) {
return mItemClickListener != null && mItemClickListener.onClusterItemClick(mMarkerCache.get(marker));
}
});
mClusterManager.getMarkerCollection().setOnInfoWindowClickListener(new GoogleMap.OnInfoWindowClickListener() {
@Override
public void onInfoWindowClick(Marker marker) {
if (mItemInfoWindowClickListener != null) {
mItemInfoWindowClickListener.onClusterItemInfoWindowClick(mMarkerCache.get(marker));
}
}
});
mClusterManager.getClusterMarkerCollection().setOnMarkerClickListener(new GoogleMap.OnMarkerClickListener() {
@Override
public boolean onMarkerClick(Marker marker) {
return mClickListener != null && mClickListener.onClusterClick(mMarkerToCluster.get(marker));
}
});
mClusterManager.getClusterMarkerCollection().setOnInfoWindowClickListener(new GoogleMap.OnInfoWindowClickListener() {
@Override
public void onInfoWindowClick(Marker marker) {
if (mInfoWindowClickListener != null) {
mInfoWindowClickListener.onClusterInfoWindowClick(mMarkerToCluster.get(marker));
}
}
});
}
@Override
public void onRemove() {
mClusterManager.getMarkerCollection().setOnMarkerClickListener(null);
mClusterManager.getMarkerCollection().setOnInfoWindowClickListener(null);
mClusterManager.getClusterMarkerCollection().setOnMarkerClickListener(null);
mClusterManager.getClusterMarkerCollection().setOnInfoWindowClickListener(null);
}
private LayerDrawable makeClusterBackground() {
mColoredCircleBackground = new ShapeDrawable(new OvalShape());
ShapeDrawable outline = new ShapeDrawable(new OvalShape());
outline.getPaint().setColor(0x80ffffff); // Transparent white.
LayerDrawable background = new LayerDrawable(new Drawable[]{outline, mColoredCircleBackground});
int strokeWidth = (int) (mDensity * 3);
background.setLayerInset(1, strokeWidth, strokeWidth, strokeWidth, strokeWidth);
return background;
}
private SquareTextView makeSquareTextView(Context context) {
SquareTextView squareTextView = new SquareTextView(context);
ViewGroup.LayoutParams layoutParams = new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT);
squareTextView.setLayoutParams(layoutParams);
squareTextView.setId(R.id.amu_text);
int twelveDpi = (int) (12 * mDensity);
squareTextView.setPadding(twelveDpi, twelveDpi, twelveDpi, twelveDpi);
return squareTextView;
}
protected int getColor(int clusterSize) {
final float hueRange = 220;
final float sizeRange = 300;
final float size = Math.min(clusterSize, sizeRange);
final float hue = (sizeRange - size) * (sizeRange - size) / (sizeRange * sizeRange) * hueRange;
return Color.HSVToColor(new float[]{
hue, 1f, .6f
});
}
protected String getClusterText(int bucket) {
if (bucket < BUCKETS[0]) {
return String.valueOf(bucket);
}
return String.valueOf(bucket) + "+";
}
/**
* Gets the "bucket" for a particular cluster. By default, uses the number of points within the
* cluster, bucketed to some set points.
*/
protected int getBucket(Cluster<T> cluster) {
int size = cluster.getSize();
if (size <= BUCKETS[0]) {
return size;
}
for (int i = 0; i < BUCKETS.length - 1; i++) {
if (size < BUCKETS[i + 1]) {
return BUCKETS[i];
}
}
return BUCKETS[BUCKETS.length - 1];
}
public int getMinClusterSize() {
return mMinClusterSize;
}
public void setMinClusterSize(int minClusterSize) {
mMinClusterSize = minClusterSize;
}
/**
* ViewModifier ensures only one re-rendering of the view occurs at a time, and schedules
* re-rendering, which is performed by the RenderTask.
*/
@SuppressLint("HandlerLeak")
private class ViewModifier extends Handler {
private static final int RUN_TASK = 0;
private static final int TASK_FINISHED = 1;
private boolean mViewModificationInProgress = false;
private RenderTask mNextClusters = null;
@Override
public void handleMessage(Message msg) {
if (msg.what == TASK_FINISHED) {
mViewModificationInProgress = false;
if (mNextClusters != null) {
// Run the task that was queued up.
sendEmptyMessage(RUN_TASK);
}
return;
}
removeMessages(RUN_TASK);
if (mViewModificationInProgress) {
// Busy - wait for the callback.
return;
}
if (mNextClusters == null) {
// Nothing to do.
return;
}
Projection projection = mMap.getProjection();
RenderTask renderTask;
synchronized (this) {
renderTask = mNextClusters;
mNextClusters = null;
mViewModificationInProgress = true;
}
renderTask.setCallback(new Runnable() {
@Override
public void run() {
sendEmptyMessage(TASK_FINISHED);
}
});
renderTask.setProjection(projection);
renderTask.setMapZoom(mMap.getCameraPosition().zoom);
new Thread(renderTask).start();
}
public void queue(Set<? extends Cluster<T>> clusters) {
synchronized (this) {
// Overwrite any pending cluster tasks - we don't care about intermediate states.
mNextClusters = new RenderTask(clusters);
}
sendEmptyMessage(RUN_TASK);
}
}
/**
* Determine whether the cluster should be rendered as individual markers or a cluster.
*/
protected boolean shouldRenderAsCluster(Cluster<T> cluster) {
return cluster.getSize() > mMinClusterSize;
}
/**
* Transforms the current view (represented by DefaultClusterRenderer.mClusters and DefaultClusterRenderer.mZoom) to a
* new zoom level and set of clusters.
* <p/>
* This must be run off the UI thread. Work is coordinated in the RenderTask, then queued up to
* be executed by a MarkerModifier.
* <p/>
* There are three stages for the render:
* <p/>
* 1. Markers are added to the map
* <p/>
* 2. Markers are animated to their final position
* <p/>
* 3. Any old markers are removed from the map
* <p/>
* When zooming in, markers are animated out from the nearest existing cluster. When zooming
* out, existing clusters are animated to the nearest new cluster.
*/
private class RenderTask implements Runnable {
final Set<? extends Cluster<T>> clusters;
private Runnable mCallback;
private Projection mProjection;
private SphericalMercatorProjection mSphericalMercatorProjection;
private float mMapZoom;
private RenderTask(Set<? extends Cluster<T>> clusters) {
this.clusters = clusters;
}
/**
* A callback to be run when all work has been completed.
*
* @param callback
*/
public void setCallback(Runnable callback) {
mCallback = callback;
}
public void setProjection(Projection projection) {
this.mProjection = projection;
}
public void setMapZoom(float zoom) {
this.mMapZoom = zoom;
this.mSphericalMercatorProjection = new SphericalMercatorProjection(256 * Math.pow(2, Math.min(zoom, mZoom)));
}
@SuppressLint("NewApi")
public void run() {
if (clusters.equals(DefaultClusterRenderer.this.mClusters)) {
mCallback.run();
return;
}
final MarkerModifier markerModifier = new MarkerModifier();
final float zoom = mMapZoom;
final boolean zoomingIn = zoom > mZoom;
final float zoomDelta = zoom - mZoom;
final Set<MarkerWithPosition> markersToRemove = mMarkers;
final LatLngBounds visibleBounds = mProjection.getVisibleRegion().latLngBounds;
// TODO: Add some padding, so that markers can animate in from off-screen.
// Find all of the existing clusters that are on-screen. These are candidates for
// markers to animate from.
List<Point> existingClustersOnScreen = null;
if (DefaultClusterRenderer.this.mClusters != null && SHOULD_ANIMATE && mAnimate) {
existingClustersOnScreen = new ArrayList<Point>();
for (Cluster<T> c : DefaultClusterRenderer.this.mClusters) {
if (shouldRenderAsCluster(c) && visibleBounds.contains(c.getPosition())) {
Point point = mSphericalMercatorProjection.toPoint(c.getPosition());
existingClustersOnScreen.add(point);
}
}
}
// Create the new markers and animate them to their new positions.
final Set<MarkerWithPosition> newMarkers = Collections.newSetFromMap(
new ConcurrentHashMap<MarkerWithPosition, Boolean>());
for (Cluster<T> c : clusters) {
boolean onScreen = visibleBounds.contains(c.getPosition());
if (zoomingIn && onScreen && SHOULD_ANIMATE && mAnimate) {
Point point = mSphericalMercatorProjection.toPoint(c.getPosition());
Point closest = findClosestCluster(existingClustersOnScreen, point);
if (closest != null) {
LatLng animateTo = mSphericalMercatorProjection.toLatLng(closest);
markerModifier.add(true, new CreateMarkerTask(c, newMarkers, animateTo));
} else {
markerModifier.add(true, new CreateMarkerTask(c, newMarkers, null));
}
} else {
markerModifier.add(onScreen, new CreateMarkerTask(c, newMarkers, null));
}
}
// Wait for all markers to be added.
markerModifier.waitUntilFree();
// Don't remove any markers that were just added. This is basically anything that had
// a hit in the MarkerCache.
markersToRemove.removeAll(newMarkers);
// Find all of the new clusters that were added on-screen. These are candidates for
// markers to animate from.
List<Point> newClustersOnScreen = null;
if (SHOULD_ANIMATE && mAnimate) {
newClustersOnScreen = new ArrayList<Point>();
for (Cluster<T> c : clusters) {
if (shouldRenderAsCluster(c) && visibleBounds.contains(c.getPosition())) {
Point p = mSphericalMercatorProjection.toPoint(c.getPosition());
newClustersOnScreen.add(p);
}
}
}
// Remove the old markers, animating them into clusters if zooming out.
for (final MarkerWithPosition marker : markersToRemove) {
boolean onScreen = visibleBounds.contains(marker.position);
// Don't animate when zooming out more than 3 zoom levels.
// TODO: drop animation based on speed of device & number of markers to animate.
if (!zoomingIn && zoomDelta > -3 && onScreen && SHOULD_ANIMATE && mAnimate) {
final Point point = mSphericalMercatorProjection.toPoint(marker.position);
final Point closest = findClosestCluster(newClustersOnScreen, point);
if (closest != null) {
LatLng animateTo = mSphericalMercatorProjection.toLatLng(closest);
markerModifier.animateThenRemove(marker, marker.position, animateTo);
} else {
markerModifier.remove(true, marker.marker);
}
} else {
markerModifier.remove(onScreen, marker.marker);
}
}
markerModifier.waitUntilFree();
mMarkers = newMarkers;
DefaultClusterRenderer.this.mClusters = clusters;
mZoom = zoom;
mCallback.run();
}
}
@Override
public void onClustersChanged(Set<? extends Cluster<T>> clusters) {
mViewModifier.queue(clusters);
}
@Override
public void setOnClusterClickListener(ClusterManager.OnClusterClickListener<T> listener) {
mClickListener = listener;
}
@Override
public void setOnClusterInfoWindowClickListener(ClusterManager.OnClusterInfoWindowClickListener<T> listener) {
mInfoWindowClickListener = listener;
}
@Override
public void setOnClusterItemClickListener(ClusterManager.OnClusterItemClickListener<T> listener) {
mItemClickListener = listener;
}
@Override
public void setOnClusterItemInfoWindowClickListener(ClusterManager.OnClusterItemInfoWindowClickListener<T> listener) {
mItemInfoWindowClickListener = listener;
}
@Override
public void setAnimation(boolean animate) {
mAnimate = animate;
}
private static double distanceSquared(Point a, Point b) {
return (a.x - b.x) * (a.x - b.x) + (a.y - b.y) * (a.y - b.y);
}
private Point findClosestCluster(List<Point> markers, Point point) {
if (markers == null || markers.isEmpty()) return null;
int maxDistance = mClusterManager.getAlgorithm().getMaxDistanceBetweenClusteredItems();
double minDistSquared = maxDistance * maxDistance;
Point closest = null;
for (Point candidate : markers) {
double dist = distanceSquared(candidate, point);
if (dist < minDistSquared) {
closest = candidate;
minDistSquared = dist;
}
}
return closest;
}
/**
* Handles all markerWithPosition manipulations on the map. Work (such as adding, removing, or
* animating a markerWithPosition) is performed while trying not to block the rest of the app's
* UI.
*/
@SuppressLint("HandlerLeak")
private class MarkerModifier extends Handler implements MessageQueue.IdleHandler {
private static final int BLANK = 0;
private final Lock lock = new ReentrantLock();
private final Condition busyCondition = lock.newCondition();
private Queue<CreateMarkerTask> mCreateMarkerTasks = new LinkedList<CreateMarkerTask>();
private Queue<CreateMarkerTask> mOnScreenCreateMarkerTasks = new LinkedList<CreateMarkerTask>();
private Queue<Marker> mRemoveMarkerTasks = new LinkedList<Marker>();
private Queue<Marker> mOnScreenRemoveMarkerTasks = new LinkedList<Marker>();
private Queue<AnimationTask> mAnimationTasks = new LinkedList<AnimationTask>();
/**
* Whether the idle listener has been added to the UI thread's MessageQueue.
*/
private boolean mListenerAdded;
private MarkerModifier() {
super(Looper.getMainLooper());
}
/**
* Creates markers for a cluster some time in the future.
*
* @param priority whether this operation should have priority.
*/
public void add(boolean priority, CreateMarkerTask c) {
lock.lock();
sendEmptyMessage(BLANK);
if (priority) {
mOnScreenCreateMarkerTasks.add(c);
} else {
mCreateMarkerTasks.add(c);
}
lock.unlock();
}
/**
* Removes a markerWithPosition some time in the future.
*
* @param priority whether this operation should have priority.
* @param m the markerWithPosition to remove.
*/
public void remove(boolean priority, Marker m) {
lock.lock();
sendEmptyMessage(BLANK);
if (priority) {
mOnScreenRemoveMarkerTasks.add(m);
} else {
mRemoveMarkerTasks.add(m);
}
lock.unlock();
}
/**
* Animates a markerWithPosition some time in the future.
*
* @param marker the markerWithPosition to animate.
* @param from the position to animate from.
* @param to the position to animate to.
*/
public void animate(MarkerWithPosition marker, LatLng from, LatLng to) {
lock.lock();
mAnimationTasks.add(new AnimationTask(marker, from, to));
lock.unlock();
}
/**
* Animates a markerWithPosition some time in the future, and removes it when the animation
* is complete.
*
* @param marker the markerWithPosition to animate.
* @param from the position to animate from.
* @param to the position to animate to.
*/
@TargetApi(Build.VERSION_CODES.HONEYCOMB)
public void animateThenRemove(MarkerWithPosition marker, LatLng from, LatLng to) {
lock.lock();
AnimationTask animationTask = new AnimationTask(marker, from, to);
animationTask.removeOnAnimationComplete(mClusterManager.getMarkerManager());
mAnimationTasks.add(animationTask);
lock.unlock();
}
@Override
public void handleMessage(Message msg) {
if (!mListenerAdded) {
Looper.myQueue().addIdleHandler(this);
mListenerAdded = true;
}
removeMessages(BLANK);
lock.lock();
try {
// Perform up to 10 tasks at once.
// Consider only performing 10 remove tasks, not adds and animations.
// Removes are relatively slow and are much better when batched.
for (int i = 0; i < 10; i++) {
performNextTask();
}
if (!isBusy()) {
mListenerAdded = false;
Looper.myQueue().removeIdleHandler(this);
// Signal any other threads that are waiting.
busyCondition.signalAll();
} else {
// Sometimes the idle queue may not be called - schedule up some work regardless
// of whether the UI thread is busy or not.
// TODO: try to remove this.
sendEmptyMessageDelayed(BLANK, 10);
}
} finally {
lock.unlock();
}
}
/**
* Perform the next task. Prioritise any on-screen work.
*/
@TargetApi(Build.VERSION_CODES.HONEYCOMB)
private void performNextTask() {
if (!mOnScreenRemoveMarkerTasks.isEmpty()) {
removeMarker(mOnScreenRemoveMarkerTasks.poll());
} else if (!mAnimationTasks.isEmpty()) {
mAnimationTasks.poll().perform();
} else if (!mOnScreenCreateMarkerTasks.isEmpty()) {
mOnScreenCreateMarkerTasks.poll().perform(this);
} else if (!mCreateMarkerTasks.isEmpty()) {
mCreateMarkerTasks.poll().perform(this);
} else if (!mRemoveMarkerTasks.isEmpty()) {
removeMarker(mRemoveMarkerTasks.poll());
}
}
private void removeMarker(Marker m) {
Cluster<T> cluster = mMarkerToCluster.get(m);
mClusterToMarker.remove(cluster);
mMarkerCache.remove(m);
mMarkerToCluster.remove(m);
mClusterManager.getMarkerManager().remove(m);
}
/**
* @return true if there is still work to be processed.
*/
public boolean isBusy() {
try {
lock.lock();
return !(mCreateMarkerTasks.isEmpty() && mOnScreenCreateMarkerTasks.isEmpty() &&
mOnScreenRemoveMarkerTasks.isEmpty() && mRemoveMarkerTasks.isEmpty() &&
mAnimationTasks.isEmpty()
);
} finally {
lock.unlock();
}
}
/**
* Blocks the calling thread until all work has been processed.
*/
public void waitUntilFree() {
while (isBusy()) {
// Sometimes the idle queue may not be called - schedule up some work regardless
// of whether the UI thread is busy or not.
// TODO: try to remove this.
sendEmptyMessage(BLANK);
lock.lock();
try {
if (isBusy()) {
busyCondition.await();
}
} catch (InterruptedException e) {
throw new RuntimeException(e);
} finally {
lock.unlock();
}
}
}
@Override
public boolean queueIdle() {
// When the UI is not busy, schedule some work.
sendEmptyMessage(BLANK);
return true;
}
}
/**
* A cache of markers representing individual ClusterItems.
*/
private static class MarkerCache<T> {
private Map<T, Marker> mCache = new HashMap<T, Marker>();
private Map<Marker, T> mCacheReverse = new HashMap<Marker, T>();
public Marker get(T item) {
return mCache.get(item);
}
public T get(Marker m) {
return mCacheReverse.get(m);
}
public void put(T item, Marker m) {
mCache.put(item, m);
mCacheReverse.put(m, item);
}
public void remove(Marker m) {
T item = mCacheReverse.get(m);
mCacheReverse.remove(m);
mCache.remove(item);
}
}
/**
* Called before the marker for a ClusterItem is added to the map.
*/
protected void onBeforeClusterItemRendered(T item, MarkerOptions markerOptions) {
}
/**
* Called before the marker for a Cluster is added to the map.
* The default implementation draws a circle with a rough count of the number of items.
*/
protected void onBeforeClusterRendered(Cluster<T> cluster, MarkerOptions markerOptions) {
int bucket = getBucket(cluster);
BitmapDescriptor descriptor = mIcons.get(bucket);
if (descriptor == null) {
mColoredCircleBackground.getPaint().setColor(getColor(bucket));
descriptor = BitmapDescriptorFactory.fromBitmap(mIconGenerator.makeIcon(getClusterText(bucket)));
mIcons.put(bucket, descriptor);
}
// TODO: consider adding anchor(.5, .5) (Individual markers will overlap more often)
markerOptions.icon(descriptor);
}
/**
* Called after the marker for a Cluster has been added to the map.
*/
protected void onClusterRendered(Cluster<T> cluster, Marker marker) {
}
/**
* Called after the marker for a ClusterItem has been added to the map.
*/
protected void onClusterItemRendered(T clusterItem, Marker marker) {
}
/**
* Get the marker from a ClusterItem
* @param clusterItem the ClusterItem whose marker you want to obtain
* @return the marker for the given ClusterItem, or null if it does not exist
*/
public Marker getMarker(T clusterItem) {
return mMarkerCache.get(clusterItem);
}
/**
* Get the ClusterItem from a marker
* @param marker the marker whose ClusterItem you want to obtain
* @return the ClusterItem for the given marker, or null if it does not exist
*/
public T getClusterItem(Marker marker) {
return mMarkerCache.get(marker);
}
/**
* Get the marker from a Cluster
* @param cluster the Cluster whose marker you want to obtain
* @return the marker for the given cluster, or null if it does not exist
*/
public Marker getMarker(Cluster<T> cluster) {
return mClusterToMarker.get(cluster);
}
/**
* Get the Cluster from a marker
* @param marker the marker whose Cluster you want to obtain
* @return the Cluster for the given marker, or null if it does not exist
*/
public Cluster<T> getCluster(Marker marker) {
return mMarkerToCluster.get(marker);
}
/**
* Creates markerWithPosition(s) for a particular cluster, animating it if necessary.
*/
private class CreateMarkerTask {
private final Cluster<T> cluster;
private final Set<MarkerWithPosition> newMarkers;
private final LatLng animateFrom;
/**
* @param c the cluster to render.
* @param markersAdded a collection of markers to append any created markers.
* @param animateFrom the location to animate the markerWithPosition from, or null if no
* animation is required.
*/
public CreateMarkerTask(Cluster<T> c, Set<MarkerWithPosition> markersAdded, LatLng animateFrom) {
this.cluster = c;
this.newMarkers = markersAdded;
this.animateFrom = animateFrom;
}
private void perform(MarkerModifier markerModifier) {
// Don't show small clusters. Render the markers inside, instead.
if (!shouldRenderAsCluster(cluster)) {
for (T item : cluster.getItems()) {
Marker marker = mMarkerCache.get(item);
MarkerWithPosition markerWithPosition;
if (marker == null) {
MarkerOptions markerOptions = new MarkerOptions();
if (animateFrom != null) {
markerOptions.position(animateFrom);
} else {
markerOptions.position(item.getPosition());
}
if (item.getTitle() != null && item.getSnippet() != null) {
markerOptions.title(item.getTitle());
markerOptions.snippet(item.getSnippet());
} else if (item.getSnippet() != null) {
markerOptions.title(item.getSnippet());
} else if (item.getTitle() != null) {
markerOptions.title(item.getTitle());
}
onBeforeClusterItemRendered(item, markerOptions);
marker = mClusterManager.getMarkerCollection().addMarker(markerOptions);
markerWithPosition = new MarkerWithPosition(marker);
mMarkerCache.put(item, marker);
if (animateFrom != null) {
markerModifier.animate(markerWithPosition, animateFrom, item.getPosition());
}
} else {
markerWithPosition = new MarkerWithPosition(marker);
}
onClusterItemRendered(item, marker);
newMarkers.add(markerWithPosition);
}
return;
}
Marker marker = mClusterToMarker.get(cluster);
MarkerWithPosition markerWithPosition;
if (marker == null) {
MarkerOptions markerOptions = new MarkerOptions().
position(animateFrom == null ? cluster.getPosition() : animateFrom);
onBeforeClusterRendered(cluster, markerOptions);
marker = mClusterManager.getClusterMarkerCollection().addMarker(markerOptions);
mMarkerToCluster.put(marker, cluster);
mClusterToMarker.put(cluster, marker);
markerWithPosition = new MarkerWithPosition(marker);
if (animateFrom != null) {
markerModifier.animate(markerWithPosition, animateFrom, cluster.getPosition());
}
} else {
markerWithPosition = new MarkerWithPosition(marker);
}
onClusterRendered(cluster, marker);
newMarkers.add(markerWithPosition);
}
}
/**
* A Marker and its position. Marker.getPosition() must be called from the UI thread, so this
* object allows lookup from other threads.
*/
private static class MarkerWithPosition {
private final Marker marker;
private LatLng position;
private MarkerWithPosition(Marker marker) {
this.marker = marker;
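// Snapshot the position at construction time so that later reads (e.g. from background threads)
// do not need to call Marker.getPosition(), which must happen on the UI thread.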
position = marker.getPosition();
}
@Override
public boolean equals(Object other) {
if (other instanceof MarkerWithPosition) {
return marker.equals(((MarkerWithPosition) other).marker);
}
return false;
}
@Override
public int hashCode() {
return marker.hashCode();
}
}
private static final TimeInterpolator ANIMATION_INTERP = new DecelerateInterpolator();
/**
* Animates a markerWithPosition from one position to another. TODO: improve performance for
* slow devices (e.g. Nexus S).
*/
@TargetApi(Build.VERSION_CODES.HONEYCOMB_MR1)
private class AnimationTask extends AnimatorListenerAdapter implements ValueAnimator.AnimatorUpdateListener {
private final MarkerWithPosition markerWithPosition;
private final Marker marker;
private final LatLng from;
private final LatLng to;
private boolean mRemoveOnComplete;
private MarkerManager mMarkerManager;
private AnimationTask(MarkerWithPosition markerWithPosition, LatLng from, LatLng to) {
this.markerWithPosition = markerWithPosition;
this.marker = markerWithPosition.marker;
this.from = from;
this.to = to;
}
public void perform() {
ValueAnimator valueAnimator = ValueAnimator.ofFloat(0.0f, 1.0f);
valueAnimator.setInterpolator(ANIMATION_INTERP);
valueAnimator.addUpdateListener(this);
valueAnimator.addListener(this);
valueAnimator.start();
}
@Override
public void onAnimationEnd(Animator animation) {
if (mRemoveOnComplete) {
Cluster<T> cluster = mMarkerToCluster.get(marker);
mClusterToMarker.remove(cluster);
mMarkerCache.remove(marker);
mMarkerToCluster.remove(marker);
mMarkerManager.remove(marker);
}
markerWithPosition.position = to;
}
public void removeOnAnimationComplete(MarkerManager markerManager) {
mMarkerManager = markerManager;
mRemoveOnComplete = true;
}
@Override
public void onAnimationUpdate(ValueAnimator valueAnimator) {
float fraction = valueAnimator.getAnimatedFraction();
double lat = (to.latitude - from.latitude) * fraction + from.latitude;
double lngDelta = to.longitude - from.longitude;
// Take the shortest path across the 180th meridian.
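// Example: from.longitude = 179 and to.longitude = -179 gives lngDelta = -358; since |lngDelta| > 180
// it is wrapped to +2, so the marker animates 2 degrees east instead of 358 degrees west.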
if (Math.abs(lngDelta) > 180) {
lngDelta -= Math.signum(lngDelta) * 360;
}
double lng = lngDelta * fraction + from.longitude;
LatLng position = new LatLng(lat, lng);
marker.setPosition(position);
}
}
}
|
Prevent crashes: https://issuetracker.google.com/issues/35827242
|
library/src/com/google/maps/android/clustering/view/DefaultClusterRenderer.java
|
Prevent crashes: https://issuetracker.google.com/issues/35827242
|
<ide><path>library/src/com/google/maps/android/clustering/view/DefaultClusterRenderer.java
<ide> import com.google.maps.android.clustering.ClusterManager;
<ide> import com.google.maps.android.geometry.Point;
<ide> import com.google.maps.android.projection.SphericalMercatorProjection;
<add>import com.google.maps.android.ui.IconGenerator;
<ide> import com.google.maps.android.ui.SquareTextView;
<del>import com.google.maps.android.ui.IconGenerator;
<ide>
<ide> import java.util.ArrayList;
<ide> import java.util.Collections;
<del>import java.util.concurrent.ConcurrentHashMap;
<ide> import java.util.HashMap;
<ide> import java.util.LinkedList;
<ide> import java.util.List;
<ide> import java.util.Map;
<ide> import java.util.Queue;
<ide> import java.util.Set;
<add>import java.util.concurrent.ConcurrentHashMap;
<ide> import java.util.concurrent.locks.Condition;
<ide> import java.util.concurrent.locks.Lock;
<ide> import java.util.concurrent.locks.ReentrantLock;
<ide> final float zoomDelta = zoom - mZoom;
<ide>
<ide> final Set<MarkerWithPosition> markersToRemove = mMarkers;
<del> final LatLngBounds visibleBounds = mProjection.getVisibleRegion().latLngBounds;
<add> // Prevent crashes: https://issuetracker.google.com/issues/35827242
<add> LatLngBounds visibleBounds;
<add> try {
<add> visibleBounds = mProjection.getVisibleRegion().latLngBounds;
<add> } catch (Exception e) {
<add> e.printStackTrace();
<add> visibleBounds = LatLngBounds.builder()
<add> .include(new LatLng(0, 0))
<add> .build();
<add> }
<ide> // TODO: Add some padding, so that markers can animate in from off-screen.
<ide>
<ide> // Find all of the existing clusters that are on-screen. These are candidates for
|
|
Java
|
apache-2.0
|
fa2c990ab22e8ffdfbfca1a55a83eb489dcad73a
| 0 |
Wechat-Group/WxJava,Wechat-Group/WxJava
|
package me.chanjar.weixin.mp.bean.freepublish;
import com.google.gson.annotations.SerializedName;
import lombok.Data;
import lombok.NoArgsConstructor;
import me.chanjar.weixin.common.util.json.WxGsonBuilder;
import java.io.Serializable;
/**
* A single published article record.
*
* @author dragon
* @date 2021-10-23
*/
@NoArgsConstructor
@Data
public class WxMpFreePublishArticles implements Serializable {
private static final long serialVersionUID = -6435229818150835883L;
/**
* Title
*/
@SerializedName("title")
private String title;
/**
* Author
*/
@SerializedName("author")
private String author;
/**
* Digest of the article. Only single-article messages have a digest; for multi-article messages this field is empty.
*/
@SerializedName("digest")
private String digest;
/**
* Body content of the article. HTML tags are supported; must be under 20,000 characters and under 1 MB, and JavaScript is stripped.
*/
@SerializedName("content")
private String content;
/**
* Source URL of the article, i.e. the URL opened by the "Read the original" link.
*/
@SerializedName("content_source_url")
private String contentSourceUrl;
/**
* Media asset id of the article's cover image (must be a permanent MediaID).
*/
@SerializedName("thumb_media_id")
private String thumbMediaId;
/**
* Whether to show the cover image: 0 = false (not shown), 1 = true (shown, the default).
*/
@SerializedName("show_cover_pic")
private Integer showCoverPic;
/**
* Whether comments are enabled: 0 = disabled (the default), 1 = enabled.
*/
@SerializedName("need_open_comment")
private Integer needOpenComment;
/**
* Whether only followers may comment: 0 = everyone may comment (the default), 1 = followers only.
*/
@SerializedName("only_fans_can_comment")
private Integer onlyFansCanComment;
/**
* Cover image URL of the article.
*/
@SerializedName("thumb_url")
private String thumbUrl;
/*
* ===== The fields above mirror the draft-box content fields; kept as a separate copy for future extension =====
*/
/**
* Temporary URL of the draft.
*/
@SerializedName("url")
private String url;
/**
* Whether this article has been deleted.
*/
@SerializedName("is_deleted")
private Boolean isDeleted;
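/**
* Parses a single published-article record from the free-publish API's JSON response.
*/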
public static WxMpFreePublishArticles fromJson(String json) {
return WxGsonBuilder.create().fromJson(json, WxMpFreePublishArticles.class);
}
}
|
weixin-java-mp/src/main/java/me/chanjar/weixin/mp/bean/freepublish/WxMpFreePublishArticles.java
|
package me.chanjar.weixin.mp.bean.freepublish;
import com.google.gson.annotations.SerializedName;
import lombok.Data;
import lombok.NoArgsConstructor;
import me.chanjar.weixin.common.util.json.WxGsonBuilder;
import java.io.Serializable;
/**
* A single published article record.
*
* @author dragon
* @date 2021-10-23
*/
@NoArgsConstructor
@Data
public class WxMpFreePublishArticles implements Serializable {
private static final long serialVersionUID = -6435229818150835883L;
/**
* Title
*/
@SerializedName("title")
private String title;
/**
* Author
*/
@SerializedName("author")
private String author;
/**
* Digest of the article. Only single-article messages have a digest; for multi-article messages this field is empty.
*/
@SerializedName("digest")
private String digest;
/**
* Body content of the article. HTML tags are supported; must be under 20,000 characters and under 1 MB, and JavaScript is stripped.
*/
@SerializedName("content")
private String content;
/**
* Source URL of the article, i.e. the URL opened by the "Read the original" link.
*/
@SerializedName("content_source_url")
private String contentSourceUrl;
/**
* Media asset id of the article's cover image (must be a permanent MediaID).
*/
@SerializedName("thumb_media_id")
private String thumbMediaId;
/**
* Whether to show the cover image: 0 = false (not shown), 1 = true (shown, the default).
*/
@SerializedName("show_cover_pic")
private Integer showCoverPic;
/**
* Whether comments are enabled: 0 = disabled (the default), 1 = enabled.
*/
@SerializedName("need_open_comment")
private Integer needOpenComment;
/**
* Whether only followers may comment: 0 = everyone may comment (the default), 1 = followers only.
*/
@SerializedName("only_fans_can_comment")
private Integer onlyFansCanComment;
/*
* ===== The fields above mirror the draft-box content fields; kept as a separate copy for future extension =====
*/
/**
* Temporary URL of the draft.
*/
@SerializedName("url")
private String url;
/**
* Whether this article has been deleted.
*/
@SerializedName("is_deleted")
private Boolean isDeleted;
public static WxMpFreePublishArticles fromJson(String json) {
return WxGsonBuilder.create().fromJson(json, WxMpFreePublishArticles.class);
}
}
|
:art: #2434 [Official Account] Add thumb_url to the article info returned by the WeChat publishing capability API
|
weixin-java-mp/src/main/java/me/chanjar/weixin/mp/bean/freepublish/WxMpFreePublishArticles.java
|
:art: #2434 [Official Account] Add thumb_url to the article info returned by the WeChat publishing capability API
|
<ide><path>weixin-java-mp/src/main/java/me/chanjar/weixin/mp/bean/freepublish/WxMpFreePublishArticles.java
<ide> @SerializedName("only_fans_can_comment")
<ide> private Integer onlyFansCanComment;
<ide>
<add> /**
<add>     * Cover image URL of the article.
<add> */
<add> @SerializedName("thumb_url")
<add> private String thumbUrl;
<add>
<ide> /*
<ide> * ===== 上面的参数,就是草稿箱的内容的字段,为了后续扩展,单独写一份====
<ide> */
|
|
JavaScript
|
mit
|
b2dcb1fbdb242c2efca3133a233a57dce6e12097
| 0 |
achambers/ember-caliper,achambers/ember-caliper
|
/* jshint node: true */
'use strict';
module.exports = {
name: 'ember-caliper',
included: function(app) {
this._super.included(app);
app.import(app.bowerDirectory + '/caliper-ember/dist/caliper.ember.min.js');
},
contentFor: function(type, config) {
if (type === 'caliper') {
var caliper = config.caliper || {};
var options = {config: caliper}
var result = '<script type="text/javascript">var Caliper = ' + JSON.stringify(options) + ';</script>';
return result;
}
}
};
|
index.js
|
/* jshint node: true */
'use strict';
module.exports = {
name: 'ember-caliper'
};
|
Now we have a {{content-for 'caliper'}} tag
This tag will inject the caliper config into the HEAD of the document.
The addon will also include the caliper.js file once it has been
installed with bower.
|
index.js
|
Now we have a {{content-for 'caliper'}} tag
|
<ide><path>index.js
<ide> 'use strict';
<ide>
<ide> module.exports = {
<del> name: 'ember-caliper'
<add> name: 'ember-caliper',
<add>
<add> included: function(app) {
<add> this._super.included(app);
<add>
<add> app.import(app.bowerDirectory + '/caliper-ember/dist/caliper.ember.min.js');
<add> },
<add>
<add> contentFor: function(type, config) {
<add> if (type === 'caliper') {
<add> var caliper = config.caliper || {};
<add> var options = {config: caliper}
<add> var result = '<script type="text/javascript">var Caliper = ' + JSON.stringify(options) + ';</script>';
<add>
<add> return result;
<add> }
<add> }
<ide> };
|
|
Java
|
apache-2.0
|
98e4fc4decac2ab90978142ee78f7b77678dc732
| 0 |
ecarm002/incubator-asterixdb,heriram/incubator-asterixdb,waans11/incubator-asterixdb,apache/incubator-asterixdb,waans11/incubator-asterixdb,ty1er/incubator-asterixdb,apache/incubator-asterixdb,ecarm002/incubator-asterixdb,heriram/incubator-asterixdb,apache/incubator-asterixdb,ty1er/incubator-asterixdb,ty1er/incubator-asterixdb,ty1er/incubator-asterixdb,waans11/incubator-asterixdb,heriram/incubator-asterixdb,ecarm002/incubator-asterixdb,ecarm002/incubator-asterixdb,kisskys/incubator-asterixdb,kisskys/incubator-asterixdb,kisskys/incubator-asterixdb,ecarm002/incubator-asterixdb,heriram/incubator-asterixdb,ty1er/incubator-asterixdb,heriram/incubator-asterixdb,waans11/incubator-asterixdb,apache/incubator-asterixdb,apache/incubator-asterixdb,heriram/incubator-asterixdb,ty1er/incubator-asterixdb,waans11/incubator-asterixdb,apache/incubator-asterixdb,kisskys/incubator-asterixdb,apache/incubator-asterixdb,waans11/incubator-asterixdb,kisskys/incubator-asterixdb,kisskys/incubator-asterixdb,waans11/incubator-asterixdb,kisskys/incubator-asterixdb,ecarm002/incubator-asterixdb,ecarm002/incubator-asterixdb,heriram/incubator-asterixdb
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hyracks.algebricks.runtime.operators.std;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.runtime.operators.base.AbstractOneInputOneOutputOneFramePushRuntime;
import org.apache.hyracks.algebricks.runtime.operators.base.AbstractOneInputOneOutputRuntimeFactory;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
public class StreamProjectRuntimeFactory extends AbstractOneInputOneOutputRuntimeFactory {
private static final long serialVersionUID = 1L;
private final boolean flushFramesRapidly;
public StreamProjectRuntimeFactory(int[] projectionList, boolean flushFramesRapidly) {
super(projectionList);
this.flushFramesRapidly = flushFramesRapidly;
}
public StreamProjectRuntimeFactory(int[] projectionList) {
this(projectionList, false);
}
@Override
public String toString() {
return "stream-project " + Arrays.toString(projectionList);
}
@Override
public AbstractOneInputOneOutputOneFramePushRuntime createOneOutputPushRuntime(final IHyracksTaskContext ctx)
throws AlgebricksException {
return new AbstractOneInputOneOutputOneFramePushRuntime() {
private boolean first = true;
@Override
public void open() throws HyracksDataException {
writer.open();
if (first) {
first = false;
initAccessAppend(ctx);
}
}
@Override
public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
// what if numOfTuples is 0?
tAccess.reset(buffer);
int nTuple = tAccess.getTupleCount();
if (nTuple == 0) {
appender.flush(writer);
} else {
int t = 0;
if (nTuple > 1) {
for (; t < nTuple - 1; t++) {
appendProjectionToFrame(t, projectionList);
}
}
if (flushFramesRapidly) {
// Whenever all the tuples in the incoming frame have been consumed, the project operator
// will push its frame to the next operator; i.e., it won't wait until the frame gets full.
appendProjectionToFrame(t, projectionList, true);
} else {
appendProjectionToFrame(t, projectionList);
}
}
}
@Override
public void flush() throws HyracksDataException {
appender.flush(writer);
}
};
}
}
|
algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/std/StreamProjectRuntimeFactory.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hyracks.algebricks.runtime.operators.std;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.runtime.operators.base.AbstractOneInputOneOutputOneFramePushRuntime;
import org.apache.hyracks.algebricks.runtime.operators.base.AbstractOneInputOneOutputRuntimeFactory;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
public class StreamProjectRuntimeFactory extends AbstractOneInputOneOutputRuntimeFactory {
private static final long serialVersionUID = 1L;
private final boolean flushFramesRapidly;
public StreamProjectRuntimeFactory(int[] projectionList, boolean flushFramesRapidly) {
super(projectionList);
this.flushFramesRapidly = flushFramesRapidly;
}
public StreamProjectRuntimeFactory(int[] projectionList) {
this(projectionList, false);
}
@Override
public String toString() {
return "stream-project " + Arrays.toString(projectionList);
}
@Override
public AbstractOneInputOneOutputOneFramePushRuntime createOneOutputPushRuntime(final IHyracksTaskContext ctx)
throws AlgebricksException {
return new AbstractOneInputOneOutputOneFramePushRuntime() {
private boolean first = true;
@Override
public void open() throws HyracksDataException {
writer.open();
if (first) {
first = false;
initAccessAppend(ctx);
}
}
@Override
public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
tAccess.reset(buffer);
int nTuple = tAccess.getTupleCount();
int t = 0;
if (nTuple > 1) {
for (; t < nTuple - 1; t++) {
appendProjectionToFrame(t, projectionList);
}
}
if (flushFramesRapidly) {
// Whenever all the tuples in the incoming frame have been consumed, the project operator
// will push its frame to the next operator; i.e., it won't wait until the frame gets full.
appendProjectionToFrame(t, projectionList, true);
} else {
appendProjectionToFrame(t, projectionList);
}
}
@Override
public void flush() throws HyracksDataException {
appender.flush(writer);
}
};
}
}
|
Allow Project Runtime to Pass Through an Empty Frame
Before this change, the project runtime expected at least a single record.
Now it can also process an empty frame.
Change-Id: I87dc6eb83a748f7f91610e7d11ebaec9be914e29
Reviewed-on: https://asterix-gerrit.ics.uci.edu/634
Tested-by: Jenkins <d95b56ce41a2e1ac4cecdd398defd7414407cc08@fulliautomatix.ics.uci.edu>
Reviewed-by: abdullah alamoudi <[email protected]>
|
algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/std/StreamProjectRuntimeFactory.java
|
Allow Project Runtime to Pass Through an Empty Frame
|
<ide><path>algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/std/StreamProjectRuntimeFactory.java
<ide>
<ide> @Override
<ide> public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
<add> // what if numOfTuples is 0?
<ide> tAccess.reset(buffer);
<ide> int nTuple = tAccess.getTupleCount();
<del>
<del> int t = 0;
<del> if (nTuple > 1) {
<del> for (; t < nTuple - 1; t++) {
<add> if (nTuple == 0) {
<add> appender.flush(writer);
<add> } else {
<add> int t = 0;
<add> if (nTuple > 1) {
<add> for (; t < nTuple - 1; t++) {
<add> appendProjectionToFrame(t, projectionList);
<add> }
<add> }
<add> if (flushFramesRapidly) {
<add> // Whenever all the tuples in the incoming frame have been consumed, the project operator
<add> // will push its frame to the next operator; i.e., it won't wait until the frame gets full.
<add> appendProjectionToFrame(t, projectionList, true);
<add> } else {
<ide> appendProjectionToFrame(t, projectionList);
<ide> }
<ide> }
<del> if (flushFramesRapidly) {
<del> // Whenever all the tuples in the incoming frame have been consumed, the project operator
<del> // will push its frame to the next operator; i.e., it won't wait until the frame gets full.
<del> appendProjectionToFrame(t, projectionList, true);
<del> } else {
<del> appendProjectionToFrame(t, projectionList);
<del> }
<del>
<ide> }
<ide>
<ide> @Override
|
|
Java
|
agpl-3.0
|
178c789a318402951ff8307efccbb0b54009bfe5
| 0 |
smith750/kfs,ua-eas/kfs-devops-automation-fork,kkronenb/kfs,smith750/kfs,ua-eas/kfs,ua-eas/kfs-devops-automation-fork,quikkian-ua-devops/will-financials,kuali/kfs,kuali/kfs,ua-eas/kfs-devops-automation-fork,UniversityOfHawaii/kfs,UniversityOfHawaii/kfs,kkronenb/kfs,quikkian-ua-devops/kfs,UniversityOfHawaii/kfs,bhutchinson/kfs,UniversityOfHawaii/kfs,kuali/kfs,quikkian-ua-devops/kfs,quikkian-ua-devops/will-financials,kuali/kfs,ua-eas/kfs-devops-automation-fork,kkronenb/kfs,quikkian-ua-devops/kfs,quikkian-ua-devops/will-financials,quikkian-ua-devops/will-financials,quikkian-ua-devops/will-financials,ua-eas/kfs,ua-eas/kfs-devops-automation-fork,bhutchinson/kfs,bhutchinson/kfs,quikkian-ua-devops/kfs,quikkian-ua-devops/kfs,smith750/kfs,smith750/kfs,bhutchinson/kfs,UniversityOfHawaii/kfs,ua-eas/kfs,kuali/kfs,kkronenb/kfs,ua-eas/kfs,quikkian-ua-devops/kfs,ua-eas/kfs,quikkian-ua-devops/will-financials
|
/*
* Copyright 2007 The Kuali Foundation.
*
* Licensed under the Educational Community License, Version 1.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl1.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.kfs.service.impl;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.kuali.core.KualiModule;
import org.kuali.core.bo.BusinessObject;
import org.kuali.core.bo.Parameter;
import org.kuali.core.bo.ParameterDetailType;
import org.kuali.core.datadictionary.BusinessObjectEntry;
import org.kuali.core.datadictionary.DocumentEntry;
import org.kuali.core.datadictionary.TransactionalDocumentEntry;
import org.kuali.core.document.TransactionalDocument;
import org.kuali.core.service.BusinessObjectService;
import org.kuali.core.service.DataDictionaryService;
import org.kuali.core.service.KualiModuleService;
import org.kuali.core.util.cache.MethodCacheInterceptor;
import org.kuali.core.util.cache.MethodCacheNoCopyInterceptor;
import org.kuali.core.util.spring.CacheNoCopy;
import org.kuali.core.util.spring.Cached;
import org.kuali.kfs.batch.Step;
import org.kuali.kfs.context.SpringContext;
import org.kuali.kfs.service.ParameterEvaluator;
import org.kuali.kfs.service.ParameterService;
import org.kuali.kfs.service.impl.ParameterConstants.COMPONENT;
import org.kuali.kfs.service.impl.ParameterConstants.NAMESPACE;
import org.kuali.rice.kns.util.KNSConstants;
/**
* See ParameterService. The componentClass must be the business object, document, or step class that the parameter is associated
* with. Implementations of this class know how to translate that to a namespace (for ParameterServiceImpl, determine what module
* the Class is associated with by parsing the package) and detail type (for ParameterServiceImpl, document Class --> use simple
* class name minus the word Document / business object Class --> use simple class name, batch step class --> use the simple class
* name). In cases where the parameter is applicable to all documents, all lookups, all batch steps, or all components in a
* particular module, you should pass in the appropriate constant class in ParameterConstants for the component Class (e.g. all
* purchasing documents = PURCHASING_DOCUMENT.class, all purchasing lookups = PURCHASING_LOOKUP.class, all purchasing batch steps =
* PURCHASING_BATCH.class, and all purchasing components = PURCHASING_ALL.class). In addition, certain methods take
* constrainingValue and constrainedValue Strings. The constrainedValue is the value that you want to compare to the Parameter
* value, and the constrainingValue is used for complex parameters that limit one field value based on the value of another field,
* e.g VALID_OBJECT_LEVELS_BY_OBJECT_TYPE.
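* <p>
* Illustrative usage sketch (the component classes and parameter names below are hypothetical examples, not
* definitions taken from this class):
* <pre>
* ParameterService parameterService = SpringContext.getBean(ParameterService.class);
* // simple value and Y/N indicator lookups
* String value = parameterService.getParameterValue(SomePurchasingDocument.class, "SOME_PARAMETER_NAME");
* boolean enabled = parameterService.getIndicatorParameter(SomePurchasingDocument.class, "SOME_INDICATOR_PARAMETER");
* // values keyed by a constraining value, for parameter text like "typeA=level1,level2;typeB=level3"
* List<String> levels = parameterService.getParameterValues(SomeBusinessObject.class, "VALID_LEVELS_BY_TYPE", "typeA");
* </pre>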
*/
public class ParameterServiceImpl implements ParameterService {
private static final org.apache.log4j.Logger LOG = org.apache.log4j.Logger.getLogger(ParameterServiceImpl.class);
private static List<ParameterDetailType> components = new ArrayList<ParameterDetailType>();
private DataDictionaryService dataDictionaryService;
private KualiModuleService moduleService;
private BusinessObjectService businessObjectService;
private ThreadLocal<Map<String,Parameter>> parameterCache = new ThreadLocal<Map<String,Parameter>>();
/**
* @see org.kuali.kfs.service.ParameterService#parameterExists(java.lang.Class componentClass, java.lang.String parameterName)
*/
public boolean parameterExists(Class componentClass, String parameterName) {
return getParameterWithoutExceptions(getNamespace(componentClass), getDetailType(componentClass), parameterName) != null;
}
/**
* This method provides a convenient way to access the value of indicator parameters with Y/N values. Y is translated to true
* and N is translated to false.
*
* @param componentClass
* @param parameterName
* @return boolean value of Yes/No indicator parameter
*/
public boolean getIndicatorParameter(Class componentClass, String parameterName) {
return "Y".equals(getParameter(componentClass, parameterName).getParameterValue());
}
/**
* @see org.kuali.kfs.service.ParameterService#getParameterValue(java.lang.Class componentClass, java.lang.String parameterName)
*/
public String getParameterValue(Class componentClass, String parameterName) {
return getParameter(componentClass, parameterName).getParameterValue();
}
/**
* This will look for constrainingValue=<value to return> within the parameter text and return that if it is found. Otherwise,
* it will return null. Note that if constrainingValue=value1,value2... (commas specific to the ParameterServiceImpl
* implementation) is found it will still return null, because calling this method states the assumption that there is only one
* value within the parameter text that corresponds to the constraining value.
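* For example (illustrative values): with parameter text "typeA=1;typeB=2,3", a constrainingValue of "typeA"
* returns "1", while "typeB" returns null (two values) and "typeC" returns null (no entry).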
*
* @param componentClass
* @param parameterName
* @param constrainingValue
* @return derived value String or null
*/
public String getParameterValue(Class componentClass, String parameterName, String constrainingValue) {
List<String> parameterValues = getParameterValues(componentClass, parameterName, constrainingValue);
if (parameterValues.size() == 1) {
return parameterValues.get(0);
}
return null;
}
/**
* This method can be used to parse the value of a parameter by splitting on a semi-colon.
*
* @param componentClass
* @param parameterName
* @return parsed List of String parameter values
*/
public List<String> getParameterValues(Class componentClass, String parameterName) {
return Collections.unmodifiableList( getParameterValues(getParameter(componentClass, parameterName)) );
}
/**
* This method looks for constrainingValue=<some text> within the parameter text and splits that text on a comma to generate
* the List to return.
*
* @param componentClass
* @param parameterName
* @param constrainingValue
* @return derived values List<String> or an empty list if no values are found
*/
public List<String> getParameterValues(Class componentClass, String parameterName, String constrainingValue) {
return Collections.unmodifiableList( getParameterValues(getParameter(componentClass, parameterName), constrainingValue) );
}
/**
* This method will return an instance of the parameterEvaluator bean defined in Spring, initialized with the Parameter
* corresponding to the specified componentClass and parameterName and the values of the Parameter.
*
* @param componentClass
* @param parameterName
* @return ParameterEvaluator instance initialized with the Parameter corresponding to the specified componentClass and
* parameterName and the values of the Parameter
*/
public ParameterEvaluator getParameterEvaluator(Class componentClass, String parameterName) {
return getParameterEvaluator(getParameter(componentClass, parameterName));
}
/**
* This method will return an instance of the parameterEvaluator bean defined in Spring, initialized with the Parameter
* corresponding to the specified componentClass and parameterName, the values of the Parameter, the knowledge of whether the
* values are allowed or denied, and the constrainedValue.
*
* @param componentClass
* @param parameterName
* @return ParameterEvaluator instance initialized with the Parameter corresponding to the specified componentClass and
* parameterName, the values of the Parameter, the knowledge of whether the values are allowed or denied, and the
* constrainedValue
*/
public ParameterEvaluator getParameterEvaluator(Class componentClass, String parameterName, String constrainedValue) {
return getParameterEvaluator(getParameter(componentClass, parameterName), constrainedValue);
}
/**
* This method will return an instance of the parameterEvaluator bean defined in Spring, initialized with the Parameter
* corresponding to the specified componentClass and parameterName, the values of the Parameter that correspond to the specified
* constrainingValue, the knowledge of whether the values are allowed or denied, and the constrainedValue.
*
* @param componentClass
* @param parameterName
* @return ParameterEvaluator instance initialized with the Parameter corresponding to the specified componentClass and
* parameterName, the values of the Parameter that correspond to the specified constrainingValue, the knowledge of
* whether the values are allowed or denied, and the constrainedValue
*/
public ParameterEvaluator getParameterEvaluator(Class componentClass, String parameterName, String constrainingValue, String constrainedValue) {
return getParameterEvaluator(getParameter(componentClass, parameterName), constrainingValue, constrainedValue);
}
/**
* This method will return an instance of the parameterEvaluator bean defined in Spring, initialized with the Parameter
* corresponding to the specified componentClass and allowParameterName or to the specified componentClass and denyParameterName
* (depending on which restricts based on the constraining value) or an instance of AlwaysSucceedParameterEvaluatorImpl if
* neither restricts, the values of the Parameter that correspond to the specified constrainingValue, the knowledge of whether
* the values are allowed or denied, and the constrainedValue.
*
* @param componentClass
* @param allowParameterName
* @param denyParameterName
* @param constrainingValue
* @param constrainedValue
* @return AlwaysSucceedParameterEvaluatorImpl or ParameterEvaluator instance initialized with the Parameter that corresponds to
* the constrainingValue restriction, the values of the Parameter that correspond to the specified constrainingValue,
* the knowledge of whether the values are allowed or denied, and the constrainedValue
*/
public ParameterEvaluator getParameterEvaluator(Class componentClass, String allowParameterName, String denyParameterName, String constrainingValue, String constrainedValue) {
Parameter allowParameter = getParameter(componentClass, allowParameterName);
Parameter denyParameter = getParameter(componentClass, denyParameterName);
if (!getParameterValues(allowParameter, constrainingValue).isEmpty() && !getParameterValues(denyParameter, constrainingValue).isEmpty()) {
throw new IllegalArgumentException("The getParameterEvaluator(Class componentClass, String allowParameterName, String denyParameterName, String constrainingValue, String constrainedValue) method of ParameterServiceImpl does not facilitate evaluation of combination allow and deny parameters that both have values for the constraining value: " + allowParameterName + " / " + denyParameterName + " / " + constrainingValue);
}
if (getParameterValues(allowParameter, constrainingValue).isEmpty() && getParameterValues(denyParameter, constrainingValue).isEmpty()) {
return AlwaysSucceedParameterEvaluatorImpl.getInstance();
}
return getParameterEvaluator(getParameterValues(denyParameter, constrainingValue).isEmpty() ? allowParameter : denyParameter, constrainingValue, constrainedValue);
}
/**
* @see org.kuali.kfs.service.ParameterService#getParameterEvaluators(java.lang.Class componentClass, java.lang.String
* constrainedValue)
*/
public List<ParameterEvaluator> getParameterEvaluators(Class componentClass, String constrainedValue) {
List<ParameterEvaluator> parameterEvaluators = new ArrayList<ParameterEvaluator>();
for (Parameter parameter : getParameters(componentClass)) {
parameterEvaluators.add(getParameterEvaluator(parameter, constrainedValue));
}
return parameterEvaluators;
}
/**
* @see org.kuali.kfs.service.ParameterService#getParameterEvaluators(java.lang.Class componentClass, java.lang.String
* constrainingValue, java.lang.String constrainedValue)
*/
public List<ParameterEvaluator> getParameterEvaluators(Class componentClass, String constrainingValue, String constrainedValue) {
List<ParameterEvaluator> parameterEvaluators = new ArrayList<ParameterEvaluator>();
for (Parameter parameter : getParameters(componentClass)) {
parameterEvaluators.add(getParameterEvaluator(parameter, constrainingValue, constrainedValue));
}
return parameterEvaluators;
}
/**
* This method derives ParameterDetailTypes from the DataDictionary for all BusinessObjects and Documents and from Spring for
* all batch Steps.
*
* @return List<ParameterDetailType> containing the detail types derived from the data dictionary and Spring
*/
public List<ParameterDetailType> getNonDatabaseDetailTypes() {
if (components.isEmpty()) {
Map<String, ParameterDetailType> uniqueParameterDetailTypeMap = new HashMap<String, ParameterDetailType>();
//dataDictionaryService.getDataDictionary().forceCompleteDataDictionaryLoad();
for (BusinessObjectEntry businessObjectEntry : dataDictionaryService.getDataDictionary().getBusinessObjectEntries().values()) {
ParameterDetailType parameterDetailType = getParameterDetailType(businessObjectEntry.getBusinessObjectClass());
try {
uniqueParameterDetailTypeMap.put(parameterDetailType.getParameterDetailTypeCode(), parameterDetailType);
}
catch (Exception e) {
LOG.error("The getDataDictionaryAndSpringComponents method of ParameterUtils encountered an exception while trying to create the detail type for business object class: " + businessObjectEntry.getBusinessObjectClass(), e);
}
}
for (DocumentEntry documentEntry : dataDictionaryService.getDataDictionary().getDocumentEntries().values()) {
if (documentEntry instanceof TransactionalDocumentEntry) {
ParameterDetailType parameterDetailType = getParameterDetailType(documentEntry.getDocumentClass());
try {
uniqueParameterDetailTypeMap.put(parameterDetailType.getParameterDetailTypeCode(), parameterDetailType);
}
catch (Exception e) {
LOG.error("The getDataDictionaryAndSpringComponents method of ParameterUtils encountered an exception while trying to create the detail type for transactional document class: " + documentEntry.getDocumentClass(), e);
}
}
}
for (Step step : SpringContext.getBeansOfType(Step.class).values()) {
ParameterDetailType parameterDetailType = getParameterDetailType(step.getClass());
try {
uniqueParameterDetailTypeMap.put(parameterDetailType.getParameterDetailTypeCode(), parameterDetailType);
}
catch (Exception e) {
LOG.error("The getDataDictionaryAndSpringComponents method of ParameterUtils encountered an exception while trying to create the detail type for step class: " + step.getClass(), e);
}
}
components.addAll(uniqueParameterDetailTypeMap.values());
}
return Collections.unmodifiableList(components);
}
/**
* @see org.kuali.kfs.service.ParameterService#setParameterForTesting(java.lang.Class componentClass, java.lang.String
* parameterName, java.lang.String parameterText)
*/
public void setParameterForTesting(Class componentClass, String parameterName, String parameterText) {
Parameter parameter = (Parameter) getParameter(componentClass, parameterName);
parameter.setParameterValue(parameterText);
SpringContext.getBean(BusinessObjectService.class).save(parameter);
try {
removeCachedMethod(ParameterService.class.getMethod("getParameterValue", new Class[] { Class.class, String.class }), new Object[] { componentClass, parameterName });
removeCachedMethod(ParameterService.class.getMethod("getIndicatorParameter", new Class[] { Class.class, String.class }), new Object[] { componentClass, parameterName });
removeCachedMethod(ParameterService.class.getMethod("getParameterValues", new Class[] { Class.class, String.class }), new Object[] { componentClass, parameterName });
}
catch (Exception e) {
throw new RuntimeException(new StringBuffer("The setParameterForTesting of ParameterServiceImpl failed: ").append(componentClass).append(" / ").append(parameterName).toString(), e);
}
}
private String getNamespace(Class documentOrStepClass) {
if (documentOrStepClass != null) {
if (documentOrStepClass.isAnnotationPresent(NAMESPACE.class)) {
return ((NAMESPACE) documentOrStepClass.getAnnotation(NAMESPACE.class)).namespace();
}
KualiModule module = moduleService.getResponsibleModule(documentOrStepClass);
if (module != null) {
return ParameterConstants.FINANCIAL_NAMESPACE_PREFIX + module.getModuleCode();
}
if (documentOrStepClass.getName().startsWith("org.kuali.core")) {
return ParameterConstants.NERVOUS_SYSTEM_NAMESPACE;
}
if (documentOrStepClass.getName().startsWith("org.kuali.kfs")) {
return ParameterConstants.FINANCIAL_SYSTEM_NAMESPACE;
}
throw new IllegalArgumentException("The getNamespace method of ParameterUtils requires documentOrStepClass with a package prefix of org.kuali.core, org.kuali.kfs, or org.kuali.module");
}
else {
throw new IllegalArgumentException("The getNamespace method of ParameterUtils requires non-null documentOrStepClass");
}
}
private String getDetailType(Class documentOrStepClass) {
if (documentOrStepClass.isAnnotationPresent(COMPONENT.class)) {
return ((COMPONENT) documentOrStepClass.getAnnotation(COMPONENT.class)).component();
}
if (TransactionalDocument.class.isAssignableFrom(documentOrStepClass)) {
return documentOrStepClass.getSimpleName().replace("Document", "");
}
else if (BusinessObject.class.isAssignableFrom(documentOrStepClass) || Step.class.isAssignableFrom(documentOrStepClass)) {
return documentOrStepClass.getSimpleName();
}
throw new IllegalArgumentException("The getDetailType method of ParameterServiceImpl requires TransactionalDocument, BusinessObject, or Step class");
}
private String getDetailTypeName(Class documentOrStepClass) {
if (documentOrStepClass.isAnnotationPresent(COMPONENT.class)) {
BusinessObjectEntry boe = dataDictionaryService.getDataDictionary().getBusinessObjectEntry(documentOrStepClass.getName());
if (boe != null) {
return boe.getObjectLabel();
}
else {
return ((COMPONENT) documentOrStepClass.getAnnotation(COMPONENT.class)).component();
}
}
if (TransactionalDocument.class.isAssignableFrom(documentOrStepClass)) {
return dataDictionaryService.getDocumentLabelByClass(documentOrStepClass);
}
else if (BusinessObject.class.isAssignableFrom(documentOrStepClass) || Step.class.isAssignableFrom(documentOrStepClass)) {
BusinessObjectEntry boe = dataDictionaryService.getDataDictionary().getBusinessObjectEntry(documentOrStepClass.getName());
if (boe != null) {
return boe.getObjectLabel();
}
else {
return documentOrStepClass.getSimpleName();
}
}
throw new IllegalArgumentException("The getDetailTypeName method of ParameterServiceImpl requires TransactionalDocument, BusinessObject, or Step class");
}
private ParameterEvaluator getParameterEvaluator(Parameter parameter) {
ParameterEvaluatorImpl parameterEvaluator = new ParameterEvaluatorImpl();
parameterEvaluator.setParameter(parameter);
parameterEvaluator.setConstraintIsAllow(constraintIsAllow(parameter));
parameterEvaluator.setValues(getParameterValues(parameter));
return parameterEvaluator;
}
private ParameterEvaluator getParameterEvaluator(Parameter parameter, String constrainedValue) {
ParameterEvaluator parameterEvaluator = getParameterEvaluator(parameter);
parameterEvaluator.setConstrainedValue(constrainedValue);
return parameterEvaluator;
}
private ParameterEvaluator getParameterEvaluator(Parameter parameter, String constrainingValue, String constrainedValue) {
ParameterEvaluator parameterEvaluator = getParameterEvaluator(parameter, constrainedValue);
((ParameterEvaluatorImpl) parameterEvaluator).setValues(getParameterValues(parameter, constrainingValue));
return parameterEvaluator;
}
private ParameterDetailType getParameterDetailType(Class documentOrStepClass) {
String detailTypeString = getDetailType(documentOrStepClass);
String detailTypeName = getDetailTypeName(documentOrStepClass);
ParameterDetailType detailType = new ParameterDetailType(getNamespace(documentOrStepClass), detailTypeString, (detailTypeName == null) ? detailTypeString : detailTypeName);
detailType.refreshNonUpdateableReferences();
return detailType;
}
private Parameter getParameter(Class componentClass, String parameterName) {
if (parameterCache.get() == null) {
parameterCache.set(new HashMap<String,Parameter>());
}
String key = componentClass.toString() + ":" + parameterName;
Object value = parameterCache.get().get(key);
if (value != null) {
return (Parameter) value;
}
Parameter parameter = getParameter(getNamespace(componentClass), getDetailType(componentClass), parameterName);
if (parameter == null) {
throw new IllegalArgumentException("The getParameter method of ParameterServiceImpl requires a componentClass and parameterName that correspond to an existing parameter");
}
parameterCache.get().put(key, parameter);
return parameter;
}
private List<String> getParameterValues(Parameter parameter, String constrainingValue) {
List<String> constraintValuePairs = getParameterValues(parameter);
for (String pair : constraintValuePairs) {
if (StringUtils.equals(constrainingValue, StringUtils.substringBefore(pair, "="))) {
return Arrays.asList(StringUtils.substringAfter(pair, "=").split(","));
}
}
return Collections.EMPTY_LIST;
}
private List<String> getParameterValues(Parameter parameter) {
if (parameter == null || StringUtils.isBlank(parameter.getParameterValue())) {
return Collections.EMPTY_LIST;
}
return Arrays.asList(parameter.getParameterValue().split(";"));
}
private List<Parameter> getParameters(Class componentClass) {
Map<String, String> fieldValues = new HashMap<String, String>();
fieldValues.put("parameterNamespaceCode", getNamespace(componentClass));
fieldValues.put("parameterDetailTypeCode", getDetailType(componentClass));
return new ArrayList<Parameter>(businessObjectService.findMatching(Parameter.class, fieldValues));
}
private Parameter getParameter(String namespaceCode, String detailTypeCode, String parameterName) {
if (StringUtils.isBlank(namespaceCode) || StringUtils.isBlank(detailTypeCode) || StringUtils.isBlank(parameterName)) {
throw new IllegalArgumentException("The getParameter method of KualiConfigurationServiceImpl requires a non-blank namespaceCode, parameterDetailTypeCode, and parameterName");
}
Parameter param = getParameterWithoutExceptions(namespaceCode, detailTypeCode, parameterName);
if (param == null) {
throw new IllegalArgumentException("The getParameter method of KualiConfigurationServiceImpl was unable to find parameter: " + namespaceCode + " / " + detailTypeCode + " / " + parameterName);
}
return param;
}
private Parameter getParameterWithoutExceptions(String namespaceCode, String detailTypeCode, String parameterName) {
HashMap<String, String> crit = new HashMap<String, String>(3);
crit.put("parameterNamespaceCode", namespaceCode);
crit.put("parameterDetailTypeCode", detailTypeCode);
crit.put("parameterName", parameterName);
Parameter param = (Parameter) businessObjectService.findByPrimaryKey(Parameter.class, crit);
return param;
}
private void removeCachedMethod(Method method, Object[] arguments) {
MethodCacheInterceptor methodCacheInterceptor = SpringContext.getBean(MethodCacheInterceptor.class);
String cacheKey = methodCacheInterceptor.buildCacheKey(method.toString(), arguments);
methodCacheInterceptor.removeCacheKey(cacheKey);
MethodCacheNoCopyInterceptor methodCacheNoCopyInterceptor = SpringContext.getBean(MethodCacheNoCopyInterceptor.class);
methodCacheNoCopyInterceptor.removeCacheKey(cacheKey);
}
private boolean constraintIsAllow(Parameter parameter) {
return KNSConstants.APC_ALLOWED_OPERATOR.equals(parameter.getParameterConstraintCode());
}
public void setDataDictionaryService(DataDictionaryService dataDictionaryService) {
this.dataDictionaryService = dataDictionaryService;
}
public void setModuleService(KualiModuleService moduleService) {
this.moduleService = moduleService;
}
public void setBusinessObjectService(BusinessObjectService businessObjectService) {
this.businessObjectService = businessObjectService;
}
}
|
work/src/org/kuali/kfs/sys/service/impl/ParameterServiceImpl.java
|
/*
* Copyright 2007 The Kuali Foundation.
*
* Licensed under the Educational Community License, Version 1.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl1.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.kfs.service.impl;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.kuali.core.KualiModule;
import org.kuali.core.bo.BusinessObject;
import org.kuali.core.bo.Parameter;
import org.kuali.core.bo.ParameterDetailType;
import org.kuali.core.datadictionary.BusinessObjectEntry;
import org.kuali.core.datadictionary.DocumentEntry;
import org.kuali.core.datadictionary.TransactionalDocumentEntry;
import org.kuali.core.document.TransactionalDocument;
import org.kuali.core.service.BusinessObjectService;
import org.kuali.core.service.DataDictionaryService;
import org.kuali.core.service.KualiModuleService;
import org.kuali.core.util.cache.MethodCacheInterceptor;
import org.kuali.core.util.cache.MethodCacheNoCopyInterceptor;
import org.kuali.core.util.spring.CacheNoCopy;
import org.kuali.core.util.spring.Cached;
import org.kuali.kfs.batch.Step;
import org.kuali.kfs.context.SpringContext;
import org.kuali.kfs.service.ParameterEvaluator;
import org.kuali.kfs.service.ParameterService;
import org.kuali.kfs.service.impl.ParameterConstants.COMPONENT;
import org.kuali.kfs.service.impl.ParameterConstants.NAMESPACE;
import org.kuali.rice.kns.util.KNSConstants;
/**
* See ParameterService. The componentClass must be the business object, document, or step class that the parameter is associated
* with. Implementations of this class know how to translate that to a namespace (for ParameterServiceImpl, determine what module
* the Class is associated with by parsing the package) and detail type (for ParameterServiceImpl, document Class --> use simple
* class name minus the word Document / business object Class --> use simple class name, batch step class --> use the simple class
* name). In cases where the parameter is applicable to all documents, all lookups, all batch steps, or all components in a
* particular module, you should pass in the appropriate constant class in ParameterConstants for the component Class (e.g. all
* purchasing documents = PURCHASING_DOCUMENT.class, all purchasing lookups = PURCHASING_LOOKUP.class, all purchasing batch steps =
* PURCHASING_BATCH.class, and all purchasing components = PURCHASING_ALL.class). In addition, certain methods take
* constrainingValue and constrainedValue Strings. The constrainedValue is the value that you want to compare to the Parameter
* value, and the constrainingValue is used for complex parameters that limit one field value based on the value of another field,
* e.g VALID_OBJECT_LEVELS_BY_OBJECT_TYPE.
*/
public class ParameterServiceImpl implements ParameterService {
private static final org.apache.log4j.Logger LOG = org.apache.log4j.Logger.getLogger(ParameterServiceImpl.class);
private static List<ParameterDetailType> components = new ArrayList<ParameterDetailType>();
private DataDictionaryService dataDictionaryService;
private KualiModuleService moduleService;
private BusinessObjectService businessObjectService;
/**
* @see org.kuali.kfs.service.ParameterService#parameterExists(java.lang.Class componentClass, java.lang.String parameterName)
*/
@CacheNoCopy
public boolean parameterExists(Class componentClass, String parameterName) {
return getParameterWithoutExceptions(getNamespace(componentClass), getDetailType(componentClass), parameterName) != null;
}
/**
* This method provides a convenient way to access the value of indicator parameters with Y/N values. Y is translated to true
* and N is translated to false.
*
* @param componentClass
* @param parameterName
* @return boolean value of Yes/No indicator parameter
*/
@CacheNoCopy
public boolean getIndicatorParameter(Class componentClass, String parameterName) {
return "Y".equals(getParameter(componentClass, parameterName).getParameterValue());
}
/**
* @see org.kuali.kfs.service.ParameterService#getParameterValue(java.lang.Class componentClass, java.lang.String parameterName)
*/
@CacheNoCopy
public String getParameterValue(Class componentClass, String parameterName) {
return getParameter(componentClass, parameterName).getParameterValue();
}
/**
* This will look for constrainingValue=<value to return> within the parameter text and return that if it is found. Otherwise,
* it will return null. Note that if constrainingValue=value1,value2... (commas specific to the ParameterServiceImpl
* implementation) is found it will still return null, because calling this method states the assumption that there is only one
* value within the parameter text that corresponds to the constraining value.
*
* @param componentClass
* @param parameterName
* @param constrainingValue
* @return derived value String or null
*/
@CacheNoCopy
public String getParameterValue(Class componentClass, String parameterName, String constrainingValue) {
List<String> parameterValues = getParameterValues(componentClass, parameterName, constrainingValue);
if (parameterValues.size() == 1) {
return parameterValues.get(0);
}
return null;
}
/**
* This method can be used to parse the value of a parameter by splitting on a semi-colon.
*
* @param componentClass
* @param parameterName
* @return parsed List of String parameter values
*/
@CacheNoCopy
public List<String> getParameterValues(Class componentClass, String parameterName) {
return Collections.unmodifiableList( getParameterValues(getParameter(componentClass, parameterName)) );
}
/**
* This method looks for constrainingValue=<some text> within the parameter text and splits that text on a comma to generate
* the List to return.
*
* @param componentClass
* @param parameterName
* @param constrainingValue
* @return derived values List<String> or an empty list if no values are found
*/
@CacheNoCopy
public List<String> getParameterValues(Class componentClass, String parameterName, String constrainingValue) {
return Collections.unmodifiableList( getParameterValues(getParameter(componentClass, parameterName), constrainingValue) );
}
/**
* This method will return an instance of the parameterEvaluator bean defined in Spring, initialized with the Parameter
* corresponding to the specified componentClass and parameterName and the values of the Parameter.
*
* @param componentClass
* @param parameterName
* @return ParameterEvaluator instance initialized with the Parameter corresponding to the specified componentClass and
* parameterName and the values of the Parameter
*/
@Cached
public ParameterEvaluator getParameterEvaluator(Class componentClass, String parameterName) {
return getParameterEvaluator(getParameter(componentClass, parameterName));
}
/**
* This method will return an instance of the parameterEvaluator bean defined in Spring, initialized with the Parameter
* corresponding to the specified componentClass and parameterName, the values of the Parameter, the knowledge of whether the
* values are allowed or denied, and the constrainedValue.
*
* @param componentClass
* @param parameterName
* @return ParameterEvaluator instance initialized with the Parameter corresponding to the specified componentClass and
* parameterName, the values of the Parameter, the knowledge of whether the values are allowed or denied, and the
* constrainedValue
*/
@Cached
public ParameterEvaluator getParameterEvaluator(Class componentClass, String parameterName, String constrainedValue) {
return getParameterEvaluator(getParameter(componentClass, parameterName), constrainedValue);
}
/**
* This method will return an instance of the parameterEvaluator bean defined in Spring, initialized with the Parameter
* corresponding to the specified componentClass and parameterName, the values of the Parameter that correspond to the specified
* constrainingValue, the knowledge of whether the values are allowed or denied, and the constrainedValue.
*
* @param componentClass
* @param parameterName
* @return ParameterEvaluator instance initialized with the Parameter corresponding to the specified componentClass and
* parameterName, the values of the Parameter that correspond to the specified constrainingValue, the knowledge of
* whether the values are allowed or denied, and the constrainedValue
*/
@Cached
public ParameterEvaluator getParameterEvaluator(Class componentClass, String parameterName, String constrainingValue, String constrainedValue) {
return getParameterEvaluator(getParameter(componentClass, parameterName), constrainingValue, constrainedValue);
}
/**
* This method will return an instance of the parameterEvaluator bean defined in Spring, initialized with the Parameter
* corresponding to the specified componentClass and allowParameterName or to the specified componentClass and denyParameterName
* (depending on which restricts based on the constraining value) or an instance of AlwaysSucceedParameterEvaluatorImpl if
* neither restricts, the values of the Parameter that correspond to the specified constrainingValue, the knowledge of whether
* the values are allowed or denied, and the constrainedValue.
*
* @param componentClass
* @param allowParameterName
* @param denyParameterName
* @param constrainingValue
* @param constrainedValue
* @return AlwaysSucceedParameterEvaluatorImpl or ParameterEvaluator instance initialized with the Parameter that corresponds to
* the constrainingValue restriction, the values of the Parameter that correspond to the specified constrainingValue,
* the knowledge of whether the values are allowed or denied, and the constrainedValue
*/
@Cached
public ParameterEvaluator getParameterEvaluator(Class componentClass, String allowParameterName, String denyParameterName, String constrainingValue, String constrainedValue) {
Parameter allowParameter = getParameter(componentClass, allowParameterName);
Parameter denyParameter = getParameter(componentClass, denyParameterName);
if (!getParameterValues(allowParameter, constrainingValue).isEmpty() && !getParameterValues(denyParameter, constrainingValue).isEmpty()) {
throw new IllegalArgumentException("The getParameterEvaluator(Class componentClass, String allowParameterName, String denyParameterName, String constrainingValue, String constrainedValue) method of ParameterServiceImpl does not facilitate evaluation of combination allow and deny parameters that both have values for the constraining value: " + allowParameterName + " / " + denyParameterName + " / " + constrainingValue);
}
if (getParameterValues(allowParameter, constrainingValue).isEmpty() && getParameterValues(denyParameter, constrainingValue).isEmpty()) {
return AlwaysSucceedParameterEvaluatorImpl.getInstance();
}
return getParameterEvaluator(getParameterValues(denyParameter, constrainingValue).isEmpty() ? allowParameter : denyParameter, constrainingValue, constrainedValue);
}
/**
* @see org.kuali.kfs.service.ParameterService#getParameterEvaluators(java.lang.Class componentClass, java.lang.String
* constrainedValue)
*/
@Cached
public List<ParameterEvaluator> getParameterEvaluators(Class componentClass, String constrainedValue) {
List<ParameterEvaluator> parameterEvaluators = new ArrayList<ParameterEvaluator>();
for (Parameter parameter : getParameters(componentClass)) {
parameterEvaluators.add(getParameterEvaluator(parameter, constrainedValue));
}
return parameterEvaluators;
}
/**
* @see org.kuali.kfs.service.ParameterService#getParameterEvaluators(java.lang.Class componentClass, java.lang.String
* constrainingValue, java.lang.String constrainedValue)
*/
@Cached
public List<ParameterEvaluator> getParameterEvaluators(Class componentClass, String constrainingValue, String constrainedValue) {
List<ParameterEvaluator> parameterEvaluators = new ArrayList<ParameterEvaluator>();
for (Parameter parameter : getParameters(componentClass)) {
parameterEvaluators.add(getParameterEvaluator(parameter, constrainingValue, constrainedValue));
}
return parameterEvaluators;
}
/**
* This method derives ParameterDetailedTypes from the DataDictionary for all BusinessObjects and Documents and from Spring for
* all batch Steps.
*
* @return List<ParameterDetailedType> containing the detailed types derived from the data dictionary and Spring
*/
@Cached
public List<ParameterDetailType> getNonDatabaseDetailTypes() {
if (components.isEmpty()) {
Map<String, ParameterDetailType> uniqueParameterDetailTypeMap = new HashMap<String, ParameterDetailType>();
//dataDictionaryService.getDataDictionary().forceCompleteDataDictionaryLoad();
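// Build detail types from data dictionary business objects, transactional documents, and Spring-managed batch steps, keyed by detail type code to avoid duplicates.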
for (BusinessObjectEntry businessObjectEntry : dataDictionaryService.getDataDictionary().getBusinessObjectEntries().values()) {
ParameterDetailType parameterDetailType = getParameterDetailType(businessObjectEntry.getBusinessObjectClass());
try {
uniqueParameterDetailTypeMap.put(parameterDetailType.getParameterDetailTypeCode(), parameterDetailType);
}
catch (Exception e) {
LOG.error("The getDataDictionaryAndSpringComponents method of ParameterUtils encountered an exception while trying to create the detail type for business object class: " + businessObjectEntry.getBusinessObjectClass(), e);
}
}
for (DocumentEntry documentEntry : dataDictionaryService.getDataDictionary().getDocumentEntries().values()) {
if (documentEntry instanceof TransactionalDocumentEntry) {
ParameterDetailType parameterDetailType = getParameterDetailType(documentEntry.getDocumentClass());
try {
uniqueParameterDetailTypeMap.put(parameterDetailType.getParameterDetailTypeCode(), parameterDetailType);
}
catch (Exception e) {
LOG.error("The getDataDictionaryAndSpringComponents method of ParameterUtils encountered an exception while trying to create the detail type for transactional document class: " + documentEntry.getDocumentClass(), e);
}
}
}
for (Step step : SpringContext.getBeansOfType(Step.class).values()) {
ParameterDetailType parameterDetailType = getParameterDetailType(step.getClass());
try {
uniqueParameterDetailTypeMap.put(parameterDetailType.getParameterDetailTypeCode(), parameterDetailType);
}
catch (Exception e) {
LOG.error("The getDataDictionaryAndSpringComponents method of ParameterUtils encountered an exception while trying to create the detail type for step class: " + step.getClass(), e);
}
}
components.addAll(uniqueParameterDetailTypeMap.values());
}
return Collections.unmodifiableList(components);
}
/**
* @see org.kuali.kfs.service.ParameterService#setParameterForTesting(java.lang.Class componentClass, java.lang.String
* parameterName, java.lang.String parameterText)
*/
public void setParameterForTesting(Class componentClass, String parameterName, String parameterText) {
Parameter parameter = getParameter(componentClass, parameterName);
parameter.setParameterValue(parameterText);
SpringContext.getBean(BusinessObjectService.class).save(parameter);
try {
removeCachedMethod(ParameterService.class.getMethod("getParameterValue", new Class[] { Class.class, String.class }), new Object[] { componentClass, parameterName });
removeCachedMethod(ParameterService.class.getMethod("getIndicatorParameter", new Class[] { Class.class, String.class }), new Object[] { componentClass, parameterName });
removeCachedMethod(ParameterService.class.getMethod("getParameterValues", new Class[] { Class.class, String.class }), new Object[] { componentClass, parameterName });
}
catch (Exception e) {
throw new RuntimeException(new StringBuffer("The setParameterForTesting of ParameterServiceImpl failed: ").append(componentClass).append(" / ").append(parameterName).toString(), e);
}
}
@CacheNoCopy
private String getNamespace(Class documentOrStepClass) {
if (documentOrStepClass != null) {
if (documentOrStepClass.isAnnotationPresent(NAMESPACE.class)) {
return ((NAMESPACE) documentOrStepClass.getAnnotation(NAMESPACE.class)).namespace();
}
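// No NAMESPACE annotation: fall back to the responsible module's code, then to package-based defaults for org.kuali.core and org.kuali.kfs classes.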
KualiModule module = moduleService.getResponsibleModule(documentOrStepClass);
if (module != null) {
return ParameterConstants.FINANCIAL_NAMESPACE_PREFIX + module.getModuleCode();
}
if (documentOrStepClass.getName().startsWith("org.kuali.core")) {
return ParameterConstants.NERVOUS_SYSTEM_NAMESPACE;
}
if (documentOrStepClass.getName().startsWith("org.kuali.kfs")) {
return ParameterConstants.FINANCIAL_SYSTEM_NAMESPACE;
}
throw new IllegalArgumentException("The getNamespace method of ParameterUtils requires documentOrStepClass with a package prefix of org.kuali.core, org.kuali.kfs, or org.kuali.module");
}
else {
throw new IllegalArgumentException("The getNamespace method of ParameterUtils requires non-null documentOrStepClass");
}
}
@CacheNoCopy
private String getDetailType(Class documentOrStepClass) {
if (documentOrStepClass.isAnnotationPresent(COMPONENT.class)) {
return ((COMPONENT) documentOrStepClass.getAnnotation(COMPONENT.class)).component();
}
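// Without a COMPONENT annotation, derive the code from the simple class name, dropping the "Document" suffix for transactional documents.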
if (TransactionalDocument.class.isAssignableFrom(documentOrStepClass)) {
return documentOrStepClass.getSimpleName().replace("Document", "");
}
else if (BusinessObject.class.isAssignableFrom(documentOrStepClass) || Step.class.isAssignableFrom(documentOrStepClass)) {
return documentOrStepClass.getSimpleName();
}
throw new IllegalArgumentException("The getDetailType method of ParameterServiceImpl requires TransactionalDocument, BusinessObject, or Step class");
}
@CacheNoCopy
private String getDetailTypeName(Class documentOrStepClass) {
if (documentOrStepClass.isAnnotationPresent(COMPONENT.class)) {
BusinessObjectEntry boe = dataDictionaryService.getDataDictionary().getBusinessObjectEntry(documentOrStepClass.getName());
if (boe != null) {
return boe.getObjectLabel();
}
else {
return ((COMPONENT) documentOrStepClass.getAnnotation(COMPONENT.class)).component();
}
}
if (TransactionalDocument.class.isAssignableFrom(documentOrStepClass)) {
return dataDictionaryService.getDocumentLabelByClass(documentOrStepClass);
}
else if (BusinessObject.class.isAssignableFrom(documentOrStepClass) || Step.class.isAssignableFrom(documentOrStepClass)) {
BusinessObjectEntry boe = dataDictionaryService.getDataDictionary().getBusinessObjectEntry(documentOrStepClass.getName());
if (boe != null) {
return boe.getObjectLabel();
}
else {
return documentOrStepClass.getSimpleName();
}
}
throw new IllegalArgumentException("The getDetailTypeName method of ParameterServiceImpl requires TransactionalDocument, BusinessObject, or Step class");
}
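// Builds a ParameterEvaluatorImpl seeded with the parameter, whether its constraint code is an allow constraint, and its parsed values.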
private ParameterEvaluator getParameterEvaluator(Parameter parameter) {
ParameterEvaluatorImpl parameterEvaluator = new ParameterEvaluatorImpl();
parameterEvaluator.setParameter(parameter);
parameterEvaluator.setConstraintIsAllow(constraintIsAllow(parameter));
parameterEvaluator.setValues(getParameterValues(parameter));
return parameterEvaluator;
}
private ParameterEvaluator getParameterEvaluator(Parameter parameter, String constrainedValue) {
ParameterEvaluator parameterEvaluator = getParameterEvaluator(parameter);
parameterEvaluator.setConstrainedValue(constrainedValue);
return parameterEvaluator;
}
private ParameterEvaluator getParameterEvaluator(Parameter parameter, String constrainingValue, String constrainedValue) {
ParameterEvaluator parameterEvaluator = getParameterEvaluator(parameter, constrainedValue);
((ParameterEvaluatorImpl) parameterEvaluator).setValues(getParameterValues(parameter, constrainingValue));
return parameterEvaluator;
}
private ParameterDetailType getParameterDetailType(Class documentOrStepClass) {
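// Assemble a detail type from the class's namespace, detail type code, and display name, falling back to the code when no name is available.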
String detailTypeString = getDetailType(documentOrStepClass);
String detailTypeName = getDetailTypeName(documentOrStepClass);
ParameterDetailType detailType = new ParameterDetailType(getNamespace(documentOrStepClass), detailTypeString, (detailTypeName == null) ? detailTypeString : detailTypeName);
detailType.refreshNonUpdateableReferences();
return detailType;
}
private Parameter getParameter(Class componentClass, String parameterName) {
Parameter parameter = getParameter(getNamespace(componentClass), getDetailType(componentClass), parameterName);
if (parameter == null) {
throw new IllegalArgumentException("The getParameter method of ParameterServiceImpl requires a componentClass and parameterName that correspond to an existing parameter");
}
return parameter;
}
private List<String> getParameterValues(Parameter parameter, String constrainingValue) {
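// Parameter text stores semicolon-separated pairs of the form constrainingValue=value1,value2;
// return the comma-separated values of the pair matching the constraining value, or an empty list if none match.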
List<String> constraintValuePairs = getParameterValues(parameter);
for (String pair : constraintValuePairs) {
if (StringUtils.equals(constrainingValue, StringUtils.substringBefore(pair, "="))) {
return Arrays.asList(StringUtils.substringAfter(pair, "=").split(","));
}
}
return Collections.emptyList();
}
private List<String> getParameterValues(Parameter parameter) {
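// A parameter's value holds one or more entries separated by semicolons; a missing parameter or blank value yields an empty list.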
if (parameter == null || StringUtils.isBlank(parameter.getParameterValue())) {
return Collections.emptyList();
}
return Arrays.asList(parameter.getParameterValue().split(";"));
}
private List<Parameter> getParameters(Class componentClass) {
Map<String, String> fieldValues = new HashMap<String, String>();
fieldValues.put("parameterNamespaceCode", getNamespace(componentClass));
fieldValues.put("parameterDetailTypeCode", getDetailType(componentClass));
return new ArrayList<Parameter>(businessObjectService.findMatching(Parameter.class, fieldValues));
}
private Parameter getParameter(String namespaceCode, String detailTypeCode, String parameterName) {
if (StringUtils.isBlank(namespaceCode) || StringUtils.isBlank(detailTypeCode) || StringUtils.isBlank(parameterName)) {
throw new IllegalArgumentException("The getParameter method of KualiConfigurationServiceImpl requires a non-blank namespaceCode, parameterDetailTypeCode, and parameterName");
}
Parameter param = getParameterWithoutExceptions(namespaceCode, detailTypeCode, parameterName);
if (param == null) {
throw new IllegalArgumentException("The getParameter method of KualiConfigurationServiceImpl was unable to find parameter: " + namespaceCode + " / " + detailTypeCode + " / " + parameterName);
}
return param;
}
private Parameter getParameterWithoutExceptions(String namespaceCode, String detailTypeCode, String parameterName) {
HashMap<String, String> crit = new HashMap<String, String>(3);
crit.put("parameterNamespaceCode", namespaceCode);
crit.put("parameterDetailTypeCode", detailTypeCode);
crit.put("parameterName", parameterName);
Parameter param = (Parameter) businessObjectService.findByPrimaryKey(Parameter.class, crit);
return param;
}
private void removeCachedMethod(Method method, Object[] arguments) {
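// Evicts the cached result for the given method and arguments from both the copying and non-copying method caches.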
MethodCacheInterceptor methodCacheInterceptor = SpringContext.getBean(MethodCacheInterceptor.class);
String cacheKey = methodCacheInterceptor.buildCacheKey(method.toString(), arguments);
methodCacheInterceptor.removeCacheKey(cacheKey);
MethodCacheNoCopyInterceptor methodCacheNoCopyInterceptor = SpringContext.getBean(MethodCacheNoCopyInterceptor.class);
methodCacheNoCopyInterceptor.removeCacheKey(cacheKey);
}
private boolean constraintIsAllow(Parameter parameter) {
return KNSConstants.APC_ALLOWED_OPERATOR.equals(parameter.getParameterConstraintCode());
}
public void setDataDictionaryService(DataDictionaryService dataDictionaryService) {
this.dataDictionaryService = dataDictionaryService;
}
public void setModuleService(KualiModuleService moduleService) {
this.moduleService = moduleService;
}
public void setBusinessObjectService(BusinessObjectService businessObjectService) {
this.businessObjectService = businessObjectService;
}
}
|
KFSMI-980
|
work/src/org/kuali/kfs/sys/service/impl/ParameterServiceImpl.java
|
KFSMI-980
|
<ide><path>ork/src/org/kuali/kfs/sys/service/impl/ParameterServiceImpl.java
<ide> private DataDictionaryService dataDictionaryService;
<ide> private KualiModuleService moduleService;
<ide> private BusinessObjectService businessObjectService;
<add> private ThreadLocal<Map<String,Parameter>> parameterCache = new ThreadLocal<Map<String,Parameter>>();
<ide>
<ide> /**
<ide> * @see org.kuali.kfs.service.ParameterService#parameterExists(java.lang.Class componentClass, java.lang.String parameterName)
<ide> */
<del> @CacheNoCopy
<ide> public boolean parameterExists(Class componentClass, String parameterName) {
<ide> return getParameterWithoutExceptions(getNamespace(componentClass), getDetailType(componentClass), parameterName) != null;
<ide> }
<ide> * @param parameterName
<ide> * @return boolean value of Yes/No indicator parameter
<ide> */
<del> @CacheNoCopy
<ide> public boolean getIndicatorParameter(Class componentClass, String parameterName) {
<ide> return "Y".equals(getParameter(componentClass, parameterName).getParameterValue());
<ide> }
<ide> /**
<ide> * @see org.kuali.kfs.service.ParameterService#getParameterValue(java.lang.Class componentClass, java.lang.String parameterName)
<ide> */
<del> @CacheNoCopy
<ide> public String getParameterValue(Class componentClass, String parameterName) {
<ide> return getParameter(componentClass, parameterName).getParameterValue();
<ide> }
<ide> * @param constrainingValue
<ide> * @return derived value String or null
<ide> */
<del> @CacheNoCopy
<ide> public String getParameterValue(Class componentClass, String parameterName, String constrainingValue) {
<ide> List<String> parameterValues = getParameterValues(componentClass, parameterName, constrainingValue);
<ide> if (parameterValues.size() == 1) {
<ide> * @param parameterName
<ide> * @return parsed List of String parameter values
<ide> */
<del> @CacheNoCopy
<ide> public List<String> getParameterValues(Class componentClass, String parameterName) {
<ide> return Collections.unmodifiableList( getParameterValues(getParameter(componentClass, parameterName)) );
<ide> }
<ide> * @param constrainingValue
<ide> * @return derived values List<String> or an empty list if no values are found
<ide> */
<del> @CacheNoCopy
<ide> public List<String> getParameterValues(Class componentClass, String parameterName, String constrainingValue) {
<ide> return Collections.unmodifiableList( getParameterValues(getParameter(componentClass, parameterName), constrainingValue) );
<ide> }
<ide> * @return ParameterEvaluator instance initialized with the Parameter corresponding to the specified componentClass and
<ide> * parameterName and the values of the Parameter
<ide> */
<del> @Cached
<ide> public ParameterEvaluator getParameterEvaluator(Class componentClass, String parameterName) {
<ide> return getParameterEvaluator(getParameter(componentClass, parameterName));
<ide> }
<ide> * parameterName, the values of the Parameter, the knowledge of whether the values are allowed or denied, and the
<ide> * constrainedValue
<ide> */
<del> @Cached
<ide> public ParameterEvaluator getParameterEvaluator(Class componentClass, String parameterName, String constrainedValue) {
<ide> return getParameterEvaluator(getParameter(componentClass, parameterName), constrainedValue);
<ide> }
<ide> * parameterName, the values of the Parameter that correspond to the specified constrainingValue, the knowledge of
<ide> * whether the values are allowed or denied, and the constrainedValue
<ide> */
<del> @Cached
<ide> public ParameterEvaluator getParameterEvaluator(Class componentClass, String parameterName, String constrainingValue, String constrainedValue) {
<ide> return getParameterEvaluator(getParameter(componentClass, parameterName), constrainingValue, constrainedValue);
<ide> }
<ide> * the constrainingValue restriction, the values of the Parameter that correspond to the specified constrainingValue,
<ide> * the knowledge of whether the values are allowed or denied, and the constrainedValue
<ide> */
<del> @Cached
<ide> public ParameterEvaluator getParameterEvaluator(Class componentClass, String allowParameterName, String denyParameterName, String constrainingValue, String constrainedValue) {
<ide> Parameter allowParameter = getParameter(componentClass, allowParameterName);
<ide> Parameter denyParameter = getParameter(componentClass, denyParameterName);
<ide> * @see org.kuali.kfs.service.ParameterService#getParameterEvaluators(java.lang.Class componentClass, java.lang.String
<ide> * constrainedValue)
<ide> */
<del> @Cached
<ide> public List<ParameterEvaluator> getParameterEvaluators(Class componentClass, String constrainedValue) {
<ide> List<ParameterEvaluator> parameterEvaluators = new ArrayList<ParameterEvaluator>();
<ide> for (Parameter parameter : getParameters(componentClass)) {
<ide> * @see org.kuali.kfs.service.ParameterService#getParameterEvaluators(java.lang.Class componentClass, java.lang.String
<ide> * constrainingValue, java.lang.String constrainedValue)
<ide> */
<del> @Cached
<ide> public List<ParameterEvaluator> getParameterEvaluators(Class componentClass, String constrainingValue, String constrainedValue) {
<ide> List<ParameterEvaluator> parameterEvaluators = new ArrayList<ParameterEvaluator>();
<ide> for (Parameter parameter : getParameters(componentClass)) {
<ide> *
<ide> * @return List<ParameterDetailedType> containing the detailed types derived from the data dictionary and Spring
<ide> */
<del> @Cached
<ide> public List<ParameterDetailType> getNonDatabaseDetailTypes() {
<ide> if (components.isEmpty()) {
<ide> Map<String, ParameterDetailType> uniqueParameterDetailTypeMap = new HashMap<String, ParameterDetailType>();
<ide> }
<ide> }
<ide>
<del> @CacheNoCopy
<ide> private String getNamespace(Class documentOrStepClass) {
<ide> if (documentOrStepClass != null) {
<ide> if (documentOrStepClass.isAnnotationPresent(NAMESPACE.class)) {
<ide> }
<ide> }
<ide>
<del> @CacheNoCopy
<ide> private String getDetailType(Class documentOrStepClass) {
<ide> if (documentOrStepClass.isAnnotationPresent(COMPONENT.class)) {
<ide> return ((COMPONENT) documentOrStepClass.getAnnotation(COMPONENT.class)).component();
<ide> throw new IllegalArgumentException("The getDetailType method of ParameterServiceImpl requires TransactionalDocument, BusinessObject, or Step class");
<ide> }
<ide>
<del> @CacheNoCopy
<ide> private String getDetailTypeName(Class documentOrStepClass) {
<ide> if (documentOrStepClass.isAnnotationPresent(COMPONENT.class)) {
<ide> BusinessObjectEntry boe = dataDictionaryService.getDataDictionary().getBusinessObjectEntry(documentOrStepClass.getName());
<ide> }
<ide>
<ide> private Parameter getParameter(Class componentClass, String parameterName) {
<add> if (parameterCache.get() == null) {
<add> parameterCache.set(new HashMap<String,Parameter>());
<add> }
<add> String key = componentClass.toString() + ":" + parameterName;
<add> Object value = parameterCache.get().get(key);
<add> if (value != null) {
<add> return (Parameter) value;
<add> }
<ide> Parameter parameter = getParameter(getNamespace(componentClass), getDetailType(componentClass), parameterName);
<ide> if (parameter == null) {
<ide> throw new IllegalArgumentException("The getParameter method of ParameterServiceImpl requires a componentClass and parameterName that correspond to an existing parameter");
<ide> }
<add> parameterCache.get().put(key, parameter);
<ide> return parameter;
<ide> }
<ide>
|
|
Java
|
apache-2.0
|
e7bcd901615415a102808a8fee95060662a02930
| 0 |
b2ihealthcare/snow-owl,IHTSDO/snow-owl,b2ihealthcare/snow-owl,IHTSDO/snow-owl,IHTSDO/snow-owl,IHTSDO/snow-owl,b2ihealthcare/snow-owl,b2ihealthcare/snow-owl
|
/*
* Copyright 2011-2015 B2i Healthcare Pte Ltd, http://b2i.sg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.b2international.snowowl.snomed.validation.constraints.global;
import static com.b2international.snowowl.core.api.IdAndTerminologyComponentIdProviderImpl.create;
import static com.b2international.snowowl.snomed.common.SnomedTerminologyComponentConstants.CONCEPT_NUMBER;
import static com.google.common.collect.Maps.newHashMap;
import static com.google.common.collect.Maps.toMap;
import static com.google.common.collect.Sets.newHashSet;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.SubMonitor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.b2international.commons.http.ExtendedLocale;
import com.b2international.commons.pcj.LongSets;
import com.b2international.snowowl.core.ApplicationContext;
import com.b2international.snowowl.core.api.IBranchPath;
import com.b2international.snowowl.core.api.IdAndTerminologyComponentIdProvider;
import com.b2international.snowowl.core.validation.GlobalConstraintStatus;
import com.b2international.snowowl.core.validation.IGlobalConstraint;
import com.b2international.snowowl.eventbus.IEventBus;
import com.b2international.snowowl.snomed.SnomedConstants.Concepts;
import com.b2international.snowowl.snomed.common.SnomedTerminologyComponentConstants;
import com.b2international.snowowl.snomed.core.domain.Acceptability;
import com.b2international.snowowl.snomed.core.domain.ISnomedDescription;
import com.b2international.snowowl.snomed.core.domain.SnomedDescriptions;
import com.b2international.snowowl.snomed.core.lang.LanguageSetting;
import com.b2international.snowowl.snomed.datastore.SnomedTerminologyBrowser;
import com.b2international.snowowl.snomed.datastore.request.SnomedRequests;
import com.google.common.base.Function;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
/**
* Global constraint, which checks if there is more than one concept with the same active and preferred fully specified name.
*
*/
public class UniqueFSNGlobalConstraint implements IGlobalConstraint {
private static final Logger LOG = LoggerFactory.getLogger(UniqueFSNGlobalConstraint.class);
private static final String ID = "com.b2international.snowowl.snomed.validation.examples.uniqueFSNGlobalConstraint";
@Override
public GlobalConstraintStatus validate(final IBranchPath branchPath, final IProgressMonitor monitor) {
LOG.info("Validating global FSN uniqueness constraint...");
final SubMonitor subMonitor = SubMonitor.convert(monitor, 3);
if (subMonitor.isCanceled()) {
return createEmptyStatus();
}
try {
final IEventBus bus = ApplicationContext.getServiceForClass(IEventBus.class);
final List<ExtendedLocale> locales = ApplicationContext.getServiceForClass(LanguageSetting.class).getLanguagePreference();
final SnomedTerminologyBrowser terminologyBrowser = ApplicationContext.getServiceForClass(SnomedTerminologyBrowser.class);
final Set<Long> activeConceptIds = LongSets.toSet(terminologyBrowser.getAllActiveConceptIds(branchPath));
subMonitor.worked(1);
final SnomedDescriptions descriptions = SnomedRequests.prepareSearchDescription()
.filterByActive(true)
.filterByType(Concepts.FULLY_SPECIFIED_NAME)
.filterByAcceptability(Acceptability.PREFERRED)
.filterByExtendedLocales(locales)
.filterByConceptId(activeConceptIds)
.all()
.build(branchPath.getPath())
.executeSync(bus);
subMonitor.worked(1);
if (subMonitor.isCanceled()) {
return createEmptyStatus();
}
final HashMultimap<String, String> fsnToIdsMap = HashMultimap.<String, String>create();
for (final ISnomedDescription description : descriptions.getItems()) {
fsnToIdsMap.put(description.getTerm(), description.getConceptId());
}
final Map<String, IdAndTerminologyComponentIdProvider> allConcepts = getAllConcepts(fsnToIdsMap);
final Collection<IdAndTerminologyComponentIdProvider> violatingConcepts = newHashSet();
for (final Entry<String, Collection<String>> entry : fsnToIdsMap.asMap().entrySet()) {
if (entry.getValue().size() > 1) {
for (final String conceptId : entry.getValue()) {
violatingConcepts.add(createConcept(conceptId));
allConcepts.remove(conceptId);
}
}
}
subMonitor.worked(1);
LOG.info("Global FSN uniqueness validation finished.");
return new GlobalConstraintStatus(ID, violatingConcepts, allConcepts.values());
} catch (final Exception e) {
LOG.info("Exception happened during FSN uniqueness validation.", e);
}
// FIXME return error status, as the calculation failed
return createEmptyStatus();
}
private IdAndTerminologyComponentIdProvider createConcept(final String conceptId) {
return create(conceptId, SnomedTerminologyComponentConstants.CONCEPT_NUMBER);
}
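// A status with no violating and no satisfying concepts, returned when validation is cancelled or fails.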
private GlobalConstraintStatus createEmptyStatus() {
return new GlobalConstraintStatus(ID,
Collections.<IdAndTerminologyComponentIdProvider>emptySet(),
Collections.<IdAndTerminologyComponentIdProvider>emptySet());
}
private Map<String, IdAndTerminologyComponentIdProvider> getAllConcepts(final Multimap<String, String> fsnToIdsMapping) {
return newHashMap(toMap(newHashSet(fsnToIdsMapping.values()), new Function<String, IdAndTerminologyComponentIdProvider>() {
@Override
public IdAndTerminologyComponentIdProvider apply(final String conceptId) {
return create(conceptId, CONCEPT_NUMBER);
}
}));
}
}
|
snomed/com.b2international.snowowl.snomed.validation.constraints/src/com/b2international/snowowl/snomed/validation/constraints/global/UniqueFSNGlobalConstraint.java
|
/*
* Copyright 2011-2015 B2i Healthcare Pte Ltd, http://b2i.sg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.b2international.snowowl.snomed.validation.constraints.global;
import static com.b2international.snowowl.core.ApplicationContext.getServiceForClass;
import static com.b2international.snowowl.core.api.IdAndTerminologyComponentIdProviderImpl.create;
import static com.b2international.snowowl.snomed.common.SnomedTerminologyComponentConstants.CONCEPT_NUMBER;
import static com.google.common.collect.Maps.newHashMap;
import static com.google.common.collect.Maps.toMap;
import static com.google.common.collect.Sets.newHashSet;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.Map.Entry;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.SubMonitor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.b2international.snowowl.core.api.IBranchPath;
import com.b2international.snowowl.core.api.IdAndTerminologyComponentIdProvider;
import com.b2international.snowowl.core.validation.GlobalConstraintStatus;
import com.b2international.snowowl.core.validation.IGlobalConstraint;
import com.b2international.snowowl.snomed.common.SnomedTerminologyComponentConstants;
import com.b2international.snowowl.snomed.datastore.ILanguageConfigurationProvider;
import com.b2international.snowowl.snomed.datastore.services.ISnomedComponentService;
import com.google.common.base.Function;
import com.google.common.collect.Multimap;
/**
* Global constraint, which checks if there is more than concept with the same active fully specified name.
*
*/
public class UniqueFSNGlobalConstraint implements IGlobalConstraint {
private static final Logger LOG = LoggerFactory.getLogger(UniqueFSNGlobalConstraint.class);
private static final String ID = "com.b2international.snowowl.snomed.validation.examples.uniqueFSNGlobalConstraint";
@Override
public GlobalConstraintStatus validate(IBranchPath branchPath, IProgressMonitor monitor) {
LOG.info("Started global FSN validation constraint validation rule");
final SubMonitor subMonitor = SubMonitor.convert(monitor);
try {
final String languageRefSetId = getServiceForClass(ILanguageConfigurationProvider.class).getLanguageConfiguration().getLanguageRefSetId();
final Multimap<String, String> fsnToIdsMapping = getServiceForClass(ISnomedComponentService.class).getFullySpecifiedNameToIdsMapping(branchPath, languageRefSetId);
final Map<String, IdAndTerminologyComponentIdProvider> allConcepts = getAllConcepts(fsnToIdsMapping);
final Collection<IdAndTerminologyComponentIdProvider> violatingConcepts = newHashSet();
for (final Entry<String, Collection<String>> entry : fsnToIdsMapping.asMap().entrySet()) {
if (entry.getValue().size() > 1) {
for (final String conceptId : entry.getValue()) {
violatingConcepts.add(createConcept(conceptId));
allConcepts.remove(conceptId);
}
}
}
return new GlobalConstraintStatus(ID, violatingConcepts, allConcepts.values());
} catch (Exception e) {
LOG.error("Exception happened during FSN uniqueness validation.", e);
} finally {
LOG.info("Completed global FSN validation constraint validation rule.");
subMonitor.done();
}
// FIXME return error status, as the calculation failed
return createEmptyStatus();
}
private IdAndTerminologyComponentIdProvider createConcept(final String conceptId) {
return create(conceptId, SnomedTerminologyComponentConstants.CONCEPT_NUMBER);
}
private GlobalConstraintStatus createEmptyStatus() {
return new GlobalConstraintStatus(ID,
Collections.<IdAndTerminologyComponentIdProvider>emptySet(),
Collections.<IdAndTerminologyComponentIdProvider>emptySet());
}
private Map<String, IdAndTerminologyComponentIdProvider> getAllConcepts(final Multimap<String, String> fsnToIdsMapping) {
return newHashMap(toMap(newHashSet(fsnToIdsMapping.values()), new Function<String, IdAndTerminologyComponentIdProvider>() {
public IdAndTerminologyComponentIdProvider apply(String conceptId) {
return create(conceptId, CONCEPT_NUMBER);
}
}));
}
}
|
SO-1858: Use request for global FSN validation
https://snowowl.atlassian.net/browse/SO-1858
|
snomed/com.b2international.snowowl.snomed.validation.constraints/src/com/b2international/snowowl/snomed/validation/constraints/global/UniqueFSNGlobalConstraint.java
|
SO-1858: Use request for global FSN validation
|
<ide><path>nomed/com.b2international.snowowl.snomed.validation.constraints/src/com/b2international/snowowl/snomed/validation/constraints/global/UniqueFSNGlobalConstraint.java
<ide> */
<ide> package com.b2international.snowowl.snomed.validation.constraints.global;
<ide>
<del>import static com.b2international.snowowl.core.ApplicationContext.getServiceForClass;
<ide> import static com.b2international.snowowl.core.api.IdAndTerminologyComponentIdProviderImpl.create;
<ide> import static com.b2international.snowowl.snomed.common.SnomedTerminologyComponentConstants.CONCEPT_NUMBER;
<ide> import static com.google.common.collect.Maps.newHashMap;
<ide>
<ide> import java.util.Collection;
<ide> import java.util.Collections;
<add>import java.util.List;
<ide> import java.util.Map;
<ide> import java.util.Map.Entry;
<add>import java.util.Set;
<ide>
<ide> import org.eclipse.core.runtime.IProgressMonitor;
<ide> import org.eclipse.core.runtime.SubMonitor;
<ide> import org.slf4j.Logger;
<ide> import org.slf4j.LoggerFactory;
<ide>
<add>import com.b2international.commons.http.ExtendedLocale;
<add>import com.b2international.commons.pcj.LongSets;
<add>import com.b2international.snowowl.core.ApplicationContext;
<ide> import com.b2international.snowowl.core.api.IBranchPath;
<ide> import com.b2international.snowowl.core.api.IdAndTerminologyComponentIdProvider;
<ide> import com.b2international.snowowl.core.validation.GlobalConstraintStatus;
<ide> import com.b2international.snowowl.core.validation.IGlobalConstraint;
<add>import com.b2international.snowowl.eventbus.IEventBus;
<add>import com.b2international.snowowl.snomed.SnomedConstants.Concepts;
<ide> import com.b2international.snowowl.snomed.common.SnomedTerminologyComponentConstants;
<del>import com.b2international.snowowl.snomed.datastore.ILanguageConfigurationProvider;
<del>import com.b2international.snowowl.snomed.datastore.services.ISnomedComponentService;
<add>import com.b2international.snowowl.snomed.core.domain.Acceptability;
<add>import com.b2international.snowowl.snomed.core.domain.ISnomedDescription;
<add>import com.b2international.snowowl.snomed.core.domain.SnomedDescriptions;
<add>import com.b2international.snowowl.snomed.core.lang.LanguageSetting;
<add>import com.b2international.snowowl.snomed.datastore.SnomedTerminologyBrowser;
<add>import com.b2international.snowowl.snomed.datastore.request.SnomedRequests;
<ide> import com.google.common.base.Function;
<add>import com.google.common.collect.HashMultimap;
<ide> import com.google.common.collect.Multimap;
<ide>
<ide> /**
<del> * Global constraint, which checks if there is more than concept with the same active fully specified name.
<add> * Global constraint, which checks if there is more than one concept with the same active and preferred fully specified name.
<ide> *
<ide> */
<ide> public class UniqueFSNGlobalConstraint implements IGlobalConstraint {
<ide>
<ide> private static final Logger LOG = LoggerFactory.getLogger(UniqueFSNGlobalConstraint.class);
<add>
<ide> private static final String ID = "com.b2international.snowowl.snomed.validation.examples.uniqueFSNGlobalConstraint";
<ide>
<ide> @Override
<del> public GlobalConstraintStatus validate(IBranchPath branchPath, IProgressMonitor monitor) {
<del> LOG.info("Started global FSN validation constraint validation rule");
<del> final SubMonitor subMonitor = SubMonitor.convert(monitor);
<add> public GlobalConstraintStatus validate(final IBranchPath branchPath, final IProgressMonitor monitor) {
<add>
<add> LOG.info("Validating global FSN uniqueness constraint...");
<add>
<add> final SubMonitor subMonitor = SubMonitor.convert(monitor, 3);
<add>
<add> if (subMonitor.isCanceled()) {
<add> return createEmptyStatus();
<add> }
<add>
<ide> try {
<add>
<add> final IEventBus bus = ApplicationContext.getServiceForClass(IEventBus.class);
<add> final List<ExtendedLocale> locales = ApplicationContext.getServiceForClass(LanguageSetting.class).getLanguagePreference();
<add> final SnomedTerminologyBrowser terminologyBrowser = ApplicationContext.getServiceForClass(SnomedTerminologyBrowser.class);
<ide>
<del> final String languageRefSetId = getServiceForClass(ILanguageConfigurationProvider.class).getLanguageConfiguration().getLanguageRefSetId();
<del> final Multimap<String, String> fsnToIdsMapping = getServiceForClass(ISnomedComponentService.class).getFullySpecifiedNameToIdsMapping(branchPath, languageRefSetId);
<del> final Map<String, IdAndTerminologyComponentIdProvider> allConcepts = getAllConcepts(fsnToIdsMapping);
<add> final Set<Long> activeConceptIds = LongSets.toSet(terminologyBrowser.getAllActiveConceptIds(branchPath));
<add>
<add> subMonitor.worked(1);
<add>
<add> final SnomedDescriptions descriptions = SnomedRequests.prepareSearchDescription()
<add> .filterByActive(true)
<add> .filterByType(Concepts.FULLY_SPECIFIED_NAME)
<add> .filterByAcceptability(Acceptability.PREFERRED)
<add> .filterByExtendedLocales(locales)
<add> .filterByConceptId(activeConceptIds)
<add> .all()
<add> .build(branchPath.getPath())
<add> .executeSync(bus);
<add>
<add> subMonitor.worked(1);
<add>
<add> if (subMonitor.isCanceled()) {
<add> return createEmptyStatus();
<add> }
<add>
<add> final HashMultimap<String, String> fsnToIdsMap = HashMultimap.<String, String>create();
<add>
<add> for (final ISnomedDescription description : descriptions.getItems()) {
<add> fsnToIdsMap.put(description.getTerm(), description.getConceptId());
<add> }
<add>
<add> final Map<String, IdAndTerminologyComponentIdProvider> allConcepts = getAllConcepts(fsnToIdsMap);
<ide> final Collection<IdAndTerminologyComponentIdProvider> violatingConcepts = newHashSet();
<ide>
<del> for (final Entry<String, Collection<String>> entry : fsnToIdsMapping.asMap().entrySet()) {
<add> for (final Entry<String, Collection<String>> entry : fsnToIdsMap.asMap().entrySet()) {
<ide> if (entry.getValue().size() > 1) {
<ide> for (final String conceptId : entry.getValue()) {
<ide> violatingConcepts.add(createConcept(conceptId));
<ide> }
<ide> }
<ide>
<add> subMonitor.worked(1);
<add>
<add> LOG.info("Global FSN uniqueness validation finished.");
<add>
<ide> return new GlobalConstraintStatus(ID, violatingConcepts, allConcepts.values());
<del> } catch (Exception e) {
<del> LOG.error("Exception happened during FSN uniqueness validation.", e);
<del> } finally {
<del> LOG.info("Completed global FSN validation constraint validation rule.");
<del> subMonitor.done();
<add>
<add> } catch (final Exception e) {
<add> LOG.info("Exception happened during FSN uniqueness validation.", e);
<ide> }
<add>
<ide> // FIXME return error status, as the calculation failed
<ide> return createEmptyStatus();
<ide> }
<ide>
<ide> private Map<String, IdAndTerminologyComponentIdProvider> getAllConcepts(final Multimap<String, String> fsnToIdsMapping) {
<ide> return newHashMap(toMap(newHashSet(fsnToIdsMapping.values()), new Function<String, IdAndTerminologyComponentIdProvider>() {
<del> public IdAndTerminologyComponentIdProvider apply(String conceptId) {
<add> @Override
<add> public IdAndTerminologyComponentIdProvider apply(final String conceptId) {
<ide> return create(conceptId, CONCEPT_NUMBER);
<ide> }
<ide> }));
|
|
Java
|
apache-2.0
|
23c1a0a4d675c204c02aa9ac2c835ba8bd17661e
| 0 |
ryano144/intellij-community,ahb0327/intellij-community,ibinti/intellij-community,youdonghai/intellij-community,kool79/intellij-community,diorcety/intellij-community,ftomassetti/intellij-community,nicolargo/intellij-community,mglukhikh/intellij-community,slisson/intellij-community,allotria/intellij-community,Distrotech/intellij-community,wreckJ/intellij-community,blademainer/intellij-community,caot/intellij-community,allotria/intellij-community,diorcety/intellij-community,apixandru/intellij-community,petteyg/intellij-community,clumsy/intellij-community,Lekanich/intellij-community,kdwink/intellij-community,ryano144/intellij-community,tmpgit/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,vladmm/intellij-community,akosyakov/intellij-community,vladmm/intellij-community,adedayo/intellij-community,ThiagoGarciaAlves/intellij-community,asedunov/intellij-community,supersven/intellij-community,kdwink/intellij-community,ivan-fedorov/intellij-community,slisson/intellij-community,ThiagoGarciaAlves/intellij-community,adedayo/intellij-community,blademainer/intellij-community,TangHao1987/intellij-community,alphafoobar/intellij-community,ibinti/intellij-community,pwoodworth/intellij-community,slisson/intellij-community,signed/intellij-community,ThiagoGarciaAlves/intellij-community,wreckJ/intellij-community,Distrotech/intellij-community,retomerz/intellij-community,diorcety/intellij-community,michaelgallacher/intellij-community,FHannes/intellij-community,Distrotech/intellij-community,lucafavatella/intellij-community,adedayo/intellij-community,vvv1559/intellij-community,fitermay/intellij-community,ivan-fedorov/intellij-community,suncycheng/intellij-community,salguarnieri/intellij-community,apixandru/intellij-community,petteyg/intellij-community,orekyuu/intellij-community,lucafavatella/intellij-community,youdonghai/intellij-community,asedunov/intellij-community,slisson/intellij-community,allotria/intellij-community,da1z/intellij-community,consulo/consulo,FHannes/intellij-community,ol-loginov/intellij-community,ThiagoGarciaAlves/intellij-community,alphafoobar/intellij-community,kdwink/intellij-community,Lekanich/intellij-community,suncycheng/intellij-community,SerCeMan/intellij-community,retomerz/intellij-community,supersven/intellij-community,semonte/intellij-community,xfournet/intellij-community,clumsy/intellij-community,xfournet/intellij-community,nicolargo/intellij-community,diorcety/intellij-community,robovm/robovm-studio,jexp/idea2,nicolargo/intellij-community,tmpgit/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,retomerz/intellij-community,muntasirsyed/intellij-community,vladmm/intellij-community,Distrotech/intellij-community,nicolargo/intellij-community,jexp/idea2,ol-loginov/intellij-community,supersven/intellij-community,da1z/intellij-community,hurricup/intellij-community,hurricup/intellij-community,clumsy/intellij-community,FHannes/intellij-community,TangHao1987/intellij-community,vvv1559/intellij-community,michaelgallacher/intellij-community,ryano144/intellij-community,salguarnieri/intellij-community,fengbaicanhe/intellij-community,pwoodworth/intellij-community,Lekanich/intellij-community,ernestp/consulo,jagguli/intellij-community,idea4bsd/idea4bsd,samthor/intellij-community,suncycheng/intellij-community,allotria/intellij-community,caot/intellij-community,youdonghai/intellij-community,FHannes/intellij-community,MER-GROUP/intellij-community,holmes/intellij-community,retomerz/intellij-community,gnuhub/intellij-community,idea4bsd/idea4bsd,ryano144/int
ellij-community,kool79/intellij-community,youdonghai/intellij-community,ahb0327/intellij-community,robovm/robovm-studio,Distrotech/intellij-community,supersven/intellij-community,jexp/idea2,lucafavatella/intellij-community,MichaelNedzelsky/intellij-community,tmpgit/intellij-community,akosyakov/intellij-community,ibinti/intellij-community,MER-GROUP/intellij-community,ahb0327/intellij-community,robovm/robovm-studio,ol-loginov/intellij-community,signed/intellij-community,semonte/intellij-community,ahb0327/intellij-community,nicolargo/intellij-community,slisson/intellij-community,ftomassetti/intellij-community,Lekanich/intellij-community,ol-loginov/intellij-community,pwoodworth/intellij-community,fitermay/intellij-community,wreckJ/intellij-community,slisson/intellij-community,asedunov/intellij-community,supersven/intellij-community,fitermay/intellij-community,jagguli/intellij-community,clumsy/intellij-community,samthor/intellij-community,tmpgit/intellij-community,izonder/intellij-community,Distrotech/intellij-community,Distrotech/intellij-community,jagguli/intellij-community,supersven/intellij-community,ahb0327/intellij-community,mglukhikh/intellij-community,fitermay/intellij-community,blademainer/intellij-community,ftomassetti/intellij-community,robovm/robovm-studio,youdonghai/intellij-community,adedayo/intellij-community,MER-GROUP/intellij-community,retomerz/intellij-community,samthor/intellij-community,signed/intellij-community,semonte/intellij-community,michaelgallacher/intellij-community,signed/intellij-community,jexp/idea2,idea4bsd/idea4bsd,muntasirsyed/intellij-community,SerCeMan/intellij-community,TangHao1987/intellij-community,semonte/intellij-community,lucafavatella/intellij-community,vvv1559/intellij-community,michaelgallacher/intellij-community,retomerz/intellij-community,da1z/intellij-community,akosyakov/intellij-community,ThiagoGarciaAlves/intellij-community,robovm/robovm-studio,vvv1559/intellij-community,petteyg/intellij-community,MichaelNedzelsky/intellij-community,diorcety/intellij-community,alphafoobar/intellij-community,ernestp/consulo,alphafoobar/intellij-community,muntasirsyed/intellij-community,asedunov/intellij-community,ibinti/intellij-community,slisson/intellij-community,orekyuu/intellij-community,slisson/intellij-community,amith01994/intellij-community,fitermay/intellij-community,hurricup/intellij-community,kdwink/intellij-community,izonder/intellij-community,nicolargo/intellij-community,dslomov/intellij-community,FHannes/intellij-community,da1z/intellij-community,MER-GROUP/intellij-community,supersven/intellij-community,allotria/intellij-community,salguarnieri/intellij-community,orekyuu/intellij-community,youdonghai/intellij-community,suncycheng/intellij-community,ftomassetti/intellij-community,caot/intellij-community,youdonghai/intellij-community,slisson/intellij-community,blademainer/intellij-community,da1z/intellij-community,samthor/intellij-community,slisson/intellij-community,allotria/intellij-community,jagguli/intellij-community,ibinti/intellij-community,consulo/consulo,ftomassetti/intellij-community,diorcety/intellij-community,ivan-fedorov/intellij-community,fitermay/intellij-community,idea4bsd/idea4bsd,fnouama/intellij-community,FHannes/intellij-community,alphafoobar/intellij-community,jagguli/intellij-community,jagguli/intellij-community,ivan-fedorov/intellij-community,salguarnieri/intellij-community,ivan-fedorov/intellij-community,ivan-fedorov/intellij-community,dslomov/intellij-community,ahb0327/intellij-community,Distrotech/intellij-community,clumsy/intell
ij-community,izonder/intellij-community,orekyuu/intellij-community,dslomov/intellij-community,youdonghai/intellij-community,ivan-fedorov/intellij-community,asedunov/intellij-community,adedayo/intellij-community,fengbaicanhe/intellij-community,kdwink/intellij-community,ThiagoGarciaAlves/intellij-community,holmes/intellij-community,jagguli/intellij-community,tmpgit/intellij-community,asedunov/intellij-community,wreckJ/intellij-community,SerCeMan/intellij-community,alphafoobar/intellij-community,clumsy/intellij-community,Lekanich/intellij-community,suncycheng/intellij-community,ahb0327/intellij-community,dslomov/intellij-community,orekyuu/intellij-community,retomerz/intellij-community,retomerz/intellij-community,kdwink/intellij-community,wreckJ/intellij-community,muntasirsyed/intellij-community,joewalnes/idea-community,diorcety/intellij-community,kool79/intellij-community,idea4bsd/idea4bsd,fnouama/intellij-community,da1z/intellij-community,gnuhub/intellij-community,pwoodworth/intellij-community,muntasirsyed/intellij-community,fitermay/intellij-community,tmpgit/intellij-community,michaelgallacher/intellij-community,amith01994/intellij-community,orekyuu/intellij-community,mglukhikh/intellij-community,MichaelNedzelsky/intellij-community,jexp/idea2,nicolargo/intellij-community,muntasirsyed/intellij-community,allotria/intellij-community,fnouama/intellij-community,signed/intellij-community,alphafoobar/intellij-community,holmes/intellij-community,vvv1559/intellij-community,holmes/intellij-community,izonder/intellij-community,apixandru/intellij-community,izonder/intellij-community,signed/intellij-community,semonte/intellij-community,FHannes/intellij-community,dslomov/intellij-community,FHannes/intellij-community,tmpgit/intellij-community,fnouama/intellij-community,SerCeMan/intellij-community,gnuhub/intellij-community,wreckJ/intellij-community,lucafavatella/intellij-community,joewalnes/idea-community,michaelgallacher/intellij-community,Lekanich/intellij-community,dslomov/intellij-community,SerCeMan/intellij-community,gnuhub/intellij-community,blademainer/intellij-community,ThiagoGarciaAlves/intellij-community,ernestp/consulo,signed/intellij-community,slisson/intellij-community,xfournet/intellij-community,samthor/intellij-community,fengbaicanhe/intellij-community,idea4bsd/idea4bsd,SerCeMan/intellij-community,idea4bsd/idea4bsd,nicolargo/intellij-community,SerCeMan/intellij-community,petteyg/intellij-community,hurricup/intellij-community,adedayo/intellij-community,xfournet/intellij-community,lucafavatella/intellij-community,michaelgallacher/intellij-community,Distrotech/intellij-community,retomerz/intellij-community,Lekanich/intellij-community,diorcety/intellij-community,MichaelNedzelsky/intellij-community,izonder/intellij-community,gnuhub/intellij-community,ryano144/intellij-community,caot/intellij-community,clumsy/intellij-community,jagguli/intellij-community,ahb0327/intellij-community,dslomov/intellij-community,consulo/consulo,TangHao1987/intellij-community,nicolargo/intellij-community,petteyg/intellij-community,Lekanich/intellij-community,apixandru/intellij-community,michaelgallacher/intellij-community,amith01994/intellij-community,dslomov/intellij-community,tmpgit/intellij-community,joewalnes/idea-community,petteyg/intellij-community,dslomov/intellij-community,asedunov/intellij-community,fitermay/intellij-community,asedunov/intellij-community,holmes/intellij-community,akosyakov/intellij-community,MichaelNedzelsky/intellij-community,mglukhikh/intellij-community,pwoodworth/intellij-community,vvv1559/in
tellij-community,robovm/robovm-studio,fitermay/intellij-community,MER-GROUP/intellij-community,xfournet/intellij-community,ivan-fedorov/intellij-community,joewalnes/idea-community,kool79/intellij-community,semonte/intellij-community,izonder/intellij-community,xfournet/intellij-community,MER-GROUP/intellij-community,pwoodworth/intellij-community,TangHao1987/intellij-community,Distrotech/intellij-community,hurricup/intellij-community,da1z/intellij-community,supersven/intellij-community,ernestp/consulo,vladmm/intellij-community,joewalnes/idea-community,gnuhub/intellij-community,MichaelNedzelsky/intellij-community,blademainer/intellij-community,blademainer/intellij-community,supersven/intellij-community,suncycheng/intellij-community,adedayo/intellij-community,MER-GROUP/intellij-community,MichaelNedzelsky/intellij-community,mglukhikh/intellij-community,orekyuu/intellij-community,adedayo/intellij-community,kdwink/intellij-community,lucafavatella/intellij-community,caot/intellij-community,kool79/intellij-community,amith01994/intellij-community,Lekanich/intellij-community,orekyuu/intellij-community,consulo/consulo,semonte/intellij-community,alphafoobar/intellij-community,holmes/intellij-community,MER-GROUP/intellij-community,ivan-fedorov/intellij-community,muntasirsyed/intellij-community,SerCeMan/intellij-community,hurricup/intellij-community,vladmm/intellij-community,ol-loginov/intellij-community,adedayo/intellij-community,petteyg/intellij-community,Lekanich/intellij-community,muntasirsyed/intellij-community,petteyg/intellij-community,fitermay/intellij-community,holmes/intellij-community,retomerz/intellij-community,TangHao1987/intellij-community,Distrotech/intellij-community,fengbaicanhe/intellij-community,youdonghai/intellij-community,tmpgit/intellij-community,petteyg/intellij-community,pwoodworth/intellij-community,lucafavatella/intellij-community,allotria/intellij-community,samthor/intellij-community,robovm/robovm-studio,orekyuu/intellij-community,ryano144/intellij-community,michaelgallacher/intellij-community,petteyg/intellij-community,ryano144/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,kool79/intellij-community,kool79/intellij-community,joewalnes/idea-community,idea4bsd/idea4bsd,salguarnieri/intellij-community,asedunov/intellij-community,ryano144/intellij-community,mglukhikh/intellij-community,petteyg/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,hurricup/intellij-community,gnuhub/intellij-community,FHannes/intellij-community,ibinti/intellij-community,blademainer/intellij-community,samthor/intellij-community,clumsy/intellij-community,suncycheng/intellij-community,apixandru/intellij-community,izonder/intellij-community,ivan-fedorov/intellij-community,MER-GROUP/intellij-community,Lekanich/intellij-community,amith01994/intellij-community,clumsy/intellij-community,clumsy/intellij-community,vvv1559/intellij-community,izonder/intellij-community,kool79/intellij-community,petteyg/intellij-community,vladmm/intellij-community,ernestp/consulo,signed/intellij-community,gnuhub/intellij-community,caot/intellij-community,amith01994/intellij-community,lucafavatella/intellij-community,fnouama/intellij-community,FHannes/intellij-community,fnouama/intellij-community,Lekanich/intellij-community,kdwink/intellij-community,fengbaicanhe/intellij-community,robovm/robovm-studio,samthor/intellij-community,apixandru/intellij-community,signed/intellij-community,ftomassetti/intellij-community,salguarnieri/intellij-community,fengbaicanhe/intellij-communi
ty,FHannes/intellij-community,jagguli/intellij-community,fnouama/intellij-community,asedunov/intellij-community,amith01994/intellij-community,orekyuu/intellij-community,nicolargo/intellij-community,kool79/intellij-community,diorcety/intellij-community,caot/intellij-community,akosyakov/intellij-community,TangHao1987/intellij-community,adedayo/intellij-community,ryano144/intellij-community,blademainer/intellij-community,retomerz/intellij-community,ThiagoGarciaAlves/intellij-community,izonder/intellij-community,ol-loginov/intellij-community,pwoodworth/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,youdonghai/intellij-community,adedayo/intellij-community,vladmm/intellij-community,SerCeMan/intellij-community,alphafoobar/intellij-community,salguarnieri/intellij-community,semonte/intellij-community,jexp/idea2,izonder/intellij-community,MER-GROUP/intellij-community,supersven/intellij-community,holmes/intellij-community,wreckJ/intellij-community,hurricup/intellij-community,FHannes/intellij-community,kdwink/intellij-community,signed/intellij-community,ahb0327/intellij-community,fengbaicanhe/intellij-community,izonder/intellij-community,vvv1559/intellij-community,supersven/intellij-community,ftomassetti/intellij-community,vvv1559/intellij-community,akosyakov/intellij-community,michaelgallacher/intellij-community,jagguli/intellij-community,SerCeMan/intellij-community,ThiagoGarciaAlves/intellij-community,joewalnes/idea-community,youdonghai/intellij-community,kool79/intellij-community,fitermay/intellij-community,kool79/intellij-community,apixandru/intellij-community,tmpgit/intellij-community,pwoodworth/intellij-community,kdwink/intellij-community,salguarnieri/intellij-community,muntasirsyed/intellij-community,MER-GROUP/intellij-community,asedunov/intellij-community,holmes/intellij-community,consulo/consulo,ftomassetti/intellij-community,wreckJ/intellij-community,clumsy/intellij-community,ahb0327/intellij-community,fnouama/intellij-community,akosyakov/intellij-community,idea4bsd/idea4bsd,MichaelNedzelsky/intellij-community,semonte/intellij-community,consulo/consulo,ibinti/intellij-community,ibinti/intellij-community,michaelgallacher/intellij-community,hurricup/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,kdwink/intellij-community,asedunov/intellij-community,vladmm/intellij-community,ivan-fedorov/intellij-community,gnuhub/intellij-community,signed/intellij-community,lucafavatella/intellij-community,MichaelNedzelsky/intellij-community,fnouama/intellij-community,adedayo/intellij-community,da1z/intellij-community,diorcety/intellij-community,pwoodworth/intellij-community,alphafoobar/intellij-community,akosyakov/intellij-community,semonte/intellij-community,TangHao1987/intellij-community,MichaelNedzelsky/intellij-community,lucafavatella/intellij-community,caot/intellij-community,suncycheng/intellij-community,vladmm/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,hurricup/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,ol-loginov/intellij-community,holmes/intellij-community,vvv1559/intellij-community,idea4bsd/idea4bsd,dslomov/intellij-community,da1z/intellij-community,gnuhub/intellij-community,samthor/intellij-community,gnuhub/intellij-community,FHannes/intellij-community,supersven/intellij-community,ryano144/intellij-community,hurricup/intellij-community,slisson/intellij-community,samthor/intellij-community,akosyakov/intellij-community,TangHao1987/intellij-community,ftomass
etti/intellij-community,youdonghai/intellij-community,ivan-fedorov/intellij-community,wreckJ/intellij-community,allotria/intellij-community,xfournet/intellij-community,akosyakov/intellij-community,diorcety/intellij-community,allotria/intellij-community,wreckJ/intellij-community,xfournet/intellij-community,kool79/intellij-community,dslomov/intellij-community,nicolargo/intellij-community,muntasirsyed/intellij-community,idea4bsd/idea4bsd,MER-GROUP/intellij-community,da1z/intellij-community,holmes/intellij-community,ibinti/intellij-community,ibinti/intellij-community,robovm/robovm-studio,apixandru/intellij-community,ibinti/intellij-community,retomerz/intellij-community,allotria/intellij-community,ol-loginov/intellij-community,MichaelNedzelsky/intellij-community,semonte/intellij-community,fnouama/intellij-community,xfournet/intellij-community,nicolargo/intellij-community,diorcety/intellij-community,amith01994/intellij-community,semonte/intellij-community,muntasirsyed/intellij-community,caot/intellij-community,tmpgit/intellij-community,alphafoobar/intellij-community,ahb0327/intellij-community,fengbaicanhe/intellij-community,fengbaicanhe/intellij-community,alphafoobar/intellij-community,akosyakov/intellij-community,blademainer/intellij-community,TangHao1987/intellij-community,fitermay/intellij-community,robovm/robovm-studio,dslomov/intellij-community,mglukhikh/intellij-community,pwoodworth/intellij-community,amith01994/intellij-community,hurricup/intellij-community,holmes/intellij-community,jexp/idea2,apixandru/intellij-community,suncycheng/intellij-community,suncycheng/intellij-community,ol-loginov/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,michaelgallacher/intellij-community,apixandru/intellij-community,ftomassetti/intellij-community,tmpgit/intellij-community,salguarnieri/intellij-community,fnouama/intellij-community,fnouama/intellij-community,fengbaicanhe/intellij-community,ryano144/intellij-community,apixandru/intellij-community,vladmm/intellij-community,salguarnieri/intellij-community,vladmm/intellij-community,jexp/idea2,retomerz/intellij-community,lucafavatella/intellij-community,caot/intellij-community,ol-loginov/intellij-community,joewalnes/idea-community,ahb0327/intellij-community,jagguli/intellij-community,MichaelNedzelsky/intellij-community,salguarnieri/intellij-community,wreckJ/intellij-community,blademainer/intellij-community,xfournet/intellij-community,apixandru/intellij-community,semonte/intellij-community,ftomassetti/intellij-community,SerCeMan/intellij-community,clumsy/intellij-community,jagguli/intellij-community,da1z/intellij-community,amith01994/intellij-community,asedunov/intellij-community,akosyakov/intellij-community,SerCeMan/intellij-community,caot/intellij-community,samthor/intellij-community,hurricup/intellij-community,xfournet/intellij-community,samthor/intellij-community,orekyuu/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,caot/intellij-community,orekyuu/intellij-community,wreckJ/intellij-community,salguarnieri/intellij-community,pwoodworth/intellij-community,ol-loginov/intellij-community,signed/intellij-community,xfournet/intellij-community,joewalnes/idea-community,signed/intellij-community,robovm/robovm-studio,lucafavatella/intellij-community,ernestp/consulo,ftomassetti/intellij-community,ryano144/intellij-community,robovm/robovm-studio,vladmm/intellij-community,kdwink/intellij-community,muntasirsyed/intellij-community,gnuhub/intellij-community,fitermay/intellij-community,fengbaicanhe/intellij-community,blade
mainer/intellij-community,TangHao1987/intellij-community,amith01994/intellij-community,ol-loginov/intellij-community,Distrotech/intellij-community,apixandru/intellij-community,TangHao1987/intellij-community,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,ibinti/intellij-community,amith01994/intellij-community,fengbaicanhe/intellij-community,youdonghai/intellij-community
|
package com.intellij.codeInsight.daemon.impl.quickfix;
import com.intellij.codeInsight.daemon.QuickFixBundle;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.fileEditor.OpenFileDescriptor;
import com.intellij.psi.*;
import com.intellij.util.IncorrectOperationException;
/**
* @author Mike
*/
public class CreateClassFromUsageAction extends CreateFromUsageBaseAction {
private static final Logger LOG = Logger.getInstance(
"#com.intellij.codeInsight.daemon.impl.quickfix.CreateClassFromUsageAction");
private final boolean myCreateInterface;
private final SmartPsiElementPointer myRefElement;
public CreateClassFromUsageAction(PsiJavaCodeReferenceElement refElement, boolean createInterface) {
myRefElement = SmartPointerManager.getInstance(refElement.getProject()).createLazyPointer(refElement);
myCreateInterface = createInterface;
}
public String getText(String varName) {
if (myCreateInterface) {
return QuickFixBundle.message("create.class.from.usage.interface.text", varName);
}
else {
return QuickFixBundle.message("create.class.from.usage.class.text", varName);
}
}
protected void invokeImpl(PsiClass targetClass) {
if (CreateFromUsageUtils.isValidReference(getRefElement(), true)) {
return;
}
final String superClassName;
    if (getRefElement().getParent().getParent() instanceof PsiMethod) {
PsiMethod method = (PsiMethod)getRefElement().getParent().getParent();
if (method.getThrowsList() == getRefElement().getParent()) {
superClassName = "java.lang.Exception";
}
else superClassName = null;
}
else superClassName = null;
ApplicationManager.getApplication().runWriteAction(
new Runnable() {
public void run() {
final PsiClass aClass = CreateFromUsageUtils.createClass(getRefElement(), myCreateInterface, superClassName);
if (aClass == null) return;
try {
getRefElement().bindToElement(aClass);
}
catch (IncorrectOperationException e) {
LOG.error(e);
}
OpenFileDescriptor descriptor = new OpenFileDescriptor(getRefElement().getProject(), aClass.getContainingFile().getVirtualFile(),
aClass.getTextOffset());
FileEditorManager.getInstance(aClass.getProject()).openTextEditor(descriptor, true);
}
}
);
}
protected boolean isValidElement(PsiElement element) {
return CreateFromUsageUtils.isValidReference((PsiReference)element, true);
}
protected PsiElement getElement() {
if (!getRefElement().isValid() || !getRefElement().getManager().isInProject(getRefElement())) return null;
if (!CreateFromUsageUtils.isValidReference(getRefElement(), true) &&
getRefElement().getReferenceNameElement() != null && checkClassName(getRefElement().getReferenceName())) {
PsiElement parent = getRefElement().getParent();
if (parent instanceof PsiTypeElement) {
if (parent.getParent() instanceof PsiReferenceParameterList) return getRefElement();
while (parent.getParent() instanceof PsiTypeElement) parent = parent.getParent();
if (parent.getParent() instanceof PsiVariable || parent.getParent() instanceof PsiMethod ||
parent.getParent() instanceof PsiClassObjectAccessExpression ||
parent.getParent() instanceof PsiTypeCastExpression ||
(parent.getParent() instanceof PsiInstanceOfExpression && ((PsiInstanceOfExpression)parent.getParent()).getCheckType() == parent)) {
return getRefElement();
}
}
else if (parent instanceof PsiReferenceList) {
if (parent.getParent() instanceof PsiClass) {
PsiClass psiClass = (PsiClass)parent.getParent();
if (psiClass.getExtendsList() == parent) {
if (!myCreateInterface && !psiClass.isInterface()) return getRefElement();
if (myCreateInterface && psiClass.isInterface()) return getRefElement();
}
if (psiClass.getImplementsList() == parent && myCreateInterface) return getRefElement();
}
else if (parent.getParent() instanceof PsiMethod) {
PsiMethod method = (PsiMethod)parent.getParent();
if (method.getThrowsList() == parent && !myCreateInterface) return getRefElement();
}
}
else if (parent instanceof PsiAnonymousClass && ((PsiAnonymousClass)parent).getBaseClassReference() == getRefElement()) {
return getRefElement();
}
}
    if (getRefElement() instanceof PsiReferenceExpression) {
PsiReferenceExpression referenceExpression = (PsiReferenceExpression)getRefElement();
PsiElement parent = referenceExpression.getParent();
if (parent instanceof PsiMethodCallExpression) {
return null;
}
if (parent.getParent() instanceof PsiMethodCallExpression && myCreateInterface) return null;
if (referenceExpression.getReferenceNameElement() != null &&
checkClassName(referenceExpression.getReferenceName()) &&
!CreateFromUsageUtils.isValidReference(referenceExpression, true)) {
return referenceExpression;
}
}
return null;
}
protected boolean isAllowOuterTargetClass() {
return false;
}
private boolean checkClassName(String name) {
return Character.isUpperCase(name.charAt(0));
}
protected boolean isAvailableImpl(int offset) {
PsiElement nameElement = getRefElement().getReferenceNameElement();
if (nameElement == null) return false;
PsiElement parent = getRefElement().getParent();
if (parent instanceof PsiExpression && !(parent instanceof PsiReferenceExpression)) return false;
if (shouldShowTag(offset, nameElement, getRefElement())) {
setText(getText(nameElement.getText()));
return true;
}
return false;
}
public String getFamilyName() {
return QuickFixBundle.message("create.class.from.usage.family");
}
public boolean startInWriteAction() {
return false;
}
public PsiJavaCodeReferenceElement getRefElement() {
return (PsiJavaCodeReferenceElement)myRefElement.getElement();
}
}
|
codeInsight/impl/com/intellij/codeInsight/daemon/impl/quickfix/CreateClassFromUsageAction.java
|
package com.intellij.codeInsight.daemon.impl.quickfix;
import com.intellij.codeInsight.daemon.QuickFixBundle;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.fileEditor.OpenFileDescriptor;
import com.intellij.psi.*;
import com.intellij.util.IncorrectOperationException;
/**
* @author Mike
*/
public class CreateClassFromUsageAction extends CreateFromUsageBaseAction {
private static final Logger LOG = Logger.getInstance(
"#com.intellij.codeInsight.daemon.impl.quickfix.CreateClassFromUsageAction");
private final boolean myCreateInterface;
private final SmartPsiElementPointer myRefElement;
public CreateClassFromUsageAction(PsiJavaCodeReferenceElement refElement, boolean createInterface) {
myRefElement = SmartPointerManager.getInstance(refElement.getProject()).createLazyPointer(refElement);
myCreateInterface = createInterface;
}
public String getText(String varName) {
if (myCreateInterface) {
return QuickFixBundle.message("create.class.from.usage.interface.text", varName);
}
else {
return QuickFixBundle.message("create.class.from.usage.class.text", varName);
}
}
protected void invokeImpl(PsiClass targetClass) {
if (CreateFromUsageUtils.isValidReference(getRefElement(), true)) {
return;
}
final String superClassName;
    if (getRefElement().getParent().getParent() instanceof PsiMethod) {
PsiMethod method = (PsiMethod)getRefElement().getParent().getParent();
if (method.getThrowsList() == getRefElement().getParent()) {
superClassName = "java.lang.Exception";
}
else superClassName = null;
}
else superClassName = null;
ApplicationManager.getApplication().runWriteAction(
new Runnable() {
public void run() {
final PsiClass aClass = CreateFromUsageUtils.createClass(getRefElement(), myCreateInterface, superClassName);
if (aClass == null) return;
try {
getRefElement().bindToElement(aClass);
}
catch (IncorrectOperationException e) {
LOG.error(e);
}
OpenFileDescriptor descriptor = new OpenFileDescriptor(getRefElement().getProject(), aClass.getContainingFile().getVirtualFile(),
aClass.getTextOffset());
FileEditorManager.getInstance(aClass.getProject()).openTextEditor(descriptor, true);
}
}
);
}
protected boolean isValidElement(PsiElement element) {
return CreateFromUsageUtils.isValidReference((PsiReference)element, true);
}
protected PsiElement getElement() {
if (!getRefElement().isValid() || !getRefElement().getManager().isInProject(getRefElement())) return null;
if (!CreateFromUsageUtils.isValidReference(getRefElement(), true) &&
getRefElement().getReferenceNameElement() != null && checkClassName(getRefElement().getReferenceName())) {
PsiElement parent = getRefElement().getParent();
if (parent instanceof PsiTypeElement) {
if (parent.getParent() instanceof PsiReferenceParameterList) return getRefElement();
while (parent.getParent() instanceof PsiTypeElement) parent = parent.getParent();
if (parent.getParent() instanceof PsiVariable || parent.getParent() instanceof PsiMethod ||
parent.getParent() instanceof PsiClassObjectAccessExpression ||
parent.getParent() instanceof PsiTypeCastExpression ||
(parent.getParent() instanceof PsiInstanceOfExpression && ((PsiInstanceOfExpression)parent.getParent()).getCheckType() == parent)) {
return getRefElement();
}
}
else if (parent instanceof PsiReferenceList) {
if (parent.getParent() instanceof PsiClass) {
PsiClass psiClass = (PsiClass)parent.getParent();
if (psiClass.getExtendsList() == parent) {
if (!myCreateInterface && !psiClass.isInterface()) return getRefElement();
if (myCreateInterface && psiClass.isInterface()) return getRefElement();
}
if (psiClass.getImplementsList() == parent && myCreateInterface) return getRefElement();
}
else if (parent.getParent() instanceof PsiMethod) {
PsiMethod method = (PsiMethod)parent.getParent();
if (method.getThrowsList() == parent && !myCreateInterface) return getRefElement();
}
}
else if (parent instanceof PsiAnonymousClass && ((PsiAnonymousClass)parent).getBaseClassReference() == getRefElement()) {
return getRefElement();
}
}
    if (getRefElement() instanceof PsiReferenceExpression) {
PsiReferenceExpression referenceExpression = (PsiReferenceExpression)getRefElement();
PsiElement parent = referenceExpression.getParent();
if (parent instanceof PsiMethodCallExpression) {
return null;
}
if (parent.getParent() instanceof PsiMethodCallExpression && myCreateInterface) return null;
if (referenceExpression.getReferenceNameElement() != null &&
checkClassName(referenceExpression.getReferenceName()) &&
!CreateFromUsageUtils.isValidReference(referenceExpression, true)) {
return referenceExpression;
}
}
return null;
}
private boolean checkClassName(String name) {
return Character.isUpperCase(name.charAt(0));
}
protected boolean isAvailableImpl(int offset) {
PsiElement nameElement = getRefElement().getReferenceNameElement();
if (nameElement == null) return false;
PsiElement parent = getRefElement().getParent();
if (parent instanceof PsiExpression && !(parent instanceof PsiReferenceExpression)) return false;
if (shouldShowTag(offset, nameElement, getRefElement())) {
setText(getText(nameElement.getText()));
return true;
}
return false;
}
public String getFamilyName() {
return QuickFixBundle.message("create.class.from.usage.family");
}
public boolean startInWriteAction() {
return false;
}
public PsiJavaCodeReferenceElement getRefElement() {
return (PsiJavaCodeReferenceElement)myRefElement.getElement();
}
}
|
cleanup
|
codeInsight/impl/com/intellij/codeInsight/daemon/impl/quickfix/CreateClassFromUsageAction.java
|
cleanup
|
<ide><path>codeInsight/impl/com/intellij/codeInsight/daemon/impl/quickfix/CreateClassFromUsageAction.java
<ide> return null;
<ide> }
<ide>
<add> protected boolean isAllowOuterTargetClass() {
<add> return false;
<add> }
<add>
<ide> private boolean checkClassName(String name) {
<ide> return Character.isUpperCase(name.charAt(0));
<ide> }
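Note on the diff above: the "cleanup" commit adds an isAllowOuterTargetClass() override returning false, which narrows where the quick fix will offer to create the class. Below is a minimal sketch of the overridable-hook pattern this relies on; the class and method bodies are illustrative assumptions, not the actual CreateFromUsageBaseAction internals, which are not shown in this record.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Hypothetical base action: it consults an overridable hook before
// offering outer (enclosing) classes as creation targets.
abstract class CreateActionSketch {
    protected boolean isAllowOuterTargetClass() {
        return true; // permissive default in the sketch
    }

    List<String> chooseTargets(List<String> innerCandidates, List<String> outerCandidates) {
        List<String> targets = new ArrayList<>(innerCandidates);
        if (isAllowOuterTargetClass()) {
            targets.addAll(outerCandidates);
        }
        return targets;
    }
}

public class HookDemo extends CreateActionSketch {
    @Override
    protected boolean isAllowOuterTargetClass() {
        return false; // mirrors the override added in the diff above
    }

    public static void main(String[] args) {
        List<String> targets = new HookDemo().chooseTargets(
                Arrays.asList("TopLevelCandidate"), Arrays.asList("OuterCandidate"));
        System.out.println(targets); // prints [TopLevelCandidate]
    }
}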
|
|
JavaScript
|
mit
|
ca52d2b1a3b875e0b2e75d03afacda6fb30b1928
| 0 |
AlexMcArdle/r6maps,AlexMcArdle/r6maps,AlexMcArdle/r6maps,capajon/r6maps,capajon/r6maps,capajon/r6maps
|
'use strict';
var R6MapsLangTermsJapanese = (function(R6MapsLangTerms, undefined) {
var name = 'ja',
terms = {
general: {
pageTitle: '',
pageTitleSelectMap: 'R6Maps.com - マップを選択',
pageTitleStart: '',
cameraViewCaption: '{floorName} カメラ視点',
        shortcutTip: 'ショートカット: {shortcut}',
menu: 'メニュー',
about: 'R6Mapsについて',
languageHeader: '言語',
optionsHeader: '設定',
labelLosOpacity: 'カメラ視界の透明さ',
labelPercent: '{int}%',
labelLosDefault: '(Default)',
labelLos105: '(あれ?)',
labelLos110: '(そんなばかな!)',
labelRoomLabelStyle: '部屋名の文字スタイル',
labelNumberFloorsToDisplay: '階の数を選択',
lockPanning: 'パンニング解除',
lockZooming: 'ズーミング解除',
fullScreen: '全画面',
enableScreenshots: 'スクリーンショット許可',
contributions: '寄付',
supportSiteNote: 'このサイトをサポートしたい方には寄付ボタンをクリックしてください'
},
roomLabelStyles: {
Dark: '暗い',
Light: '明るい(標準)',
DarkAndLarge: '大きく暗い',
LightAndLarge: '大きく明るい',
DisplayNone: 'オフ'
},
floorDisplayOptions: {
one: 'フル',
two: '分割',
four: 'グリッド'
},
selectMaps: {
selectAMap: 'マップを選択',
homeLink: 'マップを選択'
},
compass: {
letterN: '北',
letterE: '東',
letterS: '南',
letterW: '西'
},
floorNames: {
basement: { full: '地下', short: 'B' },
firstFloor: { full: '1階', short: '1' },
secondFloor: { full: '2階', short: '2' },
thirdFloor: { full: '3階', short: '3' },
fourthFloor: { full: '4階', short: '4' },
roof: { full: '屋上', short: 'R' }
},
objectives: {
bombShortA: 'A',
bombShortB: 'B',
bomb: '爆弾',
hostageShort: 'H',
hostage: '人質',
secureShort: 'S',
secure: '確保',
showAll: '全て見る'
},
legend: {
breakableWalls: '破壊できる壁',
breakableFloorTraps: '破壊できる床の<br/>落とし戸',
ceilingHatches: '破壊できる天井の<br/>落とし戸',
lineOfSightWalls: '視界の壁',
lineOfSightFloors: '視界の床',
droneTunnels: 'ドローンのトンネル',
objectives: '目標',
insertionPoints: '出現地点',
securityCameras: '監視カメラ',
skylights: '天窓',
onFloorAboveOrBelow: '上か下の階にある',
cameraLineOfSight: 'カメラの視界',
ladders: 'はしご'
},
mapNames: {
bank: '銀行',
bartlett: 'バートレット大学',
border: '国境',
chalet: '山荘',
club: 'クラブハウス',
coastline: '海岸線',
consulate: '領事館',
favela: 'ファべーラ',
hereford: 'ヘレフォード基地',
house: '民家',
kanal: '運河',
kafe: 'カフェ・ドストエフスキー',
oregon: 'オレゴン',
plane: '大統領専用機',
skyscraper: '高層ビル',
yacht: 'ヨット'
},
mapRooms: {
bank: {
spawnBoulevard: '大通り',
spawnBackAlley: '裏路地',
printerRoom: 'コピー室',
parkingLot: '駐車場',
boulevard: '大通り',
jewelryFront: '宝石店前',
plaza: '広場',
mainEntrance: '正面玄関',
garageRamp: 'ガレージ<br/>スロープ',
exteriorParking: '駐車場',
garageRoof: 'ガレージ屋上',
alleyAccess: '裏路地入口',
backAlleyRooftop: '裏路地屋根',
backAlley: '裏路地',
highRoof: '銀行屋上 (上)',
lowRoof: '銀行屋上 (下)',
vault: '金庫',
goldVault: '金保管室',
serverRoomStairs: 'サーバールーム階段',
serverRoom: 'サーバールーム',
CCTVRoom: '監視室',
loadingDock: '搬出口',
secureHallway: '保管室廊下',
sewer: '下水道',
lockers: 'ロッカー',
vaultLobby: '金庫前ロビー',
vaultEntrance: '金庫入口',
mainStairway: 'メイン階段',
bankGarage: '銀行ガレージ',
elevators: 'エレベーター',
tellersOffice: '窓口オフィス',
archives: '資料室',
tellers: '窓口',
loanOffice: 'ローンオフィス',
officeHallway: 'オフィス廊下',
skylightStairwell: '吹き抜け階段',
lobby: 'ロビー',
openArea: '空き部屋',
staffRoom: 'スタッフルーム',
electricalRoom: '電子機器室',
adminOffice: '管理事務室',
ATMs: 'ATMs',
executiveHallway: 'VIP専用通路',
frontDesk: '受付',
executiveLounge: 'VIPラウンジ',
CEOOffice: 'CEOオフィス',
janitorCloset: '用務室',
hallway: '廊下',
terrace: 'テラス',
stockTradingRoom: '証券取引ルーム',
conferenceRoom: '会議室'
},
bartlett: {
archwayHall: 'アーチホール',
archwaylHallway: '',
backAlley: '裏路地',
bathroom: 'トイレ',
campusField: '校庭',
classroom: '教室',
coatRoom: 'クローク<br/>ルーム',
compassHallway: '羅針盤エリア',
courtyard: '中庭',
centralHallway: '中央廊下',
diningRoom: 'ダイニングルーム',
eastBalcony: '',
eastCorridor: '東通路',
eastStairs: '東階段',
festival: 'フェスティバル会場',
frontEntrance: '正面入口',
frontOffice: 'フロントオフィス',
frontPatio: '正面パティオ',
gardenPass: '脇道',
kitchen: 'キッチン',
lobby: 'ロビー',
lounge: 'ラウンジ',
lowerLibrary: '図書室1階',
mainGate: 'メインゲート',
mainOffice: 'メインオフィス',
modelHall: 'モデルホール',
pantry: '食料庫',
parking: '駐車場',
pianoRoom: 'ピアノルーム',
readingRoom: '読書室',
roof: '屋上',
rowingMuseum: 'ボート展示室',
serviceRoom: 'サービス<br/>ルーム',
terrace: 'テラス',
trophyRoom: '記念品室',
upperLibrary: '図書室2階',
vistaHallway: '吹き抜け<br/>の廊下',
westBalcony: '',
westCorridor: '西通路'
},
border: {
armoryLockers: '武器庫<br/>ロッカー',
tellers: '窓口',
ventilationRoom: '換気室',
exitHallway: '出口廊下',
supplyCorridor: '備品通路',
detention: '留置所',
customsInspection: '税関検査',
customsDesk: '税関<br/>デスク',
centralStairs: '中央階段',
serverRoom: 'サーバールーム',
supplyRoom: '備品室',
workshop: '作業室',
mainLobby: 'メインロビー',
bathroom: 'トイレ',
waitingRoom: '待合室',
eastStairs: '東階段',
passportCheck: 'パスポート<br/>チェック',
archives: '資料室',
offices: 'オフィス',
officesHallway: 'オフィス廊下',
fountain: '噴水',
mainHallway: 'メイン<br/>廊下',
armoryDesk: '武器庫<br/>デスク',
securityRoom: 'セキュリティ<br/>ルーム',
breakRoom: '休憩室',
spawnEastVehicleEntrance: '東側車両入口',
spawnValley: '流域',
spawnWestVehicleExit: '西側車両出口',
westTower: '西側タワー',
pedestrianExit: '歩行者用出口',
valley: '流域',
parkingLotEntrance: '駐車場入口',
parkingLot: '駐車場',
westRoad: '西側道路',
vehicleCustoms: '車両用税関',
crashScene: '衝突現場',
eastRoad: '東側道路',
pedestrianEntrance: '歩行者用<br/>入口',
pedestrianCustoms: '歩行者用税関',
watchTower: '監視タワー',
eastAlley: '東側路地',
parkingLotAlley: '駐車場路地',
northBalcony: '北側バルコニー',
eastBalcony: '東側<br/>バルコニー',
westBalcony: '西側<br/>バルコニー',
southBalcony: '南側バルコニー',
roof: '屋上'
},
chalet: {
spawnFrontYard: 'フロントヤード',
spawnCampfire: 'キャンプファイア',
spawnCliffside: '崖際',
spawnLakeside: 'レイクサイド',
libraryStairs: '図書室階段',
snowmobileGarageCorridor: 'スノーモービル<br/>ガレージ<br/>前通路',
snowmobileGarage: 'スノーモービル<br/>ガレージ',
greatRoomStairs: 'リビング階段',
storageRoom: '貯蔵室',
wineCellar: 'ワイン<br/>セラー',
wineStock: 'ワイン<br/>貯蔵室',
basementHallway: '地下通路',
backyardStairs: 'バックヤード<br/>階段',
mainStairs: 'メイン階段',
mainGarage: 'メインガレージ',
garageEntrance: 'メインガレージ<br/>入口',
westEntrance: '西入口',
gamingRoomHallway: ' 娯楽室前<br/>廊下',
gamingRoom: '娯楽室',
bar: 'バー',
greatRoom: 'リビング',
diningRoom: 'ダイニングルーム',
mainEntrance: '正面玄関',
trophyRoom: '記念品室',
kitchenHallway: 'キッチン<br/>前廊下',
kitchen: 'キッチン',
libraryHallway: '図書室廊下',
libraryEntrance: '図書室前',
library: '図書室',
bedroomTerrace: 'ベッドルーム<br/>テラス',
fireplaceHallway: 'リビング2階廊下',
bedroomHallway: 'ベッドルーム<br/>前廊下',
masterBathroom: 'メインバスルーム',
masterBedroom: 'メインベッドルーム',
office: 'オフィス',
woodenTrail: '森の小道',
campfireWood: 'キャンプファイア<br/>周辺の森',
backyard: 'バックヤード',
gazeebo: '展望台',
cliffsideStairs: '断崖エリア<br/>階段',
cliffsideWoods: '崖際の森',
backyardPatio: 'バックヤード<br/>パティオ',
officeBalcony: 'オフィス<br/>バルコニー',
helipadTrail: 'ヘリポートへの道',
helipad: 'ヘリポート',
frontYardPatio: 'フロントヤード<br/>パティオ',
frontYard: 'フロントヤード',
bathroomBalcony: 'バスルーム<br/>バルコニー',
libraryBalcony: '図書室<br/>バルコニー',
bedroomBalcony: 'ベッドルーム<br/>バルコニー',
snowmobiles: 'スノーモービル'
},
club: {
spawnMainEntrance: 'メインゲート',
spawnShippingDocks: '搬入口',
spawnWarehouse: '倉庫',
spawnConstructionSite: '建設現場',
easternSubroof: '東側サブルーフ',
constructionSite: '建設現場',
container: 'コンテナ',
graffitiArea: '落書きエリア',
recreationArea: '娯楽エリア',
junkyard: '廃品置場',
VIPParking: 'VIP専用駐車場',
mainGate: 'メインゲート',
parking: '駐車場',
kennels: '犬舎',
trash: 'ゴミ収集所',
centralSubroof: '中央<br/>サブルーフ',
easternRoof: '東側屋上',
centralRoof: '中央ルーフ',
westernRoof: '西側屋上',
balcony: 'バルコニー',
escapeTunnel: '避難用<br/>トンネル',
arsenalRoom: '武器保管室',
basementHallway: '地下廊下',
memorialRoom: '記念室',
utilityRoom: '収納室',
oilPit: 'オイルピット',
centralStairs: '中央階段',
church: '祭壇',
frontPorch: 'フロントポーチ',
garage: 'ガレージ',
lobby: 'ロビー',
stockRoom: '倉庫',
garageStorage: 'ガレージ<br/>倉庫',
lounge: 'ラウンジ',
bar: 'バー',
centralHallway: '中央廊下',
kitchen: 'キッチン',
kitchenEntrance: 'キッチン裏口',
westernHallway: '西側廊下',
stripClub: 'ナイトクラブ',
junkyardEntrance: '廃品置場側<br/>出入口',
sideEntrance: '通用口',
changingRoom: '更衣室',
bedroom: 'ベッドルーム',
bathroom: 'バスルーム',
bedroomHallway: 'ベッドルーム前廊下',
logisticOffice: '物流オフィス',
gym: 'ジム',
secretStash: '隠し金庫',
CCTVRoom: '監視室',
cashRoom: '金庫室',
easternStairs: '東側階段'
},
coastline: {
aquarium: 'アクアリウム',
backAlley: '裏通路',
balcony: 'バルコニー',
bathroom: 'バスルーム',
billiardsRoom: 'ビリヤードルーム',
blueBar: 'ブルー・バー',
cantina: 'バー',
courtyard: '中庭',
djBooth: 'DJブース',
garageRoof: 'ガレージ屋上',
hallOfFame: 'ホールオブフェイム',
hallway: '廊下',
hookahDeck: 'フッカー<br/>デッキ<br/>(表示されてない)',
hookahLounge: 'フッカーラウンジ',
kitchen: 'キッチン',
mainEntrance: '正面入口',
mainLobby: 'メインロビー',
northStairs: '北階段',
office: 'オフィス',
penthouse: 'ペントハウス',
pool: 'プール',
poolEntrance: 'プール入口',
poolSide: 'プールサイド',
rooftop: '屋上',
ruins: '廃墟',
securityRoom: 'セキュリティ<br/>ルーム',
serviceEntrance: '勝手入口',
southHallway: '南廊下',
southPromenade: '南通路',
southStairs: '南階段',
sunriseBar: 'サンライズ・バー',
sunRoom: 'サンルーム',
theater: '劇場',
terrace: 'テラス',
toilets: 'トイレ',
vipLounge: 'VIPラウンジ',
walkway: '通路'
},
consulate: {
spawnRiotBarricade: 'バリケード',
spawnPoliceLine: '警察警戒線',
spawnGasStation: 'ガソリンスタンド',
spawnSideEntrance: '通用口',
exitStairs: '非常階段',
garage: 'ガレージ',
basementCorridor: '地下通路',
securityRoom: 'セキュリティ<br/>ルーム',
cafeteria: 'カフェテリア',
mainStairs: 'メイン階段',
lockerHallway: 'ロッカー室',
serviceStairs: '従業員用<br/>階段',
electricRoom: '配電室',
storageRoom: '貯蔵室',
archives: '資料室',
archivesCorridor: '資料室前<br/>廊下',
pressRoom: '会見室',
westCorridor: '西通路',
publicBathroom: '共同バスルーム',
antechamber: '控え室',
lobby: 'ロビー',
eastCorridor: '東通路',
tellers: 'ビザ申請<br/>窓口',
visaOffice: 'ビザ申請<br/>オフィス',
visaEntrance: 'ビザ申請<br/>エントランス',
frontDoor: 'フロントドア',
balcony: 'バルコニー',
copyRoom: 'コピー室',
cabinet: '納戸',
administrationOffice: '管理事務室',
breakRoom: '休憩室',
frontOffice: 'フロントオフィス',
meetingRoom: '会議室',
hallway: '廊下',
consulFrontDesk: '領事応接室',
privateBathroom: '専用トイレ',
waitingRoom: '待合室',
consulateOffice: '領事オフィス',
garageWay: 'ガレージ進入路',
courtyard: '中庭',
backCourtyard: '裏庭',
sideEntrance: '通用口',
dumpster: 'ゴミ捨て場',
parking: '駐車場',
gardens: 'ガーデン',
fountain: '噴水',
emergencyExit: '非常口',
garageRoof: 'ガレージ<br/>屋上',
memorialGarden: '記念庭園',
policeLine: '警察警戒線',
riotBarracade: 'バリケード',
eastFrontYard: '東フロントヤード',
westFrontYard: '西フロントヤード',
frontAlley: '正面路地',
buildingRoof: '建物屋上'
},
favela: {
packagingRoom: '包装室',
footballApartment: 'フットボール<br/>アパート',
armoryRoom: '武器庫',
auntsApartment: 'アント<br/>アパート',
auntsBedroom: 'アント<br/>ベッドルーム',
growRoom: '育成室',
bikersApartment: 'バイカー<br/>アパート',
methLab: '薬物ラボ',
footballBedroom: 'フットボール<br/>ベッドルーム',
footballOffice: 'フットボール<br/>オフィス',
bikersBedroom: 'バイカー<br/>ベッドルーム',
backStairs: '裏階段',
auntsHall: 'アントホール',
kidsRoom: '子供部屋',
mainStairs: 'メイン階段',
stairHall: '階段ホール',
roof: '屋根',
laundryRoom: '洗濯室',
vaultRoom: '金庫室',
bikersGarage: 'バイカー<br/>ガレージ',
backAlley: '裏通り',
schoolAlley: '学校通り',
footballPitch: 'フットボール場',
market: '市場',
marketAlley: '',
schoolRooftops: '',
street: '通り',
rooftops: '屋上',
courtyard: '中庭',
accessAlley: '連絡通路',
shop: '店<br/>(表示されてない)',
marketRooftops: '市場屋上'
},
hereford: {
spawnTrainingCourse: '訓練場',
spawnParking: '兵舎',
spawnShootingRange: '射撃練習場',
armory: '武器庫',
lockers: 'ロッカー',
corridor: '通路',
mainStairs: 'メイン階段',
maintenanceArea: 'メンテナンス<br/>エリア',
briefingRoom: '作戦会議室',
basementEntrance: '地下入口',
garage: 'ガレージ',
TVRoom: 'AVルーム',
garageCorridor: 'ガレージ通路',
kitchen: 'キッチン',
corridor1: '1F通路',
diningRoom: 'ダイニング<br/>ルーム',
pianoLounge: 'ピアノラウンジ',
office: 'オフィス',
masterBedroom: 'メインベッドルーム',
backAccess: '裏口',
laundryRoom: '洗濯室',
bathroom: 'バスルーム',
kidsBedroom: '子供部屋',
ballisticMatDepot: '防弾マット<br/>保管庫',
storage: '倉庫',
storageCorridor: '3F通路',
dummyDepot: 'ダミー保管庫',
workshop: '作業室',
shootingRangeEastEntrance: '射撃練習場<br/>東ルート',
shootingRangeWestEntrance: '射撃練習場<br/>西ルート',
tireSetting: 'タイヤ設置場所',
observationRamp: '監視スロープ',
barracks: '兵舎',
busBackAlley: '大型車用通路',
rappelTower: 'ラペリングタワー',
terrace: 'テラス',
frontAccess: '正面口ルート',
chapelGate: 'チャペルゲート',
forkliftArea: 'フォークリフト<br/>エリア',
sideStairsAlley: '側面階段<br/>路地',
sideStairs: '側面階段',
garageTop: 'ガレージ屋根',
rooftop: '屋上',
parkingEntrance: ''
},
house: {
spawnConstructionSite: '資材置き場',
spawnRiverDocks: '桟橋',
spawnAPCArea: '装甲車エリア',
spawnSideStreet: '脇道',
depot: '保管庫',
trainingRoom: '訓練室',
kitchenStairs: 'キッチン階段',
sideStairs: '側面階段',
laundryRoom: '洗濯室',
garage: 'ガレージ',
livingRoom: 'リビング<br/>ルーム',
backEntrance: '裏口',
lobby: 'ロビー',
kitchen: 'キッチン',
office: 'オフィス',
diningRoom: 'ダイニング<br/>ルーム',
workshop: '作業室',
kidsBedroom: '子供部屋',
upperHallway: '2F廊下',
lobbyStairs: 'ロビー<br/>階段',
walkIn: 'ウォークイン',
masterBedroom: 'メインベッドルーム',
bathroom: 'バスルーム',
sideStreet: '脇道',
garageEntrance: 'ガレージ入口',
garden: 'ガーデン',
backAlley: '裏庭',
patio: 'パティオ',
jacuzzi: 'ジェットバス',
basementStairs: '地下階段',
treehouseAlley: 'ツリーハウス',
frontYard: 'フロントヤード',
frontStreet: '正面通り',
frontPorch: 'フロントポーチ',
backPorch: 'バックポーチ',
backPorchTop: 'バックポーチ屋根',
frontPorchTop: 'フロントポーチ屋根',
rooftop: '屋上'
},
kanal: {
spawnFloatingDock: '浮きドック',
spawnSailboats: '帆船',
spawnConstructionSite: '建設現場',
boatGarage: 'ボートガレージ',
boatSupplies: 'ボート倉庫',
pipes: '漏水区画',
boatSuppliesHallway: 'ボート<br/>倉庫前<br/>廊下',
lockerRoom: 'ロッカールーム',
coastGuardStairs: '沿岸警備<br/>階段',
showers: 'シャワー<br/>ルーム',
showersCorridor: 'シャワー<br/>ルーム前',
loadingDock: '搬出口',
machineHallway: '機械室前<br/>廊下',
controlCenterStairs: '管制センター<br/>階段',
holdingRoom: '待合室',
holdingRoomHallway: '待合室<br/>廊下',
radio: '無線室',
coastGuardOffice: '沿岸警備<br/>オフィス',
archives: '資料室',
coastGuardHall: '事務所前<br/>廊下',
mainEntrance: '正面玄関',
lounge: 'ラウンジ',
bridge: '渡り廊下',
modelRoom: 'モデルルーム',
securityRoom: 'セキュリティ<br/>ルーム',
projectorRoom: '映写室',
mapsOffice: '海図管理室',
mapsOfficeHallway: '海図管理室前<br/>廊下',
kitchen: 'キッチン',
cafeteria: 'カフェテリア',
plantsHallway: 'プラント育成区画',
thirdFloorExit: '3階出口',
controlRoom: '管制室',
electricRoom: '配電室',
controlRoomHallway: '管制室廊下',
serverRoom: 'サーバー<br/>ルーム',
lockgate: '水門',
quayContainers: '埠頭コンテナ',
lockgateTunnel: '水門トンネル',
waterWalkway: '水上通路',
quayConstruction: '埠頭建設現場',
constructionSite: '建設現場',
constructionEntrance: '建設現場<br/>入口',
parkingAlley: '駐車場路地',
parkingEntrance: '駐車場入口',
middleRoad: '中央道路',
forkliftAlley: 'フォークリフト用<br/>通路',
frontLawn: '前庭',
basementStairs: '地下階段',
coastGuardRoof: '沿岸警備<br/>屋上',
bridgeRoof: 'ブリッジ<br/>ルーフ',
roofBrickPile: 'レンガ屋根',
balcony: 'テラス',
controlCenterRoof: '管制センター<br/>屋上',
dockStairs: 'ドック階段',
parking: '駐車場',
boatCrane: 'ボートクレーン',
mapArchives: ''
},
kafe: {
spawnRiverDocks: '桟橋',
spawnChristmasMarket: 'クリスマスマーケット',
spawnPark: '公園',
laundryRoom: '洗濯室',
frontStairs: '正面階段',
bakery: 'ベーカリー',
bakeryKitchen: 'ベーカリー<br/>キッチン',
kitchenPrep: 'キッチン<br/>準備室',
kitchenGrill: 'キッチン<br/>グリル',
kitchenPickUp: 'キッチン<br/>配膳室',
coldRoom: '冷蔵室',
diningRoom: 'カフェ1F',
backStairs: '裏口<br/>階段',
coldRoomCorridor: '冷蔵室前<br/>通路',
VIPSection: 'VIP席',
barStairs: 'バー階段',
storage: '倉庫',
museumEntrance: '博物館<br/>入口',
miningRoom: '掘削機<br/>展示室',
trainMuseum: '列車展示室',
pillarDiningRoom: 'カフェ2F',
mainCorridor: 'メイン通路',
readingRoomCorridor: '読書室通路',
readingRoom: '読書室',
fireplaceHall: '暖炉ホール',
cigarShop: 'シガーショップ',
cigarLounge: 'シガーラウンジ',
bar: 'バー',
barBackstore: 'バー倉庫',
washrooms: '洗面所',
washroomCorridor: '洗面所前通路',
cocktailLounge: 'カクテル<br/>ラウンジ',
cocktailLoungeEntrance: 'カクテル<br/>ラウンジ<br/>入口',
westMainStreet: 'メインストリート西',
mainStreet: 'メインストリート',
eastMainStreet: '',
bakeryParking: 'ベーカリー<br/>駐車場',
bakeryRoof: 'ベーカリー屋上',
cafeRoofTop: 'カフェ屋上',
terrace: 'テラス',
backAlley: '裏路地',
garrage: 'ガレージ',
parkAlley: '公園路地'
},
oregon: {
spawnJunkyard: '廃品置場',
spawnStreet: '通り',
spawnConstructionSite: '建設現場',
towerStairs: 'タワー階段',
boilerRoom: 'ボイラー室',
electricRoom: '配電室',
bunkerEntrance: 'バンカー<br/>入口',
bunker: 'バンカー',
basementCorridor: '地下通路',
        supplyRoom: '備品室',
laundryRoom: '洗濯室',
laundryStorage: '洗濯室<br/>倉庫',
laundryStairs: '洗濯室<br/>階段',
office: 'オフィス',
diningHall: '食堂',
diningHallCorridor: '食堂通路',
showers: 'シャワールーム',
kitchen: 'キッチン',
bathroom: '共同トイレ',
dormStairs: '共同寝室<br/>の階段',
pantry: '食料庫',
bathroomCorridor: '共同トイレ前',
classroom: '教室',
lobby: 'ロビー',
mainStairs: 'メイン<br/>階段',
meetingHall: '会議ホール',
rearStage: 'ホール裏',
garage: 'ガレージ',
officeStorage: 'オフィス<br/>備品置き場',
kidsDorm: '子供部屋',
dormMainHall: '共同寝室(大)',
smallDorms: '共同寝室(小)',
armoryCorridor: '武器庫通路',
masterBedroom: 'メイン<br/>ベッドルーム',
armory: '武器庫',
walkIn: 'ウォークイン',
attic: '裏屋根',
watchTower: '監視タワー',
busYard: 'バスヤード',
junkyard: '廃品置き場',
farmlands: '農地',
shootingRange: '射撃練習場',
constructionSite: '建設現場',
parking: '駐車場',
mainEntrance: '正面玄関',
street: '通り',
balcony: 'テラス',
diningHallRoof: '食堂屋上',
officeRoof: 'オフィス屋上',
meetingHallEntrance: '会議室入口',
garageRoof: 'ガレージ屋上',
dormsRoof: '共同寝室<br/>屋根',
meetingHallRoof: '会議ホール<br/>屋根',
supplyCloset: ''
},
plane: {
spawnOfficialEntrance: '前方搭乗口',
spawnReporterEntrance: '記者エントランス',
spawnServiceEntrance: '前方サービス<br/>エントランス',
pressBathroom: 'プレス<br/>バスルーム',
meetingRoom: '会議室',
frontHallway: '正面廊下',
executiveOffice: 'VIPオフィス',
mainEntrance: '前方搭乗口',
frontStairs: '正面階段',
pantry: '食料庫',
kitchen: 'キッチン',
executiveHallway: 'VIP専用通路',
executiveBedroom: 'VIP<br/>ベッドルーム',
changeRoom: '更衣室',
laund: '洗濯室',
frontServiceEntrance: '前方サービス<br/>エントランス',
rightWing: '右翼',
backServiceEntrance: '後方サービス<br/>ドア',
reporterEntrance: '後方搭乗口',
leftWing: '左翼',
staffSection: 'スタッフ席',
securityRoom: 'セキュリティ<br/>ルーム',
pressSectionA: '記者席A',
pressSectionB: '記者席B',
backStairs: '後方階段',
cargoHold: '貨物庫',
serviceCorridor: '従業員用通路',
storage: '倉庫',
luggageHold: '荷物庫',
firstAidStation: '救護室',
cargoFrontEntrance: '貨物用<br/>正面入口',
cockpitStairs: 'コックピット<br/>階段',
cabinStaff: 'スタッフルーム',
radioCabin: '無線室',
cabin: 'コックピット',
caterer: 'サービスドア',
serverRoomA: 'サーバールームA',
serverRoomB: 'サーバールームB',
technicalSeating: 'テックシート',
ladderRoom: ''
},
skyscraper: {
helipad: 'ヘリポート',
tower: 'タワー',
ventilationUnits: '空調設備',
kitchen: 'キッチン',
pantry: '食料庫',
deliveryRoom: 'デリバリー<br/>ルーム',
houseLobby: '建物ロビー',
houseEntrance: '建物入口',
mainEntrance: '正面玄関',
reception: '受付',
bedroom: 'ベッドルーム',
closet: 'クローゼット',
bathroom: 'バスルーム',
houseStairs: '建物<br/>階段',
restaurant: 'レストラン',
toilet: 'トイレ',
bbq: 'BBQ',
backHallway: '裏廊下',
mainStairs: 'メイン<br/>階段',
geishaRoom: '芸者部屋',
hallway: '廊下',
karaoke: 'カラオケ',
teaRoom: '茶室',
taiko: '太鼓',
terrace: 'テラス',
backStairs: '裏階段',
houseBalcony: '建物<br/>バルコニー',
exhibition: '展示',
lounge: 'ラウンジ',
workOffice: 'オフィス',
clearance: 'VIP<br/>クリアランス',
peacefullTree: '平穏の木',
contemplationGarden: '沈思の庭園',
westGarden: '庭園(西)',
bridge: '橋',
gazeebo: '展望台',
restBalcony: 'レストラン<br/>バルコニー',
northGarden: '庭園(北)',
eastGarden: '庭園(東)',
sandGarden: '枯山水',
sidePath: '脇道',
sideStairs: '側面階段',
dragonStatue: '竜の像',
coveredWalkway: '屋根付き<br/>通路'
},
yacht: {
spawnSubmarine: '潜水艦',
spawnZodiak: 'ゾディアック',
spawnSnowMobile: 'スノーモービル',
mapsRoom: 'マップルーム',
cockpit: 'コックピット',
cockpitHallway: 'コックピット前廊下',
captainsOffice: '船長室',
cockpitBalcony: 'コックピット<br/>バルコニー',
topDeckStairs: 'トップ<br/>デッキ<br/>階段',
helipadEntrance: 'ヘリポート<br/>入口',
helipad: 'ヘリポート',
spaDeck: 'スパ<br/>デッキ',
eastDeck: '東デッキ',
westDeck: '西デッキ',
frontDeck: 'フロント<br/>デッキ',
masterBedroom: 'メイン<br/>ベッドルーム',
casino: 'カジノ',
pokerRoom: 'ポーカー<br/>ルーム',
bathroom: 'バスルーム',
bedroomHallway: 'ベッドルーム前廊下',
casinoHallway: 'カジノ前廊下',
globeHallway: '地球儀廊下',
lounge: 'ラウンジ',
cafeteria: 'カフェテリア',
engine: 'エンジン',
backEntrance: 'エンジン<br/>後方入口',
rearDeck: '後方<br/>デッキ',
serverRoom: 'サーバー<br/>ルーム',
engineStorage: 'エンジン<br/>スペース',
engineControl: 'エンジン<br/>制御装置',
backStairs: '後方<br/>階段',
emergencyExit: '非常口',
engineHallway: 'エンジン前廊下',
frontStairs: '正面<br/>階段',
kitchen: 'キッチン',
staffDormitory: 'スタッフ用<br/>共同部屋',
westBalcony: '西バルコニー',
eastBalcony: '東バルコニー',
kitchenHallway: 'キッチン前廊下',
kitchenStairs: 'キッチン階段',
kitchenPantry: 'キッチン<br/>食料庫',
infirmary: '医療室',
borealSubRoom: 'アクラーク号<br/>船倉',
cafeteriaHallway: 'カフェテリア前廊下',
engineUtility: 'エンジン設備',
submarine: '潜水艦',
westGlacier: '西側グレーシャー',
eastHullBreach: '船体東側<br/>ブリーチ',
eastGlacier: '東側グレーシャー',
frozenRiver: '冷たい川',
zodiac: 'ゾディアック',
westHullBreach: '船体西側ブリーチ',
kingOfTheWorld: '',
roof: '',
anchorName: '',
aklarkSubEntrance: ''
}
}
};
R6MapsLangTerms.registerLanguage(name, terms);
return {
name: name,
terms: terms
};
})(R6MapsLangTerms);
|
dev/js/langs/r6-maps.lang-terms.ja.js
|
'use strict';
var R6MapsLangTermsJapanese = (function(R6MapsLangTerms, undefined) {
var name = 'ja',
terms = {
general: {
pageTitle: '',
pageTitleSelectMap: 'R6Maps.com - マップを選択',
pageTitleStart: '',
cameraViewCaption: '{floorName} カメラ視点',
        shortcutTip: 'ショートカット: {shortcut}',
menu: 'メニュー',
about: 'R6Mapsについて',
languageHeader: '言語',
optionsHeader: '設定',
labelLosOpacity: 'カメラ視界の透明さ',
labelPercent: '{int}%',
labelLosDefault: '(Default)',
labelLos105: '(あれ?)',
labelLos110: '(そんなばかな!)',
labelRoomLabelStyle: '部屋名の文字スタイル',
labelNumberFloorsToDisplay: '階の数を選択',
lockPanning: 'パンニング解除',
lockZooming: 'ズーミング解除',
fullScreen: '全画面',
enableScreenshots: 'スクリーンショット許可',
contributions: '寄付',
supportSiteNote: 'このサイトをサポートしたい方には寄付ボタンをクリックしてください'
},
roomLabelStyles: {
Dark: '暗い',
Light: '明るい(標準)',
DarkAndLarge: '大きく暗い',
LightAndLarge: '大きく明るい',
DisplayNone: 'オフ'
},
floorDisplayOptions: {
one: 'フル',
two: '分割',
four: 'グリッド'
},
selectMaps: {
selectAMap: 'マップを選択',
homeLink: 'マップを選択'
},
compass: {
letterN: '北',
letterE: '東',
letterS: '南',
letterW: '西'
},
floorNames: {
basement: { full: '地下', short: 'B' },
firstFloor: { full: '1階', short: '1' },
secondFloor: { full: '2階', short: '2' },
thirdFloor: { full: '3階', short: '3' },
fourthFloor: { full: '4階', short: '4' },
roof: { full: '屋上', short: 'R' }
},
objectives: {
bombShortA: 'A',
bombShortB: 'B',
bomb: '爆弾',
hostageShort: 'H',
hostage: '人質',
secureShort: 'S',
secure: '確保',
showAll: '全て見る'
},
legend: {
breakableWalls: '破壊できる壁',
breakableFloorTraps: '破壊できる床の<br/>落とし戸',
ceilingHatches: '破壊できる天井の<br/>落とし戸',
lineOfSightWalls: '視界の壁',
lineOfSightFloors: '視界の床',
droneTunnels: 'ドローンのトンネル',
objectives: '目標',
insertionPoints: '出現地点',
securityCameras: '監視カメラ',
skylights: '天窓',
onFloorAboveOrBelow: '上か下の階にある',
cameraLineOfSight: 'カメラの視界',
ladders: 'はしご'
},
mapNames: {
bank: '銀行',
bartlett: 'バートレット大学',
border: '国境',
chalet: '山荘',
club: 'クラブハウス',
consulate: '領事館',
favela: 'ファべーラ',
hereford: 'ヘレフォード基地',
house: '民家',
kanal: '運河',
kafe: 'カフェ・ドストエフスキー',
oregon: 'オレゴン',
plane: '大統領専用機',
skyscraper: '高層ビル',
yacht: 'ヨット'
},
mapRooms: {
bank: {
spawnBoulevard: '大通り',
spawnBackAlley: '裏路地',
printerRoom: 'コピー室',
parkingLot: '駐車場',
boulevard: '大通り',
jewelryFront: '宝石店前',
plaza: '広場',
mainEntrance: '正面玄関',
garageRamp: 'ガレージ<br/>スロープ',
exteriorParking: '駐車場',
garageRoof: 'ガレージ屋上',
alleyAccess: '裏路地入口',
backAlleyRooftop: '裏路地屋根',
backAlley: '裏路地',
highRoof: '銀行屋上 (上)',
lowRoof: '銀行屋上 (下)',
vault: '金庫',
goldVault: '金保管室',
serverRoomStairs: 'サーバールーム階段',
serverRoom: 'サーバールーム',
CCTVRoom: '監視室',
loadingDock: '搬出口',
secureHallway: '保管室廊下',
sewer: '下水道',
lockers: 'ロッカー',
vaultLobby: '金庫前ロビー',
vaultEntrance: '金庫入口',
mainStairway: 'メイン階段',
bankGarage: '銀行ガレージ',
elevators: 'エレベーター',
tellersOffice: '窓口オフィス',
archives: '資料室',
tellers: '窓口',
loanOffice: 'ローンオフィス',
officeHallway: 'オフィス廊下',
skylightStairwell: '吹き抜け階段',
lobby: 'ロビー',
openArea: '空き部屋',
staffRoom: 'スタッフルーム',
electricalRoom: '電子機器室',
adminOffice: '管理事務室',
ATMs: 'ATMs',
executiveHallway: 'VIP専用通路',
frontDesk: '受付',
executiveLounge: 'VIPラウンジ',
CEOOffice: 'CEOオフィス',
janitorCloset: '用務室',
hallway: '廊下',
terrace: 'テラス',
stockTradingRoom: '証券取引ルーム',
conferenceRoom: '会議室'
},
bartlett: {
archwayHall: 'アーチホール',
archwaylHallway: '',
backAlley: '裏路地',
bathroom: 'トイレ',
campusField: '校庭',
classroom: '教室',
coatRoom: 'クローク<br/>ルーム',
compassHallway: '羅針盤エリア',
courtyard: '中庭',
centralHallway: '中央廊下',
diningRoom: 'ダイニングルーム',
eastBalcony: '',
eastCorridor: '東通路',
eastStairs: '東階段',
festival: 'フェスティバル会場',
frontEntrance: '正面入口',
frontOffice: 'フロントオフィス',
frontPatio: '正面パティオ',
gardenPass: '脇道',
kitchen: 'キッチン',
lobby: 'ロビー',
lounge: 'ラウンジ',
lowerLibrary: '図書室1階',
mainGate: 'メインゲート',
mainOffice: 'メインオフィス',
modelHall: 'モデルホール',
pantry: '食料庫',
parking: '駐車場',
pianoRoom: 'ピアノルーム',
readingRoom: '読書室',
roof: '屋上',
rowingMuseum: 'ボート展示室',
serviceRoom: 'サービス<br/>ルーム',
terrace: 'テラス',
trophyRoom: '記念品室',
upperLibrary: '図書室2階',
vistaHallway: '吹き抜け<br/>の廊下',
westBalcony: '',
westCorridor: '西通路'
},
border: {
armoryLockers: '武器庫<br/>ロッカー',
tellers: '窓口',
ventilationRoom: '換気室',
exitHallway: '出口廊下',
supplyCorridor: '備品通路',
detention: '留置所',
customsInspection: '税関検査',
customsDesk: '税関<br/>デスク',
centralStairs: '中央階段',
serverRoom: 'サーバールーム',
supplyRoom: '備品室',
workshop: '作業室',
mainLobby: 'メインロビー',
bathroom: 'トイレ',
waitingRoom: '待合室',
eastStairs: '東階段',
passportCheck: 'パスポート<br/>チェック',
archives: '資料室',
offices: 'オフィス',
officesHallway: 'オフィス廊下',
fountain: '噴水',
mainHallway: 'メイン<br/>廊下',
armoryDesk: '武器庫<br/>デスク',
securityRoom: 'セキュリティ<br/>ルーム',
breakRoom: '休憩室',
spawnEastVehicleEntrance: '東側車両入口',
spawnValley: '流域',
spawnWestVehicleExit: '西側車両出口',
westTower: '西側タワー',
pedestrianExit: '歩行者用出口',
valley: '流域',
parkingLotEntrance: '駐車場入口',
parkingLot: '駐車場',
westRoad: '西側道路',
vehicleCustoms: '車両用税関',
crashScene: '衝突現場',
eastRoad: '東側道路',
pedestrianEntrance: '歩行者用<br/>入口',
pedestrianCustoms: '歩行者用税関',
watchTower: '監視タワー',
eastAlley: '東側路地',
parkingLotAlley: '駐車場路地',
northBalcony: '北側バルコニー',
eastBalcony: '東側<br/>バルコニー',
westBalcony: '西側<br/>バルコニー',
southBalcony: '南側バルコニー',
roof: '屋上'
},
chalet: {
spawnFrontYard: 'フロントヤード',
spawnCampfire: 'キャンプファイア',
spawnCliffside: '崖際',
spawnLakeside: 'レイクサイド',
libraryStairs: '図書室階段',
snowmobileGarageCorridor: 'スノーモービル<br/>ガレージ<br/>前通路',
snowmobileGarage: 'スノーモービル<br/>ガレージ',
greatRoomStairs: 'リビング階段',
storageRoom: '貯蔵室',
wineCellar: 'ワイン<br/>セラー',
wineStock: 'ワイン<br/>貯蔵室',
basementHallway: '地下通路',
backyardStairs: 'バックヤード<br/>階段',
mainStairs: 'メイン階段',
mainGarage: 'メインガレージ',
garageEntrance: 'メインガレージ<br/>入口',
westEntrance: '西入口',
gamingRoomHallway: ' 娯楽室前<br/>廊下',
gamingRoom: '娯楽室',
bar: 'バー',
greatRoom: 'リビング',
diningRoom: 'ダイニングルーム',
mainEntrance: '正面玄関',
trophyRoom: '記念品室',
kitchenHallway: 'キッチン<br/>前廊下',
kitchen: 'キッチン',
libraryHallway: '図書室廊下',
libraryEntrance: '図書室前',
library: '図書室',
bedroomTerrace: 'ベッドルーム<br/>テラス',
fireplaceHallway: 'リビング2階廊下',
bedroomHallway: 'ベッドルーム<br/>前廊下',
masterBathroom: 'メインバスルーム',
masterBedroom: 'メインベッドルーム',
office: 'オフィス',
woodenTrail: '森の小道',
campfireWood: 'キャンプファイア<br/>周辺の森',
backyard: 'バックヤード',
gazeebo: '展望台',
cliffsideStairs: '断崖エリア<br/>階段',
cliffsideWoods: '崖際の森',
backyardPatio: 'バックヤード<br/>パティオ',
officeBalcony: 'オフィス<br/>バルコニー',
helipadTrail: 'ヘリポートへの道',
helipad: 'ヘリポート',
frontYardPatio: 'フロントヤード<br/>パティオ',
frontYard: 'フロントヤード',
bathroomBalcony: 'バスルーム<br/>バルコニー',
libraryBalcony: '図書室<br/>バルコニー',
bedroomBalcony: 'ベッドルーム<br/>バルコニー',
snowmobiles: 'スノーモービル'
},
club: {
spawnMainEntrance: 'メインゲート',
spawnShippingDocks: '搬入口',
spawnWarehouse: '倉庫',
spawnConstructionSite: '建設現場',
easternSubroof: '東側サブルーフ',
constructionSite: '建設現場',
container: 'コンテナ',
graffitiArea: '落書きエリア',
recreationArea: '娯楽エリア',
junkyard: '廃品置場',
VIPParking: 'VIP専用駐車場',
mainGate: 'メインゲート',
parking: '駐車場',
kennels: '犬舎',
trash: 'ゴミ収集所',
centralSubroof: '中央<br/>サブルーフ',
easternRoof: '東側屋上',
centralRoof: '中央ルーフ',
westernRoof: '西側屋上',
balcony: 'バルコニー',
escapeTunnel: '避難用<br/>トンネル',
arsenalRoom: '武器保管室',
basementHallway: '地下廊下',
memorialRoom: '記念室',
utilityRoom: '収納室',
oilPit: 'オイルピット',
centralStairs: '中央階段',
church: '祭壇',
frontPorch: 'フロントポーチ',
garage: 'ガレージ',
lobby: 'ロビー',
stockRoom: '倉庫',
garageStorage: 'ガレージ<br/>倉庫',
lounge: 'ラウンジ',
bar: 'バー',
centralHallway: '中央廊下',
kitchen: 'キッチン',
kitchenEntrance: 'キッチン裏口',
westernHallway: '西側廊下',
stripClub: 'ナイトクラブ',
junkyardEntrance: '廃品置場側<br/>出入口',
sideEntrance: '通用口',
changingRoom: '更衣室',
bedroom: 'ベッドルーム',
bathroom: 'バスルーム',
bedroomHallway: 'ベッドルーム前廊下',
logisticOffice: '物流オフィス',
gym: 'ジム',
secretStash: '隠し金庫',
CCTVRoom: '監視室',
cashRoom: '金庫室',
easternStairs: '東側階段'
},
coastline: {
aquarium: 'アクアリウム',
backAlley: '裏通路',
balcony: 'バルコニー',
bathroom: 'バスルーム',
billiardsRoom: 'ビリヤードルーム',
blueBar: 'ブルー・バー',
cantina: 'バー',
courtyard: '中庭',
djBooth: 'DJブース',
garageRoof: 'ガレージ屋上',
hallOfFame: 'ホールオブフェイム',
hallway: '廊下',
hookahDeck: 'フッカー<br/>デッキ<br/>(表示されてない)',
hookahLounge: 'フッカーラウンジ',
kitchen: 'キッチン',
mainEntrance: '正面入口',
mainLobby: 'メインロビー',
northStairs: '北階段',
office: 'オフィス',
penthouse: 'ペントハウス',
pool: 'プール',
poolEntrance: 'プール入口',
poolSide: 'プールサイド',
rooftop: '屋上',
ruins: '廃墟',
securityRoom: 'セキュリティ<br/>ルーム',
serviceEntrance: '勝手入口',
southHallway: '南廊下',
southPromenade: '南通路',
southStairs: '南階段',
sunriseBar: 'サンライズ・バー',
sunRoom: 'サンルーム',
theater: '劇場',
terrace: 'テラス',
toilets: 'トイレ',
vipLounge: 'VIPラウンジ',
walkway: '通路'
},
consulate: {
spawnRiotBarricade: 'バリケード',
spawnPoliceLine: '警察警戒線',
spawnGasStation: 'ガソリンスタンド',
spawnSideEntrance: '通用口',
exitStairs: '非常階段',
garage: 'ガレージ',
basementCorridor: '地下通路',
securityRoom: 'セキュリティ<br/>ルーム',
cafeteria: 'カフェテリア',
mainStairs: 'メイン階段',
lockerHallway: 'ロッカー室',
serviceStairs: '従業員用<br/>階段',
electricRoom: '配電室',
storageRoom: '貯蔵室',
archives: '資料室',
archivesCorridor: '資料室前<br/>廊下',
pressRoom: '会見室',
westCorridor: '西通路',
publicBathroom: '共同バスルーム',
antechamber: '控え室',
lobby: 'ロビー',
eastCorridor: '東通路',
tellers: 'ビザ申請<br/>窓口',
visaOffice: 'ビザ申請<br/>オフィス',
visaEntrance: 'ビザ申請<br/>エントランス',
frontDoor: 'フロントドア',
balcony: 'バルコニー',
copyRoom: 'コピー室',
cabinet: '納戸',
administrationOffice: '管理事務室',
breakRoom: '休憩室',
frontOffice: 'フロントオフィス',
meetingRoom: '会議室',
hallway: '廊下',
consulFrontDesk: '領事応接室',
privateBathroom: '専用トイレ',
waitingRoom: '待合室',
consulateOffice: '領事オフィス',
garageWay: 'ガレージ進入路',
courtyard: '中庭',
backCourtyard: '裏庭',
sideEntrance: '通用口',
dumpster: 'ゴミ捨て場',
parking: '駐車場',
gardens: 'ガーデン',
fountain: '噴水',
emergencyExit: '非常口',
garageRoof: 'ガレージ<br/>屋上',
memorialGarden: '記念庭園',
policeLine: '警察警戒線',
riotBarracade: 'バリケード',
eastFrontYard: '東フロントヤード',
westFrontYard: '西フロントヤード',
frontAlley: '正面路地',
buildingRoof: '建物屋上'
},
favela: {
packagingRoom: '包装室',
footballApartment: 'フットボール<br/>アパート',
armoryRoom: '武器庫',
auntsApartment: 'アント<br/>アパート',
auntsBedroom: 'アント<br/>ベッドルーム',
growRoom: '育成室',
bikersApartment: 'バイカー<br/>アパート',
methLab: '薬物ラボ',
footballBedroom: 'フットボール<br/>ベッドルーム',
footballOffice: 'フットボール<br/>オフィス',
bikersBedroom: 'バイカー<br/>ベッドルーム',
backStairs: '裏階段',
auntsHall: 'アントホール',
kidsRoom: '子供部屋',
mainStairs: 'メイン階段',
stairHall: '階段ホール',
roof: '屋根',
laundryRoom: '洗濯室',
vaultRoom: '金庫室',
bikersGarage: 'バイカー<br/>ガレージ',
backAlley: '裏通り',
schoolAlley: '学校通り',
footballPitch: 'フットボール場',
market: '市場',
marketAlley: '',
schoolRooftops: '',
street: '通り',
rooftops: '屋上',
courtyard: '中庭',
accessAlley: '連絡通路',
shop: '店<br/>(表示されてない)',
marketRooftops: '市場屋上'
},
hereford: {
spawnTrainingCourse: '訓練場',
spawnParking: '兵舎',
spawnShootingRange: '射撃練習場',
armory: '武器庫',
lockers: 'ロッカー',
corridor: '通路',
mainStairs: 'メイン階段',
maintenanceArea: 'メンテナンス<br/>エリア',
briefingRoom: '作戦会議室',
basementEntrance: '地下入口',
garage: 'ガレージ',
TVRoom: 'AVルーム',
garageCorridor: 'ガレージ通路',
kitchen: 'キッチン',
corridor1: '1F通路',
diningRoom: 'ダイニング<br/>ルーム',
pianoLounge: 'ピアノラウンジ',
office: 'オフィス',
masterBedroom: 'メインベッドルーム',
backAccess: '裏口',
laundryRoom: '洗濯室',
bathroom: 'バスルーム',
kidsBedroom: '子供部屋',
ballisticMatDepot: '防弾マット<br/>保管庫',
storage: '倉庫',
storageCorridor: '3F通路',
dummyDepot: 'ダミー保管庫',
workshop: '作業室',
shootingRangeEastEntrance: '射撃練習場<br/>東ルート',
shootingRangeWestEntrance: '射撃練習場<br/>西ルート',
tireSetting: 'タイヤ設置場所',
observationRamp: '監視スロープ',
barracks: '兵舎',
busBackAlley: '大型車用通路',
rappelTower: 'ラペリングタワー',
terrace: 'テラス',
frontAccess: '正面口ルート',
chapelGate: 'チャペルゲート',
forkliftArea: 'フォークリフト<br/>エリア',
sideStairsAlley: '側面階段<br/>路地',
sideStairs: '側面階段',
garageTop: 'ガレージ屋根',
rooftop: '屋上',
parkingEntrance: ''
},
house: {
spawnConstructionSite: '資材置き場',
spawnRiverDocks: '桟橋',
spawnAPCArea: '装甲車エリア',
spawnSideStreet: '脇道',
depot: '保管庫',
trainingRoom: '訓練室',
kitchenStairs: 'キッチン階段',
sideStairs: '側面階段',
laundryRoom: '洗濯室',
garage: 'ガレージ',
livingRoom: 'リビング<br/>ルーム',
backEntrance: '裏口',
lobby: 'ロビー',
kitchen: 'キッチン',
office: 'オフィス',
diningRoom: 'ダイニング<br/>ルーム',
workshop: '作業室',
kidsBedroom: '子供部屋',
upperHallway: '2F廊下',
lobbyStairs: 'ロビー<br/>階段',
walkIn: 'ウォークイン',
masterBedroom: 'メインベッドルーム',
bathroom: 'バスルーム',
sideStreet: '脇道',
garageEntrance: 'ガレージ入口',
garden: 'ガーデン',
backAlley: '裏庭',
patio: 'パティオ',
jacuzzi: 'ジェットバス',
basementStairs: '地下階段',
treehouseAlley: 'ツリーハウス',
frontYard: 'フロントヤード',
frontStreet: '正面通り',
frontPorch: 'フロントポーチ',
backPorch: 'バックポーチ',
backPorchTop: 'バックポーチ屋根',
frontPorchTop: 'フロントポーチ屋根',
rooftop: '屋上'
},
kanal: {
spawnFloatingDock: '浮きドック',
spawnSailboats: '帆船',
spawnConstructionSite: '建設現場',
boatGarage: 'ボートガレージ',
boatSupplies: 'ボート倉庫',
pipes: '漏水区画',
boatSuppliesHallway: 'ボート<br/>倉庫前<br/>廊下',
lockerRoom: 'ロッカールーム',
coastGuardStairs: '沿岸警備<br/>階段',
showers: 'シャワー<br/>ルーム',
showersCorridor: 'シャワー<br/>ルーム前',
loadingDock: '搬出口',
machineHallway: '機械室前<br/>廊下',
controlCenterStairs: '管制センター<br/>階段',
holdingRoom: '待合室',
holdingRoomHallway: '待合室<br/>廊下',
radio: '無線室',
coastGuardOffice: '沿岸警備<br/>オフィス',
archives: '資料室',
coastGuardHall: '事務所前<br/>廊下',
mainEntrance: '正面玄関',
lounge: 'ラウンジ',
bridge: '渡り廊下',
modelRoom: 'モデルルーム',
securityRoom: 'セキュリティ<br/>ルーム',
projectorRoom: '映写室',
mapsOffice: '海図管理室',
mapsOfficeHallway: '海図管理室前<br/>廊下',
kitchen: 'キッチン',
cafeteria: 'カフェテリア',
plantsHallway: 'プラント育成区画',
thirdFloorExit: '3階出口',
controlRoom: '管制室',
electricRoom: '配電室',
controlRoomHallway: '管制室廊下',
serverRoom: 'サーバー<br/>ルーム',
lockgate: '水門',
quayContainers: '埠頭コンテナ',
lockgateTunnel: '水門トンネル',
waterWalkway: '水上通路',
quayConstruction: '埠頭建設現場',
constructionSite: '建設現場',
constructionEntrance: '建設現場<br/>入口',
parkingAlley: '駐車場路地',
parkingEntrance: '駐車場入口',
middleRoad: '中央道路',
forkliftAlley: 'フォークリフト用<br/>通路',
frontLawn: '前庭',
basementStairs: '地下階段',
coastGuardRoof: '沿岸警備<br/>屋上',
bridgeRoof: 'ブリッジ<br/>ルーフ',
roofBrickPile: 'レンガ屋根',
balcony: 'テラス',
controlCenterRoof: '管制センター<br/>屋上',
dockStairs: 'ドック階段',
parking: '駐車場',
boatCrane: 'ボートクレーン',
mapArchives: ''
},
kafe: {
spawnRiverDocks: '桟橋',
spawnChristmasMarket: 'クリスマスマーケット',
spawnPark: '公園',
laundryRoom: '洗濯室',
frontStairs: '正面階段',
bakery: 'ベーカリー',
bakeryKitchen: 'ベーカリー<br/>キッチン',
kitchenPrep: 'キッチン<br/>準備室',
kitchenGrill: 'キッチン<br/>グリル',
kitchenPickUp: 'キッチン<br/>配膳室',
coldRoom: '冷蔵室',
diningRoom: 'カフェ1F',
backStairs: '裏口<br/>階段',
coldRoomCorridor: '冷蔵室前<br/>通路',
VIPSection: 'VIP席',
barStairs: 'バー階段',
storage: '倉庫',
museumEntrance: '博物館<br/>入口',
miningRoom: '掘削機<br/>展示室',
trainMuseum: '列車展示室',
pillarDiningRoom: 'カフェ2F',
mainCorridor: 'メイン通路',
readingRoomCorridor: '読書室通路',
readingRoom: '読書室',
fireplaceHall: '暖炉ホール',
cigarShop: 'シガーショップ',
cigarLounge: 'シガーラウンジ',
bar: 'バー',
barBackstore: 'バー倉庫',
washrooms: '洗面所',
washroomCorridor: '洗面所前通路',
cocktailLounge: 'カクテル<br/>ラウンジ',
cocktailLoungeEntrance: 'カクテル<br/>ラウンジ<br/>入口',
westMainStreet: 'メインストリート西',
mainStreet: 'メインストリート',
eastMainStreet: '',
bakeryParking: 'ベーカリー<br/>駐車場',
bakeryRoof: 'ベーカリー屋上',
cafeRoofTop: 'カフェ屋上',
terrace: 'テラス',
backAlley: '裏路地',
garrage: 'ガレージ',
parkAlley: '公園路地'
},
oregon: {
spawnJunkyard: '廃品置場',
spawnStreet: '通り',
spawnConstructionSite: '建設現場',
towerStairs: 'タワー階段',
boilerRoom: 'ボイラー室',
electricRoom: '配電室',
bunkerEntrance: 'バンカー<br/>入口',
bunker: 'バンカー',
basementCorridor: '地下通路',
        supplyRoom: '備品室',
laundryRoom: '洗濯室',
laundryStorage: '洗濯室<br/>倉庫',
laundryStairs: '洗濯室<br/>階段',
office: 'オフィス',
diningHall: '食堂',
diningHallCorridor: '食堂通路',
showers: 'シャワールーム',
kitchen: 'キッチン',
bathroom: '共同トイレ',
dormStairs: '共同寝室<br/>の階段',
pantry: '食料庫',
bathroomCorridor: '共同トイレ前',
classroom: '教室',
lobby: 'ロビー',
mainStairs: 'メイン<br/>階段',
meetingHall: '会議ホール',
rearStage: 'ホール裏',
garage: 'ガレージ',
officeStorage: 'オフィス<br/>備品置き場',
kidsDorm: '子供部屋',
dormMainHall: '共同寝室(大)',
smallDorms: '共同寝室(小)',
armoryCorridor: '武器庫通路',
masterBedroom: 'メイン<br/>ベッドルーム',
armory: '武器庫',
walkIn: 'ウォークイン',
attic: '裏屋根',
watchTower: '監視タワー',
busYard: 'バスヤード',
junkyard: '廃品置き場',
farmlands: '農地',
shootingRange: '射撃練習場',
constructionSite: '建設現場',
parking: '駐車場',
mainEntrance: '正面玄関',
street: '通り',
balcony: 'テラス',
diningHallRoof: '食堂屋上',
officeRoof: 'オフィス屋上',
meetingHallEntrance: '会議室入口',
garageRoof: 'ガレージ屋上',
dormsRoof: '共同寝室<br/>屋根',
meetingHallRoof: '会議ホール<br/>屋根',
supplyCloset: ''
},
plane: {
spawnOfficialEntrance: '前方搭乗口',
spawnReporterEntrance: '記者エントランス',
spawnServiceEntrance: '前方サービス<br/>エントランス',
pressBathroom: 'プレス<br/>バスルーム',
meetingRoom: '会議室',
frontHallway: '正面廊下',
executiveOffice: 'VIPオフィス',
mainEntrance: '前方搭乗口',
frontStairs: '正面階段',
pantry: '食料庫',
kitchen: 'キッチン',
executiveHallway: 'VIP専用通路',
executiveBedroom: 'VIP<br/>ベッドルーム',
changeRoom: '更衣室',
laund: '洗濯室',
frontServiceEntrance: '前方サービス<br/>エントランス',
rightWing: '右翼',
backServiceEntrance: '後方サービス<br/>ドア',
reporterEntrance: '後方搭乗口',
leftWing: '左翼',
staffSection: 'スタッフ席',
securityRoom: 'セキュリティ<br/>ルーム',
pressSectionA: '記者席A',
pressSectionB: '記者席B',
backStairs: '後方階段',
cargoHold: '貨物庫',
serviceCorridor: '従業員用通路',
storage: '倉庫',
luggageHold: '荷物庫',
firstAidStation: '救護室',
cargoFrontEntrance: '貨物用<br/>正面入口',
cockpitStairs: 'コックピット<br/>階段',
cabinStaff: 'スタッフルーム',
radioCabin: '無線室',
cabin: 'コックピット',
caterer: 'サービスドア',
serverRoomA: 'サーバールームA',
serverRoomB: 'サーバールームB',
technicalSeating: 'テックシート',
ladderRoom: ''
},
skyscraper: {
helipad: 'ヘリポート',
tower: 'タワー',
ventilationUnits: '空調設備',
kitchen: 'キッチン',
pantry: '食料庫',
deliveryRoom: 'デリバリー<br/>ルーム',
houseLobby: '建物ロビー',
houseEntrance: '建物入口',
mainEntrance: '正面玄関',
reception: '受付',
bedroom: 'ベッドルーム',
closet: 'クローゼット',
bathroom: 'バスルーム',
houseStairs: '建物<br/>階段',
restaurant: 'レストラン',
toilet: 'トイレ',
bbq: 'BBQ',
backHallway: '裏廊下',
mainStairs: 'メイン<br/>階段',
geishaRoom: '芸者部屋',
hallway: '廊下',
karaoke: 'カラオケ',
teaRoom: '茶室',
taiko: '太鼓',
terrace: 'テラス',
backStairs: '裏階段',
houseBalcony: '建物<br/>バルコニー',
exhibition: '展示',
lounge: 'ラウンジ',
workOffice: 'オフィス',
clearance: 'VIP<br/>クリアランス',
peacefullTree: '平穏の木',
contemplationGarden: '沈思の庭園',
westGarden: '庭園(西)',
bridge: '橋',
gazeebo: '展望台',
restBalcony: 'レストラン<br/>バルコニー',
northGarden: '庭園(北)',
eastGarden: '庭園(東)',
sandGarden: '枯山水',
sidePath: '脇道',
sideStairs: '側面階段',
dragonStatue: '竜の像',
coveredWalkway: '屋根付き<br/>通路'
},
yacht: {
spawnSubmarine: '潜水艦',
spawnZodiak: 'ゾディアック',
spawnSnowMobile: 'スノーモービル',
mapsRoom: 'マップルーム',
cockpit: 'コックピット',
cockpitHallway: 'コックピット前廊下',
captainsOffice: '船長室',
cockpitBalcony: 'コックピット<br/>バルコニー',
topDeckStairs: 'トップ<br/>デッキ<br/>階段',
helipadEntrance: 'ヘリポート<br/>入口',
helipad: 'ヘリポート',
spaDeck: 'スパ<br/>デッキ',
eastDeck: '東デッキ',
westDeck: '西デッキ',
frontDeck: 'フロント<br/>デッキ',
masterBedroom: 'メイン<br/>ベッドルーム',
casino: 'カジノ',
pokerRoom: 'ポーカー<br/>ルーム',
bathroom: 'バスルーム',
bedroomHallway: 'ベッドルーム前廊下',
casinoHallway: 'カジノ前廊下',
globeHallway: '地球儀廊下',
lounge: 'ラウンジ',
cafeteria: 'カフェテリア',
engine: 'エンジン',
backEntrance: 'エンジン<br/>後方入口',
rearDeck: '後方<br/>デッキ',
serverRoom: 'サーバー<br/>ルーム',
engineStorage: 'エンジン<br/>スペース',
engineControl: 'エンジン<br/>制御装置',
backStairs: '後方<br/>階段',
emergencyExit: '非常口',
engineHallway: 'エンジン前廊下',
frontStairs: '正面<br/>階段',
kitchen: 'キッチン',
staffDormitory: 'スタッフ用<br/>共同部屋',
westBalcony: '西バルコニー',
eastBalcony: '東バルコニー',
kitchenHallway: 'キッチン前廊下',
kitchenStairs: 'キッチン階段',
kitchenPantry: 'キッチン<br/>食料庫',
infirmary: '医療室',
borealSubRoom: 'アクラーク号<br/>船倉',
cafeteriaHallway: 'カフェテリア前廊下',
engineUtility: 'エンジン設備',
submarine: '潜水艦',
westGlacier: '西側グレーシャー',
eastHullBreach: '船体東側<br/>ブリーチ',
eastGlacier: '東側グレーシャー',
frozenRiver: '冷たい川',
zodiac: 'ゾディアック',
westHullBreach: '船体西側ブリーチ',
kingOfTheWorld: '',
roof: '',
anchorName: '',
aklarkSubEntrance: ''
}
}
};
R6MapsLangTerms.registerLanguage(name, terms);
return {
name: name,
terms: terms
};
})(R6MapsLangTerms);
|
Japanese translations for coastline map name
|
dev/js/langs/r6-maps.lang-terms.ja.js
|
Japanese translations for coastline map name
|
<ide><path>dev/js/langs/r6-maps.lang-terms.ja.js
<ide> border: '国境',
<ide> chalet: '山荘',
<ide> club: 'クラブハウス',
<add> coastline: '海岸線',
<ide> consulate: '領事館',
<ide> favela: 'ファべーラ',
<ide> hereford: 'ヘレフォード基地',
|
|
Java
|
mit
|
0c2c54e1a157b6a1e4891de30c9833b29710f891
| 0 |
metamolecular/mx
|
/*
* MX - Essential Cheminformatics
*
* Copyright (c) 2007-2009 Metamolecular, LLC
*
* http://metamolecular.com/mx
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.metamolecular.mx.fingerprint;
import com.metamolecular.mx.model.Atom;
import com.metamolecular.mx.model.Molecule;
import com.metamolecular.mx.query.AromaticAtomFilter;
import com.metamolecular.mx.ring.HanserRingFinder;
import com.metamolecular.mx.ring.RingFilter;
import com.metamolecular.mx.walk.DefaultWalker;
import com.metamolecular.mx.walk.PathWriter;
import com.metamolecular.mx.walk.Walker;
import java.util.BitSet;
import java.util.HashSet;
import java.util.Set;
/**
* @author Richard L. Apodaca <rapodaca at metamolecular.com>
*/
public class PathFingerprinter implements Fingerprinter
{
private PathWriter writer;
private BloomFilter bloomFilter;
private Walker walker;
private RingFilter filter;
private Set<Atom> aromatics;
public PathFingerprinter()
{
this(new RingFilter(new AromaticAtomFilter(), new HanserRingFinder()));
}
public PathFingerprinter(RingFilter filter)
{
this.bloomFilter = new BloomFilter(1024);
this.writer = new PathWriter(bloomFilter);
this.walker = new DefaultWalker();
this.filter = filter;
this.aromatics = new HashSet();
walker.setMaximumDepth(7);
}
public RingFilter getRingFilter()
{
return filter;
}
public void setRingFilter(RingFilter filter)
{
this.filter = filter;
}
public void setMaximumPathDepth(int maxDepth)
{
walker.setMaximumDepth(maxDepth);
}
public int getMaximumPathDepth()
{
return walker.getMaximumDepth();
}
public void setFingerprintLength(int length)
{
this.bloomFilter = new BloomFilter(length);
}
public int getFingerprintLength()
{
return bloomFilter.getBitArraySize();
}
public BitSet getFingerprint(Molecule molecule)
{
bloomFilter.clear();
findAromatics(molecule);
// System.out.println(molecule.countAtoms());
for (int i = 0; i < molecule.countAtoms(); i++)
{
Atom atom = molecule.getAtom(i);
walker.walk(atom, writer);
}
return bloomFilter.toBitSet();
}
private void findAromatics(Molecule molecule)
{
aromatics.clear();
filter.filterAtoms(molecule, aromatics);
for (Atom atom : aromatics)
{
aromatics.add(atom);
}
writer.setAromatics(aromatics);
}
}
|
src/com/metamolecular/mx/fingerprint/PathFingerprinter.java
|
/*
* MX - Essential Cheminformatics
*
* Copyright (c) 2007-2009 Metamolecular, LLC
*
* http://metamolecular.com/mx
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.metamolecular.mx.fingerprint;
import com.metamolecular.mx.model.Atom;
import com.metamolecular.mx.model.Molecule;
import com.metamolecular.mx.query.AromaticAtomFilter;
import com.metamolecular.mx.ring.HanserRingFinder;
import com.metamolecular.mx.ring.RingFilter;
import com.metamolecular.mx.walk.DefaultWalker;
import com.metamolecular.mx.walk.PathWriter;
import com.metamolecular.mx.walk.Walker;
import java.util.BitSet;
import java.util.HashSet;
import java.util.Set;
/**
* @author Richard L. Apodaca <rapodaca at metamolecular.com>
*/
public class PathFingerprinter implements Fingerprinter
{
private PathWriter writer;
private BloomFilter bloomFilter;
private Walker walker;
private RingFilter filter;
private Set<Atom> aromatics;
public PathFingerprinter()
{
this(new RingFilter(new AromaticAtomFilter(), new HanserRingFinder()));
}
public PathFingerprinter(RingFilter filter)
{
this.bloomFilter = new BloomFilter(1024);
this.writer = new PathWriter(bloomFilter);
this.walker = new DefaultWalker();
this.filter = filter;
this.aromatics = new HashSet();
walker.setMaximumDepth(7);
}
public RingFilter getRingFilter()
{
return filter;
}
public void setRingFilter(RingFilter filter)
{
this.filter = filter;
}
public void setMaximumPathDepth(int maxDepth)
{
walker.setMaximumDepth(maxDepth);
}
public int getMaximumPathDepth()
{
return walker.getMaximumDepth();
}
public void setFingerprintLength(int length)
{
this.bloomFilter = new BloomFilter(length);
}
public int getFingerprintLength()
{
return bloomFilter.getBitArraySize();
}
public BitSet getFingerprint(Molecule molecule)
{
bloomFilter.clear();
findAromatics(molecule);
// System.out.println(molecule.countAtoms());
for (int i = 1; i < molecule.countAtoms(); i++)
{
Atom atom = molecule.getAtom(i);
walker.walk(atom, writer);
}
return bloomFilter.toBitSet();
}
private void findAromatics(Molecule molecule)
{
aromatics.clear();
filter.filterAtoms(molecule, aromatics);
for (Atom atom : aromatics)
{
aromatics.add(atom);
}
writer.setAromatics(aromatics);
}
}
|
fixed counter loop
|
src/com/metamolecular/mx/fingerprint/PathFingerprinter.java
|
fixed counter loop
|
<ide><path>rc/com/metamolecular/mx/fingerprint/PathFingerprinter.java
<ide>
<ide> // System.out.println(molecule.countAtoms());
<ide>
<del> for (int i = 1; i < molecule.countAtoms(); i++)
<add> for (int i = 0; i < molecule.countAtoms(); i++)
<ide> {
<ide> Atom atom = molecule.getAtom(i);
<ide>
|
|
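The fix above is a single loop bound: the walk in getFingerprint() previously started at index 1, so the first atom of every molecule was never handed to the path walker and any fingerprint bits from paths rooted at that atom were silently lost. A minimal standalone sketch of the difference between the two bounds (hypothetical class name OffByOneDemo, not part of the MX sources):

// Standalone sketch illustrating why the walk loop must start at index 0:
// starting at 1 skips the first element entirely.
public class OffByOneDemo {
    public static void main(String[] args) {
        String[] atoms = {"C1", "C2", "C3"};

        // Buggy variant (old_contents): starts at 1 and never visits atoms[0].
        int visitedFromOne = 0;
        for (int i = 1; i < atoms.length; i++) {
            visitedFromOne++;
        }

        // Fixed variant (new_contents): starts at 0 and visits every atom.
        int visitedFromZero = 0;
        for (int i = 0; i < atoms.length; i++) {
            visitedFromZero++;
        }

        System.out.println("from 1: " + visitedFromOne + ", from 0: " + visitedFromZero); // prints 2 vs 3
    }
}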
Java
|
lgpl-2.1
|
37f2f95e98d80ebce1555e7f4b98998f4a34e2b7
| 0 |
juanmjacobs/kettle,cwarden/kettle,cwarden/kettle,cwarden/kettle,juanmjacobs/kettle,juanmjacobs/kettle
|
package org.pentaho.di.core.reflection;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.List;
import org.pentaho.di.core.Condition;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.logging.LogWriter;
public class StringSearcher
{
public static final void findMetaData(Object object, int level, List<StringSearchResult> stringList, Object parentObject, Object grandParentObject)
{
// System.out.println(Const.rightPad(" ", level)+"Finding strings in "+object.toString());
if (level>5) return;
Class<? extends Object> baseClass = object.getClass();
Field[] fields = baseClass.getDeclaredFields();
for (int i=0;i<fields.length;i++)
{
Field field = fields[i];
boolean processThisOne = true;
if ( (field.getModifiers()&Modifier.FINAL ) > 0) processThisOne=false;
if ( (field.getModifiers()&Modifier.STATIC) > 0) processThisOne=false;
if ( field.toString().indexOf("org.pentaho.di")<0 ) processThisOne=false; // Stay in this code-base.
if (processThisOne)
{
try
{
Object obj = field.get(object);
if (obj!=null)
{
if (obj instanceof String)
{
// OK, let's add the String
stringList.add(new StringSearchResult((String)obj, parentObject, grandParentObject, field.getName()));
}
else
if (obj instanceof String[])
{
String[] array = (String[])obj;
for (int x=0;x<array.length;x++)
{
if (array[x]!=null)
{
stringList.add(new StringSearchResult(array[x], parentObject, grandParentObject, field.getName()+" #"+(x+1)));
}
}
}
else
if (obj instanceof Boolean)
{
// OK, let's add the String
stringList.add(new StringSearchResult(((Boolean)obj).toString(), parentObject, grandParentObject, field.getName()+" (Boolean)"));
}
else
if (obj instanceof Condition)
{
stringList.add(new StringSearchResult(((Condition)obj).toString(), parentObject, grandParentObject, field.getName()+" (Condition)"));
}
else
if (obj instanceof Object[])
{
for (int j=0;j<((Object[])obj).length;j++) findMetaData( ((Object[])obj)[j], level+1, stringList, parentObject, grandParentObject);
}
else {
findMetaData(obj, level+1, stringList, parentObject, grandParentObject);
}
}
}
catch(IllegalAccessException e)
{
// OK, it's private, let's see if we can go there later on using getters and setters...
// fileName becomes: getFileName();
Method method = findMethod(baseClass, field.getName());
if (method!=null)
{
String fullMethod = baseClass.getName()+"."+method.getName()+"()";
// OK, how do we get the value now?
try
{
// System.out.println(Const.rightPad(" ", level)+" Invoking method: "+fullMethod+", on object: "+object.toString());
Object string = method.invoke(object, (Object[])null);
if (string!=null)
{
if (string instanceof String)
{
stringList.add(new StringSearchResult((String)string, parentObject, grandParentObject, field.getName()));
// System.out.println(Const.rightPad(" ", level)+" "+field.getName()+" : method "+fullMethod+" --> "+((String)string));
}
else
if (string instanceof String[])
{
String[] array = (String[])string;
for (int x=0;x<array.length;x++)
{
if (array[x]!=null)
{
stringList.add(new StringSearchResult(array[x], parentObject, grandParentObject, field.getName()+" #"+(x+1)));
/// System.out.println(Const.rightPad(" ", level)+" "+field.getName()+" : method "+fullMethod+" --> String #"+x+" = "+array[x]);
}
}
}
else
if (string instanceof Boolean)
{
// OK, let's add the String
stringList.add(new StringSearchResult(((Boolean)string).toString(), parentObject, grandParentObject, field.getName()+" (Boolean)"));
}
else
if (string instanceof Condition)
{
stringList.add(new StringSearchResult(((Condition)string).toString(), parentObject, grandParentObject, field.getName()+" (Condition)"));
}
else
if (string instanceof Object[])
{
for (int j=0;j<((Object[])string).length;j++) findMetaData( ((Object[])string)[j], level+1, stringList, parentObject, grandParentObject);
}
else
{
findMetaData(string, level+1, stringList, parentObject, grandParentObject);
}
}
}
catch(Exception ex)
{
LogWriter.getInstance().logDebug("StringSearcher", Const.rightPad(" ", level)+" Unable to get access to method "+fullMethod+" : "+e.toString());
}
}
}
}
}
}
private static Method findMethod(Class<? extends Object> baseClass, String name)
{
// baseClass.getMethod(methodName[m], null);
Method[] methods = baseClass.getDeclaredMethods();
Method method = null;
// getName()
if (method==null)
{
String getter = constructGetter(name);
method = searchGetter(getter, baseClass, methods);
}
// isName()
if (method==null)
{
String getter = constructIsGetter(name);
method = searchGetter(getter, baseClass, methods);
}
// name()
if (method==null)
{
String getter = name;
method = searchGetter(getter, baseClass, methods);
}
return method;
}
private static Method searchGetter(String getter, Class<?> baseClass, Method[] methods)
{
Method method =null;
try
{
method=baseClass.getMethod(getter);
}
catch(Exception e)
{
// Nope try case insensitive.
for (int i=0;i<methods.length;i++)
{
String methodName = methods[i].getName();
if (methodName.equalsIgnoreCase(getter))
{
return methods[i];
}
}
}
return method;
}
public static final String constructGetter(String name)
{
StringBuffer buf = new StringBuffer();
buf.append("get");
buf.append(name.substring(0,1).toUpperCase());
buf.append(name.substring(1));
return buf.toString();
}
public static final String constructIsGetter(String name)
{
StringBuffer buf = new StringBuffer();
buf.append("is");
buf.append(name.substring(0,1).toUpperCase());
buf.append(name.substring(1));
return buf.toString();
}
}
|
src/org/pentaho/di/core/reflection/StringSearcher.java
|
package org.pentaho.di.core.reflection;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.List;
import org.pentaho.di.core.Condition;
import org.pentaho.di.core.Const;
public class StringSearcher
{
public static final void findMetaData(Object object, int level, List<StringSearchResult> stringList, Object parentObject, Object grandParentObject)
{
// System.out.println(Const.rightPad(" ", level)+"Finding strings in "+object.toString());
if (level>5) return;
Class<? extends Object> baseClass = object.getClass();
Field[] fields = baseClass.getDeclaredFields();
for (int i=0;i<fields.length;i++)
{
Field field = fields[i];
boolean processThisOne = true;
if ( (field.getModifiers()&Modifier.FINAL ) > 0) processThisOne=false;
if ( (field.getModifiers()&Modifier.STATIC) > 0) processThisOne=false;
if ( field.toString().indexOf("org.pentaho.di")<0 ) processThisOne=false; // Stay in this code-base.
if (processThisOne)
{
try
{
Object obj = field.get(object);
if (obj!=null)
{
if (obj instanceof String)
{
// OK, let's add the String
stringList.add(new StringSearchResult((String)obj, parentObject, grandParentObject, field.getName()));
}
else
if (obj instanceof String[])
{
String[] array = (String[])obj;
for (int x=0;x<array.length;x++)
{
if (array[x]!=null)
{
stringList.add(new StringSearchResult(array[x], parentObject, grandParentObject, field.getName()+" #"+(x+1)));
}
}
}
else
if (obj instanceof Boolean)
{
// OK, let's add the String
stringList.add(new StringSearchResult(((Boolean)obj).toString(), parentObject, grandParentObject, field.getName()+" (Boolean)"));
}
else
if (obj instanceof Condition)
{
stringList.add(new StringSearchResult(((Condition)obj).toString(), parentObject, grandParentObject, field.getName()+" (Condition)"));
}
else
if (obj instanceof Object[])
{
for (int j=0;j<((Object[])obj).length;j++) findMetaData( ((Object[])obj)[j], level+1, stringList, parentObject, grandParentObject);
}
else {
findMetaData(obj, level+1, stringList, parentObject, grandParentObject);
}
}
}
catch(IllegalAccessException e)
{
// OK, it's private, let's see if we can go there later on using getters and setters...
// fileName becomes: getFileName();
Method method = findMethod(baseClass, field.getName());
if (method!=null)
{
String fullMethod = baseClass.getName()+"."+method.getName()+"()";
// OK, how do we get the value now?
try
{
// System.out.println(Const.rightPad(" ", level)+" Invoking method: "+fullMethod+", on object: "+object.toString());
Object string = method.invoke(object, (Object[])null);
if (string!=null)
{
if (string instanceof String)
{
stringList.add(new StringSearchResult((String)string, parentObject, grandParentObject, field.getName()));
// System.out.println(Const.rightPad(" ", level)+" "+field.getName()+" : method "+fullMethod+" --> "+((String)string));
}
else
if (string instanceof String[])
{
String[] array = (String[])string;
for (int x=0;x<array.length;x++)
{
if (array[x]!=null)
{
stringList.add(new StringSearchResult(array[x], parentObject, grandParentObject, field.getName()+" #"+(x+1)));
/// System.out.println(Const.rightPad(" ", level)+" "+field.getName()+" : method "+fullMethod+" --> String #"+x+" = "+array[x]);
}
}
}
else
if (string instanceof Boolean)
{
// OK, let's add the String
stringList.add(new StringSearchResult(((Boolean)string).toString(), parentObject, grandParentObject, field.getName()+" (Boolean)"));
}
else
if (string instanceof Condition)
{
stringList.add(new StringSearchResult(((Condition)string).toString(), parentObject, grandParentObject, field.getName()+" (Condition)"));
}
else
if (string instanceof Object[])
{
for (int j=0;j<((Object[])string).length;j++) findMetaData( ((Object[])string)[j], level+1, stringList, parentObject, grandParentObject);
}
else
{
findMetaData(string, level+1, stringList, parentObject, grandParentObject);
}
}
}
catch(Exception ex)
{
System.out.println(Const.rightPad(" ", level)+" Unable to get access to method "+fullMethod+" : "+e.toString());
}
}
}
}
}
}
private static Method findMethod(Class<? extends Object> baseClass, String name)
{
// baseClass.getMethod(methodName[m], null);
Method[] methods = baseClass.getDeclaredMethods();
Method method = null;
// getName()
if (method==null)
{
String getter = constructGetter(name);
method = searchGetter(getter, baseClass, methods);
}
// isName()
if (method==null)
{
String getter = constructIsGetter(name);
method = searchGetter(getter, baseClass, methods);
}
// name()
if (method==null)
{
String getter = name;
method = searchGetter(getter, baseClass, methods);
}
return method;
}
private static Method searchGetter(String getter, Class<?> baseClass, Method[] methods)
{
Method method =null;
try
{
method=baseClass.getMethod(getter);
}
catch(Exception e)
{
// Nope try case insensitive.
for (int i=0;i<methods.length;i++)
{
String methodName = methods[i].getName();
if (methodName.equalsIgnoreCase(getter))
{
return methods[i];
}
}
}
return method;
}
public static final String constructGetter(String name)
{
StringBuffer buf = new StringBuffer();
buf.append("get");
buf.append(name.substring(0,1).toUpperCase());
buf.append(name.substring(1));
return buf.toString();
}
public static final String constructIsGetter(String name)
{
StringBuffer buf = new StringBuffer();
buf.append("is");
buf.append(name.substring(0,1).toUpperCase());
buf.append(name.substring(1));
return buf.toString();
}
}
|
Cosmetical fix for PDI-162
git-svn-id: 51b39fcfd0d3a6ea7caa15377cad4af13b9d2664@5461 5fb7f6ec-07c1-534a-b4ca-9155e429e800
|
src/org/pentaho/di/core/reflection/StringSearcher.java
|
Cosmetical fix for PDI-162
|
<ide><path>rc/org/pentaho/di/core/reflection/StringSearcher.java
<ide>
<ide> import org.pentaho.di.core.Condition;
<ide> import org.pentaho.di.core.Const;
<add>import org.pentaho.di.core.logging.LogWriter;
<ide>
<ide>
<ide> public class StringSearcher
<ide> }
<ide> catch(Exception ex)
<ide> {
<del> System.out.println(Const.rightPad(" ", level)+" Unable to get access to method "+fullMethod+" : "+e.toString());
<add> LogWriter.getInstance().logDebug("StringSearcher", Const.rightPad(" ", level)+" Unable to get access to method "+fullMethod+" : "+e.toString());
<ide> }
<ide> }
<ide> }
|
|
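The change above only swaps a System.out.println for LogWriter.getInstance().logDebug(...), but the reflection fallback around it is worth a small illustration: when a field cannot be read directly, StringSearcher derives candidate accessor names from the field name and tries them in order. A self-contained sketch of that name construction (hypothetical class GetterNameDemo, reproducing the two helper methods outside the Kettle code base purely for illustration):

// Standalone illustration of the accessor-name construction used by the
// private-field fallback: "fileName" -> getFileName(), then isFileName(),
// then the raw name itself.
public class GetterNameDemo {
    static String constructGetter(String name) {
        return "get" + name.substring(0, 1).toUpperCase() + name.substring(1);
    }

    static String constructIsGetter(String name) {
        return "is" + name.substring(0, 1).toUpperCase() + name.substring(1);
    }

    public static void main(String[] args) {
        String field = "fileName";
        System.out.println(constructGetter(field));   // getFileName
        System.out.println(constructIsGetter(field)); // isFileName
    }
}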
Java
|
apache-2.0
|
e747a90bb2a887611ce80785313ec4b3822a5cc2
| 0 |
IrinIv/Java_course,IrinIv/Java_course
|
package ru.stqa.pft.addressbook.tests;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import ru.stqa.pft.addressbook.model.ContactData;
import ru.stqa.pft.addressbook.model.Contacts;
import ru.stqa.pft.addressbook.model.GroupData;
import ru.stqa.pft.addressbook.model.Groups;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.stream.Collectors;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
/**
* Created by IrinaIv on 6/23/2017.
*/
public class ContactDeleteFromGroupTests extends TestBase {
@BeforeMethod
public void ensurePreconditions() {
app.goTo().groupPage();
if (app.db().groups().size() == 0) {
app.goTo().groupPage();
app.group().create(new GroupData().withName("test3"));
}
}
@Test
public void testContactDeleteFromGroup() {
app.contact().homePage();
Groups groups = app.db().groups();
Contacts before = app.db().contacts();
app.contact().selectGroup();
Contacts after = app.contact().all();
ContactData deletedContact = after.iterator().next();
app.contact().deleteContactFromGroup(deletedContact);
Contacts selected = app.contact().all().inSelectedGroup(deletedContact);
Contacts withoutselected = app.contact().all().withOutSelected(deletedContact);
assertThat(((app.contact().all().withOutSelected(deletedContact))).size(), equalTo(selected.size() - 1));
}
}
|
addressbook-web-test/src/test/java/ru/stqa/pft/addressbook/tests/ContactDeleteFromGroupTests.java
|
package ru.stqa.pft.addressbook.tests;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import ru.stqa.pft.addressbook.model.ContactData;
import ru.stqa.pft.addressbook.model.Contacts;
import ru.stqa.pft.addressbook.model.GroupData;
import ru.stqa.pft.addressbook.model.Groups;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.stream.Collectors;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
/**
* Created by IrinaIv on 6/23/2017.
*/
public class ContactDeleteFromGroupTests extends TestBase {
@BeforeMethod
public void ensurePreconditions() {
app.goTo().groupPage();
if (app.db().groups().size() == 0) {
app.goTo().groupPage();
app.group().create(new GroupData().withName("test3"));
}
}
@Test
public void testContactDeleteFromGroup() {
app.contact().homePage();
Groups groups = app.db().groups();
Contacts before = app.db().contacts();
app.contact().selectGroup();
Contacts after = app.contact().all();
ContactData deletedContact = after.iterator().next();
app.contact().deleteContactFromGroup(deletedContact);
Contacts selected = app.db().contacts().withOutSelected(deletedContact);
assertThat(((app.db().contacts().withOutSelected(deletedContact))).size(), equalTo(selected.size()));
}
}
|
Changed ContactDeleteFromGroupTests
|
addressbook-web-test/src/test/java/ru/stqa/pft/addressbook/tests/ContactDeleteFromGroupTests.java
|
Changed ContactDeleteFromGroupTests
|
<ide><path>ddressbook-web-test/src/test/java/ru/stqa/pft/addressbook/tests/ContactDeleteFromGroupTests.java
<ide> Contacts after = app.contact().all();
<ide> ContactData deletedContact = after.iterator().next();
<ide> app.contact().deleteContactFromGroup(deletedContact);
<del> Contacts selected = app.db().contacts().withOutSelected(deletedContact);
<add> Contacts selected = app.contact().all().inSelectedGroup(deletedContact);
<add> Contacts withoutselected = app.contact().all().withOutSelected(deletedContact);
<ide>
<del> assertThat(((app.db().contacts().withOutSelected(deletedContact))).size(), equalTo(selected.size()));
<add> assertThat(((app.contact().all().withOutSelected(deletedContact))).size(), equalTo(selected.size() - 1));
<ide>
<ide> }
<ide>
|
|
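The updated assertion above compares collection sizes after removing one contact from the selected group: the set without the deleted contact should be exactly one element smaller than the in-group set. A minimal sketch of that size check with plain collections (hypothetical class SizeAssertionDemo; assumes org.hamcrest is on the classpath, as in the test's own imports):

// Minimal sketch of the "one element fewer after removal" assertion pattern.
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class SizeAssertionDemo {
    public static void main(String[] args) {
        List<String> inGroup = new ArrayList<>(Arrays.asList("anna", "boris", "clara"));
        int sizeBefore = inGroup.size();

        inGroup.remove("boris"); // analogous to deleteContactFromGroup(deletedContact)

        // Remaining collection must be exactly one smaller than before.
        assertThat(inGroup.size(), equalTo(sizeBefore - 1));
        System.out.println("assertion passed: " + inGroup);
    }
}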
Java
|
epl-1.0
|
e94ca3ffa41f85c1733e883563d6a6bfa4b11757
| 0 |
rohitmohan96/ceylon-ide-eclipse,rohitmohan96/ceylon-ide-eclipse
|
/*
* Copyright Red Hat Inc. and/or its affiliates and other contributors
* as indicated by the authors tag. All rights reserved.
*
* This copyrighted material is made available to anyone wishing to use,
* modify, copy, or redistribute it subject to the terms and conditions
* of the GNU General Public License version 2.
*
* This particular file is subject to the "Classpath" exception as provided in the
* LICENSE file that accompanied this code.
*
* This program is distributed in the hope that it will be useful, but WITHOUT A
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU General Public License for more details.
* You should have received a copy of the GNU General Public License,
* along with this distribution; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*/
package com.redhat.ceylon.eclipse.core.model.loader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import org.eclipse.core.resources.IFile;
import org.eclipse.jdt.core.IClassFile;
import org.eclipse.jdt.core.IJavaProject;
import org.eclipse.jdt.core.IPackageFragment;
import org.eclipse.jdt.core.IPackageFragmentRoot;
import org.eclipse.jdt.core.IType;
import org.eclipse.jdt.core.JavaModelException;
import org.eclipse.jdt.core.WorkingCopyOwner;
import org.eclipse.jdt.core.compiler.CharOperation;
import org.eclipse.jdt.internal.compiler.CompilationResult;
import org.eclipse.jdt.internal.compiler.DefaultErrorHandlingPolicies;
import org.eclipse.jdt.internal.compiler.ast.CompilationUnitDeclaration;
import org.eclipse.jdt.internal.compiler.env.AccessRestriction;
import org.eclipse.jdt.internal.compiler.env.IBinaryType;
import org.eclipse.jdt.internal.compiler.env.ICompilationUnit;
import org.eclipse.jdt.internal.compiler.env.ISourceType;
import org.eclipse.jdt.internal.compiler.impl.CompilerOptions;
import org.eclipse.jdt.internal.compiler.impl.ITypeRequestor;
import org.eclipse.jdt.internal.compiler.lookup.BinaryTypeBinding;
import org.eclipse.jdt.internal.compiler.lookup.LookupEnvironment;
import org.eclipse.jdt.internal.compiler.lookup.PackageBinding;
import org.eclipse.jdt.internal.compiler.lookup.ProblemReasons;
import org.eclipse.jdt.internal.compiler.lookup.ProblemReferenceBinding;
import org.eclipse.jdt.internal.compiler.lookup.ReferenceBinding;
import org.eclipse.jdt.internal.compiler.parser.Parser;
import org.eclipse.jdt.internal.compiler.parser.SourceTypeConverter;
import org.eclipse.jdt.internal.compiler.problem.AbortCompilationUnit;
import org.eclipse.jdt.internal.compiler.problem.DefaultProblemFactory;
import org.eclipse.jdt.internal.compiler.problem.ProblemReporter;
import org.eclipse.jdt.internal.core.ClassFile;
import org.eclipse.jdt.internal.core.JavaProject;
import org.eclipse.jdt.internal.core.SourceTypeElementInfo;
import org.eclipse.jdt.internal.core.search.BasicSearchEngine;
import com.redhat.ceylon.cmr.api.ArtifactResult;
import com.redhat.ceylon.compiler.java.loader.TypeFactory;
import com.redhat.ceylon.compiler.java.util.Util;
import com.redhat.ceylon.compiler.loader.AbstractModelLoader;
import com.redhat.ceylon.compiler.loader.TypeParser;
import com.redhat.ceylon.compiler.loader.mirror.ClassMirror;
import com.redhat.ceylon.compiler.loader.mirror.MethodMirror;
import com.redhat.ceylon.compiler.loader.model.LazyClass;
import com.redhat.ceylon.compiler.loader.model.LazyInterface;
import com.redhat.ceylon.compiler.loader.model.LazyMethod;
import com.redhat.ceylon.compiler.loader.model.LazyModule;
import com.redhat.ceylon.compiler.loader.model.LazyPackage;
import com.redhat.ceylon.compiler.loader.model.LazyValue;
import com.redhat.ceylon.compiler.typechecker.analyzer.ModuleManager;
import com.redhat.ceylon.compiler.typechecker.context.PhasedUnit;
import com.redhat.ceylon.compiler.typechecker.model.Class;
import com.redhat.ceylon.compiler.typechecker.model.Declaration;
import com.redhat.ceylon.compiler.typechecker.model.ExternalUnit;
import com.redhat.ceylon.compiler.typechecker.model.Interface;
import com.redhat.ceylon.compiler.typechecker.model.Module;
import com.redhat.ceylon.compiler.typechecker.model.Modules;
import com.redhat.ceylon.compiler.typechecker.model.Package;
import com.redhat.ceylon.compiler.typechecker.model.Unit;
import com.redhat.ceylon.compiler.typechecker.tree.Tree;
import com.redhat.ceylon.eclipse.core.model.CeylonDeclaration;
import com.redhat.ceylon.eclipse.core.model.loader.mirror.JDTClass;
import com.redhat.ceylon.eclipse.core.model.loader.mirror.JDTMethod;
import com.redhat.ceylon.eclipse.core.model.loader.mirror.SourceClass;
import com.redhat.ceylon.eclipse.core.model.loader.model.JDTModule;
import com.redhat.ceylon.eclipse.core.model.loader.model.JDTModuleManager;
import com.redhat.ceylon.eclipse.imp.builder.CeylonBuilder;
/**
* A model loader which uses the JDT model.
*
* @author David Festal <[email protected]>
*/
public class JDTModelLoader extends AbstractModelLoader {
private IJavaProject javaProject;
private CompilerOptions compilerOptions;
private ProblemReporter problemReporter;
private LookupEnvironment lookupEnvironment;
private boolean mustResetLookupEnvironment = false;
public JDTModelLoader(final ModuleManager moduleManager, final Modules modules){
this.moduleManager = moduleManager;
this.modules = modules;
this.typeFactory = new TypeFactory(moduleManager.getContext()) {
@Override
public Package getPackage() {
if(super.getPackage() == null){
super.setPackage(modules.getLanguageModule().getDirectPackage("ceylon.language"));
}
return super.getPackage();
}
/**
* Search for a declaration in the language module.
*/
private Map<String, Declaration> languageModuledeclarations = new HashMap<String, Declaration>();
public Declaration getLanguageModuleDeclaration(String name) {
if (languageModuledeclarations.containsKey(name)) {
return languageModuledeclarations.get(name);
}
languageModuledeclarations.put(name, null);
Declaration decl = super.getLanguageModuleDeclaration(name);
languageModuledeclarations.put(name, decl);
return decl;
}
};
this.typeParser = new TypeParser(this, typeFactory);
javaProject = ((JDTModuleManager)moduleManager).getJavaProject();
compilerOptions = new CompilerOptions(javaProject.getOptions(true));
compilerOptions.ignoreMethodBodies = true;
compilerOptions.storeAnnotations = true;
problemReporter = new ProblemReporter(
DefaultErrorHandlingPolicies.proceedWithAllProblems(),
compilerOptions,
new DefaultProblemFactory());
try {
lookupEnvironment = new LookupEnvironment(new ITypeRequestor() {
private Parser basicParser;
@Override
public void accept(ISourceType[] sourceTypes, PackageBinding packageBinding,
AccessRestriction accessRestriction) {
// case of SearchableEnvironment of an IJavaProject is used
ISourceType sourceType = sourceTypes[0];
while (sourceType.getEnclosingType() != null)
sourceType = sourceType.getEnclosingType();
if (sourceType instanceof SourceTypeElementInfo) {
// get source
SourceTypeElementInfo elementInfo = (SourceTypeElementInfo) sourceType;
IType type = elementInfo.getHandle();
ICompilationUnit sourceUnit = (ICompilationUnit) type.getCompilationUnit();
accept(sourceUnit, accessRestriction);
} else {
CompilationResult result = new CompilationResult(sourceType.getFileName(), 1, 1, 0);
CompilationUnitDeclaration unit =
SourceTypeConverter.buildCompilationUnit(
sourceTypes,
SourceTypeConverter.FIELD_AND_METHOD // need field and methods
| SourceTypeConverter.MEMBER_TYPE, // need member types
// no need for field initialization
lookupEnvironment.problemReporter,
result);
lookupEnvironment.buildTypeBindings(unit, accessRestriction);
lookupEnvironment.completeTypeBindings(unit, true);
}
}
@Override
public void accept(IBinaryType binaryType, PackageBinding packageBinding,
AccessRestriction accessRestriction) {
lookupEnvironment.createBinaryTypeFrom(binaryType, packageBinding, accessRestriction);
}
@Override
public void accept(ICompilationUnit sourceUnit,
AccessRestriction accessRestriction) {
// Switch the current policy and compilation result for this unit to the requested one.
CompilationResult unitResult = new CompilationResult(sourceUnit, 1, 1, compilerOptions.maxProblemsPerUnit);
try {
CompilationUnitDeclaration parsedUnit = basicParser().dietParse(sourceUnit, unitResult);
lookupEnvironment.buildTypeBindings(parsedUnit, accessRestriction);
lookupEnvironment.completeTypeBindings(parsedUnit, true);
} catch (AbortCompilationUnit e) {
// at this point, currentCompilationUnitResult may not be sourceUnit, but some other
// one requested further along to resolve sourceUnit.
if (unitResult.compilationUnit == sourceUnit) { // only report once
//requestor.acceptResult(unitResult.tagAsAccepted());
} else {
throw e; // want to abort enclosing request to compile
}
}
// Display unit error in debug mode
if (BasicSearchEngine.VERBOSE) {
if (unitResult.problemCount > 0) {
System.out.println(unitResult);
}
}
}
private Parser basicParser() {
if (this.basicParser == null) {
ProblemReporter problemReporter =
new ProblemReporter(
DefaultErrorHandlingPolicies.proceedWithAllProblems(),
compilerOptions,
new DefaultProblemFactory());
this.basicParser = new Parser(problemReporter, false);
this.basicParser.reportOnlyOneSyntaxError = true;
}
return this.basicParser;
}
}, compilerOptions, problemReporter, ((JavaProject)javaProject).newSearchableNameEnvironment((WorkingCopyOwner)null));
} catch (JavaModelException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
// TODO : remove when the bug in the AbstractModelLoader is corrected
@Override
public LazyPackage findOrCreatePackage(Module module, String pkgName) {
LazyPackage pkg = super.findOrCreatePackage(module, pkgName);
if ("".equals(pkgName)) {
pkg.setName(Collections.<String>emptyList());
}
Module currentModule = pkg.getModule();
if (currentModule.equals(modules.getDefaultModule()) && ! currentModule.equals(module)) {
currentModule.getPackages().remove(pkg);
pkg.setModule(null);
if (module != null) {
module.getPackages().add(pkg);
pkg.setModule(module);
}
}
return pkg;
}
@Override
public void loadStandardModules() {
/*
* We start by loading java.lang and ceylon.language because we will need them no matter what.
*/
Module javaModule = findOrCreateModule("java.lang");
Package javaLangPackage = findOrCreatePackage(javaModule, "java.lang");
javaLangPackage.setShared(true);
loadPackage("java.lang", false);
loadPackage("com.redhat.ceylon.compiler.java.metadata", false);
}
@Override
public void loadPackage(String packageName, boolean loadDeclarations) {
packageName = Util.quoteJavaKeywords(packageName);
if(loadDeclarations && !loadedPackages.add(packageName)){
return;
}
if(!loadDeclarations)
return;
Module module = lookupModule(packageName);
if (module instanceof JDTModule) {
JDTModule jdtModule = (JDTModule) module;
List<IPackageFragmentRoot> roots = jdtModule.getPackageFragmentRoots();
IPackageFragment packageFragment = null;
for (IPackageFragmentRoot root : roots) {
try {
if (CeylonBuilder.isCeylonSourceEntry(root.getRawClasspathEntry())) {
packageFragment = root.getPackageFragment(packageName);
if(packageFragment.exists() && loadDeclarations) {
try {
for (IClassFile classFile : packageFragment.getClassFiles()) {
IType type = classFile.getType();
if (! type.isMember() && !sourceDeclarations.containsKey(type.getFullyQualifiedName())) {
convertToDeclaration(type.getFullyQualifiedName(), DeclarationType.VALUE);
}
}
for (org.eclipse.jdt.core.ICompilationUnit compilationUnit : packageFragment.getCompilationUnits()) {
for (IType type : compilationUnit.getTypes()) {
if (! type.isMember() && !sourceDeclarations.containsKey(type.getFullyQualifiedName())) {
convertToDeclaration(type.getFullyQualifiedName(), DeclarationType.VALUE);
}
}
}
} catch (JavaModelException e) {
e.printStackTrace();
}
}
}
} catch (JavaModelException e) {
e.printStackTrace();
}
}
}
}
private Module lookupModule(String packageName) {
Module module = lookupModuleInternal(packageName);
if (module != null) {
return module;
}
return modules.getDefaultModule();
}
public Module lookupModuleInternal(String packageName) {
for(Module module : modules.getListOfModules()){
if(module instanceof LazyModule){
if(((LazyModule)module).containsPackage(packageName))
return module;
}else if(isSubPackage(module.getNameAsString(), packageName))
return module;
}
return null;
}
private boolean isSubPackage(String moduleName, String pkgName) {
return pkgName.equals(moduleName)
|| pkgName.startsWith(moduleName+".");
}
synchronized private LookupEnvironment getLookupEnvironment() {
if (mustResetLookupEnvironment) {
lookupEnvironment.reset();
mustResetLookupEnvironment = false;
}
return lookupEnvironment;
}
@Override
public synchronized ClassMirror lookupNewClassMirror(String name) {
if (sourceDeclarations.containsKey(name)) {
return new SourceClass(sourceDeclarations.get(name));
}
try {
IType type = javaProject.findType(name);
if (type == null) {
return null;
}
LookupEnvironment theLookupEnvironment = getLookupEnvironment();
if (type.isBinary()) {
ClassFile classFile = (ClassFile) type.getClassFile();
if (classFile != null) {
IBinaryType binaryType = classFile.getBinaryTypeInfo((IFile) classFile.getCorrespondingResource(), true);
BinaryTypeBinding binaryTypeBinding = theLookupEnvironment.cacheBinaryType(binaryType, null);
if (binaryTypeBinding == null) {
char[][] compoundName = CharOperation.splitOn('/', binaryType.getName());
ReferenceBinding existingType = theLookupEnvironment.getCachedType(compoundName);
if (existingType == null || ! (existingType instanceof BinaryTypeBinding)) {
return null;
}
binaryTypeBinding = (BinaryTypeBinding) existingType;
}
return new JDTClass(binaryTypeBinding, theLookupEnvironment);
}
} else {
char[][] compoundName = CharOperation.splitOn('.', type.getFullyQualifiedName().toCharArray());
ReferenceBinding referenceBinding = theLookupEnvironment.getType(compoundName);
if (referenceBinding != null) {
if (referenceBinding instanceof ProblemReferenceBinding) {
ProblemReferenceBinding problemReferenceBinding = (ProblemReferenceBinding) referenceBinding;
if (problemReferenceBinding.problemId() == ProblemReasons.InternalNameProvided) {
referenceBinding = problemReferenceBinding.closestReferenceMatch();
} else {
System.out.println(ProblemReferenceBinding.problemReasonString(problemReferenceBinding.problemId()));
return null;
}
}
return new JDTClass(referenceBinding, theLookupEnvironment);
}
}
} catch (JavaModelException e) {
e.printStackTrace();
}
return null;
}
@Override
public Declaration convertToDeclaration(String typeName,
DeclarationType declarationType) {
if (typeName.startsWith("ceylon.language")) {
return typeFactory.getLanguageModuleDeclaration(typeName.substring(typeName.lastIndexOf('.') + 1));
}
if (sourceDeclarations.containsKey(typeName)) {
return sourceDeclarations.get(typeName).getModelDeclaration();
}
try {
return super.convertToDeclaration(typeName, declarationType);
} catch(RuntimeException e) {
return null;
}
}
@Override
public void addModuleToClassPath(Module module, ArtifactResult artifact) {}
@Override
protected boolean isOverridingMethod(MethodMirror methodSymbol) {
return ((JDTMethod)methodSymbol).isOverridingMethod();
}
@Override
public Module findOrCreateModule(String pkgName) {
java.util.List<String> moduleName;
boolean isJava = false;
boolean defaultModule = false;
Module module = lookupModuleInternal(pkgName);
if (module != null) {
return module;
}
// FIXME: this is a rather simplistic view of the world
if(pkgName == null){
moduleName = Arrays.asList(Module.DEFAULT_MODULE_NAME);
defaultModule = true;
}else if(pkgName.startsWith("java.")){
moduleName = Arrays.asList("java");
isJava = true;
} else if(pkgName.startsWith("sun.")){
moduleName = Arrays.asList("sun");
isJava = true;
} else if(pkgName.startsWith("ceylon.language."))
moduleName = Arrays.asList("ceylon","language");
else{
moduleName = Arrays.asList(Module.DEFAULT_MODULE_NAME);
defaultModule = true;
}
module = moduleManager.getOrCreateModule(moduleName, null);
// make sure that when we load the ceylon language module we set it to where
// the typechecker will look for it
if(pkgName != null
&& pkgName.startsWith("ceylon.language.")
&& modules.getLanguageModule() == null){
modules.setLanguageModule(module);
}
if (module instanceof LazyModule) {
((LazyModule)module).setJava(isJava);
}
// FIXME: this can't be that easy.
module.setAvailable(true);
module.setDefault(defaultModule);
return module;
}
@Override
protected Unit getCompiledUnit(LazyPackage pkg, ClassMirror classMirror) {
Unit unit = null;
JDTClass jdtClass = ((JDTClass)classMirror);
String unitName = jdtClass.getFileName();
if (!jdtClass.isBinary()) {
for (Unit unitToTest : pkg.getUnits()) {
if (unitToTest.getFilename().equals(unitName)) {
return unitToTest;
}
}
}
unit = new ExternalUnit();
unit.setFilename(jdtClass.getFileName());
unit.setPackage(pkg);
return unit;
}
@Override
protected void logError(String message) {
System.err.println("ERROR: "+message);
}
@Override
protected void logWarning(String message) {
System.err.println("WARNING: "+message);
}
@Override
protected void logVerbose(String message) {
System.err.println("NOTE: "+message);
}
@Override
public void removeDeclarations(List<Declaration> declarations) {
List<Declaration> allDeclarations = new ArrayList<Declaration>(declarations.size());
allDeclarations.addAll(declarations);
for (Declaration declaration : declarations) {
retrieveInnerDeclarations(declaration, allDeclarations);
}
super.removeDeclarations(allDeclarations);
mustResetLookupEnvironment = true;
}
private void retrieveInnerDeclarations(Declaration declaration,
List<Declaration> allDeclarations) {
List<Declaration> members = declaration.getMembers();
allDeclarations.addAll(members);
for (Declaration member : members) {
retrieveInnerDeclarations(member, allDeclarations);
}
}
private Map<String, CeylonDeclaration> sourceDeclarations = new TreeMap<String, CeylonDeclaration>();
public void setupSourceFileObjects(List<PhasedUnit> phasedUnits) {
for (final PhasedUnit unit : phasedUnits) {
final String pkgName = unit.getPackage().getQualifiedNameString();
unit.getCompilationUnit().visit(new SourceDeclarationVisitor(){
@Override
public void loadFromSource(Tree.Declaration decl) {
String name = Util.quoteIfJavaKeyword(decl.getIdentifier().getText());
String fqn = pkgName.isEmpty() ? name : pkgName+"."+name;
sourceDeclarations.put(fqn, new CeylonDeclaration(unit, decl));
}
});
}
}
public void clearCachesOnPackage(String packageName) {
List<String> keysToRemove = new ArrayList<String>(classMirrorCache.size());
for (Entry<String, ClassMirror> element : classMirrorCache.entrySet()) {
if (element.getValue() == null) {
String className = element.getKey();
if (className != null) {
String classPackageName =className.replaceAll("\\.[^\\.]+$", "");
if (classPackageName.equals(packageName)) {
keysToRemove.add(className);
}
}
}
}
for (String keyToRemove : keysToRemove) {
classMirrorCache.remove(keyToRemove);
}
loadedPackages.remove(packageName);
}
@Override
protected Declaration makeToplevelAttribute(ClassMirror classMirror) {
if (classMirror instanceof SourceClass) {
return ((SourceClass) classMirror).getModelDeclaration();
}
return super.makeToplevelAttribute(classMirror);
}
@Override
protected Declaration makeToplevelMethod(ClassMirror classMirror) {
if (classMirror instanceof SourceClass) {
return ((SourceClass) classMirror).getModelDeclaration();
}
return super.makeToplevelMethod(classMirror);
}
@Override
protected Class makeLazyClass(ClassMirror classMirror, Class superClass,
MethodMirror constructor, boolean forTopLevelObject) {
if (classMirror instanceof SourceClass) {
return (Class) ((SourceClass) classMirror).getModelDeclaration();
}
return super.makeLazyClass(classMirror, superClass, constructor,
forTopLevelObject);
}
@Override
protected Interface makeLazyInterface(ClassMirror classMirror) {
if (classMirror instanceof SourceClass) {
return (Interface) ((SourceClass) classMirror).getModelDeclaration();
}
return super.makeLazyInterface(classMirror);
}
public TypeFactory getTypeFactory() {
return (TypeFactory) typeFactory;
}
}
|
plugins/com.redhat.ceylon.eclipse.ui/src/com/redhat/ceylon/eclipse/core/model/loader/JDTModelLoader.java
|
/*
* Copyright Red Hat Inc. and/or its affiliates and other contributors
* as indicated by the authors tag. All rights reserved.
*
* This copyrighted material is made available to anyone wishing to use,
* modify, copy, or redistribute it subject to the terms and conditions
* of the GNU General Public License version 2.
*
* This particular file is subject to the "Classpath" exception as provided in the
* LICENSE file that accompanied this code.
*
* This program is distributed in the hope that it will be useful, but WITHOUT A
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU General Public License for more details.
* You should have received a copy of the GNU General Public License,
* along with this distribution; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*/
package com.redhat.ceylon.eclipse.core.model.loader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import org.eclipse.core.resources.IFile;
import org.eclipse.jdt.core.IClassFile;
import org.eclipse.jdt.core.IJavaProject;
import org.eclipse.jdt.core.IPackageFragment;
import org.eclipse.jdt.core.IPackageFragmentRoot;
import org.eclipse.jdt.core.IType;
import org.eclipse.jdt.core.JavaModelException;
import org.eclipse.jdt.core.WorkingCopyOwner;
import org.eclipse.jdt.core.compiler.CharOperation;
import org.eclipse.jdt.internal.compiler.CompilationResult;
import org.eclipse.jdt.internal.compiler.DefaultErrorHandlingPolicies;
import org.eclipse.jdt.internal.compiler.ast.CompilationUnitDeclaration;
import org.eclipse.jdt.internal.compiler.env.AccessRestriction;
import org.eclipse.jdt.internal.compiler.env.IBinaryType;
import org.eclipse.jdt.internal.compiler.env.ICompilationUnit;
import org.eclipse.jdt.internal.compiler.env.ISourceType;
import org.eclipse.jdt.internal.compiler.impl.CompilerOptions;
import org.eclipse.jdt.internal.compiler.impl.ITypeRequestor;
import org.eclipse.jdt.internal.compiler.lookup.BinaryTypeBinding;
import org.eclipse.jdt.internal.compiler.lookup.LookupEnvironment;
import org.eclipse.jdt.internal.compiler.lookup.PackageBinding;
import org.eclipse.jdt.internal.compiler.lookup.ProblemReasons;
import org.eclipse.jdt.internal.compiler.lookup.ProblemReferenceBinding;
import org.eclipse.jdt.internal.compiler.lookup.ReferenceBinding;
import org.eclipse.jdt.internal.compiler.parser.Parser;
import org.eclipse.jdt.internal.compiler.parser.SourceTypeConverter;
import org.eclipse.jdt.internal.compiler.problem.AbortCompilationUnit;
import org.eclipse.jdt.internal.compiler.problem.DefaultProblemFactory;
import org.eclipse.jdt.internal.compiler.problem.ProblemReporter;
import org.eclipse.jdt.internal.core.ClassFile;
import org.eclipse.jdt.internal.core.JavaProject;
import org.eclipse.jdt.internal.core.SourceTypeElementInfo;
import org.eclipse.jdt.internal.core.search.BasicSearchEngine;
import com.redhat.ceylon.cmr.api.ArtifactResult;
import com.redhat.ceylon.compiler.java.loader.TypeFactory;
import com.redhat.ceylon.compiler.java.util.Util;
import com.redhat.ceylon.compiler.loader.AbstractModelLoader;
import com.redhat.ceylon.compiler.loader.TypeParser;
import com.redhat.ceylon.compiler.loader.mirror.ClassMirror;
import com.redhat.ceylon.compiler.loader.mirror.MethodMirror;
import com.redhat.ceylon.compiler.loader.model.LazyClass;
import com.redhat.ceylon.compiler.loader.model.LazyInterface;
import com.redhat.ceylon.compiler.loader.model.LazyMethod;
import com.redhat.ceylon.compiler.loader.model.LazyModule;
import com.redhat.ceylon.compiler.loader.model.LazyPackage;
import com.redhat.ceylon.compiler.loader.model.LazyValue;
import com.redhat.ceylon.compiler.typechecker.analyzer.ModuleManager;
import com.redhat.ceylon.compiler.typechecker.context.PhasedUnit;
import com.redhat.ceylon.compiler.typechecker.model.Class;
import com.redhat.ceylon.compiler.typechecker.model.Declaration;
import com.redhat.ceylon.compiler.typechecker.model.ExternalUnit;
import com.redhat.ceylon.compiler.typechecker.model.Interface;
import com.redhat.ceylon.compiler.typechecker.model.Module;
import com.redhat.ceylon.compiler.typechecker.model.Modules;
import com.redhat.ceylon.compiler.typechecker.model.Package;
import com.redhat.ceylon.compiler.typechecker.model.Unit;
import com.redhat.ceylon.compiler.typechecker.tree.Tree;
import com.redhat.ceylon.eclipse.core.model.CeylonDeclaration;
import com.redhat.ceylon.eclipse.core.model.loader.mirror.JDTClass;
import com.redhat.ceylon.eclipse.core.model.loader.mirror.JDTMethod;
import com.redhat.ceylon.eclipse.core.model.loader.mirror.SourceClass;
import com.redhat.ceylon.eclipse.core.model.loader.model.JDTModule;
import com.redhat.ceylon.eclipse.core.model.loader.model.JDTModuleManager;
import com.redhat.ceylon.eclipse.imp.builder.CeylonBuilder;
/**
* A model loader which uses the JDT model.
*
* @author David Festal <[email protected]>
*/
public class JDTModelLoader extends AbstractModelLoader {
private IJavaProject javaProject;
private CompilerOptions compilerOptions;
private ProblemReporter problemReporter;
private LookupEnvironment lookupEnvironment;
private boolean mustResetLookupEnvironment = false;
public JDTModelLoader(final ModuleManager moduleManager, final Modules modules){
this.moduleManager = moduleManager;
this.modules = modules;
this.typeFactory = new TypeFactory(moduleManager.getContext()) {
@Override
public Package getPackage() {
if(super.getPackage() == null){
super.setPackage(modules.getLanguageModule().getDirectPackage("ceylon.language"));
}
return super.getPackage();
}
/**
* Search for a declaration in the language module.
*/
private Map<String, Declaration> languageModuledeclarations = new HashMap<String, Declaration>();
public Declaration getLanguageModuleDeclaration(String name) {
if (languageModuledeclarations.containsKey(name)) {
return languageModuledeclarations.get(name);
}
languageModuledeclarations.put(name, null);
Declaration decl = super.getLanguageModuleDeclaration(name);
languageModuledeclarations.put(name, decl);
return decl;
}
};
this.typeParser = new TypeParser(this, typeFactory);
javaProject = ((JDTModuleManager)moduleManager).getJavaProject();
compilerOptions = new CompilerOptions(javaProject.getOptions(true));
compilerOptions.ignoreMethodBodies = true;
compilerOptions.storeAnnotations = true;
problemReporter = new ProblemReporter(
DefaultErrorHandlingPolicies.proceedWithAllProblems(),
compilerOptions,
new DefaultProblemFactory());
try {
lookupEnvironment = new LookupEnvironment(new ITypeRequestor() {
private Parser basicParser;
@Override
public void accept(ISourceType[] sourceTypes, PackageBinding packageBinding,
AccessRestriction accessRestriction) {
// case of SearchableEnvironment of an IJavaProject is used
ISourceType sourceType = sourceTypes[0];
while (sourceType.getEnclosingType() != null)
sourceType = sourceType.getEnclosingType();
if (sourceType instanceof SourceTypeElementInfo) {
// get source
SourceTypeElementInfo elementInfo = (SourceTypeElementInfo) sourceType;
IType type = elementInfo.getHandle();
ICompilationUnit sourceUnit = (ICompilationUnit) type.getCompilationUnit();
accept(sourceUnit, accessRestriction);
} else {
CompilationResult result = new CompilationResult(sourceType.getFileName(), 1, 1, 0);
CompilationUnitDeclaration unit =
SourceTypeConverter.buildCompilationUnit(
sourceTypes,
SourceTypeConverter.FIELD_AND_METHOD // need field and methods
| SourceTypeConverter.MEMBER_TYPE, // need member types
// no need for field initialization
lookupEnvironment.problemReporter,
result);
lookupEnvironment.buildTypeBindings(unit, accessRestriction);
lookupEnvironment.completeTypeBindings(unit, true);
}
}
@Override
public void accept(IBinaryType binaryType, PackageBinding packageBinding,
AccessRestriction accessRestriction) {
lookupEnvironment.createBinaryTypeFrom(binaryType, packageBinding, accessRestriction);
}
@Override
public void accept(ICompilationUnit sourceUnit,
AccessRestriction accessRestriction) {
// Switch the current policy and compilation result for this unit to the requested one.
CompilationResult unitResult = new CompilationResult(sourceUnit, 1, 1, compilerOptions.maxProblemsPerUnit);
try {
CompilationUnitDeclaration parsedUnit = basicParser().dietParse(sourceUnit, unitResult);
lookupEnvironment.buildTypeBindings(parsedUnit, accessRestriction);
lookupEnvironment.completeTypeBindings(parsedUnit, true);
} catch (AbortCompilationUnit e) {
// at this point, currentCompilationUnitResult may not be sourceUnit, but some other
// one requested further along to resolve sourceUnit.
if (unitResult.compilationUnit == sourceUnit) { // only report once
//requestor.acceptResult(unitResult.tagAsAccepted());
} else {
throw e; // want to abort enclosing request to compile
}
}
// Display unit error in debug mode
if (BasicSearchEngine.VERBOSE) {
if (unitResult.problemCount > 0) {
System.out.println(unitResult);
}
}
}
private Parser basicParser() {
if (this.basicParser == null) {
ProblemReporter problemReporter =
new ProblemReporter(
DefaultErrorHandlingPolicies.proceedWithAllProblems(),
compilerOptions,
new DefaultProblemFactory());
this.basicParser = new Parser(problemReporter, false);
this.basicParser.reportOnlyOneSyntaxError = true;
}
return this.basicParser;
}
}, compilerOptions, problemReporter, ((JavaProject)javaProject).newSearchableNameEnvironment((WorkingCopyOwner)null));
} catch (JavaModelException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
// TODO : remove when the bug in the AbstractModelLoader is corrected
@Override
public LazyPackage findOrCreatePackage(Module module, String pkgName) {
LazyPackage pkg = super.findOrCreatePackage(module, pkgName);
if ("".equals(pkgName)) {
pkg.setName(Collections.<String>emptyList());
}
Module currentModule = pkg.getModule();
if (currentModule.equals(modules.getDefaultModule()) && ! currentModule.equals(module)) {
currentModule.getPackages().remove(pkg);
pkg.setModule(null);
if (module != null) {
module.getPackages().add(pkg);
pkg.setModule(module);
}
}
return pkg;
}
@Override
public void loadStandardModules() {
/*
* We start by loading java.lang and ceylon.language because we will need them no matter what.
*/
Module javaModule = findOrCreateModule("java.lang");
Package javaLangPackage = findOrCreatePackage(javaModule, "java.lang");
javaLangPackage.setShared(true);
loadPackage("java.lang", false);
loadPackage("com.redhat.ceylon.compiler.java.metadata", false);
}
@Override
public void loadPackage(String packageName, boolean loadDeclarations) {
packageName = Util.quoteJavaKeywords(packageName);
if(loadDeclarations && !loadedPackages.add(packageName)){
return;
}
if(!loadDeclarations)
return;
Module module = lookupModule(packageName);
if (module instanceof JDTModule) {
JDTModule jdtModule = (JDTModule) module;
List<IPackageFragmentRoot> roots = jdtModule.getPackageFragmentRoots();
IPackageFragment packageFragment = null;
for (IPackageFragmentRoot root : roots) {
try {
if (CeylonBuilder.isCeylonSourceEntry(root.getRawClasspathEntry())) {
packageFragment = root.getPackageFragment(packageName);
if(packageFragment.exists() && loadDeclarations) {
try {
for (IClassFile classFile : packageFragment.getClassFiles()) {
IType type = classFile.getType();
if (! type.isMember() && !sourceDeclarations.containsKey(type.getFullyQualifiedName())) {
convertToDeclaration(type.getFullyQualifiedName(), DeclarationType.VALUE);
}
}
for (org.eclipse.jdt.core.ICompilationUnit compilationUnit : packageFragment.getCompilationUnits()) {
for (IType type : compilationUnit.getTypes()) {
if (! type.isMember() && !sourceDeclarations.containsKey(type.getFullyQualifiedName())) {
convertToDeclaration(type.getFullyQualifiedName(), DeclarationType.VALUE);
}
}
}
} catch (JavaModelException e) {
e.printStackTrace();
}
}
}
} catch (JavaModelException e) {
e.printStackTrace();
}
}
}
}
private Module lookupModule(String packageName) {
Module module = lookupModuleInternal(packageName);
if (module != null) {
return module;
}
return modules.getDefaultModule();
}
public Module lookupModuleInternal(String packageName) {
for(Module module : modules.getListOfModules()){
if(module instanceof LazyModule){
if(((LazyModule)module).containsPackage(packageName))
return module;
}else if(isSubPackage(module.getNameAsString(), packageName))
return module;
}
return null;
}
private boolean isSubPackage(String moduleName, String pkgName) {
return pkgName.equals(moduleName)
|| pkgName.startsWith(moduleName+".");
}
synchronized private LookupEnvironment getLookupEnvironment() {
if (mustResetLookupEnvironment) {
lookupEnvironment.reset();
mustResetLookupEnvironment = false;
}
return lookupEnvironment;
}
@Override
public synchronized ClassMirror lookupNewClassMirror(String name) {
if (sourceDeclarations.containsKey(name)) {
return new SourceClass(sourceDeclarations.get(name));
}
try {
IType type = javaProject.findType(name);
if (type == null) {
return null;
}
LookupEnvironment theLookupEnvironment = getLookupEnvironment();
if (type.isBinary()) {
ClassFile classFile = (ClassFile) type.getClassFile();
if (classFile != null) {
IBinaryType binaryType = classFile.getBinaryTypeInfo((IFile) classFile.getCorrespondingResource(), true);
BinaryTypeBinding binaryTypeBinding = theLookupEnvironment.cacheBinaryType(binaryType, null);
if (binaryTypeBinding == null) {
char[][] compoundName = CharOperation.splitOn('/', binaryType.getName());
ReferenceBinding existingType = theLookupEnvironment.getCachedType(compoundName);
if (existingType == null || ! (existingType instanceof BinaryTypeBinding)) {
return null;
}
binaryTypeBinding = (BinaryTypeBinding) existingType;
}
return new JDTClass(binaryTypeBinding, theLookupEnvironment);
}
} else {
char[][] compoundName = CharOperation.splitOn('.', type.getFullyQualifiedName().toCharArray());
ReferenceBinding referenceBinding = theLookupEnvironment.getType(compoundName);
if (referenceBinding != null) {
if (referenceBinding instanceof ProblemReferenceBinding) {
ProblemReferenceBinding problemReferenceBinding = (ProblemReferenceBinding) referenceBinding;
if (problemReferenceBinding.problemId() == ProblemReasons.InternalNameProvided) {
referenceBinding = problemReferenceBinding.closestReferenceMatch();
} else {
System.out.println(ProblemReferenceBinding.problemReasonString(problemReferenceBinding.problemId()));
return null;
}
}
return new JDTClass(referenceBinding, theLookupEnvironment);
}
}
} catch (JavaModelException e) {
e.printStackTrace();
}
return null;
}
@Override
public Declaration convertToDeclaration(String typeName,
DeclarationType declarationType) {
if (typeName.startsWith("ceylon.language")) {
return typeFactory.getLanguageModuleDeclaration(typeName.substring(typeName.lastIndexOf('.') + 1));
}
try {
return super.convertToDeclaration(typeName, declarationType);
} catch(RuntimeException e) {
return null;
}
}
@Override
public void addModuleToClassPath(Module module, ArtifactResult artifact) {}
@Override
protected boolean isOverridingMethod(MethodMirror methodSymbol) {
return ((JDTMethod)methodSymbol).isOverridingMethod();
}
@Override
public Module findOrCreateModule(String pkgName) {
java.util.List<String> moduleName;
boolean isJava = false;
boolean defaultModule = false;
Module module = lookupModuleInternal(pkgName);
if (module != null) {
return module;
}
// FIXME: this is a rather simplistic view of the world
if(pkgName == null){
moduleName = Arrays.asList(Module.DEFAULT_MODULE_NAME);
defaultModule = true;
}else if(pkgName.startsWith("java.")){
moduleName = Arrays.asList("java");
isJava = true;
} else if(pkgName.startsWith("sun.")){
moduleName = Arrays.asList("sun");
isJava = true;
} else if(pkgName.startsWith("ceylon.language."))
moduleName = Arrays.asList("ceylon","language");
else{
moduleName = Arrays.asList(Module.DEFAULT_MODULE_NAME);
defaultModule = true;
}
module = moduleManager.getOrCreateModule(moduleName, null);
// make sure that when we load the ceylon language module we set it to where
// the typechecker will look for it
if(pkgName != null
&& pkgName.startsWith("ceylon.language.")
&& modules.getLanguageModule() == null){
modules.setLanguageModule(module);
}
if (module instanceof LazyModule) {
((LazyModule)module).setJava(isJava);
}
// FIXME: this can't be that easy.
module.setAvailable(true);
module.setDefault(defaultModule);
return module;
}
@Override
protected Unit getCompiledUnit(LazyPackage pkg, ClassMirror classMirror) {
Unit unit = null;
JDTClass jdtClass = ((JDTClass)classMirror);
String unitName = jdtClass.getFileName();
if (!jdtClass.isBinary()) {
for (Unit unitToTest : pkg.getUnits()) {
if (unitToTest.getFilename().equals(unitName)) {
return unitToTest;
}
}
}
unit = new ExternalUnit();
unit.setFilename(jdtClass.getFileName());
unit.setPackage(pkg);
return unit;
}
@Override
protected void logError(String message) {
System.err.println("ERROR: "+message);
}
@Override
protected void logWarning(String message) {
System.err.println("WARNING: "+message);
}
@Override
protected void logVerbose(String message) {
System.err.println("NOTE: "+message);
}
@Override
public void removeDeclarations(List<Declaration> declarations) {
List<Declaration> allDeclarations = new ArrayList<Declaration>(declarations.size());
allDeclarations.addAll(declarations);
for (Declaration declaration : declarations) {
retrieveInnerDeclarations(declaration, allDeclarations);
}
super.removeDeclarations(allDeclarations);
mustResetLookupEnvironment = true;
}
private void retrieveInnerDeclarations(Declaration declaration,
List<Declaration> allDeclarations) {
List<Declaration> members = declaration.getMembers();
allDeclarations.addAll(members);
for (Declaration member : members) {
retrieveInnerDeclarations(member, allDeclarations);
}
}
private Map<String, CeylonDeclaration> sourceDeclarations = new TreeMap<String, CeylonDeclaration>();
public void setupSourceFileObjects(List<PhasedUnit> phasedUnits) {
for (final PhasedUnit unit : phasedUnits) {
final String pkgName = unit.getPackage().getQualifiedNameString();
unit.getCompilationUnit().visit(new SourceDeclarationVisitor(){
@Override
public void loadFromSource(Tree.Declaration decl) {
String name = Util.quoteIfJavaKeyword(decl.getIdentifier().getText());
String fqn = pkgName.isEmpty() ? name : pkgName+"."+name;
sourceDeclarations.put(fqn, new CeylonDeclaration(unit, decl));
}
});
}
}
public void clearCachesOnPackage(String packageName) {
List<String> keysToRemove = new ArrayList<String>(classMirrorCache.size());
for (Entry<String, ClassMirror> element : classMirrorCache.entrySet()) {
if (element.getValue() == null) {
String className = element.getKey();
if (className != null) {
                    String classPackageName = className.replaceAll("\\.[^\\.]+$", "");
if (classPackageName.equals(packageName)) {
keysToRemove.add(className);
}
}
}
}
for (String keyToRemove : keysToRemove) {
classMirrorCache.remove(keyToRemove);
}
loadedPackages.remove(packageName);
}
@Override
protected Declaration makeToplevelAttribute(ClassMirror classMirror) {
if (classMirror instanceof SourceClass) {
return ((SourceClass) classMirror).getModelDeclaration();
}
return super.makeToplevelAttribute(classMirror);
}
@Override
protected Declaration makeToplevelMethod(ClassMirror classMirror) {
if (classMirror instanceof SourceClass) {
return ((SourceClass) classMirror).getModelDeclaration();
}
return super.makeToplevelMethod(classMirror);
}
@Override
protected Class makeLazyClass(ClassMirror classMirror, Class superClass,
MethodMirror constructor, boolean forTopLevelObject) {
if (classMirror instanceof SourceClass) {
return (Class) ((SourceClass) classMirror).getModelDeclaration();
}
return super.makeLazyClass(classMirror, superClass, constructor,
forTopLevelObject);
}
@Override
protected Interface makeLazyInterface(ClassMirror classMirror) {
if (classMirror instanceof SourceClass) {
return (Interface) ((SourceClass) classMirror).getModelDeclaration();
}
return super.makeLazyInterface(classMirror);
}
public TypeFactory getTypeFactory() {
return (TypeFactory) typeFactory;
}
}
|
In convertToDeclaration(), return lazy-loaded decls for source decls
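A minimal sketch of the lookup-before-delegation pattern this change introduces; the override signature and the DeclarationType parameter name are assumptions, only the sourceDeclarations check itself comes from the diff below.

@Override
public Declaration convertToDeclaration(String typeName, DeclarationType declarationType) {
    // Declarations registered from source in setupSourceFileObjects() take precedence
    // over re-loading the same type from a binary class mirror.
    if (sourceDeclarations.containsKey(typeName)) {
        return sourceDeclarations.get(typeName).getModelDeclaration();
    }
    return super.convertToDeclaration(typeName, declarationType);
}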
|
plugins/com.redhat.ceylon.eclipse.ui/src/com/redhat/ceylon/eclipse/core/model/loader/JDTModelLoader.java
|
In convertToDeclaration(), return lazy-loaded decls for source decls
|
<ide><path>plugins/com.redhat.ceylon.eclipse.ui/src/com/redhat/ceylon/eclipse/core/model/loader/JDTModelLoader.java
<ide> if (typeName.startsWith("ceylon.language")) {
<ide> return typeFactory.getLanguageModuleDeclaration(typeName.substring(typeName.lastIndexOf('.') + 1));
<ide> }
<add> if (sourceDeclarations.containsKey(typeName)) {
<add> return sourceDeclarations.get(typeName).getModelDeclaration();
<add> }
<ide> try {
<ide> return super.convertToDeclaration(typeName, declarationType);
<ide> } catch(RuntimeException e) {
|
|
JavaScript
|
mit
|
4cb86b8d0009ff9bccc37b4a77d0cf28e22df4cd
| 0 |
dsteplight/organize-folder
|
'use strict';
var chokidar = require('chokidar');
const readChunk = require('read-chunk'); // npm install read-chunk
const fileType = require('file-type');
const mv = require('mv');
const path_object = require('path');
var fs = require('fs');
var Buffer = require('buffer').Buffer;
var log = console.log.bind(console);
var config = {};
config['watched_folder'] = '/Users/dsteplight/Downloads';
config['jpeg_folder'] = '/Users/dsteplight/Documents/JPEG';
config['pdf_folder'] = '/Users/dsteplight/Documents/PDF';
for (var key in config)
{
if ((typeof config[key]) != 'undefined' && key != 'watched_folder') {
console.log(config[key]);
}
}
var watcher = chokidar.watch(config.watched_folder, {
ignored: /[\/\\]\./,
interval: 10000,
persistent: true,
depth: 0
});
watcher
.on('add', function(path) {
//const buffer = readChunk.sync(path, 0, 262);
//var path_type = fileType(buffer);
const allowed_file_types = [".jpg", ".jpeg"];
const file_name = path_object.basename(path);
const file_type = path_object.extname(path);
const target_folder = config.jpeg_folder;
const targeted_file = target_folder+path_object.sep+file_name;
if( file_type !== null )
{
if( allowed_file_types.indexOf(file_type) !== -1)
{
mv(path, targeted_file, function(err) {
log('This file has been moved to ', targeted_file);
});
}
}
})
.on('addDir', function(path) {
// console.log(path);
})
.on('change', function(path) {
})
.on('unlink', function(path) {
})
.on('unlinkDir', function(path) {
})
.on('error', function(error) {
})
.on('ready', function() { log('Initial scan complete. Ready for changes.'); })
|
clean-folder.js
|
var chokidar = require('chokidar');
const readChunk = require('read-chunk'); // npm install read-chunk
const fileType = require('file-type');
const mv = require('mv');
const path_object = require('path');
var fs = require('fs');
var Buffer = require('buffer').Buffer;
var log = console.log.bind(console);
const config = {
'watched_folder': '/Users/dsteplight/Downloads',
'jpeg_folder': '/Users/dsteplight/Documents/JPEG',
'pdf_folder': '/Users/dsteplight/Documents/PDF'
};
var watcher = chokidar.watch(config.watched_folder, {
ignored: /[\/\\]\./,
interval: 10000,
persistent: true,
depth: 0
});
watcher
.on('add', function(path) {
//const buffer = readChunk.sync(path, 0, 262);
//var path_type = fileType(buffer);
const allowed_file_types = [".jpg", ".jpeg"];
const file_name = path_object.basename(path);
const file_type = path_object.extname(path);
const target_folder = config.jpeg_folder;
const targeted_file = target_folder+path_object.sep+file_name;
if( file_type !== null )
{
if( allowed_file_types.indexOf(file_type) !== -1)
{
mv(path, targeted_file, function(err) {
log('This file has been moved to ', targeted_file);
});
}
}
})
.on('addDir', function(path) {
console.log(path);
})
.on('change', function(path) {
})
.on('unlink', function(path) {
})
.on('unlinkDir', function(path) {
})
.on('error', function(error) {
})
.on('ready', function() { log('Initial scan complete. Ready for changes.'); })
|
Looping through config folders except the watched folder
|
clean-folder.js
|
Looping through config folders except the watched folder
|
<ide><path>clean-folder.js
<add>'use strict';
<ide> var chokidar = require('chokidar');
<ide> const readChunk = require('read-chunk'); // npm install read-chunk
<ide> const fileType = require('file-type');
<ide>
<ide> var log = console.log.bind(console);
<ide>
<add>var config = {};
<add> config['watched_folder'] = '/Users/dsteplight/Downloads';
<add> config['jpeg_folder'] = '/Users/dsteplight/Documents/JPEG';
<add> config['pdf_folder'] = '/Users/dsteplight/Documents/PDF';
<ide>
<del>const config = {
<del> 'watched_folder': '/Users/dsteplight/Downloads',
<del> 'jpeg_folder': '/Users/dsteplight/Documents/JPEG',
<del> 'pdf_folder': '/Users/dsteplight/Documents/PDF'
<del>};
<add>for (var key in config)
<add>{
<add> if ((typeof config[key]) != 'undefined' && key != 'watched_folder') {
<add> console.log(config[key]);
<add> }
<add>}
<ide>
<ide> var watcher = chokidar.watch(config.watched_folder, {
<ide> ignored: /[\/\\]\./,
<ide>
<ide> })
<ide> .on('addDir', function(path) {
<del> console.log(path);
<add> // console.log(path);
<ide> })
<ide> .on('change', function(path) {
<ide>
|
|
Java
|
mit
|
856651f37b1d60a89393a2b4df8d8b0dd7d0c81b
| 0 |
testpress/android,testpress/android,testpress/android,testpress/android,testpress/android
|
package in.testpress.testpress.core;
import android.content.Context;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.TimeZone;
import in.testpress.testpress.TestpressApplication;
import in.testpress.testpress.models.Category;
import in.testpress.testpress.models.CategoryDao;
import in.testpress.testpress.models.DaoSession;
import in.testpress.testpress.models.Post;
import in.testpress.testpress.models.PostDao;
import in.testpress.testpress.models.TestpressApiResponse;
import in.testpress.testpress.util.Ln;
import retrofit.RetrofitError;
public class PostsPager extends ResourcePager<Post> {
TestpressApiResponse<Post> response;
public PostsPager(TestpressService service, Context context) {
super(service);
}
@Override
public ResourcePager<Post> clear() {
response = null;
super.clear();
return this;
}
@Override
protected Object getId(Post resource) {
return resource.getId();
}
@Override
public List<Post> getItems(int page, int size) throws RetrofitError {
String url;
if (response == null) {
url = Constants.Http.URL_POSTS_FRAG;
} else {
try {
URL full = new URL(response.getNext());
url = full.getFile().substring(1);
} catch (MalformedURLException e) {
e.printStackTrace();
url = null;
}
}
if (url != null) {
response = service.getPosts(url, queryParams);
return response.getResults();
}
return Collections.emptyList();
}
@Override
public boolean next() throws IOException {
SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
simpleDateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
boolean emptyPage = false;
networkFail = false;
try {
for (int i = 0; i < count && hasNext(); i++) {
Ln.d("PostsPager Getting Items");
List<Post> resourcePage = getItems(page, -1);
Ln.d("PostsPager Items Received");
emptyPage = resourcePage.isEmpty();
if (emptyPage)
break;
Ln.d("Looping through resources");
for (Post resource : resourcePage) {
resource = register(resource);
if (resource == null)
continue;
if(resource.category != null) {
resource.setCategory(resource.category);
}
Ln.d("Category ID " + resource.getCategoryId());
resource.setCreatedDate(simpleDateFormat.parse(resource.getCreated()).getTime());
resources.put(getId(resource), resource);
}
Ln.d("Looping resources over");
}
// Set page to count value if first call after call to reset()
if (count > 1) {
page = count;
count = 1;
}
page++;
} catch (ParseException e) {
Ln.d("ParseException " + e);
} catch (Exception e) {
hasMore = false;
networkFail = true;
throw e;
}
hasMore = hasNext() && !emptyPage;
return hasMore;
}
@Override
public boolean hasNext() {
if (response == null || response.getNext() != null) {
return true;
}
return false;
}
public int getTotalCount() {
return response.getCount();
}
}
|
app/src/main/java/in/testpress/testpress/core/PostsPager.java
|
package in.testpress.testpress.core;
import android.content.Context;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import in.testpress.testpress.TestpressApplication;
import in.testpress.testpress.models.Category;
import in.testpress.testpress.models.CategoryDao;
import in.testpress.testpress.models.DaoSession;
import in.testpress.testpress.models.Post;
import in.testpress.testpress.models.PostDao;
import in.testpress.testpress.models.TestpressApiResponse;
import in.testpress.testpress.util.Ln;
import retrofit.RetrofitError;
public class PostsPager extends ResourcePager<Post> {
TestpressApiResponse<Post> response;
public PostsPager(TestpressService service, Context context) {
super(service);
}
@Override
public ResourcePager<Post> clear() {
response = null;
super.clear();
return this;
}
@Override
protected Object getId(Post resource) {
return resource.getId();
}
@Override
public List<Post> getItems(int page, int size) throws RetrofitError {
String url;
if (response == null) {
url = Constants.Http.URL_POSTS_FRAG;
} else {
try {
URL full = new URL(response.getNext());
url = full.getFile().substring(1);
} catch (MalformedURLException e) {
e.printStackTrace();
url = null;
}
}
if (url != null) {
response = service.getPosts(url, queryParams);
return response.getResults();
}
return Collections.emptyList();
}
@Override
public boolean next() throws IOException {
SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
boolean emptyPage = false;
networkFail = false;
try {
for (int i = 0; i < count && hasNext(); i++) {
Ln.d("PostsPager Getting Items");
List<Post> resourcePage = getItems(page, -1);
Ln.d("PostsPager Items Received");
emptyPage = resourcePage.isEmpty();
if (emptyPage)
break;
Ln.d("Looping through resources");
for (Post resource : resourcePage) {
resource = register(resource);
if (resource == null)
continue;
if(resource.category != null) {
resource.setCategory(resource.category);
}
Ln.d("Category ID " + resource.getCategoryId());
resource.setCreatedDate(simpleDateFormat.parse(resource.getCreated()).getTime());
resources.put(getId(resource), resource);
}
Ln.d("Looping resources over");
}
// Set page to count value if first call after call to reset()
if (count > 1) {
page = count;
count = 1;
}
page++;
} catch (ParseException e) {
Ln.d("ParseException " + e);
} catch (Exception e) {
hasMore = false;
networkFail = true;
throw e;
}
hasMore = hasNext() && !emptyPage;
return hasMore;
}
@Override
public boolean hasNext() {
if (response == null || response.getNext() != null) {
return true;
}
return false;
}
public int getTotalCount() {
return response.getCount();
}
}
|
Fix date created display in Posts list
There was a mismatch of timezones. Now the input string is parsed
in the UTC timezone.
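A self-contained sketch (not part of the app code) of why the explicit time zone matters: the 'Z' in the pattern is a literal rather than a zone designator, so without setTimeZone() the string is parsed in the device's default zone and the displayed creation date shifts by the local UTC offset. The sample timestamp is made up.

import java.text.SimpleDateFormat;
import java.util.TimeZone;

public class UtcParseSketch {
    public static void main(String[] args) throws Exception {
        SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
        // Parse the API's UTC timestamps as UTC instead of the JVM/device default zone.
        fmt.setTimeZone(TimeZone.getTimeZone("UTC"));
        System.out.println(fmt.parse("2015-06-01T10:30:00Z").getTime());
    }
}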
|
app/src/main/java/in/testpress/testpress/core/PostsPager.java
|
Fix date created display in Posts list
|
<ide><path>app/src/main/java/in/testpress/testpress/core/PostsPager.java
<ide> import java.util.ArrayList;
<ide> import java.util.Collections;
<ide> import java.util.List;
<add>import java.util.TimeZone;
<ide>
<ide> import in.testpress.testpress.TestpressApplication;
<ide> import in.testpress.testpress.models.Category;
<ide> @Override
<ide> public boolean next() throws IOException {
<ide> SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
<add> simpleDateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
<add>
<ide> boolean emptyPage = false;
<ide> networkFail = false;
<ide> try {
|
|
Java
|
epl-1.0
|
49d2953c06074b95fcb9108ed9067b3279789331
| 0 |
ControlSystemStudio/cs-studio,ControlSystemStudio/cs-studio,ControlSystemStudio/cs-studio,ESSICS/cs-studio,ESSICS/cs-studio,ESSICS/cs-studio,css-iter/cs-studio,ESSICS/cs-studio,css-iter/cs-studio,ESSICS/cs-studio,ESSICS/cs-studio,ControlSystemStudio/cs-studio,ESSICS/cs-studio,css-iter/cs-studio,css-iter/cs-studio,css-iter/cs-studio,css-iter/cs-studio,css-iter/cs-studio,ControlSystemStudio/cs-studio,ControlSystemStudio/cs-studio
|
/*
* Copyright (c) 2010 Stiftung Deutsches Elektronen-Synchrotron,
* Member of the Helmholtz Association, (DESY), HAMBURG, GERMANY.
*
* THIS SOFTWARE IS PROVIDED UNDER THIS LICENSE ON AN "../AS IS" BASIS.
* WITHOUT WARRANTY OF ANY KIND, EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED
* TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR PARTICULAR PURPOSE AND
* NON-INFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
* FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE. SHOULD THE SOFTWARE PROVE DEFECTIVE
* IN ANY RESPECT, THE USER ASSUMES THE COST OF ANY NECESSARY SERVICING, REPAIR OR
* CORRECTION. THIS DISCLAIMER OF WARRANTY CONSTITUTES AN ESSENTIAL PART OF THIS LICENSE.
* NO USE OF ANY SOFTWARE IS AUTHORIZED HEREUNDER EXCEPT UNDER THIS DISCLAIMER.
* DESY HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS,
* OR MODIFICATIONS.
* THE FULL LICENSE SPECIFYING FOR THE SOFTWARE THE REDISTRIBUTION, MODIFICATION,
* USAGE AND OTHER RIGHTS AND OBLIGATIONS IS INCLUDED WITH THE DISTRIBUTION OF THIS
* PROJECT IN THE FILE LICENSE.HTML. IF THE LICENSE IS NOT INCLUDED YOU MAY FIND A COPY
* AT HTTP://WWW.DESY.DE/LEGAL/LICENSE.HTM
*
* $Id$
*/
package org.csstudio.platform.test;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Properties;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import org.apache.log4j.Logger;
import org.csstudio.platform.logging.CentralLogger;
import org.eclipse.core.runtime.Platform;
import org.osgi.framework.Bundle;
/**
* Test data provider that yields access to test configuration files that are
* specific for different test sites (e.g. SNS, DESY, ITER).
*
* Specify your site in your eclipse launch configuration in the VM arguments list with e.g.:
* -DsiteId=DESY
*
* In your plugin create an ini file, e.g. desyTestConfiguration.ini, where the file
 * name prefix is the one configured in the SiteId configuration and the file name main part
* is TestConfiguration.ini
*
* @author bknerr
* @author $Author$
* @version $Revision$
* @since 14.07.2010
*/
public final class TestDataProvider {
private static final Logger LOG = CentralLogger.getInstance().getLogger(TestDataProvider.class);
private static final String CONFIGURATION_FILE_SUFFIX = "TestConfiguration.ini";
private static final String SENSITIVE_FILE_KEY = "sensitiveConfigFilePath";
private static TestDataProvider INSTANCE;
private static Properties PROPERTIES;
private final String _pluginId;
/**
* Constructor.
* @throws IOException
*/
private TestDataProvider(@Nonnull final String pluginId) throws IOException {
_pluginId = pluginId;
PROPERTIES = new Properties();
}
/**
* @param pluginId
* @param testConfigFileName
* @throws FileNotFoundException
* @throws IOException
*/
private static void loadProperties(@Nonnull final String pluginId,
@Nonnull final String testConfigFileName)
throws FileNotFoundException, IOException {
openStreamAndLoadProps(pluginId, testConfigFileName);
final String secretFile = findSensitiveDataFile();
if (secretFile != null) {
openStreamAndLoadProps(pluginId, secretFile);
}
}
/**
* @param pluginId
* @param fileName
* @return
*/
@CheckForNull
private static String findSensitiveDataFile() {
if (PROPERTIES != null) {
final String secretFilePath = (String) PROPERTIES.get(SENSITIVE_FILE_KEY);
if (secretFilePath != null) {
return secretFilePath;
}
}
return null;
}
/**
* @param pluginId
* @param testConfigFileName
* @throws MalformedURLException
* @throws FileNotFoundException
* @throws IOException
*/
private static void openStreamAndLoadProps(@Nonnull final String pluginId,
@Nonnull final String testConfigFileName)
throws MalformedURLException, FileNotFoundException, IOException {
InputStream openStream = null;
try {
final URL resource = locateResource(pluginId, testConfigFileName);
openStream = resource.openStream();
PROPERTIES.load(openStream);
} finally {
if (openStream != null) {
openStream.close();
}
}
}
/**
* @param pluginId
* @param testConfigFileName
* @return
* @throws MalformedURLException
* @throws FileNotFoundException
*/
@Nonnull
private static URL locateResource(@Nonnull final String pluginId,
@Nonnull final String testConfigFileName) throws MalformedURLException,
FileNotFoundException {
final Bundle bundle = Platform.getBundle(pluginId);
URL resource = null;
if (bundle == null) {
LOG.warn("Bundle could not be located. Try to find config file via current working dir.");
final String curDir = System.getProperty("user.dir");
final File configFile = new File(curDir + File.separator + testConfigFileName);
resource = configFile.toURL();
} else {
resource = bundle.getResource(testConfigFileName);
}
if (resource == null) {
throw new FileNotFoundException("Test configuration file for plugin " + pluginId +
" and file name " + testConfigFileName +
" does not exist");
}
return resource;
}
/**
* Retrieve test config property from file
* @param key
* @return the property object
*/
@CheckForNull
public Object get(@Nonnull final String key) {
return PROPERTIES.get(key);
}
/**
* @param pluginId id of the plugin in which the tests and their config file reside
* @return the instance of the data provider
* @throws TestProviderException
*/
@Nonnull
public static TestDataProvider getInstance(@Nonnull final String pluginId)
throws TestProviderException {
String testConfigFileName = "";
try {
synchronized (TestDataProvider.class) {
if (INSTANCE == null) {
INSTANCE = new TestDataProvider(pluginId);
testConfigFileName = createSiteSpecificName();
loadProperties(pluginId, testConfigFileName);
}
}
if (! INSTANCE._pluginId.equals(pluginId)) {
TestDataProvider.PROPERTIES.clear();
testConfigFileName = createSiteSpecificName();
loadProperties(pluginId, testConfigFileName);
}
return INSTANCE;
} catch (final IOException e) {
throw new TestProviderException("Test config file " + testConfigFileName + " couldn't be found or opened.", e);
}
}
@Nonnull
private static String createSiteSpecificName() throws IllegalArgumentException {
final String siteProp = System.getProperty("siteId");
if (siteProp == null) {
throw new IllegalArgumentException("There isn't any jvm arg -DsiteId=xxx configured. Please do so in your launch configuration.");
}
SiteId site;
try {
site = SiteId.valueOf(siteProp);
} catch (final IllegalArgumentException e) {
throw new IllegalArgumentException("The site enum type for jvm arg -DsiteId="+ siteProp +" is unknown. ", e);
}
final String testConfigFileName = site.getPrefix() + CONFIGURATION_FILE_SUFFIX;
return testConfigFileName;
}
}
|
core/plugins/org.csstudio.platform/src/org/csstudio/platform/test/TestDataProvider.java
|
/*
* Copyright (c) 2010 Stiftung Deutsches Elektronen-Synchrotron,
* Member of the Helmholtz Association, (DESY), HAMBURG, GERMANY.
*
* THIS SOFTWARE IS PROVIDED UNDER THIS LICENSE ON AN "../AS IS" BASIS.
* WITHOUT WARRANTY OF ANY KIND, EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED
* TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR PARTICULAR PURPOSE AND
* NON-INFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
* FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE. SHOULD THE SOFTWARE PROVE DEFECTIVE
* IN ANY RESPECT, THE USER ASSUMES THE COST OF ANY NECESSARY SERVICING, REPAIR OR
* CORRECTION. THIS DISCLAIMER OF WARRANTY CONSTITUTES AN ESSENTIAL PART OF THIS LICENSE.
* NO USE OF ANY SOFTWARE IS AUTHORIZED HEREUNDER EXCEPT UNDER THIS DISCLAIMER.
* DESY HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS,
* OR MODIFICATIONS.
* THE FULL LICENSE SPECIFYING FOR THE SOFTWARE THE REDISTRIBUTION, MODIFICATION,
* USAGE AND OTHER RIGHTS AND OBLIGATIONS IS INCLUDED WITH THE DISTRIBUTION OF THIS
* PROJECT IN THE FILE LICENSE.HTML. IF THE LICENSE IS NOT INCLUDED YOU MAY FIND A COPY
* AT HTTP://WWW.DESY.DE/LEGAL/LICENSE.HTM
*
* $Id$
*/
package org.csstudio.platform.test;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.Properties;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import org.eclipse.core.runtime.Platform;
import org.osgi.framework.Bundle;
/**
* Test data provider that yields access to test configuration files that are
* specific for different test sites (e.g. SNS, DESY, ITER).
*
* Specify your site in your eclipse launch configuration in the VM arguments list with e.g.:
* -DsiteId=DESY
*
* In your plugin create an ini file, e.g. desyTestConfiguration.ini, where the file
 * name prefix is the one configured in the SiteId configuration and the file name main part
* is TestConfiguration.ini
*
* @author bknerr
* @author $Author$
* @version $Revision$
* @since 14.07.2010
*/
public final class TestDataProvider {
private static final String CONFIGURATION_FILE_SUFFIX = "TestConfiguration.ini";
private static TestDataProvider INSTANCE;
private static Properties PROPERTIES;
private final String _pluginId;
/**
* Constructor.
* @throws IOException
*/
private TestDataProvider(@Nonnull final String pluginId) throws IOException {
_pluginId = pluginId;
PROPERTIES = new Properties();
}
/**
* @param pluginId
* @param testConfigFileName
* @throws FileNotFoundException
* @throws IOException
*/
private static void loadProperties(@Nonnull final String pluginId,
@Nonnull final String testConfigFileName)
throws FileNotFoundException, IOException {
InputStream openStream = null;
try {
final Bundle bundle = Platform.getBundle(pluginId);
final URL resource = bundle.getResource(testConfigFileName);
if (resource == null) {
throw new FileNotFoundException("Test configuration file for plugin " + pluginId +
" and file name " + testConfigFileName +
" does not exist");
}
openStream = resource.openStream();
PROPERTIES.load(openStream);
} finally {
if (openStream != null) {
openStream.close();
}
}
}
/**
* Retrieve test config property from file
* @param key
* @return the property object
*/
@CheckForNull
public Object get(@Nonnull final String key) {
return PROPERTIES.get(key);
}
/**
* @param pluginId id of the plugin in which the tests and their config file reside
* @return the instance of the data provider
* @throws TestProviderException
*/
@Nonnull
public static TestDataProvider getInstance(@Nonnull final String pluginId)
throws TestProviderException {
String testConfigFileName = "";
try {
synchronized (TestDataProvider.class) {
if (INSTANCE == null) {
INSTANCE = new TestDataProvider(pluginId);
testConfigFileName = createSiteSpecificName();
loadProperties(pluginId, testConfigFileName);
}
}
if (! INSTANCE._pluginId.equals(pluginId)) {
TestDataProvider.PROPERTIES.clear();
testConfigFileName = createSiteSpecificName();
loadProperties(pluginId, testConfigFileName);
}
return INSTANCE;
} catch (final IOException e) {
throw new TestProviderException("Test config file " + testConfigFileName + " couldn't be found or opened.", e);
}
}
@Nonnull
private static String createSiteSpecificName() throws IllegalArgumentException {
final String siteProp = System.getProperty("siteId");
if (siteProp == null) {
throw new IllegalArgumentException("There isn't any jvm arg -DsiteId=xxx configured. Please do so in your launch configuration.");
}
SiteId site;
try {
site = SiteId.valueOf(siteProp);
} catch (final IllegalArgumentException e) {
throw new IllegalArgumentException("The site enum type for jvm arg -DsiteId="+ siteProp +" is unknown. ", e);
}
final String testConfigFileName = site.getPrefix() + CONFIGURATION_FILE_SUFFIX;
return testConfigFileName;
}
}
|
o.c.platform.test: Bundle framework isn't required anymore (fall back to cur working dir), sensitive data can reside in its own file and is automatically located.
See https://sourceforge.net/apps/trac/cs-studio/wiki/UnitTests
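A hedged usage sketch: only the -DsiteId argument, the <prefix>TestConfiguration.ini naming and the sensitiveConfigFilePath key come from the class itself; the plugin id, property name and file contents below are illustrative.

// Launch configuration VM argument:  -DsiteId=DESY
// desyTestConfiguration.ini in the plugin (or, when no OSGi bundle is available,
// in the current working directory):
//     dbHost=example.desy.de                        <- made-up property
//     sensitiveConfigFilePath=mySensitiveData.ini   <- optional second file, e.g. credentials
public static String lookUpDbHost() throws TestProviderException {
    final TestDataProvider provider = TestDataProvider.getInstance("org.csstudio.platform");
    return (String) provider.get("dbHost");
}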
|
core/plugins/org.csstudio.platform/src/org/csstudio/platform/test/TestDataProvider.java
|
o.c.platform.test: Bundle framework isn't required anymore (fall back to cur working dir), sensitive data can reside in its own file and is automatically located.
|
<ide><path>core/plugins/org.csstudio.platform/src/org/csstudio/platform/test/TestDataProvider.java
<ide> */
<ide> package org.csstudio.platform.test;
<ide>
<add>import java.io.File;
<ide> import java.io.FileNotFoundException;
<ide> import java.io.IOException;
<ide> import java.io.InputStream;
<add>import java.net.MalformedURLException;
<ide> import java.net.URL;
<ide> import java.util.Properties;
<ide>
<ide> import javax.annotation.CheckForNull;
<ide> import javax.annotation.Nonnull;
<ide>
<add>import org.apache.log4j.Logger;
<add>import org.csstudio.platform.logging.CentralLogger;
<ide> import org.eclipse.core.runtime.Platform;
<ide> import org.osgi.framework.Bundle;
<ide>
<ide> */
<ide> public final class TestDataProvider {
<ide>
<add> private static final Logger LOG = CentralLogger.getInstance().getLogger(TestDataProvider.class);
<add>
<ide> private static final String CONFIGURATION_FILE_SUFFIX = "TestConfiguration.ini";
<add>
<add> private static final String SENSITIVE_FILE_KEY = "sensitiveConfigFilePath";
<ide>
<ide> private static TestDataProvider INSTANCE;
<ide>
<ide> @Nonnull final String testConfigFileName)
<ide> throws FileNotFoundException, IOException {
<ide>
<add> openStreamAndLoadProps(pluginId, testConfigFileName);
<add> final String secretFile = findSensitiveDataFile();
<add> if (secretFile != null) {
<add> openStreamAndLoadProps(pluginId, secretFile);
<add> }
<add> }
<add>
<add> /**
<add> * @param pluginId
<add> * @param fileName
<add> * @return
<add> */
<add> @CheckForNull
<add> private static String findSensitiveDataFile() {
<add>
<add> if (PROPERTIES != null) {
<add> final String secretFilePath = (String) PROPERTIES.get(SENSITIVE_FILE_KEY);
<add> if (secretFilePath != null) {
<add> return secretFilePath;
<add> }
<add> }
<add> return null;
<add> }
<add>
<add> /**
<add> * @param pluginId
<add> * @param testConfigFileName
<add> * @throws MalformedURLException
<add> * @throws FileNotFoundException
<add> * @throws IOException
<add> */
<add> private static void openStreamAndLoadProps(@Nonnull final String pluginId,
<add> @Nonnull final String testConfigFileName)
<add> throws MalformedURLException, FileNotFoundException, IOException {
<add>
<ide> InputStream openStream = null;
<ide> try {
<del> final Bundle bundle = Platform.getBundle(pluginId);
<del> final URL resource = bundle.getResource(testConfigFileName);
<del>
<del> if (resource == null) {
<del> throw new FileNotFoundException("Test configuration file for plugin " + pluginId +
<del> " and file name " + testConfigFileName +
<del> " does not exist");
<del> }
<add> final URL resource = locateResource(pluginId, testConfigFileName);
<ide> openStream = resource.openStream();
<ide> PROPERTIES.load(openStream);
<ide> } finally {
<ide> openStream.close();
<ide> }
<ide> }
<add> }
<add>
<add> /**
<add> * @param pluginId
<add> * @param testConfigFileName
<add> * @return
<add> * @throws MalformedURLException
<add> * @throws FileNotFoundException
<add> */
<add> @Nonnull
<add> private static URL locateResource(@Nonnull final String pluginId,
<add> @Nonnull final String testConfigFileName) throws MalformedURLException,
<add> FileNotFoundException {
<add> final Bundle bundle = Platform.getBundle(pluginId);
<add> URL resource = null;
<add> if (bundle == null) {
<add> LOG.warn("Bundle could not be located. Try to find config file via current working dir.");
<add>
<add> final String curDir = System.getProperty("user.dir");
<add> final File configFile = new File(curDir + File.separator + testConfigFileName);
<add> resource = configFile.toURL();
<add> } else {
<add> resource = bundle.getResource(testConfigFileName);
<add> }
<add>
<add> if (resource == null) {
<add> throw new FileNotFoundException("Test configuration file for plugin " + pluginId +
<add> " and file name " + testConfigFileName +
<add> " does not exist");
<add> }
<add> return resource;
<ide> }
<ide>
<ide> /**
|
|
Java
|
mit
|
4db322ebafa76c0dd4e26cefc86d8ec35561b0f2
| 0 |
codistmonk/IMJ
|
package imj2.tools;
import static imj2.tools.MultiresolutionSegmentationTest.getColorGradient;
import static java.awt.Color.BLACK;
import static java.awt.Color.RED;
import static java.awt.Color.WHITE;
import static java.lang.Math.min;
import static net.sourceforge.aprog.swing.SwingTools.show;
import imj2.tools.Image2DComponent.Painter;
import imj2.tools.RegionShrinkingTest.AutoMouseAdapter;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import net.sourceforge.aprog.tools.Tools;
import org.junit.Test;
/**
* @author codistmonk (creation 2014-02-23)
*/
public final class TiledParticleSegmentationTest {
/**
* {@value}.
*/
public static final int NORTH = 0;
/**
* {@value}.
*/
public static final int WEST = 1;
/**
* {@value}.
*/
public static final int EAST = 2;
/**
* {@value}.
*/
public static final int SOUTH = 3;
@Test
public final void test() {
final SimpleImageView imageView = new SimpleImageView();
new AutoMouseAdapter(imageView.getImageHolder()) {
private int cellSize = 8;
private final Painter<SimpleImageView> painter = new Painter<SimpleImageView>() {
private final Canvas canvas;
{
this.canvas = new Canvas();
imageView.getPainters().add(this);
}
@Override
public final void paint(final Graphics2D g, final SimpleImageView component,
final int width, final int height) {
final BufferedImage image = imageView.getImage();
final int imageWidth = image.getWidth();
final int imageHeight = image.getHeight();
this.canvas.setFormat(imageWidth, imageHeight, BufferedImage.TYPE_BYTE_GRAY);
this.canvas.clear(BLACK);
final int s = getCellSize();
for (int tileY = 0; tileY + 2 < imageHeight; tileY += s) {
final int tileLastY = imageHeight <= tileY + s + 2 ? imageHeight - 1 : min(imageHeight - 1, tileY + s);
for (int tileX = 0; tileX + 2 < imageWidth; tileX += s) {
final int tileLastX = imageWidth <= tileX + s + 2 ? imageWidth - 1 : min(imageWidth - 1, tileX + s);
final int northY = tileY;
final int westX = tileX;
final int eastX = tileLastX;
final int southY = tileLastY;
final int northX = findMaximumGradientX(image, northY, westX + 1, eastX - 1);
final int westY = findMaximumGradientY(image, westX, northY + 1, southY - 1);
final int eastY = findMaximumGradientY(image, eastX, northY + 1, southY - 1);
final int southX = findMaximumGradientX(image, southY, westX + 1, eastX - 1);
this.canvas.getGraphics().setColor(WHITE);
this.canvas.getGraphics().drawLine(northX, northY, southX, southY);
this.canvas.getGraphics().drawLine(westX, westY, eastX, eastY);
}
}
for (int y = 0; y < imageHeight; ++y) {
for (int x = 0; x < imageWidth; ++x) {
if ((this.canvas.getImage().getRGB(x, y) & 0x00FFFFFF) != 0) {
imageView.getBufferImage().setRGB(x, y, RED.getRGB());
}
}
}
for (int tileY = 0; tileY < imageHeight; tileY += s) {
for (int tileX = 0; tileX < imageWidth; tileX += s) {
g.setColor(Color.YELLOW);
g.drawOval(tileX - 1, tileY - 1, 2, 2);
}
}
}
/**
* {@value}.
*/
private static final long serialVersionUID = -8170474943200742892L;
};
public final int getCellSize() {
return this.cellSize;
}
@Override
protected final void cleanup() {
imageView.getPainters().remove(this.painter);
}
/**
* {@value}.
*/
private static final long serialVersionUID = -6497489818537320168L;
};
show(imageView, this.getClass().getSimpleName(), true);
}
public static final int findMaximumGradientX(final BufferedImage image, final int y, final int firstX, final int lastX) {
int maximumGradient = 0;
int result = (firstX + lastX) / 2;
for (int x = firstX; x <= lastX; ++x) {
final int gradient = getColorGradient(image, x, y);
if (maximumGradient < gradient) {
maximumGradient = gradient;
result = x;
}
}
return result;
}
public static final int findMaximumGradientY(final BufferedImage image, final int x, final int firstY, final int lastY) {
int maximumGradient = 0;
int result = (firstY + lastY) / 2;
for (int y = firstY; y <= lastY; ++y) {
final int gradient = getColorGradient(image, x, y);
if (maximumGradient < gradient) {
maximumGradient = gradient;
result = y;
}
}
return result;
}
}
|
IMJ/test/imj2/tools/TiledParticleSegmentationTest.java
|
package imj2.tools;
import static imj2.tools.MultiresolutionSegmentationTest.getColorGradient;
import static java.awt.Color.BLACK;
import static java.awt.Color.RED;
import static java.awt.Color.WHITE;
import static java.lang.Math.min;
import static net.sourceforge.aprog.swing.SwingTools.show;
import imj2.tools.Image2DComponent.Painter;
import imj2.tools.RegionShrinkingTest.AutoMouseAdapter;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import org.junit.Test;
/**
* @author codistmonk (creation 2014-02-23)
*/
public final class TiledParticleSegmentationTest {
/**
* {@value}.
*/
public static final int NORTH = 0;
/**
* {@value}.
*/
public static final int WEST = 1;
/**
* {@value}.
*/
public static final int EAST = 2;
/**
* {@value}.
*/
public static final int SOUTH = 3;
@Test
public final void test() {
final SimpleImageView imageView = new SimpleImageView();
new AutoMouseAdapter(imageView.getImageHolder()) {
private int cellSize = 8;
private final Painter<SimpleImageView> painter = new Painter<SimpleImageView>() {
private final Canvas canvas;
{
this.canvas = new Canvas();
imageView.getPainters().add(this);
}
@Override
public final void paint(final Graphics2D g, final SimpleImageView component,
final int width, final int height) {
final BufferedImage image = imageView.getImage();
final int imageWidth = image.getWidth();
final int imageHeight = image.getHeight();
this.canvas.setFormat(imageWidth, imageHeight, BufferedImage.TYPE_BYTE_GRAY);
this.canvas.clear(BLACK);
final int s = getCellSize();
for (int tileY = 0; tileY < imageHeight; tileY += s) {
final int tileLastY = min(imageHeight - 1, tileY + s);
for (int tileX = 0; tileX < imageWidth; tileX += s) {
final int tileLastX = min(imageWidth - 1, tileX + s);
final int northY = tileY;
final int westX = tileX;
final int eastX = tileLastX;
final int southY = tileLastY;
final int northX = findMaximumGradientX(image, northY, westX + 1, eastX - 1);
final int westY = findMaximumGradientY(image, westX, northY + 1, southY - 1);
final int eastY = findMaximumGradientY(image, eastX, northY + 1, southY - 1);
final int southX = findMaximumGradientX(image, southY, westX + 1, eastX - 1);
this.canvas.getGraphics().setColor(WHITE);
this.canvas.getGraphics().drawLine(northX, northY, southX, southY);
this.canvas.getGraphics().drawLine(westX, westY, eastX, eastY);
}
}
for (int y = 0; y < imageHeight; ++y) {
for (int x = 0; x < imageWidth; ++x) {
if ((this.canvas.getImage().getRGB(x, y) & 0x00FFFFFF) != 0) {
imageView.getBufferImage().setRGB(x, y, RED.getRGB());
}
}
}
for (int tileY = 0; tileY < imageHeight; tileY += s) {
for (int tileX = 0; tileX < imageWidth; tileX += s) {
g.setColor(Color.YELLOW);
g.drawOval(tileX - 1, tileY - 1, 2, 2);
}
}
}
/**
* {@value}.
*/
private static final long serialVersionUID = -8170474943200742892L;
};
public final int getCellSize() {
return this.cellSize;
}
@Override
protected final void cleanup() {
imageView.getPainters().remove(this.painter);
}
/**
* {@value}.
*/
private static final long serialVersionUID = -6497489818537320168L;
};
show(imageView, this.getClass().getSimpleName(), true);
}
public static final int findMaximumGradientX(final BufferedImage image, final int y, final int firstX, final int lastX) {
int maximumGradient = 0;
int result = (firstX + lastX) / 2;
for (int x = firstX; x <= lastX; ++x) {
final int gradient = getColorGradient(image, x, y);
if (maximumGradient < gradient) {
maximumGradient = gradient;
result = x;
}
}
return result;
}
public static final int findMaximumGradientY(final BufferedImage image, final int x, final int firstY, final int lastY) {
int maximumGradient = 0;
int result = (firstY + lastY) / 2;
for (int y = firstY; y <= lastY; ++y) {
final int gradient = getColorGradient(image, x, y);
if (maximumGradient < gradient) {
maximumGradient = gradient;
result = y;
}
}
return result;
}
}
|
[IMJ][imj2] Fixed a defect (end tiles too small in some images) in TiledParticlesSegmentation.
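A small sketch of the boundary rule the fix introduces (see the diff below): when the trailing tile would end up only a couple of pixels wide, the current tile is extended to the image border instead of emitting the tiny end tile, and the loop no longer starts a tile when fewer than three columns remain.

// Mirrors the fixed expression: imageWidth <= tileX + s + 2 ? imageWidth - 1 : min(imageWidth - 1, tileX + s)
static int tileLastX(int imageWidth, int tileX, int cellSize) {
    return imageWidth <= tileX + cellSize + 2
            ? imageWidth - 1
            : Math.min(imageWidth - 1, tileX + cellSize);
}
// Loop condition after the fix: for (int tileX = 0; tileX + 2 < imageWidth; tileX += cellSize)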
|
IMJ/test/imj2/tools/TiledParticleSegmentationTest.java
|
[IMJ][imj2] Fixed a defect (end tiles too small in some images) in TiledParticlesSegmentation.
|
<ide><path>IMJ/test/imj2/tools/TiledParticleSegmentationTest.java
<ide> import static java.awt.Color.WHITE;
<ide> import static java.lang.Math.min;
<ide> import static net.sourceforge.aprog.swing.SwingTools.show;
<del>
<ide> import imj2.tools.Image2DComponent.Painter;
<ide> import imj2.tools.RegionShrinkingTest.AutoMouseAdapter;
<ide>
<ide> import java.awt.Color;
<ide> import java.awt.Graphics2D;
<ide> import java.awt.image.BufferedImage;
<add>
<add>import net.sourceforge.aprog.tools.Tools;
<ide>
<ide> import org.junit.Test;
<ide>
<ide>
<ide> final int s = getCellSize();
<ide>
<del> for (int tileY = 0; tileY < imageHeight; tileY += s) {
<del> final int tileLastY = min(imageHeight - 1, tileY + s);
<add> for (int tileY = 0; tileY + 2 < imageHeight; tileY += s) {
<add> final int tileLastY = imageHeight <= tileY + s + 2 ? imageHeight - 1 : min(imageHeight - 1, tileY + s);
<ide>
<del> for (int tileX = 0; tileX < imageWidth; tileX += s) {
<del> final int tileLastX = min(imageWidth - 1, tileX + s);
<add> for (int tileX = 0; tileX + 2 < imageWidth; tileX += s) {
<add> final int tileLastX = imageWidth <= tileX + s + 2 ? imageWidth - 1 : min(imageWidth - 1, tileX + s);
<ide> final int northY = tileY;
<ide> final int westX = tileX;
<ide> final int eastX = tileLastX;
|
|
Java
|
mit
|
f2d75383a018546ba98e5efc24c4d575ffaa7c94
| 0 |
microsoftgraph/msgraph-sdk-java
|
package com.microsoft.graph.functional;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import java.io.IOException;
import com.microsoft.graph.content.BatchRequestContent;
import com.microsoft.graph.content.BatchResponseContent;
import com.microsoft.graph.http.HttpMethod;
import com.microsoft.graph.httpcore.HttpClients;
import com.microsoft.graph.requests.GraphServiceClient;
import com.microsoft.graph.models.User;
import com.microsoft.graph.requests.UserRequest;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
@Disabled
public class BatchTests {
@Test
public void GetsABatchFromRequests() throws IOException{
final TestBase testBase = new TestBase();
final GraphServiceClient graphServiceClient = testBase.graphClient;
final BatchRequestContent batchContent = new BatchRequestContent();
final String meGetId = batchContent.addBatchRequestStep(graphServiceClient.me()
.buildRequest());
assertNotNull(meGetId);
final String usersGetId = batchContent.addBatchRequestStep(graphServiceClient.users()
.buildRequest()
.filter("accountEnabled eq true")
.expand("manager")
.top(5),
HttpMethod.GET,
null,
meGetId);
final User userToAdd = new User();
userToAdd.givenName = "Darrel";
final String userPostId = batchContent.addBatchRequestStep(graphServiceClient.users()
.buildRequest(),
HttpMethod.POST,
userToAdd,
usersGetId);
final BatchResponseContent responseContent = testBase.graphClient.batch().buildRequest().post(batchContent);
assertEquals(400, responseContent.getResponseById(userPostId).status); //400:we're not providing enough properties for the call to go through
assertEquals(200, responseContent.getResponseById(meGetId).status);
assertEquals(200, responseContent.getResponseById(usersGetId).status);
final User me = responseContent.getResponseById(meGetId).getDeserializedBody(User.class);
assertNotNull(me.displayName);
}
}
|
src/test/java/com/microsoft/graph/functional/BatchTests.java
|
package com.microsoft.graph.functional;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import java.io.IOException;
import com.microsoft.graph.content.BatchRequestContent;
import com.microsoft.graph.content.BatchResponseContent;
import com.microsoft.graph.http.HttpMethod;
import com.microsoft.graph.httpcore.HttpClients;
import com.microsoft.graph.requests.GraphServiceClient;
import com.microsoft.graph.models.User;
import com.microsoft.graph.requests.UserRequest;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
@Disabled
public class BatchTests {
@Test
public void GetsABatchFromRequests() throws IOException{
final TestBase testBase = new TestBase();
final GraphServiceClient graphServiceClient = testBase.graphClient;
final BatchRequestContent batchContent = new BatchRequestContent();
final String meGetId = batchContent.addBatchRequestStep(graphServiceClient.me()
.buildRequest());
assertNotNull(meGetId);
final String usersGetId = batchContent.addBatchRequestStep(graphServiceClient.users()
.buildRequest()
.filter("accountEnabled eq true")
.expand("manager")
.top(5),
HttpMethod.GET,
null,
meGetId);
final User userToAdd = new User();
userToAdd.givenName = "Darrel";
final String userPostId = batchContent.addBatchRequestStep(graphServiceClient.users()
.buildRequest(),
HttpMethod.POST,
userToAdd,
usersGetId);
final BatchResponseContent responseContent = testBase.graphClient.batch().buildRequest().post(batchContent);
assertEquals(400, responseContent.getResponseById(userPostId).status); //400:we're not providing enough properties for the call to go through
assertEquals(200, responseContent.getResponseById(meGetId).status);
assertEquals(200, responseContent.getResponseById(usersGetId).status);
final User me = responseContent.getResponseById(meGetId).getDeserializedBody(testBase.graphClient.getSerializer(), User.class);
assertNotNull(me.displayName);
}
}
|
- updates unit test following removal of serializer parameter
|
src/test/java/com/microsoft/graph/functional/BatchTests.java
|
- updates unit test following removal of serializer parameter
|
<ide><path>src/test/java/com/microsoft/graph/functional/BatchTests.java
<ide> assertEquals(400, responseContent.getResponseById(userPostId).status); //400:we're not providing enough properties for the call to go through
<ide> assertEquals(200, responseContent.getResponseById(meGetId).status);
<ide> assertEquals(200, responseContent.getResponseById(usersGetId).status);
<del> final User me = responseContent.getResponseById(meGetId).getDeserializedBody(testBase.graphClient.getSerializer(), User.class);
<add> final User me = responseContent.getResponseById(meGetId).getDeserializedBody(User.class);
<ide> assertNotNull(me.displayName);
<ide> }
<ide> }
|
|
Java
|
agpl-3.0
|
4fd4da3e7eee4bec33352c61822866ca66a4da74
| 0 |
deepstupid/sphinx5
|
/*
* Copyright 1999-2002 Carnegie Mellon University.
* Portions Copyright 2002 Sun Microsystems, Inc.
* Portions Copyright 2002 Mitsubishi Electronic Research Laboratories.
* All Rights Reserved. Use is subject to license terms.
*
* See the file "license.terms" for information on usage and
* redistribution of this file, and for a DISCLAIMER OF ALL
* WARRANTIES.
*
*/
package edu.cmu.sphinx.frontend.endpoint;
import edu.cmu.sphinx.frontend.Audio;
import edu.cmu.sphinx.frontend.AudioSource;
import edu.cmu.sphinx.frontend.DataProcessor;
import edu.cmu.sphinx.frontend.FrontEnd;
import edu.cmu.sphinx.frontend.Signal;
import edu.cmu.sphinx.util.SphinxProperties;
import edu.cmu.sphinx.util.LogMath;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
/**
* Converts a stream of Audio objects, marked as speech and non-speech,
* into utterances. This is done by inserting SPEECH_START and
* SPEECH_END signals into the stream.
*/
public class SpeechMarker extends DataProcessor implements AudioSource {
public static final String PROP_PREFIX =
"edu.cmu.sphinx.frontend.endpoint.SpeechMarker.";
/**
* The SphinxProperty for the minimum amount of time in speech
* (in milliseconds) to be considered as utterance start.
*/
public static final String PROP_START_SPEECH =
PROP_PREFIX + "startSpeech";
/**
* The default value of PROP_START_SPEECH.
*/
public static final int PROP_START_SPEECH_DEFAULT = 200;
/**
* The SphinxProperty for the amount of time in silence
* (in milliseconds) to be considered as utterance end.
*/
public static final String PROP_END_SILENCE = PROP_PREFIX + "endSilence";
/**
* The default value of PROP_END_SILENCE.
*/
public static final int PROP_END_SILENCE_DEFAULT = 500;
/**
* The SphinxProperty for the amount of time (in milliseconds)
* before speech start to be included as speech data.
*/
public static final String PROP_SPEECH_LEADER =
PROP_PREFIX + "speechLeader";
/**
* The default value of PROP_SPEECH_LEADER.
*/
public static final int PROP_SPEECH_LEADER_DEFAULT = 100;
/**
* The SphinxProperty for the amount of time (in milliseconds)
* after speech ends to be included as speech data.
*/
public static final String PROP_SPEECH_TRAILER =
PROP_PREFIX + "speechTrailer";
/**
* The default value of PROP_SPEECH_TRAILER.
*/
public static final int PROP_SPEECH_TRAILER_DEFAULT = 100;
private AudioSource predecessor;
private List outputQueue;
private boolean inSpeech;
private int startSpeechTime;
private int endSilenceTime;
private int speechLeader;
private int speechTrailer;
private int sampleRate;
/**
* Initializes this SpeechMarker with the given name, context,
* and AudioSource predecessor.
*
* @param name the name of this SpeechMarker
* @param context the context of the SphinxProperties this
* SpeechMarker uses
* @param props the SphinxProperties to read properties from
* @param predecessor the AudioSource where this SpeechMarker
* gets Cepstrum from
*
* @throws java.io.IOException
*/
public void initialize(String name, String context,
SphinxProperties props,
AudioSource predecessor) throws IOException {
super.initialize(name, context, props);
this.predecessor = predecessor;
this.outputQueue = new ArrayList();
setProperties();
reset();
}
/**
* Sets the properties for this SpeechMarker.
*/
private void setProperties() {
SphinxProperties props = getSphinxProperties();
startSpeechTime =
props.getInt(PROP_START_SPEECH, PROP_START_SPEECH_DEFAULT);
endSilenceTime =
props.getInt(PROP_END_SILENCE, PROP_END_SILENCE_DEFAULT);
speechLeader =
props.getInt(PROP_SPEECH_LEADER, PROP_SPEECH_LEADER_DEFAULT);
speechTrailer =
props.getInt(PROP_SPEECH_TRAILER, PROP_SPEECH_TRAILER_DEFAULT);
sampleRate =
props.getInt(FrontEnd.PROP_SAMPLE_RATE,
FrontEnd.PROP_SAMPLE_RATE_DEFAULT);
}
/**
* Resets this SpeechMarker to a starting state.
*/
private void reset() {
inSpeech = false;
}
/**
* Returns the next Audio object.
*
* @return the next Audio object, or null if none available
*
* @throws java.io.IOException if an error occurred
*
* @see Audio
*/
public Audio getAudio() throws IOException {
if (outputQueue.size() == 0) {
if (!inSpeech) {
readInitialFrames();
} else {
Audio audio = readAudio();
if (audio.hasContent()) {
sendToQueue(audio);
if (!audio.isSpeech()) {
inSpeech = !(readEndFrames(audio));
}
} else if (audio.hasSignal(Signal.UTTERANCE_END)) {
sendToQueue(new Audio(Signal.SPEECH_END));
sendToQueue(audio);
inSpeech = false;
} else if (audio.hasSignal(Signal.UTTERANCE_START)) {
throw new Error("Got UTTERANCE_START while in speech");
}
}
}
if (outputQueue.size() > 0) {
Audio audio = (Audio) outputQueue.remove(0);
return audio;
} else {
return null;
}
}
private Audio readAudio() throws IOException {
Audio audio = predecessor.getAudio();
/*
if (audio != null) {
String speech = "";
if (audio.hasContent() && audio.isSpeech()) {
speech = " *";
}
System.out.println("SpeechMarker: incoming: " +
audio.getSignal() + speech);
}
*/
return audio;
}
private int numUttStarts;
private int numUttEnds;
private void sendToQueue(Audio audio) {
// now add the audio
outputQueue.add(audio);
if (audio.hasSignal(Signal.UTTERANCE_START)) {
numUttEnds = 0;
numUttStarts++;
if (numUttStarts > 1) {
throw new Error("Too many utterance starts");
}
} else if (audio.hasSignal(Signal.UTTERANCE_END)) {
numUttStarts = 0;
numUttEnds++;
if (numUttEnds > 1) {
throw new Error("Too many utterance ends");
}
}
}
/**
* Returns the amount of audio data in milliseconds in the
* given Audio object.
*
* @param audio the Audio object
*
* @return the amount of audio data in milliseconds
*/
public int getAudioTime(Audio audio) {
return (int) (audio.getSamples().length * 1000.0f / sampleRate);
}
/**
* Read the starting frames until the utterance has started.
*/
private void readInitialFrames() throws IOException {
int nonSpeechTime = 0;
int minSpeechTime = (startSpeechTime > speechLeader) ?
startSpeechTime : speechLeader;
while (!inSpeech) {
Audio audio = readAudio();
if (audio == null) {
return;
} else {
sendToQueue(audio);
if (audio.hasContent()) {
nonSpeechTime += getAudioTime(audio);
if (audio.isSpeech()) {
boolean speechStarted = handleFirstSpeech(audio);
if (speechStarted) {
// System.out.println("Speech started !!!");
addSpeechStart();
inSpeech = true;
break;
}
}
}
}
int i = 0;
// prune any excessive non-speech
while (nonSpeechTime > minSpeechTime) {
Audio next = (Audio) outputQueue.get(i);
if (next.hasContent()) {
int audioTime = getAudioTime(next);
if (nonSpeechTime - audioTime >= minSpeechTime) {
next = (Audio) outputQueue.remove(i);
nonSpeechTime -= audioTime;
}
} else {
/*
System.out.println
("Not removed ("+i+"): "+next.getSignal());
*/
}
i++;
}
}
}
/**
* Handles an Audio object that can possibly be the first in
* an utterance.
*
* @param audio the Audio to handle
*
* @return true if utterance/speech has started for real, false otherwise
*/
private boolean handleFirstSpeech(Audio audio) throws IOException {
int speechTime = getAudioTime(audio);
// System.out.println("Entering handleFirstSpeech()");
while (speechTime < startSpeechTime) {
Audio next = readAudio();
sendToQueue(next);
if (!next.isSpeech()) {
return false;
} else {
speechTime += getAudioTime(audio);
}
}
return true;
}
/**
* Backtrack from the current position to add an SPEECH_START Signal
* to the outputQueue.
*/
private void addSpeechStart() {
int silenceLength = 0;
ListIterator i = outputQueue.listIterator(outputQueue.size()-1);
// backtrack until we have 'speechLeader' amount of non-speech
while (silenceLength < speechLeader && i.hasPrevious()) {
Audio current = (Audio) i.previous();
if (current.hasContent()) {
if (current.isSpeech()) {
silenceLength = 0;
} else {
silenceLength += getAudioTime(current);
}
} else if (current.hasSignal(Signal.UTTERANCE_START)) {
i.next(); // put the SPEECH_START after the UTTERANCE_START
break;
} else if (current.hasSignal(Signal.UTTERANCE_END)) {
throw new Error("No UTTERANCE_START after UTTERANCE_END");
}
}
// add the SPEECH_START
i.add(new Audio(Signal.SPEECH_START));
}
/**
* Given a non-speech frame, try to read more non-speech frames
* until we think its the end of utterance.
*
* @param audio a non-speech frame
*
* @return true if speech has really ended, false if speech
* has not ended
*/
private boolean readEndFrames(Audio audio) throws IOException {
boolean speechEndAdded = false;
boolean readTrailer = true;
int originalLast = outputQueue.size() - 1;
int silenceLength = getAudioTime(audio);
// read ahead until we have 'endSilenceTime' amount of silence
while (silenceLength < endSilenceTime) {
Audio next = readAudio();
if (next.hasContent()) {
sendToQueue(next);
if (next.isSpeech()) {
// if speech is detected again, we're still in
// an utterance
return false;
} else {
// it is non-speech
silenceLength += getAudioTime(next);
}
} else if (next.hasSignal(Signal.UTTERANCE_END)) {
sendToQueue(next);
readTrailer = false;
break;
} else {
throw new Error("Illegal signal: " + next.getSignal());
}
}
if (readTrailer) {
// read ahead until we have 'speechTrailer' amount of silence
while (!speechEndAdded && silenceLength < speechTrailer) {
Audio next = readAudio();
if (next.hasContent()) {
if (next.isSpeech()) {
// if we have hit speech again, then the current
// speech should end
sendToQueue(new Audio(Signal.SPEECH_END));
sendToQueue(next);
speechEndAdded = true;
break;
} else {
silenceLength += getAudioTime(next);
sendToQueue(next);
}
} else if (next.hasSignal(Signal.UTTERANCE_END)) {
sendToQueue(new Audio(Signal.SPEECH_END));
sendToQueue(next);
speechEndAdded = true;
} else {
throw new Error("Illegal signal: " + next.getSignal());
}
}
}
if (!speechEndAdded) {
// iterator from the end of speech and read till we
// have 'speechTrailer' amount of non-speech, and
// then add an SPEECH_END
ListIterator i = outputQueue.listIterator(originalLast);
silenceLength = 0;
while (silenceLength < speechTrailer && i.hasNext()) {
Audio next = (Audio) i.next();
if (next.hasSignal(Signal.UTTERANCE_END)) {
i.previous();
break;
} else {
assert !next.isSpeech();
silenceLength += getAudioTime(next);
}
}
i.add(new Audio(Signal.SPEECH_END));
}
// System.out.println("Speech ended !!!");
return true;
}
}
|
edu/cmu/sphinx/frontend/endpoint/SpeechMarker.java
|
/*
* Copyright 1999-2002 Carnegie Mellon University.
* Portions Copyright 2002 Sun Microsystems, Inc.
* Portions Copyright 2002 Mitsubishi Electronic Research Laboratories.
* All Rights Reserved. Use is subject to license terms.
*
* See the file "license.terms" for information on usage and
* redistribution of this file, and for a DISCLAIMER OF ALL
* WARRANTIES.
*
*/
package edu.cmu.sphinx.frontend.endpoint;
import edu.cmu.sphinx.frontend.Audio;
import edu.cmu.sphinx.frontend.AudioSource;
import edu.cmu.sphinx.frontend.DataProcessor;
import edu.cmu.sphinx.frontend.FrontEnd;
import edu.cmu.sphinx.frontend.Signal;
import edu.cmu.sphinx.util.SphinxProperties;
import edu.cmu.sphinx.util.LogMath;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
/**
* Converts a stream of Audio objects, marked as speech and non-speech,
* into utterances. This is done by inserting SPEECH_START and
* SPEECH_END signals into the stream.
*/
public class SpeechMarker extends DataProcessor implements AudioSource {
public static final String PROP_PREFIX =
"edu.cmu.sphinx.frontend.endpoint.SpeechMarker.";
/**
* The SphinxProperty for the minimum amount of time in speech
* (in milliseconds) to be considered as utterance start.
*/
public static final String PROP_START_SPEECH =
PROP_PREFIX + "startSpeech";
/**
* The default value of PROP_START_SPEECH.
*/
public static final int PROP_START_SPEECH_DEFAULT = 200;
/**
* The SphinxProperty for the amount of time in silence
* (in milliseconds) to be considered as utterance end.
*/
public static final String PROP_END_SILENCE = PROP_PREFIX + "endSilence";
/**
* The default value of PROP_END_SILENCE.
*/
public static final int PROP_END_SILENCE_DEFAULT = 500;
/**
* The SphinxProperty for the amount of time (in milliseconds)
* before speech start to be included as speech data.
*/
public static final String PROP_SPEECH_LEADER =
PROP_PREFIX + "speechLeader";
/**
* The default value of PROP_SPEECH_LEADER.
*/
public static final int PROP_SPEECH_LEADER_DEFAULT = 100;
/**
* The SphinxProperty for the amount of time (in milliseconds)
* after speech ends to be included as speech data.
*/
public static final String PROP_SPEECH_TRAILER =
PROP_PREFIX + "speechTrailer";
/**
* The default value of PROP_SPEECH_TRAILER.
*/
public static final int PROP_SPEECH_TRAILER_DEFAULT = 100;
private AudioSource predecessor;
private List outputQueue;
private boolean inSpeech;
private int startSpeechTime;
private int endSilenceTime;
private int speechLeader;
private int speechTrailer;
private int sampleRate;
/**
* Initializes this SpeechMarker with the given name, context,
* and AudioSource predecessor.
*
* @param name the name of this SpeechMarker
* @param context the context of the SphinxProperties this
* SpeechMarker uses
* @param props the SphinxProperties to read properties from
* @param predecessor the AudioSource where this SpeechMarker
     * gets Audio objects from
*
* @throws java.io.IOException
*/
public void initialize(String name, String context,
SphinxProperties props,
AudioSource predecessor) throws IOException {
super.initialize(name, context, props);
this.predecessor = predecessor;
this.outputQueue = new ArrayList();
setProperties();
reset();
}
/**
* Sets the properties for this SpeechMarker.
*/
private void setProperties() {
SphinxProperties props = getSphinxProperties();
startSpeechTime =
props.getInt(PROP_START_SPEECH, PROP_START_SPEECH_DEFAULT);
endSilenceTime =
props.getInt(PROP_END_SILENCE, PROP_END_SILENCE_DEFAULT);
speechLeader =
props.getInt(PROP_SPEECH_LEADER, PROP_SPEECH_LEADER_DEFAULT);
speechTrailer =
props.getInt(PROP_SPEECH_TRAILER, PROP_SPEECH_TRAILER_DEFAULT);
sampleRate =
props.getInt(FrontEnd.PROP_SAMPLE_RATE,
FrontEnd.PROP_SAMPLE_RATE_DEFAULT);
}
/**
* Resets this SpeechMarker to a starting state.
*/
private void reset() {
inSpeech = false;
}
/**
* Returns the next Audio object.
*
* @return the next Audio object, or null if none available
*
* @throws java.io.IOException if an error occurred
*
* @see Audio
*/
public Audio getAudio() throws IOException {
if (outputQueue.size() == 0) {
if (!inSpeech) {
readInitialFrames();
} else {
Audio audio = readAudio();
if (audio.hasContent()) {
sendToQueue(audio);
if (!audio.isSpeech()) {
inSpeech = !(readEndFrames(audio));
}
} else if (audio.hasSignal(Signal.UTTERANCE_END)) {
sendToQueue(new Audio(Signal.SPEECH_END));
sendToQueue(audio);
inSpeech = false;
} else if (audio.hasSignal(Signal.UTTERANCE_START)) {
throw new Error("Got UTTERANCE_START while in speech");
}
}
}
if (outputQueue.size() > 0) {
Audio audio = (Audio) outputQueue.remove(0);
return audio;
} else {
return null;
}
}
private Audio readAudio() throws IOException {
Audio audio = predecessor.getAudio();
/*
if (audio != null) {
String speech = "";
if (audio.hasContent() && audio.isSpeech()) {
speech = " *";
}
System.out.println("SpeechMarker: incoming: " +
audio.getSignal() + speech);
}
*/
return audio;
}
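    // Sanity counters: sendToQueue() throws if a second UTTERANCE_START arrives
    // before an UTTERANCE_END (or vice versa), which would indicate a framing bug.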
private int numUttStarts;
private int numUttEnds;
private void sendToQueue(Audio audio) {
outputQueue.add(audio);
if (audio.hasSignal(Signal.UTTERANCE_START)) {
numUttEnds = 0;
numUttStarts++;
if (numUttStarts > 1) {
throw new Error("Too many utterance starts");
}
} else if (audio.hasSignal(Signal.UTTERANCE_END)) {
numUttStarts = 0;
numUttEnds++;
if (numUttEnds > 1) {
throw new Error("Too many utterance ends");
}
}
}
/**
* Returns the amount of audio data in milliseconds in the
* given Audio object.
*
* @param audio the Audio object
*
* @return the amount of audio data in milliseconds
*/
public int getAudioTime(Audio audio) {
return (int) (audio.getSamples().length * 1000.0f / sampleRate);
}
/**
* Read the starting frames until the utterance has started.
*/
private void readInitialFrames() throws IOException {
while (!inSpeech) {
Audio audio = readAudio();
if (audio == null) {
return;
} else {
sendToQueue(audio);
if (audio.hasContent()) {
if (audio.isSpeech()) {
boolean speechStarted = handleFirstSpeech(audio);
if (speechStarted) {
// System.out.println("Speech started !!!");
addSpeechStart();
inSpeech = true;
break;
}
}
}
}
}
}
/**
* Handles an Audio object that can possibly be the first in
* an utterance.
*
* @param audio the Audio to handle
*
* @return true if utterance/speech has started for real, false otherwise
*/
private boolean handleFirstSpeech(Audio audio) throws IOException {
int speechTime = getAudioTime(audio);
// System.out.println("Entering handleFirstSpeech()");
while (speechTime < startSpeechTime) {
Audio next = readAudio();
sendToQueue(next);
if (!next.isSpeech()) {
return false;
} else {
                speechTime += getAudioTime(next);
}
}
return true;
}
/**
* Backtrack from the current position to add an SPEECH_START Signal
* to the outputQueue.
*/
private void addSpeechStart() {
int silenceLength = 0;
ListIterator i = outputQueue.listIterator(outputQueue.size()-1);
// backtrack until we have 'speechLeader' amount of non-speech
while (silenceLength < speechLeader && i.hasPrevious()) {
Audio current = (Audio) i.previous();
if (current.hasContent()) {
if (current.isSpeech()) {
silenceLength = 0;
} else {
silenceLength += getAudioTime(current);
}
} else if (current.hasSignal(Signal.UTTERANCE_START)) {
i.next(); // put the SPEECH_START after the UTTERANCE_START
break;
} else if (current.hasSignal(Signal.UTTERANCE_END)) {
throw new Error("No UTTERANCE_START after UTTERANCE_END");
}
}
// add the SPEECH_START
i.add(new Audio(Signal.SPEECH_START));
}
/**
* Given a non-speech frame, try to read more non-speech frames
* until we think its the end of utterance.
*
* @param audio a non-speech frame
*
* @return true if speech has really ended, false if speech
* has not ended
*/
private boolean readEndFrames(Audio audio) throws IOException {
boolean speechEndAdded = false;
boolean readTrailer = true;
int originalLast = outputQueue.size() - 1;
int silenceLength = getAudioTime(audio);
// read ahead until we have 'endSilenceTime' amount of silence
while (silenceLength < endSilenceTime) {
Audio next = readAudio();
if (next.hasContent()) {
sendToQueue(next);
if (next.isSpeech()) {
// if speech is detected again, we're still in
// an utterance
return false;
} else {
// it is non-speech
silenceLength += getAudioTime(next);
}
} else if (next.hasSignal(Signal.UTTERANCE_END)) {
sendToQueue(next);
readTrailer = false;
break;
} else {
throw new Error("Illegal signal: " + next.getSignal());
}
}
if (readTrailer) {
// read ahead until we have 'speechTrailer' amount of silence
while (!speechEndAdded && silenceLength < speechTrailer) {
Audio next = readAudio();
if (next.hasContent()) {
if (next.isSpeech()) {
// if we have hit speech again, then the current
// speech should end
sendToQueue(new Audio(Signal.SPEECH_END));
sendToQueue(next);
speechEndAdded = true;
break;
} else {
silenceLength += getAudioTime(next);
sendToQueue(next);
}
} else if (next.hasSignal(Signal.UTTERANCE_END)) {
sendToQueue(new Audio(Signal.SPEECH_END));
sendToQueue(next);
speechEndAdded = true;
} else {
throw new Error("Illegal signal: " + next.getSignal());
}
}
}
if (!speechEndAdded) {
            // iterate from the end of speech and read till we
// have 'speechTrailer' amount of non-speech, and
// then add an SPEECH_END
ListIterator i = outputQueue.listIterator(originalLast);
silenceLength = 0;
while (silenceLength < speechTrailer && i.hasNext()) {
Audio next = (Audio) i.next();
if (next.hasSignal(Signal.UTTERANCE_END)) {
i.previous();
break;
} else {
assert !next.isSpeech();
silenceLength += getAudioTime(next);
}
}
i.add(new Audio(Signal.SPEECH_END));
}
// System.out.println("Speech ended !!!");
return true;
}
}
|
Debugged the OutOfMemoryError caused by listening to and caching non-speech
after an utterance.
git-svn-id: a8b04003a33e1d3e001b9d20391fa392a9f62d91@2188 94700074-3cef-4d97-a70e-9c8c206c02f5
|
edu/cmu/sphinx/frontend/endpoint/SpeechMarker.java
|
Debugged the OutOfMemoryError caused by listening to and caching non-speech after an utterance.
|
<ide><path>du/cmu/sphinx/frontend/endpoint/SpeechMarker.java
<ide> private int numUttEnds;
<ide>
<ide> private void sendToQueue(Audio audio) {
<add> // now add the audio
<ide> outputQueue.add(audio);
<ide> if (audio.hasSignal(Signal.UTTERANCE_START)) {
<ide> numUttEnds = 0;
<ide> * Read the starting frames until the utterance has started.
<ide> */
<ide> private void readInitialFrames() throws IOException {
<add> int nonSpeechTime = 0;
<add> int minSpeechTime = (startSpeechTime > speechLeader) ?
<add> startSpeechTime : speechLeader;
<add>
<ide> while (!inSpeech) {
<ide> Audio audio = readAudio();
<ide> if (audio == null) {
<ide> } else {
<ide> sendToQueue(audio);
<ide> if (audio.hasContent()) {
<add> nonSpeechTime += getAudioTime(audio);
<ide> if (audio.isSpeech()) {
<ide> boolean speechStarted = handleFirstSpeech(audio);
<ide> if (speechStarted) {
<ide> }
<ide> }
<ide> }
<add> }
<add> int i = 0;
<add> // prune any excessive non-speech
<add> while (nonSpeechTime > minSpeechTime) {
<add> Audio next = (Audio) outputQueue.get(i);
<add> if (next.hasContent()) {
<add> int audioTime = getAudioTime(next);
<add> if (nonSpeechTime - audioTime >= minSpeechTime) {
<add> next = (Audio) outputQueue.remove(i);
<add> nonSpeechTime -= audioTime;
<add> }
<add> } else {
<add> /*
<add> System.out.println
<add> ("Not removed ("+i+"): "+next.getSignal());
<add> */
<add> }
<add> i++;
<ide> }
<ide> }
<ide> }
|
|
Java
|
apache-2.0
|
ae6a62b2ef6449a788b16f3e5704887f9a857f85
| 0 |
apache/solr,apache/solr,apache/solr,apache/solr,apache/solr
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.search;
import java.io.IOException;
import java.io.Serializable;
import java.util.List;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Semaphore;
import java.util.concurrent.Future;
import com.google.common.primitives.Longs;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.IndexReaderContext;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.util.BitDocIdSet;
import org.apache.lucene.util.CharsRef;
import org.apache.lucene.util.CharsRefBuilder;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.Bits;
import org.apache.lucene.search.BitsFilteredDocIdSet;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.SolrjNamedThreadFactory;
import org.apache.solr.core.CloseHook;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.StrField;
import org.apache.solr.schema.TrieField;
import org.apache.solr.core.SolrCore;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.Filter;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.util.BytesRef;
import org.apache.solr.common.util.NamedList;
/**
 * syntax fq={!hash workers=11 worker=4}&partitionKeys=field1,field2
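 *
 * For example (a hypothetical request, not taken from this source), worker 0 of 4
 * could claim its slice of documents partitioned on the "id" field with:
 * q=*:*&partitionKeys=id&fq={!hash workers=4 worker=0}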
* */
public class HashQParserPlugin extends QParserPlugin {
public static final String NAME = "hash";
public void init(NamedList params) {
}
public QParser createParser(String query, SolrParams localParams, SolrParams params, SolrQueryRequest request) {
return new HashQParser(query, localParams, params, request);
}
private class HashQParser extends QParser {
public HashQParser(String query, SolrParams localParams, SolrParams params, SolrQueryRequest request) {
super(query, localParams, params, request);
}
public Query parse() {
int workers = localParams.getInt("workers");
int worker = localParams.getInt("worker");
String keys = params.get("partitionKeys");
return new HashQuery(keys, workers, worker);
}
}
private class HashQuery extends ExtendedQueryBase implements PostFilter {
private String keysParam;
private int workers;
private int worker;
public boolean getCache() {
if(getCost() > 99) {
return false;
} else {
return super.getCache();
}
}
public int hashCode() {
return keysParam.hashCode()+workers+worker+(int)getBoost();
}
public boolean equals(Object o) {
if (o instanceof HashQuery) {
HashQuery h = (HashQuery)o;
if(keysParam.equals(h.keysParam) && workers == h.workers && worker == h.worker && getBoost() == h.getBoost()) {
return true;
}
}
return false;
}
public HashQuery(String keysParam, int workers, int worker) {
this.keysParam = keysParam;
this.workers = workers;
this.worker = worker;
}
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
String[] keys = keysParam.split(",");
SolrIndexSearcher solrIndexSearcher = (SolrIndexSearcher)searcher;
IndexReaderContext context = solrIndexSearcher.getTopReaderContext();
List<LeafReaderContext> leaves = context.leaves();
FixedBitSet[] fixedBitSets = new FixedBitSet[leaves.size()];
for(LeafReaderContext leaf : leaves) {
try {
SegmentPartitioner segmentPartitioner = new SegmentPartitioner(leaf,worker,workers, keys, solrIndexSearcher);
segmentPartitioner.run();
fixedBitSets[segmentPartitioner.context.ord] = segmentPartitioner.docs;
} catch(Exception e) {
throw new IOException(e);
}
}
ConstantScoreQuery constantScoreQuery = new ConstantScoreQuery(new BitsFilter(fixedBitSets));
return constantScoreQuery.createWeight(searcher, false);
}
public class BitsFilter extends Filter {
private FixedBitSet[] bitSets;
public BitsFilter(FixedBitSet[] bitSets) {
this.bitSets = bitSets;
}
public String toString(String s) {
return s;
}
public DocIdSet getDocIdSet(LeafReaderContext context, Bits bits) {
return BitsFilteredDocIdSet.wrap(new BitDocIdSet(bitSets[context.ord]), bits);
}
}
class SegmentPartitioner implements Runnable {
public LeafReaderContext context;
private int worker;
private int workers;
private HashKey k;
public FixedBitSet docs;
public SegmentPartitioner(LeafReaderContext context,
int worker,
int workers,
String[] keys,
SolrIndexSearcher solrIndexSearcher) {
this.context = context;
this.worker = worker;
this.workers = workers;
HashKey[] hashKeys = new HashKey[keys.length];
IndexSchema schema = solrIndexSearcher.getSchema();
for(int i=0; i<keys.length; i++) {
String key = keys[i];
FieldType ft = schema.getField(key).getType();
HashKey h = null;
if(ft instanceof StrField) {
h = new BytesHash(key, ft);
} else {
h = new NumericHash(key);
}
hashKeys[i] = h;
}
k = (hashKeys.length > 1) ? new CompositeHash(hashKeys) : hashKeys[0];
}
public void run() {
LeafReader reader = context.reader();
try {
k.setNextReader(context);
this.docs = new FixedBitSet(reader.maxDoc());
int maxDoc = reader.maxDoc();
for(int i=0; i<maxDoc; i++) {
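                        // Mask off the sign bit so the hash is non-negative, then keep only
                        // the documents whose key hash maps to this worker's partition slot.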
if((k.hashCode(i) & 0x7FFFFFFF) % workers == worker) {
docs.set(i);
}
}
}catch(Exception e) {
throw new RuntimeException(e);
}
}
}
public DelegatingCollector getFilterCollector(IndexSearcher indexSearcher) {
String[] keys = keysParam.split(",");
HashKey[] hashKeys = new HashKey[keys.length];
SolrIndexSearcher searcher = (SolrIndexSearcher)indexSearcher;
IndexSchema schema = searcher.getSchema();
for(int i=0; i<keys.length; i++) {
String key = keys[i];
FieldType ft = schema.getField(key).getType();
HashKey h = null;
if(ft instanceof StrField) {
h = new BytesHash(key, ft);
} else {
h = new NumericHash(key);
}
hashKeys[i] = h;
}
HashKey k = (hashKeys.length > 1) ? new CompositeHash(hashKeys) : hashKeys[0];
return new HashCollector(k, workers, worker);
}
}
private class HashCollector extends DelegatingCollector {
private int worker;
private int workers;
private HashKey hashKey;
private LeafCollector leafCollector;
public HashCollector(HashKey hashKey, int workers, int worker) {
this.hashKey = hashKey;
this.workers = workers;
this.worker = worker;
}
public void setScorer(Scorer scorer) throws IOException{
leafCollector.setScorer(scorer);
}
public void doSetNextReader(LeafReaderContext context) throws IOException {
this.hashKey.setNextReader(context);
this.leafCollector = delegate.getLeafCollector(context);
}
public void collect(int doc) throws IOException {
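      // Same partitioning rule as SegmentPartitioner: forward only documents
      // whose (sign-bit-masked) key hash falls in this worker's slot.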
if((hashKey.hashCode(doc) & 0x7FFFFFFF) % workers == worker) {
leafCollector.collect(doc);
}
}
}
private interface HashKey {
public void setNextReader(LeafReaderContext reader) throws IOException;
public long hashCode(int doc);
}
private class BytesHash implements HashKey {
private SortedDocValues values;
private String field;
private FieldType fieldType;
private CharsRefBuilder charsRefBuilder = new CharsRefBuilder();
public BytesHash(String field, FieldType fieldType) {
this.field = field;
this.fieldType = fieldType;
}
public void setNextReader(LeafReaderContext context) throws IOException {
values = context.reader().getSortedDocValues(field);
}
public long hashCode(int doc) {
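      // Hash the readable form of the value rather than the raw indexed bytes,
      // so equal field values hash identically regardless of their indexed encoding.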
BytesRef ref = values.get(doc);
this.fieldType.indexedToReadable(ref, charsRefBuilder);
CharsRef charsRef = charsRefBuilder.get();
return charsRef.hashCode();
}
}
private class NumericHash implements HashKey {
private NumericDocValues values;
private String field;
public NumericHash(String field) {
this.field = field;
}
public void setNextReader(LeafReaderContext context) throws IOException {
values = context.reader().getNumericDocValues(field);
}
public long hashCode(int doc) {
long l = values.get(doc);
return Longs.hashCode(l);
}
}
private class ZeroHash implements HashKey {
public long hashCode(int doc) {
return 0;
}
public void setNextReader(LeafReaderContext context) {
}
}
private class CompositeHash implements HashKey {
private HashKey key1;
private HashKey key2;
private HashKey key3;
private HashKey key4;
public CompositeHash(HashKey[] hashKeys) {
key1 = hashKeys[0];
key2 = hashKeys[1];
key3 = (hashKeys.length > 2) ? hashKeys[2] : new ZeroHash();
key4 = (hashKeys.length > 3) ? hashKeys[3] : new ZeroHash();
}
public void setNextReader(LeafReaderContext context) throws IOException {
key1.setNextReader(context);
key2.setNextReader(context);
key3.setNextReader(context);
key4.setNextReader(context);
}
public long hashCode(int doc) {
return key1.hashCode(doc)+key2.hashCode(doc)+key3.hashCode(doc)+key4.hashCode(doc);
}
}
}
|
solr/core/src/java/org/apache/solr/search/HashQParserPlugin.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.search;
import java.io.IOException;
import java.io.Serializable;
import java.util.List;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Semaphore;
import java.util.concurrent.Future;
import com.google.common.primitives.Longs;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.IndexReaderContext;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.util.BitDocIdSet;
import org.apache.lucene.util.CharsRef;
import org.apache.lucene.util.CharsRefBuilder;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.Bits;
import org.apache.lucene.search.BitsFilteredDocIdSet;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.SolrjNamedThreadFactory;
import org.apache.solr.core.CloseHook;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.StrField;
import org.apache.solr.schema.TrieField;
import org.apache.solr.core.SolrCore;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.Filter;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.util.BytesRef;
import org.apache.solr.common.util.NamedList;
/**
 * syntax fq={!hash workers=11 worker=4}&partitionKeys=field1,field2
* */
public class HashQParserPlugin extends QParserPlugin {
public static final String NAME = "hash";
private static Semaphore semaphore = new Semaphore(8,true);
private static ExecutorService threadPool = Executors.newCachedThreadPool(new SolrjNamedThreadFactory("HashQParserPlugin"));
private static boolean init = true;
private static synchronized void closeHook(SolrCore core) {
if(init) {
init = false;
core.addCloseHook(new CloseHook() {
@Override
public void preClose(SolrCore core) {
threadPool.shutdown();
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void postClose(SolrCore core) {
//To change body of implemented methods use File | Settings | File Templates.
}
});
}
}
public void init(NamedList params) {
}
public QParser createParser(String query, SolrParams localParams, SolrParams params, SolrQueryRequest request) {
closeHook(request.getSearcher().getCore());
return new HashQParser(query, localParams, params, request);
}
private class HashQParser extends QParser {
public HashQParser(String query, SolrParams localParams, SolrParams params, SolrQueryRequest request) {
super(query, localParams, params, request);
}
public Query parse() {
int workers = localParams.getInt("workers");
int worker = localParams.getInt("worker");
String keys = params.get("partitionKeys");
return new HashQuery(keys, workers, worker);
}
}
private class HashQuery extends ExtendedQueryBase implements PostFilter {
private String keysParam;
private int workers;
private int worker;
public boolean getCache() {
if(getCost() > 99) {
return false;
} else {
return super.getCache();
}
}
public int hashCode() {
return keysParam.hashCode()+workers+worker+(int)getBoost();
}
public boolean equals(Object o) {
if (o instanceof HashQuery) {
HashQuery h = (HashQuery)o;
if(keysParam.equals(h.keysParam) && workers == h.workers && worker == h.worker && getBoost() == h.getBoost()) {
return true;
}
}
return false;
}
public HashQuery(String keysParam, int workers, int worker) {
this.keysParam = keysParam;
this.workers = workers;
this.worker = worker;
}
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
String[] keys = keysParam.split(",");
SolrIndexSearcher solrIndexSearcher = (SolrIndexSearcher)searcher;
IndexReaderContext context = solrIndexSearcher.getTopReaderContext();
List<LeafReaderContext> leaves = context.leaves();
ArrayBlockingQueue queue = new ArrayBlockingQueue(leaves.size());
for(LeafReaderContext leaf : leaves) {
try {
semaphore.acquire();
SegmentPartitioner segmentPartitioner = new SegmentPartitioner(leaf,worker,workers, keys, solrIndexSearcher, queue,semaphore);
threadPool.execute(segmentPartitioner);
} catch(Exception e) {
throw new IOException(e);
}
}
FixedBitSet[] fixedBitSets = new FixedBitSet[leaves.size()];
for(int i=0; i<leaves.size(); i++) {
try {
SegmentPartitioner segmentPartitioner = (SegmentPartitioner)queue.take();
fixedBitSets[segmentPartitioner.context.ord] = segmentPartitioner.docs;
}catch(Exception e) {
throw new IOException(e);
}
}
ConstantScoreQuery constantScoreQuery = new ConstantScoreQuery(new BitsFilter(fixedBitSets));
return constantScoreQuery.createWeight(searcher, false);
}
public class BitsFilter extends Filter {
private FixedBitSet[] bitSets;
public BitsFilter(FixedBitSet[] bitSets) {
this.bitSets = bitSets;
}
public String toString(String s) {
return s;
}
public DocIdSet getDocIdSet(LeafReaderContext context, Bits bits) {
return BitsFilteredDocIdSet.wrap(new BitDocIdSet(bitSets[context.ord]), bits);
}
}
class SegmentPartitioner implements Runnable {
public LeafReaderContext context;
private int worker;
private int workers;
private HashKey k;
private Semaphore sem;
private ArrayBlockingQueue queue;
public FixedBitSet docs;
public SegmentPartitioner(LeafReaderContext context,
int worker,
int workers,
String[] keys,
SolrIndexSearcher solrIndexSearcher,
ArrayBlockingQueue queue, Semaphore sem) {
this.context = context;
this.worker = worker;
this.workers = workers;
this.queue = queue;
this.sem = sem;
HashKey[] hashKeys = new HashKey[keys.length];
IndexSchema schema = solrIndexSearcher.getSchema();
for(int i=0; i<keys.length; i++) {
String key = keys[i];
FieldType ft = schema.getField(key).getType();
HashKey h = null;
if(ft instanceof StrField) {
h = new BytesHash(key, ft);
} else {
h = new NumericHash(key);
}
hashKeys[i] = h;
}
k = (hashKeys.length > 1) ? new CompositeHash(hashKeys) : hashKeys[0];
}
public void run() {
LeafReader reader = context.reader();
try {
k.setNextReader(context);
this.docs = new FixedBitSet(reader.maxDoc());
int maxDoc = reader.maxDoc();
for(int i=0; i<maxDoc; i++) {
if((k.hashCode(i) & 0x7FFFFFFF) % workers == worker) {
docs.set(i);
}
}
}catch(Exception e) {
throw new RuntimeException(e);
} finally {
sem.release();
queue.add(this);
}
}
}
public DelegatingCollector getFilterCollector(IndexSearcher indexSearcher) {
String[] keys = keysParam.split(",");
HashKey[] hashKeys = new HashKey[keys.length];
SolrIndexSearcher searcher = (SolrIndexSearcher)indexSearcher;
IndexSchema schema = searcher.getSchema();
for(int i=0; i<keys.length; i++) {
String key = keys[i];
FieldType ft = schema.getField(key).getType();
HashKey h = null;
if(ft instanceof StrField) {
h = new BytesHash(key, ft);
} else {
h = new NumericHash(key);
}
hashKeys[i] = h;
}
HashKey k = (hashKeys.length > 1) ? new CompositeHash(hashKeys) : hashKeys[0];
return new HashCollector(k, workers, worker);
}
}
private class HashCollector extends DelegatingCollector {
private int worker;
private int workers;
private HashKey hashKey;
private LeafCollector leafCollector;
public HashCollector(HashKey hashKey, int workers, int worker) {
this.hashKey = hashKey;
this.workers = workers;
this.worker = worker;
}
public void setScorer(Scorer scorer) throws IOException{
leafCollector.setScorer(scorer);
}
public void doSetNextReader(LeafReaderContext context) throws IOException {
this.hashKey.setNextReader(context);
this.leafCollector = delegate.getLeafCollector(context);
}
public void collect(int doc) throws IOException {
if((hashKey.hashCode(doc) & 0x7FFFFFFF) % workers == worker) {
leafCollector.collect(doc);
}
}
}
private interface HashKey {
public void setNextReader(LeafReaderContext reader) throws IOException;
public long hashCode(int doc);
}
private class BytesHash implements HashKey {
private SortedDocValues values;
private String field;
private FieldType fieldType;
private CharsRefBuilder charsRefBuilder = new CharsRefBuilder();
public BytesHash(String field, FieldType fieldType) {
this.field = field;
this.fieldType = fieldType;
}
public void setNextReader(LeafReaderContext context) throws IOException {
values = context.reader().getSortedDocValues(field);
}
public long hashCode(int doc) {
BytesRef ref = values.get(doc);
this.fieldType.indexedToReadable(ref, charsRefBuilder);
CharsRef charsRef = charsRefBuilder.get();
return charsRef.hashCode();
}
}
private class NumericHash implements HashKey {
private NumericDocValues values;
private String field;
public NumericHash(String field) {
this.field = field;
}
public void setNextReader(LeafReaderContext context) throws IOException {
values = context.reader().getNumericDocValues(field);
}
public long hashCode(int doc) {
long l = values.get(doc);
return Longs.hashCode(l);
}
}
private class ZeroHash implements HashKey {
public long hashCode(int doc) {
return 0;
}
public void setNextReader(LeafReaderContext context) {
}
}
private class CompositeHash implements HashKey {
private HashKey key1;
private HashKey key2;
private HashKey key3;
private HashKey key4;
public CompositeHash(HashKey[] hashKeys) {
key1 = hashKeys[0];
key2 = hashKeys[1];
key3 = (hashKeys.length > 2) ? hashKeys[2] : new ZeroHash();
key4 = (hashKeys.length > 3) ? hashKeys[3] : new ZeroHash();
}
public void setNextReader(LeafReaderContext context) throws IOException {
key1.setNextReader(context);
key2.setNextReader(context);
key3.setNextReader(context);
key4.setNextReader(context);
}
public long hashCode(int doc) {
return key1.hashCode(doc)+key2.hashCode(doc)+key3.hashCode(doc)+key4.hashCode(doc);
}
}
}
|
SOLR-7224: HashQParserPlugin test failures due to java.util.concurrent.RejectedExecutionException
git-svn-id: 308d55f399f3bd9aa0560a10e81a003040006c48@1665517 13f79535-47bb-0310-9956-ffa450edef68
|
solr/core/src/java/org/apache/solr/search/HashQParserPlugin.java
|
SOLR-7224: HashQParserPlugin test failures due to java.util.concurrent.RejectedExecutionException
|
<ide><path>olr/core/src/java/org/apache/solr/search/HashQParserPlugin.java
<ide> public class HashQParserPlugin extends QParserPlugin {
<ide>
<ide> public static final String NAME = "hash";
<del> private static Semaphore semaphore = new Semaphore(8,true);
<del> private static ExecutorService threadPool = Executors.newCachedThreadPool(new SolrjNamedThreadFactory("HashQParserPlugin"));
<del> private static boolean init = true;
<del>
<del> private static synchronized void closeHook(SolrCore core) {
<del> if(init) {
<del> init = false;
<del> core.addCloseHook(new CloseHook() {
<del> @Override
<del> public void preClose(SolrCore core) {
<del> threadPool.shutdown();
<del> //To change body of implemented methods use File | Settings | File Templates.
<del> }
<del>
<del> @Override
<del> public void postClose(SolrCore core) {
<del> //To change body of implemented methods use File | Settings | File Templates.
<del> }
<del> });
<del> }
<del> }
<add>
<ide>
<ide> public void init(NamedList params) {
<ide>
<ide> }
<ide>
<ide> public QParser createParser(String query, SolrParams localParams, SolrParams params, SolrQueryRequest request) {
<del> closeHook(request.getSearcher().getCore());
<ide> return new HashQParser(query, localParams, params, request);
<ide> }
<ide>
<ide> IndexReaderContext context = solrIndexSearcher.getTopReaderContext();
<ide>
<ide> List<LeafReaderContext> leaves = context.leaves();
<del> ArrayBlockingQueue queue = new ArrayBlockingQueue(leaves.size());
<del>
<add> FixedBitSet[] fixedBitSets = new FixedBitSet[leaves.size()];
<ide>
<ide> for(LeafReaderContext leaf : leaves) {
<ide> try {
<del> semaphore.acquire();
<del> SegmentPartitioner segmentPartitioner = new SegmentPartitioner(leaf,worker,workers, keys, solrIndexSearcher, queue,semaphore);
<del> threadPool.execute(segmentPartitioner);
<add> SegmentPartitioner segmentPartitioner = new SegmentPartitioner(leaf,worker,workers, keys, solrIndexSearcher);
<add> segmentPartitioner.run();
<add> fixedBitSets[segmentPartitioner.context.ord] = segmentPartitioner.docs;
<ide> } catch(Exception e) {
<del> throw new IOException(e);
<del> }
<del> }
<del>
<del> FixedBitSet[] fixedBitSets = new FixedBitSet[leaves.size()];
<del> for(int i=0; i<leaves.size(); i++) {
<del> try {
<del> SegmentPartitioner segmentPartitioner = (SegmentPartitioner)queue.take();
<del> fixedBitSets[segmentPartitioner.context.ord] = segmentPartitioner.docs;
<del> }catch(Exception e) {
<ide> throw new IOException(e);
<ide> }
<ide> }
<ide> private int worker;
<ide> private int workers;
<ide> private HashKey k;
<del> private Semaphore sem;
<del> private ArrayBlockingQueue queue;
<ide> public FixedBitSet docs;
<ide> public SegmentPartitioner(LeafReaderContext context,
<ide> int worker,
<ide> int workers,
<ide> String[] keys,
<del> SolrIndexSearcher solrIndexSearcher,
<del> ArrayBlockingQueue queue, Semaphore sem) {
<add> SolrIndexSearcher solrIndexSearcher) {
<ide> this.context = context;
<ide> this.worker = worker;
<ide> this.workers = workers;
<del> this.queue = queue;
<del> this.sem = sem;
<ide>
<ide> HashKey[] hashKeys = new HashKey[keys.length];
<ide> IndexSchema schema = solrIndexSearcher.getSchema();
<ide> }
<ide> }catch(Exception e) {
<ide> throw new RuntimeException(e);
<del> } finally {
<del> sem.release();
<del> queue.add(this);
<ide> }
<ide> }
<ide> }
|
|
Java
|
apache-2.0
|
b98683ee6fdf3cd8065bf3da90a69a6a83c68dad
| 0 |
afilimonov/jackrabbit,sdmcraft/jackrabbit,Overseas-Student-Living/jackrabbit,Kast0rTr0y/jackrabbit,afilimonov/jackrabbit,Kast0rTr0y/jackrabbit,kigsmtua/jackrabbit,SylvesterAbreu/jackrabbit,tripodsan/jackrabbit,Overseas-Student-Living/jackrabbit,sdmcraft/jackrabbit,Kast0rTr0y/jackrabbit,tripodsan/jackrabbit,SylvesterAbreu/jackrabbit,bartosz-grabski/jackrabbit,Overseas-Student-Living/jackrabbit,SylvesterAbreu/jackrabbit,afilimonov/jackrabbit,sdmcraft/jackrabbit,bartosz-grabski/jackrabbit,bartosz-grabski/jackrabbit,kigsmtua/jackrabbit,tripodsan/jackrabbit,kigsmtua/jackrabbit
|
/*
* Copyright 2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.webdav;
import org.apache.jackrabbit.webdav.xml.Namespace;
import org.apache.jackrabbit.webdav.util.HttpDateFormat;
import java.text.DateFormat;
/**
 * <code>DavConstants</code> provides constants for request and response
* headers, Xml elements and property names defined by
* <a href="http://www.ietf.org/rfc/rfc2518.txt">RFC 2518</a>. In addition
* common date formats (creation date and modification time) are included.
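 * <p>
 * As a usage sketch (not part of this interface), a property value could be
 * rendered with <code>modificationDateFormat.format(new java.util.Date())</code>
 * and parsed back with <code>creationDateFormat.parse(value)</code>.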
*/
public interface DavConstants {
/**
* Default Namespace constant
*/
public static final Namespace NAMESPACE = Namespace.getNamespace("D", "DAV:");
//---< Headers (Names and Value Constants) >--------------------------------
public static final String HEADER_DAV = "DAV";
public static final String HEADER_DESTINATION = "Destination";
public static final String HEADER_IF = "If";
public static final String HEADER_AUTHORIZATION = "Authorization";
public static final String HEADER_CONTENT_TYPE = "Content-Type";
public static final String HEADER_CONTENT_LENGTH = "Content-Length";
public static final String HEADER_CONTENT_LANGUAGE = "Content-Language";
public static final String HEADER_ETAG = "ETag";
public static final String HEADER_LAST_MODIFIED = "Last-Modified";
//--------------------------------------------------< Lock-Token Header >---
public static final String HEADER_LOCK_TOKEN = "Lock-Token";
public static final String OPAQUE_LOCK_TOKEN_PREFIX = "opaquelocktoken:";
//-----------------------------------------------------< Timeout Header >---
public static final String HEADER_TIMEOUT = "Timeout";
public static final String TIMEOUT_INFINITE = "Infinite";
// RFC 2518: timeout value for TimeType "Second" MUST NOT be greater than 2^32-1
public static final long INFINITE_TIMEOUT = Integer.MAX_VALUE;
public static final long UNDEFINED_TIMEOUT = Integer.MIN_VALUE;
//---------------------------------------------------< Overwrite Header >---
public static final String HEADER_OVERWRITE = "Overwrite";
//-------------------------------------------------------< Depth Header >---
public static final String HEADER_DEPTH = "Depth";
public static final String DEPTH_INFINITY_S = "infinity";
public static final int DEPTH_INFINITY = Integer.MAX_VALUE;
public static final int DEPTH_0 = 0;
public static final int DEPTH_1 = 1;
//---< XML Element, Attribute Names >---------------------------------------
public static final String XML_ALLPROP = "allprop";
public static final String XML_COLLECTION = "collection";
public static final String XML_DST = "dst";
public static final String XML_HREF = "href";
public static final String XML_KEEPALIVE = "keepalive";
public static final String XML_LINK = "link";
public static final String XML_MULTISTATUS = "multistatus";
public static final String XML_OMIT = "omit";
public static final String XML_PROP = "prop";
public static final String XML_PROPERTYBEHAVIOR = "propertybehavior";
public static final String XML_PROPERTYUPDATE = "propertyupdate";
public static final String XML_PROPFIND = "propfind";
public static final String XML_PROPNAME = "propname";
public static final String XML_PROPSTAT = "propstat";
public static final String XML_REMOVE = "remove";
public static final String XML_RESPONSE = "response";
public static final String XML_RESPONSEDESCRIPTION = "responsedescription";
public static final String XML_SET = "set";
public static final String XML_SOURCE = "source";
public static final String XML_STATUS = "status";
//------------------------------------------------------------< locking >---
public static final String XML_ACTIVELOCK = "activelock";
public static final String XML_DEPTH = "depth";
public static final String XML_LOCKTOKEN = "locktoken";
public static final String XML_TIMEOUT = "timeout";
public static final String XML_LOCKSCOPE = "lockscope";
public static final String XML_EXCLUSIVE = "exclusive";
public static final String XML_SHARED = "shared";
public static final String XML_LOCKENTRY = "lockentry";
public static final String XML_LOCKINFO = "lockinfo";
public static final String XML_LOCKTYPE = "locktype";
public static final String XML_WRITE = "write";
public static final String XML_OWNER = "owner";
//---< Property Names >-----------------------------------------------------
/*
* Webdav property names as defined by RFC 2518<br>
* Note: Microsoft webdav clients as well as Webdrive request additional
* property (e.g. href, name, owner, isRootLocation, isCollection) within the
 * default namespace, which are ignored by this implementation, except
* for the 'isCollection' property, needed for XP built-in clients.
*/
public static final String PROPERTY_CREATIONDATE = "creationdate";
public static final String PROPERTY_DISPLAYNAME = "displayname";
public static final String PROPERTY_GETCONTENTLANGUAGE = "getcontentlanguage";
public static final String PROPERTY_GETCONTENTLENGTH = "getcontentlength";
public static final String PROPERTY_GETCONTENTTYPE = "getcontenttype";
public static final String PROPERTY_GETETAG = "getetag";
public static final String PROPERTY_GETLASTMODIFIED = "getlastmodified";
public static final String PROPERTY_LOCKDISCOVERY = "lockdiscovery";
public static final String PROPERTY_RESOURCETYPE = "resourcetype";
public static final String PROPERTY_SOURCE = "source";
public static final String PROPERTY_SUPPORTEDLOCK = "supportedlock";
//---< PropFind Constants >-------------------------------------------------
public static final int PROPFIND_BY_PROPERTY = 0;
public static final int PROPFIND_ALL_PROP = 1;
public static final int PROPFIND_PROPERTY_NAMES = 2;
//---< Date Format Constants >----------------------------------------------
/**
* modificationDate date format per RFC 1123
*/
public static DateFormat modificationDateFormat = new HttpDateFormat("EEE, dd MMM yyyy HH:mm:ss z");
/**
* Simple date format for the creation date ISO representation (partial).
*/
public static DateFormat creationDateFormat = new HttpDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
}
|
jcr-server/webdav/src/java/org/apache/jackrabbit/webdav/DavConstants.java
|
/*
* Copyright 2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.webdav;
import org.apache.jackrabbit.webdav.xml.Namespace;
import org.apache.jackrabbit.webdav.util.HttpDateFormat;
import java.text.DateFormat;
/**
* <code>DavConstants</code> provide constants for request and response
* headers, Xml elements and property names defined by
* <a href="http://www.ietf.org/rfc/rfc2518.txt">RFC 2518</a>. In addition
* common date formats (creation date and modification time) are included.
*/
public interface DavConstants {
/**
* Request and response headers and some value constants
*/
//-------------------------------------------------------------- Headers ---
public static final String HEADER_DAV = "DAV";
public static final String HEADER_DESTINATION = "Destination";
public static final String HEADER_IF = "If";
public static final String HEADER_AUTHORIZATION = "Authorization";
public static final String HEADER_CONTENT_TYPE = "Content-Type";
public static final String HEADER_CONTENT_LENGTH = "Content-Length";
public static final String HEADER_CONTENT_LANGUAGE = "Content-Language";
public static final String HEADER_ETAG = "ETag";
public static final String HEADER_LAST_MODIFIED = "Last-Modified";
//---------------------------------------------------- Lock-Token header ---
public static final String HEADER_LOCK_TOKEN = "Lock-Token";
public static final String OPAQUE_LOCK_TOKEN_PREFIX = "opaquelocktoken:";
//------------------------------------------------------- Timeout header ---
public static final String HEADER_TIMEOUT = "Timeout";
public static final String TIMEOUT_INFINITE = "Infinite";
// RFC 2518: timeout value for TimeType "Second" MUST NOT be greater than 2^32-1
public static final long INFINITE_TIMEOUT = Integer.MAX_VALUE;
public static final long UNDEFINED_TIMEOUT = Integer.MIN_VALUE;
//----------------------------------------------------- Overwrite header ---
public static final String HEADER_OVERWRITE = "Overwrite";
//--------------------------------------------------------- Depth header ---
public static final String HEADER_DEPTH = "Depth";
public static final String DEPTH_INFINITY_S = "infinity";
public static final int DEPTH_INFINITY = Integer.MAX_VALUE;
public static final int DEPTH_0 = 0;
public static final int DEPTH_1 = 1;
/**
* Default Namespace constant
*/
public static final Namespace NAMESPACE = Namespace.getNamespace("D", "DAV:");
/**
* Xml element names used for response and request body
*/
public static final String XML_ALLPROP = "allprop";
public static final String XML_COLLECTION = "collection";
public static final String XML_DST = "dst";
public static final String XML_HREF = "href";
public static final String XML_KEEPALIVE = "keepalive";
public static final String XML_LINK = "link";
public static final String XML_MULTISTATUS = "multistatus";
public static final String XML_OMIT = "omit";
public static final String XML_PROP = "prop";
public static final String XML_PROPERTYBEHAVIOR = "propertybehavior";
public static final String XML_PROPERTYUPDATE = "propertyupdate";
public static final String XML_PROPFIND = "propfind";
public static final String XML_PROPNAME = "propname";
public static final String XML_PROPSTAT = "propstat";
public static final String XML_REMOVE = "remove";
public static final String XML_RESPONSE = "response";
public static final String XML_RESPONSEDESCRIPTION = "responsedescription";
public static final String XML_SET = "set";
public static final String XML_SOURCE = "source";
public static final String XML_STATUS = "status";
/**
* XML element names related to locking
*/
public static final String XML_ACTIVELOCK = "activelock";
public static final String XML_DEPTH = "depth";
public static final String XML_LOCKTOKEN = "locktoken";
public static final String XML_TIMEOUT = "timeout";
public static final String XML_LOCKSCOPE = "lockscope";
public static final String XML_EXCLUSIVE = "exclusive";
public static final String XML_SHARED = "shared";
public static final String XML_LOCKENTRY = "lockentry";
public static final String XML_LOCKINFO = "lockinfo";
public static final String XML_LOCKTYPE = "locktype";
public static final String XML_WRITE = "write";
public static final String XML_OWNER = "owner";
/**
* Webdav property names as defined by RFC 2518<br>
* Note: Microsoft webdav clients as well as Webdrive request additional
* property (e.g. href, name, owner, isRootLocation, isCollection) within the
 * default namespace, which are ignored by this implementation, except
* for the 'isCollection' property, needed for XP built-in clients.
*/
public static final String PROPERTY_CREATIONDATE = "creationdate";
public static final String PROPERTY_DISPLAYNAME = "displayname";
public static final String PROPERTY_GETCONTENTLANGUAGE = "getcontentlanguage";
public static final String PROPERTY_GETCONTENTLENGTH = "getcontentlength";
public static final String PROPERTY_GETCONTENTTYPE = "getcontenttype";
public static final String PROPERTY_GETETAG = "getetag";
public static final String PROPERTY_GETLASTMODIFIED = "getlastmodified";
public static final String PROPERTY_LOCKDISCOVERY = "lockdiscovery";
public static final String PROPERTY_RESOURCETYPE = "resourcetype";
public static final String PROPERTY_SOURCE = "source";
public static final String PROPERTY_SUPPORTEDLOCK = "supportedlock";
//--------------------------------------------------- Propfind constants ---
public static final int PROPFIND_BY_PROPERTY = 0;
public static final int PROPFIND_ALL_PROP = 1;
public static final int PROPFIND_PROPERTY_NAMES = 2;
//--------------------------------------------------------- date formats ---
/**
* modificationDate date format per RFC 1123
*/
public static DateFormat modificationDateFormat = new HttpDateFormat("EEE, dd MMM yyyy HH:mm:ss z");
/**
* Simple date format for the creation date ISO representation (partial).
*/
public static DateFormat creationDateFormat = new HttpDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
}
|
commented separation lines
git-svn-id: e3d4743c6b03e3f6fd6d8117e7616b4c02d9b980@385488 13f79535-47bb-0310-9956-ffa450edef68
|
jcr-server/webdav/src/java/org/apache/jackrabbit/webdav/DavConstants.java
|
commented separation lines
|
<ide><path>cr-server/webdav/src/java/org/apache/jackrabbit/webdav/DavConstants.java
<ide> public interface DavConstants {
<ide>
<ide> /**
<del> * Request and response headers and some value constants
<add> * Default Namespace constant
<ide> */
<del> //-------------------------------------------------------------- Headers ---
<add> public static final Namespace NAMESPACE = Namespace.getNamespace("D", "DAV:");
<add>
<add> //---< Headers (Names and Value Constants) >--------------------------------
<ide> public static final String HEADER_DAV = "DAV";
<ide> public static final String HEADER_DESTINATION = "Destination";
<ide> public static final String HEADER_IF = "If";
<ide> public static final String HEADER_ETAG = "ETag";
<ide> public static final String HEADER_LAST_MODIFIED = "Last-Modified";
<ide>
<del> //---------------------------------------------------- Lock-Token header ---
<add> //--------------------------------------------------< Lock-Token Header >---
<ide> public static final String HEADER_LOCK_TOKEN = "Lock-Token";
<ide> public static final String OPAQUE_LOCK_TOKEN_PREFIX = "opaquelocktoken:";
<ide>
<del> //------------------------------------------------------- Timeout header ---
<add> //-----------------------------------------------------< Timeout Header >---
<ide> public static final String HEADER_TIMEOUT = "Timeout";
<ide> public static final String TIMEOUT_INFINITE = "Infinite";
<ide> // RFC 2518: timeout value for TimeType "Second" MUST NOT be greater than 2^32-1
<ide> public static final long INFINITE_TIMEOUT = Integer.MAX_VALUE;
<ide> public static final long UNDEFINED_TIMEOUT = Integer.MIN_VALUE;
<ide>
<del> //----------------------------------------------------- Overwrite header ---
<add> //---------------------------------------------------< Overwrite Header >---
<ide> public static final String HEADER_OVERWRITE = "Overwrite";
<ide>
<del> //--------------------------------------------------------- Depth header ---
<add> //-------------------------------------------------------< Depth Header >---
<ide> public static final String HEADER_DEPTH = "Depth";
<ide> public static final String DEPTH_INFINITY_S = "infinity";
<ide> public static final int DEPTH_INFINITY = Integer.MAX_VALUE;
<ide> public static final int DEPTH_0 = 0;
<ide> public static final int DEPTH_1 = 1;
<ide>
<del> /**
<del> * Default Namespace constant
<del> */
<del> public static final Namespace NAMESPACE = Namespace.getNamespace("D", "DAV:");
<del>
<del> /**
<del> * Xml element names used for response and request body
<del> */
<add> //---< XML Element, Attribute Names >---------------------------------------
<ide> public static final String XML_ALLPROP = "allprop";
<ide> public static final String XML_COLLECTION = "collection";
<ide> public static final String XML_DST = "dst";
<ide> public static final String XML_SOURCE = "source";
<ide> public static final String XML_STATUS = "status";
<ide>
<del> /**
<del> * XML element names related to locking
<del> */
<add> //------------------------------------------------------------< locking >---
<ide> public static final String XML_ACTIVELOCK = "activelock";
<ide> public static final String XML_DEPTH = "depth";
<ide> public static final String XML_LOCKTOKEN = "locktoken";
<ide> public static final String XML_WRITE = "write";
<ide> public static final String XML_OWNER = "owner";
<ide>
<del> /**
<add> //---< Property Names >-----------------------------------------------------
<add> /*
<ide> * Webdav property names as defined by RFC 2518<br>
<ide> * Note: Microsoft webdav clients as well as Webdrive request additional
<ide> * property (e.g. href, name, owner, isRootLocation, isCollection) within the
<ide> public static final String PROPERTY_SOURCE = "source";
<ide> public static final String PROPERTY_SUPPORTEDLOCK = "supportedlock";
<ide>
<del> //--------------------------------------------------- Propfind constants ---
<add> //---< PropFind Constants >-------------------------------------------------
<ide> public static final int PROPFIND_BY_PROPERTY = 0;
<ide> public static final int PROPFIND_ALL_PROP = 1;
<ide> public static final int PROPFIND_PROPERTY_NAMES = 2;
<ide>
<del> //--------------------------------------------------------- date formats ---
<add> //---< Date Format Constants >----------------------------------------------
<ide> /**
<ide> * modificationDate date format per RFC 1123
<ide> */
|
|
Java
|
apache-2.0
|
e2aa1ffa39322839b4b0586f7318f98a38ea9d7c
| 0 |
xfournet/intellij-community,fengbaicanhe/intellij-community,michaelgallacher/intellij-community,fitermay/intellij-community,slisson/intellij-community,ol-loginov/intellij-community,SerCeMan/intellij-community,michaelgallacher/intellij-community,salguarnieri/intellij-community,izonder/intellij-community,TangHao1987/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,TangHao1987/intellij-community,ThiagoGarciaAlves/intellij-community,akosyakov/intellij-community,signed/intellij-community,adedayo/intellij-community,pwoodworth/intellij-community,FHannes/intellij-community,idea4bsd/idea4bsd,MER-GROUP/intellij-community,SerCeMan/intellij-community,tmpgit/intellij-community,suncycheng/intellij-community,izonder/intellij-community,fitermay/intellij-community,allotria/intellij-community,vladmm/intellij-community,retomerz/intellij-community,jagguli/intellij-community,ThiagoGarciaAlves/intellij-community,ivan-fedorov/intellij-community,vvv1559/intellij-community,muntasirsyed/intellij-community,FHannes/intellij-community,lucafavatella/intellij-community,TangHao1987/intellij-community,izonder/intellij-community,kool79/intellij-community,izonder/intellij-community,alphafoobar/intellij-community,fengbaicanhe/intellij-community,Lekanich/intellij-community,lucafavatella/intellij-community,signed/intellij-community,supersven/intellij-community,SerCeMan/intellij-community,slisson/intellij-community,amith01994/intellij-community,mglukhikh/intellij-community,robovm/robovm-studio,blademainer/intellij-community,ftomassetti/intellij-community,tmpgit/intellij-community,salguarnieri/intellij-community,dslomov/intellij-community,ibinti/intellij-community,fitermay/intellij-community,suncycheng/intellij-community,FHannes/intellij-community,MER-GROUP/intellij-community,SerCeMan/intellij-community,holmes/intellij-community,orekyuu/intellij-community,gnuhub/intellij-community,fengbaicanhe/intellij-community,fnouama/intellij-community,ibinti/intellij-community,hurricup/intellij-community,adedayo/intellij-community,kool79/intellij-community,FHannes/intellij-community,orekyuu/intellij-community,MichaelNedzelsky/intellij-community,vladmm/intellij-community,ibinti/intellij-community,samthor/intellij-community,holmes/intellij-community,fitermay/intellij-community,SerCeMan/intellij-community,orekyuu/intellij-community,Lekanich/intellij-community,allotria/intellij-community,Lekanich/intellij-community,clumsy/intellij-community,ahb0327/intellij-community,fitermay/intellij-community,apixandru/intellij-community,alphafoobar/intellij-community,clumsy/intellij-community,fengbaicanhe/intellij-community,salguarnieri/intellij-community,semonte/intellij-community,hurricup/intellij-community,samthor/intellij-community,slisson/intellij-community,michaelgallacher/intellij-community,semonte/intellij-community,youdonghai/intellij-community,MER-GROUP/intellij-community,apixandru/intellij-community,ahb0327/intellij-community,nicolargo/intellij-community,alphafoobar/intellij-community,vvv1559/intellij-community,caot/intellij-community,Lekanich/intellij-community,youdonghai/intellij-community,kool79/intellij-community,TangHao1987/intellij-community,diorcety/intellij-community,michaelgallacher/intellij-community,vvv1559/intellij-community,MichaelNedzelsky/intellij-community,tmpgit/intellij-community,blademainer/intellij-community,allotria/intellij-community,nicolargo/intellij-community,retomerz/intellij-community,ThiagoGarciaAlves/intellij-community,retomerz/intellij-community,ryano144/intellij-community,gnuhub/intell
ij-community,nicolargo/intellij-community,slisson/intellij-community,clumsy/intellij-community,fnouama/intellij-community,mglukhikh/intellij-community,vladmm/intellij-community,MichaelNedzelsky/intellij-community,idea4bsd/idea4bsd,youdonghai/intellij-community,akosyakov/intellij-community,supersven/intellij-community,ahb0327/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,hurricup/intellij-community,suncycheng/intellij-community,ahb0327/intellij-community,ol-loginov/intellij-community,adedayo/intellij-community,asedunov/intellij-community,da1z/intellij-community,da1z/intellij-community,izonder/intellij-community,adedayo/intellij-community,caot/intellij-community,allotria/intellij-community,youdonghai/intellij-community,muntasirsyed/intellij-community,retomerz/intellij-community,gnuhub/intellij-community,kdwink/intellij-community,ol-loginov/intellij-community,holmes/intellij-community,semonte/intellij-community,robovm/robovm-studio,wreckJ/intellij-community,fnouama/intellij-community,da1z/intellij-community,izonder/intellij-community,kool79/intellij-community,ibinti/intellij-community,gnuhub/intellij-community,dslomov/intellij-community,gnuhub/intellij-community,slisson/intellij-community,idea4bsd/idea4bsd,SerCeMan/intellij-community,idea4bsd/idea4bsd,tmpgit/intellij-community,ahb0327/intellij-community,signed/intellij-community,wreckJ/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,akosyakov/intellij-community,hurricup/intellij-community,apixandru/intellij-community,holmes/intellij-community,kdwink/intellij-community,kdwink/intellij-community,fnouama/intellij-community,akosyakov/intellij-community,ahb0327/intellij-community,supersven/intellij-community,signed/intellij-community,ftomassetti/intellij-community,suncycheng/intellij-community,youdonghai/intellij-community,kool79/intellij-community,pwoodworth/intellij-community,ryano144/intellij-community,adedayo/intellij-community,robovm/robovm-studio,jagguli/intellij-community,youdonghai/intellij-community,michaelgallacher/intellij-community,ryano144/intellij-community,MichaelNedzelsky/intellij-community,suncycheng/intellij-community,MER-GROUP/intellij-community,blademainer/intellij-community,petteyg/intellij-community,holmes/intellij-community,ivan-fedorov/intellij-community,pwoodworth/intellij-community,semonte/intellij-community,suncycheng/intellij-community,izonder/intellij-community,slisson/intellij-community,youdonghai/intellij-community,akosyakov/intellij-community,clumsy/intellij-community,amith01994/intellij-community,kdwink/intellij-community,signed/intellij-community,petteyg/intellij-community,diorcety/intellij-community,lucafavatella/intellij-community,retomerz/intellij-community,suncycheng/intellij-community,apixandru/intellij-community,MER-GROUP/intellij-community,da1z/intellij-community,signed/intellij-community,ftomassetti/intellij-community,alphafoobar/intellij-community,youdonghai/intellij-community,mglukhikh/intellij-community,dslomov/intellij-community,SerCeMan/intellij-community,kdwink/intellij-community,salguarnieri/intellij-community,xfournet/intellij-community,wreckJ/intellij-community,TangHao1987/intellij-community,ivan-fedorov/intellij-community,caot/intellij-community,lucafavatella/intellij-community,idea4bsd/idea4bsd,ahb0327/intellij-community,retomerz/intellij-community,nicolargo/intellij-community,Distrotech/intellij-community,robovm/robovm-studio,allotria/intellij-community,xfournet/intellij-community,vvv1559/intellij-community,TangHao1987/intellij-community,t
mpgit/intellij-community,ftomassetti/intellij-community,amith01994/intellij-community,MichaelNedzelsky/intellij-community,muntasirsyed/intellij-community,fnouama/intellij-community,pwoodworth/intellij-community,semonte/intellij-community,caot/intellij-community,caot/intellij-community,akosyakov/intellij-community,amith01994/intellij-community,MER-GROUP/intellij-community,vvv1559/intellij-community,Distrotech/intellij-community,xfournet/intellij-community,fnouama/intellij-community,signed/intellij-community,alphafoobar/intellij-community,diorcety/intellij-community,akosyakov/intellij-community,TangHao1987/intellij-community,allotria/intellij-community,dslomov/intellij-community,youdonghai/intellij-community,slisson/intellij-community,gnuhub/intellij-community,asedunov/intellij-community,wreckJ/intellij-community,petteyg/intellij-community,amith01994/intellij-community,MER-GROUP/intellij-community,holmes/intellij-community,samthor/intellij-community,apixandru/intellij-community,muntasirsyed/intellij-community,Lekanich/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,youdonghai/intellij-community,TangHao1987/intellij-community,SerCeMan/intellij-community,TangHao1987/intellij-community,ibinti/intellij-community,ivan-fedorov/intellij-community,dslomov/intellij-community,ThiagoGarciaAlves/intellij-community,lucafavatella/intellij-community,hurricup/intellij-community,jagguli/intellij-community,allotria/intellij-community,blademainer/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,robovm/robovm-studio,vvv1559/intellij-community,samthor/intellij-community,Distrotech/intellij-community,fengbaicanhe/intellij-community,robovm/robovm-studio,ftomassetti/intellij-community,slisson/intellij-community,dslomov/intellij-community,wreckJ/intellij-community,vvv1559/intellij-community,adedayo/intellij-community,ivan-fedorov/intellij-community,suncycheng/intellij-community,fengbaicanhe/intellij-community,vvv1559/intellij-community,akosyakov/intellij-community,Distrotech/intellij-community,ol-loginov/intellij-community,idea4bsd/idea4bsd,idea4bsd/idea4bsd,asedunov/intellij-community,caot/intellij-community,samthor/intellij-community,lucafavatella/intellij-community,mglukhikh/intellij-community,fengbaicanhe/intellij-community,caot/intellij-community,clumsy/intellij-community,amith01994/intellij-community,nicolargo/intellij-community,vvv1559/intellij-community,MichaelNedzelsky/intellij-community,petteyg/intellij-community,da1z/intellij-community,slisson/intellij-community,xfournet/intellij-community,fnouama/intellij-community,SerCeMan/intellij-community,diorcety/intellij-community,kool79/intellij-community,samthor/intellij-community,lucafavatella/intellij-community,ivan-fedorov/intellij-community,allotria/intellij-community,hurricup/intellij-community,xfournet/intellij-community,lucafavatella/intellij-community,da1z/intellij-community,kdwink/intellij-community,kool79/intellij-community,retomerz/intellij-community,tmpgit/intellij-community,ivan-fedorov/intellij-community,supersven/intellij-community,ibinti/intellij-community,asedunov/intellij-community,fitermay/intellij-community,samthor/intellij-community,samthor/intellij-community,holmes/intellij-community,ivan-fedorov/intellij-community,blademainer/intellij-community,salguarnieri/intellij-community,MER-GROUP/intellij-community,allotria/intellij-community,FHannes/intellij-community,clumsy/intellij-community,ol-loginov/intellij-community,samthor/intellij-community,pwoodworth/intellij-c
ommunity,fengbaicanhe/intellij-community,signed/intellij-community,FHannes/intellij-community,blademainer/intellij-community,semonte/intellij-community,ThiagoGarciaAlves/intellij-community,salguarnieri/intellij-community,semonte/intellij-community,adedayo/intellij-community,izonder/intellij-community,pwoodworth/intellij-community,allotria/intellij-community,akosyakov/intellij-community,petteyg/intellij-community,petteyg/intellij-community,ibinti/intellij-community,allotria/intellij-community,MER-GROUP/intellij-community,pwoodworth/intellij-community,ftomassetti/intellij-community,hurricup/intellij-community,hurricup/intellij-community,supersven/intellij-community,lucafavatella/intellij-community,fnouama/intellij-community,salguarnieri/intellij-community,fitermay/intellij-community,nicolargo/intellij-community,allotria/intellij-community,blademainer/intellij-community,ryano144/intellij-community,slisson/intellij-community,kdwink/intellij-community,ahb0327/intellij-community,semonte/intellij-community,gnuhub/intellij-community,caot/intellij-community,pwoodworth/intellij-community,hurricup/intellij-community,Distrotech/intellij-community,MER-GROUP/intellij-community,jagguli/intellij-community,ftomassetti/intellij-community,nicolargo/intellij-community,gnuhub/intellij-community,MichaelNedzelsky/intellij-community,MER-GROUP/intellij-community,michaelgallacher/intellij-community,michaelgallacher/intellij-community,michaelgallacher/intellij-community,ahb0327/intellij-community,Lekanich/intellij-community,MichaelNedzelsky/intellij-community,ryano144/intellij-community,dslomov/intellij-community,caot/intellij-community,ibinti/intellij-community,lucafavatella/intellij-community,hurricup/intellij-community,jagguli/intellij-community,mglukhikh/intellij-community,orekyuu/intellij-community,tmpgit/intellij-community,slisson/intellij-community,asedunov/intellij-community,kdwink/intellij-community,FHannes/intellij-community,ryano144/intellij-community,supersven/intellij-community,hurricup/intellij-community,fengbaicanhe/intellij-community,supersven/intellij-community,da1z/intellij-community,idea4bsd/idea4bsd,vvv1559/intellij-community,idea4bsd/idea4bsd,xfournet/intellij-community,supersven/intellij-community,vladmm/intellij-community,salguarnieri/intellij-community,da1z/intellij-community,idea4bsd/idea4bsd,signed/intellij-community,alphafoobar/intellij-community,asedunov/intellij-community,ol-loginov/intellij-community,jagguli/intellij-community,ol-loginov/intellij-community,fnouama/intellij-community,nicolargo/intellij-community,pwoodworth/intellij-community,gnuhub/intellij-community,MichaelNedzelsky/intellij-community,nicolargo/intellij-community,ol-loginov/intellij-community,fitermay/intellij-community,supersven/intellij-community,caot/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,tmpgit/intellij-community,xfournet/intellij-community,tmpgit/intellij-community,fnouama/intellij-community,wreckJ/intellij-community,retomerz/intellij-community,Lekanich/intellij-community,fitermay/intellij-community,signed/intellij-community,robovm/robovm-studio,MichaelNedzelsky/intellij-community,holmes/intellij-community,Distrotech/intellij-community,orekyuu/intellij-community,fnouama/intellij-community,kdwink/intellij-community,izonder/intellij-community,akosyakov/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,petteyg/intellij-community,Distrotech/intellij-community,diorcety/intellij-community,da1z/intellij-community,Distrotech/intellij-community,adedayo/intellij-c
ommunity,fitermay/intellij-community,wreckJ/intellij-community,vladmm/intellij-community,Distrotech/intellij-community,ibinti/intellij-community,salguarnieri/intellij-community,adedayo/intellij-community,xfournet/intellij-community,ryano144/intellij-community,kdwink/intellij-community,xfournet/intellij-community,ibinti/intellij-community,fengbaicanhe/intellij-community,retomerz/intellij-community,Lekanich/intellij-community,amith01994/intellij-community,orekyuu/intellij-community,ThiagoGarciaAlves/intellij-community,muntasirsyed/intellij-community,holmes/intellij-community,xfournet/intellij-community,alphafoobar/intellij-community,samthor/intellij-community,fitermay/intellij-community,ftomassetti/intellij-community,retomerz/intellij-community,izonder/intellij-community,pwoodworth/intellij-community,ftomassetti/intellij-community,diorcety/intellij-community,da1z/intellij-community,Lekanich/intellij-community,diorcety/intellij-community,idea4bsd/idea4bsd,muntasirsyed/intellij-community,samthor/intellij-community,orekyuu/intellij-community,jagguli/intellij-community,izonder/intellij-community,petteyg/intellij-community,mglukhikh/intellij-community,gnuhub/intellij-community,kool79/intellij-community,caot/intellij-community,hurricup/intellij-community,alphafoobar/intellij-community,adedayo/intellij-community,alphafoobar/intellij-community,robovm/robovm-studio,dslomov/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,petteyg/intellij-community,da1z/intellij-community,vladmm/intellij-community,ahb0327/intellij-community,Lekanich/intellij-community,ryano144/intellij-community,clumsy/intellij-community,kdwink/intellij-community,TangHao1987/intellij-community,da1z/intellij-community,ibinti/intellij-community,hurricup/intellij-community,slisson/intellij-community,TangHao1987/intellij-community,muntasirsyed/intellij-community,nicolargo/intellij-community,FHannes/intellij-community,mglukhikh/intellij-community,vladmm/intellij-community,ol-loginov/intellij-community,amith01994/intellij-community,semonte/intellij-community,apixandru/intellij-community,tmpgit/intellij-community,petteyg/intellij-community,ibinti/intellij-community,signed/intellij-community,apixandru/intellij-community,jagguli/intellij-community,ahb0327/intellij-community,apixandru/intellij-community,blademainer/intellij-community,Distrotech/intellij-community,vladmm/intellij-community,ol-loginov/intellij-community,youdonghai/intellij-community,kool79/intellij-community,gnuhub/intellij-community,SerCeMan/intellij-community,lucafavatella/intellij-community,FHannes/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,fitermay/intellij-community,youdonghai/intellij-community,orekyuu/intellij-community,adedayo/intellij-community,amith01994/intellij-community,amith01994/intellij-community,MER-GROUP/intellij-community,pwoodworth/intellij-community,asedunov/intellij-community,ryano144/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,alphafoobar/intellij-community,alphafoobar/intellij-community,dslomov/intellij-community,fengbaicanhe/intellij-community,FHannes/intellij-community,signed/intellij-community,alphafoobar/intellij-community,supersven/intellij-community,holmes/intellij-community,tmpgit/intellij-community,asedunov/intellij-community,michaelgallacher/intellij-community,amith01994/intellij-community,ahb0327/intellij-community,dslomov/intellij-community,robovm/robovm-studio,apixandru/intellij-community,muntasirsyed/intellij-community,diorcety/intellij-communi
ty,kool79/intellij-community,semonte/intellij-community,semonte/intellij-community,kool79/intellij-community,vladmm/intellij-community,supersven/intellij-community,ftomassetti/intellij-community,muntasirsyed/intellij-community,vvv1559/intellij-community,wreckJ/intellij-community,ftomassetti/intellij-community,allotria/intellij-community,supersven/intellij-community,blademainer/intellij-community,ibinti/intellij-community,jagguli/intellij-community,wreckJ/intellij-community,xfournet/intellij-community,idea4bsd/idea4bsd,vladmm/intellij-community,wreckJ/intellij-community,wreckJ/intellij-community,muntasirsyed/intellij-community,diorcety/intellij-community,Lekanich/intellij-community,da1z/intellij-community,ivan-fedorov/intellij-community,asedunov/intellij-community,diorcety/intellij-community,nicolargo/intellij-community,ol-loginov/intellij-community,adedayo/intellij-community,muntasirsyed/intellij-community,orekyuu/intellij-community,mglukhikh/intellij-community,nicolargo/intellij-community,tmpgit/intellij-community,ryano144/intellij-community,ivan-fedorov/intellij-community,caot/intellij-community,ryano144/intellij-community,robovm/robovm-studio,FHannes/intellij-community,clumsy/intellij-community,apixandru/intellij-community,diorcety/intellij-community,youdonghai/intellij-community,ThiagoGarciaAlves/intellij-community,robovm/robovm-studio,retomerz/intellij-community,dslomov/intellij-community,robovm/robovm-studio,retomerz/intellij-community,jagguli/intellij-community,ftomassetti/intellij-community,blademainer/intellij-community,clumsy/intellij-community,clumsy/intellij-community,semonte/intellij-community,kool79/intellij-community,blademainer/intellij-community,kdwink/intellij-community,akosyakov/intellij-community,salguarnieri/intellij-community,FHannes/intellij-community,Lekanich/intellij-community,Distrotech/intellij-community,orekyuu/intellij-community,signed/intellij-community,vladmm/intellij-community,orekyuu/intellij-community,ThiagoGarciaAlves/intellij-community,fitermay/intellij-community,orekyuu/intellij-community,fengbaicanhe/intellij-community,samthor/intellij-community,amith01994/intellij-community,Distrotech/intellij-community,FHannes/intellij-community,ol-loginov/intellij-community,michaelgallacher/intellij-community,salguarnieri/intellij-community,lucafavatella/intellij-community,jagguli/intellij-community,izonder/intellij-community,akosyakov/intellij-community,SerCeMan/intellij-community,ryano144/intellij-community,salguarnieri/intellij-community,MichaelNedzelsky/intellij-community,suncycheng/intellij-community,michaelgallacher/intellij-community,clumsy/intellij-community,dslomov/intellij-community,ThiagoGarciaAlves/intellij-community,petteyg/intellij-community,SerCeMan/intellij-community,wreckJ/intellij-community,ivan-fedorov/intellij-community,muntasirsyed/intellij-community,vladmm/intellij-community,lucafavatella/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,TangHao1987/intellij-community,fnouama/intellij-community,diorcety/intellij-community,petteyg/intellij-community,pwoodworth/intellij-community,MichaelNedzelsky/intellij-community,blademainer/intellij-community,holmes/intellij-community,gnuhub/intellij-community,holmes/intellij-community,suncycheng/intellij-community,clumsy/intellij-community,jagguli/intellij-community,ivan-fedorov/intellij-community,retomerz/intellij-community,michaelgallacher/intellij-community
|
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated on Wed Nov 07 17:26:02 MSK 2007
// DTD/Schema : plugin.dtd
package org.jetbrains.idea.devkit.dom;
import com.intellij.util.xml.Convert;
import com.intellij.util.xml.GenericAttributeValue;
import com.intellij.util.xml.GenericDomValue;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.idea.devkit.dom.impl.IdeaPluginConverter;
@Convert(IdeaPluginConverter.class)
public interface Dependency extends GenericDomValue<IdeaPlugin> {
@NotNull
GenericAttributeValue<Boolean> getOptional();
@NotNull
GenericAttributeValue<String> getConfigFile();
}
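For context, a minimal illustrative sketch of how this DOM interface might be consumed after the "optional" attribute became Boolean. IdeaPlugin#getDepends() is assumed here purely for iteration (named for illustration); getOptional() and getConfigFile() are the accessors declared above, and getStringValue()/getValue() come from the generic DOM API.
// Illustrative only: iterate a plugin's <depends> entries and report the optional ones.
package org.jetbrains.idea.devkit.dom.example;

import java.util.List;
import org.jetbrains.idea.devkit.dom.Dependency;
import org.jetbrains.idea.devkit.dom.IdeaPlugin;

class OptionalDependencyReporter {
  // Prints every optional <depends> entry together with its config file, if any.
  static void report(IdeaPlugin plugin) {
    List<Dependency> depends = plugin.getDepends(); // assumed accessor on IdeaPlugin
    for (Dependency dependency : depends) {
      Boolean optional = dependency.getOptional().getValue(); // Boolean after this change
      if (Boolean.TRUE.equals(optional)) {
        System.out.println(
            dependency.getStringValue() + " is optional, config file: "
                + dependency.getConfigFile().getStringValue());
      }
    }
  }
}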
|
plugins/devkit/src/dom/Dependency.java
|
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated on Wed Nov 07 17:26:02 MSK 2007
// DTD/Schema : plugin.dtd
package org.jetbrains.idea.devkit.dom;
import com.intellij.util.xml.Convert;
import com.intellij.util.xml.GenericAttributeValue;
import com.intellij.util.xml.GenericDomValue;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.idea.devkit.dom.impl.IdeaPluginConverter;
@Convert(IdeaPluginConverter.class)
public interface Dependency extends GenericDomValue<IdeaPlugin> {
@NotNull
GenericAttributeValue<String> getOptional();
@NotNull
GenericAttributeValue<String> getConfigFile();
}
|
DevKit: <depends> "optional" -> boolean
|
plugins/devkit/src/dom/Dependency.java
|
DevKit: <depends> "optional" -> boolean
|
<ide><path>lugins/devkit/src/dom/Dependency.java
<ide> /*
<del> * Copyright 2000-2009 JetBrains s.r.o.
<add> * Copyright 2000-2013 JetBrains s.r.o.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide>
<ide> @Convert(IdeaPluginConverter.class)
<ide> public interface Dependency extends GenericDomValue<IdeaPlugin> {
<del> @NotNull
<del> GenericAttributeValue<String> getOptional();
<del> @NotNull
<del> GenericAttributeValue<String> getConfigFile();
<add> @NotNull
<add> GenericAttributeValue<Boolean> getOptional();
<add>
<add> @NotNull
<add> GenericAttributeValue<String> getConfigFile();
<ide> }
|
|
JavaScript
|
bsd-2-clause
|
867cad3ee21827b9f48878f5a72edaa6f4afb9c9
| 0 |
pingwing/giap-mapy,pingwing/giap-mapy,pingwing/giap-mapy,pingwing/giap-mapy,pingwing/giap-mapy
|
//default language code, can be overwritten with lang parameter in URL
var lang = "en"; //for available codes see array availableLanguages in file GlobalOptions.js
//Help file (must be a local file)
var helpfile = "help_en.html";
//Servername (optional) and path and name of QGIS mapserver FCGI-file
//either with or without server-name - without servername recommended for easier porting to other servers
//do not add a ? or & after the .fcgi extension
var serverAndCGI = "/cgi-bin/qgis_mapserv.fcgi";
//Define whether you want to use the GetProjectSettings extension of QGIS Server
//for more configuration options in the project.
//Set this to false to use GetCapabilities for older QGIS Server versions (<= 1.8).
var useGetProjectSettings = true;
// show the layerOrderTab in the GUI
var showLayerOrderTab = true;
// use geodesic measures, i.e. not planar measures
// this is useful if a projection with high distortion of length/area is used, e.g. GoogleMercator
var useGeodesicMeasurement = true;
//search box for queries while typing
//enable to use GeoNames search
var useGeoNamesSearchBox = true;
//URL for custom search scripts
var searchBoxQueryURL = null; // "/wsgi/search.wsgi?query=";
var searchBoxGetGeomURL = null; // "/wsgi/getSearchGeom.wsgi";
//use a URL shortener for your permalink function
var permaLinkURLShortener = null; // "/wsgi/createShortPermalink.wsgi";
// enable to use commercial Google and Bing layers (also add BingApiKey in WebgisInit.js)
var enableBingCommercialMaps = false;
var enableGoogleCommercialMaps = true;
var enableBGMaps = false;
if (enableBingCommercialMaps || enableGoogleCommercialMaps) {
enableBGMaps = true;
}
// do not show fields in ObjectIdentification results that have null values
var suppressEmptyValues = true;
// hide geometry in ObjectIdentification results (should be only false if there is a good reason to do so)
var suppressInfoGeometry = true;
// do show field names in click-popup during object identification
var showFieldNamesInClickPopup = true;
// max-width and max-height of the feature-info popup can be controlled in site/css/popup.css
//config for QGIS.SearchPanel
var simpleWmsSearch = {
title: "Search continent",
query: 'simpleWmsSearch',
useWmsRequest: true,
queryLayer: "Country",
formItems: [
{
xtype: 'textfield',
name: 'name',
fieldLabel: "Name",
allowBlank: false,
blankText: "Please enter a name (e.g. 'africa')",
filterOp: "="
}
],
gridColumns: [
{header: 'Name', dataIndex: 'name', menuDisabled: 'true'}
],
selectionLayer: 'Country',
selectionZoom: 0,
doZoomToExtent: true
};
var urlRewriteSearch = {
title: "Search letter",
query: 'samplesearch',
formItems: [
{
xtype: 'hidden',
name: 'query',
value: 'samplesearch'
},
{
xtype: 'textfield',
name: 'colour',
fieldLabel: "Colour",
allowBlank: false,
blankText: "Please enter a colour (e.g. 'orange')"
}
],
gridColumns: [
{header: 'PKUID', dataIndex: 'pkuid', menuDisabled: 'true'},
{header: 'Colour', dataIndex: 'colour', menuDisabled: 'true'}
],
selectionLayer: 'Hello',
selectionZoom: 1
};
//list of configs for QGIS.SearchPanel per map name
var mapSearchPanelConfigs = {
"helloworld": [simpleWmsSearch, urlRewriteSearch]
};
//templates to define tooltips for a layer, shown during hover identification. The layer fields must be wrapped inside <%%> special tags.
//if a layer field named "tooltip" is found, its content takes precedence over this configuration
var tooltipTemplates = {
'Country':{
template: "Look for the country on Google Search: <a href='http://www.google.it/#output=search&q=<%name%>' target='_blank'><%name%></a>"
},
};
//define whether you want to display a map theme switcher
//note that you have to also link a gis-project-listing.js file containing a valid
//project listing structure - the root object is called 'gis_projects'
//have a look at the template file and documentation for the correct json structure
var mapThemeSwitcherActive = true;
//you can provide an alternative template for the theme-switcher - see also file ThemeSwitcher.js (ThemeSwitcher.prototype.initialize)
var themeSwitcherTemplate = null;
//first part of titlebar text
var titleBarText = "GIS-Browser - "; // will be appended with project title
// header logo image and link
var headerLogoImg = null; // path to image, set null for no logo
var headerLogoHeight = 60; // logo image height in pixels
var headerLogoLink = ""; // logo links to this URL
var headerTermsOfUseText = null; // set null for no link
var headerTermsOfUseLink = ""; // URL to terms of use
// optional project title per map name
var projectTitles = {
"helloworld": "Hello World"
};
//EPSG projection code of your QGIS project
var epsgcode = 3857;
//background transparency for the QGIS server generated layer (commercial background layers not affected)
//set to true if you want the background to be transparent, layer image will be bigger (32 vs 24bit)
var qgisLayerTransparency = true;
// OpenLayers global options
// see http://dev.openlayers.org/releases/OpenLayers-2.10/doc/apidocs/files/OpenLayers/Map-js.html
var MapOptions = {
projection: new OpenLayers.Projection("EPSG:"+epsgcode),
units: "m",
// maxScale:50,
// minScale:40000000,
// numZoomLevels:20,
fractionalZoom: enableBGMaps ? false : true,
transitionEffect:"resize",
controls: []
};
// Options for the main map layer (OpenLayers.layer)
//see http://dev.openlayers.org/releases/OpenLayers-2.12/doc/apidocs/files/OpenLayers/Layer-js.html
var LayerOptions = {
buffer:0,
singleTile:true,
ratio:1,
transitionEffect:"resize",
isBaseLayer: false,
projection:"EPSG:"+epsgcode,
yx: {"EPSG:900913": false}
// If your projection is known to have an inverse axis order in WMS 1.3 compared to WMS 1.1 enter true for yx.
// For EPSG:900913 OpenLayers should know it by default but because of a bug in OL 2.12 we enter it here.
};
//overview map settings - do not change variable names!
var OverviewMapOptions = {
projection: new OpenLayers.Projection("EPSG:"+epsgcode),
units: "m",
maxScale:50,
minScale:300000000,
transitionEffect:"resize"
};
var OverviewMapSize = new OpenLayers.Size(200,200);
var overviewLayer = new OpenLayers.Layer.WMS("Overview-Map",
"/cgi-bin/qgis_mapserv.fcgi?map=/home/web/qgis-web-client/projects/naturalearth_110million.qgs",
{layers:"Land",format:"image/png"},
{buffer:0,singleTile:true,transitionEffect:"resize"});
//print options - scales and dpi
var printCapabilities={
"scales":[
{"name":"1:100","value":"100"},
{"name":"1:200","value":"200"},
{"name":"1:250","value":"250"},
{"name":"1:500","value":"500"},
{"name":"1:1'000","value":"1000"},
{"name":"1:2'000","value":"2000"},
{"name":"1:3'000","value":"3000"},
{"name":"1:5'000","value":"5000"},
{"name":"1:7'500","value":"7500"},
{"name":"1:10'000","value":"10000"},
{"name":"1:12'000","value":"12000"},
{"name":"1:15'000","value":"15000"},
{"name":"1:20'000","value":"20000"},
{"name":"1:25'000","value":"25000"},
{"name":"1:30'000","value":"30000"},
{"name":"1:50'000","value":"50000"},
{"name":"1:75'000","value":"75000"},
{"name":"1:100'000","value":"100000"},
{"name":"1:250'000","value":"250000"},
{"name":"1:500'000","value":"500000"},
{"name":"1:750'000","value":"750000"},
{"name":"1:1'000'000","value":"1000000"},
{"name":"1:2'500'000","value":"2500000"},
{"name":"1:5'000'000","value":"5000000"},
{"name":"1:7'500'000","value":"7500000"},
{"name":"1:10'000'000","value":"10000000"},
{"name":"1:15'000'000","value":"15000000"},
{"name":"1:20'000'000","value":"20000000"},
{"name":"1:25'000'000","value":"25000000"},
{"name":"1:30'000'000","value":"30000000"},
{"name":"1:35'000'000","value":"35000000"},
{"name":"1:50'000'000","value":"50000000"},
{"name":"1:60'000'000","value":"60000000"},
{"name":"1:75'000'000","value":"75000000"},
{"name":"1:100'000'000","value":"100000000"},
{"name":"1:125'000'000","value":"125000000"},
{"name":"1:150'000'000","value":"150000000"}
],
"dpis":[
{"name":"150 dpi","value":"150"},
{"name":"300 dpi","value":"300"},
{"name":"600 dpi","value":"600"},
{"name":"1200 dpi","value":"1200"}
],
"layouts":[]
};
// <------------ No changes should be needed below here ------------------>
//new namespace for QGIS extensions
//do not modify those three lines
if (!window.QGIS) {
window.QGIS = {};
}
//styling definitions for highlightLayer
//is used for highlighting features (GetFeatureInfo and search result visualization)
//see http://dev.openlayers.org/releases/OpenLayers-2.10/doc/apidocs/files/OpenLayers/Style-js.html
var symbolizersHighLightLayer = {
"Point": {
pointRadius: 4,
graphicName: "circle",
fillColor: "#FF8C00",
fillOpacity: 0.3,
strokeWidth: 1,
strokeColor: "#FF8C00"
},
"Line": {
strokeWidth: 3,
strokeOpacity: 1,
strokeColor: "#FF8C00",
strokeDashstyle: "dash"
},
"Polygon": {
strokeWidth: 2,
strokeColor: "#FF8C00",
fillColor: "none"
}
};
//styling for measure controls (distance and area)
var sketchSymbolizersMeasureControls = {
"Point": {
pointRadius: 4,
graphicName: "square",
fillColor: "#FFFFFF",
fillOpacity: 1,
strokeWidth: 1,
strokeOpacity: 1,
strokeColor: "#FF0000"
},
"Line": {
strokeWidth: 3,
strokeOpacity: 1,
strokeColor: "#FF0000",
strokeDashstyle: "dash"
},
"Polygon": {
strokeWidth: 2,
strokeOpacity: 1,
strokeColor: "#FF0000",
fillColor: "#FFFFFF",
fillOpacity: 0.3
}
};
|
site/js/GlobalOptions.js
|
//default language code, can be overwritten with lang parameter in URL
var lang = "en"; //for available codes see array availableLanguages in file GlobalOptions.js
//Help file (must be a local file)
var helpfile = "help_en.html";
//Servername (optional) and path and name of QGIS mapserver FCGI-file
//either with or without server-name - without servername recommended for easier porting to other servers
//do not add a ? or & after the .fcgi extension
var serverAndCGI = "/cgi-bin/qgis_mapserv.fcgi";
//Define whether you want to use the GetProjectSettings extension of QGIS Server
//for more configuration options in the project.
//Set this to false to use GetCapabilities for older QGIS Server versions (<= 1.8).
var useGetProjectSettings = true;
// show the layerOrderTab in the GUI
var showLayerOrderTab = true;
// use geodesic measures, i.e. not planar measures
// this is useful if a projection with high distortion of length/area is used, e.g. GoogleMercator
var useGeodesicMeasurement = true;
//search box for queries while typing
//enable to use GeoNames search
var useGeoNamesSearchBox = true;
//URL for custom search scripts
var searchBoxQueryURL = null; // "/wsgi/search.wsgi?query=";
var searchBoxGetGeomURL = null; // "/wsgi/getSearchGeom.wsgi";
// enable to use commercial Google and Bing layers (also add BingApiKey in WebgisInit.js)
var enableBingCommercialMaps = false;
var enableGoogleCommercialMaps = true;
var enableBGMaps = false;
if (enableBingCommercialMaps || enableGoogleCommercialMaps) {
enableBGMaps = true;
}
// do not show fields in ObjectIdentification results that have null values
var suppressEmptyValues = true;
// hide geometry in ObjectIdentification results (should be only false if there is a good reason to do so)
var suppressInfoGeometry = true;
// do show field names in click-popup during object identification
var showFieldNamesInClickPopup = true;
// max-width and max-height of the feature-info popup can be controlled in site/css/popup.css
//config for QGIS.SearchPanel
var simpleWmsSearch = {
title: "Search continent",
query: 'simpleWmsSearch',
useWmsRequest: true,
queryLayer: "Country",
formItems: [
{
xtype: 'textfield',
name: 'name',
fieldLabel: "Name",
allowBlank: false,
blankText: "Please enter a name (e.g. 'africa')",
filterOp: "="
}
],
gridColumns: [
{header: 'Name', dataIndex: 'name', menuDisabled: 'true'}
],
selectionLayer: 'Country',
selectionZoom: 0,
doZoomToExtent: true
};
var urlRewriteSearch = {
title: "Search letter",
query: 'samplesearch',
formItems: [
{
xtype: 'hidden',
name: 'query',
value: 'samplesearch'
},
{
xtype: 'textfield',
name: 'colour',
fieldLabel: "Colour",
allowBlank: false,
blankText: "Please enter a colour (e.g. 'orange')"
}
],
gridColumns: [
{header: 'PKUID', dataIndex: 'pkuid', menuDisabled: 'true'},
{header: 'Colour', dataIndex: 'colour', menuDisabled: 'true'}
],
selectionLayer: 'Hello',
selectionZoom: 1
};
//list of configs for QGIS.SearchPanel per map name
var mapSearchPanelConfigs = {
"helloworld": [simpleWmsSearch, urlRewriteSearch]
};
//templates to define tooltips for a layer, shown during hover identification. The layer fields must be wrapped inside <%%> special tags.
//if a layer field named "tooltip" is found, its content takes precedence over this configuration
var tooltipTemplates = {
'Country':{
template: "Look for the country on Google Search: <a href='http://www.google.it/#output=search&q=<%name%>' target='_blank'><%name%></a>"
},
};
//define whether you want to display a map theme switcher
//note that you have to also link a gis-project-listing.js file containing a valid
//project listing structure - the root object is called 'gis_projects'
//have a look at the template file and documentation for the correct json structure
var mapThemeSwitcherActive = true;
//you can provide an alternative template for the theme-switcher - see also file ThemeSwitcher.js (ThemeSwitcher.prototype.initialize)
var themeSwitcherTemplate = null;
//first part of titlebar text
var titleBarText = "GIS-Browser - "; // will be appended with project title
// header logo image and link
var headerLogoImg = null; // path to image, set null for no logo
var headerLogoHeight = 60; // logo image height in pixels
var headerLogoLink = ""; // logo links to this URL
var headerTermsOfUseText = null; // set null for no link
var headerTermsOfUseLink = ""; // URL to terms of use
// optional project title per map name
var projectTitles = {
"helloworld": "Hello World"
};
//EPSG projection code of your QGIS project
var epsgcode = 3857;
//background transparency for the QGIS server generated layer (commercial background layers not affected)
//set to true if you want the background to be transparent, layer image will be bigger (32 vs 24bit)
var qgisLayerTransparency = true;
// OpenLayers global options
// see http://dev.openlayers.org/releases/OpenLayers-2.10/doc/apidocs/files/OpenLayers/Map-js.html
var MapOptions = {
projection: new OpenLayers.Projection("EPSG:"+epsgcode),
units: "m",
// maxScale:50,
// minScale:40000000,
// numZoomLevels:20,
fractionalZoom: enableBGMaps ? false : true,
transitionEffect:"resize",
controls: []
};
// Options for the main map layer (OpenLayers.layer)
//see http://dev.openlayers.org/releases/OpenLayers-2.12/doc/apidocs/files/OpenLayers/Layer-js.html
var LayerOptions = {
buffer:0,
singleTile:true,
ratio:1,
transitionEffect:"resize",
isBaseLayer: false,
projection:"EPSG:"+epsgcode,
yx: {"EPSG:900913": false}
// If your projection is known to have an inverse axis order in WMS 1.3 compared to WMS 1.1 enter true for yx.
// For EPSG:900913 OpenLayers should know it by default but because of a bug in OL 2.12 we enter it here.
};
//overview map settings - do not change variable names!
var OverviewMapOptions = {
projection: new OpenLayers.Projection("EPSG:"+epsgcode),
units: "m",
maxScale:50,
minScale:300000000,
transitionEffect:"resize"
};
var OverviewMapSize = new OpenLayers.Size(200,200);
var overviewLayer = new OpenLayers.Layer.WMS("Overview-Map",
"/cgi-bin/qgis_mapserv.fcgi?map=/home/web/qgis-web-client/projects/naturalearth_110million.qgs",
{layers:"Land",format:"image/png"},
{buffer:0,singleTile:true,transitionEffect:"resize"});
//print options - scales and dpi
var printCapabilities={
"scales":[
{"name":"1:100","value":"100"},
{"name":"1:200","value":"200"},
{"name":"1:250","value":"250"},
{"name":"1:500","value":"500"},
{"name":"1:1'000","value":"1000"},
{"name":"1:2'000","value":"2000"},
{"name":"1:3'000","value":"3000"},
{"name":"1:5'000","value":"5000"},
{"name":"1:7'500","value":"7500"},
{"name":"1:10'000","value":"10000"},
{"name":"1:12'000","value":"12000"},
{"name":"1:15'000","value":"15000"},
{"name":"1:20'000","value":"20000"},
{"name":"1:25'000","value":"25000"},
{"name":"1:30'000","value":"30000"},
{"name":"1:50'000","value":"50000"},
{"name":"1:75'000","value":"75000"},
{"name":"1:100'000","value":"100000"},
{"name":"1:250'000","value":"250000"},
{"name":"1:500'000","value":"500000"},
{"name":"1:750'000","value":"750000"},
{"name":"1:1'000'000","value":"1000000"},
{"name":"1:2'500'000","value":"2500000"},
{"name":"1:5'000'000","value":"5000000"},
{"name":"1:7'500'000","value":"7500000"},
{"name":"1:10'000'000","value":"10000000"},
{"name":"1:15'000'000","value":"15000000"},
{"name":"1:20'000'000","value":"20000000"},
{"name":"1:25'000'000","value":"25000000"},
{"name":"1:30'000'000","value":"30000000"},
{"name":"1:35'000'000","value":"35000000"},
{"name":"1:50'000'000","value":"50000000"},
{"name":"1:60'000'000","value":"60000000"},
{"name":"1:75'000'000","value":"75000000"},
{"name":"1:100'000'000","value":"100000000"},
{"name":"1:125'000'000","value":"125000000"},
{"name":"1:150'000'000","value":"150000000"}
],
"dpis":[
{"name":"150 dpi","value":"150"},
{"name":"300 dpi","value":"300"},
{"name":"600 dpi","value":"600"},
{"name":"1200 dpi","value":"1200"}
],
"layouts":[]
};
// <------------ No changes should be needed below here ------------------>
//new namespace for QGIS extensions
//do not modify those three lines
if (!window.QGIS) {
window.QGIS = {};
}
//styling definitions for highlightLayer
//is used for highlighting features (GetFeatureInfo and search result visualization)
//see http://dev.openlayers.org/releases/OpenLayers-2.10/doc/apidocs/files/OpenLayers/Style-js.html
var symbolizersHighLightLayer = {
"Point": {
pointRadius: 4,
graphicName: "circle",
fillColor: "#FF8C00",
fillOpacity: 0.3,
strokeWidth: 1,
strokeColor: "#FF8C00"
},
"Line": {
strokeWidth: 3,
strokeOpacity: 1,
strokeColor: "#FF8C00",
strokeDashstyle: "dash"
},
"Polygon": {
strokeWidth: 2,
strokeColor: "#FF8C00",
fillColor: "none"
}
};
//styling for measure controls (distance and area)
var sketchSymbolizersMeasureControls = {
"Point": {
pointRadius: 4,
graphicName: "square",
fillColor: "#FFFFFF",
fillOpacity: 1,
strokeWidth: 1,
strokeOpacity: 1,
strokeColor: "#FF0000"
},
"Line": {
strokeWidth: 3,
strokeOpacity: 1,
strokeColor: "#FF0000",
strokeDashstyle: "dash"
},
"Polygon": {
strokeWidth: 2,
strokeOpacity: 1,
strokeColor: "#FF0000",
fillColor: "#FFFFFF",
fillOpacity: 0.3
}
};
|
Reenable shortPermalinks which disappeared
|
site/js/GlobalOptions.js
|
Reenable shortPermalinks which disappeared
|
<ide><path>ite/js/GlobalOptions.js
<ide> //URL for custom search scripts
<ide> var searchBoxQueryURL = null; // "/wsgi/search.wsgi?query=";
<ide> var searchBoxGetGeomURL = null; // "/wsgi/getSearchGeom.wsgi";
<add>
<add>//use a URL shortener for your permalink function
<add>var permaLinkURLShortener = null; // "/wsgi/createShortPermalink.wsgi";
<ide>
<ide> // enable to use commercial Google and Bing layers (also add BingApiKey in WebgisInit.js)
<ide> var enableBingCommercialMaps = false;
|
|
Java
|
apache-2.0
|
1378f7c8107a3a38558049d2beb42024dbc38b53
| 0 |
openbaton/openbaton-client,openbaton/openbaton-client,openbaton/openbaton-client
|
/*
* Copyright (c) 2016 Open Baton (http://www.openbaton.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.openbaton.sdk;
import org.apache.http.annotation.ThreadSafe;
import org.openbaton.catalogue.security.Project;
import org.openbaton.sdk.api.exception.SDKException;
import org.openbaton.sdk.api.rest.*;
/**
* This class serves as a creator of request agents for the NFVO. These agents can be obtained by
* get methods and provide methods for sending requests to the NFVO API. The agents have the same
* configuration as the NFVORequestor object from which they are obtained. In this way it is easier
* to get the appropriate agents that are needed without calling the particular constructors each
* time.
*/
@ThreadSafe
public final class NFVORequestor {
private String username;
private String password;
private String projectId;
private boolean sslEnabled;
private String nfvoIp;
private String nfvoPort;
private String version;
private ConfigurationAgent configurationAgent;
private NetworkServiceDescriptorAgent networkServiceDescriptorAgent;
private NetworkServiceRecordAgent networkServiceRecordAgent;
private VimInstanceAgent vimInstanceAgent;
private VirtualLinkAgent virtualLinkAgent;
private VirtualNetworkFunctionDescriptorAgent virtualNetworkFunctionDescriptorAgent;
private VNFFGAgent vnffgAgent;
private EventAgent eventAgent;
private VNFPackageAgent vnfPackageAgent;
private ProjectAgent projectAgent;
private UserAgent userAgent;
private KeyAgent keyAgent;
// if a new agent is added please keep in mind to update the resetAgents method
/**
* Constructor for the NFVORequestor.
*
* @param username the username used for sending requests
* @param password the password used for sending requests
* @param projectId the NFVO Project's ID that will be used in the requests to the NFVO
* @param sslEnabled true if the NFVO uses SSL
* @param nfvoIp the IP address of the NFVO to which the requests are sent
* @param nfvoPort the port on which the NFVO runs
* @param version the API version
*/
public NFVORequestor(
String username,
String password,
String projectId,
boolean sslEnabled,
String nfvoIp,
String nfvoPort,
String version) {
this.username = username;
this.password = password;
this.projectId = projectId;
this.sslEnabled = sslEnabled;
this.nfvoIp = nfvoIp;
this.nfvoPort = nfvoPort;
this.version = version;
}
/**
* Constructor for the NFVORequestor, which takes the Project's name instead of the Project ID.
* This constructor sends a request to the NFVO and checks if a Project with the given name
* exists.
*
* @param username the username used for sending requests
* @param password the password used for sending requests
* @param sslEnabled true if the NFVO uses SSL
* @param projectName the name of the NFVO Project that will be used in the requests to the NFVO
* @param nfvoIp the IP address of the NFVO to which the requests are sent
* @param nfvoPort the port on which the NFVO runs
* @param version the API version
* @throws SDKException
*/
public NFVORequestor(
String username,
String password,
boolean sslEnabled,
String projectName,
String nfvoIp,
String nfvoPort,
String version)
throws SDKException {
this.username = username;
this.password = password;
this.sslEnabled = sslEnabled;
this.nfvoIp = nfvoIp;
this.nfvoPort = nfvoPort;
this.version = version;
try {
this.projectId = getProjectIdForProjectName(projectName);
} catch (SDKException e) {
throw new SDKException(
"Could not create the NFVORequestor", e.getStackTraceElements(), e.getReason());
}
}
/**
* Returns a ConfigurationAgent with which requests regarding Configurations can be sent to the
* NFVO.
*
* @return a ConfigurationAgent
*/
public synchronized ConfigurationAgent getConfigurationAgent() {
if (this.configurationAgent == null)
this.configurationAgent =
new ConfigurationAgent(
this.username,
this.password,
this.projectId,
this.sslEnabled,
this.nfvoIp,
this.nfvoPort,
this.version);
return this.configurationAgent;
}
/**
* Returns a NetworkServiceDescriptorAgent with which requests regarding NetworkServiceDescriptors
* can be sent to the NFVO.
*
* @return a NetworkServiceDescriptorAgent
*/
public synchronized NetworkServiceDescriptorAgent getNetworkServiceDescriptorAgent() {
if (this.networkServiceDescriptorAgent == null)
this.networkServiceDescriptorAgent =
new NetworkServiceDescriptorAgent(
this.username,
this.password,
this.projectId,
this.sslEnabled,
this.nfvoIp,
this.nfvoPort,
this.version);
return this.networkServiceDescriptorAgent;
}
/**
* Returns a VirtualNetworkFunctionDescriptorAgent with which requests regarding
* VirtualNetworkFunctionDescriptors can be sent to the NFVO.
*
* @return a VirtualNetworkFunctionDescriptorAgent
*/
public synchronized VirtualNetworkFunctionDescriptorAgent
getVirtualNetworkFunctionDescriptorAgent() {
if (this.virtualNetworkFunctionDescriptorAgent == null)
this.virtualNetworkFunctionDescriptorAgent =
new VirtualNetworkFunctionDescriptorAgent(
this.username,
this.password,
this.projectId,
this.sslEnabled,
this.nfvoIp,
this.nfvoPort,
this.version);
return this.virtualNetworkFunctionDescriptorAgent;
}
/**
* Returns a NetworkServiceRecordAgent with which requests regarding NetworkServiceRecords can be
* sent to the NFVO.
*
* @return a NetworkServiceRecordAgent
*/
public synchronized NetworkServiceRecordAgent getNetworkServiceRecordAgent() {
if (this.networkServiceRecordAgent == null)
this.networkServiceRecordAgent =
new NetworkServiceRecordAgent(
this.username,
this.password,
this.projectId,
this.sslEnabled,
this.nfvoIp,
this.nfvoPort,
this.version);
return this.networkServiceRecordAgent;
}
/**
* Returns a VimInstanceAgent with which requests regarding VimInstances can be sent to the NFVO.
*
* @return a VimInstanceAgent
*/
public synchronized VimInstanceAgent getVimInstanceAgent() {
if (this.vimInstanceAgent == null)
this.vimInstanceAgent =
new VimInstanceAgent(
this.username,
this.password,
this.projectId,
this.sslEnabled,
this.nfvoIp,
this.nfvoPort,
this.version);
return this.vimInstanceAgent;
}
/**
* Returns a VirtualLinkAgent with which requests regarding VirtualLinks can be sent to the NFVO.
*
* @return a VirtualLinkAgent
*/
public synchronized VirtualLinkAgent getVirtualLinkAgent() {
if (this.virtualLinkAgent == null)
this.virtualLinkAgent =
new VirtualLinkAgent(
this.username,
this.password,
this.projectId,
this.sslEnabled,
this.nfvoIp,
this.nfvoPort,
this.version);
return this.virtualLinkAgent;
}
/**
* Returns a VirtualNetworkFunctionDescriptorAgent with which requests regarding
* VirtualNetworkFunctionDescriptors can be sent to the NFVO.
*
* @return a VirtualNetworkFunctionDescriptorAgent
*/
public synchronized VirtualNetworkFunctionDescriptorAgent
getVirtualNetworkFunctionDescriptorRestAgent() {
if (this.virtualNetworkFunctionDescriptorAgent == null)
this.virtualNetworkFunctionDescriptorAgent =
new VirtualNetworkFunctionDescriptorAgent(
this.username,
this.password,
this.projectId,
this.sslEnabled,
this.nfvoIp,
this.nfvoPort,
this.version);
return this.virtualNetworkFunctionDescriptorAgent;
}
/**
   * Returns a VNFFGAgent with which requests regarding VNFFGs can be sent to the NFVO.
*
* @return a VNFFGAgent
*/
public synchronized VNFFGAgent getVNFFGAgent() {
if (this.vnffgAgent == null)
this.vnffgAgent =
new VNFFGAgent(
this.username,
this.password,
this.projectId,
this.sslEnabled,
this.nfvoIp,
this.nfvoPort,
this.version);
return this.vnffgAgent;
}
/**
* Returns an EventAgent with which requests regarding Events can be sent to the NFVO.
*
* @return an EventAgent
*/
public synchronized EventAgent getEventAgent() {
if (this.eventAgent == null)
this.eventAgent =
new EventAgent(
this.username,
this.password,
this.projectId,
this.sslEnabled,
this.nfvoIp,
this.nfvoPort,
this.version);
return this.eventAgent;
}
/**
* Returns a VNFPackageAgent with which requests regarding VNFPackages can be sent to the NFVO.
*
* @return a VNFPackageAgent
*/
public synchronized VNFPackageAgent getVNFPackageAgent() {
if (this.vnfPackageAgent == null)
this.vnfPackageAgent =
new VNFPackageAgent(
this.username,
this.password,
this.projectId,
this.sslEnabled,
this.nfvoIp,
this.nfvoPort,
this.version);
return this.vnfPackageAgent;
}
/**
* Returns a ProjectAgent with which requests regarding Projects can be sent to the NFVO.
*
* @return a ProjectAgent
*/
public synchronized ProjectAgent getProjectAgent() {
if (this.projectAgent == null)
this.projectAgent =
new ProjectAgent(
this.username,
this.password,
this.projectId,
this.sslEnabled,
this.nfvoIp,
this.nfvoPort,
this.version);
return this.projectAgent;
}
/**
* Returns a UserAgent with which requests regarding Users can be sent to the NFVO.
*
* @return a UserAgent
*/
public synchronized UserAgent getUserAgent() {
if (this.userAgent == null)
this.userAgent =
new UserAgent(
this.username,
this.password,
this.projectId,
this.sslEnabled,
this.nfvoIp,
this.nfvoPort,
this.version);
return this.userAgent;
}
/**
* Returns a KeyAgent with which requests regarding Keys can be sent to the NFVO.
*
* @return a KeyAgent
*/
public synchronized KeyAgent getKeyAgent() {
if (this.keyAgent == null)
this.keyAgent =
new KeyAgent(
this.username,
this.password,
this.projectId,
this.sslEnabled,
this.nfvoIp,
this.nfvoPort,
this.version);
return this.keyAgent;
}
/**
* Set the NFVORequestor's project id. See the {@link #switchProject(String) switchProject} method
* for a more convenient alternative.
*
   * @param projectId the id of the project that subsequent requests should refer to
*/
public synchronized void setProjectId(String projectId) {
// Set the agents to null so that no outdated agent is returned
resetAgents();
this.projectId = projectId;
}
/**
* Get the NFVORequestor's project id.
*
* @return the current project id
*/
public synchronized String getProjectId() {
return this.projectId;
}
/**
* Change the project related to this NFVORequestor. This is a convenient alternative for the
* {@link #setProjectId(String) setProjectId} method. It throws an SDKException if no project
* exists with the given projectName.
*
* @param projectName the name of the project to switch to
* @throws SDKException
*/
public synchronized void switchProject(String projectName) throws SDKException {
try {
this.projectId = getProjectIdForProjectName(projectName);
// Set the agents to null so that no outdated agent is returned
resetAgents();
} catch (SDKException e) {
throw new SDKException(
"Could not switch to project " + projectName, e.getStackTraceElements(), e.getReason());
}
}
/**
* Return the project id for a given project name.
*
   * @param projectName the name of the project whose id is requested
* @return the project id for the given project name
* @throws SDKException
*/
private String getProjectIdForProjectName(String projectName) throws SDKException {
try {
for (Project project : this.getProjectAgent().findAll()) {
if (project.getName().equals(projectName)) {
return project.getId();
}
}
} catch (ClassNotFoundException e) {
throw new SDKException(e.getCause());
}
throw new SDKException(
"Did not find a Project named " + projectName,
null,
"Did not find a Project named " + projectName);
}
/** Set all the agent objects to null. */
private void resetAgents() {
this.configurationAgent = null;
this.keyAgent = null;
this.userAgent = null;
this.vnfPackageAgent = null;
this.projectAgent = null;
this.eventAgent = null;
this.vnffgAgent = null;
this.virtualNetworkFunctionDescriptorAgent = null;
this.virtualLinkAgent = null;
this.vimInstanceAgent = null;
this.networkServiceDescriptorAgent = null;
this.networkServiceRecordAgent = null;
}
}
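For illustration, a minimal usage sketch of the requestor class defined above; the credentials, endpoint and project names below are placeholders, not values taken from this repository, and error handling is reduced to a simple rethrow.
// Illustrative usage sketch; all connection parameters and project names are placeholders.
package org.openbaton.sdk.example;

import org.openbaton.sdk.NFVORequestor;
import org.openbaton.sdk.api.exception.SDKException;

class NfvoRequestorUsage {
  public static void main(String[] args) throws SDKException {
    // Resolve the project id from its name via the project-name constructor above.
    NFVORequestor requestor =
        new NFVORequestor("admin", "openbaton", false, "default", "127.0.0.1", "8080", "1");

    // Agents are created lazily and reuse the requestor's configuration.
    System.out.println("Working with project " + requestor.getProjectId());

    // Switching projects resets the cached agents, so later agents target the new project.
    requestor.switchProject("another-project");
    System.out.println("Now working with project " + requestor.getProjectId());
  }
}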
|
sdk/src/main/java/org/openbaton/sdk/NFVORequestor.java
|
/*
* Copyright (c) 2016 Open Baton (http://www.openbaton.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.openbaton.sdk;
import org.apache.http.annotation.ThreadSafe;
import org.openbaton.catalogue.security.Project;
import org.openbaton.sdk.api.exception.SDKException;
import org.openbaton.sdk.api.rest.*;
/**
* This class serves as a creator of request agents for the NFVO. These agents can be obtained by
* get methods and provide methods for sending requests to the NFVO API. The agents have the same
* configuration as the NFVORequestor object from which they are obtained. In this way it is easier
* to get the appropriate agents that are needed without calling the particular constructors each
* time.
*/
@ThreadSafe
public final class NFVORequestor {
private String username;
private String password;
private String projectId;
private boolean sslEnabled;
private String nfvoIp;
private String nfvoPort;
private String version;
private ConfigurationAgent configurationAgent;
private NetworkServiceDescriptorAgent networkServiceDescriptorAgent;
private NetworkServiceRecordAgent networkServiceRecordAgent;
private VimInstanceAgent vimInstanceAgent;
private VirtualLinkAgent virtualLinkAgent;
private VirtualNetworkFunctionDescriptorAgent virtualNetworkFunctionDescriptorAgent;
private VNFFGAgent vnffgAgent;
private EventAgent eventAgent;
private VNFPackageAgent vnfPackageAgent;
private ProjectAgent projectAgent;
private UserAgent userAgent;
private KeyAgent keyAgent;
/**
* Constructor for the NFVORequestor.
*
* @param username the username used for sending requests
* @param password the password used for sending requests
* @param projectId the NFVO Project's ID that will be used in the requests to the NFVO
* @param sslEnabled true if the NFVO uses SSL
* @param nfvoIp the IP address of the NFVO to which the requests are sent
* @param nfvoPort the port on which the NFVO runs
* @param version the API version
*/
public NFVORequestor(
String username,
String password,
String projectId,
boolean sslEnabled,
String nfvoIp,
String nfvoPort,
String version) {
this.username = username;
this.password = password;
this.projectId = projectId;
this.sslEnabled = sslEnabled;
this.nfvoIp = nfvoIp;
this.nfvoPort = nfvoPort;
this.version = version;
}
/**
* Constructor for the NFVORequestor, which takes the Project's name instead of the Project ID.
* This constructor sends a request to the NFVO and checks if a Project with the given name
* exists.
*
* @param username the username used for sending requests
* @param password the password used for sending requests
* @param sslEnabled true if the NFVO uses SSL
* @param projectName the name of the NFVO Project that will be used in the requests to the NFVO
* @param nfvoIp the IP address of the NFVO to which the requests are sent
* @param nfvoPort the port on which the NFVO runs
* @param version the API version
* @throws SDKException
*/
public NFVORequestor(
String username,
String password,
boolean sslEnabled,
String projectName,
String nfvoIp,
String nfvoPort,
String version)
throws SDKException {
this.username = username;
this.password = password;
this.sslEnabled = sslEnabled;
this.nfvoIp = nfvoIp;
this.nfvoPort = nfvoPort;
this.version = version;
this.projectId = "";
try {
for (Project project : this.getProjectAgent().findAll()) {
if (project.getName().equals(projectName)) {
this.projectId = project.getId();
break;
}
}
} catch (ClassNotFoundException e) {
throw new SDKException(e.getCause());
}
if (this.projectId.equals(""))
throw new SDKException(
"Could not create the NFVORequestor",
null,
"Did not find a Project named " + projectName);
}
/**
* Returns a ConfigurationAgent with which requests regarding Configurations can be sent to the
* NFVO.
*
* @return a ConfigurationAgent
*/
public ConfigurationAgent getConfigurationAgent() {
if (this.configurationAgent == null)
this.configurationAgent =
new ConfigurationAgent(
this.username,
this.password,
this.projectId,
this.sslEnabled,
this.nfvoIp,
this.nfvoPort,
this.version);
return this.configurationAgent;
}
/**
* Returns a NetworkServiceDescriptorAgent with which requests regarding NetworkServiceDescriptors
* can be sent to the NFVO.
*
* @return a NetworkServiceDescriptorAgent
*/
public NetworkServiceDescriptorAgent getNetworkServiceDescriptorAgent() {
if (this.networkServiceDescriptorAgent == null)
this.networkServiceDescriptorAgent =
new NetworkServiceDescriptorAgent(
this.username,
this.password,
this.projectId,
this.sslEnabled,
this.nfvoIp,
this.nfvoPort,
this.version);
return this.networkServiceDescriptorAgent;
}
/**
* Returns a VirtualNetworkFunctionDescriptorAgent with which requests regarding
* VirtualNetworkFunctionDescriptors can be sent to the NFVO.
*
* @return a VirtualNetworkFunctionDescriptorAgent
*/
public VirtualNetworkFunctionDescriptorAgent getVirtualNetworkFunctionDescriptorAgent() {
if (this.virtualNetworkFunctionDescriptorAgent == null)
this.virtualNetworkFunctionDescriptorAgent =
new VirtualNetworkFunctionDescriptorAgent(
this.username,
this.password,
this.projectId,
this.sslEnabled,
this.nfvoIp,
this.nfvoPort,
this.version);
return this.virtualNetworkFunctionDescriptorAgent;
}
/**
* Returns a NetworkServiceRecordAgent with which requests regarding NetworkServiceRecords can be
* sent to the NFVO.
*
* @return a NetworkServiceRecordAgent
*/
public NetworkServiceRecordAgent getNetworkServiceRecordAgent() {
if (this.networkServiceRecordAgent == null)
this.networkServiceRecordAgent =
new NetworkServiceRecordAgent(
this.username,
this.password,
this.projectId,
this.sslEnabled,
this.nfvoIp,
this.nfvoPort,
this.version);
return this.networkServiceRecordAgent;
}
/**
* Returns a VimInstanceAgent with which requests regarding VimInstances can be sent to the NFVO.
*
* @return a VimInstanceAgent
*/
public VimInstanceAgent getVimInstanceAgent() {
if (this.vimInstanceAgent == null)
this.vimInstanceAgent =
new VimInstanceAgent(
this.username,
this.password,
this.projectId,
this.sslEnabled,
this.nfvoIp,
this.nfvoPort,
this.version);
return this.vimInstanceAgent;
}
/**
* Returns a VirtualLinkAgent with which requests regarding VirtualLinks can be sent to the NFVO.
*
* @return a VirtualLinkAgent
*/
public VirtualLinkAgent getVirtualLinkAgent() {
if (this.virtualLinkAgent == null)
this.virtualLinkAgent =
new VirtualLinkAgent(
this.username,
this.password,
this.projectId,
this.sslEnabled,
this.nfvoIp,
this.nfvoPort,
this.version);
return this.virtualLinkAgent;
}
/**
* Returns a VirtualNetworkFunctionDescriptorAgent with which requests regarding
* VirtualNetworkFunctionDescriptors can be sent to the NFVO.
*
* @return a VirtualNetworkFunctionDescriptorAgent
*/
public VirtualNetworkFunctionDescriptorAgent getVirtualNetworkFunctionDescriptorRestAgent() {
if (this.virtualNetworkFunctionDescriptorAgent == null)
this.virtualNetworkFunctionDescriptorAgent =
new VirtualNetworkFunctionDescriptorAgent(
this.username,
this.password,
this.projectId,
this.sslEnabled,
this.nfvoIp,
this.nfvoPort,
this.version);
return this.virtualNetworkFunctionDescriptorAgent;
}
/**
   * Returns a VNFFGAgent with which requests regarding VNFFGs can be sent to the NFVO.
*
* @return a VNFFGAgent
*/
public VNFFGAgent getVNFFGAgent() {
if (this.vnffgAgent == null)
this.vnffgAgent =
new VNFFGAgent(
this.username,
this.password,
this.projectId,
this.sslEnabled,
this.nfvoIp,
this.nfvoPort,
this.version);
return this.vnffgAgent;
}
/**
* Returns an EventAgent with which requests regarding Events can be sent to the NFVO.
*
* @return an EventAgent
*/
public EventAgent getEventAgent() {
if (this.eventAgent == null)
this.eventAgent =
new EventAgent(
this.username,
this.password,
this.projectId,
this.sslEnabled,
this.nfvoIp,
this.nfvoPort,
this.version);
return this.eventAgent;
}
/**
* Returns a VNFPackageAgent with which requests regarding VNFPackages can be sent to the NFVO.
*
* @return a VNFPackageAgent
*/
public VNFPackageAgent getVNFPackageAgent() {
if (this.vnfPackageAgent == null)
this.vnfPackageAgent =
new VNFPackageAgent(
this.username,
this.password,
this.projectId,
this.sslEnabled,
this.nfvoIp,
this.nfvoPort,
this.version);
return this.vnfPackageAgent;
}
/**
* Returns a ProjectAgent with which requests regarding Projects can be sent to the NFVO.
*
* @return a ProjectAgent
*/
public ProjectAgent getProjectAgent() {
if (this.projectAgent == null)
this.projectAgent =
new ProjectAgent(
this.username,
this.password,
this.projectId,
this.sslEnabled,
this.nfvoIp,
this.nfvoPort,
this.version);
return this.projectAgent;
}
/**
* Returns a UserAgent with which requests regarding Users can be sent to the NFVO.
*
* @return a UserAgent
*/
public UserAgent getUserAgent() {
if (this.userAgent == null)
this.userAgent =
new UserAgent(
this.username,
this.password,
this.projectId,
this.sslEnabled,
this.nfvoIp,
this.nfvoPort,
this.version);
return this.userAgent;
}
/**
* Returns a KeyAgent with which requests regarding Keys can be sent to the NFVO.
*
* @return a KeyAgent
*/
public KeyAgent getKeyAgent() {
if (this.keyAgent == null)
this.keyAgent =
new KeyAgent(
this.username,
this.password,
this.projectId,
this.sslEnabled,
this.nfvoIp,
this.nfvoPort,
this.version);
return this.keyAgent;
}
}
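
// --- Illustrative usage sketch (not part of the original source) ---
// The constructor arguments below are assumed from the fields used in the class above
// (username, password, project name, SSL flag, NFVO ip/port, version); the real SDK
// signature may differ. ProjectAgent#findAll(), Project#getName() and Project#getId()
// are the same calls the class itself uses when resolving a project name.
class NFVORequestorUsageSketch {
  void listProjects() throws SDKException, ClassNotFoundException {
    NFVORequestor requestor =
        new NFVORequestor("admin", "openbaton", "default", false, "127.0.0.1", "8080", "1");
    // Agents are created lazily and cached: repeated getter calls return the same instance.
    ProjectAgent projectAgent = requestor.getProjectAgent();
    for (Project project : projectAgent.findAll()) {
      System.out.println(project.getName() + " -> " + project.getId());
    }
  }
}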
|
Imp: Add the possibility to change the NFVORequestor's current project.
|
sdk/src/main/java/org/openbaton/sdk/NFVORequestor.java
|
Imp: Add the possibility to change the NFVORequestor's current project.
|
<ide><path>sdk/src/main/java/org/openbaton/sdk/NFVORequestor.java
<ide> private ProjectAgent projectAgent;
<ide> private UserAgent userAgent;
<ide> private KeyAgent keyAgent;
<add>  // if a new agent is added, remember to update the resetAgents method as well
<ide>
<ide> /**
<ide> * Constructor for the NFVORequestor.
<ide> this.nfvoIp = nfvoIp;
<ide> this.nfvoPort = nfvoPort;
<ide> this.version = version;
<del>
<del> this.projectId = "";
<add> try {
<add> this.projectId = getProjectIdForProjectName(projectName);
<add> } catch (SDKException e) {
<add> throw new SDKException(
<add> "Could not create the NFVORequestor", e.getStackTraceElements(), e.getReason());
<add> }
<add> }
<add>
<add> /**
<add> * Returns a ConfigurationAgent with which requests regarding Configurations can be sent to the
<add> * NFVO.
<add> *
<add> * @return a ConfigurationAgent
<add> */
<add> public synchronized ConfigurationAgent getConfigurationAgent() {
<add> if (this.configurationAgent == null)
<add> this.configurationAgent =
<add> new ConfigurationAgent(
<add> this.username,
<add> this.password,
<add> this.projectId,
<add> this.sslEnabled,
<add> this.nfvoIp,
<add> this.nfvoPort,
<add> this.version);
<add> return this.configurationAgent;
<add> }
<add>
<add> /**
<add> * Returns a NetworkServiceDescriptorAgent with which requests regarding NetworkServiceDescriptors
<add> * can be sent to the NFVO.
<add> *
<add> * @return a NetworkServiceDescriptorAgent
<add> */
<add> public synchronized NetworkServiceDescriptorAgent getNetworkServiceDescriptorAgent() {
<add> if (this.networkServiceDescriptorAgent == null)
<add> this.networkServiceDescriptorAgent =
<add> new NetworkServiceDescriptorAgent(
<add> this.username,
<add> this.password,
<add> this.projectId,
<add> this.sslEnabled,
<add> this.nfvoIp,
<add> this.nfvoPort,
<add> this.version);
<add> return this.networkServiceDescriptorAgent;
<add> }
<add>
<add> /**
<add> * Returns a VirtualNetworkFunctionDescriptorAgent with which requests regarding
<add> * VirtualNetworkFunctionDescriptors can be sent to the NFVO.
<add> *
<add> * @return a VirtualNetworkFunctionDescriptorAgent
<add> */
<add> public synchronized VirtualNetworkFunctionDescriptorAgent
<add> getVirtualNetworkFunctionDescriptorAgent() {
<add> if (this.virtualNetworkFunctionDescriptorAgent == null)
<add> this.virtualNetworkFunctionDescriptorAgent =
<add> new VirtualNetworkFunctionDescriptorAgent(
<add> this.username,
<add> this.password,
<add> this.projectId,
<add> this.sslEnabled,
<add> this.nfvoIp,
<add> this.nfvoPort,
<add> this.version);
<add> return this.virtualNetworkFunctionDescriptorAgent;
<add> }
<add>
<add> /**
<add> * Returns a NetworkServiceRecordAgent with which requests regarding NetworkServiceRecords can be
<add> * sent to the NFVO.
<add> *
<add> * @return a NetworkServiceRecordAgent
<add> */
<add> public synchronized NetworkServiceRecordAgent getNetworkServiceRecordAgent() {
<add> if (this.networkServiceRecordAgent == null)
<add> this.networkServiceRecordAgent =
<add> new NetworkServiceRecordAgent(
<add> this.username,
<add> this.password,
<add> this.projectId,
<add> this.sslEnabled,
<add> this.nfvoIp,
<add> this.nfvoPort,
<add> this.version);
<add> return this.networkServiceRecordAgent;
<add> }
<add>
<add> /**
<add> * Returns a VimInstanceAgent with which requests regarding VimInstances can be sent to the NFVO.
<add> *
<add> * @return a VimInstanceAgent
<add> */
<add> public synchronized VimInstanceAgent getVimInstanceAgent() {
<add> if (this.vimInstanceAgent == null)
<add> this.vimInstanceAgent =
<add> new VimInstanceAgent(
<add> this.username,
<add> this.password,
<add> this.projectId,
<add> this.sslEnabled,
<add> this.nfvoIp,
<add> this.nfvoPort,
<add> this.version);
<add> return this.vimInstanceAgent;
<add> }
<add>
<add> /**
<add> * Returns a VirtualLinkAgent with which requests regarding VirtualLinks can be sent to the NFVO.
<add> *
<add> * @return a VirtualLinkAgent
<add> */
<add> public synchronized VirtualLinkAgent getVirtualLinkAgent() {
<add> if (this.virtualLinkAgent == null)
<add> this.virtualLinkAgent =
<add> new VirtualLinkAgent(
<add> this.username,
<add> this.password,
<add> this.projectId,
<add> this.sslEnabled,
<add> this.nfvoIp,
<add> this.nfvoPort,
<add> this.version);
<add> return this.virtualLinkAgent;
<add> }
<add>
<add> /**
<add> * Returns a VirtualNetworkFunctionDescriptorAgent with which requests regarding
<add> * VirtualNetworkFunctionDescriptors can be sent to the NFVO.
<add> *
<add> * @return a VirtualNetworkFunctionDescriptorAgent
<add> */
<add> public synchronized VirtualNetworkFunctionDescriptorAgent
<add> getVirtualNetworkFunctionDescriptorRestAgent() {
<add> if (this.virtualNetworkFunctionDescriptorAgent == null)
<add> this.virtualNetworkFunctionDescriptorAgent =
<add> new VirtualNetworkFunctionDescriptorAgent(
<add> this.username,
<add> this.password,
<add> this.projectId,
<add> this.sslEnabled,
<add> this.nfvoIp,
<add> this.nfvoPort,
<add> this.version);
<add> return this.virtualNetworkFunctionDescriptorAgent;
<add> }
<add>
<add> /**
<add>   * Returns a VNFFGAgent with which requests regarding VNFFGs can be sent to the NFVO.
<add> *
<add> * @return a VNFFGAgent
<add> */
<add> public synchronized VNFFGAgent getVNFFGAgent() {
<add> if (this.vnffgAgent == null)
<add> this.vnffgAgent =
<add> new VNFFGAgent(
<add> this.username,
<add> this.password,
<add> this.projectId,
<add> this.sslEnabled,
<add> this.nfvoIp,
<add> this.nfvoPort,
<add> this.version);
<add> return this.vnffgAgent;
<add> }
<add>
<add> /**
<add> * Returns an EventAgent with which requests regarding Events can be sent to the NFVO.
<add> *
<add> * @return an EventAgent
<add> */
<add> public synchronized EventAgent getEventAgent() {
<add> if (this.eventAgent == null)
<add> this.eventAgent =
<add> new EventAgent(
<add> this.username,
<add> this.password,
<add> this.projectId,
<add> this.sslEnabled,
<add> this.nfvoIp,
<add> this.nfvoPort,
<add> this.version);
<add> return this.eventAgent;
<add> }
<add>
<add> /**
<add> * Returns a VNFPackageAgent with which requests regarding VNFPackages can be sent to the NFVO.
<add> *
<add> * @return a VNFPackageAgent
<add> */
<add> public synchronized VNFPackageAgent getVNFPackageAgent() {
<add> if (this.vnfPackageAgent == null)
<add> this.vnfPackageAgent =
<add> new VNFPackageAgent(
<add> this.username,
<add> this.password,
<add> this.projectId,
<add> this.sslEnabled,
<add> this.nfvoIp,
<add> this.nfvoPort,
<add> this.version);
<add> return this.vnfPackageAgent;
<add> }
<add>
<add> /**
<add> * Returns a ProjectAgent with which requests regarding Projects can be sent to the NFVO.
<add> *
<add> * @return a ProjectAgent
<add> */
<add> public synchronized ProjectAgent getProjectAgent() {
<add> if (this.projectAgent == null)
<add> this.projectAgent =
<add> new ProjectAgent(
<add> this.username,
<add> this.password,
<add> this.projectId,
<add> this.sslEnabled,
<add> this.nfvoIp,
<add> this.nfvoPort,
<add> this.version);
<add> return this.projectAgent;
<add> }
<add>
<add> /**
<add> * Returns a UserAgent with which requests regarding Users can be sent to the NFVO.
<add> *
<add> * @return a UserAgent
<add> */
<add> public synchronized UserAgent getUserAgent() {
<add> if (this.userAgent == null)
<add> this.userAgent =
<add> new UserAgent(
<add> this.username,
<add> this.password,
<add> this.projectId,
<add> this.sslEnabled,
<add> this.nfvoIp,
<add> this.nfvoPort,
<add> this.version);
<add> return this.userAgent;
<add> }
<add>
<add> /**
<add> * Returns a KeyAgent with which requests regarding Keys can be sent to the NFVO.
<add> *
<add> * @return a KeyAgent
<add> */
<add> public synchronized KeyAgent getKeyAgent() {
<add> if (this.keyAgent == null)
<add> this.keyAgent =
<add> new KeyAgent(
<add> this.username,
<add> this.password,
<add> this.projectId,
<add> this.sslEnabled,
<add> this.nfvoIp,
<add> this.nfvoPort,
<add> this.version);
<add> return this.keyAgent;
<add> }
<add>
<add> /**
<add> * Set the NFVORequestor's project id. See the {@link #switchProject(String) switchProject} method
<add> * for a more convenient alternative.
<add> *
<add> * @param projectId
<add> */
<add> public synchronized void setProjectId(String projectId) {
<add> // Set the agents to null so that no outdated agent is returned
<add> resetAgents();
<add> this.projectId = projectId;
<add> }
<add>
<add> /**
<add> * Get the NFVORequestor's project id.
<add> *
<add> * @return the current project id
<add> */
<add> public synchronized String getProjectId() {
<add> return this.projectId;
<add> }
<add>
<add> /**
<add> * Change the project related to this NFVORequestor. This is a convenient alternative for the
<add> * {@link #setProjectId(String) setProjectId} method. It throws an SDKException if no project
<add> * exists with the given projectName.
<add> *
<add> * @param projectName the name of the project to switch to
<add> * @throws SDKException
<add> */
<add> public synchronized void switchProject(String projectName) throws SDKException {
<add> try {
<add> this.projectId = getProjectIdForProjectName(projectName);
<add> // Set the agents to null so that no outdated agent is returned
<add> resetAgents();
<add> } catch (SDKException e) {
<add> throw new SDKException(
<add> "Could not switch to project " + projectName, e.getStackTraceElements(), e.getReason());
<add> }
<add> }
<add>
<add> /**
<add> * Return the project id for a given project name.
<add> *
<add> * @param projectName
<add> * @return the project id for the given project name
<add> * @throws SDKException
<add> */
<add> private String getProjectIdForProjectName(String projectName) throws SDKException {
<ide> try {
<ide> for (Project project : this.getProjectAgent().findAll()) {
<ide> if (project.getName().equals(projectName)) {
<del> this.projectId = project.getId();
<del> break;
<add> return project.getId();
<ide> }
<ide> }
<ide> } catch (ClassNotFoundException e) {
<ide> throw new SDKException(e.getCause());
<ide> }
<del> if (this.projectId.equals(""))
<del> throw new SDKException(
<del> "Could not create the NFVORequestor",
<del> null,
<del> "Did not find a Project named " + projectName);
<del> }
<del>
<del> /**
<del> * Returns a ConfigurationAgent with which requests regarding Configurations can be sent to the
<del> * NFVO.
<del> *
<del> * @return a ConfigurationAgent
<del> */
<del> public ConfigurationAgent getConfigurationAgent() {
<del> if (this.configurationAgent == null)
<del> this.configurationAgent =
<del> new ConfigurationAgent(
<del> this.username,
<del> this.password,
<del> this.projectId,
<del> this.sslEnabled,
<del> this.nfvoIp,
<del> this.nfvoPort,
<del> this.version);
<del> return this.configurationAgent;
<del> }
<del>
<del> /**
<del> * Returns a NetworkServiceDescriptorAgent with which requests regarding NetworkServiceDescriptors
<del> * can be sent to the NFVO.
<del> *
<del> * @return a NetworkServiceDescriptorAgent
<del> */
<del> public NetworkServiceDescriptorAgent getNetworkServiceDescriptorAgent() {
<del> if (this.networkServiceDescriptorAgent == null)
<del> this.networkServiceDescriptorAgent =
<del> new NetworkServiceDescriptorAgent(
<del> this.username,
<del> this.password,
<del> this.projectId,
<del> this.sslEnabled,
<del> this.nfvoIp,
<del> this.nfvoPort,
<del> this.version);
<del> return this.networkServiceDescriptorAgent;
<del> }
<del>
<del> /**
<del> * Returns a VirtualNetworkFunctionDescriptorAgent with which requests regarding
<del> * VirtualNetworkFunctionDescriptors can be sent to the NFVO.
<del> *
<del> * @return a VirtualNetworkFunctionDescriptorAgent
<del> */
<del> public VirtualNetworkFunctionDescriptorAgent getVirtualNetworkFunctionDescriptorAgent() {
<del> if (this.virtualNetworkFunctionDescriptorAgent == null)
<del> this.virtualNetworkFunctionDescriptorAgent =
<del> new VirtualNetworkFunctionDescriptorAgent(
<del> this.username,
<del> this.password,
<del> this.projectId,
<del> this.sslEnabled,
<del> this.nfvoIp,
<del> this.nfvoPort,
<del> this.version);
<del> return this.virtualNetworkFunctionDescriptorAgent;
<del> }
<del>
<del> /**
<del> * Returns a NetworkServiceRecordAgent with which requests regarding NetworkServiceRecords can be
<del> * sent to the NFVO.
<del> *
<del> * @return a NetworkServiceRecordAgent
<del> */
<del> public NetworkServiceRecordAgent getNetworkServiceRecordAgent() {
<del> if (this.networkServiceRecordAgent == null)
<del> this.networkServiceRecordAgent =
<del> new NetworkServiceRecordAgent(
<del> this.username,
<del> this.password,
<del> this.projectId,
<del> this.sslEnabled,
<del> this.nfvoIp,
<del> this.nfvoPort,
<del> this.version);
<del> return this.networkServiceRecordAgent;
<del> }
<del>
<del> /**
<del> * Returns a VimInstanceAgent with which requests regarding VimInstances can be sent to the NFVO.
<del> *
<del> * @return a VimInstanceAgent
<del> */
<del> public VimInstanceAgent getVimInstanceAgent() {
<del> if (this.vimInstanceAgent == null)
<del> this.vimInstanceAgent =
<del> new VimInstanceAgent(
<del> this.username,
<del> this.password,
<del> this.projectId,
<del> this.sslEnabled,
<del> this.nfvoIp,
<del> this.nfvoPort,
<del> this.version);
<del> return this.vimInstanceAgent;
<del> }
<del>
<del> /**
<del> * Returns a VirtualLinkAgent with which requests regarding VirtualLinks can be sent to the NFVO.
<del> *
<del> * @return a VirtualLinkAgent
<del> */
<del> public VirtualLinkAgent getVirtualLinkAgent() {
<del> if (this.virtualLinkAgent == null)
<del> this.virtualLinkAgent =
<del> new VirtualLinkAgent(
<del> this.username,
<del> this.password,
<del> this.projectId,
<del> this.sslEnabled,
<del> this.nfvoIp,
<del> this.nfvoPort,
<del> this.version);
<del> return this.virtualLinkAgent;
<del> }
<del>
<del> /**
<del> * Returns a VirtualNetworkFunctionDescriptorAgent with which requests regarding
<del> * VirtualNetworkFunctionDescriptors can be sent to the NFVO.
<del> *
<del> * @return a VirtualNetworkFunctionDescriptorAgent
<del> */
<del> public VirtualNetworkFunctionDescriptorAgent getVirtualNetworkFunctionDescriptorRestAgent() {
<del> if (this.virtualNetworkFunctionDescriptorAgent == null)
<del> this.virtualNetworkFunctionDescriptorAgent =
<del> new VirtualNetworkFunctionDescriptorAgent(
<del> this.username,
<del> this.password,
<del> this.projectId,
<del> this.sslEnabled,
<del> this.nfvoIp,
<del> this.nfvoPort,
<del> this.version);
<del> return this.virtualNetworkFunctionDescriptorAgent;
<del> }
<del>
<del> /**
<del>   * Returns a VNFFGAgent with which requests regarding VNFFGs can be sent to the NFVO.
<del> *
<del> * @return a VNFFGAgent
<del> */
<del> public VNFFGAgent getVNFFGAgent() {
<del> if (this.vnffgAgent == null)
<del> this.vnffgAgent =
<del> new VNFFGAgent(
<del> this.username,
<del> this.password,
<del> this.projectId,
<del> this.sslEnabled,
<del> this.nfvoIp,
<del> this.nfvoPort,
<del> this.version);
<del> return this.vnffgAgent;
<del> }
<del>
<del> /**
<del> * Returns an EventAgent with which requests regarding Events can be sent to the NFVO.
<del> *
<del> * @return an EventAgent
<del> */
<del> public EventAgent getEventAgent() {
<del> if (this.eventAgent == null)
<del> this.eventAgent =
<del> new EventAgent(
<del> this.username,
<del> this.password,
<del> this.projectId,
<del> this.sslEnabled,
<del> this.nfvoIp,
<del> this.nfvoPort,
<del> this.version);
<del> return this.eventAgent;
<del> }
<del>
<del> /**
<del> * Returns a VNFPackageAgent with which requests regarding VNFPackages can be sent to the NFVO.
<del> *
<del> * @return a VNFPackageAgent
<del> */
<del> public VNFPackageAgent getVNFPackageAgent() {
<del> if (this.vnfPackageAgent == null)
<del> this.vnfPackageAgent =
<del> new VNFPackageAgent(
<del> this.username,
<del> this.password,
<del> this.projectId,
<del> this.sslEnabled,
<del> this.nfvoIp,
<del> this.nfvoPort,
<del> this.version);
<del> return this.vnfPackageAgent;
<del> }
<del>
<del> /**
<del> * Returns a ProjectAgent with which requests regarding Projects can be sent to the NFVO.
<del> *
<del> * @return a ProjectAgent
<del> */
<del> public ProjectAgent getProjectAgent() {
<del> if (this.projectAgent == null)
<del> this.projectAgent =
<del> new ProjectAgent(
<del> this.username,
<del> this.password,
<del> this.projectId,
<del> this.sslEnabled,
<del> this.nfvoIp,
<del> this.nfvoPort,
<del> this.version);
<del> return this.projectAgent;
<del> }
<del>
<del> /**
<del> * Returns a UserAgent with which requests regarding Users can be sent to the NFVO.
<del> *
<del> * @return a UserAgent
<del> */
<del> public UserAgent getUserAgent() {
<del> if (this.userAgent == null)
<del> this.userAgent =
<del> new UserAgent(
<del> this.username,
<del> this.password,
<del> this.projectId,
<del> this.sslEnabled,
<del> this.nfvoIp,
<del> this.nfvoPort,
<del> this.version);
<del> return this.userAgent;
<del> }
<del>
<del> /**
<del> * Returns a KeyAgent with which requests regarding Keys can be sent to the NFVO.
<del> *
<del> * @return a KeyAgent
<del> */
<del> public KeyAgent getKeyAgent() {
<del> if (this.keyAgent == null)
<del> this.keyAgent =
<del> new KeyAgent(
<del> this.username,
<del> this.password,
<del> this.projectId,
<del> this.sslEnabled,
<del> this.nfvoIp,
<del> this.nfvoPort,
<del> this.version);
<del> return this.keyAgent;
<add> throw new SDKException(
<add> "Did not find a Project named " + projectName,
<add> null,
<add> "Did not find a Project named " + projectName);
<add> }
<add>
<add> /** Set all the agent objects to null. */
<add> private void resetAgents() {
<add> this.configurationAgent = null;
<add> this.keyAgent = null;
<add> this.userAgent = null;
<add> this.vnfPackageAgent = null;
<add> this.projectAgent = null;
<add> this.eventAgent = null;
<add> this.vnffgAgent = null;
<add> this.virtualNetworkFunctionDescriptorAgent = null;
<add> this.virtualLinkAgent = null;
<add> this.vimInstanceAgent = null;
<add> this.networkServiceDescriptorAgent = null;
<add> this.networkServiceRecordAgent = null;
<ide> }
<ide> }
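
A minimal, illustrative sketch of the project-switching API introduced by this diff (switchProject, getProjectId, setProjectId). It assumes an already constructed NFVORequestor named requestor and is not part of the original change:

    String previousProject = requestor.getProjectId();
    try {
        // switchProject resolves the project name to an id and resets the cached agents
        requestor.switchProject("my-other-project");
        // ... issue requests against the new project here ...
    } catch (SDKException e) {
        // no project with that name exists; fall back to the previous project id
        requestor.setProjectId(previousProject);
    }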
|
|
Java
|
mit
|
88e47a1d3ce8e07b129967a5c24a3b298f567a8e
| 0 |
dmusican/Elegit,dmusican/Elegit,dmusican/Elegit
|
package main.java.elegit;
import de.jensd.fx.glyphs.GlyphsDude;
import de.jensd.fx.glyphs.fontawesome.FontAwesomeIcon;
import javafx.application.Platform;
import javafx.collections.FXCollections;
import javafx.concurrent.Task;
import javafx.fxml.FXML;
import javafx.geometry.Side;
import javafx.scene.Node;
import javafx.scene.control.*;
import javafx.scene.control.Button;
import javafx.scene.control.Label;
import javafx.scene.control.MenuItem;
import javafx.scene.control.TextArea;
import javafx.scene.input.Clipboard;
import javafx.scene.input.ClipboardContent;
import javafx.scene.input.MouseButton;
import javafx.scene.input.MouseEvent;
import javafx.scene.paint.Color;
import javafx.scene.shape.Circle;
import javafx.scene.text.Text;
import main.java.elegit.exceptions.*;
import org.controlsfx.control.NotificationPane;
import org.controlsfx.control.action.Action;
import org.eclipse.jgit.api.errors.*;
import org.eclipse.jgit.errors.NoMergeBaseException;
import java.awt.*;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;
import java.util.prefs.BackingStoreException;
/**
* The controller for the entire session.
*/
public class SessionController {
public ComboBox<LocalBranchHelper> branchDropdownSelector;
public ComboBox<RepoHelper> repoDropdownSelector;
public Button loadNewRepoButton;
private SessionModel theModel;
public Node root;
public NotificationPane notificationPane;
public Button selectAllButton;
public Button deselectAllButton;
public Button switchUserButton;
public Button clearRecentReposButton;
public Button openRepoDirButton;
public Button gitStatusButton;
public Button commitButton;
public Button mergeFromFetchButton;
public Button pushButton;
public Button fetchButton;
public Button branchesButton;
public ProgressIndicator fetchProgressIndicator;
public ProgressIndicator pushProgressIndicator;
public TextArea commitMessageField;
public WorkingTreePanelView workingTreePanelView;
public CommitTreePanelView localCommitTreePanelView;
public CommitTreePanelView remoteCommitTreePanelView;
public Circle remoteCircle;
public Label commitInfoNameText;
public Label commitInfoAuthorText;
public Label commitInfoDateText;
public Button commitInfoNameCopyButton;
public Button commitInfoGoToButton;
public TextArea commitInfoMessageText;
CommitTreeModel localCommitTreeModel;
CommitTreeModel remoteCommitTreeModel;
/**
* Initializes the environment by obtaining the model
* and putting the views on display.
*
* This method is automatically called by JavaFX.
*/
public void initialize() {
this.theModel = SessionModel.getSessionModel();
this.initializeLayoutParameters();
CommitTreeController.sessionController = this;
this.workingTreePanelView.setSessionModel(this.theModel);
this.localCommitTreeModel = new LocalCommitTreeModel(this.theModel, this.localCommitTreePanelView);
this.remoteCommitTreeModel = new RemoteCommitTreeModel(this.theModel, this.remoteCommitTreePanelView);
// Add FontAwesome icons to buttons:
Text openExternallyIcon = GlyphsDude.createIcon(FontAwesomeIcon.EXTERNAL_LINK);
openExternallyIcon.setFill(javafx.scene.paint.Color.WHITE);
this.openRepoDirButton.setGraphic(openExternallyIcon);
this.openRepoDirButton.setTooltip(new Tooltip("Open repository directory"));
Text plusIcon = GlyphsDude.createIcon(FontAwesomeIcon.PLUS);
plusIcon.setFill(Color.WHITE);
this.loadNewRepoButton.setGraphic(plusIcon);
Text userIcon = GlyphsDude.createIcon(FontAwesomeIcon.USER);
userIcon.setFill(Color.WHITE);
this.switchUserButton.setGraphic(userIcon);
Text branchIcon = GlyphsDude.createIcon(FontAwesomeIcon.CODE_FORK);
branchIcon.setFill(Color.WHITE);
this.branchesButton.setGraphic(branchIcon);
Text exclamationIcon = GlyphsDude.createIcon(FontAwesomeIcon.EXCLAMATION);
exclamationIcon.setFill(Color.WHITE);
this.clearRecentReposButton.setGraphic(exclamationIcon);
Text clipboardIcon = GlyphsDude.createIcon(FontAwesomeIcon.CLIPBOARD);
clipboardIcon.setFill(Color.WHITE);
this.commitInfoNameCopyButton.setGraphic(clipboardIcon);
Text goToIcon = GlyphsDude.createIcon(FontAwesomeIcon.ARROW_CIRCLE_LEFT);
goToIcon.setFill(Color.WHITE);
this.commitInfoGoToButton.setGraphic(goToIcon);
// Set up the "+" button for loading new repos (give it a menu)
Text downloadIcon = GlyphsDude.createIcon(FontAwesomeIcon.CLOUD_DOWNLOAD);
MenuItem cloneOption = new MenuItem("Clone repository", downloadIcon);
cloneOption.setOnAction(t -> handleLoadRepoMenuItem(new ClonedRepoHelperBuilder(this.theModel)));
Text folderOpenIcon = GlyphsDude.createIcon(FontAwesomeIcon.FOLDER_OPEN);
MenuItem existingOption = new MenuItem("Load existing repository", folderOpenIcon);
existingOption.setOnAction(t -> handleLoadRepoMenuItem(new ExistingRepoHelperBuilder(this.theModel)));
ContextMenu newRepoOptionsMenu = new ContextMenu(cloneOption, existingOption);
this.loadNewRepoButton.setOnAction(e -> newRepoOptionsMenu.show(this.loadNewRepoButton, Side.BOTTOM ,0, 0));
this.loadNewRepoButton.setTooltip(new Tooltip("Load a new repository"));
// Buttons start out disabled, since no repo is loaded
this.setButtonsDisabled(true);
// Branch selector and trigger button starts invisible, since there's no repo and no branches
this.branchDropdownSelector.setVisible(false);
this.theModel.loadRecentRepoHelpersFromStoredPathStrings();
this.theModel.loadMostRecentRepoHelper();
this.initPanelViews();
this.updateUIEnabledStatus();
this.updateRecentReposDropdown();
RepositoryMonitor.beginWatchingRemote(theModel);
RepositoryMonitor.hasFoundNewRemoteChanges.addListener((observable, oldValue, newValue) -> {
if(newValue) showNewRemoteChangesNotification();
});
RepositoryMonitor.beginWatchingLocal(this);
}
/**
* Sets up the layout parameters for things that cannot be set in FXML
*/
private void initializeLayoutParameters(){
openRepoDirButton.setMinSize(Control.USE_PREF_SIZE, Control.USE_PREF_SIZE);
gitStatusButton.setMinSize(Control.USE_PREF_SIZE, Control.USE_PREF_SIZE);
commitButton.setMinSize(Control.USE_PREF_SIZE, Control.USE_PREF_SIZE);
mergeFromFetchButton.setMinSize(Control.USE_PREF_SIZE, Control.USE_PREF_SIZE);
pushButton.setMinSize(Control.USE_PREF_SIZE, Control.USE_PREF_SIZE);
fetchButton.setMinSize(Control.USE_PREF_SIZE, Control.USE_PREF_SIZE);
branchesButton.setMinSize(Control.USE_PREF_SIZE, Control.USE_PREF_SIZE);
gitStatusButton.setMaxWidth(Double.MAX_VALUE);
workingTreePanelView.setMinSize(Control.USE_PREF_SIZE, 200);
commitMessageField.setMinSize(Control.USE_PREF_SIZE, Control.USE_PREF_SIZE);
branchDropdownSelector.setMinSize(Control.USE_PREF_SIZE, Control.USE_PREF_SIZE);
// currentRepoLabel.setMaxWidth(CURRENT_REPO_LABEL_MAX_WIDTH);//- openRepoDirButton.getWidth());
remoteCommitTreePanelView.heightProperty().addListener((observable, oldValue, newValue) -> {
remoteCircle.setCenterY(newValue.doubleValue() / 2.0);
if(oldValue.doubleValue() == 0){
remoteCircle.setRadius(newValue.doubleValue() / 4.0);
}
});
commitInfoNameCopyButton.setMinSize(Control.USE_PREF_SIZE, Control.USE_PREF_SIZE);
commitInfoGoToButton.setMinSize(Control.USE_PREF_SIZE, Control.USE_PREF_SIZE);
commitInfoNameText.maxWidthProperty().bind(commitInfoMessageText.widthProperty()
.subtract(commitInfoGoToButton.widthProperty())
.subtract(commitInfoNameCopyButton.widthProperty())
.subtract(10)); // The gap between each button and this label is 5
}
/**
* Gets the local branches and populates the branch selector dropdown.
*
* @throws NoRepoLoadedException
* @throws MissingRepoException
*/
public void updateBranchDropdown() throws NoRepoLoadedException, MissingRepoException, IOException, GitAPIException {
RepoHelper currentRepoHelper = this.theModel.getCurrentRepoHelper();
if(currentRepoHelper==null) throw new NoRepoLoadedException();
if(!currentRepoHelper.exists()) throw new MissingRepoException();
List<LocalBranchHelper> branches = currentRepoHelper.callGitForLocalBranches();
currentRepoHelper.refreshCurrentBranch();
LocalBranchHelper currentBranch = currentRepoHelper.getCurrentBranch();
Platform.runLater(() -> {
this.branchDropdownSelector.setVisible(true);
this.branchDropdownSelector.getItems().setAll(branches);
if(this.branchDropdownSelector.getValue() == null || !this.branchDropdownSelector.getValue().getBranchName().equals(currentBranch.getBranchName())){
this.branchDropdownSelector.setValue(currentBranch);
}
});
}
/**
* Called when a selection is made from the 'Load New Repository' menu. Creates a new repository
* using the given builder and updates the UI
* @param builder the builder to use to create a new repository
*/
private synchronized void handleLoadRepoMenuItem(RepoHelperBuilder builder){
try{
RepoHelper repoHelper = builder.getRepoHelperFromDialogs();
BusyWindow.show();
RepositoryMonitor.pause();
Thread th = new Thread(new Task<Void>(){
@Override
protected Void call() {
try {
theModel.openRepoFromHelper(repoHelper);
initPanelViews();
updateUIEnabledStatus();
} catch(BackingStoreException | ClassNotFoundException e) {
// These should only occur when the recent repo information
// fails to be loaded or stored, respectively
// Should be ok to silently fail
} catch (MissingRepoException e) {
showMissingRepoNotification();
updateRecentReposDropdown();
} catch (IOException e) {
// Somehow, the repository failed to get properly loaded
// TODO: better error message?
showRepoWasNotLoadedNotification();
} finally{
BusyWindow.hide();
RepositoryMonitor.unpause();
}
return null;
}
});
th.setDaemon(true);
th.setName("Loading existing/cloning repository");
th.start();
} catch (IllegalArgumentException e) {
showInvalidRepoNotification();
e.printStackTrace();
} catch(NoOwnerInfoException e) {
showNotLoggedInNotification(() -> handleLoadRepoMenuItem(builder));
} catch(JGitInternalException e){
showNonEmptyFolderNotification();
} catch(InvalidRemoteException e){
showInvalidRemoteNotification();
} catch(TransportException e){
showNotAuthorizedNotification(() -> handleLoadRepoMenuItem(builder));
} catch (NoRepoSelectedException e) {
// The user pressed cancel on the dialog box. Do nothing!
} catch(IOException | GitAPIException e){
// Somehow, the repository failed to get properly loaded
// TODO: better error message?
showRepoWasNotLoadedNotification();
}
}
/**
* Gets the current RepoHelpers and puts them in the recent repos dropdown
* selector.
*/
@FXML
private void updateRecentReposDropdown() {
List<RepoHelper> repoHelpers = this.theModel.getAllRepoHelpers();
RepoHelper currentRepo = this.theModel.getCurrentRepoHelper();
Platform.runLater(() -> {
this.repoDropdownSelector.setItems(FXCollections.observableArrayList(repoHelpers));
this.repoDropdownSelector.setValue(currentRepo);
});
}
/**
* Loads the given repository and updates the UI accordingly.
* @param repoHelper the repository to open
*/
private synchronized void handleRecentRepoMenuItem(RepoHelper repoHelper){
BusyWindow.show();
RepositoryMonitor.pause();
Thread th = new Thread(new Task<Void>(){
@Override
protected Void call() throws Exception{
try {
theModel.openRepoFromHelper(repoHelper);
initPanelViews();
updateUIEnabledStatus();
} catch (IOException e) {
// Somehow, the repository failed to get properly loaded
// TODO: better error message?
showRepoWasNotLoadedNotification();
} catch(MissingRepoException e){
showMissingRepoNotification();
updateRecentReposDropdown();
} catch (BackingStoreException | ClassNotFoundException e) {
// These should only occur when the recent repo information
// fails to be loaded or stored, respectively
// Should be ok to silently fail
} finally{
BusyWindow.hide();
RepositoryMonitor.unpause();
}
return null;
}
});
th.setDaemon(true);
th.setName("Open repository from recent list");
th.start();
}
/**
* A helper method that grabs the currently selected repo in the repo dropdown
* and loads it using the handleRecentRepoMenuItem(...) method.
*/
public void loadSelectedRepo() {
RepoHelper selectedRepoHelper = this.repoDropdownSelector.getValue();
this.handleRecentRepoMenuItem(selectedRepoHelper);
}
/**
* Perform the updateFileStatusInRepo() method for each file whose
* checkbox is checked. Then commit with the commit message and push.
*/
public void handleCommitButton() {
try {
if(this.theModel.getCurrentRepoHelper() == null) throw new NoRepoLoadedException();
if(!this.theModel.getCurrentRepoHelper().exists()) throw new MissingRepoException();
String commitMessage = commitMessageField.getText();
if(!workingTreePanelView.isAnyFileSelected()) throw new NoFilesStagedForCommitException();
if(commitMessage.length() == 0) throw new NoCommitMessageException();
Thread th = new Thread(new Task<Void>(){
@Override
protected Void call() {
try{
for(RepoFile checkedFile : workingTreePanelView.getCheckedFilesInDirectory()){
checkedFile.updateFileStatusInRepo();
}
theModel.getCurrentRepoHelper().commit(commitMessage);
                        // Now clear the commit text and do a view reload (or `git status`) to show that something happened
commitMessageField.clear();
gitStatus();
} catch(JGitInternalException e){
showGenericErrorNotification();
e.printStackTrace();
} catch(MissingRepoException e){
showMissingRepoNotification();
setButtonsDisabled(true);
updateRecentReposDropdown();
} catch (TransportException e) {
showNotAuthorizedNotification(null);
} catch (WrongRepositoryStateException e) {
System.out.println("Threw a WrongRepositoryStateException");
e.printStackTrace();
// TODO remove the above debug statements
// This should only come up when the user chooses to resolve conflicts in a file.
// Do nothing.
} catch(GitAPIException | IOException e){
// Git error, or error presenting the file chooser window
showGenericErrorNotification();
e.printStackTrace();
}
return null;
}
});
th.setDaemon(true);
th.setName("Git commit");
th.start();
} catch(NoRepoLoadedException e){
this.showNoRepoLoadedNotification();
setButtonsDisabled(true);
} catch(MissingRepoException e){
this.showMissingRepoNotification();
setButtonsDisabled(true);
updateRecentReposDropdown();
} catch(NoCommitMessageException e){
this.showNoCommitMessageNotification();
}catch(NoFilesStagedForCommitException e){
this.showNoFilesStagedForCommitNotification();
}
}
/**
* Merges in FETCH_HEAD (after a fetch).
*/
public void handleMergeFromFetchButton() {
try{
if(this.theModel.getCurrentRepoHelper() == null) throw new NoRepoLoadedException();
if(!this.theModel.getCurrentRepoHelper().hasUnmergedCommits()) throw new NoCommitsToMergeException();
Thread th = new Thread(new Task<Void>(){
@Override
protected Void call(){
try{
if(!theModel.getCurrentRepoHelper().mergeFromFetch()){
showUnsuccessfulMergeNotification();
}
gitStatus();
} catch(InvalidRemoteException e){
showNoRemoteNotification();
} catch(TransportException e){
showNotAuthorizedNotification(null);
} catch (NoMergeBaseException | JGitInternalException e) {
// Merge conflict
System.out.println("*****");
e.printStackTrace();
// todo: figure out rare NoMergeBaseException.
// Has something to do with pushing conflicts.
// At this point in the stack, it's caught as a JGitInternalException.
} catch(MissingRepoException e){
showMissingRepoNotification();
setButtonsDisabled(true);
updateRecentReposDropdown();
} catch(GitAPIException | IOException e){
showGenericErrorNotification();
e.printStackTrace();
}
return null;
}
});
th.setDaemon(true);
th.setName("Git merge FETCH_HEAD");
th.start();
}catch(NoRepoLoadedException e){
this.showNoRepoLoadedNotification();
setButtonsDisabled(true);
}catch(NoCommitsToMergeException e){
this.showNoCommitsToMergeNotification();
}
}
/**
* Performs a `git push`
*/
public void handlePushButton() {
try {
if(this.theModel.getCurrentRepoHelper() == null) throw new NoRepoLoadedException();
if(!this.theModel.getCurrentRepoHelper().hasUnpushedCommits()) throw new NoCommitsToPushException();
pushButton.setVisible(false);
pushProgressIndicator.setVisible(true);
Thread th = new Thread(new Task<Void>(){
@Override
protected Void call() {
try{
RepositoryMonitor.resetFoundNewChanges(false);
theModel.getCurrentRepoHelper().pushAll();
gitStatus();
} catch(InvalidRemoteException e){
showNoRemoteNotification();
} catch(PushToAheadRemoteError e) {
showPushToAheadRemoteNotification();
} catch (TransportException e) {
if (e.getMessage().contains("git-receive-pack not found")) {
// The error has this message if there is no longer a remote to push to
showLostRemoteNotification();
} else {
showNotAuthorizedNotification(null);
}
} catch(MissingRepoException e){
showMissingRepoNotification();
setButtonsDisabled(true);
updateRecentReposDropdown();
} catch(GitAPIException e){
showGenericErrorNotification();
e.printStackTrace();
}finally{
pushProgressIndicator.setVisible(false);
pushButton.setVisible(true);
}
return null;
}
});
th.setDaemon(true);
th.setName("Git push");
th.start();
}catch(NoRepoLoadedException e){
this.showNoRepoLoadedNotification();
setButtonsDisabled(true);
}catch(NoCommitsToPushException e){
this.showNoCommitsToPushNotification();
}
}
/**
* Handles a click on the "Fetch" button. Calls gitFetch()
*/
public void handleFetchButton(){
gitFetch();
}
/**
* Queries the remote for new commits, and updates the local
* remote as necessary.
* Equivalent to `git fetch`
*/
public synchronized void gitFetch(){
try{
if(this.theModel.getCurrentRepoHelper() == null) throw new NoRepoLoadedException();
fetchButton.setVisible(false);
fetchProgressIndicator.setVisible(true);
Thread th = new Thread(new Task<Void>(){
@Override
protected Void call() {
try{
RepositoryMonitor.resetFoundNewChanges(false);
if(!theModel.getCurrentRepoHelper().fetch()){
showNoCommitsFetchedNotification();
}
gitStatus();
} catch(InvalidRemoteException e){
showNoRemoteNotification();
} catch (TransportException e) {
showNotAuthorizedNotification(null);
} catch(MissingRepoException e){
showMissingRepoNotification();
setButtonsDisabled(true);
updateRecentReposDropdown();
} catch(GitAPIException e){
showGenericErrorNotification();
e.printStackTrace();
} finally{
fetchProgressIndicator.setVisible(false);
fetchButton.setVisible(true);
}
return null;
}
});
th.setDaemon(true);
th.setName("Git fetch");
th.start();
}catch(NoRepoLoadedException e){
this.showNoRepoLoadedNotification();
setButtonsDisabled(true);
}
}
/**
* Updates the panel views when the "git status" button is clicked.
* Highlights the current HEAD.
*/
public void onGitStatusButton(){
this.gitStatus();
CommitTreeController.focusCommitInGraph(theModel.getCurrentRepoHelper().getHead());
}
/**
* Updates the trees, changed files, and branch information. Equivalent
* to 'git status'
*
* See initPanelViews for Thread information
*/
public synchronized void gitStatus(){
RepositoryMonitor.pause();
Thread th = new Thread(new Task<Void>(){
@Override
protected Void call(){
ReentrantLock lock = new ReentrantLock();
Condition finishedUpdate = lock.newCondition();
Platform.runLater(() -> {
lock.lock();
try{
workingTreePanelView.drawDirectoryView();
localCommitTreeModel.update();
remoteCommitTreeModel.update();
finishedUpdate.signal();
}catch(GitAPIException | IOException e){
showGenericErrorNotification();
e.printStackTrace();
}finally{
lock.unlock();
}
});
lock.lock();
try{
finishedUpdate.await(); // updateBranchDropdown needs to be called after the trees have
updateBranchDropdown(); // been updated, but shouldn't run on the Application thread
} catch(MissingRepoException e){
showMissingRepoNotification();
setButtonsDisabled(true);
updateRecentReposDropdown();
} catch(NoRepoLoadedException e){
showNoRepoLoadedNotification();
setButtonsDisabled(true);
} catch(GitAPIException | IOException | InterruptedException e){
showGenericErrorNotification();
e.printStackTrace();
}finally{
lock.unlock();
RepositoryMonitor.unpause();
}
return null;
}
});
th.setDaemon(true);
th.setName("Git status");
th.start();
}
/**
* When the circle representing the remote repo is clicked, go to the
* corresponding remote url
* @param event the mouse event corresponding to the click
*/
public void handleRemoteCircleMouseClick(MouseEvent event){
if(event.getButton() != MouseButton.PRIMARY) return;
Desktop desktop = Desktop.isDesktopSupported() ? Desktop.getDesktop() : null;
if (desktop != null && desktop.isSupported(Desktop.Action.BROWSE)) {
try {
if(this.theModel.getCurrentRepoHelper() == null) throw new NoRepoLoadedException();
if(!this.theModel.getCurrentRepoHelper().exists()) throw new MissingRepoException();
List<String> remoteURLs = this.theModel.getCurrentRepoHelper().getLinkedRemoteRepoURLs();
if(remoteURLs.size() == 0){
this.showNoRemoteNotification();
}
for (String remoteURL : remoteURLs) {
if(remoteURL.contains("@")){
remoteURL = "https://"+remoteURL.replace(":","/").split("@")[1];
}
desktop.browse(new URI(remoteURL));
}
}catch(URISyntaxException | IOException e){
this.showGenericErrorNotification();
}catch(MissingRepoException e){
this.showMissingRepoNotification();
this.setButtonsDisabled(true);
this.updateRecentReposDropdown();
}catch(NoRepoLoadedException e){
this.showNoRepoLoadedNotification();
this.setButtonsDisabled(true);
}
}
}
/**
* Initializes each panel of the view
*
* TODO: change this if/when we update the JDK to 8u60 or higher
* With JDK version 8u40, creation of control items needs to take place
* in the application thread even if they are not added to the scene.
* This is fixed in JDK 8u60 and above
* https://bugs.openjdk.java.net/browse/JDK-8097541
*
* This applies to all methods used here
*/
private synchronized void initPanelViews() {
BusyWindow.show();
Platform.runLater(() -> {
try{
workingTreePanelView.drawDirectoryView();
}catch(GitAPIException e){
showGenericErrorNotification();
}
localCommitTreeModel.init();
remoteCommitTreeModel.init();
BusyWindow.hide();
});
}
/**
* A helper method for enabling/disabling buttons.
*
* @param disable a boolean for whether or not to disable the buttons.
*/
private void setButtonsDisabled(boolean disable) {
Platform.runLater(() -> {
openRepoDirButton.setDisable(disable);
gitStatusButton.setDisable(disable);
commitButton.setDisable(disable);
mergeFromFetchButton.setDisable(disable);
pushButton.setDisable(disable);
fetchButton.setDisable(disable);
selectAllButton.setDisable(disable);
deselectAllButton.setDisable(disable);
remoteCircle.setVisible(!disable);
commitMessageField.setDisable(disable);
});
}
/**
* Checks out the branch that is currently selected in the dropdown.
*/
public void loadSelectedBranch() {
LocalBranchHelper selectedBranch = this.branchDropdownSelector.getValue();
if(selectedBranch == null) return;
Thread th = new Thread(new Task<Void>(){
@Override
protected Void call() {
try{
// This is an edge case for new local repos.
//
// When a repo is first initialized,the `master` branch is checked-out,
// but it is "unborn" -- it doesn't exist yet in the `refs/heads` folder
// until there are commits.
//
// (see http://stackoverflow.com/a/21255920/5054197)
//
// So, check that there are refs in the refs folder (if there aren't, do nothing):
String gitDirString = theModel.getCurrentRepo().getDirectory().toString();
Path refsHeadsFolder = Paths.get(gitDirString + "/refs/heads");
DirectoryStream<Path> pathStream = Files.newDirectoryStream(refsHeadsFolder);
Iterator<Path> pathStreamIterator = pathStream.iterator();
if (pathStreamIterator.hasNext()){ // => There ARE branch refs in the folder
selectedBranch.checkoutBranch();
CommitTreeController.focusCommitInGraph(selectedBranch.getHead());
}
}catch(CheckoutConflictException e){
showCheckoutConflictsNotification(e.getConflictingPaths());
try{
updateBranchDropdown();
}catch(NoRepoLoadedException e1){
showNoRepoLoadedNotification();
setButtonsDisabled(true);
}catch(MissingRepoException e1){
showMissingRepoNotification();
setButtonsDisabled(true);
updateRecentReposDropdown();
}catch(GitAPIException | IOException e1){
showGenericErrorNotification();
e1.printStackTrace();
}
}catch(GitAPIException | IOException e){
showGenericErrorNotification();
e.printStackTrace();
}
return null;
}
});
th.setDaemon(true);
th.setName("Branch Checkout");
th.start();
}
/**
     * A helper method to enable or disable buttons/UI elements
* depending on whether there is a repo open for the buttons to
* interact with.
*/
private void updateUIEnabledStatus() {
try{
if(this.theModel.getCurrentRepoHelper() == null && this.theModel.getAllRepoHelpers().size() == 0) {
// (There's no repo for the buttons to interact with)
setButtonsDisabled(true);
Platform.runLater(() -> this.branchDropdownSelector.setVisible(false));
} else if (this.theModel.getCurrentRepoHelper() == null && this.theModel.getAllRepoHelpers().size() > 0) {
// (There's no repo for buttons to interact with, but there are repos in the menu bar)
setButtonsDisabled(true);
Platform.runLater(() -> this.branchDropdownSelector.setVisible(false));
}else{
setButtonsDisabled(false);
this.updateBranchDropdown();
}
}catch(NoRepoLoadedException e){
this.showNoRepoLoadedNotification();
setButtonsDisabled(true);
}catch(MissingRepoException e){
this.showMissingRepoNotification();
setButtonsDisabled(true);
this.updateRecentReposDropdown();
} catch (GitAPIException | IOException e) {
this.showGenericErrorNotification();
e.printStackTrace();
}
}
/**
* Clears the history stored with the Preferences API.
*
* TODO: Come up with better solution?
*
* @throws BackingStoreException
* @throws IOException
* @throws ClassNotFoundException
*/
public void clearSavedStuff() throws BackingStoreException, IOException, ClassNotFoundException {
this.theModel.clearStoredPreferences();
this.showPrefsClearedNotification();
}
/**
* Creates a new owner and set it as the current default owner.
*/
public boolean switchUser() {
// Begin with a nullified RepoOwner:
RepoOwner newOwner = this.theModel.getDefaultOwner() == null ? new RepoOwner(null, null) : this.theModel.getDefaultOwner();
boolean switchedLogin = true;
try {
newOwner = new RepoOwner();
} catch (CancelledLoginException e) {
// User cancelled the login, so we'll leave the owner full of nullness.
switchedLogin = false;
}
RepoHelper currentRepoHelper = theModel.getCurrentRepoHelper();
if(currentRepoHelper != null){
currentRepoHelper.setOwner(newOwner);
}
this.theModel.setCurrentDefaultOwner(newOwner);
return switchedLogin;
}
/**
* Called when the switch user button is clicked. See switchUser
*/
public void handleSwitchUserButton(){
this.switchUser();
}
/**
* Opens the current repo directory (e.g. in Finder or Windows Explorer).
*/
public void openRepoDirectory(){
if (Desktop.isDesktopSupported()) {
try{
if(this.theModel.getCurrentRepoHelper() == null) throw new NoRepoLoadedException();
Desktop.getDesktop().open(this.theModel.getCurrentRepoHelper().localPath.toFile());
}catch(IOException | IllegalArgumentException e){
this.showFailedToOpenLocalNotification();
}catch(NoRepoLoadedException e){
this.showNoRepoLoadedNotification();
setButtonsDisabled(true);
}
}
}
/// BEGIN: ERROR NOTIFICATIONS:
private void showNotLoggedInNotification(Runnable callBack) {
Platform.runLater(() -> {
this.notificationPane.setText("You need to log in to do that.");
Action loginAction = new Action("Enter login info", e -> {
this.notificationPane.hide();
if(this.switchUser()){
if(callBack != null) callBack.run();
}
});
this.notificationPane.getActions().clear();
this.notificationPane.getActions().setAll(loginAction);
this.notificationPane.show();
});
}
private void showNoRepoLoadedNotification() {
Platform.runLater(() -> {
this.notificationPane.setText("You need to load a repository before you can perform operations on it!");
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
private void showInvalidRepoNotification() {
Platform.runLater(()-> {
this.notificationPane.setText("Make sure the directory you selected contains an existing (non-bare) Git repository.");
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
private void showMissingRepoNotification(){
Platform.runLater(()-> {
this.notificationPane.setText("That repository no longer exists.");
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
private void showNoRemoteNotification(){
Platform.runLater(()-> {
String name = this.theModel.getCurrentRepoHelper() != null ? this.theModel.getCurrentRepoHelper().toString() : "the current repository";
this.notificationPane.setText("There is no remote repository associated with " + name);
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
private void showFailedToOpenLocalNotification(){
Platform.runLater(()-> {
String path = this.theModel.getCurrentRepoHelper() != null ? this.theModel.getCurrentRepoHelper().getLocalPath().toString() : "the location of the local repository";
this.notificationPane.setText("Could not open directory at " + path);
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
private void showNonEmptyFolderNotification() {
Platform.runLater(()-> {
this.notificationPane.setText("Make sure the directory you selected is completely empty. The best " +
"way to do this is to create a new folder from the directory chooser.");
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
private void showInvalidRemoteNotification() {
Platform.runLater(()-> {
this.notificationPane.setText("Make sure you entered the correct remote URL.");
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
private void showGenericErrorNotification() {
Platform.runLater(()-> {
this.notificationPane.setText("Sorry, there was an error.");
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
private void showNotAuthorizedNotification(Runnable callback) {
Platform.runLater(() -> {
this.notificationPane.setText("The login information you gave does not allow you to modify this repository. Try switching your login and trying again.");
Action loginAction = new Action("Log in", e -> {
this.notificationPane.hide();
if(this.switchUser()){
if(callback != null) callback.run();
}
});
this.notificationPane.getActions().clear();
this.notificationPane.getActions().setAll(loginAction);
this.notificationPane.show();
});
}
private void showRepoWasNotLoadedNotification() {
Platform.runLater(()-> {
this.notificationPane.setText("No repository was loaded.");
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
private void showPrefsClearedNotification() {
Platform.runLater(()-> {
this.notificationPane.setText("Your recent repositories have been cleared. Restart the app for changes to take effect.");
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
private void showCheckoutConflictsNotification(List<String> conflictingPaths) {
Platform.runLater(() -> {
String conflictList = "";
for(String pathName : conflictingPaths){
conflictList += "\n" + pathName;
}
Alert alert = new Alert(Alert.AlertType.ERROR);
alert.setTitle("Conflicting files");
alert.setHeaderText("Can't checkout that branch");
alert.setContentText("You can't switch to that branch because of the following conflicting files between that branch and your current branch: "
+ conflictList);
this.notificationPane.setText("You can't switch to that branch because there would be a merge conflict. Stash your changes or resolve conflicts first.");
Action seeConflictsAction = new Action("See conflicts", e -> {
this.notificationPane.hide();
alert.showAndWait();
});
this.notificationPane.getActions().clear();
this.notificationPane.getActions().setAll(seeConflictsAction);
this.notificationPane.show();
});
}
private void showPushToAheadRemoteNotification(){
Platform.runLater(() -> {
this.notificationPane.setText("The remote repository is ahead of the local. You need to fetch and then merge (pull) before pushing.");
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
private void showLostRemoteNotification() {
Platform.runLater(() -> {
this.notificationPane.setText("The push failed because the remote repository couldn't be found.");
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
private void showUnsuccessfulMergeNotification(){
Platform.runLater(() -> {
this.notificationPane.setText("Merging failed");
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
private void showNewRemoteChangesNotification(){
Platform.runLater(() -> {
this.notificationPane.setText("There are new changes in the remote repository.");
Action fetchAction = new Action("Fetch", e -> {
this.notificationPane.hide();
gitFetch();
});
Action ignoreAction = new Action("Ignore", e -> {
this.notificationPane.hide();
RepositoryMonitor.resetFoundNewChanges(true);
});
this.notificationPane.getActions().clear();
this.notificationPane.getActions().setAll(fetchAction, ignoreAction);
this.notificationPane.show();
});
}
private void showNoFilesStagedForCommitNotification(){
Platform.runLater(() -> {
this.notificationPane.setText("You need to select which files to commit");
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
private void showNoCommitMessageNotification(){
Platform.runLater(() -> {
this.notificationPane.setText("You need to write a commit message in order to commit your changes");
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
private void showNoCommitsToPushNotification(){
Platform.runLater(() -> {
this.notificationPane.setText("There aren't any local commits to push");
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
private void showNoCommitsToMergeNotification(){
Platform.runLater(() -> {
this.notificationPane.setText("There aren't any fetched commits to merge");
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
private void showNoCommitsFetchedNotification(){
Platform.runLater(() -> {
this.notificationPane.setText("No new commits were fetched");
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
// END: ERROR NOTIFICATIONS ^^^
/**
* Opens up the current repo helper's Branch Manager window after
* passing in this SessionController object, so that the
* BranchManagerController can update the main window's views.
*/
public void showBranchManager() {
try{
if(this.theModel.getCurrentRepoHelper() == null) throw new NoRepoLoadedException();
this.theModel.getCurrentRepoHelper().showBranchManagerWindow();
}catch(IOException e){
this.showGenericErrorNotification();
e.printStackTrace();
}catch(NoRepoLoadedException e){
this.showNoRepoLoadedNotification();
setButtonsDisabled(true);
}
}
/**
* Displays information about the commit with the given id
* @param id the selected commit
*/
public void selectCommit(String id){
CommitHelper commit = this.theModel.getCurrentRepoHelper().getCommit(id);
commitInfoNameText.setText(commit.getName());
commitInfoAuthorText.setText(commit.getAuthorName());
commitInfoDateText.setText(commit.getFormattedWhen());
commitInfoMessageText.setVisible(true);
commitInfoNameCopyButton.setVisible(true);
commitInfoGoToButton.setVisible(true);
String s = "";
for(BranchHelper branch : commit.getBranchesAsHead()){
if(branch instanceof RemoteBranchHelper){
s = s + "origin/";
}
s = s + branch.getBranchName() + "\n";
}
if(s.length() > 0){
commitInfoMessageText.setText("Head of branches: \n"+s+"\n\n"+commit.getMessage(true));
}else{
commitInfoMessageText.setText(commit.getMessage(true));
}
}
/**
* Stops displaying commit information
*/
public void clearSelectedCommit(){
commitInfoNameText.setText("");
commitInfoAuthorText.setText("");
commitInfoDateText.setText("");
commitInfoMessageText.setText("");
commitInfoMessageText.setVisible(false);
commitInfoNameCopyButton.setVisible(false);
commitInfoGoToButton.setVisible(false);
}
/**
* Copies the commit hash onto the clipboard
*/
public void handleCommitNameCopyButton(){
Clipboard clipboard = Clipboard.getSystemClipboard();
ClipboardContent content = new ClipboardContent();
content.putString(commitInfoNameText.getText());
clipboard.setContent(content);
}
/**
* Jumps to the selected commit in the tree display
*/
public void handleGoToCommitButton(){
String id = commitInfoNameText.getText();
CommitTreeController.focusCommitInGraph(id);
}
/**
* Selects all files in the working tree for a commit.
*
*/
public void onSelectAllButton() {
this.workingTreePanelView.setAllFilesSelected(true);
}
/**
* Deselects all files in the working tree for a commit.
*
*/
public void onDeselectAllButton() {
this.workingTreePanelView.setAllFilesSelected(false);
}
}
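
// --- Illustrative sketch (not part of the original source) ---
// gitStatus() above hands work to the JavaFX Application Thread and then blocks a
// background thread until that work has completed. The standalone sketch below shows
// the same lock/condition hand-off, with an explicit "done" flag so the wait cannot be
// missed if the UI runnable finishes before await() is reached. All names here are
// hypothetical; the imports used (Platform, ReentrantLock, Condition) are already
// present in this file.
class FxHandOffSketch {
    void runWithUiUpdate(Runnable uiUpdate, Runnable afterUpdate) {
        ReentrantLock lock = new ReentrantLock();
        Condition finished = lock.newCondition();
        boolean[] done = {false};
        Thread worker = new Thread(() -> {
            Platform.runLater(() -> {
                lock.lock();
                try {
                    uiUpdate.run();        // runs on the JavaFX Application Thread
                    done[0] = true;
                    finished.signal();
                } finally {
                    lock.unlock();
                }
            });
            lock.lock();
            try {
                while (!done[0]) finished.await();   // wait for the UI update to complete
                afterUpdate.run();                   // work that must follow the UI update
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            } finally {
                lock.unlock();
            }
        });
        worker.setDaemon(true);
        worker.start();
    }
}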
|
src/main/java/elegit/SessionController.java
|
package main.java.elegit;
import de.jensd.fx.glyphs.GlyphsDude;
import de.jensd.fx.glyphs.fontawesome.FontAwesomeIcon;
import javafx.application.Platform;
import javafx.collections.FXCollections;
import javafx.concurrent.Task;
import javafx.fxml.FXML;
import javafx.scene.Node;
import javafx.scene.control.*;
import javafx.scene.control.Button;
import javafx.scene.control.Label;
import javafx.scene.control.MenuItem;
import javafx.scene.control.TextArea;
import javafx.scene.input.Clipboard;
import javafx.scene.input.ClipboardContent;
import javafx.scene.input.MouseButton;
import javafx.scene.input.MouseEvent;
import javafx.scene.paint.Color;
import javafx.scene.shape.Circle;
import javafx.scene.text.Text;
import main.java.elegit.exceptions.*;
import org.controlsfx.control.NotificationPane;
import org.controlsfx.control.action.Action;
import org.eclipse.jgit.api.errors.*;
import org.eclipse.jgit.errors.NoMergeBaseException;
import java.awt.*;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;
import java.util.prefs.BackingStoreException;
/**
* The controller for the entire session.
*/
public class SessionController {
public ComboBox<LocalBranchHelper> branchDropdownSelector;
public ComboBox<RepoHelper> repoDropdownSelector;
public Button loadNewRepoButton;
private SessionModel theModel;
public Node root;
public NotificationPane notificationPane;
public Button selectAllButton;
public Button deselectAllButton;
public Button switchUserButton;
public Button clearRecentReposButton;
public Button openRepoDirButton;
public Button gitStatusButton;
public Button commitButton;
public Button mergeFromFetchButton;
public Button pushButton;
public Button fetchButton;
public Button branchesButton;
public ProgressIndicator fetchProgressIndicator;
public ProgressIndicator pushProgressIndicator;
public TextArea commitMessageField;
public WorkingTreePanelView workingTreePanelView;
public CommitTreePanelView localCommitTreePanelView;
public CommitTreePanelView remoteCommitTreePanelView;
public Circle remoteCircle;
public Label commitInfoNameText;
public Label commitInfoAuthorText;
public Label commitInfoDateText;
public Button commitInfoNameCopyButton;
public Button commitInfoGoToButton;
public TextArea commitInfoMessageText;
CommitTreeModel localCommitTreeModel;
CommitTreeModel remoteCommitTreeModel;
/**
* Initializes the environment by obtaining the model
* and putting the views on display.
*
* This method is automatically called by JavaFX.
*/
public void initialize() {
this.theModel = SessionModel.getSessionModel();
this.initializeLayoutParameters();
CommitTreeController.sessionController = this;
this.workingTreePanelView.setSessionModel(this.theModel);
this.localCommitTreeModel = new LocalCommitTreeModel(this.theModel, this.localCommitTreePanelView);
this.remoteCommitTreeModel = new RemoteCommitTreeModel(this.theModel, this.remoteCommitTreePanelView);
// Add FontAwesome icons to buttons:
Text openExternallyIcon = GlyphsDude.createIcon(FontAwesomeIcon.EXTERNAL_LINK);
openExternallyIcon.setFill(javafx.scene.paint.Color.WHITE);
this.openRepoDirButton.setGraphic(openExternallyIcon);
this.openRepoDirButton.setTooltip(new Tooltip("Open repository directory"));
Text plusIcon = GlyphsDude.createIcon(FontAwesomeIcon.PLUS);
plusIcon.setFill(Color.WHITE);
this.loadNewRepoButton.setGraphic(plusIcon);
Text userIcon = GlyphsDude.createIcon(FontAwesomeIcon.USER);
userIcon.setFill(Color.WHITE);
this.switchUserButton.setGraphic(userIcon);
Text branchIcon = GlyphsDude.createIcon(FontAwesomeIcon.CODE_FORK);
branchIcon.setFill(Color.WHITE);
this.branchesButton.setGraphic(branchIcon);
Text exclamationIcon = GlyphsDude.createIcon(FontAwesomeIcon.EXCLAMATION);
exclamationIcon.setFill(Color.WHITE);
this.clearRecentReposButton.setGraphic(exclamationIcon);
Text clipboardIcon = GlyphsDude.createIcon(FontAwesomeIcon.CLIPBOARD);
clipboardIcon.setFill(Color.WHITE);
this.commitInfoNameCopyButton.setGraphic(clipboardIcon);
Text goToIcon = GlyphsDude.createIcon(FontAwesomeIcon.ARROW_CIRCLE_LEFT);
goToIcon.setFill(Color.WHITE);
this.commitInfoGoToButton.setGraphic(goToIcon);
// Set up the "+" button for loading new repos (give it a menu)
Text downloadIcon = GlyphsDude.createIcon(FontAwesomeIcon.CLOUD_DOWNLOAD);
MenuItem cloneOption = new MenuItem("Clone repository", downloadIcon);
cloneOption.setOnAction(t -> handleLoadRepoMenuItem(new ClonedRepoHelperBuilder(this.theModel)));
Text folderOpenIcon = GlyphsDude.createIcon(FontAwesomeIcon.FOLDER_OPEN_ALT);
MenuItem existingOption = new MenuItem("Load existing repository", folderOpenIcon);
existingOption.setOnAction(t -> handleLoadRepoMenuItem(new ExistingRepoHelperBuilder(this.theModel)));
ContextMenu newRepoOptionsMenu = new ContextMenu(cloneOption, existingOption);
this.loadNewRepoButton.setContextMenu(newRepoOptionsMenu);
// Buttons start out disabled, since no repo is loaded
this.setButtonsDisabled(true);
// Branch selector and trigger button start out invisible, since there's no repo and no branches
this.branchDropdownSelector.setVisible(false);
this.theModel.loadRecentRepoHelpersFromStoredPathStrings();
this.theModel.loadMostRecentRepoHelper();
this.initPanelViews();
this.updateUIEnabledStatus();
this.updateRecentReposDropdown();
RepositoryMonitor.beginWatchingRemote(theModel);
RepositoryMonitor.hasFoundNewRemoteChanges.addListener((observable, oldValue, newValue) -> {
if(newValue) showNewRemoteChangesNotification();
});
RepositoryMonitor.beginWatchingLocal(this);
}
/**
* Sets up the layout parameters for things that cannot be set in FXML
*/
private void initializeLayoutParameters(){
openRepoDirButton.setMinSize(Control.USE_PREF_SIZE, Control.USE_PREF_SIZE);
gitStatusButton.setMinSize(Control.USE_PREF_SIZE, Control.USE_PREF_SIZE);
commitButton.setMinSize(Control.USE_PREF_SIZE, Control.USE_PREF_SIZE);
mergeFromFetchButton.setMinSize(Control.USE_PREF_SIZE, Control.USE_PREF_SIZE);
pushButton.setMinSize(Control.USE_PREF_SIZE, Control.USE_PREF_SIZE);
fetchButton.setMinSize(Control.USE_PREF_SIZE, Control.USE_PREF_SIZE);
branchesButton.setMinSize(Control.USE_PREF_SIZE, Control.USE_PREF_SIZE);
gitStatusButton.setMaxWidth(Double.MAX_VALUE);
workingTreePanelView.setMinSize(Control.USE_PREF_SIZE, 200);
commitMessageField.setMinSize(Control.USE_PREF_SIZE, Control.USE_PREF_SIZE);
branchDropdownSelector.setMinSize(Control.USE_PREF_SIZE, Control.USE_PREF_SIZE);
// currentRepoLabel.setMaxWidth(CURRENT_REPO_LABEL_MAX_WIDTH);//- openRepoDirButton.getWidth());
remoteCommitTreePanelView.heightProperty().addListener((observable, oldValue, newValue) -> {
remoteCircle.setCenterY(newValue.doubleValue() / 2.0);
if(oldValue.doubleValue() == 0){
remoteCircle.setRadius(newValue.doubleValue() / 4.0);
}
});
commitInfoNameCopyButton.setMinSize(Control.USE_PREF_SIZE, Control.USE_PREF_SIZE);
commitInfoGoToButton.setMinSize(Control.USE_PREF_SIZE, Control.USE_PREF_SIZE);
commitInfoNameText.maxWidthProperty().bind(commitInfoMessageText.widthProperty()
.subtract(commitInfoGoToButton.widthProperty())
.subtract(commitInfoNameCopyButton.widthProperty())
.subtract(10)); // The gap between each button and this label is 5
}
/**
* Gets the local branches and populates the branch selector dropdown.
*
* @throws NoRepoLoadedException
* @throws MissingRepoException
*/
public void updateBranchDropdown() throws NoRepoLoadedException, MissingRepoException, IOException, GitAPIException {
RepoHelper currentRepoHelper = this.theModel.getCurrentRepoHelper();
if(currentRepoHelper==null) throw new NoRepoLoadedException();
if(!currentRepoHelper.exists()) throw new MissingRepoException();
List<LocalBranchHelper> branches = currentRepoHelper.callGitForLocalBranches();
currentRepoHelper.refreshCurrentBranch();
LocalBranchHelper currentBranch = currentRepoHelper.getCurrentBranch();
Platform.runLater(() -> {
this.branchDropdownSelector.setVisible(true);
this.branchDropdownSelector.getItems().setAll(branches);
if(this.branchDropdownSelector.getValue() == null || !this.branchDropdownSelector.getValue().getBranchName().equals(currentBranch.getBranchName())){
this.branchDropdownSelector.setValue(currentBranch);
}
});
}
/**
* Called when a selection is made from the 'Load New Repository' menu. Creates a new repository
* using the given builder and updates the UI
* @param builder the builder to use to create a new repository
*/
private synchronized void handleLoadRepoMenuItem(RepoHelperBuilder builder){
try{
RepoHelper repoHelper = builder.getRepoHelperFromDialogs();
BusyWindow.show();
RepositoryMonitor.pause();
Thread th = new Thread(new Task<Void>(){
@Override
protected Void call() {
try {
theModel.openRepoFromHelper(repoHelper);
initPanelViews();
updateUIEnabledStatus();
} catch(BackingStoreException | ClassNotFoundException e) {
// These should only occur when the recent repo information
// fails to be loaded or stored, respectively
// Should be ok to silently fail
} catch (MissingRepoException e) {
showMissingRepoNotification();
updateRecentReposDropdown();
} catch (IOException e) {
// Somehow, the repository failed to get properly loaded
// TODO: better error message?
showRepoWasNotLoadedNotification();
} finally{
BusyWindow.hide();
RepositoryMonitor.unpause();
}
return null;
}
});
th.setDaemon(true);
th.setName("Loading existing/cloning repository");
th.start();
} catch (IllegalArgumentException e) {
showInvalidRepoNotification();
e.printStackTrace();
} catch(NoOwnerInfoException e) {
showNotLoggedInNotification(() -> handleLoadRepoMenuItem(builder));
} catch(JGitInternalException e){
showNonEmptyFolderNotification();
} catch(InvalidRemoteException e){
showInvalidRemoteNotification();
} catch(TransportException e){
showNotAuthorizedNotification(() -> handleLoadRepoMenuItem(builder));
} catch (NoRepoSelectedException e) {
// The user pressed cancel on the dialog box. Do nothing!
} catch(IOException | GitAPIException e){
// Somehow, the repository failed to get properly loaded
// TODO: better error message?
showRepoWasNotLoadedNotification();
}
}
/**
* Gets the current RepoHelpers and puts them in the recent repos dropdown
* selector.
*/
@FXML
private void updateRecentReposDropdown() {
List<RepoHelper> repoHelpers = this.theModel.getAllRepoHelpers();
RepoHelper currentRepo = this.theModel.getCurrentRepoHelper();
Platform.runLater(() -> {
this.repoDropdownSelector.setItems(FXCollections.observableArrayList(repoHelpers));
this.repoDropdownSelector.setValue(currentRepo);
});
}
/**
* Loads the given repository and updates the UI accordingly.
* @param repoHelper the repository to open
*/
private synchronized void handleRecentRepoMenuItem(RepoHelper repoHelper){
BusyWindow.show();
RepositoryMonitor.pause();
Thread th = new Thread(new Task<Void>(){
@Override
protected Void call() throws Exception{
try {
theModel.openRepoFromHelper(repoHelper);
initPanelViews();
updateUIEnabledStatus();
} catch (IOException e) {
// Somehow, the repository failed to get properly loaded
// TODO: better error message?
showRepoWasNotLoadedNotification();
} catch(MissingRepoException e){
showMissingRepoNotification();
updateRecentReposDropdown();
} catch (BackingStoreException | ClassNotFoundException e) {
// These should only occur when the recent repo information
// fails to be loaded or stored, respectively
// Should be ok to silently fail
} finally{
BusyWindow.hide();
RepositoryMonitor.unpause();
}
return null;
}
});
th.setDaemon(true);
th.setName("Open repository from recent list");
th.start();
}
/**
* A helper method that grabs the currently selected repo in the repo dropdown
* and loads it using the handleRecentRepoMenuItem(...) method.
*/
public void loadSelectedRepo() {
RepoHelper selectedRepoHelper = this.repoDropdownSelector.getValue();
this.handleRecentRepoMenuItem(selectedRepoHelper);
}
/**
* Perform the updateFileStatusInRepo() method for each file whose
* checkbox is checked. Then commit with the commit message and push.
*/
public void handleCommitButton() {
try {
if(this.theModel.getCurrentRepoHelper() == null) throw new NoRepoLoadedException();
if(!this.theModel.getCurrentRepoHelper().exists()) throw new MissingRepoException();
String commitMessage = commitMessageField.getText();
if(!workingTreePanelView.isAnyFileSelected()) throw new NoFilesStagedForCommitException();
if(commitMessage.length() == 0) throw new NoCommitMessageException();
Thread th = new Thread(new Task<Void>(){
@Override
protected Void call() {
try{
for(RepoFile checkedFile : workingTreePanelView.getCheckedFilesInDirectory()){
checkedFile.updateFileStatusInRepo();
}
theModel.getCurrentRepoHelper().commit(commitMessage);
// Now clear the commit text and reload the view (or run `git status`) to show that something happened
commitMessageField.clear();
gitStatus();
} catch(JGitInternalException e){
showGenericErrorNotification();
e.printStackTrace();
} catch(MissingRepoException e){
showMissingRepoNotification();
setButtonsDisabled(true);
updateRecentReposDropdown();
} catch (TransportException e) {
showNotAuthorizedNotification(null);
} catch (WrongRepositoryStateException e) {
System.out.println("Threw a WrongRepositoryStateException");
e.printStackTrace();
// TODO remove the above debug statements
// This should only come up when the user chooses to resolve conflicts in a file.
// Do nothing.
} catch(GitAPIException | IOException e){
// Git error, or error presenting the file chooser window
showGenericErrorNotification();
e.printStackTrace();
}
return null;
}
});
th.setDaemon(true);
th.setName("Git commit");
th.start();
} catch(NoRepoLoadedException e){
this.showNoRepoLoadedNotification();
setButtonsDisabled(true);
} catch(MissingRepoException e){
this.showMissingRepoNotification();
setButtonsDisabled(true);
updateRecentReposDropdown();
} catch(NoCommitMessageException e){
this.showNoCommitMessageNotification();
}catch(NoFilesStagedForCommitException e){
this.showNoFilesStagedForCommitNotification();
}
}
/**
* Merges in FETCH_HEAD (after a fetch).
*/
public void handleMergeFromFetchButton() {
try{
if(this.theModel.getCurrentRepoHelper() == null) throw new NoRepoLoadedException();
if(!this.theModel.getCurrentRepoHelper().hasUnmergedCommits()) throw new NoCommitsToMergeException();
Thread th = new Thread(new Task<Void>(){
@Override
protected Void call(){
try{
if(!theModel.getCurrentRepoHelper().mergeFromFetch()){
showUnsuccessfulMergeNotification();
}
gitStatus();
} catch(InvalidRemoteException e){
showNoRemoteNotification();
} catch(TransportException e){
showNotAuthorizedNotification(null);
} catch (NoMergeBaseException | JGitInternalException e) {
// Merge conflict
System.out.println("*****");
e.printStackTrace();
// todo: figure out rare NoMergeBaseException.
// Has something to do with pushing conflicts.
// At this point in the stack, it's caught as a JGitInternalException.
} catch(MissingRepoException e){
showMissingRepoNotification();
setButtonsDisabled(true);
updateRecentReposDropdown();
} catch(GitAPIException | IOException e){
showGenericErrorNotification();
e.printStackTrace();
}
return null;
}
});
th.setDaemon(true);
th.setName("Git merge FETCH_HEAD");
th.start();
}catch(NoRepoLoadedException e){
this.showNoRepoLoadedNotification();
setButtonsDisabled(true);
}catch(NoCommitsToMergeException e){
this.showNoCommitsToMergeNotification();
}
}
/**
* Performs a `git push`
*/
public void handlePushButton() {
try {
if(this.theModel.getCurrentRepoHelper() == null) throw new NoRepoLoadedException();
if(!this.theModel.getCurrentRepoHelper().hasUnpushedCommits()) throw new NoCommitsToPushException();
pushButton.setVisible(false);
pushProgressIndicator.setVisible(true);
Thread th = new Thread(new Task<Void>(){
@Override
protected Void call() {
try{
RepositoryMonitor.resetFoundNewChanges(false);
theModel.getCurrentRepoHelper().pushAll();
gitStatus();
} catch(InvalidRemoteException e){
showNoRemoteNotification();
} catch(PushToAheadRemoteError e) {
showPushToAheadRemoteNotification();
} catch (TransportException e) {
if (e.getMessage().contains("git-receive-pack not found")) {
// The error has this message if there is no longer a remote to push to
showLostRemoteNotification();
} else {
showNotAuthorizedNotification(null);
}
} catch(MissingRepoException e){
showMissingRepoNotification();
setButtonsDisabled(true);
updateRecentReposDropdown();
} catch(GitAPIException e){
showGenericErrorNotification();
e.printStackTrace();
}finally{
pushProgressIndicator.setVisible(false);
pushButton.setVisible(true);
}
return null;
}
});
th.setDaemon(true);
th.setName("Git push");
th.start();
}catch(NoRepoLoadedException e){
this.showNoRepoLoadedNotification();
setButtonsDisabled(true);
}catch(NoCommitsToPushException e){
this.showNoCommitsToPushNotification();
}
}
/**
* Handles a click on the "Fetch" button. Calls gitFetch()
*/
public void handleFetchButton(){
gitFetch();
}
/**
* Queries the remote for new commits, and updates the local
* copy of the remote branches as necessary.
* Equivalent to `git fetch`
*/
public synchronized void gitFetch(){
try{
if(this.theModel.getCurrentRepoHelper() == null) throw new NoRepoLoadedException();
fetchButton.setVisible(false);
fetchProgressIndicator.setVisible(true);
Thread th = new Thread(new Task<Void>(){
@Override
protected Void call() {
try{
RepositoryMonitor.resetFoundNewChanges(false);
if(!theModel.getCurrentRepoHelper().fetch()){
showNoCommitsFetchedNotification();
}
gitStatus();
} catch(InvalidRemoteException e){
showNoRemoteNotification();
} catch (TransportException e) {
showNotAuthorizedNotification(null);
} catch(MissingRepoException e){
showMissingRepoNotification();
setButtonsDisabled(true);
updateRecentReposDropdown();
} catch(GitAPIException e){
showGenericErrorNotification();
e.printStackTrace();
} finally{
fetchProgressIndicator.setVisible(false);
fetchButton.setVisible(true);
}
return null;
}
});
th.setDaemon(true);
th.setName("Git fetch");
th.start();
}catch(NoRepoLoadedException e){
this.showNoRepoLoadedNotification();
setButtonsDisabled(true);
}
}
/**
* Updates the panel views when the "git status" button is clicked.
* Highlights the current HEAD.
*/
public void onGitStatusButton(){
this.gitStatus();
CommitTreeController.focusCommitInGraph(theModel.getCurrentRepoHelper().getHead());
}
/**
* Updates the trees, changed files, and branch information. Equivalent
* to 'git status'
*
* See initPanelViews for Thread information
*/
public synchronized void gitStatus(){
RepositoryMonitor.pause();
Thread th = new Thread(new Task<Void>(){
@Override
protected Void call(){
ReentrantLock lock = new ReentrantLock();
Condition finishedUpdate = lock.newCondition();
Platform.runLater(() -> {
lock.lock();
try{
workingTreePanelView.drawDirectoryView();
localCommitTreeModel.update();
remoteCommitTreeModel.update();
finishedUpdate.signal();
}catch(GitAPIException | IOException e){
showGenericErrorNotification();
e.printStackTrace();
}finally{
lock.unlock();
}
});
lock.lock();
try{
finishedUpdate.await(); // updateBranchDropdown needs to be called after the trees have
updateBranchDropdown(); // been updated, but shouldn't run on the Application thread
} catch(MissingRepoException e){
showMissingRepoNotification();
setButtonsDisabled(true);
updateRecentReposDropdown();
} catch(NoRepoLoadedException e){
showNoRepoLoadedNotification();
setButtonsDisabled(true);
} catch(GitAPIException | IOException | InterruptedException e){
showGenericErrorNotification();
e.printStackTrace();
}finally{
lock.unlock();
RepositoryMonitor.unpause();
}
return null;
}
});
th.setDaemon(true);
th.setName("Git status");
th.start();
}
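/*
* A minimal sketch, assuming a CountDownLatch-based handoff is acceptable here, of the same
* pattern used above: do the view updates on the FX thread, then call updateBranchDropdown()
* off the FX thread once they finish. The latch cannot miss a wake-up if the Platform.runLater
* block completes before the background thread starts waiting. The helper name
* refreshViewsThenUpdateDropdown is illustrative only.
*/
private void refreshViewsThenUpdateDropdown() throws Exception {
java.util.concurrent.CountDownLatch finishedUpdate = new java.util.concurrent.CountDownLatch(1);
Platform.runLater(() -> {
try{
workingTreePanelView.drawDirectoryView(); // redraw the file list on the FX thread
localCommitTreeModel.update(); // refresh the local commit tree
remoteCommitTreeModel.update(); // refresh the remote commit tree
}catch(GitAPIException | IOException e){
showGenericErrorNotification();
}finally{
finishedUpdate.countDown(); // release the waiting background thread
}
});
finishedUpdate.await(); // safe even if the FX thread already counted down
updateBranchDropdown(); // still off the FX thread, after the trees are updated
}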
/**
* When the circle representing the remote repo is clicked, go to the
* corresponding remote url
* @param event the mouse event corresponding to the click
*/
public void handleRemoteCircleMouseClick(MouseEvent event){
if(event.getButton() != MouseButton.PRIMARY) return;
Desktop desktop = Desktop.isDesktopSupported() ? Desktop.getDesktop() : null;
if (desktop != null && desktop.isSupported(Desktop.Action.BROWSE)) {
try {
if(this.theModel.getCurrentRepoHelper() == null) throw new NoRepoLoadedException();
if(!this.theModel.getCurrentRepoHelper().exists()) throw new MissingRepoException();
List<String> remoteURLs = this.theModel.getCurrentRepoHelper().getLinkedRemoteRepoURLs();
if(remoteURLs.size() == 0){
this.showNoRemoteNotification();
}
for (String remoteURL : remoteURLs) {
if(remoteURL.contains("@")){
remoteURL = "https://"+remoteURL.replace(":","/").split("@")[1];
}
desktop.browse(new URI(remoteURL));
}
}catch(URISyntaxException | IOException e){
this.showGenericErrorNotification();
}catch(MissingRepoException e){
this.showMissingRepoNotification();
this.setButtonsDisabled(true);
this.updateRecentReposDropdown();
}catch(NoRepoLoadedException e){
this.showNoRepoLoadedNotification();
this.setButtonsDisabled(true);
}
}
}
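/*
* A minimal sketch of the remote URL rewrite performed above, assuming an SSH-style remote;
* the sample URL and the helper name toBrowsableUrl are illustrative only.
*/
private static String toBrowsableUrl(String remoteURL){
if(remoteURL.contains("@")){
// e.g. "git@github.com:owner/repo.git" becomes "https://github.com/owner/repo.git"
remoteURL = "https://"+remoteURL.replace(":","/").split("@")[1];
}
return remoteURL;
}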
/**
* Initializes each panel of the view
*
* TODO: change this if/when we update the JDK to 8u60 or higher
* With JDK version 8u40, creation of control items needs to take place
* in the application thread even if they are not added to the scene.
* This is fixed in JDK 8u60 and above
* https://bugs.openjdk.java.net/browse/JDK-8097541
*
* This applies to all methods used here
*/
private synchronized void initPanelViews() {
BusyWindow.show();
Platform.runLater(() -> {
try{
workingTreePanelView.drawDirectoryView();
}catch(GitAPIException e){
showGenericErrorNotification();
}
localCommitTreeModel.init();
remoteCommitTreeModel.init();
BusyWindow.hide();
});
}
/**
* A helper method for enabling/disabling buttons.
*
* @param disable a boolean for whether or not to disable the buttons.
*/
private void setButtonsDisabled(boolean disable) {
Platform.runLater(() -> {
openRepoDirButton.setDisable(disable);
gitStatusButton.setDisable(disable);
commitButton.setDisable(disable);
mergeFromFetchButton.setDisable(disable);
pushButton.setDisable(disable);
fetchButton.setDisable(disable);
selectAllButton.setDisable(disable);
deselectAllButton.setDisable(disable);
remoteCircle.setVisible(!disable);
commitMessageField.setDisable(disable);
});
}
/**
* Checks out the branch that is currently selected in the dropdown.
*/
public void loadSelectedBranch() {
LocalBranchHelper selectedBranch = this.branchDropdownSelector.getValue();
if(selectedBranch == null) return;
Thread th = new Thread(new Task<Void>(){
@Override
protected Void call() {
try{
// This is an edge case for new local repos.
//
// When a repo is first initialized, the `master` branch is checked out,
// but it is "unborn" -- it doesn't exist yet in the `refs/heads` folder
// until there are commits.
//
// (see http://stackoverflow.com/a/21255920/5054197)
//
// So, check that there are refs in the refs folder (if there aren't, do nothing):
String gitDirString = theModel.getCurrentRepo().getDirectory().toString();
Path refsHeadsFolder = Paths.get(gitDirString + "/refs/heads");
DirectoryStream<Path> pathStream = Files.newDirectoryStream(refsHeadsFolder);
Iterator<Path> pathStreamIterator = pathStream.iterator();
if (pathStreamIterator.hasNext()){ // => There ARE branch refs in the folder
selectedBranch.checkoutBranch();
CommitTreeController.focusCommitInGraph(selectedBranch.getHead());
}
}catch(CheckoutConflictException e){
showCheckoutConflictsNotification(e.getConflictingPaths());
try{
updateBranchDropdown();
}catch(NoRepoLoadedException e1){
showNoRepoLoadedNotification();
setButtonsDisabled(true);
}catch(MissingRepoException e1){
showMissingRepoNotification();
setButtonsDisabled(true);
updateRecentReposDropdown();
}catch(GitAPIException | IOException e1){
showGenericErrorNotification();
e1.printStackTrace();
}
}catch(GitAPIException | IOException e){
showGenericErrorNotification();
e.printStackTrace();
}
return null;
}
});
th.setDaemon(true);
th.setName("Branch Checkout");
th.start();
}
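/*
* A minimal sketch of the "unborn branch" guard used above, assuming the same java.nio API;
* the helper name hasBornBranches is illustrative only. A freshly initialized repository has
* no files under .git/refs/heads until the first commit, so checkout is skipped while that
* folder is empty.
*/
private static boolean hasBornBranches(String gitDirString) throws IOException {
Path refsHeadsFolder = Paths.get(gitDirString, "refs", "heads");
try(DirectoryStream<Path> pathStream = Files.newDirectoryStream(refsHeadsFolder)){
return pathStream.iterator().hasNext(); // true => at least one branch ref exists
}
}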
/**
* A helper method to enable or disable buttons/UI elements
* depending on whether there is a repo open for the buttons to
* interact with.
*/
private void updateUIEnabledStatus() {
try{
if(this.theModel.getCurrentRepoHelper() == null && this.theModel.getAllRepoHelpers().size() == 0) {
// (There's no repo for the buttons to interact with)
setButtonsDisabled(true);
Platform.runLater(() -> this.branchDropdownSelector.setVisible(false));
} else if (this.theModel.getCurrentRepoHelper() == null && this.theModel.getAllRepoHelpers().size() > 0) {
// (There's no repo for buttons to interact with, but there are repos in the menu bar)
setButtonsDisabled(true);
Platform.runLater(() -> this.branchDropdownSelector.setVisible(false));
}else{
setButtonsDisabled(false);
this.updateBranchDropdown();
}
}catch(NoRepoLoadedException e){
this.showNoRepoLoadedNotification();
setButtonsDisabled(true);
}catch(MissingRepoException e){
this.showMissingRepoNotification();
setButtonsDisabled(true);
this.updateRecentReposDropdown();
} catch (GitAPIException | IOException e) {
this.showGenericErrorNotification();
e.printStackTrace();
}
}
/**
* Clears the history stored with the Preferences API.
*
* TODO: Come up with better solution?
*
* @throws BackingStoreException
* @throws IOException
* @throws ClassNotFoundException
*/
public void clearSavedStuff() throws BackingStoreException, IOException, ClassNotFoundException {
this.theModel.clearStoredPreferences();
this.showPrefsClearedNotification();
}
/**
* Creates a new owner and set it as the current default owner.
*/
public boolean switchUser() {
// Begin with a nullified RepoOwner:
RepoOwner newOwner = this.theModel.getDefaultOwner() == null ? new RepoOwner(null, null) : this.theModel.getDefaultOwner();
boolean switchedLogin = true;
try {
newOwner = new RepoOwner();
} catch (CancelledLoginException e) {
// User cancelled the login, so we'll leave the owner full of nullness.
switchedLogin = false;
}
RepoHelper currentRepoHelper = theModel.getCurrentRepoHelper();
if(currentRepoHelper != null){
currentRepoHelper.setOwner(newOwner);
}
this.theModel.setCurrentDefaultOwner(newOwner);
return switchedLogin;
}
/**
* Called when the switch user button is clicked. See switchUser
*/
public void handleSwitchUserButton(){
this.switchUser();
}
/**
* Opens the current repo directory (e.g. in Finder or Windows Explorer).
*/
public void openRepoDirectory(){
if (Desktop.isDesktopSupported()) {
try{
if(this.theModel.getCurrentRepoHelper() == null) throw new NoRepoLoadedException();
Desktop.getDesktop().open(this.theModel.getCurrentRepoHelper().localPath.toFile());
}catch(IOException | IllegalArgumentException e){
this.showFailedToOpenLocalNotification();
}catch(NoRepoLoadedException e){
this.showNoRepoLoadedNotification();
setButtonsDisabled(true);
}
}
}
/// BEGIN: ERROR NOTIFICATIONS:
private void showNotLoggedInNotification(Runnable callBack) {
Platform.runLater(() -> {
this.notificationPane.setText("You need to log in to do that.");
Action loginAction = new Action("Enter login info", e -> {
this.notificationPane.hide();
if(this.switchUser()){
if(callBack != null) callBack.run();
}
});
this.notificationPane.getActions().clear();
this.notificationPane.getActions().setAll(loginAction);
this.notificationPane.show();
});
}
private void showNoRepoLoadedNotification() {
Platform.runLater(() -> {
this.notificationPane.setText("You need to load a repository before you can perform operations on it!");
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
private void showInvalidRepoNotification() {
Platform.runLater(()-> {
this.notificationPane.setText("Make sure the directory you selected contains an existing (non-bare) Git repository.");
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
private void showMissingRepoNotification(){
Platform.runLater(()-> {
this.notificationPane.setText("That repository no longer exists.");
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
private void showNoRemoteNotification(){
Platform.runLater(()-> {
String name = this.theModel.getCurrentRepoHelper() != null ? this.theModel.getCurrentRepoHelper().toString() : "the current repository";
this.notificationPane.setText("There is no remote repository associated with " + name);
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
private void showFailedToOpenLocalNotification(){
Platform.runLater(()-> {
String path = this.theModel.getCurrentRepoHelper() != null ? this.theModel.getCurrentRepoHelper().getLocalPath().toString() : "the location of the local repository";
this.notificationPane.setText("Could not open directory at " + path);
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
private void showNonEmptyFolderNotification() {
Platform.runLater(()-> {
this.notificationPane.setText("Make sure the directory you selected is completely empty. The best " +
"way to do this is to create a new folder from the directory chooser.");
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
private void showInvalidRemoteNotification() {
Platform.runLater(()-> {
this.notificationPane.setText("Make sure you entered the correct remote URL.");
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
private void showGenericErrorNotification() {
Platform.runLater(()-> {
this.notificationPane.setText("Sorry, there was an error.");
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
private void showNotAuthorizedNotification(Runnable callback) {
Platform.runLater(() -> {
this.notificationPane.setText("The login information you gave does not allow you to modify this repository. Try switching your login and trying again.");
Action loginAction = new Action("Log in", e -> {
this.notificationPane.hide();
if(this.switchUser()){
if(callback != null) callback.run();
}
});
this.notificationPane.getActions().clear();
this.notificationPane.getActions().setAll(loginAction);
this.notificationPane.show();
});
}
private void showRepoWasNotLoadedNotification() {
Platform.runLater(()-> {
this.notificationPane.setText("No repository was loaded.");
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
private void showPrefsClearedNotification() {
Platform.runLater(()-> {
this.notificationPane.setText("Your recent repositories have been cleared. Restart the app for changes to take effect.");
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
private void showCheckoutConflictsNotification(List<String> conflictingPaths) {
Platform.runLater(() -> {
String conflictList = "";
for(String pathName : conflictingPaths){
conflictList += "\n" + pathName;
}
Alert alert = new Alert(Alert.AlertType.ERROR);
alert.setTitle("Conflicting files");
alert.setHeaderText("Can't checkout that branch");
alert.setContentText("You can't switch to that branch because of the following conflicting files between that branch and your current branch: "
+ conflictList);
this.notificationPane.setText("You can't switch to that branch because there would be a merge conflict. Stash your changes or resolve conflicts first.");
Action seeConflictsAction = new Action("See conflicts", e -> {
this.notificationPane.hide();
alert.showAndWait();
});
this.notificationPane.getActions().clear();
this.notificationPane.getActions().setAll(seeConflictsAction);
this.notificationPane.show();
});
}
private void showPushToAheadRemoteNotification(){
Platform.runLater(() -> {
this.notificationPane.setText("The remote repository is ahead of the local. You need to fetch and then merge (pull) before pushing.");
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
private void showLostRemoteNotification() {
Platform.runLater(() -> {
this.notificationPane.setText("The push failed because the remote repository couldn't be found.");
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
private void showUnsuccessfulMergeNotification(){
Platform.runLater(() -> {
this.notificationPane.setText("Merging failed");
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
private void showNewRemoteChangesNotification(){
Platform.runLater(() -> {
this.notificationPane.setText("There are new changes in the remote repository.");
Action fetchAction = new Action("Fetch", e -> {
this.notificationPane.hide();
gitFetch();
});
Action ignoreAction = new Action("Ignore", e -> {
this.notificationPane.hide();
RepositoryMonitor.resetFoundNewChanges(true);
});
this.notificationPane.getActions().clear();
this.notificationPane.getActions().setAll(fetchAction, ignoreAction);
this.notificationPane.show();
});
}
private void showNoFilesStagedForCommitNotification(){
Platform.runLater(() -> {
this.notificationPane.setText("You need to select which files to commit");
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
private void showNoCommitMessageNotification(){
Platform.runLater(() -> {
this.notificationPane.setText("You need to write a commit message in order to commit your changes");
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
private void showNoCommitsToPushNotification(){
Platform.runLater(() -> {
this.notificationPane.setText("There aren't any local commits to push");
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
private void showNoCommitsToMergeNotification(){
Platform.runLater(() -> {
this.notificationPane.setText("There aren't any fetched commits to merge");
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
private void showNoCommitsFetchedNotification(){
Platform.runLater(() -> {
this.notificationPane.setText("No new commits were fetched");
this.notificationPane.getActions().clear();
this.notificationPane.show();
});
}
// END: ERROR NOTIFICATIONS ^^^
/**
* Opens up the current repo helper's Branch Manager window after
* passing in this SessionController object, so that the
* BranchManagerController can update the main window's views.
*/
public void showBranchManager() {
try{
if(this.theModel.getCurrentRepoHelper() == null) throw new NoRepoLoadedException();
this.theModel.getCurrentRepoHelper().showBranchManagerWindow();
}catch(IOException e){
this.showGenericErrorNotification();
e.printStackTrace();
}catch(NoRepoLoadedException e){
this.showNoRepoLoadedNotification();
setButtonsDisabled(true);
}
}
/**
* Displays information about the commit with the given id
* @param id the selected commit
*/
public void selectCommit(String id){
CommitHelper commit = this.theModel.getCurrentRepoHelper().getCommit(id);
commitInfoNameText.setText(commit.getName());
commitInfoAuthorText.setText(commit.getAuthorName());
commitInfoDateText.setText(commit.getFormattedWhen());
commitInfoMessageText.setVisible(true);
commitInfoNameCopyButton.setVisible(true);
commitInfoGoToButton.setVisible(true);
String s = "";
for(BranchHelper branch : commit.getBranchesAsHead()){
if(branch instanceof RemoteBranchHelper){
s = s + "origin/";
}
s = s + branch.getBranchName() + "\n";
}
if(s.length() > 0){
commitInfoMessageText.setText("Head of branches: \n"+s+"\n\n"+commit.getMessage(true));
}else{
commitInfoMessageText.setText(commit.getMessage(true));
}
}
/**
* Stops displaying commit information
*/
public void clearSelectedCommit(){
commitInfoNameText.setText("");
commitInfoAuthorText.setText("");
commitInfoDateText.setText("");
commitInfoMessageText.setText("");
commitInfoMessageText.setVisible(false);
commitInfoNameCopyButton.setVisible(false);
commitInfoGoToButton.setVisible(false);
}
/**
* Copies the commit hash onto the clipboard
*/
public void handleCommitNameCopyButton(){
Clipboard clipboard = Clipboard.getSystemClipboard();
ClipboardContent content = new ClipboardContent();
content.putString(commitInfoNameText.getText());
clipboard.setContent(content);
}
/**
* Jumps to the selected commit in the tree display
*/
public void handleGoToCommitButton(){
String id = commitInfoNameText.getText();
CommitTreeController.focusCommitInGraph(id);
}
/**
* Selects all files in the working tree for a commit.
*
*/
public void onSelectAllButton() {
this.workingTreePanelView.setAllFilesSelected(true);
}
/**
* Deselects all files in the working tree for a commit.
*
*/
public void onDeselectAllButton() {
this.workingTreePanelView.setAllFilesSelected(false);
}
}
|
Load new repo menu now shows on left click (not right)
|
src/main/java/elegit/SessionController.java
|
Load new repo menu now shows on left click (not right)
|
<ide><path>rc/main/java/elegit/SessionController.java
<ide> import javafx.collections.FXCollections;
<ide> import javafx.concurrent.Task;
<ide> import javafx.fxml.FXML;
<add>import javafx.geometry.Side;
<ide> import javafx.scene.Node;
<ide> import javafx.scene.control.*;
<ide> import javafx.scene.control.Button;
<ide> MenuItem cloneOption = new MenuItem("Clone repository", downloadIcon);
<ide> cloneOption.setOnAction(t -> handleLoadRepoMenuItem(new ClonedRepoHelperBuilder(this.theModel)));
<ide>
<del> Text folderOpenIcon = GlyphsDude.createIcon(FontAwesomeIcon.FOLDER_OPEN_ALT);
<add> Text folderOpenIcon = GlyphsDude.createIcon(FontAwesomeIcon.FOLDER_OPEN);
<ide> MenuItem existingOption = new MenuItem("Load existing repository", folderOpenIcon);
<ide> existingOption.setOnAction(t -> handleLoadRepoMenuItem(new ExistingRepoHelperBuilder(this.theModel)));
<ide> ContextMenu newRepoOptionsMenu = new ContextMenu(cloneOption, existingOption);
<del> this.loadNewRepoButton.setContextMenu(newRepoOptionsMenu);
<add>
<add> this.loadNewRepoButton.setOnAction(e -> newRepoOptionsMenu.show(this.loadNewRepoButton, Side.BOTTOM ,0, 0));
<add> this.loadNewRepoButton.setTooltip(new Tooltip("Load a new repository"));
<ide>
<ide> // Buttons start out disabled, since no repo is loaded
<ide> this.setButtonsDisabled(true);
|
|
Java
|
mit
|
error: pathspec 'Challenge_6/SolidSurface.java' did not match any file(s) known to git
|
cca5a85b3f40b2bbbe4a7c8edcbdee3eee174dc6
| 1 |
marbros/Computer-Graphics
|
package Model;
import Math.Intersectable;
/**
* All objects implementing this interface have to return a material.
*/
public interface SolidSurface {
public Material getMaterial();
public Intersectable getIntersectable();
}
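/*
* A minimal sketch, assuming a hypothetical implementing class (not part of this repository),
* of how a renderable object would satisfy this interface:
*
* public class Sphere implements SolidSurface {
* private final Material material;
* private final Intersectable geometry;
* public Sphere(Material material, Intersectable geometry) { this.material = material; this.geometry = geometry; }
* public Material getMaterial() { return material; }
* public Intersectable getIntersectable() { return geometry; }
* }
*/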
|
Challenge_6/SolidSurface.java
|
small changes are added to challenge 6
|
Challenge_6/SolidSurface.java
|
small changes are added to challenge 6
|
<ide><path>hallenge_6/SolidSurface.java
<add>package Model;
<add>import Math.Intersectable;
<add>
<add>/**
<add> * All objects implementing this interface have to return a material.
<add> */
<add>public interface SolidSurface {
<add> public Material getMaterial();
<add> public Intersectable getIntersectable();
<add>}
|
|
Java
|
apache-2.0
|
9511831f0a3f8dd30e05f093e6a5f29aa6ae5bfd
| 0 |
jcshen007/cloudstack,wido/cloudstack,wido/cloudstack,wido/cloudstack,resmo/cloudstack,GabrielBrascher/cloudstack,wido/cloudstack,jcshen007/cloudstack,DaanHoogland/cloudstack,resmo/cloudstack,DaanHoogland/cloudstack,wido/cloudstack,wido/cloudstack,jcshen007/cloudstack,resmo/cloudstack,DaanHoogland/cloudstack,GabrielBrascher/cloudstack,GabrielBrascher/cloudstack,jcshen007/cloudstack,resmo/cloudstack,jcshen007/cloudstack,DaanHoogland/cloudstack,DaanHoogland/cloudstack,resmo/cloudstack,DaanHoogland/cloudstack,resmo/cloudstack,GabrielBrascher/cloudstack,GabrielBrascher/cloudstack,jcshen007/cloudstack,GabrielBrascher/cloudstack,GabrielBrascher/cloudstack,resmo/cloudstack,DaanHoogland/cloudstack,jcshen007/cloudstack,wido/cloudstack
|
//
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//
package org.apache.cloudstack.storage.datastore.util;
import com.cloud.agent.api.Answer;
import com.cloud.utils.exception.CloudRuntimeException;
import com.google.gson.Gson;
import com.google.gson.annotations.SerializedName;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.api.client.config.ClientConfig;
import com.sun.jersey.api.client.config.DefaultClientConfig;
import com.sun.jersey.core.util.MultivaluedMapImpl;
import org.apache.cloudstack.framework.config.dao.ConfigurationDao;
import org.apache.cloudstack.utils.security.SSLUtils;
import org.apache.cloudstack.utils.security.SecureSSLSocketFactory;
import org.apache.http.auth.InvalidCredentialsException;
import org.apache.log4j.Logger;
import javax.naming.ServiceUnavailableException;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLHandshakeException;
import javax.net.ssl.SSLSession;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.UriBuilder;
import java.net.ConnectException;
import java.security.InvalidParameterException;
import java.security.SecureRandom;
import java.security.cert.X509Certificate;
import java.util.HashMap;
public class ElastistorUtil {
private static final Logger s_logger = Logger.getLogger(ElastistorUtil.class);
private static ConfigurationDao configurationDao;
public static ConfigurationDao getConfigurationDao() {
return configurationDao;
}
public static void setConfigurationDao(ConfigurationDao configurationDao) {
ElastistorUtil.configurationDao = configurationDao;
}
/**
* Elastistor REST API Param Keys. These should match exactly with the
* elastistor API commands' params.
*/
public static final String REST_PARAM_COMMAND = "command";
public static final String REST_PARAM_APIKEY = "apikey";
public static final String REST_PARAM_KEYWORD = "keyword";
public static final String REST_PARAM_ID = "id";
public static final String REST_PARAM_QUOTA_SIZE = "quotasize";
public static final String REST_PARAM_READONLY = "readonly";
public static final String REST_PARAM_RESPONSE = "response";
public static final String REST_PARAM_POOLID = "poolid";
public static final String REST_PARAM_ACCOUNTID = "accountid";
public static final String REST_PARAM_GATEWAY = "router";
public static final String REST_PARAM_SUBNET = "subnet";
public static final String REST_PARAM_INTERFACE = "tntinterface";
public static final String REST_PARAM_IPADDRESS = "ipaddress";
public static final String REST_PARAM_JOBID = "jobId";
public static final String REST_PARAM_FORECEDELETE = "forcedelete";
public static final String REST_PARAM_TSM_THROUGHPUT = "totalthroughput";
public static final String REST_PARAM_NAME = "name";
public static final String REST_PARAM_NOOFCOPIES = "noofcopies";
public static final String REST_PARAM_RECORDSIZE = "recordsize";
public static final String REST_PARAM_TOTALIOPS = "totaliops";
public static final String REST_PARAM_LATENCY = "latency";
public static final String REST_PARAM_BLOCKSIZE = "blocksize";
public static final String REST_PARAM_GRACEALLOWED = "graceallowed";
public static final String REST_PARAM_IOPS = "iops";
public static final String REST_PARAM_THROUGHPUT = "throughput";
public static final String REST_PARAM_MEMLIMIT = "memlimit";
public static final String REST_PARAM_NETWORKSPEED = "networkspeed";
public static final String REST_PARAM_TSMID = "tsmid";
public static final String REST_PARAM_DATASETID = "datasetid";
public static final String REST_PARAM_QOSGROUPID = "qosgroupid";
public static final String REST_PARAM_DEDUPLICATION = "deduplication";
public static final String REST_PARAM_COMPRESSION = "compression";
public static final String REST_PARAM_SYNC = "sync";
public static final String REST_PARAM_MOUNTPOINT = "mountpoint";
public static final String REST_PARAM_CASESENSITIVITY = "casesensitivity";
public static final String REST_PARAM_UNICODE = "unicode";
public static final String REST_PARAM_PROTOCOLTYPE = "protocoltype";
public static final String REST_PARAM_AUTHNETWORK = "authnetwork";
public static final String REST_PARAM_MAPUSERSTOROOT = "mapuserstoroot";
public static final String REST_PARAM_STORAGEID = "storageid";
public static final String REST_PARAM_TPCONTROL = "tpcontrol";
public static final String REST_PARAM_IOPSCONTROL = "iopscontrol";
/**
* Constants related to elastistor which are persisted in cloudstack
* databases as keys.
*/
public static final String ES_SUBNET = "essubnet";
public static final String ES_INTERFACE = "estntinterface";
public static final String ES_GATEWAY = "esdefaultgateway";
public static final String ES_PROVIDER_NAME = "CloudByte";
public static final String ES_ACCOUNT_ID = "esAccountId";
public static final String ES_POOL_ID = "esPoolId";
public static final String ES_ACCOUNT_NAME = "esAccountName";
public static final String ES_STORAGE_IP = "esStorageIp";
public static final String ES_STORAGE_PORT = "esStoragePort";
public static final String ES_STORAGE_TYPE = "esStorageType";
public static final String ES_MANAGEMENT_IP = "esMgmtIp";
public static final String ES_MANAGEMENT_PORT = "esMgmtPort";
public static final String ES_API_KEY = "esApiKey";
public static final String ES_VOLUME_ID = "esVolumeId";
public static final String ES_VOLUME_GROUP_ID = "esVolumeGroupId";
public static final String ES_FILE_SYSTEM_ID = "esFilesystemId";
/**
* Values from configuration that are required for every invocation of
* ElastiCenter API. These might in turn be saved as DB updates along with
* above keys.
*/
public static String s_esIPVAL = "";
public static String s_esAPIKEYVAL = "";
public static String s_esACCOUNTIDVAL = "";
public static String s_esPOOLIDVAL = "";
public static String s_esSUBNETVAL = "";
public static String s_esINTERFACEVAL = "";
public static String s_esGATEWAYVAL = "";
/**
* hardcoded constants for elastistor api calls.
*/
private static final String ES_NOOFCOPIES_VAL = "1";
private static final String ES_BLOCKSIZE_VAL = "4K";
private static final String ES_LATENCY_VAL = "15";
private static final String ES_GRACEALLOWED_VAL = "false";
private static final String ES_MEMLIMIT_VAL = "0";
private static final String ES_NETWORKSPEED_VAL = "0";
private static final String ES_DEDUPLICATION_VAL = "off";
private static final String ES_COMPRESSION_VAL = "off";
private static final String ES_CASESENSITIVITY_VAL = "sensitive";
private static final String ES_READONLY_VAL = "off";
private static final String ES_UNICODE_VAL = "off";
private static final String ES_AUTHNETWORK_VAL = "all";
private static final String ES_MAPUSERSTOROOT_VAL = "yes";
private static final String ES_SYNC_VAL = "always";
private static final String ES_TPCONTROL_VAL = "false";
private static final String ES_IOPSCONTROL_VAL = "true";
/**
* Private constructor so that it's never instantiated.
*/
private ElastistorUtil() {
}
/**
* This initializes a new Jersey REST client for HTTP calls to ElastiCenter
*/
public static ElastiCenterClient getElastistorRestClient() {
ElastiCenterClient restclient = null;
try {
String ip = getConfigurationDao().getValue("cloudbyte.management.ip");
String apikey = getConfigurationDao().getValue("cloudbyte.management.apikey");
if (ip == null) {
throw new CloudRuntimeException("set the value of cloudbyte.management.ip in global settings");
}
if (apikey == null) {
throw new CloudRuntimeException("set the value of cloudbyte.management.apikey in global settings");
}
restclient = new ElastiCenterClient(ip, apikey);
} catch (InvalidCredentialsException e) {
throw new CloudRuntimeException("InvalidCredentialsException:" + e.getMessage(), e);
} catch (InvalidParameterException e) {
throw new CloudRuntimeException("InvalidParameterException:" + e.getMessage(), e);
} catch (SSLHandshakeException e) {
throw new CloudRuntimeException("SSLHandshakeException:" + e.getMessage(), e);
} catch (ServiceUnavailableException e) {
throw new CloudRuntimeException("ServiceUnavailableException:" + e.getMessage(), e);
}
return restclient;
}
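/*
* A minimal usage sketch (illustrative only): every ElastiCenter call in this class follows the
* same pattern of building a command object, filling its parameters, executing it through this
* shared client, and casting the answer to the matching response type, e.g.
*
* CreateTsmCmd cmd = new CreateTsmCmd();
* cmd.putCommandParameter(ElastistorUtil.REST_PARAM_NAME, "some-tsm-name"); // value is an assumption
* CreateTsmCmdResponse resp = (CreateTsmCmdResponse) getElastistorRestClient().executeCommand(cmd);
*/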
public static void setElastistorApiKey(String value) {
s_esAPIKEYVAL = value;
}
public static void setElastistorManagementIp(String value) {
s_esIPVAL = value;
}
public static void setElastistorPoolId(String value) {
s_esPOOLIDVAL = value;
}
public static void setElastistorAccountId(String value) {
s_esACCOUNTIDVAL = value;
}
public static void setElastistorGateway(String value) {
s_esGATEWAYVAL = value;
}
public static void setElastistorInterface(String value) {
s_esINTERFACEVAL = value;
}
public static void setElastistorSubnet(String value) {
s_esSUBNETVAL = value;
}
/**
* This creates a new Account in Elasticenter for the given Domain Name.
*
* @return
*/
public static String getElastistorAccountId(String domainName) throws Throwable {
ListAccountResponse listAccountResponse = ListElastistorAccounts();
if (listAccountResponse.getAccounts().getCount() != 0) {
int i;
// check whether an account with the given Domain name is already
// present in ElastiCenter's list of accounts
for (i = 0; i < listAccountResponse.getAccounts().getCount(); i++) {
if (domainName.equals(listAccountResponse.getAccounts().getAccount(i).getName())) {
return listAccountResponse.getAccounts().getAccount(i).getUuid();
}
}
// if no account matches the given Domain Name, create one with the
// Domain name
CreateAccountResponse createAccountResponse = createElastistorAccount(domainName);
return createAccountResponse.getAccount().getUuid();
} else {
// if no account is present in ElastiCenter, create one
CreateAccountResponse createAccountResponse = createElastistorAccount(domainName);
return createAccountResponse.getAccount().getUuid();
}
}
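/*
* A minimal usage sketch, assuming "ROOT" as the CloudStack domain name (illustrative only):
* the returned UUID is the ElastiCenter account matching the domain, created on demand.
*
* String accountUuid = ElastistorUtil.getElastistorAccountId("ROOT");
*/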
/**
* This creates a new tenant storage machine (TSM) for the given storage pool
* IP in Elastistor.
*
* @param domainName
* TODO
*/
public static Tsm createElastistorTsm(String storagePoolName, String storageIp, Long capacityBytes, Long capacityIops, String domainName) throws Throwable {
String totalthroughput = String.valueOf(capacityIops * 4);
String totaliops = String.valueOf(capacityIops);
String quotasize = convertCapacityBytes(capacityBytes);
CreateTsmCmd createTsmCmd = new CreateTsmCmd();
if (null != ElastistorUtil.s_esACCOUNTIDVAL)
createTsmCmd.putCommandParameter(ElastistorUtil.REST_PARAM_ACCOUNTID, domainName);
if (null != totalthroughput)
createTsmCmd.putCommandParameter(ElastistorUtil.REST_PARAM_TSM_THROUGHPUT, totalthroughput);
if (null != ElastistorUtil.s_esPOOLIDVAL)
createTsmCmd.putCommandParameter(ElastistorUtil.REST_PARAM_POOLID, ElastistorUtil.s_esPOOLIDVAL);
if (null != storagePoolName)
createTsmCmd.putCommandParameter(ElastistorUtil.REST_PARAM_NAME, storagePoolName);
if (null != quotasize)
createTsmCmd.putCommandParameter(ElastistorUtil.REST_PARAM_QUOTA_SIZE, quotasize);
if (null != storageIp)
createTsmCmd.putCommandParameter(ElastistorUtil.REST_PARAM_IPADDRESS, storageIp);
if (null != ElastistorUtil.s_esSUBNETVAL)
createTsmCmd.putCommandParameter(ElastistorUtil.REST_PARAM_SUBNET, ElastistorUtil.s_esSUBNETVAL);
if (null != ElastistorUtil.s_esGATEWAYVAL)
createTsmCmd.putCommandParameter(ElastistorUtil.REST_PARAM_GATEWAY, ElastistorUtil.s_esGATEWAYVAL);
if (null != ElastistorUtil.s_esINTERFACEVAL)
createTsmCmd.putCommandParameter(ElastistorUtil.REST_PARAM_INTERFACE, ElastistorUtil.s_esINTERFACEVAL);
if (null != ElastistorUtil.ES_NOOFCOPIES_VAL)
createTsmCmd.putCommandParameter(ElastistorUtil.REST_PARAM_NOOFCOPIES, ElastistorUtil.ES_NOOFCOPIES_VAL);
if (null != ElastistorUtil.ES_BLOCKSIZE_VAL)
createTsmCmd.putCommandParameter(ElastistorUtil.REST_PARAM_RECORDSIZE, ElastistorUtil.ES_BLOCKSIZE_VAL);
if (null != totaliops)
createTsmCmd.putCommandParameter(ElastistorUtil.REST_PARAM_TOTALIOPS, totaliops);
if (null != ElastistorUtil.ES_LATENCY_VAL)
createTsmCmd.putCommandParameter(ElastistorUtil.REST_PARAM_LATENCY, ElastistorUtil.ES_LATENCY_VAL);
if (null != ElastistorUtil.ES_BLOCKSIZE_VAL)
createTsmCmd.putCommandParameter(ElastistorUtil.REST_PARAM_BLOCKSIZE, ElastistorUtil.ES_BLOCKSIZE_VAL);
if (null != ElastistorUtil.ES_GRACEALLOWED_VAL)
createTsmCmd.putCommandParameter(ElastistorUtil.REST_PARAM_GRACEALLOWED, ElastistorUtil.ES_GRACEALLOWED_VAL);
CreateTsmCmdResponse createTsmCmdResponse;
Tsm tsm = null;
try {
createTsmCmdResponse = (CreateTsmCmdResponse) getElastistorRestClient().executeCommand(createTsmCmd);
if (createTsmCmdResponse.getJobid() == null) {
throw new CloudRuntimeException("tsm creation failed , contact elatistor admin");
} else {
tsm = queryAsyncTsmJobResult(createTsmCmdResponse.getJobid());
if (tsm == null) {
throw new CloudRuntimeException("tsm queryAsync failed , contact elatistor admin");
}
}
return tsm;
} catch (Exception e) {
throw new CloudRuntimeException("tsm creation failed , contact elatistor admin" + e.toString());
}
}
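/*
* A minimal usage sketch with illustrative values only (pool name, IP, capacity and IOPS are
* assumptions): provisions a 100 GiB tenant storage machine for the given domain.
*
* Tsm tsm = ElastistorUtil.createElastistorTsm("pool-01", "10.0.0.15", 100L * 1024 * 1024 * 1024, 1000L, "ROOT");
*/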
/**
* This creates the specified volume on the created tsm.
*/
public static FileSystem createElastistorVolume(String volumeName, String tsmid, Long capacityBytes, Long capacityIops, String protocoltype, String mountpoint) throws Throwable {
String datasetid;
String qosgroupid;
String VolumeName = volumeName;
String totaliops = String.valueOf(capacityIops);
//String totalthroughput = String.valueOf(capacityIops * 4);
String totalthroughput = "0";
String quotasize = convertCapacityBytes(capacityBytes);
AddQosGroupCmd addQosGroupCmd = new AddQosGroupCmd();
ListTsmsResponse listTsmsResponse = listTsm(tsmid);
tsmid = listTsmsResponse.getTsms().getTsm(0).getUuid();
datasetid = listTsmsResponse.getTsms().getTsm(0).getDatasetid();
if (null != VolumeName)
addQosGroupCmd.putCommandParameter(ElastistorUtil.REST_PARAM_NAME, "QOS_" + VolumeName);
if (null != totaliops)
addQosGroupCmd.putCommandParameter(ElastistorUtil.REST_PARAM_IOPS, totaliops);
if (null != ElastistorUtil.ES_LATENCY_VAL)
addQosGroupCmd.putCommandParameter(ElastistorUtil.REST_PARAM_LATENCY, ElastistorUtil.ES_LATENCY_VAL);
if (null != totalthroughput)
addQosGroupCmd.putCommandParameter(ElastistorUtil.REST_PARAM_THROUGHPUT, totalthroughput);
if (null != ElastistorUtil.ES_MEMLIMIT_VAL)
addQosGroupCmd.putCommandParameter(ElastistorUtil.REST_PARAM_MEMLIMIT, ElastistorUtil.ES_MEMLIMIT_VAL);
if (null != ElastistorUtil.ES_NETWORKSPEED_VAL)
addQosGroupCmd.putCommandParameter(ElastistorUtil.REST_PARAM_NETWORKSPEED, ElastistorUtil.ES_NETWORKSPEED_VAL);
if (null != tsmid)
addQosGroupCmd.putCommandParameter(ElastistorUtil.REST_PARAM_TSMID, tsmid);
if (null != datasetid)
addQosGroupCmd.putCommandParameter(ElastistorUtil.REST_PARAM_DATASETID, datasetid);
if (null != ElastistorUtil.ES_GRACEALLOWED_VAL)
addQosGroupCmd.putCommandParameter(ElastistorUtil.REST_PARAM_GRACEALLOWED, ElastistorUtil.ES_GRACEALLOWED_VAL);
if (null != ElastistorUtil.ES_IOPSCONTROL_VAL)
addQosGroupCmd.putCommandParameter(ElastistorUtil.REST_PARAM_IOPSCONTROL, ElastistorUtil.ES_IOPSCONTROL_VAL);
if (null != ElastistorUtil.ES_TPCONTROL_VAL)
addQosGroupCmd.putCommandParameter(ElastistorUtil.REST_PARAM_TPCONTROL, ElastistorUtil.ES_TPCONTROL_VAL);
AddQosGroupCmdResponse addQosGroupCmdResponse = (AddQosGroupCmdResponse) getElastistorRestClient().executeCommand(addQosGroupCmd);
if (addQosGroupCmdResponse.getQoSGroup().getUuid() == null) {
throw new CloudRuntimeException("adding qos group failed , contact elatistor admin");
}
else {
CreateVolumeCmd createVolumeCmd = new CreateVolumeCmd();
qosgroupid = addQosGroupCmdResponse.getQoSGroup().getUuid();
// if (null !=
// ElastistorUtil.s_esACCOUNTIDVAL)createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_ACCOUNTID,ElastistorUtil.s_esACCOUNTIDVAL);
if (null != qosgroupid)
createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_QOSGROUPID, qosgroupid);
if (null != tsmid)
createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_TSMID, tsmid);
// if (null !=
// ElastistorUtil.s_esPOOLIDVAL)createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_POOLID,ElastistorUtil.s_esPOOLIDVAL);
if (null != VolumeName)
createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_NAME, VolumeName);
if (null != quotasize)
createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_QUOTA_SIZE, quotasize);
if (protocoltype.equalsIgnoreCase("nfs")) {
if (null != ElastistorUtil.ES_BLOCKSIZE_VAL)
createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_BLOCKSIZE, ElastistorUtil.ES_BLOCKSIZE_VAL);
if (null != ElastistorUtil.ES_BLOCKSIZE_VAL)
createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_RECORDSIZE, ElastistorUtil.ES_BLOCKSIZE_VAL);
} else {
if (null != ElastistorUtil.ES_BLOCKSIZE_VAL)
createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_BLOCKSIZE, "512B");
if (null != ElastistorUtil.ES_BLOCKSIZE_VAL)
createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_RECORDSIZE, "512B");
}
if (null != ElastistorUtil.ES_DEDUPLICATION_VAL)
createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_DEDUPLICATION, ElastistorUtil.ES_DEDUPLICATION_VAL);
if (null != ElastistorUtil.ES_SYNC_VAL)
createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_SYNC, ElastistorUtil.ES_SYNC_VAL);
if (null != ElastistorUtil.ES_COMPRESSION_VAL)
createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_COMPRESSION, ElastistorUtil.ES_COMPRESSION_VAL);
// if (null !=
// ElastistorUtil.ES_NOOFCOPIES_VAL)createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_NOOFCOPIES,
// ElastistorUtil.ES_NOOFCOPIES_VAL);
createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_MOUNTPOINT, mountpoint);
// if (null !=
// ElastistorUtil.ES_CASESENSITIVITY_VAL)createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_CASESENSITIVITY,
// ElastistorUtil.ES_CASESENSITIVITY_VAL);
// if (null !=
// ElastistorUtil.ES_READONLY_VAL)createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_READONLY,
// ElastistorUtil.ES_READONLY_VAL);
if (null != datasetid)
createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_DATASETID, datasetid);
// if (null !=
// ElastistorUtil.ES_UNICODE_VAL)createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_UNICODE,
// ElastistorUtil.ES_UNICODE_VAL);
createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_PROTOCOLTYPE, protocoltype);
// if (null !=
// ElastistorUtil.ES_AUTHNETWORK_VAL)createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_AUTHNETWORK,
// ElastistorUtil.ES_AUTHNETWORK_VAL);
// if (null !=
// ElastistorUtil.ES_MAPUSERSTOROOT_VAL)createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_MAPUSERSTOROOT,
// ElastistorUtil.ES_MAPUSERSTOROOT_VAL);
CreateVolumeCmdResponse createVolumeCmdResponse;
FileSystem volume = null;
FileSystem fileSystem = null;
try {
createVolumeCmdResponse = (CreateVolumeCmdResponse) getElastistorRestClient().executeCommand(createVolumeCmd);
if (createVolumeCmdResponse.getJobid() == null) {
throw new CloudRuntimeException("creating volume failed, contact elastistor admin");
} else {
volume = queryAsyncVolumeJobResult(createVolumeCmdResponse.getJobid());
if (volume == null) {
throw new CloudRuntimeException("volume queryAsync failed, contact elastistor admin");
} else {
if (protocoltype.equalsIgnoreCase("nfs")) {
fileSystem = updateNfsService(volume.getUuid());
} else {
fileSystem = updateIscsiService(volume.getUuid());
}
}
}
return fileSystem;
} catch (Exception e) {
throw new CloudRuntimeException("creating volume failed, contact elastistor admin", e);
}
}
}
public static FileSystem updateNfsService(String volumeid) throws Throwable {
FileSystem fileSystem = null;
String datasetid = updateElastistorNfsVolume(volumeid);
if (datasetid == null) {
throw new CloudRuntimeException("Updating Nfs Volume Failed");
} else {
fileSystem = listVolume(datasetid);
if (fileSystem == null) {
throw new CloudRuntimeException("Volume Creation failed : List Filesystem failed");
}
}
return fileSystem;
}
public static FileSystem updateIscsiService(String volumeid) throws Throwable {
Volumeiscsioptions volumeiscsioptions = null;
FileSystem fileSystem = null;
String accountId;
fileSystem = listVolume(volumeid);
accountId = fileSystem.getAccountid();
volumeiscsioptions = updateElastistorIscsiVolume(volumeid, accountId);
if (volumeiscsioptions == null) {
throw new CloudRuntimeException("Updating Iscsi Volume Failed");
} else {
fileSystem = listVolume(volumeiscsioptions.getVolumeid());
if (fileSystem == null) {
throw new CloudRuntimeException("Volume Creation failed : List Filesystem failed");
}
}
return fileSystem;
}
public static String updateElastistorNfsVolume(String volumeid) throws Throwable {
NfsServiceCmd nfsServiceCmd = new NfsServiceCmd();
nfsServiceCmd.putCommandParameter("datasetid", volumeid);
nfsServiceCmd.putCommandParameter("authnetwork", "all");
nfsServiceCmd.putCommandParameter("managedstate", "true");
nfsServiceCmd.putCommandParameter("alldirs", "yes");
nfsServiceCmd.putCommandParameter("mapuserstoroot", "yes");
nfsServiceCmd.putCommandParameter("readonly", "no");
NfsServiceResponse nfsServiceResponse = (NfsServiceResponse) getElastistorRestClient().executeCommand(nfsServiceCmd);
if (nfsServiceResponse.getNfsService().getUuid() != null) {
UpdateControllerCmd controllerCmd = new UpdateControllerCmd();
controllerCmd.putCommandParameter("nfsid", nfsServiceResponse.getNfsService().getUuid());
controllerCmd.putCommandParameter("type", "configurenfs");
controllerCmd.putCommandParameter("id", nfsServiceResponse.getNfsService().getControllerid());
UpdateControllerResponse controllerResponse = (UpdateControllerResponse) getElastistorRestClient().executeCommand(controllerCmd);
if (controllerResponse.getController().getUuid() != null) {
s_logger.info("updated nfs service to ALL");
return nfsServiceResponse.getNfsService().getDatasetid();
} else {
throw new CloudRuntimeException("Updating Nfs Volume Failed");
}
}
return null;
}
public static Volumeiscsioptions updateElastistorIscsiVolume(String volumeid, String accountid) throws Throwable {
// now listing the iscsi volume service group to get iscsi id
ListVolumeiSCSIServiceCmd listVolumeiSCSIServiceCmd = new ListVolumeiSCSIServiceCmd();
if (null != volumeid)
listVolumeiSCSIServiceCmd.putCommandParameter(ElastistorUtil.REST_PARAM_STORAGEID, volumeid);
ListVolumeiSCSIServiceResponse volumeiSCSIServiceResponse = (ListVolumeiSCSIServiceResponse) getElastistorRestClient().executeCommand(listVolumeiSCSIServiceCmd);
String iscsiId = volumeiSCSIServiceResponse.getIscsiVolume().getIscsiVolume(0).getUuid();
String AG_Id = volumeiSCSIServiceResponse.getIscsiVolume().getIscsiVolume(0).getAg_id();
// now listing the initiator group to get initiator id
ListiSCSIInitiatorCmd initiatorCmd = new ListiSCSIInitiatorCmd();
if (null != volumeid)
initiatorCmd.putCommandParameter(ElastistorUtil.REST_PARAM_ACCOUNTID, accountid);
ListiSCSIInitiatorResponse initiatorResponse = (ListiSCSIInitiatorResponse) getElastistorRestClient().executeCommand(initiatorCmd);
String IG_Id;
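// use the first initiator when its group is "ALL"; otherwise fall back to
// the second entry (this assumes the listing returns at least two initiators)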
if (initiatorResponse.getIInitiator().getInterface(0).getInitiatorgroup().equalsIgnoreCase("ALL")) {
IG_Id = initiatorResponse.getIInitiator().getInterface(0).getUuid();
} else {
IG_Id = initiatorResponse.getIInitiator().getInterface(1).getUuid();
}
if (iscsiId != null) {
UpdateVolumeiSCSIServiceCmd updateVolumeiSCSIServiceCmd = new UpdateVolumeiSCSIServiceCmd();
if (null != volumeid)
updateVolumeiSCSIServiceCmd.putCommandParameter(ElastistorUtil.REST_PARAM_ID, iscsiId);
if (null != volumeid)
updateVolumeiSCSIServiceCmd.putCommandParameter("status", "true");
if (null != volumeid)
updateVolumeiSCSIServiceCmd.putCommandParameter("igid", IG_Id);
if (null != volumeid)
updateVolumeiSCSIServiceCmd.putCommandParameter("authgroupid", AG_Id);
if (null != volumeid)
updateVolumeiSCSIServiceCmd.putCommandParameter("initialdigest", "Auto");
if (null != volumeid)
updateVolumeiSCSIServiceCmd.putCommandParameter("queuedepth", "32");
UpdateVolumeiSCSIServiceCmdResponse cmdResponse = (UpdateVolumeiSCSIServiceCmdResponse) getElastistorRestClient().executeCommand(updateVolumeiSCSIServiceCmd);
if (cmdResponse.getVolumeiscsioptions().getVolumeid() == null) {
throw new CloudRuntimeException("Updating Iscsi Volume Failed");
}
return cmdResponse.getVolumeiscsioptions();
}
return null;
}
/**
* This deletes both the volume and the tsm in elastistor.
*/
public static boolean deleteElastistorTsm(String tsmid, boolean managed) throws Throwable {
if (!managed) {
s_logger.info("elastistor pool is NOT a managed storage , hence deleting the volume then tsm");
String esvolumeid = null;
ListTsmsResponse listTsmsResponse = listTsm(tsmid);
if (listTsmsResponse.getTsmsCount() != 0) {
if (listTsmsResponse.getTsms().getTsm(0).checkvolume()) {
esvolumeid = listTsmsResponse.getTsms().getTsm(0).getVolumeProperties(0).getid();
DeleteVolumeResponse deleteVolumeResponse = deleteVolume(esvolumeid, null);
if (deleteVolumeResponse != null) {
String jobid = deleteVolumeResponse.getJobId();
int jobstatus = queryAsyncJobResult(jobid);
if (jobstatus == 1) {
s_logger.info("elastistor volume successfully deleted");
} else {
s_logger.info("now farce deleting the volume");
while (jobstatus != 1) {
DeleteVolumeResponse deleteVolumeResponse1 = deleteVolume(esvolumeid, "true");
if (deleteVolumeResponse1 != null) {
String jobid1 = deleteVolumeResponse1.getJobId();
jobstatus = queryAsyncJobResult(jobid1);
}
}
s_logger.info("elastistor volume successfully deleted");
}
}
} else {
s_logger.info("no volume present in on the given tsm");
}
}
}
s_logger.info("now trying to delete elastistor tsm");
if (tsmid != null) {
DeleteTsmCmd deleteTsmCmd = new DeleteTsmCmd();
deleteTsmCmd.putCommandParameter(ElastistorUtil.REST_PARAM_ID, tsmid);
DeleteTsmResponse deleteTsmResponse = (DeleteTsmResponse) getElastistorRestClient().executeCommand(deleteTsmCmd);
if (deleteTsmResponse != null) {
String jobstatus = deleteTsmResponse.getJobStatus();
if (jobstatus.equalsIgnoreCase("true")) {
s_logger.info("deletion of elastistor tsm successful");
return true;
} else {
s_logger.info("failed to delete elastistor tsm");
return false;
}
} else {
s_logger.info("elastistor tsm id not present");
}
}
s_logger.info("tsm id is null");
return false;
/*
* else { s_logger.error("no volume is present in the tsm"); } } else {
* s_logger.error(
* "List tsm failed, no tsm present in the eastistor for the given IP "
* ); return false; } return false;
*/
}
public static boolean deleteElastistorVolume(String esvolumeid) throws Throwable {
FileSystem fileSystem = listVolume(esvolumeid);
if (fileSystem != null) {
DeleteVolumeResponse deleteVolumeResponse = deleteVolume(esvolumeid, null);
if (deleteVolumeResponse != null) {
String jobid = deleteVolumeResponse.getJobId();
int jobstatus = queryAsyncJobResult(jobid);
if (jobstatus == 1) {
s_logger.info("elastistor volume successfully deleted");
return true;
} else {
s_logger.info("now force deleting the volume");
while (jobstatus != 1) {
DeleteVolumeResponse deleteVolumeResponse1 = deleteVolume(esvolumeid, "true");
if (deleteVolumeResponse1 != null) {
String jobid1 = deleteVolumeResponse1.getJobId();
jobstatus = queryAsyncJobResult(jobid1);
}
}
s_logger.info("elastistor volume successfully deleted");
return true;
}
} else {
s_logger.info("the given volume is not present on elastistor, datasetrespone is NULL");
return false;
}
} else {
s_logger.info("the given volume is not present on elastistor");
return false;
}
}
/**
* This gives a json response containing the list of Interfaces in
* elastistor.
*/
public static ListInterfacesResponse ListElastistorInterfaces(String controllerid) throws Throwable {
ListInterfacesCmd listInterfacesCmd = new ListInterfacesCmd();
listInterfacesCmd.putCommandParameter("controllerid", controllerid);
ListInterfacesResponse interfacesResponse = (ListInterfacesResponse) getElastistorRestClient().executeCommand(listInterfacesCmd);
if (interfacesResponse != null && interfacesResponse.getInterfaces() != null) {
return interfacesResponse;
} else {
throw new CloudRuntimeException("There are no elastistor interfaces.");
}
}
/**
* This creates a new Account in elastistor for the given domain name and
* returns its json response.
*/
public static CreateAccountResponse createElastistorAccount(String domainName) throws Throwable {
CreateAccountCmd createAccountCmd = new CreateAccountCmd();
createAccountCmd.putCommandParameter("name", domainName);
CreateAccountResponse createAccountResponse = (CreateAccountResponse) getElastistorRestClient().executeCommand(createAccountCmd);
if (createAccountResponse != null) {
return createAccountResponse;
} else {
throw new CloudRuntimeException("Creating Elastistor Account failed");
}
}
/**
* This gives a json response containing the list of Accounts in
* elastistor.
*/
public static ListAccountResponse ListElastistorAccounts() throws Throwable {
ListAccountsCmd listAccountsCmd = new ListAccountsCmd();
ListAccountResponse accountResponse = (ListAccountResponse) getElastistorRestClient().executeCommand(listAccountsCmd);
if (accountResponse != null) {
return accountResponse;
} else {
throw new CloudRuntimeException("List Elastistor Account failed");
}
}
/**
* This gives a json response containing the list of Pools in elastistor.
*/
public static ListPoolsResponse ListElastistorPools() throws Throwable {
ListPoolsCmd listPoolsCmd = new ListPoolsCmd();
ListPoolsResponse listPoolsResponse = (ListPoolsResponse) getElastistorRestClient().executeCommand(listPoolsCmd);
if (listPoolsResponse != null) {
return listPoolsResponse;
} else {
throw new CloudRuntimeException("List Elastistor pool failed");
}
}
/**
* This gives a json response containing the list of tsms in elastistor.
*/
private static ListTsmsResponse listTsm(String uuid) throws Throwable {
ListTsmCmd listTsmCmd = new ListTsmCmd();
listTsmCmd.putCommandParameter(ElastistorUtil.REST_PARAM_ID, uuid);
ListTsmsResponse listTsmsResponse = (ListTsmsResponse) getElastistorRestClient().executeCommand(listTsmCmd);
return listTsmsResponse;
}
/**
* This gives a json response containing the list of Volumes in elastistor.
*/
public static FileSystem listVolume(String uuid) throws Throwable {
ListFileSystemCmd listFileSystemCmd = new ListFileSystemCmd();
listFileSystemCmd.putCommandParameter(ElastistorUtil.REST_PARAM_ID, uuid);
ListFileSystemResponse listFileSystemResponse = (ListFileSystemResponse) getElastistorRestClient().executeCommand(listFileSystemCmd);
return listFileSystemResponse.getFilesystems().getFileSystem(0);
}
private static DeleteVolumeResponse deleteVolume(String esvolumeid, String forcedelete) throws Throwable {
DeleteVolumeCmd deleteVolumeCmd = new DeleteVolumeCmd();
deleteVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_ID, esvolumeid);
deleteVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_FORECEDELETE, forcedelete);
DeleteVolumeResponse deleteVolumeResponse = (DeleteVolumeResponse) getElastistorRestClient().executeCommand(deleteVolumeCmd);
return deleteVolumeResponse;
}
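/**
 * Polls ElastiCenter for the given async job until it leaves the pending
 * state (jobstatus == 0) and returns the final status; callers treat 1 as
 * success. Note that this helper, like the queryAsync* variants below,
 * busy-waits without a delay or timeout between polls.
 */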
private static int queryAsyncJobResult(String jobid) throws Throwable {
QueryAsyncJobResultCmd asyncJobResultCmd = new QueryAsyncJobResultCmd();
ElastiCenterClient restclient = getElastistorRestClient();
asyncJobResultCmd.putCommandParameter(ElastistorUtil.REST_PARAM_JOBID, jobid);
QueryAsyncJobResultResponse asyncJobResultResponse = (QueryAsyncJobResultResponse) restclient.executeCommand(asyncJobResultCmd);
if (asyncJobResultResponse != null) {
int jobstatus = asyncJobResultResponse.getAsync().getJobStatus();
while (jobstatus == 0) {
QueryAsyncJobResultResponse jobResultResponse = (QueryAsyncJobResultResponse) restclient.executeCommand(asyncJobResultCmd);
jobstatus = jobResultResponse.getAsync().getJobStatus();
}
return jobstatus;
}
return 0;
}
private static Tsm queryAsyncTsmJobResult(String jobid) throws Throwable {
QueryAsyncJobResultCmd asyncJobResultCmd = new QueryAsyncJobResultCmd();
ElastiCenterClient restclient = getElastistorRestClient();
asyncJobResultCmd.putCommandParameter(ElastistorUtil.REST_PARAM_JOBID, jobid);
QueryAsyncJobResultResponse asyncJobResultResponse = (QueryAsyncJobResultResponse) restclient.executeCommand(asyncJobResultCmd);
if (asyncJobResultResponse != null) {
int jobstatus = asyncJobResultResponse.getAsync().getJobStatus();
Tsm tsm = null;
while (jobstatus == 0) {
asyncJobResultResponse = (QueryAsyncJobResultResponse) restclient.executeCommand(asyncJobResultCmd);
jobstatus = asyncJobResultResponse.getAsync().getJobStatus();
}
if (jobstatus == 1) {
tsm = asyncJobResultResponse.getAsync().getJobResult().getTsm();
return tsm;
}
}
return null;
}
private static FileSystem queryAsyncVolumeJobResult(String jobid) throws Throwable {
QueryAsyncJobResultCmd asyncJobResultCmd = new QueryAsyncJobResultCmd();
ElastiCenterClient restclient = getElastistorRestClient();
asyncJobResultCmd.putCommandParameter(ElastistorUtil.REST_PARAM_JOBID, jobid);
QueryAsyncJobResultResponse asyncJobResultResponse = (QueryAsyncJobResultResponse) restclient.executeCommand(asyncJobResultCmd);
if (asyncJobResultResponse != null) {
int jobstatus = asyncJobResultResponse.getAsync().getJobStatus();
FileSystem volume = null;
while (jobstatus == 0) {
asyncJobResultResponse = (QueryAsyncJobResultResponse) restclient.executeCommand(asyncJobResultCmd);
jobstatus = asyncJobResultResponse.getAsync().getJobStatus();
}
if (jobstatus == 1) {
volume = asyncJobResultResponse.getAsync().getJobResult().getVolume();
return volume;
}
}
return null;
}
/**
* This method converts the capacity in bytes (long) to the string format
* expected by the elastistor REST API, e.g. 214748364800 bytes = 200G.
*/
private static String convertCapacityBytes(Long capacityBytes) {
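// 1073741824 = 1 GiB and 1099511627776 = 1 TiB; sizes strictly between the
// two are reported in gigabytes, everything else falls through to terabytes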
if ((1099511627776L) > capacityBytes && (capacityBytes > (1073741824))) {
return (String.valueOf(capacityBytes / (1024 * 1024 * 1024)) + "G");
} else {
int temp1 = (int) (capacityBytes / (1024 * 1024 * 1024));
int temp2 = temp1 / 1024;
return (String.valueOf(temp2) + "T");
}
}
static interface ElastiCenterCommand {
/*
* Returns the command string to be sent to the ElastiCenter
*/
public String getCommandName();
/*
* Utility method to allow the client to validate the input parameters
* before sending to the ElastiCenter.
*
* This command will be executed by the ElastiCenterClient only if this
* method returns true.
*/
public boolean validate();
/*
* Returns the query parameters that have to be passed to execute the
* command.
*
* Returns null if there are no query parameters associated with the
* command.
*/
public MultivaluedMap<String, String> getCommandParameters();
/*
* Adds a new key-value pair to the query parameters list.
*/
public void putCommandParameter(String key, String value);
/*
* Returns an instance of the response object type.
*
* Returns null if no response is expected.
*/
public Object getResponseObject();
}
private static class BaseCommand implements ElastiCenterCommand {
private String commandName = null;
private MultivaluedMap<String, String> commandParameters = null;
private Object responseObject = null;
/*
* Enforces that commands are initialized with a command name and an
* optional response object
*/
protected BaseCommand(String cmdName, Object responseObj) {
commandName = cmdName;
responseObject = responseObj;
}
@Override
public String getCommandName() {
return commandName;
}
@Override
public boolean validate() {
// TODO This method can be extended to do some generic
// validations.
return true;
}
@Override
public MultivaluedMap<String, String> getCommandParameters() {
return commandParameters;
}
@Override
public void putCommandParameter(String key, String value) {
if (null == commandParameters) {
commandParameters = new MultivaluedMapImpl();
}
commandParameters.add(key, value);
}
@Override
public Object getResponseObject() {
return responseObject;
}
}
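/*
 * Illustrative (hypothetical) usage of the command/client pair defined below,
 * assuming a reachable ElastiCenter; the address, API key and uuid values are
 * placeholders, not values taken from this codebase:
 *
 *   ElastiCenterClient client = new ElastiCenterClient("10.10.1.1", "apiKey");
 *   ListTsmCmd cmd = new ListTsmCmd();
 *   cmd.putCommandParameter(ElastistorUtil.REST_PARAM_ID, "tsm-uuid");
 *   ListTsmsResponse response = (ListTsmsResponse) client.executeCommand(cmd);
 */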
/**
* This is a REST client used to make HTTP calls to the elastistor
* ElastiCenter API.
*
* @author punith
*
*/
private static final class ElastiCenterClient {
public static boolean debug = false;
private boolean initialized = false;
private String apiKey = null;
private String elastiCenterAddress = null;
private String responseType = "json";
private boolean ignoreSSLCertificate = false;
private String restprotocol = "https://";
private String restpath = "/client/api";
private String restdefaultcommand = "listCapabilities";
private String queryparamcommand = "command";
private String queryparamapikey = "apikey";
private String queryparamresponse = "response";
public ElastiCenterClient(String address, String key) throws InvalidCredentialsException, InvalidParameterException, SSLHandshakeException, ServiceUnavailableException {
elastiCenterAddress = address;
apiKey = key;
initialize();
}
public void initialize() throws InvalidParameterException, SSLHandshakeException, InvalidCredentialsException, ServiceUnavailableException {
if (apiKey == null || apiKey.trim().isEmpty()) {
throw new InvalidParameterException("Unable to initialize. Please specify a valid API Key.");
}
if (elastiCenterAddress == null || elastiCenterAddress.trim().isEmpty()) {
// TODO : Validate the format, like valid IP address or
// hostname.
throw new InvalidParameterException("Unable to initialize. Please specify a valid ElastiCenter IP Address or Hostname.");
}
if (ignoreSSLCertificate) {
// Create a trust manager that does not validate certificate
// chains
TrustManager[] trustAllCerts = new TrustManager[] { new X509TrustManager() {
@Override
public X509Certificate[] getAcceptedIssuers() {
return null;
}
@Override
public void checkClientTrusted(X509Certificate[] certs, String authType) {
}
@Override
public void checkServerTrusted(X509Certificate[] certs, String authType) {
}
} };
HostnameVerifier hv = new HostnameVerifier() {
@Override
public boolean verify(String urlHostName, SSLSession session) {
return true;
}
};
// Install the all-trusting trust manager
try {
SSLContext sc = SSLUtils.getSSLContext();
sc.init(null, trustAllCerts, new SecureRandom());
HttpsURLConnection.setDefaultSSLSocketFactory(new SecureSSLSocketFactory(sc));
HttpsURLConnection.setDefaultHostnameVerifier(hv);
} catch (Exception e) {
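// nothing to do: if the permissive SSL context cannot be installed,
// the JVM's default SSL validation simply remains in effect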
;
}
}
ListCapabilitiesResponse listCapabilitiesResponse = null;
try {
initialized = true;
listCapabilitiesResponse = (ListCapabilitiesResponse) executeCommand(restdefaultcommand, null, new ListCapabilitiesResponse());
} catch (Throwable t) {
initialized = false;
if (t instanceof InvalidCredentialsException) {
throw (InvalidCredentialsException) t;
} else if (t instanceof ServiceUnavailableException) {
throw (ServiceUnavailableException) t;
} else if (t.getCause() instanceof SSLHandshakeException) {
throw new SSLHandshakeException("Unable to initialize. An untrusted SSL Certificate was received from " + elastiCenterAddress
+ ". Please verify your truststore or configure ElastiCenterClient to skip the SSL Validation. ");
} else if (t.getCause() instanceof ConnectException) {
throw new ServiceUnavailableException("Unable to initialize. Failed to connect to " + elastiCenterAddress
+ ". Please verify the IP Address, Network Connectivity and ensure that Services are running on the ElastiCenter Server. ");
}
throw new ServiceUnavailableException("Unable to initialize. Please contact your ElastiCenter Administrator. Exception " + t.getMessage());
}
if (null == listCapabilitiesResponse || null == listCapabilitiesResponse.getCapabilities() || null == listCapabilitiesResponse.getCapabilities().getVersion()) {
initialized = false;
throw new ServiceUnavailableException("Unable to execute command on the server");
}
}
public Object executeCommand(ElastiCenterCommand cmd) throws Throwable {
return executeCommand(cmd.getCommandName(), cmd.getCommandParameters(), cmd.getResponseObject());
}
public Object executeCommand(String command, MultivaluedMap<String, String> params, Object responeObj) throws Throwable {
if (!initialized) {
throw new IllegalStateException("Error : ElastiCenterClient is not initialized.");
}
if (command == null || command.trim().isEmpty()) {
throw new InvalidParameterException("No command to execute.");
}
try {
ClientConfig config = new DefaultClientConfig();
Client client = Client.create(config);
WebResource webResource = client.resource(UriBuilder.fromUri(restprotocol + elastiCenterAddress + restpath).build());
MultivaluedMap<String, String> queryParams = new MultivaluedMapImpl();
queryParams.add(queryparamapikey, apiKey);
queryParams.add(queryparamresponse, responseType);
queryParams.add(queryparamcommand, command);
if (null != params) {
for (String key : params.keySet()) {
queryParams.add(key, params.getFirst(key));
}
}
if (debug) {
System.out.println("Command Sent " + command + " : " + queryParams);
}
ClientResponse response = webResource.queryParams(queryParams).accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
if (response.getStatus() >= 300) {
if (debug)
System.out.println("ElastiCenter returned error code : " + response.getStatus());
if (401 == response.getStatus()) {
throw new InvalidCredentialsException("Please specify a valid API Key.");
} else if (431 == response.getStatus()) {
throw new InvalidParameterException(response.getHeaders().getFirst("X-Description"));
} else if (432 == response.getStatus()) {
throw new InvalidParameterException(command + " does not exist on the ElastiCenter server. Please specify a valid command or contact your ElastiCenter Administrator.");
} else {
throw new ServiceUnavailableException("Internal Error. Please contact your ElastiCenter Administrator.");
}
} else if (null != responeObj) {
String jsonResponse = response.getEntity(String.class);
if (debug) {
System.out.println("Command Response : " + jsonResponse);
}
Gson gson = new Gson();
return gson.fromJson(jsonResponse, responeObj.getClass());
} else {
return "Success";
}
} catch (Throwable t) {
throw t;
}
}
}
/**
* These are the Elastistor REST commands invoked by this plugin.
*/
private static final class CreateTsmCmd extends BaseCommand {
public CreateTsmCmd() {
super("createTsm", new CreateTsmCmdResponse());
}
}
private static final class AddQosGroupCmd extends BaseCommand {
public AddQosGroupCmd() {
super("addQosGroup", new AddQosGroupCmdResponse());
}
}
private static final class CreateVolumeCmd extends BaseCommand {
public CreateVolumeCmd() {
super("createVolume", new CreateVolumeCmdResponse());
}
}
private static final class ListVolumeiSCSIServiceCmd extends BaseCommand {
public ListVolumeiSCSIServiceCmd() {
super("listVolumeiSCSIService", new ListVolumeiSCSIServiceResponse());
}
}
private static final class ListiSCSIInitiatorCmd extends BaseCommand {
public ListiSCSIInitiatorCmd() {
super("listiSCSIInitiator", new ListiSCSIInitiatorResponse());
}
}
private static final class NfsServiceCmd extends BaseCommand {
public NfsServiceCmd() {
super("nfsService", new NfsServiceResponse());
}
}
private static final class UpdateControllerCmd extends BaseCommand {
public UpdateControllerCmd() {
super("updateController", new UpdateControllerResponse());
}
}
private static final class UpdateVolumeiSCSIServiceCmd extends BaseCommand {
public UpdateVolumeiSCSIServiceCmd() {
super("updateVolumeiSCSIService", new UpdateVolumeiSCSIServiceCmdResponse());
}
}
private static final class DeleteTsmCmd extends BaseCommand {
public DeleteTsmCmd() {
super("deleteTsm", new DeleteTsmResponse());
}
}
private static final class DeleteVolumeCmd extends BaseCommand {
public DeleteVolumeCmd() {
super("deleteFileSystem", new DeleteVolumeResponse());
}
}
private static final class QueryAsyncJobResultCmd extends BaseCommand {
public QueryAsyncJobResultCmd() {
super("queryAsyncJobResult", new QueryAsyncJobResultResponse());
}
}
private static final class ListTsmCmd extends BaseCommand {
public ListTsmCmd() {
super("listTsm", new ListTsmsResponse());
}
}
private static final class ListFileSystemCmd extends BaseCommand {
public ListFileSystemCmd() {
super("listFileSystem", new ListFileSystemResponse());
}
}
private static final class ListAccountsCmd extends BaseCommand {
public ListAccountsCmd() {
super("listAccount", new ListAccountResponse());
}
}
private static final class CreateAccountCmd extends BaseCommand {
public CreateAccountCmd() {
super("createAccount", new CreateAccountResponse());
}
}
private static final class ListInterfacesCmd extends BaseCommand {
public ListInterfacesCmd() {
super("listSharedNICs", new ListInterfacesResponse());
}
}
private static final class ListPoolsCmd extends BaseCommand {
public ListPoolsCmd() {
super("listHAPool", new ListPoolsResponse());
}
}
/**
* These are the Elastistor REST json response classes used to parse the
* json responses sent by elastistor.
*
*/
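/*
 * For example, based on the @SerializedName annotations in these classes, a
 * createTsm call is expected to return JSON roughly of the form (illustrative
 * values only):
 *
 *   {"addTsmResponse": {"jobid": "<job-uuid>", "success": "true"}}
 *
 * Gson maps such a payload onto CreateTsmCmdResponse via executeCommand above.
 */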
public static final class CreateTsmCmdResponse {
@SerializedName("addTsmResponse")
private JobId jobId;
public String getJobid() {
return jobId.getJobid();
}
public String getJobStatus() {
return jobId.getJobStatus();
}
@SerializedName("createTsmResponse")
private TsmWrapper tsmWrapper;
public Tsm getTsm() {
return tsmWrapper.getTsm();
}
}
public static final class Tsm {
@SerializedName("id")
private String uuid;
@SerializedName("name")
private String name;
@SerializedName("datasetid")
private String datasetid;
@SerializedName("ipaddress")
private String ipaddress;
@SerializedName("volumes")
private VolumeProperties[] volumeProperties;
public String getUuid() {
return uuid;
}
public String getName() {
return name;
}
public String getIpaddress() {
return ipaddress;
}
public String getDatasetid() {
return datasetid;
}
public boolean checkvolume() {
if (volumeProperties != null) {
return true;
} else {
return false;
}
}
public VolumeProperties getVolumeProperties(int i) {
return volumeProperties[i];
}
}
public static final class VolumeProperties {
@SerializedName("id")
private String id;
@SerializedName("groupid")
private String groupid;
@SerializedName("iops")
private String iops;
@SerializedName("name")
private String name;
public String getid() {
return id;
}
public String getQosgroupid() {
return groupid;
}
public String getName() {
return name;
}
public String getIops() {
return iops;
}
}
public static final class TsmWrapper {
@SerializedName("tsm")
private Tsm tsm;
public Tsm getTsm() {
return tsm;
}
}
public static final class AddQosGroupCmdResponse {
@SerializedName("addqosgroupresponse")
private QoSGroupWrapper qosGroupWrapper;
public QoSGroup getQoSGroup() {
return qosGroupWrapper.getQosGroup();
}
}
public static final class QoSGroupWrapper {
@SerializedName("qosgroup")
private QoSGroup qoSGroup;
public QoSGroup getQosGroup() {
return qoSGroup;
}
}
public static final class QoSGroup {
@SerializedName("id")
private String uuid;
@SerializedName("name")
private String name;
@SerializedName("qosgroupproperties")
private HashMap<String, String> qosGroupProperties;
public String getName() {
return name;
}
public String getUuid() {
return uuid;
}
public String getIops() {
return qosGroupProperties.get("iops");
}
public String getThroughput() {
return qosGroupProperties.get("throughput");
}
public String getLatency() {
return qosGroupProperties.get("latency");
}
}
public static final class UpdateVolumeiSCSIServiceCmdResponse {
@SerializedName("updatingvolumeiscsidetails")
private VolumeiSCSIServiceWrapper volumeiSCSIServiceWrapper;
public Volumeiscsioptions getVolumeiscsioptions() {
return volumeiSCSIServiceWrapper.getVolumeiscsioptions();
}
}
public static final class VolumeiSCSIServiceWrapper {
@SerializedName("viscsioptions")
private Volumeiscsioptions viscsioptions;
public Volumeiscsioptions getVolumeiscsioptions() {
return viscsioptions;
}
}
public static final class Volumeiscsioptions {
@SerializedName("id")
private String uuid;
@SerializedName("volume_id")
private String volumeid;
@SerializedName("iqnname")
private String iqnname;
public String getUuid() {
return uuid;
}
public String getVolumeid() {
return volumeid;
}
public String getIqn() {
return iqnname;
}
}
public static final class NfsServiceResponse {
@SerializedName("nfsserviceprotocolresponse")
private NfsServiceWrapper nfsServiceWrapper;
public NfsService getNfsService() {
return nfsServiceWrapper.getNfsservice();
}
}
public static final class NfsServiceWrapper {
@SerializedName("nfs")
private NfsService nfsService;
public NfsService getNfsservice() {
return nfsService;
}
}
public static final class NfsService {
@SerializedName("id")
private String uuid;
@SerializedName("STORAGEID")
private String datasetid;
@SerializedName("controllerid")
private String controllerid;
@SerializedName("authnetwork")
private String authnetwork;
public String getUuid() {
return uuid;
}
public String getDatasetid() {
return datasetid;
}
public String getControllerid() {
return controllerid;
}
public String getAuthnetwork() {
return authnetwork;
}
}
public static final class UpdateControllerResponse {
@SerializedName("updateControllerResponse")
private UpdateControllerWrapper controllerWrapper;
public Controller getController() {
return controllerWrapper.getController();
}
}
public static final class UpdateControllerWrapper {
@SerializedName("controller")
private Controller controller;
public Controller getController() {
return controller;
}
}
public static final class Controller {
@SerializedName("id")
private String uuid;
public String getUuid() {
return uuid;
}
}
public static final class CreateVolumeCmdResponse {
@SerializedName("createvolumeresponse")
private JobId jobId;
public String getJobid() {
return jobId.getJobid();
}
@SerializedName("adddatasetresponse")
private FileSystemWrapper fileSystemWrapper;
public FileSystem getFileSystem() {
return fileSystemWrapper.getFileSystem();
}
}
public static final class FileSystemWrapper {
@SerializedName("filesystem")
private FileSystem fileSystem;
public FileSystem getFileSystem() {
return fileSystem;
}
}
public static final class FileSystem {
@SerializedName("id")
private String uuid;
@SerializedName("name")
private String name;
@SerializedName("quota")
private String quota;
@SerializedName("accountid")
private String accountid;
@SerializedName("iqnname")
private String iqnname;
@SerializedName("nfsenabled")
private String nfsenabled;
@SerializedName("iscsienabled")
private String iscsienabled;
@SerializedName("path")
private String path;
@SerializedName("groupid")
private String groupid;
@SerializedName("compression")
private String compression;
@SerializedName("sync")
private String sync;
@SerializedName("deduplication")
private String deduplication;
@SerializedName("graceallowed")
private String graceallowed;
public String getCompression() {
return compression;
}
public String getSync() {
return sync;
}
public String getDeduplication() {
return deduplication;
}
public String getGraceallowed() {
return graceallowed;
}
public String getUuid() {
return uuid;
}
public String getQosGroupid() {
return groupid;
}
public String getName() {
return name;
}
public String getNfsenabled() {
return nfsenabled;
}
public String getIscsienabled() {
return iscsienabled;
}
public String getPath() {
return path;
}
public String getIqn() {
return iqnname;
}
public String getQuota() {
return quota;
}
public String getAccountid() {
return accountid;
}
}
public static final class DeleteTsmResponse {
@SerializedName("deleteTsmResponse")
private JobId jobId;
public String getJobStatus() {
return jobId.getJobStatus();
}
}
public static final class JobId {
@SerializedName("jobid")
private String jobid;
@SerializedName("success")
private String jobStatus;
@SerializedName("jobresult")
private JobResult jobresult;
@SerializedName("tsm")
private Tsm tsm;
@SerializedName("storage")
private FileSystem volume;
public Tsm getTsm() {
return tsm;
}
public FileSystem getVolume() {
return volume;
}
public JobResult getJobResult() {
return jobresult;
}
public String getJobid() {
return jobid;
}
public String getJobStatus() {
return jobStatus;
}
}
public static final class JobResult {
@SerializedName("tsm")
private Tsm tsm;
@SerializedName("storage")
private FileSystem volume;
public Tsm getTsm() {
return tsm;
}
public FileSystem getVolume() {
return volume;
}
}
public static final class DeleteVolumeResponse {
@SerializedName("deleteFileSystemResponse")
private JobId jobId;
public String getJobId() {
return jobId.getJobid();
}
}
public static final class ListCapabilitiesResponse {
@SerializedName("listcapabilitiesresponse")
private Capabilities capabilities;
public Capabilities getCapabilities() {
return capabilities;
}
}
public static final class ListFileSystemResponse {
@SerializedName("listFilesystemResponse")
private Filesystems filesystems;
public int getFilesystemCount() {
return filesystems.getCount();
}
public Filesystems getFilesystems() {
return filesystems;
}
}
public static final class Filesystems {
@SerializedName("count")
private int count;
@SerializedName("filesystem")
private FileSystem[] fileSystems;
public int getCount() {
return count;
}
public FileSystem getFileSystem(int i) {
return fileSystems[i];
}
}
public static final class ListPoolsResponse {
@SerializedName("listHAPoolResponse")
private Pools pools;
public Pools getPools() {
return pools;
}
}
public static final class Pools {
@SerializedName("hapool")
private Pool[] pool;
@SerializedName("count")
private int count;
public Pool getPool(int i) {
return pool[i];
}
public int getCount() {
return count;
}
}
public static final class Pool {
@SerializedName("id")
private String uuid;
@SerializedName("name")
private String name;
@SerializedName("currentAvailableSpace")
private String currentAvailableSpace;
@SerializedName("availIOPS")
private String availIOPS;
@SerializedName("status")
private String state;
@SerializedName("controllerid")
private String controllerid;
@SerializedName("gateway")
private String gateway;
public String getControllerid() {
return controllerid;
}
public String getUuid() {
return uuid;
}
public String getName() {
return name;
}
public String getAvailableSpace() {
return currentAvailableSpace;
}
public String getAvailIOPS() {
return availIOPS;
}
public String getState() {
return state;
}
public String getGateway() {
return gateway;
}
}
public static final class ListInterfacesResponse {
@SerializedName("listSharedNICsResponse")
private Interfaces interfaces;
public Interfaces getInterfaces() {
return interfaces;
}
}
public static final class Interfaces {
@SerializedName("nic")
private Interface[] interfaces;
@SerializedName("count")
private int count;
public Interface getInterface(int i) {
return interfaces[i];
}
public int getCount() {
return count;
}
}
public static final class Interface {
@SerializedName("id")
private String uuid;
@SerializedName("name")
private String name;
@SerializedName("status")
private String status;
public String getUuid() {
return uuid;
}
public String getName() {
return name;
}
public String getStatus() {
return status;
}
}
public static final class ListiSCSIInitiatorResponse {
@SerializedName("listInitiatorsResponse")
private Initiators initiators;
public Initiators getIInitiator() {
return initiators;
}
}
public static final class Initiators {
@SerializedName("initiator")
private Initiator[] initiators;
@SerializedName("count")
private int count;
public Initiator getInterface(int i) {
return initiators[i];
}
public int getCount() {
return count;
}
}
public static final class Initiator {
@SerializedName("id")
private String uuid;
@SerializedName("name")
private String name;
@SerializedName("initiatorgroup")
private String initiatorgroup;
public String getUuid() {
return uuid;
}
public String getName() {
return name;
}
public String getInitiatorgroup() {
return initiatorgroup;
}
}
public static final class ListAccountResponse {
@SerializedName("listAccountResponse")
private Accounts accounts;
public Accounts getAccounts() {
return accounts;
}
}
public static final class Accounts {
@SerializedName("account")
private Account[] Accounts;
@SerializedName("count")
private int count;
public Account getAccount(int i) {
return Accounts[i];
}
public int getCount() {
return count;
}
}
public static final class CreateAccountResponse {
@SerializedName("createaccountresponse")
private Accounts2 accounts;
public Account getAccount() {
return accounts.getAccount();
}
}
public static final class Accounts2 {
@SerializedName("account2")
private Account Account;
@SerializedName("count")
private int count;
public Account getAccount() {
return Account;
}
public int getCount() {
return count;
}
}
public static final class Account {
@SerializedName("id")
private String uuid;
@SerializedName("name")
private String name;
public String getUuid() {
return uuid;
}
public String getName() {
return name;
}
}
public static final class ListTsmsResponse {
@SerializedName("listTsmResponse")
private Tsms tsms;
public int getTsmsCount() {
return tsms.getCount();
}
public Tsms getTsms() {
return tsms;
}
}
public static final class Tsms {
@SerializedName("count")
private int count;
@SerializedName("listTsm")
private Tsm[] tsms;
public int getCount() {
return count;
}
public Tsm getTsm(int i) {
return tsms[i];
}
}
public static final class ListVolumeiSCSIServiceResponse {
@SerializedName("listVolumeiSCSIServiceResponse")
private IscsiVolumeService iscsiVolumes;
public int getVolumeCount() {
return iscsiVolumes.getCount();
}
public IscsiVolumeService getIscsiVolume() {
return iscsiVolumes;
}
}
public static final class IscsiVolumeService {
@SerializedName("count")
private int count;
@SerializedName("iSCSIService")
private IscsiVolume[] iscsiVolumes;
public int getCount() {
return count;
}
public IscsiVolume getIscsiVolume(int i) {
return iscsiVolumes[i];
}
}
public static final class IscsiVolume {
@SerializedName("id")
private String uuid;
@SerializedName("ag_id")
private String agid;
@SerializedName("ig_id")
private String igid;
public String getAg_id() {
return agid;
}
public String getUuid() {
return uuid;
}
public String getIg_id() {
return igid;
}
}
public static final class QueryAsyncJobResultResponse {
@SerializedName("queryasyncjobresultresponse")
private Async async;
public Async getAsync() {
return async;
}
}
public static final class Async {
@SerializedName("jobstatus")
private int jobstatus;
@SerializedName("jobresult")
private JobId jobresult;
@SerializedName("cmd")
private String cmd;
public int getJobStatus() {
return jobstatus;
}
public JobId getJobResult() {
return jobresult;
}
public String getCmd() {
return cmd;
}
}
public static final class Capabilities {
@SerializedName("capability")
private HashMap<String, String> capabilites;
public String getVersion() {
return capabilites.get("cloudByteVersion");
}
}
/*
*
* change Volume IOPS
*/
public static Answer updateElastistorVolumeQosGroup(String volumeId, Long newIOPS, String graceAllowed) throws Throwable {
FileSystem fileSystem = listVolume(volumeId);
String qosid = fileSystem.getQosGroupid();
if (qosid != null) {
UpdateQosGroupCmdResponse qosGroupCmdResponse = updateQosGroupVolume(newIOPS.toString(), qosid, graceAllowed);
if (qosGroupCmdResponse.getQoSGroup(0).uuid != null) {
return new Answer(null, true, null);
} else {
return new Answer(null, false, "Update Qos Group Failed");
}
} else {
return new Answer(null, false, "Qos Group id is NULL");
}
}
private static UpdateQosGroupCmdResponse updateQosGroupVolume(String iops, String qosgroupid, String graceAllowed) throws Throwable {
UpdateQosGroupCmd updateQosGroupCmd = new UpdateQosGroupCmd();
updateQosGroupCmd.putCommandParameter("id", qosgroupid);
updateQosGroupCmd.putCommandParameter("iops", iops);
updateQosGroupCmd.putCommandParameter("graceallowed", graceAllowed);
UpdateQosGroupCmdResponse updateQosGroupCmdResponse = (UpdateQosGroupCmdResponse) getElastistorRestClient().executeCommand(updateQosGroupCmd);
return updateQosGroupCmdResponse;
}
private static final class UpdateQosGroupCmd extends BaseCommand {
public UpdateQosGroupCmd() {
super("updateQosGroup", new UpdateQosGroupCmdResponse());
}
}
public static final class UpdateQosGroupCmdResponse {
@SerializedName("updateqosresponse")
private QoSGroupWrapperChangeVolumeIops qosGroupWrapper;
public QoSGroup getQoSGroup(int i) {
return qosGroupWrapper.getQosGroup(i);
}
}
public static final class QoSGroupWrapperChangeVolumeIops {
@SerializedName("qosgroup")
private QoSGroup qoSGroup[];
public QoSGroup getQosGroup(int i) {
return qoSGroup[i];
}
}
/*
* resize volume
*/
public static Boolean updateElastistorVolumeSize(String volumeId, Long newSize) throws Throwable {
Boolean status = false;
String quotasize = (String.valueOf(newSize / (1024 * 1024 * 1024)) + "G");
UpdateFileSystemCmdResponse fileSystemCmdResponse = updateFileSystem(quotasize, volumeId, null, null, null);
if (fileSystemCmdResponse.getFileSystem(0).uuid != null) {
status = true;
return status;
}
return status;
}
public static UpdateFileSystemCmdResponse updateFileSystem(String quotasize, String esvolumeid, String deduplication, String compression, String sync) throws Throwable {
UpdateFileSystemCmd fileSystemCmd = new UpdateFileSystemCmd();
fileSystemCmd.putCommandParameter("id", esvolumeid);
if (null != quotasize)
fileSystemCmd.putCommandParameter("quotasize", quotasize);
if (null != deduplication)
fileSystemCmd.putCommandParameter("deduplication", deduplication);
if (null != compression)
fileSystemCmd.putCommandParameter("compression", compression);
if (null != sync)
fileSystemCmd.putCommandParameter("sync", sync);
UpdateFileSystemCmdResponse fileSystemCmdResponse = (UpdateFileSystemCmdResponse) getElastistorRestClient().executeCommand(fileSystemCmd);
return fileSystemCmdResponse;
}
private static final class UpdateFileSystemCmd extends BaseCommand {
public UpdateFileSystemCmd() {
super("updateFileSystem", new UpdateFileSystemCmdResponse());
}
}
private static final class UpdateFileSystemCmdResponse {
@SerializedName("updatefilesystemresponse")
private UpdateFileSystemWrapper fileSystemWrapper;
public FileSystem getFileSystem(int i) {
return fileSystemWrapper.getFileSystem(i);
}
}
public class UpdateFileSystemWrapper {
@SerializedName("filesystem")
private FileSystem fileSystem[];
public FileSystem getFileSystem(int i) {
return fileSystem[i];
}
}
/*
* create snapshot
*/
public static Answer createElastistorVolumeSnapshot(String volumeId, String snapshotName) throws Throwable {
CreateStorageSnapshotCmd snapshotCmd = new CreateStorageSnapshotCmd();
snapshotCmd.putCommandParameter("id", volumeId);
snapshotCmd.putCommandParameter("name", snapshotName);
CreateStorageSnapshotCmdResponse snapshotCmdResponse = (CreateStorageSnapshotCmdResponse) getElastistorRestClient().executeCommand(snapshotCmd);
if (snapshotCmdResponse.getStorageSnapshot().getId() != null) {
return new Answer(null, true, snapshotCmdResponse.getStorageSnapshot().getId());
} else {
return new Answer(null, false, "snapshot failed");
}
}
private static final class CreateStorageSnapshotCmd extends BaseCommand {
public CreateStorageSnapshotCmd() {
super("createStorageSnapshot", new CreateStorageSnapshotCmdResponse() );
}
}
private static final class CreateStorageSnapshotCmdResponse {
@SerializedName("createStorageSnapshotResponse")
private StorageSnapshotWrapper StorageSnapshot;
public StorageSnapshot getStorageSnapshot() {
return StorageSnapshot.getStorageSnapshot();
}
}
public static final class StorageSnapshotWrapper {
@SerializedName("StorageSnapshot")
private StorageSnapshot snapshot;
public StorageSnapshot getStorageSnapshot() {
return snapshot;
}
}
public static final class StorageSnapshot {
@SerializedName("id")
private String uuid;
@SerializedName("name")
private String name;
public String getId() {
return uuid;
}
public String getName() {
return name;
}
}
// update the TSM storage
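// the quota string is derived from capacityBytes: sizes up to 1024 GiB are
// sent as "<n>G", larger sizes as "<n/1024>T" (mirroring convertCapacityBytes)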
public static UpdateTsmStorageCmdResponse updateElastistorTsmStorage(String capacityBytes, String uuid) throws Throwable {
Long size = (Long.parseLong(capacityBytes) / (1024 * 1024 * 1024));
String quotasize = null;
if (size > 1024) {
quotasize = (String.valueOf(size / 1024) + "T");
} else {
quotasize = String.valueOf(size) + "G";
}
s_logger.info("elastistor tsm storage is being updated to " + quotasize);
UpdateTsmStorageCmd updateTsmStorageCmd = new UpdateTsmStorageCmd();
updateTsmStorageCmd.putCommandParameter("id", uuid);
updateTsmStorageCmd.putCommandParameter("quotasize", quotasize);
UpdateTsmStorageCmdResponse updateTsmStorageCmdResponse = (UpdateTsmStorageCmdResponse) getElastistorRestClient().executeCommand(updateTsmStorageCmd);
return updateTsmStorageCmdResponse;
}
private static final class UpdateTsmStorageCmd extends BaseCommand {
public UpdateTsmStorageCmd() {
super("updateStorage", new UpdateTsmStorageCmdResponse());
}
}
public static final class UpdateTsmStorageCmdResponse {
@SerializedName("updatedatasetresponse")
private StorageWrapper storageWrapper;
public Storage getStorage() {
return storageWrapper.getStorage();
}
}
public static final class StorageWrapper {
@SerializedName("storage")
private Storage storage;
public Storage getStorage() {
return storage;
}
}
public static final class Storage {
@SerializedName("id")
private String uuid;
@SerializedName("name")
private String name;
@SerializedName("quota")
private String quota;
public String getId() {
return uuid;
}
public String getName() {
return name;
}
public String getsize() {
return quota;
}
}
// update the TSM IOPS
public static UpdateTsmCmdResponse updateElastistorTsmIOPS(String capacityIOPs, String uuid) throws Throwable {
s_logger.info("elastistor tsm IOPS is being updated to " + capacityIOPs);
UpdateTsmCmd updateTsmCmd = new UpdateTsmCmd();
String throughput = String.valueOf(Long.parseLong(capacityIOPs)*4);
updateTsmCmd.putCommandParameter("id", uuid);
updateTsmCmd.putCommandParameter("iops", capacityIOPs);
updateTsmCmd.putCommandParameter("throughput", throughput);
UpdateTsmCmdResponse updateTsmStorageCmdResponse = (UpdateTsmCmdResponse) getElastistorRestClient().executeCommand(updateTsmCmd);
return updateTsmStorageCmdResponse;
}
private static final class UpdateTsmCmd extends BaseCommand {
public UpdateTsmCmd() {
super("updateTsm", new UpdateTsmCmdResponse());
}
}
public static final class UpdateTsmCmdResponse {
@SerializedName("updateTsmResponse")
private UpdateTsmWrapper tsmWrapper;
public Tsm getTsm(int i) {
return tsmWrapper.getTsm(i);
}
}
public static final class UpdateTsmWrapper {
@SerializedName("count")
private int count;
@SerializedName("tsm")
private Tsm[] tsms;
public int getCount() {
return count;
}
public Tsm getTsm(int i) {
return tsms[i];
}
}
}
|
plugins/storage/volume/cloudbyte/src/org/apache/cloudstack/storage/datastore/util/ElastistorUtil.java
|
//
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//
package org.apache.cloudstack.storage.datastore.util;
import com.cloud.agent.api.Answer;
import com.cloud.utils.exception.CloudRuntimeException;
import com.google.gson.Gson;
import com.google.gson.annotations.SerializedName;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.api.client.config.ClientConfig;
import com.sun.jersey.api.client.config.DefaultClientConfig;
import com.sun.jersey.core.util.MultivaluedMapImpl;
import org.apache.cloudstack.framework.config.dao.ConfigurationDao;
import org.apache.cloudstack.utils.security.SSLUtils;
import org.apache.cloudstack.utils.security.SecureSSLSocketFactory;
import org.apache.http.auth.InvalidCredentialsException;
import org.apache.log4j.Logger;
import javax.naming.ServiceUnavailableException;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLHandshakeException;
import javax.net.ssl.SSLSession;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.UriBuilder;
import java.net.ConnectException;
import java.security.InvalidParameterException;
import java.security.SecureRandom;
import java.security.cert.X509Certificate;
import java.util.HashMap;
public class ElastistorUtil {
private static final Logger s_logger = Logger.getLogger(ElastistorUtil.class);
private static ConfigurationDao configurationDao;
public static ConfigurationDao getConfigurationDao() {
return configurationDao;
}
public static void setConfigurationDao(ConfigurationDao configurationDao) {
ElastistorUtil.configurationDao = configurationDao;
}
/**
* Elastistor REST API Param Keys. These should match exactly with the
* elastistor API commands' params.
*/
public static final String REST_PARAM_COMMAND = "command";
public static final String REST_PARAM_APIKEY = "apikey";
public static final String REST_PARAM_KEYWORD = "keyword";
public static final String REST_PARAM_ID = "id";
public static final String REST_PARAM_QUOTA_SIZE = "quotasize";
public static final String REST_PARAM_READONLY = "readonly";
public static final String REST_PARAM_RESPONSE = "response";
public static final String REST_PARAM_POOLID = "poolid";
public static final String REST_PARAM_ACCOUNTID = "accountid";
public static final String REST_PARAM_GATEWAY = "router";
public static final String REST_PARAM_SUBNET = "subnet";
public static final String REST_PARAM_INTERFACE = "tntinterface";
public static final String REST_PARAM_IPADDRESS = "ipaddress";
public static final String REST_PARAM_JOBID = "jobId";
public static final String REST_PARAM_FORECEDELETE = "forcedelete";
public static final String REST_PARAM_TSM_THROUGHPUT = "totalthroughput";
public static final String REST_PARAM_NAME = "name";
public static final String REST_PARAM_NOOFCOPIES = "noofcopies";
public static final String REST_PARAM_RECORDSIZE = "recordsize";
public static final String REST_PARAM_TOTALIOPS = "totaliops";
public static final String REST_PARAM_LATENCY = "latency";
public static final String REST_PARAM_BLOCKSIZE = "blocksize";
public static final String REST_PARAM_GRACEALLOWED = "graceallowed";
public static final String REST_PARAM_IOPS = "iops";
public static final String REST_PARAM_THROUGHPUT = "throughput";
public static final String REST_PARAM_MEMLIMIT = "memlimit";
public static final String REST_PARAM_NETWORKSPEED = "networkspeed";
public static final String REST_PARAM_TSMID = "tsmid";
public static final String REST_PARAM_DATASETID = "datasetid";
public static final String REST_PARAM_QOSGROUPID = "qosgroupid";
public static final String REST_PARAM_DEDUPLICATION = "deduplication";
public static final String REST_PARAM_COMPRESSION = "compression";
public static final String REST_PARAM_SYNC = "sync";
public static final String REST_PARAM_MOUNTPOINT = "mountpoint";
public static final String REST_PARAM_CASESENSITIVITY = "casesensitivity";
public static final String REST_PARAM_UNICODE = "unicode";
public static final String REST_PARAM_PROTOCOLTYPE = "protocoltype";
public static final String REST_PARAM_AUTHNETWORK = "authnetwork";
public static final String REST_PARAM_MAPUSERSTOROOT = "mapuserstoroot";
public static final String REST_PARAM_STORAGEID = "storageid";
public static final String REST_PARAM_TPCONTROL = "tpcontrol";
public static final String REST_PARAM_IOPSCONTROL = "iopscontrol";
/**
* Constants related to elastistor which are persisted in cloudstack
* databases as keys.
*/
public static final String ES_SUBNET = "essubnet";
public static final String ES_INTERFACE = "estntinterface";
public static final String ES_GATEWAY = "esdefaultgateway";
public static final String ES_PROVIDER_NAME = "CloudByte";
public static final String ES_ACCOUNT_ID = "esAccountId";
public static final String ES_POOL_ID = "esPoolId";
public static final String ES_ACCOUNT_NAME = "esAccountName";
public static final String ES_STORAGE_IP = "esStorageIp";
public static final String ES_STORAGE_PORT = "esStoragePort";
public static final String ES_STORAGE_TYPE = "esStorageType";
public static final String ES_MANAGEMENT_IP = "esMgmtIp";
public static final String ES_MANAGEMENT_PORT = "esMgmtPort";
public static final String ES_API_KEY = "esApiKey";
public static final String ES_VOLUME_ID = "esVolumeId";
public static final String ES_VOLUME_GROUP_ID = "esVolumeGroupId";
public static final String ES_FILE_SYSTEM_ID = "esFilesystemId";
/**
* Values from configuration that are required for every invocation of
* ElastiCenter API. These might in turn be saved as DB updates along with
* above keys.
*/
public static String s_esIPVAL = "";
public static String s_esAPIKEYVAL = "";
public static String s_esACCOUNTIDVAL = "";
public static String s_esPOOLIDVAL = "";
public static String s_esSUBNETVAL = "";
public static String s_esINTERFACEVAL = "";
public static String s_esGATEWAYVAL = "";
/**
* hardcoded constants for elastistor api calls.
*/
private static final String ES_NOOFCOPIES_VAL = "1";
private static final String ES_BLOCKSIZE_VAL = "4K";
private static final String ES_LATENCY_VAL = "15";
private static final String ES_GRACEALLOWED_VAL = "false";
private static final String ES_MEMLIMIT_VAL = "0";
private static final String ES_NETWORKSPEED_VAL = "0";
private static final String ES_DEDUPLICATION_VAL = "off";
private static final String ES_COMPRESSION_VAL = "off";
private static final String ES_CASESENSITIVITY_VAL = "sensitive";
private static final String ES_READONLY_VAL = "off";
private static final String ES_UNICODE_VAL = "off";
private static final String ES_AUTHNETWORK_VAL = "all";
private static final String ES_MAPUSERSTOROOT_VAL = "yes";
private static final String ES_SYNC_VAL = "always";
private static final String ES_TPCONTROL_VAL = "false";
private static final String ES_IOPSCONTROL_VAL = "true";
/**
* Private constructor so that this class is never instantiated.
*/
private ElastistorUtil() {
}
/**
* This initializes a new Jersey REST client for HTTP calls to ElastiCenter.
*/
public static ElastiCenterClient getElastistorRestClient() {
ElastiCenterClient restclient = null;
try {
String ip = getConfigurationDao().getValue("cloudbyte.management.ip");
String apikey = getConfigurationDao().getValue("cloudbyte.management.apikey");
if (ip == null) {
throw new CloudRuntimeException("set the value of cloudbyte.management.ip in global settings");
}
if (apikey == null) {
throw new CloudRuntimeException("set the value of cloudbyte.management.apikey in global settings");
}
restclient = new ElastiCenterClient(ip, apikey);
} catch (InvalidCredentialsException e) {
throw new CloudRuntimeException("InvalidCredentialsException:" + e.getMessage(), e);
} catch (InvalidParameterException e) {
throw new CloudRuntimeException("InvalidParameterException:" + e.getMessage(), e);
} catch (SSLHandshakeException e) {
throw new CloudRuntimeException("SSLHandshakeException:" + e.getMessage(), e);
} catch (ServiceUnavailableException e) {
throw new CloudRuntimeException("ServiceUnavailableException:" + e.getMessage(), e);
}
return restclient;
}
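// Illustrative usage sketch (not part of the original source): callers obtain the
// shared REST client and execute one of the command objects defined further below.
// The command classes are private to this utility, so external code goes through the
// public helpers such as ListElastistorAccounts() instead.
//
//   ElastiCenterClient client = getElastistorRestClient();
//   ListAccountsCmd cmd = new ListAccountsCmd();
//   ListAccountResponse accounts = (ListAccountResponse) client.executeCommand(cmd);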
public static void setElastistorApiKey(String value) {
s_esAPIKEYVAL = value;
}
public static void setElastistorManagementIp(String value) {
s_esIPVAL = value;
}
public static void setElastistorPoolId(String value) {
s_esPOOLIDVAL = value;
}
public static void setElastistorAccountId(String value) {
s_esACCOUNTIDVAL = value;
}
public static void setElastistorGateway(String value) {
s_esGATEWAYVAL = value;
}
public static void setElastistorInterface(String value) {
s_esINTERFACEVAL = value;
}
public static void setElastistorSubnet(String value) {
s_esSUBNETVAL = value;
}
/**
* This returns the ElastiCenter account id for the given domain name, creating a
* new account in ElastiCenter if none exists yet.
*
* @return the uuid of the matching or newly created account
*/
public static String getElastistorAccountId(String domainName) throws Throwable {
ListAccountResponse listAccountResponse = ListElastistorAccounts();
if (listAccountResponse.getAccounts().getCount() != 0) {
int i;
// check whether an account with the given domain name is
// already present in the list of elasticenter accounts
for (i = 0; i < listAccountResponse.getAccounts().getCount(); i++) {
if (domainName.equals(listAccountResponse.getAccounts().getAccount(i).getName())) {
return listAccountResponse.getAccounts().getAccount(i).getUuid();
}
}
// if no account matches the given domain name, create one with
// that name
CreateAccountResponse createAccountResponse = createElastistorAccount(domainName);
return createAccountResponse.getAccount().getUuid();
} else {
// if no account is present in elasticenter, create one
CreateAccountResponse createAccountResponse = createElastistorAccount(domainName);
return createAccountResponse.getAccount().getUuid();
}
}
/**
* This creates a new tenant storage machine (TSM) for the given storage pool
* IP in elastistor.
*
* @param domainName
* TODO
*/
public static Tsm createElastistorTsm(String storagePoolName, String storageIp, Long capacityBytes, Long capacityIops, String domainName) throws Throwable {
String totalthroughput = String.valueOf(capacityIops * 4);
String totaliops = String.valueOf(capacityIops);
String quotasize = convertCapacityBytes(capacityBytes);
CreateTsmCmd createTsmCmd = new CreateTsmCmd();
if (null != ElastistorUtil.s_esACCOUNTIDVAL)
createTsmCmd.putCommandParameter(ElastistorUtil.REST_PARAM_ACCOUNTID, domainName);
if (null != totalthroughput)
createTsmCmd.putCommandParameter(ElastistorUtil.REST_PARAM_TSM_THROUGHPUT, totalthroughput);
if (null != ElastistorUtil.s_esPOOLIDVAL)
createTsmCmd.putCommandParameter(ElastistorUtil.REST_PARAM_POOLID, ElastistorUtil.s_esPOOLIDVAL);
if (null != storagePoolName)
createTsmCmd.putCommandParameter(ElastistorUtil.REST_PARAM_NAME, storagePoolName);
if (null != quotasize)
createTsmCmd.putCommandParameter(ElastistorUtil.REST_PARAM_QUOTA_SIZE, quotasize);
if (null != storageIp)
createTsmCmd.putCommandParameter(ElastistorUtil.REST_PARAM_IPADDRESS, storageIp);
if (null != ElastistorUtil.s_esSUBNETVAL)
createTsmCmd.putCommandParameter(ElastistorUtil.REST_PARAM_SUBNET, ElastistorUtil.s_esSUBNETVAL);
if (null != ElastistorUtil.s_esGATEWAYVAL)
createTsmCmd.putCommandParameter(ElastistorUtil.REST_PARAM_GATEWAY, ElastistorUtil.s_esGATEWAYVAL);
if (null != ElastistorUtil.s_esINTERFACEVAL)
createTsmCmd.putCommandParameter(ElastistorUtil.REST_PARAM_INTERFACE, ElastistorUtil.s_esINTERFACEVAL);
if (null != ElastistorUtil.ES_NOOFCOPIES_VAL)
createTsmCmd.putCommandParameter(ElastistorUtil.REST_PARAM_NOOFCOPIES, ElastistorUtil.ES_NOOFCOPIES_VAL);
if (null != ElastistorUtil.ES_BLOCKSIZE_VAL)
createTsmCmd.putCommandParameter(ElastistorUtil.REST_PARAM_RECORDSIZE, ElastistorUtil.ES_BLOCKSIZE_VAL);
if (null != totaliops)
createTsmCmd.putCommandParameter(ElastistorUtil.REST_PARAM_TOTALIOPS, totaliops);
if (null != ElastistorUtil.ES_LATENCY_VAL)
createTsmCmd.putCommandParameter(ElastistorUtil.REST_PARAM_LATENCY, ElastistorUtil.ES_LATENCY_VAL);
if (null != ElastistorUtil.ES_BLOCKSIZE_VAL)
createTsmCmd.putCommandParameter(ElastistorUtil.REST_PARAM_BLOCKSIZE, ElastistorUtil.ES_BLOCKSIZE_VAL);
if (null != ElastistorUtil.ES_GRACEALLOWED_VAL)
createTsmCmd.putCommandParameter(ElastistorUtil.REST_PARAM_GRACEALLOWED, ElastistorUtil.ES_GRACEALLOWED_VAL);
CreateTsmCmdResponse createTsmCmdResponse;
Tsm tsm = null;
try {
createTsmCmdResponse = (CreateTsmCmdResponse) getElastistorRestClient().executeCommand(createTsmCmd);
if (createTsmCmdResponse.getJobid() == null) {
throw new CloudRuntimeException("tsm creation failed , contact elatistor admin");
} else {
tsm = queryAsyncTsmJobResult(createTsmCmdResponse.getJobid());
if (tsm == null) {
throw new CloudRuntimeException("tsm queryAsync failed , contact elatistor admin");
}
}
return tsm;
} catch (Exception e) {
throw new CloudRuntimeException("tsm creation failed , contact elatistor admin" + e.toString());
}
}
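// Illustrative sketch (not part of the original source): a hypothetical call that
// provisions a TSM for a new primary storage pool. The pool name, IP, sizes and
// domain name below are assumed example values.
//
//   Tsm tsm = ElastistorUtil.createElastistorTsm("POOL1", "10.10.1.5", 214748364800L, 1000L, "ROOT");
//   s_logger.info("created TSM " + tsm.getUuid() + " on dataset " + tsm.getDatasetid());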
/**
* This creates the specified volume on the created tsm.
*/
public static FileSystem createElastistorVolume(String volumeName, String tsmid, Long capacityBytes, Long capacityIops, String protocoltype, String mountpoint) throws Throwable {
String datasetid;
String qosgroupid;
String VolumeName = volumeName;
String totaliops = String.valueOf(capacityIops);
//String totalthroughput = String.valueOf(capacityIops * 4);
String totalthroughput = "0";
String quotasize = convertCapacityBytes(capacityBytes);
AddQosGroupCmd addQosGroupCmd = new AddQosGroupCmd();
ListTsmsResponse listTsmsResponse = listTsm(tsmid);
tsmid = listTsmsResponse.getTsms().getTsm(0).getUuid();
datasetid = listTsmsResponse.getTsms().getTsm(0).getDatasetid();
if (null != VolumeName)
addQosGroupCmd.putCommandParameter(ElastistorUtil.REST_PARAM_NAME, "QOS_" + VolumeName);
if (null != totaliops)
addQosGroupCmd.putCommandParameter(ElastistorUtil.REST_PARAM_IOPS, totaliops);
if (null != ElastistorUtil.ES_LATENCY_VAL)
addQosGroupCmd.putCommandParameter(ElastistorUtil.REST_PARAM_LATENCY, ElastistorUtil.ES_LATENCY_VAL);
if (null != totalthroughput)
addQosGroupCmd.putCommandParameter(ElastistorUtil.REST_PARAM_THROUGHPUT, totalthroughput);
if (null != ElastistorUtil.ES_MEMLIMIT_VAL)
addQosGroupCmd.putCommandParameter(ElastistorUtil.REST_PARAM_MEMLIMIT, ElastistorUtil.ES_MEMLIMIT_VAL);
if (null != ElastistorUtil.ES_NETWORKSPEED_VAL)
addQosGroupCmd.putCommandParameter(ElastistorUtil.REST_PARAM_NETWORKSPEED, ElastistorUtil.ES_NETWORKSPEED_VAL);
if (null != tsmid)
addQosGroupCmd.putCommandParameter(ElastistorUtil.REST_PARAM_TSMID, tsmid);
if (null != datasetid)
addQosGroupCmd.putCommandParameter(ElastistorUtil.REST_PARAM_DATASETID, datasetid);
if (null != ElastistorUtil.ES_GRACEALLOWED_VAL)
addQosGroupCmd.putCommandParameter(ElastistorUtil.REST_PARAM_GRACEALLOWED, ElastistorUtil.ES_GRACEALLOWED_VAL);
if (null != ElastistorUtil.ES_IOPSCONTROL_VAL)
addQosGroupCmd.putCommandParameter(ElastistorUtil.REST_PARAM_IOPSCONTROL, ElastistorUtil.ES_IOPSCONTROL_VAL);
if (null != ElastistorUtil.ES_TPCONTROL_VAL)
addQosGroupCmd.putCommandParameter(ElastistorUtil.REST_PARAM_TPCONTROL, ElastistorUtil.ES_TPCONTROL_VAL);
AddQosGroupCmdResponse addQosGroupCmdResponse = (AddQosGroupCmdResponse) getElastistorRestClient().executeCommand(addQosGroupCmd);
if (addQosGroupCmdResponse.getQoSGroup().getUuid() == null) {
throw new CloudRuntimeException("adding qos group failed , contact elatistor admin");
}
else {
CreateVolumeCmd createVolumeCmd = new CreateVolumeCmd();
qosgroupid = addQosGroupCmdResponse.getQoSGroup().getUuid();
// if (null !=
// ElastistorUtil.s_esACCOUNTIDVAL)createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_ACCOUNTID,ElastistorUtil.s_esACCOUNTIDVAL);
if (null != qosgroupid)
createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_QOSGROUPID, qosgroupid);
if (null != tsmid)
createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_TSMID, tsmid);
// if (null !=
// ElastistorUtil.s_esPOOLIDVAL)createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_POOLID,ElastistorUtil.s_esPOOLIDVAL);
if (null != VolumeName)
createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_NAME, VolumeName);
if (null != quotasize)
createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_QUOTA_SIZE, quotasize);
if (protocoltype.equalsIgnoreCase("nfs")) {
if (null != ElastistorUtil.ES_BLOCKSIZE_VAL)
createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_BLOCKSIZE, ElastistorUtil.ES_BLOCKSIZE_VAL);
if (null != ElastistorUtil.ES_BLOCKSIZE_VAL)
createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_RECORDSIZE, ElastistorUtil.ES_BLOCKSIZE_VAL);
} else {
if (null != ElastistorUtil.ES_BLOCKSIZE_VAL)
createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_BLOCKSIZE, "512B");
if (null != ElastistorUtil.ES_BLOCKSIZE_VAL)
createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_RECORDSIZE, "512B");
}
if (null != ElastistorUtil.ES_DEDUPLICATION_VAL)
createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_DEDUPLICATION, ElastistorUtil.ES_DEDUPLICATION_VAL);
if (null != ElastistorUtil.ES_SYNC_VAL)
createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_SYNC, ElastistorUtil.ES_SYNC_VAL);
if (null != ElastistorUtil.ES_COMPRESSION_VAL)
createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_COMPRESSION, ElastistorUtil.ES_COMPRESSION_VAL);
// if (null !=
// ElastistorUtil.ES_NOOFCOPIES_VAL)createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_NOOFCOPIES,
// ElastistorUtil.ES_NOOFCOPIES_VAL);
createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_MOUNTPOINT, mountpoint);
// if (null !=
// ElastistorUtil.ES_CASESENSITIVITY_VAL)createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_CASESENSITIVITY,
// ElastistorUtil.ES_CASESENSITIVITY_VAL);
// if (null !=
// ElastistorUtil.ES_READONLY_VAL)createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_READONLY,
// ElastistorUtil.ES_READONLY_VAL);
if (null != datasetid)
createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_DATASETID, datasetid);
// if (null !=
// ElastistorUtil.ES_UNICODE_VAL)createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_UNICODE,
// ElastistorUtil.ES_UNICODE_VAL);
createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_PROTOCOLTYPE, protocoltype);
// if (null !=
// ElastistorUtil.ES_AUTHNETWORK_VAL)createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_AUTHNETWORK,
// ElastistorUtil.ES_AUTHNETWORK_VAL);
// if (null !=
// ElastistorUtil.ES_MAPUSERSTOROOT_VAL)createVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_MAPUSERSTOROOT,
// ElastistorUtil.ES_MAPUSERSTOROOT_VAL);
CreateVolumeCmdResponse createVolumeCmdResponse;
FileSystem volume = null;
FileSystem fileSystem = null;
try {
createVolumeCmdResponse = (CreateVolumeCmdResponse) getElastistorRestClient().executeCommand(createVolumeCmd);
if (createVolumeCmdResponse.getJobid() == null) {
throw new CloudRuntimeException("creating volume failed , contact elatistor admin");
} else {
volume = queryAsyncVolumeJobResult(createVolumeCmdResponse.getJobid());
if (volume == null) {
throw new CloudRuntimeException("tsm queryAsync failed , contact elatistor admin");
} else {
if (protocoltype.equalsIgnoreCase("nfs")) {
fileSystem = updateNfsService(volume.getUuid());
} else {
fileSystem = updateIscsiService(volume.getUuid());
}
}
}
return fileSystem;
} catch (Exception e) {
throw new CloudRuntimeException("creating volume failed , contact elatistor admin", e);
}
}
}
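// Illustrative sketch (not part of the original source): creating an NFS volume on an
// existing TSM. All argument values here are assumed examples.
//
//   FileSystem fs = ElastistorUtil.createElastistorVolume("VOL1", tsm.getUuid(), 107374182400L, 1000L, "nfs", "/VOL1");
//   s_logger.info("created volume at path " + fs.getPath());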
public static FileSystem updateNfsService(String volumeid) throws Throwable {
FileSystem fileSystem = null;
String datasetid = updateElastistorNfsVolume(volumeid);
if (datasetid == null) {
throw new CloudRuntimeException("Updating Nfs Volume Failed");
} else {
fileSystem = listVolume(datasetid);
if (fileSystem == null) {
throw new CloudRuntimeException("Volume Creation failed : List Filesystem failed");
}
}
return fileSystem;
}
public static FileSystem updateIscsiService(String volumeid) throws Throwable {
Volumeiscsioptions volumeiscsioptions = null;
FileSystem fileSystem = null;
String accountId;
fileSystem = listVolume(volumeid);
accountId = fileSystem.getAccountid();
volumeiscsioptions = updateElastistorIscsiVolume(volumeid, accountId);
if (volumeiscsioptions == null) {
throw new CloudRuntimeException("Updating Iscsi Volume Failed");
} else {
fileSystem = listVolume(volumeiscsioptions.getVolumeid());
if (fileSystem == null) {
throw new CloudRuntimeException("Volume Creation failed : List Filesystem failed");
}
}
return fileSystem;
}
public static String updateElastistorNfsVolume(String volumeid) throws Throwable {
NfsServiceCmd nfsServiceCmd = new NfsServiceCmd();
nfsServiceCmd.putCommandParameter("datasetid", volumeid);
nfsServiceCmd.putCommandParameter("authnetwork", "all");
nfsServiceCmd.putCommandParameter("managedstate", "true");
nfsServiceCmd.putCommandParameter("alldirs", "yes");
nfsServiceCmd.putCommandParameter("mapuserstoroot", "yes");
nfsServiceCmd.putCommandParameter("readonly", "no");
NfsServiceResponse nfsServiceResponse = (NfsServiceResponse) getElastistorRestClient().executeCommand(nfsServiceCmd);
if (nfsServiceResponse.getNfsService().getUuid() != null) {
UpdateControllerCmd controllerCmd = new UpdateControllerCmd();
controllerCmd.putCommandParameter("nfsid", nfsServiceResponse.getNfsService().getUuid());
controllerCmd.putCommandParameter("type", "configurenfs");
controllerCmd.putCommandParameter("id", nfsServiceResponse.getNfsService().getControllerid());
UpdateControllerResponse controllerResponse = (UpdateControllerResponse) getElastistorRestClient().executeCommand(controllerCmd);
if (controllerResponse.getController().getUuid() != null) {
s_logger.info("updated nfs service to ALL");
return nfsServiceResponse.getNfsService().getDatasetid();
} else {
throw new CloudRuntimeException("Updating Nfs Volume Failed");
}
}
return null;
}
public static Volumeiscsioptions updateElastistorIscsiVolume(String volumeid, String accountid) throws Throwable {
// now listing the iscsi volume service group to get iscsi id
ListVolumeiSCSIServiceCmd listVolumeiSCSIServiceCmd = new ListVolumeiSCSIServiceCmd();
if (null != volumeid)
listVolumeiSCSIServiceCmd.putCommandParameter(ElastistorUtil.REST_PARAM_STORAGEID, volumeid);
ListVolumeiSCSIServiceResponse volumeiSCSIServiceResponse = (ListVolumeiSCSIServiceResponse) getElastistorRestClient().executeCommand(listVolumeiSCSIServiceCmd);
String iscsiId = volumeiSCSIServiceResponse.getIscsiVolume().getIscsiVolume(0).getUuid();
String AG_Id = volumeiSCSIServiceResponse.getIscsiVolume().getIscsiVolume(0).getAg_id();
// now listing the initiator group to get initiator id
ListiSCSIInitiatorCmd initiatorCmd = new ListiSCSIInitiatorCmd();
if (null != volumeid)
initiatorCmd.putCommandParameter(ElastistorUtil.REST_PARAM_ACCOUNTID, accountid);
ListiSCSIInitiatorResponse initiatorResponse = (ListiSCSIInitiatorResponse) getElastistorRestClient().executeCommand(initiatorCmd);
String IG_Id;
if (initiatorResponse.getIInitiator().getInterface(0).getInitiatorgroup().equalsIgnoreCase("ALL")) {
IG_Id = initiatorResponse.getIInitiator().getInterface(0).getUuid();
} else {
IG_Id = initiatorResponse.getIInitiator().getInterface(1).getUuid();
}
if (iscsiId != null) {
UpdateVolumeiSCSIServiceCmd updateVolumeiSCSIServiceCmd = new UpdateVolumeiSCSIServiceCmd();
if (null != volumeid)
updateVolumeiSCSIServiceCmd.putCommandParameter(ElastistorUtil.REST_PARAM_ID, iscsiId);
if (null != volumeid)
updateVolumeiSCSIServiceCmd.putCommandParameter("status", "true");
if (null != volumeid)
updateVolumeiSCSIServiceCmd.putCommandParameter("igid", IG_Id);
if (null != volumeid)
updateVolumeiSCSIServiceCmd.putCommandParameter("authgroupid", AG_Id);
if (null != volumeid)
updateVolumeiSCSIServiceCmd.putCommandParameter("initialdigest", "Auto");
if (null != volumeid)
updateVolumeiSCSIServiceCmd.putCommandParameter("queuedepth", "32");
UpdateVolumeiSCSIServiceCmdResponse cmdResponse = (UpdateVolumeiSCSIServiceCmdResponse) getElastistorRestClient().executeCommand(updateVolumeiSCSIServiceCmd);
if (cmdResponse.getVolumeiscsioptions().getVolumeid() == null) {
throw new CloudRuntimeException("Updating Iscsi Volume Failed");
}
return cmdResponse.getVolumeiscsioptions();
}
return null;
}
/**
* This deletes both the volume and the tsm in elastistor.
*/
public static boolean deleteElastistorTsm(String tsmid, boolean managed) throws Throwable {
if (!managed) {
s_logger.info("elastistor pool is NOT a managed storage , hence deleting the volume then tsm");
String esvolumeid = null;
ListTsmsResponse listTsmsResponse = listTsm(tsmid);
if (listTsmsResponse.getTsmsCount() != 0) {
if (listTsmsResponse.getTsms().getTsm(0).checkvolume()) {
esvolumeid = listTsmsResponse.getTsms().getTsm(0).getVolumeProperties(0).getid();
DeleteVolumeResponse deleteVolumeResponse = deleteVolume(esvolumeid, null);
if (deleteVolumeResponse != null) {
String jobid = deleteVolumeResponse.getJobId();
int jobstatus = queryAsyncJobResult(jobid);
if (jobstatus == 1) {
s_logger.info("elastistor volume successfully deleted");
} else {
s_logger.info("now farce deleting the volume");
while (jobstatus != 1) {
DeleteVolumeResponse deleteVolumeResponse1 = deleteVolume(esvolumeid, "true");
if (deleteVolumeResponse1 != null) {
String jobid1 = deleteVolumeResponse1.getJobId();
jobstatus = queryAsyncJobResult(jobid1);
}
}
s_logger.info("elastistor volume successfully deleted");
}
}
} else {
s_logger.info("no volume present in on the given tsm");
}
}
}
s_logger.info("now trying to delete elastistor tsm");
if (tsmid != null) {
DeleteTsmCmd deleteTsmCmd = new DeleteTsmCmd();
deleteTsmCmd.putCommandParameter(ElastistorUtil.REST_PARAM_ID, tsmid);
DeleteTsmResponse deleteTsmResponse = (DeleteTsmResponse) getElastistorRestClient().executeCommand(deleteTsmCmd);
if (deleteTsmResponse != null) {
String jobstatus = deleteTsmResponse.getJobStatus();
if (jobstatus.equalsIgnoreCase("true")) {
s_logger.info("deletion of elastistor tsm successful");
return true;
} else {
s_logger.info("failed to delete elastistor tsm");
return false;
}
} else {
s_logger.info("elastistor tsm id not present");
}
}
s_logger.info("tsm id is null");
return false;
/*
* else { s_logger.error("no volume is present in the tsm"); } } else {
* s_logger.error(
* "List tsm failed, no tsm present in the eastistor for the given IP "
* ); return false; } return false;
*/
}
public static boolean deleteElastistorVolume(String esvolumeid) throws Throwable {
FileSystem fileSystem = listVolume(esvolumeid);
if (fileSystem != null) {
DeleteVolumeResponse deleteVolumeResponse = deleteVolume(esvolumeid, null);
if (deleteVolumeResponse != null) {
String jobid = deleteVolumeResponse.getJobId();
int jobstatus = queryAsyncJobResult(jobid);
if (jobstatus == 1) {
s_logger.info("elastistor volume successfully deleted");
return true;
} else {
s_logger.info("now force deleting the volume");
while (jobstatus != 1) {
DeleteVolumeResponse deleteVolumeResponse1 = deleteVolume(esvolumeid, "true");
if (deleteVolumeResponse1 != null) {
String jobid1 = deleteVolumeResponse1.getJobId();
jobstatus = queryAsyncJobResult(jobid1);
}
}
s_logger.info("elastistor volume successfully deleted");
return true;
}
} else {
s_logger.info("the given volume is not present on elastistor, datasetrespone is NULL");
return false;
}
} else {
s_logger.info("the given volume is not present on elastistor");
return false;
}
}
/**
* This gives a JSON response containing the list of interfaces in
* elastistor.
*/
public static ListInterfacesResponse ListElastistorInterfaces(String controllerid) throws Throwable {
ListInterfacesCmd listInterfacesCmd = new ListInterfacesCmd();
listInterfacesCmd.putCommandParameter("controllerid", controllerid);
ListInterfacesResponse interfacesResponse = (ListInterfacesResponse) getElastistorRestClient().executeCommand(listInterfacesCmd);
if (interfacesResponse != null && interfacesResponse.getInterfaces() != null) {
return interfacesResponse;
} else {
throw new CloudRuntimeException("There are no elastistor interfaces.");
}
}
/**
* This creates a new account in elastistor with the given domain name.
*/
public static CreateAccountResponse createElastistorAccount(String domainName) throws Throwable {
CreateAccountCmd createAccountCmd = new CreateAccountCmd();
createAccountCmd.putCommandParameter("name", domainName);
CreateAccountResponse createAccountResponse = (CreateAccountResponse) getElastistorRestClient().executeCommand(createAccountCmd);
if (createAccountResponse != null) {
return createAccountResponse;
} else {
throw new CloudRuntimeException("Creating Elastistor Account failed");
}
}
/**
* This gives a JSON response containing the list of accounts in
* elastistor.
*/
public static ListAccountResponse ListElastistorAccounts() throws Throwable {
ListAccountsCmd listAccountsCmd = new ListAccountsCmd();
ListAccountResponse accountResponse = (ListAccountResponse) getElastistorRestClient().executeCommand(listAccountsCmd);
if (accountResponse != null) {
return accountResponse;
} else {
throw new CloudRuntimeException("List Elastistor Account failed");
}
}
/**
* This gives a JSON response containing the list of pools in elastistor.
*/
public static ListPoolsResponse ListElastistorPools() throws Throwable {
ListPoolsCmd listPoolsCmd = new ListPoolsCmd();
ListPoolsResponse listPoolsResponse = (ListPoolsResponse) getElastistorRestClient().executeCommand(listPoolsCmd);
if (listPoolsResponse != null) {
return listPoolsResponse;
} else {
throw new CloudRuntimeException("List Elastistor pool failed");
}
}
/**
* This gives a JSON response containing the list of tsms in elastistor.
*/
private static ListTsmsResponse listTsm(String uuid) throws Throwable {
ListTsmCmd listTsmCmd = new ListTsmCmd();
listTsmCmd.putCommandParameter(ElastistorUtil.REST_PARAM_ID, uuid);
ListTsmsResponse listTsmsResponse = (ListTsmsResponse) getElastistorRestClient().executeCommand(listTsmCmd);
return listTsmsResponse;
}
/**
* This gives a JSON response containing the list of volumes in elastistor.
*/
public static FileSystem listVolume(String uuid) throws Throwable {
ListFileSystemCmd listFileSystemCmd = new ListFileSystemCmd();
listFileSystemCmd.putCommandParameter(ElastistorUtil.REST_PARAM_ID, uuid);
ListFileSystemResponse listFileSystemResponse = (ListFileSystemResponse) getElastistorRestClient().executeCommand(listFileSystemCmd);
return listFileSystemResponse.getFilesystems().getFileSystem(0);
}
private static DeleteVolumeResponse deleteVolume(String esvolumeid, String forcedelete) throws Throwable {
DeleteVolumeCmd deleteVolumeCmd = new DeleteVolumeCmd();
deleteVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_ID, esvolumeid);
deleteVolumeCmd.putCommandParameter(ElastistorUtil.REST_PARAM_FORECEDELETE, forcedelete);
DeleteVolumeResponse deleteVolumeResponse = (DeleteVolumeResponse) getElastistorRestClient().executeCommand(deleteVolumeCmd);
return deleteVolumeResponse;
}
private static int queryAsyncJobResult(String jobid) throws Throwable {
QueryAsyncJobResultCmd asyncJobResultCmd = new QueryAsyncJobResultCmd();
ElastiCenterClient restclient = getElastistorRestClient();
asyncJobResultCmd.putCommandParameter(ElastistorUtil.REST_PARAM_JOBID, jobid);
QueryAsyncJobResultResponse asyncJobResultResponse = (QueryAsyncJobResultResponse) restclient.executeCommand(asyncJobResultCmd);
if (asyncJobResultResponse != null) {
int jobstatus = asyncJobResultResponse.getAsync().getJobStatus();
while (jobstatus == 0) {
QueryAsyncJobResultResponse jobResultResponse = (QueryAsyncJobResultResponse) restclient.executeCommand(asyncJobResultCmd);
jobstatus = jobResultResponse.getAsync().getJobStatus();
}
return jobstatus;
}
return 0;
}
private static Tsm queryAsyncTsmJobResult(String jobid) throws Throwable {
QueryAsyncJobResultCmd asyncJobResultCmd = new QueryAsyncJobResultCmd();
ElastiCenterClient restclient = getElastistorRestClient();
asyncJobResultCmd.putCommandParameter(ElastistorUtil.REST_PARAM_JOBID, jobid);
QueryAsyncJobResultResponse asyncJobResultResponse = (QueryAsyncJobResultResponse) restclient.executeCommand(asyncJobResultCmd);
if (asyncJobResultResponse != null) {
int jobstatus = asyncJobResultResponse.getAsync().getJobStatus();
Tsm tsm = null;
while (jobstatus == 0) {
asyncJobResultResponse = (QueryAsyncJobResultResponse) restclient.executeCommand(asyncJobResultCmd);
jobstatus = asyncJobResultResponse.getAsync().getJobStatus();
}
if (jobstatus == 1) {
tsm = asyncJobResultResponse.getAsync().getJobResult().getTsm();
return tsm;
}
}
return null;
}
private static FileSystem queryAsyncVolumeJobResult(String jobid) throws Throwable {
QueryAsyncJobResultCmd asyncJobResultCmd = new QueryAsyncJobResultCmd();
ElastiCenterClient restclient = getElastistorRestClient();
asyncJobResultCmd.putCommandParameter(ElastistorUtil.REST_PARAM_JOBID, jobid);
QueryAsyncJobResultResponse asyncJobResultResponse = (QueryAsyncJobResultResponse) restclient.executeCommand(asyncJobResultCmd);
if (asyncJobResultResponse != null) {
int jobstatus = asyncJobResultResponse.getAsync().getJobStatus();
FileSystem volume = null;
while (jobstatus == 0) {
asyncJobResultResponse = (QueryAsyncJobResultResponse) restclient.executeCommand(asyncJobResultCmd);
jobstatus = asyncJobResultResponse.getAsync().getJobStatus();
}
if (jobstatus == 1) {
volume = asyncJobResultResponse.getAsync().getJobResult().getVolume();
return volume;
}
}
return null;
}
/**
* This method converts the long capacityBytes to the string format expected by
* the elastistor REST API, e.g. 214748364800 bytes = "200G".
*/
private static String convertCapacityBytes(Long capacityBytes) {
if ((1099511627776L) > capacityBytes && (capacityBytes > (1073741824))) {
return (String.valueOf(capacityBytes / (1024 * 1024 * 1024)) + "G");
} else {
int temp1 = (int) (capacityBytes / (1024 * 1024 * 1024));
int temp2 = temp1 / 1024;
return (String.valueOf(temp2) + "T");
}
}
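// Worked example (added for illustration, not part of the original source): for
// capacityBytes = 214748364800 the value lies between 1 GiB and 1 TiB, so the first
// branch returns 214748364800 / 2^30 = 200 -> "200G". For capacityBytes =
// 2199023255552 (2 TiB) the else branch computes temp1 = 2048, temp2 = 2 -> "2T".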
static interface ElastiCenterCommand {
/*
* Returns the command string to be sent to the ElastiCenter
*/
public String getCommandName();
/*
* Utility method to allow the client to validate the input parameters
* before sending to the ElastiCenter.
*
* This command will be executed by the ElastiCenterClient only if this
* method returns true.
*/
public boolean validate();
/*
* Returns the query parameters that have to be passed to execute the
* command.
*
* Returns null if there are no query parameters associated with the
* command.
*/
public MultivaluedMap<String, String> getCommandParameters();
/*
* Adds a new key-value pair to the query parameter list.
*/
public void putCommandParameter(String key, String value);
/*
* Return an instance of the Response Object Type.
*
* Return null if no response is expected.
*/
public Object getResponseObject();
}
private static class BaseCommand implements ElastiCenterCommand {
private String commandName = null;
private MultivaluedMap<String, String> commandParameters = null;
private Object responseObject = null;
/*
* Enforces that commands are initialized with a command name and an optional
* response object.
*/
protected BaseCommand(String cmdName, Object responseObj) {
commandName = cmdName;
responseObject = responseObj;
}
@Override
public String getCommandName() {
return commandName;
}
@Override
public boolean validate() {
// TODO This method can be extended to do some generic
// validations.
return true;
}
@Override
public MultivaluedMap<String, String> getCommandParameters() {
return commandParameters;
}
@Override
public void putCommandParameter(String key, String value) {
if (null == commandParameters) {
commandParameters = new MultivaluedMapImpl();
}
commandParameters.add(key, value);
}
@Override
public Object getResponseObject() {
return responseObject;
}
}
/**
* This is a REST client used to make HTTP REST calls to elastistor.
*
* @author punith
*
*/
private static final class ElastiCenterClient {
public static boolean debug = false;
private boolean initialized = false;
private String apiKey = null;
private String elastiCenterAddress = null;
private String responseType = "json";
private boolean ignoreSSLCertificate = false;
private String restprotocol = "https://";
private String restpath = "/client/api";
private String restdefaultcommand = "listCapabilities";
private String queryparamcommand = "command";
private String queryparamapikey = "apikey";
private String queryparamresponse = "response";
public ElastiCenterClient(String address, String key) throws InvalidCredentialsException, InvalidParameterException, SSLHandshakeException, ServiceUnavailableException {
elastiCenterAddress = address;
apiKey = key;
initialize();
}
public void initialize() throws InvalidParameterException, SSLHandshakeException, InvalidCredentialsException, ServiceUnavailableException {
if (apiKey == null || apiKey.trim().isEmpty()) {
throw new InvalidParameterException("Unable to initialize. Please specify a valid API Key.");
}
if (elastiCenterAddress == null || elastiCenterAddress.trim().isEmpty()) {
// TODO : Validate the format, like valid IP address or
// hostname.
throw new InvalidParameterException("Unable to initialize. Please specify a valid ElastiCenter IP Address or Hostname.");
}
if (ignoreSSLCertificate) {
// Create a trust manager that does not validate certificate
// chains
TrustManager[] trustAllCerts = new TrustManager[] { new X509TrustManager() {
@Override
public X509Certificate[] getAcceptedIssuers() {
return null;
}
@Override
public void checkClientTrusted(X509Certificate[] certs, String authType) {
}
@Override
public void checkServerTrusted(X509Certificate[] certs, String authType) {
}
} };
HostnameVerifier hv = new HostnameVerifier() {
@Override
public boolean verify(String urlHostName, SSLSession session) {
return true;
}
};
// Install the all-trusting trust manager
try {
SSLContext sc = SSLUtils.getSSLContext();
sc.init(null, trustAllCerts, new SecureRandom());
HttpsURLConnection.setDefaultSSLSocketFactory(new SecureSSLSocketFactory(sc));
HttpsURLConnection.setDefaultHostnameVerifier(hv);
} catch (Exception e) {
;
}
}
ListCapabilitiesResponse listCapabilitiesResponse = null;
try {
initialized = true;
listCapabilitiesResponse = (ListCapabilitiesResponse) executeCommand(restdefaultcommand, null, new ListCapabilitiesResponse());
} catch (Throwable t) {
initialized = false;
if (t instanceof InvalidCredentialsException) {
throw (InvalidCredentialsException) t;
} else if (t instanceof ServiceUnavailableException) {
throw (ServiceUnavailableException) t;
} else if (t.getCause() instanceof SSLHandshakeException) {
throw new SSLHandshakeException("Unable to initialize. An untrusted SSL Certificate was received from " + elastiCenterAddress
+ ". Please verify your truststore or configure ElastiCenterClient to skip the SSL Validation. ");
} else if (t.getCause() instanceof ConnectException) {
throw new ServiceUnavailableException("Unable to initialize. Failed to connect to " + elastiCenterAddress
+ ". Please verify the IP Address, Network Connectivity and ensure that Services are running on the ElastiCenter Server. ");
}
throw new ServiceUnavailableException("Unable to initialize. Please contact your ElastiCenter Administrator. Exception " + t.getMessage());
}
if (null == listCapabilitiesResponse || null == listCapabilitiesResponse.getCapabilities() || null == listCapabilitiesResponse.getCapabilities().getVersion()) {
initialized = false;
throw new ServiceUnavailableException("Unable to execute command on the server");
}
}
public Object executeCommand(ElastiCenterCommand cmd) throws Throwable {
return executeCommand(cmd.getCommandName(), cmd.getCommandParameters(), cmd.getResponseObject());
}
public Object executeCommand(String command, MultivaluedMap<String, String> params, Object responseObj) throws Throwable {
if (!initialized) {
throw new IllegalStateException("Error : ElastiCenterClient is not initialized.");
}
if (command == null || command.trim().isEmpty()) {
throw new InvalidParameterException("No command to execute.");
}
try {
ClientConfig config = new DefaultClientConfig();
Client client = Client.create(config);
WebResource webResource = client.resource(UriBuilder.fromUri(restprotocol + elastiCenterAddress + restpath).build());
MultivaluedMap<String, String> queryParams = new MultivaluedMapImpl();
queryParams.add(queryparamapikey, apiKey);
queryParams.add(queryparamresponse, responseType);
queryParams.add(queryparamcommand, command);
if (null != params) {
for (String key : params.keySet()) {
queryParams.add(key, params.getFirst(key));
}
}
if (debug) {
System.out.println("Command Sent " + command + " : " + queryParams);
}
ClientResponse response = webResource.queryParams(queryParams).accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
if (response.getStatus() >= 300) {
if (debug)
System.out.println("ElastiCenter returned error code : " + response.getStatus());
if (401 == response.getStatus()) {
throw new InvalidCredentialsException("Please specify a valid API Key.");
} else if (431 == response.getStatus()) {
throw new InvalidParameterException(response.getHeaders().getFirst("X-Description"));
} else if (432 == response.getStatus()) {
throw new InvalidParameterException(command + " does not exist on the ElastiCenter server. Please specify a valid command or contact your ElastiCenter Administrator.");
} else {
throw new ServiceUnavailableException("Internal Error. Please contact your ElastiCenter Administrator.");
}
} else if (null != responseObj) {
String jsonResponse = response.getEntity(String.class);
if (debug) {
System.out.println("Command Response : " + jsonResponse);
}
Gson gson = new Gson();
return gson.fromJson(jsonResponse, responseObj.getClass());
} else {
return "Success";
}
} catch (Throwable t) {
throw t;
}
}
}
/**
* These are the elastistor REST commands invoked from the
* plugin.
*/
private static final class CreateTsmCmd extends BaseCommand {
public CreateTsmCmd() {
super("createTsm", new CreateTsmCmdResponse());
}
}
private static final class AddQosGroupCmd extends BaseCommand {
public AddQosGroupCmd() {
super("addQosGroup", new AddQosGroupCmdResponse());
}
}
private static final class CreateVolumeCmd extends BaseCommand {
public CreateVolumeCmd() {
super("createVolume", new CreateVolumeCmdResponse());
}
}
private static final class ListVolumeiSCSIServiceCmd extends BaseCommand {
public ListVolumeiSCSIServiceCmd() {
super("listVolumeiSCSIService", new ListVolumeiSCSIServiceResponse());
}
}
private static final class ListiSCSIInitiatorCmd extends BaseCommand {
public ListiSCSIInitiatorCmd() {
super("listiSCSIInitiator", new ListiSCSIInitiatorResponse());
}
}
private static final class NfsServiceCmd extends BaseCommand {
public NfsServiceCmd() {
super("nfsService", new NfsServiceResponse());
}
}
private static final class UpdateControllerCmd extends BaseCommand {
public UpdateControllerCmd() {
super("updateController", new UpdateControllerResponse());
}
}
private static final class UpdateVolumeiSCSIServiceCmd extends BaseCommand {
public UpdateVolumeiSCSIServiceCmd() {
super("updateVolumeiSCSIService", new UpdateVolumeiSCSIServiceCmdResponse());
}
}
private static final class DeleteTsmCmd extends BaseCommand {
public DeleteTsmCmd() {
super("deleteTsm", new DeleteTsmResponse());
}
}
private static final class DeleteVolumeCmd extends BaseCommand {
public DeleteVolumeCmd() {
super("deleteFileSystem", new DeleteVolumeResponse());
}
}
private static final class QueryAsyncJobResultCmd extends BaseCommand {
public QueryAsyncJobResultCmd() {
super("queryAsyncJobResult", new QueryAsyncJobResultResponse());
}
}
private static final class ListTsmCmd extends BaseCommand {
public ListTsmCmd() {
super("listTsm", new ListTsmsResponse());
}
}
private static final class ListFileSystemCmd extends BaseCommand {
public ListFileSystemCmd() {
super("listFileSystem", new ListFileSystemResponse());
}
}
private static final class ListAccountsCmd extends BaseCommand {
public ListAccountsCmd() {
super("listAccount", new ListAccountResponse());
}
}
private static final class CreateAccountCmd extends BaseCommand {
public CreateAccountCmd() {
super("createAccount", new CreateAccountResponse());
}
}
private static final class ListInterfacesCmd extends BaseCommand {
public ListInterfacesCmd() {
super("listSharedNICs", new ListInterfacesResponse());
}
}
private static final class ListPoolsCmd extends BaseCommand {
public ListPoolsCmd() {
super("listHAPool", new ListPoolsResponse());
}
}
/**
* These are the elastistor REST JSON response classes used for parsing
* the JSON responses sent by elastistor.
*
*/
public static final class CreateTsmCmdResponse {
@SerializedName("addTsmResponse")
private JobId jobId;
public String getJobid() {
return jobId.getJobid();
}
public String getJobStatus() {
return jobId.getJobStatus();
}
@SerializedName("createTsmResponse")
private TsmWrapper tsmWrapper;
public Tsm getTsm() {
return tsmWrapper.getTsm();
}
}
public static final class Tsm {
@SerializedName("id")
private String uuid;
@SerializedName("name")
private String name;
@SerializedName("datasetid")
private String datasetid;
@SerializedName("ipaddress")
private String ipaddress;
@SerializedName("volumes")
private VolumeProperties[] volumeProperties;
public String getUuid() {
return uuid;
}
public String getName() {
return name;
}
public String getIpaddress() {
return ipaddress;
}
public String getDatasetid() {
return datasetid;
}
public boolean checkvolume() {
if (volumeProperties != null) {
return true;
} else {
return false;
}
}
public VolumeProperties getVolumeProperties(int i) {
return volumeProperties[i];
}
}
public static final class VolumeProperties {
@SerializedName("id")
private String id;
@SerializedName("groupid")
private String groupid;
@SerializedName("iops")
private String iops;
@SerializedName("name")
private String name;
public String getid() {
return id;
}
public String getQosgroupid() {
return groupid;
}
public String getName() {
return name;
}
public String getIops() {
return iops;
}
}
public static final class TsmWrapper {
@SerializedName("tsm")
private Tsm tsm;
public Tsm getTsm() {
return tsm;
}
}
public static final class AddQosGroupCmdResponse {
@SerializedName("addqosgroupresponse")
private QoSGroupWrapper qosGroupWrapper;
public QoSGroup getQoSGroup() {
return qosGroupWrapper.getQosGroup();
}
}
public static final class QoSGroupWrapper {
@SerializedName("qosgroup")
private QoSGroup qoSGroup;
public QoSGroup getQosGroup() {
return qoSGroup;
}
}
public static final class QoSGroup {
@SerializedName("id")
private String uuid;
@SerializedName("name")
private String name;
@SerializedName("qosgroupproperties")
private HashMap<String, String> qosGroupProperties;
public String getName() {
return name;
}
public String getUuid() {
return uuid;
}
public String getIops() {
return qosGroupProperties.get("iops");
}
public String getThroughput() {
return qosGroupProperties.get("throughput");
}
public String getLatency() {
return qosGroupProperties.get("latency");
}
}
public static final class UpdateVolumeiSCSIServiceCmdResponse {
@SerializedName("updatingvolumeiscsidetails")
private VolumeiSCSIServiceWrapper volumeiSCSIServiceWrapper;
public Volumeiscsioptions getVolumeiscsioptions() {
return volumeiSCSIServiceWrapper.getVolumeiscsioptions();
}
}
public static final class VolumeiSCSIServiceWrapper {
@SerializedName("viscsioptions")
private Volumeiscsioptions viscsioptions;
public Volumeiscsioptions getVolumeiscsioptions() {
return viscsioptions;
}
}
public static final class Volumeiscsioptions {
@SerializedName("id")
private String uuid;
@SerializedName("volume_id")
private String volumeid;
@SerializedName("iqnname")
private String iqnname;
public String getUuid() {
return uuid;
}
public String getVolumeid() {
return volumeid;
}
public String getIqn() {
return iqnname;
}
}
public static final class NfsServiceResponse {
@SerializedName("nfsserviceprotocolresponse")
private NfsServiceWrapper nfsServiceWrapper;
public NfsService getNfsService() {
return nfsServiceWrapper.getNfsservice();
}
}
public static final class NfsServiceWrapper {
@SerializedName("nfs")
private NfsService nfsService;
public NfsService getNfsservice() {
return nfsService;
}
}
public static final class NfsService {
@SerializedName("id")
private String uuid;
@SerializedName("STORAGEID")
private String datasetid;
@SerializedName("controllerid")
private String controllerid;
@SerializedName("authnetwork")
private String authnetwork;
public String getUuid() {
return uuid;
}
public String getDatasetid() {
return datasetid;
}
public String getControllerid() {
return controllerid;
}
public String getAuthnetwork() {
return authnetwork;
}
}
public static final class UpdateControllerResponse {
@SerializedName("updateControllerResponse")
private UpdateControllerWrapper controllerWrapper;
public Controller getController() {
return controllerWrapper.getController();
}
}
public static final class UpdateControllerWrapper {
@SerializedName("controller")
private Controller controller;
public Controller getController() {
return controller;
}
}
public static final class Controller {
@SerializedName("id")
private String uuid;
public String getUuid() {
return uuid;
}
}
public static final class CreateVolumeCmdResponse {
@SerializedName("createvolumeresponse")
private JobId jobId;
public String getJobid() {
return jobId.getJobid();
}
@SerializedName("adddatasetresponse")
private FileSystemWrapper fileSystemWrapper;
public FileSystem getFileSystem() {
return fileSystemWrapper.getFileSystem();
}
}
public static final class FileSystemWrapper {
@SerializedName("filesystem")
private FileSystem fileSystem;
public FileSystem getFileSystem() {
return fileSystem;
}
}
public static final class FileSystem {
@SerializedName("id")
private String uuid;
@SerializedName("name")
private String name;
@SerializedName("quota")
private String quota;
@SerializedName("accountid")
private String accountid;
@SerializedName("iqnname")
private String iqnname;
@SerializedName("nfsenabled")
private String nfsenabled;
@SerializedName("iscsienabled")
private String iscsienabled;
@SerializedName("path")
private String path;
@SerializedName("groupid")
private String groupid;
@SerializedName("compression")
private String compression;
@SerializedName("sync")
private String sync;
@SerializedName("deduplication")
private String deduplication;
@SerializedName("graceallowed")
private String graceallowed;
public String getCompression() {
return compression;
}
public String getSync() {
return sync;
}
public String getDeduplication() {
return deduplication;
}
public String getGraceallowed() {
return graceallowed;
}
public String getUuid() {
return uuid;
}
public String getQosGroupid() {
return groupid;
}
public String getName() {
return name;
}
public String getNfsenabled() {
return nfsenabled;
}
public String getIscsienabled() {
return iscsienabled;
}
public String getPath() {
return path;
}
public String getIqn() {
return iqnname;
}
public String getQuota() {
return quota;
}
public String getAccountid() {
return accountid;
}
}
public static final class DeleteTsmResponse {
@SerializedName("deleteTsmResponse")
private JobId jobId;
public String getJobStatus() {
return jobId.getJobStatus();
}
}
public static final class JobId {
@SerializedName("jobid")
private String jobid;
@SerializedName("success")
private String jobStatus;
@SerializedName("jobresult")
private JobResult jobresult;
@SerializedName("tsm")
private Tsm tsm;
@SerializedName("storage")
private FileSystem volume;
public Tsm getTsm() {
return tsm;
}
public FileSystem getVolume() {
return volume;
}
public JobResult getJobResult() {
return jobresult;
}
public String getJobid() {
return jobid;
}
public String getJobStatus() {
return jobStatus;
}
}
public static final class JobResult {
@SerializedName("tsm")
private Tsm tsm;
@SerializedName("storage")
private FileSystem volume;
public Tsm getTsm() {
return tsm;
}
public FileSystem getVolume() {
return volume;
}
}
public static final class DeleteVolumeResponse {
@SerializedName("deleteFileSystemResponse")
private JobId jobId;
public String getJobId() {
return jobId.getJobid();
}
}
public static final class ListCapabilitiesResponse {
@SerializedName("listcapabilitiesresponse")
private Capabilities capabilities;
public Capabilities getCapabilities() {
return capabilities;
}
}
public static final class ListFileSystemResponse {
@SerializedName("listFilesystemResponse")
private Filesystems filesystems;
public int getFilesystemCount() {
return filesystems.getCount();
}
public Filesystems getFilesystems() {
return filesystems;
}
}
public static final class Filesystems {
@SerializedName("count")
private int count;
@SerializedName("filesystem")
private FileSystem[] fileSystems;
public int getCount() {
return count;
}
public FileSystem getFileSystem(int i) {
return fileSystems[i];
}
}
public static final class ListPoolsResponse {
@SerializedName("listHAPoolResponse")
private Pools pools;
public Pools getPools() {
return pools;
}
}
public static final class Pools {
@SerializedName("hapool")
private Pool[] pool;
@SerializedName("count")
private int count;
public Pool getPool(int i) {
return pool[i];
}
public int getCount() {
return count;
}
}
public static final class Pool {
@SerializedName("id")
private String uuid;
@SerializedName("name")
private String name;
@SerializedName("currentAvailableSpace")
private String currentAvailableSpace;
@SerializedName("availIOPS")
private String availIOPS;
@SerializedName("status")
private String state;
@SerializedName("controllerid")
private String controllerid;
@SerializedName("gateway")
private String gateway;
public String getControllerid() {
return controllerid;
}
public String getUuid() {
return uuid;
}
public String getName() {
return name;
}
public String getAvailableSpace() {
return currentAvailableSpace;
}
public String getAvailIOPS() {
return availIOPS;
}
public String getState() {
return state;
}
public String getGateway() {
return gateway;
}
}
public static final class ListInterfacesResponse {
@SerializedName("listSharedNICsResponse")
private Interfaces interfaces;
public Interfaces getInterfaces() {
return interfaces;
}
}
public static final class Interfaces {
@SerializedName("nic")
private Interface[] interfaces;
@SerializedName("count")
private int count;
public Interface getInterface(int i) {
return interfaces[i];
}
public int getCount() {
return count;
}
}
public static final class Interface {
@SerializedName("id")
private String uuid;
@SerializedName("name")
private String name;
@SerializedName("status")
private String status;
public String getUuid() {
return uuid;
}
public String getName() {
return name;
}
public String getStatus() {
return status;
}
}
public static final class ListiSCSIInitiatorResponse {
@SerializedName("listInitiatorsResponse")
private Initiators initiators;
public Initiators getIInitiator() {
return initiators;
}
}
public static final class Initiators {
@SerializedName("initiator")
private Initiator[] initiators;
@SerializedName("count")
private int count;
public Initiator getInterface(int i) {
return initiators[i];
}
public int getCount() {
return count;
}
}
public static final class Initiator {
@SerializedName("id")
private String uuid;
@SerializedName("name")
private String name;
@SerializedName("initiatorgroup")
private String initiatorgroup;
public String getUuid() {
return uuid;
}
public String getName() {
return name;
}
public String getInitiatorgroup() {
return initiatorgroup;
}
}
public static final class ListAccountResponse {
@SerializedName("listAccountResponse")
private Accounts accounts;
public Accounts getAccounts() {
return accounts;
}
}
public static final class Accounts {
@SerializedName("account")
private Account[] Accounts;
@SerializedName("count")
private int count;
public Account getAccount(int i) {
return Accounts[i];
}
public int getCount() {
return count;
}
}
public static final class CreateAccountResponse {
@SerializedName("createaccountresponse")
private Accounts2 accounts;
public Account getAccount() {
return accounts.getAccount();
}
}
public static final class Accounts2 {
@SerializedName("account2")
private Account Account;
@SerializedName("count")
private int count;
public Account getAccount() {
return Account;
}
public int getCount() {
return count;
}
}
public static final class Account {
@SerializedName("id")
private String uuid;
@SerializedName("name")
private String name;
public String getUuid() {
return uuid;
}
public String getName() {
return name;
}
}
public static final class ListTsmsResponse {
@SerializedName("listTsmResponse")
private Tsms tsms;
public int getTsmsCount() {
return tsms.getCount();
}
public Tsms getTsms() {
return tsms;
}
}
public static final class Tsms {
@SerializedName("count")
private int count;
@SerializedName("listTsm")
private Tsm[] tsms;
public int getCount() {
return count;
}
public Tsm getTsm(int i) {
return tsms[i];
}
}
public static final class ListVolumeiSCSIServiceResponse {
@SerializedName("listVolumeiSCSIServiceResponse")
private IscsiVolumeService iscsiVolumes;
public int getVolumeCount() {
return iscsiVolumes.getCount();
}
public IscsiVolumeService getIscsiVolume() {
return iscsiVolumes;
}
}
public static final class IscsiVolumeService {
@SerializedName("count")
private int count;
@SerializedName("iSCSIService")
private IscsiVolume[] iscsiVolumes;
public int getCount() {
return count;
}
public IscsiVolume getIscsiVolume(int i) {
return iscsiVolumes[i];
}
}
public static final class IscsiVolume {
@SerializedName("id")
private String uuid;
@SerializedName("ag_id")
private String agid;
@SerializedName("ig_id")
private String igid;
public String getAg_id() {
return agid;
}
public String getUuid() {
return uuid;
}
public String getIg_id() {
return igid;
}
}
public static final class QueryAsyncJobResultResponse {
@SerializedName("queryasyncjobresultresponse")
private Async async;
public Async getAsync() {
return async;
}
}
public static final class Async {
@SerializedName("jobstatus")
private int jobstatus;
@SerializedName("jobresult")
private JobId jobresult;
@SerializedName("cmd")
private String cmd;
public int getJobStatus() {
return jobstatus;
}
public JobId getJobResult() {
return jobresult;
}
public String getCmd() {
return cmd;
}
}
public static final class Capabilities {
@SerializedName("capability")
private HashMap<String, String> capabilites;
public String getVersion() {
return capabilites.get("cloudByteVersion");
}
}
/*
*
* change Volume IOPS
*/
public static Answer updateElastistorVolumeQosGroup(String volumeId, Long newIOPS, String graceAllowed) throws Throwable {
FileSystem fileSystem = listVolume(volumeId);
String qosid = fileSystem.getQosGroupid();
if (qosid != null) {
UpdateQosGroupCmdResponse qosGroupCmdResponse = updateQosGroupVolume(newIOPS.toString(), qosid, graceAllowed);
if (qosGroupCmdResponse.getQoSGroup(0).uuid != null) {
return new Answer(null, true, null);
} else {
return new Answer(null, false, "Update Qos Group Failed");
}
} else {
return new Answer(null, false, "Qos Group id is NULL");
}
}
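// Illustrative sketch (not part of the original source): a hypothetical caller raising
// a volume's IOPS limit and checking the outcome; esVolumeId and the IOPS value are
// assumed example values.
//
//   Answer answer = ElastistorUtil.updateElastistorVolumeQosGroup(esVolumeId, 2000L, "false");
//   if (!answer.getResult()) {
//       s_logger.warn("elastistor QoS update failed: " + answer.getDetails());
//   }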
private static UpdateQosGroupCmdResponse updateQosGroupVolume(String iops, String qosgroupid, String graceAllowed) throws Throwable {
UpdateQosGroupCmd updateQosGroupCmd = new UpdateQosGroupCmd();
updateQosGroupCmd.putCommandParameter("id", qosgroupid);
updateQosGroupCmd.putCommandParameter("iops", iops);
updateQosGroupCmd.putCommandParameter("graceallowed", graceAllowed);
UpdateQosGroupCmdResponse updateQosGroupCmdResponse = (UpdateQosGroupCmdResponse) getElastistorRestClient().executeCommand(updateQosGroupCmd);
return updateQosGroupCmdResponse;
}
private static final class UpdateQosGroupCmd extends BaseCommand {
public UpdateQosGroupCmd() {
super("updateQosGroup", new UpdateQosGroupCmdResponse());
}
}
public static final class UpdateQosGroupCmdResponse {
@SerializedName("updateqosresponse")
private QoSGroupWrapperChangeVolumeIops qosGroupWrapper;
public QoSGroup getQoSGroup(int i) {
return qosGroupWrapper.getQosGroup(i);
}
}
public static final class QoSGroupWrapperChangeVolumeIops {
@SerializedName("qosgroup")
private QoSGroup qoSGroup[];
public QoSGroup getQosGroup(int i) {
return qoSGroup[i];
}
}
/*
* resize volume
*/
public static Boolean updateElastistorVolumeSize(String volumeId, Long newSize) throws Throwable {
Boolean status = false;
String quotasize = (String.valueOf(newSize / (1024 * 1024 * 1024)) + "G");
UpdateFileSystemCmdResponse fileSystemCmdResponse = updateFileSystem(quotasize, volumeId, null, null, null);
if (fileSystemCmdResponse.getFileSystem(0).uuid != null) {
status = true;
return status;
}
return status;
}
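// Illustrative sketch (not part of the original source): resizing an elastistor volume
// to 100 GiB; the volume id and size are assumed example values.
//
//   Boolean resized = ElastistorUtil.updateElastistorVolumeSize(esVolumeId, 107374182400L);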
public static UpdateFileSystemCmdResponse updateFileSystem(String quotasize, String esvolumeid, String deduplication, String compression, String sync) throws Throwable {
UpdateFileSystemCmd fileSystemCmd = new UpdateFileSystemCmd();
fileSystemCmd.putCommandParameter("id", esvolumeid);
if (null != quotasize)
fileSystemCmd.putCommandParameter("quotasize", quotasize);
if (null != deduplication)
fileSystemCmd.putCommandParameter("deduplication", deduplication);
if (null != compression)
fileSystemCmd.putCommandParameter("compression", compression);
if (null != sync)
fileSystemCmd.putCommandParameter("sync", sync);
UpdateFileSystemCmdResponse fileSystemCmdResponse = (UpdateFileSystemCmdResponse) getElastistorRestClient().executeCommand(fileSystemCmd);
return fileSystemCmdResponse;
}
private static final class UpdateFileSystemCmd extends BaseCommand {
public UpdateFileSystemCmd() {
super("updateFileSystem", new UpdateFileSystemCmdResponse());
}
}
private static final class UpdateFileSystemCmdResponse {
@SerializedName("updatefilesystemresponse")
private UpdateFileSystemWrapper fileSystemWrapper;
public FileSystem getFileSystem(int i) {
return fileSystemWrapper.getFileSystem(i);
}
}
public class UpdateFileSystemWrapper {
@SerializedName("filesystem")
private FileSystem fileSystem[];
public FileSystem getFileSystem(int i) {
return fileSystem[i];
}
}
/*
* create snapshot
*/
public static Answer createElastistorVolumeSnapshot(String volumeId, String snapshotName) throws Throwable{
CreateStorageSnapshotCmd snapshotCmd = new CreateStorageSnapshotCmd();
snapshotCmd.putCommandParameter("id", volumeId);
snapshotCmd.putCommandParameter("name", snapshotName);
CreateStorageSnapshotCmdResponse snapshotCmdResponse = (CreateStorageSnapshotCmdResponse) getElastistorRestClient().executeCommand(snapshotCmd);
if(snapshotCmdResponse.getStorageSnapshot().getId() != null){
return new Answer(null, true, snapshotCmdResponse.getStorageSnapshot().getId());
}else{
return new Answer(null, false, "snapshot failed");
}
}
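// Illustrative sketch (not part of the original source): taking a snapshot of a volume;
// the volume id and snapshot name are assumed example values. On success the Answer
// details carry the new snapshot id.
//
//   Answer snapAnswer = ElastistorUtil.createElastistorVolumeSnapshot(esVolumeId, "snap-001");
//   if (snapAnswer.getResult()) {
//       s_logger.info("created elastistor snapshot " + snapAnswer.getDetails());
//   }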
private static final class CreateStorageSnapshotCmd extends BaseCommand {
public CreateStorageSnapshotCmd() {
super("createStorageSnapshot", new CreateStorageSnapshotCmdResponse() );
}
}
private static final class CreateStorageSnapshotCmdResponse {
@SerializedName("createStorageSnapshotResponse")
private StorageSnapshotWrapper StorageSnapshot;
public StorageSnapshot getStorageSnapshot() {
return StorageSnapshot.getStorageSnapshot();
}
}
public static final class StorageSnapshotWrapper {
@SerializedName("StorageSnapshot")
private StorageSnapshot snapshot;
public StorageSnapshot getStorageSnapshot() {
return snapshot;
}
}
public static final class StorageSnapshot {
@SerializedName("id")
private String uuid;
@SerializedName("name")
private String name;
public String getId(){
return uuid;
}
public String getName(){
return name;
}
}
// update the TSM storage
public static UpdateTsmStorageCmdResponse updateElastistorTsmStorage(String capacityBytes,String uuid) throws Throwable{
Long size = (Long.valueOf(capacityBytes)/(1024 * 1024 * 1024));
String quotasize = null;
if(size > 1024){
quotasize = (String.valueOf(size / 1024) + "T");
}else{
quotasize = String.valueOf(size) + "G";
}
s_logger.info("elastistor tsm storage is updating to " + quotasize);
UpdateTsmStorageCmd updateTsmStorageCmd = new UpdateTsmStorageCmd();
updateTsmStorageCmd.putCommandParameter("id", uuid);
updateTsmStorageCmd.putCommandParameter("quotasize", quotasize);
UpdateTsmStorageCmdResponse updateTsmStorageCmdResponse = (UpdateTsmStorageCmdResponse) getElastistorRestClient().executeCommand(updateTsmStorageCmd);
return updateTsmStorageCmdResponse;
}
private static final class UpdateTsmStorageCmd extends BaseCommand {
public UpdateTsmStorageCmd() {
super("updateStorage", new UpdateTsmStorageCmdResponse());
}
}
public static final class UpdateTsmStorageCmdResponse {
@SerializedName("updatedatasetresponse")
private StorageWrapper storageWrapper;
public Storage getStorage() {
return storageWrapper.getStorage();
}
}
public static final class StorageWrapper {
@SerializedName("storage")
private Storage storage;
public Storage getStorage() {
return storage;
}
}
public static final class Storage {
@SerializedName("id")
private String uuid;
@SerializedName("name")
private String name;
@SerializedName("quota")
private String quota;
public String getId(){
return uuid;
}
public String getName(){
return name;
}
public String getsize(){
return quota;
}
}
// update the TSM IOPS
public static UpdateTsmCmdResponse updateElastistorTsmIOPS(String capacityIOPs,String uuid) throws Throwable{
s_logger.info("elastistor tsm IOPS is updating to " + capacityIOPs);
UpdateTsmCmd updateTsmCmd = new UpdateTsmCmd();
String throughput = String.valueOf(Long.valueOf(capacityIOPs)*4);
updateTsmCmd.putCommandParameter("id", uuid);
updateTsmCmd.putCommandParameter("iops", capacityIOPs);
updateTsmCmd.putCommandParameter("throughput", throughput);
UpdateTsmCmdResponse updateTsmStorageCmdResponse = (UpdateTsmCmdResponse) getElastistorRestClient().executeCommand(updateTsmCmd);
return updateTsmStorageCmdResponse;
}
private static final class UpdateTsmCmd extends BaseCommand {
public UpdateTsmCmd() {
super("updateTsm", new UpdateTsmCmdResponse());
}
}
public static final class UpdateTsmCmdResponse {
@SerializedName("updateTsmResponse")
private UpdateTsmWrapper tsmWrapper;
public Tsm getTsm(int i) {
return tsmWrapper.getTsm(i);
}
}
public static final class UpdateTsmWrapper {
@SerializedName("count")
private int count;
@SerializedName("tsm")
private Tsm[] tsms;
public int getCount() {
return count;
}
public Tsm getTsm(int i) {
return tsms[i];
}
}
}
|
Fix 2 findbugs warnings in ElastistorUtil.java Unnecessary boxing/unboxing of long value
Signed-off-by: Rohit Yadav <[email protected]>
This closes #429
|
plugins/storage/volume/cloudbyte/src/org/apache/cloudstack/storage/datastore/util/ElastistorUtil.java
|
Fix 2 findbugs warnings in ElastistorUtil.java Unnecessary boxing/unboxing of long value
|
<ide><path>plugins/storage/volume/cloudbyte/src/org/apache/cloudstack/storage/datastore/util/ElastistorUtil.java
<ide> // update the TSM storage
<ide> public static UpdateTsmStorageCmdResponse updateElastistorTsmStorage(String capacityBytes,String uuid) throws Throwable{
<ide>
<del> Long size = (Long.valueOf(capacityBytes)/(1024 * 1024 * 1024));
<add> Long size = (Long.parseLong(capacityBytes)/(1024 * 1024 * 1024));
<ide>
<ide> String quotasize = null;
<ide>
<ide>
<ide> s_logger.info("elastistor tsm IOPS is updating to " + capacityIOPs);
<ide> UpdateTsmCmd updateTsmCmd = new UpdateTsmCmd();
<del> String throughput = String.valueOf(Long.valueOf(capacityIOPs)*4);
<add> String throughput = String.valueOf(Long.parseLong(capacityIOPs)*4);
<ide>
<ide> updateTsmCmd.putCommandParameter("id", uuid);
<ide> updateTsmCmd.putCommandParameter("iops", capacityIOPs);
|
|
Java
|
apache-2.0
|
eb4b5337751f72e1f32cc3c4851c98b4b66ece53
| 0 |
apache/giraph,apache/giraph,apache/giraph,apache/giraph,apache/giraph,apache/giraph
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.giraph.utils;
import static org.apache.hadoop.util.ReflectionUtils.newInstance;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.List;
import org.apache.giraph.conf.ImmutableClassesGiraphConfiguration;
import org.apache.giraph.edge.Edge;
import org.apache.giraph.edge.OutEdges;
import org.apache.giraph.factories.ValueFactory;
import org.apache.giraph.graph.Vertex;
import org.apache.giraph.zk.ZooKeeperExt;
import org.apache.giraph.zk.ZooKeeperExt.PathStat;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.ZooDefs.Ids;
import org.apache.zookeeper.data.Stat;
/**
* Helper static methods for working with Writable objects.
*/
public class WritableUtils {
/**
* Don't construct.
*/
private WritableUtils() { }
/**
* Instantiate a new Writable, checking for NullWritable along the way.
*
* @param klass Class
* @param <W> type
* @return new instance of class
*/
public static <W extends Writable> W createWritable(Class<W> klass) {
return createWritable(klass, null);
}
/**
* Instantiate a new Writable, checking for NullWritable along the way.
*
* @param klass Class
* @param configuration Configuration
* @param <W> type
* @return new instance of class
*/
public static <W extends Writable> W createWritable(
Class<W> klass,
ImmutableClassesGiraphConfiguration configuration) {
W result;
if (NullWritable.class.equals(klass)) {
result = (W) NullWritable.get();
} else {
result = ReflectionUtils.newInstance(klass);
}
ConfigurationUtils.configureIfPossible(result, configuration);
return result;
}
/**
* Read fields from byteArray to a Writable object.
*
* @param byteArray Byte array to find the fields in.
* @param writableObjects Objects to fill in the fields.
*/
public static void readFieldsFromByteArray(
byte[] byteArray, Writable... writableObjects) {
DataInputStream inputStream =
new DataInputStream(new ByteArrayInputStream(byteArray));
try {
for (Writable writableObject : writableObjects) {
writableObject.readFields(inputStream);
}
} catch (IOException e) {
throw new IllegalStateException(
"readFieldsFromByteArray: IOException", e);
}
}
/**
* Read fields from a ZooKeeper znode.
*
* @param zkExt ZooKeeper instance.
* @param zkPath Path of znode.
* @param watch Add a watch?
* @param stat Stat of znode if desired.
* @param writableObjects Objects to read into.
*/
public static void readFieldsFromZnode(ZooKeeperExt zkExt,
String zkPath,
boolean watch,
Stat stat,
Writable... writableObjects) {
try {
byte[] zkData = zkExt.getData(zkPath, false, stat);
readFieldsFromByteArray(zkData, writableObjects);
} catch (KeeperException e) {
throw new IllegalStateException(
"readFieldsFromZnode: KeeperException on " + zkPath, e);
} catch (InterruptedException e) {
throw new IllegalStateException(
"readFieldsFromZnode: InterrruptedStateException on " + zkPath, e);
}
}
/**
* Write object to a byte array.
*
* @param writableObjects Objects to write from.
* @return Byte array with serialized object.
*/
public static byte[] writeToByteArray(Writable... writableObjects) {
ByteArrayOutputStream outputStream =
new ByteArrayOutputStream();
DataOutput output = new DataOutputStream(outputStream);
try {
for (Writable writableObject : writableObjects) {
writableObject.write(output);
}
} catch (IOException e) {
throw new IllegalStateException(
"writeToByteArray: IOStateException", e);
}
return outputStream.toByteArray();
}
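/*
* A minimal usage sketch (illustrative only): writeToByteArray and
* readFieldsFromByteArray are symmetric, so several Writables can be packed
* into one array and unpacked in the same order. The IntWritable and
* LongWritable instances below are assumed from org.apache.hadoop.io.
*
*   byte[] data = WritableUtils.writeToByteArray(
*       new IntWritable(1), new LongWritable(2L));
*   IntWritable a = new IntWritable();
*   LongWritable b = new LongWritable();
*   WritableUtils.readFieldsFromByteArray(data, a, b);  // a == 1, b == 2
*/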
/**
* Read fields from byteArray to a Writable object, skipping the size.
* The serialization method (safe or unsafe) is selectable.
*
* @param byteArray Byte array to find the fields in.
* @param writableObject Object to fill in the fields.
* @param unsafe Use unsafe deserialization
*/
public static void readFieldsFromByteArrayWithSize(
byte[] byteArray, Writable writableObject, boolean unsafe) {
ExtendedDataInput extendedDataInput;
if (unsafe) {
extendedDataInput = new UnsafeByteArrayInputStream(byteArray);
} else {
extendedDataInput = new ExtendedByteArrayDataInput(byteArray);
}
try {
extendedDataInput.readInt();
writableObject.readFields(extendedDataInput);
} catch (IOException e) {
throw new IllegalStateException(
"readFieldsFromByteArrayWithSize: IOException", e);
}
}
/**
* Write object to a byte array with the first 4 bytes as the size of the
* entire buffer (including the size).
*
* @param writableObject Object to write from.
* @param unsafe Use unsafe serialization?
* @return Byte array with serialized object.
*/
public static byte[] writeToByteArrayWithSize(Writable writableObject,
boolean unsafe) {
return writeToByteArrayWithSize(writableObject, null, unsafe);
}
/**
* Write object to a byte array with the first 4 bytes as the size of the
* entire buffer (including the size).
*
* @param writableObject Object to write from.
* @param buffer Use this buffer instead
* @param unsafe Use unsafe serialization?
* @return Byte array with serialized object.
*/
public static byte[] writeToByteArrayWithSize(Writable writableObject,
byte[] buffer,
boolean unsafe) {
ExtendedDataOutput extendedDataOutput;
if (unsafe) {
extendedDataOutput = new UnsafeByteArrayOutputStream(buffer);
} else {
extendedDataOutput = new ExtendedByteArrayDataOutput(buffer);
}
try {
extendedDataOutput.writeInt(-1);
writableObject.write(extendedDataOutput);
extendedDataOutput.writeInt(0, extendedDataOutput.getPos());
} catch (IOException e) {
throw new IllegalStateException("writeToByteArrayWithSize: " +
"IOException", e);
}
return extendedDataOutput.getByteArray();
}
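/*
* A minimal usage sketch (illustrative only) of the size-prefixed helpers,
* assuming org.apache.hadoop.io.IntWritable is on the classpath:
*
*   IntWritable original = new IntWritable(42);
*   // First 4 bytes of data hold the total buffer length, size included.
*   byte[] data = WritableUtils.writeToByteArrayWithSize(original, true);
*   IntWritable copy = new IntWritable();
*   // Skips the leading size int, then reads the fields back.
*   WritableUtils.readFieldsFromByteArrayWithSize(data, copy, true);
*   // copy.get() == 42
*/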
/**
* Write object to a ZooKeeper znode.
*
* @param zkExt ZooKeeper instance.
* @param zkPath Path of znode.
* @param version Version of the write.
* @param writableObjects Objects to write from.
* @return Path and stat information of the znode.
*/
public static PathStat writeToZnode(ZooKeeperExt zkExt,
String zkPath,
int version,
Writable... writableObjects) {
try {
byte[] byteArray = writeToByteArray(writableObjects);
return zkExt.createOrSetExt(zkPath,
byteArray,
Ids.OPEN_ACL_UNSAFE,
CreateMode.PERSISTENT,
true,
version);
} catch (KeeperException e) {
throw new IllegalStateException(
"writeToZnode: KeeperException on " + zkPath, e);
} catch (InterruptedException e) {
throw new IllegalStateException(
"writeToZnode: InterruptedException on " + zkPath, e);
}
}
/**
* Write list of object to a byte array.
*
* @param writableList List of object to write from.
* @return Byte array with serialized objects.
*/
public static byte[] writeListToByteArray(
List<? extends Writable> writableList) {
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
DataOutput output = new DataOutputStream(outputStream);
try {
output.writeInt(writableList.size());
for (Writable writable : writableList) {
writable.write(output);
}
} catch (IOException e) {
throw new IllegalStateException(
"writeListToByteArray: IOException", e);
}
return outputStream.toByteArray();
}
/**
* Write list of objects to a ZooKeeper znode.
*
* @param zkExt ZooKeeper instance.
* @param zkPath Path of znode.
* @param version Version of the write.
* @param writableList List of objects to write from.
* @return Path and stat information of the znode.
*/
public static PathStat writeListToZnode(
ZooKeeperExt zkExt,
String zkPath,
int version,
List<? extends Writable> writableList) {
try {
return zkExt.createOrSetExt(
zkPath,
writeListToByteArray(writableList),
Ids.OPEN_ACL_UNSAFE,
CreateMode.PERSISTENT,
true,
version);
} catch (KeeperException e) {
throw new IllegalStateException(
"writeListToZnode: KeeperException on " + zkPath, e);
} catch (InterruptedException e) {
throw new IllegalStateException(
"writeListToZnode: InterruptedException on " + zkPath, e);
}
}
/**
* Read fields from byteArray to a list of objects.
*
* @param byteArray Byte array to find the fields in.
* @param writableClass Class of the objects to instantiate.
* @param conf Configuration used for instantiation (i.e Configurable)
* @param <T> Object type
* @return List of objects.
*/
public static <T extends Writable> List<T> readListFieldsFromByteArray(
byte[] byteArray,
Class<? extends T> writableClass,
Configuration conf) {
try {
DataInputStream inputStream =
new DataInputStream(new ByteArrayInputStream(byteArray));
int size = inputStream.readInt();
List<T> writableList = new ArrayList<T>(size);
for (int i = 0; i < size; ++i) {
T writable = newInstance(writableClass, conf);
writable.readFields(inputStream);
writableList.add(writable);
}
return writableList;
} catch (IOException e) {
throw new IllegalStateException(
"readListFieldsFromZnode: IOException", e);
}
}
/**
* Read fields from a ZooKeeper znode into a list of objects.
*
* @param zkExt ZooKeeper instance.
* @param zkPath Path of znode.
* @param watch Add a watch?
* @param stat Stat of znode if desired.
* @param writableClass Class of the objects to instantiate.
* @param conf Configuration used for instantiation (i.e Configurable)
* @param <T> Object type
* @return List of objects.
*/
public static <T extends Writable> List<T> readListFieldsFromZnode(
ZooKeeperExt zkExt,
String zkPath,
boolean watch,
Stat stat,
Class<? extends T> writableClass,
Configuration conf) {
try {
byte[] zkData = zkExt.getData(zkPath, false, stat);
return WritableUtils.<T>readListFieldsFromByteArray(zkData,
writableClass, conf);
} catch (KeeperException e) {
throw new IllegalStateException(
"readListFieldsFromZnode: KeeperException on " + zkPath, e);
} catch (InterruptedException e) {
throw new IllegalStateException(
"readListFieldsFromZnode: InterruptedException on " + zkPath,
e);
}
}
/**
* Write ExtendedDataOutput to DataOutput
*
* @param extendedDataOutput ExtendedDataOutput to write
* @param out DataOutput to write to
*/
public static void writeExtendedDataOutput(
ExtendedDataOutput extendedDataOutput, DataOutput out)
throws IOException {
out.writeInt(extendedDataOutput.getPos());
out.write(
extendedDataOutput.getByteArray(), 0, extendedDataOutput.getPos());
}
/**
* Read ExtendedDataOutput from DataInput
*
* @param in DataInput to read from
* @param conf Configuration
* @return ExtendedDataOutput read
*/
public static ExtendedDataOutput readExtendedDataOutput(DataInput in,
ImmutableClassesGiraphConfiguration conf) throws IOException {
int size = in.readInt();
byte[] buf = new byte[size];
in.readFully(buf);
return conf.createExtendedDataOutput(buf, size);
}
/**
* Write vertex data to byte array with the first 4 bytes as the size of the
* entire buffer (including the size).
*
* @param vertex Vertex to write from.
* @param buffer Use this buffer instead
* @param unsafe Use unsafe serialization?
* @param conf Configuration
* @param <I> Vertex id
* @param <V> Vertex value
* @param <E> Edge value
* @return Byte array with serialized object.
*/
public static <I extends WritableComparable, V extends Writable,
E extends Writable> byte[] writeVertexToByteArray(
Vertex<I, V, E> vertex,
byte[] buffer,
boolean unsafe,
ImmutableClassesGiraphConfiguration<I, V, E> conf) {
ExtendedDataOutput extendedDataOutput;
if (unsafe) {
extendedDataOutput = new UnsafeByteArrayOutputStream(buffer);
} else {
extendedDataOutput = new ExtendedByteArrayDataOutput(buffer);
}
try {
extendedDataOutput.writeInt(-1);
writeVertexToDataOutput(extendedDataOutput, vertex, conf);
extendedDataOutput.writeInt(0, extendedDataOutput.getPos());
} catch (IOException e) {
throw new IllegalStateException("writeVertexToByteArray: " +
"IOException", e);
}
return extendedDataOutput.getByteArray();
}
/**
* Write vertex data to byte array with the first 4 bytes as the size of the
* entire buffer (including the size).
*
* @param vertex Vertex to write from.
* @param unsafe Use unsafe serialization?
* @param conf Configuration
* @param <I> Vertex id
* @param <V> Vertex value
* @param <E> Edge value
* @return Byte array with serialized object.
*/
public static <I extends WritableComparable, V extends Writable,
E extends Writable> byte[] writeVertexToByteArray(
Vertex<I, V, E> vertex,
boolean unsafe,
ImmutableClassesGiraphConfiguration<I, V, E> conf) {
return writeVertexToByteArray(vertex, null, unsafe, conf);
}
/**
* Read vertex data from byteArray to a Writable object, skipping the size.
* The serialization method (safe or unsafe) is selectable. Assumes the vertex has already been
* initialized and contains values for Id, value, and edges.
*
* @param byteArray Byte array to find the fields in.
* @param vertex Vertex to fill in the fields.
* @param unsafe Use unsafe deserialization
* @param <I> Vertex id
* @param <V> Vertex value
* @param <E> Edge value
* @param conf Configuration
*/
public static <I extends WritableComparable, V extends Writable,
E extends Writable> void reinitializeVertexFromByteArray(
byte[] byteArray,
Vertex<I, V, E> vertex,
boolean unsafe,
ImmutableClassesGiraphConfiguration<I, V, E> conf) {
ExtendedDataInput extendedDataInput;
if (unsafe) {
extendedDataInput = new UnsafeByteArrayInputStream(byteArray);
} else {
extendedDataInput = new ExtendedByteArrayDataInput(byteArray);
}
try {
extendedDataInput.readInt();
reinitializeVertexFromDataInput(extendedDataInput, vertex, conf);
} catch (IOException e) {
throw new IllegalStateException(
"readFieldsFromByteArrayWithSize: IOException", e);
}
}
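/*
* A minimal usage sketch (illustrative only) of the vertex round trip; the
* vertex, otherVertex and conf objects are assumed to come from the caller's
* Giraph setup, and otherVertex must already hold id/value/edges instances:
*
*   byte[] data = WritableUtils.writeVertexToByteArray(vertex, true, conf);
*   WritableUtils.reinitializeVertexFromByteArray(data, otherVertex, true, conf);
*/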
/**
* Write an edge to an output stream.
*
* @param out Data output
* @param edge Edge to write
* @param <I> Vertex id
* @param <E> Edge value
* @throws IOException
*/
public static <I extends WritableComparable, E extends Writable>
void writeEdge(DataOutput out, Edge<I, E> edge) throws IOException {
edge.getTargetVertexId().write(out);
edge.getValue().write(out);
}
/**
* Read an edge from an input stream.
*
* @param in Data input
* @param edge Edge to fill in-place
* @param <I> Vertex id
* @param <E> Edge value
* @throws IOException
*/
public static <I extends WritableComparable, E extends Writable>
void readEdge(DataInput in, Edge<I, E> edge) throws IOException {
edge.getTargetVertexId().readFields(in);
edge.getValue().readFields(in);
}
/**
* Reads data from input stream to initialize Vertex. Assumes the vertex has
* already been initialized and contains values for Id, value, and edges.
*
* @param input The input stream
* @param vertex The vertex to initialize
* @param conf Configuration
* @param <I> Vertex id
* @param <V> Vertex value
* @param <E> Edge value
* @throws IOException
*/
@SuppressWarnings("unchecked")
public static <I extends WritableComparable, V extends Writable,
E extends Writable> void reinitializeVertexFromDataInput(
DataInput input,
Vertex<I, V, E> vertex,
ImmutableClassesGiraphConfiguration<I, V, E> conf)
throws IOException {
vertex.getId().readFields(input);
vertex.getValue().readFields(input);
((OutEdges<I, E>) vertex.getEdges()).readFields(input);
if (input.readBoolean()) {
vertex.voteToHalt();
} else {
vertex.wakeUp();
}
}
/**
* Reads data from input stream to initialize Vertex.
*
* @param input The input stream
* @param conf Configuration
* @param <I> Vertex id
* @param <V> Vertex value
* @param <E> Edge value
* @return The vertex
* @throws IOException
*/
public static <I extends WritableComparable, V extends Writable,
E extends Writable> Vertex<I, V, E>
readVertexFromDataInput(
DataInput input,
ImmutableClassesGiraphConfiguration<I, V, E> conf)
throws IOException {
Vertex<I, V, E> vertex = conf.createVertex();
I id = conf.createVertexId();
V value = conf.createVertexValue();
OutEdges<I, E> edges = conf.createOutEdges();
vertex.initialize(id, value, edges);
reinitializeVertexFromDataInput(input, vertex, conf);
return vertex;
}
/**
* Writes Vertex data to output stream.
*
* @param output the output stream
* @param vertex The vertex to serialize
* @param conf Configuration
* @param <I> Vertex id
* @param <V> Vertex value
* @param <E> Edge value
* @throws IOException
*/
@SuppressWarnings("unchecked")
public static <I extends WritableComparable, V extends Writable,
E extends Writable> void writeVertexToDataOutput(
DataOutput output,
Vertex<I, V, E> vertex,
ImmutableClassesGiraphConfiguration<I, V, E> conf)
throws IOException {
vertex.getId().write(output);
vertex.getValue().write(output);
((OutEdges<I, E>) vertex.getEdges()).write(output);
output.writeBoolean(vertex.isHalted());
}
/**
* Write class to data output. Also handles the case when class is null.
*
* @param clazz Class
* @param output Data output
* @param <T> Class type
*/
public static <T> void writeClass(Class<T> clazz,
DataOutput output) throws IOException {
output.writeBoolean(clazz != null);
if (clazz != null) {
output.writeUTF(clazz.getName());
}
}
/**
* Read class from data input.
* Matches {@link #writeClass(Class, DataOutput)}.
*
* @param input Data input
* @param <T> Class type
* @return Class, or null if null was written
*/
@SuppressWarnings("unchecked")
public static <T> Class<T> readClass(DataInput input) throws IOException {
if (input.readBoolean()) {
String className = input.readUTF();
try {
return (Class<T>) Class.forName(className);
} catch (ClassNotFoundException e) {
throw new IllegalStateException("readClass: No class found " +
className);
}
} else {
return null;
}
}
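/*
* A minimal usage sketch (illustrative only) of the writeClass/readClass
* round trip, including the null case; out and in are assumed streams:
*
*   WritableUtils.writeClass(LongWritable.class, out);    // true + class name
*   WritableUtils.writeClass(null, out);                  // only a false flag
*   Class<LongWritable> c = WritableUtils.readClass(in);  // LongWritable.class
*   Class<?> none = WritableUtils.readClass(in);          // null
*/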
/**
* Write object to output stream
* @param object Object
* @param output Output stream
* @throws IOException
*/
public static void writeWritableObject(
Writable object, DataOutput output)
throws IOException {
output.writeBoolean(object != null);
if (object != null) {
output.writeUTF(object.getClass().getName());
object.write(output);
}
}
/**
* Reads object from input stream
* @param input Input stream
* @param conf Configuration
* @param <T> Object type
* @return Object
* @throws IOException
*/
public static <T extends Writable>
T readWritableObject(DataInput input,
ImmutableClassesGiraphConfiguration conf) throws IOException {
if (input.readBoolean()) {
String className = input.readUTF();
try {
T object =
(T) ReflectionUtils.newInstance(Class.forName(className), conf);
object.readFields(input);
return object;
} catch (ClassNotFoundException e) {
throw new IllegalStateException("readWritableObject: No class found " +
className);
}
} else {
return null;
}
}
/**
* Writes a list of Writable objects into output stream.
* This method tries to save space on class information by writing the class
* only when it differs from the previous element's class, since in most
* cases lists contain objects of the same type.
* @param list serialized object
* @param output the output stream
* @throws IOException
*/
public static void writeList(List<? extends Writable> list, DataOutput output)
throws IOException {
output.writeBoolean(list != null);
if (list != null) {
output.writeInt(list.size());
Class<? extends Writable> clazz = null;
for (Writable element : list) {
output.writeBoolean(element == null);
if (element != null) {
if (element.getClass() != clazz) {
clazz = element.getClass();
output.writeBoolean(true);
writeClass(clazz, output);
} else {
output.writeBoolean(false);
}
element.write(output);
}
}
}
}
/**
* Reads list of Writable objects from data input stream.
* Input stream should have class information along with object data.
* @param input input stream
* @return deserialized list
* @throws IOException
*/
public static List<? extends Writable> readList(DataInput input)
throws IOException {
try {
List<Writable> res = null;
if (input.readBoolean()) {
int size = input.readInt();
res = new ArrayList<>(size);
Class<? extends Writable> clazz = null;
for (int i = 0; i < size; i++) {
boolean isNull = input.readBoolean();
if (isNull) {
res.add(null);
} else {
boolean hasClassInfo = input.readBoolean();
if (hasClassInfo) {
clazz = readClass(input);
}
Writable element = clazz.newInstance();
element.readFields(input);
res.add(element);
}
}
}
return res;
} catch (InstantiationException | IllegalAccessException e) {
throw new IllegalStateException("unable to instantiate object", e);
}
}
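/*
* An illustrative sketch of the stream layout writeList produces for
* Arrays.asList(new LongWritable(1), new LongWritable(2), null):
*
*   boolean true                      -> the list itself is non-null
*   int 3                             -> list size
*   element 0: false (non-null), true (class changed),
*              writeClass(LongWritable.class), 8 bytes of long data
*   element 1: false (non-null), false (same class), 8 bytes of long data
*   element 2: true (null element, nothing else written)
*
* readList mirrors this, reusing the last class it saw for elements that
* carry no class information.
*/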
/**
* Writes a primitive int array into the output stream.
* Array can be null or empty.
* @param array array to be written
* @param dataOutput output stream
* @throws IOException
*/
public static void writeIntArray(int[] array, DataOutput dataOutput)
throws IOException {
if (array != null) {
dataOutput.writeInt(array.length);
for (int r : array) {
dataOutput.writeInt(r);
}
} else {
dataOutput.writeInt(-1);
}
}
/**
* Reads primitive int array from input stream.
* @param dataInput input stream to read from
* @return may return null or empty array.
* @throws IOException
*/
public static int[] readIntArray(DataInput dataInput)
throws IOException {
int [] res = null;
int size = dataInput.readInt();
if (size >= 0) {
res = new int[size];
for (int i = 0; i < size; i++) {
res[i] = dataInput.readInt();
}
}
return res;
}
/**
* Writes a primitive long array into the output stream.
* Array can be null or empty.
* @param array array to be written
* @param dataOutput output stream
* @throws IOException
*/
public static void writeLongArray(DataOutput dataOutput, long[] array)
throws IOException {
if (array != null) {
dataOutput.writeInt(array.length);
for (long r : array) {
dataOutput.writeLong(r);
}
} else {
dataOutput.writeInt(-1);
}
}
/**
* Reads primitive long array from input stream.
* @param dataInput input stream to read from
* @return may return null or empty array.
* @throws IOException
*/
public static long[] readLongArray(DataInput dataInput)
throws IOException {
long [] res = null;
int size = dataInput.readInt();
if (size >= 0) {
res = new long[size];
for (int i = 0; i < size; i++) {
res[i] = dataInput.readLong();
}
}
return res;
}
/**
* Writes an enum into a stream by serializing its class name and its ordinal
* @param enumValue Enum value
* @param output Output stream
* @param <T> Enum type
*/
public static <T extends Enum<T>> void writeEnum(T enumValue,
DataOutput output) throws IOException {
writeClass(
enumValue != null ? enumValue.getDeclaringClass() : null, output);
if (enumValue != null) {
Varint.writeUnsignedVarInt(enumValue.ordinal(), output);
}
}
/**
* Reads enum from the stream, serialized by writeEnum
* @param input Input stream
* @param <T> Enum type
* @return Enum value
*/
public static <T extends Enum<T>> T readEnum(DataInput input) throws
IOException {
Class<T> clazz = readClass(input);
if (clazz != null) {
int ordinal = Varint.readUnsignedVarInt(input);
try {
T[] values = (T[]) clazz.getDeclaredMethod("values").invoke(null);
return values[ordinal];
} catch (IllegalAccessException | IllegalArgumentException |
InvocationTargetException | NoSuchMethodException |
SecurityException e) {
throw new IOException("Cannot read enum", e);
}
} else {
return null;
}
}
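/*
* A minimal usage sketch (illustrative only); TimeUnit here is just an
* example enum (java.util.concurrent.TimeUnit), and out/in are assumed
* streams:
*
*   WritableUtils.writeEnum(TimeUnit.SECONDS, out);  // class name + ordinal varint
*   TimeUnit unit = WritableUtils.readEnum(in);      // TimeUnit.SECONDS
*   WritableUtils.writeEnum(null, out);              // only a "no class" flag
*/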
/**
* Copy {@code from} into {@code to}, by serializing and deserializing it.
* Since it is creating streams inside, it's mostly useful for
* tests/non-performant code.
*
* @param from Object to copy from
* @param to Object to copy into
* @param <T> Type of the object
*/
public static <T extends Writable> void copyInto(T from, T to) {
copyInto(from, to, false);
}
/**
* Copy {@code from} into {@code to}, by serializing and deserializing it.
* Since it is creating streams inside, it's mostly useful for
* tests/non-performant code.
*
* @param from Object to copy from
* @param to Object to copy into
* @param checkOverRead if true, will add one more byte at the end of writing,
* to make sure read is not touching it. Useful for tests
* @param <T> Type of the object
*/
public static <T extends Writable> void copyInto(
T from, T to, boolean checkOverRead) {
try {
if (from.getClass() != to.getClass()) {
throw new RuntimeException(
"Trying to copy from " + from.getClass() +
" into " + to.getClass());
}
UnsafeByteArrayOutputStream out = new UnsafeByteArrayOutputStream();
from.write(out);
if (checkOverRead) {
out.writeByte(0);
}
UnsafeByteArrayInputStream in =
new UnsafeByteArrayInputStream(out.getByteArray(), 0, out.getPos());
to.readFields(in);
if (in.available() != (checkOverRead ? 1 : 0)) {
throw new RuntimeException(
"Serialization encountered issues with " + from.getClass() + ", " +
(in.available() - (checkOverRead ? 1 : 0)) + " fewer bytes read");
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
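/*
* A minimal usage sketch (illustrative only): copyInto clones state through
* an in-memory buffer, so both objects must be of the exact same class.
* LongWritable is assumed from org.apache.hadoop.io:
*
*   LongWritable from = new LongWritable(7);
*   LongWritable to = new LongWritable();
*   WritableUtils.copyInto(from, to);        // to.get() == 7
*   WritableUtils.copyInto(from, to, true);  // also checks nothing is over-read
*/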
/**
* Create a copy of Writable object, by serializing and deserializing it.
*
* @param reusableOut Reusable output stream to serialize into
* @param reusableIn Reusable input stream to deserialize out of
* @param original Original value of which to make a copy
* @param conf Configuration
* @param <T> Type of the object
* @return Copy of the original value
*/
public static <T extends Writable> T createCopy(
UnsafeByteArrayOutputStream reusableOut,
UnsafeReusableByteArrayInput reusableIn, T original,
ImmutableClassesGiraphConfiguration conf) {
T copy = (T) createWritable(original.getClass(), conf);
try {
reusableOut.reset();
original.write(reusableOut);
reusableIn.initialize(
reusableOut.getByteArray(), 0, reusableOut.getPos());
copy.readFields(reusableIn);
if (reusableIn.available() != 0) {
throw new RuntimeException("Serialization of " +
original.getClass() + " encountered issues, " +
reusableIn.available() + " bytes left to be read");
}
} catch (IOException e) {
throw new IllegalStateException(
"IOException occurred while trying to create a copy " +
original.getClass(), e);
}
return copy;
}
/**
* Create a copy of Writable object, by serializing and deserializing it.
*
* @param original Original value of which to make a copy
* @return Copy of the original value
* @param <T> Type of the object
*/
public static final <T extends Writable> T createCopy(T original) {
return (T) createCopy(original, original.getClass(), null);
}
/**
* Create a copy of Writable object, by serializing and deserializing it.
*
* @param original Original value of which to make a copy
* @param outputClass Expected copy class, needs to match original
* @param conf Configuration
* @return Copy of the original value
* @param <T> Type of the object
*/
public static final <T extends Writable>
T createCopy(T original, Class<? extends T> outputClass,
ImmutableClassesGiraphConfiguration conf) {
T result = WritableUtils.createWritable(outputClass, conf);
copyInto(original, result);
return result;
}
/**
* Create a copy of Writable object, by serializing and deserializing it.
*
* @param original Original value of which to make a copy
* @param classFactory Factory to create new empty object from
* @param conf Configuration
* @return Copy of the original value
* @param <T> Type of the object
*/
public static final <T extends Writable>
T createCopy(T original, ValueFactory<T> classFactory,
ImmutableClassesGiraphConfiguration conf) {
T result = classFactory.newInstance();
copyInto(original, result);
return result;
}
/**
* Serialize the given writable object and return its size.
*
* @param w Writable object
* @return its size after serialization
*/
public static int size(Writable w) {
try {
ExtendedByteArrayDataOutput out = new ExtendedByteArrayDataOutput();
w.write(out);
return out.getPos();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
* Serialize given writable to byte array,
* using new instance of ExtendedByteArrayDataOutput.
*
* @param w Writable object
* @return array of bytes
* @param <T> Type of the object
*/
public static <T extends Writable> byte[] toByteArray(T w) {
try {
ExtendedByteArrayDataOutput out = new ExtendedByteArrayDataOutput();
w.write(out);
return out.toByteArray();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
* Deserialize from given byte array into given writable,
* using new instance of ExtendedByteArrayDataInput.
*
* @param data Byte array representing writable
* @param to Object to fill
* @param <T> Type of the object
*/
public static <T extends Writable> void fromByteArray(byte[] data, T to) {
try {
ExtendedByteArrayDataInput in =
new ExtendedByteArrayDataInput(data, 0, data.length);
to.readFields(in);
if (in.available() != 0) {
throw new RuntimeException(
"Serialization encountered issues, " + in.available() +
" bytes left to be read");
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
* Serialize given writable to byte array,
* using new instance of UnsafeByteArrayOutputStream.
*
* @param w Writable object
* @return array of bytes
* @param <T> Type of the object
*/
public static <T extends Writable> byte[] toByteArrayUnsafe(T w) {
try {
UnsafeByteArrayOutputStream out = new UnsafeByteArrayOutputStream();
w.write(out);
return out.toByteArray();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
* Deserialize from given byte array into given writable,
* using given reusable UnsafeReusableByteArrayInput.
*
* @param data Byte array representing writable
* @param to Object to fill
* @param reusableInput Reusable input to use
* @param <T> Type of the object
*/
public static <T extends Writable> void fromByteArrayUnsafe(
byte[] data, T to, UnsafeReusableByteArrayInput reusableInput) {
try {
reusableInput.initialize(data, 0, data.length);
to.readFields(reusableInput);
if (reusableInput.available() != 0) {
throw new RuntimeException(
"Serialization encountered issues, " + reusableInput.available() +
" bytes left to be read");
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
* First write a boolean saying whether an object is not null,
* and if it's not write the object
*
* @param object Object to write
* @param out DataOutput to write to
* @param <T> Object type
*/
public static <T extends Writable> void writeIfNotNullAndObject(T object,
DataOutput out) throws IOException {
out.writeBoolean(object != null);
if (object != null) {
object.write(out);
}
}
/**
* First read a boolean saying whether an object is not null,
* and if it's not read the object
*
* @param reusableObject Reuse this object instance
* @param objectClass Class of the object, to create if reusableObject is null
* @param in DataInput to read from
* @param <T> Object type
* @return Object, or null
*/
public static <T extends Writable> T readIfNotNullAndObject(T reusableObject,
Class<T> objectClass, DataInput in) throws IOException {
if (in.readBoolean()) {
if (reusableObject == null) {
reusableObject = ReflectionUtils.newInstance(objectClass);
}
reusableObject.readFields(in);
return reusableObject;
} else {
return null;
}
}
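/*
* A minimal usage sketch (illustrative only) of the null-flag pair above;
* maybeValue, out and in are assumed from the caller:
*
*   WritableUtils.writeIfNotNullAndObject(maybeValue, out);
*   LongWritable read =
*       WritableUtils.readIfNotNullAndObject(null, LongWritable.class, in);
*   // read is null exactly when maybeValue was null
*/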
}
|
giraph-core/src/main/java/org/apache/giraph/utils/WritableUtils.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.giraph.utils;
import static org.apache.hadoop.util.ReflectionUtils.newInstance;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.List;
import org.apache.giraph.conf.ImmutableClassesGiraphConfiguration;
import org.apache.giraph.edge.Edge;
import org.apache.giraph.edge.OutEdges;
import org.apache.giraph.factories.ValueFactory;
import org.apache.giraph.graph.Vertex;
import org.apache.giraph.zk.ZooKeeperExt;
import org.apache.giraph.zk.ZooKeeperExt.PathStat;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.ZooDefs.Ids;
import org.apache.zookeeper.data.Stat;
/**
* Helper static methods for working with Writable objects.
*/
public class WritableUtils {
/**
* Don't construct.
*/
private WritableUtils() { }
/**
* Instantiate a new Writable, checking for NullWritable along the way.
*
* @param klass Class
* @param <W> type
* @return new instance of class
*/
public static <W extends Writable> W createWritable(Class<W> klass) {
return createWritable(klass, null);
}
/**
* Instantiate a new Writable, checking for NullWritable along the way.
*
* @param klass Class
* @param configuration Configuration
* @param <W> type
* @return new instance of class
*/
public static <W extends Writable> W createWritable(
Class<W> klass,
ImmutableClassesGiraphConfiguration configuration) {
W result;
if (NullWritable.class.equals(klass)) {
result = (W) NullWritable.get();
} else {
result = ReflectionUtils.newInstance(klass);
}
ConfigurationUtils.configureIfPossible(result, configuration);
return result;
}
/**
* Read fields from byteArray to a Writeable object.
*
* @param byteArray Byte array to find the fields in.
* @param writableObjects Objects to fill in the fields.
*/
public static void readFieldsFromByteArray(
byte[] byteArray, Writable... writableObjects) {
DataInputStream inputStream =
new DataInputStream(new ByteArrayInputStream(byteArray));
try {
for (Writable writableObject : writableObjects) {
writableObject.readFields(inputStream);
}
} catch (IOException e) {
throw new IllegalStateException(
"readFieldsFromByteArray: IOException", e);
}
}
/**
* Read fields from a ZooKeeper znode.
*
* @param zkExt ZooKeeper instance.
* @param zkPath Path of znode.
* @param watch Add a watch?
* @param stat Stat of znode if desired.
* @param writableObjects Objects to read into.
*/
public static void readFieldsFromZnode(ZooKeeperExt zkExt,
String zkPath,
boolean watch,
Stat stat,
Writable... writableObjects) {
try {
byte[] zkData = zkExt.getData(zkPath, false, stat);
readFieldsFromByteArray(zkData, writableObjects);
} catch (KeeperException e) {
throw new IllegalStateException(
"readFieldsFromZnode: KeeperException on " + zkPath, e);
} catch (InterruptedException e) {
throw new IllegalStateException(
"readFieldsFromZnode: InterrruptedStateException on " + zkPath, e);
}
}
/**
* Write object to a byte array.
*
* @param writableObjects Objects to write from.
* @return Byte array with serialized object.
*/
public static byte[] writeToByteArray(Writable... writableObjects) {
ByteArrayOutputStream outputStream =
new ByteArrayOutputStream();
DataOutput output = new DataOutputStream(outputStream);
try {
for (Writable writableObject : writableObjects) {
writableObject.write(output);
}
} catch (IOException e) {
throw new IllegalStateException(
"writeToByteArray: IOStateException", e);
}
return outputStream.toByteArray();
}
/**
* Read fields from byteArray to a Writeable object, skipping the size.
* Serialization method is choosable
*
* @param byteArray Byte array to find the fields in.
* @param writableObject Object to fill in the fields.
* @param unsafe Use unsafe deserialization
*/
public static void readFieldsFromByteArrayWithSize(
byte[] byteArray, Writable writableObject, boolean unsafe) {
ExtendedDataInput extendedDataInput;
if (unsafe) {
extendedDataInput = new UnsafeByteArrayInputStream(byteArray);
} else {
extendedDataInput = new ExtendedByteArrayDataInput(byteArray);
}
try {
extendedDataInput.readInt();
writableObject.readFields(extendedDataInput);
} catch (IOException e) {
throw new IllegalStateException(
"readFieldsFromByteArrayWithSize: IOException", e);
}
}
/**
* Write object to a byte array with the first 4 bytes as the size of the
* entire buffer (including the size).
*
* @param writableObject Object to write from.
* @param unsafe Use unsafe serialization?
* @return Byte array with serialized object.
*/
public static byte[] writeToByteArrayWithSize(Writable writableObject,
boolean unsafe) {
return writeToByteArrayWithSize(writableObject, null, unsafe);
}
/**
* Write object to a byte array with the first 4 bytes as the size of the
* entire buffer (including the size).
*
* @param writableObject Object to write from.
* @param buffer Use this buffer instead
* @param unsafe Use unsafe serialization?
* @return Byte array with serialized object.
*/
public static byte[] writeToByteArrayWithSize(Writable writableObject,
byte[] buffer,
boolean unsafe) {
ExtendedDataOutput extendedDataOutput;
if (unsafe) {
extendedDataOutput = new UnsafeByteArrayOutputStream(buffer);
} else {
extendedDataOutput = new ExtendedByteArrayDataOutput(buffer);
}
try {
extendedDataOutput.writeInt(-1);
writableObject.write(extendedDataOutput);
extendedDataOutput.writeInt(0, extendedDataOutput.getPos());
} catch (IOException e) {
throw new IllegalStateException("writeToByteArrayWithSize: " +
"IOException", e);
}
return extendedDataOutput.getByteArray();
}
/**
* Write object to a ZooKeeper znode.
*
* @param zkExt ZooKeeper instance.
* @param zkPath Path of znode.
* @param version Version of the write.
* @param writableObjects Objects to write from.
* @return Path and stat information of the znode.
*/
public static PathStat writeToZnode(ZooKeeperExt zkExt,
String zkPath,
int version,
Writable... writableObjects) {
try {
byte[] byteArray = writeToByteArray(writableObjects);
return zkExt.createOrSetExt(zkPath,
byteArray,
Ids.OPEN_ACL_UNSAFE,
CreateMode.PERSISTENT,
true,
version);
} catch (KeeperException e) {
throw new IllegalStateException(
"writeToZnode: KeeperException on " + zkPath, e);
} catch (InterruptedException e) {
throw new IllegalStateException(
"writeToZnode: InterruptedException on " + zkPath, e);
}
}
/**
* Write list of object to a byte array.
*
* @param writableList List of object to write from.
* @return Byte array with serialized objects.
*/
public static byte[] writeListToByteArray(
List<? extends Writable> writableList) {
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
DataOutput output = new DataOutputStream(outputStream);
try {
output.writeInt(writableList.size());
for (Writable writable : writableList) {
writable.write(output);
}
} catch (IOException e) {
throw new IllegalStateException(
"writeListToByteArray: IOException", e);
}
return outputStream.toByteArray();
}
/**
* Write list of objects to a ZooKeeper znode.
*
* @param zkExt ZooKeeper instance.
* @param zkPath Path of znode.
* @param version Version of the write.
* @param writableList List of objects to write from.
* @return Path and stat information of the znode.
*/
public static PathStat writeListToZnode(
ZooKeeperExt zkExt,
String zkPath,
int version,
List<? extends Writable> writableList) {
try {
return zkExt.createOrSetExt(
zkPath,
writeListToByteArray(writableList),
Ids.OPEN_ACL_UNSAFE,
CreateMode.PERSISTENT,
true,
version);
} catch (KeeperException e) {
throw new IllegalStateException(
"writeListToZnode: KeeperException on " + zkPath, e);
} catch (InterruptedException e) {
throw new IllegalStateException(
"writeListToZnode: InterruptedException on " + zkPath, e);
}
}
/**
* Read fields from byteArray to a list of objects.
*
* @param byteArray Byte array to find the fields in.
* @param writableClass Class of the objects to instantiate.
* @param conf Configuration used for instantiation (i.e Configurable)
* @param <T> Object type
* @return List of objects.
*/
public static <T extends Writable> List<T> readListFieldsFromByteArray(
byte[] byteArray,
Class<? extends T> writableClass,
Configuration conf) {
try {
DataInputStream inputStream =
new DataInputStream(new ByteArrayInputStream(byteArray));
int size = inputStream.readInt();
List<T> writableList = new ArrayList<T>(size);
for (int i = 0; i < size; ++i) {
T writable = newInstance(writableClass, conf);
writable.readFields(inputStream);
writableList.add(writable);
}
return writableList;
} catch (IOException e) {
throw new IllegalStateException(
"readListFieldsFromZnode: IOException", e);
}
}
/**
* Read fields from a ZooKeeper znode into a list of objects.
*
* @param zkExt ZooKeeper instance.
* @param zkPath Path of znode.
* @param watch Add a watch?
* @param stat Stat of znode if desired.
* @param writableClass Class of the objects to instantiate.
* @param conf Configuration used for instantiation (i.e Configurable)
* @param <T> Object type
* @return List of objects.
*/
public static <T extends Writable> List<T> readListFieldsFromZnode(
ZooKeeperExt zkExt,
String zkPath,
boolean watch,
Stat stat,
Class<? extends T> writableClass,
Configuration conf) {
try {
byte[] zkData = zkExt.getData(zkPath, false, stat);
return WritableUtils.<T>readListFieldsFromByteArray(zkData,
writableClass, conf);
} catch (KeeperException e) {
throw new IllegalStateException(
"readListFieldsFromZnode: KeeperException on " + zkPath, e);
} catch (InterruptedException e) {
throw new IllegalStateException(
"readListFieldsFromZnode: InterruptedException on " + zkPath,
e);
}
}
/**
* Write ExtendedDataOutput to DataOutput
*
* @param extendedDataOutput ExtendedDataOutput to write
* @param out DataOutput to write to
*/
public static void writeExtendedDataOutput(
ExtendedDataOutput extendedDataOutput, DataOutput out)
throws IOException {
out.writeInt(extendedDataOutput.getPos());
out.write(
extendedDataOutput.getByteArray(), 0, extendedDataOutput.getPos());
}
/**
* Read ExtendedDataOutput from DataInput
*
* @param in DataInput to read from
* @param conf Configuration
* @return ExtendedDataOutput read
*/
public static ExtendedDataOutput readExtendedDataOutput(DataInput in,
ImmutableClassesGiraphConfiguration conf) throws IOException {
int size = in.readInt();
byte[] buf = new byte[size];
in.readFully(buf);
return conf.createExtendedDataOutput(buf, size);
}
/**
* Write vertex data to byte array with the first 4 bytes as the size of the
* entire buffer (including the size).
*
* @param vertex Vertex to write from.
* @param buffer Use this buffer instead
* @param unsafe Use unsafe serialization?
* @param conf Configuration
* @param <I> Vertex id
* @param <V> Vertex value
* @param <E> Edge value
* @return Byte array with serialized object.
*/
public static <I extends WritableComparable, V extends Writable,
E extends Writable> byte[] writeVertexToByteArray(
Vertex<I, V, E> vertex,
byte[] buffer,
boolean unsafe,
ImmutableClassesGiraphConfiguration<I, V, E> conf) {
ExtendedDataOutput extendedDataOutput;
if (unsafe) {
extendedDataOutput = new UnsafeByteArrayOutputStream(buffer);
} else {
extendedDataOutput = new ExtendedByteArrayDataOutput(buffer);
}
try {
extendedDataOutput.writeInt(-1);
writeVertexToDataOutput(extendedDataOutput, vertex, conf);
extendedDataOutput.writeInt(0, extendedDataOutput.getPos());
} catch (IOException e) {
throw new IllegalStateException("writeVertexToByteArray: " +
"IOException", e);
}
return extendedDataOutput.getByteArray();
}
/**
* Write vertex data to byte array with the first 4 bytes as the size of the
* entire buffer (including the size).
*
* @param vertex Vertex to write from.
* @param unsafe Use unsafe serialization?
* @param conf Configuration
* @param <I> Vertex id
* @param <V> Vertex value
* @param <E> Edge value
* @return Byte array with serialized object.
*/
public static <I extends WritableComparable, V extends Writable,
E extends Writable> byte[] writeVertexToByteArray(
Vertex<I, V, E> vertex,
boolean unsafe,
ImmutableClassesGiraphConfiguration<I, V, E> conf) {
return writeVertexToByteArray(vertex, null, unsafe, conf);
}
/**
* Read vertex data from byteArray to a Writeable object, skipping the size.
* Serialization method is choosable. Assumes the vertex has already been
* initialized and contains values for Id, value, and edges.
*
* @param byteArray Byte array to find the fields in.
* @param vertex Vertex to fill in the fields.
* @param unsafe Use unsafe deserialization
* @param <I> Vertex id
* @param <V> Vertex value
* @param <E> Edge value
* @param conf Configuration
*/
public static <I extends WritableComparable, V extends Writable,
E extends Writable> void reinitializeVertexFromByteArray(
byte[] byteArray,
Vertex<I, V, E> vertex,
boolean unsafe,
ImmutableClassesGiraphConfiguration<I, V, E> conf) {
ExtendedDataInput extendedDataInput;
if (unsafe) {
extendedDataInput = new UnsafeByteArrayInputStream(byteArray);
} else {
extendedDataInput = new ExtendedByteArrayDataInput(byteArray);
}
try {
extendedDataInput.readInt();
reinitializeVertexFromDataInput(extendedDataInput, vertex, conf);
} catch (IOException e) {
throw new IllegalStateException(
"readFieldsFromByteArrayWithSize: IOException", e);
}
}
/**
* Write an edge to an output stream.
*
* @param out Data output
* @param edge Edge to write
* @param <I> Vertex id
* @param <E> Edge value
* @throws IOException
*/
public static <I extends WritableComparable, E extends Writable>
void writeEdge(DataOutput out, Edge<I, E> edge) throws IOException {
edge.getTargetVertexId().write(out);
edge.getValue().write(out);
}
/**
* Read an edge from an input stream.
*
* @param in Data input
* @param edge Edge to fill in-place
* @param <I> Vertex id
* @param <E> Edge value
* @throws IOException
*/
public static <I extends WritableComparable, E extends Writable>
void readEdge(DataInput in, Edge<I, E> edge) throws IOException {
edge.getTargetVertexId().readFields(in);
edge.getValue().readFields(in);
}
/**
* Reads data from input stream to inizialize Vertex. Assumes the vertex has
* already been initialized and contains values for Id, value, and edges.
*
* @param input The input stream
* @param vertex The vertex to initialize
* @param conf Configuration
* @param <I> Vertex id
* @param <V> Vertex value
* @param <E> Edge value
* @throws IOException
*/
@SuppressWarnings("unchecked")
public static <I extends WritableComparable, V extends Writable,
E extends Writable> void reinitializeVertexFromDataInput(
DataInput input,
Vertex<I, V, E> vertex,
ImmutableClassesGiraphConfiguration<I, V, E> conf)
throws IOException {
vertex.getId().readFields(input);
vertex.getValue().readFields(input);
((OutEdges<I, E>) vertex.getEdges()).readFields(input);
if (input.readBoolean()) {
vertex.voteToHalt();
} else {
vertex.wakeUp();
}
}
/**
* Reads data from input stream to initialize Vertex.
*
* @param input The input stream
* @param conf Configuration
* @param <I> Vertex id
* @param <V> Vertex value
* @param <E> Edge value
* @return The vertex
* @throws IOException
*/
public static <I extends WritableComparable, V extends Writable,
E extends Writable> Vertex<I, V, E>
readVertexFromDataInput(
DataInput input,
ImmutableClassesGiraphConfiguration<I, V, E> conf)
throws IOException {
Vertex<I, V, E> vertex = conf.createVertex();
I id = conf.createVertexId();
V value = conf.createVertexValue();
OutEdges<I, E> edges = conf.createOutEdges();
vertex.initialize(id, value, edges);
reinitializeVertexFromDataInput(input, vertex, conf);
return vertex;
}
/**
* Writes Vertex data to output stream.
*
* @param output the output stream
* @param vertex The vertex to serialize
* @param conf Configuration
* @param <I> Vertex id
* @param <V> Vertex value
* @param <E> Edge value
* @throws IOException
*/
@SuppressWarnings("unchecked")
public static <I extends WritableComparable, V extends Writable,
E extends Writable> void writeVertexToDataOutput(
DataOutput output,
Vertex<I, V, E> vertex,
ImmutableClassesGiraphConfiguration<I, V, E> conf)
throws IOException {
vertex.getId().write(output);
vertex.getValue().write(output);
((OutEdges<I, E>) vertex.getEdges()).write(output);
output.writeBoolean(vertex.isHalted());
}
/**
* Write class to data output. Also handles the case when class is null.
*
* @param clazz Class
* @param output Data output
* @param <T> Class type
*/
public static <T> void writeClass(Class<T> clazz,
DataOutput output) throws IOException {
output.writeBoolean(clazz != null);
if (clazz != null) {
output.writeUTF(clazz.getName());
}
}
/**
* Read class from data input.
* Matches {@link #writeClass(Class, DataOutput)}.
*
* @param input Data input
* @param <T> Class type
* @return Class, or null if null was written
*/
@SuppressWarnings("unchecked")
public static <T> Class<T> readClass(DataInput input) throws IOException {
if (input.readBoolean()) {
String className = input.readUTF();
try {
return (Class<T>) Class.forName(className);
} catch (ClassNotFoundException e) {
throw new IllegalStateException("readClass: No class found " +
className);
}
} else {
return null;
}
}
/**
* Write object to output stream
* @param object Object
* @param output Output stream
* @throws IOException
*/
public static void writeWritableObject(
Writable object, DataOutput output)
throws IOException {
output.writeBoolean(object != null);
if (object != null) {
output.writeUTF(object.getClass().getName());
object.write(output);
}
}
/**
* Reads object from input stream
* @param input Input stream
* @param conf Configuration
* @param <T> Object type
* @return Object
* @throws IOException
*/
public static <T extends Writable>
T readWritableObject(DataInput input,
ImmutableClassesGiraphConfiguration conf) throws IOException {
if (input.readBoolean()) {
String className = input.readUTF();
try {
T object =
(T) ReflectionUtils.newInstance(Class.forName(className), conf);
object.readFields(input);
return object;
} catch (ClassNotFoundException e) {
throw new IllegalStateException("readWritableObject: No class found " +
className);
}
} else {
return null;
}
}
/**
* Writes a list of Writable objects into output stream.
* This method is trying to optimize space occupied by class information only
* storing class object if it is different from the previous one
* as in most cases arrays tend to have objects of the same type inside.
* @param list serialized object
* @param output the output stream
* @throws IOException
*/
public static void writeList(List<? extends Writable> list, DataOutput output)
throws IOException {
output.writeBoolean(list != null);
if (list != null) {
output.writeInt(list.size());
Class<? extends Writable> clazz = null;
for (Writable element : list) {
output.writeBoolean(element == null);
if (element != null) {
if (element.getClass() != clazz) {
clazz = element.getClass();
output.writeBoolean(true);
writeClass(clazz, output);
} else {
output.writeBoolean(false);
}
element.write(output);
}
}
}
}
/**
* Reads list of Writable objects from data input stream.
* Input stream should have class information along with object data.
* @param input input stream
* @return deserialized list
* @throws IOException
*/
public static List<? extends Writable> readList(DataInput input)
throws IOException {
try {
List<Writable> res = null;
if (input.readBoolean()) {
int size = input.readInt();
res = new ArrayList<>(size);
Class<? extends Writable> clazz = null;
for (int i = 0; i < size; i++) {
boolean isNull = input.readBoolean();
if (isNull) {
res.add(null);
} else {
boolean hasClassInfo = input.readBoolean();
if (hasClassInfo) {
clazz = readClass(input);
}
Writable element = clazz.newInstance();
element.readFields(input);
res.add(element);
}
}
}
return res;
} catch (InstantiationException | IllegalAccessException e) {
throw new IllegalStateException("unable to instantiate object", e);
}
}
/**
* Writes primitive int array of ints into output stream.
* Array can be null or empty.
* @param array array to be written
* @param dataOutput output stream
* @throws IOException
*/
public static void writeIntArray(int[] array, DataOutput dataOutput)
throws IOException {
if (array != null) {
dataOutput.writeInt(array.length);
for (int r : array) {
dataOutput.writeInt(r);
}
} else {
dataOutput.writeInt(-1);
}
}
/**
* Reads primitive int array from input stream.
* @param dataInput input stream to read from
* @return may return null or empty array.
* @throws IOException
*/
public static int[] readIntArray(DataInput dataInput)
throws IOException {
int [] res = null;
int size = dataInput.readInt();
if (size >= 0) {
res = new int[size];
for (int i = 0; i < size; i++) {
res[i] = dataInput.readInt();
}
}
return res;
}
/**
* Writes primitive long array of ints into output stream.
* Array can be null or empty.
* @param array array to be written
* @param dataOutput output stream
* @throws IOException
*/
public static void writeLongArray(DataOutput dataOutput, long[] array)
throws IOException {
if (array != null) {
dataOutput.writeInt(array.length);
for (long r : array) {
dataOutput.writeLong(r);
}
} else {
dataOutput.writeInt(-1);
}
}
/**
* Reads primitive long array from input stream.
* @param dataInput input stream to read from
* @return the array read; may be null or empty.
* @throws IOException
*/
public static long[] readLongArray(DataInput dataInput)
throws IOException {
long [] res = null;
int size = dataInput.readInt();
if (size >= 0) {
res = new long[size];
for (int i = 0; i < size; i++) {
res[i] = dataInput.readLong();
}
}
return res;
}
/**
* Writes an enum into a stream by serializing its declaring class name and ordinal index
* @param enumValue Enum value
* @param output Output stream
* @param <T> Enum type
*/
public static <T extends Enum<T>> void writeEnum(T enumValue,
DataOutput output) throws IOException {
writeClass(
enumValue != null ? enumValue.getDeclaringClass() : null, output);
if (enumValue != null) {
Varint.writeUnsignedVarInt(enumValue.ordinal(), output);
}
}
/**
* Reads enum from the stream, serialized by writeEnum
* @param input Input stream
* @param <T> Enum type
* @return Enum value
*/
public static <T extends Enum<T>> T readEnum(DataInput input) throws
IOException {
Class<T> clazz = readClass(input);
if (clazz != null) {
int ordinal = Varint.readUnsignedVarInt(input);
try {
T[] values = (T[]) clazz.getDeclaredMethod("values").invoke(null);
return values[ordinal];
} catch (IllegalAccessException | IllegalArgumentException |
InvocationTargetException | NoSuchMethodException |
SecurityException e) {
throw new IOException("Cannot read enum", e);
}
} else {
return null;
}
}
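// Illustrative sketch: writeEnum stores the declaring class followed by a
// varint ordinal, so a round-trip looks like the following, where Mode is a
// hypothetical caller-defined enum and output/input share the same bytes.
//
//   enum Mode { FAST, SAFE }
//   WritableUtils.writeEnum(Mode.SAFE, output);    // class name + ordinal 1
//   Mode restored = WritableUtils.readEnum(input); // yields Mode.SAFE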
/**
* Copy {@code from} into {@code to}, by serializing and deserializing it.
* Since it creates streams internally, it is mostly useful for
* tests and non-performance-critical code.
*
* @param from Object to copy from
* @param to Object to copy into
* @param <T> Type of the object
*/
public static <T extends Writable> void copyInto(T from, T to) {
copyInto(from, to, false);
}
/**
* Copy {@code from} into {@code to}, by serializing and deserializing it.
* Since it creates streams internally, it is mostly useful for
* tests and non-performance-critical code.
*
* @param from Object to copy from
* @param to Object to copy into
* @param checkOverRead if true, will add one more byte at the end of writing,
* to make sure read is not touching it. Useful for tests
* @param <T> Type of the object
*/
public static <T extends Writable> void copyInto(
T from, T to, boolean checkOverRead) {
try {
if (from.getClass() != to.getClass()) {
throw new RuntimeException(
"Trying to copy from " + from.getClass() +
" into " + to.getClass());
}
UnsafeByteArrayOutputStream out = new UnsafeByteArrayOutputStream();
from.write(out);
if (checkOverRead) {
out.writeByte(0);
}
UnsafeByteArrayInputStream in =
new UnsafeByteArrayInputStream(out.getByteArray(), 0, out.getPos());
to.readFields(in);
if (in.available() != (checkOverRead ? 1 : 0)) {
throw new RuntimeException(
"Serialization encountered issues with " + from.getClass() + ", " +
(in.available() - (checkOverRead ? 1 : 0)) + " fewer bytes read");
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
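// Illustrative test-style usage: with checkOverRead enabled an extra trailing
// byte is written, so the copy fails fast if readFields() consumes more bytes
// than write() produced. MyWritable and createPopulatedInstance() are
// hypothetical names used only for this sketch.
//
//   MyWritable original = createPopulatedInstance();
//   MyWritable copy = new MyWritable();
//   WritableUtils.copyInto(original, copy, true);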
/**
* Create a copy of Writable object, by serializing and deserializing it.
*
* @param reusableOut Reusable output stream to serialize into
* @param reusableIn Reusable input stream to deserialize out of
* @param original Original value of which to make a copy
* @param conf Configuration
* @param <T> Type of the object
* @return Copy of the original value
*/
public static <T extends Writable> T createCopy(
UnsafeByteArrayOutputStream reusableOut,
UnsafeReusableByteArrayInput reusableIn, T original,
ImmutableClassesGiraphConfiguration conf) {
T copy = (T) createWritable(original.getClass(), conf);
try {
reusableOut.reset();
original.write(reusableOut);
reusableIn.initialize(
reusableOut.getByteArray(), 0, reusableOut.getPos());
copy.readFields(reusableIn);
if (reusableIn.available() != 0) {
throw new RuntimeException("Serialization of " +
original.getClass() + " encountered issues, " +
reusableIn.available() + " bytes left to be read");
}
} catch (IOException e) {
throw new IllegalStateException(
"IOException occurred while trying to create a copy " +
original.getClass(), e);
}
return copy;
}
/**
* Create a copy of Writable object, by serializing and deserializing it.
*
* @param original Original value of which to make a copy
* @return Copy of the original value
* @param <T> Type of the object
*/
public static final <T extends Writable> T createCopy(T original) {
return (T) createCopy(original, original.getClass(), null);
}
/**
* Create a copy of Writable object, by serializing and deserializing it.
*
* @param original Original value of which to make a copy
* @param outputClass Expected copy class, needs to match original
* @param conf Configuration
* @return Copy of the original value
* @param <T> Type of the object
*/
public static final <T extends Writable>
T createCopy(T original, Class<? extends T> outputClass,
ImmutableClassesGiraphConfiguration conf) {
T result = WritableUtils.createWritable(outputClass, conf);
copyInto(original, result);
return result;
}
/**
* Create a copy of Writable object, by serializing and deserializing it.
*
* @param original Original value of which to make a copy
* @param classFactory Factory to create new empty object from
* @param conf Configuration
* @return Copy of the original value
* @param <T> Type of the object
*/
public static final <T extends Writable>
T createCopy(T original, ValueFactory<T> classFactory,
ImmutableClassesGiraphConfiguration conf) {
T result = classFactory.newInstance();
copyInto(original, result);
return result;
}
/**
* Serialize the given writable object and return its size.
*
* @param w Writable object
* @return its size after serialization
*/
public static int size(Writable w) {
try {
ExtendedByteArrayDataOutput out = new ExtendedByteArrayDataOutput();
w.write(out);
return out.getPos();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
* Serialize given writable to byte array,
* using new instance of ExtendedByteArrayDataOutput.
*
* @param w Writable object
* @return array of bytes
* @param <T> Type of the object
*/
public static <T extends Writable> byte[] toByteArray(T w) {
try {
ExtendedByteArrayDataOutput out = new ExtendedByteArrayDataOutput();
w.write(out);
return out.toByteArray();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
* Deserialize from given byte array into given writable,
* using new instance of ExtendedByteArrayDataInput.
*
* @param data Byte array representing writable
* @param to Object to fill
* @param <T> Type of the object
*/
public static <T extends Writable> void fromByteArray(byte[] data, T to) {
try {
ExtendedByteArrayDataInput in =
new ExtendedByteArrayDataInput(data, 0, data.length);
to.readFields(in);
if (in.available() != 0) {
throw new RuntimeException(
"Serialization encountered issues, " + in.available() +
" bytes left to be read");
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
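// Illustrative round-trip through a byte array, assuming value and target are
// two instances of the same Writable implementation:
//
//   byte[] data = WritableUtils.toByteArray(value);
//   WritableUtils.fromByteArray(data, target); // target now mirrors value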
/**
* Serialize given writable to byte array,
* using new instance of UnsafeByteArrayOutputStream.
*
* @param w Writable object
* @return array of bytes
* @param <T> Type of the object
*/
public static <T extends Writable> byte[] toByteArrayUnsafe(T w) {
try {
UnsafeByteArrayOutputStream out = new UnsafeByteArrayOutputStream();
w.write(out);
return out.toByteArray();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
* Deserialize from given byte array into given writable,
* using given reusable UnsafeReusableByteArrayInput.
*
* @param data Byte array representing writable
* @param to Object to fill
* @param reusableInput Reusable input to use
* @param <T> Type of the object
*/
public static <T extends Writable> void fromByteArrayUnsafe(
byte[] data, T to, UnsafeReusableByteArrayInput reusableInput) {
try {
reusableInput.initialize(data, 0, data.length);
to.readFields(reusableInput);
if (reusableInput.available() != 0) {
throw new RuntimeException(
"Serialization encountered issues, " + reusableInput.available() +
" bytes left to be read");
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
* First write a boolean saying whether the object is non-null,
* and if so, write the object
*
* @param object Object to write
* @param out DataOutput to write to
* @param <T> Object type
*/
public static <T extends Writable> void writeIfNotNullAndObject(T object,
DataOutput out) throws IOException {
out.writeBoolean(object != null);
if (object != null) {
object.write(out);
}
}
/**
* First read a boolean saying whether the object is non-null,
* and if so, read the object
*
* @param reusableObject Reuse this object instance
* @param objectClass Class of the object, to create if reusableObject is null
* @param in DataInput to read from
* @param <T> Object type
* @return Object, or null
*/
public static <T extends Writable> T readIfNotNullAndObject(T reusableObject,
Class<T> objectClass, DataInput in) throws IOException {
if (in.readBoolean()) {
if (reusableObject == null) {
reusableObject = ReflectionUtils.newInstance(objectClass);
}
reusableObject.readFields(in);
return reusableObject;
} else {
return null;
}
}
}
|
Correct typo in word "initialize"
Author: KidEinstein
Reviewer: edunov
Closes #10
|
giraph-core/src/main/java/org/apache/giraph/utils/WritableUtils.java
|
Correct typo in word "initialize"
|
<ide><path>giraph-core/src/main/java/org/apache/giraph/utils/WritableUtils.java
<ide> }
<ide>
<ide> /**
<del> * Reads data from input stream to inizialize Vertex. Assumes the vertex has
<add> * Reads data from input stream to initialize Vertex. Assumes the vertex has
<ide> * already been initialized and contains values for Id, value, and edges.
<ide> *
<ide> * @param input The input stream
|
|
JavaScript
|
cc0-1.0
|
43b093ee6ee3857505aae3735035c603de8e90ff
| 0 |
stvnrlly/continua11y,stvnrlly/continua11y,stvnrlly/continua11y
|
var models = require('../models');
exports.get = function (req, res){
models.Repo.findOne({
where: {
repoName: req.params.account+'/'+req.params.repo,
},
order: [['updatedAt', 'DESC']]
}).then(function (repo) {
if (repo) {
models.Commit.findAll({
where: {
repo: repo.repo
}
}).then(function (commits) {
res.send({
status: 'success',
results: commits
});
});
} else {
res.send({
status: 'error',
error: 'no such repo'
});
}
});
};
|
routes/api.js
|
var models = require('../models');
exports.get = function (req, res){
models.Repo.findOne({
where: {
repoName: req.params.account+'/'+req.params.repo,
},
order: [['updatedAt', 'DESC']]
}).then(function (repo) {
models.Commit.findAll({
where: {
repo: repo.repo
}
}).then(function (commits) {
res.send({results: commits});
});
});
};
|
improve api responses
|
routes/api.js
|
improve api responses
|
<ide><path>routes/api.js
<ide> },
<ide> order: [['updatedAt', 'DESC']]
<ide> }).then(function (repo) {
<del> models.Commit.findAll({
<del> where: {
<del> repo: repo.repo
<del> }
<del> }).then(function (commits) {
<del> res.send({results: commits});
<del> });
<add> if (repo) {
<add> models.Commit.findAll({
<add> where: {
<add> repo: repo.repo
<add> }
<add> }).then(function (commits) {
<add> res.send({
<add> status: 'success',
<add> results: commits
<add> });
<add> });
<add> } else {
<add> res.send({
<add> status: 'error',
<add> error: 'no such repo'
<add> });
<add> }
<ide> });
<ide> };
|
|
Java
|
bsd-2-clause
|
fc9a6169330edeeef3f163134a7332740d972bd9
| 0 |
TehSAUCE/imagej,TehSAUCE/imagej,biovoxxel/imagej,TehSAUCE/imagej,biovoxxel/imagej,biovoxxel/imagej
|
/*
* #%L
* ImageJ software for multidimensional image processing and analysis.
* %%
* Copyright (C) 2009 - 2012 Board of Regents of the University of
* Wisconsin-Madison, Broad Institute of MIT and Harvard, and Max Planck
* Institute of Molecular Cell Biology and Genetics.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are
* those of the authors and should not be interpreted as representing official
* policies, either expressed or implied, of any organization.
* #L%
*/
package imagej.updater.core;
import imagej.log.LogService;
import imagej.updater.core.FileObject.Action;
import imagej.updater.core.FileObject.Status;
import imagej.updater.util.Canceled;
import imagej.updater.util.DependencyAnalyzer;
import imagej.updater.util.Progress;
import imagej.updater.util.StderrLogService;
import imagej.updater.util.Util;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.zip.GZIPOutputStream;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerConfigurationException;
import org.xml.sax.SAXException;
/**
* TODO
*
* @author Johannes Schindelin
*/
@SuppressWarnings("serial")
public class FilesCollection extends LinkedHashMap<String, FileObject>
implements Iterable<FileObject>
{
public final static String DEFAULT_UPDATE_SITE = "ImageJ";
protected File imagejRoot;
public LogService log;
protected Set<FileObject> ignoredConflicts = new HashSet<FileObject>();
public static class UpdateSite implements Cloneable, Comparable<UpdateSite> {
public String url, sshHost, uploadDirectory;
public long timestamp;
public int rank;
public UpdateSite(String url, final String sshHost, String uploadDirectory,
final long timestamp)
{
if (!url.endsWith("/")) url += "/";
if (uploadDirectory != null && !uploadDirectory.equals("") &&
!uploadDirectory.endsWith("/")) uploadDirectory += "/";
this.url = url;
this.sshHost = sshHost;
this.uploadDirectory = uploadDirectory;
this.timestamp = timestamp;
}
@Override
public Object clone() {
return new UpdateSite(url, sshHost, uploadDirectory, timestamp);
}
public boolean isLastModified(final long lastModified) {
return timestamp == Long.parseLong(Util.timestamp(lastModified));
}
public void setLastModified(final long lastModified) {
timestamp = Long.parseLong(Util.timestamp(lastModified));
}
public boolean isUploadable() {
return uploadDirectory != null && !uploadDirectory.equals("");
}
@Override
public String toString() {
return url + (sshHost != null ? ", " + sshHost : "") +
(uploadDirectory != null ? ", " + uploadDirectory : "");
}
@Override
public int compareTo(UpdateSite other) {
return rank - other.rank;
}
@Override
public boolean equals(Object other) {
if (other instanceof UpdateSite)
return rank == ((UpdateSite)other).rank;
return false;
}
@Override
public int hashCode() {
return rank;
}
}
private Map<String, UpdateSite> updateSites;
/**
* This constructor takes the imagejRoot primarily for testing purposes.
*
* @param imagejRoot the ImageJ directory
*/
public FilesCollection(final File imagejRoot) {
this(new StderrLogService(), imagejRoot);
}
/**
* This constructor takes the imagejRoot primarily for testing purposes.
*
* @param imagejRoot the ImageJ directory
*/
public FilesCollection(final LogService log, final File imagejRoot) {
this.log = log;
this.imagejRoot = imagejRoot;
updateSites = new LinkedHashMap<String, UpdateSite>();
addUpdateSite(DEFAULT_UPDATE_SITE, Util.MAIN_URL, null, null,
imagejRoot == null ? 0 : Util.getTimestamp(prefix(Util.XML_COMPRESSED)));
}
public void addUpdateSite(final String name, final String url,
final String sshHost, final String uploadDirectory, final long timestamp)
{
addUpdateSite(name, new UpdateSite(url, sshHost, uploadDirectory,
timestamp));
}
protected void addUpdateSite(final String name, final UpdateSite updateSite) {
UpdateSite already = updateSites.get(name);
updateSite.rank = already != null ? already.rank : updateSites.size();
updateSites.put(name, updateSite);
}
public void renameUpdateSite(final String oldName, final String newName) {
if (getUpdateSite(newName) != null) throw new RuntimeException(
"Update site " + newName + " exists already!");
if (getUpdateSite(oldName) == null) throw new RuntimeException(
"Update site " + oldName + " does not exist!");
// handle all files
for (final FileObject file : this)
if (file.updateSite.equals(oldName)) file.updateSite = newName;
// preserve order
final Map<String, UpdateSite> oldMap = updateSites;
updateSites = new LinkedHashMap<String, UpdateSite>();
for (final String name : oldMap.keySet()) {
addUpdateSite(name.equals(oldName) ? newName : name, oldMap.get(name));
}
}
public void removeUpdateSite(final String name) throws IOException {
// TODO: remove NOT_INSTALLED files, mark others LOCAL_ONLY
Set<String> toReRead = new HashSet<String>();
for (final FileObject file : forUpdateSite(name)) {
toReRead.addAll(file.overriddenUpdateSites);
if (file.getStatus() == Status.NOT_INSTALLED) {
remove(file);
}
else {
file.setStatus(Status.LOCAL_ONLY);
file.updateSite = null;
}
}
updateSites.remove(name);
// re-read the overridden sites
// no need to sort, the XMLFileReader will only override data from higher-ranked sites
new XMLFileDownloader(this, toReRead).start();
// update rank
int counter = 1;
for (final Map.Entry<String, UpdateSite> entry : updateSites.entrySet()) {
entry.getValue().rank = counter++;
}
}
public UpdateSite getUpdateSite(final String name) {
if (name == null) return null;
return updateSites.get(name);
}
public Collection<String> getUpdateSiteNames() {
return updateSites.keySet();
}
public Collection<String> getSiteNamesToUpload() {
final Collection<String> set = new HashSet<String>();
for (final FileObject file : toUpload(true))
set.add(file.updateSite);
for (final FileObject file : toRemove())
set.add(file.updateSite);
// keep the update sites' order
final List<String> result = new ArrayList<String>();
for (final String name : getUpdateSiteNames())
if (set.contains(name)) result.add(name);
if (result.size() != set.size()) throw new RuntimeException(
"Unknown update site in " + set.toString() + " (known: " +
result.toString() + ")");
return result;
}
public boolean hasUploadableSites() {
for (final String name : updateSites.keySet())
if (getUpdateSite(name).isUploadable()) return true;
return false;
}
public void reReadUpdateSite(final String name, final Progress progress) throws ParserConfigurationException, IOException, SAXException {
new XMLFileReader(this).read(name);
final List<String> filesFromSite = new ArrayList<String>();
for (final FileObject file : forUpdateSite(name))
filesFromSite.add(file.localFilename != null ? file.localFilename : file.filename);
final Checksummer checksummer =
new Checksummer(this, progress);
checksummer.updateFromLocal(filesFromSite);
}
public Action[] getActions(final FileObject file) {
return file.isUploadable(this) ? file.getStatus().getDeveloperActions()
: file.getStatus().getActions();
}
public Action[] getActions(final Iterable<FileObject> files) {
List<Action> result = null;
for (final FileObject file : files) {
final Action[] actions = getActions(file);
if (result == null) {
result = new ArrayList<Action>();
for (final Action action : actions)
result.add(action);
}
else {
final Set<Action> set = new TreeSet<Action>();
for (final Action action : actions)
set.add(action);
final Iterator<Action> iter = result.iterator();
while (iter.hasNext())
if (!set.contains(iter.next())) iter.remove();
}
}
return result.toArray(new Action[result.size()]);
}
public void read() throws IOException, ParserConfigurationException,
SAXException
{
read(prefix(Util.XML_COMPRESSED));
}
public void read(final File file) throws IOException,
ParserConfigurationException, SAXException
{
read(new FileInputStream(file));
}
public void read(final FileInputStream in) throws IOException,
ParserConfigurationException, SAXException
{
new XMLFileReader(this).read(in);
}
public void write() throws IOException, SAXException,
TransformerConfigurationException, ParserConfigurationException
{
new XMLFileWriter(this).write(new GZIPOutputStream(new FileOutputStream(
prefix(Util.XML_COMPRESSED))), true);
}
protected static DependencyAnalyzer dependencyAnalyzer;
public interface Filter {
boolean matches(FileObject file);
}
public FilesCollection clone(final Iterable<FileObject> iterable) {
final FilesCollection result = new FilesCollection(imagejRoot);
for (final FileObject file : iterable)
result.add(file);
for (final String name : updateSites.keySet())
result.updateSites.put(name, (UpdateSite) updateSites.get(name).clone());
return result;
}
public Iterable<FileObject> toUploadOrRemove() {
return filter(or(is(Action.UPLOAD), is(Action.REMOVE)));
}
public Iterable<FileObject> toUpload() {
return toUpload(false);
}
public Iterable<FileObject> toUpload(final boolean includeMetadataChanges) {
if (!includeMetadataChanges) return filter(is(Action.UPLOAD));
return filter(or(is(Action.UPLOAD), new Filter() {
@Override
public boolean matches(final FileObject file) {
return file.metadataChanged;
}
}));
}
public Iterable<FileObject> toUpload(final String updateSite) {
return filter(and(is(Action.UPLOAD), isUpdateSite(updateSite)));
}
public Iterable<FileObject> toUninstall() {
return filter(is(Action.UNINSTALL));
}
public Iterable<FileObject> toRemove() {
return filter(is(Action.REMOVE));
}
public Iterable<FileObject> toUpdate() {
return filter(is(Action.UPDATE));
}
public Iterable<FileObject> upToDate() {
return filter(is(Action.INSTALLED));
}
public Iterable<FileObject> toInstall() {
return filter(is(Action.INSTALL));
}
public Iterable<FileObject> toInstallOrUpdate() {
return filter(oneOf(Action.INSTALL, Action.UPDATE));
}
public Iterable<FileObject> notHidden() {
return filter(and(not(is(Status.OBSOLETE_UNINSTALLED)), doesPlatformMatch()));
}
public Iterable<FileObject> uninstalled() {
return filter(is(Status.NOT_INSTALLED));
}
public Iterable<FileObject> installed() {
return filter(not(oneOf(Status.LOCAL_ONLY,
Status.NOT_INSTALLED)));
}
public Iterable<FileObject> locallyModified() {
return filter(oneOf(Status.MODIFIED,
Status.OBSOLETE_MODIFIED));
}
public Iterable<FileObject> forUpdateSite(final String name) {
return filter(and(not(is(Status.OBSOLETE_UNINSTALLED)), and(doesPlatformMatch(), isUpdateSite(name))));
}
public Iterable<FileObject> managedFiles() {
return filter(not(is(Status.LOCAL_ONLY)));
}
public Iterable<FileObject> localOnly() {
return filter(is(Status.LOCAL_ONLY));
}
public Iterable<FileObject> shownByDefault() {
/*
* Let's not show the NOT_INSTALLED ones, as the user chose not
* to have them.
*/
final Status[] oneOf =
{ Status.UPDATEABLE, Status.NEW, Status.OBSOLETE,
Status.OBSOLETE_MODIFIED };
return filter(or(oneOf(oneOf), is(Action.INSTALL)));
}
public Iterable<FileObject> uploadable() {
return filter(new Filter() {
@Override
public boolean matches(final FileObject file) {
return file.isUploadable(FilesCollection.this);
}
});
}
public Iterable<FileObject> changes() {
return filter(new Filter() {
@Override
public boolean matches(final FileObject file) {
return file.getAction() != file.getStatus().getActions()[0];
}
});
}
public static class FilteredIterator implements Iterator<FileObject> {
Filter filter;
boolean opposite;
Iterator<FileObject> iterator;
FileObject next;
FilteredIterator(final Filter filter, final Iterable<FileObject> files) {
this.filter = filter;
iterator = files.iterator();
findNext();
}
@Override
public boolean hasNext() {
return next != null;
}
@Override
public FileObject next() {
final FileObject file = next;
findNext();
return file;
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
protected void findNext() {
while (iterator.hasNext()) {
next = iterator.next();
if (filter.matches(next)) return;
}
next = null;
}
}
public static Iterable<FileObject> filter(final Filter filter,
final Iterable<FileObject> files)
{
return new Iterable<FileObject>() {
@Override
public Iterator<FileObject> iterator() {
return new FilteredIterator(filter, files);
}
};
}
public static Iterable<FileObject> filter(final String search,
final Iterable<FileObject> files)
{
final String keyword = search.trim().toLowerCase();
return filter(new Filter() {
@Override
public boolean matches(final FileObject file) {
return file.getFilename().trim().toLowerCase().indexOf(keyword) >= 0;
}
}, files);
}
public Filter yes() {
return new Filter() {
@Override
public boolean matches(final FileObject file) {
return true;
}
};
}
public Filter doesPlatformMatch() {
// If we're a developer or no platform was specified, return yes
if (hasUploadableSites()) return yes();
return new Filter() {
@Override
public boolean matches(final FileObject file) {
return file.isUpdateablePlatform();
}
};
}
public Filter is(final Action action) {
return new Filter() {
@Override
public boolean matches(final FileObject file) {
return file.getAction() == action;
}
};
}
public Filter isNoAction() {
return new Filter() {
@Override
public boolean matches(final FileObject file) {
return file.getAction() == file.getStatus().getNoAction();
}
};
}
public Filter oneOf(final Action... actions) {
final Set<Action> oneOf = new HashSet<Action>();
for (final Action action : actions)
oneOf.add(action);
return new Filter() {
@Override
public boolean matches(final FileObject file) {
return oneOf.contains(file.getAction());
}
};
}
public Filter is(final Status status) {
return new Filter() {
@Override
public boolean matches(final FileObject file) {
return file.getStatus() == status;
}
};
}
public Filter isUpdateSite(final String updateSite) {
return new Filter() {
@Override
public boolean matches(final FileObject file) {
return file.updateSite != null && // is null for local-only files
file.updateSite.equals(updateSite);
}
};
}
public Filter oneOf(final Status... states) {
final Set<Status> oneOf = new HashSet<Status>();
for (final Status status : states)
oneOf.add(status);
return new Filter() {
@Override
public boolean matches(final FileObject file) {
return oneOf.contains(file.getStatus());
}
};
}
public Filter startsWith(final String prefix) {
return new Filter() {
@Override
public boolean matches(final FileObject file) {
return file.filename.startsWith(prefix);
}
};
}
public Filter startsWith(final String... prefixes) {
return new Filter() {
@Override
public boolean matches(final FileObject file) {
for (final String prefix : prefixes)
if (file.filename.startsWith(prefix)) return true;
return false;
}
};
}
public Filter endsWith(final String suffix) {
return new Filter() {
@Override
public boolean matches(final FileObject file) {
return file.filename.endsWith(suffix);
}
};
}
public Filter not(final Filter filter) {
return new Filter() {
@Override
public boolean matches(final FileObject file) {
return !filter.matches(file);
}
};
}
public Filter or(final Filter a, final Filter b) {
return new Filter() {
@Override
public boolean matches(final FileObject file) {
return a.matches(file) || b.matches(file);
}
};
}
public Filter and(final Filter a, final Filter b) {
return new Filter() {
@Override
public boolean matches(final FileObject file) {
return a.matches(file) && b.matches(file);
}
};
}
public Iterable<FileObject> filter(final Filter filter) {
return filter(filter, this);
}
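// Illustrative sketch: the Filter factory methods above compose, so code in
// this class could, for example, select locally modified files that belong to
// the default update site with
//
//   Iterable<FileObject> modifiedDefaults =
//       filter(and(is(Status.MODIFIED), isUpdateSite(DEFAULT_UPDATE_SITE)));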
public FileObject
getFileFromDigest(final String filename, final String digest)
{
for (final FileObject file : this)
if (file.getFilename().equals(filename) &&
file.getChecksum().equals(digest)) return file;
return null;
}
public Iterable<String> analyzeDependencies(final FileObject file) {
try {
if (dependencyAnalyzer == null) dependencyAnalyzer =
new DependencyAnalyzer(imagejRoot);
return dependencyAnalyzer.getDependencies(imagejRoot, file.getFilename());
}
catch (final IOException e) {
log.error(e);
return null;
}
}
public void updateDependencies(final FileObject file) {
final Iterable<String> dependencies = analyzeDependencies(file);
if (dependencies == null) return;
for (final String dependency : dependencies)
file.addDependency(dependency, prefix(dependency));
}
public boolean has(final Filter filter) {
for (final FileObject file : this)
if (filter.matches(file)) return true;
return false;
}
public boolean hasChanges() {
return has(not(isNoAction()));
}
public boolean hasUploadOrRemove() {
return has(oneOf(Action.UPLOAD, Action.REMOVE));
}
public boolean hasForcableUpdates() {
for (final FileObject file : updateable(true))
if (!file.isUpdateable(false)) return true;
return false;
}
public Iterable<FileObject> updateable(final boolean evenForcedOnes) {
return filter(new Filter() {
@Override
public boolean matches(final FileObject file) {
return file.isUpdateable(evenForcedOnes) && file.isUpdateablePlatform();
}
});
}
public void markForUpdate(final boolean evenForcedUpdates) {
for (final FileObject file : updateable(evenForcedUpdates)) {
file.setFirstValidAction(this, Action.UPDATE,
Action.UNINSTALL, Action.INSTALL);
}
}
public String getURL(final FileObject file) {
final String siteName = file.updateSite;
assert (siteName != null && !siteName.equals(""));
final UpdateSite site = getUpdateSite(siteName);
return site.url + file.filename.replace(" ", "%20") + "-" +
file.getTimestamp();
}
public static class DependencyMap extends
HashMap<FileObject, FilesCollection>
{
// returns true when the map did not have the dependency before
public boolean
add(final FileObject dependency, final FileObject dependencee)
{
if (containsKey(dependency)) {
get(dependency).add(dependencee);
return false;
}
final FilesCollection list = new FilesCollection(null);
list.add(dependencee);
put(dependency, list);
return true;
}
}
// TODO: for developers, there should be a consistency check:
// no dependencies on local-only files, no circular dependencies,
// and no overriding circular dependencies.
void addDependencies(final FileObject file, final DependencyMap map,
final boolean overriding)
{
for (final Dependency dependency : file.getDependencies()) {
final FileObject other = get(dependency.filename);
if (other == null || overriding != dependency.overrides ||
!other.isUpdateablePlatform()) continue;
if (dependency.overrides) {
if (other.willNotBeInstalled()) continue;
}
else if (other.willBeUpToDate()) continue;
if (!map.add(other, file)) continue;
// overriding dependencies are not recursive
if (!overriding) addDependencies(other, map, overriding);
}
}
public DependencyMap getDependencies(final boolean overridingOnes) {
final DependencyMap result = new DependencyMap();
for (final FileObject file : toInstallOrUpdate())
addDependencies(file, result, overridingOnes);
return result;
}
public void sort() {
// first letters in this order: 'C', 'I', 'f', 'p', 'j', 's', 'i', 'm', 'l',
// 'r'
final ArrayList<FileObject> files = new ArrayList<FileObject>();
for (final FileObject file : this) {
files.add(file);
}
Collections.sort(files, new Comparator<FileObject>() {
@Override
public int compare(final FileObject a, final FileObject b) {
final int result = firstChar(a) - firstChar(b);
return result != 0 ? result : a.filename.compareTo(b.filename);
}
int firstChar(final FileObject file) {
final char c = file.filename.charAt(0);
final int index = "CIfpjsim".indexOf(c);
return index < 0 ? 0x200 + c : index;
}
});
this.clear();
for (final FileObject file : files) {
super.put(file.filename, file);
}
}
String checkForCircularDependency(final FileObject file,
final Set<FileObject> seen)
{
if (seen.contains(file)) return "";
final String result =
checkForCircularDependency(file, seen, new HashSet<FileObject>());
if (result == null) return "";
// Display only the circular dependency
final int last = result.lastIndexOf(' ');
final int off = result.lastIndexOf(result.substring(last), last - 1);
return "Circular dependency detected: " + result.substring(off + 1) + "\n";
}
String checkForCircularDependency(final FileObject file,
final Set<FileObject> seen, final Set<FileObject> chain)
{
if (seen.contains(file)) return null;
for (final String dependency : file.dependencies.keySet()) {
final FileObject dep = get(dependency);
if (dep == null) continue;
if (chain.contains(dep)) return " " + dependency;
chain.add(dep);
final String result = checkForCircularDependency(dep, seen, chain);
seen.add(dep);
if (result != null) return " " + dependency + " ->" + result;
chain.remove(dep);
}
return null;
}
/* returns null if consistent, error string when not */
public String checkConsistency() {
final StringBuilder result = new StringBuilder();
final Set<FileObject> circularChecked = new HashSet<FileObject>();
for (final FileObject file : this) {
result.append(checkForCircularDependency(file, circularChecked));
// only non-obsolete components can have dependencies
final Set<String> deps = file.dependencies.keySet();
if (deps.size() > 0 && file.isObsolete()) result.append("Obsolete file " +
file + "has dependencies: " + Util.join(", ", deps) + "!\n");
for (final String dependency : deps) {
final FileObject dep = get(dependency);
if (dep == null || dep.current == null) result.append("The file " +
file + " has the obsolete/local-only " + "dependency " + dependency +
"!\n");
}
}
return result.length() > 0 ? result.toString() : null;
}
public File prefix(final FileObject file) {
return prefix(file.getFilename());
}
public File prefix(final String path) {
final File file = new File(path);
if (file.isAbsolute()) return file;
assert (imagejRoot != null);
return new File(imagejRoot, path);
}
public File prefixUpdate(final String path) {
return prefix("update/" + path);
}
public boolean fileExists(final String filename) {
return prefix(filename).exists();
}
@Override
public String toString() {
return Util.join(", ", this);
}
public FileObject get(final int index) {
throw new UnsupportedOperationException();
}
public void add(final FileObject file) {
super.put(file.getFilename(true), file);
}
@Override
public FileObject get(final Object filename) {
return super.get(FileObject.getFilename((String)filename, true));
}
@Override
public FileObject put(final String key, final FileObject file) {
throw new UnsupportedOperationException();
}
@Override
public Iterator<FileObject> iterator() {
final Iterator<Map.Entry<String, FileObject>> iterator = entrySet().iterator();
return new Iterator<FileObject>() {
@Override
public boolean hasNext() {
return iterator.hasNext();
}
@Override
public FileObject next() {
return iterator.next().getValue();
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
public String downloadIndexAndChecksum(final Progress progress) throws IOException, ParserConfigurationException, SAXException {
try {
read();
}
catch (final FileNotFoundException e) { /* ignore */}
final XMLFileDownloader downloader = new XMLFileDownloader(this);
downloader.addProgress(progress);
try {
downloader.start(false);
} catch (final Canceled e) {
downloader.done();
throw e;
}
new Checksummer(this, progress).updateFromLocal();
return downloader.getWarnings();
}
}
|
core/updater/core/src/main/java/imagej/updater/core/FilesCollection.java
|
/*
* #%L
* ImageJ software for multidimensional image processing and analysis.
* %%
* Copyright (C) 2009 - 2012 Board of Regents of the University of
* Wisconsin-Madison, Broad Institute of MIT and Harvard, and Max Planck
* Institute of Molecular Cell Biology and Genetics.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are
* those of the authors and should not be interpreted as representing official
* policies, either expressed or implied, of any organization.
* #L%
*/
package imagej.updater.core;
import imagej.log.LogService;
import imagej.updater.core.FileObject.Action;
import imagej.updater.core.FileObject.Status;
import imagej.updater.util.Canceled;
import imagej.updater.util.DependencyAnalyzer;
import imagej.updater.util.Progress;
import imagej.updater.util.StderrLogService;
import imagej.updater.util.Util;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.zip.GZIPOutputStream;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerConfigurationException;
import org.xml.sax.SAXException;
/**
* TODO
*
* @author Johannes Schindelin
*/
@SuppressWarnings("serial")
public class FilesCollection extends LinkedHashMap<String, FileObject>
implements Iterable<FileObject>
{
public final static String DEFAULT_UPDATE_SITE = "ImageJ";
protected File imagejRoot;
public LogService log;
protected Set<FileObject> ignoredConflicts = new HashSet<FileObject>();
public static class UpdateSite implements Cloneable, Comparable<UpdateSite> {
public String url, sshHost, uploadDirectory;
public long timestamp;
public int rank;
public UpdateSite(String url, final String sshHost, String uploadDirectory,
final long timestamp)
{
if (!url.endsWith("/")) url += "/";
if (uploadDirectory != null && !uploadDirectory.equals("") &&
!uploadDirectory.endsWith("/")) uploadDirectory += "/";
this.url = url;
this.sshHost = sshHost;
this.uploadDirectory = uploadDirectory;
this.timestamp = timestamp;
}
@Override
public Object clone() {
return new UpdateSite(url, sshHost, uploadDirectory, timestamp);
}
public boolean isLastModified(final long lastModified) {
return timestamp == Long.parseLong(Util.timestamp(lastModified));
}
public void setLastModified(final long lastModified) {
timestamp = Long.parseLong(Util.timestamp(lastModified));
}
public boolean isUploadable() {
return uploadDirectory != null && !uploadDirectory.equals("");
}
@Override
public String toString() {
return url + (sshHost != null ? ", " + sshHost : "") +
(uploadDirectory != null ? ", " + uploadDirectory : "");
}
@Override
public int compareTo(UpdateSite other) {
return rank - other.rank;
}
@Override
public boolean equals(Object other) {
if (other instanceof UpdateSite)
return rank == ((UpdateSite)other).rank;
return false;
}
@Override
public int hashCode() {
return rank;
}
}
private Map<String, UpdateSite> updateSites;
/**
* This constructor takes the imagejRoot primarily for testing purposes.
*
* @param imagejRoot the ImageJ directory
*/
public FilesCollection(final File imagejRoot) {
this(new StderrLogService(), imagejRoot);
}
/**
* This constructor takes the imagejRoot primarily for testing purposes.
*
* @param imagejRoot the ImageJ directory
*/
public FilesCollection(final LogService log, final File imagejRoot) {
this.log = log;
this.imagejRoot = imagejRoot;
updateSites = new LinkedHashMap<String, UpdateSite>();
addUpdateSite(DEFAULT_UPDATE_SITE, Util.MAIN_URL, null, null,
imagejRoot == null ? 0 : Util.getTimestamp(prefix(Util.XML_COMPRESSED)));
}
public void addUpdateSite(final String name, final String url,
final String sshHost, final String uploadDirectory, final long timestamp)
{
addUpdateSite(name, new UpdateSite(url, sshHost, uploadDirectory,
timestamp));
}
protected void addUpdateSite(final String name, final UpdateSite updateSite) {
UpdateSite already = updateSites.get(name);
updateSite.rank = already != null ? already.rank : updateSites.size();
updateSites.put(name, updateSite);
}
public void renameUpdateSite(final String oldName, final String newName) {
if (getUpdateSite(newName) != null) throw new RuntimeException(
"Update site " + newName + " exists already!");
if (getUpdateSite(oldName) == null) throw new RuntimeException(
"Update site " + oldName + " does not exist!");
// handle all files
for (final FileObject file : this)
if (file.updateSite.equals(oldName)) file.updateSite = newName;
// preserve order
final Map<String, UpdateSite> oldMap = updateSites;
updateSites = new LinkedHashMap<String, UpdateSite>();
for (final String name : oldMap.keySet()) {
addUpdateSite(name.equals(oldName) ? newName : name, oldMap.get(name));
}
}
public void removeUpdateSite(final String name) throws IOException {
// TODO: remove NOT_INSTALLED files, mark others LOCAL_ONLY
Set<String> toReRead = new HashSet<String>();
for (final FileObject file : forUpdateSite(name)) {
toReRead.addAll(file.overriddenUpdateSites);
if (file.getStatus() == Status.NOT_INSTALLED) {
remove(file);
}
else {
file.setStatus(Status.LOCAL_ONLY);
file.updateSite = null;
}
}
updateSites.remove(name);
// re-read the overridden sites
// no need to sort, the XMLFileReader will only override data from higher-ranked sites
new XMLFileDownloader(this, toReRead).start();
// update rank
int counter = 1;
for (final Map.Entry<String, UpdateSite> entry : updateSites.entrySet()) {
entry.getValue().rank = counter++;
}
}
public UpdateSite getUpdateSite(final String name) {
if (name == null) return null;
return updateSites.get(name);
}
public Collection<String> getUpdateSiteNames() {
return updateSites.keySet();
}
public Collection<String> getSiteNamesToUpload() {
final Collection<String> set = new HashSet<String>();
for (final FileObject file : toUpload(true))
set.add(file.updateSite);
for (final FileObject file : toRemove())
set.add(file.updateSite);
// keep the update sites' order
final List<String> result = new ArrayList<String>();
for (final String name : getUpdateSiteNames())
if (set.contains(name)) result.add(name);
if (result.size() != set.size()) throw new RuntimeException(
"Unknown update site in " + set.toString() + " (known: " +
result.toString() + ")");
return result;
}
public boolean hasUploadableSites() {
for (final String name : updateSites.keySet())
if (getUpdateSite(name).isUploadable()) return true;
return false;
}
public void reReadUpdateSite(final String name, final Progress progress) throws ParserConfigurationException, IOException, SAXException {
new XMLFileReader(this).read(name);
final List<String> filesFromSite = new ArrayList<String>();
for (final FileObject file : forUpdateSite(name))
filesFromSite.add(file.localFilename != null ? file.localFilename : file.filename);
final Checksummer checksummer =
new Checksummer(this, progress);
checksummer.updateFromLocal(filesFromSite);
}
public Action[] getActions(final FileObject file) {
return file.isUploadable(this) ? file.getStatus().getDeveloperActions()
: file.getStatus().getActions();
}
public Action[] getActions(final Iterable<FileObject> files) {
List<Action> result = null;
for (final FileObject file : files) {
final Action[] actions = getActions(file);
if (result == null) {
result = new ArrayList<Action>();
for (final Action action : actions)
result.add(action);
}
else {
final Set<Action> set = new TreeSet<Action>();
for (final Action action : actions)
set.add(action);
final Iterator<Action> iter = result.iterator();
while (iter.hasNext())
if (!set.contains(iter.next())) iter.remove();
}
}
return result.toArray(new Action[result.size()]);
}
public void read() throws IOException, ParserConfigurationException,
SAXException
{
read(prefix(Util.XML_COMPRESSED));
}
public void read(final File file) throws IOException,
ParserConfigurationException, SAXException
{
read(new FileInputStream(file));
}
public void read(final FileInputStream in) throws IOException,
ParserConfigurationException, SAXException
{
new XMLFileReader(this).read(in);
}
public void write() throws IOException, SAXException,
TransformerConfigurationException, ParserConfigurationException
{
new XMLFileWriter(this).write(new GZIPOutputStream(new FileOutputStream(
prefix(Util.XML_COMPRESSED))), true);
}
protected static DependencyAnalyzer dependencyAnalyzer;
public interface Filter {
boolean matches(FileObject file);
}
public FilesCollection clone(final Iterable<FileObject> iterable) {
final FilesCollection result = new FilesCollection(imagejRoot);
for (final FileObject file : iterable)
result.add(file);
for (final String name : updateSites.keySet())
result.updateSites.put(name, (UpdateSite) updateSites.get(name).clone());
return result;
}
public Iterable<FileObject> toUploadOrRemove() {
return filter(or(is(Action.UPLOAD), is(Action.REMOVE)));
}
public Iterable<FileObject> toUpload() {
return toUpload(false);
}
public Iterable<FileObject> toUpload(final boolean includeMetadataChanges) {
if (!includeMetadataChanges) return filter(is(Action.UPLOAD));
return filter(or(is(Action.UPLOAD), new Filter() {
@Override
public boolean matches(final FileObject file) {
return file.metadataChanged;
}
}));
}
public Iterable<FileObject> toUpload(final String updateSite) {
return filter(and(is(Action.UPLOAD), isUpdateSite(updateSite)));
}
public Iterable<FileObject> toUninstall() {
return filter(is(Action.UNINSTALL));
}
public Iterable<FileObject> toRemove() {
return filter(is(Action.REMOVE));
}
public Iterable<FileObject> toUpdate() {
return filter(is(Action.UPDATE));
}
public Iterable<FileObject> upToDate() {
return filter(is(Action.INSTALLED));
}
public Iterable<FileObject> toInstall() {
return filter(is(Action.INSTALL));
}
public Iterable<FileObject> toInstallOrUpdate() {
return filter(oneOf(Action.INSTALL, Action.UPDATE));
}
public Iterable<FileObject> notHidden() {
return filter(and(not(is(Status.OBSOLETE_UNINSTALLED)), doesPlatformMatch()));
}
public Iterable<FileObject> uninstalled() {
return filter(is(Status.NOT_INSTALLED));
}
public Iterable<FileObject> installed() {
return filter(not(oneOf(Status.LOCAL_ONLY,
Status.NOT_INSTALLED)));
}
public Iterable<FileObject> locallyModified() {
return filter(oneOf(Status.MODIFIED,
Status.OBSOLETE_MODIFIED));
}
public Iterable<FileObject> forUpdateSite(final String name) {
return filter(isUpdateSite(name));
}
public Iterable<FileObject> managedFiles() {
return filter(not(is(Status.LOCAL_ONLY)));
}
public Iterable<FileObject> localOnly() {
return filter(is(Status.LOCAL_ONLY));
}
public Iterable<FileObject> shownByDefault() {
/*
* Let's not show the NOT_INSTALLED ones, as the user chose not
* to have them.
*/
final Status[] oneOf =
{ Status.UPDATEABLE, Status.NEW, Status.OBSOLETE,
Status.OBSOLETE_MODIFIED };
return filter(or(oneOf(oneOf), is(Action.INSTALL)));
}
public Iterable<FileObject> uploadable() {
return filter(new Filter() {
@Override
public boolean matches(final FileObject file) {
return file.isUploadable(FilesCollection.this);
}
});
}
public Iterable<FileObject> changes() {
return filter(new Filter() {
@Override
public boolean matches(final FileObject file) {
return file.getAction() != file.getStatus().getActions()[0];
}
});
}
public static class FilteredIterator implements Iterator<FileObject> {
Filter filter;
boolean opposite;
Iterator<FileObject> iterator;
FileObject next;
FilteredIterator(final Filter filter, final Iterable<FileObject> files) {
this.filter = filter;
iterator = files.iterator();
findNext();
}
@Override
public boolean hasNext() {
return next != null;
}
@Override
public FileObject next() {
final FileObject file = next;
findNext();
return file;
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
protected void findNext() {
while (iterator.hasNext()) {
next = iterator.next();
if (filter.matches(next)) return;
}
next = null;
}
}
public static Iterable<FileObject> filter(final Filter filter,
final Iterable<FileObject> files)
{
return new Iterable<FileObject>() {
@Override
public Iterator<FileObject> iterator() {
return new FilteredIterator(filter, files);
}
};
}
public static Iterable<FileObject> filter(final String search,
final Iterable<FileObject> files)
{
final String keyword = search.trim().toLowerCase();
return filter(new Filter() {
@Override
public boolean matches(final FileObject file) {
return file.getFilename().trim().toLowerCase().indexOf(keyword) >= 0;
}
}, files);
}
public Filter yes() {
return new Filter() {
@Override
public boolean matches(final FileObject file) {
return true;
}
};
}
public Filter doesPlatformMatch() {
// If we're a developer or no platform was specified, return yes
if (hasUploadableSites()) return yes();
return new Filter() {
@Override
public boolean matches(final FileObject file) {
return file.isUpdateablePlatform();
}
};
}
public Filter is(final Action action) {
return new Filter() {
@Override
public boolean matches(final FileObject file) {
return file.getAction() == action;
}
};
}
public Filter isNoAction() {
return new Filter() {
@Override
public boolean matches(final FileObject file) {
return file.getAction() == file.getStatus().getNoAction();
}
};
}
public Filter oneOf(final Action... actions) {
final Set<Action> oneOf = new HashSet<Action>();
for (final Action action : actions)
oneOf.add(action);
return new Filter() {
@Override
public boolean matches(final FileObject file) {
return oneOf.contains(file.getAction());
}
};
}
public Filter is(final Status status) {
return new Filter() {
@Override
public boolean matches(final FileObject file) {
return file.getStatus() == status;
}
};
}
public Filter isUpdateSite(final String updateSite) {
return new Filter() {
@Override
public boolean matches(final FileObject file) {
return file.updateSite != null && // is null for local-only files
file.updateSite.equals(updateSite);
}
};
}
public Filter oneOf(final Status... states) {
final Set<Status> oneOf = new HashSet<Status>();
for (final Status status : states)
oneOf.add(status);
return new Filter() {
@Override
public boolean matches(final FileObject file) {
return oneOf.contains(file.getStatus());
}
};
}
public Filter startsWith(final String prefix) {
return new Filter() {
@Override
public boolean matches(final FileObject file) {
return file.filename.startsWith(prefix);
}
};
}
public Filter startsWith(final String... prefixes) {
return new Filter() {
@Override
public boolean matches(final FileObject file) {
for (final String prefix : prefixes)
if (file.filename.startsWith(prefix)) return true;
return false;
}
};
}
public Filter endsWith(final String suffix) {
return new Filter() {
@Override
public boolean matches(final FileObject file) {
return file.filename.endsWith(suffix);
}
};
}
public Filter not(final Filter filter) {
return new Filter() {
@Override
public boolean matches(final FileObject file) {
return !filter.matches(file);
}
};
}
public Filter or(final Filter a, final Filter b) {
return new Filter() {
@Override
public boolean matches(final FileObject file) {
return a.matches(file) || b.matches(file);
}
};
}
public Filter and(final Filter a, final Filter b) {
return new Filter() {
@Override
public boolean matches(final FileObject file) {
return a.matches(file) && b.matches(file);
}
};
}
public Iterable<FileObject> filter(final Filter filter) {
return filter(filter, this);
}
public FileObject
getFileFromDigest(final String filename, final String digest)
{
for (final FileObject file : this)
if (file.getFilename().equals(filename) &&
file.getChecksum().equals(digest)) return file;
return null;
}
public Iterable<String> analyzeDependencies(final FileObject file) {
try {
if (dependencyAnalyzer == null) dependencyAnalyzer =
new DependencyAnalyzer(imagejRoot);
return dependencyAnalyzer.getDependencies(imagejRoot, file.getFilename());
}
catch (final IOException e) {
log.error(e);
return null;
}
}
public void updateDependencies(final FileObject file) {
final Iterable<String> dependencies = analyzeDependencies(file);
if (dependencies == null) return;
for (final String dependency : dependencies)
file.addDependency(dependency, prefix(dependency));
}
public boolean has(final Filter filter) {
for (final FileObject file : this)
if (filter.matches(file)) return true;
return false;
}
public boolean hasChanges() {
return has(not(isNoAction()));
}
public boolean hasUploadOrRemove() {
return has(oneOf(Action.UPLOAD, Action.REMOVE));
}
public boolean hasForcableUpdates() {
for (final FileObject file : updateable(true))
if (!file.isUpdateable(false)) return true;
return false;
}
public Iterable<FileObject> updateable(final boolean evenForcedOnes) {
return filter(new Filter() {
@Override
public boolean matches(final FileObject file) {
return file.isUpdateable(evenForcedOnes) && file.isUpdateablePlatform();
}
});
}
public void markForUpdate(final boolean evenForcedUpdates) {
for (final FileObject file : updateable(evenForcedUpdates)) {
file.setFirstValidAction(this, Action.UPDATE,
Action.UNINSTALL, Action.INSTALL);
}
}
public String getURL(final FileObject file) {
final String siteName = file.updateSite;
assert (siteName != null && !siteName.equals(""));
final UpdateSite site = getUpdateSite(siteName);
return site.url + file.filename.replace(" ", "%20") + "-" +
file.getTimestamp();
}
public static class DependencyMap extends
HashMap<FileObject, FilesCollection>
{
// returns true when the map did not have the dependency before
public boolean
add(final FileObject dependency, final FileObject dependencee)
{
if (containsKey(dependency)) {
get(dependency).add(dependencee);
return false;
}
final FilesCollection list = new FilesCollection(null);
list.add(dependencee);
put(dependency, list);
return true;
}
}
// TODO: for developers, there should be a consistency check:
// no dependencies on local-only files, no circular dependencies,
// and no overriding circular dependencies.
void addDependencies(final FileObject file, final DependencyMap map,
final boolean overriding)
{
for (final Dependency dependency : file.getDependencies()) {
final FileObject other = get(dependency.filename);
if (other == null || overriding != dependency.overrides ||
!other.isUpdateablePlatform()) continue;
if (dependency.overrides) {
if (other.willNotBeInstalled()) continue;
}
else if (other.willBeUpToDate()) continue;
if (!map.add(other, file)) continue;
// overriding dependencies are not recursive
if (!overriding) addDependencies(other, map, overriding);
}
}
public DependencyMap getDependencies(final boolean overridingOnes) {
final DependencyMap result = new DependencyMap();
for (final FileObject file : toInstallOrUpdate())
addDependencies(file, result, overridingOnes);
return result;
}
public void sort() {
// first letters in this order: 'C', 'I', 'f', 'p', 'j', 's', 'i', 'm', 'l',
// 'r'
final ArrayList<FileObject> files = new ArrayList<FileObject>();
for (final FileObject file : this) {
files.add(file);
}
Collections.sort(files, new Comparator<FileObject>() {
@Override
public int compare(final FileObject a, final FileObject b) {
final int result = firstChar(a) - firstChar(b);
return result != 0 ? result : a.filename.compareTo(b.filename);
}
int firstChar(final FileObject file) {
final char c = file.filename.charAt(0);
final int index = "CIfpjsim".indexOf(c);
return index < 0 ? 0x200 + c : index;
}
});
this.clear();
for (final FileObject file : files) {
super.put(file.filename, file);
}
}
String checkForCircularDependency(final FileObject file,
final Set<FileObject> seen)
{
if (seen.contains(file)) return "";
final String result =
checkForCircularDependency(file, seen, new HashSet<FileObject>());
if (result == null) return "";
// Display only the circular dependency
final int last = result.lastIndexOf(' ');
final int off = result.lastIndexOf(result.substring(last), last - 1);
return "Circular dependency detected: " + result.substring(off + 1) + "\n";
}
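// Depth-first walk: 'chain' holds the current dependency path (used to detect a cycle),
// while 'seen' holds files that were already fully checked and can be skipped.
// Returns the offending dependency chain as a string, or null if no cycle was found.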
String checkForCircularDependency(final FileObject file,
final Set<FileObject> seen, final Set<FileObject> chain)
{
if (seen.contains(file)) return null;
for (final String dependency : file.dependencies.keySet()) {
final FileObject dep = get(dependency);
if (dep == null) continue;
if (chain.contains(dep)) return " " + dependency;
chain.add(dep);
final String result = checkForCircularDependency(dep, seen, chain);
seen.add(dep);
if (result != null) return " " + dependency + " ->" + result;
chain.remove(dep);
}
return null;
}
/* returns null if consistent, error string when not */
public String checkConsistency() {
final StringBuilder result = new StringBuilder();
final Set<FileObject> circularChecked = new HashSet<FileObject>();
for (final FileObject file : this) {
result.append(checkForCircularDependency(file, circularChecked));
// only non-obsolete components can have dependencies
final Set<String> deps = file.dependencies.keySet();
if (deps.size() > 0 && file.isObsolete()) result.append("Obsolete file " +
file + " has dependencies: " + Util.join(", ", deps) + "!\n");
for (final String dependency : deps) {
final FileObject dep = get(dependency);
if (dep == null || dep.current == null) result.append("The file " +
file + " has the obsolete/local-only " + "dependency " + dependency +
"!\n");
}
}
return result.length() > 0 ? result.toString() : null;
}
public File prefix(final FileObject file) {
return prefix(file.getFilename());
}
public File prefix(final String path) {
final File file = new File(path);
if (file.isAbsolute()) return file;
assert (imagejRoot != null);
return new File(imagejRoot, path);
}
public File prefixUpdate(final String path) {
return prefix("update/" + path);
}
public boolean fileExists(final String filename) {
return prefix(filename).exists();
}
@Override
public String toString() {
return Util.join(", ", this);
}
public FileObject get(final int index) {
throw new UnsupportedOperationException();
}
public void add(final FileObject file) {
super.put(file.getFilename(true), file);
}
@Override
public FileObject get(final Object filename) {
return super.get(FileObject.getFilename((String)filename, true));
}
@Override
public FileObject put(final String key, final FileObject file) {
throw new UnsupportedOperationException();
}
@Override
public Iterator<FileObject> iterator() {
final Iterator<Map.Entry<String, FileObject>> iterator = entrySet().iterator();
return new Iterator<FileObject>() {
@Override
public boolean hasNext() {
return iterator.hasNext();
}
@Override
public FileObject next() {
return iterator.next().getValue();
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
public String downloadIndexAndChecksum(final Progress progress) throws IOException, ParserConfigurationException, SAXException {
try {
read();
}
catch (final FileNotFoundException e) { /* ignore */}
final XMLFileDownloader downloader = new XMLFileDownloader(this);
downloader.addProgress(progress);
try {
downloader.start(false);
} catch (final Canceled e) {
downloader.done();
throw e;
}
new Checksummer(this, progress).updateFromLocal();
return downloader.getWarnings();
}
}
|
Updater GUI: do not show obsolete files
Signed-off-by: Johannes Schindelin <[email protected]>
|
core/updater/core/src/main/java/imagej/updater/core/FilesCollection.java
|
Updater GUI: do not show obsolete files
|
<ide><path>ore/updater/core/src/main/java/imagej/updater/core/FilesCollection.java
<ide> }
<ide>
<ide> public Iterable<FileObject> forUpdateSite(final String name) {
<del> return filter(isUpdateSite(name));
<add> return filter(and(not(is(Status.OBSOLETE_UNINSTALLED)), and(doesPlatformMatch(), isUpdateSite(name))));
<ide> }
<ide>
<ide> public Iterable<FileObject> managedFiles() {
|
|
JavaScript
|
mit
|
31dccb7e783c91f281787ff44978649d9e96da78
| 0 |
Raathigesh/Lazymine,AwadMaharoof/Lazymine,AwadMaharoof/Lazymine,Raathigesh/Lazymine
|
var React = require('react'),
StateMixin = require('../mixins/app-StateMixin');
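// Card summarising the hours entered for the current week; the collapsible region
// below the header lists per-day totals and the individual time entries.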
var WeekTimeCard = React.createClass({
mixins: [StateMixin],
render: function () {
"use strict";
return (
<div className="card card-blue">
<div className="card-main">
<div className="card-header">
<div className="card-inner">
<p className="card-heading">Weekly Summary</p>
<a className="pull-right collapsed week-time-expand" data-toggle="collapse" href="#collapsible-region">
<span className="icon icon-expand-more collapsed-show"></span>
<span className="icon icon-expand-less collapsed-hide"></span>
</a>
</div>
</div>
<div className="card-img">
<span className="card-key-value">38</span> <span className="card-sub-heading">Hours entered for the week August 2 - August 8</span>
</div>
<div className="card-inner collapsible-region collapse" id="collapsible-region" aria-expanded="false" style={{height: 0 + 'px'}}>
<div className="daily-time-total">
<span className="text-left">
Sunday August 2
</span>
<span className="pull-right">
7.5h
</span>
</div>
<div className="tile tile-collapse">
<div className="card-action">
<div className="pull-left tile-side">
<div className="avatar avatar-sm avatar-multi">
<span className="">R</span>
</div>
</div>
<div className="tile-action tile-action-show">
1.5h
</div>
<div className="tile-inner">
<div className="text-overflow">
OAC-254 OAC Tiger Beta Release : Submit and c
</div>
</div>
</div>
</div>
</div>
</div>
</div>
);
}
});
module.exports = WeekTimeCard;
|
src/js/components/app-WeekTimeCard.js
|
var React = require('react'),
StateMixin = require('../mixins/app-StateMixin');
var WeekTimeCard = React.createClass({
mixins: [StateMixin],
render: function () {
"use strict";
return (
<div className="card card-blue">
<div className="card-main">
<div className="card-header">
<div className="card-inner">
<p className="card-heading">Weekly Summary</p>
<a className="pull-right collapsed week-time-expand" data-toggle="collapse" href="#collapsible-region">
<span className="icon icon-expand-more collapsed-hide"></span>
<span className="icon icon-expand-less collapsed-show"></span>
</a>
</div>
</div>
<div className="card-img">
<span className="card-key-value">38</span> <span className="card-sub-heading">Hours entered for the week August 2 - August 8</span>
</div>
<div className="card-inner collapsible-region collapse" id="collapsible-region" aria-expanded="false" style={{height: 0 + 'px'}}>
<div className="daily-time-total">
<span className="text-left">
Sunday August 2
</span>
<span className="pull-right">
7.5h
</span>
</div>
<div className="tile tile-collapse">
<div className="card-action">
<div className="pull-left tile-side">
<div className="avatar avatar-sm avatar-multi">
<span className="">R</span>
</div>
</div>
<div className="tile-action tile-action-show">
1.5h
</div>
<div className="tile-inner">
<div className="text-overflow">
OAC-254 OAC Tiger Beta Release : Submit and c
</div>
</div>
</div>
</div>
</div>
</div>
</div>
);
}
});
module.exports = WeekTimeCard;
|
Fix minimize and expand icon for weekly summary
|
src/js/components/app-WeekTimeCard.js
|
Fix minimize and expand icon for weekly summary
|
<ide><path>rc/js/components/app-WeekTimeCard.js
<ide> <div className="card-inner">
<ide> <p className="card-heading">Weekly Summary</p>
<ide> <a className="pull-right collapsed week-time-expand" data-toggle="collapse" href="#collapsible-region">
<del> <span className="icon icon-expand-more collapsed-hide"></span>
<del> <span className="icon icon-expand-less collapsed-show"></span>
<add> <span className="icon icon-expand-more collapsed-show"></span>
<add> <span className="icon icon-expand-less collapsed-hide"></span>
<ide> </a>
<ide> </div>
<ide> </div>
|
|
Java
|
apache-2.0
|
de3ffa6cf1d919bc693f0c81c07d70b9e47bb5d5
| 0 |
tabish121/proton4j
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.qpid.proton4j.engine.impl;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import org.apache.qpid.proton4j.amqp.driver.ProtonTestPeer;
import org.apache.qpid.proton4j.engine.Connection;
import org.apache.qpid.proton4j.engine.ConnectionState;
import org.apache.qpid.proton4j.engine.Session;
import org.apache.qpid.proton4j.engine.exceptions.EngineStateException;
import org.junit.Test;
/**
* Test for basic functionality of the ProtonEngine implementation.
*/
public class ProtonEngineTest extends ProtonEngineTestSupport {
@Test
public void testEngineStart() {
ProtonEngine engine = ProtonEngineFactory.createDefaultEngine();
engine.errorHandler(result -> failure = result);
// Engine cannot accept input bytes until started.
assertFalse(engine.isWritable());
Connection connection = engine.start();
assertNotNull(connection);
// Default engine should start and return a connection immediately
assertTrue(engine.isWritable());
assertNotNull(connection);
assertNull(failure);
}
@Test
public void testEngineEmitsAMQPHeaderOnConnectionOpen() {
ProtonEngine engine = ProtonEngineFactory.createDefaultEngine();
engine.errorHandler(result -> failure = result);
ProtonTestPeer peer = new ProtonTestPeer(engine);
engine.outputConsumer(peer);
Connection connection = engine.start();
assertNotNull(connection);
// Default engine should start and return a connection immediately
assertNotNull(connection);
peer.expectAMQPHeader().respondWithAMQPHeader();
peer.expectOpen().respond().withContainerId("driver");
connection.setContainerId("test");
connection.open();
peer.waitForScriptToComplete();
assertEquals(ConnectionState.ACTIVE, connection.getState());
assertEquals(ConnectionState.ACTIVE, connection.getRemoteState());
assertNull(failure);
}
@Test
public void testNoArgTickFailsWhenConnectionNotOpenedNoLocalIdleSet() throws EngineStateException {
doTestTickFailsBasedOnState(false, false, false, false, false);
}
@Test
public void testNoArgTickFailsWhenConnectionNotOpenedLocalIdleSet() throws EngineStateException {
doTestTickFailsBasedOnState(true, false, false, false, false);
}
@Test
public void testTickFailsWhenConnectionNotOpenedNoLocalIdleSet() throws EngineStateException {
doTestTickFailsBasedOnState(false, true, false, false, false);
}
@Test
public void testTickFailsWhenConnectionNotOpenedLocalIdleSet() throws EngineStateException {
doTestTickFailsBasedOnState(true, true, false, false, false);
}
@Test
public void testNoArgTickFailsWhenEngineShutdownNoLocalIdleSet() throws EngineStateException {
doTestTickFailsBasedOnState(false, false, true, true, true);
}
@Test
public void testNoArgTickFailsWhenEngineShutdownLocalIdleSet() throws EngineStateException {
doTestTickFailsBasedOnState(true, false, true, true, true);
}
@Test
public void testTickFailsWhenEngineIsShutdownNoLocalIdleSet() throws EngineStateException {
doTestTickFailsBasedOnState(false, true, true, true, true);
}
@Test
public void testTickFailsWhenEngineIsShutdownLocalIdleSet() throws EngineStateException {
doTestTickFailsBasedOnState(true, true, true, true, true);
}
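// Drives tick()/tick(long) through various engine states: optionally set a local idle
// timeout, optionally open and close the connection, optionally shut the engine down,
// then verify that ticking throws IllegalStateException.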
private void doTestTickFailsBasedOnState(boolean setLocalTimeout, boolean tickWithArgs, boolean open, boolean close, boolean shutdown) throws EngineStateException {
ProtonEngine engine = ProtonEngineFactory.createDefaultEngine();
engine.errorHandler(result -> failure = result);
ProtonTestPeer peer = new ProtonTestPeer(engine);
engine.outputConsumer(peer);
Connection connection = engine.start();
assertNotNull(connection);
if (setLocalTimeout) {
connection.setIdleTimeout(1000);
}
if (open) {
peer.expectAMQPHeader().respondWithAMQPHeader();
peer.expectOpen().respond();
connection.open();
}
if (close) {
peer.expectClose().respond();
connection.close();
}
peer.waitForScriptToComplete();
assertNull(failure);
if (shutdown) {
engine.shutdown();
}
try {
if (tickWithArgs) {
engine.tick(5000);
} else {
engine.tick();
}
fail("Should not be able to tick an unopened connection");
} catch (IllegalStateException ise) {
}
}
@Test
public void testTickRemoteTimeout() throws EngineStateException {
ProtonEngine engine = ProtonEngineFactory.createDefaultEngine();
engine.errorHandler(result -> failure = result);
ProtonTestPeer peer = new ProtonTestPeer(engine);
engine.outputConsumer(peer);
Connection connection = engine.start();
assertNotNull(connection);
final int remoteTimeout = 4000;
peer.expectAMQPHeader().respondWithAMQPHeader();
peer.expectOpen().withIdleTimeOut(nullValue()).respond().withIdleTimeOut(remoteTimeout);
// No local idle timeout is set; the remote peer advertises one in its Open frame
connection.open();
long deadline = engine.tick(0);
assertEquals("Expected to be returned a deadline of 2000", 2000, deadline); // deadline = 4000 / 2
deadline = engine.tick(1000); // Wait for less than the deadline with no data - get the same value
assertEquals("When the deadline hasn't been reached tick() should return the previous deadline", 2000, deadline);
assertEquals("When the deadline hasn't been reached tick() shouldn't write data", 0, peer.getEmptyFrameCount());
peer.expectEmptyFrame();
deadline = engine.tick(remoteTimeout / 2); // Wait for the deadline - next deadline should be (4000/2)*2
assertEquals("When the deadline has been reached expected a new deadline to be returned 4000", 4000, deadline);
assertEquals("tick() should have written data", 1, peer.getEmptyFrameCount());
peer.expectBegin();
Session session = connection.session().open();
deadline = engine.tick(3000);
assertEquals("Writing data resets the deadline", 5000, deadline);
assertEquals("When the deadline is reset tick() shouldn't write an empty frame", 1, peer.getEmptyFrameCount());
peer.expectAttach();
session.sender("test").open();
deadline = engine.tick(4000);
assertEquals("Writing data resets the deadline", 6000, deadline);
assertEquals("When the deadline is reset tick() shouldn't write an empty frame", 1, peer.getEmptyFrameCount());
peer.waitForScriptToComplete();
assertNull(failure);
}
@Test
public void testTickLocalTimeout() throws EngineStateException {
ProtonEngine engine = ProtonEngineFactory.createDefaultEngine();
engine.errorHandler(result -> failure = result);
ProtonTestPeer peer = new ProtonTestPeer(engine);
engine.outputConsumer(peer);
Connection connection = engine.start();
assertNotNull(connection);
final int localTimeout = 4000;
peer.expectAMQPHeader().respondWithAMQPHeader();
peer.expectOpen().withIdleTimeOut(localTimeout).respond();
// Set our local idleTimeout
connection.setIdleTimeout(localTimeout);
connection.open();
long deadline = engine.tick(0);
assertEquals("Expected to be returned a deadline of 4000", 4000, deadline);
deadline = engine.tick(1000); // Wait for less than the deadline with no data - get the same value
assertEquals("When the deadline hasn't been reached tick() should return the previous deadline", 4000, deadline);
assertEquals("Reading data should never result in a frame being written", 0, peer.getEmptyFrameCount());
// remote sends an empty frame now
peer.remoteEmptyFrame().now();
deadline = engine.tick(2000);
assertEquals("Reading data resets the deadline", 6000, deadline);
assertEquals("Reading data should never result in a frame being written", 0, peer.getEmptyFrameCount());
assertEquals("Reading data before the deadline should keep the connection open", ConnectionState.ACTIVE, connection.getState());
peer.expectClose().respond();
deadline = engine.tick(7000);
assertEquals("Calling tick() after the deadline should result in the connection being closed", ConnectionState.CLOSED, connection.getState());
peer.waitForScriptToComplete();
assertNotNull(failure);
}
@Test
public void testTickWithZeroIdleTimeoutsGivesZeroDeadline() throws EngineStateException {
doTickWithNoIdleTimeoutGivesZeroDeadlineTestImpl(true);
}
@Test
public void testTickWithNullIdleTimeoutsGivesZeroDeadline() throws EngineStateException {
doTickWithNoIdleTimeoutGivesZeroDeadlineTestImpl(false);
}
private void doTickWithNoIdleTimeoutGivesZeroDeadlineTestImpl(boolean useZero) throws EngineStateException {
ProtonEngine engine = ProtonEngineFactory.createDefaultEngine();
engine.errorHandler(result -> failure = result);
ProtonTestPeer peer = new ProtonTestPeer(engine);
engine.outputConsumer(peer);
Connection connection = engine.start();
assertNotNull(connection);
peer.expectAMQPHeader().respondWithAMQPHeader();
if (useZero) {
peer.expectOpen().withIdleTimeOut(nullValue()).respond().withIdleTimeOut(0);
} else {
peer.expectOpen().withIdleTimeOut(nullValue()).respond();
}
connection.open();
peer.waitForScriptToComplete();
assertNull(failure);
assertEquals(0, connection.getRemoteIdleTimeout());
long deadline = engine.tick(0);
assertEquals("Unexpected deadline returned", 0, deadline);
deadline = engine.tick(10);
assertEquals("Unexpected deadline returned", 0, deadline);
peer.waitForScriptToComplete();
assertNull(failure);
}
@Test
public void testTickWithLocalTimeout() throws EngineStateException {
// all-positive
doTickWithLocalTimeoutTestImpl(4000, 10000, 14000, 18000, 22000);
// all-negative
doTickWithLocalTimeoutTestImpl(2000, -100000, -98000, -96000, -94000);
// negative to positive missing 0
doTickWithLocalTimeoutTestImpl(500, -950, -450, 50, 550);
// negative to positive striking 0
doTickWithLocalTimeoutTestImpl(3000, -6000, -3000, 1, 3001);
}
private void doTickWithLocalTimeoutTestImpl(int localTimeout, long tick1, long expectedDeadline1, long expectedDeadline2, long expectedDeadline3) throws EngineStateException {
this.failure = null;
ProtonEngine engine = ProtonEngineFactory.createDefaultEngine();
engine.errorHandler(result -> failure = result);
ProtonTestPeer peer = new ProtonTestPeer(engine);
engine.outputConsumer(peer);
Connection connection = engine.start();
assertNotNull(connection);
peer.expectAMQPHeader().respondWithAMQPHeader();
peer.expectOpen().withIdleTimeOut(localTimeout).respond();
// Set our local idleTimeout
connection.setIdleTimeout(localTimeout);
connection.open();
peer.waitForScriptToComplete();
assertNull(failure);
long deadline = engine.tick(tick1);
assertEquals("Unexpected deadline returned", expectedDeadline1, deadline);
// Wait for less time than the deadline with no data - get the same value
long interimTick = tick1 + 10;
assertTrue (interimTick < expectedDeadline1);
assertEquals("When the deadline hasn't been reached tick() should return the previous deadline", expectedDeadline1, engine.tick(interimTick));
assertEquals("When the deadline hasn't been reached tick() shouldn't write data", 1, peer.getPerformativeCount());
assertNull(failure);
peer.remoteEmptyFrame().now();
deadline = engine.tick(expectedDeadline1);
assertEquals("When the deadline has been reached expected a new local deadline to be returned", expectedDeadline2, deadline);
assertEquals("When the deadline hasn't been reached tick() shouldn't write data", 1, peer.getPerformativeCount());
assertNull(failure);
peer.remoteEmptyFrame().now();
deadline = engine.tick(expectedDeadline2);
assertEquals("When the deadline has been reached expected a new local deadline to be returned", expectedDeadline3, deadline);
assertEquals("When the deadline hasn't been reached tick() shouldn't write data", 1, peer.getPerformativeCount());
assertNull(failure);
peer.expectClose().withError(notNullValue()).respond();
assertEquals("Connection should be active", ConnectionState.ACTIVE, connection.getState());
engine.tick(expectedDeadline3); // Wait for the deadline, but don't receive traffic, allow local timeout to expire
assertEquals("tick() should have written data", 2, peer.getPerformativeCount());
assertEquals("Calling tick() after the deadline should result in the connection being closed", ConnectionState.CLOSED, connection.getState());
peer.waitForScriptToComplete();
assertNotNull(failure);
}
@Test
public void testTickWithRemoteTimeout() throws EngineStateException {
// all-positive
doTickWithRemoteTimeoutTestImpl(4000, 10000, 14000, 18000, 22000);
// all-negative
doTickWithRemoteTimeoutTestImpl(2000, -100000, -98000, -96000, -94000);
// negative to positive missing 0
doTickWithRemoteTimeoutTestImpl(500, -950, -450, 50, 550);
// negative to positive striking 0
doTickWithRemoteTimeoutTestImpl(3000, -6000, -3000, 1, 3001);
}
private void doTickWithRemoteTimeoutTestImpl(int remoteTimeoutHalf, long tick1, long expectedDeadline1, long expectedDeadline2, long expectedDeadline3) throws EngineStateException {
ProtonEngine engine = ProtonEngineFactory.createDefaultEngine();
engine.errorHandler(result -> failure = result);
ProtonTestPeer peer = new ProtonTestPeer(engine);
engine.outputConsumer(peer);
Connection connection = engine.start();
assertNotNull(connection);
peer.expectAMQPHeader().respondWithAMQPHeader();
// Handle the peer transmitting [half] their timeout. We halve it on receipt to avoid spurious timeouts
// in case they have not actually transmitted half their timeout, as the AMQP spec only says they SHOULD do that.
peer.expectOpen().respond().withIdleTimeOut(remoteTimeoutHalf * 2);
connection.open();
peer.waitForScriptToComplete();
assertNull(failure);
long deadline = engine.tick(tick1);
assertEquals("Unexpected deadline returned", expectedDeadline1, deadline);
// Wait for less time than the deadline with no data - get the same value
long interimTick = tick1 + 10;
assertTrue (interimTick < expectedDeadline1);
assertEquals("When the deadline hasn't been reached tick() should return the previous deadline", expectedDeadline1, engine.tick(interimTick));
assertEquals("When the deadline hasn't been reached tick() shouldn't write data", 1, peer.getPerformativeCount());
assertEquals("When the deadline hasn't been reached tick() shouldn't write data", 0, peer.getEmptyFrameCount());
peer.expectEmptyFrame();
deadline = engine.tick(expectedDeadline1);
assertEquals("When the deadline has been reached expected a new remote deadline to be returned", expectedDeadline2, deadline);
assertEquals("tick() should have written data", 1, peer.getEmptyFrameCount());
peer.expectBegin();
// Do some actual work, create real traffic, removing the need to send empty frame to satisfy idle-timeout
connection.session().open();
assertEquals("session open should have written data", 2, peer.getPerformativeCount());
deadline = engine.tick(expectedDeadline2);
assertEquals("When the deadline has been reached expected a new remote deadline to be returned", expectedDeadline3, deadline);
assertEquals("tick() should not have written data as there was actual activity", 2, peer.getPerformativeCount());
assertEquals("tick() should not have written data as there was actual activity", 1, peer.getEmptyFrameCount());
peer.expectEmptyFrame();
engine.tick(expectedDeadline3);
assertEquals("tick() should have written data", 2, peer.getEmptyFrameCount());
peer.waitForScriptToComplete();
assertNull(failure);
}
@Test
public void testTickWithBothTimeouts() throws EngineStateException {
// all-positive
doTickWithBothTimeoutsTestImpl(true, 5000, 2000, 10000, 12000, 14000, 15000);
doTickWithBothTimeoutsTestImpl(false, 5000, 2000, 10000, 12000, 14000, 15000);
// all-negative
doTickWithBothTimeoutsTestImpl(true, 10000, 4000, -100000, -96000, -92000, -90000);
doTickWithBothTimeoutsTestImpl(false, 10000, 4000, -100000, -96000, -92000, -90000);
// negative to positive missing 0
doTickWithBothTimeoutsTestImpl(true, 500, 200, -450, -250, -50, 50);
doTickWithBothTimeoutsTestImpl(false, 500, 200, -450, -250, -50, 50);
// negative to positive striking 0 with local deadline
doTickWithBothTimeoutsTestImpl(true, 500, 200, -500, -300, -100, 1);
doTickWithBothTimeoutsTestImpl(false, 500, 200, -500, -300, -100, 1);
// negative to positive striking 0 with remote deadline
doTickWithBothTimeoutsTestImpl(true, 500, 200, -200, 1, 201, 300);
doTickWithBothTimeoutsTestImpl(false, 500, 200, -200, 1, 201, 300);
}
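// With both a local and a remote idle timeout configured, the deadlines below alternate
// between the two. When allowLocalTimeout is true the test lets the local deadline expire
// (closing the connection); otherwise it feeds empty frames from the peer to keep it alive.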
private void doTickWithBothTimeoutsTestImpl(boolean allowLocalTimeout, int localTimeout, int remoteTimeoutHalf, long tick1,
long expectedDeadline1, long expectedDeadline2, long expectedDeadline3) throws EngineStateException {
this.failure = null;
ProtonEngine engine = ProtonEngineFactory.createDefaultEngine();
engine.errorHandler(result -> failure = result);
ProtonTestPeer peer = new ProtonTestPeer(engine);
engine.outputConsumer(peer);
Connection connection = engine.start();
assertNotNull(connection);
peer.expectAMQPHeader().respondWithAMQPHeader();
// Handle the peer transmitting [half] their timeout. We halve it on receipt to avoid spurious timeouts
// in case they have not actually transmitted half their timeout, as the AMQP spec only says they SHOULD do that.
peer.expectOpen().respond().withIdleTimeOut(remoteTimeoutHalf * 2);
connection.setIdleTimeout(localTimeout);
connection.open();
long deadline = engine.tick(tick1);
assertEquals("Unexpected deadline returned", expectedDeadline1, deadline);
// Wait for less time than the deadline with no data - get the same value
long interimTick = tick1 + 10;
assertTrue (interimTick < expectedDeadline1);
assertEquals("When the deadline hasn't been reached tick() should return the previous deadline", expectedDeadline1, engine.tick(interimTick));
assertEquals("When the deadline hasn't been reached tick() shouldn't write data", 0, peer.getEmptyFrameCount());
peer.expectEmptyFrame();
deadline = engine.tick(expectedDeadline1);
assertEquals("When the deadline has been reached expected a new remote deadline to be returned", expectedDeadline2, deadline);
assertEquals("tick() should have written data", 1, peer.getEmptyFrameCount());
peer.expectEmptyFrame();
deadline = engine.tick(expectedDeadline2);
assertEquals("When the deadline has been reached expected a new local deadline to be returned", expectedDeadline3, deadline);
assertEquals("tick() should have written data", 2, peer.getEmptyFrameCount());
peer.waitForScriptToComplete();
if (allowLocalTimeout) {
peer.expectClose().respond();
assertEquals("Connection should be active", ConnectionState.ACTIVE, connection.getState());
engine.tick(expectedDeadline3); // Wait for the deadline, but don't receive traffic, allow local timeout to expire
assertEquals("Calling tick() after the deadline should result in the connection being closed", ConnectionState.CLOSED, connection.getState());
assertEquals("tick() should have written data but not an empty frame", 2, peer.getEmptyFrameCount());
peer.waitForScriptToComplete();
assertNotNull(failure);
} else {
peer.remoteEmptyFrame().now();
deadline = engine.tick(expectedDeadline3);
assertEquals("Receiving data should have reset the deadline (to the next remote one)", expectedDeadline2 + (remoteTimeoutHalf), deadline);
assertEquals("tick() shouldn't have written data", 2, peer.getEmptyFrameCount());
assertEquals("Connection should be active", ConnectionState.ACTIVE, connection.getState());
peer.waitForScriptToComplete();
assertNull(failure);
}
}
@Test
public void testTickWithNanoTimeDerivedValueWhichWrapsLocalThenRemote() throws EngineStateException {
doTickWithNanoTimeDerivedValueWhichWrapsLocalThenRemoteTestImpl(false);
}
@Test
public void testTickWithNanoTimeDerivedValueWhichWrapsLocalThenRemoteWithLocalTimeout() throws EngineStateException {
doTickWithNanoTimeDerivedValueWhichWrapsLocalThenRemoteTestImpl(true);
}
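// The tick values below emulate a nanoTime-derived clock, so they intentionally overflow
// from Long.MAX_VALUE to Long.MIN_VALUE; the expected deadlines account for that wrap-around.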
private void doTickWithNanoTimeDerivedValueWhichWrapsLocalThenRemoteTestImpl(boolean allowLocalTimeout) throws EngineStateException {
int localTimeout = 5000;
int remoteTimeoutHalf = 2000;
assertTrue(remoteTimeoutHalf < localTimeout);
long offset = 2500;
assertTrue(offset < localTimeout);
assertTrue(offset > remoteTimeoutHalf);
ProtonEngine engine = ProtonEngineFactory.createDefaultEngine();
engine.errorHandler(result -> failure = result);
ProtonTestPeer peer = new ProtonTestPeer(engine);
engine.outputConsumer(peer);
Connection connection = engine.start();
assertNotNull(connection);
peer.expectAMQPHeader().respondWithAMQPHeader();
// Handle the peer transmitting [half] their timeout. We halve it on receipt to avoid spurious timeouts
// in case they have not actually transmitted half their timeout, as the AMQP spec only says they SHOULD do that.
peer.expectOpen().respond().withIdleTimeOut(remoteTimeoutHalf * 2);
connection.setIdleTimeout(localTimeout);
connection.open();
long deadline = engine.tick(Long.MAX_VALUE - offset);
assertEquals("Unexpected deadline returned", Long.MAX_VALUE - offset + remoteTimeoutHalf, deadline);
deadline = engine.tick(Long.MAX_VALUE - (offset - 100)); // Wait for less time than the deadline with no data - get the same value
assertEquals("When the deadline hasn't been reached tick() should return the previous deadline", Long.MAX_VALUE -offset + remoteTimeoutHalf, deadline);
assertEquals("When the deadline hasn't been reached tick() shouldn't write data", 0, peer.getEmptyFrameCount());
peer.expectEmptyFrame();
deadline = engine.tick(Long.MAX_VALUE -offset + remoteTimeoutHalf); // Wait for the deadline - next deadline should be previous + remoteTimeoutHalf;
assertEquals("When the deadline has been reached expected a new remote deadline to be returned", Long.MIN_VALUE + (2* remoteTimeoutHalf) - offset -1, deadline);
assertEquals("tick() should have written data", 1, peer.getEmptyFrameCount());
peer.expectEmptyFrame();
deadline = engine.tick(Long.MIN_VALUE + (2* remoteTimeoutHalf) - offset -1); // Wait for the deadline - next deadline should be orig + localTimeout;
assertEquals("When the deadline has been reached expected a new local deadline to be returned", Long.MIN_VALUE + (localTimeout - offset) -1, deadline);
assertEquals("tick() should have written data", 2, peer.getEmptyFrameCount());
peer.waitForScriptToComplete();
if (allowLocalTimeout) {
peer.expectClose().respond();
assertEquals("Connection should be active", ConnectionState.ACTIVE, connection.getState());
engine.tick(Long.MIN_VALUE + (localTimeout - offset) -1); // Wait for the deadline, but don't receive traffic, allow local timeout to expire
assertEquals("Calling tick() after the deadline should result in the connection being closed", ConnectionState.CLOSED, connection.getState());
assertEquals("tick() should have written data but not an empty frame", 2, peer.getEmptyFrameCount());
peer.waitForScriptToComplete();
assertNotNull(failure);
} else {
peer.remoteEmptyFrame().now();
deadline = engine.tick(Long.MIN_VALUE + (localTimeout - offset) -1); // Wait for the deadline - next deadline should be orig + 3*remoteTimeoutHalf;
assertEquals("Receiving data should have reset the deadline (to the remote one)", Long.MIN_VALUE + (3* remoteTimeoutHalf) - offset -1, deadline);
assertEquals("tick() shouldn't have written data", 2, peer.getEmptyFrameCount());
assertEquals("Connection should be active", ConnectionState.ACTIVE, connection.getState());
peer.waitForScriptToComplete();
assertNull(failure);
}
}
@Test
public void testTickWithNanoTimeDerivedValueWhichWrapsRemoteThenLocal() throws EngineStateException {
doTickWithNanoTimeDerivedValueWhichWrapsRemoteThenLocalTestImpl(false);
}
@Test
public void testTickWithNanoTimeDerivedValueWhichWrapsRemoteThenLocalWithLocalTimeout() throws EngineStateException {
doTickWithNanoTimeDerivedValueWhichWrapsRemoteThenLocalTestImpl(true);
}
private void doTickWithNanoTimeDerivedValueWhichWrapsRemoteThenLocalTestImpl(boolean allowLocalTimeout) throws EngineStateException {
int localTimeout = 2000;
int remoteTimeoutHalf = 5000;
assertTrue(localTimeout < remoteTimeoutHalf);
long offset = 2500;
assertTrue(offset > localTimeout);
assertTrue(offset < remoteTimeoutHalf);
ProtonEngine engine = ProtonEngineFactory.createDefaultEngine();
engine.errorHandler(result -> failure = result);
ProtonTestPeer peer = new ProtonTestPeer(engine);
engine.outputConsumer(peer);
Connection connection = engine.start();
assertNotNull(connection);
peer.expectAMQPHeader().respondWithAMQPHeader();
// Handle the peer transmitting [half] their timeout. We halve it on receipt to avoid spurious timeouts
// in case they have not actually transmitted half their timeout, as the AMQP spec only says they SHOULD do that.
peer.expectOpen().respond().withIdleTimeOut(remoteTimeoutHalf * 2);
connection.setIdleTimeout(localTimeout);
connection.open();
long deadline = engine.tick(Long.MAX_VALUE - offset);
assertEquals("Unexpected deadline returned", Long.MAX_VALUE - offset + localTimeout, deadline);
deadline = engine.tick(Long.MAX_VALUE - (offset - 100)); // Wait for less time than the deadline with no data - get the same value
assertEquals("When the deadline hasn't been reached tick() should return the previous deadline", Long.MAX_VALUE - offset + localTimeout, deadline);
assertEquals("tick() shouldn't have written data", 0, peer.getEmptyFrameCount());
// Receive Empty frame to satisfy local deadline
peer.remoteEmptyFrame().now();
deadline = engine.tick(Long.MAX_VALUE - offset + localTimeout); // Wait for the deadline - next deadline should be orig + 2* localTimeout;
assertEquals("When the deadline has been reached expected a new local deadline to be returned", Long.MIN_VALUE + (localTimeout - offset) -1 + localTimeout, deadline);
assertEquals("tick() should not have written data", 0, peer.getEmptyFrameCount());
peer.waitForScriptToComplete();
if (allowLocalTimeout) {
peer.expectClose().respond();
assertEquals("Connection should be active", ConnectionState.ACTIVE, connection.getState());
engine.tick(Long.MIN_VALUE + (localTimeout - offset) -1 + localTimeout); // Wait for the deadline, but don't receive traffic, allow local timeout to expire
assertEquals("Calling tick() after the deadline should result in the connection being closed", ConnectionState.CLOSED, connection.getState());
assertEquals("tick() should have written data but not an empty frame", 0, peer.getEmptyFrameCount());
peer.waitForScriptToComplete();
assertNotNull(failure);
} else {
// Receive Empty frame to satisfy local deadline
peer.remoteEmptyFrame().now();
deadline = engine.tick(Long.MIN_VALUE + (localTimeout - offset) -1 + localTimeout); // Wait for the deadline - next deadline should be orig + remoteTimeoutHalf;
assertEquals("Receiving data should have reset the deadline (to the remote one)", Long.MIN_VALUE + remoteTimeoutHalf - offset -1, deadline);
assertEquals("tick() shouldn't have written data", 0, peer.getEmptyFrameCount());
peer.expectEmptyFrame();
deadline = engine.tick(Long.MIN_VALUE + remoteTimeoutHalf - offset -1); // Wait for the deadline - next deadline should be orig + 3* localTimeout;
assertEquals("When the deadline has been reached expected a new local deadline to be returned", Long.MIN_VALUE + (3* localTimeout) - offset -1, deadline);
assertEquals("tick() should have written an empty frame", 1, peer.getEmptyFrameCount());
assertEquals("Connection should be active", ConnectionState.ACTIVE, connection.getState());
peer.waitForScriptToComplete();
assertNull(failure);
}
}
@Test
public void testTickWithNanoTimeDerivedValueWhichWrapsBothRemoteFirst() throws EngineStateException {
doTickWithNanoTimeDerivedValueWhichWrapsBothRemoteFirstTestImpl(false);
}
@Test
public void testTickWithNanoTimeDerivedValueWhichWrapsBothRemoteFirstWithLocalTimeout() throws EngineStateException {
doTickWithNanoTimeDerivedValueWhichWrapsBothRemoteFirstTestImpl(true);
}
private void doTickWithNanoTimeDerivedValueWhichWrapsBothRemoteFirstTestImpl(boolean allowLocalTimeout) throws EngineStateException {
int localTimeout = 2000;
int remoteTimeoutHalf = 2500;
assertTrue(localTimeout < remoteTimeoutHalf);
long offset = 500;
assertTrue(offset < localTimeout);
ProtonEngine engine = ProtonEngineFactory.createDefaultEngine();
engine.errorHandler(result -> failure = result);
ProtonTestPeer peer = new ProtonTestPeer(engine);
engine.outputConsumer(peer);
Connection connection = engine.start();
assertNotNull(connection);
peer.expectAMQPHeader().respondWithAMQPHeader();
// Handle the peer transmitting [half] their timeout. We halve it on receipt to avoid spurious timeouts
// in case they have not actually transmitted half their timeout, as the AMQP spec only says they SHOULD do that.
peer.expectOpen().respond().withIdleTimeOut(remoteTimeoutHalf * 2);
connection.setIdleTimeout(localTimeout);
connection.open();
long deadline = engine.tick(Long.MAX_VALUE - offset);
assertEquals("Unexpected deadline returned", Long.MIN_VALUE + (localTimeout - offset) -1, deadline);
deadline = engine.tick(Long.MAX_VALUE - (offset - 100)); // Wait for less time than the deadline with no data - get the same value
assertEquals("When the deadline hasn't been reached tick() should return the previous deadline", Long.MIN_VALUE + (localTimeout - offset) -1, deadline);
assertEquals("tick() shouldn't have written data", 0, peer.getEmptyFrameCount());
// Receive Empty frame to satisfy local deadline
peer.remoteEmptyFrame().now();
deadline = engine.tick(Long.MIN_VALUE + (localTimeout - offset) -1); // Wait for the deadline - next deadline should be orig + remoteTimeoutHalf;
assertEquals("When the deadline has been reached expected a new remote deadline to be returned", Long.MIN_VALUE + (remoteTimeoutHalf - offset) -1, deadline);
assertEquals("When the deadline hasn't been reached tick() shouldn't write data", 0, peer.getEmptyFrameCount());
peer.expectEmptyFrame();
deadline = engine.tick(Long.MIN_VALUE + (remoteTimeoutHalf - offset) -1); // Wait for the deadline - next deadline should be orig + 2* localTimeout;
assertEquals("When the deadline has been reached expected a new local deadline to be returned", Long.MIN_VALUE + (localTimeout - offset) -1 + localTimeout, deadline);
assertEquals("tick() should have written data", 1, peer.getEmptyFrameCount());
peer.waitForScriptToComplete();
if (allowLocalTimeout) {
peer.expectClose().respond();
assertEquals("Connection should be active", ConnectionState.ACTIVE, connection.getState());
engine.tick(Long.MIN_VALUE + (localTimeout - offset) -1 + localTimeout); // Wait for the deadline, but don't receive traffic, allow local timeout to expire
assertEquals("Calling tick() after the deadline should result in the connection being closed", ConnectionState.CLOSED, connection.getState());
assertEquals("tick() should have written data but not an empty frame", 1, peer.getEmptyFrameCount());
peer.waitForScriptToComplete();
assertNotNull(failure);
} else {
// Receive Empty frame to satisfy local deadline
peer.remoteEmptyFrame().now();
deadline = engine.tick(Long.MIN_VALUE + (localTimeout - offset) -1 + localTimeout); // Wait for the deadline - next deadline should be orig + 2*remoteTimeoutHalf;
assertEquals("Receiving data should have reset the deadline (to the remote one)", Long.MIN_VALUE + (2* remoteTimeoutHalf) - offset -1, deadline);
assertEquals("tick() shouldn't have written data", 1, peer.getEmptyFrameCount());
assertEquals("Connection should be active", ConnectionState.ACTIVE, connection.getState());
peer.waitForScriptToComplete();
assertNull(failure);
}
}
@Test
public void testTickWithNanoTimeDerivedValueWhichWrapsBothLocalFirst() throws EngineStateException {
doTickWithNanoTimeDerivedValueWhichWrapsBothLocalFirstTestImpl(false);
}
@Test
public void testTickWithNanoTimeDerivedValueWhichWrapsBothLocalFirstWithLocalTimeout() throws EngineStateException {
doTickWithNanoTimeDerivedValueWhichWrapsBothLocalFirstTestImpl(true);
}
private void doTickWithNanoTimeDerivedValueWhichWrapsBothLocalFirstTestImpl(boolean allowLocalTimeout) throws EngineStateException {
int localTimeout = 5000;
int remoteTimeoutHalf = 2000;
assertTrue(remoteTimeoutHalf < localTimeout);
long offset = 500;
assertTrue(offset < remoteTimeoutHalf);
ProtonEngine engine = ProtonEngineFactory.createDefaultEngine();
engine.errorHandler(result -> failure = result);
ProtonTestPeer peer = new ProtonTestPeer(engine);
engine.outputConsumer(peer);
Connection connection = engine.start();
assertNotNull(connection);
peer.expectAMQPHeader().respondWithAMQPHeader();
// Handle the peer transmitting [half] their timeout. We halve it on receipt to avoid spurious timeouts
// in case they have not actually transmitted half their timeout, as the AMQP spec only says they SHOULD do that.
peer.expectOpen().respond().withIdleTimeOut(remoteTimeoutHalf * 2);
connection.setIdleTimeout(localTimeout);
connection.open();
long deadline = engine.tick(Long.MAX_VALUE - offset);
assertEquals("Unexpected deadline returned", Long.MIN_VALUE + (remoteTimeoutHalf - offset) -1, deadline);
deadline = engine.tick(Long.MAX_VALUE - (offset - 100)); // Wait for less time than the deadline with no data - get the same value
assertEquals("When the deadline hasn't been reached tick() should return the previous deadline", Long.MIN_VALUE + (remoteTimeoutHalf - offset) -1, deadline);
assertEquals("When the deadline hasn't been reached tick() shouldn't write data", 0, peer.getEmptyFrameCount());
peer.expectEmptyFrame();
deadline = engine.tick(Long.MIN_VALUE + (remoteTimeoutHalf - offset) -1); // Wait for the deadline - next deadline should be previous + remoteTimeoutHalf;
assertEquals("When the deadline has been reached expected a new remote deadline to be returned", Long.MIN_VALUE + (remoteTimeoutHalf - offset) -1 + remoteTimeoutHalf, deadline);
assertEquals("tick() should have written data", 1, peer.getEmptyFrameCount());
peer.expectEmptyFrame();
deadline = engine.tick(Long.MIN_VALUE + (remoteTimeoutHalf - offset) -1 + remoteTimeoutHalf); // Wait for the deadline - next deadline should be orig + localTimeout;
assertEquals("When the deadline has been reached expected a new local deadline to be returned", Long.MIN_VALUE + (localTimeout - offset) -1, deadline);
assertEquals("tick() should have written data", 2, peer.getEmptyFrameCount());
peer.waitForScriptToComplete();
if (allowLocalTimeout) {
peer.expectClose().respond();
assertEquals("Connection should be active", ConnectionState.ACTIVE, connection.getState());
engine.tick(Long.MIN_VALUE + (localTimeout - offset) -1); // Wait for the deadline, but don't receive traffic, allow local timeout to expire
assertEquals("Calling tick() after the deadline should result in the connection being closed", ConnectionState.CLOSED, connection.getState());
assertEquals("tick() should have written data but not an empty frame", 2, peer.getEmptyFrameCount());
peer.waitForScriptToComplete();
assertNotNull(failure);
} else {
// Receive Empty frame to satisfy local deadline
peer.remoteEmptyFrame().now();
deadline = engine.tick(Long.MIN_VALUE + (localTimeout - offset) -1); // Wait for the deadline - next deadline should be orig + 3*remoteTimeoutHalf;
assertEquals("Receiving data should have reset the deadline (to the remote one)", Long.MIN_VALUE + (3* remoteTimeoutHalf) - offset -1, deadline);
assertEquals("tick() shouldn't have written data", 2, peer.getEmptyFrameCount());
assertEquals("Connection should be active", ConnectionState.ACTIVE, connection.getState());
peer.waitForScriptToComplete();
assertNull(failure);
}
}
}
|
qpid-proton4j-engine/src/test/java/org/apache/qpid/proton4j/engine/impl/ProtonEngineTest.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.qpid.proton4j.engine.impl;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import org.apache.qpid.proton4j.amqp.driver.ProtonTestPeer;
import org.apache.qpid.proton4j.engine.Connection;
import org.apache.qpid.proton4j.engine.ConnectionState;
import org.apache.qpid.proton4j.engine.Session;
import org.apache.qpid.proton4j.engine.exceptions.EngineStateException;
import org.junit.Test;
/**
* Test for basic functionality of the ProtonEngine implementation.
*/
public class ProtonEngineTest extends ProtonEngineTestSupport {
@Test
public void testEngineStart() {
ProtonEngine engine = ProtonEngineFactory.createDefaultEngine();
engine.errorHandler(result -> failure = result);
// Engine cannot accept input bytes until started.
assertFalse(engine.isWritable());
Connection connection = engine.start();
assertNotNull(connection);
// Default engine should start and return a connection immediately
assertTrue(engine.isWritable());
assertNotNull(connection);
assertNull(failure);
}
@Test
public void testEngineEmitsAMQPHeaderOnConnectionOpen() {
ProtonEngine engine = ProtonEngineFactory.createDefaultEngine();
engine.errorHandler(result -> failure = result);
ProtonTestPeer peer = new ProtonTestPeer(engine);
engine.outputConsumer(peer);
Connection connection = engine.start();
assertNotNull(connection);
// Default engine should start and return a connection immediately
assertNotNull(connection);
peer.expectAMQPHeader().respondWithAMQPHeader();
peer.expectOpen().respond().withContainerId("driver");
connection.setContainerId("test");
connection.open();
peer.waitForScriptToComplete();
assertEquals(ConnectionState.ACTIVE, connection.getState());
assertEquals(ConnectionState.ACTIVE, connection.getRemoteState());
assertNull(failure);
}
private void doTestNoArgTickFailsWhenConnectionNotOpened(boolean setLocalTimeout) throws EngineStateException {
ProtonEngine engine = ProtonEngineFactory.createDefaultEngine();
engine.errorHandler(result -> failure = result);
ProtonTestPeer peer = new ProtonTestPeer(engine);
engine.outputConsumer(peer);
Connection connection = engine.start();
assertNotNull(connection);
if (setLocalTimeout) {
connection.setIdleTimeout(1000);
}
try {
engine.tick();
fail("Should not be able to tick an unopened connection");
} catch (IllegalStateException ise) {
}
}
@Test
public void testNoArgTickFailsWhenConnectionNotOpenedNoLocalIdleSet() throws EngineStateException {
doTestTickFailsBasedOnState(false, false, false, false, false);
}
@Test
public void testNoArgTickFailsWhenConnectionNotOpenedLocalIdleSet() throws EngineStateException {
doTestTickFailsBasedOnState(true, false, false, false, false);
}
@Test
public void testTickFailsWhenConnectionNotOpenedNoLocalIdleSet() throws EngineStateException {
doTestTickFailsBasedOnState(false, true, false, false, false);
}
@Test
public void testTickFailsWhenConnectionNotOpenedLocalIdleSet() throws EngineStateException {
doTestTickFailsBasedOnState(true, true, false, false, false);
}
@Test
public void testNoArgTickFailsWhenEngineShutdownNoLocalIdleSet() throws EngineStateException {
doTestTickFailsBasedOnState(false, false, true, true, true);
}
@Test
public void testNoArgTickFailsWhenEngineShutdownLocalIdleSet() throws EngineStateException {
doTestTickFailsBasedOnState(true, false, true, true, true);
}
@Test
public void testTickFailsWhenEngineIsShutdownNoLocalIdleSet() throws EngineStateException {
doTestTickFailsBasedOnState(false, true, true, true, true);
}
@Test
public void testTickFailsWhenEngineIsShutdownLocalIdleSet() throws EngineStateException {
doTestTickFailsBasedOnState(true, true, true, true, true);
}
private void doTestTickFailsBasedOnState(boolean setLocalTimeout, boolean tickWithArgs, boolean open, boolean close, boolean shutdown) throws EngineStateException {
ProtonEngine engine = ProtonEngineFactory.createDefaultEngine();
engine.errorHandler(result -> failure = result);
ProtonTestPeer peer = new ProtonTestPeer(engine);
engine.outputConsumer(peer);
Connection connection = engine.start();
assertNotNull(connection);
if (setLocalTimeout) {
connection.setIdleTimeout(1000);
}
if (open) {
peer.expectAMQPHeader().respondWithAMQPHeader();
peer.expectOpen().respond();
connection.open();
}
if (close) {
peer.expectClose().respond();
connection.close();
}
peer.waitForScriptToComplete();
assertNull(failure);
if (shutdown) {
engine.shutdown();
}
try {
if (tickWithArgs) {
engine.tick(5000);
} else {
engine.tick();
}
fail("Should not be able to tick an unopened connection");
} catch (IllegalStateException ise) {
}
}
@Test
public void testTickRemoteTimeout() throws EngineStateException {
ProtonEngine engine = ProtonEngineFactory.createDefaultEngine();
engine.errorHandler(result -> failure = result);
ProtonTestPeer peer = new ProtonTestPeer(engine);
engine.outputConsumer(peer);
Connection connection = engine.start();
assertNotNull(connection);
final int remoteTimeout = 4000;
peer.expectAMQPHeader().respondWithAMQPHeader();
peer.expectOpen().withIdleTimeOut(nullValue()).respond().withIdleTimeOut(remoteTimeout);
// No local idle timeout is set; the remote peer advertises one in its Open frame
connection.open();
long deadline = engine.tick(0);
assertEquals("Expected to be returned a deadline of 2000", 2000, deadline); // deadline = 4000 / 2
deadline = engine.tick(1000); // Wait for less than the deadline with no data - get the same value
assertEquals("When the deadline hasn't been reached tick() should return the previous deadline", 2000, deadline);
assertEquals("When the deadline hasn't been reached tick() shouldn't write data", 0, peer.getEmptyFrameCount());
peer.expectEmptyFrame();
deadline = engine.tick(remoteTimeout / 2); // Wait for the deadline - next deadline should be (4000/2)*2
assertEquals("When the deadline has been reached expected a new deadline to be returned 4000", 4000, deadline);
assertEquals("tick() should have written data", 1, peer.getEmptyFrameCount());
peer.expectBegin();
Session session = connection.session().open();
deadline = engine.tick(3000);
assertEquals("Writing data resets the deadline", 5000, deadline);
assertEquals("When the deadline is reset tick() shouldn't write an empty frame", 1, peer.getEmptyFrameCount());
peer.expectAttach();
session.sender("test").open();
deadline = engine.tick(4000);
assertEquals("Writing data resets the deadline", 6000, deadline);
assertEquals("When the deadline is reset tick() shouldn't write an empty frame", 1, peer.getEmptyFrameCount());
peer.waitForScriptToComplete();
assertNull(failure);
}
@Test
public void testTickLocalTimeout() throws EngineStateException {
ProtonEngine engine = ProtonEngineFactory.createDefaultEngine();
engine.errorHandler(result -> failure = result);
ProtonTestPeer peer = new ProtonTestPeer(engine);
engine.outputConsumer(peer);
Connection connection = engine.start();
assertNotNull(connection);
final int localTimeout = 4000;
peer.expectAMQPHeader().respondWithAMQPHeader();
peer.expectOpen().withIdleTimeOut(localTimeout).respond();
// Set our local idleTimeout
connection.setIdleTimeout(localTimeout);
connection.open();
long deadline = engine.tick(0);
assertEquals("Expected to be returned a deadline of 4000", 4000, deadline);
deadline = engine.tick(1000); // Wait for less than the deadline with no data - get the same value
assertEquals("When the deadline hasn't been reached tick() should return the previous deadline", 4000, deadline);
assertEquals("Reading data should never result in a frame being written", 0, peer.getEmptyFrameCount());
// remote sends an empty frame now
peer.remoteEmptyFrame().now();
deadline = engine.tick(2000);
assertEquals("Reading data resets the deadline", 6000, deadline);
assertEquals("Reading data should never result in a frame being written", 0, peer.getEmptyFrameCount());
assertEquals("Reading data before the deadline should keep the connection open", ConnectionState.ACTIVE, connection.getState());
peer.expectClose().respond();
deadline = engine.tick(7000);
assertEquals("Calling tick() after the deadline should result in the connection being closed", ConnectionState.CLOSED, connection.getState());
peer.waitForScriptToComplete();
assertNotNull(failure);
}
@Test
public void testTickWithZeroIdleTimeoutsGivesZeroDeadline() throws EngineStateException {
doTickWithNoIdleTimeoutGivesZeroDeadlineTestImpl(true);
}
@Test
public void testTickWithNullIdleTimeoutsGivesZeroDeadline() throws EngineStateException {
doTickWithNoIdleTimeoutGivesZeroDeadlineTestImpl(false);
}
private void doTickWithNoIdleTimeoutGivesZeroDeadlineTestImpl(boolean useZero) throws EngineStateException {
ProtonEngine engine = ProtonEngineFactory.createDefaultEngine();
engine.errorHandler(result -> failure = result);
ProtonTestPeer peer = new ProtonTestPeer(engine);
engine.outputConsumer(peer);
Connection connection = engine.start();
assertNotNull(connection);
peer.expectAMQPHeader().respondWithAMQPHeader();
if (useZero) {
peer.expectOpen().withIdleTimeOut(nullValue()).respond().withIdleTimeOut(0);
} else {
peer.expectOpen().withIdleTimeOut(nullValue()).respond();
}
connection.open();
peer.waitForScriptToComplete();
assertNull(failure);
assertEquals(0, connection.getRemoteIdleTimeout());
long deadline = engine.tick(0);
assertEquals("Unexpected deadline returned", 0, deadline);
deadline = engine.tick(10);
assertEquals("Unexpected deadline returned", 0, deadline);
peer.waitForScriptToComplete();
assertNull(failure);
}
@Test
public void testTickWithLocalTimeout() throws EngineStateException {
// all-positive
doTickWithLocalTimeoutTestImpl(4000, 10000, 14000, 18000, 22000);
// all-negative
doTickWithLocalTimeoutTestImpl(2000, -100000, -98000, -96000, -94000);
// negative to positive missing 0
doTickWithLocalTimeoutTestImpl(500, -950, -450, 50, 550);
// negative to positive striking 0
doTickWithLocalTimeoutTestImpl(3000, -6000, -3000, 1, 3001);
}
private void doTickWithLocalTimeoutTestImpl(int localTimeout, long tick1, long expectedDeadline1, long expectedDeadline2, long expectedDeadline3) throws EngineStateException {
this.failure = null;
ProtonEngine engine = ProtonEngineFactory.createDefaultEngine();
engine.errorHandler(result -> failure = result);
ProtonTestPeer peer = new ProtonTestPeer(engine);
engine.outputConsumer(peer);
Connection connection = engine.start();
assertNotNull(connection);
peer.expectAMQPHeader().respondWithAMQPHeader();
peer.expectOpen().withIdleTimeOut(localTimeout).respond();
// Set our local idleTimeout
connection.setIdleTimeout(localTimeout);
connection.open();
peer.waitForScriptToComplete();
assertNull(failure);
long deadline = engine.tick(tick1);
assertEquals("Unexpected deadline returned", expectedDeadline1, deadline);
// Wait for less time than the deadline with no data - get the same value
long interimTick = tick1 + 10;
assertTrue (interimTick < expectedDeadline1);
assertEquals("When the deadline hasn't been reached tick() should return the previous deadline", expectedDeadline1, engine.tick(interimTick));
assertEquals("When the deadline hasn't been reached tick() shouldn't write data", 1, peer.getPerformativeCount());
assertNull(failure);
peer.remoteEmptyFrame().now();
deadline = engine.tick(expectedDeadline1);
assertEquals("When the deadline has been reached expected a new local deadline to be returned", expectedDeadline2, deadline);
assertEquals("When the deadline hasn't been reached tick() shouldn't write data", 1, peer.getPerformativeCount());
assertNull(failure);
peer.remoteEmptyFrame().now();
deadline = engine.tick(expectedDeadline2);
assertEquals("When the deadline has been reached expected a new local deadline to be returned", expectedDeadline3, deadline);
assertEquals("When the deadline hasn't been reached tick() shouldn't write data", 1, peer.getPerformativeCount());
assertNull(failure);
peer.expectClose().withError(notNullValue()).respond();
assertEquals("Connection should be active", ConnectionState.ACTIVE, connection.getState());
engine.tick(expectedDeadline3); // Wait for the deadline, but don't receive traffic, allow local timeout to expire
assertEquals("tick() should have written data", 2, peer.getPerformativeCount());
assertEquals("Calling tick() after the deadline should result in the connection being closed", ConnectionState.CLOSED, connection.getState());
peer.waitForScriptToComplete();
assertNotNull(failure);
}
@Test
public void testTickWithRemoteTimeout() throws EngineStateException {
// all-positive
doTickWithRemoteTimeoutTestImpl(4000, 10000, 14000, 18000, 22000);
// all-negative
doTickWithRemoteTimeoutTestImpl(2000, -100000, -98000, -96000, -94000);
// negative to positive missing 0
doTickWithRemoteTimeoutTestImpl(500, -950, -450, 50, 550);
// negative to positive striking 0
doTickWithRemoteTimeoutTestImpl(3000, -6000, -3000, 1, 3001);
}
private void doTickWithRemoteTimeoutTestImpl(int remoteTimeoutHalf, long tick1, long expectedDeadline1, long expectedDeadline2, long expectedDeadline3) throws EngineStateException {
ProtonEngine engine = ProtonEngineFactory.createDefaultEngine();
engine.errorHandler(result -> failure = result);
ProtonTestPeer peer = new ProtonTestPeer(engine);
engine.outputConsumer(peer);
Connection connection = engine.start();
assertNotNull(connection);
peer.expectAMQPHeader().respondWithAMQPHeader();
// Handle the peer transmitting [half] their timeout. We halve it on receipt to avoid spurious timeouts
// in case they have not transmitted half their actual timeout, as the AMQP spec only says they SHOULD do that.
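// For example, the first call above passes remoteTimeoutHalf = 4000, so the peer advertises an idle-timeout of 8000
// and a first tick at 10000 yields empty-frame deadlines at 14000, 18000 and 22000.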
peer.expectOpen().respond().withIdleTimeOut(remoteTimeoutHalf * 2);
connection.open();
peer.waitForScriptToComplete();
assertNull(failure);
long deadline = engine.tick(tick1);
assertEquals("Unexpected deadline returned", expectedDeadline1, deadline);
// Wait for less time than the deadline with no data - get the same value
long interimTick = tick1 + 10;
assertTrue (interimTick < expectedDeadline1);
assertEquals("When the deadline hasn't been reached tick() should return the previous deadline", expectedDeadline1, engine.tick(interimTick));
assertEquals("When the deadline hasn't been reached tick() shouldn't write data", 1, peer.getPerformativeCount());
assertEquals("When the deadline hasn't been reached tick() shouldn't write data", 0, peer.getEmptyFrameCount());
peer.expectEmptyFrame();
deadline = engine.tick(expectedDeadline1);
assertEquals("When the deadline has been reached expected a new remote deadline to be returned", expectedDeadline2, deadline);
assertEquals("tick() should have written data", 1, peer.getEmptyFrameCount());
peer.expectBegin();
// Do some actual work, create real traffic, removing the need to send empty frame to satisfy idle-timeout
connection.session().open();
assertEquals("session open should have written data", 2, peer.getPerformativeCount());
deadline = engine.tick(expectedDeadline2);
assertEquals("When the deadline has been reached expected a new remote deadline to be returned", expectedDeadline3, deadline);
assertEquals("tick() should not have written data as there was actual activity", 2, peer.getPerformativeCount());
assertEquals("tick() should not have written data as there was actual activity", 1, peer.getEmptyFrameCount());
peer.expectEmptyFrame();
engine.tick(expectedDeadline3);
assertEquals("tick() should have written data", 2, peer.getEmptyFrameCount());
peer.waitForScriptToComplete();
assertNull(failure);
}
@Test
public void testTickWithBothTimeouts() throws EngineStateException {
// all-positive
doTickWithBothTimeoutsTestImpl(true, 5000, 2000, 10000, 12000, 14000, 15000);
doTickWithBothTimeoutsTestImpl(false, 5000, 2000, 10000, 12000, 14000, 15000);
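// e.g. localTimeout 5000 with remoteTimeoutHalf 2000: from a first tick at 10000 the remote (empty-frame)
// deadlines fall at 12000 and 14000 before the local close deadline at 15000.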
// all-negative
doTickWithBothTimeoutsTestImpl(true, 10000, 4000, -100000, -96000, -92000, -90000);
doTickWithBothTimeoutsTestImpl(false, 10000, 4000, -100000, -96000, -92000, -90000);
// negative to positive missing 0
doTickWithBothTimeoutsTestImpl(true, 500, 200, -450, -250, -50, 50);
doTickWithBothTimeoutsTestImpl(false, 500, 200, -450, -250, -50, 50);
// negative to positive striking 0 with local deadline
doTickWithBothTimeoutsTestImpl(true, 500, 200, -500, -300, -100, 1);
doTickWithBothTimeoutsTestImpl(false, 500, 200, -500, -300, -100, 1);
// negative to positive striking 0 with remote deadline
doTickWithBothTimeoutsTestImpl(true, 500, 200, -200, 1, 201, 300);
doTickWithBothTimeoutsTestImpl(false, 500, 200, -200, 1, 201, 300);
}
private void doTickWithBothTimeoutsTestImpl(boolean allowLocalTimeout, int localTimeout, int remoteTimeoutHalf, long tick1,
long expectedDeadline1, long expectedDeadline2, long expectedDeadline3) throws EngineStateException {
this.failure = null;
ProtonEngine engine = ProtonEngineFactory.createDefaultEngine();
engine.errorHandler(result -> failure = result);
ProtonTestPeer peer = new ProtonTestPeer(engine);
engine.outputConsumer(peer);
Connection connection = engine.start();
assertNotNull(connection);
peer.expectAMQPHeader().respondWithAMQPHeader();
// Handle the peer transmitting [half] their timeout. We halve it on receipt to avoid spurious timeouts
// in case they have not transmitted half their actual timeout, as the AMQP spec only says they SHOULD do that.
peer.expectOpen().respond().withIdleTimeOut(remoteTimeoutHalf * 2);
connection.setIdleTimeout(localTimeout);
connection.open();
long deadline = engine.tick(tick1);
assertEquals("Unexpected deadline returned", expectedDeadline1, deadline);
// Wait for less time than the deadline with no data - get the same value
long interimTick = tick1 + 10;
assertTrue (interimTick < expectedDeadline1);
assertEquals("When the deadline hasn't been reached tick() should return the previous deadline", expectedDeadline1, engine.tick(interimTick));
assertEquals("When the deadline hasn't been reached tick() shouldn't write data", 0, peer.getEmptyFrameCount());
peer.expectEmptyFrame();
deadline = engine.tick(expectedDeadline1);
assertEquals("When the deadline has been reached expected a new remote deadline to be returned", expectedDeadline2, deadline);
assertEquals("tick() should have written data", 1, peer.getEmptyFrameCount());
peer.expectEmptyFrame();
deadline = engine.tick(expectedDeadline2);
assertEquals("When the deadline has been reached expected a new local deadline to be returned", expectedDeadline3, deadline);
assertEquals("tick() should have written data", 2, peer.getEmptyFrameCount());
peer.waitForScriptToComplete();
if (allowLocalTimeout) {
peer.expectClose().respond();
assertEquals("Connection should be active", ConnectionState.ACTIVE, connection.getState());
engine.tick(expectedDeadline3); // Wait for the deadline, but don't receive traffic, allow local timeout to expire
assertEquals("Calling tick() after the deadline should result in the connection being closed", ConnectionState.CLOSED, connection.getState());
assertEquals("tick() should have written data but not an empty frame", 2, peer.getEmptyFrameCount());
peer.waitForScriptToComplete();
assertNotNull(failure);
} else {
peer.remoteEmptyFrame().now();
deadline = engine.tick(expectedDeadline3);
assertEquals("Receiving data should have reset the deadline (to the next remote one)", expectedDeadline2 + (remoteTimeoutHalf), deadline);
assertEquals("tick() shouldn't have written data", 2, peer.getEmptyFrameCount());
assertEquals("Connection should be active", ConnectionState.ACTIVE, connection.getState());
peer.waitForScriptToComplete();
assertNull(failure);
}
}
@Test
public void testTickWithNanoTimeDerivedValueWhichWrapsLocalThenRemote() throws EngineStateException {
doTickWithNanoTimeDerivedValueWhichWrapsLocalThenRemoteTestImpl(false);
}
@Test
public void testTickWithNanoTimeDerivedValueWhichWrapsLocalThenRemoteWithLocalTimeout() throws EngineStateException {
doTickWithNanoTimeDerivedValueWhichWrapsLocalThenRemoteTestImpl(true);
}
private void doTickWithNanoTimeDerivedValueWhichWrapsLocalThenRemoteTestImpl(boolean allowLocalTimeout) throws EngineStateException {
int localTimeout = 5000;
int remoteTimeoutHalf = 2000;
assertTrue(remoteTimeoutHalf < localTimeout);
long offset = 2500;
assertTrue(offset < localTimeout);
assertTrue(offset > remoteTimeoutHalf);
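// The first tick is taken just before the long wraps: MAX_VALUE - 2500 + 2000 = MAX_VALUE - 500 (no wrap yet),
// while adding another remoteTimeoutHalf overflows to MIN_VALUE + 1499, i.e. MIN_VALUE + 2*remoteTimeoutHalf - offset - 1.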
ProtonEngine engine = ProtonEngineFactory.createDefaultEngine();
engine.errorHandler(result -> failure = result);
ProtonTestPeer peer = new ProtonTestPeer(engine);
engine.outputConsumer(peer);
Connection connection = engine.start();
assertNotNull(connection);
peer.expectAMQPHeader().respondWithAMQPHeader();
// Handle the peer transmitting [half] their timeout. We halve it on receipt to avoid spurious timeouts
// in case they have not transmitted half their actual timeout, as the AMQP spec only says they SHOULD do that.
peer.expectOpen().respond().withIdleTimeOut(remoteTimeoutHalf * 2);
connection.setIdleTimeout(localTimeout);
connection.open();
long deadline = engine.tick(Long.MAX_VALUE - offset);
assertEquals("Unexpected deadline returned", Long.MAX_VALUE - offset + remoteTimeoutHalf, deadline);
deadline = engine.tick(Long.MAX_VALUE - (offset - 100)); // Wait for less time than the deadline with no data - get the same value
assertEquals("When the deadline hasn't been reached tick() should return the previous deadline", Long.MAX_VALUE -offset + remoteTimeoutHalf, deadline);
assertEquals("When the deadline hasn't been reached tick() shouldn't write data", 0, peer.getEmptyFrameCount());
peer.expectEmptyFrame();
deadline = engine.tick(Long.MAX_VALUE -offset + remoteTimeoutHalf); // Wait for the deadline - next deadline should be previous + remoteTimeoutHalf;
assertEquals("When the deadline has been reached expected a new remote deadline to be returned", Long.MIN_VALUE + (2* remoteTimeoutHalf) - offset -1, deadline);
assertEquals("tick() should have written data", 1, peer.getEmptyFrameCount());
peer.expectEmptyFrame();
deadline = engine.tick(Long.MIN_VALUE + (2* remoteTimeoutHalf) - offset -1); // Wait for the deadline - next deadline should be orig + localTimeout;
assertEquals("When the deadline has been reached expected a new local deadline to be returned", Long.MIN_VALUE + (localTimeout - offset) -1, deadline);
assertEquals("tick() should have written data", 2, peer.getEmptyFrameCount());
peer.waitForScriptToComplete();
if (allowLocalTimeout) {
peer.expectClose().respond();
assertEquals("Connection should be active", ConnectionState.ACTIVE, connection.getState());
engine.tick(Long.MIN_VALUE + (localTimeout - offset) -1); // Wait for the deadline, but don't receive traffic, allow local timeout to expire
assertEquals("Calling tick() after the deadline should result in the connection being closed", ConnectionState.CLOSED, connection.getState());
assertEquals("tick() should have written data but not an empty frame", 2, peer.getEmptyFrameCount());
peer.waitForScriptToComplete();
assertNotNull(failure);
} else {
peer.remoteEmptyFrame().now();
deadline = engine.tick(Long.MIN_VALUE + (localTimeout - offset) -1); // Wait for the deadline - next deadline should be orig + 3*remoteTimeoutHalf;
assertEquals("Receiving data should have reset the deadline (to the remote one)", Long.MIN_VALUE + (3* remoteTimeoutHalf) - offset -1, deadline);
assertEquals("tick() shouldn't have written data", 2, peer.getEmptyFrameCount());
assertEquals("Connection should be active", ConnectionState.ACTIVE, connection.getState());
peer.waitForScriptToComplete();
assertNull(failure);
}
}
@Test
public void testTickWithNanoTimeDerivedValueWhichWrapsRemoteThenLocal() throws EngineStateException {
doTickWithNanoTimeDerivedValueWhichWrapsRemoteThenLocalTestImpl(false);
}
@Test
public void testTickWithNanoTimeDerivedValueWhichWrapsRemoteThenLocalWithLocalTimeout() throws EngineStateException {
doTickWithNanoTimeDerivedValueWhichWrapsRemoteThenLocalTestImpl(true);
}
private void doTickWithNanoTimeDerivedValueWhichWrapsRemoteThenLocalTestImpl(boolean allowLocalTimeout) throws EngineStateException {
int localTimeout = 2000;
int remoteTimeoutHalf = 5000;
assertTrue(localTimeout < remoteTimeoutHalf);
long offset = 2500;
assertTrue(offset > localTimeout);
assertTrue(offset < remoteTimeoutHalf);
ProtonEngine engine = ProtonEngineFactory.createDefaultEngine();
engine.errorHandler(result -> failure = result);
ProtonTestPeer peer = new ProtonTestPeer(engine);
engine.outputConsumer(peer);
Connection connection = engine.start();
assertNotNull(connection);
peer.expectAMQPHeader().respondWithAMQPHeader();
// Handle the peer transmitting [half] their timeout. We halve it on receipt to avoid spurious timeouts
// in case they have not transmitted half their actual timeout, as the AMQP spec only says they SHOULD do that.
peer.expectOpen().respond().withIdleTimeOut(remoteTimeoutHalf * 2);
connection.setIdleTimeout(localTimeout);
connection.open();
long deadline = engine.tick(Long.MAX_VALUE - offset);
assertEquals("Unexpected deadline returned", Long.MAX_VALUE - offset + localTimeout, deadline);
deadline = engine.tick(Long.MAX_VALUE - (offset - 100)); // Wait for less time than the deadline with no data - get the same value
assertEquals("When the deadline hasn't been reached tick() should return the previous deadline", Long.MAX_VALUE - offset + localTimeout, deadline);
assertEquals("tick() shouldn't have written data", 0, peer.getEmptyFrameCount());
// Receive Empty frame to satisfy local deadline
peer.remoteEmptyFrame().now();
deadline = engine.tick(Long.MAX_VALUE - offset + localTimeout); // Wait for the deadline - next deadline should be orig + 2* localTimeout;
assertEquals("When the deadline has been reached expected a new local deadline to be returned", Long.MIN_VALUE + (localTimeout - offset) -1 + localTimeout, deadline);
assertEquals("tick() should not have written data", 0, peer.getEmptyFrameCount());
peer.waitForScriptToComplete();
if (allowLocalTimeout) {
peer.expectClose().respond();
assertEquals("Connection should be active", ConnectionState.ACTIVE, connection.getState());
engine.tick(Long.MIN_VALUE + (localTimeout - offset) -1 + localTimeout); // Wait for the deadline, but don't receive traffic, allow local timeout to expire
assertEquals("Calling tick() after the deadline should result in the connection being closed", ConnectionState.CLOSED, connection.getState());
assertEquals("tick() should have written data but not an empty frame", 0, peer.getEmptyFrameCount());
peer.waitForScriptToComplete();
assertNotNull(failure);
} else {
// Receive Empty frame to satisfy local deadline
peer.remoteEmptyFrame().now();
deadline = engine.tick(Long.MIN_VALUE + (localTimeout - offset) -1 + localTimeout); // Wait for the deadline - next deadline should be orig + remoteTimeoutHalf;
assertEquals("Receiving data should have reset the deadline (to the remote one)", Long.MIN_VALUE + remoteTimeoutHalf - offset -1, deadline);
assertEquals("tick() shouldn't have written data", 0, peer.getEmptyFrameCount());
peer.expectEmptyFrame();
deadline = engine.tick(Long.MIN_VALUE + remoteTimeoutHalf - offset -1); // Wait for the deadline - next deadline should be orig + 3* localTimeout;
assertEquals("When the deadline has been reached expected a new local deadline to be returned", Long.MIN_VALUE + (3* localTimeout) - offset -1, deadline);
assertEquals("tick() should have written an empty frame", 1, peer.getEmptyFrameCount());
assertEquals("Connection should be active", ConnectionState.ACTIVE, connection.getState());
peer.waitForScriptToComplete();
assertNull(failure);
}
}
@Test
public void testTickWithNanoTimeDerivedValueWhichWrapsBothRemoteFirst() throws EngineStateException {
doTickWithNanoTimeDerivedValueWhichWrapsBothRemoteFirstTestImpl(false);
}
@Test
public void testTickWithNanoTimeDerivedValueWhichWrapsBothRemoteFirstWithLocalTimeout() throws EngineStateException {
doTickWithNanoTimeDerivedValueWhichWrapsBothRemoteFirstTestImpl(true);
}
private void doTickWithNanoTimeDerivedValueWhichWrapsBothRemoteFirstTestImpl(boolean allowLocalTimeout) throws EngineStateException {
int localTimeout = 2000;
int remoteTimeoutHalf = 2500;
assertTrue(localTimeout < remoteTimeoutHalf);
long offset = 500;
assertTrue(offset < localTimeout);
ProtonEngine engine = ProtonEngineFactory.createDefaultEngine();
engine.errorHandler(result -> failure = result);
ProtonTestPeer peer = new ProtonTestPeer(engine);
engine.outputConsumer(peer);
Connection connection = engine.start();
assertNotNull(connection);
peer.expectAMQPHeader().respondWithAMQPHeader();
// Handle the peer transmitting [half] their timeout. We halve it on receipt to avoid spurious timeouts
// in case they have not transmitted half their actual timeout, as the AMQP spec only says they SHOULD do that.
peer.expectOpen().respond().withIdleTimeOut(remoteTimeoutHalf * 2);
connection.setIdleTimeout(localTimeout);
connection.open();
long deadline = engine.tick(Long.MAX_VALUE - offset);
assertEquals("Unexpected deadline returned", Long.MIN_VALUE + (localTimeout - offset) -1, deadline);
deadline = engine.tick(Long.MAX_VALUE - (offset - 100)); // Wait for less time than the deadline with no data - get the same value
assertEquals("When the deadline hasn't been reached tick() should return the previous deadline", Long.MIN_VALUE + (localTimeout - offset) -1, deadline);
assertEquals("tick() shouldn't have written data", 0, peer.getEmptyFrameCount());
// Receive Empty frame to satisfy local deadline
peer.remoteEmptyFrame().now();
deadline = engine.tick(Long.MIN_VALUE + (localTimeout - offset) -1); // Wait for the deadline - next deadline should be orig + remoteTimeoutHalf;
assertEquals("When the deadline has been reached expected a new remote deadline to be returned", Long.MIN_VALUE + (remoteTimeoutHalf - offset) -1, deadline);
assertEquals("When the deadline hasn't been reached tick() shouldn't write data", 0, peer.getEmptyFrameCount());
peer.expectEmptyFrame();
deadline = engine.tick(Long.MIN_VALUE + (remoteTimeoutHalf - offset) -1); // Wait for the deadline - next deadline should be orig + 2* localTimeout;
assertEquals("When the deadline has been reached expected a new local deadline to be returned", Long.MIN_VALUE + (localTimeout - offset) -1 + localTimeout, deadline);
assertEquals("tick() should have written data", 1, peer.getEmptyFrameCount());
peer.waitForScriptToComplete();
if (allowLocalTimeout) {
peer.expectClose().respond();
assertEquals("Connection should be active", ConnectionState.ACTIVE, connection.getState());
engine.tick(Long.MIN_VALUE + (localTimeout - offset) -1 + localTimeout); // Wait for the deadline, but don't receive traffic, allow local timeout to expire
assertEquals("Calling tick() after the deadline should result in the connection being closed", ConnectionState.CLOSED, connection.getState());
assertEquals("tick() should have written data but not an empty frame", 1, peer.getEmptyFrameCount());
peer.waitForScriptToComplete();
assertNotNull(failure);
} else {
// Receive Empty frame to satisfy local deadline
peer.remoteEmptyFrame().now();
deadline = engine.tick(Long.MIN_VALUE + (localTimeout - offset) -1 + localTimeout); // Wait for the deadline - next deadline should be orig + 2*remoteTimeoutHalf;
assertEquals("Receiving data should have reset the deadline (to the remote one)", Long.MIN_VALUE + (2* remoteTimeoutHalf) - offset -1, deadline);
assertEquals("tick() shouldn't have written data", 1, peer.getEmptyFrameCount());
assertEquals("Connection should be active", ConnectionState.ACTIVE, connection.getState());
peer.waitForScriptToComplete();
assertNull(failure);
}
}
@Test
public void testTickWithNanoTimeDerivedValueWhichWrapsBothLocalFirst() throws EngineStateException {
doTickWithNanoTimeDerivedValueWhichWrapsBothLocalFirstTestImpl(false);
}
@Test
public void testTickWithNanoTimeDerivedValueWhichWrapsBothLocalFirstWithLocalTimeout() throws EngineStateException {
doTickWithNanoTimeDerivedValueWhichWrapsBothLocalFirstTestImpl(true);
}
private void doTickWithNanoTimeDerivedValueWhichWrapsBothLocalFirstTestImpl(boolean allowLocalTimeout) throws EngineStateException {
int localTimeout = 5000;
int remoteTimeoutHalf = 2000;
assertTrue(remoteTimeoutHalf < localTimeout);
long offset = 500;
assertTrue(offset < remoteTimeoutHalf);
ProtonEngine engine = ProtonEngineFactory.createDefaultEngine();
engine.errorHandler(result -> failure = result);
ProtonTestPeer peer = new ProtonTestPeer(engine);
engine.outputConsumer(peer);
Connection connection = engine.start();
assertNotNull(connection);
peer.expectAMQPHeader().respondWithAMQPHeader();
// Handle the peer transmitting [half] their timeout. We halve it on receipt to avoid spurious timeouts
// in case they have not transmitted half their actual timeout, as the AMQP spec only says they SHOULD do that.
peer.expectOpen().respond().withIdleTimeOut(remoteTimeoutHalf * 2);
connection.setIdleTimeout(localTimeout);
connection.open();
long deadline = engine.tick(Long.MAX_VALUE - offset);
assertEquals("Unexpected deadline returned", Long.MIN_VALUE + (remoteTimeoutHalf - offset) -1, deadline);
deadline = engine.tick(Long.MAX_VALUE - (offset - 100)); // Wait for less time than the deadline with no data - get the same value
assertEquals("When the deadline hasn't been reached tick() should return the previous deadline", Long.MIN_VALUE + (remoteTimeoutHalf - offset) -1, deadline);
assertEquals("When the deadline hasn't been reached tick() shouldn't write data", 0, peer.getEmptyFrameCount());
peer.expectEmptyFrame();
deadline = engine.tick(Long.MIN_VALUE + (remoteTimeoutHalf - offset) -1); // Wait for the deadline - next deadline should be previous + remoteTimeoutHalf;
assertEquals("When the deadline has been reached expected a new remote deadline to be returned", Long.MIN_VALUE + (remoteTimeoutHalf - offset) -1 + remoteTimeoutHalf, deadline);
assertEquals("tick() should have written data", 1, peer.getEmptyFrameCount());
peer.expectEmptyFrame();
deadline = engine.tick(Long.MIN_VALUE + (remoteTimeoutHalf - offset) -1 + remoteTimeoutHalf); // Wait for the deadline - next deadline should be orig + localTimeout;
assertEquals("When the deadline has been reached expected a new local deadline to be returned", Long.MIN_VALUE + (localTimeout - offset) -1, deadline);
assertEquals("tick() should have written data", 2, peer.getEmptyFrameCount());
peer.waitForScriptToComplete();
if (allowLocalTimeout) {
peer.expectClose().respond();
assertEquals("Connection should be active", ConnectionState.ACTIVE, connection.getState());
engine.tick(Long.MIN_VALUE + (localTimeout - offset) -1); // Wait for the deadline, but don't receive traffic, allow local timeout to expire
assertEquals("Calling tick() after the deadline should result in the connection being closed", ConnectionState.CLOSED, connection.getState());
assertEquals("tick() should have written data but not an empty frame", 2, peer.getEmptyFrameCount());
peer.waitForScriptToComplete();
assertNotNull(failure);
} else {
// Receive Empty frame to satisfy local deadline
peer.remoteEmptyFrame().now();
deadline = engine.tick(Long.MIN_VALUE + (localTimeout - offset) -1); // Wait for the deadline - next deadline should be orig + 3*remoteTimeoutHalf;
assertEquals("Receiving data should have reset the deadline (to the remote one)", Long.MIN_VALUE + (3* remoteTimeoutHalf) - offset -1, deadline);
assertEquals("tick() shouldn't have written data", 2, peer.getEmptyFrameCount());
assertEquals("Connection should be active", ConnectionState.ACTIVE, connection.getState());
peer.waitForScriptToComplete();
assertNull(failure);
}
}
}
|
Remove unused test method
|
qpid-proton4j-engine/src/test/java/org/apache/qpid/proton4j/engine/impl/ProtonEngineTest.java
|
Remove unused test method
|
<ide><path>qpid-proton4j-engine/src/test/java/org/apache/qpid/proton4j/engine/impl/ProtonEngineTest.java
<ide> assertNull(failure);
<ide> }
<ide>
<del>
<del> private void doTestNoArgTickFailsWhenConnectionNotOpened(boolean setLocalTimeout) throws EngineStateException {
<del> ProtonEngine engine = ProtonEngineFactory.createDefaultEngine();
<del> engine.errorHandler(result -> failure = result);
<del> ProtonTestPeer peer = new ProtonTestPeer(engine);
<del> engine.outputConsumer(peer);
<del>
<del> Connection connection = engine.start();
<del> assertNotNull(connection);
<del>
<del> if (setLocalTimeout) {
<del> connection.setIdleTimeout(1000);
<del> }
<del>
<del> try {
<del> engine.tick();
<del> fail("Should not be able to tick an unopened connection");
<del> } catch (IllegalStateException ise) {
<del> }
<del> }
<del>
<ide> @Test
<ide> public void testNoArgTickFailsWhenConnectionNotOpenedNoLocalIdleSet() throws EngineStateException {
<ide> doTestTickFailsBasedOnState(false, false, false, false, false);
|
|
Java
|
apache-2.0
|
44341c863002461d9b38ee22bce2439be5e819d4
| 0 |
balazs-zsoldos/liquibase,dprguard2000/liquibase,cleiter/liquibase,mwaylabs/liquibase,tjardo83/liquibase,mortegac/liquibase,FreshGrade/liquibase,rkrzewski/liquibase,mattbertolini/liquibase,OpenCST/liquibase,iherasymenko/liquibase,gquintana/liquibase,jimmycd/liquibase,ZEPowerGroup/liquibase,cbotiza/liquibase,russ-p/liquibase,AlisonSouza/liquibase,russ-p/liquibase,instantdelay/liquibase,syncron/liquibase,dprguard2000/liquibase,OpenCST/liquibase,lazaronixon/liquibase,NSIT/liquibase,mbreslow/liquibase,tjardo83/liquibase,mattbertolini/liquibase,hbogaards/liquibase,FreshGrade/liquibase,foxel/liquibase,CoderPaulK/liquibase,dbmanul/dbmanul,dprguard2000/liquibase,vfpfafrf/liquibase,ArloL/liquibase,mwaylabs/liquibase,ivaylo5ev/liquibase,pellcorp/liquibase,CoderPaulK/liquibase,klopfdreh/liquibase,liquibase/liquibase,CoderPaulK/liquibase,mortegac/liquibase,dbmanul/dbmanul,OculusVR/shanghai-liquibase,gquintana/liquibase,Datical/liquibase,mortegac/liquibase,Willem1987/liquibase,vast-engineering/liquibase,hbogaards/liquibase,maberle/liquibase,jimmycd/liquibase,cbotiza/liquibase,OpenCST/liquibase,instantdelay/liquibase,vfpfafrf/liquibase,Willem1987/liquibase,mattbertolini/liquibase,evigeant/liquibase,maberle/liquibase,fbiville/liquibase,gquintana/liquibase,cbotiza/liquibase,fossamagna/liquibase,balazs-zsoldos/liquibase,EVODelavega/liquibase,foxel/liquibase,ZEPowerGroup/liquibase,ivaylo5ev/liquibase,Willem1987/liquibase,dyk/liquibase,rkrzewski/liquibase,dbmanul/dbmanul,talklittle/liquibase,pellcorp/liquibase,C0mmi3/liquibase,syncron/liquibase,maberle/liquibase,talklittle/liquibase,hbogaards/liquibase,vbekiaris/liquibase,danielkec/liquibase,EVODelavega/liquibase,dyk/liquibase,foxel/liquibase,dbmanul/dbmanul,balazs-zsoldos/liquibase,cleiter/liquibase,maberle/liquibase,EVODelavega/liquibase,evigeant/liquibase,C0mmi3/liquibase,tjardo83/liquibase,fbiville/liquibase,gquintana/liquibase,syncron/liquibase,vbekiaris/liquibase,cleiter/liquibase,instantdelay/liquibase,danielkec/liquibase,dyk/liquibase,mortegac/liquibase,Datical/liquibase,danielkec/liquibase,Vampire/liquibase,Willem1987/liquibase,vbekiaris/liquibase,foxel/liquibase,pellcorp/liquibase,FreshGrade/liquibase,OculusVR/shanghai-liquibase,vast-engineering/liquibase,klopfdreh/liquibase,NSIT/liquibase,cbotiza/liquibase,adriens/liquibase,hbogaards/liquibase,fbiville/liquibase,C0mmi3/liquibase,NSIT/liquibase,instantdelay/liquibase,russ-p/liquibase,NSIT/liquibase,dyk/liquibase,fbiville/liquibase,AlisonSouza/liquibase,OculusVR/shanghai-liquibase,evigeant/liquibase,OpenCST/liquibase,dprguard2000/liquibase,evigeant/liquibase,mwaylabs/liquibase,ZEPowerGroup/liquibase,mbreslow/liquibase,talklittle/liquibase,jimmycd/liquibase,tjardo83/liquibase,vast-engineering/liquibase,Datical/liquibase,jimmycd/liquibase,mbreslow/liquibase,Vampire/liquibase,liquibase/liquibase,AlisonSouza/liquibase,fossamagna/liquibase,lazaronixon/liquibase,EVODelavega/liquibase,mwaylabs/liquibase,ArloL/liquibase,vfpfafrf/liquibase,vfpfafrf/liquibase,mbreslow/liquibase,Vampire/liquibase,syncron/liquibase,adriens/liquibase,AlisonSouza/liquibase,russ-p/liquibase,pellcorp/liquibase,balazs-zsoldos/liquibase,liquibase/liquibase,talklittle/liquibase,mattbertolini/liquibase,cleiter/liquibase,iherasymenko/liquibase,fossamagna/liquibase,CoderPaulK/liquibase,rkrzewski/liquibase,vbekiaris/liquibase,OculusVR/shanghai-liquibase,iherasymenko/liquibase,lazaronixon/liquibase,C0mmi3/liquibase,klopfdreh/liquibase,Datical/liquibase,iherasymenko/liquibase,ArloL/liquibase,danielkec/liquibase,vast-engineering/liquibase,klop
fdreh/liquibase,adriens/liquibase,lazaronixon/liquibase,FreshGrade/liquibase
|
package liquibase.database;
import liquibase.database.sql.RawSqlStatement;
import liquibase.database.sql.SqlStatement;
import liquibase.database.structure.DatabaseSnapshot;
import liquibase.database.structure.PostgresDatabaseSnapshot;
import liquibase.exception.JDBCException;
import liquibase.exception.CustomChangeException;
import liquibase.util.StringUtils;
import liquibase.diff.DiffStatusListener;
import java.sql.*;
import java.text.ParseException;
import java.util.HashSet;
import java.util.Set;
import java.util.List;
import java.util.ArrayList;
import java.util.logging.Level;
/**
* Encapsulates PostgreSQL database support.
*/
public class PostgresDatabase extends AbstractDatabase {
public static final String PRODUCT_NAME = "PostgreSQL";
private Set<String> systemTablesAndViews = new HashSet<String>();
private String defaultDatabaseSchemaName;
private String defaultCatalogName;
public PostgresDatabase() {
// systemTablesAndViews.add("pg_logdir_ls");
// systemTablesAndViews.add("administrable_role_authorizations");
// systemTablesAndViews.add("applicable_roles");
// systemTablesAndViews.add("attributes");
// systemTablesAndViews.add("check_constraint_routine_usage");
// systemTablesAndViews.add("check_constraints");
// systemTablesAndViews.add("column_domain_usage");
// systemTablesAndViews.add("column_privileges");
// systemTablesAndViews.add("column_udt_usage");
// systemTablesAndViews.add("columns");
// systemTablesAndViews.add("constraint_column_usage");
// systemTablesAndViews.add("constraint_table_usage");
// systemTablesAndViews.add("data_type_privileges");
// systemTablesAndViews.add("domain_constraints");
// systemTablesAndViews.add("domain_udt_usage");
// systemTablesAndViews.add("domains");
// systemTablesAndViews.add("element_types");
// systemTablesAndViews.add("enabled_roles");
// systemTablesAndViews.add("key_column_usage");
// systemTablesAndViews.add("parameters");
// systemTablesAndViews.add("referential_constraints");
// systemTablesAndViews.add("role_column_grants");
// systemTablesAndViews.add("role_routine_grants");
// systemTablesAndViews.add("role_table_grants");
// systemTablesAndViews.add("role_usage_grants");
// systemTablesAndViews.add("routine_privileges");
// systemTablesAndViews.add("routines");
// systemTablesAndViews.add("schemata");
// systemTablesAndViews.add("sequences");
// systemTablesAndViews.add("sql_features");
// systemTablesAndViews.add("sql_implementation_info");
// systemTablesAndViews.add("sql_languages");
// systemTablesAndViews.add("sql_packages");
// systemTablesAndViews.add("sql_parts");
// systemTablesAndViews.add("sql_sizing");
// systemTablesAndViews.add("sql_sizing_profiles");
// systemTablesAndViews.add("table_constraints");
// systemTablesAndViews.add("table_privileges");
// systemTablesAndViews.add("tables");
// systemTablesAndViews.add("triggers");
// systemTablesAndViews.add("usage_privileges");
// systemTablesAndViews.add("view_column_usage");
// systemTablesAndViews.add("view_routine_usage");
// systemTablesAndViews.add("view_table_usage");
// systemTablesAndViews.add("views");
// systemTablesAndViews.add("information_schema_catalog_name");
// systemTablesAndViews.add("triggered_update_columns");
// systemTablesAndViews.add("book_pkey");
}
public String getProductName() {
return "PostgreSQL";
}
public String getTypeName() {
return "postgresql";
}
public Set<String> getSystemTablesAndViews() {
return systemTablesAndViews;
}
public boolean supportsInitiallyDeferrableColumns() {
return true;
}
public boolean isCorrectDatabaseImplementation(Connection conn) throws JDBCException {
return PRODUCT_NAME.equalsIgnoreCase(getDatabaseProductName(conn));
}
public String getDefaultDriver(String url) {
if (url.startsWith("jdbc:postgresql:")) {
return "org.postgresql.Driver";
}
return null;
}
public String getBooleanType() {
return "BOOLEAN";
}
public String getCurrencyType() {
return "DECIMAL";
}
public String getUUIDType() {
return "CHAR(36)";
}
public String getClobType() {
return "TEXT";
}
public String getBlobType() {
return "BYTEA";
}
public String getDateTimeType() {
return "TIMESTAMP WITH TIME ZONE";
}
public boolean supportsSequences() {
return true;
}
public String getCurrentDateTimeFunction() {
return "NOW()";
}
protected String getDefaultDatabaseSchemaName() throws JDBCException {
if (defaultDatabaseSchemaName == null) {
try {
List<String> searchPaths = getSearchPaths();
if (searchPaths != null && searchPaths.size() > 0) {
for (String searchPath : searchPaths) {
if (searchPath != null && searchPath.length() > 0) {
defaultDatabaseSchemaName = searchPath;
if (defaultDatabaseSchemaName.equals("$user") && getConnectionUsername() != null) {
if (! schemaExists(getConnectionUsername())) {
defaultDatabaseSchemaName = null;
} else {
defaultDatabaseSchemaName = getConnectionUsername();
}
}
if (defaultDatabaseSchemaName != null)
break;
}
}
}
} catch (Exception e) {
// TODO: throw?
e.printStackTrace();
log.log(Level.SEVERE, "Failed to get default catalog name from postgres", e);
}
}
return defaultDatabaseSchemaName;
}
public String getDefaultCatalogName() throws JDBCException {
return super.getDefaultCatalogName();
}
public String getDatabaseChangeLogTableName() {
return super.getDatabaseChangeLogTableName().toLowerCase();
}
public String getDatabaseChangeLogLockTableName() {
return super.getDatabaseChangeLogLockTableName().toLowerCase();
}
// public void dropDatabaseObjects(String schema) throws JDBCException {
// try {
// if (schema == null) {
// schema = getConnectionUsername();
// }
// new JdbcTemplate(this).execute(new RawSqlStatement("DROP OWNED BY " + schema));
//
// getConnection().commit();
//
// changeLogTableExists = false;
// changeLogLockTableExists = false;
// changeLogCreateAttempted = false;
// changeLogLockCreateAttempted = false;
//
// } catch (SQLException e) {
// throw new JDBCException(e);
// }
// }
public SqlStatement createFindSequencesSQL(String schema) throws JDBCException {
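// Lists sequences (relkind 'S') in the requested schema while skipping any whose nextval(...) expression
// appears as a column default in pg_attrdef, i.e. sequences that back serial columns.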
return new RawSqlStatement("SELECT relname AS SEQUENCE_NAME FROM pg_class, pg_namespace WHERE relkind='S' AND pg_class.relnamespace = pg_namespace.oid AND nspname = '" + convertRequestedSchemaToSchema(schema) + "' AND 'nextval(''" + (schema == null ? "" : schema + ".") + "'||relname||'''::regclass)' not in (select adsrc from pg_attrdef where adsrc is not null) AND 'nextval('''||relname||'''::regclass)' not in (select adsrc from pg_attrdef where adsrc is not null)");
}
public boolean isSystemTable(String catalogName, String schemaName, String tableName) {
return super.isSystemTable(catalogName, schemaName, tableName)
|| "pg_catalog".equals(schemaName)
|| "pg_toast".equals(schemaName)
|| tableName.endsWith("_seq")
|| tableName.endsWith("_key")
|| tableName.endsWith("_pkey")
|| tableName.startsWith("idx_")
|| tableName.startsWith("pk_");
}
public boolean supportsTablespaces() {
return true;
}
public SqlStatement getViewDefinitionSql(String schemaName, String name) throws JDBCException {
return new RawSqlStatement("select definition from pg_views where viewname='" + name + "' AND schemaname='" + convertRequestedSchemaToSchema(schemaName) + "'");
}
public String getColumnType(String columnType, Boolean autoIncrement) {
if (columnType.startsWith("java.sql.Types.VARCHAR")) { //returns "name" for type
return columnType.replace("java.sql.Types.", "");
}
String type = super.getColumnType(columnType, autoIncrement);
if (type.startsWith("TEXT(")) {
return getClobType();
} else if (type.toLowerCase().startsWith("float8")) {
return "FLOAT8";
} else if (type.toLowerCase().startsWith("float4")) {
return "FLOAT4";
}
if (autoIncrement != null && autoIncrement) {
if ("integer".equals(type.toLowerCase())) {
return "serial";
} else if ("bigint".equals(type.toLowerCase()) || "bigserial".equals(type.toLowerCase())) {
return "bigserial";
} else {
// Unknown integer type, default to "serial"
return "serial";
}
}
return type;
}
public String getAutoIncrementClause() {
return "";
}
public Object convertDatabaseValueToJavaObject(Object defaultValue, int dataType, int columnSize, int decimalDigits) throws ParseException {
if (defaultValue != null) {
if (defaultValue instanceof String) {
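// PostgreSQL typically reports string defaults with a trailing type cast, e.g. 'abc'::character varying
// (example value for illustration); strip the cast so only the quoted literal remains.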
defaultValue = ((String) defaultValue).replaceAll("'::[\\w\\s]+$", "'");
if (dataType == Types.DATE || dataType == Types.TIME || dataType == Types.TIMESTAMP) {
//remove trailing time zone info
defaultValue = ((String) defaultValue).replaceFirst("-\\d+$", "");
}
}
}
return super.convertDatabaseValueToJavaObject(defaultValue, dataType, columnSize, decimalDigits);
}
public String convertRequestedSchemaToSchema(String requestedSchema) throws JDBCException {
if (requestedSchema == null) {
// Return the catalog name instead..
return getDefaultCatalogName();
} else {
return StringUtils.trimToNull(requestedSchema).toLowerCase();
}
}
public String convertRequestedSchemaToCatalog(String requestedSchema) throws JDBCException {
return super.convertRequestedSchemaToCatalog(requestedSchema);
}
/**
* @see liquibase.database.AbstractDatabase#escapeTableName(java.lang.String, java.lang.String)
*/
@Override
public String escapeTableName(String schemaName, String tableName) {
//Check if tableName is in reserved words and has CaseSensitivity problems
if (StringUtils.trimToNull(tableName) != null && (hasCaseProblems(tableName) || isReservedWord(tableName))) {
return super.escapeTableName(schemaName, "\"" + tableName + "\"");
}
return super.escapeTableName(schemaName, tableName);
}
/**
* @see liquibase.database.AbstractDatabase#escapeColumnName(java.lang.String, java.lang.String, java.lang.String)
*/
@Override
public String escapeColumnName(String schemaName, String tableName, String columnName) {
if (hasCaseProblems(columnName) || isReservedWord(columnName))
return "\"" + columnName + "\"";
return columnName;
}
/*
* Check if given string has case problems according to postgresql documentation.
* If the string contains at least one upper-case and at least one lower-case character it must be escaped (quoted).
*/
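// e.g. "MyTable" mixes cases and will be quoted, while "mytable" or "MYTABLE" will not.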
private boolean hasCaseProblems(String tableName) {
if (tableName.matches(".*[A-Z].*") && tableName.matches(".*[a-z].*"))
return true;
return false;
}
/*
* Check if given string is reserved word.
*/
private boolean isReservedWord(String tableName) {
for (int i = 0; i != this.reservedWords.length; i++)
if (this.reservedWords[i].toLowerCase().equalsIgnoreCase(tableName))
return true;
return false;
}
/*
* Reserved words from postgresql documentation
*/
private String[] reservedWords = new String[]{"ALL", "ANALYSE", "ANALYZE", "AND", "ANY", "ARRAY", "AS", "ASC", "ASYMMETRIC", "AUTHORIZATION", "BETWEEN", "BINARY", "BOTH", "CASE", "CAST", "CHECK", "COLLATE", "COLUMN", "CONSTRAINT", "CORRESPONDING", "CREATE", "CROSS", "CURRENT_DATE", "CURRENT_ROLE", "CURRENT_TIME", "CURRENT_TIMESTAMP", "CURRENT_USER", "DEFAULT", "DEFERRABLE", "DESC", "DISTINCT", "DO", "ELSE", "END", "EXCEPT", "FALSE", "FOR", "FOREIGN", "FREEZE", "FROM", "FULL", "GRANT", "GROUP", "HAVING",
"ILIKE", "IN", "INITIALLY", "INNER", "INTERSECT", "INTO", "IS", "ISNULL", "JOIN", "LEADING", "LEFT", "LIKE", "LIMIT", "LOCALTIME", "LOCALTIMESTAMP", "NATURAL", "NEW", "NOT", "NOTNULL", "NULL", "OFF", "OFFSET", "OLD", "ON", "ONLY", "OPEN", "OR", "ORDER", "OUTER", "OVERLAPS", "PLACING", "PRIMARY", "REFERENCES", "RETURNING", "RIGHT", "SELECT", "SESSION_USER", "SIMILAR", "SOME", "SYMMETRIC", "TABLE", "THEN", "TO", "TRAILING", "TRUE", "UNION", "UNIQUE", "USER", "USING", "VERBOSE", "WHEN", "WHERE"};
/*
* Get the current search paths
*/
private List<String> getSearchPaths() {
List<String> searchPaths = null;
try {
DatabaseConnection con = getConnection();
if (con != null) {
Statement stmt = con.createStatement(
ResultSet.TYPE_SCROLL_INSENSITIVE,
ResultSet.CONCUR_READ_ONLY);
ResultSet searchPathQry = stmt.executeQuery("SHOW search_path");
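// A typical result looks like "$user", public (the usual PostgreSQL default search_path), hence the
// comma split and the normalisation of the quoted "$user" entry below.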
if (searchPathQry.next()) {
String searchPathResult = searchPathQry.getString(1);
if (searchPathResult != null) {
String dirtySearchPaths[] = searchPathResult.split("\\,");
searchPaths = new ArrayList<String>();
for (String searchPath : dirtySearchPaths) {
searchPath = searchPath.trim();
// Ensure there is consistency ..
if (searchPath.equals("\"$user\"")) {
searchPath = "$user";
}
searchPaths.add(searchPath);
}
}
}
}
} catch (Exception e) {
// TODO: Something?
e.printStackTrace();
log.log(Level.SEVERE, "Failed to get default catalog name from postgres", e);
}
return searchPaths;
}
private boolean catalogExists(String catalogName) throws SQLException {
if (catalogName != null) {
return runExistsQuery("select count(*) from information_schema.schemata where catalog_name='" + catalogName + "'");
} else {
return false;
}
}
private boolean schemaExists(String schemaName) throws SQLException {
if (schemaName != null) {
return runExistsQuery("select count(*) from information_schema.schemata where schema_name='" + schemaName + "'");
} else {
return false;
}
}
private boolean runExistsQuery(String query) throws SQLException {
DatabaseConnection con = getConnection();
Statement stmt = con.createStatement(
ResultSet.TYPE_SCROLL_INSENSITIVE,
ResultSet.CONCUR_READ_ONLY);
ResultSet existsQry = stmt.executeQuery(query);
if (existsQry.next()) {
Integer count = existsQry.getInt(1);
if (count != null && count > 0) {
return true;
}
}
return false;
}
public DatabaseSnapshot createDatabaseSnapshot(String schema, Set<DiffStatusListener> statusListeners) throws JDBCException {
return new PostgresDatabaseSnapshot(this, statusListeners, schema);
}
}
|
core/src/java/liquibase/database/PostgresDatabase.java
|
package liquibase.database;
import liquibase.database.sql.RawSqlStatement;
import liquibase.database.sql.SqlStatement;
import liquibase.database.structure.DatabaseSnapshot;
import liquibase.database.structure.PostgresDatabaseSnapshot;
import liquibase.exception.JDBCException;
import liquibase.exception.CustomChangeException;
import liquibase.util.StringUtils;
import liquibase.diff.DiffStatusListener;
import java.sql.*;
import java.text.ParseException;
import java.util.HashSet;
import java.util.Set;
import java.util.List;
import java.util.ArrayList;
import java.util.logging.Level;
/**
* Encapsulates PostgreSQL database support.
*/
public class PostgresDatabase extends AbstractDatabase {
public static final String PRODUCT_NAME = "PostgreSQL";
private Set<String> systemTablesAndViews = new HashSet<String>();
private String defaultDatabaseSchemaName;
private String defaultCatalogName;
public PostgresDatabase() {
// systemTablesAndViews.add("pg_logdir_ls");
// systemTablesAndViews.add("administrable_role_authorizations");
// systemTablesAndViews.add("applicable_roles");
// systemTablesAndViews.add("attributes");
// systemTablesAndViews.add("check_constraint_routine_usage");
// systemTablesAndViews.add("check_constraints");
// systemTablesAndViews.add("column_domain_usage");
// systemTablesAndViews.add("column_privileges");
// systemTablesAndViews.add("column_udt_usage");
// systemTablesAndViews.add("columns");
// systemTablesAndViews.add("constraint_column_usage");
// systemTablesAndViews.add("constraint_table_usage");
// systemTablesAndViews.add("data_type_privileges");
// systemTablesAndViews.add("domain_constraints");
// systemTablesAndViews.add("domain_udt_usage");
// systemTablesAndViews.add("domains");
// systemTablesAndViews.add("element_types");
// systemTablesAndViews.add("enabled_roles");
// systemTablesAndViews.add("key_column_usage");
// systemTablesAndViews.add("parameters");
// systemTablesAndViews.add("referential_constraints");
// systemTablesAndViews.add("role_column_grants");
// systemTablesAndViews.add("role_routine_grants");
// systemTablesAndViews.add("role_table_grants");
// systemTablesAndViews.add("role_usage_grants");
// systemTablesAndViews.add("routine_privileges");
// systemTablesAndViews.add("routines");
// systemTablesAndViews.add("schemata");
// systemTablesAndViews.add("sequences");
// systemTablesAndViews.add("sql_features");
// systemTablesAndViews.add("sql_implementation_info");
// systemTablesAndViews.add("sql_languages");
// systemTablesAndViews.add("sql_packages");
// systemTablesAndViews.add("sql_parts");
// systemTablesAndViews.add("sql_sizing");
// systemTablesAndViews.add("sql_sizing_profiles");
// systemTablesAndViews.add("table_constraints");
// systemTablesAndViews.add("table_privileges");
// systemTablesAndViews.add("tables");
// systemTablesAndViews.add("triggers");
// systemTablesAndViews.add("usage_privileges");
// systemTablesAndViews.add("view_column_usage");
// systemTablesAndViews.add("view_routine_usage");
// systemTablesAndViews.add("view_table_usage");
// systemTablesAndViews.add("views");
// systemTablesAndViews.add("information_schema_catalog_name");
// systemTablesAndViews.add("triggered_update_columns");
// systemTablesAndViews.add("book_pkey");
}
public String getProductName() {
return "PostgreSQL";
}
public String getTypeName() {
return "postgresql";
}
public Set<String> getSystemTablesAndViews() {
return systemTablesAndViews;
}
public boolean supportsInitiallyDeferrableColumns() {
return true;
}
public boolean isCorrectDatabaseImplementation(Connection conn) throws JDBCException {
return PRODUCT_NAME.equalsIgnoreCase(getDatabaseProductName(conn));
}
public String getDefaultDriver(String url) {
if (url.startsWith("jdbc:postgresql:")) {
return "org.postgresql.Driver";
}
return null;
}
public String getBooleanType() {
return "BOOLEAN";
}
public String getCurrencyType() {
return "DECIMAL";
}
public String getUUIDType() {
return "CHAR(36)";
}
public String getClobType() {
return "TEXT";
}
public String getBlobType() {
return "BYTEA";
}
public String getDateTimeType() {
return "TIMESTAMP WITH TIME ZONE";
}
public boolean supportsSequences() {
return true;
}
public String getCurrentDateTimeFunction() {
return "NOW()";
}
protected String getDefaultDatabaseSchemaName() throws JDBCException {
if (defaultDatabaseSchemaName == null) {
try {
List<String> searchPaths = getSearchPaths();
if (searchPaths != null && searchPaths.size() > 0) {
for (String searchPath : searchPaths) {
if (searchPath != null && searchPath.length() > 0) {
defaultDatabaseSchemaName = searchPath;
if (defaultDatabaseSchemaName.equals("$user") && getConnectionUsername() != null) {
if (! schemaExists(getConnectionUsername())) {
defaultDatabaseSchemaName = null;
}
}
if (defaultDatabaseSchemaName != null)
break;
}
}
}
} catch (Exception e) {
// TODO: throw?
e.printStackTrace();
log.log(Level.SEVERE, "Failed to get default catalog name from postgres", e);
}
}
return defaultDatabaseSchemaName;
}
public String getDefaultCatalogName() throws JDBCException {
if (defaultCatalogName == null) {
try {
List<String> searchPaths = getSearchPaths();
if (searchPaths != null && searchPaths.size() > 0) {
for (String searchPath : searchPaths) {
if (searchPath != null && searchPath.length() > 0) {
defaultCatalogName = searchPath;
if (defaultCatalogName.equals("$user") && getConnectionUsername() != null) {
if (! catalogExists(getConnectionUsername())) {
defaultCatalogName = null;
} else {
defaultCatalogName = getConnectionUsername();
}
}
if (defaultCatalogName != null)
break;
}
}
}
} catch (Exception e) {
// TODO: throw?
e.printStackTrace();
log.log(Level.SEVERE, "Failed to get default catalog name from postgres", e);
}
// Default
if (defaultCatalogName == null) {
defaultCatalogName = "PUBLIC";
}
}
return defaultCatalogName;
}
public String getDatabaseChangeLogTableName() {
return super.getDatabaseChangeLogTableName().toLowerCase();
}
public String getDatabaseChangeLogLockTableName() {
return super.getDatabaseChangeLogLockTableName().toLowerCase();
}
// public void dropDatabaseObjects(String schema) throws JDBCException {
// try {
// if (schema == null) {
// schema = getConnectionUsername();
// }
// new JdbcTemplate(this).execute(new RawSqlStatement("DROP OWNED BY " + schema));
//
// getConnection().commit();
//
// changeLogTableExists = false;
// changeLogLockTableExists = false;
// changeLogCreateAttempted = false;
// changeLogLockCreateAttempted = false;
//
// } catch (SQLException e) {
// throw new JDBCException(e);
// }
// }
public SqlStatement createFindSequencesSQL(String schema) throws JDBCException {
return new RawSqlStatement("SELECT relname AS SEQUENCE_NAME FROM pg_class, pg_namespace WHERE relkind='S' AND pg_class.relnamespace = pg_namespace.oid AND nspname = '" + convertRequestedSchemaToSchema(schema) + "' AND 'nextval(''" + (schema == null ? "" : schema + ".") + "'||relname||'''::regclass)' not in (select adsrc from pg_attrdef where adsrc is not null) AND 'nextval('''||relname||'''::regclass)' not in (select adsrc from pg_attrdef where adsrc is not null)");
}
public boolean isSystemTable(String catalogName, String schemaName, String tableName) {
return super.isSystemTable(catalogName, schemaName, tableName)
|| "pg_catalog".equals(schemaName)
|| "pg_toast".equals(schemaName)
|| tableName.endsWith("_seq")
|| tableName.endsWith("_key")
|| tableName.endsWith("_pkey")
|| tableName.startsWith("idx_")
|| tableName.startsWith("pk_");
}
public boolean supportsTablespaces() {
return true;
}
public SqlStatement getViewDefinitionSql(String schemaName, String name) throws JDBCException {
return new RawSqlStatement("select definition from pg_views where viewname='" + name + "' AND schemaname='" + convertRequestedSchemaToSchema(schemaName) + "'");
}
public String getColumnType(String columnType, Boolean autoIncrement) {
if (columnType.startsWith("java.sql.Types.VARCHAR")) { //returns "name" for type
return columnType.replace("java.sql.Types.", "");
}
String type = super.getColumnType(columnType, autoIncrement);
if (type.startsWith("TEXT(")) {
return getClobType();
} else if (type.toLowerCase().startsWith("float8")) {
return "FLOAT8";
} else if (type.toLowerCase().startsWith("float4")) {
return "FLOAT4";
}
if (autoIncrement != null && autoIncrement) {
if ("integer".equals(type.toLowerCase())) {
return "serial";
} else if ("bigint".equals(type.toLowerCase()) || "bigserial".equals(type.toLowerCase())) {
return "bigserial";
} else {
// Unknown integer type, default to "serial"
return "serial";
}
}
return type;
}
public String getAutoIncrementClause() {
return "";
}
public Object convertDatabaseValueToJavaObject(Object defaultValue, int dataType, int columnSize, int decimalDigits) throws ParseException {
if (defaultValue != null) {
if (defaultValue instanceof String) {
defaultValue = ((String) defaultValue).replaceAll("'::[\\w\\s]+$", "'");
if (dataType == Types.DATE || dataType == Types.TIME || dataType == Types.TIMESTAMP) {
//remove trailing time zone info
defaultValue = ((String) defaultValue).replaceFirst("-\\d+$", "");
}
}
}
return super.convertDatabaseValueToJavaObject(defaultValue, dataType, columnSize, decimalDigits);
}
public String convertRequestedSchemaToSchema(String requestedSchema) throws JDBCException {
if (requestedSchema == null) {
// Return the catalog name instead..
return getDefaultCatalogName();
} else {
return StringUtils.trimToNull(requestedSchema).toLowerCase();
}
}
public String convertRequestedSchemaToCatalog(String requestedSchema) throws JDBCException {
return super.convertRequestedSchemaToCatalog(requestedSchema);
}
/**
* @see liquibase.database.AbstractDatabase#escapeTableName(java.lang.String, java.lang.String)
*/
@Override
public String escapeTableName(String schemaName, String tableName) {
//Check if tableName is in reserved words and has CaseSensitivity problems
if (StringUtils.trimToNull(tableName) != null && (hasCaseProblems(tableName) || isReservedWord(tableName))) {
return super.escapeTableName(schemaName, "\"" + tableName + "\"");
}
return super.escapeTableName(schemaName, tableName);
}
/**
* @see liquibase.database.AbstractDatabase#escapeColumnName(java.lang.String, java.lang.String, java.lang.String)
*/
@Override
public String escapeColumnName(String schemaName, String tableName, String columnName) {
if (hasCaseProblems(columnName) || isReservedWord(columnName))
return "\"" + columnName + "\"";
return columnName;
}
/*
* Check if given string has case problems according to postgresql documentation.
* If the string contains at least one upper-case and at least one lower-case character it must be escaped (quoted).
*/
private boolean hasCaseProblems(String tableName) {
if (tableName.matches(".*[A-Z].*") && tableName.matches(".*[a-z].*"))
return true;
return false;
}
/*
* Check if given string is reserved word.
*/
private boolean isReservedWord(String tableName) {
for (int i = 0; i != this.reservedWords.length; i++)
if (this.reservedWords[i].toLowerCase().equalsIgnoreCase(tableName))
return true;
return false;
}
/*
* Reserved words from postgresql documentation
*/
private String[] reservedWords = new String[]{"ALL", "ANALYSE", "ANALYZE", "AND", "ANY", "ARRAY", "AS", "ASC", "ASYMMETRIC", "AUTHORIZATION", "BETWEEN", "BINARY", "BOTH", "CASE", "CAST", "CHECK", "COLLATE", "COLUMN", "CONSTRAINT", "CORRESPONDING", "CREATE", "CROSS", "CURRENT_DATE", "CURRENT_ROLE", "CURRENT_TIME", "CURRENT_TIMESTAMP", "CURRENT_USER", "DEFAULT", "DEFERRABLE", "DESC", "DISTINCT", "DO", "ELSE", "END", "EXCEPT", "FALSE", "FOR", "FOREIGN", "FREEZE", "FROM", "FULL", "GRANT", "GROUP", "HAVING",
"ILIKE", "IN", "INITIALLY", "INNER", "INTERSECT", "INTO", "IS", "ISNULL", "JOIN", "LEADING", "LEFT", "LIKE", "LIMIT", "LOCALTIME", "LOCALTIMESTAMP", "NATURAL", "NEW", "NOT", "NOTNULL", "NULL", "OFF", "OFFSET", "OLD", "ON", "ONLY", "OPEN", "OR", "ORDER", "OUTER", "OVERLAPS", "PLACING", "PRIMARY", "REFERENCES", "RETURNING", "RIGHT", "SELECT", "SESSION_USER", "SIMILAR", "SOME", "SYMMETRIC", "TABLE", "THEN", "TO", "TRAILING", "TRUE", "UNION", "UNIQUE", "USER", "USING", "VERBOSE", "WHEN", "WHERE"};
/*
* Get the current search paths
*/
private List<String> getSearchPaths() {
List<String> searchPaths = null;
try {
DatabaseConnection con = getConnection();
if (con != null) {
Statement stmt = con.createStatement(
ResultSet.TYPE_SCROLL_INSENSITIVE,
ResultSet.CONCUR_READ_ONLY);
ResultSet searchPathQry = stmt.executeQuery("SHOW search_path");
if (searchPathQry.next()) {
String searchPathResult = searchPathQry.getString(1);
if (searchPathResult != null) {
String dirtySearchPaths[] = searchPathResult.split("\\,");
searchPaths = new ArrayList<String>();
for (String searchPath : dirtySearchPaths) {
searchPath = searchPath.trim();
// Ensure there is consistency ..
if (searchPath.equals("\"$user\"")) {
searchPath = "$user";
}
searchPaths.add(searchPath);
}
}
}
}
} catch (Exception e) {
// TODO: Something?
e.printStackTrace();
log.log(Level.SEVERE, "Failed to get default catalog name from postgres", e);
}
return searchPaths;
}
private boolean catalogExists(String catalogName) throws SQLException {
if (catalogName != null) {
return runExistsQuery("select count(*) from information_schema.schemata where catalog_name='" + catalogName + "'");
} else {
return false;
}
}
private boolean schemaExists(String schemaName) throws SQLException {
if (schemaName != null) {
return runExistsQuery("select count(*) from information_schema.schemata where schema_name='" + schemaName + "'");
} else {
return false;
}
}
private boolean runExistsQuery(String query) throws SQLException {
DatabaseConnection con = getConnection();
Statement stmt = con.createStatement(
ResultSet.TYPE_SCROLL_INSENSITIVE,
ResultSet.CONCUR_READ_ONLY);
ResultSet existsQry = stmt.executeQuery(query);
if (existsQry.next()) {
Integer count = existsQry.getInt(1);
if (count != null && count > 0) {
return true;
}
}
return false;
}
public DatabaseSnapshot createDatabaseSnapshot(String schema, Set<DiffStatusListener> statusListeners) throws JDBCException {
return new PostgresDatabaseSnapshot(this, statusListeners, schema);
}
}
|
additional bug fixes from David Radunz
git-svn-id: a91d99a4c51940524e539abe295d6ea473345dd2@708 e6edf6fb-f266-4316-afb4-e53d95876a76
|
core/src/java/liquibase/database/PostgresDatabase.java
|
additional bug fixes from David Radunz
|
<ide><path>core/src/java/liquibase/database/PostgresDatabase.java
<ide> if (defaultDatabaseSchemaName.equals("$user") && getConnectionUsername() != null) {
<ide> if (! schemaExists(getConnectionUsername())) {
<ide> defaultDatabaseSchemaName = null;
<add> } else {
<add> defaultDatabaseSchemaName = getConnectionUsername();
<ide> }
<ide> }
<ide>
<ide> }
<ide>
<ide> public String getDefaultCatalogName() throws JDBCException {
<del>
<del> if (defaultCatalogName == null) {
<del> try {
<del> List<String> searchPaths = getSearchPaths();
<del> if (searchPaths != null && searchPaths.size() > 0) {
<del> for (String searchPath : searchPaths) {
<del> if (searchPath != null && searchPath.length() > 0) {
<del> defaultCatalogName = searchPath;
<del>
<del> if (defaultCatalogName.equals("$user") && getConnectionUsername() != null) {
<del> if (! catalogExists(getConnectionUsername())) {
<del> defaultCatalogName = null;
<del> } else {
<del> defaultCatalogName = getConnectionUsername();
<del> }
<del> }
<del>
<del> if (defaultCatalogName != null)
<del> break;
<del> }
<del> }
<del> }
<del> } catch (Exception e) {
<del> // TODO: throw?
<del> e.printStackTrace();
<del> log.log(Level.SEVERE, "Failed to get default catalog name from postgres", e);
<del> }
<del>
<del> // Default
<del> if (defaultCatalogName == null) {
<del> defaultCatalogName = "PUBLIC";
<del> }
<del>
<del> }
<del>
<del> return defaultCatalogName;
<add> return super.getDefaultCatalogName();
<ide> }
<ide>
<ide> public String getDatabaseChangeLogTableName() {
|
|
Java
|
apache-2.0
|
41f6102eacf65ecef061d5ba56d010eef9f61638
| 0 |
thesamet/gerrit,teamblueridge/gerrit,1yvT0s/gerrit,atdt/gerrit,bootstraponline-archive/gerrit-mirror,MerritCR/merrit,jackminicloud/test,jeblair/gerrit,renchaorevee/gerrit,supriyantomaftuh/gerrit,1yvT0s/gerrit,jackminicloud/test,gracefullife/gerrit,WANdisco/gerrit,makholm/gerrit-ceremony,ckamm/gerrit,Team-OctOS/host_gerrit,bootstraponline-archive/gerrit-mirror,gcoders/gerrit,anminhsu/gerrit,jeblair/gerrit,makholm/gerrit-ceremony,gerrit-review/gerrit,bpollack/gerrit,zommarin/gerrit,joshuawilson/merrit,dwhipstock/gerrit,Team-OctOS/host_gerrit,GerritCodeReview/gerrit,MerritCR/merrit,teamblueridge/gerrit,austinchic/Gerrit,sudosurootdev/gerrit,dwhipstock/gerrit,midnightradio/gerrit,joshuawilson/merrit,qtproject/qtqa-gerrit,Saulis/gerrit,rtyley/mini-git-server,Distrotech/gerrit,quyixia/gerrit,jeblair/gerrit,ckamm/gerrit,joshuawilson/merrit,thesamet/gerrit,qtproject/qtqa-gerrit,sudosurootdev/gerrit,m1kah/gerrit-contributions,basilgor/gerrit,renchaorevee/gerrit,ashang/aaron-gerrit,netroby/gerrit,hdost/gerrit,evanchueng/gerrit,GerritCodeReview/gerrit,Team-OctOS/host_gerrit,Saulis/gerrit,Seinlin/gerrit,teamblueridge/gerrit,netroby/gerrit,makholm/gerrit-ceremony,Team-OctOS/host_gerrit,gerrit-review/gerrit,netroby/gerrit,Team-OctOS/host_gerrit,Saulis/gerrit,GerritCodeReview/gerrit,renchaorevee/gerrit,qtproject/qtqa-gerrit,hdost/gerrit,CandyShop/gerrit,hdost/gerrit,midnightradio/gerrit,ashang/aaron-gerrit,gcoders/gerrit,cjh1/gerrit,Overruler/gerrit,atdt/gerrit,hdost/gerrit,Seinlin/gerrit,hdost/gerrit,sudosurootdev/gerrit,Distrotech/gerrit,jackminicloud/test,gracefullife/gerrit,basilgor/gerrit,evanchueng/gerrit,m1kah/gerrit-contributions,cjh1/gerrit,joshuawilson/merrit,WANdisco/gerrit,anminhsu/gerrit,supriyantomaftuh/gerrit,qtproject/qtqa-gerrit,quyixia/gerrit,pkdevbox/gerrit,netroby/gerrit,Overruler/gerrit,renchaorevee/gerrit,Saulis/gerrit,gerrit-review/gerrit,Saulis/gerrit,qtproject/qtqa-gerrit,thesamet/gerrit,quyixia/gerrit,jackminicloud/test,CandyShop/gerrit,supriyantomaftuh/gerrit,Team-OctOS/host_gerrit,gracefullife/gerrit,joshuawilson/merrit,joshuawilson/merrit,ashang/aaron-gerrit,thinkernel/gerrit,TonyChai24/test,TonyChai24/test,teamblueridge/gerrit,evanchueng/gerrit,duboisf/gerrit,CandyShop/gerrit,cjh1/gerrit,ckamm/gerrit,1yvT0s/gerrit,Saulis/gerrit,thinkernel/gerrit,atdt/gerrit,keerath/gerrit_newssh,jackminicloud/test,Seinlin/gerrit,MerritCR/merrit,atdt/gerrit,thinkernel/gerrit,keerath/gerrit_newssh,makholm/gerrit-ceremony,gerrit-review/gerrit,Overruler/gerrit,austinchic/Gerrit,atdt/gerrit,GerritCodeReview/gerrit,gracefullife/gerrit,gerrit-review/gerrit,Seinlin/gerrit,netroby/gerrit,duboisf/gerrit,1yvT0s/gerrit,quyixia/gerrit,midnightradio/gerrit,MerritCR/merrit,keerath/gerrit_newssh,keerath/gerrit_newssh,pkdevbox/gerrit,thinkernel/gerrit,keerath/gerrit_newssh,m1kah/gerrit-contributions,qtproject/qtqa-gerrit,thinkernel/gerrit,GerritCodeReview/gerrit,gcoders/gerrit,duboisf/gerrit,zommarin/gerrit,m1kah/gerrit-contributions,ckamm/gerrit,joshuawilson/merrit,basilgor/gerrit,gcoders/gerrit,rtyley/mini-git-server,MerritCR/merrit,MerritCR/merrit,dwhipstock/gerrit,catrope/gerrit,CandyShop/gerrit,thesamet/gerrit,bpollack/gerrit,renchaorevee/gerrit,qtproject/qtqa-gerrit,ckamm/gerrit,Distrotech/gerrit,dwhipstock/gerrit,dwhipstock/gerrit,quyixia/gerrit,GerritCodeReview/gerrit,anminhsu/gerrit,evanchueng/gerrit,austinchic/Gerrit,pkdevbox/gerrit,Distrotech/gerrit,dwhipstock/gerrit,pkdevbox/gerrit,Overruler/gerrit,1yvT0s/gerrit,Seinlin/gerrit,WANdisco/gerrit,supriyantomaftuh/gerrit,Distrotech/gerrit,teamblueridge
/gerrit,supriyantomaftuh/gerrit,bootstraponline-archive/gerrit-mirror,evanchueng/gerrit,dwhipstock/gerrit,anminhsu/gerrit,anminhsu/gerrit,gcoders/gerrit,hdost/gerrit,TonyChai24/test,thinkernel/gerrit,Distrotech/gerrit,gracefullife/gerrit,anminhsu/gerrit,gcoders/gerrit,TonyChai24/test,duboisf/gerrit,CandyShop/gerrit,pkdevbox/gerrit,thesamet/gerrit,TonyChai24/test,basilgor/gerrit,bpollack/gerrit,zommarin/gerrit,Overruler/gerrit,ashang/aaron-gerrit,midnightradio/gerrit,bpollack/gerrit,GerritCodeReview/gerrit,bpollack/gerrit,TonyChai24/test,hdost/gerrit,TonyChai24/test,bootstraponline-archive/gerrit-mirror,zommarin/gerrit,MerritCR/merrit,zommarin/gerrit,pkdevbox/gerrit,GerritCodeReview/gerrit,Overruler/gerrit,bpollack/gerrit,WANdisco/gerrit,Seinlin/gerrit,jackminicloud/test,midnightradio/gerrit,supriyantomaftuh/gerrit,bootstraponline-archive/gerrit-mirror,basilgor/gerrit,bootstraponline-archive/gerrit-mirror,catrope/gerrit,netroby/gerrit,jeblair/gerrit,WANdisco/gerrit,gcoders/gerrit,sudosurootdev/gerrit,austinchic/Gerrit,thinkernel/gerrit,WANdisco/gerrit,renchaorevee/gerrit,pkdevbox/gerrit,catrope/gerrit,joshuawilson/merrit,anminhsu/gerrit,WANdisco/gerrit,netroby/gerrit,Team-OctOS/host_gerrit,supriyantomaftuh/gerrit,cjh1/gerrit,quyixia/gerrit,ashang/aaron-gerrit,Distrotech/gerrit,gerrit-review/gerrit,catrope/gerrit,midnightradio/gerrit,gerrit-review/gerrit,quyixia/gerrit,renchaorevee/gerrit,jackminicloud/test,thesamet/gerrit,thesamet/gerrit,Seinlin/gerrit,MerritCR/merrit,sudosurootdev/gerrit
|
// Copyright (C) 2008 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.client.patches;
import com.google.gerrit.client.Dispatcher;
import com.google.gerrit.client.Gerrit;
import com.google.gerrit.client.RpcStatus;
import com.google.gerrit.client.changes.CommitMessageBlock;
import com.google.gerrit.client.changes.PatchTable;
import com.google.gerrit.client.changes.Util;
import com.google.gerrit.client.rpc.GerritCallback;
import com.google.gerrit.client.rpc.ScreenLoadCallback;
import com.google.gerrit.client.ui.Screen;
import com.google.gerrit.common.data.PatchScript;
import com.google.gerrit.common.data.PatchSetDetail;
import com.google.gerrit.prettify.client.ClientSideFormatter;
import com.google.gerrit.prettify.common.PrettyFactory;
import com.google.gerrit.reviewdb.AccountDiffPreference;
import com.google.gerrit.reviewdb.Change;
import com.google.gerrit.reviewdb.Patch;
import com.google.gerrit.reviewdb.PatchSet;
import com.google.gwt.event.dom.client.KeyPressEvent;
import com.google.gwt.event.logical.shared.CloseEvent;
import com.google.gwt.event.logical.shared.CloseHandler;
import com.google.gwt.event.logical.shared.OpenEvent;
import com.google.gwt.event.logical.shared.OpenHandler;
import com.google.gwt.event.logical.shared.ValueChangeEvent;
import com.google.gwt.event.logical.shared.ValueChangeHandler;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.user.client.Command;
import com.google.gwt.user.client.DeferredCommand;
import com.google.gwt.user.client.rpc.AsyncCallback;
import com.google.gwt.user.client.ui.DisclosurePanel;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.HorizontalPanel;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.VerticalPanel;
import com.google.gwtexpui.globalkey.client.GlobalKey;
import com.google.gwtexpui.globalkey.client.KeyCommand;
import com.google.gwtexpui.globalkey.client.KeyCommandSet;
import com.google.gwtjsonrpc.client.VoidResult;
public abstract class PatchScreen extends Screen implements
CommentEditorContainer {
static final PrettyFactory PRETTY = ClientSideFormatter.FACTORY;
public static class SideBySide extends PatchScreen {
public SideBySide(final Patch.Key id, final int patchIndex,
final PatchSetDetail patchSetDetail, final PatchTable patchTable) {
super(id, patchIndex, patchSetDetail, patchTable);
}
@Override
protected SideBySideTable createContentTable() {
return new SideBySideTable();
}
@Override
protected PatchScreen.Type getPatchScreenType() {
return PatchScreen.Type.SIDE_BY_SIDE;
}
}
public static class Unified extends PatchScreen {
public Unified(final Patch.Key id, final int patchIndex,
final PatchSetDetail patchSetDetail, final PatchTable patchTable) {
super(id, patchIndex, patchSetDetail, patchTable);
}
@Override
protected UnifiedDiffTable createContentTable() {
return new UnifiedDiffTable();
}
@Override
protected PatchScreen.Type getPatchScreenType() {
return PatchScreen.Type.UNIFIED;
}
}
// Which patch set id's are being diff'ed
private static PatchSet.Id diffSideA = null;
private static PatchSet.Id diffSideB = null;
private static Boolean historyOpen = null;
private static final OpenHandler<DisclosurePanel> cacheOpenState =
new OpenHandler<DisclosurePanel>() {
@Override
public void onOpen(OpenEvent<DisclosurePanel> event) {
historyOpen = true;
}
};
private static final CloseHandler<DisclosurePanel> cacheCloseState =
new CloseHandler<DisclosurePanel>() {
@Override
public void onClose(CloseEvent<DisclosurePanel> event) {
historyOpen = false;
}
};
// The change id for which the above patch set id's are valid
private static Change.Id currentChangeId = null;
protected final Patch.Key patchKey;
protected PatchSetDetail patchSetDetail;
protected PatchTable fileList;
protected PatchSet.Id idSideA;
protected PatchSet.Id idSideB;
protected PatchScriptSettingsPanel settingsPanel;
private DisclosurePanel historyPanel;
private HistoryTable historyTable;
private FlowPanel contentPanel;
private Label noDifference;
private AbstractPatchContentTable contentTable;
private CommitMessageBlock commitMessageBlock;
private NavLinks topNav;
private NavLinks bottomNav;
private int rpcSequence;
private PatchScript lastScript;
/** The index of the file we are currently looking at among the fileList */
private int patchIndex;
/** Keys that cause an action on this screen */
private KeyCommandSet keysNavigation;
private HandlerRegistration regNavigation;
/**
* How this patch should be displayed in the patch screen.
*/
public static enum Type {
UNIFIED, SIDE_BY_SIDE
}
protected PatchScreen(final Patch.Key id, final int patchIndex,
final PatchSetDetail detail, final PatchTable patchTable) {
patchKey = id;
patchSetDetail = detail;
fileList = patchTable;
// If we have any diff side stored, make sure they are applicable to the
// current change, discard them otherwise.
//
Change.Id thisChangeId = id.getParentKey().getParentKey();
if (currentChangeId != null && !currentChangeId.equals(thisChangeId)) {
diffSideA = null;
diffSideB = null;
historyOpen = null;
}
currentChangeId = thisChangeId;
idSideA = diffSideA; // null here means we're diff'ing from the Base
idSideB = diffSideB != null ? diffSideB : id.getParentKey();
this.patchIndex = patchIndex;
settingsPanel = new PatchScriptSettingsPanel();
settingsPanel
.addValueChangeHandler(new ValueChangeHandler<AccountDiffPreference>() {
@Override
public void onValueChange(ValueChangeEvent<AccountDiffPreference> event) {
update(event.getValue());
}
});
settingsPanel.getReviewedCheckBox().addValueChangeHandler(
new ValueChangeHandler<Boolean>() {
@Override
public void onValueChange(ValueChangeEvent<Boolean> event) {
setReviewedByCurrentUser(event.getValue());
}
});
}
@Override
public void notifyDraftDelta(int delta) {
lastScript = null;
}
@Override
public void remove(CommentEditorPanel panel) {
lastScript = null;
}
private void update(AccountDiffPreference dp) {
if (lastScript != null && canReuse(dp, lastScript)) {
lastScript.setDiffPrefs(dp);
RpcStatus.INSTANCE.onRpcStart(null);
settingsPanel.setEnabled(false);
DeferredCommand.addCommand(new Command() {
@Override
public void execute() {
try {
onResult(lastScript, false /* not the first time */);
} finally {
RpcStatus.INSTANCE.onRpcComplete(null);
}
}
});
} else {
refresh(false);
}
}
private boolean canReuse(AccountDiffPreference dp, PatchScript last) {
if (last.getDiffPrefs().getIgnoreWhitespace() != dp.getIgnoreWhitespace()) {
// Whitespace ignore setting requires server computation.
return false;
}
final int ctx = dp.getContext();
if (ctx == AccountDiffPreference.WHOLE_FILE_CONTEXT && !last.getA().isWholeFile()) {
// We don't have the entire file here, so we can't render it.
return false;
}
if (last.getDiffPrefs().getContext() < ctx && !last.getA().isWholeFile()) {
// We don't have sufficient context.
return false;
}
if (dp.isSyntaxHighlighting()
&& !last.getA().isWholeFile()) {
// We need the whole file to syntax highlight accurately.
return false;
}
return true;
}
@Override
protected void onInitUI() {
super.onInitUI();
final Change.Id ck = patchKey.getParentKey().getParentKey();
keysNavigation = new KeyCommandSet(Gerrit.C.sectionNavigation());
keysNavigation.add(new UpToChangeCommand(ck, 0, 'u'));
keysNavigation.add(new FileListCmd(0, 'f', PatchUtil.C.fileList()));
historyTable = new HistoryTable(this);
historyPanel = new DisclosurePanel(PatchUtil.C.patchHistoryTitle());
historyPanel.setContent(historyTable);
historyPanel.setVisible(false);
// If the user selected a different patch set than the default for either
// side, expand the history panel
historyPanel.setOpen(diffSideA != null || diffSideB != null
|| (historyOpen != null && historyOpen));
historyPanel.addOpenHandler(cacheOpenState);
historyPanel.addCloseHandler(cacheCloseState);
VerticalPanel vp = new VerticalPanel();
vp.add(historyPanel);
vp.add(settingsPanel);
commitMessageBlock = new CommitMessageBlock("6em");
HorizontalPanel hp = new HorizontalPanel();
hp.setWidth("100%");
hp.add(vp);
hp.add(commitMessageBlock);
add(hp);
noDifference = new Label(PatchUtil.C.noDifference());
noDifference.setStyleName(Gerrit.RESOURCES.css().patchNoDifference());
noDifference.setVisible(false);
contentTable = createContentTable();
contentTable.fileList = fileList;
topNav =
new NavLinks(keysNavigation, patchKey.getParentKey().getParentKey());
bottomNav = new NavLinks(null, patchKey.getParentKey().getParentKey());
add(topNav);
contentPanel = new FlowPanel();
contentPanel.setStyleName(Gerrit.RESOURCES.css()
.sideBySideScreenSideBySideTable());
contentPanel.add(noDifference);
contentPanel.add(contentTable);
add(contentPanel);
add(bottomNav);
if (fileList != null) {
topNav.display(patchIndex, getPatchScreenType(), fileList);
bottomNav.display(patchIndex, getPatchScreenType(), fileList);
}
}
void setReviewedByCurrentUser(boolean reviewed) {
if (fileList != null) {
fileList.updateReviewedStatus(patchKey, reviewed);
}
PatchUtil.DETAIL_SVC.setReviewedByCurrentUser(patchKey, reviewed,
new AsyncCallback<VoidResult>() {
@Override
public void onFailure(Throwable arg0) {
// nop
}
@Override
public void onSuccess(VoidResult result) {
// nop
}
});
}
@Override
protected void onLoad() {
super.onLoad();
if (patchSetDetail == null) {
Util.DETAIL_SVC.patchSetDetail(idSideB,
new GerritCallback<PatchSetDetail>() {
@Override
public void onSuccess(PatchSetDetail result) {
patchSetDetail = result;
if (fileList == null) {
fileList = new PatchTable();
fileList.display(result);
patchIndex = fileList.indexOf(patchKey);
topNav.display(patchIndex, getPatchScreenType(), fileList);
bottomNav.display(patchIndex, getPatchScreenType(), fileList);
}
refresh(true);
}
});
} else {
refresh(true);
}
}
@Override
protected void onUnload() {
if (regNavigation != null) {
regNavigation.removeHandler();
regNavigation = null;
}
super.onUnload();
}
@Override
public void registerKeys() {
super.registerKeys();
contentTable.setRegisterKeys(contentTable.isVisible());
regNavigation = GlobalKey.add(this, keysNavigation);
}
protected abstract AbstractPatchContentTable createContentTable();
protected abstract PatchScreen.Type getPatchScreenType();
protected void refresh(final boolean isFirst) {
final int rpcseq = ++rpcSequence;
lastScript = null;
settingsPanel.setEnabled(false);
PatchUtil.DETAIL_SVC.patchScript(patchKey, idSideA, idSideB, //
settingsPanel.getValue(), new ScreenLoadCallback<PatchScript>(this) {
@Override
protected void preDisplay(final PatchScript result) {
if (rpcSequence == rpcseq) {
onResult(result, isFirst);
}
}
@Override
public void onFailure(final Throwable caught) {
if (rpcSequence == rpcseq) {
settingsPanel.setEnabled(true);
super.onFailure(caught);
}
}
});
}
private void onResult(final PatchScript script, final boolean isFirst) {
final Change.Key cid = script.getChangeId();
final String path = PatchTable.getDisplayFileName(patchKey);
String fileName = path;
final int last = fileName.lastIndexOf('/');
if (last >= 0) {
fileName = fileName.substring(last + 1);
}
setWindowTitle(PatchUtil.M.patchWindowTitle(cid.abbreviate(), fileName));
setPageTitle(PatchUtil.M.patchPageTitle(cid.abbreviate(), path));
if (idSideB.equals(patchSetDetail.getPatchSet().getId())) {
commitMessageBlock.setVisible(true);
commitMessageBlock.display(patchSetDetail.getInfo().getMessage());
} else {
commitMessageBlock.setVisible(false);
Util.DETAIL_SVC.patchSetDetail(idSideB,
new GerritCallback<PatchSetDetail>() {
@Override
public void onSuccess(PatchSetDetail result) {
commitMessageBlock.setVisible(true);
commitMessageBlock.display(result.getInfo().getMessage());
}
});
}
historyTable.display(script.getHistory());
historyPanel.setVisible(true);
// True if there are differences between the two patch sets
boolean hasEdits = !script.getEdits().isEmpty();
// True if this change is a mode change or a pure rename/copy
boolean hasMeta = !script.getPatchHeader().isEmpty();
boolean hasDifferences = hasEdits || hasMeta;
boolean pureMetaChange = !hasEdits && hasMeta;
if (contentTable instanceof SideBySideTable && pureMetaChange) {
// User asked for SideBySide (or a link guessed, wrong) and we can't
// show a binary or pure-rename change there accurately. Switch to
// the unified view instead.
//
contentTable.removeFromParent();
contentTable = new UnifiedDiffTable();
contentTable.fileList = fileList;
contentPanel.add(contentTable);
setToken(Dispatcher.toPatchUnified(patchKey));
}
if (hasDifferences) {
contentTable.display(patchKey, idSideA, idSideB, script);
contentTable.display(script.getCommentDetail());
contentTable.finishDisplay();
}
showPatch(hasDifferences);
settingsPanel.setEnableSmallFileFeatures(!script.isHugeFile());
settingsPanel.setEnableIntralineDifference(script.hasIntralineDifference());
settingsPanel.setEnabled(true);
lastScript = script;
// Mark this file reviewed as soon as we display the diff screen
if (Gerrit.isSignedIn() && isFirst) {
settingsPanel.getReviewedCheckBox().setValue(true);
setReviewedByCurrentUser(true /* reviewed */);
}
}
private void showPatch(final boolean showPatch) {
noDifference.setVisible(!showPatch);
contentTable.setVisible(showPatch);
contentTable.setRegisterKeys(isCurrentView() && showPatch);
}
public void setSideA(PatchSet.Id patchSetId) {
idSideA = patchSetId;
diffSideA = patchSetId;
}
public void setSideB(PatchSet.Id patchSetId) {
idSideB = patchSetId;
diffSideB = patchSetId;
}
public class FileListCmd extends KeyCommand {
public FileListCmd(int mask, int key, String help) {
super(mask, key, help);
}
@Override
public void onKeyPress(final KeyPressEvent event) {
if (fileList == null || fileList.isAttached()) {
final PatchSet.Id psid = patchKey.getParentKey();
fileList = new PatchTable();
fileList.setSavePointerId("PatchTable " + psid);
Util.DETAIL_SVC.patchSetDetail(psid,
new GerritCallback<PatchSetDetail>() {
public void onSuccess(final PatchSetDetail result) {
fileList.display(result);
}
});
}
final PatchBrowserPopup p = new PatchBrowserPopup(patchKey, fileList);
p.open();
}
}
}
|
gerrit-gwtui/src/main/java/com/google/gerrit/client/patches/PatchScreen.java
|
// Copyright (C) 2008 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.client.patches;
import com.google.gerrit.client.Dispatcher;
import com.google.gerrit.client.Gerrit;
import com.google.gerrit.client.RpcStatus;
import com.google.gerrit.client.changes.CommitMessageBlock;
import com.google.gerrit.client.changes.PatchTable;
import com.google.gerrit.client.changes.Util;
import com.google.gerrit.client.rpc.GerritCallback;
import com.google.gerrit.client.rpc.ScreenLoadCallback;
import com.google.gerrit.client.ui.Screen;
import com.google.gerrit.common.data.PatchScript;
import com.google.gerrit.common.data.PatchSetDetail;
import com.google.gerrit.prettify.client.ClientSideFormatter;
import com.google.gerrit.prettify.common.PrettyFactory;
import com.google.gerrit.reviewdb.AccountDiffPreference;
import com.google.gerrit.reviewdb.Change;
import com.google.gerrit.reviewdb.Patch;
import com.google.gerrit.reviewdb.PatchSet;
import com.google.gwt.event.dom.client.KeyPressEvent;
import com.google.gwt.event.logical.shared.CloseEvent;
import com.google.gwt.event.logical.shared.CloseHandler;
import com.google.gwt.event.logical.shared.OpenEvent;
import com.google.gwt.event.logical.shared.OpenHandler;
import com.google.gwt.event.logical.shared.ValueChangeEvent;
import com.google.gwt.event.logical.shared.ValueChangeHandler;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.user.client.Command;
import com.google.gwt.user.client.DeferredCommand;
import com.google.gwt.user.client.rpc.AsyncCallback;
import com.google.gwt.user.client.ui.DisclosurePanel;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.HorizontalPanel;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.VerticalPanel;
import com.google.gwtexpui.globalkey.client.GlobalKey;
import com.google.gwtexpui.globalkey.client.KeyCommand;
import com.google.gwtexpui.globalkey.client.KeyCommandSet;
import com.google.gwtjsonrpc.client.VoidResult;
public abstract class PatchScreen extends Screen implements
CommentEditorContainer {
static final PrettyFactory PRETTY = ClientSideFormatter.FACTORY;
public static class SideBySide extends PatchScreen {
public SideBySide(final Patch.Key id, final int patchIndex,
final PatchSetDetail patchSetDetail, final PatchTable patchTable) {
super(id, patchIndex, patchSetDetail, patchTable);
}
@Override
protected SideBySideTable createContentTable() {
return new SideBySideTable();
}
@Override
protected PatchScreen.Type getPatchScreenType() {
return PatchScreen.Type.SIDE_BY_SIDE;
}
}
public static class Unified extends PatchScreen {
public Unified(final Patch.Key id, final int patchIndex,
final PatchSetDetail patchSetDetail, final PatchTable patchTable) {
super(id, patchIndex, patchSetDetail, patchTable);
}
@Override
protected UnifiedDiffTable createContentTable() {
return new UnifiedDiffTable();
}
@Override
protected PatchScreen.Type getPatchScreenType() {
return PatchScreen.Type.UNIFIED;
}
}
// Which patch set id's are being diff'ed
private static PatchSet.Id diffSideA = null;
private static PatchSet.Id diffSideB = null;
private static Boolean historyOpen = null;
private static final OpenHandler<DisclosurePanel> cacheOpenState =
new OpenHandler<DisclosurePanel>() {
@Override
public void onOpen(OpenEvent<DisclosurePanel> event) {
historyOpen = true;
}
};
private static final CloseHandler<DisclosurePanel> cacheCloseState =
new CloseHandler<DisclosurePanel>() {
@Override
public void onClose(CloseEvent<DisclosurePanel> event) {
historyOpen = false;
}
};
// The change id for which the above patch set id's are valid
private static Change.Id currentChangeId = null;
protected final Patch.Key patchKey;
protected PatchSetDetail patchSetDetail;
protected PatchTable fileList;
protected PatchSet.Id idSideA;
protected PatchSet.Id idSideB;
protected PatchScriptSettingsPanel settingsPanel;
private DisclosurePanel historyPanel;
private HistoryTable historyTable;
private FlowPanel contentPanel;
private Label noDifference;
private AbstractPatchContentTable contentTable;
private CommitMessageBlock commitMessageBlock;
private NavLinks topNav;
private NavLinks bottomNav;
private int rpcSequence;
private PatchScript lastScript;
/** The index of the file we are currently looking at among the fileList */
private int patchIndex;
/** Keys that cause an action on this screen */
private KeyCommandSet keysNavigation;
private HandlerRegistration regNavigation;
/**
* How this patch should be displayed in the patch screen.
*/
public static enum Type {
UNIFIED, SIDE_BY_SIDE
}
protected PatchScreen(final Patch.Key id, final int patchIndex,
final PatchSetDetail detail, final PatchTable patchTable) {
patchKey = id;
patchSetDetail = detail;
fileList = patchTable;
// If we have any diff side stored, make sure they are applicable to the
// current change, discard them otherwise.
//
Change.Id thisChangeId = id.getParentKey().getParentKey();
if (currentChangeId != null && !currentChangeId.equals(thisChangeId)) {
diffSideA = null;
diffSideB = null;
historyOpen = null;
}
currentChangeId = thisChangeId;
idSideA = diffSideA; // null here means we're diff'ing from the Base
idSideB = diffSideB != null ? diffSideB : id.getParentKey();
this.patchIndex = patchIndex;
settingsPanel = new PatchScriptSettingsPanel();
settingsPanel
.addValueChangeHandler(new ValueChangeHandler<AccountDiffPreference>() {
@Override
public void onValueChange(ValueChangeEvent<AccountDiffPreference> event) {
update(event.getValue());
}
});
settingsPanel.getReviewedCheckBox().addValueChangeHandler(
new ValueChangeHandler<Boolean>() {
@Override
public void onValueChange(ValueChangeEvent<Boolean> event) {
setReviewedByCurrentUser(event.getValue());
}
});
}
@Override
public void notifyDraftDelta(int delta) {
lastScript = null;
}
@Override
public void remove(CommentEditorPanel panel) {
lastScript = null;
}
private void update(AccountDiffPreference dp) {
if (lastScript != null && canReuse(dp, lastScript)) {
lastScript.setDiffPrefs(dp);
RpcStatus.INSTANCE.onRpcStart(null);
settingsPanel.setEnabled(false);
DeferredCommand.addCommand(new Command() {
@Override
public void execute() {
try {
onResult(lastScript, false /* not the first time */);
} finally {
RpcStatus.INSTANCE.onRpcComplete(null);
}
}
});
} else {
refresh(false);
}
}
private boolean canReuse(AccountDiffPreference dp, PatchScript last) {
if (last.getDiffPrefs().getIgnoreWhitespace() != dp.getIgnoreWhitespace()) {
// Whitespace ignore setting requires server computation.
return false;
}
final int ctx = dp.getContext();
if (ctx == AccountDiffPreference.WHOLE_FILE_CONTEXT && !last.getA().isWholeFile()) {
// We don't have the entire file here, so we can't render it.
return false;
}
if (last.getDiffPrefs().getContext() < ctx && !last.getA().isWholeFile()) {
// We don't have sufficient context.
return false;
}
if (dp.isSyntaxHighlighting()
&& !last.getA().isWholeFile()) {
// We need the whole file to syntax highlight accurately.
return false;
}
return true;
}
@Override
protected void onInitUI() {
super.onInitUI();
final Change.Id ck = patchKey.getParentKey().getParentKey();
keysNavigation = new KeyCommandSet(Gerrit.C.sectionNavigation());
keysNavigation.add(new UpToChangeCommand(ck, 0, 'u'));
keysNavigation.add(new FileListCmd(0, 'f', PatchUtil.C.fileList()));
historyTable = new HistoryTable(this);
historyPanel = new DisclosurePanel(PatchUtil.C.patchHistoryTitle());
historyPanel.setContent(historyTable);
historyPanel.setVisible(false);
// If the user selected a different patch set than the default for either
// side, expand the history panel
historyPanel.setOpen(diffSideA != null || diffSideB != null
|| (historyOpen != null && historyOpen));
historyPanel.addOpenHandler(cacheOpenState);
historyPanel.addCloseHandler(cacheCloseState);
VerticalPanel vp = new VerticalPanel();
vp.add(historyPanel);
vp.add(settingsPanel);
commitMessageBlock = new CommitMessageBlock("6em");
HorizontalPanel hp = new HorizontalPanel();
hp.setWidth("100%");
hp.add(vp);
hp.add(commitMessageBlock);
add(hp);
noDifference = new Label(PatchUtil.C.noDifference());
noDifference.setStyleName(Gerrit.RESOURCES.css().patchNoDifference());
noDifference.setVisible(false);
contentTable = createContentTable();
contentTable.fileList = fileList;
topNav =
new NavLinks(keysNavigation, patchKey.getParentKey().getParentKey());
bottomNav = new NavLinks(null, patchKey.getParentKey().getParentKey());
add(topNav);
contentPanel = new FlowPanel();
contentPanel.setStyleName(Gerrit.RESOURCES.css()
.sideBySideScreenSideBySideTable());
contentPanel.add(noDifference);
contentPanel.add(contentTable);
add(contentPanel);
add(bottomNav);
if (fileList != null) {
topNav.display(patchIndex, getPatchScreenType(), fileList);
bottomNav.display(patchIndex, getPatchScreenType(), fileList);
}
}
void setReviewedByCurrentUser(boolean reviewed) {
if (fileList != null) {
fileList.updateReviewedStatus(patchKey, reviewed);
}
PatchUtil.DETAIL_SVC.setReviewedByCurrentUser(patchKey, reviewed,
new AsyncCallback<VoidResult>() {
@Override
public void onFailure(Throwable arg0) {
// nop
}
@Override
public void onSuccess(VoidResult result) {
// nop
}
});
}
@Override
protected void onLoad() {
super.onLoad();
if (patchSetDetail == null) {
Util.DETAIL_SVC.patchSetDetail(idSideB,
new GerritCallback<PatchSetDetail>() {
@Override
public void onSuccess(PatchSetDetail result) {
patchSetDetail = result;
if (fileList == null) {
fileList = new PatchTable();
fileList.display(result);
patchIndex = fileList.indexOf(patchKey);
topNav.display(patchIndex, getPatchScreenType(), fileList);
bottomNav.display(patchIndex, getPatchScreenType(), fileList);
}
refresh(true);
}
});
} else {
refresh(true);
}
}
@Override
protected void onUnload() {
if (regNavigation != null) {
regNavigation.removeHandler();
regNavigation = null;
}
super.onUnload();
}
@Override
public void registerKeys() {
super.registerKeys();
contentTable.setRegisterKeys(contentTable.isVisible());
regNavigation = GlobalKey.add(this, keysNavigation);
}
protected abstract AbstractPatchContentTable createContentTable();
protected abstract PatchScreen.Type getPatchScreenType();
protected void refresh(final boolean isFirst) {
final int rpcseq = ++rpcSequence;
lastScript = null;
settingsPanel.setEnabled(false);
PatchUtil.DETAIL_SVC.patchScript(patchKey, idSideA, idSideB, //
settingsPanel.getValue(), new ScreenLoadCallback<PatchScript>(this) {
@Override
protected void preDisplay(final PatchScript result) {
if (rpcSequence == rpcseq) {
onResult(result, isFirst);
}
}
@Override
public void onFailure(final Throwable caught) {
if (rpcSequence == rpcseq) {
settingsPanel.setEnabled(true);
super.onFailure(caught);
}
}
});
}
private void onResult(final PatchScript script, final boolean isFirst) {
final Change.Key cid = script.getChangeId();
final String path = PatchTable.getDisplayFileName(patchKey);
String fileName = path;
final int last = fileName.lastIndexOf('/');
if (last >= 0) {
fileName = fileName.substring(last + 1);
}
setWindowTitle(PatchUtil.M.patchWindowTitle(cid.abbreviate(), fileName));
setPageTitle(PatchUtil.M.patchPageTitle(cid.abbreviate(), path));
if (idSideB.equals(patchSetDetail.getPatchSet().getId())) {
commitMessageBlock.setVisible(true);
commitMessageBlock.display(patchSetDetail.getInfo().getMessage());
} else {
commitMessageBlock.setVisible(false);
Util.DETAIL_SVC.patchSetDetail(idSideB,
new GerritCallback<PatchSetDetail>() {
@Override
public void onSuccess(PatchSetDetail result) {
commitMessageBlock.display(result.getInfo().getMessage());
}
});
}
historyTable.display(script.getHistory());
historyPanel.setVisible(true);
// True if there are differences between the two patch sets
boolean hasEdits = !script.getEdits().isEmpty();
// True if this change is a mode change or a pure rename/copy
boolean hasMeta = !script.getPatchHeader().isEmpty();
boolean hasDifferences = hasEdits || hasMeta;
boolean pureMetaChange = !hasEdits && hasMeta;
if (contentTable instanceof SideBySideTable && pureMetaChange) {
// User asked for SideBySide (or a link guessed, wrong) and we can't
// show a binary or pure-rename change there accurately. Switch to
// the unified view instead.
//
contentTable.removeFromParent();
contentTable = new UnifiedDiffTable();
contentTable.fileList = fileList;
contentPanel.add(contentTable);
setToken(Dispatcher.toPatchUnified(patchKey));
}
if (hasDifferences) {
contentTable.display(patchKey, idSideA, idSideB, script);
contentTable.display(script.getCommentDetail());
contentTable.finishDisplay();
}
showPatch(hasDifferences);
settingsPanel.setEnableSmallFileFeatures(!script.isHugeFile());
settingsPanel.setEnableIntralineDifference(script.hasIntralineDifference());
settingsPanel.setEnabled(true);
lastScript = script;
// Mark this file reviewed as soon as we display the diff screen
if (Gerrit.isSignedIn() && isFirst) {
settingsPanel.getReviewedCheckBox().setValue(true);
setReviewedByCurrentUser(true /* reviewed */);
}
}
private void showPatch(final boolean showPatch) {
noDifference.setVisible(!showPatch);
contentTable.setVisible(showPatch);
contentTable.setRegisterKeys(isCurrentView() && showPatch);
}
public void setSideA(PatchSet.Id patchSetId) {
idSideA = patchSetId;
diffSideA = patchSetId;
}
public void setSideB(PatchSet.Id patchSetId) {
idSideB = patchSetId;
diffSideB = patchSetId;
}
public class FileListCmd extends KeyCommand {
public FileListCmd(int mask, int key, String help) {
super(mask, key, help);
}
@Override
public void onKeyPress(final KeyPressEvent event) {
if (fileList == null || fileList.isAttached()) {
final PatchSet.Id psid = patchKey.getParentKey();
fileList = new PatchTable();
fileList.setSavePointerId("PatchTable " + psid);
Util.DETAIL_SVC.patchSetDetail(psid,
new GerritCallback<PatchSetDetail>() {
public void onSuccess(final PatchSetDetail result) {
fileList.display(result);
}
});
}
final PatchBrowserPopup p = new PatchBrowserPopup(patchKey, fileList);
p.open();
}
}
}
|
Show commit message in PatchScreen if old patch sets are compared
The commit message in the PatchScreen was only
displayed if the Base or any patch set was
compared with the newest patch set. If the Base or
any patch set was compared with an old patch set,
the commit message was not displayed.
Signed-off-by: Edwin Kempin <[email protected]>
|
gerrit-gwtui/src/main/java/com/google/gerrit/client/patches/PatchScreen.java
|
Show commit message in PatchScreen if old patch sets are compared
|
<ide><path>gerrit-gwtui/src/main/java/com/google/gerrit/client/patches/PatchScreen.java
<ide> new GerritCallback<PatchSetDetail>() {
<ide> @Override
<ide> public void onSuccess(PatchSetDetail result) {
<add> commitMessageBlock.setVisible(true);
<ide> commitMessageBlock.display(result.getInfo().getMessage());
<ide> }
<ide> });
|
|
Java
|
bsd-3-clause
|
676b22cdf3b4ff0bdaa2e774e19a8699f242f027
| 0 |
dhimmel/owltools,dhimmel/owltools,owlcollab/owltools,owlcollab/owltools,owlcollab/owltools,fbastian/owltools,dhimmel/owltools,fbastian/owltools,owlcollab/owltools,dhimmel/owltools,dhimmel/owltools,fbastian/owltools,fbastian/owltools,fbastian/owltools,fbastian/owltools,dhimmel/owltools,owlcollab/owltools,owlcollab/owltools
|
package owltools.solrj;
import java.io.IOException;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.common.SolrInputDocument;
import org.semanticweb.owlapi.model.OWLClass;
import org.semanticweb.owlapi.model.OWLObject;
import org.semanticweb.owlapi.model.OWLObjectProperty;
import org.semanticweb.owlapi.model.OWLPropertyExpression;
import owltools.gaf.Bioentity;
import owltools.gaf.EcoTools;
import owltools.gaf.ExtensionExpression;
import owltools.gaf.GafDocument;
import owltools.gaf.GeneAnnotation;
import owltools.gaf.TaxonTools;
import owltools.gaf.WithInfo;
import owltools.graph.OWLGraphEdge;
import owltools.graph.OWLGraphWrapper;
import owltools.graph.OWLQuantifiedProperty;
import owltools.panther.PANTHERForest;
import owltools.panther.PANTHERTree;
import com.google.gson.*;
/**
* A very specific class for the specific use case of loading in a GAF-like document into a Solr index.
* This method is very non-generic and GO-specific, and does not use the YAML configuration files to make
* things easy for mirroring a BBOP-JS constrained Solr index.
*/
public class GafSolrDocumentLoader extends AbstractSolrLoader {
private static Logger LOG = Logger.getLogger(GafSolrDocumentLoader.class);
EcoTools eco = null;
TaxonTools taxo = null;
PANTHERForest pset = null;
GafDocument gafDocument;
int doc_limit_trigger = 1000; // the number of documents to add before pushing out to solr
//int doc_limit_trigger = 1; // the number of documents to add before pushing out to solr
int current_doc_number;
public GafSolrDocumentLoader(String url) throws MalformedURLException {
super(url);
current_doc_number = 0;
}
public GafDocument getGafDocument() {
return gafDocument;
}
public void setGafDocument(GafDocument gafDocument) {
this.gafDocument = gafDocument;
}
public void setEcoTools(EcoTools inEco) {
this.eco = inEco;
}
public void setTaxonTools(TaxonTools inTaxo) {
this.taxo = inTaxo;
}
public void setPANTHERSet(PANTHERForest inPSet) {
this.pset = inPSet;
}
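// Editorial note, not part of the original source: a hypothetical usage sketch of the
// setters above. The Solr URL and local variable names are illustrative assumptions;
// the ontology graph used by this loader comes from AbstractSolrLoader and is assumed
// to be populated elsewhere before load() is called.
//
//   GafSolrDocumentLoader loader = new GafSolrDocumentLoader("http://localhost:8080/solr/");
//   loader.setEcoTools(ecoTools);          // ECO closure helper (assumed already constructed)
//   loader.setTaxonTools(taxonTools);      // taxon closure helper (assumed already constructed)
//   loader.setPANTHERSet(pantherForest);   // optional PANTHER family data
//   loader.setGafDocument(gafDoc);         // parsed GAF document to index
//   loader.load();                         // builds bioentity/annotation docs and commits them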
@Override
public void load() throws SolrServerException, IOException {
gafDocument.index();
LOG.info("Loading: " + gafDocument.getDocumentPath());
for (Bioentity e : gafDocument.getBioentities()) {
add(e);
current_doc_number++;
if( current_doc_number % doc_limit_trigger == 0 ){
LOG.info("Processed " + doc_limit_trigger + " bioentities at " + current_doc_number + " and committing...");
incrementalAddAndCommit();
}
}
LOG.info("Doing cleanup commit.");
incrementalAddAndCommit(); // pick up anything that we didn't catch
//LOG.info("Optimizing.");
//server.optimize();
LOG.info("Done.");
}
// private OWLObjectProperty getPartOfProperty() {
// OWLObjectProperty p = graph.getOWLObjectPropertyByIdentifier("BFO:0000050");
// return p;
// }
// Main wrapping for adding non-ontology documents to GOlr.
// Also see OntologySolrLoader.
private void add(Bioentity e) {
String eid = e.getId();
String esym = e.getSymbol();
String edb = e.getDb();
String etype = e.getTypeCls();
String ename = e.getFullName();
String edbid = e.getDBID();
//LOG.info("Adding: " + eid + " " + esym);
// We'll need this for serializing later.
Gson gson = new Gson();
SolrInputDocument bioentity_doc = new SolrInputDocument();
// Bioentity document base.
bioentity_doc.addField("document_category", "bioentity");
bioentity_doc.addField("id", eid);
bioentity_doc.addField("bioentity", eid);
bioentity_doc.addField("bioentity_internal_id", edbid);
bioentity_doc.addField("bioentity_label", esym);
bioentity_doc.addField("bioentity_name", ename);
bioentity_doc.addField("source", edb);
bioentity_doc.addField("type", etype);
// A little more work for the synonyms.
List<String> esynonyms = e.getSynonyms();
if( ! esynonyms.isEmpty() ){
bioentity_doc.addField("synonym", esynonyms);
}
// Various taxon and taxon closure calculations, including map.
String etaxid = e.getNcbiTaxonId();
bioentity_doc.addField("taxon", etaxid);
addLabelField(bioentity_doc, "taxon_label", etaxid);
// Add taxon_closure and taxon_closure_label.
OWLClass tcls = graph.getOWLClassByIdentifier(etaxid);
Set<OWLClass> taxSuper = taxo.getAncestors(tcls, true);
// Collect information: ids and labels.
List<String> taxIDClosure = new ArrayList<String>();
List<String> taxLabelClosure = new ArrayList<String>();
Map<String,String> taxon_closure_map = new HashMap<String,String>();
for( OWLClass ts : taxSuper ){
String tid = graph.getIdentifier(ts);
String tlbl = graph.getLabel(ts);
taxIDClosure.add(tid);
taxLabelClosure.add(tlbl);
taxon_closure_map.put(tid, tlbl);
}
// Compile closure map to JSON and add to the document.
String jsonized_taxon_map = null;
if( ! taxon_closure_map.isEmpty() ){
jsonized_taxon_map = gson.toJson(taxon_closure_map);
}
// Optionally, if there is enough taxon for a map, add the collections to the document.
if( jsonized_taxon_map != null ){
bioentity_doc.addField("taxon_closure", taxIDClosure);
bioentity_doc.addField("taxon_closure_label", taxLabelClosure);
bioentity_doc.addField("taxon_closure_map", jsonized_taxon_map);
}
// Optionally, pull information from the PANTHER file set.
List<String> pantherFamilyIDs = new ArrayList<String>();
List<String> pantherFamilyLabels = new ArrayList<String>();
List<String> pantherTreeGraphs = new ArrayList<String>();
//List<String> pantherTreeAnnAncestors = new ArrayList<String>();
//List<String> pantherTreeAnnDescendants = new ArrayList<String>();
if( pset != null && pset.getNumberOfFilesInSet() > 0 ){
Set<PANTHERTree> pTrees = pset.getAssociatedTrees(eid);
if( pTrees != null ){
Iterator<PANTHERTree> piter = pTrees.iterator();
int pcnt = 0; // DEBUG
while( piter.hasNext() ){
pcnt++; // DEBUG
PANTHERTree ptree = piter.next();
pantherFamilyIDs.add(ptree.getTreeID());
pantherFamilyLabels.add(ptree.getTreeLabel());
pantherTreeGraphs.add(ptree.getOWLShuntGraph().toJSON());
//pantherTreeAnnAncestors = new ArrayList<String>(ptree.getAncestorAnnotations(eid));
//pantherTreeAnnDescendants = new ArrayList<String>(ptree.getDescendantAnnotations(eid));
if( pcnt > 1 ){ // DEBUG
LOG.info("Belongs to multiple families: " + StringUtils.join(pantherFamilyIDs, ", "));
}
}
}
}
// Optionally, actually /add/ the PANTHER family data to the document.
if( ! pantherFamilyIDs.isEmpty() ){
bioentity_doc.addField("family_tag", pantherFamilyIDs);
bioentity_doc.addField("family_tag_label", pantherFamilyLabels);
bioentity_doc.addField("phylo_graph", pantherTreeGraphs);
//if( ! pantherTreeAnnAncestors.isEmpty() ){
// bioentity_doc.addField("phylo_ancestor_closure", pantherTreeAnnAncestors);
//}
//if( ! pantherTreeAnnDescendants.isEmpty() ){
// bioentity_doc.addField("phylo_descendant_closure", pantherTreeAnnDescendants);
//}
}
// Something that we'll need for the annotation evidence aggregate later.
Map<String,SolrInputDocument> evAggDocMap = new HashMap<String,SolrInputDocument>();
// Annotation doc.
// We'll also need to be collecting some aggregate information, like for the GP term closures, which will be
// added at the end of this section.
Map<String, String> isap_map = new HashMap<String, String>();
Map<String, String> reg_map = new HashMap<String, String>();
for (GeneAnnotation a : gafDocument.getGeneAnnotations(e.getId())) {
SolrInputDocument annotation_doc = new SolrInputDocument();
String clsId = a.getCls();
String refId = a.getReferenceId();
// Annotation document base from static and previous bioentity.
annotation_doc.addField("document_category", "annotation"); // n/a
annotation_doc.addField("source", edb); // Col. 1 (from bioentity above)
annotation_doc.addField("bioentity", eid); // n/a, should be c1+c2.
annotation_doc.addField("bioentity_internal_id", edbid); // Col. 2 (from bioentity above)
annotation_doc.addField("bioentity_label", esym); // Col. 3 (from bioentity above)
String aqual = a.getCompositeQualifier();
annotation_doc.addField("qualifier", aqual); // Col. 4
annotation_doc.addField("annotation_class", clsId); // Col. 5
addLabelField(annotation_doc, "annotation_class_label", clsId); // n/a
annotation_doc.addField("reference", refId); // Col. 6
String a_ev_type = a.getEvidenceCls();
annotation_doc.addField("evidence_type", a_ev_type); // Col. 7
// NOTE: Col. 8 generation is below...
String a_aspect = a.getAspect();
annotation_doc.addField("aspect", a_aspect); // Col. 9
annotation_doc.addField("bioentity_name", ename); // Col. 10 (from bioentity above)
annotation_doc.addField("synonym", esynonyms); // Col. 11 (from bioentity above)
annotation_doc.addField("type", etype); // Col. 12 (from bioentity above)
annotation_doc.addField("taxon", etaxid); // Col. 13(?) (from bioentity above)
addLabelField(annotation_doc, "taxon_label", etaxid); // n/a
String adate = a.getLastUpdateDate();
annotation_doc.addField("date", adate); // Col. 14
String assgnb = a.getAssignedBy();
annotation_doc.addField("assigned_by", assgnb); // Col. 15
// NOTE: Col. generation is 16 below...
annotation_doc.addField("bioentity_isoform", a.getGeneProductForm()); // Col. 17
// Optionally, if there is enough taxon for a map, add the collections to the document.
if( jsonized_taxon_map != null ){
annotation_doc.addField("taxon_closure", taxIDClosure);
annotation_doc.addField("taxon_closure_label", taxLabelClosure);
annotation_doc.addField("taxon_closure_map", jsonized_taxon_map);
}
// Optionally, actually /add/ the PANTHER family data to the document.
if( ! pantherFamilyIDs.isEmpty() ){
annotation_doc.addField("family_tag", pantherFamilyIDs);
annotation_doc.addField("family_tag_label", pantherFamilyLabels);
}
// BUG/TODO: Make the ID /really/ unique - ask Chris
annotation_doc.addField("id", eid + "_:_" + clsId + "_:_" + a_ev_type + "_:_" + assgnb + "_:_" + etaxid + "_:_" + adate);
// Evidence type closure.
Set<OWLClass> ecoClasses = eco.getClassesForGoCode(a_ev_type);
Set<OWLClass> ecoSuper = eco.getAncestors(ecoClasses, true);
List<String> ecoIDClosure = new ArrayList<String>();
for( OWLClass es : ecoSuper ){
String itemID = es.toStringID();
ecoIDClosure.add(itemID);
}
addLabelFields(annotation_doc, "evidence_type_closure", ecoIDClosure);
// Drag in "with" (col 8).
//annotation_doc.addField("evidence_with", a.getWithExpression());
for (WithInfo wi : a.getWithInfos()) {
annotation_doc.addField("evidence_with", wi.getWithXref());
}
///
/// isa_partof_closure
///
OWLObject cls = graph.getOWLObjectByIdentifier(clsId);
// TODO: This may be a bug workaround, or it may be the way things are.
// getOWLObjectByIdentifier returns null on alt_ids, so skip them for now.
if( cls != null ){
// System.err.println(clsId);
// Is-a part-of closures.
ArrayList<String> isap = new ArrayList<String>();
isap.add("BFO:0000050");
Map<String, String> curr_isap_map = addClosureToAnnAndBio(isap, "isa_partof_closure", "isa_partof_closure_label", "isa_partof_closure_map",
cls, graph, annotation_doc, bioentity_doc, gson);
isap_map.putAll(curr_isap_map); // add to aggregate map
// // Add to annotation and bioentity isa_partof closures; label and id.
// List<String> idClosure = graph.getRelationIDClosure(cls, isap);
// List<String> labelClosure = graph.getRelationLabelClosure(cls, isap);
// annotation_doc.addField("isa_partof_closure", idClosure);
// annotation_doc.addField("isa_partof_closure_label", labelClosure);
// for( String tlabel : labelClosure){
// addFieldUnique(bioentity_doc, "isa_partof_closure_label", tlabel);
// }
// for( String tid : idClosure){
// addFieldUnique(bioentity_doc, "isa_partof_closure", tid);
// }
//
// // Compile closure maps to JSON.
// Map<String, String> isa_partof_map = graph.getRelationClosureMap(cls, isap);
// if( ! isa_partof_map.isEmpty() ){
// String jsonized_isa_partof_map = gson.toJson(isa_partof_map);
// annotation_doc.addField("isa_partof_closure_map", jsonized_isa_partof_map);
// }
// Regulates closures.
ArrayList<String> reg = new ArrayList<String>();
reg.add("BFO:0000050");
reg.add("RO:0002211");
reg.add("RO:0002212");
reg.add("RO:0002213");
Map<String, String> curr_reg_map = addClosureToAnnAndBio(reg, "regulates_closure", "regulates_closure_label", "regulates_closure_map",
cls, graph, annotation_doc, bioentity_doc, gson);
reg_map.putAll(curr_reg_map); // add to aggregate map
///
/// Next, work on the evidence aggregate...
///
// Bug/TODO: This is a bit of a slowdown since we're not reusing our work from above here anymore.
List<String> idIsapClosure = graph.getRelationIDClosure(cls, isap);
Map<String, String> isaPartofMap = graph.getRelationClosureMap(cls, isap);
// When we cycle, we'll also want to do some stuff to track all of the evidence codes we see.
List<String> aggEvIDClosure = new ArrayList<String>();
List<String> aggEvWiths = new ArrayList<String>();
// Cycle through and pick up all the associated bits for the terms in the closure.
SolrInputDocument ev_agg_doc = null;
for( String tid : idIsapClosure ){
String tlabel = isaPartofMap.get(tid);
//OWLObject c = graph.getOWLObjectByIdentifier(tid);
// Only have to do the annotation evidence aggregate base once.
// Otherwise, just skip over and add the multi fields separately.
String evAggId = eid + "_:ev:_" + clsId;
if (evAggDocMap.containsKey(evAggId)) {
ev_agg_doc = evAggDocMap.get(evAggId);
} else {
ev_agg_doc = new SolrInputDocument();
evAggDocMap.put(evAggId, ev_agg_doc);
ev_agg_doc.addField("id", evAggId);
ev_agg_doc.addField("document_category", "annotation_evidence_aggregate");
ev_agg_doc.addField("bioentity", eid);
ev_agg_doc.addField("bioentity_label", esym);
ev_agg_doc.addField("annotation_class", tid);
ev_agg_doc.addField("annotation_class_label", tlabel);
ev_agg_doc.addField("taxon", etaxid);
addLabelField(ev_agg_doc, "taxon_label", etaxid);
// Optionally, if there is enough taxon for a map, add the collections to the document.
if( jsonized_taxon_map != null ){
ev_agg_doc.addField("taxon_closure", taxIDClosure);
ev_agg_doc.addField("taxon_closure_label", taxLabelClosure);
ev_agg_doc.addField("taxon_closure_map", jsonized_taxon_map);
}
// Optionally, actually /add/ the PANTHER family data to the document.
if( ! pantherFamilyIDs.isEmpty() ){
ev_agg_doc.addField("family_tag", pantherFamilyIDs);
ev_agg_doc.addField("family_tag_label", pantherFamilyLabels);
}
}
// Drag in "with" (col 8), this time for ev_agg.
for (WithInfo wi : a.getWithInfos()) {
aggEvWiths.add(wi.getWithXref());
}
// Make note for the evidence type closure.
aggEvIDClosure.add(a.getEvidenceCls());
}
// If there was actually a doc created/there, add the cumulative fields to it.
if( ev_agg_doc != null ){
addLabelFields(ev_agg_doc, "evidence_type_closure", aggEvIDClosure);
addLabelFields(ev_agg_doc, "evidence_with", aggEvWiths);
}
}
// Map<String,String> isa_partof_map = new HashMap<String,String>(); // capture labels/ids
// OWLObject c = graph.getOWLObjectByIdentifier(clsId);
// Set<OWLPropertyExpression> ps = Collections.singleton((OWLPropertyExpression)getPartOfProperty());
// Set<OWLObject> ancs = graph.getAncestors(c, ps);
// for (OWLObject t : ancs) {
// if (! (t instanceof OWLClass))
// continue;
// String tid = graph.getIdentifier(t);
// //System.out.println(edge+" TGT:"+tid);
// String tlabel = null;
// if (t != null)
// tlabel = graph.getLabel(t);
// annotation_doc.addField("isa_partof_closure", tid);
// addFieldUnique(bioentity_doc, "isa_partof_closure", tid);
// if (tlabel != null) {
// annotation_doc.addField("isa_partof_closure_label", tlabel);
// addFieldUnique(bioentity_doc, "isa_partof_closure_label", tlabel);
// // Map both ways.
// // TODO: collisions shouldn't be an issue here?
// isa_partof_map.put(tid, tlabel);
// isa_partof_map.put(tlabel, tid);
// }else{
// // For the time being at least, I want to ensure that the id and label closures
// // mirror eachother as much as possible (for facets and mapping, etc.). Without
// // this, in some cases there is simply nothing returned to drill on.
// annotation_doc.addField("isa_partof_closure_label", tid);
// addFieldUnique(bioentity_doc, "isa_partof_closure_label", tid);
// // Map just the one way I guess--see above.
// isa_partof_map.put(tid, tid);
// }
//
// // Annotation evidence aggregate base.
// String evAggId = eid + "_:ev:_" + clsId;
// SolrInputDocument ev_agg_doc;
// if (evAggDocMap.containsKey(evAggId)) {
// ev_agg_doc = evAggDocMap.get(evAggId);
// }
// else {
// ev_agg_doc = new SolrInputDocument();
// evAggDocMap.put(evAggId, ev_agg_doc);
// ev_agg_doc.addField("id", evAggId);
// ev_agg_doc.addField("document_category", "annotation_evidence_aggregate");
// ev_agg_doc.addField("bioentity", eid);
// ev_agg_doc.addField("bioentity_label", esym);
// ev_agg_doc.addField("annotation_class", tid);
// ev_agg_doc.addField("annotation_class_label", tlabel);
// ev_agg_doc.addField("taxon", taxId);
// addLabelField(ev_agg_doc, "taxon_label", taxId);
// }
//
// //evidence_type is single valued
// //aggDoc.addField("evidence_type", a.getEvidenceCls());
//
// // Drag in "with" (col 8), this time for ev_agg.
// for (WithInfo wi : a.getWithInfos()) {
// ev_agg_doc.addField("evidence_with", wi.getWithXref());
// }
//
// //aggDoc.getFieldValues(name)
// // TODO:
// ev_agg_doc.addField("evidence_type_closure", a.getEvidenceCls());
// }
// Column 16.
Map<String,String> ann_ext_map = new HashMap<String,String>(); // capture labels/ids
for (ExtensionExpression ee : a.getExtensionExpressions()) {
ee.getRelation(); // TODO
String eeid = ee.getCls();
OWLObject eObj = graph.getOWLObjectByIdentifier(eeid);
annotation_doc.addField("annotation_extension_class", eeid);
addLabelField(annotation_doc, "annotation_extension_class_label", eeid);
if (eObj != null) {
for (OWLGraphEdge edge : graph.getOutgoingEdgesClosureReflexive(eObj)) {
OWLObject t = edge.getTarget();
if (!(t instanceof OWLClass))
continue;
String annExtID = graph.getIdentifier(t);
String annExtLabel = graph.getLabel(edge.getTarget());
annotation_doc.addField("annotation_extension_class_closure", annExtID);
annotation_doc.addField("annotation_extension_class_closure_label", annExtLabel);
ann_ext_map.put(annExtID, annExtLabel);
ann_ext_map.put(annExtLabel, annExtID);
}
}
}
// Add annotation ext closure map to annotation doc.
if( ! ann_ext_map.isEmpty() ){
String jsonized_ann_ext_map = gson.toJson(ann_ext_map);
annotation_doc.addField("annotation_extension_class_closure_map", jsonized_ann_ext_map);
}
// Finally add doc.
add(annotation_doc);
}
// Add the necessary aggregates to the bio doc. These cannot be done incrementally like the multi-valued closures
// since there can only be a single map.
if( ! isap_map.isEmpty() ){
String jsonized_cmap = gson.toJson(isap_map);
bioentity_doc.addField("isa_partof_closure_map", jsonized_cmap);
}
if( ! reg_map.isEmpty() ){
String jsonized_cmap = gson.toJson(reg_map);
bioentity_doc.addField("regulates_closure_map", jsonized_cmap);
}
add(bioentity_doc);
for (SolrInputDocument ev_agg_doc : evAggDocMap.values()) {
add(ev_agg_doc);
}
}
private void addFieldUnique(SolrInputDocument d, String field, String val) {
if (val == null)
return;
Collection<Object> vals = d.getFieldValues(field);
if (vals != null && vals.contains(val))
return;
d.addField(field, val);
}
private void addLabelField(SolrInputDocument d, String field, String id) {
OWLObject obj = graph.getOWLObjectByIdentifier(id);
if (obj == null)
return;
String label = graph.getLabel(obj);
if (label != null)
d.addField(field, label);
}
private void addLabelFields(SolrInputDocument d, String field, List<String> ids) {
List<String> labelAccumu = new ArrayList<String>();
for( String id : ids ){
OWLObject obj = graph.getOWLObjectByIdentifier(id);
if (obj != null){
String label = graph.getLabel(obj);
if (label != null){
labelAccumu.add(label);
}
}
}
if( ! labelAccumu.isEmpty() ){
d.addField(field, labelAccumu);
}
}
// private Set<String> edgeToField(OWLGraphEdge edge) {
// List<OWLQuantifiedProperty> qpl = edge.getQuantifiedPropertyList();
// if (qpl.size() == 0) {
// return Collections.singleton("isa_partof");
// }
// else if (qpl.size() == 1) {
// return qpToFields(qpl.get(0));
// }
// else {
// return Collections.EMPTY_SET;
// }
// }
//
// private Set<String> qpToFields(OWLQuantifiedProperty qp) {
// if (qp.isSubClassOf()) {
// return Collections.singleton("isa_partof");
// }
// else {
// // TODO
// return Collections.singleton("isa_partof");
// }
// //return Collections.EMPTY_SET;
// }
/*
* Add specified closure of OWLObject to annotation and bioentity docs.
*/
private Map<String, String> addClosureToAnnAndBio(ArrayList<String> relations, String closureName, String closureNameLabel, String closureMap,
OWLObject cls, OWLGraphWrapper graph, SolrInputDocument ann_doc, SolrInputDocument bio_doc, Gson gson){
// Add closures to doc; label and id.
List<String> idClosure = graph.getRelationIDClosure(cls, relations);
List<String> labelClosure = graph.getRelationLabelClosure(cls, relations);
ann_doc.addField(closureName, idClosure);
ann_doc.addField(closureNameLabel, labelClosure);
for( String tid : idClosure){
addFieldUnique(bio_doc, closureName, tid);
}
for( String tlabel : labelClosure){
addFieldUnique(bio_doc, closureNameLabel, tlabel);
}
// Compile closure maps to JSON.
Map<String, String> cmap = graph.getRelationClosureMap(cls, relations);
if( ! cmap.isEmpty() ){
String jsonized_cmap = gson.toJson(cmap);
ann_doc.addField(closureMap, jsonized_cmap);
			// NOTE: This is harder since we'd be adding multiple, so this is done on a collector variable elsewhere.
//bio_doc.addField(closureMap, jsonized_cmap);
}
return cmap;
}
}
|
OWLTools-Solr/src/main/java/owltools/solrj/GafSolrDocumentLoader.java
|
package owltools.solrj;
import java.io.IOException;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.common.SolrInputDocument;
import org.semanticweb.owlapi.model.OWLClass;
import org.semanticweb.owlapi.model.OWLObject;
import org.semanticweb.owlapi.model.OWLObjectProperty;
import org.semanticweb.owlapi.model.OWLPropertyExpression;
import owltools.gaf.Bioentity;
import owltools.gaf.EcoTools;
import owltools.gaf.ExtensionExpression;
import owltools.gaf.GafDocument;
import owltools.gaf.GeneAnnotation;
import owltools.gaf.TaxonTools;
import owltools.gaf.WithInfo;
import owltools.graph.OWLGraphEdge;
import owltools.graph.OWLQuantifiedProperty;
import owltools.panther.PANTHERForest;
import owltools.panther.PANTHERTree;
import com.google.gson.*;
/**
* A very specific class for the specific use case of loading in a GAF-like document into a Solr index.
 * This class is very non-generic and GO-specific, and does not use the YAML configuration files to make
 * things easy for mirroring a BBOP-JS constrained Solr index.
*/
public class GafSolrDocumentLoader extends AbstractSolrLoader {
private static Logger LOG = Logger.getLogger(GafSolrDocumentLoader.class);
EcoTools eco = null;
TaxonTools taxo = null;
PANTHERForest pset = null;
GafDocument gafDocument;
int doc_limit_trigger = 1000; // the number of documents to add before pushing out to solr
//int doc_limit_trigger = 1; // the number of documents to add before pushing out to solr
int current_doc_number;
public GafSolrDocumentLoader(String url) throws MalformedURLException {
super(url);
current_doc_number = 0;
}
public GafDocument getGafDocument() {
return gafDocument;
}
public void setGafDocument(GafDocument gafDocument) {
this.gafDocument = gafDocument;
}
public void setEcoTools(EcoTools inEco) {
this.eco = inEco;
}
public void setTaxonTools(TaxonTools inTaxo) {
this.taxo = inTaxo;
}
public void setPANTHERSet(PANTHERForest inPSet) {
this.pset = inPSet;
}
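	// Load the GAF document into the index: process each bioentity (plus its annotation docs) and commit in batches of doc_limit_trigger.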
@Override
public void load() throws SolrServerException, IOException {
gafDocument.index();
LOG.info("Loading: " + gafDocument.getDocumentPath());
for (Bioentity e : gafDocument.getBioentities()) {
add(e);
current_doc_number++;
if( current_doc_number % doc_limit_trigger == 0 ){
LOG.info("Processed " + doc_limit_trigger + " bioentities at " + current_doc_number + " and committing...");
incrementalAddAndCommit();
}
}
LOG.info("Doing cleanup commit.");
incrementalAddAndCommit(); // pick up anything that we didn't catch
//LOG.info("Optimizing.");
//server.optimize();
LOG.info("Done.");
}
// private OWLObjectProperty getPartOfProperty() {
// OWLObjectProperty p = graph.getOWLObjectPropertyByIdentifier("BFO:0000050");
// return p;
// }
// Main wrapping for adding non-ontology documents to GOlr.
// Also see OntologySolrLoader.
private void add(Bioentity e) {
String eid = e.getId();
String esym = e.getSymbol();
String edb = e.getDb();
String etype = e.getTypeCls();
String ename = e.getFullName();
String edbid = e.getDBID();
//LOG.info("Adding: " + eid + " " + esym);
// We'll need this for serializing later.
Gson gson = new Gson();
SolrInputDocument bioentity_doc = new SolrInputDocument();
// Bioentity document base.
bioentity_doc.addField("document_category", "bioentity");
bioentity_doc.addField("id", eid);
bioentity_doc.addField("bioentity", eid);
bioentity_doc.addField("bioentity_internal_id", edbid);
bioentity_doc.addField("bioentity_label", esym);
bioentity_doc.addField("bioentity_name", ename);
bioentity_doc.addField("source", edb);
bioentity_doc.addField("type", etype);
// A little more work for the synonyms.
List<String> esynonyms = e.getSynonyms();
if( ! esynonyms.isEmpty() ){
bioentity_doc.addField("synonym", esynonyms);
}
// Various taxon and taxon closure calculations, including map.
String etaxid = e.getNcbiTaxonId();
bioentity_doc.addField("taxon", etaxid);
addLabelField(bioentity_doc, "taxon_label", etaxid);
// Add taxon_closure and taxon_closure_label.
OWLClass tcls = graph.getOWLClassByIdentifier(etaxid);
Set<OWLClass> taxSuper = taxo.getAncestors(tcls, true);
// Collect information: ids and labels.
List<String> taxIDClosure = new ArrayList<String>();
List<String> taxLabelClosure = new ArrayList<String>();
Map<String,String> taxon_closure_map = new HashMap<String,String>();
for( OWLClass ts : taxSuper ){
String tid = graph.getIdentifier(ts);
String tlbl = graph.getLabel(ts);
taxIDClosure.add(tid);
taxLabelClosure.add(tlbl);
taxon_closure_map.put(tid, tlbl);
}
// Compile closure map to JSON and add to the document.
String jsonized_taxon_map = null;
if( ! taxon_closure_map.isEmpty() ){
jsonized_taxon_map = gson.toJson(taxon_closure_map);
}
// Optionally, if there is enough taxon for a map, add the collections to the document.
if( jsonized_taxon_map != null ){
bioentity_doc.addField("taxon_closure", taxIDClosure);
bioentity_doc.addField("taxon_closure_label", taxLabelClosure);
bioentity_doc.addField("taxon_closure_map", jsonized_taxon_map);
}
// Optionally, pull information from the PANTHER file set.
List<String> pantherFamilyIDs = new ArrayList<String>();
List<String> pantherFamilyLabels = new ArrayList<String>();
List<String> pantherTreeGraphs = new ArrayList<String>();
//List<String> pantherTreeAnnAncestors = new ArrayList<String>();
//List<String> pantherTreeAnnDescendants = new ArrayList<String>();
if( pset != null && pset.getNumberOfFilesInSet() > 0 ){
Set<PANTHERTree> pTrees = pset.getAssociatedTrees(eid);
if( pTrees != null ){
Iterator<PANTHERTree> piter = pTrees.iterator();
int pcnt = 0; // DEBUG
while( piter.hasNext() ){
pcnt++; // DEBUG
PANTHERTree ptree = piter.next();
pantherFamilyIDs.add(ptree.getTreeID());
pantherFamilyLabels.add(ptree.getTreeLabel());
pantherTreeGraphs.add(ptree.getOWLShuntGraph().toJSON());
//pantherTreeAnnAncestors = new ArrayList<String>(ptree.getAncestorAnnotations(eid));
//pantherTreeAnnDescendants = new ArrayList<String>(ptree.getDescendantAnnotations(eid));
if( pcnt > 1 ){ // DEBUG
LOG.info("Belongs to multiple families: " + StringUtils.join(pantherFamilyIDs, ", "));
}
}
}
}
// Optionally, actually /add/ the PANTHER family data to the document.
if( ! pantherFamilyIDs.isEmpty() ){
bioentity_doc.addField("family_tag", pantherFamilyIDs);
bioentity_doc.addField("family_tag_label", pantherFamilyLabels);
bioentity_doc.addField("phylo_graph", pantherTreeGraphs);
//if( ! pantherTreeAnnAncestors.isEmpty() ){
// bioentity_doc.addField("phylo_ancestor_closure", pantherTreeAnnAncestors);
//}
//if( ! pantherTreeAnnDescendants.isEmpty() ){
// bioentity_doc.addField("phylo_descendant_closure", pantherTreeAnnDescendants);
//}
}
// Something that we'll need for the annotation evidence aggregate later.
Map<String,SolrInputDocument> evAggDocMap = new HashMap<String,SolrInputDocument>();
// Annotation doc
for (GeneAnnotation a : gafDocument.getGeneAnnotations(e.getId())) {
SolrInputDocument annotation_doc = new SolrInputDocument();
String clsId = a.getCls();
String refId = a.getReferenceId();
// Annotation document base from static and previous bioentity.
annotation_doc.addField("document_category", "annotation"); // n/a
annotation_doc.addField("source", edb); // Col. 1 (from bioentity above)
annotation_doc.addField("bioentity", eid); // n/a, should be c1+c2.
annotation_doc.addField("bioentity_internal_id", edbid); // Col. 2 (from bioentity above)
annotation_doc.addField("bioentity_label", esym); // Col. 3 (from bioentity above)
String aqual = a.getCompositeQualifier();
annotation_doc.addField("qualifier", aqual); // Col. 4
annotation_doc.addField("annotation_class", clsId); // Col. 5
addLabelField(annotation_doc, "annotation_class_label", clsId); // n/a
annotation_doc.addField("reference", refId); // Col. 6
String a_ev_type = a.getEvidenceCls();
annotation_doc.addField("evidence_type", a_ev_type); // Col. 7
// NOTE: Col. 8 generation is below...
String a_aspect = a.getAspect();
annotation_doc.addField("aspect", a_aspect); // Col. 9
annotation_doc.addField("bioentity_name", ename); // Col. 10 (from bioentity above)
annotation_doc.addField("synonym", esynonyms); // Col. 11 (from bioentity above)
annotation_doc.addField("type", etype); // Col. 12 (from bioentity above)
annotation_doc.addField("taxon", etaxid); // Col. 13(?) (from bioentity above)
addLabelField(annotation_doc, "taxon_label", etaxid); // n/a
String adate = a.getLastUpdateDate();
annotation_doc.addField("date", adate); // Col. 14
String assgnb = a.getAssignedBy();
annotation_doc.addField("assigned_by", assgnb); // Col. 15
// NOTE: Col. generation is 16 below...
annotation_doc.addField("bioentity_isoform", a.getGeneProductForm()); // Col. 17
// Optionally, if there is enough taxon for a map, add the collections to the document.
if( jsonized_taxon_map != null ){
annotation_doc.addField("taxon_closure", taxIDClosure);
annotation_doc.addField("taxon_closure_label", taxLabelClosure);
annotation_doc.addField("taxon_closure_map", jsonized_taxon_map);
}
// Optionally, actually /add/ the PANTHER family data to the document.
if( ! pantherFamilyIDs.isEmpty() ){
annotation_doc.addField("family_tag", pantherFamilyIDs);
annotation_doc.addField("family_tag_label", pantherFamilyLabels);
}
// BUG/TODO: Make the ID /really/ unique - ask Chris
annotation_doc.addField("id", eid + "_:_" + clsId + "_:_" + a_ev_type + "_:_" + assgnb + "_:_" + etaxid + "_:_" + adate);
// Evidence type closure.
Set<OWLClass> ecoClasses = eco.getClassesForGoCode(a_ev_type);
Set<OWLClass> ecoSuper = eco.getAncestors(ecoClasses, true);
List<String> ecoIDClosure = new ArrayList<String>();
for( OWLClass es : ecoSuper ){
String itemID = es.toStringID();
ecoIDClosure.add(itemID);
}
addLabelFields(annotation_doc, "evidence_type_closure", ecoIDClosure);
// Drag in "with" (col 8).
//annotation_doc.addField("evidence_with", a.getWithExpression());
for (WithInfo wi : a.getWithInfos()) {
annotation_doc.addField("evidence_with", wi.getWithXref());
}
///
/// isa_partof_closure
///
OWLObject cls = graph.getOWLObjectByIdentifier(clsId);
// TODO: This may be a bug workaround, or it may be the way things are.
// getOWLObjectByIdentifier returns null on alt_ids, so skip them for now.
if( cls != null ){
// System.err.println(clsId);
// Add to annotation and bioentity isa_partof closures; label and id.
List<String> idClosure = graph.getIsaPartofIDClosure(cls);
List<String> labelClosure = graph.getIsaPartofLabelClosure(cls);
annotation_doc.addField("isa_partof_closure", idClosure);
annotation_doc.addField("isa_partof_closure_label", labelClosure);
for( String tlabel : labelClosure){
addFieldUnique(bioentity_doc, "isa_partof_closure_label", tlabel);
}
for( String tid : idClosure){
addFieldUnique(bioentity_doc, "isa_partof_closure", tid);
}
// Compile closure maps to JSON.
Map<String, String> isa_partof_map = graph.getIsaPartofClosureMap(cls);
if( ! isa_partof_map.isEmpty() ){
String jsonized_isa_partof_map = gson.toJson(isa_partof_map);
annotation_doc.addField("isa_partof_closure_map", jsonized_isa_partof_map);
}
// When we cycle, we'll also want to do some stuff to track all of the evidence codes we see.
List<String> aggEvIDClosure = new ArrayList<String>();
List<String> aggEvWiths = new ArrayList<String>();
// Cycle through and pick up all the associated bits for the terms in the closure.
SolrInputDocument ev_agg_doc = null;
for( String tid : idClosure ){
String tlabel = isa_partof_map.get(tid);
//OWLObject c = graph.getOWLObjectByIdentifier(tid);
// Only have to do the annotation evidence aggregate base once.
// Otherwise, just skip over and add the multi fields separately.
String evAggId = eid + "_:ev:_" + clsId;
if (evAggDocMap.containsKey(evAggId)) {
ev_agg_doc = evAggDocMap.get(evAggId);
} else {
ev_agg_doc = new SolrInputDocument();
evAggDocMap.put(evAggId, ev_agg_doc);
ev_agg_doc.addField("id", evAggId);
ev_agg_doc.addField("document_category", "annotation_evidence_aggregate");
ev_agg_doc.addField("bioentity", eid);
ev_agg_doc.addField("bioentity_label", esym);
ev_agg_doc.addField("annotation_class", tid);
ev_agg_doc.addField("annotation_class_label", tlabel);
ev_agg_doc.addField("taxon", etaxid);
addLabelField(ev_agg_doc, "taxon_label", etaxid);
// Optionally, if there is enough taxon for a map, add the collections to the document.
if( jsonized_taxon_map != null ){
ev_agg_doc.addField("taxon_closure", taxIDClosure);
ev_agg_doc.addField("taxon_closure_label", taxLabelClosure);
ev_agg_doc.addField("taxon_closure_map", jsonized_taxon_map);
}
// Optionally, actually /add/ the PANTHER family data to the document.
if( ! pantherFamilyIDs.isEmpty() ){
ev_agg_doc.addField("family_tag", pantherFamilyIDs);
ev_agg_doc.addField("family_tag_label", pantherFamilyLabels);
}
}
// Drag in "with" (col 8), this time for ev_agg.
for (WithInfo wi : a.getWithInfos()) {
aggEvWiths.add(wi.getWithXref());
}
// Make note for the evidence type closure.
aggEvIDClosure.add(a.getEvidenceCls());
}
// If there was actually a doc created/there, add the cumulative fields to it.
if( ev_agg_doc != null ){
addLabelFields(ev_agg_doc, "evidence_type_closure", aggEvIDClosure);
addLabelFields(ev_agg_doc, "evidence_with", aggEvWiths);
}
}
// Map<String,String> isa_partof_map = new HashMap<String,String>(); // capture labels/ids
// OWLObject c = graph.getOWLObjectByIdentifier(clsId);
// Set<OWLPropertyExpression> ps = Collections.singleton((OWLPropertyExpression)getPartOfProperty());
// Set<OWLObject> ancs = graph.getAncestors(c, ps);
// for (OWLObject t : ancs) {
// if (! (t instanceof OWLClass))
// continue;
// String tid = graph.getIdentifier(t);
// //System.out.println(edge+" TGT:"+tid);
// String tlabel = null;
// if (t != null)
// tlabel = graph.getLabel(t);
// annotation_doc.addField("isa_partof_closure", tid);
// addFieldUnique(bioentity_doc, "isa_partof_closure", tid);
// if (tlabel != null) {
// annotation_doc.addField("isa_partof_closure_label", tlabel);
// addFieldUnique(bioentity_doc, "isa_partof_closure_label", tlabel);
// // Map both ways.
// // TODO: collisions shouldn't be an issue here?
// isa_partof_map.put(tid, tlabel);
// isa_partof_map.put(tlabel, tid);
// }else{
// // For the time being at least, I want to ensure that the id and label closures
//				// mirror each other as much as possible (for facets and mapping, etc.). Without
// // this, in some cases there is simply nothing returned to drill on.
// annotation_doc.addField("isa_partof_closure_label", tid);
// addFieldUnique(bioentity_doc, "isa_partof_closure_label", tid);
// // Map just the one way I guess--see above.
// isa_partof_map.put(tid, tid);
// }
//
// // Annotation evidence aggregate base.
// String evAggId = eid + "_:ev:_" + clsId;
// SolrInputDocument ev_agg_doc;
// if (evAggDocMap.containsKey(evAggId)) {
// ev_agg_doc = evAggDocMap.get(evAggId);
// }
// else {
// ev_agg_doc = new SolrInputDocument();
// evAggDocMap.put(evAggId, ev_agg_doc);
// ev_agg_doc.addField("id", evAggId);
// ev_agg_doc.addField("document_category", "annotation_evidence_aggregate");
// ev_agg_doc.addField("bioentity", eid);
// ev_agg_doc.addField("bioentity_label", esym);
// ev_agg_doc.addField("annotation_class", tid);
// ev_agg_doc.addField("annotation_class_label", tlabel);
// ev_agg_doc.addField("taxon", taxId);
// addLabelField(ev_agg_doc, "taxon_label", taxId);
// }
//
// //evidence_type is single valued
// //aggDoc.addField("evidence_type", a.getEvidenceCls());
//
// // Drag in "with" (col 8), this time for ev_agg.
// for (WithInfo wi : a.getWithInfos()) {
// ev_agg_doc.addField("evidence_with", wi.getWithXref());
// }
//
// //aggDoc.getFieldValues(name)
// // TODO:
// ev_agg_doc.addField("evidence_type_closure", a.getEvidenceCls());
// }
// Column 16.
Map<String,String> ann_ext_map = new HashMap<String,String>(); // capture labels/ids
for (ExtensionExpression ee : a.getExtensionExpressions()) {
ee.getRelation(); // TODO
String eeid = ee.getCls();
OWLObject eObj = graph.getOWLObjectByIdentifier(eeid);
annotation_doc.addField("annotation_extension_class", eeid);
addLabelField(annotation_doc, "annotation_extension_class_label", eeid);
if (eObj != null) {
for (OWLGraphEdge edge : graph.getOutgoingEdgesClosureReflexive(eObj)) {
OWLObject t = edge.getTarget();
if (!(t instanceof OWLClass))
continue;
String annExtID = graph.getIdentifier(t);
String annExtLabel = graph.getLabel(edge.getTarget());
annotation_doc.addField("annotation_extension_class_closure", annExtID);
annotation_doc.addField("annotation_extension_class_closure_label", annExtLabel);
ann_ext_map.put(annExtID, annExtLabel);
ann_ext_map.put(annExtLabel, annExtID);
}
}
}
// Add annotation ext closure map to annotation doc.
if( ! ann_ext_map.isEmpty() ){
String jsonized_ann_ext_map = gson.toJson(ann_ext_map);
annotation_doc.addField("annotation_extension_class_closure_map", jsonized_ann_ext_map);
}
// Finally add doc.
add(annotation_doc);
}
add(bioentity_doc);
for (SolrInputDocument ev_agg_doc : evAggDocMap.values()) {
add(ev_agg_doc);
}
}
private void addFieldUnique(SolrInputDocument d, String field, String val) {
if (val == null)
return;
Collection<Object> vals = d.getFieldValues(field);
if (vals != null && vals.contains(val))
return;
d.addField(field, val);
}
private void addLabelField(SolrInputDocument d, String field, String id) {
OWLObject obj = graph.getOWLObjectByIdentifier(id);
if (obj == null)
return;
String label = graph.getLabel(obj);
if (label != null)
d.addField(field, label);
}
private void addLabelFields(SolrInputDocument d, String field, List<String> ids) {
List<String> labelAccumu = new ArrayList<String>();
for( String id : ids ){
OWLObject obj = graph.getOWLObjectByIdentifier(id);
if (obj != null){
String label = graph.getLabel(obj);
if (label != null){
labelAccumu.add(label);
}
}
}
if( ! labelAccumu.isEmpty() ){
d.addField(field, labelAccumu);
}
}
private Set<String> edgeToField(OWLGraphEdge edge) {
List<OWLQuantifiedProperty> qpl = edge.getQuantifiedPropertyList();
if (qpl.size() == 0) {
return Collections.singleton("isa_partof");
}
else if (qpl.size() == 1) {
return qpToFields(qpl.get(0));
}
else {
return Collections.EMPTY_SET;
}
}
private Set<String> qpToFields(OWLQuantifiedProperty qp) {
if (qp.isSubClassOf()) {
return Collections.singleton("isa_partof");
}
else {
// TODO
return Collections.singleton("isa_partof");
}
//return Collections.EMPTY_SET;
}
}
|
some rework to push reg closures into bio and ann
git-svn-id: f705032614e1ff11fed11a7e506afa6fa6966044@1146 18f1da76-1bb4-b526-5913-e828fe20442d
|
OWLTools-Solr/src/main/java/owltools/solrj/GafSolrDocumentLoader.java
|
some rework to push reg closures into bio and ann
|
<ide><path>WLTools-Solr/src/main/java/owltools/solrj/GafSolrDocumentLoader.java
<ide> import owltools.gaf.TaxonTools;
<ide> import owltools.gaf.WithInfo;
<ide> import owltools.graph.OWLGraphEdge;
<add>import owltools.graph.OWLGraphWrapper;
<ide> import owltools.graph.OWLQuantifiedProperty;
<ide> import owltools.panther.PANTHERForest;
<ide> import owltools.panther.PANTHERTree;
<ide> // Something that we'll need for the annotation evidence aggregate later.
<ide> Map<String,SolrInputDocument> evAggDocMap = new HashMap<String,SolrInputDocument>();
<ide>
<del> // Annotation doc
<add> // Annotation doc.
<add> // We'll also need to be collecting some aggregate information, like for the GP term closures, which will be
<add> // added at the end of this section.
<add> Map<String, String> isap_map = new HashMap<String, String>();
<add> Map<String, String> reg_map = new HashMap<String, String>();
<ide> for (GeneAnnotation a : gafDocument.getGeneAnnotations(e.getId())) {
<ide> SolrInputDocument annotation_doc = new SolrInputDocument();
<ide>
<ide> if( cls != null ){
<ide> // System.err.println(clsId);
<ide>
<del> // Add to annotation and bioentity isa_partof closures; label and id.
<del> List<String> idClosure = graph.getIsaPartofIDClosure(cls);
<del> List<String> labelClosure = graph.getIsaPartofLabelClosure(cls);
<del> annotation_doc.addField("isa_partof_closure", idClosure);
<del> annotation_doc.addField("isa_partof_closure_label", labelClosure);
<del> for( String tlabel : labelClosure){
<del> addFieldUnique(bioentity_doc, "isa_partof_closure_label", tlabel);
<del> }
<del> for( String tid : idClosure){
<del> addFieldUnique(bioentity_doc, "isa_partof_closure", tid);
<del> }
<del>
<del> // Compile closure maps to JSON.
<del> Map<String, String> isa_partof_map = graph.getIsaPartofClosureMap(cls);
<del> if( ! isa_partof_map.isEmpty() ){
<del> String jsonized_isa_partof_map = gson.toJson(isa_partof_map);
<del> annotation_doc.addField("isa_partof_closure_map", jsonized_isa_partof_map);
<del> }
<del>
<add> // Is-a part-of closures.
<add> ArrayList<String> isap = new ArrayList<String>();
<add> isap.add("BFO:0000050");
<add> Map<String, String> curr_isap_map = addClosureToAnnAndBio(isap, "isa_partof_closure", "isa_partof_closure_label", "isa_partof_closure_map",
<add> cls, graph, annotation_doc, bioentity_doc, gson);
<add> isap_map.putAll(curr_isap_map); // add to aggregate map
<add>
<add>// // Add to annotation and bioentity isa_partof closures; label and id.
<add>// List<String> idClosure = graph.getRelationIDClosure(cls, isap);
<add>// List<String> labelClosure = graph.getRelationLabelClosure(cls, isap);
<add>// annotation_doc.addField("isa_partof_closure", idClosure);
<add>// annotation_doc.addField("isa_partof_closure_label", labelClosure);
<add>// for( String tlabel : labelClosure){
<add>// addFieldUnique(bioentity_doc, "isa_partof_closure_label", tlabel);
<add>// }
<add>// for( String tid : idClosure){
<add>// addFieldUnique(bioentity_doc, "isa_partof_closure", tid);
<add>// }
<add>//
<add>// // Compile closure maps to JSON.
<add>// Map<String, String> isa_partof_map = graph.getRelationClosureMap(cls, isap);
<add>// if( ! isa_partof_map.isEmpty() ){
<add>// String jsonized_isa_partof_map = gson.toJson(isa_partof_map);
<add>// annotation_doc.addField("isa_partof_closure_map", jsonized_isa_partof_map);
<add>// }
<add>
<add> // Regulates closures.
<add> ArrayList<String> reg = new ArrayList<String>();
<add> reg.add("BFO:0000050");
<add> reg.add("RO:0002211");
<add> reg.add("RO:0002212");
<add> reg.add("RO:0002213");
<add> Map<String, String> curr_reg_map = addClosureToAnnAndBio(reg, "regulates_closure", "regulates_closure_label", "regulates_closure_map",
<add> cls, graph, annotation_doc, bioentity_doc, gson);
<add> reg_map.putAll(curr_reg_map); // add to aggregate map
<add>
<add> ///
<add> /// Next, work on the evidence aggregate...
<add> ///
<add>
<add>				// Bug/TODO: This is a bit of a slowdown since we're not reusing our work from above here anymore.
<add> List<String> idIsapClosure = graph.getRelationIDClosure(cls, isap);
<add> Map<String, String> isaPartofMap = graph.getRelationClosureMap(cls, isap);
<add>
<ide> // When we cycle, we'll also want to do some stuff to track all of the evidence codes we see.
<ide> List<String> aggEvIDClosure = new ArrayList<String>();
<ide> List<String> aggEvWiths = new ArrayList<String>();
<ide>
<ide> // Cycle through and pick up all the associated bits for the terms in the closure.
<ide> SolrInputDocument ev_agg_doc = null;
<del> for( String tid : idClosure ){
<del>
<del> String tlabel = isa_partof_map.get(tid);
<add> for( String tid : idIsapClosure ){
<add>
<add> String tlabel = isaPartofMap.get(tid);
<ide> //OWLObject c = graph.getOWLObjectByIdentifier(tid);
<ide>
<ide> // Only have to do the annotation evidence aggregate base once.
<ide> // Finally add doc.
<ide> add(annotation_doc);
<ide> }
<add>
<add> // Add the necessary aggregates to the bio doc. These cannot be done incrementally like the multi-valued closures
<add>			// since there can only be a single map.
<add> if( ! isap_map.isEmpty() ){
<add> String jsonized_cmap = gson.toJson(isap_map);
<add> bioentity_doc.addField("isa_partof_closure_map", jsonized_cmap);
<add> }
<add> if( ! reg_map.isEmpty() ){
<add> String jsonized_cmap = gson.toJson(reg_map);
<add> bioentity_doc.addField("regulates_closure_map", jsonized_cmap);
<add> }
<add>
<ide> add(bioentity_doc);
<ide>
<ide> for (SolrInputDocument ev_agg_doc : evAggDocMap.values()) {
<ide> }
<ide> }
<ide>
<del> private Set<String> edgeToField(OWLGraphEdge edge) {
<del> List<OWLQuantifiedProperty> qpl = edge.getQuantifiedPropertyList();
<del> if (qpl.size() == 0) {
<del> return Collections.singleton("isa_partof");
<del> }
<del> else if (qpl.size() == 1) {
<del> return qpToFields(qpl.get(0));
<del> }
<del> else {
<del> return Collections.EMPTY_SET;
<del> }
<del> }
<del>
<del> private Set<String> qpToFields(OWLQuantifiedProperty qp) {
<del> if (qp.isSubClassOf()) {
<del> return Collections.singleton("isa_partof");
<del> }
<del> else {
<del> // TODO
<del> return Collections.singleton("isa_partof");
<del> }
<del> //return Collections.EMPTY_SET;
<del> }
<del>
<del>
<del>
<del>
<del>
<add>// private Set<String> edgeToField(OWLGraphEdge edge) {
<add>// List<OWLQuantifiedProperty> qpl = edge.getQuantifiedPropertyList();
<add>// if (qpl.size() == 0) {
<add>// return Collections.singleton("isa_partof");
<add>// }
<add>// else if (qpl.size() == 1) {
<add>// return qpToFields(qpl.get(0));
<add>// }
<add>// else {
<add>// return Collections.EMPTY_SET;
<add>// }
<add>// }
<add>//
<add>// private Set<String> qpToFields(OWLQuantifiedProperty qp) {
<add>// if (qp.isSubClassOf()) {
<add>// return Collections.singleton("isa_partof");
<add>// }
<add>// else {
<add>// // TODO
<add>// return Collections.singleton("isa_partof");
<add>// }
<add>// //return Collections.EMPTY_SET;
<add>// }
<add>
<add> /*
<add> * Add specified closure of OWLObject to annotation and bioentity docs.
<add> */
<add> private Map<String, String> addClosureToAnnAndBio(ArrayList<String> relations, String closureName, String closureNameLabel, String closureMap,
<add> OWLObject cls, OWLGraphWrapper graph, SolrInputDocument ann_doc, SolrInputDocument bio_doc, Gson gson){
<add>
<add> // Add closures to doc; label and id.
<add> List<String> idClosure = graph.getRelationIDClosure(cls, relations);
<add> List<String> labelClosure = graph.getRelationLabelClosure(cls, relations);
<add> ann_doc.addField(closureName, idClosure);
<add> ann_doc.addField(closureNameLabel, labelClosure);
<add> for( String tid : idClosure){
<add> addFieldUnique(bio_doc, closureName, tid);
<add> }
<add> for( String tlabel : labelClosure){
<add> addFieldUnique(bio_doc, closureNameLabel, tlabel);
<add> }
<add>
<add> // Compile closure maps to JSON.
<add> Map<String, String> cmap = graph.getRelationClosureMap(cls, relations);
<add> if( ! cmap.isEmpty() ){
<add> String jsonized_cmap = gson.toJson(cmap);
<add> ann_doc.addField(closureMap, jsonized_cmap);
<add>			// NOTE: This is harder since we'd be adding multiple, so this is done on a collector variable elsewhere.
<add> //bio_doc.addField(closureMap, jsonized_cmap);
<add> }
<add>
<add>
<add> return cmap;
<add> }
<ide>
<ide> }
|
|
Java
|
mit
|
error: pathspec 'src/parser/Parser.java' did not match any file(s) known to git
|
a7bfc92c2f6cf441019009faa2232d133494f733
| 1 |
Giraudux/java-quel-bazar
|
package parser;
import java.io.BufferedReader;
import java.io.FileReader;
import java.util.Collection;
/**
* @author Alexis Giraudet
*/
public class Parser {
public static boolean parseLine(String fileName, Collection<String> c) {
        // Try-with-resources ensures the reader is closed even if reading fails.
        try (BufferedReader bufferedReader = new BufferedReader(new FileReader(fileName))) {
String line;
while ((line = bufferedReader.readLine()) != null) {
c.add(line);
}
} catch (Exception e) {
return false;
}
return true;
}
}
|
src/parser/Parser.java
|
add Parser class
|
src/parser/Parser.java
|
add Parser class
|
<ide><path>rc/parser/Parser.java
<add>package parser;
<add>
<add>import java.io.BufferedReader;
<add>import java.io.FileReader;
<add>import java.util.Collection;
<add>
<add>/**
<add> * @author Alexis Giraudet
<add> */
<add>public class Parser {
<add> public static boolean parseLine(String fileName, Collection<String> c) {
<add>        // Try-with-resources ensures the reader is closed even if reading fails.
<add>        try (BufferedReader bufferedReader = new BufferedReader(new FileReader(fileName))) {
<add> String line;
<add> while ((line = bufferedReader.readLine()) != null) {
<add> c.add(line);
<add> }
<add> } catch (Exception e) {
<add> return false;
<add> }
<add> return true;
<add> }
<add>}
|
|
Java
|
lgpl-2.1
|
b2b0000b3476577dfd49f9ae6c6b406999909d36
| 0 |
jimregan/languagetool,languagetool-org/languagetool,jimregan/languagetool,jimregan/languagetool,languagetool-org/languagetool,languagetool-org/languagetool,jimregan/languagetool,languagetool-org/languagetool,languagetool-org/languagetool,jimregan/languagetool
|
/* LanguageTool, a natural language style checker
* Copyright (C) 2005 Daniel Naber (http://www.danielnaber.de)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
* USA
*/
package org.languagetool.rules.de;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.ResourceBundle;
import java.util.Set;
import java.util.regex.Pattern;
import org.apache.commons.lang3.StringUtils;
import org.jetbrains.annotations.Nullable;
import org.languagetool.AnalyzedSentence;
import org.languagetool.AnalyzedToken;
import org.languagetool.AnalyzedTokenReadings;
import org.languagetool.JLanguageTool;
import org.languagetool.language.German;
import org.languagetool.rules.Categories;
import org.languagetool.rules.Example;
import org.languagetool.rules.Rule;
import org.languagetool.rules.RuleMatch;
import org.languagetool.rules.patterns.PatternToken;
import org.languagetool.rules.patterns.PatternTokenBuilder;
import org.languagetool.tagging.de.AnalyzedGermanToken;
import org.languagetool.tagging.de.GermanToken;
import org.languagetool.tagging.de.GermanToken.POSType;
import org.languagetool.tagging.disambiguation.rules.DisambiguationPatternRule;
/**
* Simple agreement checker for German noun phrases. Checks agreement in:
*
* <ul>
* <li>DET/PRO NOUN: e.g. "mein Auto", "der Mann", "die Frau" (correct), "die Haus" (incorrect)</li>
* <li>DET/PRO ADJ NOUN: e.g. "der riesige Tisch" (correct), "die riesigen Tisch" (incorrect)</li>
* </ul>
*
* Note that this rule only checks agreement inside the noun phrase, not whether
* e.g. the correct case is used. For example, "Es ist das Haus dem Mann" is not
* detected as incorrect.
*
* <p>TODO: the implementation could use a re-write that first detects the relevant noun phrases and then checks agreement
*
* @author Daniel Naber
*/
public class AgreementRule extends Rule {
private final German language;
private enum GrammarCategory {
KASUS("Kasus (Fall: Wer/Was, Wessen, Wem, Wen/Was - Beispiel: 'das Fahrrads' statt 'des Fahrrads')"),
GENUS("Genus (männlich, weiblich, sächlich - Beispiel: 'der Fahrrad' statt 'das Fahrrad')"),
NUMERUS("Numerus (Einzahl, Mehrzahl - Beispiel: 'das Fahrräder' statt 'die Fahrräder')");
private final String displayName;
GrammarCategory(String displayName) {
this.displayName = displayName;
}
}
private static final List<List<PatternToken>> ANTI_PATTERNS = Arrays.asList(
Arrays.asList( // "Wir bereinigen das nächsten Dienstag."
new PatternTokenBuilder().posRegex("VER:.*").build(),
new PatternTokenBuilder().token("das").build(),
new PatternTokenBuilder().tokenRegex("nächste[ns]?").build(),
new PatternTokenBuilder().tokenRegex("Montag|D(ien|onner)stag|Mittwoch|Freitag|S(ams|onn)tag|Woche|Monat|Jahr").build()
),
Arrays.asList(
new PatternTokenBuilder().tokenRegex("(?i:ist|war)").build(),
new PatternTokenBuilder().token("das").build(),
new PatternTokenBuilder().token("Zufall").build()
),
Arrays.asList( // "So hatte das Vorteile|Auswirkungen|Konsequenzen..."
new PatternTokenBuilder().tokenRegex("(?i:hat(te)?)").build(),
new PatternTokenBuilder().token("das").build()
),
Arrays.asList(
new PatternTokenBuilder().tokenRegex("von|bei").build(),
new PatternTokenBuilder().tokenRegex("(vielen|allen)").build(),
new PatternTokenBuilder().posRegex("PA2:.*|ADJ:AKK:PLU:.*").build() // "ein von vielen bewundertes Haus" / "Das weckte bei vielen ungute Erinnerungen."
),
Arrays.asList(
new PatternTokenBuilder().token("für").build(),
new PatternTokenBuilder().tokenRegex("(viele|alle|[dm]ich|ihn|sie|uns)").build(),
new PatternTokenBuilder().posRegex("ADJ:AKK:.*").build() // "Ein für viele wichtiges Anliegen."
),
Arrays.asList(
new PatternTokenBuilder().csToken("machen").matchInflectedForms().build(),
new PatternTokenBuilder().token("einem").build(),
new PatternTokenBuilder().token("Angst").build() // "Dinge, die/ Etwas, das einem Angst macht"
),
Arrays.asList(
new PatternTokenBuilder().token("einem").build(),
new PatternTokenBuilder().token("Angst").build(), // "Was einem Angst macht"
new PatternTokenBuilder().tokenRegex("machen|einjagen").matchInflectedForms().build()
),
Arrays.asList(
new PatternTokenBuilder().token("einem").build(),
new PatternTokenBuilder().token("geschenkten").build(),
new PatternTokenBuilder().token("Gaul").build()
),
Arrays.asList(
new PatternTokenBuilder().token("einer").build(),
new PatternTokenBuilder().token("jeden").build(),
new PatternTokenBuilder().posRegex("SUB:GEN:.*").build() // "Kern einer jeden Tragödie..."
),
Arrays.asList(
new PatternTokenBuilder().token("kein").build(),
new PatternTokenBuilder().token("schöner").build(),
new PatternTokenBuilder().token("Land").build() // https://de.wikipedia.org/wiki/Kein_sch%C3%B6ner_Land
),
Arrays.asList(
new PatternTokenBuilder().pos(JLanguageTool.SENTENCE_START_TAGNAME).build(),
new PatternTokenBuilder().tokenRegex("Ist|Sind|Macht|Wird").build(),
new PatternTokenBuilder().token("das").build(),
new PatternTokenBuilder().posRegex("SUB:.*").build(),
new PatternTokenBuilder().posRegex("PKT|KON:NEB|ZUS").build()// "Ist das Kunst?" / "Ist das Kunst oder Abfall?" / "Sind das Eier aus Bodenhaltung"
),
Arrays.asList(
new PatternTokenBuilder().pos(JLanguageTool.SENTENCE_START_TAGNAME).build(),
new PatternTokenBuilder().tokenRegex("Meist(ens)?|Oft(mals)?|Häufig|Selten").build(),
new PatternTokenBuilder().tokenRegex("sind|waren|ist").build(),
new PatternTokenBuilder().token("das").build(),
new PatternTokenBuilder().posRegex("SUB:.*").build() // Meistens sind das Frauen, die damit besser umgehen können.
),
Arrays.asList(
new PatternTokenBuilder().token("des").build(),
new PatternTokenBuilder().token("Lied").build(),
new PatternTokenBuilder().token("ich").build()// Wes Brot ich ess, des Lied ich sing
),
Arrays.asList(
new PatternTokenBuilder().pos(JLanguageTool.SENTENCE_START_TAGNAME).build(),
new PatternTokenBuilder().tokenRegex("D(a|ie)s").build(),
new PatternTokenBuilder().posRegex("VER:[123]:.*").build(),
new PatternTokenBuilder().posRegex("SUB:NOM:.*").build()// "Das erfordert Können und..." / "Dies bestätigte Polizeimeister Huber"
),
Arrays.asList(
new PatternTokenBuilder().posRegex("ART:.*").build(), // "Das wenige Kilometer breite Tal"
new PatternTokenBuilder().posRegex("ADJ:.*").build(),
new PatternTokenBuilder().tokenRegex("(Kilo|Zenti|Milli)?meter|Jahre|Monate|Wochen|Tage|Stunden|Minuten|Sekunden").build()
),
Arrays.asList(
new PatternTokenBuilder().token("Van").build(), // https://de.wikipedia.org/wiki/Alexander_Van_der_Bellen
new PatternTokenBuilder().token("der").build(),
new PatternTokenBuilder().token("Bellen").build()
),
Arrays.asList(
new PatternTokenBuilder().token("mehrere").build(), // "mehrere Verwundete" http://forum.languagetool.org/t/de-false-positives-and-false-false/1516
new PatternTokenBuilder().pos("SUB:NOM:SIN:FEM:ADJ").build()
),
Arrays.asList(
new PatternTokenBuilder().token("allen").build(),
new PatternTokenBuilder().tokenRegex("Besitz|Mut").build()
),
Arrays.asList(
new PatternTokenBuilder().tokenRegex("d(ie|en)|[md]einen?").build(),
new PatternTokenBuilder().token("Top").build(),
new PatternTokenBuilder().tokenRegex("\\d+").build()
),
Arrays.asList( //"Unter diesen rief das großen Unmut hervor."
new PatternTokenBuilder().posRegex("VER:3:SIN:.*").build(),
new PatternTokenBuilder().token("das").build(),
new PatternTokenBuilder().posRegex("ADJ:AKK:.*").build(),
new PatternTokenBuilder().posRegex("SUB:AKK:.*").build(),
new PatternTokenBuilder().pos("ZUS").build(),
new PatternTokenBuilder().pos(JLanguageTool.SENTENCE_END_TAGNAME).build()
),
Arrays.asList( // "Bei mir löste das Panik aus."
new PatternTokenBuilder().posRegex("VER:3:SIN:.*").build(),
new PatternTokenBuilder().token("das").build(),
new PatternTokenBuilder().posRegex("SUB:AKK:.*").build(),
new PatternTokenBuilder().pos("ZUS").build(),
new PatternTokenBuilder().pos(JLanguageTool.SENTENCE_END_TAGNAME).build()
),
Arrays.asList(
new PatternTokenBuilder().token("Außenring").build(),
new PatternTokenBuilder().token("Autobahn").build()
),
Arrays.asList( // "Ehre, wem Ehre gebührt"
new PatternTokenBuilder().tokenRegex("[dw]em").build(),
new PatternTokenBuilder().csToken("Ehre").build(),
new PatternTokenBuilder().csToken("gebührt").build()
),
Arrays.asList(
new PatternTokenBuilder().token("Eurovision").build(),
new PatternTokenBuilder().token("Song").build(),
new PatternTokenBuilder().token("Contest").build()
),
Arrays.asList( // "Das Holocaust Memorial Museum."
new PatternTokenBuilder().posRegex("ART:.*").build(),
new PatternTokenBuilder().posRegex("SUB:.*").build(),
new PatternTokenBuilder().pos("UNKNOWN").build()
),
Arrays.asList( // "Er fragte, ob das Spaß macht."
new PatternTokenBuilder().csToken(",").build(),
new PatternTokenBuilder().posRegex("KON:UNT|ADV:INR").build(),
new PatternTokenBuilder().csToken("das").build(),
new PatternTokenBuilder().posRegex("SUB:.*").build(),
new PatternTokenBuilder().posRegex("VER:3:SIN.*").build()
),
Arrays.asList( // "Es gibt viele solcher Bilder"
new PatternTokenBuilder().tokenRegex("viele|wenige|einige|mehrere").build(),
new PatternTokenBuilder().csToken("solcher").build(),
new PatternTokenBuilder().posRegex("SUB:GEN:PLU:.*").build()
),
Arrays.asList( // "der französischen First Lady"
new PatternTokenBuilder().tokenRegex("[dD](ie|er)").build(),
new PatternTokenBuilder().csToken("First").build(),
new PatternTokenBuilder().csToken("Lady").build()
),
Arrays.asList( // "der französischen First Lady"
new PatternTokenBuilder().tokenRegex("[dD](ie|er)").build(),
new PatternTokenBuilder().posRegex("ADJ:.*").build(),
new PatternTokenBuilder().csToken("First").build(),
new PatternTokenBuilder().csToken("Lady").build()
),
Arrays.asList(
new PatternTokenBuilder().tokenRegex("[dD]e[rn]").build(),
new PatternTokenBuilder().csToken("Gold").build(),
new PatternTokenBuilder().csToken("Cup").build()
),
Arrays.asList(
new PatternTokenBuilder().token("das").build(),
new PatternTokenBuilder().tokenRegex("viele|wenige").build(),
new PatternTokenBuilder().posRegex("SUB:.*").build()
),
Arrays.asList( // "Er verspricht allen/niemandem/jedem hohe Gewinne."
new PatternTokenBuilder().tokenRegex("allen|(nieman|je(man)?)dem").build(),
new PatternTokenBuilder().posRegex("ADJ:AKK:PLU:.*").build(),
new PatternTokenBuilder().posRegex("SUB:AKK:PLU:.*").build()
),
Arrays.asList( // "Er verspricht allen/niemandem/jedem Gewinne von über 15 Prozent."
new PatternTokenBuilder().tokenRegex("allen|(nieman|je(man)?)dem").build(),
new PatternTokenBuilder().posRegex("SUB:AKK:PLU:.*").build()
),
Arrays.asList( // "Für ihn ist das Alltag." / "Für die Religiösen ist das Blasphemie."
new PatternTokenBuilder().token("für").setSkip(2).build(),
new PatternTokenBuilder().tokenRegex("ist|war").build(),
new PatternTokenBuilder().csToken("das").build(),
new PatternTokenBuilder().posRegex("SUB:NOM:.*").build(),
new PatternTokenBuilder().pos("PKT").build()
),
Arrays.asList( // "Sie sagte, dass das Rache bedeuten würden"
new PatternTokenBuilder().pos("KON:UNT").build(),
new PatternTokenBuilder().csToken("das").build(),
new PatternTokenBuilder().posRegex("SUB:.+").build(),
new PatternTokenBuilder().tokenRegex("bedeuten|sein").matchInflectedForms().build()
),
Arrays.asList( // "Karl sagte, dass sie niemandem Bescheid gegeben habe."
new PatternTokenBuilder().token("niemand").matchInflectedForms().build(),
new PatternTokenBuilder().posRegex("SUB:.+").build()
),
Arrays.asList(
new PatternTokenBuilder().token("alles").build(),
new PatternTokenBuilder().csToken("Walzer").build()
),
Arrays.asList( // "ei der Daus"
new PatternTokenBuilder().csToken("der").build(),
new PatternTokenBuilder().csToken("Daus").build()
),
Arrays.asList(
new PatternTokenBuilder().csToken("dem").build(),
new PatternTokenBuilder().csToken("Achtung").setSkip(1).build(),
new PatternTokenBuilder().csToken("schenken").matchInflectedForms().build()
),
Arrays.asList(
new PatternTokenBuilder().csToken("schenken").matchInflectedForms().build(),
new PatternTokenBuilder().csToken("dem").build(),
new PatternTokenBuilder().csToken("Achtung").build()
)
);
private static final Set<String> MODIFIERS = new HashSet<>(Arrays.asList(
"besonders",
"fast",
"geradezu",
"sehr",
"überaus",
"ziemlich"
));
private static final Set<String> VIELE_WENIGE_LOWERCASE = new HashSet<>(Arrays.asList(
"viele",
"vieler",
"wenige",
"weniger",
"einige",
"einiger",
"mehrerer",
"mehrere"
));
private static final String[] REL_PRONOUN_LEMMAS = {"der", "welch"};
private static final Pattern UNITS = Pattern.compile(".*([gG]ramm|[mM]eter)");
private static final Set<String> PRONOUNS_TO_BE_IGNORED = new HashSet<>(Arrays.asList(
"ich",
"dir",
"du",
"er", "sie", "es",
"wir",
"mir",
"uns",
"ihnen",
"euch",
"ihm",
"ihr",
"ihn",
"dessen",
"deren",
"denen",
"sich",
"unser",
"aller",
"man",
"beide",
"beiden",
"beider",
"wessen",
"a",
"alle",
"etwas",
"irgendetwas",
"was",
"wer",
"jenen", // "...und mit jenen anderer Arbeitsgruppen verwoben"
"diejenigen",
"jemand", "jemandes",
"niemand", "niemandes"
));
private static final Set<String> NOUNS_TO_BE_IGNORED = new HashSet<>(Arrays.asList(
"Prozent", // Plural "Prozente", trotzdem ist "mehrere Prozent" korrekt
"Gramm",
"Kilogramm",
"Uhr" // "um ein Uhr"
));
public AgreementRule(ResourceBundle messages, German language) {
this.language = language;
super.setCategory(Categories.GRAMMAR.getCategory(messages));
addExamplePair(Example.wrong("<marker>Der Haus</marker> wurde letztes Jahr gebaut."),
Example.fixed("<marker>Das Haus</marker> wurde letztes Jahr gebaut."));
}
@Override
public String getId() {
return "DE_AGREEMENT";
}
@Override
public String getDescription() {
return "Kongruenz von Nominalphrasen (unvollständig!), z.B. 'mein kleiner(kleines) Haus'";
}
@Override
public RuleMatch[] match(AnalyzedSentence sentence) {
List<RuleMatch> ruleMatches = new ArrayList<>();
AnalyzedTokenReadings[] tokens = getSentenceWithImmunization(sentence).getTokensWithoutWhitespace();
for (int i = 0; i < tokens.length; i++) {
//defaulting to the first reading
//TODO: check for all readings
String posToken = tokens[i].getAnalyzedToken(0).getPOSTag();
if (posToken != null && posToken.equals(JLanguageTool.SENTENCE_START_TAGNAME)) {
continue;
}
if (tokens[i].isImmunized()) {
continue;
}
AnalyzedTokenReadings tokenReadings = tokens[i];
boolean relevantPronoun = isRelevantPronoun(tokens, i);
boolean ignore = couldBeRelativeOrDependentClause(tokens, i);
if (i > 0) {
String prevToken = tokens[i-1].getToken().toLowerCase();
if (StringUtils.equalsAny(tokens[i].getToken(), "eine", "einen")
&& StringUtils.equalsAny(prevToken, "der", "die", "das", "des", "dieses")) {
// TODO: "der eine Polizist" -> nicht ignorieren, sondern "der polizist" checken; "auf der einen Seite"
ignore = true;
}
}
// avoid false alarm on "nichts Gutes" and "alles Gute"
if (StringUtils.equalsAny(tokenReadings.getToken(), "nichts", "alles", "dies")) {
ignore = true;
}
// avoid false alarm on "Art. 1" and "bisherigen Art. 1" (Art. = Artikel):
boolean detAbbrev = i < tokens.length-2 && tokens[i+1].getToken().equals("Art") && tokens[i+2].getToken().equals(".");
boolean detAdjAbbrev = i < tokens.length-3 && tokens[i+2].getToken().equals("Art") && tokens[i+3].getToken().equals(".");
// "einen Hochwasser führenden Fluss", "die Gott zugeschriebenen Eigenschaften":
boolean followingParticiple = i < tokens.length-3 && (tokens[i+2].hasPartialPosTag("PA1") || tokens[i+2].getToken().matches("zugeschriebenen?|genannten?"));
if (detAbbrev || detAdjAbbrev || followingParticiple) {
ignore = true;
}
if ((GermanHelper.hasReadingOfType(tokenReadings, POSType.DETERMINER) || relevantPronoun) && !ignore) {
int tokenPosAfterModifier = getPosAfterModifier(i+1, tokens);
int tokenPos = tokenPosAfterModifier;
if (tokenPos >= tokens.length) {
break;
}
AnalyzedTokenReadings nextToken = tokens[tokenPos];
if (isNonPredicativeAdjective(nextToken) || isParticiple(nextToken)) {
tokenPos = tokenPosAfterModifier + 1;
if (tokenPos >= tokens.length) {
break;
}
if (GermanHelper.hasReadingOfType(tokens[tokenPos], POSType.NOMEN)) {
// TODO: add a case (checkAdjNounAgreement) for special cases like "deren",
// e.g. "deren komisches Geschenke" isn't yet detected as incorrect
if (i >= 2 && GermanHelper.hasReadingOfType(tokens[i-2], POSType.ADJEKTIV)
&& "als".equals(tokens[i-1].getToken())
&& "das".equals(tokens[i].getToken())) {
// avoid false alarm for e.g. "weniger farbenprächtig als das anderer Papageien"
continue;
}
RuleMatch ruleMatch = checkDetAdjNounAgreement(tokens[i],
nextToken, tokens[tokenPos], sentence);
if (ruleMatch != null) {
ruleMatches.add(ruleMatch);
}
}
} else if (GermanHelper.hasReadingOfType(nextToken, POSType.NOMEN) && !"Herr".equals(nextToken.getToken())) {
RuleMatch ruleMatch = checkDetNounAgreement(tokens[i], nextToken, sentence);
if (ruleMatch != null) {
ruleMatches.add(ruleMatch);
}
}
}
} // for each token
return toRuleMatchArray(ruleMatches);
}
/**
* Search for modifiers (such as "sehr", "1,4 Meter") which can expand a
* determiner - adjective - noun group ("ein hohes Haus" -> "ein sehr hohes Haus",
* "ein 500 Meter hohes Haus") and return the index of the first non-modifier token ("Haus")
* @param startAt index of array where to start searching for modifier
* @return index of first non-modifier token
*/
private int getPosAfterModifier(int startAt, AnalyzedTokenReadings[] tokens) {
if ((startAt + 1) < tokens.length && MODIFIERS.contains(tokens[startAt].getToken())) {
startAt++;
}
if ((startAt + 1) < tokens.length && (StringUtils.isNumeric(tokens[startAt].getToken()) || tokens[startAt].hasPosTag("ZAL"))) {
int posAfterModifier = startAt + 1;
if ((startAt + 3) < tokens.length && ",".equals(tokens[startAt+1].getToken()) && StringUtils.isNumeric(tokens[startAt+2].getToken())) {
posAfterModifier = startAt + 3;
}
if (UNITS.matcher(tokens[posAfterModifier].getToken()).matches()) {
return posAfterModifier + 1;
}
}
return startAt;
}
@Override
public List<DisambiguationPatternRule> getAntiPatterns() {
return makeAntiPatterns(ANTI_PATTERNS, language);
}
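  // True if any reading of the token is an attributive adjective (an ADJ tag without the PRD marker).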
private boolean isNonPredicativeAdjective(AnalyzedTokenReadings tokensReadings) {
for (AnalyzedToken reading : tokensReadings.getReadings()) {
String posTag = reading.getPOSTag();
if (posTag != null && posTag.startsWith("ADJ:") && !posTag.contains("PRD")) {
return true;
}
}
return false;
}
private boolean isParticiple(AnalyzedTokenReadings tokensReadings) {
return tokensReadings.hasPartialPosTag("PA1") || tokensReadings.hasPartialPosTag("PA2");
}
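  // A pronoun only triggers a check if it is not in the ignore list and not part of "vor allem".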
private boolean isRelevantPronoun(AnalyzedTokenReadings[] tokens, int pos) {
AnalyzedTokenReadings analyzedToken = tokens[pos];
boolean relevantPronoun = GermanHelper.hasReadingOfType(analyzedToken, POSType.PRONOMEN);
// avoid false alarms:
String token = tokens[pos].getToken();
if (PRONOUNS_TO_BE_IGNORED.contains(token.toLowerCase()) ||
(pos > 0 && tokens[pos-1].getToken().equalsIgnoreCase("vor") && token.equalsIgnoreCase("allem"))) {
relevantPronoun = false;
}
return relevantPronoun;
}
// TODO: improve this so it only returns true for real relative clauses
private boolean couldBeRelativeOrDependentClause(AnalyzedTokenReadings[] tokens, int pos) {
boolean comma;
boolean relPronoun;
if (pos >= 1) {
// avoid false alarm: "Das Wahlrecht, das Frauen zugesprochen bekamen." etc:
comma = tokens[pos-1].getToken().equals(",");
relPronoun = comma && tokens[pos].hasAnyLemma(REL_PRONOUN_LEMMAS);
if (relPronoun && pos+3 < tokens.length) {
return true;
}
}
if (pos >= 2) {
// avoid false alarm: "Der Mann, in dem quadratische Fische schwammen."
// or: "Die Polizei erwischte die Diebin, weil diese Ausweis und Visitenkarte hinterließ."
comma = tokens[pos-2].getToken().equals(",");
if(comma) {
boolean prep = tokens[pos-1].hasPosTagStartingWith("PRP:");
relPronoun = tokens[pos].hasAnyLemma(REL_PRONOUN_LEMMAS);
return prep && relPronoun || (tokens[pos-1].hasPosTag("KON:UNT") && (tokens[pos].hasLemma("jen") || tokens[pos].hasLemma("dies")));
}
}
return false;
}
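  // Check determiner + noun agreement by intersecting the readings of both tokens; returns null when no error is found.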
@Nullable
private RuleMatch checkDetNounAgreement(AnalyzedTokenReadings token1,
AnalyzedTokenReadings token2, AnalyzedSentence sentence) {
// TODO: remove "-".equals(token2.getToken()) after the bug fix
// see Daniel's comment from 20.12.2016 at https://github.com/languagetool-org/languagetool/issues/635
if (token2.isImmunized() || NOUNS_TO_BE_IGNORED.contains(token2.getToken()) || "-".equals(token2.getToken())) {
return null;
}
Set<String> set1 = null;
if (token1.getReadings().size() == 1 &&
token1.getReadings().get(0).getPOSTag() != null &&
token1.getReadings().get(0).getPOSTag().endsWith(":STV")) {
// catch the error in "Meiner Chef raucht."
set1 = Collections.emptySet();
} else {
set1 = getAgreementCategories(token1);
}
if (set1 == null) {
return null; // word not known, assume it's correct
}
Set<String> set2 = getAgreementCategories(token2);
if (set2 == null) {
return null;
}
set1.retainAll(set2);
RuleMatch ruleMatch = null;
if (set1.isEmpty() && !isException(token1, token2)) {
List<String> errorCategories = getCategoriesCausingError(token1, token2);
String errorDetails = errorCategories.isEmpty() ?
"Kasus, Genus oder Numerus" : String.join(" und ", errorCategories);
String msg = "Möglicherweise fehlende grammatische Übereinstimmung zwischen Artikel und Nomen " +
"bezüglich " + errorDetails + ".";
String shortMsg = "Möglicherweise keine Übereinstimmung bezüglich " + errorDetails;
ruleMatch = new RuleMatch(this, sentence, token1.getStartPos(),
token2.getEndPos(), msg, shortMsg);
/*try {
// this will not give a match for compounds that are not in the dictionary...
ruleMatch.setUrl(new URL("https://www.korrekturen.de/flexion/deklination/" + token2.getToken() + "/"));
} catch (MalformedURLException e) {
throw new RuntimeException(e);
}*/
AgreementSuggestor suggestor = new AgreementSuggestor(language.getSynthesizer(), token1, token2);
List<String> suggestions = suggestor.getSuggestions();
ruleMatch.setSuggestedReplacements(suggestions);
}
return ruleMatch;
}
private boolean isException(AnalyzedTokenReadings token1, AnalyzedTokenReadings token2) {
return "allen".equals(token1.getToken()) && "Grund".equals(token2.getToken());
}
private List<String> getCategoriesCausingError(AnalyzedTokenReadings token1, AnalyzedTokenReadings token2) {
List<String> categories = new ArrayList<>();
List<GrammarCategory> categoriesToCheck = Arrays.asList(GrammarCategory.KASUS, GrammarCategory.GENUS, GrammarCategory.NUMERUS);
for (GrammarCategory category : categoriesToCheck) {
if (agreementWithCategoryRelaxation(token1, token2, category)) {
categories.add(category.displayName);
}
}
return categories;
}
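  // Check determiner + adjective + noun agreement by intersecting the readings of all three tokens.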
private RuleMatch checkDetAdjNounAgreement(AnalyzedTokenReadings token1,
AnalyzedTokenReadings token2, AnalyzedTokenReadings token3, AnalyzedSentence sentence) {
// TODO: remove (token3 == null || token3.getToken().length() < 2)
// see Daniel's comment from 20.12.2016 at https://github.com/languagetool-org/languagetool/issues/635
if(token3 == null || token3.getToken().length() < 2) {
return null;
}
Set<String> set = retainCommonCategories(token1, token2, token3);
RuleMatch ruleMatch = null;
if (set == null || set.isEmpty()) {
// TODO: more detailed error message:
String msg = "Möglicherweise fehlende grammatische Übereinstimmung zwischen Artikel, Adjektiv und " +
"Nomen bezüglich Kasus, Numerus oder Genus. Beispiel: 'mein kleiner Haus' " +
"statt 'mein kleines Haus'";
String shortMsg = "Möglicherweise keine Übereinstimmung bezüglich Kasus, Numerus oder Genus";
ruleMatch = new RuleMatch(this, sentence, token1.getStartPos(), token3.getEndPos(), msg, shortMsg);
}
return ruleMatch;
}
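  // True if the two tokens still agree once the given grammatical category is ignored; unknown words count as agreeing.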
private boolean agreementWithCategoryRelaxation(AnalyzedTokenReadings token1,
AnalyzedTokenReadings token2, GrammarCategory categoryToRelax) {
Set<GrammarCategory> categoryToRelaxSet;
if (categoryToRelax != null) {
categoryToRelaxSet = Collections.singleton(categoryToRelax);
} else {
categoryToRelaxSet = Collections.emptySet();
}
Set<String> set1 = getAgreementCategories(token1, categoryToRelaxSet, true);
if (set1 == null) {
return true; // word not known, assume it's correct
}
Set<String> set2 = getAgreementCategories(token2, categoryToRelaxSet, true);
if (set2 == null) {
return true;
}
set1.retainAll(set2);
return set1.size() > 0;
}
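  // Intersect the agreement categories of determiner, adjective and noun; returns null if any of the words is unknown.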
@Nullable
private Set<String> retainCommonCategories(AnalyzedTokenReadings token1,
AnalyzedTokenReadings token2, AnalyzedTokenReadings token3) {
Set<GrammarCategory> categoryToRelaxSet = Collections.emptySet();
Set<String> set1 = getAgreementCategories(token1, categoryToRelaxSet, true);
if (set1 == null) {
return null; // word not known, assume it's correct
}
boolean skipSol = !VIELE_WENIGE_LOWERCASE.contains(token1.getToken().toLowerCase());
Set<String> set2 = getAgreementCategories(token2, categoryToRelaxSet, skipSol);
if (set2 == null) {
return null;
}
Set<String> set3 = getAgreementCategories(token3, categoryToRelaxSet, true);
if (set3 == null) {
return null;
}
set1.retainAll(set2);
set1.retainAll(set3);
return set1;
}
private Set<String> getAgreementCategories(AnalyzedTokenReadings aToken) {
return getAgreementCategories(aToken, new HashSet<>(), false);
}
/** Return Kasus, Numerus, Genus of those forms with a determiner. */
private Set<String> getAgreementCategories(AnalyzedTokenReadings aToken, Set<GrammarCategory> omit, boolean skipSol) {
Set<String> set = new HashSet<>();
List<AnalyzedToken> readings = aToken.getReadings();
for (AnalyzedToken tmpReading : readings) {
if (skipSol && tmpReading.getPOSTag() != null && tmpReading.getPOSTag().endsWith(":SOL")) {
// SOL = alleinstehend - needs to be skipped so we find errors like "An der roter Ampel."
continue;
}
AnalyzedGermanToken reading = new AnalyzedGermanToken(tmpReading);
if (reading.getCasus() == null && reading.getNumerus() == null &&
reading.getGenus() == null) {
continue;
}
if (reading.getGenus() == GermanToken.Genus.ALLGEMEIN &&
tmpReading.getPOSTag() != null && !tmpReading.getPOSTag().endsWith(":STV") && // STV: stellvertretend (!= begleitend)
!possessiveSpecialCase(aToken, tmpReading)) {
// genus=ALG in the original data. Not sure if this is allowed, but expand this so
// e.g. "Ich Arbeiter" doesn't get flagged as incorrect:
if (reading.getDetermination() == null) {
// Nouns don't have the determination property (definite/indefinite), and as we don't want to
// introduce a special case for that, we just pretend they always fulfill both properties:
set.add(makeString(reading.getCasus(), reading.getNumerus(), GermanToken.Genus.MASKULINUM, GermanToken.Determination.DEFINITE, omit));
set.add(makeString(reading.getCasus(), reading.getNumerus(), GermanToken.Genus.MASKULINUM, GermanToken.Determination.INDEFINITE, omit));
set.add(makeString(reading.getCasus(), reading.getNumerus(), GermanToken.Genus.FEMININUM, GermanToken.Determination.DEFINITE, omit));
set.add(makeString(reading.getCasus(), reading.getNumerus(), GermanToken.Genus.FEMININUM, GermanToken.Determination.INDEFINITE, omit));
set.add(makeString(reading.getCasus(), reading.getNumerus(), GermanToken.Genus.NEUTRUM, GermanToken.Determination.DEFINITE, omit));
set.add(makeString(reading.getCasus(), reading.getNumerus(), GermanToken.Genus.NEUTRUM, GermanToken.Determination.INDEFINITE, omit));
} else {
set.add(makeString(reading.getCasus(), reading.getNumerus(), GermanToken.Genus.MASKULINUM, reading.getDetermination(), omit));
set.add(makeString(reading.getCasus(), reading.getNumerus(), GermanToken.Genus.FEMININUM, reading.getDetermination(), omit));
set.add(makeString(reading.getCasus(), reading.getNumerus(), GermanToken.Genus.NEUTRUM, reading.getDetermination(), omit));
}
} else {
if (reading.getDetermination() == null || "jed".equals(tmpReading.getLemma()) || "manch".equals(tmpReading.getLemma())) { // "jeder" etc. needs a special case to avoid false alarm
set.add(makeString(reading.getCasus(), reading.getNumerus(), reading.getGenus(), GermanToken.Determination.DEFINITE, omit));
set.add(makeString(reading.getCasus(), reading.getNumerus(), reading.getGenus(), GermanToken.Determination.INDEFINITE, omit));
} else {
set.add(makeString(reading.getCasus(), reading.getNumerus(), reading.getGenus(), reading.getDetermination(), omit));
}
}
}
return set;
}
private boolean possessiveSpecialCase(AnalyzedTokenReadings aToken, AnalyzedToken tmpReading) {
// Without this special case, the 'ALG' readings of these possessive pronouns would be expanded to all genders and errors like in "Der Zustand meiner Gehirns." would be missed.
return aToken.hasPartialPosTag("PRO:POS") && ("ich".equals(tmpReading.getLemma()) || "sich".equals(tmpReading.getLemma()));
}
private String makeString(GermanToken.Kasus casus, GermanToken.Numerus num, GermanToken.Genus gen,
GermanToken.Determination determination, Set<GrammarCategory> omit) {
List<String> l = new ArrayList<>();
if (casus != null && !omit.contains(GrammarCategory.KASUS)) {
l.add(casus.toString());
}
if (num != null && !omit.contains(GrammarCategory.NUMERUS)) {
l.add(num.toString());
}
if (gen != null && !omit.contains(GrammarCategory.GENUS)) {
l.add(gen.toString());
}
if (determination != null) {
l.add(determination.toString());
}
return String.join("/", l);
}
}
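// Illustrative sketch only (not part of LanguageTool): a minimal, standalone demonstration of the
// set-intersection idea used by checkDetNounAgreement() and getAgreementCategories() above. Each
// token contributes the set of Kasus/Numerus/Genus combinations it can express, and a rule match
// is only produced when the intersection of those sets is empty. The category strings below are
// hand-picked assumptions for the example, not output of the real tagger.
class AgreementIntersectionSketch {
  public static void main(String[] args) {
    // "das" (NOM or AKK, singular, neuter) vs. "Haus" (NOM, DAT or AKK, singular, neuter):
    java.util.Set<String> das = new java.util.HashSet<>(java.util.Arrays.asList("NOM/SIN/NEU", "AKK/SIN/NEU"));
    java.util.Set<String> haus = new java.util.HashSet<>(java.util.Arrays.asList("NOM/SIN/NEU", "DAT/SIN/NEU", "AKK/SIN/NEU"));
    java.util.Set<String> common = new java.util.HashSet<>(das);
    common.retainAll(haus);
    System.out.println("'das Haus' agrees: " + !common.isEmpty());   // true -> no rule match
    // "die" (feminine singular or plural readings) shares no reading with the singular neuter "Haus":
    java.util.Set<String> die = new java.util.HashSet<>(java.util.Arrays.asList("NOM/SIN/FEM", "AKK/SIN/FEM", "NOM/PLU/NEU"));
    die.retainAll(haus);
    System.out.println("'die Haus' agrees: " + !die.isEmpty());      // false -> would be flagged
  }
}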
|
languagetool-language-modules/de/src/main/java/org/languagetool/rules/de/AgreementRule.java
|
/* LanguageTool, a natural language style checker
* Copyright (C) 2005 Daniel Naber (http://www.danielnaber.de)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
* USA
*/
package org.languagetool.rules.de;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.ResourceBundle;
import java.util.Set;
import java.util.regex.Pattern;
import org.apache.commons.lang3.StringUtils;
import org.jetbrains.annotations.Nullable;
import org.languagetool.AnalyzedSentence;
import org.languagetool.AnalyzedToken;
import org.languagetool.AnalyzedTokenReadings;
import org.languagetool.JLanguageTool;
import org.languagetool.language.German;
import org.languagetool.rules.Categories;
import org.languagetool.rules.Example;
import org.languagetool.rules.Rule;
import org.languagetool.rules.RuleMatch;
import org.languagetool.rules.patterns.PatternToken;
import org.languagetool.rules.patterns.PatternTokenBuilder;
import org.languagetool.tagging.de.AnalyzedGermanToken;
import org.languagetool.tagging.de.GermanToken;
import org.languagetool.tagging.de.GermanToken.POSType;
import org.languagetool.tagging.disambiguation.rules.DisambiguationPatternRule;
/**
* Simple agreement checker for German noun phrases. Checks agreement in:
*
* <ul>
* <li>DET/PRO NOUN: e.g. "mein Auto", "der Mann", "die Frau" (correct), "die Haus" (incorrect)</li>
* <li>DET/PRO ADJ NOUN: e.g. "der riesige Tisch" (correct), "die riesigen Tisch" (incorrect)</li>
* </ul>
*
* Note that this rule only checks agreement inside the noun phrase, not whether
* e.g. the correct case is used. For example, "Es ist das Haus dem Mann" is not
* detected as incorrect.
*
* <p>TODO: the implementation could use a re-write that first detects the relevant noun phrases and then checks agreement
*
* @author Daniel Naber
*/
public class AgreementRule extends Rule {
private final German language;
private enum GrammarCategory {
KASUS("Kasus (Fall: Wer/Was, Wessen, Wem, Wen/Was - Beispiel: 'das Fahrrads' statt 'des Fahrrads')"),
GENUS("Genus (männlich, weiblich, sächlich - Beispiel: 'der Fahrrad' statt 'das Fahrrad')"),
NUMERUS("Numerus (Einzahl, Mehrzahl - Beispiel: 'das Fahrräder' statt 'die Fahrräder')");
private final String displayName;
GrammarCategory(String displayName) {
this.displayName = displayName;
}
}
private static final List<List<PatternToken>> ANTI_PATTERNS = Arrays.asList(
Arrays.asList( // "Wir bereinigen das nächsten Dienstag."
new PatternTokenBuilder().posRegex("VER:.*").build(),
new PatternTokenBuilder().token("das").build(),
new PatternTokenBuilder().tokenRegex("nächste[ns]?").build(),
new PatternTokenBuilder().tokenRegex("Montag|D(ien|onner)stag|Mittwoch|Freitag|S(ams|onn)tag|Woche|Monat|Jahr").build()
),
Arrays.asList(
new PatternTokenBuilder().tokenRegex("(?i:ist|war)").build(),
new PatternTokenBuilder().token("das").build(),
new PatternTokenBuilder().token("Zufall").build()
),
Arrays.asList( // "So hatte das Vorteile|Auswirkungen|Konsequenzen..."
new PatternTokenBuilder().tokenRegex("(?i:hat(te)?)").build(),
new PatternTokenBuilder().token("das").build()
),
Arrays.asList(
new PatternTokenBuilder().tokenRegex("von|bei").build(),
new PatternTokenBuilder().tokenRegex("(vielen|allen)").build(),
new PatternTokenBuilder().posRegex("PA2:.*|ADJ:AKK:PLU:.*").build() // "ein von vielen bewundertes Haus" / "Das weckte bei vielen ungute Erinnerungen."
),
Arrays.asList(
new PatternTokenBuilder().token("für").build(),
new PatternTokenBuilder().tokenRegex("(viele|alle|[dm]ich|ihn|sie|uns)").build(),
new PatternTokenBuilder().posRegex("ADJ:AKK:.*").build() // "Ein für viele wichtiges Anliegen."
),
Arrays.asList(
new PatternTokenBuilder().csToken("machen").matchInflectedForms().build(),
new PatternTokenBuilder().token("einem").build(),
new PatternTokenBuilder().token("Angst").build() // "Dinge, die/ Etwas, das einem Angst macht"
),
Arrays.asList(
new PatternTokenBuilder().token("einem").build(),
new PatternTokenBuilder().token("Angst").build(), // "Was einem Angst macht"
new PatternTokenBuilder().tokenRegex("machen|einjagen").matchInflectedForms().build()
),
Arrays.asList(
new PatternTokenBuilder().token("einem").build(),
new PatternTokenBuilder().token("geschenkten").build(),
new PatternTokenBuilder().token("Gaul").build()
),
Arrays.asList(
new PatternTokenBuilder().token("einer").build(),
new PatternTokenBuilder().token("jeden").build(),
new PatternTokenBuilder().posRegex("SUB:GEN:.*").build() // "Kern einer jeden Tragödie..."
),
Arrays.asList(
new PatternTokenBuilder().token("kein").build(),
new PatternTokenBuilder().token("schöner").build(),
new PatternTokenBuilder().token("Land").build() // https://de.wikipedia.org/wiki/Kein_sch%C3%B6ner_Land
),
Arrays.asList(
new PatternTokenBuilder().pos(JLanguageTool.SENTENCE_START_TAGNAME).build(),
new PatternTokenBuilder().tokenRegex("Ist|Sind|Macht|Wird").build(),
new PatternTokenBuilder().token("das").build(),
new PatternTokenBuilder().posRegex("SUB:.*").build(),
new PatternTokenBuilder().posRegex("PKT|KON:NEB|ZUS").build()// "Ist das Kunst?" / "Ist das Kunst oder Abfall?" / "Sind das Eier aus Bodenhaltung"
),
Arrays.asList(
new PatternTokenBuilder().pos(JLanguageTool.SENTENCE_START_TAGNAME).build(),
new PatternTokenBuilder().tokenRegex("Meist(ens)?|Oft(mals)?|Häufig|Selten").build(),
new PatternTokenBuilder().tokenRegex("sind|waren|ist").build(),
new PatternTokenBuilder().token("das").build(),
new PatternTokenBuilder().posRegex("SUB:.*").build() // Meistens sind das Frauen, die damit besser umgehen können.
),
Arrays.asList(
new PatternTokenBuilder().token("des").build(),
new PatternTokenBuilder().token("Lied").build(),
new PatternTokenBuilder().token("ich").build()// Wes Brot ich ess, des Lied ich sing
),
Arrays.asList(
new PatternTokenBuilder().pos(JLanguageTool.SENTENCE_START_TAGNAME).build(),
new PatternTokenBuilder().tokenRegex("D(a|ie)s").build(),
new PatternTokenBuilder().posRegex("VER:[123]:.*").build(),
new PatternTokenBuilder().posRegex("SUB:NOM:.*").build()// "Das erfordert Können und..." / "Dies bestätigte Polizeimeister Huber"
),
Arrays.asList(
new PatternTokenBuilder().posRegex("ART:.*").build(), // "Das wenige Kilometer breite Tal"
new PatternTokenBuilder().posRegex("ADJ:.*").build(),
new PatternTokenBuilder().tokenRegex("(Kilo|Zenti|Milli)?meter|Jahre|Monate|Wochen|Tage|Stunden|Minuten|Sekunden").build()
),
Arrays.asList(
new PatternTokenBuilder().token("Van").build(), // https://de.wikipedia.org/wiki/Alexander_Van_der_Bellen
new PatternTokenBuilder().token("der").build(),
new PatternTokenBuilder().token("Bellen").build()
),
Arrays.asList(
new PatternTokenBuilder().token("mehrere").build(), // "mehrere Verwundete" http://forum.languagetool.org/t/de-false-positives-and-false-false/1516
new PatternTokenBuilder().pos("SUB:NOM:SIN:FEM:ADJ").build()
),
Arrays.asList(
new PatternTokenBuilder().token("allen").build(),
new PatternTokenBuilder().token("Besitz").build()
),
Arrays.asList(
new PatternTokenBuilder().tokenRegex("d(ie|en)|[md]einen?").build(),
new PatternTokenBuilder().token("Top").build(),
new PatternTokenBuilder().tokenRegex("\\d+").build()
),
Arrays.asList( //"Unter diesen rief das großen Unmut hervor."
new PatternTokenBuilder().posRegex("VER:3:SIN:.*").build(),
new PatternTokenBuilder().token("das").build(),
new PatternTokenBuilder().posRegex("ADJ:AKK:.*").build(),
new PatternTokenBuilder().posRegex("SUB:AKK:.*").build(),
new PatternTokenBuilder().pos("ZUS").build(),
new PatternTokenBuilder().pos(JLanguageTool.SENTENCE_END_TAGNAME).build()
),
Arrays.asList( // "Bei mir löste das Panik aus."
new PatternTokenBuilder().posRegex("VER:3:SIN:.*").build(),
new PatternTokenBuilder().token("das").build(),
new PatternTokenBuilder().posRegex("SUB:AKK:.*").build(),
new PatternTokenBuilder().pos("ZUS").build(),
new PatternTokenBuilder().pos(JLanguageTool.SENTENCE_END_TAGNAME).build()
),
Arrays.asList(
new PatternTokenBuilder().token("Außenring").build(),
new PatternTokenBuilder().token("Autobahn").build()
),
Arrays.asList( // "Ehre, wem Ehre gebührt"
new PatternTokenBuilder().tokenRegex("[dw]em").build(),
new PatternTokenBuilder().csToken("Ehre").build(),
new PatternTokenBuilder().csToken("gebührt").build()
),
Arrays.asList(
new PatternTokenBuilder().token("Eurovision").build(),
new PatternTokenBuilder().token("Song").build(),
new PatternTokenBuilder().token("Contest").build()
),
Arrays.asList( // "Das Holocaust Memorial Museum."
new PatternTokenBuilder().posRegex("ART:.*").build(),
new PatternTokenBuilder().posRegex("SUB:.*").build(),
new PatternTokenBuilder().pos("UNKNOWN").build()
),
Arrays.asList( // "Er fragte, ob das Spaß macht."
new PatternTokenBuilder().csToken(",").build(),
new PatternTokenBuilder().posRegex("KON:UNT|ADV:INR").build(),
new PatternTokenBuilder().csToken("das").build(),
new PatternTokenBuilder().posRegex("SUB:.*").build(),
new PatternTokenBuilder().posRegex("VER:3:SIN.*").build()
),
Arrays.asList( // "Es gibt viele solcher Bilder"
new PatternTokenBuilder().tokenRegex("viele|wenige|einige|mehrere").build(),
new PatternTokenBuilder().csToken("solcher").build(),
new PatternTokenBuilder().posRegex("SUB:GEN:PLU:.*").build()
),
Arrays.asList( // "der französischen First Lady"
new PatternTokenBuilder().tokenRegex("[dD](ie|er)").build(),
new PatternTokenBuilder().csToken("First").build(),
new PatternTokenBuilder().csToken("Lady").build()
),
Arrays.asList( // "der französischen First Lady"
new PatternTokenBuilder().tokenRegex("[dD](ie|er)").build(),
new PatternTokenBuilder().posRegex("ADJ:.*").build(),
new PatternTokenBuilder().csToken("First").build(),
new PatternTokenBuilder().csToken("Lady").build()
),
Arrays.asList(
new PatternTokenBuilder().tokenRegex("[dD]e[rn]").build(),
new PatternTokenBuilder().csToken("Gold").build(),
new PatternTokenBuilder().csToken("Cup").build()
),
Arrays.asList(
new PatternTokenBuilder().token("das").build(),
new PatternTokenBuilder().tokenRegex("viele|wenige").build(),
new PatternTokenBuilder().posRegex("SUB:.*").build()
),
Arrays.asList( // "Er verspricht allen/niemandem/jedem hohe Gewinne."
new PatternTokenBuilder().tokenRegex("allen|(nieman|je(man)?)dem").build(),
new PatternTokenBuilder().posRegex("ADJ:AKK:PLU:.*").build(),
new PatternTokenBuilder().posRegex("SUB:AKK:PLU:.*").build()
),
Arrays.asList( // "Er verspricht allen/niemandem/jedem Gewinne von über 15 Prozent."
new PatternTokenBuilder().tokenRegex("allen|(nieman|je(man)?)dem").build(),
new PatternTokenBuilder().posRegex("SUB:AKK:PLU:.*").build()
),
Arrays.asList( // "Für ihn ist das Alltag." / "Für die Religiösen ist das Blasphemie."
new PatternTokenBuilder().token("für").setSkip(2).build(),
new PatternTokenBuilder().tokenRegex("ist|war").build(),
new PatternTokenBuilder().csToken("das").build(),
new PatternTokenBuilder().posRegex("SUB:NOM:.*").build(),
new PatternTokenBuilder().pos("PKT").build()
),
Arrays.asList( // "Sie sagte, dass das Rache bedeuten würden"
new PatternTokenBuilder().pos("KON:UNT").build(),
new PatternTokenBuilder().csToken("das").build(),
new PatternTokenBuilder().posRegex("SUB:.+").build(),
new PatternTokenBuilder().tokenRegex("bedeuten|sein").matchInflectedForms().build()
),
Arrays.asList( // "Karl sagte, dass sie niemandem Bescheid gegeben habe."
new PatternTokenBuilder().token("niemand").matchInflectedForms().build(),
new PatternTokenBuilder().posRegex("SUB:.+").build()
),
Arrays.asList(
new PatternTokenBuilder().token("alles").build(),
new PatternTokenBuilder().csToken("Walzer").build()
),
Arrays.asList( // "ei der Daus"
new PatternTokenBuilder().csToken("der").build(),
new PatternTokenBuilder().csToken("Daus").build()
),
Arrays.asList( // "Er nahm allen Mut zusammen"
new PatternTokenBuilder().csToken("allen").build(),
new PatternTokenBuilder().csToken("Mut").build(),
new PatternTokenBuilder().csToken("zusammen").build()
),
Arrays.asList(
new PatternTokenBuilder().csToken("dem").build(),
new PatternTokenBuilder().csToken("Achtung").setSkip(1).build(),
new PatternTokenBuilder().csToken("schenken").matchInflectedForms().build()
),
Arrays.asList(
new PatternTokenBuilder().csToken("schenken").matchInflectedForms().build(),
new PatternTokenBuilder().csToken("dem").build(),
new PatternTokenBuilder().csToken("Achtung").build()
)
);
private static final Set<String> MODIFIERS = new HashSet<>(Arrays.asList(
"besonders",
"fast",
"geradezu",
"sehr",
"überaus",
"ziemlich"
));
private static final Set<String> VIELE_WENIGE_LOWERCASE = new HashSet<>(Arrays.asList(
"viele",
"vieler",
"wenige",
"weniger",
"einige",
"einiger",
"mehrerer",
"mehrere"
));
private static final String[] REL_PRONOUN_LEMMAS = {"der", "welch"};
private static final Pattern UNITS = Pattern.compile(".*([gG]ramm|[mM]eter)");
private static final Set<String> PRONOUNS_TO_BE_IGNORED = new HashSet<>(Arrays.asList(
"ich",
"dir",
"du",
"er", "sie", "es",
"wir",
"mir",
"uns",
"ihnen",
"euch",
"ihm",
"ihr",
"ihn",
"dessen",
"deren",
"denen",
"sich",
"unser",
"aller",
"man",
"beide",
"beiden",
"beider",
"wessen",
"a",
"alle",
"etwas",
"irgendetwas",
"was",
"wer",
"jenen", // "...und mit jenen anderer Arbeitsgruppen verwoben"
"diejenigen",
"jemand", "jemandes",
"niemand", "niemandes"
));
private static final Set<String> NOUNS_TO_BE_IGNORED = new HashSet<>(Arrays.asList(
"Prozent", // Plural "Prozente", trotzdem ist "mehrere Prozent" korrekt
"Gramm",
"Kilogramm",
"Uhr" // "um ein Uhr"
));
public AgreementRule(ResourceBundle messages, German language) {
this.language = language;
super.setCategory(Categories.GRAMMAR.getCategory(messages));
addExamplePair(Example.wrong("<marker>Der Haus</marker> wurde letztes Jahr gebaut."),
Example.fixed("<marker>Das Haus</marker> wurde letztes Jahr gebaut."));
}
@Override
public String getId() {
return "DE_AGREEMENT";
}
@Override
public String getDescription() {
return "Kongruenz von Nominalphrasen (unvollständig!), z.B. 'mein kleiner(kleines) Haus'";
}
@Override
public RuleMatch[] match(AnalyzedSentence sentence) {
List<RuleMatch> ruleMatches = new ArrayList<>();
AnalyzedTokenReadings[] tokens = getSentenceWithImmunization(sentence).getTokensWithoutWhitespace();
for (int i = 0; i < tokens.length; i++) {
//defaulting to the first reading
//TODO: check for all readings
String posToken = tokens[i].getAnalyzedToken(0).getPOSTag();
if (posToken != null && posToken.equals(JLanguageTool.SENTENCE_START_TAGNAME)) {
continue;
}
if (tokens[i].isImmunized()) {
continue;
}
AnalyzedTokenReadings tokenReadings = tokens[i];
boolean relevantPronoun = isRelevantPronoun(tokens, i);
boolean ignore = couldBeRelativeOrDependentClause(tokens, i);
if (i > 0) {
String prevToken = tokens[i-1].getToken().toLowerCase();
if (StringUtils.equalsAny(tokens[i].getToken(), "eine", "einen")
&& StringUtils.equalsAny(prevToken, "der", "die", "das", "des", "dieses")) {
// TODO: "der eine Polizist" -> nicht ignorieren, sondern "der polizist" checken; "auf der einen Seite"
ignore = true;
}
}
// avoid false alarm on "nichts Gutes" and "alles Gute"
if (StringUtils.equalsAny(tokenReadings.getToken(), "nichts", "alles", "dies")) {
ignore = true;
}
// avoid false alarm on "Art. 1" and "bisherigen Art. 1" (Art. = Artikel):
boolean detAbbrev = i < tokens.length-2 && tokens[i+1].getToken().equals("Art") && tokens[i+2].getToken().equals(".");
boolean detAdjAbbrev = i < tokens.length-3 && tokens[i+2].getToken().equals("Art") && tokens[i+3].getToken().equals(".");
// "einen Hochwasser führenden Fluss", "die Gott zugeschriebenen Eigenschaften":
boolean followingParticiple = i < tokens.length-3 && (tokens[i+2].hasPartialPosTag("PA1") || tokens[i+2].getToken().matches("zugeschriebenen?|genannten?"));
if (detAbbrev || detAdjAbbrev || followingParticiple) {
ignore = true;
}
if ((GermanHelper.hasReadingOfType(tokenReadings, POSType.DETERMINER) || relevantPronoun) && !ignore) {
int tokenPosAfterModifier = getPosAfterModifier(i+1, tokens);
int tokenPos = tokenPosAfterModifier;
if (tokenPos >= tokens.length) {
break;
}
AnalyzedTokenReadings nextToken = tokens[tokenPos];
if (isNonPredicativeAdjective(nextToken) || isParticiple(nextToken)) {
tokenPos = tokenPosAfterModifier + 1;
if (tokenPos >= tokens.length) {
break;
}
if (GermanHelper.hasReadingOfType(tokens[tokenPos], POSType.NOMEN)) {
// TODO: add a case (checkAdjNounAgreement) for special cases like "deren",
// e.g. "deren komisches Geschenke" isn't yet detected as incorrect
if (i >= 2 && GermanHelper.hasReadingOfType(tokens[i-2], POSType.ADJEKTIV)
&& "als".equals(tokens[i-1].getToken())
&& "das".equals(tokens[i].getToken())) {
// avoid false alarm for e.g. "weniger farbenprächtig als das anderer Papageien"
continue;
}
RuleMatch ruleMatch = checkDetAdjNounAgreement(tokens[i],
nextToken, tokens[tokenPos], sentence);
if (ruleMatch != null) {
ruleMatches.add(ruleMatch);
}
}
} else if (GermanHelper.hasReadingOfType(nextToken, POSType.NOMEN) && !"Herr".equals(nextToken.getToken())) {
RuleMatch ruleMatch = checkDetNounAgreement(tokens[i], nextToken, sentence);
if (ruleMatch != null) {
ruleMatches.add(ruleMatch);
}
}
}
} // for each token
return toRuleMatchArray(ruleMatches);
}
/**
* Search for modifiers (such as "sehr", "1,4 Meter") which can expand a
* determiner - adjective - noun group ("ein hohes Haus" -> "ein sehr hohes Haus",
* "ein 500 Meter hohes Haus") and return the index of the first non-modifier token ("Haus")
* @param startAt index of array where to start searching for modifier
* @return index of first non-modifier token
*/
private int getPosAfterModifier(int startAt, AnalyzedTokenReadings[] tokens) {
if ((startAt + 1) < tokens.length && MODIFIERS.contains(tokens[startAt].getToken())) {
startAt++;
}
if ((startAt + 1) < tokens.length && (StringUtils.isNumeric(tokens[startAt].getToken()) || tokens[startAt].hasPosTag("ZAL"))) {
int posAfterModifier = startAt + 1;
if ((startAt + 3) < tokens.length && ",".equals(tokens[startAt+1].getToken()) && StringUtils.isNumeric(tokens[startAt+2].getToken())) {
posAfterModifier = startAt + 3;
}
if (UNITS.matcher(tokens[posAfterModifier].getToken()).matches()) {
return posAfterModifier + 1;
}
}
return startAt;
}
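// Worked example for getPosAfterModifier() above (illustrative comment only): for
// "ein 500 Meter hohes Haus" the method is called with startAt pointing at "500"; the numeric
// check moves posAfterModifier to "Meter", "Meter" matches UNITS, and the index of "hohes" is
// returned, so match() goes on to treat "ein ... hohes Haus" like a plain DET ADJ NOUN phrase.
// For "ein sehr hohes Haus", only the MODIFIERS check fires and the index of "hohes" is returned
// directly.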
@Override
public List<DisambiguationPatternRule> getAntiPatterns() {
return makeAntiPatterns(ANTI_PATTERNS, language);
}
private boolean isNonPredicativeAdjective(AnalyzedTokenReadings tokensReadings) {
for (AnalyzedToken reading : tokensReadings.getReadings()) {
String posTag = reading.getPOSTag();
if (posTag != null && posTag.startsWith("ADJ:") && !posTag.contains("PRD")) {
return true;
}
}
return false;
}
private boolean isParticiple(AnalyzedTokenReadings tokensReadings) {
return tokensReadings.hasPartialPosTag("PA1") || tokensReadings.hasPartialPosTag("PA2");
}
private boolean isRelevantPronoun(AnalyzedTokenReadings[] tokens, int pos) {
AnalyzedTokenReadings analyzedToken = tokens[pos];
boolean relevantPronoun = GermanHelper.hasReadingOfType(analyzedToken, POSType.PRONOMEN);
// avoid false alarms:
String token = tokens[pos].getToken();
if (PRONOUNS_TO_BE_IGNORED.contains(token.toLowerCase()) ||
(pos > 0 && tokens[pos-1].getToken().equalsIgnoreCase("vor") && token.equalsIgnoreCase("allem"))) {
relevantPronoun = false;
}
return relevantPronoun;
}
// TODO: improve this so it only returns true for real relative clauses
private boolean couldBeRelativeOrDependentClause(AnalyzedTokenReadings[] tokens, int pos) {
boolean comma;
boolean relPronoun;
if (pos >= 1) {
// avoid false alarm: "Das Wahlrecht, das Frauen zugesprochen bekamen." etc:
comma = tokens[pos-1].getToken().equals(",");
relPronoun = comma && tokens[pos].hasAnyLemma(REL_PRONOUN_LEMMAS);
if (relPronoun && pos+3 < tokens.length) {
return true;
}
}
if (pos >= 2) {
// avoid false alarm: "Der Mann, in dem quadratische Fische schwammen."
// or: "Die Polizei erwischte die Diebin, weil diese Ausweis und Visitenkarte hinterließ."
comma = tokens[pos-2].getToken().equals(",");
if (comma) {
boolean prep = tokens[pos-1].hasPosTagStartingWith("PRP:");
relPronoun = tokens[pos].hasAnyLemma(REL_PRONOUN_LEMMAS);
return prep && relPronoun || (tokens[pos-1].hasPosTag("KON:UNT") && (tokens[pos].hasLemma("jen") || tokens[pos].hasLemma("dies")));
}
}
return false;
}
@Nullable
private RuleMatch checkDetNounAgreement(AnalyzedTokenReadings token1,
AnalyzedTokenReadings token2, AnalyzedSentence sentence) {
// TODO: remove "-".equals(token2.getToken()) after the bug fix
// see Daniel's comment from 20.12.2016 at https://github.com/languagetool-org/languagetool/issues/635
if (token2.isImmunized() || NOUNS_TO_BE_IGNORED.contains(token2.getToken()) || "-".equals(token2.getToken())) {
return null;
}
Set<String> set1 = null;
if (token1.getReadings().size() == 1 &&
token1.getReadings().get(0).getPOSTag() != null &&
token1.getReadings().get(0).getPOSTag().endsWith(":STV")) {
// catch the error in "Meiner Chef raucht."
set1 = Collections.emptySet();
} else {
set1 = getAgreementCategories(token1);
}
if (set1 == null) {
return null; // word not known, assume it's correct
}
Set<String> set2 = getAgreementCategories(token2);
if (set2 == null) {
return null;
}
set1.retainAll(set2);
RuleMatch ruleMatch = null;
if (set1.isEmpty() && !isException(token1, token2)) {
List<String> errorCategories = getCategoriesCausingError(token1, token2);
String errorDetails = errorCategories.isEmpty() ?
"Kasus, Genus oder Numerus" : String.join(" und ", errorCategories);
String msg = "Möglicherweise fehlende grammatische Übereinstimmung zwischen Artikel und Nomen " +
"bezüglich " + errorDetails + ".";
String shortMsg = "Möglicherweise keine Übereinstimmung bezüglich " + errorDetails;
ruleMatch = new RuleMatch(this, sentence, token1.getStartPos(),
token2.getEndPos(), msg, shortMsg);
/*try {
// this will not give a match for compounds that are not in the dictionary...
ruleMatch.setUrl(new URL("https://www.korrekturen.de/flexion/deklination/" + token2.getToken() + "/"));
} catch (MalformedURLException e) {
throw new RuntimeException(e);
}*/
AgreementSuggestor suggestor = new AgreementSuggestor(language.getSynthesizer(), token1, token2);
List<String> suggestions = suggestor.getSuggestions();
ruleMatch.setSuggestedReplacements(suggestions);
}
return ruleMatch;
}
private boolean isException(AnalyzedTokenReadings token1, AnalyzedTokenReadings token2) {
return "allen".equals(token1.getToken()) && "Grund".equals(token2.getToken());
}
private List<String> getCategoriesCausingError(AnalyzedTokenReadings token1, AnalyzedTokenReadings token2) {
List<String> categories = new ArrayList<>();
List<GrammarCategory> categoriesToCheck = Arrays.asList(GrammarCategory.KASUS, GrammarCategory.GENUS, GrammarCategory.NUMERUS);
for (GrammarCategory category : categoriesToCheck) {
if (agreementWithCategoryRelaxation(token1, token2, category)) {
categories.add(category.displayName);
}
}
return categories;
}
private RuleMatch checkDetAdjNounAgreement(AnalyzedTokenReadings token1,
AnalyzedTokenReadings token2, AnalyzedTokenReadings token3, AnalyzedSentence sentence) {
// TODO: remove (token3 == null || token3.getToken().length() < 2)
// see Daniel's comment from 20.12.2016 at https://github.com/languagetool-org/languagetool/issues/635
if (token3 == null || token3.getToken().length() < 2) {
return null;
}
Set<String> set = retainCommonCategories(token1, token2, token3);
RuleMatch ruleMatch = null;
if (set == null || set.isEmpty()) {
// TODO: more detailed error message:
String msg = "Möglicherweise fehlende grammatische Übereinstimmung zwischen Artikel, Adjektiv und " +
"Nomen bezüglich Kasus, Numerus oder Genus. Beispiel: 'mein kleiner Haus' " +
"statt 'mein kleines Haus'";
String shortMsg = "Möglicherweise keine Übereinstimmung bezüglich Kasus, Numerus oder Genus";
ruleMatch = new RuleMatch(this, sentence, token1.getStartPos(), token3.getEndPos(), msg, shortMsg);
}
return ruleMatch;
}
private boolean agreementWithCategoryRelaxation(AnalyzedTokenReadings token1,
AnalyzedTokenReadings token2, GrammarCategory categoryToRelax) {
Set<GrammarCategory> categoryToRelaxSet;
if (categoryToRelax != null) {
categoryToRelaxSet = Collections.singleton(categoryToRelax);
} else {
categoryToRelaxSet = Collections.emptySet();
}
Set<String> set1 = getAgreementCategories(token1, categoryToRelaxSet, true);
if (set1 == null) {
return true; // word not known, assume it's correct
}
Set<String> set2 = getAgreementCategories(token2, categoryToRelaxSet, true);
if (set2 == null) {
return true;
}
set1.retainAll(set2);
return !set1.isEmpty();
}
@Nullable
private Set<String> retainCommonCategories(AnalyzedTokenReadings token1,
AnalyzedTokenReadings token2, AnalyzedTokenReadings token3) {
Set<GrammarCategory> categoryToRelaxSet = Collections.emptySet();
Set<String> set1 = getAgreementCategories(token1, categoryToRelaxSet, true);
if (set1 == null) {
return null; // word not known, assume it's correct
}
boolean skipSol = !VIELE_WENIGE_LOWERCASE.contains(token1.getToken().toLowerCase());
Set<String> set2 = getAgreementCategories(token2, categoryToRelaxSet, skipSol);
if (set2 == null) {
return null;
}
Set<String> set3 = getAgreementCategories(token3, categoryToRelaxSet, true);
if (set3 == null) {
return null;
}
set1.retainAll(set2);
set1.retainAll(set3);
return set1;
}
private Set<String> getAgreementCategories(AnalyzedTokenReadings aToken) {
return getAgreementCategories(aToken, new HashSet<>(), false);
}
/** Return Kasus, Numerus, Genus of those forms with a determiner. */
private Set<String> getAgreementCategories(AnalyzedTokenReadings aToken, Set<GrammarCategory> omit, boolean skipSol) {
Set<String> set = new HashSet<>();
List<AnalyzedToken> readings = aToken.getReadings();
for (AnalyzedToken tmpReading : readings) {
if (skipSol && tmpReading.getPOSTag() != null && tmpReading.getPOSTag().endsWith(":SOL")) {
// SOL = alleinstehend - needs to be skipped so we find errors like "An der roter Ampel."
continue;
}
AnalyzedGermanToken reading = new AnalyzedGermanToken(tmpReading);
if (reading.getCasus() == null && reading.getNumerus() == null &&
reading.getGenus() == null) {
continue;
}
if (reading.getGenus() == GermanToken.Genus.ALLGEMEIN &&
tmpReading.getPOSTag() != null && !tmpReading.getPOSTag().endsWith(":STV") && // STV: stellvertretend (!= begleitend)
!possessiveSpecialCase(aToken, tmpReading)) {
// genus=ALG in the original data. Not sure if this is allowed, but expand this so
// e.g. "Ich Arbeiter" doesn't get flagged as incorrect:
if (reading.getDetermination() == null) {
// Nouns don't have the determination property (definite/indefinite), and as we don't want to
// introduce a special case for that, we just pretend they always fulfill both properties:
set.add(makeString(reading.getCasus(), reading.getNumerus(), GermanToken.Genus.MASKULINUM, GermanToken.Determination.DEFINITE, omit));
set.add(makeString(reading.getCasus(), reading.getNumerus(), GermanToken.Genus.MASKULINUM, GermanToken.Determination.INDEFINITE, omit));
set.add(makeString(reading.getCasus(), reading.getNumerus(), GermanToken.Genus.FEMININUM, GermanToken.Determination.DEFINITE, omit));
set.add(makeString(reading.getCasus(), reading.getNumerus(), GermanToken.Genus.FEMININUM, GermanToken.Determination.INDEFINITE, omit));
set.add(makeString(reading.getCasus(), reading.getNumerus(), GermanToken.Genus.NEUTRUM, GermanToken.Determination.DEFINITE, omit));
set.add(makeString(reading.getCasus(), reading.getNumerus(), GermanToken.Genus.NEUTRUM, GermanToken.Determination.INDEFINITE, omit));
} else {
set.add(makeString(reading.getCasus(), reading.getNumerus(), GermanToken.Genus.MASKULINUM, reading.getDetermination(), omit));
set.add(makeString(reading.getCasus(), reading.getNumerus(), GermanToken.Genus.FEMININUM, reading.getDetermination(), omit));
set.add(makeString(reading.getCasus(), reading.getNumerus(), GermanToken.Genus.NEUTRUM, reading.getDetermination(), omit));
}
} else {
if (reading.getDetermination() == null || "jed".equals(tmpReading.getLemma()) || "manch".equals(tmpReading.getLemma())) { // "jeder" etc. needs a special case to avoid false alarm
set.add(makeString(reading.getCasus(), reading.getNumerus(), reading.getGenus(), GermanToken.Determination.DEFINITE, omit));
set.add(makeString(reading.getCasus(), reading.getNumerus(), reading.getGenus(), GermanToken.Determination.INDEFINITE, omit));
} else {
set.add(makeString(reading.getCasus(), reading.getNumerus(), reading.getGenus(), reading.getDetermination(), omit));
}
}
}
return set;
}
private boolean possessiveSpecialCase(AnalyzedTokenReadings aToken, AnalyzedToken tmpReading) {
// Without this special case, the 'ALG' readings of these possessive pronouns would be expanded to all genders and errors like in "Der Zustand meiner Gehirns." would be missed.
return aToken.hasPartialPosTag("PRO:POS") && ("ich".equals(tmpReading.getLemma()) || "sich".equals(tmpReading.getLemma()));
}
private String makeString(GermanToken.Kasus casus, GermanToken.Numerus num, GermanToken.Genus gen,
GermanToken.Determination determination, Set<GrammarCategory> omit) {
List<String> l = new ArrayList<>();
if (casus != null && !omit.contains(GrammarCategory.KASUS)) {
l.add(casus.toString());
}
if (num != null && !omit.contains(GrammarCategory.NUMERUS)) {
l.add(num.toString());
}
if (gen != null && !omit.contains(GrammarCategory.GENUS)) {
l.add(gen.toString());
}
if (determination != null) {
l.add(determination.toString());
}
return String.join("/", l);
}
}
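// Minimal usage sketch, assuming the standard JLanguageTool entry point (new JLanguageTool(new
// German()) plus check(String)); this class is illustrative only and not part of the original
// file. It reuses the incorrect example sentence registered via addExamplePair() above, which is
// expected to produce a DE_AGREEMENT match covering "Der Haus".
class AgreementRuleUsageSketch {
  public static void main(String[] args) throws java.io.IOException {
    JLanguageTool lt = new JLanguageTool(new German());
    for (RuleMatch match : lt.check("Der Haus wurde letztes Jahr gebaut.")) {
      System.out.println(match.getFromPos() + "-" + match.getToPos() + ": " + match.getMessage());
    }
  }
}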
|
[de] combine 2 antipatterns in DE_AGREEMENT
|
languagetool-language-modules/de/src/main/java/org/languagetool/rules/de/AgreementRule.java
|
[de] combine 2 antipatterns in DE_AGREEMENT
|
<ide><path>languagetool-language-modules/de/src/main/java/org/languagetool/rules/de/AgreementRule.java
<ide> ),
<ide> Arrays.asList(
<ide> new PatternTokenBuilder().token("allen").build(),
<del> new PatternTokenBuilder().token("Besitz").build()
<add> new PatternTokenBuilder().tokenRegex("Besitz|Mut").build()
<ide> ),
<ide> Arrays.asList(
<ide> new PatternTokenBuilder().tokenRegex("d(ie|en)|[md]einen?").build(),
<ide> Arrays.asList( // "ei der Daus"
<ide> new PatternTokenBuilder().csToken("der").build(),
<ide> new PatternTokenBuilder().csToken("Daus").build()
<del> ),
<del> Arrays.asList( // "Er nahm allen Mut zusammen"
<del> new PatternTokenBuilder().csToken("allen").build(),
<del> new PatternTokenBuilder().csToken("Mut").build(),
<del> new PatternTokenBuilder().csToken("zusammen").build()
<ide> ),
<ide> Arrays.asList(
<ide> new PatternTokenBuilder().csToken("dem").build(),
|
|
Java
|
apache-2.0
|
c0aa4fdd160aea3a60b51fc78b4cbabdf2675b32
| 0 |
MuShiiii/commons-collections,apache/commons-collections,mohanaraosv/commons-collections,jankill/commons-collections,mohanaraosv/commons-collections,apache/commons-collections,sandrineBeauche/commons-collections,sandrineBeauche/commons-collections,apache/commons-collections,gonmarques/commons-collections,mohanaraosv/commons-collections,jankill/commons-collections,jankill/commons-collections,MuShiiii/commons-collections,sandrineBeauche/commons-collections,MuShiiii/commons-collections,gonmarques/commons-collections
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.collections.collection;
import java.util.Collection;
import org.apache.commons.collections.Predicate;
/**
* Decorates another <code>Collection</code> to validate that additions
* match a specified predicate.
* <p>
* This collection exists to provide validation for the decorated collection.
* It is normally created to decorate an empty collection.
* If an object cannot be added to the collection, an IllegalArgumentException is thrown.
* <p>
* One usage would be to ensure that no null entries are added to the collection.
* <pre>Collection coll = PredicatedCollection.decorate(new ArrayList(), NotNullPredicate.INSTANCE);</pre>
* <p>
* This class is Serializable from Commons Collections 3.1.
*
* @param <E> the type of the elements in the collection
* @since Commons Collections 3.0
* @version $Revision$ $Date$
*
* @author Stephen Colebourne
* @author Paul Jack
*/
public class PredicatedCollection<E> extends AbstractCollectionDecorator<E> {
/** Serialization version */
private static final long serialVersionUID = -5259182142076705162L;
/** The predicate to use */
protected final Predicate<? super E> predicate;
/**
* Factory method to create a predicated (validating) collection.
* <p>
* If there are any elements already in the collection being decorated, they
* are validated.
*
* @param <T> the type of the elements in the collection
* @param coll the collection to decorate, must not be null
* @param predicate the predicate to use for validation, must not be null
* @return a new predicated collection
* @throws IllegalArgumentException if collection or predicate is null
* @throws IllegalArgumentException if the collection contains invalid elements
*/
public static <T> Collection<T> decorate(Collection<T> coll, Predicate<? super T> predicate) {
return new PredicatedCollection<T>(coll, predicate);
}
//-----------------------------------------------------------------------
/**
* Constructor that wraps (not copies).
* <p>
* If there are any elements already in the collection being decorated, they
* are validated.
*
* @param coll the collection to decorate, must not be null
* @param predicate the predicate to use for validation, must not be null
* @throws IllegalArgumentException if collection or predicate is null
* @throws IllegalArgumentException if the collection contains invalid elements
*/
protected PredicatedCollection(Collection<E> coll, Predicate<? super E> predicate) {
super(coll);
if (predicate == null) {
throw new IllegalArgumentException("Predicate must not be null");
}
this.predicate = predicate;
for (E item : coll) {
validate(item);
}
}
/**
* Validates the object being added to ensure it matches the predicate.
* <p>
* The predicate itself should not throw an exception, but return false to
* indicate that the object cannot be added.
*
* @param object the object being added
* @throws IllegalArgumentException if the add is invalid
*/
protected void validate(E object) {
if (predicate.evaluate(object) == false) {
throw new IllegalArgumentException("Cannot add Object '" + object + "' - Predicate '" + predicate + "' rejected it");
}
}
//-----------------------------------------------------------------------
/**
* Override to validate the object being added to ensure it matches
* the predicate.
*
* @param object the object being added
* @return the result of adding to the underlying collection
* @throws IllegalArgumentException if the add is invalid
*/
public boolean add(E object) {
validate(object);
return decorated().add(object);
}
/**
* Override to validate the objects being added to ensure they match
* the predicate. If any one fails, no update is made to the underlying
* collection.
*
* @param coll the collection being added
* @return the result of adding to the underlying collection
* @throws IllegalArgumentException if the add is invalid
*/
public boolean addAll(Collection<? extends E> coll) {
for (E item : coll) {
validate(item);
}
return decorated().addAll(coll);
}
}
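// Short usage sketch for the generified decorator above (illustrative, not part of the original
// file): elements rejected by the predicate trigger an IllegalArgumentException, both for the
// initial contents validated in the constructor and for later add()/addAll() calls. The anonymous
// not-null predicate is an assumption made for this example; it relies only on the
// Predicate.evaluate() contract used by validate() above.
class PredicatedCollectionUsageExample {
  public static void main(String[] args) {
    Predicate<String> notNull = new Predicate<String>() {
      public boolean evaluate(String value) {
        return value != null;
      }
    };
    Collection<String> names = PredicatedCollection.decorate(new java.util.ArrayList<String>(), notNull);
    names.add("Stephen");   // accepted by the predicate
    try {
      names.add(null);      // rejected: validate() throws
    } catch (IllegalArgumentException expected) {
      System.out.println("Rejected: " + expected.getMessage());
    }
    System.out.println(names);  // [Stephen]
  }
}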
|
src/java/org/apache/commons/collections/collection/PredicatedCollection.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.collections.collection;
import java.util.Collection;
import java.util.Iterator;
import org.apache.commons.collections.Predicate;
/**
* Decorates another <code>Collection</code> to validate that additions
* match a specified predicate.
* <p>
* This collection exists to provide validation for the decorated collection.
* It is normally created to decorate an empty collection.
* If an object cannot be added to the collection, an IllegalArgumentException is thrown.
* <p>
* One usage would be to ensure that no null entries are added to the collection.
* <pre>Collection coll = PredicatedCollection.decorate(new ArrayList(), NotNullPredicate.INSTANCE);</pre>
* <p>
* This class is Serializable from Commons Collections 3.1.
*
* @since Commons Collections 3.0
* @version $Revision$ $Date$
*
* @author Stephen Colebourne
* @author Paul Jack
*/
public class PredicatedCollection extends AbstractSerializableCollectionDecorator {
/** Serialization version */
private static final long serialVersionUID = -5259182142076705162L;
/** The predicate to use */
protected final Predicate predicate;
/**
* Factory method to create a predicated (validating) collection.
* <p>
* If there are any elements already in the collection being decorated, they
* are validated.
*
* @param coll the collection to decorate, must not be null
* @param predicate the predicate to use for validation, must not be null
* @return a new predicated collection
* @throws IllegalArgumentException if collection or predicate is null
* @throws IllegalArgumentException if the collection contains invalid elements
*/
public static Collection decorate(Collection coll, Predicate predicate) {
return new PredicatedCollection(coll, predicate);
}
//-----------------------------------------------------------------------
/**
* Constructor that wraps (not copies).
* <p>
* If there are any elements already in the collection being decorated, they
* are validated.
*
* @param coll the collection to decorate, must not be null
* @param predicate the predicate to use for validation, must not be null
* @throws IllegalArgumentException if collection or predicate is null
* @throws IllegalArgumentException if the collection contains invalid elements
*/
protected PredicatedCollection(Collection coll, Predicate predicate) {
super(coll);
if (predicate == null) {
throw new IllegalArgumentException("Predicate must not be null");
}
this.predicate = predicate;
for (Iterator it = coll.iterator(); it.hasNext(); ) {
validate(it.next());
}
}
/**
* Validates the object being added to ensure it matches the predicate.
* <p>
* The predicate itself should not throw an exception, but return false to
* indicate that the object cannot be added.
*
* @param object the object being added
* @throws IllegalArgumentException if the add is invalid
*/
protected void validate(Object object) {
if (predicate.evaluate(object) == false) {
throw new IllegalArgumentException("Cannot add Object '" + object + "' - Predicate '" + predicate + "' rejected it");
}
}
//-----------------------------------------------------------------------
/**
* Override to validate the object being added to ensure it matches
* the predicate.
*
* @param object the object being added
* @return the result of adding to the underlying collection
* @throws IllegalArgumentException if the add is invalid
*/
public boolean add(Object object) {
validate(object);
return getCollection().add(object);
}
/**
* Override to validate the objects being added to ensure they match
* the predicate. If any one fails, no update is made to the underlying
* collection.
*
* @param coll the collection being added
* @return the result of adding to the underlying collection
* @throws IllegalArgumentException if the add is invalid
*/
public boolean addAll(Collection coll) {
for (Iterator it = coll.iterator(); it.hasNext(); ) {
validate(it.next());
}
return getCollection().addAll(coll);
}
}
|
Merging from -r468106:814127 of collections_jdk5_branch - namely where this code was generified; mostly in r738956.
Also see the following revisions:
------------------------------------------------------------------------
r471575 | scolebourne | 2006-11-05 15:58:08 -0800 (Sun, 05 Nov 2006) | 1 line
Generify and remove AbstractSerializableCollectionDecorator
------------------------------------------------------------------------
r471202 | scolebourne | 2006-11-04 06:21:44 -0800 (Sat, 04 Nov 2006) | 1 line
Remove getCollection() - use covariant decorated()
------------------------------------------------------------------------
git-svn-id: 53f0c1087cb9b05f99ff63ab1f4d1687a227fef1@815037 13f79535-47bb-0310-9956-ffa450edef68
|
src/java/org/apache/commons/collections/collection/PredicatedCollection.java
|
Merging from -r468106:814127 of collections_jdk5_branch - namely where this code was generified; mostly in r738956.
|
<ide><path>src/java/org/apache/commons/collections/collection/PredicatedCollection.java
<ide> package org.apache.commons.collections.collection;
<ide>
<ide> import java.util.Collection;
<del>import java.util.Iterator;
<ide>
<ide> import org.apache.commons.collections.Predicate;
<ide>
<ide> * <p>
<ide> * This class is Serializable from Commons Collections 3.1.
<ide> *
<add> * @param <E> the type of the elements in the collection
<ide> * @since Commons Collections 3.0
<ide> * @version $Revision$ $Date$
<ide> *
<ide> * @author Stephen Colebourne
<ide> * @author Paul Jack
<ide> */
<del>public class PredicatedCollection extends AbstractSerializableCollectionDecorator {
<add>public class PredicatedCollection<E> extends AbstractCollectionDecorator<E> {
<ide>
<ide> /** Serialization version */
<ide> private static final long serialVersionUID = -5259182142076705162L;
<ide>
<ide> /** The predicate to use */
<del> protected final Predicate predicate;
<add> protected final Predicate<? super E> predicate;
<ide>
<ide> /**
<ide> * Factory method to create a predicated (validating) collection.
<ide> * If there are any elements already in the collection being decorated, they
<ide> * are validated.
<ide> *
<add> * @param <T> the type of the elements in the collection
<ide> * @param coll the collection to decorate, must not be null
<ide> * @param predicate the predicate to use for validation, must not be null
<ide> * @return a new predicated collection
<ide> * @throws IllegalArgumentException if collection or predicate is null
<ide> * @throws IllegalArgumentException if the collection contains invalid elements
<ide> */
<del> public static Collection decorate(Collection coll, Predicate predicate) {
<del> return new PredicatedCollection(coll, predicate);
<add> public static <T> Collection<T> decorate(Collection<T> coll, Predicate<? super T> predicate) {
<add> return new PredicatedCollection<T>(coll, predicate);
<ide> }
<del>
<add>
<ide> //-----------------------------------------------------------------------
<ide> /**
<ide> * Constructor that wraps (not copies).
<ide> * @throws IllegalArgumentException if collection or predicate is null
<ide> * @throws IllegalArgumentException if the collection contains invalid elements
<ide> */
<del> protected PredicatedCollection(Collection coll, Predicate predicate) {
<add> protected PredicatedCollection(Collection<E> coll, Predicate<? super E> predicate) {
<ide> super(coll);
<ide> if (predicate == null) {
<ide> throw new IllegalArgumentException("Predicate must not be null");
<ide> }
<ide> this.predicate = predicate;
<del> for (Iterator it = coll.iterator(); it.hasNext(); ) {
<del> validate(it.next());
<add> for (E item : coll) {
<add> validate(item);
<ide> }
<ide> }
<ide>
<ide> * @param object the object being added
<ide> * @throws IllegalArgumentException if the add is invalid
<ide> */
<del> protected void validate(Object object) {
<add> protected void validate(E object) {
<ide> if (predicate.evaluate(object) == false) {
<ide> throw new IllegalArgumentException("Cannot add Object '" + object + "' - Predicate '" + predicate + "' rejected it");
<ide> }
<ide> * @return the result of adding to the underlying collection
<ide> * @throws IllegalArgumentException if the add is invalid
<ide> */
<del> public boolean add(Object object) {
<add> public boolean add(E object) {
<ide> validate(object);
<del> return getCollection().add(object);
<add> return decorated().add(object);
<ide> }
<ide>
<ide> /**
<ide> * @return the result of adding to the underlying collection
<ide> * @throws IllegalArgumentException if the add is invalid
<ide> */
<del> public boolean addAll(Collection coll) {
<del> for (Iterator it = coll.iterator(); it.hasNext(); ) {
<del> validate(it.next());
<add> public boolean addAll(Collection<? extends E> coll) {
<add> for (E item : coll) {
<add> validate(item);
<ide> }
<del> return getCollection().addAll(coll);
<add> return decorated().addAll(coll);
<ide> }
<ide>
<ide> }
|
|
Java
|
apache-2.0
|
75a74d59d50b31db3daa312892c4b7cfc0f42fcf
| 0 |
google/safe-html-types,google/safe-html-types
|
/*
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.html.types;
import javax.annotation.concurrent.Immutable;
import jsinterop.annotations.JsType;
/**
* A string-like object which represents a sequence of CSS declarations
* ({@code propertyName1: propertyvalue1; propertyName2: propertyValue2; ...}) and that carries the
* security type contract that its value, as a string, will not cause untrusted script execution
* (XSS) when evaluated as CSS in a browser.
*
* <p>A SafeStyle's string representation ({@link #getSafeStyleString()}) can safely be:
* <ul>
* <li>Interpolated as the content of a <b>quoted</b> HTML style attribute. However, the SafeStyle
* string <b>must be HTML-attribute-escaped</b> before interpolation.
* <li>Interpolated as the content of a {}-wrapped block within a stylesheet. '<' characters in the
* SafeStyle string <b>must be CSS-escaped</b> before interpolation. The SafeStyle string is also
* guaranteed not to be able to introduce new properties or elide existing ones.
* <li>Interpolated as the content of a {}-wrapped block within an HTML <style> element. '<'
* characters in the SafeStyle string <b>must be CSS-escaped</b> before interpolation.
* <li>Assigned to the style property of a DOM node. The SafeStyle string should not be escaped
* before being assigned to the property.
* </ul>
*
* TODO(mlourenco): Do we need to require SafeStyle to be the entire content of a style attribute
* or the {}-wrapped block above? It would seem that validating untrusted properties would be
* enough to guarantee that it also would not affect any surrounding, constant, properties. See
* discussion in cl/61826926.
*
* <p>A SafeStyle may never contain literal angle brackets. Otherwise, it could be unsafe to place
* a SafeStyle into a <style> tag (where it can't be HTML escaped). For example, if the
* SafeStyle containing "{@code font: 'foo <style/><script>evil</script>'}" were
* interpolated within a <style> tag, this would then break out of the style context into
* HTML.
*
* <p>A SafeStyle may contain literal single or double quotes, and as such the entire style string
* must be escaped when used in a style attribute (if this were not the case, the string could
* contain a matching quote that would escape from the style attribute).
*
* <p>Values of this type must be composable, i.e. for any two values {@code style1} and
* {@code style2} of this type, {@code style1.getSafeStyleString() + style2.getSafeStyleString()}
* must itself be a value that satisfies the SafeStyle type constraint. This requirement implies
* that for any value {@code style} of this type, {@code style.getSafeStyleString()} must not end
* in a "property value" or "property name" context. For example, a value of
* {@code background:url("} or {@code font-} would not satisfy the SafeStyle contract. This is
* because concatenating such strings with a second value that itself does not contain unsafe CSS
* can result in an overall string that does. For example, if {@code javascript:evil())"} is
* appended to {@code background:url("}, the resulting string may result in the execution of a
* malicious script.
*
* TODO(mlourenco): Consider whether we should implement UTF-8 interchange-validity checks and
* blacklisting of newlines (including Unicode ones) and other whitespace characters (\t, \f).
* Document here if so and also update SafeStyles.fromConstant().
*
* <p>The following example values comply with this type's contract:
* <ul>
* <li><code>width: 1em;</code></li>
* <li><code>height:1em;</code></li>
* <li><code>width: 1em;height: 1em;</code></li>
* <li><code>background:url('http://url');</code></li>
* </ul>
* In addition, the empty string is safe for use in a CSS attribute.
*
* <p>The following example values do <em>not</em> comply with this type's contract:
* <ul>
* <li><code>background: red</code> (missing a trailing semi-colon)</li>
* <li><code>background:</code> (missing a value and a trailing semi-colon)</li>
* <li><code>1em</code> (missing an attribute name, which provides context for the value)</li>
* </ul>
*
* @see http://www.w3.org/TR/css3-syntax/
*/
@Immutable
@JsType
public final class SafeStyle {
/** The SafeStyle wrapping an empty string. */
public static final SafeStyle EMPTY = new SafeStyle("");
private final String privateDoNotAccessOrElseSafeStyleWrappedValue;
SafeStyle(String style) {
if (style == null) {
throw new NullPointerException();
}
privateDoNotAccessOrElseSafeStyleWrappedValue = style;
}
@Override
public int hashCode() {
return privateDoNotAccessOrElseSafeStyleWrappedValue.hashCode() ^ 0x70173910;
}
@Override
public boolean equals(Object other) {
if (!(other instanceof SafeStyle)) {
return false;
}
SafeStyle that = (SafeStyle) other;
return this.privateDoNotAccessOrElseSafeStyleWrappedValue.equals(
that.privateDoNotAccessOrElseSafeStyleWrappedValue);
}
/**
* Returns a debug representation of this value's underlying string, NOT the string representation
* of the style declaration(s).
*
* <p>Having {@code toString()} return a debug representation is intentional. This type has
* a GWT-compiled JavaScript version; JavaScript has no static typing and a distinct method
* name provides a modicum of type-safety.
*
* @see #getSafeStyleString
*/
@Override
public String toString() {
return "SafeStyle{" + privateDoNotAccessOrElseSafeStyleWrappedValue + "}";
}
/**
* Returns this value's underlying string. See class documentation for what guarantees exist on
* the returned string.
*/
// NOTE(mlourenco): jslayout depends on this exact method name when generating code, be careful if
// changing it.
public String getSafeStyleString() {
return privateDoNotAccessOrElseSafeStyleWrappedValue;
}
}
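// Standalone sketch of the syntactic constraints the SafeStyle contract above describes; this is
// NOT the library's real validation logic (producers such as builders enforce far more), just a
// naive illustration written for this example. The class and method names here are assumptions.
class SafeStyleContractSketch {
  static boolean looksLikeCompleteDeclarations(String style) {
    if (style.isEmpty()) {
      return true;                                   // the empty string is explicitly allowed
    }
    if (style.indexOf('<') >= 0 || style.indexOf('>') >= 0) {
      return false;                                  // literal angle brackets are never allowed
    }
    if (!style.trim().endsWith(";")) {
      return false;                                  // must not stop mid-declaration (composability)
    }
    return style.indexOf(':') >= 0;                  // a value needs a property name for context
  }
  public static void main(String[] args) {
    System.out.println(looksLikeCompleteDeclarations("width: 1em;"));     // true
    System.out.println(looksLikeCompleteDeclarations("background: red")); // false, no trailing ';'
    System.out.println(looksLikeCompleteDeclarations("1em"));             // false, not a declaration
    System.out.println(looksLikeCompleteDeclarations(SafeStyle.EMPTY.getSafeStyleString())); // true
  }
}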
|
types/src/main/java/com/google/common/html/types/SafeStyle.java
|
/*
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.html.types;
import javax.annotation.concurrent.Immutable;
import jsinterop.annotations.JsType;
/**
* A string-like object which represents a sequence of CSS declarations
* ({@code propertyName1: propertyvalue1; propertyName2: propertyValue2; ...}) and that carries the
* security type contract that its value, as a string, will not cause untrusted script execution
* (XSS) when evaluated as CSS in a browser.
*
* <p>A SafeStyle's string representation ({@link #getSafeStyleString()}) can safely:
* <ul>
* <li>Be interpolated as the content of a <b>quoted</b> HTML style attribute. The SafeStyle
* string <b>must be HTML-attribute-escaped</b> (where " and ' are escaped) before interpolation.
* <li>Be interpolated as the content of a {}-wrapped block within a stylesheet. The SafeStyle
* string should not be escaped before interpolation. SafeStyle's contract also guarantees that the
* string will not be able to introduce new properties or elide existing ones.
* <li>Be assigned to the style property of a DOM node. The SafeStyle string should not be escaped
* before being assigned to the property.
* </ul>
*
* TODO(mlourenco): Do we need to require SafeStyle to be the entire content of a style attribute
* or the {}-wrapped block above? It would seem that validating untrusted properties would be
* enough to guarantee that it also would not affect any surrounding, constant, properties. See
* discussion in cl/61826926.
*
* <p>A SafeStyle may never contain literal angle brackets. Otherwise, it could be unsafe to place
* a SafeStyle into a <style> tag (where it can't be HTML escaped). For example, if the
* SafeStyle containing "{@code font: 'foo <style/><script>evil</script>'}" were
* interpolated within a <style> tag, this would then break out of the style context into
* HTML.
*
* <p>A SafeStyle may contain literal single or double quotes, and as such the entire style string
* must be escaped when used in a style attribute (if this were not the case, the string could
* contain a matching quote that would escape from the style attribute).
*
* <p>Values of this type must be composable, i.e. for any two values {@code style1} and
* {@code style2} of this type, {@code style1.getSafeStyleString() + style2.getSafeStyleString()}
* must itself be a value that satisfies the SafeStyle type constraint. This requirement implies
* that for any value {@code style} of this type, {@code style.getSafeStyleString()} must not end
* in a "property value" or "property name" context. For example, a value of
* {@code background:url("} or {@code font-} would not satisfy the SafeStyle contract. This is
* because concatenating such strings with a second value that itself does not contain unsafe CSS
* can result in an overall string that does. For example, if {@code javascript:evil())"} is
* appended to {@code background:url("}, the resulting string may result in the execution of a
* malicious script.
*
* TODO(mlourenco): Consider whether we should implement UTF-8 interchange-validity checks and
* blacklisting of newlines (including Unicode ones) and other whitespace characters (\t, \f).
* Document here if so and also update SafeStyles.fromConstant().
*
* <p>The following example values comply with this type's contract:
* <ul>
* <li><code>width: 1em;</code></li>
* <li><code>height:1em;</code></li>
* <li><code>width: 1em;height: 1em;</code></li>
* <li><code>background:url('http://url');</code></li>
* </ul>
* In addition, the empty string is safe for use in a CSS attribute.
*
* <p>The following example values do <em>not</em> comply with this type's contract:
* <ul>
* <li><code>background: red</code> (missing a trailing semi-colon)</li>
* <li><code>background:</code> (missing a value and a trailing semi-colon)</li>
* <li><code>1em</code> (missing an attribute name, which provides context for the value)</li>
* </ul>
*
* @see http://www.w3.org/TR/css3-syntax/
*/
@Immutable
@JsType
public final class SafeStyle {
/** The SafeStyle wrapping an empty string. */
public static final SafeStyle EMPTY = new SafeStyle("");
private final String privateDoNotAccessOrElseSafeStyleWrappedValue;
SafeStyle(String style) {
if (style == null) {
throw new NullPointerException();
}
privateDoNotAccessOrElseSafeStyleWrappedValue = style;
}
@Override
public int hashCode() {
return privateDoNotAccessOrElseSafeStyleWrappedValue.hashCode() ^ 0x70173910;
}
@Override
public boolean equals(Object other) {
if (!(other instanceof SafeStyle)) {
return false;
}
SafeStyle that = (SafeStyle) other;
return this.privateDoNotAccessOrElseSafeStyleWrappedValue.equals(
that.privateDoNotAccessOrElseSafeStyleWrappedValue);
}
/**
* Returns a debug representation of this value's underlying string, NOT the string representation
* of the style declaration(s).
*
* <p>Having {@code toString()} return a debug representation is intentional. This type has
 * a GWT-compiled JavaScript version; JavaScript has no static typing and a distinct
 * method name provides a modicum of type-safety.
*
* @see #getSafeStyleString
*/
@Override
public String toString() {
return "SafeStyle{" + privateDoNotAccessOrElseSafeStyleWrappedValue + "}";
}
/**
* Returns this value's underlying string. See class documentation for what guarantees exist on
* the returned string.
*/
// NOTE(mlourenco): jslayout depends on this exact method name when generating code, be careful if
// changing it.
public String getSafeStyleString() {
return privateDoNotAccessOrElseSafeStyleWrappedValue;
}
}
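A short hedged sketch of the composability contract described above. SafeStyle's constructor is package-private, so real callers obtain instances elsewhere (for example via SafeStyles.fromConstant(), which the Javadoc mentions but which is not part of this file); the direct constructor calls below are for illustration only.
// Illustrative sketch only (assumes same-package access; production code would use a
// factory such as SafeStyles.fromConstant(), which is not defined in this file).
SafeStyle width = new SafeStyle("width: 1em;");    // complies: a complete declaration
SafeStyle height = new SafeStyle("height: 1em;");  // complies: a complete declaration
// Composability: concatenating two compliant values yields another compliant value,
// here "width: 1em;height: 1em;".
String combined = width.getSafeStyleString() + height.getSafeStyleString();
// By contrast, a value such as "background:url(\"" must be rejected, because appending
// further text could complete it into a malicious declaration.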
|
Align SafeStyle documentation
Documentation changes were made to Go's safe Style type in
[] Propagate these changes to other languages.
-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=158292583
|
types/src/main/java/com/google/common/html/types/SafeStyle.java
|
Align SafeStyle documentation
|
<ide><path>types/src/main/java/com/google/common/html/types/SafeStyle.java
<ide> * security type contract that its value, as a string, will not cause untrusted script execution
<ide> * (XSS) when evaluated as CSS in a browser.
<ide> *
<del> * <p>A SafeStyle's string representation ({@link #getSafeStyleString()}) can safely:
<add> * <p>A SafeStyle's string representation ({@link #getSafeStyleString()}) can safely be:
<ide> * <ul>
<del> * <li>Be interpolated as the content of a <b>quoted</b> HTML style attribute. The SafeStyle
<del> * string <b>must be HTML-attribute-escaped</b> (where " and ' are escaped) before interpolation.
<del> * <li>Be interpolated as the content of a {}-wrapped block within a stylesheet. The SafeStyle
<del> * string should not be escaped before interpolation. SafeStyle's contract also guarantees that the
<del> * string will not be able to introduce new properties or elide existing ones.
<del> * <li>Be assigned to the style property of a DOM node. The SafeStyle string should not be escaped
<add> * <li>Interpolated as the content of a <b>quoted</b> HTML style attribute. However, the SafeStyle
<add> * string <b>must be HTML-attribute-escaped</b> before interpolation.
<add> * <li>Interpolated as the content of a {}-wrapped block within a stylesheet. '<' characters in the
<add> * SafeStyle string <b>must be CSS-escaped</b> before interpolation. The SafeStyle string is also
<add> * guaranteed not to be able to introduce new properties or elide existing ones.
<add> * <li>Interpolated as the content of a {}-wrapped block within an HTML <style> element. '<'
<add> * characters in the SafeStyle string <b>must be CSS-escaped</b> before interpolation.
<add> * <li>Assigned to the style property of a DOM node. The SafeStyle string should not be escaped
<ide> * before being assigned to the property.
<ide> * </ul>
<ide> *
|
|
Java
|
apache-2.0
|
53fcae58e7e9aa5fcd43b9dedd03cfddb242da3a
| 0 |
imageprocessor/cv4j,imageprocessor/cv4j
|
package com.cv4j.rxjava;
import android.graphics.Bitmap;
import android.util.Log;
import android.widget.ImageView;
import com.cv4j.core.datamodel.CV4JImage;
import com.cv4j.core.datamodel.ImageProcessor;
import com.cv4j.core.filters.CommonFilter;
import org.reactivestreams.Publisher;
import java.util.ArrayList;
import java.util.List;
import io.reactivex.Flowable;
import io.reactivex.FlowableTransformer;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.annotations.NonNull;
import io.reactivex.functions.Consumer;
import io.reactivex.functions.Function;
import io.reactivex.schedulers.Schedulers;
/**
* Created by Tony Shen on 2017/3/14.
*/
public class RxImageData {
CV4JImage image;
Flowable flowable;
MemCache memCache;
boolean useCache = true;
ImageView imageView;
List<CommonFilter> filters;
WrappedCV4JImage wrappedCV4JImage;
private RxImageData(Bitmap bitmap) {
this.image = new CV4JImage(bitmap);
filters = new ArrayList<>();
memCache = MemCache.getInstance();
wrappedCV4JImage = new WrappedCV4JImage(image,filters);
flowable = Flowable.just(wrappedCV4JImage);
}
private RxImageData(CV4JImage image) {
this.image = image;
filters = new ArrayList<>();
memCache = MemCache.getInstance();
wrappedCV4JImage = new WrappedCV4JImage(image,filters);
flowable = Flowable.just(wrappedCV4JImage);
}
public static RxImageData bitmap(Bitmap bitmap) {
return new RxImageData(bitmap);
}
public static RxImageData image(CV4JImage image) {
return new RxImageData(image);
}
/**
 * Apply a filter; multiple filters can be chained.
* @param filter
* @return
*/
public RxImageData addFilter(final CommonFilter filter) {
if (filter==null) {
Log.e("RxImageData","filter is null");
return this;
}
filters.add(filter);
return this;
}
/**
 * Whether to use the cache; the cache is used by default.
 * This method must be called before into().
* @param useCache
* @return
*/
public RxImageData isUseCache(boolean useCache) {
this.useCache = useCache;
return this;
}
/**
* RxImageData.bitmap(bitmap).addFilter(new ColorFilter()).into(view);
* @param imageview
*/
public void into(final ImageView imageview) {
this.imageView = imageview;
render();
}
/**
 * Render the ImageView.
*/
private void render() {
if (imageView == null) {
return;
}
if (filters.size()==0) {
this.flowable.compose(RxImageData.toMain()).subscribe(new Consumer<WrappedCV4JImage>() {
@Override
public void accept(@NonNull WrappedCV4JImage wrapped) throws Exception {
imageView.setImageBitmap(wrapped.image.toBitmap());
}
});
} else if (filters.size() == 1) {
this.flowable
.map(new Function<WrappedCV4JImage,ImageProcessor>() {
@Override
public ImageProcessor apply(@NonNull WrappedCV4JImage wrap) throws
Exception {
if (useCache) {
String key = wrap.filters.get(0).getClass().getSimpleName()+imageView.getId();
if (memCache.get(key)==null) {
ImageProcessor imageProcessor = wrap.filters.get(0).filter(image.getProcessor());
memCache.put(key,imageProcessor.getImage().toBitmap());
return imageProcessor;
} else {
image.getProcessor().getImage().setBitmap(memCache.get(key));
return image.getProcessor();
}
} else {
return wrap.filters.get(0).filter(image.getProcessor());
}
}
}).compose(RxImageData.toMain()).subscribe(new Consumer<ImageProcessor>() {
@Override
public void accept(@NonNull ImageProcessor processor) throws Exception {
imageView.setImageBitmap(processor.getImage().toBitmap());
}
});
} else {
this.flowable.map(new Function<WrappedCV4JImage,List<CommonFilter>>() {
@Override
public List<CommonFilter> apply(@NonNull WrappedCV4JImage wrap) throws Exception {
return wrap.filters;
}
}).map(new Function<List<CommonFilter>,ImageProcessor>() {
@Override
public ImageProcessor apply(@NonNull List<CommonFilter> filters) throws Exception {
return filter(image.getProcessor());
}
}).compose(RxImageData.toMain()).subscribe(new Consumer<ImageProcessor>() {
@Override
public void accept(@NonNull ImageProcessor processor) throws Exception {
imageView.setImageBitmap((processor.getImage().toBitmap()));
}
});
}
}
private ImageProcessor filter(ImageProcessor imageData) {
if (filters.size()>0) {
return filter(imageData,filters.size());
}
return imageData;
}
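    /**
     * Recursive helper: applies filters.get(size-1) first and filters.get(0) last,
     * i.e. the registered filters run in reverse of the order they were added.
     */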
private ImageProcessor filter(ImageProcessor imageData, int size) {
if (size==1) {
CommonFilter filter = filters.get(0);
return filter.filter(imageData);
}
CommonFilter filter = filters.get(size-1);
imageData = filter.filter(imageData);
return filter(imageData,size-1);
}
    /**
     * Moves upstream work onto the io() scheduler and delivers results on the Android main thread.
     * @param <T> the element type of the stream
     * @return a transformer that applies that scheduling
     */
private static <T> FlowableTransformer<T, T> toMain() {
return new FlowableTransformer<T, T>() {
@Override
public Publisher<T> apply(Flowable<T> upstream) {
return upstream.subscribeOn(Schedulers.io())
.observeOn(AndroidSchedulers.mainThread());
}
};
}
}
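For orientation, a minimal usage sketch of the pipeline above, modelled on the Javadoc example in into(). ColorFilter is the filter named in that example (its import is not shown in this file); bitmap and imageView are assumed to come from the surrounding Activity or Fragment.
// Minimal sketch, assuming `bitmap` and `imageView` are provided by the caller and that
// ColorFilter (named in the Javadoc example above) is available on the classpath.
RxImageData.bitmap(bitmap)             // wrap the Bitmap in a CV4JImage-backed Flowable
        .isUseCache(false)             // optional; must be called before into()
        .addFilter(new ColorFilter())  // chain as many CommonFilter instances as needed
        .into(imageView);              // runs on Schedulers.io(), renders on the main thread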
|
rxcv4j/src/main/java/com/cv4j/rxjava/RxImageData.java
|
package com.cv4j.rxjava;
import android.graphics.Bitmap;
import android.util.Log;
import android.widget.ImageView;
import com.cv4j.core.datamodel.CV4JImage;
import com.cv4j.core.datamodel.ImageProcessor;
import com.cv4j.core.filters.CommonFilter;
import org.reactivestreams.Publisher;
import java.util.ArrayList;
import java.util.List;
import io.reactivex.Flowable;
import io.reactivex.FlowableTransformer;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.annotations.NonNull;
import io.reactivex.functions.Consumer;
import io.reactivex.functions.Function;
import io.reactivex.schedulers.Schedulers;
/**
* Created by Tony Shen on 2017/3/14.
*/
public class RxImageData {
CV4JImage image;
Flowable flowable;
MemCache memCache;
boolean useCache = true;
ImageView imageView;
List<CommonFilter> filters;
WrappedCV4JImage wrappedCV4JImage;
private RxImageData(Bitmap bitmap) {
this.image = new CV4JImage(bitmap);
filters = new ArrayList<>();
memCache = MemCache.getInstance();
wrappedCV4JImage = new WrappedCV4JImage(image,filters);
flowable = Flowable.just(wrappedCV4JImage);
}
private RxImageData(CV4JImage image) {
this.image = image;
filters = new ArrayList<>();
memCache = MemCache.getInstance();
wrappedCV4JImage = new WrappedCV4JImage(image,filters);
flowable = Flowable.just(wrappedCV4JImage);
}
public static RxImageData bitmap(Bitmap bitmap) {
return new RxImageData(bitmap);
}
public static RxImageData image(CV4JImage image) {
return new RxImageData(image);
}
/**
 * Apply a filter.
* @param filter
* @return
*/
public RxImageData addFilter(final CommonFilter filter) {
if (filter==null) {
Log.e("RxImageData","filter is null");
return this;
}
filters.add(filter);
return this;
}
/**
 * Whether to use the cache; uses the cache by default.
* @param useCache
* @return
*/
public RxImageData isUseCache(boolean useCache) {
this.useCache = useCache;
return this;
}
/**
* RxImageData.bitmap(bitmap).addFilter(new ColorFilter()).into(view);
* @param imageview
*/
public void into(final ImageView imageview) {
this.imageView = imageview;
render();
}
private void render() {
if (imageView == null) {
return;
}
if (filters.size()==0) {
this.flowable.compose(RxImageData.toMain()).subscribe(new Consumer<WrappedCV4JImage>() {
@Override
public void accept(@NonNull WrappedCV4JImage wrapped) throws Exception {
imageView.setImageBitmap(wrapped.image.toBitmap());
}
});
} else if (filters.size() == 1) {
this.flowable
.map(new Function<WrappedCV4JImage,ImageProcessor>() {
@Override
public ImageProcessor apply(@NonNull WrappedCV4JImage wrap) throws
Exception {
if (useCache) {
String key = wrap.filters.get(0).getClass().getName()+imageView.getId();
if (memCache.get(key)==null) {
ImageProcessor imageProcessor = wrap.filters.get(0).filter(image.getProcessor());
memCache.put(key,imageProcessor.getImage().toBitmap());
return imageProcessor;
} else {
image.getProcessor().getImage().setBitmap(memCache.get(key));
return image.getProcessor();
}
} else {
return wrap.filters.get(0).filter(image.getProcessor());
}
}
}).compose(RxImageData.toMain()).subscribe(new Consumer<ImageProcessor>() {
@Override
public void accept(@NonNull ImageProcessor processor) throws Exception {
imageView.setImageBitmap(processor.getImage().toBitmap());
}
});
} else {
this.flowable.map(new Function<WrappedCV4JImage,List<CommonFilter>>() {
@Override
public List<CommonFilter> apply(@NonNull WrappedCV4JImage wrap) throws Exception {
return wrap.filters;
}
}).map(new Function<List<CommonFilter>,ImageProcessor>() {
@Override
public ImageProcessor apply(@NonNull List<CommonFilter> filters) throws Exception {
return filter(image.getProcessor());
}
}).compose(RxImageData.toMain()).subscribe(new Consumer<ImageProcessor>() {
@Override
public void accept(@NonNull ImageProcessor processor) throws Exception {
imageView.setImageBitmap((processor.getImage().toBitmap()));
}
});
}
}
private ImageProcessor filter(ImageProcessor imageData) {
if (filters.size()>0) {
return filter(imageData,filters.size());
}
return imageData;
}
private ImageProcessor filter(ImageProcessor imageData, int size) {
if (size==1) {
CommonFilter filter = filters.get(0);
return filter.filter(imageData);
}
CommonFilter filter = filters.get(size-1);
imageData = filter.filter(imageData);
return filter(imageData,size-1);
}
    /**
     * Moves upstream work onto the io() scheduler and delivers results on the Android main thread.
     * @param <T> the element type of the stream
     * @return a transformer that applies that scheduling
     */
private static <T> FlowableTransformer<T, T> toMain() {
return new FlowableTransformer<T, T>() {
@Override
public Publisher<T> apply(Flowable<T> upstream) {
return upstream.subscribeOn(Schedulers.io())
.observeOn(AndroidSchedulers.mainThread());
}
};
}
}
|
add commit
|
rxcv4j/src/main/java/com/cv4j/rxjava/RxImageData.java
|
add commit
|
<ide><path>rxcv4j/src/main/java/com/cv4j/rxjava/RxImageData.java
<ide> }
<ide>
<ide> /**
<del> * Apply a filter.
<add> * Apply a filter; multiple filters can be chained.
<ide> * @param filter
<ide> * @return
<ide> */
<ide> }
<ide>
<ide> /**
<del> * Whether to use the cache; uses the cache by default.
<add> * Whether to use the cache; the cache is used by default.
<add> * This method must be called before into().
<ide> * @param useCache
<ide> * @return
<ide> */
<ide> render();
<ide> }
<ide>
<add> /**
<add> * Render the ImageView.
<add> */
<ide> private void render() {
<ide>
<ide> if (imageView == null) {
<ide> Exception {
<ide>
<ide> if (useCache) {
<del> String key = wrap.filters.get(0).getClass().getName()+imageView.getId();
<add> String key = wrap.filters.get(0).getClass().getSimpleName()+imageView.getId();
<ide>
<ide> if (memCache.get(key)==null) {
<ide>
|
|
Java
|
mit
|
error: pathspec 'src/main/java/com/rigiresearch/quizgen/CompoundQuestion.java' did not match any file(s) known to git
|
67151101414cd05ffb51a3af7c3a6794ad59b0c4
| 1 |
jachinte/examgen
|
/**
* Copyright 2017 University of Victoria
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to
* deal in the Software without restriction, including without limitation the
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
* sell copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
package com.rigiresearch.quizgen;
import java.util.List;
import java.util.stream.Collectors;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.experimental.Accessors;
/**
* A question composed of several sub-questions.
* @author Miguel Jimenez ([email protected])
* @date 2017-08-19
* @version $Id$
* @since 0.0.1
*/
@Accessors(fluent = true)
@AllArgsConstructor
@Getter
public final class CompoundQuestion implements Question {
/**
* This question's segment.
*/
private final TextSegment statement;
/**
* This question's sub-questions.
*/
private final List<Question> children;
/* (non-Javadoc)
* @see com.rigiresearch.quizgen.Question#header()
*/
@Override
public TextSegment header() {
return this.statement;
}
/* (non-Javadoc)
* @see com.rigiresearch.quizgen.Question#body()
*/
@Override
public List<TextSegment> body() {
return this.children.stream()
.flatMap(child -> child.body().stream())
.collect(Collectors.toList());
}
/* (non-Javadoc)
* @see com.rigiresearch.quizgen.Question#children()
*/
@Override
public List<Question> children() {
return this.children;
}
}
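A hedged sketch of how the composite above is meant to be assembled. The variables statement, q1 and q2 stand for TextSegment and Question implementations defined elsewhere in the project; they are assumptions for illustration, not part of this file.
// Illustrative only: statement, q1 and q2 are assumed to exist elsewhere in the project.
Question compound = new CompoundQuestion(statement, java.util.Arrays.asList(q1, q2));
// header() exposes the shared statement, while body() flattens every child's body into
// one list, so the composite renders as a single multi-part question.
List<TextSegment> parts = compound.body();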
|
src/main/java/com/rigiresearch/quizgen/CompoundQuestion.java
|
Add new type of question
|
src/main/java/com/rigiresearch/quizgen/CompoundQuestion.java
|
Add new type of question
|
<ide><path>src/main/java/com/rigiresearch/quizgen/CompoundQuestion.java
<add>/**
<add> * Copyright 2017 University of Victoria
<add> *
<add> * Permission is hereby granted, free of charge, to any person obtaining a copy
<add> * of this software and associated documentation files (the "Software"), to
<add> * deal in the Software without restriction, including without limitation the
<add> * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
<add> * sell copies of the Software, and to permit persons to whom the Software is
<add> * furnished to do so, subject to the following conditions:
<add> *
<add> * The above copyright notice and this permission notice shall be included in
<add> * all copies or substantial portions of the Software.
<add> *
<add> * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
<add> * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
<add> * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
<add> * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
<add> * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
<add> * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
<add> * IN THE SOFTWARE.
<add> */
<add>package com.rigiresearch.quizgen;
<add>
<add>import java.util.List;
<add>import java.util.stream.Collectors;
<add>import lombok.AllArgsConstructor;
<add>import lombok.Getter;
<add>import lombok.experimental.Accessors;
<add>
<add>/**
<add> * A question composed of several sub-questions.
<add> * @author Miguel Jimenez ([email protected])
<add> * @date 2017-08-19
<add> * @version $Id$
<add> * @since 0.0.1
<add> */
<add>@Accessors(fluent = true)
<add>@AllArgsConstructor
<add>@Getter
<add>public final class CompoundQuestion implements Question {
<add>
<add> /**
<add> * This question's segment.
<add> */
<add> private final TextSegment statement;
<add>
<add> /**
<add> * This question's sub-questions.
<add> */
<add> private final List<Question> children;
<add>
<add> /* (non-Javadoc)
<add> * @see com.rigiresearch.quizgen.Question#header()
<add> */
<add> @Override
<add> public TextSegment header() {
<add> return this.statement;
<add> }
<add>
<add> /* (non-Javadoc)
<add> * @see com.rigiresearch.quizgen.Question#body()
<add> */
<add> @Override
<add> public List<TextSegment> body() {
<add> return this.children.stream()
<add> .flatMap(child -> child.body().stream())
<add> .collect(Collectors.toList());
<add> }
<add>
<add> /* (non-Javadoc)
<add> * @see com.rigiresearch.quizgen.Question#children()
<add> */
<add> @Override
<add> public List<Question> children() {
<add> return this.children;
<add> }
<add>
<add>}
|
|
Java
|
apache-2.0
|
b81d58dbc38db6c46e0bec991245fef0d98720f6
| 0 |
scala/scala,felixmulder/scala,jvican/scala,shimib/scala,felixmulder/scala,shimib/scala,scala/scala,martijnhoekstra/scala,scala/scala,scala/scala,slothspot/scala,lrytz/scala,shimib/scala,slothspot/scala,martijnhoekstra/scala,lrytz/scala,lrytz/scala,slothspot/scala,scala/scala,lrytz/scala,jvican/scala,scala/scala,slothspot/scala,lrytz/scala,shimib/scala,shimib/scala,felixmulder/scala,jvican/scala,felixmulder/scala,felixmulder/scala,slothspot/scala,felixmulder/scala,martijnhoekstra/scala,slothspot/scala,lrytz/scala,martijnhoekstra/scala,jvican/scala,felixmulder/scala,slothspot/scala,martijnhoekstra/scala,martijnhoekstra/scala,jvican/scala,jvican/scala,shimib/scala,jvican/scala
|
/* ____ ____ ____ ____ ______ *\
** / __// __ \/ __// __ \/ ____/ SOcos COmpiles Scala **
** __\_ \/ /_/ / /__/ /_/ /\_ \ (c) 2002, LAMP/EPFL **
** /_____/\____/\___/\____/____/ **
\* */
// $OldId: Erasure.java,v 1.48 2003/01/16 14:21:19 schinz Exp $
// $Id$
package scalac.transformer;
import java.util.HashMap;
import scalac.Global;
import scalac.PhaseDescriptor;
import scalac.Unit;
import scalac.ast.Tree;
import scalac.ast.Tree.Template;
import scalac.ast.Tree.TypeDef;
import scalac.ast.Tree.ValDef;
import scalac.ast.TreeList;
import scalac.ast.Transformer;
import scalac.symtab.Definitions;
import scalac.symtab.Kinds;
import scalac.symtab.Type;
import scalac.symtab.TypeTags;
import scalac.symtab.Modifiers;
import scalac.symtab.Scope;
import scalac.symtab.SymSet;
import scalac.symtab.Symbol;
import scalac.backend.Primitives;
import scalac.util.Name;
import scalac.util.Names;
import scalac.util.Debug;
/** A transformer for type erasure and bridge building
*
* @author Martin Odersky
* @version 1.0
*
* What it does:
* (1) Map every type to its erasure.
* (2) If method A overrides a method B, and the erased type ETA of A is
* different from the erased type ETB of B seen as a member of A's class,
* add a bridge method with the same name as A,B, with signature ETB
* which calls A after casting parameters.
*/
public class Erasure extends Transformer implements Modifiers {
private final Global global;
private final Definitions definitions;
private final Primitives primitives;
private final boolean noTyped;
private Unit unit;
public Erasure(Global global, PhaseDescriptor descr) {
super(global, descr);
this.global = global;
this.definitions = global.definitions;
this.primitives = global.primitives;
this.noTyped = global.target != global.TARGET_JAVA;
}
public void apply(Unit unit) {
this.unit = unit;
unit.body = transform(unit.body);
}
//////////////////////////////////////////////////////////////////////////////////
// Box/Unbox and Coercions
/////////////////////////////////////////////////////////////////////////////////
boolean isUnboxed(Type type) {
switch (type) {
case UnboxedType(_): case UnboxedArrayType(_): return true;
default: return false;
}
}
boolean isUnboxedArray(Type type) {
switch (type) {
case UnboxedArrayType(_): return true;
default: return false;
}
}
boolean isBoxed(Type type) {
return type.unbox() != type || type.symbol().fullName() == Names.scala_Array;
}
Type boxedType(Type tp) {
switch (tp) {
case UnboxedType(int kind):
return definitions.getType(Type.boxedFullName(kind));
case UnboxedArrayType(Type elemtp):
return definitions.arrayType(boxedType(elemtp));
default:
return tp;
}
}
Symbol boxSym(Type unboxedtp) {
return primitives.getBoxValueSymbol(unboxedtp);
}
/** Emit `scala.RunTime.box(tree)' or
* `{ tree ; scala.RunTime.box() }' if type of `tree' is `void'.
*/
Tree box(Tree tree) {
Tree boxtree = gen.mkRef(tree.pos, primitives.RUNTIME_TYPE, boxSym(tree.type));
switch (tree.type) {
case UnboxedType(int kind):
if (kind == TypeTags.UNIT)
return gen.Block(
tree.pos, new Tree[]{tree, gen.Apply(boxtree, new Tree[0])});
}
return gen.Apply(boxtree, new Tree[]{tree});
}
 /** The symbol of the unbox method corresponding to unboxed type `unboxedtp'
*/
Symbol unboxSym(Type unboxedtp) {
return primitives.getUnboxValueSymbol(unboxedtp);
}
/** Emit tree.asType() or tree.asTypeArray(), where pt = Type or pt = Type[].
*/
Tree unbox(Tree tree, Type pt) {
Tree sel = gen.Select(tree, unboxSym(pt));
return gen.Apply(sel, new Tree[0]);
}
/** Generate a select from an unboxed type.
*/
public Tree unboxedSelect(Tree qual, Symbol sym) {
return make.Select(qual.pos, qual, sym.name)
.setSymbol(sym)
// !!! .setType(Prefix.TypePrefix(boxedType(qual.type)).memberType(sym).erasure());
.setType(Type.singleType(boxedType(qual.type),sym).erasure());
}
/** Subclass relation for class types; empty for other types.
*/
boolean isSubClass(Type tp1, Type tp2) {
Symbol sym1 = tp1.symbol();
Symbol sym2 = tp2.symbol();
return sym1 != null && sym2 != null && sym1.isSubClass(sym2);
}
/** Subtyping relation on erased types.
*/
boolean isSubType(Type tp1, Type tp2) {
if (tp1.isSameAs(tp2)) return true;
switch (tp2) {
case UnboxedType(_):
return tp1.isSubType(tp2);
case UnboxedArrayType(Type elemtp2):
switch (tp1) {
case UnboxedArrayType(Type elemtp1):
return !(elemtp1 instanceof Type.UnboxedType) &&
isSubType(elemtp1, elemtp2);
}
}
return isSubClass(tp1, tp2);
}
Tree coerce(Tree tree, Type pt) {
return isSubType(tree.type, pt) ? tree : cast(tree, pt);
}
Tree cast(Tree tree, Type pt) {
if (global.debug) global.log("cast " + tree + ":" + tree.type + " to " + pt);//debug
if (tree.type.isSameAs(pt)) {
return tree;
} else if (isSubType(tree.type, pt)) {
return noTyped ? tree : gen.Typed(tree, pt);
} else if (isUnboxed(tree.type) && !isUnboxed(pt)) {
return cast(box(tree), pt);
} else if ((isUnboxedArray(tree.type)
|| (tree.type.symbol() == definitions.ANY_CLASS))
&& isUnboxedArray(pt)) {
return
make.Apply(tree.pos,
make.TypeApply(tree.pos,
unboxedSelect(tree, definitions.AS),
new Tree[]{gen.mkType(tree.pos, pt)})
.setType(new Type.MethodType(Symbol.EMPTY_ARRAY, pt)),
new Tree[0])
.setType(pt);
} else if (!isUnboxed(tree.type) && isUnboxed(pt)) {
if (isBoxed(tree.type)) {
return coerce(unbox(tree, pt), pt);
} else {
Type bt = boxedType(pt);
while (isBoxed(bt.parents()[0])) {
bt = bt.parents()[0];
}
return cast(coerce(tree, bt), pt);
}
} else if (isUnboxed(tree.type) && isUnboxed(pt)) {
return gen.Apply(
unboxedSelect(box(tree), unboxSym(pt)),
new Tree[0]);
} else if (!isUnboxed(tree.type) && !isUnboxed(pt) ||
isUnboxedArray(tree.type) && isUnboxedArray(pt)) {
return
gen.Apply(
gen.TypeApply(
gen.Select(tree, definitions.AS),
new Tree[]{gen.mkType(tree.pos, pt)}),
new Tree[0]);
} else {
throw Debug.abort("cannot cast " + tree.type + " to " + pt);
}
}
//////////////////////////////////////////////////////////////////////////////////
// Bridge Building
/////////////////////////////////////////////////////////////////////////////////
private TreeList bridges;
private HashMap bridgeSyms;
/** Add bridge which Java-overrides `sym1' and which forwards to `sym'
*/
public void addBridge(Symbol sym, Symbol sym1) {
Type bridgeType = sym1.type().erasure();
// create bridge symbol and add to bridgeSyms(sym)
// or return if bridge with required type already exists for sym.
SymSet bridgesOfSym = (SymSet) bridgeSyms.get(sym);
if (bridgesOfSym == null) bridgesOfSym = SymSet.EMPTY;
Symbol[] brs = bridgesOfSym.toArray();
for (int i = 0; i < brs.length; i++) {
if (brs[i].type().isSameAs(bridgeType)) return;
}
Symbol bridgeSym = sym.cloneSymbol();
bridgeSym.flags |= (SYNTHETIC | BRIDGE);
bridgeSym.flags &= ~JAVA;
bridgesOfSym = bridgesOfSym.incl(bridgeSym);
bridgeSyms.put(sym, bridgesOfSym);
// check that there is no overloaded symbol with same erasure as bridge
Symbol overSym = sym.owner().members().lookup(sym.name);
switch (overSym.type()) {
case OverloadedType(Symbol[] alts, Type[] alttypes):
for (int i = 0; i < alts.length; i++) {
if (sym != alts[i] && bridgeType.isSameAs(alttypes[i].erasure())) {
unit.error(sym.pos, "overlapping overloaded alternatives; " +
"overridden " + sym1 + sym1.locationString() +
" has same erasure as " + alts[i] +
alttypes[i] + alts[i].locationString());
}
}
}
switch (bridgeType) {
case MethodType(Symbol[] params, Type restp):
// assign to bridge symbol its bridge type
// where owner of all parameters is bridge symbol itself.
Symbol[] params1 = new Symbol[params.length];
for (int i = 0; i < params.length; i++) {
params1[i] = params[i].cloneSymbol();
params1[i].setOwner(bridgeSym);
}
bridgeSym.setType(Type.MethodType(params1, restp));
// create bridge definition
Type symtype = sym.type().erasure();
switch (symtype) {
case MethodType(Symbol[] symparams, Type symrestp):
assert params1.length == symparams.length;
Tree[] args = new Tree[params1.length];
for (int i = 0; i < args.length; i++) {
args[i] = cast(gen.Ident(params1[i]), symparams[i].type());
}
Tree fwd = make.Apply(sym.pos, gen.Ident(sym).setType(symtype), args)
.setType(symrestp);
bridges.append(gen.DefDef(bridgeSym, coerce(fwd, restp)));
return;
}
}
throw Debug.abort("bad bridge types " + bridgeType + "," + sym.type().erasure());
}
public void addBridges(Symbol sym) {
Symbol c = sym.owner();
if (c.isClass() && !c.isInterface()) {
Type[] basetypes = c.parents();
//System.out.println("trying " + c + " <= " + ArrayApply.toString(c.basetypes()));//DEBUG
for (int i = 0; i < basetypes.length; i++) {
Symbol sym1 = sym.overriddenSymbol(basetypes[i]);
//if (sym1.kind != NONE) System.out.println("overridden: " + sym1 + sym1.locationString() + " by " + sym + sym.locationString());//DEBUG
if (sym1.kind != Kinds.NONE &&
!sym1.type().erasure().isSameAs(sym.type().erasure())) {
//System.out.println("add bridge: " + sym1 + sym1.locationString() + " by " + sym + sym.locationString());//DEBUG
addBridge(sym, sym1);
}
}
}
}
//////////////////////////////////////////////////////////////////////////////////
// Transformer
/////////////////////////////////////////////////////////////////////////////////
 /** Contract: every node needs to be transformed so that its type is the
 * erasure of the node's original type. The only exceptions are functions;
* these are mapped to the erasure of the function symbol's type.
*/
Symbol currentClass = null;
public Tree transform(Tree tree, boolean eraseFully) {
assert tree.type != null : tree;
Type owntype = eraseFully ? tree.type.fullErasure() : tree.type.erasure();
switch (tree) {
case ClassDef(int mods, Name name, TypeDef[] tparams, ValDef[][] vparams, Tree tpe, Template impl):
Symbol oldCurrentClass = currentClass;
currentClass = tree.symbol();
Tree newTree =
copy.ClassDef(tree, mods, name, new TypeDef[0],
transform(vparams), tpe, transform(impl, tree.symbol()))
.setType(owntype);
currentClass = oldCurrentClass;
return newTree;
case DefDef(int mods, Name name, TypeDef[] tparams, ValDef[][] vparams, Tree tpe, Tree rhs):
addBridges(tree.symbol());
Tree tpe1 = gen.mkType(tpe.pos, tpe.type.fullErasure());
Tree rhs1 = (rhs == Tree.Empty) ? rhs : transform(rhs, tpe1.type);
return copy.DefDef(
tree, mods, name, new TypeDef[0], transform(vparams), tpe1, rhs1)
.setType(owntype);
case ValDef(int mods, Name name, Tree tpe, Tree rhs):
Tree tpe1 = transform(tpe);
Tree rhs1 = (rhs == Tree.Empty) ? rhs : transform(rhs, tpe1.type);
return copy.ValDef(
tree, mods, name, tpe1, rhs1)
.setType(owntype);
case TypeDef(_, _, _, _):
// eliminate
return Tree.Empty;
case Block(Tree[] stats):
Tree[] newStats = new Tree[stats.length];
for (int i = 0; i < stats.length; ++i)
newStats[i] = transform(stats[i], true);
return copy.Block(tree, newStats).setType(owntype.fullErasure());
case Assign(Tree lhs, Tree rhs):
Tree lhs1 = transformLhs(lhs);
Tree rhs1 = transform(rhs, lhs1.type);
return copy.Assign(tree, lhs1, rhs1).setType(owntype.fullErasure());
case If(Tree cond, Tree thenp, Tree elsep):
Tree cond1 = transform(cond, Type.unboxedType(TypeTags.BOOLEAN));
Tree thenp1 = transform(thenp, owntype);
Tree elsep1 = (elsep == Tree.Empty) ? elsep : transform(elsep, owntype);
return copy.If(tree, cond1, thenp1, elsep1).setType(owntype);
case New(Template templ):
if (tree.type.symbol() == definitions.UNIT_CLASS)
// !!! return Tree.Literal(UNIT, null).setType(owntype);
throw Debug.abort("found unit literal");
switch (owntype) {
case UnboxedArrayType(Type elemtp):
Tree apply = transform(templ.parents[0]);
switch (apply) {
case Apply(_, Tree[] args):
assert args.length == 1;
switch (elemtp) {
case UnboxedType(int kind):
return genNewArray(tree.pos,args[0],kind);
default:
return genNewArray(tree.pos,args[0],elemtp);
}
default:
throw Debug.abort("illegal case", apply);
}
}
return super.transform(tree).setType(owntype);
case Typed(Tree expr, Tree tpe):
// coerce expr to tpe
Tree tpe1 = transform(tpe);
Tree expr1 = transform(expr, tpe1.type);
return noTyped ? expr1 : copy.Typed(tree, expr1, tpe1).setType(owntype);
case TypeApply(Tree fun, Tree[] args):
Symbol sym = fun.symbol();
if (sym == definitions.AS || sym == definitions.IS) {
Type tp = args[0].type.erasure();
if (isUnboxed(tp)) {
Tree qual1 = transform(getQualifier(currentClass, fun));
if (isUnboxed(qual1.type)) qual1 = box(qual1);
Symbol primSym = (sym == definitions.AS)
? primitives.getUnboxValueSymbol(tp)
: primitives.getInstanceTestSymbol(tp);
return gen.Select(qual1, primSym);
} else
return copy.TypeApply(tree, transform(fun), transform(args))
.setType(owntype);
} else
return transform(fun);
case Apply(Tree fun, Tree[] args):
switch (fun) {
case Select(Tree array, _):
if (isUnboxedArray(array.type().erasure())) {
switch (primitives.getPrimitive(fun.symbol())) {
case APPLY: return transformApply(tree);
case UPDATE: return transformUpdate(tree);
}
}
}
Tree fun1 = transform(fun);
if (fun1.symbol() == definitions.NULL) return fun1.setType(owntype);
if (global.debug) global.log("fn: " + fun1.symbol() + ":" + fun1.type);//debug
switch (fun1.type) {
case MethodType(Symbol[] params, Type restpe):
Tree[] args1 = args;
for (int i = 0; i < args.length; i++) {
Tree arg = args[i];
Type pt1 = params[i].type().erasure();
Tree arg1 = cast(transform(arg, pt1), pt1);
if (arg1 != arg && args1 == args) {
args1 = new Tree[args.length];
System.arraycopy(args, 0, args1, 0, i);
}
args1[i] = arg1;
}
return coerce(copy.Apply(tree, fun1, args1).setType(restpe), owntype);
default:
global.debugPrinter.print(fun1);
throw Debug.abort("bad method type: " + fun1.type + " " + fun1.symbol());
}
case Select(_, _):
case Ident(_):
Tree tree1 = transformLhs(tree);
//global.log("id: " + tree1+": "+tree1.type+" -> "+owntype);//DEBUG
return (tree1.type instanceof Type.MethodType) ? tree1
: coerce(tree1, owntype);
case AppliedType(_, _):
return gen.mkType(tree.pos, owntype);
default:
return super.transform(tree).setType(owntype);
}
}
public Tree transform(Tree tree) {
return transform(tree, false);
}
public Template transform(Template templ, Symbol clazz) {
TreeList savedBridges = bridges;
HashMap savedBridgeSyms = bridgeSyms;
bridges = new TreeList();
bridgeSyms = new HashMap();
Tree[] bases1 = transform(templ.parents);
TreeList body1 = new TreeList(transform(templ.body));
body1.append(bridges);
if (bridges.length() > 0) {
Type info = clazz.nextInfo();
switch (info) {
case CompoundType(Type[] parts, Scope members):
members = new Scope(members);
for (int i = 0; i < bridges.length(); i++) {
Tree bridge = (Tree)bridges.get(i);
members.enterOrOverload(bridge.symbol());
}
clazz.updateInfo(Type.compoundType(parts, members, info.symbol()));
break;
default:
throw Debug.abort("class = " + Debug.show(clazz) + ", " +
"info = " + Debug.show(info));
}
}
bridges = savedBridges;
bridgeSyms = savedBridgeSyms;
return (Template) copy.Template(templ, bases1, body1.toArray())
.setType(templ.type.erasure());
}
/** Transform without keeping the previous transform's contract.
*/
Tree transformLhs(Tree tree) {
Tree tree1;
switch (tree) {
case Ident(_):
tree1 = tree;
break;
case Select(Tree qual, Name name):
Tree qual1 = transform(qual);
if (isUnboxed(qual1.type))
if (!isUnboxedArray(qual1.type) || tree.symbol() == definitions.ARRAY_CLASS)
qual1 = box(qual1);
tree1 = copy.Select(tree, qual1, name);
break;
default:
throw Debug.abort("illegal case", tree);
}
if (global.debug) global.log("id: " + tree1.symbol() + ":" + tree1.symbol().type().erasure());//debug
return tree1.setType(tree1.symbol().type().erasure());
}
/** Transform with prototype
*/
Tree transform(Tree expr, Type pt) {
return coerce(transform(expr), pt);
}
/** Transform an array apply */
Tree transformApply(Tree tree) {
switch (tree) {
case Apply(Select(Tree array, _), Tree[] args):
assert args.length == 1 : Debug.show(args);
Type finalType = tree.type().erasure();
array = transform(array);
Symbol symbol = primitives.getArrayGetSymbol(array.type());
Tree method = gen.mkRef(tree.pos,primitives.RUNTIME_TYPE,symbol);
args = new Tree[] { array, transform(args[0]) };
return coerce(gen.Apply(tree.pos, method, args), finalType);
default:
throw Debug.abort("illegal case", tree);
}
}
/** Transform an array update */
Tree transformUpdate(Tree tree) {
switch (tree) {
case Apply(Select(Tree array, _), Tree[] args):
assert args.length == 2 : Debug.show(args);
array = transform(array);
Symbol symbol = primitives.getArraySetSymbol(array.type());
Tree method = gen.mkRef(tree.pos,primitives.RUNTIME_TYPE,symbol);
args = new Tree[] { array, transform(args[0]),transform(args[1]) };
return gen.Apply(tree.pos, method, args);
default:
throw Debug.abort("illegal case", tree);
}
}
private Tree getQualifier(Symbol currentClass, Tree tree) {
switch (tree) {
case Select(Tree qual, _):
return qual;
case Ident(_):
assert currentClass != null;
if (currentClass.isSubClass(tree.symbol().owner()))
return gen.This(tree.pos, currentClass);
else
throw Debug.abort("no qualifier for tree", tree);
default:
throw Debug.abort("no qualifier for tree", tree);
}
}
private Tree genNewArray(int pos, Tree size, Type elemtp) {
Tree classname = make.Literal(pos,
primitives.getNameForClassForName(elemtp))
.setType(definitions.JAVA_STRING_TYPE);
Tree array = gen.Apply(pos,
gen.mkRef(pos, primitives.RUNTIME_TYPE, primitives.NEW_OARRAY),
new Tree[] {size, classname});
Tree cast = gen.TypeApply(pos, gen.Select(pos, array, definitions.AS),
new Tree[] {gen.mkType(pos, Type.UnboxedArrayType(elemtp))});
return gen.Apply(cast, new Tree[0]);
}
private Tree genNewArray(int pos, Tree size, int kind) {
return gen.Apply(pos,
gen.mkRef(pos,
primitives.RUNTIME_TYPE, primitives.getNewArraySymbol(kind)),
new Tree[] {size});
}
}
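To make point (2) of the class Javadoc concrete, here is a hedged Java sketch of the bridge idea; it is illustrative only and not taken from this compiler source.
// Illustrative sketch (not from this file): after erasure an overriding method can end up
// with a different erased signature than the method it overrides, so a synthetic
// forwarding bridge is generated, the same shape of definition that addBridge() builds above.
interface Cmp<T> { boolean less(T a, T b); }           // erases to less(Object, Object)
class IntCmp implements Cmp<Integer> {
    public boolean less(Integer a, Integer b) {        // erased signature uses Integer
        return a < b;
    }
    // The generated bridge would look roughly like:
    //   public boolean less(Object a, Object b) {
    //       return less((Integer) a, (Integer) b);    // cast parameters, then forward
    //   }
}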
|
sources/scalac/transformer/Erasure.java
|
/* ____ ____ ____ ____ ______ *\
** / __// __ \/ __// __ \/ ____/ SOcos COmpiles Scala **
** __\_ \/ /_/ / /__/ /_/ /\_ \ (c) 2002, LAMP/EPFL **
** /_____/\____/\___/\____/____/ **
\* */
// $OldId: Erasure.java,v 1.48 2003/01/16 14:21:19 schinz Exp $
// $Id$
package scalac.transformer;
import java.util.HashMap;
import scalac.Global;
import scalac.PhaseDescriptor;
import scalac.Unit;
import scalac.ast.Tree;
import scalac.ast.Tree.Template;
import scalac.ast.Tree.TypeDef;
import scalac.ast.Tree.ValDef;
import scalac.ast.TreeList;
import scalac.ast.Transformer;
import scalac.symtab.Definitions;
import scalac.symtab.Kinds;
import scalac.symtab.Type;
import scalac.symtab.TypeTags;
import scalac.symtab.Modifiers;
import scalac.symtab.Scope;
import scalac.symtab.SymSet;
import scalac.symtab.Symbol;
import scalac.backend.Primitives;
import scalac.util.Name;
import scalac.util.Names;
import scalac.util.Debug;
/** A transformer for type erasure and bridge building
*
* @author Martin Odersky
* @version 1.0
*
* What it does:
* (1) Map every type to its erasure.
* (2) If method A overrides a method B, and the erased type ETA of A is
* different from the erased type ETB of B seen as a member of A's class,
* add a bridge method with the same name as A,B, with signature ETB
* which calls A after casting parameters.
*/
public class Erasure extends Transformer implements Modifiers {
private final Global global;
private final Definitions definitions;
private final Primitives primitives;
private final boolean noTyped;
private Unit unit;
public Erasure(Global global, PhaseDescriptor descr) {
super(global, descr);
this.global = global;
this.definitions = global.definitions;
this.primitives = global.primitives;
this.noTyped = global.target != global.TARGET_JAVA;
}
public void apply(Unit unit) {
this.unit = unit;
unit.body = transform(unit.body);
}
//////////////////////////////////////////////////////////////////////////////////
// Box/Unbox and Coercions
/////////////////////////////////////////////////////////////////////////////////
boolean isUnboxed(Type type) {
switch (type) {
case UnboxedType(_): case UnboxedArrayType(_): return true;
default: return false;
}
}
boolean isUnboxedArray(Type type) {
switch (type) {
case UnboxedArrayType(_): return true;
default: return false;
}
}
boolean isBoxed(Type type) {
return type.unbox() != type || type.symbol().fullName() == Names.scala_Array;
}
Type boxedType(Type tp) {
switch (tp) {
case UnboxedType(int kind):
return definitions.getType(Type.boxedFullName(kind));
case UnboxedArrayType(Type elemtp):
return definitions.arrayType(boxedType(elemtp));
default:
return tp;
}
}
Symbol boxSym(Type unboxedtp) {
return primitives.getBoxValueSymbol(unboxedtp);
}
/** Emit `scala.RunTime.box(tree)' or
* `{ tree ; scala.RunTime.box() }' if type of `tree' is `void'.
*/
Tree box(Tree tree) {
Tree boxtree = gen.mkRef(tree.pos, primitives.RUNTIME_TYPE, boxSym(tree.type));
switch (tree.type) {
case UnboxedType(int kind):
if (kind == TypeTags.UNIT)
return gen.Block(
tree.pos, new Tree[]{tree, gen.Apply(boxtree, new Tree[0])});
}
return gen.Apply(boxtree, new Tree[]{tree});
}
 /** The symbol of the unbox method corresponding to unboxed type `unboxedtp'
*/
Symbol unboxSym(Type unboxedtp) {
return primitives.getUnboxValueSymbol(unboxedtp);
}
/** Emit tree.asType() or tree.asTypeArray(), where pt = Type or pt = Type[].
*/
Tree unbox(Tree tree, Type pt) {
Tree sel = gen.Select(tree, unboxSym(pt));
return gen.Apply(sel, new Tree[0]);
}
/** Generate a select from an unboxed type.
*/
public Tree unboxedSelect(Tree qual, Symbol sym) {
return make.Select(qual.pos, qual, sym.name)
.setSymbol(sym)
// !!! .setType(Prefix.TypePrefix(boxedType(qual.type)).memberType(sym).erasure());
.setType(Type.singleType(boxedType(qual.type),sym).erasure());
}
/** Subclass relation for class types; empty for other types.
*/
boolean isSubClass(Type tp1, Type tp2) {
Symbol sym1 = tp1.symbol();
Symbol sym2 = tp2.symbol();
return sym1 != null && sym2 != null && sym1.isSubClass(sym2);
}
/** Subtyping relation on erased types.
*/
boolean isSubType(Type tp1, Type tp2) {
if (tp1.isSameAs(tp2)) return true;
switch (tp2) {
case UnboxedType(_):
return tp1.isSubType(tp2);
case UnboxedArrayType(Type elemtp2):
switch (tp1) {
case UnboxedArrayType(Type elemtp1):
return !(elemtp1 instanceof Type.UnboxedType) &&
isSubType(elemtp1, elemtp2);
}
}
return isSubClass(tp1, tp2);
}
Tree coerce(Tree tree, Type pt) {
return isSubType(tree.type, pt) ? tree : cast(tree, pt);
}
Tree cast(Tree tree, Type pt) {
if (global.debug) global.log("cast " + tree + ":" + tree.type + " to " + pt);//debug
if (tree.type.isSameAs(pt)) {
return tree;
} else if (isSubType(tree.type, pt)) {
return noTyped ? tree : gen.Typed(tree, pt);
} else if (isUnboxed(tree.type) && !isUnboxed(pt)) {
return cast(box(tree), pt);
} else if ((isUnboxedArray(tree.type)
|| (tree.type.symbol() == definitions.ANY_CLASS))
&& isUnboxedArray(pt)) {
return
make.Apply(tree.pos,
make.TypeApply(tree.pos,
unboxedSelect(tree, definitions.AS),
new Tree[]{gen.mkType(tree.pos, pt)})
.setType(new Type.MethodType(Symbol.EMPTY_ARRAY, pt)),
new Tree[0])
.setType(pt);
} else if (!isUnboxed(tree.type) && isUnboxed(pt)) {
if (isBoxed(tree.type)) {
return coerce(unbox(tree, pt), pt);
} else {
Type bt = boxedType(pt);
while (isBoxed(bt.parents()[0])) {
bt = bt.parents()[0];
}
return cast(coerce(tree, bt), pt);
}
} else if (isUnboxed(tree.type) && isUnboxed(pt)) {
return gen.Apply(
unboxedSelect(box(tree), unboxSym(pt)),
new Tree[0]);
} else if (!isUnboxed(tree.type) && !isUnboxed(pt) ||
isUnboxedArray(tree.type) && isUnboxedArray(pt)) {
return
gen.Apply(
gen.TypeApply(
gen.Select(tree, definitions.AS),
new Tree[]{gen.mkType(tree.pos, pt)}),
new Tree[0]);
} else {
throw Debug.abort("cannot cast " + tree.type + " to " + pt);
}
}
//////////////////////////////////////////////////////////////////////////////////
// Bridge Building
/////////////////////////////////////////////////////////////////////////////////
private TreeList bridges;
private HashMap bridgeSyms;
/** Add bridge which Java-overrides `sym1' and which forwards to `sym'
*/
public void addBridge(Symbol sym, Symbol sym1) {
Type bridgeType = sym1.type().erasure();
// create bridge symbol and add to bridgeSyms(sym)
// or return if bridge with required type already exists for sym.
SymSet bridgesOfSym = (SymSet) bridgeSyms.get(sym);
if (bridgesOfSym == null) bridgesOfSym = SymSet.EMPTY;
Symbol[] brs = bridgesOfSym.toArray();
for (int i = 0; i < brs.length; i++) {
if (brs[i].type().isSameAs(bridgeType)) return;
}
Symbol bridgeSym = sym.cloneSymbol();
bridgeSym.flags |= (SYNTHETIC | BRIDGE);
bridgeSym.flags &= ~JAVA;
bridgesOfSym = bridgesOfSym.incl(bridgeSym);
bridgeSyms.put(sym, bridgesOfSym);
// check that there is no overloaded symbol with same erasure as bridge
Symbol overSym = sym.owner().members().lookup(sym.name);
switch (overSym.type()) {
case OverloadedType(Symbol[] alts, Type[] alttypes):
for (int i = 0; i < alts.length; i++) {
if (sym != alts[i] && bridgeType.isSameAs(alttypes[i].erasure())) {
unit.error(sym.pos, "overlapping overloaded alternatives; " +
"overridden " + sym1 + sym1.locationString() +
" has same erasure as " + alts[i] +
alttypes[i] + alts[i].locationString());
}
}
}
switch (bridgeType) {
case MethodType(Symbol[] params, Type restp):
// assign to bridge symbol its bridge type
// where owner of all parameters is bridge symbol itself.
Symbol[] params1 = new Symbol[params.length];
for (int i = 0; i < params.length; i++) {
params1[i] = params[i].cloneSymbol();
params1[i].setOwner(bridgeSym);
}
bridgeSym.setType(Type.MethodType(params1, restp));
// create bridge definition
Type symtype = sym.type().erasure();
switch (symtype) {
case MethodType(Symbol[] symparams, Type symrestp):
assert params1.length == symparams.length;
Tree[] args = new Tree[params1.length];
for (int i = 0; i < args.length; i++) {
args[i] = cast(gen.Ident(params1[i]), symparams[i].type());
}
Tree fwd = make.Apply(sym.pos, gen.Ident(sym).setType(symtype), args)
.setType(symrestp);
bridges.append(gen.DefDef(bridgeSym, coerce(fwd, restp)));
return;
}
}
throw Debug.abort("bad bridge types " + bridgeType + "," + sym.type().erasure());
}
public void addBridges(Symbol sym) {
Symbol c = sym.owner();
if (c.isClass() && !c.isInterface()) {
Type[] basetypes = c.parents();
//System.out.println("trying " + c + " <= " + ArrayApply.toString(c.basetypes()));//DEBUG
for (int i = 0; i < basetypes.length; i++) {
Symbol sym1 = sym.overriddenSymbol(basetypes[i]);
//if (sym1.kind != NONE) System.out.println("overridden: " + sym1 + sym1.locationString() + " by " + sym + sym.locationString());//DEBUG
if (sym1.kind != Kinds.NONE &&
!sym1.type().erasure().isSameAs(sym.type().erasure())) {
//System.out.println("add bridge: " + sym1 + sym1.locationString() + " by " + sym + sym.locationString());//DEBUG
addBridge(sym, sym1);
}
}
}
}
//////////////////////////////////////////////////////////////////////////////////
// Transformer
/////////////////////////////////////////////////////////////////////////////////
 /** Contract: every node needs to be transformed so that its type is the
 * erasure of the node's original type. The only exceptions are functions;
* these are mapped to the erasure of the function symbol's type.
*/
Symbol currentClass = null;
public Tree transform(Tree tree, boolean eraseFully) {
assert tree.type != null : tree;
Type owntype = eraseFully ? tree.type.fullErasure() : tree.type.erasure();
switch (tree) {
case ClassDef(int mods, Name name, TypeDef[] tparams, ValDef[][] vparams, Tree tpe, Template impl):
Symbol oldCurrentClass = currentClass;
currentClass = tree.symbol();
Tree newTree =
copy.ClassDef(tree, mods, name, new TypeDef[0],
transform(vparams), tpe, transform(impl, tree.symbol()))
.setType(owntype);
currentClass = oldCurrentClass;
return newTree;
case DefDef(int mods, Name name, TypeDef[] tparams, ValDef[][] vparams, Tree tpe, Tree rhs):
addBridges(tree.symbol());
Tree tpe1 = gen.mkType(tpe.pos, tpe.type.fullErasure());
Tree rhs1 = (rhs == Tree.Empty) ? rhs : transform(rhs, tpe1.type);
return copy.DefDef(
tree, mods, name, new TypeDef[0], transform(vparams), tpe1, rhs1)
.setType(owntype);
case ValDef(int mods, Name name, Tree tpe, Tree rhs):
Tree tpe1 = transform(tpe);
Tree rhs1 = (rhs == Tree.Empty) ? rhs : transform(rhs, tpe1.type);
return copy.ValDef(
tree, mods, name, tpe1, rhs1)
.setType(owntype);
case TypeDef(_, _, _, _):
// eliminate
return Tree.Empty;
case Block(Tree[] stats):
Tree[] newStats = new Tree[stats.length];
for (int i = 0; i < stats.length; ++i)
newStats[i] = transform(stats[i], true);
return copy.Block(tree, newStats).setType(owntype.fullErasure());
case Assign(Tree lhs, Tree rhs):
Tree lhs1 = transformLhs(lhs);
Tree rhs1 = transform(rhs, lhs1.type);
return copy.Assign(tree, lhs1, rhs1).setType(owntype.fullErasure());
case If(Tree cond, Tree thenp, Tree elsep):
Tree cond1 = transform(cond, Type.unboxedType(TypeTags.BOOLEAN));
Tree thenp1 = transform(thenp, owntype);
Tree elsep1 = (elsep == Tree.Empty) ? elsep : transform(elsep, owntype);
return copy.If(tree, cond1, thenp1, elsep1).setType(owntype);
case New(Template templ):
if (tree.type.symbol() == definitions.UNIT_CLASS)
// !!! return Tree.Literal(UNIT, null).setType(owntype);
throw Debug.abort("found unit literal");
switch (owntype) {
case UnboxedArrayType(Type elemtp):
Tree apply = transform(templ.parents[0]);
switch (apply) {
case Apply(_, Tree[] args):
assert args.length == 1;
switch (elemtp) {
case UnboxedType(int kind):
return genNewArray(tree.pos,args[0],kind);
default:
return genNewArray(tree.pos,args[0],elemtp);
}
default:
throw Debug.abort("illegal case", apply);
}
}
return super.transform(tree).setType(owntype);
case Typed(Tree expr, Tree tpe):
// coerce expr to tpe
Tree tpe1 = transform(tpe);
Tree expr1 = transform(expr, tpe1.type);
return noTyped ? expr1 : copy.Typed(tree, expr1, tpe1).setType(owntype);
case TypeApply(Tree fun, Tree[] args):
Symbol sym = fun.symbol();
if (sym == definitions.AS || sym == definitions.IS) {
Type tp = args[0].type.erasure();
if (isUnboxed(tp)) {
Tree qual1 = transform(getQualifier(currentClass, fun));
if (isUnboxed(qual1.type)) qual1 = box(qual1);
Symbol primSym = (sym == definitions.AS)
? primitives.getUnboxValueSymbol(tp)
: primitives.getInstanceTestSymbol(tp);
return gen.Select(qual1, primSym);
} else
return copy.TypeApply(tree, transform(fun), transform(args))
.setType(owntype);
} else
return transform(fun);
case Apply(Tree fun, Tree[] args):
switch (fun) {
case Select(Tree array, _):
if (isUnboxedArray(array.type().erasure())) {
switch (primitives.getPrimitive(fun.symbol())) {
case APPLY: return transformApply(tree);
case UPDATE: return transformUpdate(tree);
}
}
}
Tree fun1 = transform(fun);
if (fun1.symbol() == definitions.NULL) return fun1.setType(owntype);
if (global.debug) global.log("fn: " + fun1.symbol() + ":" + fun1.type);//debug
switch (fun1.type) {
case MethodType(Symbol[] params, Type restpe):
Tree[] args1 = args;
for (int i = 0; i < args.length; i++) {
Tree arg = args[i];
Type pt1 = params[i].type().erasure();
Tree arg1 = cast(transform(arg, pt1), pt1);
if (arg1 != arg && args1 == args) {
args1 = new Tree[args.length];
System.arraycopy(args, 0, args1, 0, i);
}
args1[i] = arg1;
}
return coerce(copy.Apply(tree, fun1, args1).setType(restpe), owntype);
default:
global.debugPrinter.print(fun1);
throw Debug.abort("bad method type: " + fun1.type + " " + fun1.symbol());
}
case Select(_, _):
case Ident(_):
Tree tree1 = transformLhs(tree);
//global.log("id: " + tree1+": "+tree1.type+" -> "+owntype);//DEBUG
return (tree1.type instanceof Type.MethodType) ? tree1
: coerce(tree1, owntype);
case AppliedType(_, _):
return gen.mkType(tree.pos, owntype);
default:
return super.transform(tree).setType(owntype);
}
}
public Tree transform(Tree tree) {
return transform(tree, false);
}
public Template transform(Template templ, Symbol clazz) {
TreeList savedBridges = bridges;
HashMap savedBridgeSyms = bridgeSyms;
bridges = new TreeList();
bridgeSyms = new HashMap();
Tree[] bases1 = transform(templ.parents);
TreeList body1 = new TreeList(transform(templ.body));
body1.append(bridges);
if (bridges.length() > 0) {
switch (clazz.nextInfo()) {
case CompoundType(Type[] basetypes, Scope members):
members = new Scope(members);
for (int i = 0; i < bridges.length(); i++) {
Tree bridge = (Tree)bridges.get(i);
members.enterOrOverload(bridge.symbol());
}
clazz.updateInfo(Type.CompoundType(basetypes, members));
break;
default:
throw Debug.abort("class = " + Debug.show(clazz) + ", " +
"info = " + Debug.show(clazz.info()));
}
}
bridges = savedBridges;
bridgeSyms = savedBridgeSyms;
return (Template) copy.Template(templ, bases1, body1.toArray())
.setType(templ.type.erasure());
}
/** Transform without keeping the previous transform's contract.
*/
Tree transformLhs(Tree tree) {
Tree tree1;
switch (tree) {
case Ident(_):
tree1 = tree;
break;
case Select(Tree qual, Name name):
Tree qual1 = transform(qual);
if (isUnboxed(qual1.type))
if (!isUnboxedArray(qual1.type) || tree.symbol() == definitions.ARRAY_CLASS)
qual1 = box(qual1);
tree1 = copy.Select(tree, qual1, name);
break;
default:
throw Debug.abort("illegal case", tree);
}
if (global.debug) global.log("id: " + tree1.symbol() + ":" + tree1.symbol().type().erasure());//debug
return tree1.setType(tree1.symbol().type().erasure());
}
/** Transform with prototype
*/
Tree transform(Tree expr, Type pt) {
return coerce(transform(expr), pt);
}
/** Transform an array apply */
Tree transformApply(Tree tree) {
switch (tree) {
case Apply(Select(Tree array, _), Tree[] args):
assert args.length == 1 : Debug.show(args);
Type finalType = tree.type().erasure();
array = transform(array);
Symbol symbol = primitives.getArrayGetSymbol(array.type());
Tree method = gen.mkRef(tree.pos,primitives.RUNTIME_TYPE,symbol);
args = new Tree[] { array, transform(args[0]) };
return coerce(gen.Apply(tree.pos, method, args), finalType);
default:
throw Debug.abort("illegal case", tree);
}
}
/** Transform an array update */
Tree transformUpdate(Tree tree) {
switch (tree) {
case Apply(Select(Tree array, _), Tree[] args):
assert args.length == 2 : Debug.show(args);
array = transform(array);
Symbol symbol = primitives.getArraySetSymbol(array.type());
Tree method = gen.mkRef(tree.pos,primitives.RUNTIME_TYPE,symbol);
args = new Tree[] { array, transform(args[0]),transform(args[1]) };
return gen.Apply(tree.pos, method, args);
default:
throw Debug.abort("illegal case", tree);
}
}
private Tree getQualifier(Symbol currentClass, Tree tree) {
switch (tree) {
case Select(Tree qual, _):
return qual;
case Ident(_):
assert currentClass != null;
if (currentClass.isSubClass(tree.symbol().owner()))
return gen.This(tree.pos, currentClass);
else
throw Debug.abort("no qualifier for tree", tree);
default:
throw Debug.abort("no qualifier for tree", tree);
}
}
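    // Added illustrative note (not part of the original source): for a reference element type,
    // this helper builds the erased equivalent of `new elemtp[size]`: a call to the runtime
    // NEW_OARRAY symbol with the element's class name, whose result is cast (via AS) to
    // UnboxedArrayType(elemtp) so the surrounding erased tree sees an unboxed array.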
private Tree genNewArray(int pos, Tree size, Type elemtp) {
Tree classname = make.Literal(pos,
primitives.getNameForClassForName(elemtp))
.setType(definitions.JAVA_STRING_TYPE);
Tree array = gen.Apply(pos,
gen.mkRef(pos, primitives.RUNTIME_TYPE, primitives.NEW_OARRAY),
new Tree[] {size, classname});
Tree cast = gen.TypeApply(pos, gen.Select(pos, array, definitions.AS),
new Tree[] {gen.mkType(pos, Type.UnboxedArrayType(elemtp))});
return gen.Apply(cast, new Tree[0]);
}
private Tree genNewArray(int pos, Tree size, int kind) {
return gen.Apply(pos,
gen.mkRef(pos,
primitives.RUNTIME_TYPE, primitives.getNewArraySymbol(kind)),
new Tree[] {size});
}
}
|
- Fixed construction of CompoundType
|
sources/scalac/transformer/Erasure.java
|
- Fixed construction of CompoundType
|
<ide><path>sources/scalac/transformer/Erasure.java
<ide> TreeList body1 = new TreeList(transform(templ.body));
<ide> body1.append(bridges);
<ide> if (bridges.length() > 0) {
<del> switch (clazz.nextInfo()) {
<del> case CompoundType(Type[] basetypes, Scope members):
<add> Type info = clazz.nextInfo();
<add> switch (info) {
<add> case CompoundType(Type[] parts, Scope members):
<ide> members = new Scope(members);
<ide> for (int i = 0; i < bridges.length(); i++) {
<ide> Tree bridge = (Tree)bridges.get(i);
<ide> members.enterOrOverload(bridge.symbol());
<ide> }
<del> clazz.updateInfo(Type.CompoundType(basetypes, members));
<add> clazz.updateInfo(Type.compoundType(parts, members, info.symbol()));
<ide> break;
<ide> default:
<ide> throw Debug.abort("class = " + Debug.show(clazz) + ", " +
<del> "info = " + Debug.show(clazz.info()));
<add> "info = " + Debug.show(info));
<ide> }
<ide> }
<ide> bridges = savedBridges;
|
|
Java
|
apache-2.0
|
1c17e82afc7e9ac7ff28246c108d4bf8d051fec8
| 0 |
dimagi/commcare-android,dimagi/commcare-android,dimagi/commcare-android,dimagi/commcare-android,dimagi/commcare-android,dimagi/commcare-android
|
package org.commcare.dalvik.services;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.app.Service;
import android.content.Intent;
import android.os.Binder;
import android.os.IBinder;
import android.support.v4.app.NotificationCompat;
import android.text.format.DateFormat;
import android.widget.RemoteViews;
import net.sqlcipher.database.SQLiteDatabase;
import org.commcare.android.crypt.CipherPool;
import org.commcare.android.crypt.CryptUtil;
import org.commcare.android.database.app.models.UserKeyRecord;
import org.commcare.android.database.user.CommCareUserOpenHelper;
import org.commcare.android.database.user.UserSandboxUtils;
import org.commcare.android.database.user.models.User;
import org.commcare.android.javarosa.AndroidLogger;
import org.commcare.android.tasks.DataSubmissionListener;
import org.commcare.android.tasks.ProcessAndSendTask;
import org.commcare.android.util.SessionUnavailableException;
import org.commcare.dalvik.R;
import org.commcare.dalvik.activities.CommCareHomeActivity;
import org.commcare.dalvik.application.CommCareApplication;
import org.commcare.dalvik.preferences.CommCarePreferences;
import org.javarosa.core.services.Logger;
import org.odk.collect.android.listeners.FormSaveCallback;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.util.Date;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.locks.ReentrantLock;
import javax.crypto.Cipher;
import javax.crypto.NoSuchPaddingException;
import javax.crypto.SecretKey;
import javax.crypto.spec.SecretKeySpec;
/**
* The CommCare Session Service is a persistent service which maintains
* a CommCare login session
*
* @author ctsims
*
*/
public class CommCareSessionService extends Service {
private NotificationManager mNM;
/**
* Milliseconds to wait before rechecking if the session is still fresh.
*/
private static final long MAINTENANCE_PERIOD = 1000;
/**
* Session length in MS
*/
private static long sessionLength = 1000 * 60 * 60 * 24;
/**
* Lock that must be held to expire the session. Thus if a task holds it,
* the session remains alive. Allows server syncing tasks to prevent the
* session from expiring and closing the user DB while they are running.
*/
public static final ReentrantLock sessionAliveLock = new ReentrantLock();
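    // Illustrative usage sketch (added comment; it mirrors the tryLock/unlock pattern the
    // maintenance timer uses further down in timeToExpireSession()):
    //   if (CommCareSessionService.sessionAliveLock.tryLock()) {
    //       try { /* work that must not race with session expiry */ }
    //       finally { CommCareSessionService.sessionAliveLock.unlock(); }
    //   }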
private Timer maintenanceTimer;
private CipherPool pool;
private byte[] key = null;
private boolean multimediaIsVerified=false;
private Date sessionExpireDate;
private final Object lock = new Object();
private User user;
private SQLiteDatabase userDatabase;
// Unique Identification Number for the Notification.
// We use it on Notification start, and to cancel it.
private final int NOTIFICATION = org.commcare.dalvik.R.string.notificationtitle;
private final int SUBMISSION_NOTIFICATION = org.commcare.dalvik.R.string.submission_notification_title;
// How long to wait until we force the session to finish logging out. Set
// at 90 seconds to make sure huge forms on slow phones actually get saved
private static final long LOGOUT_TIMEOUT = 1000 * 90;
// The logout process start time, used to wrap up logging out if
// the saving of incomplete forms takes too long
private long logoutStartedAt = -1;
// Once key expiration process starts, we want to call this function to
// save the current form if it exists.
private FormSaveCallback formSaver;
/**
* Class for clients to access. Because we know this service always
* runs in the same process as its clients, we don't need to deal with
* IPC.
*/
public class LocalBinder extends Binder {
public CommCareSessionService getService() {
return CommCareSessionService.this;
}
}
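    // Added note: with this in-process binder, a bound client typically recovers the service
    // inside ServiceConnection.onServiceConnected, e.g.
    //   CommCareSessionService service = ((CommCareSessionService.LocalBinder) binder).getService();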
@Override
public void onCreate() {
mNM = (NotificationManager)getSystemService(NOTIFICATION_SERVICE);
setSessionLength();
pool = new CipherPool() {
/*
* (non-Javadoc)
* @see org.commcare.android.crypt.CipherPool#generateNewCipher()
*/
@Override
public Cipher generateNewCipher() {
synchronized(lock) {
try {
synchronized(key) {
SecretKeySpec spec = new SecretKeySpec(key, "AES");
Cipher decrypter = Cipher.getInstance("AES");
decrypter.init(Cipher.DECRYPT_MODE, spec);
return decrypter;
}
} catch (InvalidKeyException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (NoSuchAlgorithmException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (NoSuchPaddingException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
return null;
}
};
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
// We want this service to continue running until it is explicitly
// stopped, so return sticky.
return START_STICKY;
}
@Override
public void onDestroy() {
// Cancel the persistent notification.
this.stopForeground(true);
}
@Override
public IBinder onBind(Intent intent) {
return mBinder;
}
// This is the object that receives interactions from clients. See
// RemoteService for a more complete example.
private final IBinder mBinder = new LocalBinder();
/**
* Show a notification while this service is running.
*/
private void showLoggedInNotification(User user) {
//We always want this click to simply bring the live stack back to the top
Intent callable = new Intent(this, CommCareHomeActivity.class);
callable.setAction("android.intent.action.MAIN");
callable.addCategory("android.intent.category.LAUNCHER");
// The PendingIntent to launch our activity if the user selects this notification
PendingIntent contentIntent = PendingIntent.getActivity(this, 0, callable, 0);
// Set the icon, scrolling text and timestamp
Notification notification = new NotificationCompat.Builder(this)
.setContentTitle(this.getString(org.commcare.dalvik.R.string.notificationtitle))
.setContentText("Session Expires: " + DateFormat.format("MMM dd h:mmaa", sessionExpireDate))
.setSmallIcon(org.commcare.dalvik.R.drawable.notification)
.setContentIntent(contentIntent)
.build();
if(user != null) {
//Send the notification.
this.startForeground(NOTIFICATION, notification);
}
}
/**
* Notify the user that they've been timed out and need to relog in
*/
private void showLoggedOutNotification() {
this.stopForeground(true);
Intent i = new Intent(this, CommCareHomeActivity.class);
i.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
PendingIntent contentIntent = PendingIntent.getActivity(this, 0, i, PendingIntent.FLAG_UPDATE_CURRENT);
Notification notification = new NotificationCompat.Builder(this)
.setContentTitle(this.getString(R.string.expirenotification))
.setContentText("Click here to log back into your session")
.setSmallIcon(org.commcare.dalvik.R.drawable.notification)
.setContentIntent(contentIntent)
.build();
// Send the notification.
mNM.notify(NOTIFICATION, notification);
}
//Start CommCare Specific Functionality
public SQLiteDatabase getUserDbHandle() {
synchronized(lock){
return userDatabase;
}
}
/**
* (Re-)open user database
*/
public void prepareStorage(byte[] symetricKey, UserKeyRecord record) {
synchronized(lock){
this.key = symetricKey;
pool.init();
if(userDatabase != null && userDatabase.isOpen()) {
userDatabase.close();
}
userDatabase = new CommCareUserOpenHelper(CommCareApplication._(), record.getUuid()).getWritableDatabase(UserSandboxUtils.getSqlCipherEncodedKey(key));
}
}
/**
* Register a user with a session and start the session expiration timer.
     * Assumes user database and key pool have already been set up.
*
* @param user attach this user to the session
*/
public void startSession(User user) {
synchronized(lock){
if(user != null) {
Logger.log(AndroidLogger.TYPE_USER, "login|" + user.getUsername() + "|" + user.getUniqueId());
//Let anyone who is listening know!
Intent i = new Intent("org.commcare.dalvik.api.action.session.login");
this.sendBroadcast(i);
}
this.user = user;
this.sessionExpireDate = new Date(new Date().getTime() + sessionLength);
// Display a notification about us starting. We put an icon in the status bar.
showLoggedInNotification(user);
maintenanceTimer = new Timer("CommCareService");
maintenanceTimer.schedule(new TimerTask() {
@Override
public void run() {
timeToExpireSession();
}
}, MAINTENANCE_PERIOD, MAINTENANCE_PERIOD);
}
}
/**
* If the session has been alive for longer than its specified duration
* then save any open forms and close it down. If data syncing is in
     * progress then don't do anything.
*/
private void timeToExpireSession() {
long currentTime = new Date().getTime();
// If logout process started and has taken longer than the logout
// timeout then wrap-up the process.
if (logoutStartedAt != -1 &&
currentTime > (logoutStartedAt + LOGOUT_TIMEOUT)) {
// Try and grab the logout lock, aborting if synchronization is in
// progress.
if (!CommCareSessionService.sessionAliveLock.tryLock()) {
return;
}
try {
CommCareApplication._().expireUserSession();
} finally {
CommCareSessionService.sessionAliveLock.unlock();
}
} else if (isActive() && logoutStartedAt == -1 &&
(currentTime > sessionExpireDate.getTime() ||
(sessionExpireDate.getTime() - currentTime > sessionLength))) {
// If we haven't started closing the session and we're either past
// the session expire time, or the session expires more than its
// period in the future, we need to log the user out. The second
// case occurs if the system's clock is altered.
// Try and grab the logout lock, aborting if synchronization is in
// progress.
if (!CommCareSessionService.sessionAliveLock.tryLock()) {
return;
}
try {
saveFormAndCloseSession();
} finally {
CommCareSessionService.sessionAliveLock.unlock();
}
showLoggedOutNotification();
}
}
/**
* Notify any open form that it needs to save, then close the key session
* after waiting for the form save to complete/timeout.
*/
private void saveFormAndCloseSession() {
// Remember when we started so that if form saving takes too long, the
// maintenance timer will launch CommCareApplication._().expireUserSession
logoutStartedAt = new Date().getTime();
// save form progress, if any
synchronized(lock) {
if (formSaver != null) {
formSaver.formSaveCallback();
} else {
CommCareApplication._().expireUserSession();
}
}
}
/**
* Allow for the form entry engine to register a method that can be used to
     * save any forms being edited when key expiration begins.
*
* @param callbackObj object with a method for saving the current form
* being edited
*/
public void registerFormSaveCallback(FormSaveCallback callbackObj) {
this.formSaver = callbackObj;
}
/**
* Unregister the form save callback; should occur when there is no longer
* a form open that might need to be saved if the session expires.
*/
public void unregisterFormSaveCallback() {
synchronized(lock) {
this.formSaver = null;
}
}
/**
* Closes the key pool and user database.
*/
public void closeServiceResources() {
synchronized(lock){
if (!isActive()) {
// Since both the FormSaveCallback callback and the maintenance
// timer might call this, only run if it hasn't been called
// before.
return;
}
key = null;
String msg = "Logging out service login";
// Let anyone who is listening know!
Intent i = new Intent("org.commcare.dalvik.api.action.session.logout");
this.sendBroadcast(i);
Logger.log(AndroidLogger.TYPE_MAINTENANCE, msg);
if (user != null) {
if (user.getUsername() != null) {
msg = "Logging out user " + user.getUsername();
}
user = null;
}
if (userDatabase != null) {
if (userDatabase.isOpen()) {
userDatabase.close();
}
userDatabase = null;
}
// timer is null if we aren't actually in the foreground
if (maintenanceTimer != null) {
maintenanceTimer.cancel();
}
logoutStartedAt = -1;
pool.expire();
}
}
/**
* Is the session active? Active sessions have an open key pool and user
* database.
*/
public boolean isActive() {
synchronized(lock){
return (key != null);
}
}
public Cipher getEncrypter() throws SessionUnavailableException {
synchronized(lock){
if(key == null) {
throw new SessionUnavailableException();
}
synchronized(key) {
SecretKeySpec spec = new SecretKeySpec(key, "AES");
try{
Cipher encrypter = Cipher.getInstance("AES");
encrypter.init(Cipher.ENCRYPT_MODE, spec);
return encrypter;
} catch (InvalidKeyException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (NoSuchAlgorithmException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (NoSuchPaddingException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return null;
}
}
}
public CipherPool getDecrypterPool() throws SessionUnavailableException{
synchronized(lock){
if(key == null) {
throw new SessionUnavailableException();
}
return pool;
}
}
public SecretKey createNewSymetricKey() {
return CryptUtil.generateSymetricKey(CryptUtil.uniqueSeedFromSecureStatic(key));
}
public User getLoggedInUser() throws SessionUnavailableException {
if(user == null) {
throw new SessionUnavailableException();
}
return user;
}
public DataSubmissionListener startDataSubmissionListener() {
return this.startDataSubmissionListener(SUBMISSION_NOTIFICATION);
}
public DataSubmissionListener startDataSubmissionListener(final int notificationId) {
return new DataSubmissionListener() {
// START - Submission Listening Hooks
int totalItems = -1;
long currentSize = -1;
long totalSent = -1;
Notification submissionNotification;
int lastUpdate = 0;
@Override
public void beginSubmissionProcess(int totalItems) {
this.totalItems = totalItems;
String text = getSubmissionText(1, totalItems);
// Set the icon, scrolling text and timestamp
submissionNotification = new Notification(org.commcare.dalvik.R.drawable.notification, getTickerText(1, totalItems), System.currentTimeMillis());
submissionNotification.flags |= (Notification.FLAG_NO_CLEAR | Notification.FLAG_ONGOING_EVENT);
//We always want this click to simply bring the live stack back to the top
Intent callable = new Intent(CommCareSessionService.this, CommCareHomeActivity.class);
callable.setAction("android.intent.action.MAIN");
callable.addCategory("android.intent.category.LAUNCHER");
// The PendingIntent to launch our activity if the user selects this notification
//TODO: Put something here that will, I dunno, cancel submission or something? Maybe show it live?
PendingIntent contentIntent = PendingIntent.getActivity(CommCareSessionService.this, 0, callable, 0);
RemoteViews contentView = new RemoteViews(getPackageName(), R.layout.submit_notification);
contentView.setImageViewResource(R.id.image, R.drawable.notification);
contentView.setTextViewText(R.id.submitTitle, getString(notificationId));
contentView.setTextViewText(R.id.progressText, text);
contentView.setTextViewText(R.id.submissionDetails,"0b transmitted");
// Set the info for the views that show in the notification panel.
submissionNotification.setLatestEventInfo(CommCareSessionService.this, getString(notificationId), text, contentIntent);
submissionNotification.contentView = contentView;
if(user != null) {
//Send the notification.
mNM.notify(notificationId, submissionNotification);
}
}
@Override
public void startSubmission(int itemNumber, long length) {
currentSize = length;
submissionNotification.contentView.setTextViewText(R.id.progressText, getSubmissionText(itemNumber + 1, totalItems));
submissionNotification.contentView.setProgressBar(R.id.submissionProgress, 100, 0, false);
mNM.notify(notificationId, submissionNotification);
}
@Override
public void notifyProgress(int itemNumber, long progress) {
int progressPercent = (int)Math.floor((progress * 1.0 / currentSize) * 100);
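                // Added note: notification refreshes are throttled to steps of more than five
                // percentage points (tracked via lastUpdate below) to avoid spamming the
                // NotificationManager on every progress callback.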
if(progressPercent - lastUpdate > 5) {
String progressDetails = "";
if(progress < 1024) {
progressDetails = progress + "b transmitted";
} else if (progress < 1024 * 1024) {
progressDetails = String.format("%1$,.1f", (progress / 1024.0))+ "kb transmitted";
} else {
progressDetails = String.format("%1$,.1f", (progress / (1024.0 * 1024.0)))+ "mb transmitted";
}
int pending = ProcessAndSendTask.pending();
if(pending > 1) {
submissionNotification.contentView.setTextViewText(R.id.submissionsPending, pending -1 + " Pending");
}
submissionNotification.contentView.setTextViewText(R.id.submissionDetails,progressDetails);
submissionNotification.contentView.setProgressBar(R.id.submissionProgress, 100, progressPercent, false);
mNM.notify(notificationId, submissionNotification);
lastUpdate = progressPercent;
}
}
@Override
public void endSubmissionProcess() {
mNM.cancel(notificationId);
submissionNotification = null;
totalItems = -1;
currentSize = -1;
totalSent = -1;
lastUpdate = 0;
}
private String getSubmissionText(int current, int total) {
return current + "/" + total;
}
private String getTickerText(int current, int total) {
return "CommCare submitting " + total +" forms";
}
// END - Submission Listening Hooks
};
}
/**
* Read the login session duration from app preferences and set the session
* length accordingly.
*/
private void setSessionLength(){
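        // Added note: getLoginDuration() presumably returns seconds; multiplying by 1000 stores
        // the value in milliseconds, matching the "Session length in MS" documentation above.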
sessionLength = CommCarePreferences.getLoginDuration() * 1000;
}
public boolean isMultimediaVerified(){
return multimediaIsVerified;
}
public void setMultiMediaVerified(boolean toggle){
multimediaIsVerified = toggle;
}
}
|
app/src/org/commcare/dalvik/services/CommCareSessionService.java
|
package org.commcare.dalvik.services;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.app.Service;
import android.content.Intent;
import android.os.Binder;
import android.os.IBinder;
import android.support.v4.app.NotificationCompat;
import android.text.format.DateFormat;
import android.widget.RemoteViews;
import net.sqlcipher.database.SQLiteDatabase;
import org.commcare.android.crypt.CipherPool;
import org.commcare.android.crypt.CryptUtil;
import org.commcare.android.database.app.models.UserKeyRecord;
import org.commcare.android.database.user.CommCareUserOpenHelper;
import org.commcare.android.database.user.UserSandboxUtils;
import org.commcare.android.database.user.models.User;
import org.commcare.android.javarosa.AndroidLogger;
import org.commcare.android.tasks.DataSubmissionListener;
import org.commcare.android.tasks.ProcessAndSendTask;
import org.commcare.android.util.SessionUnavailableException;
import org.commcare.dalvik.R;
import org.commcare.dalvik.activities.CommCareHomeActivity;
import org.commcare.dalvik.application.CommCareApplication;
import org.commcare.dalvik.preferences.CommCarePreferences;
import org.javarosa.core.services.Logger;
import org.odk.collect.android.listeners.FormSaveCallback;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.util.Date;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.locks.ReentrantLock;
import javax.crypto.Cipher;
import javax.crypto.NoSuchPaddingException;
import javax.crypto.SecretKey;
import javax.crypto.spec.SecretKeySpec;
/**
* The CommCare Session Service is a persistent service which maintains
* a CommCare login session
*
* @author ctsims
*
*/
public class CommCareSessionService extends Service {
private NotificationManager mNM;
/**
* Milliseconds to wait before rechecking if the session is still fresh.
*/
private static final long MAINTENANCE_PERIOD = 1000;
/**
* Session length in MS
*/
private static long sessionLength = 1000 * 60 * 60 * 24;
/**
* Lock that must be held to expire the session. Thus if a task holds it,
* the session remains alive. Allows server syncing tasks to prevent the
* session from expiring and closing the user DB while they are running.
*/
public static final ReentrantLock sessionAliveLock = new ReentrantLock();
private Timer maintenanceTimer;
private CipherPool pool;
private byte[] key = null;
private boolean multimediaIsVerified=false;
private Date sessionExpireDate;
private final Object lock = new Object();
private User user;
private SQLiteDatabase userDatabase;
// Unique Identification Number for the Notification.
// We use it on Notification start, and to cancel it.
private final int NOTIFICATION = org.commcare.dalvik.R.string.notificationtitle;
private final int SUBMISSION_NOTIFICATION = org.commcare.dalvik.R.string.submission_notification_title;
// How long to wait until we force the session to finish logging out. Set
// at 90 seconds to make sure huge forms on slow phones actually get saved
private static final long LOGOUT_TIMEOUT = 1000 * 90;
// The logout process start time, used to wrap up logging out if
// the saving of incomplete forms takes too long
private long logoutStartedAt = -1;
// Once key expiration process starts, we want to call this function to
// save the current form if it exists.
private FormSaveCallback formSaver;
/**
* Class for clients to access. Because we know this service always
* runs in the same process as its clients, we don't need to deal with
* IPC.
*/
public class LocalBinder extends Binder {
public CommCareSessionService getService() {
return CommCareSessionService.this;
}
}
@Override
public void onCreate() {
mNM = (NotificationManager)getSystemService(NOTIFICATION_SERVICE);
setSessionLength();
pool = new CipherPool() {
/*
* (non-Javadoc)
* @see org.commcare.android.crypt.CipherPool#generateNewCipher()
*/
@Override
public Cipher generateNewCipher() {
synchronized(lock) {
try {
synchronized(key) {
SecretKeySpec spec = new SecretKeySpec(key, "AES");
Cipher decrypter = Cipher.getInstance("AES");
decrypter.init(Cipher.DECRYPT_MODE, spec);
return decrypter;
}
} catch (InvalidKeyException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (NoSuchAlgorithmException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (NoSuchPaddingException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
return null;
}
};
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
// We want this service to continue running until it is explicitly
// stopped, so return sticky.
return START_STICKY;
}
@Override
public void onDestroy() {
// Cancel the persistent notification.
this.stopForeground(true);
}
@Override
public IBinder onBind(Intent intent) {
return mBinder;
}
// This is the object that receives interactions from clients. See
// RemoteService for a more complete example.
private final IBinder mBinder = new LocalBinder();
/**
* Show a notification while this service is running.
*/
private void showLoggedInNotification(User user) {
//We always want this click to simply bring the live stack back to the top
Intent callable = new Intent(this, CommCareHomeActivity.class);
callable.setAction("android.intent.action.MAIN");
callable.addCategory("android.intent.category.LAUNCHER");
// The PendingIntent to launch our activity if the user selects this notification
PendingIntent contentIntent = PendingIntent.getActivity(this, 0, callable, 0);
// Set the icon, scrolling text and timestamp
Notification notification = new NotificationCompat.Builder(this)
.setContentTitle(this.getString(org.commcare.dalvik.R.string.notificationtitle))
.setContentText("Session Expires: " + DateFormat.format("MMM dd h:mmaa", sessionExpireDate))
.setSmallIcon(org.commcare.dalvik.R.drawable.notification)
.setContentIntent(contentIntent)
.build();
if(user != null) {
//Send the notification.
this.startForeground(NOTIFICATION, notification);
}
}
/*
* Notify the user that they've been timed out and need to relog in
*/
private void showLoggedOutNotification() {
this.stopForeground(true);
String text = "Click here to log back into your session";
// Set the icon, scrolling text and timestamp
Notification notification = new Notification(org.commcare.dalvik.R.drawable.notification, text, System.currentTimeMillis());
// The PendingIntent to launch our activity if the user selects this notification
Intent i = new Intent(this, CommCareHomeActivity.class);
PendingIntent contentIntent = PendingIntent.getActivity(this, 0, i, PendingIntent.FLAG_ONE_SHOT);
// Set the info for the views that show in the notification panel.
notification.setLatestEventInfo(this, this.getString(org.commcare.dalvik.R.string.expirenotification), text, contentIntent);
// Send the notification.
mNM.notify(NOTIFICATION, notification);
}
//Start CommCare Specific Functionality
public SQLiteDatabase getUserDbHandle() {
synchronized(lock){
return userDatabase;
}
}
/**
* (Re-)open user database
*/
public void prepareStorage(byte[] symetricKey, UserKeyRecord record) {
synchronized(lock){
this.key = symetricKey;
pool.init();
if(userDatabase != null && userDatabase.isOpen()) {
userDatabase.close();
}
userDatabase = new CommCareUserOpenHelper(CommCareApplication._(), record.getUuid()).getWritableDatabase(UserSandboxUtils.getSqlCipherEncodedKey(key));
}
}
/**
* Register a user with a session and start the session expiration timer.
     * Assumes user database and key pool have already been set up.
*
* @param user attach this user to the session
*/
public void startSession(User user) {
synchronized(lock){
if(user != null) {
Logger.log(AndroidLogger.TYPE_USER, "login|" + user.getUsername() + "|" + user.getUniqueId());
//Let anyone who is listening know!
Intent i = new Intent("org.commcare.dalvik.api.action.session.login");
this.sendBroadcast(i);
}
this.user = user;
this.sessionExpireDate = new Date(new Date().getTime() + sessionLength);
// Display a notification about us starting. We put an icon in the status bar.
showLoggedInNotification(user);
maintenanceTimer = new Timer("CommCareService");
maintenanceTimer.schedule(new TimerTask() {
@Override
public void run() {
timeToExpireSession();
}
}, MAINTENANCE_PERIOD, MAINTENANCE_PERIOD);
}
}
/**
* If the session has been alive for longer than its specified duration
* then save any open forms and close it down. If data syncing is in
     * progress then don't do anything.
*/
private void timeToExpireSession() {
long currentTime = new Date().getTime();
// If logout process started and has taken longer than the logout
// timeout then wrap-up the process.
if (logoutStartedAt != -1 &&
currentTime > (logoutStartedAt + LOGOUT_TIMEOUT)) {
// Try and grab the logout lock, aborting if synchronization is in
// progress.
if (!CommCareSessionService.sessionAliveLock.tryLock()) {
return;
}
try {
CommCareApplication._().expireUserSession();
} finally {
CommCareSessionService.sessionAliveLock.unlock();
}
} else if (isActive() && logoutStartedAt == -1 &&
(currentTime > sessionExpireDate.getTime() ||
(sessionExpireDate.getTime() - currentTime > sessionLength))) {
// If we haven't started closing the session and we're either past
// the session expire time, or the session expires more than its
// period in the future, we need to log the user out. The second
// case occurs if the system's clock is altered.
// Try and grab the logout lock, aborting if synchronization is in
// progress.
if (!CommCareSessionService.sessionAliveLock.tryLock()) {
return;
}
try {
saveFormAndCloseSession();
} finally {
CommCareSessionService.sessionAliveLock.unlock();
}
showLoggedOutNotification();
}
}
/**
* Notify any open form that it needs to save, then close the key session
* after waiting for the form save to complete/timeout.
*/
private void saveFormAndCloseSession() {
// Remember when we started so that if form saving takes too long, the
// maintenance timer will launch CommCareApplication._().expireUserSession
logoutStartedAt = new Date().getTime();
// save form progress, if any
synchronized(lock) {
if (formSaver != null) {
formSaver.formSaveCallback();
} else {
CommCareApplication._().expireUserSession();
}
}
}
/**
* Allow for the form entry engine to register a method that can be used to
     * save any forms being edited when key expiration begins.
*
* @param callbackObj object with a method for saving the current form
* being edited
*/
public void registerFormSaveCallback(FormSaveCallback callbackObj) {
this.formSaver = callbackObj;
}
/**
* Unregister the form save callback; should occur when there is no longer
* a form open that might need to be saved if the session expires.
*/
public void unregisterFormSaveCallback() {
synchronized(lock) {
this.formSaver = null;
}
}
/**
* Closes the key pool and user database.
*/
public void closeServiceResources() {
synchronized(lock){
if (!isActive()) {
// Since both the FormSaveCallback callback and the maintenance
// timer might call this, only run if it hasn't been called
// before.
return;
}
key = null;
String msg = "Logging out service login";
// Let anyone who is listening know!
Intent i = new Intent("org.commcare.dalvik.api.action.session.logout");
this.sendBroadcast(i);
Logger.log(AndroidLogger.TYPE_MAINTENANCE, msg);
if (user != null) {
if (user.getUsername() != null) {
msg = "Logging out user " + user.getUsername();
}
user = null;
}
if (userDatabase != null) {
if (userDatabase.isOpen()) {
userDatabase.close();
}
userDatabase = null;
}
// timer is null if we aren't actually in the foreground
if (maintenanceTimer != null) {
maintenanceTimer.cancel();
}
logoutStartedAt = -1;
pool.expire();
}
}
/**
* Is the session active? Active sessions have an open key pool and user
* database.
*/
public boolean isActive() {
synchronized(lock){
return (key != null);
}
}
public Cipher getEncrypter() throws SessionUnavailableException {
synchronized(lock){
if(key == null) {
throw new SessionUnavailableException();
}
synchronized(key) {
SecretKeySpec spec = new SecretKeySpec(key, "AES");
try{
Cipher encrypter = Cipher.getInstance("AES");
encrypter.init(Cipher.ENCRYPT_MODE, spec);
return encrypter;
} catch (InvalidKeyException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (NoSuchAlgorithmException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (NoSuchPaddingException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return null;
}
}
}
public CipherPool getDecrypterPool() throws SessionUnavailableException{
synchronized(lock){
if(key == null) {
throw new SessionUnavailableException();
}
return pool;
}
}
public SecretKey createNewSymetricKey() {
return CryptUtil.generateSymetricKey(CryptUtil.uniqueSeedFromSecureStatic(key));
}
public User getLoggedInUser() throws SessionUnavailableException {
if(user == null) {
throw new SessionUnavailableException();
}
return user;
}
public DataSubmissionListener startDataSubmissionListener() {
return this.startDataSubmissionListener(SUBMISSION_NOTIFICATION);
}
public DataSubmissionListener startDataSubmissionListener(final int notificationId) {
return new DataSubmissionListener() {
// START - Submission Listening Hooks
int totalItems = -1;
long currentSize = -1;
long totalSent = -1;
Notification submissionNotification;
int lastUpdate = 0;
@Override
public void beginSubmissionProcess(int totalItems) {
this.totalItems = totalItems;
String text = getSubmissionText(1, totalItems);
// Set the icon, scrolling text and timestamp
submissionNotification = new Notification(org.commcare.dalvik.R.drawable.notification, getTickerText(1, totalItems), System.currentTimeMillis());
submissionNotification.flags |= (Notification.FLAG_NO_CLEAR | Notification.FLAG_ONGOING_EVENT);
//We always want this click to simply bring the live stack back to the top
Intent callable = new Intent(CommCareSessionService.this, CommCareHomeActivity.class);
callable.setAction("android.intent.action.MAIN");
callable.addCategory("android.intent.category.LAUNCHER");
// The PendingIntent to launch our activity if the user selects this notification
//TODO: Put something here that will, I dunno, cancel submission or something? Maybe show it live?
PendingIntent contentIntent = PendingIntent.getActivity(CommCareSessionService.this, 0, callable, 0);
RemoteViews contentView = new RemoteViews(getPackageName(), R.layout.submit_notification);
contentView.setImageViewResource(R.id.image, R.drawable.notification);
contentView.setTextViewText(R.id.submitTitle, getString(notificationId));
contentView.setTextViewText(R.id.progressText, text);
contentView.setTextViewText(R.id.submissionDetails,"0b transmitted");
// Set the info for the views that show in the notification panel.
submissionNotification.setLatestEventInfo(CommCareSessionService.this, getString(notificationId), text, contentIntent);
submissionNotification.contentView = contentView;
if(user != null) {
//Send the notification.
mNM.notify(notificationId, submissionNotification);
}
}
@Override
public void startSubmission(int itemNumber, long length) {
currentSize = length;
submissionNotification.contentView.setTextViewText(R.id.progressText, getSubmissionText(itemNumber + 1, totalItems));
submissionNotification.contentView.setProgressBar(R.id.submissionProgress, 100, 0, false);
mNM.notify(notificationId, submissionNotification);
}
@Override
public void notifyProgress(int itemNumber, long progress) {
int progressPercent = (int)Math.floor((progress * 1.0 / currentSize) * 100);
if(progressPercent - lastUpdate > 5) {
String progressDetails = "";
if(progress < 1024) {
progressDetails = progress + "b transmitted";
} else if (progress < 1024 * 1024) {
progressDetails = String.format("%1$,.1f", (progress / 1024.0))+ "kb transmitted";
} else {
progressDetails = String.format("%1$,.1f", (progress / (1024.0 * 1024.0)))+ "mb transmitted";
}
int pending = ProcessAndSendTask.pending();
if(pending > 1) {
submissionNotification.contentView.setTextViewText(R.id.submissionsPending, pending -1 + " Pending");
}
submissionNotification.contentView.setTextViewText(R.id.submissionDetails,progressDetails);
submissionNotification.contentView.setProgressBar(R.id.submissionProgress, 100, progressPercent, false);
mNM.notify(notificationId, submissionNotification);
lastUpdate = progressPercent;
}
}
@Override
public void endSubmissionProcess() {
mNM.cancel(notificationId);
submissionNotification = null;
totalItems = -1;
currentSize = -1;
totalSent = -1;
lastUpdate = 0;
}
private String getSubmissionText(int current, int total) {
return current + "/" + total;
}
private String getTickerText(int current, int total) {
return "CommCare submitting " + total +" forms";
}
// END - Submission Listening Hooks
};
}
/**
* Read the login session duration from app preferences and set the session
* length accordingly.
*/
private void setSessionLength(){
sessionLength = CommCarePreferences.getLoginDuration() * 1000;
}
public boolean isMultimediaVerified(){
return multimediaIsVerified;
}
public void setMultiMediaVerified(boolean toggle){
multimediaIsVerified = toggle;
}
}
|
Fix deprecated pinned notification creation
|
app/src/org/commcare/dalvik/services/CommCareSessionService.java
|
Fix deprecated pinned notification creation
|
<ide><path>app/src/org/commcare/dalvik/services/CommCareSessionService.java
<ide> this.startForeground(NOTIFICATION, notification);
<ide> }
<ide> }
<del>
<del> /*
<add>
<add> /**
<ide> * Notify the user that they've been timed out and need to relog in
<ide> */
<ide> private void showLoggedOutNotification() {
<del>
<ide> this.stopForeground(true);
<del>
<del> String text = "Click here to log back into your session";
<del>
<del> // Set the icon, scrolling text and timestamp
<del> Notification notification = new Notification(org.commcare.dalvik.R.drawable.notification, text, System.currentTimeMillis());
<del>
<del> // The PendingIntent to launch our activity if the user selects this notification
<add>
<ide> Intent i = new Intent(this, CommCareHomeActivity.class);
<del>
<del> PendingIntent contentIntent = PendingIntent.getActivity(this, 0, i, PendingIntent.FLAG_ONE_SHOT);
<del>
<del> // Set the info for the views that show in the notification panel.
<del> notification.setLatestEventInfo(this, this.getString(org.commcare.dalvik.R.string.expirenotification), text, contentIntent);
<add> i.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
<add> PendingIntent contentIntent = PendingIntent.getActivity(this, 0, i, PendingIntent.FLAG_UPDATE_CURRENT);
<add>
<add> Notification notification = new NotificationCompat.Builder(this)
<add> .setContentTitle(this.getString(R.string.expirenotification))
<add> .setContentText("Click here to log back into your session")
<add> .setSmallIcon(org.commcare.dalvik.R.drawable.notification)
<add> .setContentIntent(contentIntent)
<add> .build();
<ide>
<ide> // Send the notification.
<ide> mNM.notify(NOTIFICATION, notification);
<del>
<ide> }
<ide>
<ide> //Start CommCare Specific Functionality
|
|
Java
|
mit
|
1a5c207862f2abbe665ad59c4dc6ff088abbcadd
| 0 |
Atti-kmu/Atti-Android
|
package com.atti.atti_android.data;
import android.os.AsyncTask;
import android.util.Log;
import com.atti.atti_android.ssl.ConnectSSLClient;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.protocol.HTTP;
import org.apache.http.util.EntityUtils;
import java.io.IOException;
import java.util.ArrayList;
/**
* Created by BoWoon on 2016-05-11.
*/
public class DataPostThread extends AsyncTask<String, Integer, Void> {
@Override
protected Void doInBackground(String... params) {
HttpClient httpClient = ConnectSSLClient.getHttpClient();
String responseString = null;
String urlString = "http://52.79.147.144/atti/family";
try {
HttpPost httpPost = new HttpPost(urlString);
ArrayList<BasicNameValuePair> nameValuePairs = new ArrayList<BasicNameValuePair>();
nameValuePairs.add(new BasicNameValuePair("Channel", params[0]));
httpPost.setEntity(new UrlEncodedFormEntity(nameValuePairs));
HttpResponse response = httpClient.execute(httpPost);
responseString = EntityUtils.toString(response.getEntity(), HTTP.UTF_8);
Log.i("DataPostThread", String.valueOf(params[0]));
Log.i("response String", responseString);
} catch (ClientProtocolException e) {
Log.e("ClientProtocolException", e.getLocalizedMessage());
e.printStackTrace();
} catch (IOException e) {
Log.e("IOException", e.getLocalizedMessage());
e.printStackTrace();
}
return null;
}
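    // Illustrative usage (added; "channelId" is a hypothetical caller-side variable):
    //   new DataPostThread().execute(channelId);
    // params[0] above is posted as the "Channel" form field to the family endpoint.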
public DataPostThread() {
super();
}
@Override
protected void onPreExecute() {
super.onPreExecute();
}
@Override
protected void onPostExecute(Void s) {
super.onPostExecute(s);
}
@Override
protected void onProgressUpdate(Integer... values) {
super.onProgressUpdate(values);
}
@Override
protected void onCancelled(Void s) {
super.onCancelled(s);
}
@Override
protected void onCancelled() {
super.onCancelled();
}
}
|
app/src/main/java/com/atti/atti_android/data/DataPostThread.java
|
package com.atti.atti_android.data;
import android.os.AsyncTask;
import android.util.Log;
import com.atti.atti_android.ssl.ConnectSSLClient;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.message.BasicNameValuePair;
import java.io.IOException;
import java.util.ArrayList;
/**
* Created by BoWoon on 2016-05-11.
*/
public class DataPostThread extends AsyncTask<String, Integer, Void> {
@Override
protected Void doInBackground(String... params) {
HttpClient httpClient = ConnectSSLClient.getHttpClient();
String responseString = null;
String urlString = "http://52.79.147.144/atti/family";
try {
HttpPost httpPost = new HttpPost(urlString);
ArrayList<BasicNameValuePair> nameValuePairs = new ArrayList<BasicNameValuePair>();
nameValuePairs.add(new BasicNameValuePair("Channel", params[0]));
httpPost.setEntity(new UrlEncodedFormEntity(nameValuePairs));
HttpResponse response = httpClient.execute(httpPost);
// responseString = EntityUtils.toString(response.getEntity(), HTTP.UTF_8);
Log.i("DataPostThread", String.valueOf(params[0]));
} catch (ClientProtocolException e) {
Log.e("ClientProtocolException", e.getLocalizedMessage());
e.printStackTrace();
} catch (IOException e) {
Log.e("IOException", e.getLocalizedMessage());
e.printStackTrace();
}
return null;
}
public DataPostThread() {
super();
}
@Override
protected void onPreExecute() {
super.onPreExecute();
}
@Override
protected void onPostExecute(Void s) {
super.onPostExecute(s);
}
@Override
protected void onProgressUpdate(Integer... values) {
super.onProgressUpdate(values);
}
@Override
protected void onCancelled(Void s) {
super.onCancelled(s);
}
@Override
protected void onCancelled() {
super.onCancelled();
}
}
|
DataPostThread Modify
|
app/src/main/java/com/atti/atti_android/data/DataPostThread.java
|
DataPostThread Modify
|
<ide><path>app/src/main/java/com/atti/atti_android/data/DataPostThread.java
<ide> import org.apache.http.client.entity.UrlEncodedFormEntity;
<ide> import org.apache.http.client.methods.HttpPost;
<ide> import org.apache.http.message.BasicNameValuePair;
<add>import org.apache.http.protocol.HTTP;
<add>import org.apache.http.util.EntityUtils;
<ide>
<ide> import java.io.IOException;
<ide> import java.util.ArrayList;
<ide> httpPost.setEntity(new UrlEncodedFormEntity(nameValuePairs));
<ide>
<ide> HttpResponse response = httpClient.execute(httpPost);
<del>// responseString = EntityUtils.toString(response.getEntity(), HTTP.UTF_8);
<add> responseString = EntityUtils.toString(response.getEntity(), HTTP.UTF_8);
<ide>
<ide> Log.i("DataPostThread", String.valueOf(params[0]));
<add> Log.i("response String", responseString);
<ide> } catch (ClientProtocolException e) {
<ide> Log.e("ClientProtocolException", e.getLocalizedMessage());
<ide> e.printStackTrace();
|
|
Java
|
apache-2.0
|
97a852b5078015baa6d5bbb083c2edff40a49fd1
| 0 |
bwahn/ngrinder,nanpa83/ngrinder,songeunwoo/ngrinder,songeunwoo/ngrinder,bwahn/ngrinder,chengaomin/ngrinder,songeunwoo/ngrinder,songeunwoo/ngrinder,naver/ngrinder,GwonGisoo/ngrinder,bwahn/ngrinder,SRCB-CloudPart/ngrinder,songeunwoo/ngrinder,GwonGisoo/ngrinder,nanpa83/ngrinder,bwahn/ngrinder,ropik/ngrinder,GwonGisoo/ngrinder,chengaomin/ngrinder,chengaomin/ngrinder,SRCB-CloudPart/ngrinder,naver/ngrinder,chengaomin/ngrinder,SRCB-CloudPart/ngrinder,ropik/ngrinder,GwonGisoo/ngrinder,naver/ngrinder,ropik/ngrinder,nanpa83/ngrinder,naver/ngrinder,GwonGisoo/ngrinder,nanpa83/ngrinder,chengaomin/ngrinder,ropik/ngrinder,SRCB-CloudPart/ngrinder,naver/ngrinder,nanpa83/ngrinder,bwahn/ngrinder,SRCB-CloudPart/ngrinder
|
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2011, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.dialect;
import java.sql.Types;
import org.hibernate.cfg.Environment;
import org.hibernate.dialect.function.NoArgSQLFunction;
import org.hibernate.dialect.function.StandardSQLFunction;
import org.hibernate.dialect.function.VarArgsSQLFunction;
import org.hibernate.type.StandardBasicTypes;
/**
* An SQL dialect for CUBRID (8.3.x and later).
*
* @author Seok Jeong Il
*/
public class CUBRIDDialect extends Dialect {
public CUBRIDDialect() {
super();
registerColumnType( Types.BIGINT, "bigint" );
registerColumnType( Types.BIT, "bit(8)" );
registerColumnType( Types.BLOB, "bit varying(65535)" );
registerColumnType( Types.BOOLEAN, "bit(1)");
registerColumnType( Types.CHAR, "char(1)" );
registerColumnType( Types.CLOB, "string" );
registerColumnType( Types.DATE, "date" );
registerColumnType( Types.DECIMAL, "decimal" );
registerColumnType( Types.DOUBLE, "double" );
registerColumnType( Types.FLOAT, "float" );
registerColumnType( Types.INTEGER, "int" );
registerColumnType( Types.NUMERIC, "numeric($p,$s)" );
registerColumnType( Types.REAL, "double" );
registerColumnType( Types.SMALLINT, "smallint" );
registerColumnType( Types.TIME, "time" );
registerColumnType( Types.TIMESTAMP, "timestamp" );
registerColumnType( Types.TINYINT, "smallint" );
registerColumnType( Types.VARBINARY, 2000, "bit varying($l)" );
registerColumnType( Types.VARCHAR, 4000, "varchar($l)" );
getDefaultProperties().setProperty(Environment.USE_STREAMS_FOR_BINARY, "true");
getDefaultProperties().setProperty(Environment.STATEMENT_BATCH_SIZE, DEFAULT_BATCH_SIZE);
registerFunction("ascii", new StandardSQLFunction("ascii", StandardBasicTypes.INTEGER) );
registerFunction("bin", new StandardSQLFunction("bin", StandardBasicTypes.STRING) );
registerFunction("char_length", new StandardSQLFunction("char_length", StandardBasicTypes.LONG) );
registerFunction("character_length", new StandardSQLFunction("character_length", StandardBasicTypes.LONG) );
registerFunction("lengthb", new StandardSQLFunction("lengthb", StandardBasicTypes.LONG) );
registerFunction("lengthh", new StandardSQLFunction("lengthh", StandardBasicTypes.LONG) );
registerFunction("lcase", new StandardSQLFunction("lcase") );
registerFunction("lower", new StandardSQLFunction("lower") );
registerFunction("ltrim", new StandardSQLFunction("ltrim") );
registerFunction("reverse", new StandardSQLFunction("reverse") );
registerFunction("rtrim", new StandardSQLFunction("rtrim") );
registerFunction("trim", new StandardSQLFunction("trim") );
registerFunction("space", new StandardSQLFunction("space", StandardBasicTypes.STRING) );
registerFunction("ucase", new StandardSQLFunction("ucase") );
registerFunction("upper", new StandardSQLFunction("upper") );
registerFunction("abs", new StandardSQLFunction("abs") );
registerFunction("sign", new StandardSQLFunction("sign", StandardBasicTypes.INTEGER) );
registerFunction("acos", new StandardSQLFunction("acos", StandardBasicTypes.DOUBLE) );
registerFunction("asin", new StandardSQLFunction("asin", StandardBasicTypes.DOUBLE) );
registerFunction("atan", new StandardSQLFunction("atan", StandardBasicTypes.DOUBLE) );
registerFunction("cos", new StandardSQLFunction("cos", StandardBasicTypes.DOUBLE) );
registerFunction("cot", new StandardSQLFunction("cot", StandardBasicTypes.DOUBLE) );
registerFunction("exp", new StandardSQLFunction("exp", StandardBasicTypes.DOUBLE) );
registerFunction("ln", new StandardSQLFunction("ln", StandardBasicTypes.DOUBLE) );
registerFunction("log2", new StandardSQLFunction("log2", StandardBasicTypes.DOUBLE) );
registerFunction("log10", new StandardSQLFunction("log10", StandardBasicTypes.DOUBLE) );
registerFunction("pi", new NoArgSQLFunction("pi", StandardBasicTypes.DOUBLE) );
registerFunction("rand", new NoArgSQLFunction("rand", StandardBasicTypes.DOUBLE) );
registerFunction("random", new NoArgSQLFunction("random", StandardBasicTypes.DOUBLE) );
registerFunction("sin", new StandardSQLFunction("sin", StandardBasicTypes.DOUBLE) );
registerFunction("sqrt", new StandardSQLFunction("sqrt", StandardBasicTypes.DOUBLE) );
registerFunction("tan", new StandardSQLFunction("tan", StandardBasicTypes.DOUBLE) );
registerFunction("radians", new StandardSQLFunction("radians", StandardBasicTypes.DOUBLE) );
registerFunction("degrees", new StandardSQLFunction("degrees", StandardBasicTypes.DOUBLE) );
registerFunction("ceil", new StandardSQLFunction("ceil", StandardBasicTypes.INTEGER) );
registerFunction("floor", new StandardSQLFunction("floor", StandardBasicTypes.INTEGER) );
registerFunction("round", new StandardSQLFunction("round") );
registerFunction("datediff", new StandardSQLFunction("datediff", StandardBasicTypes.INTEGER) );
registerFunction("timediff", new StandardSQLFunction("timediff", StandardBasicTypes.TIME) );
registerFunction("date", new StandardSQLFunction("date", StandardBasicTypes.DATE) );
registerFunction("curdate", new NoArgSQLFunction("curdate", StandardBasicTypes.DATE) );
registerFunction("current_date", new NoArgSQLFunction("current_date", StandardBasicTypes.DATE, false) );
registerFunction("sys_date", new NoArgSQLFunction("sys_date", StandardBasicTypes.DATE, false) );
registerFunction("sysdate", new NoArgSQLFunction("sysdate", StandardBasicTypes.DATE, false) );
registerFunction("time", new StandardSQLFunction("time", StandardBasicTypes.TIME) );
registerFunction("curtime", new NoArgSQLFunction("curtime", StandardBasicTypes.TIME) );
registerFunction("current_time", new NoArgSQLFunction("current_time", StandardBasicTypes.TIME, false) );
registerFunction("sys_time", new NoArgSQLFunction("sys_time", StandardBasicTypes.TIME, false) );
registerFunction("systime", new NoArgSQLFunction("systime", StandardBasicTypes.TIME, false) );
registerFunction("timestamp", new StandardSQLFunction("timestamp", StandardBasicTypes.TIMESTAMP) );
registerFunction("current_timestamp", new NoArgSQLFunction("current_timestamp", StandardBasicTypes.TIMESTAMP, false) );
registerFunction("sys_timestamp", new NoArgSQLFunction("sys_timestamp", StandardBasicTypes.TIMESTAMP, false) );
registerFunction("systimestamp", new NoArgSQLFunction("systimestamp", StandardBasicTypes.TIMESTAMP, false) );
registerFunction("localtime", new NoArgSQLFunction("localtime", StandardBasicTypes.TIMESTAMP, false) );
registerFunction("localtimestamp", new NoArgSQLFunction("localtimestamp", StandardBasicTypes.TIMESTAMP, false) );
registerFunction("day", new StandardSQLFunction("day", StandardBasicTypes.INTEGER) );
registerFunction("dayofmonth", new StandardSQLFunction("dayofmonth", StandardBasicTypes.INTEGER) );
registerFunction("dayofweek", new StandardSQLFunction("dayofweek", StandardBasicTypes.INTEGER) );
registerFunction("dayofyear", new StandardSQLFunction("dayofyear", StandardBasicTypes.INTEGER) );
registerFunction("from_days", new StandardSQLFunction("from_days", StandardBasicTypes.DATE) );
registerFunction("from_unixtime", new StandardSQLFunction("from_unixtime", StandardBasicTypes.TIMESTAMP) );
registerFunction("last_day", new StandardSQLFunction("last_day", StandardBasicTypes.DATE) );
registerFunction("minute", new StandardSQLFunction("minute", StandardBasicTypes.INTEGER) );
registerFunction("month", new StandardSQLFunction("month", StandardBasicTypes.INTEGER) );
registerFunction("months_between", new StandardSQLFunction("months_between", StandardBasicTypes.DOUBLE) );
registerFunction("now", new NoArgSQLFunction("now", StandardBasicTypes.TIMESTAMP) );
registerFunction("quarter", new StandardSQLFunction("quarter", StandardBasicTypes.INTEGER) );
registerFunction("second", new StandardSQLFunction("second", StandardBasicTypes.INTEGER) );
registerFunction("sec_to_time", new StandardSQLFunction("sec_to_time", StandardBasicTypes.TIME) );
registerFunction("time_to_sec", new StandardSQLFunction("time_to_sec", StandardBasicTypes.INTEGER) );
registerFunction("to_days", new StandardSQLFunction("to_days", StandardBasicTypes.LONG) );
registerFunction("unix_timestamp", new StandardSQLFunction("unix_timestamp", StandardBasicTypes.LONG) );
registerFunction("utc_date", new NoArgSQLFunction("utc_date", StandardBasicTypes.STRING) );
registerFunction("utc_time", new NoArgSQLFunction("utc_time", StandardBasicTypes.STRING) );
registerFunction("week", new StandardSQLFunction("week", StandardBasicTypes.INTEGER) );
registerFunction("weekday", new StandardSQLFunction("weekday", StandardBasicTypes.INTEGER) );
registerFunction("year", new StandardSQLFunction("year", StandardBasicTypes.INTEGER) );
registerFunction("hex", new StandardSQLFunction("hex", StandardBasicTypes.STRING) );
registerFunction("octet_length", new StandardSQLFunction("octet_length", StandardBasicTypes.LONG) );
registerFunction("bit_length", new StandardSQLFunction("bit_length", StandardBasicTypes.LONG) );
registerFunction("bit_count", new StandardSQLFunction("bit_count", StandardBasicTypes.LONG) );
registerFunction("md5", new StandardSQLFunction("md5", StandardBasicTypes.STRING) );
registerFunction( "concat", new StandardSQLFunction( "concat", StandardBasicTypes.STRING ) );
registerFunction("substring", new StandardSQLFunction("substring", StandardBasicTypes.STRING) );
registerFunction("substr", new StandardSQLFunction("substr", StandardBasicTypes.STRING) );
registerFunction("length", new StandardSQLFunction("length", StandardBasicTypes.INTEGER) );
registerFunction("bit_length",new StandardSQLFunction("bit_length", StandardBasicTypes.INTEGER) );
registerFunction("coalesce", new StandardSQLFunction("coalesce") );
registerFunction("nullif", new StandardSQLFunction("nullif") );
registerFunction("mod", new StandardSQLFunction("mod") );
registerFunction("power", new StandardSQLFunction("power") );
registerFunction("stddev", new StandardSQLFunction("stddev") );
registerFunction("variance", new StandardSQLFunction("variance") );
registerFunction("trunc", new StandardSQLFunction("trunc") );
registerFunction("nvl", new StandardSQLFunction("nvl") );
registerFunction("nvl2", new StandardSQLFunction("nvl2") );
registerFunction("chr", new StandardSQLFunction("chr", StandardBasicTypes.CHARACTER));
registerFunction("to_char", new StandardSQLFunction("to_char", StandardBasicTypes.STRING) );
registerFunction("to_date", new StandardSQLFunction("to_date", StandardBasicTypes.TIMESTAMP));
registerFunction("instr", new StandardSQLFunction("instr", StandardBasicTypes.INTEGER) );
registerFunction("instrb", new StandardSQLFunction("instrb", StandardBasicTypes.INTEGER) );
registerFunction("lpad", new StandardSQLFunction("lpad", StandardBasicTypes.STRING) );
registerFunction("replace", new StandardSQLFunction("replace", StandardBasicTypes.STRING) );
registerFunction("rpad", new StandardSQLFunction("rpad", StandardBasicTypes.STRING) );
registerFunction("translate", new StandardSQLFunction("translate", StandardBasicTypes.STRING) );
registerFunction("add_months", new StandardSQLFunction("add_months", StandardBasicTypes.DATE) );
registerFunction("user", new NoArgSQLFunction("user", StandardBasicTypes.STRING, false) );
registerFunction("rownum", new NoArgSQLFunction("rownum", StandardBasicTypes.LONG, false) );
registerFunction("concat", new VarArgsSQLFunction(StandardBasicTypes.STRING, "", "||", ""));
}
public boolean supportsIdentityColumns() {
return true;
}
public String getIdentityInsertString() {
return "NULL";
}
public boolean supportsColumnCheck() {
return false;
}
public boolean supportsPooledSequences() {
return true;
}
public String getIdentitySelectString() {
return "select last_insert_id()";
}
protected String getIdentityColumnString() {
return "not null auto_increment"; //starts with 1, implicitly
}
/*
* CUBRID supports "ADD [COLUMN | ATTRIBUTE]"
*/
public String getAddColumnString() {
return "add";
}
public String getSequenceNextValString(String sequenceName) {
return "select " + sequenceName + ".next_value from table({1}) as T(X)";
}
public String getCreateSequenceString(String sequenceName) {
return "create serial " + sequenceName;
}
public String getDropSequenceString(String sequenceName) {
return "drop serial " + sequenceName;
}
public String getDropForeignKeyString() {
return " drop foreign key ";
}
public boolean qualifyIndexName() {
return false;
}
public boolean supportsSequences() {
return true;
}
public boolean supportsExistsInSelect() {
return false;
}
public String getQuerySequencesString() {
return "select name from db_serial";
}
/**
	 * The character specific to this dialect used to open a quoted identifier.
* CUBRID supports square brackets (MSSQL style), backticks (MySQL style),
* as well as double quotes (Oracle style).
*
* @return The dialect's specific open quote character.
*/
public char openQuote() {
return '[';
}
public char closeQuote() {
return ']';
}
public String getForUpdateString() {
return " ";
}
public boolean supportsUnionAll() {
return true;
}
public boolean supportsCurrentTimestampSelection() {
return true;
}
public String getCurrentTimestampSelectString() {
return "select now()";
}
public boolean isCurrentTimestampSelectStringCallable() {
return false;
}
public boolean supportsEmptyInList() {
return false;
}
public boolean supportsIfExistsBeforeTableName() {
return true;
}
public boolean supportsTupleDistinctCounts() {
return false;
}
}
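
A minimal sketch of wiring the dialect above into a Hibernate Configuration, for reference; the driver class name and connection settings shown are illustrative placeholders, not values taken from the nGrinder sources:

import org.hibernate.SessionFactory;
import org.hibernate.cfg.Configuration;

public class CubridDialectUsageSketch {
	public static SessionFactory buildSessionFactory() {
		Configuration cfg = new Configuration();
		// Point Hibernate at the dialect defined above.
		cfg.setProperty("hibernate.dialect", "org.hibernate.dialect.CUBRIDDialect");
		// Placeholder connection settings; adjust to the actual CUBRID instance.
		cfg.setProperty("hibernate.connection.driver_class", "cubrid.jdbc.driver.CUBRIDDriver");
		cfg.setProperty("hibernate.connection.url", "jdbc:cubrid:localhost:33000:demodb:::");
		cfg.setProperty("hibernate.connection.username", "dba");
		cfg.setProperty("hibernate.connection.password", "");
		return cfg.buildSessionFactory();
	}
}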
|
ngrinder-controller/src/main/java/org/hibernate/dialect/CUBRIDDialect.java
|
/*
* Copyright (C) 2012 - 2012 NHN Corporation
* All rights reserved.
*
* This file is part of The nGrinder software distribution. Refer to
* the file LICENSE which is part of The nGrinder distribution for
* licensing details. The nGrinder distribution is available on the
* Internet at http://nhnopensource.org/ngrinder
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hibernate.dialect;
import java.sql.Types;
import org.hibernate.cfg.Environment;
import org.hibernate.dialect.function.NoArgSQLFunction;
import org.hibernate.dialect.function.StandardSQLFunction;
import org.hibernate.dialect.function.VarArgsSQLFunction;
import org.hibernate.type.StandardBasicTypes;
/**
* Hibernate CUBRID Dialect.
*
* @author JunHo Yoon
* @since 3.0
*/
public class CUBRIDDialect extends Dialect {
@Override
protected String getIdentityColumnString() {
return "auto_increment"; // starts with 1, implicitly
}
@Override
public String getIdentitySelectString(final String table, final String column, final int type) {
return "select last_insert_id()";
}
private static final int VAR_CHAR_SIZE = 4000;
private static final int VARING_BIT = 2000;
private static final int GET_LIMIT_BUFFER = 2000;
/**
* Constructor.
*/
public CUBRIDDialect() {
super();
registerColumnType(Types.BIT, "bit(8)");
registerColumnType(Types.BIGINT, "numeric(19,0)");
registerColumnType(Types.BOOLEAN, "char(2)");
registerColumnType(Types.SMALLINT, "smallint");
registerColumnType(Types.TINYINT, "smallint");
registerColumnType(Types.INTEGER, "integer");
registerColumnType(Types.CHAR, "char(1)");
registerColumnType(Types.VARCHAR, VAR_CHAR_SIZE, "varchar($l)");
registerColumnType(Types.FLOAT, "float");
registerColumnType(Types.DOUBLE, "double");
registerColumnType(Types.DATE, "date");
registerColumnType(Types.TIME, "time");
registerColumnType(Types.TIMESTAMP, "timestamp");
registerColumnType(Types.VARBINARY, VARING_BIT, "bit varying($l)");
registerColumnType(Types.VARBINARY, "bit varying(2000)");
registerColumnType(Types.NUMERIC, "numeric($p,$s)");
registerColumnType(Types.BLOB, "blob");
registerColumnType(Types.CLOB, "string");
getDefaultProperties().setProperty(Environment.USE_STREAMS_FOR_BINARY, "true");
getDefaultProperties().setProperty(Environment.STATEMENT_BATCH_SIZE, DEFAULT_BATCH_SIZE);
registerFunction("substring", new StandardSQLFunction("substr", StandardBasicTypes.STRING));
registerFunction("trim", new StandardSQLFunction("trim"));
registerFunction("length", new StandardSQLFunction("length", StandardBasicTypes.INTEGER));
registerFunction("bit_length", new StandardSQLFunction("bit_length", StandardBasicTypes.INTEGER));
registerFunction("coalesce", new StandardSQLFunction("coalesce"));
registerFunction("nullif", new StandardSQLFunction("nullif"));
registerFunction("abs", new StandardSQLFunction("abs"));
registerFunction("mod", new StandardSQLFunction("mod"));
registerFunction("upper", new StandardSQLFunction("upper"));
registerFunction("lower", new StandardSQLFunction("lower"));
registerFunction("power", new StandardSQLFunction("power"));
registerFunction("stddev", new StandardSQLFunction("stddev"));
registerFunction("variance", new StandardSQLFunction("variance"));
registerFunction("round", new StandardSQLFunction("round"));
registerFunction("trunc", new StandardSQLFunction("trunc"));
registerFunction("ceil", new StandardSQLFunction("ceil"));
registerFunction("floor", new StandardSQLFunction("floor"));
registerFunction("ltrim", new StandardSQLFunction("ltrim"));
registerFunction("rtrim", new StandardSQLFunction("rtrim"));
registerFunction("nvl", new StandardSQLFunction("nvl"));
registerFunction("nvl2", new StandardSQLFunction("nvl2"));
registerFunction("sign", new StandardSQLFunction("sign", StandardBasicTypes.INTEGER));
registerFunction("chr", new StandardSQLFunction("chr", StandardBasicTypes.CHARACTER));
registerFunction("to_char", new StandardSQLFunction("to_char", StandardBasicTypes.STRING));
registerFunction("to_date", new StandardSQLFunction("to_date", StandardBasicTypes.TIMESTAMP));
registerFunction("last_day", new StandardSQLFunction("last_day", StandardBasicTypes.DATE));
registerFunction("instr", new StandardSQLFunction("instr", StandardBasicTypes.INTEGER));
registerFunction("instrb", new StandardSQLFunction("instrb", StandardBasicTypes.INTEGER));
registerFunction("lpad", new StandardSQLFunction("lpad", StandardBasicTypes.STRING));
registerFunction("replace", new StandardSQLFunction("replace", StandardBasicTypes.STRING));
registerFunction("rpad", new StandardSQLFunction("rpad", StandardBasicTypes.STRING));
registerFunction("substr", new StandardSQLFunction("substr", StandardBasicTypes.STRING));
registerFunction("substrb", new StandardSQLFunction("substrb", StandardBasicTypes.STRING));
registerFunction("translate", new StandardSQLFunction("translate", StandardBasicTypes.STRING));
registerFunction("add_months", new StandardSQLFunction("add_months", StandardBasicTypes.DATE));
registerFunction("months_between", new StandardSQLFunction("months_between", StandardBasicTypes.FLOAT));
registerFunction("current_date", new NoArgSQLFunction("current_date", StandardBasicTypes.DATE, false));
registerFunction("current_time", new NoArgSQLFunction("current_time", StandardBasicTypes.TIME, false));
registerFunction("current_timestamp", new NoArgSQLFunction("current_timestamp", StandardBasicTypes.TIMESTAMP,
false));
registerFunction("sysdate", new NoArgSQLFunction("sysdate", StandardBasicTypes.DATE, false));
registerFunction("systime", new NoArgSQLFunction("systime", StandardBasicTypes.TIME, false));
registerFunction("systimestamp", new NoArgSQLFunction("systimestamp", StandardBasicTypes.TIMESTAMP, false));
registerFunction("user", new NoArgSQLFunction("user", StandardBasicTypes.STRING, false));
registerFunction("rownum", new NoArgSQLFunction("rownum", StandardBasicTypes.LONG, false));
registerFunction("concat", new VarArgsSQLFunction(StandardBasicTypes.STRING, "", "||", ""));
}
@Override
public String getAddColumnString() {
return "add";
}
@Override
public String getSequenceNextValString(final String sequenceName) {
return "select " + sequenceName + ".next_value from table({1}) as T(X)";
}
@Override
public String getCreateSequenceString(final String sequenceName) {
return "create serial " + sequenceName;
}
@Override
public String getDropSequenceString(final String sequenceName) {
return "drop serial " + sequenceName;
}
@Override
public boolean supportsSequences() {
return true;
}
@Override
public String getQuerySequencesString() {
return "select name from db_serial";
}
@Override
public boolean dropConstraints() {
return false;
}
@Override
public boolean supportsLimit() {
return true;
}
@Override
public String getLimitString(final String sql, final boolean hasOffset) {
// CUBRID 8.3.0 support limit
return new StringBuffer(sql.length() + GET_LIMIT_BUFFER).append(sql)
.append(hasOffset ? " limit ?, ?" : " limit ?").toString();
}
@Override
public boolean useMaxForLimit() {
return true;
}
@Override
public boolean forUpdateOfColumns() {
return true;
}
@Override
public char closeQuote() {
return ']';
}
@Override
public char openQuote() {
return '[';
}
@Override
public boolean hasAlterTable() {
return false;
}
@Override
public String getForUpdateString() {
return " ";
}
@Override
public boolean supportsUnionAll() {
return true;
}
@Override
public boolean supportsCommentOn() {
return false;
}
@Override
public boolean supportsTemporaryTables() {
return false;
}
@Override
public boolean supportsCurrentTimestampSelection() {
return true;
}
@Override
public String getCurrentTimestampSelectString() {
return "select systimestamp from table({1}) as T(X)";
}
@Override
public boolean isCurrentTimestampSelectStringCallable() {
return false;
}
@Override
public String toBooleanValueString(final boolean bool) {
return bool ? "1" : "0";
}
}
|
[NGRINDER-140] Block unapproved agent
- update cubrid dialect
|
ngrinder-controller/src/main/java/org/hibernate/dialect/CUBRIDDialect.java
|
[NGRINDER-140] Block unapproved agent - update cubrid dialect
|
<ide><path>ngrinder-controller/src/main/java/org/hibernate/dialect/CUBRIDDialect.java
<ide> /*
<del> * Copyright (C) 2012 - 2012 NHN Corporation
<del> * All rights reserved.
<del> *
<del> * This file is part of The nGrinder software distribution. Refer to
<del> * the file LICENSE which is part of The nGrinder distribution for
<del> * licensing details. The nGrinder distribution is available on the
<del> * Internet at http://nhnopensource.org/ngrinder
<del> *
<del> * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
<del> * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
<del> * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
<del> * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
<del> * COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
<del> * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
<del> * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
<del> * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
<del> * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
<del> * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
<del> * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
<del> * OF THE POSSIBILITY OF SUCH DAMAGE.
<add> * Hibernate, Relational Persistence for Idiomatic Java
<add> *
<add> * Copyright (c) 2011, Red Hat Inc. or third-party contributors as
<add> * indicated by the @author tags or express copyright attribution
<add> * statements applied by the authors. All third-party contributions are
<add> * distributed under license by Red Hat Inc.
<add> *
<add> * This copyrighted material is made available to anyone wishing to use, modify,
<add> * copy, or redistribute it subject to the terms and conditions of the GNU
<add> * Lesser General Public License, as published by the Free Software Foundation.
<add> *
<add> * This program is distributed in the hope that it will be useful,
<add> * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
<add> * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
<add> * for more details.
<add> *
<add> * You should have received a copy of the GNU Lesser General Public License
<add> * along with this distribution; if not, write to:
<add> * Free Software Foundation, Inc.
<add> * 51 Franklin Street, Fifth Floor
<add> * Boston, MA 02110-1301 USA
<ide> */
<add>
<ide> package org.hibernate.dialect;
<ide>
<ide> import java.sql.Types;
<ide> import org.hibernate.type.StandardBasicTypes;
<ide>
<ide> /**
<del> * Hibernate CUBRID Dialect.
<del> *
<del> * @author JunHo Yoon
<del> * @since 3.0
<add> * An SQL dialect for CUBRID (8.3.x and later).
<add> *
<add> * @author Seok Jeong Il
<ide> */
<ide> public class CUBRIDDialect extends Dialect {
<del> @Override
<del> protected String getIdentityColumnString() {
<del> return "auto_increment"; // starts with 1, implicitly
<del> }
<del>
<del> @Override
<del> public String getIdentitySelectString(final String table, final String column, final int type) {
<del> return "select last_insert_id()";
<del> }
<del>
<del> private static final int VAR_CHAR_SIZE = 4000;
<del> private static final int VARING_BIT = 2000;
<del> private static final int GET_LIMIT_BUFFER = 2000;
<del>
<del> /**
<del> * Constructor.
<del> */
<del> public CUBRIDDialect() {
<del> super();
<del>
<del> registerColumnType(Types.BIT, "bit(8)");
<del> registerColumnType(Types.BIGINT, "numeric(19,0)");
<del> registerColumnType(Types.BOOLEAN, "char(2)");
<del> registerColumnType(Types.SMALLINT, "smallint");
<del> registerColumnType(Types.TINYINT, "smallint");
<del> registerColumnType(Types.INTEGER, "integer");
<del> registerColumnType(Types.CHAR, "char(1)");
<del> registerColumnType(Types.VARCHAR, VAR_CHAR_SIZE, "varchar($l)");
<del> registerColumnType(Types.FLOAT, "float");
<del> registerColumnType(Types.DOUBLE, "double");
<del> registerColumnType(Types.DATE, "date");
<del> registerColumnType(Types.TIME, "time");
<del> registerColumnType(Types.TIMESTAMP, "timestamp");
<del> registerColumnType(Types.VARBINARY, VARING_BIT, "bit varying($l)");
<del> registerColumnType(Types.VARBINARY, "bit varying(2000)");
<del> registerColumnType(Types.NUMERIC, "numeric($p,$s)");
<del> registerColumnType(Types.BLOB, "blob");
<del> registerColumnType(Types.CLOB, "string");
<del>
<del> getDefaultProperties().setProperty(Environment.USE_STREAMS_FOR_BINARY, "true");
<del> getDefaultProperties().setProperty(Environment.STATEMENT_BATCH_SIZE, DEFAULT_BATCH_SIZE);
<del>
<del> registerFunction("substring", new StandardSQLFunction("substr", StandardBasicTypes.STRING));
<del> registerFunction("trim", new StandardSQLFunction("trim"));
<del> registerFunction("length", new StandardSQLFunction("length", StandardBasicTypes.INTEGER));
<del> registerFunction("bit_length", new StandardSQLFunction("bit_length", StandardBasicTypes.INTEGER));
<del> registerFunction("coalesce", new StandardSQLFunction("coalesce"));
<del> registerFunction("nullif", new StandardSQLFunction("nullif"));
<del> registerFunction("abs", new StandardSQLFunction("abs"));
<del> registerFunction("mod", new StandardSQLFunction("mod"));
<del> registerFunction("upper", new StandardSQLFunction("upper"));
<del> registerFunction("lower", new StandardSQLFunction("lower"));
<del>
<del> registerFunction("power", new StandardSQLFunction("power"));
<del> registerFunction("stddev", new StandardSQLFunction("stddev"));
<del> registerFunction("variance", new StandardSQLFunction("variance"));
<del> registerFunction("round", new StandardSQLFunction("round"));
<del> registerFunction("trunc", new StandardSQLFunction("trunc"));
<del> registerFunction("ceil", new StandardSQLFunction("ceil"));
<del> registerFunction("floor", new StandardSQLFunction("floor"));
<del> registerFunction("ltrim", new StandardSQLFunction("ltrim"));
<del> registerFunction("rtrim", new StandardSQLFunction("rtrim"));
<del> registerFunction("nvl", new StandardSQLFunction("nvl"));
<del> registerFunction("nvl2", new StandardSQLFunction("nvl2"));
<del> registerFunction("sign", new StandardSQLFunction("sign", StandardBasicTypes.INTEGER));
<del> registerFunction("chr", new StandardSQLFunction("chr", StandardBasicTypes.CHARACTER));
<del> registerFunction("to_char", new StandardSQLFunction("to_char", StandardBasicTypes.STRING));
<del> registerFunction("to_date", new StandardSQLFunction("to_date", StandardBasicTypes.TIMESTAMP));
<del> registerFunction("last_day", new StandardSQLFunction("last_day", StandardBasicTypes.DATE));
<del> registerFunction("instr", new StandardSQLFunction("instr", StandardBasicTypes.INTEGER));
<del> registerFunction("instrb", new StandardSQLFunction("instrb", StandardBasicTypes.INTEGER));
<del> registerFunction("lpad", new StandardSQLFunction("lpad", StandardBasicTypes.STRING));
<del> registerFunction("replace", new StandardSQLFunction("replace", StandardBasicTypes.STRING));
<del> registerFunction("rpad", new StandardSQLFunction("rpad", StandardBasicTypes.STRING));
<del> registerFunction("substr", new StandardSQLFunction("substr", StandardBasicTypes.STRING));
<del> registerFunction("substrb", new StandardSQLFunction("substrb", StandardBasicTypes.STRING));
<del> registerFunction("translate", new StandardSQLFunction("translate", StandardBasicTypes.STRING));
<del> registerFunction("add_months", new StandardSQLFunction("add_months", StandardBasicTypes.DATE));
<del> registerFunction("months_between", new StandardSQLFunction("months_between", StandardBasicTypes.FLOAT));
<del>
<del> registerFunction("current_date", new NoArgSQLFunction("current_date", StandardBasicTypes.DATE, false));
<del> registerFunction("current_time", new NoArgSQLFunction("current_time", StandardBasicTypes.TIME, false));
<del> registerFunction("current_timestamp", new NoArgSQLFunction("current_timestamp", StandardBasicTypes.TIMESTAMP,
<del> false));
<del> registerFunction("sysdate", new NoArgSQLFunction("sysdate", StandardBasicTypes.DATE, false));
<del> registerFunction("systime", new NoArgSQLFunction("systime", StandardBasicTypes.TIME, false));
<del> registerFunction("systimestamp", new NoArgSQLFunction("systimestamp", StandardBasicTypes.TIMESTAMP, false));
<del> registerFunction("user", new NoArgSQLFunction("user", StandardBasicTypes.STRING, false));
<del> registerFunction("rownum", new NoArgSQLFunction("rownum", StandardBasicTypes.LONG, false));
<del> registerFunction("concat", new VarArgsSQLFunction(StandardBasicTypes.STRING, "", "||", ""));
<del> }
<del>
<del> @Override
<del> public String getAddColumnString() {
<del> return "add";
<del> }
<del>
<del> @Override
<del> public String getSequenceNextValString(final String sequenceName) {
<del> return "select " + sequenceName + ".next_value from table({1}) as T(X)";
<del> }
<del>
<del> @Override
<del> public String getCreateSequenceString(final String sequenceName) {
<del> return "create serial " + sequenceName;
<del> }
<del>
<del> @Override
<del> public String getDropSequenceString(final String sequenceName) {
<del> return "drop serial " + sequenceName;
<del> }
<del>
<del> @Override
<del> public boolean supportsSequences() {
<add> public CUBRIDDialect() {
<add> super();
<add>
<add> registerColumnType( Types.BIGINT, "bigint" );
<add> registerColumnType( Types.BIT, "bit(8)" );
<add> registerColumnType( Types.BLOB, "bit varying(65535)" );
<add> registerColumnType( Types.BOOLEAN, "bit(1)");
<add> registerColumnType( Types.CHAR, "char(1)" );
<add> registerColumnType( Types.CLOB, "string" );
<add> registerColumnType( Types.DATE, "date" );
<add> registerColumnType( Types.DECIMAL, "decimal" );
<add> registerColumnType( Types.DOUBLE, "double" );
<add> registerColumnType( Types.FLOAT, "float" );
<add> registerColumnType( Types.INTEGER, "int" );
<add> registerColumnType( Types.NUMERIC, "numeric($p,$s)" );
<add> registerColumnType( Types.REAL, "double" );
<add> registerColumnType( Types.SMALLINT, "smallint" );
<add> registerColumnType( Types.TIME, "time" );
<add> registerColumnType( Types.TIMESTAMP, "timestamp" );
<add> registerColumnType( Types.TINYINT, "smallint" );
<add> registerColumnType( Types.VARBINARY, 2000, "bit varying($l)" );
<add> registerColumnType( Types.VARCHAR, 4000, "varchar($l)" );
<add>
<add> getDefaultProperties().setProperty(Environment.USE_STREAMS_FOR_BINARY, "true");
<add> getDefaultProperties().setProperty(Environment.STATEMENT_BATCH_SIZE, DEFAULT_BATCH_SIZE);
<add>
<add> registerFunction("ascii", new StandardSQLFunction("ascii", StandardBasicTypes.INTEGER) );
<add> registerFunction("bin", new StandardSQLFunction("bin", StandardBasicTypes.STRING) );
<add> registerFunction("char_length", new StandardSQLFunction("char_length", StandardBasicTypes.LONG) );
<add> registerFunction("character_length", new StandardSQLFunction("character_length", StandardBasicTypes.LONG) );
<add> registerFunction("lengthb", new StandardSQLFunction("lengthb", StandardBasicTypes.LONG) );
<add> registerFunction("lengthh", new StandardSQLFunction("lengthh", StandardBasicTypes.LONG) );
<add> registerFunction("lcase", new StandardSQLFunction("lcase") );
<add> registerFunction("lower", new StandardSQLFunction("lower") );
<add> registerFunction("ltrim", new StandardSQLFunction("ltrim") );
<add> registerFunction("reverse", new StandardSQLFunction("reverse") );
<add> registerFunction("rtrim", new StandardSQLFunction("rtrim") );
<add> registerFunction("trim", new StandardSQLFunction("trim") );
<add> registerFunction("space", new StandardSQLFunction("space", StandardBasicTypes.STRING) );
<add> registerFunction("ucase", new StandardSQLFunction("ucase") );
<add> registerFunction("upper", new StandardSQLFunction("upper") );
<add>
<add> registerFunction("abs", new StandardSQLFunction("abs") );
<add> registerFunction("sign", new StandardSQLFunction("sign", StandardBasicTypes.INTEGER) );
<add>
<add> registerFunction("acos", new StandardSQLFunction("acos", StandardBasicTypes.DOUBLE) );
<add> registerFunction("asin", new StandardSQLFunction("asin", StandardBasicTypes.DOUBLE) );
<add> registerFunction("atan", new StandardSQLFunction("atan", StandardBasicTypes.DOUBLE) );
<add> registerFunction("cos", new StandardSQLFunction("cos", StandardBasicTypes.DOUBLE) );
<add> registerFunction("cot", new StandardSQLFunction("cot", StandardBasicTypes.DOUBLE) );
<add> registerFunction("exp", new StandardSQLFunction("exp", StandardBasicTypes.DOUBLE) );
<add> registerFunction("ln", new StandardSQLFunction("ln", StandardBasicTypes.DOUBLE) );
<add> registerFunction("log2", new StandardSQLFunction("log2", StandardBasicTypes.DOUBLE) );
<add> registerFunction("log10", new StandardSQLFunction("log10", StandardBasicTypes.DOUBLE) );
<add> registerFunction("pi", new NoArgSQLFunction("pi", StandardBasicTypes.DOUBLE) );
<add> registerFunction("rand", new NoArgSQLFunction("rand", StandardBasicTypes.DOUBLE) );
<add> registerFunction("random", new NoArgSQLFunction("random", StandardBasicTypes.DOUBLE) );
<add> registerFunction("sin", new StandardSQLFunction("sin", StandardBasicTypes.DOUBLE) );
<add> registerFunction("sqrt", new StandardSQLFunction("sqrt", StandardBasicTypes.DOUBLE) );
<add> registerFunction("tan", new StandardSQLFunction("tan", StandardBasicTypes.DOUBLE) );
<add>
<add> registerFunction("radians", new StandardSQLFunction("radians", StandardBasicTypes.DOUBLE) );
<add> registerFunction("degrees", new StandardSQLFunction("degrees", StandardBasicTypes.DOUBLE) );
<add>
<add> registerFunction("ceil", new StandardSQLFunction("ceil", StandardBasicTypes.INTEGER) );
<add> registerFunction("floor", new StandardSQLFunction("floor", StandardBasicTypes.INTEGER) );
<add> registerFunction("round", new StandardSQLFunction("round") );
<add>
<add> registerFunction("datediff", new StandardSQLFunction("datediff", StandardBasicTypes.INTEGER) );
<add> registerFunction("timediff", new StandardSQLFunction("timediff", StandardBasicTypes.TIME) );
<add>
<add> registerFunction("date", new StandardSQLFunction("date", StandardBasicTypes.DATE) );
<add> registerFunction("curdate", new NoArgSQLFunction("curdate", StandardBasicTypes.DATE) );
<add> registerFunction("current_date", new NoArgSQLFunction("current_date", StandardBasicTypes.DATE, false) );
<add> registerFunction("sys_date", new NoArgSQLFunction("sys_date", StandardBasicTypes.DATE, false) );
<add> registerFunction("sysdate", new NoArgSQLFunction("sysdate", StandardBasicTypes.DATE, false) );
<add>
<add> registerFunction("time", new StandardSQLFunction("time", StandardBasicTypes.TIME) );
<add> registerFunction("curtime", new NoArgSQLFunction("curtime", StandardBasicTypes.TIME) );
<add> registerFunction("current_time", new NoArgSQLFunction("current_time", StandardBasicTypes.TIME, false) );
<add> registerFunction("sys_time", new NoArgSQLFunction("sys_time", StandardBasicTypes.TIME, false) );
<add> registerFunction("systime", new NoArgSQLFunction("systime", StandardBasicTypes.TIME, false) );
<add>
<add> registerFunction("timestamp", new StandardSQLFunction("timestamp", StandardBasicTypes.TIMESTAMP) );
<add> registerFunction("current_timestamp", new NoArgSQLFunction("current_timestamp", StandardBasicTypes.TIMESTAMP, false) );
<add> registerFunction("sys_timestamp", new NoArgSQLFunction("sys_timestamp", StandardBasicTypes.TIMESTAMP, false) );
<add> registerFunction("systimestamp", new NoArgSQLFunction("systimestamp", StandardBasicTypes.TIMESTAMP, false) );
<add> registerFunction("localtime", new NoArgSQLFunction("localtime", StandardBasicTypes.TIMESTAMP, false) );
<add> registerFunction("localtimestamp", new NoArgSQLFunction("localtimestamp", StandardBasicTypes.TIMESTAMP, false) );
<add>
<add> registerFunction("day", new StandardSQLFunction("day", StandardBasicTypes.INTEGER) );
<add> registerFunction("dayofmonth", new StandardSQLFunction("dayofmonth", StandardBasicTypes.INTEGER) );
<add> registerFunction("dayofweek", new StandardSQLFunction("dayofweek", StandardBasicTypes.INTEGER) );
<add> registerFunction("dayofyear", new StandardSQLFunction("dayofyear", StandardBasicTypes.INTEGER) );
<add> registerFunction("from_days", new StandardSQLFunction("from_days", StandardBasicTypes.DATE) );
<add> registerFunction("from_unixtime", new StandardSQLFunction("from_unixtime", StandardBasicTypes.TIMESTAMP) );
<add> registerFunction("last_day", new StandardSQLFunction("last_day", StandardBasicTypes.DATE) );
<add> registerFunction("minute", new StandardSQLFunction("minute", StandardBasicTypes.INTEGER) );
<add> registerFunction("month", new StandardSQLFunction("month", StandardBasicTypes.INTEGER) );
<add> registerFunction("months_between", new StandardSQLFunction("months_between", StandardBasicTypes.DOUBLE) );
<add> registerFunction("now", new NoArgSQLFunction("now", StandardBasicTypes.TIMESTAMP) );
<add> registerFunction("quarter", new StandardSQLFunction("quarter", StandardBasicTypes.INTEGER) );
<add> registerFunction("second", new StandardSQLFunction("second", StandardBasicTypes.INTEGER) );
<add> registerFunction("sec_to_time", new StandardSQLFunction("sec_to_time", StandardBasicTypes.TIME) );
<add> registerFunction("time_to_sec", new StandardSQLFunction("time_to_sec", StandardBasicTypes.INTEGER) );
<add> registerFunction("to_days", new StandardSQLFunction("to_days", StandardBasicTypes.LONG) );
<add> registerFunction("unix_timestamp", new StandardSQLFunction("unix_timestamp", StandardBasicTypes.LONG) );
<add> registerFunction("utc_date", new NoArgSQLFunction("utc_date", StandardBasicTypes.STRING) );
<add> registerFunction("utc_time", new NoArgSQLFunction("utc_time", StandardBasicTypes.STRING) );
<add> registerFunction("week", new StandardSQLFunction("week", StandardBasicTypes.INTEGER) );
<add> registerFunction("weekday", new StandardSQLFunction("weekday", StandardBasicTypes.INTEGER) );
<add> registerFunction("year", new StandardSQLFunction("year", StandardBasicTypes.INTEGER) );
<add>
<add> registerFunction("hex", new StandardSQLFunction("hex", StandardBasicTypes.STRING) );
<add>
<add> registerFunction("octet_length", new StandardSQLFunction("octet_length", StandardBasicTypes.LONG) );
<add> registerFunction("bit_length", new StandardSQLFunction("bit_length", StandardBasicTypes.LONG) );
<add>
<add> registerFunction("bit_count", new StandardSQLFunction("bit_count", StandardBasicTypes.LONG) );
<add> registerFunction("md5", new StandardSQLFunction("md5", StandardBasicTypes.STRING) );
<add>
<add> registerFunction( "concat", new StandardSQLFunction( "concat", StandardBasicTypes.STRING ) );
<add>
<add> registerFunction("substring", new StandardSQLFunction("substring", StandardBasicTypes.STRING) );
<add> registerFunction("substr", new StandardSQLFunction("substr", StandardBasicTypes.STRING) );
<add>
<add> registerFunction("length", new StandardSQLFunction("length", StandardBasicTypes.INTEGER) );
<add> registerFunction("bit_length",new StandardSQLFunction("bit_length", StandardBasicTypes.INTEGER) );
<add> registerFunction("coalesce", new StandardSQLFunction("coalesce") );
<add> registerFunction("nullif", new StandardSQLFunction("nullif") );
<add> registerFunction("mod", new StandardSQLFunction("mod") );
<add>
<add> registerFunction("power", new StandardSQLFunction("power") );
<add> registerFunction("stddev", new StandardSQLFunction("stddev") );
<add> registerFunction("variance", new StandardSQLFunction("variance") );
<add> registerFunction("trunc", new StandardSQLFunction("trunc") );
<add> registerFunction("nvl", new StandardSQLFunction("nvl") );
<add> registerFunction("nvl2", new StandardSQLFunction("nvl2") );
<add> registerFunction("chr", new StandardSQLFunction("chr", StandardBasicTypes.CHARACTER));
<add> registerFunction("to_char", new StandardSQLFunction("to_char", StandardBasicTypes.STRING) );
<add> registerFunction("to_date", new StandardSQLFunction("to_date", StandardBasicTypes.TIMESTAMP));
<add> registerFunction("instr", new StandardSQLFunction("instr", StandardBasicTypes.INTEGER) );
<add> registerFunction("instrb", new StandardSQLFunction("instrb", StandardBasicTypes.INTEGER) );
<add> registerFunction("lpad", new StandardSQLFunction("lpad", StandardBasicTypes.STRING) );
<add> registerFunction("replace", new StandardSQLFunction("replace", StandardBasicTypes.STRING) );
<add> registerFunction("rpad", new StandardSQLFunction("rpad", StandardBasicTypes.STRING) );
<add> registerFunction("translate", new StandardSQLFunction("translate", StandardBasicTypes.STRING) );
<add>
<add> registerFunction("add_months", new StandardSQLFunction("add_months", StandardBasicTypes.DATE) );
<add> registerFunction("user", new NoArgSQLFunction("user", StandardBasicTypes.STRING, false) );
<add> registerFunction("rownum", new NoArgSQLFunction("rownum", StandardBasicTypes.LONG, false) );
<add> registerFunction("concat", new VarArgsSQLFunction(StandardBasicTypes.STRING, "", "||", ""));
<add> }
<add>
<add> public boolean supportsIdentityColumns() {
<ide> return true;
<ide> }
<ide>
<del> @Override
<del> public String getQuerySequencesString() {
<del> return "select name from db_serial";
<del> }
<del>
<del> @Override
<del> public boolean dropConstraints() {
<add> public String getIdentityInsertString() {
<add> return "NULL";
<add> }
<add>
<add> public boolean supportsColumnCheck() {
<ide> return false;
<ide> }
<del>
<del> @Override
<del> public boolean supportsLimit() {
<add>
<add> public boolean supportsPooledSequences() {
<ide> return true;
<ide> }
<ide>
<del> @Override
<del> public String getLimitString(final String sql, final boolean hasOffset) {
<del> // CUBRID 8.3.0 support limit
<del> return new StringBuffer(sql.length() + GET_LIMIT_BUFFER).append(sql)
<del> .append(hasOffset ? " limit ?, ?" : " limit ?").toString();
<del> }
<del>
<del> @Override
<del> public boolean useMaxForLimit() {
<add> public String getIdentitySelectString() {
<add> return "select last_insert_id()";
<add> }
<add>
<add> protected String getIdentityColumnString() {
<add> return "not null auto_increment"; //starts with 1, implicitly
<add> }
<add>
<add> /*
<add> * CUBRID supports "ADD [COLUMN | ATTRIBUTE]"
<add> */
<add> public String getAddColumnString() {
<add> return "add";
<add> }
<add>
<add> public String getSequenceNextValString(String sequenceName) {
<add> return "select " + sequenceName + ".next_value from table({1}) as T(X)";
<add> }
<add>
<add> public String getCreateSequenceString(String sequenceName) {
<add> return "create serial " + sequenceName;
<add> }
<add>
<add> public String getDropSequenceString(String sequenceName) {
<add> return "drop serial " + sequenceName;
<add> }
<add>
<add> public String getDropForeignKeyString() {
<add> return " drop foreign key ";
<add> }
<add>
<add> public boolean qualifyIndexName() {
<add> return false;
<add> }
<add>
<add> public boolean supportsSequences() {
<add> return true;
<add> }
<add>
<add> public boolean supportsExistsInSelect() {
<add> return false;
<add> }
<add>
<add> public String getQuerySequencesString() {
<add> return "select name from db_serial";
<add> }
<add>
<add> /**
<add>	 * The character specific to this dialect used to open a quoted identifier.
<add> * CUBRID supports square brackets (MSSQL style), backticks (MySQL style),
<add> * as well as double quotes (Oracle style).
<add> *
<add> * @return The dialect's specific open quote character.
<add> */
<add> public char openQuote() {
<add> return '[';
<add> }
<add>
<add> public char closeQuote() {
<add> return ']';
<add> }
<add>
<add> public String getForUpdateString() {
<add> return " ";
<add> }
<add>
<add> public boolean supportsUnionAll() {
<add> return true;
<add> }
<add>
<add> public boolean supportsCurrentTimestampSelection() {
<add> return true;
<add> }
<add>
<add> public String getCurrentTimestampSelectString() {
<add> return "select now()";
<add> }
<add>
<add> public boolean isCurrentTimestampSelectStringCallable() {
<add> return false;
<add> }
<add>
<add> public boolean supportsEmptyInList() {
<add> return false;
<add> }
<add>
<add> public boolean supportsIfExistsBeforeTableName() {
<ide> return true;
<ide> }
<ide>
<del> @Override
<del> public boolean forUpdateOfColumns() {
<del> return true;
<del> }
<del>
<del> @Override
<del> public char closeQuote() {
<del> return ']';
<del> }
<del>
<del> @Override
<del> public char openQuote() {
<del> return '[';
<del> }
<del>
<del> @Override
<del> public boolean hasAlterTable() {
<add> public boolean supportsTupleDistinctCounts() {
<ide> return false;
<ide> }
<ide>
<del> @Override
<del> public String getForUpdateString() {
<del> return " ";
<del> }
<del>
<del> @Override
<del> public boolean supportsUnionAll() {
<del> return true;
<del> }
<del>
<del> @Override
<del> public boolean supportsCommentOn() {
<del> return false;
<del> }
<del>
<del> @Override
<del> public boolean supportsTemporaryTables() {
<del> return false;
<del> }
<del>
<del> @Override
<del> public boolean supportsCurrentTimestampSelection() {
<del> return true;
<del> }
<del>
<del> @Override
<del> public String getCurrentTimestampSelectString() {
<del> return "select systimestamp from table({1}) as T(X)";
<del> }
<del>
<del> @Override
<del> public boolean isCurrentTimestampSelectStringCallable() {
<del> return false;
<del> }
<del>
<del> @Override
<del> public String toBooleanValueString(final boolean bool) {
<del> return bool ? "1" : "0";
<del> }
<ide> }
|
|
JavaScript
|
mit
|
3f822574ceb641cd0585aba40d61a3176cd33eaf
| 0 |
DNepovim/kraj-praha,DNepovim/kraj-praha,DNepovim/kraj-praha
|
const
gulp = require('gulp'),
plumber = require('gulp-plumber'),
rename = require('gulp-rename'),
concat = require('gulp-concat'),
imagemin = require('gulp-imagemin'),
pngquant = require('imagemin-pngquant'),
svgSprite = require('gulp-svg-sprite'),
less = require('gulp-less'),
postcss = require('gulp-postcss'),
autoprefixer = require('autoprefixer'),
pixrem = require('gulp-pixrem'),
sourcemaps = require('gulp-sourcemaps'),
minifycss = require('gulp-minify-css'),
uglify = require('gulp-uglify'),
jade = require('gulp-jade-php'),
browserSync = require('browser-sync');
const
	localhostURL = 'praha.dev',
pathToTemplate = 'wp-content/themes/template/';
gulp.task('images', function () {
return gulp.src(pathToTemplate + 'src/images/*')
.pipe(imagemin({
progressive: true,
svgoPlugins: [{removeViewBox: false}],
use: [pngquant()]
}))
.pipe(gulp.dest(pathToTemplate + 'dist/images'));
});
gulp.task('svg', function () {
gulp.src(pathToTemplate + 'src/svg/**/*.svg')
.pipe(svgSprite({
mode: {
symbol: {
inline: true,
sprite: 'shapes'
}
}
}))
.pipe(gulp.dest(pathToTemplate + 'dist'))
.pipe(browserSync.stream())
})
gulp.task('styles', function () {
gulp.src([pathToTemplate + 'src/styles/styles.less'])
.pipe(plumber({
errorHandler: function (error) {
console.log(error.message);
this.emit('end');
}
}))
.pipe(less())
.pipe(sourcemaps.init())
.pipe(pixrem())
.pipe(postcss([autoprefixer({browsers: ['last 2 versions']})]))
.pipe(sourcemaps.write('.'))
.pipe(gulp.dest(pathToTemplate + 'dist/'))
.pipe(rename({suffix: '.min'}))
.pipe(minifycss())
.pipe(gulp.dest(pathToTemplate + 'dist/'))
.pipe(browserSync.stream())
});
gulp.task('scripts', function () {
return gulp.src(pathToTemplate + 'src/scripts/**/*.js')
.pipe(plumber({
errorHandler: function (error) {
console.log(error.message);
this.emit('end');
}
}))
.pipe(concat('scripts.js'))
.pipe(gulp.dest(pathToTemplate + 'dist/'))
.pipe(rename({suffix: '.min'}))
.pipe(uglify())
.pipe(gulp.dest(pathToTemplate + 'dist/'))
.pipe(browserSync.stream())
});
gulp.task('templates', function() {
gulp.src(pathToTemplate + 'src/templates/*.jade')
.pipe(jade())
.pipe(gulp.dest(pathToTemplate))
.pipe(browserSync.stream())
});
gulp.task('default', ['images','svg', 'styles', 'scripts','templates'] , function () {
browserSync.init({
proxy: localhostURL
});
gulp.watch(pathToTemplate + 'src/images/**', ['images']);
gulp.watch(pathToTemplate + 'src/svg/**', ['svg']);
gulp.watch(pathToTemplate + 'src/styles/**/*.less', ['styles']);
gulp.watch(pathToTemplate + 'src/scripts/**/*.js', ['scripts']);
gulp.watch(pathToTemplate + 'src/templates/**/*.jade', ['templates']);
});
|
gulpfile.js
|
const
gulp = require('gulp'),
plumber = require('gulp-plumber'),
rename = require('gulp-rename'),
concat = require('gulp-concat'),
imagemin = require('gulp-imagemin'),
pngquant = require('imagemin-pngquant'),
less = require('gulp-less'),
postcss = require('gulp-postcss'),
autoprefixer = require('autoprefixer'),
pixrem = require('gulp-pixrem'),
sourcemaps = require('gulp-sourcemaps'),
minifycss = require('gulp-minify-css'),
uglify = require('gulp-uglify'),
jade = require('gulp-jade-php'),
browserSync = require('browser-sync');
const
	localhostURL = 'praha.dev',
pathToTemplate = 'wp-content/themes/template/';
gulp.task('images', function () {
return gulp.src(pathToTemplate + 'src/images/*')
.pipe(imagemin({
progressive: true,
svgoPlugins: [{removeViewBox: false}],
use: [pngquant()]
}))
.pipe(gulp.dest(pathToTemplate + 'dist/images'));
});
gulp.task('styles', function () {
gulp.src([pathToTemplate + 'src/styles/styles.less'])
.pipe(plumber({
errorHandler: function (error) {
console.log(error.message);
this.emit('end');
}
}))
.pipe(less())
.pipe(sourcemaps.init())
.pipe(pixrem())
.pipe(postcss([autoprefixer({browsers: ['last 2 versions']})]))
.pipe(sourcemaps.write('.'))
.pipe(gulp.dest(pathToTemplate + 'dist/'))
.pipe(rename({suffix: '.min'}))
.pipe(minifycss())
.pipe(gulp.dest(pathToTemplate + 'dist/'))
.pipe(browserSync.stream())
});
gulp.task('scripts', function () {
return gulp.src(pathToTemplate + 'src/scripts/**/*.js')
.pipe(plumber({
errorHandler: function (error) {
console.log(error.message);
this.emit('end');
}
}))
.pipe(concat('scripts.js'))
.pipe(gulp.dest(pathToTemplate + 'dist/'))
.pipe(rename({suffix: '.min'}))
.pipe(uglify())
.pipe(gulp.dest(pathToTemplate + 'dist/'))
.pipe(browserSync.stream())
});
gulp.task('templates', function() {
gulp.src(pathToTemplate + 'src/templates/*.jade')
.pipe(jade())
.pipe(gulp.dest(pathToTemplate))
.pipe(browserSync.stream())
});
gulp.task('default', ['images', 'styles', 'scripts','templates'] , function () {
browserSync.init({
proxy: localhostURL
});
gulp.watch(pathToTemplate + 'src/images/**', ['images']);
gulp.watch(pathToTemplate + 'src/styles/**/*.less', ['styles']);
gulp.watch(pathToTemplate + 'src/scripts/**/*.js', ['scripts']);
gulp.watch(pathToTemplate + 'src/templates/**/*.jade', ['templates']);
});
|
add sprite support to gulp
|
gulpfile.js
|
add sprite support to gulp
|
<ide><path>gulpfile.js
<ide> concat = require('gulp-concat'),
<ide> imagemin = require('gulp-imagemin'),
<ide> pngquant = require('imagemin-pngquant'),
<add> svgSprite = require('gulp-svg-sprite'),
<ide> less = require('gulp-less'),
<ide> postcss = require('gulp-postcss'),
<ide> autoprefixer = require('autoprefixer'),
<ide> }))
<ide> .pipe(gulp.dest(pathToTemplate + 'dist/images'));
<ide> });
<add>
<add>
<add>
<add>gulp.task('svg', function () {
<add> gulp.src(pathToTemplate + 'src/svg/**/*.svg')
<add> .pipe(svgSprite({
<add> mode: {
<add> symbol: {
<add> inline: true,
<add> sprite: 'shapes'
<add> }
<add> }
<add> }))
<add> .pipe(gulp.dest(pathToTemplate + 'dist'))
<add> .pipe(browserSync.stream())
<add>})
<ide>
<ide> gulp.task('styles', function () {
<ide> gulp.src([pathToTemplate + 'src/styles/styles.less'])
<ide> .pipe(browserSync.stream())
<ide> });
<ide>
<del>gulp.task('default', ['images', 'styles', 'scripts','templates'] , function () {
<add>gulp.task('default', ['images','svg', 'styles', 'scripts','templates'] , function () {
<ide> browserSync.init({
<ide> proxy: localhostURL
<ide> });
<ide> gulp.watch(pathToTemplate + 'src/images/**', ['images']);
<add> gulp.watch(pathToTemplate + 'src/svg/**', ['svg']);
<ide> gulp.watch(pathToTemplate + 'src/styles/**/*.less', ['styles']);
<ide> gulp.watch(pathToTemplate + 'src/scripts/**/*.js', ['scripts']);
<ide> gulp.watch(pathToTemplate + 'src/templates/**/*.jade', ['templates']);
|
|
Java
|
epl-1.0
|
44e90b89548838d5fca22ea9b92b8b5d367bd165
| 0 |
gnodet/wikitext
|
/*******************************************************************************
* Copyright (c) 2004 - 2005 University Of British Columbia and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* University Of British Columbia - initial API and implementation
*******************************************************************************/
package org.eclipse.mylar.monitor.monitors;
import org.eclipse.jface.action.IContributionItem;
import org.eclipse.mylar.core.InteractionEvent;
import org.eclipse.mylar.core.MylarPlugin;
import org.eclipse.mylar.monitor.MylarMonitorPlugin;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Item;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Menu;
import org.eclipse.swt.widgets.MenuItem;
import org.eclipse.swt.widgets.ToolItem;
/**
* @author Leah Findlater and Mik Kersten
*/
public class MenuCommandMonitor implements Listener {
public static final String MENU_ITEM_ID = "item.label.";
public static final String MENU_ITEM_SELECTED = "menu";
public static final String TOOLBAR_ITEM_SELECTED = "toolbar";
public static final String MENU_PATH_DELIM = "/";
public void handleEvent(Event event) {
try {
if (!(event.widget instanceof Item)) return;
Item item = (Item)event.widget;
if (item.getData() == null) return;
Object target = event.widget.getData();
String id = null;
String delta = null;
if (target instanceof IContributionItem) id = ((IContributionItem)target).getId();
if (item instanceof MenuItem) {
MenuItem menu = (MenuItem)item;
Menu parentMenu = menu.getParent();
String location = "";
if(parentMenu != null) {
while(parentMenu.getParentItem() != null) {
location = parentMenu.getParentItem().getText() + MENU_PATH_DELIM + location;
parentMenu = parentMenu.getParentMenu();
}
}
String simpleId = "";
if (id == null) id = "null";
String itemText = obfuscateValueIfContainsPath(item.getText());
id = id + "$" + MENU_ITEM_ID + simpleId + location + itemText;
delta = MENU_ITEM_SELECTED;
} else if (item instanceof ToolItem) {
ToolItem tool = (ToolItem) item;
String simpleId = "";
if (id == null) id = "null";
id = id + "$" + MENU_ITEM_ID + simpleId + '.' + tool.getToolTipText();
delta = TOOLBAR_ITEM_SELECTED;
}
InteractionEvent interactionEvent = InteractionEvent.makeCommand(id, delta);
MylarPlugin.getDefault().notifyInteractionObserved(interactionEvent);
} catch(Throwable t) {
MylarPlugin.fail(t, "Could not log selection", false);
}
}
/**
* TODO: generalize this to other resources whose names are private
*/
private String obfuscateValueIfContainsPath(String text) {
if (text.indexOf(".java") != -1 || text.indexOf(".xml") != -1) {
return MylarMonitorPlugin.OBFUSCATED_LABEL;
} else {
return text;
}
}
}
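
For context, a minimal sketch of how a listener of this kind can be hooked up globally, assuming an SWT Display is at hand; the Mylar monitor plugin performs its own registration elsewhere, so the wiring class below is purely illustrative:

import org.eclipse.swt.SWT;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Listener;

public class MenuCommandMonitorWiring {
	public static void install(Display display) {
		// Registering the monitor as a display-wide filter means every SWT
		// Selection event (menu items, toolbar items) reaches handleEvent().
		Listener monitor = new MenuCommandMonitor();
		display.addFilter(SWT.Selection, monitor);
	}
}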
|
org.eclipse.mylyn.monitor.ui/src/org/eclipse/mylyn/monitor/monitors/MenuCommandMonitor.java
|
/*******************************************************************************
* Copyright (c) 2004 - 2005 University Of British Columbia and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* University Of British Columbia - initial API and implementation
*******************************************************************************/
package org.eclipse.mylar.monitor.monitors;
import org.eclipse.jface.action.IContributionItem;
import org.eclipse.mylar.core.InteractionEvent;
import org.eclipse.mylar.core.MylarPlugin;
import org.eclipse.mylar.monitor.MylarMonitorPlugin;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Item;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Menu;
import org.eclipse.swt.widgets.MenuItem;
import org.eclipse.swt.widgets.ToolItem;
/**
* @author Leah Findlater and Mik Kersten
*/
public class MenuCommandMonitor implements Listener {
public static final String MENU_ITEM_ID = "item.label.";
public static final String MENU_ITEM_SELECTED = "menu";
public static final String TOOLBAR_ITEM_SELECTED = "toolbar";
public static final String MENU_PATH_DELIM = "/";
public void handleEvent(Event event) {
try {
if (!(event.widget instanceof Item)) return;
Item item = (Item)event.widget;
if (item.getData() == null) return;
Object target = event.widget.getData();
String id = null;
String delta = null;
if (target instanceof IContributionItem) id = ((IContributionItem)target).getId();
if (item instanceof MenuItem) {
if (id == null || id.indexOf('.') == -1) {
MenuItem menu = (MenuItem)item;
Menu parentMenu = menu.getParent();
String location = "";
if(parentMenu != null) {
while(parentMenu.getParentItem() != null) {
location = parentMenu.getParentItem().getText() + MENU_PATH_DELIM + location;
parentMenu = parentMenu.getParentMenu();
}
}
String simpleId = "";
if (id != null) simpleId = id + '.';
String itemText = obfuscateValueIfContainsPath(item.getText());
id = MENU_ITEM_ID + simpleId + location + itemText;
}
delta = MENU_ITEM_SELECTED;
} else if (item instanceof ToolItem) {
if (id == null || id.indexOf('.') == -1) {
ToolItem tool = (ToolItem) item;
String simpleId = "";
if (id != null) simpleId = id + '.';
id = MENU_ITEM_ID + simpleId + '.' + tool.getToolTipText();
}
delta = TOOLBAR_ITEM_SELECTED;
}
InteractionEvent interactionEvent = InteractionEvent.makeCommand(id, delta);
MylarPlugin.getDefault().notifyInteractionObserved(interactionEvent);
} catch(Throwable t) {
MylarPlugin.fail(t, "Could not log selection", false);
}
}
/**
* TODO: generalize this to other resources whose names are private
*/
private String obfuscateValueIfContainsPath(String text) {
if (text.indexOf(".java") != -1 || text.indexOf(".xml") != -1) {
return MylarMonitorPlugin.OBFUSCATED_LABEL;
} else {
return text;
}
}
}
|
Fixed Bugzilla Bug 112715 Increase detail of information recorded about menu commands
|
org.eclipse.mylyn.monitor.ui/src/org/eclipse/mylyn/monitor/monitors/MenuCommandMonitor.java
|
Fixed Bugzilla Bug 112715 Increase detail of information recorded about menu commands
|
<ide><path>org.eclipse.mylyn.monitor.ui/src/org/eclipse/mylyn/monitor/monitors/MenuCommandMonitor.java
<ide> if (target instanceof IContributionItem) id = ((IContributionItem)target).getId();
<ide>
<ide> if (item instanceof MenuItem) {
<del> if (id == null || id.indexOf('.') == -1) {
<del> MenuItem menu = (MenuItem)item;
<del> Menu parentMenu = menu.getParent();
<del> String location = "";
<del> if(parentMenu != null) {
<del> while(parentMenu.getParentItem() != null) {
<del> location = parentMenu.getParentItem().getText() + MENU_PATH_DELIM + location;
<del> parentMenu = parentMenu.getParentMenu();
<del> }
<add> MenuItem menu = (MenuItem)item;
<add> Menu parentMenu = menu.getParent();
<add> String location = "";
<add> if(parentMenu != null) {
<add> while(parentMenu.getParentItem() != null) {
<add> location = parentMenu.getParentItem().getText() + MENU_PATH_DELIM + location;
<add> parentMenu = parentMenu.getParentMenu();
<ide> }
<del> String simpleId = "";
<del> if (id != null) simpleId = id + '.';
<del> String itemText = obfuscateValueIfContainsPath(item.getText());
<del> id = MENU_ITEM_ID + simpleId + location + itemText;
<ide> }
<add> String simpleId = "";
<add> if (id == null) id = "null";
<add> String itemText = obfuscateValueIfContainsPath(item.getText());
<add> id = id + "$" + MENU_ITEM_ID + simpleId + location + itemText;
<add>
<ide> delta = MENU_ITEM_SELECTED;
<ide> } else if (item instanceof ToolItem) {
<del> if (id == null || id.indexOf('.') == -1) {
<del> ToolItem tool = (ToolItem) item;
<del> String simpleId = "";
<del> if (id != null) simpleId = id + '.';
<del> id = MENU_ITEM_ID + simpleId + '.' + tool.getToolTipText();
<del> }
<add> ToolItem tool = (ToolItem) item;
<add> String simpleId = "";
<add> if (id == null) id = "null";
<add> id = id + "$" + MENU_ITEM_ID + simpleId + '.' + tool.getToolTipText();
<add>
<ide> delta = TOOLBAR_ITEM_SELECTED;
<ide> }
<ide> InteractionEvent interactionEvent = InteractionEvent.makeCommand(id, delta);
|
|
Java
|
bsd-3-clause
|
d61012b4da31595699c565f29657ab426edc1ed7
| 0 |
exponentjs/exponent,exponentjs/exponent,exponent/exponent,exponent/exponent,exponentjs/exponent,exponentjs/exponent,exponent/exponent,exponent/exponent,exponentjs/exponent,exponentjs/exponent,exponentjs/exponent,exponentjs/exponent,exponent/exponent,exponent/exponent,exponent/exponent,exponentjs/exponent,exponent/exponent,exponent/exponent
|
// Copyright 2015-present 650 Industries. All rights reserved.
package host.exp.exponent;
import android.content.Context;
import android.net.Uri;
import android.os.Build;
import android.util.Log;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
import androidx.annotation.Nullable;
import expo.modules.updates.UpdatesConfiguration;
import expo.modules.updates.UpdatesUtils;
import expo.modules.updates.db.DatabaseHolder;
import expo.modules.updates.db.entity.UpdateEntity;
import expo.modules.updates.launcher.Launcher;
import expo.modules.updates.launcher.NoDatabaseLauncher;
import expo.modules.updates.launcher.SelectionPolicy;
import expo.modules.updates.launcher.SelectionPolicyNewest;
import expo.modules.updates.loader.EmbeddedLoader;
import expo.modules.updates.loader.LoaderTask;
import expo.modules.updates.manifest.Manifest;
import host.exp.exponent.di.NativeModuleDepsProvider;
import host.exp.exponent.exceptions.ManifestException;
import host.exp.exponent.kernel.ExpoViewKernel;
import host.exp.exponent.kernel.Kernel;
import host.exp.exponent.kernel.KernelConfig;
import host.exp.exponent.storage.ExponentSharedPreferences;
public class ExpoUpdatesAppLoader {
@Inject
ExponentManifest mExponentManifest;
@Inject
ExponentSharedPreferences mExponentSharedPreferences;
@Inject
DatabaseHolder mDatabaseHolder;
@Inject
Kernel mKernel;
private static final String TAG = ExpoUpdatesAppLoader.class.getSimpleName();
public static final String UPDATES_EVENT_NAME = "Expo.nativeUpdatesEvent";
public static final String UPDATE_AVAILABLE_EVENT = "updateAvailable";
public static final String UPDATE_NO_UPDATE_AVAILABLE_EVENT = "noUpdateAvailable";
public static final String UPDATE_ERROR_EVENT = "error";
public enum AppLoaderStatus {
CHECKING_FOR_UPDATE, DOWNLOADING_NEW_UPDATE
}
private String mManifestUrl;
private AppLoaderCallback mCallback;
private final boolean mUseCacheOnly;
private UpdatesConfiguration mUpdatesConfiguration;
private File mUpdatesDirectory;
private SelectionPolicy mSelectionPolicy;
private Launcher mLauncher;
private boolean mIsEmergencyLaunch = false;
private boolean mIsUpToDate = true;
private AppLoaderStatus mStatus;
private boolean mShouldShowAppLoaderStatus = true;
private boolean isStarted = false;
public interface AppLoaderCallback {
void onOptimisticManifest(JSONObject optimisticManifest);
void onManifestCompleted(JSONObject manifest);
void onBundleCompleted(String localBundlePath);
void emitEvent(JSONObject params);
void updateStatus(AppLoaderStatus status);
void onError(Exception e);
}
public ExpoUpdatesAppLoader(String manifestUrl, AppLoaderCallback callback) {
this(manifestUrl, callback, false);
}
public ExpoUpdatesAppLoader(String manifestUrl, AppLoaderCallback callback, boolean useCacheOnly) {
NativeModuleDepsProvider.getInstance().inject(ExpoUpdatesAppLoader.class, this);
mManifestUrl = manifestUrl;
mCallback = callback;
mUseCacheOnly = useCacheOnly;
}
public UpdatesConfiguration getUpdatesConfiguration() {
if (mUpdatesConfiguration == null) {
throw new IllegalStateException("Tried to access UpdatesConfiguration before it was set");
}
return mUpdatesConfiguration;
}
public File getUpdatesDirectory() {
if (mUpdatesDirectory == null) {
throw new IllegalStateException("Tried to access UpdatesDirectory before it was set");
}
return mUpdatesDirectory;
}
public SelectionPolicy getSelectionPolicy() {
if (mSelectionPolicy == null) {
throw new IllegalStateException("Tried to access SelectionPolicy before it was set");
}
return mSelectionPolicy;
}
public Launcher getLauncher() {
if (mLauncher == null) {
throw new IllegalStateException("Tried to access Launcher before it was set");
}
return mLauncher;
}
public boolean isEmergencyLaunch() {
return mIsEmergencyLaunch;
}
public boolean isUpToDate() {
return mIsUpToDate;
}
public AppLoaderStatus getStatus() {
return mStatus;
}
public boolean shouldShowAppLoaderStatus() {
return mShouldShowAppLoaderStatus;
}
private void updateStatus(AppLoaderStatus status) {
mStatus = status;
mCallback.updateStatus(status);
}
public void start(Context context) {
if (isStarted) {
throw new IllegalStateException("AppLoader for " + mManifestUrl + " was started twice. AppLoader.start() may only be called once per instance.");
}
isStarted = true;
mStatus = AppLoaderStatus.CHECKING_FOR_UPDATE;
mKernel.addAppLoaderForManifestUrl(mManifestUrl, this);
Uri httpManifestUrl = mExponentManifest.httpManifestUrl(mManifestUrl);
HashMap<String, Object> configMap = new HashMap<>();
configMap.put(UpdatesConfiguration.UPDATES_CONFIGURATION_UPDATE_URL_KEY, httpManifestUrl);
configMap.put(UpdatesConfiguration.UPDATES_CONFIGURATION_SCOPE_KEY_KEY, httpManifestUrl.toString());
configMap.put(UpdatesConfiguration.UPDATES_CONFIGURATION_SDK_VERSION_KEY, Constants.SDK_VERSIONS);
configMap.put(UpdatesConfiguration.UPDATES_CONFIGURATION_RELEASE_CHANNEL_KEY, Constants.RELEASE_CHANNEL);
configMap.put(UpdatesConfiguration.UPDATES_CONFIGURATION_HAS_EMBEDDED_UPDATE, Constants.isStandaloneApp());
configMap.put(UpdatesConfiguration.UPDATES_CONFIGURATION_ENABLED_KEY, Constants.ARE_REMOTE_UPDATES_ENABLED);
if (mUseCacheOnly) {
configMap.put(UpdatesConfiguration.UPDATES_CONFIGURATION_CHECK_ON_LAUNCH_KEY, "NEVER");
configMap.put(UpdatesConfiguration.UPDATES_CONFIGURATION_LAUNCH_WAIT_MS_KEY, 0);
} else {
if (Constants.isStandaloneApp()) {
configMap.put(UpdatesConfiguration.UPDATES_CONFIGURATION_CHECK_ON_LAUNCH_KEY, Constants.UPDATES_CHECK_AUTOMATICALLY ? "ALWAYS" : "NEVER");
configMap.put(UpdatesConfiguration.UPDATES_CONFIGURATION_LAUNCH_WAIT_MS_KEY, Constants.UPDATES_FALLBACK_TO_CACHE_TIMEOUT);
} else {
configMap.put(UpdatesConfiguration.UPDATES_CONFIGURATION_CHECK_ON_LAUNCH_KEY, "ALWAYS");
configMap.put(UpdatesConfiguration.UPDATES_CONFIGURATION_LAUNCH_WAIT_MS_KEY, 60000);
}
}
configMap.put(UpdatesConfiguration.UPDATES_CONFIGURATION_REQUEST_HEADERS_KEY, getRequestHeaders());
UpdatesConfiguration configuration = new UpdatesConfiguration();
configuration.loadValuesFromMap(configMap);
List<String> sdkVersionsList = new ArrayList<>(Constants.SDK_VERSIONS_LIST);
sdkVersionsList.add(RNObject.UNVERSIONED);
SelectionPolicy selectionPolicy = new SelectionPolicyNewest(sdkVersionsList);
File directory;
try {
directory = UpdatesUtils.getOrCreateUpdatesDirectory(context);
} catch (Exception e) {
mCallback.onError(e);
return;
}
startLoaderTask(configuration, directory, selectionPolicy, context);
}
private void startLoaderTask(final UpdatesConfiguration configuration, final File directory, final SelectionPolicy selectionPolicy, final Context context) {
mUpdatesConfiguration = configuration;
mUpdatesDirectory = directory;
mSelectionPolicy = selectionPolicy;
if (!configuration.isEnabled()) {
launchWithNoDatabase(context, null);
return;
}
new LoaderTask(configuration, mDatabaseHolder, directory, selectionPolicy, new LoaderTask.LoaderTaskCallback() {
private boolean didAbort = false;
@Override
public void onFailure(Exception e) {
if (Constants.isStandaloneApp()) {
mIsEmergencyLaunch = true;
launchWithNoDatabase(context, e);
} else {
if (didAbort) {
return;
}
Exception exception = e;
try {
JSONObject errorJson = new JSONObject(e.getMessage());
exception = new ManifestException(e, mManifestUrl, errorJson);
} catch (Exception ex) {
// do nothing, expected if the error payload does not come from a conformant server
}
mCallback.onError(exception);
}
}
@Override
public boolean onCachedUpdateLoaded(UpdateEntity update) {
setShouldShowAppLoaderStatus(update.metadata);
if (isUsingDeveloperTool(update.metadata)) {
return false;
} else {
try {
String experienceId = update.metadata.getString(ExponentManifest.MANIFEST_ID_KEY);
// if previous run of this app failed due to a loading error, we want to make sure to check for remote updates
JSONObject experienceMetadata = mExponentSharedPreferences.getExperienceMetadata(experienceId);
if (experienceMetadata != null && experienceMetadata.optBoolean(ExponentSharedPreferences.EXPERIENCE_METADATA_LOADING_ERROR)) {
return false;
}
} catch (Exception e) {
return true;
}
}
return true;
}
@Override
public void onRemoteManifestLoaded(Manifest manifest) {
// expo-cli does not always respect our SDK version headers by responding with a compatible update or an error,
// so we need to check compatibility here ourselves
if (!isValidSdkVersion(manifest.getRawManifestJson().optString("sdkVersion"))) {
mCallback.onError(formatExceptionForIncompatibleSdk(manifest.getRawManifestJson().optString("sdkVersion", "null")));
didAbort = true;
return;
}
setShouldShowAppLoaderStatus(manifest.getRawManifestJson());
mCallback.onOptimisticManifest(manifest.getRawManifestJson());
updateStatus(AppLoaderStatus.DOWNLOADING_NEW_UPDATE);
}
@Override
public void onSuccess(Launcher launcher, boolean isUpToDate) {
if (didAbort) {
return;
}
mLauncher = launcher;
mIsUpToDate = isUpToDate;
try {
JSONObject manifest = processManifest(launcher.getLaunchedUpdate().metadata);
mCallback.onManifestCompleted(manifest);
// ReactAndroid will load the bundle on its own in development mode
if (!ExponentManifest.isDebugModeEnabled(manifest)) {
mCallback.onBundleCompleted(launcher.getLaunchAssetFile());
}
} catch (Exception e) {
mCallback.onError(e);
}
}
@Override
public void onBackgroundUpdateFinished(LoaderTask.BackgroundUpdateStatus status, @Nullable UpdateEntity update, @Nullable Exception exception) {
if (didAbort) {
return;
}
try {
JSONObject jsonParams = new JSONObject();
if (status == LoaderTask.BackgroundUpdateStatus.ERROR) {
if (exception == null) {
throw new AssertionError("Background update with error status must have a nonnull exception object");
}
jsonParams.put("type", UPDATE_ERROR_EVENT);
jsonParams.put("message", exception.getMessage());
} else if (status == LoaderTask.BackgroundUpdateStatus.UPDATE_AVAILABLE) {
if (update == null) {
throw new AssertionError("Background update with error status must have a nonnull update object");
}
jsonParams.put("type", UPDATE_AVAILABLE_EVENT);
jsonParams.put("manifestString", update.metadata.toString());
} else if (status == LoaderTask.BackgroundUpdateStatus.NO_UPDATE_AVAILABLE) {
jsonParams.put("type", UPDATE_NO_UPDATE_AVAILABLE_EVENT);
}
mCallback.emitEvent(jsonParams);
} catch (Exception e) {
Log.e(TAG, "Failed to emit event to JS", e);
}
}
}).start(context);
}
private void launchWithNoDatabase(Context context, Exception e) {
mLauncher = new NoDatabaseLauncher(context, mUpdatesConfiguration, e);
JSONObject manifest = EmbeddedLoader.readEmbeddedManifest(context, mUpdatesConfiguration).getRawManifestJson();
try {
manifest = processManifest(manifest);
} catch (Exception ex) {
Log.e(TAG, "Failed to process manifest; attempting to launch with raw manifest. This may cause errors or unexpected behavior.", e);
}
mCallback.onManifestCompleted(manifest);
String launchAssetFile = mLauncher.getLaunchAssetFile();
if (launchAssetFile == null) {
// ReactInstanceManagerBuilder accepts embedded assets as strings with "assets://" prefixed
launchAssetFile = "assets://" + mLauncher.getBundleAssetName();
}
mCallback.onBundleCompleted(launchAssetFile);
}
private JSONObject processManifest(JSONObject manifest) throws JSONException {
Uri parsedManifestUrl = Uri.parse(mManifestUrl);
if (!manifest.has(ExponentManifest.MANIFEST_IS_VERIFIED_KEY) &&
isThirdPartyHosted(parsedManifestUrl) &&
!Constants.isStandaloneApp()) {
// Sandbox third party apps and consider them verified
// for https urls, sandboxed id is of form quinlanj.github.io/myProj-myApp
// for http urls, sandboxed id is of form UNVERIFIED-quinlanj.github.io/myProj-myApp
String protocol = parsedManifestUrl.getScheme();
String securityPrefix = protocol.equals("https") || protocol.equals("exps") ? "" : "UNVERIFIED-";
String path = parsedManifestUrl.getPath() != null ? parsedManifestUrl.getPath() : "";
String slug = manifest.has(ExponentManifest.MANIFEST_SLUG) ? manifest.getString(ExponentManifest.MANIFEST_SLUG) : "";
String sandboxedId = securityPrefix + parsedManifestUrl.getHost() + path + "-" + slug;
manifest.put(ExponentManifest.MANIFEST_ID_KEY, sandboxedId);
manifest.put(ExponentManifest.MANIFEST_IS_VERIFIED_KEY, true);
}
if (Constants.isStandaloneApp()) {
manifest.put(ExponentManifest.MANIFEST_IS_VERIFIED_KEY, true);
}
if (!manifest.has(ExponentManifest.MANIFEST_IS_VERIFIED_KEY)) {
manifest.put(ExponentManifest.MANIFEST_IS_VERIFIED_KEY, false);
}
if (!manifest.optBoolean(ExponentManifest.MANIFEST_IS_VERIFIED_KEY, false) &&
mExponentManifest.isAnonymousExperience(manifest)) {
// automatically verified
manifest.put(ExponentManifest.MANIFEST_IS_VERIFIED_KEY, true);
}
return manifest;
}
private boolean isThirdPartyHosted(Uri uri) {
String host = uri.getHost();
return !(host.equals("exp.host") || host.equals("expo.io") || host.equals("exp.direct") || host.equals("expo.test") ||
host.endsWith(".exp.host") || host.endsWith(".expo.io") || host.endsWith(".exp.direct") || host.endsWith(".expo.test"));
}
private boolean isUsingDeveloperTool(JSONObject manifest) {
try {
return manifest.has(ExponentManifest.MANIFEST_DEVELOPER_KEY) &&
manifest.getJSONObject(ExponentManifest.MANIFEST_DEVELOPER_KEY).has(ExponentManifest.MANIFEST_DEVELOPER_TOOL_KEY);
} catch (JSONException e) {
return false;
}
}
private void setShouldShowAppLoaderStatus(JSONObject manifest) {
// we don't want to show the cached experience alert when Updates.reloadAsync() is called
if (mUseCacheOnly) {
mShouldShowAppLoaderStatus = false;
return;
}
try {
mShouldShowAppLoaderStatus = !(manifest.has(ExponentManifest.MANIFEST_DEVELOPMENT_CLIENT_KEY) &&
manifest.getJSONObject(ExponentManifest.MANIFEST_DEVELOPMENT_CLIENT_KEY)
.optBoolean(ExponentManifest.MANIFEST_DEVELOPMENT_CLIENT_SILENT_LAUNCH_KEY, false));
if (mShouldShowAppLoaderStatus) {
// we want to avoid showing the status for older snack SDK versions, too
// we make our best guess based on the manifest fields
// TODO: remove this after SDK 38 is phased out
if (manifest.has(ExponentManifest.MANIFEST_SDK_VERSION_KEY) &&
ABIVersion.toNumber("39.0.0") > ABIVersion.toNumber(manifest.getString(ExponentManifest.MANIFEST_SDK_VERSION_KEY)) &&
"snack".equals(manifest.optString(ExponentManifest.MANIFEST_SLUG)) &&
manifest.optString(ExponentManifest.MANIFEST_BUNDLE_URL_KEY, "").startsWith("https://d1wp6m56sqw74a.cloudfront.net/%40exponent%2Fsnack")
) {
mShouldShowAppLoaderStatus = false;
}
}
} catch (JSONException e) {
mShouldShowAppLoaderStatus = true;
}
}
private Map<String, String> getRequestHeaders() {
HashMap<String, String> headers = new HashMap<>();
headers.put("Expo-Updates-Environment", getClientEnvironment());
headers.put("Expo-Client-Environment", getClientEnvironment());
if (ExpoViewKernel.getInstance().getVersionName() != null) {
headers.put("Exponent-Version", ExpoViewKernel.getInstance().getVersionName());
}
String sessionSecret = mExponentSharedPreferences.getSessionSecret();
if (sessionSecret != null) {
headers.put("Expo-Session", sessionSecret);
}
// XDL expects the full "exponent-" header names
headers.put("Exponent-Accept-Signature", "true");
headers.put("Exponent-Platform", "android");
if (KernelConfig.FORCE_UNVERSIONED_PUBLISHED_EXPERIENCES) {
headers.put("Exponent-SDK-Version", "UNVERSIONED");
} else {
headers.put("Exponent-SDK-Version", Constants.SDK_VERSIONS);
}
return headers;
}
private String getClientEnvironment() {
if (Constants.isStandaloneApp()) {
return "STANDALONE";
} else if (Build.FINGERPRINT.contains("vbox") || Build.FINGERPRINT.contains("generic")) {
return "EXPO_SIMULATOR";
} else {
return "EXPO_DEVICE";
}
}
private boolean isValidSdkVersion(String sdkVersion) {
if (RNObject.UNVERSIONED.equals(sdkVersion)) {
return true;
}
for (final String version : Constants.SDK_VERSIONS_LIST) {
if (version.equals(sdkVersion)) {
return true;
}
}
return false;
}
private ManifestException formatExceptionForIncompatibleSdk(String sdkVersion) {
JSONObject errorJson = new JSONObject();
try {
errorJson.put("errorCode", "EXPERIENCE_SDK_VERSION_OUTDATED");
errorJson.put("message", "Invalid SDK version");
errorJson.put("metadata", new JSONObject().put(
"availableSDKVersions",
new JSONArray().put(sdkVersion))
);
} catch (Exception e) {
Log.e(TAG, "Failed to format error message for incompatible SDK version", e);
}
return new ManifestException(new Exception("Incompatible SDK version"), mManifestUrl, errorJson);
}
}
|
android/expoview/src/main/java/host/exp/exponent/ExpoUpdatesAppLoader.java
|
// Copyright 2015-present 650 Industries. All rights reserved.
package host.exp.exponent;
import android.content.Context;
import android.net.Uri;
import android.os.Build;
import android.util.Log;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
import androidx.annotation.Nullable;
import expo.modules.updates.UpdatesConfiguration;
import expo.modules.updates.UpdatesUtils;
import expo.modules.updates.db.DatabaseHolder;
import expo.modules.updates.db.entity.UpdateEntity;
import expo.modules.updates.launcher.Launcher;
import expo.modules.updates.launcher.NoDatabaseLauncher;
import expo.modules.updates.launcher.SelectionPolicy;
import expo.modules.updates.launcher.SelectionPolicyNewest;
import expo.modules.updates.loader.EmbeddedLoader;
import expo.modules.updates.loader.LoaderTask;
import expo.modules.updates.manifest.Manifest;
import host.exp.exponent.di.NativeModuleDepsProvider;
import host.exp.exponent.exceptions.ManifestException;
import host.exp.exponent.kernel.ExpoViewKernel;
import host.exp.exponent.kernel.Kernel;
import host.exp.exponent.kernel.KernelConfig;
import host.exp.exponent.storage.ExponentSharedPreferences;
public class ExpoUpdatesAppLoader {
@Inject
ExponentManifest mExponentManifest;
@Inject
ExponentSharedPreferences mExponentSharedPreferences;
@Inject
DatabaseHolder mDatabaseHolder;
@Inject
Kernel mKernel;
private static final String TAG = ExpoUpdatesAppLoader.class.getSimpleName();
public static final String UPDATES_EVENT_NAME = "Expo.nativeUpdatesEvent";
public static final String UPDATE_AVAILABLE_EVENT = "updateAvailable";
public static final String UPDATE_NO_UPDATE_AVAILABLE_EVENT = "noUpdateAvailable";
public static final String UPDATE_ERROR_EVENT = "error";
public enum AppLoaderStatus {
CHECKING_FOR_UPDATE, DOWNLOADING_NEW_UPDATE
}
private String mManifestUrl;
private AppLoaderCallback mCallback;
private final boolean mUseCacheOnly;
private UpdatesConfiguration mUpdatesConfiguration;
private File mUpdatesDirectory;
private SelectionPolicy mSelectionPolicy;
private Launcher mLauncher;
private boolean mIsEmergencyLaunch = false;
private boolean mIsUpToDate = true;
private AppLoaderStatus mStatus;
private boolean mShouldShowAppLoaderStatus = true;
private boolean isStarted = false;
public interface AppLoaderCallback {
void onOptimisticManifest(JSONObject optimisticManifest);
void onManifestCompleted(JSONObject manifest);
void onBundleCompleted(String localBundlePath);
void emitEvent(JSONObject params);
void updateStatus(AppLoaderStatus status);
void onError(Exception e);
}
public ExpoUpdatesAppLoader(String manifestUrl, AppLoaderCallback callback) {
this(manifestUrl, callback, false);
}
public ExpoUpdatesAppLoader(String manifestUrl, AppLoaderCallback callback, boolean useCacheOnly) {
NativeModuleDepsProvider.getInstance().inject(ExpoUpdatesAppLoader.class, this);
mManifestUrl = manifestUrl;
mCallback = callback;
mUseCacheOnly = useCacheOnly;
}
public UpdatesConfiguration getUpdatesConfiguration() {
if (mUpdatesConfiguration == null) {
throw new IllegalStateException("Tried to access UpdatesConfiguration before it was set");
}
return mUpdatesConfiguration;
}
public File getUpdatesDirectory() {
if (mUpdatesDirectory == null) {
throw new IllegalStateException("Tried to access UpdatesDirectory before it was set");
}
return mUpdatesDirectory;
}
public SelectionPolicy getSelectionPolicy() {
if (mSelectionPolicy == null) {
throw new IllegalStateException("Tried to access SelectionPolicy before it was set");
}
return mSelectionPolicy;
}
public Launcher getLauncher() {
if (mLauncher == null) {
throw new IllegalStateException("Tried to access Launcher before it was set");
}
return mLauncher;
}
public boolean isEmergencyLaunch() {
return mIsEmergencyLaunch;
}
public boolean isUpToDate() {
return mIsUpToDate;
}
public AppLoaderStatus getStatus() {
return mStatus;
}
public boolean shouldShowAppLoaderStatus() {
return mShouldShowAppLoaderStatus;
}
private void updateStatus(AppLoaderStatus status) {
mStatus = status;
mCallback.updateStatus(status);
}
public void start(Context context) {
if (isStarted) {
throw new IllegalStateException("AppLoader for " + mManifestUrl + " was started twice. AppLoader.start() may only be called once per instance.");
}
isStarted = true;
mStatus = AppLoaderStatus.CHECKING_FOR_UPDATE;
mKernel.addAppLoaderForManifestUrl(mManifestUrl, this);
Uri httpManifestUrl = mExponentManifest.httpManifestUrl(mManifestUrl);
HashMap<String, Object> configMap = new HashMap<>();
configMap.put(UpdatesConfiguration.UPDATES_CONFIGURATION_UPDATE_URL_KEY, httpManifestUrl);
configMap.put(UpdatesConfiguration.UPDATES_CONFIGURATION_SCOPE_KEY_KEY, httpManifestUrl.toString());
configMap.put(UpdatesConfiguration.UPDATES_CONFIGURATION_SDK_VERSION_KEY, Constants.SDK_VERSIONS);
configMap.put(UpdatesConfiguration.UPDATES_CONFIGURATION_RELEASE_CHANNEL_KEY, Constants.RELEASE_CHANNEL);
configMap.put(UpdatesConfiguration.UPDATES_CONFIGURATION_HAS_EMBEDDED_UPDATE, Constants.isStandaloneApp());
configMap.put(UpdatesConfiguration.UPDATES_CONFIGURATION_ENABLED_KEY, Constants.ARE_REMOTE_UPDATES_ENABLED);
if (mUseCacheOnly) {
configMap.put(UpdatesConfiguration.UPDATES_CONFIGURATION_CHECK_ON_LAUNCH_KEY, "NEVER");
configMap.put(UpdatesConfiguration.UPDATES_CONFIGURATION_LAUNCH_WAIT_MS_KEY, 0);
} else {
if (Constants.isStandaloneApp()) {
configMap.put(UpdatesConfiguration.UPDATES_CONFIGURATION_CHECK_ON_LAUNCH_KEY, Constants.UPDATES_CHECK_AUTOMATICALLY ? "ALWAYS" : "NEVER");
configMap.put(UpdatesConfiguration.UPDATES_CONFIGURATION_LAUNCH_WAIT_MS_KEY, Constants.UPDATES_FALLBACK_TO_CACHE_TIMEOUT);
} else {
configMap.put(UpdatesConfiguration.UPDATES_CONFIGURATION_CHECK_ON_LAUNCH_KEY, "ALWAYS");
configMap.put(UpdatesConfiguration.UPDATES_CONFIGURATION_LAUNCH_WAIT_MS_KEY, 60000);
}
}
configMap.put(UpdatesConfiguration.UPDATES_CONFIGURATION_REQUEST_HEADERS_KEY, getRequestHeaders());
UpdatesConfiguration configuration = new UpdatesConfiguration();
configuration.loadValuesFromMap(configMap);
List<String> sdkVersionsList = new ArrayList<>(Constants.SDK_VERSIONS_LIST);
sdkVersionsList.add(RNObject.UNVERSIONED);
SelectionPolicy selectionPolicy = new SelectionPolicyNewest(sdkVersionsList);
File directory;
try {
directory = UpdatesUtils.getOrCreateUpdatesDirectory(context);
} catch (Exception e) {
mCallback.onError(e);
return;
}
startLoaderTask(configuration, directory, selectionPolicy, context);
}
private void startLoaderTask(final UpdatesConfiguration configuration, final File directory, final SelectionPolicy selectionPolicy, final Context context) {
mUpdatesConfiguration = configuration;
mUpdatesDirectory = directory;
mSelectionPolicy = selectionPolicy;
if (!configuration.isEnabled()) {
launchWithNoDatabase(context, null);
return;
}
new LoaderTask(configuration, mDatabaseHolder, directory, selectionPolicy, new LoaderTask.LoaderTaskCallback() {
private boolean didAbort = false;
@Override
public void onFailure(Exception e) {
if (Constants.isStandaloneApp()) {
mIsEmergencyLaunch = true;
launchWithNoDatabase(context, e);
} else {
if (didAbort) {
return;
}
Exception exception = e;
try {
JSONObject errorJson = new JSONObject(e.getMessage());
exception = new ManifestException(e, mManifestUrl, errorJson);
} catch (Exception ex) {
// do nothing, expected if the error payload does not come from a conformant server
}
mCallback.onError(exception);
}
}
@Override
public boolean onCachedUpdateLoaded(UpdateEntity update) {
setShouldShowAppLoaderStatus(update.metadata);
if (isUsingDeveloperTool(update.metadata)) {
return false;
} else {
try {
String experienceId = update.metadata.getString(ExponentManifest.MANIFEST_ID_KEY);
// if previous run of this app failed due to a loading error, we want to make sure to check for remote updates
JSONObject experienceMetadata = mExponentSharedPreferences.getExperienceMetadata(experienceId);
if (experienceMetadata != null && experienceMetadata.optBoolean(ExponentSharedPreferences.EXPERIENCE_METADATA_LOADING_ERROR)) {
return false;
}
} catch (Exception e) {
return true;
}
}
return true;
}
@Override
public void onRemoteManifestLoaded(Manifest manifest) {
// expo-cli does not always respect our SDK version headers by responding with a compatible update or an error,
// so we need to check compatibility here ourselves
if (!isValidSdkVersion(manifest.getRawManifestJson().optString("sdkVersion"))) {
mCallback.onError(formatExceptionForIncompatibleSdk(manifest.getRawManifestJson().optString("sdkVersion", "null")));
didAbort = true;
return;
}
setShouldShowAppLoaderStatus(manifest.getRawManifestJson());
mCallback.onOptimisticManifest(manifest.getRawManifestJson());
updateStatus(AppLoaderStatus.DOWNLOADING_NEW_UPDATE);
}
@Override
public void onSuccess(Launcher launcher, boolean isUpToDate) {
if (didAbort) {
return;
}
mLauncher = launcher;
mIsUpToDate = isUpToDate;
try {
JSONObject manifest = processManifest(launcher.getLaunchedUpdate().metadata);
mCallback.onManifestCompleted(manifest);
// ReactAndroid will load the bundle on its own in development mode
if (!ExponentManifest.isDebugModeEnabled(manifest)) {
mCallback.onBundleCompleted(launcher.getLaunchAssetFile());
}
} catch (Exception e) {
mCallback.onError(e);
}
}
@Override
public void onBackgroundUpdateFinished(LoaderTask.BackgroundUpdateStatus status, @Nullable UpdateEntity update, @Nullable Exception exception) {
if (didAbort) {
return;
}
try {
JSONObject jsonParams = new JSONObject();
if (status == LoaderTask.BackgroundUpdateStatus.ERROR) {
if (exception == null) {
throw new AssertionError("Background update with error status must have a nonnull exception object");
}
jsonParams.put("type", UPDATE_ERROR_EVENT);
jsonParams.put("message", exception.getMessage());
} else if (status == LoaderTask.BackgroundUpdateStatus.UPDATE_AVAILABLE) {
if (update == null) {
throw new AssertionError("Background update with error status must have a nonnull update object");
}
jsonParams.put("type", UPDATE_AVAILABLE_EVENT);
jsonParams.put("manifestString", update.metadata.toString());
} else if (status == LoaderTask.BackgroundUpdateStatus.NO_UPDATE_AVAILABLE) {
jsonParams.put("type", UPDATE_NO_UPDATE_AVAILABLE_EVENT);
}
mCallback.emitEvent(jsonParams);
} catch (Exception e) {
Log.e(TAG, "Failed to emit event to JS", e);
}
}
}).start(context);
}
private void launchWithNoDatabase(Context context, Exception e) {
mLauncher = new NoDatabaseLauncher(context, mUpdatesConfiguration, e);
JSONObject manifest = EmbeddedLoader.readEmbeddedManifest(context, mUpdatesConfiguration).getRawManifestJson();
try {
manifest = processManifest(manifest);
} catch (Exception ex) {
Log.e(TAG, "Failed to process manifest; attempting to launch with raw manifest. This may cause errors or unexpected behavior.", e);
}
mCallback.onManifestCompleted(manifest);
String launchAssetFile = mLauncher.getLaunchAssetFile();
if (launchAssetFile == null) {
// ReactInstanceManagerBuilder accepts embedded assets as strings with "assets://" prefixed
launchAssetFile = "assets://" + mLauncher.getBundleAssetName();
}
mCallback.onBundleCompleted(launchAssetFile);
}
private JSONObject processManifest(JSONObject manifest) throws JSONException {
Uri parsedManifestUrl = Uri.parse(mManifestUrl);
if (!manifest.has(ExponentManifest.MANIFEST_IS_VERIFIED_KEY) &&
isThirdPartyHosted(parsedManifestUrl) &&
!Constants.isStandaloneApp()) {
// Sandbox third party apps and consider them verified
// for https urls, sandboxed id is of form quinlanj.github.io/myProj-myApp
// for http urls, sandboxed id is of form UNVERIFIED-quinlanj.github.io/myProj-myApp
String protocol = parsedManifestUrl.getScheme();
String securityPrefix = protocol.equals("https") || protocol.equals("exps") ? "" : "UNVERIFIED-";
String path = parsedManifestUrl.getPath() != null ? parsedManifestUrl.getPath() : "";
String slug = manifest.has(ExponentManifest.MANIFEST_SLUG) ? manifest.getString(ExponentManifest.MANIFEST_SLUG) : "";
String sandboxedId = securityPrefix + parsedManifestUrl.getHost() + path + "-" + slug;
manifest.put(ExponentManifest.MANIFEST_ID_KEY, sandboxedId);
manifest.put(ExponentManifest.MANIFEST_IS_VERIFIED_KEY, true);
}
if (!manifest.has(ExponentManifest.MANIFEST_IS_VERIFIED_KEY)) {
manifest.put(ExponentManifest.MANIFEST_IS_VERIFIED_KEY, false);
}
if (!manifest.optBoolean(ExponentManifest.MANIFEST_IS_VERIFIED_KEY, false) &&
mExponentManifest.isAnonymousExperience(manifest)) {
// automatically verified
manifest.put(ExponentManifest.MANIFEST_IS_VERIFIED_KEY, true);
}
return manifest;
}
private boolean isThirdPartyHosted(Uri uri) {
String host = uri.getHost();
return !(host.equals("exp.host") || host.equals("expo.io") || host.equals("exp.direct") || host.equals("expo.test") ||
host.endsWith(".exp.host") || host.endsWith(".expo.io") || host.endsWith(".exp.direct") || host.endsWith(".expo.test"));
}
private boolean isUsingDeveloperTool(JSONObject manifest) {
try {
return manifest.has(ExponentManifest.MANIFEST_DEVELOPER_KEY) &&
manifest.getJSONObject(ExponentManifest.MANIFEST_DEVELOPER_KEY).has(ExponentManifest.MANIFEST_DEVELOPER_TOOL_KEY);
} catch (JSONException e) {
return false;
}
}
private void setShouldShowAppLoaderStatus(JSONObject manifest) {
// we don't want to show the cached experience alert when Updates.reloadAsync() is called
if (mUseCacheOnly) {
mShouldShowAppLoaderStatus = false;
return;
}
try {
mShouldShowAppLoaderStatus = !(manifest.has(ExponentManifest.MANIFEST_DEVELOPMENT_CLIENT_KEY) &&
manifest.getJSONObject(ExponentManifest.MANIFEST_DEVELOPMENT_CLIENT_KEY)
.optBoolean(ExponentManifest.MANIFEST_DEVELOPMENT_CLIENT_SILENT_LAUNCH_KEY, false));
if (mShouldShowAppLoaderStatus) {
// we want to avoid showing the status for older snack SDK versions, too
// we make our best guess based on the manifest fields
// TODO: remove this after SDK 38 is phased out
if (manifest.has(ExponentManifest.MANIFEST_SDK_VERSION_KEY) &&
ABIVersion.toNumber("39.0.0") > ABIVersion.toNumber(manifest.getString(ExponentManifest.MANIFEST_SDK_VERSION_KEY)) &&
"snack".equals(manifest.optString(ExponentManifest.MANIFEST_SLUG)) &&
manifest.optString(ExponentManifest.MANIFEST_BUNDLE_URL_KEY, "").startsWith("https://d1wp6m56sqw74a.cloudfront.net/%40exponent%2Fsnack")
) {
mShouldShowAppLoaderStatus = false;
}
}
} catch (JSONException e) {
mShouldShowAppLoaderStatus = true;
}
}
private Map<String, String> getRequestHeaders() {
HashMap<String, String> headers = new HashMap<>();
headers.put("Expo-Updates-Environment", getClientEnvironment());
headers.put("Expo-Client-Environment", getClientEnvironment());
if (ExpoViewKernel.getInstance().getVersionName() != null) {
headers.put("Exponent-Version", ExpoViewKernel.getInstance().getVersionName());
}
String sessionSecret = mExponentSharedPreferences.getSessionSecret();
if (sessionSecret != null) {
headers.put("Expo-Session", sessionSecret);
}
// XDL expects the full "exponent-" header names
headers.put("Exponent-Accept-Signature", "true");
headers.put("Exponent-Platform", "android");
if (KernelConfig.FORCE_UNVERSIONED_PUBLISHED_EXPERIENCES) {
headers.put("Exponent-SDK-Version", "UNVERSIONED");
} else {
headers.put("Exponent-SDK-Version", Constants.SDK_VERSIONS);
}
return headers;
}
private String getClientEnvironment() {
if (Constants.isStandaloneApp()) {
return "STANDALONE";
} else if (Build.FINGERPRINT.contains("vbox") || Build.FINGERPRINT.contains("generic")) {
return "EXPO_SIMULATOR";
} else {
return "EXPO_DEVICE";
}
}
private boolean isValidSdkVersion(String sdkVersion) {
if (RNObject.UNVERSIONED.equals(sdkVersion)) {
return true;
}
for (final String version : Constants.SDK_VERSIONS_LIST) {
if (version.equals(sdkVersion)) {
return true;
}
}
return false;
}
private ManifestException formatExceptionForIncompatibleSdk(String sdkVersion) {
JSONObject errorJson = new JSONObject();
try {
errorJson.put("errorCode", "EXPERIENCE_SDK_VERSION_OUTDATED");
errorJson.put("message", "Invalid SDK version");
errorJson.put("metadata", new JSONObject().put(
"availableSDKVersions",
new JSONArray().put(sdkVersion))
);
} catch (Exception e) {
Log.e(TAG, "Failed to format error message for incompatible SDK version", e);
}
return new ManifestException(new Exception("Incompatible SDK version"), mManifestUrl, errorJson);
}
}
|
[android] manifests in standalone apps are always verified (#10784)
|
android/expoview/src/main/java/host/exp/exponent/ExpoUpdatesAppLoader.java
|
[android] manifests in standalone apps are always verified (#10784)
|
<ide><path>ndroid/expoview/src/main/java/host/exp/exponent/ExpoUpdatesAppLoader.java
<ide> manifest.put(ExponentManifest.MANIFEST_ID_KEY, sandboxedId);
<ide> manifest.put(ExponentManifest.MANIFEST_IS_VERIFIED_KEY, true);
<ide> }
<add> if (Constants.isStandaloneApp()) {
<add> manifest.put(ExponentManifest.MANIFEST_IS_VERIFIED_KEY, true);
<add> }
<ide> if (!manifest.has(ExponentManifest.MANIFEST_IS_VERIFIED_KEY)) {
<ide> manifest.put(ExponentManifest.MANIFEST_IS_VERIFIED_KEY, false);
<ide> }
|
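The diff above makes standalone builds mark every manifest as verified before the default-unverified fallback runs. What follows is a minimal, self-contained sketch of that ordering, assuming org.json on the classpath; the "isVerified" key and the boolean parameters are stand-ins for ExponentManifest.MANIFEST_IS_VERIFIED_KEY, Constants.isStandaloneApp() and isAnonymousExperience(), so it illustrates the logic rather than reproducing the Expo API.

import org.json.JSONException;
import org.json.JSONObject;

// Illustration only: mirrors the verification ordering established by this commit.
public class ManifestVerificationSketch {

    static JSONObject applyVerification(JSONObject manifest,
                                        boolean isStandaloneApp,
                                        boolean isAnonymousExperience) throws JSONException {
        // 1. Standalone builds always trust their manifest (the block added in the diff).
        if (isStandaloneApp) {
            manifest.put("isVerified", true);
        }
        // 2. Anything still unmarked defaults to unverified.
        if (!manifest.has("isVerified")) {
            manifest.put("isVerified", false);
        }
        // 3. Anonymous experiences are then verified automatically.
        if (!manifest.optBoolean("isVerified", false) && isAnonymousExperience) {
            manifest.put("isVerified", true);
        }
        return manifest;
    }

    public static void main(String[] args) throws JSONException {
        System.out.println(applyVerification(new JSONObject(), true, false));  // {"isVerified":true}
        System.out.println(applyVerification(new JSONObject(), false, false)); // {"isVerified":false}
    }
}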
|
JavaScript
|
apache-2.0
|
4ad4aec9cd5590e9a53a9e529f05a89eb83a760e
| 0 |
phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida
|
$(document).ready(function() {
//Patterns for restrictions
var ucase = new RegExp("^.*[A-Z].*$");
var lcase = new RegExp("^.*[a-z].*$");
var num = new RegExp("^.*[0-9].*$");
$("input[type=password]").keyup(function() {
var visible = 0;
var MIN_LENGTH = 6;
var NUM_RULES = 4;
var passwordField = $("#password")
// Password must be 6 characters or longer
if(passwordField.val().length >= MIN_LENGTH) {
$('#password-minlength').hide();
visible++;
}else{
$('#password-minlength').show();
visible--;
}
// Password must contain an uppercase letter
if(ucase.test(passwordField.val())) {
$('#password-uppercase').hide();
visible++;
}else{
$('#password-uppercase').show();
visible--;
}
// Password must contain a lowercase letter
if(lcase.test(passwordField.val())) {
$('#password-lowercase').hide();
visible++;
}else{
$('#password-lowercase').show();
visible--;
}
//Password must contain a number
if(num.test(passwordField.val())) {
$('#password-number').hide();
visible++;
}else{
$('#password-number').show();
visible--;
}
// Once all are met, hide the alert
if (visible === NUM_RULES) {
$('#passwordRequirements').hide();
}else{
$('#passwordRequirements').show();
}
});
});
|
src/main/webapp/resources/js/pages/users/users-password.js
|
$(document).ready(function() {
$("input[type=password]").keyup(function() {
var ucase = new RegExp("^.*[A-Z].*$");
var lcase = new RegExp("^.*[a-z].*$");
var num = new RegExp("^.*[0-9].*$");
var visible = 0;
var MIN_LENGTH = 6;
var NUM_RULES = 4;
// Password must be 6 characters or longer
if($("#password").val().length >= MIN_LENGTH) {
$('#password-minlength').hide();
visible++;
}else{
$('#password-minlength').show();
visible--;
}
// Password must contain an uppercase letter
if(ucase.test($("#password").val())) {
$('#password-uppercase').hide();
visible++;
}else{
$('#password-uppercase').show();
visible--;
}
// Password must contain a lowercase letter
if(lcase.test($("#password").val())) {
$('#password-lowercase').hide();
visible++;
}else{
$('#password-lowercase').show();
visible--;
}
//Password must contain a number
if(num.test($("#password").val())) {
$('#password-number').hide();
visible++;
}else{
$('#password-number').show();
visible--;
}
// Once all are met, hide the alert
if (visible === NUM_RULES) {
$('#passwordRequirements').hide();
}else{
$('#passwordRequirements').show();
}
});
});
|
Minor changes to `users-password.js` for performance.
|
src/main/webapp/resources/js/pages/users/users-password.js
|
Minor changes to `users-password.js` for performance.
|
<ide><path>rc/main/webapp/resources/js/pages/users/users-password.js
<ide> $(document).ready(function() {
<add>
<add> //Patterns for restrictions
<add> var ucase = new RegExp("^.*[A-Z].*$");
<add> var lcase = new RegExp("^.*[a-z].*$");
<add> var num = new RegExp("^.*[0-9].*$");
<add>
<ide> $("input[type=password]").keyup(function() {
<del> var ucase = new RegExp("^.*[A-Z].*$");
<del> var lcase = new RegExp("^.*[a-z].*$");
<del> var num = new RegExp("^.*[0-9].*$");
<ide>
<ide> var visible = 0;
<ide> var MIN_LENGTH = 6;
<ide> var NUM_RULES = 4;
<ide>
<add> var passwordField = $("#password")
<add>
<ide> // Password must be 6 characters or longer
<del> if($("#password").val().length >= MIN_LENGTH) {
<add> if(passwordField.val().length >= MIN_LENGTH) {
<ide> $('#password-minlength').hide();
<ide> visible++;
<ide> }else{
<ide> }
<ide>
<ide> // Password must contain an uppercase letter
<del> if(ucase.test($("#password").val())) {
<add> if(ucase.test(passwordField.val())) {
<ide> $('#password-uppercase').hide();
<ide> visible++;
<ide> }else{
<ide> }
<ide>
<ide> // Password must contain a lowercase letter
<del> if(lcase.test($("#password").val())) {
<add> if(lcase.test(passwordField.val())) {
<ide> $('#password-lowercase').hide();
<ide> visible++;
<ide> }else{
<ide> }
<ide>
<ide> //Password must contain a number
<del> if(num.test($("#password").val())) {
<add> if(num.test(passwordField.val())) {
<ide> $('#password-number').hide();
<ide> visible++;
<ide> }else{
|
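The message above gives the rationale: invariant work is hoisted out of the keyup handler, so the RegExp objects are built once at document-ready time and the $("#password") lookup is cached. A rough Java analogue of that hoisting principle (not part of the IRIDA change itself; the class name, method and sample inputs are illustrative):

import java.util.regex.Pattern;

// Sketch only: the pre-compiled patterns play the role of the hoisted RegExp objects,
// and MIN_LENGTH matches the script's six-character rule.
public class PasswordRulesSketch {
    private static final Pattern UPPER = Pattern.compile(".*[A-Z].*");
    private static final Pattern LOWER = Pattern.compile(".*[a-z].*");
    private static final Pattern DIGIT = Pattern.compile(".*[0-9].*");
    private static final int MIN_LENGTH = 6;

    static boolean meetsAllRules(String password) {
        // Patterns are compiled once above and reused on every call (the per-keystroke path).
        return password.length() >= MIN_LENGTH
                && UPPER.matcher(password).matches()
                && LOWER.matcher(password).matches()
                && DIGIT.matcher(password).matches();
    }

    public static void main(String[] args) {
        System.out.println(meetsAllRules("Abcdef1")); // true
        System.out.println(meetsAllRules("abcdefg")); // false: no uppercase letter or digit
    }
}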
|
Java
|
apache-2.0
|
e1ad77b56f0cebf93ff63eb5002cdb4dc83e60cd
| 0 |
apache/commons-configuration,apache/commons-configuration,apache/commons-configuration
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.configuration2.resolver;
import java.io.IOException;
import java.io.InputStream;
import java.net.FileNameMap;
import java.net.URL;
import java.net.URLConnection;
import java.util.Vector;
import org.apache.commons.configuration2.io.ConfigurationLogger;
import org.apache.commons.configuration2.ex.ConfigurationException;
import org.apache.commons.configuration2.interpol.ConfigurationInterpolator;
import org.apache.commons.configuration2.io.FileLocator;
import org.apache.commons.configuration2.io.FileLocatorUtils;
import org.apache.commons.configuration2.io.FileSystem;
import org.apache.xml.resolver.CatalogException;
import org.apache.xml.resolver.readers.CatalogReader;
import org.xml.sax.EntityResolver;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
/**
* Thin wrapper around xml commons CatalogResolver to allow list of catalogs
* to be provided.
* @since 1.7
*/
public class CatalogResolver implements EntityResolver
{
/**
* Debug everything.
*/
private static final int DEBUG_ALL = 9;
/**
* Normal debug setting.
*/
private static final int DEBUG_NORMAL = 4;
/**
* Debug nothing.
*/
private static final int DEBUG_NONE = 0;
/**
* The CatalogManager
*/
private final CatalogManager manager = new CatalogManager();
/**
* The FileSystem in use.
*/
private FileSystem fs = FileLocatorUtils.DEFAULT_FILE_SYSTEM;
/**
* The CatalogResolver
*/
private org.apache.xml.resolver.tools.CatalogResolver resolver;
/**
* Stores the logger.
*/
private ConfigurationLogger log;
/**
* Constructs the CatalogResolver
*/
public CatalogResolver()
{
manager.setIgnoreMissingProperties(true);
manager.setUseStaticCatalog(false);
manager.setFileSystem(fs);
initLogger(null);
}
/**
* Sets the list of catalog file names
*
* @param catalogs The delimited list of catalog files.
*/
public void setCatalogFiles(final String catalogs)
{
manager.setCatalogFiles(catalogs);
}
/**
* Sets the FileSystem.
* @param fileSystem The FileSystem.
*/
public void setFileSystem(final FileSystem fileSystem)
{
this.fs = fileSystem;
manager.setFileSystem(fileSystem);
}
/**
* Sets the base path.
* @param baseDir The base path String.
*/
public void setBaseDir(final String baseDir)
{
manager.setBaseDir(baseDir);
}
/**
* Sets the {@code ConfigurationInterpolator}.
* @param ci the {@code ConfigurationInterpolator}
*/
public void setInterpolator(final ConfigurationInterpolator ci)
{
manager.setInterpolator(ci);
}
/**
* Enables debug logging of xml-commons Catalog processing.
* @param debug True if debugging should be enabled, false otherwise.
*/
public void setDebug(final boolean debug)
{
if (debug)
{
manager.setVerbosity(DEBUG_ALL);
}
else
{
manager.setVerbosity(DEBUG_NONE);
}
}
/**
* <p>
* Implements the {@code resolveEntity} method
* for the SAX interface.
* </p>
* <p>Presented with an optional public identifier and a system
* identifier, this function attempts to locate a mapping in the
* catalogs.</p>
* <p>If such a mapping is found, the resolver attempts to open
* the mapped value as an InputSource and return it. Exceptions are
* ignored and null is returned if the mapped value cannot be opened
* as an input source.</p>
* <p>If no mapping is found (or an error occurs attempting to open
* the mapped value as an input source), null is returned and the system
* will use the specified system identifier as if no entityResolver
* was specified.</p>
*
* @param publicId The public identifier for the entity in question.
* This may be null.
* @param systemId The system identifier for the entity in question.
* XML requires a system identifier on all external entities, so this
* value is always specified.
* @return An InputSource for the mapped identifier, or null.
* @throws SAXException if an error occurs.
*/
@Override
public InputSource resolveEntity(final String publicId, final String systemId)
throws SAXException
{
String resolved = getResolver().getResolvedEntity(publicId, systemId);
if (resolved != null)
{
final String badFilePrefix = "file://";
final String correctFilePrefix = "file:///";
// Java 5 has a bug when constructing file URLS
if (resolved.startsWith(badFilePrefix) && !resolved.startsWith(correctFilePrefix))
{
resolved = correctFilePrefix + resolved.substring(badFilePrefix.length());
}
try
{
final URL url = locate(fs, null, resolved);
if (url == null)
{
throw new ConfigurationException("Could not locate "
+ resolved);
}
final InputStream is = fs.getInputStream(url);
final InputSource iSource = new InputSource(resolved);
iSource.setPublicId(publicId);
iSource.setByteStream(is);
return iSource;
}
catch (final Exception e)
{
log.warn("Failed to create InputSource for " + resolved, e);
return null;
}
}
return null;
}
/**
* Gets the logger used by this configuration object.
*
* @return the logger
*/
public ConfigurationLogger getLogger()
{
return log;
}
/**
* Allows setting the logger to be used by this object. This
* method makes it possible for clients to exactly control logging behavior.
* Per default a logger is set that will ignore all log messages. Derived
* classes that want to enable logging should call this method during their
* initialization with the logger to be used. Passing in <b>null</b> as
* argument disables logging.
*
* @param log the new logger
*/
public void setLogger(final ConfigurationLogger log)
{
initLogger(log);
}
/**
* Initializes the logger. Checks for null parameters.
*
* @param log the new logger
*/
private void initLogger(final ConfigurationLogger log)
{
this.log = (log != null) ? log : ConfigurationLogger.newDummyLogger();
}
private synchronized org.apache.xml.resolver.tools.CatalogResolver getResolver()
{
if (resolver == null)
{
resolver = new org.apache.xml.resolver.tools.CatalogResolver(manager);
}
return resolver;
}
/**
* Locates a given file. This implementation delegates to
* the corresponding method in {@link FileLocatorUtils}.
*
* @param fs the {@code FileSystem}
* @param basePath the base path
* @param name the file name
* @return the URL pointing to the file
*/
private static URL locate(final FileSystem fs, final String basePath, final String name)
{
final FileLocator locator =
FileLocatorUtils.fileLocator().fileSystem(fs)
.basePath(basePath).fileName(name).create();
return FileLocatorUtils.locate(locator);
}
/**
* Extends the CatalogManager to make the FileSystem and base directory accessible.
*/
public static class CatalogManager extends org.apache.xml.resolver.CatalogManager
{
/** The static catalog used by this manager. */
private static org.apache.xml.resolver.Catalog staticCatalog;
/** The FileSystem */
private FileSystem fs;
/** The base directory */
private String baseDir = System.getProperty("user.dir");
/** The object for handling interpolation. */
private ConfigurationInterpolator interpolator;
/**
* Sets the FileSystem
* @param fileSystem The FileSystem in use.
*/
public void setFileSystem(final FileSystem fileSystem)
{
this.fs = fileSystem;
}
/**
* Gets the FileSystem.
* @return The FileSystem.
*/
public FileSystem getFileSystem()
{
return this.fs;
}
/**
* Sets the base directory.
* @param baseDir The base directory.
*/
public void setBaseDir(final String baseDir)
{
if (baseDir != null)
{
this.baseDir = baseDir;
}
}
/**
* Gets the base directory.
* @return The base directory.
*/
public String getBaseDir()
{
return this.baseDir;
}
public void setInterpolator(final ConfigurationInterpolator ci)
{
interpolator = ci;
}
public ConfigurationInterpolator getInterpolator()
{
return interpolator;
}
/**
* Gets a new catalog instance. This method is only overridden because xml-resolver
* might be in a parent ClassLoader and will be incapable of loading our Catalog
* implementation.
*
* This method always returns a new instance of the underlying catalog class.
* @return the Catalog.
*/
@Override
public org.apache.xml.resolver.Catalog getPrivateCatalog()
{
org.apache.xml.resolver.Catalog catalog = staticCatalog;
if (catalog == null || !getUseStaticCatalog())
{
try
{
catalog = new Catalog();
catalog.setCatalogManager(this);
catalog.setupReaders();
catalog.loadSystemCatalogs();
}
catch (final Exception ex)
{
ex.printStackTrace();
}
if (getUseStaticCatalog())
{
staticCatalog = catalog;
}
}
return catalog;
}
/**
* Gets a catalog instance.
*
* If this manager uses static catalogs, the same static catalog will
* always be returned. Otherwise a new catalog will be returned.
* @return The Catalog.
*/
@Override
public org.apache.xml.resolver.Catalog getCatalog()
{
return getPrivateCatalog();
}
}
/**
* Overrides the Catalog implementation to use the underlying FileSystem.
*/
public static class Catalog extends org.apache.xml.resolver.Catalog
{
/** The FileSystem */
private FileSystem fs;
/** FileNameMap to determine the mime type */
private final FileNameMap fileNameMap = URLConnection.getFileNameMap();
/**
* Load the catalogs.
* @throws IOException if an error occurs.
*/
@Override
public void loadSystemCatalogs() throws IOException
{
fs = ((CatalogManager) catalogManager).getFileSystem();
final String base = ((CatalogManager) catalogManager).getBaseDir();
// This is safe because the catalog manager returns a vector of strings.
final Vector<String> catalogs = catalogManager.getCatalogFiles();
if (catalogs != null)
{
for (int count = 0; count < catalogs.size(); count++)
{
final String fileName = catalogs.elementAt(count);
URL url = null;
InputStream is = null;
try
{
url = locate(fs, base, fileName);
if (url != null)
{
is = fs.getInputStream(url);
}
}
catch (final ConfigurationException ce)
{
final String name = url.toString();
// Ignore the exception.
catalogManager.debug.message(DEBUG_ALL,
"Unable to get input stream for " + name + ". " + ce.getMessage());
}
if (is != null)
{
final String mimeType = fileNameMap.getContentTypeFor(fileName);
try
{
if (mimeType != null)
{
parseCatalog(mimeType, is);
continue;
}
}
catch (final Exception ex)
{
// Ignore the exception.
catalogManager.debug.message(DEBUG_ALL,
"Exception caught parsing input stream for " + fileName + ". "
+ ex.getMessage());
}
finally
{
is.close();
}
}
parseCatalog(base, fileName);
}
}
}
/**
* Parses the specified catalog file.
* @param baseDir The base directory, if not included in the file name.
* @param fileName The catalog file. May be a full URI String.
* @throws IOException If an error occurs.
*/
public void parseCatalog(final String baseDir, final String fileName) throws IOException
{
base = locate(fs, baseDir, fileName);
catalogCwd = base;
default_override = catalogManager.getPreferPublic();
catalogManager.debug.message(DEBUG_NORMAL, "Parse catalog: " + fileName);
boolean parsed = false;
for (int count = 0; !parsed && count < readerArr.size(); count++)
{
final CatalogReader reader = (CatalogReader) readerArr.get(count);
InputStream inStream;
try
{
inStream = fs.getInputStream(base);
}
catch (final Exception ex)
{
catalogManager.debug.message(DEBUG_NORMAL, "Unable to access " + base
+ ex.getMessage());
break;
}
try
{
reader.readCatalog(this, inStream);
parsed = true;
}
catch (final CatalogException ce)
{
catalogManager.debug.message(DEBUG_NORMAL, "Parse failed for " + fileName
+ ce.getMessage());
if (ce.getExceptionType() == CatalogException.PARSE_FAILED)
{
break;
}
// try again!
continue;
}
finally
{
try
{
inStream.close();
}
catch (final IOException ioe)
{
// Ignore the exception.
inStream = null;
}
}
}
if (parsed)
{
parsePendingCatalogs();
}
}
/**
* Performs character normalization on a URI reference.
*
* @param uriref The URI reference
* @return The normalized URI reference.
*/
@Override
protected String normalizeURI(final String uriref)
{
final ConfigurationInterpolator ci = ((CatalogManager) catalogManager).getInterpolator();
final String resolved = ci != null ? String.valueOf(ci.interpolate(uriref)) : uriref;
return super.normalizeURI(resolved);
}
}
}
|
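A brief usage sketch for the resolver defined in the file above; the catalog file name and the document being parsed are hypothetical, and only methods that appear in the class (setCatalogFiles, setDebug, and resolveEntity via the EntityResolver interface) are exercised:

import javax.xml.parsers.SAXParserFactory;
import org.apache.commons.configuration2.resolver.CatalogResolver;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;

public class CatalogResolverUsageSketch {
    public static void main(String[] args) throws Exception {
        CatalogResolver resolver = new CatalogResolver();
        resolver.setCatalogFiles("catalogs/catalog.xml"); // hypothetical catalog location
        resolver.setDebug(true);                          // verbose xml-commons catalog logging

        // Wire the resolver into a standard SAX parse so public/system identifiers
        // are looked up in the configured catalogs before the parser fetches them.
        SAXParserFactory factory = SAXParserFactory.newInstance();
        factory.setNamespaceAware(true);
        XMLReader reader = factory.newSAXParser().getXMLReader();
        reader.setEntityResolver(resolver);
        reader.setContentHandler(new DefaultHandler());
        reader.parse("config.xml");                       // hypothetical document
    }
}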
src/main/java/org/apache/commons/configuration2/resolver/CatalogResolver.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.configuration2.resolver;
import java.io.IOException;
import java.io.InputStream;
import java.net.FileNameMap;
import java.net.URL;
import java.net.URLConnection;
import java.util.Vector;
import org.apache.commons.configuration2.io.ConfigurationLogger;
import org.apache.commons.configuration2.ex.ConfigurationException;
import org.apache.commons.configuration2.interpol.ConfigurationInterpolator;
import org.apache.commons.configuration2.io.FileLocator;
import org.apache.commons.configuration2.io.FileLocatorUtils;
import org.apache.commons.configuration2.io.FileSystem;
import org.apache.xml.resolver.CatalogException;
import org.apache.xml.resolver.readers.CatalogReader;
import org.xml.sax.EntityResolver;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
/**
* Thin wrapper around xml commons CatalogResolver to allow list of catalogs
* to be provided.
* @since 1.7
*/
public class CatalogResolver implements EntityResolver
{
/**
* Debug everything.
*/
private static final int DEBUG_ALL = 9;
/**
* Normal debug setting.
*/
private static final int DEBUG_NORMAL = 4;
/**
* Debug nothing.
*/
private static final int DEBUG_NONE = 0;
/**
* The CatalogManager
*/
private final CatalogManager manager = new CatalogManager();
/**
* The FileSystem in use.
*/
private FileSystem fs = FileLocatorUtils.DEFAULT_FILE_SYSTEM;
/**
* The CatalogResolver
*/
private org.apache.xml.resolver.tools.CatalogResolver resolver;
/**
* Stores the logger.
*/
private ConfigurationLogger log;
/**
* Constructs the CatalogResolver
*/
public CatalogResolver()
{
manager.setIgnoreMissingProperties(true);
manager.setUseStaticCatalog(false);
manager.setFileSystem(fs);
initLogger(null);
}
/**
* Set the list of catalog file names
*
* @param catalogs The delimited list of catalog files.
*/
public void setCatalogFiles(final String catalogs)
{
manager.setCatalogFiles(catalogs);
}
/**
* Set the FileSystem.
* @param fileSystem The FileSystem.
*/
public void setFileSystem(final FileSystem fileSystem)
{
this.fs = fileSystem;
manager.setFileSystem(fileSystem);
}
/**
* Set the base path.
* @param baseDir The base path String.
*/
public void setBaseDir(final String baseDir)
{
manager.setBaseDir(baseDir);
}
/**
* Set the {@code ConfigurationInterpolator}.
* @param ci the {@code ConfigurationInterpolator}
*/
public void setInterpolator(final ConfigurationInterpolator ci)
{
manager.setInterpolator(ci);
}
/**
* Enables debug logging of xml-commons Catalog processing.
* @param debug True if debugging should be enabled, false otherwise.
*/
public void setDebug(final boolean debug)
{
if (debug)
{
manager.setVerbosity(DEBUG_ALL);
}
else
{
manager.setVerbosity(DEBUG_NONE);
}
}
/**
* <p>
* Implements the {@code resolveEntity} method
* for the SAX interface.
* </p>
* <p>Presented with an optional public identifier and a system
* identifier, this function attempts to locate a mapping in the
* catalogs.</p>
* <p>If such a mapping is found, the resolver attempts to open
* the mapped value as an InputSource and return it. Exceptions are
* ignored and null is returned if the mapped value cannot be opened
* as an input source.</p>
* <p>If no mapping is found (or an error occurs attempting to open
* the mapped value as an input source), null is returned and the system
* will use the specified system identifier as if no entityResolver
* was specified.</p>
*
* @param publicId The public identifier for the entity in question.
* This may be null.
* @param systemId The system identifier for the entity in question.
* XML requires a system identifier on all external entities, so this
* value is always specified.
* @return An InputSource for the mapped identifier, or null.
* @throws SAXException if an error occurs.
*/
@Override
public InputSource resolveEntity(final String publicId, final String systemId)
throws SAXException
{
String resolved = getResolver().getResolvedEntity(publicId, systemId);
if (resolved != null)
{
final String badFilePrefix = "file://";
final String correctFilePrefix = "file:///";
// Java 5 has a bug when constructing file URLS
if (resolved.startsWith(badFilePrefix) && !resolved.startsWith(correctFilePrefix))
{
resolved = correctFilePrefix + resolved.substring(badFilePrefix.length());
}
try
{
final URL url = locate(fs, null, resolved);
if (url == null)
{
throw new ConfigurationException("Could not locate "
+ resolved);
}
final InputStream is = fs.getInputStream(url);
final InputSource iSource = new InputSource(resolved);
iSource.setPublicId(publicId);
iSource.setByteStream(is);
return iSource;
}
catch (final Exception e)
{
log.warn("Failed to create InputSource for " + resolved, e);
return null;
}
}
return null;
}
/**
* Returns the logger used by this configuration object.
*
* @return the logger
*/
public ConfigurationLogger getLogger()
{
return log;
}
/**
* Allows setting the logger to be used by this object. This
* method makes it possible for clients to exactly control logging behavior.
* Per default a logger is set that will ignore all log messages. Derived
* classes that want to enable logging should call this method during their
* initialization with the logger to be used. Passing in <b>null</b> as
* argument disables logging.
*
* @param log the new logger
*/
public void setLogger(final ConfigurationLogger log)
{
initLogger(log);
}
/**
* Initializes the logger. Checks for null parameters.
*
* @param log the new logger
*/
private void initLogger(final ConfigurationLogger log)
{
this.log = (log != null) ? log : ConfigurationLogger.newDummyLogger();
}
private synchronized org.apache.xml.resolver.tools.CatalogResolver getResolver()
{
if (resolver == null)
{
resolver = new org.apache.xml.resolver.tools.CatalogResolver(manager);
}
return resolver;
}
/**
* Helper method for locating a given file. This implementation delegates to
* the corresponding method in {@link FileLocatorUtils}.
*
* @param fs the {@code FileSystem}
* @param basePath the base path
* @param name the file name
* @return the URL pointing to the file
*/
private static URL locate(final FileSystem fs, final String basePath, final String name)
{
final FileLocator locator =
FileLocatorUtils.fileLocator().fileSystem(fs)
.basePath(basePath).fileName(name).create();
return FileLocatorUtils.locate(locator);
}
/**
* Extend the CatalogManager to make the FileSystem and base directory accessible.
*/
public static class CatalogManager extends org.apache.xml.resolver.CatalogManager
{
/** The static catalog used by this manager. */
private static org.apache.xml.resolver.Catalog staticCatalog;
/** The FileSystem */
private FileSystem fs;
/** The base directory */
private String baseDir = System.getProperty("user.dir");
/** The object for handling interpolation. */
private ConfigurationInterpolator interpolator;
/**
* Set the FileSystem
* @param fileSystem The FileSystem in use.
*/
public void setFileSystem(final FileSystem fileSystem)
{
this.fs = fileSystem;
}
/**
* Retrieve the FileSystem.
* @return The FileSystem.
*/
public FileSystem getFileSystem()
{
return this.fs;
}
/**
* Set the base directory.
* @param baseDir The base directory.
*/
public void setBaseDir(final String baseDir)
{
if (baseDir != null)
{
this.baseDir = baseDir;
}
}
/**
* Return the base directory.
* @return The base directory.
*/
public String getBaseDir()
{
return this.baseDir;
}
public void setInterpolator(final ConfigurationInterpolator ci)
{
interpolator = ci;
}
public ConfigurationInterpolator getInterpolator()
{
return interpolator;
}
/**
* Get a new catalog instance. This method is only overridden because xml-resolver
* might be in a parent ClassLoader and will be incapable of loading our Catalog
* implementation.
*
* This method always returns a new instance of the underlying catalog class.
* @return the Catalog.
*/
@Override
public org.apache.xml.resolver.Catalog getPrivateCatalog()
{
org.apache.xml.resolver.Catalog catalog = staticCatalog;
if (catalog == null || !getUseStaticCatalog())
{
try
{
catalog = new Catalog();
catalog.setCatalogManager(this);
catalog.setupReaders();
catalog.loadSystemCatalogs();
}
catch (final Exception ex)
{
ex.printStackTrace();
}
if (getUseStaticCatalog())
{
staticCatalog = catalog;
}
}
return catalog;
}
/**
* Get a catalog instance.
*
* If this manager uses static catalogs, the same static catalog will
* always be returned. Otherwise a new catalog will be returned.
* @return The Catalog.
*/
@Override
public org.apache.xml.resolver.Catalog getCatalog()
{
return getPrivateCatalog();
}
}
/**
* Overrides the Catalog implementation to use the underlying FileSystem.
*/
public static class Catalog extends org.apache.xml.resolver.Catalog
{
/** The FileSystem */
private FileSystem fs;
/** FileNameMap to determine the mime type */
private final FileNameMap fileNameMap = URLConnection.getFileNameMap();
/**
* Load the catalogs.
* @throws IOException if an error occurs.
*/
@Override
public void loadSystemCatalogs() throws IOException
{
fs = ((CatalogManager) catalogManager).getFileSystem();
final String base = ((CatalogManager) catalogManager).getBaseDir();
// This is safe because the catalog manager returns a vector of strings.
final Vector<String> catalogs = catalogManager.getCatalogFiles();
if (catalogs != null)
{
for (int count = 0; count < catalogs.size(); count++)
{
final String fileName = catalogs.elementAt(count);
URL url = null;
InputStream is = null;
try
{
url = locate(fs, base, fileName);
if (url != null)
{
is = fs.getInputStream(url);
}
}
catch (final ConfigurationException ce)
{
final String name = url.toString();
// Ignore the exception.
catalogManager.debug.message(DEBUG_ALL,
"Unable to get input stream for " + name + ". " + ce.getMessage());
}
if (is != null)
{
final String mimeType = fileNameMap.getContentTypeFor(fileName);
try
{
if (mimeType != null)
{
parseCatalog(mimeType, is);
continue;
}
}
catch (final Exception ex)
{
// Ignore the exception.
catalogManager.debug.message(DEBUG_ALL,
"Exception caught parsing input stream for " + fileName + ". "
+ ex.getMessage());
}
finally
{
is.close();
}
}
parseCatalog(base, fileName);
}
}
}
/**
* Parse the specified catalog file.
* @param baseDir The base directory, if not included in the file name.
* @param fileName The catalog file. May be a full URI String.
* @throws IOException If an error occurs.
*/
public void parseCatalog(final String baseDir, final String fileName) throws IOException
{
base = locate(fs, baseDir, fileName);
catalogCwd = base;
default_override = catalogManager.getPreferPublic();
catalogManager.debug.message(DEBUG_NORMAL, "Parse catalog: " + fileName);
boolean parsed = false;
for (int count = 0; !parsed && count < readerArr.size(); count++)
{
final CatalogReader reader = (CatalogReader) readerArr.get(count);
InputStream inStream;
try
{
inStream = fs.getInputStream(base);
}
catch (final Exception ex)
{
catalogManager.debug.message(DEBUG_NORMAL, "Unable to access " + base
+ ex.getMessage());
break;
}
try
{
reader.readCatalog(this, inStream);
parsed = true;
}
catch (final CatalogException ce)
{
catalogManager.debug.message(DEBUG_NORMAL, "Parse failed for " + fileName
+ ce.getMessage());
if (ce.getExceptionType() == CatalogException.PARSE_FAILED)
{
break;
}
// try again!
continue;
}
finally
{
try
{
inStream.close();
}
catch (final IOException ioe)
{
// Ignore the exception.
inStream = null;
}
}
}
if (parsed)
{
parsePendingCatalogs();
}
}
/**
* Perform character normalization on a URI reference.
*
* @param uriref The URI reference
* @return The normalized URI reference.
*/
@Override
protected String normalizeURI(final String uriref)
{
final ConfigurationInterpolator ci = ((CatalogManager) catalogManager).getInterpolator();
final String resolved = ci != null ? String.valueOf(ci.interpolate(uriref)) : uriref;
return super.normalizeURI(resolved);
}
}
}
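
/*
 * Illustrative sketch added for clarity; it is not part of the original CatalogResolver source.
 * It only exercises setters whose signatures appear above (setFileSystem, setBaseDir,
 * setInterpolator). The name "setCatalogFiles" for the delimited catalog-file list and the
 * no-argument constructor are assumptions based on the surrounding Javadoc and should be
 * verified against the actual API.
 */
class CatalogResolverUsageSketch
{
    static CatalogResolver newResolver(final FileSystem fileSystem, final ConfigurationInterpolator interpolator)
    {
        final CatalogResolver resolver = new CatalogResolver();
        // FileSystem used by the nested CatalogManager/Catalog classes to open catalog files
        resolver.setFileSystem(fileSystem);
        // Base directory for resolving relative catalog file names
        resolver.setBaseDir("conf");
        // Allows ${...} placeholders in URI references, handled by normalizeURI above
        resolver.setInterpolator(interpolator);
        // Assumed setter name for the delimited list of catalog files
        resolver.setCatalogFiles("catalog.xml");
        return resolver;
    }
}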
|
Javadoc.
|
src/main/java/org/apache/commons/configuration2/resolver/CatalogResolver.java
|
Javadoc.
|
<ide><path>src/main/java/org/apache/commons/configuration2/resolver/CatalogResolver.java
<ide> }
<ide>
<ide> /**
<del> * Set the list of catalog file names
<add> * Sets the list of catalog file names
<ide> *
<ide> * @param catalogs The delimited list of catalog files.
<ide> */
<ide> }
<ide>
<ide> /**
<del> * Set the FileSystem.
<add> * Sets the FileSystem.
<ide> * @param fileSystem The FileSystem.
<ide> */
<ide> public void setFileSystem(final FileSystem fileSystem)
<ide> }
<ide>
<ide> /**
<del> * Set the base path.
<add> * Sets the base path.
<ide> * @param baseDir The base path String.
<ide> */
<ide> public void setBaseDir(final String baseDir)
<ide> }
<ide>
<ide> /**
<del> * Set the {@code ConfigurationInterpolator}.
<add> * Sets the {@code ConfigurationInterpolator}.
<ide> * @param ci the {@code ConfigurationInterpolator}
<ide> */
<ide> public void setInterpolator(final ConfigurationInterpolator ci)
<ide> }
<ide>
<ide> /**
<del> * Returns the logger used by this configuration object.
<add> * Gets the logger used by this configuration object.
<ide> *
<ide> * @return the logger
<ide> */
<ide> }
<ide>
<ide> /**
<del> * Helper method for locating a given file. This implementation delegates to
<add> * Locates a given file. This implementation delegates to
<ide> * the corresponding method in {@link FileLocatorUtils}.
<ide> *
<ide> * @param fs the {@code FileSystem}
<ide> }
<ide>
<ide> /**
<del> * Extend the CatalogManager to make the FileSystem and base directory accessible.
<add> * Extends the CatalogManager to make the FileSystem and base directory accessible.
<ide> */
<ide> public static class CatalogManager extends org.apache.xml.resolver.CatalogManager
<ide> {
<ide> private ConfigurationInterpolator interpolator;
<ide>
<ide> /**
<del> * Set the FileSystem
<add> * Sets the FileSystem
<ide> * @param fileSystem The FileSystem in use.
<ide> */
<ide> public void setFileSystem(final FileSystem fileSystem)
<ide> }
<ide>
<ide> /**
<del> * Retrieve the FileSystem.
<add> * Gets the FileSystem.
<ide> * @return The FileSystem.
<ide> */
<ide> public FileSystem getFileSystem()
<ide> }
<ide>
<ide> /**
<del> * Set the base directory.
<add> * Sets the base directory.
<ide> * @param baseDir The base directory.
<ide> */
<ide> public void setBaseDir(final String baseDir)
<ide> }
<ide>
<ide> /**
<del> * Return the base directory.
<add> * Gets the base directory.
<ide> * @return The base directory.
<ide> */
<ide> public String getBaseDir()
<ide>
<ide>
<ide> /**
<del> * Get a new catalog instance. This method is only overridden because xml-resolver
<add> * Gets a new catalog instance. This method is only overridden because xml-resolver
<ide> * might be in a parent ClassLoader and will be incapable of loading our Catalog
<ide> * implementation.
<ide> *
<ide> }
<ide>
<ide> /**
<del> * Get a catalog instance.
<add> * Gets a catalog instance.
<ide> *
<ide> * If this manager uses static catalogs, the same static catalog will
<ide> * always be returned. Otherwise a new catalog will be returned.
<ide> }
<ide>
<ide> /**
<del> * Parse the specified catalog file.
<add> * Parses the specified catalog file.
<ide> * @param baseDir The base directory, if not included in the file name.
<ide> * @param fileName The catalog file. May be a full URI String.
<ide> * @throws IOException If an error occurs.
<ide> }
<ide>
<ide> /**
<del> * Perform character normalization on a URI reference.
<add> * Performs character normalization on a URI reference.
<ide> *
<ide> * @param uriref The URI reference
<ide> * @return The normalized URI reference.
|
|
Java
|
apache-2.0
|
33e006db784ae2b5f9622e1e56b66d87ba0fae9a
| 0 |
amygithub/vavr,amygithub/vavr,ummels/vavr,dx-pbuckley/vavr,dx-pbuckley/vavr,ummels/vavr
|
/* / \____ _ _ ____ ______ / \ ____ __ _______
* / / \/ \ / \/ \ / /\__\/ // \/ \ // /\__\ JΛVΛSLΛNG
* _/ / /\ \ \/ / /\ \\__\\ \ // /\ \ /\\/ \ /__\ \ Copyright 2014-2016 Javaslang, http://javaslang.io
* /___/\_/ \_/\____/\_/ \_/\__\/__/\__\_/ \_// \__/\_____/ Licensed under the Apache License, Version 2.0
*/
package javaslang.test;
import javaslang.Function1;
import javaslang.Tuple;
import javaslang.collection.Iterator;
import javaslang.collection.List;
import javaslang.collection.Stream;
import org.junit.Test;
import java.time.LocalDateTime;
import java.time.temporal.ChronoUnit;
import java.util.Comparator;
import java.util.Random;
import static org.assertj.core.api.Assertions.assertThat;
public class ArbitraryTest {
// equally distributed random number generator
private static final Random RANDOM = new Random();
// predictable random number generator (seed = 1)
private Random predictableRandom = new Random(1L);
// -- apply
@Test
public void shouldApplyIntegerObject() {
final Gen<BinaryTree<Integer>> gen = new ArbitraryBinaryTree(0, 0).apply(0);
assertThat(gen).isNotNull();
}
// -- flatMap
@Test
public void shouldFlatMapArbitrary() {
final Arbitrary<Integer> arbitraryInt = size -> Gen.choose(-size, size);
final Arbitrary<BinaryTree<Integer>> arbitraryTree = arbitraryInt.flatMap(i -> new ArbitraryBinaryTree(-i, i));
assertThat(arbitraryTree.apply(0).apply(RANDOM)).isNotNull();
}
// -- map
@Test
public void shouldMapArbitrary() {
final Arbitrary<Integer> arbitraryInt = size -> Gen.choose(-size, size);
final Arbitrary<BinaryTree<Integer>> arbitraryTree = arbitraryInt.map(BinaryTree::leaf);
assertThat(arbitraryTree.apply(0).apply(RANDOM)).isNotNull();
}
// -- filter
@Test
public void shouldFilterArbitrary() {
final Arbitrary<Integer> ints = Arbitrary.integer();
final Arbitrary<Integer> evenInts = ints.filter(i -> i % 2 == 0);
assertThat(evenInts.apply(10).apply(RANDOM)).isNotNull();
}
// -- peek
@Test
public void shouldPeekArbitrary() {
final int[] actual = new int[] { Integer.MIN_VALUE };
final int expected = Arbitrary.integer().peek(i -> actual[0] = i).apply(10).apply(RANDOM);
assertThat(actual[0]).isEqualTo(expected);
}
// factory methods
@Test
public void shouldCreateArbitraryInteger() {
final Arbitrary<Integer> arbitrary = Arbitrary.integer();
final Integer actual = arbitrary.apply(10).apply(RANDOM);
assertThat(actual).isNotNull();
}
@Test
public void shouldCreateArbitraryString() {
final Arbitrary<String> arbitrary = Arbitrary.string(Gen.choose('a', 'z'));
final String actual = arbitrary.apply(10).apply(RANDOM);
assertThat(actual).isNotNull();
}
@Test
public void shouldCreateArbitraryList() {
final Arbitrary<List<Integer>> arbitrary = Arbitrary.list(Arbitrary.integer());
final List<Integer> actual = arbitrary.apply(10).apply(RANDOM);
assertThat(actual).isNotNull();
}
@Test
public void shouldCreateArbitraryStream() {
final Arbitrary<Stream<Integer>> arbitrary = Arbitrary.stream(Arbitrary.integer());
final Stream<Integer> actual = arbitrary.apply(10).apply(RANDOM);
assertThat(actual).isNotNull();
}
@Test
public void shouldCreateFixedContentArbitrary() {
final Gen<String> arbitrary = Arbitrary.of("test", "content").apply(10);
for (int i = 0; i < 100; i++) {
assertThat(arbitrary.apply(RANDOM)).isIn("test", "content");
}
}
@Test
public void shouldCreateNonDistinctArbitrary() {
final Gen<String> arbitrary = Arbitrary.string(Gen.choose('a', 'b')).apply(2);
List.range(0, 1000)
.map(i -> arbitrary.apply(RANDOM))
.groupBy(Function1.identity())
.forEach((key, value) -> assertThat(value.length())
.describedAs(key)
.isGreaterThan(1));
}
@Test
public void shouldCreateDistinctArbitrary() {
final Gen<String> distinctArbitrary = Arbitrary.string(Gen.choose('a', 'b')).distinct().apply(100);
List.range(0, 1000)
.map(i -> distinctArbitrary.apply(RANDOM))
.groupBy(Function1.identity())
.forEach((key, value) -> assertThat(value.length())
.describedAs(key)
.isEqualTo(1));
}
@Test
public void shouldCreateDistinctByArbitrary() {
final Gen<String> distinctByArbitrary = Arbitrary.string(Gen.choose('a', 'b'))
.distinctBy(Comparator.naturalOrder()).apply(100);
List.range(0, 10000)
.map(i -> distinctByArbitrary.apply(RANDOM))
.groupBy(Function1.identity())
.forEach((key, value) -> assertThat(value.length())
.describedAs(key)
.isEqualTo(1));
}
@Test
public void shouldCreateInterspersedFixedContentArbitrary() {
final Gen<String> arbitrary = Arbitrary.of("test")
.intersperse(Arbitrary.of("content"))
.apply(10);
for (int i = 0; i < 100; i++) {
assertThat(arbitrary.apply(RANDOM)).isIn("test", "content");
}
}
@Test
public void shouldCreateInterspersedFixedContentArbitraryWithConstantOrder() {
final Gen<String> arbitrary = Arbitrary.of("test")
.intersperse(Arbitrary.of("content"))
.apply(10);
final Iterator<Stream<String>> generatedStringPairs = Stream.range(0, 10)
.map(i -> arbitrary.apply(RANDOM))
.grouped(2);
for (Stream<String> stringPairs : generatedStringPairs) {
assertThat(stringPairs.mkString(",")).isEqualTo("test,content");
}
}
@Test
public void shouldCreateCharArrayArbitrary() {
final Gen<String> arbitrary = Arbitrary.string(Gen.choose("test".toCharArray()))
.filter(s -> !"".equals(s))
.apply(1);
for (int i = 0; i < 100; i++) {
assertThat(arbitrary.apply(RANDOM)).isIn("t", "e", "s");
}
}
@Test
public void shouldCreateArbitraryStreamAndEvaluateAllElements() {
final Arbitrary<Stream<Integer>> arbitrary = Arbitrary.stream(Arbitrary.integer());
final Stream<Integer> actual = arbitrary.apply(10).apply(new Random() {
private static final long serialVersionUID = 1L;
@Override
public int nextInt(int bound) {
return bound - 1;
}
});
assertThat(actual.length()).isEqualTo(10);
}
@Test
public void shouldCreateArbitraryLocalDateTime(){
final Arbitrary<LocalDateTime> date = Arbitrary.localDateTime();
assertThat(date).isNotNull();
}
@Test(expected = NullPointerException.class)
public void shouldNotAcceptNullMedianLocalDateTime(){
Arbitrary.localDateTime(null, ChronoUnit.DAYS);
}
@Test(expected = NullPointerException.class)
public void shouldNotAcceptNullChronoUnit(){
Arbitrary.localDateTime(LocalDateTime.now(), null);
}
@Test
public void shouldCreateArbitraryLocalDateTimeAdjustedWithGivenChronoUnit(){
final LocalDateTime median = LocalDateTime.of(2017, 2, 17, 3, 40);
final Arbitrary<LocalDateTime> arbitrary = Arbitrary.localDateTime(median, ChronoUnit.YEARS);
final LocalDateTime date = arbitrary.apply(100).apply(predictableRandom);
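        // The assertion below can be exact only because predictableRandom is seeded with 1L above;
        // with the unseeded RANDOM the generated date would vary from run to run.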
assertThat(date).isEqualTo("2063-04-22T01:46:10.312");
}
@Test
public void shouldCreateMedianLocalDateTimeIfSizeIsZero(){
final LocalDateTime median = LocalDateTime.now();
final Arbitrary<LocalDateTime> arbitrary = Arbitrary.localDateTime(median, ChronoUnit.DAYS);
final LocalDateTime date = arbitrary.apply(0).apply(RANDOM);
assertThat(date).isEqualTo(median);
}
@Test
public void shouldCreateDatesInInRangeOfSize(){
final LocalDateTime median = LocalDateTime.now();
final Arbitrary<LocalDateTime> arbitrary = Arbitrary.localDateTime(median, ChronoUnit.DAYS);
Property.def("With size of 100 days, dates should be in range of +/- 100 days")
.forAll(arbitrary)
.suchThat(d -> d.isAfter(median.minusDays(100)) && d.isBefore(median.plusDays(100)))
.check(100, 1000);
}
@Test
public void shouldIgnoreNegativeSignInRangeOfDates(){
final LocalDateTime median = LocalDateTime.now();
final Arbitrary<LocalDateTime> arbitrary = Arbitrary.localDateTime(median, ChronoUnit.DAYS);
Property.def("With negative size of -100 days, dates should be in range of +/- 100 days")
.forAll(arbitrary)
.suchThat(d -> d.isAfter(median.minusDays(100)) && d.isBefore(median.plusDays(100)))
.check(-100, 1000);
}
@Test
public void shouldGenerateTwoDifferentSuccessiveDates(){
final Arbitrary<LocalDateTime> dates = Arbitrary.localDateTime();
final LocalDateTime firstDate = dates.apply(100).apply(RANDOM);
final LocalDateTime secondDate = dates.apply(100).apply(RANDOM);
assertThat(firstDate).isNotEqualTo(secondDate);
}
// -- transform
@Test
public void shouldTransformArbitrary() {
final Arbitrary<Integer> arbitrary = ignored -> Gen.of(1);
final String s = arbitrary.transform(a -> a.apply(0).apply(RANDOM).toString());
assertThat(s).isEqualTo("1");
}
// helpers
/**
* Represents arbitrary binary trees of a certain depth n with values of type int.
*/
static class ArbitraryBinaryTree implements Arbitrary<BinaryTree<Integer>> {
final int minValue;
final int maxValue;
ArbitraryBinaryTree(int minValue, int maxValue) {
this.minValue = Math.min(minValue, maxValue);
this.maxValue = Math.max(minValue, maxValue);
}
@Override
public Gen<BinaryTree<Integer>> apply(int n) {
return random -> Gen.choose(minValue, maxValue).flatMap(value -> {
if (n == 0) {
return Gen.of(BinaryTree.leaf(value));
} else {
return Gen.frequency(
Tuple.of(1, Gen.of(BinaryTree.leaf(value))),
Tuple.of(4, Gen.of(BinaryTree.branch(apply(n / 2).apply(random), value, apply(n / 2).apply(random))))
);
}
}
).apply(random);
}
}
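
    // Illustrative sketch added for clarity (not part of the original test suite): exercises the
    // ArbitraryBinaryTree helper above with the same Property.def(...).forAll(...).suchThat(...)
    // .check(size, tries) pattern used by the LocalDateTime tests in this class. The property
    // itself is deliberately trivial.
    @Test
    public void shouldGenerateNonNullBinaryTreesSketch() {
        Property.def("Arbitrary binary trees are never null")
                .forAll(new ArbitraryBinaryTree(-10, 10))
                .suchThat(tree -> tree != null)
                .check(10, 100);
    }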
interface BinaryTree<T> {
static <T> Branch<T> branch(BinaryTree<T> left, T value, BinaryTree<T> right) {
return new Branch<>(left, value, right);
}
static <T> Branch<T> leaf(T value) {
return new Branch<>(empty(), value, empty());
}
static <T> Empty<T> empty() {
return Empty.instance();
}
class Branch<T> implements BinaryTree<T> {
final BinaryTree<T> left;
final T value;
final BinaryTree<T> right;
Branch(BinaryTree<T> left, T value, BinaryTree<T> right) {
this.left = left;
this.value = value;
this.right = right;
}
}
class Empty<T> implements BinaryTree<T> {
private static final Empty<?> INSTANCE = new Empty<>();
@SuppressWarnings("unchecked")
static <T> Empty<T> instance() {
return (Empty<T>) INSTANCE;
}
}
}
}
|
javaslang-test/src/test/java/javaslang/test/ArbitraryTest.java
|
/* / \____ _ _ ____ ______ / \ ____ __ _______
* / / \/ \ / \/ \ / /\__\/ // \/ \ // /\__\ JΛVΛSLΛNG
* _/ / /\ \ \/ / /\ \\__\\ \ // /\ \ /\\/ \ /__\ \ Copyright 2014-2016 Javaslang, http://javaslang.io
* /___/\_/ \_/\____/\_/ \_/\__\/__/\__\_/ \_// \__/\_____/ Licensed under the Apache License, Version 2.0
*/
package javaslang.test;
import javaslang.Function1;
import javaslang.Tuple;
import javaslang.collection.Iterator;
import javaslang.collection.List;
import javaslang.collection.Stream;
import org.junit.Test;
import java.time.LocalDateTime;
import java.time.temporal.ChronoUnit;
import java.util.Comparator;
import java.util.Random;
import static org.assertj.core.api.Assertions.assertThat;
public class ArbitraryTest {
// equally distributed random number generator
private static final Random RANDOM = new Random();
// predictable random number generator (seed = 1)
private Random predictableRandom = new Random(1L);
// -- apply
@Test
public void shouldApplyIntegerObject() {
final Gen<BinaryTree<Integer>> gen = new ArbitraryBinaryTree(0, 0).apply(0);
assertThat(gen).isNotNull();
}
// -- flatMap
@Test
public void shouldFlatMapArbitrary() {
final Arbitrary<Integer> arbitraryInt = size -> Gen.choose(-size, size);
final Arbitrary<BinaryTree<Integer>> arbitraryTree = arbitraryInt.flatMap(i -> new ArbitraryBinaryTree(-i, i));
assertThat(arbitraryTree.apply(0).apply(RANDOM)).isNotNull();
}
// -- map
@Test
public void shouldMapArbitrary() {
final Arbitrary<Integer> arbitraryInt = size -> Gen.choose(-size, size);
final Arbitrary<BinaryTree<Integer>> arbitraryTree = arbitraryInt.map(BinaryTree::leaf);
assertThat(arbitraryTree.apply(0).apply(RANDOM)).isNotNull();
}
// -- filter
@Test
public void shouldFilterArbitrary() {
final Arbitrary<Integer> ints = Arbitrary.integer();
final Arbitrary<Integer> evenInts = ints.filter(i -> i % 2 == 0);
assertThat(evenInts.apply(10).apply(RANDOM)).isNotNull();
}
// -- peek
@Test
public void shouldPeekArbitrary() {
final int[] actual = new int[] { Integer.MIN_VALUE };
final int expected = Arbitrary.integer().peek(i -> actual[0] = i).apply(10).apply(RANDOM);
assertThat(actual[0]).isEqualTo(expected);
}
// factory methods
@Test
public void shouldCreateArbitraryInteger() {
final Arbitrary<Integer> arbitrary = Arbitrary.integer();
final Integer actual = arbitrary.apply(10).apply(RANDOM);
assertThat(actual).isNotNull();
}
@Test
public void shouldCreateArbitraryString() {
final Arbitrary<String> arbitrary = Arbitrary.string(Gen.choose('a', 'z'));
final String actual = arbitrary.apply(10).apply(RANDOM);
assertThat(actual).isNotNull();
}
@Test
public void shouldCreateArbitraryList() {
final Arbitrary<List<Integer>> arbitrary = Arbitrary.list(Arbitrary.integer());
final List<Integer> actual = arbitrary.apply(10).apply(RANDOM);
assertThat(actual).isNotNull();
}
@Test
public void shouldCreateArbitraryStream() {
final Arbitrary<Stream<Integer>> arbitrary = Arbitrary.stream(Arbitrary.integer());
final Stream<Integer> actual = arbitrary.apply(10).apply(RANDOM);
assertThat(actual).isNotNull();
}
@Test
public void shouldCreateFixedContentArbitrary() {
final Gen<String> arbitrary = Arbitrary.of("test", "content").apply(10);
for (int i = 0; i < 100; i++) {
assertThat(arbitrary.apply(RANDOM)).isIn("test", "content");
}
}
@Test
public void shouldCreateNonDistinctArbitrary() {
final Gen<String> arbitrary = Arbitrary.string(Gen.choose('a', 'b')).apply(2);
List.range(0, 1000)
.map(i -> arbitrary.apply(RANDOM))
.groupBy(Function1.identity())
.forEach((key, value) -> assertThat(value.length())
.describedAs(key)
.isGreaterThan(1));
}
@Test
public void shouldCreateDistinctArbitrary() {
final Gen<String> distinctArbitrary = Arbitrary.string(Gen.choose('a', 'b')).distinct().apply(100);
List.range(0, 1000)
.map(i -> distinctArbitrary.apply(RANDOM))
.groupBy(Function1.identity())
.forEach((key, value) -> assertThat(value.length())
.describedAs(key)
.isEqualTo(1));
}
@Test
public void shouldCreateDistinctByArbitrary() {
final Gen<String> distinctByArbitrary = Arbitrary.string(Gen.choose('a', 'b'))
.distinctBy(Comparator.naturalOrder()).apply(100);
List.range(0, 10000)
.map(i -> distinctByArbitrary.apply(RANDOM))
.groupBy(Function1.identity())
.forEach((key, value) -> assertThat(value.length())
.describedAs(key)
.isEqualTo(1));
}
@Test
public void shouldCreateInterspersedFixedContentArbitrary() {
final Gen<String> arbitrary = Arbitrary.of("test")
.intersperse(Arbitrary.of("content"))
.apply(10);
for (int i = 0; i < 100; i++) {
assertThat(arbitrary.apply(RANDOM)).isIn("test", "content");
}
}
@Test
public void shouldCreateInterspersedFixedContentArbitraryWithConstantOrder() {
final Gen<String> arbitrary = Arbitrary.of("test")
.intersperse(Arbitrary.of("content"))
.apply(10);
final Iterator<Stream<String>> generatedStringPairs = Stream.range(0, 10)
.map(i -> arbitrary.apply(RANDOM))
.grouped(2);
for (Stream<String> stringPairs : generatedStringPairs) {
assertThat(stringPairs.mkString(",")).isEqualTo("test,content");
}
}
@Test
public void shouldCreateCharArrayArbitrary() {
final Gen<String> arbitrary = Arbitrary.string(Gen.choose("test".toCharArray()))
.filter(s -> !"".equals(s))
.apply(1);
for (int i = 0; i < 100; i++) {
assertThat(arbitrary.apply(RANDOM)).isIn("t", "e", "s");
}
}
@Test
public void shouldCreateArbitraryStreamAndEvaluateAllElements() {
final Arbitrary<Stream<Integer>> arbitrary = Arbitrary.stream(Arbitrary.integer());
final Stream<Integer> actual = arbitrary.apply(10).apply(new Random() {
private static final long serialVersionUID = 1L;
@Override
public int nextInt(int bound) {
return bound - 1;
}
});
assertThat(actual.length()).isEqualTo(10);
}
@Test
public void shouldCreateArbitraryLocalDateTime(){
final Arbitrary<LocalDateTime> date = Arbitrary.localDateTime();
assertThat(date).isNotNull();
}
@Test(expected = NullPointerException.class)
public void shouldNotAcceptNullMedianLocalDateTime(){
final Arbitrary<LocalDateTime> dates = Arbitrary.localDateTime(null, ChronoUnit.DAYS);
}
@Test(expected = NullPointerException.class)
public void shouldNotAcceptNullChronoUnit(){
final Arbitrary<LocalDateTime> dates = Arbitrary.localDateTime(LocalDateTime.now(), null);
}
@Test
public void shouldCreateArbitraryLocalDateTimeAdjustedWithGivenChronoUnit(){
final LocalDateTime median = LocalDateTime.of(2017, 2, 17, 3, 40);
final Arbitrary<LocalDateTime> arbitrary = Arbitrary.localDateTime(median, ChronoUnit.YEARS);
final LocalDateTime date = arbitrary.apply(100).apply(predictableRandom);
assertThat(date).isEqualTo("2063-04-22T01:46:10.312");
}
@Test
public void shouldCreateMedianLocalDateTimeIfSizeIsZero(){
final LocalDateTime median = LocalDateTime.now();
final Arbitrary<LocalDateTime> arbitrary = Arbitrary.localDateTime(median, ChronoUnit.DAYS);
final LocalDateTime date = arbitrary.apply(0).apply(RANDOM);
assertThat(date).isEqualTo(median);
}
@Test
public void shouldCreateDatesInInRangeOfSize(){
final LocalDateTime median = LocalDateTime.now();
final Arbitrary<LocalDateTime> arbitrary = Arbitrary.localDateTime(median, ChronoUnit.DAYS);
Property.def("With negative size of -100 days, dates should be in range of +/- 100 days")
.forAll(arbitrary)
.suchThat(d -> d.isAfter(median.minusDays(100)) && d.isBefore(median.plusDays(100)))
.check(100, 1000);
}
@Test
public void shouldIgnoreNegativeSignInRangeOfDates(){
final LocalDateTime median = LocalDateTime.now();
final Arbitrary<LocalDateTime> arbitrary = Arbitrary.localDateTime(median, ChronoUnit.DAYS);
Property.def("With size of 100 days, dates should be in range of +/- 100 days")
.forAll(arbitrary)
.suchThat(d -> d.isAfter(median.minusDays(100)) && d.isBefore(median.plusDays(100)))
.check(-100, 1000);
}
@Test
public void shouldGenerateTwoDifferentSuccessiveDates(){
final LocalDateTime end = LocalDateTime.now().minusDays(2);
final LocalDateTime start = LocalDateTime.now().minusDays(3);
final Arbitrary<LocalDateTime> dates = Arbitrary.localDateTime();
final LocalDateTime firstDate = dates.apply(100).apply(RANDOM);
final LocalDateTime secondDate = dates.apply(100).apply(RANDOM);
assertThat(firstDate).isNotEqualTo(secondDate);
}
// -- transform
@Test
public void shouldTransformArbitrary() {
final Arbitrary<Integer> arbitrary = ignored -> Gen.of(1);
final String s = arbitrary.transform(a -> a.apply(0).apply(RANDOM).toString());
assertThat(s).isEqualTo("1");
}
// helpers
/**
* Represents arbitrary binary trees of a certain depth n with values of type int.
*/
static class ArbitraryBinaryTree implements Arbitrary<BinaryTree<Integer>> {
final int minValue;
final int maxValue;
ArbitraryBinaryTree(int minValue, int maxValue) {
this.minValue = Math.min(minValue, maxValue);
this.maxValue = Math.max(minValue, maxValue);
}
@Override
public Gen<BinaryTree<Integer>> apply(int n) {
return random -> Gen.choose(minValue, maxValue).flatMap(value -> {
if (n == 0) {
return Gen.of(BinaryTree.leaf(value));
} else {
return Gen.frequency(
Tuple.of(1, Gen.of(BinaryTree.leaf(value))),
Tuple.of(4, Gen.of(BinaryTree.branch(apply(n / 2).apply(random), value, apply(n / 2).apply(random))))
);
}
}
).apply(random);
}
}
interface BinaryTree<T> {
static <T> Branch<T> branch(BinaryTree<T> left, T value, BinaryTree<T> right) {
return new Branch<>(left, value, right);
}
static <T> Branch<T> leaf(T value) {
return new Branch<>(empty(), value, empty());
}
static <T> Empty<T> empty() {
return Empty.instance();
}
class Branch<T> implements BinaryTree<T> {
final BinaryTree<T> left;
final T value;
final BinaryTree<T> right;
Branch(BinaryTree<T> left, T value, BinaryTree<T> right) {
this.left = left;
this.value = value;
this.right = right;
}
}
class Empty<T> implements BinaryTree<T> {
private static final Empty<?> INSTANCE = new Empty<>();
@SuppressWarnings("unchecked")
static <T> Empty<T> instance() {
return (Empty<T>) INSTANCE;
}
}
}
}
|
Cleaned tests
Removed unnecessary or unused variables. Switched property descriptions.
|
javaslang-test/src/test/java/javaslang/test/ArbitraryTest.java
|
Cleaned tests
|
<ide><path>javaslang-test/src/test/java/javaslang/test/ArbitraryTest.java
<ide>
<ide> @Test(expected = NullPointerException.class)
<ide> public void shouldNotAcceptNullMedianLocalDateTime(){
<del> final Arbitrary<LocalDateTime> dates = Arbitrary.localDateTime(null, ChronoUnit.DAYS);
<add> Arbitrary.localDateTime(null, ChronoUnit.DAYS);
<ide> }
<ide>
<ide> @Test(expected = NullPointerException.class)
<ide> public void shouldNotAcceptNullChronoUnit(){
<del> final Arbitrary<LocalDateTime> dates = Arbitrary.localDateTime(LocalDateTime.now(), null);
<add> Arbitrary.localDateTime(LocalDateTime.now(), null);
<ide> }
<ide>
<ide> @Test
<ide> final LocalDateTime median = LocalDateTime.now();
<ide> final Arbitrary<LocalDateTime> arbitrary = Arbitrary.localDateTime(median, ChronoUnit.DAYS);
<ide>
<add> Property.def("With size of 100 days, dates should be in range of +/- 100 days")
<add> .forAll(arbitrary)
<add> .suchThat(d -> d.isAfter(median.minusDays(100)) && d.isBefore(median.plusDays(100)))
<add> .check(100, 1000);
<add> }
<add>
<add> @Test
<add> public void shouldIgnoreNegativeSignInRangeOfDates(){
<add> final LocalDateTime median = LocalDateTime.now();
<add> final Arbitrary<LocalDateTime> arbitrary = Arbitrary.localDateTime(median, ChronoUnit.DAYS);
<add>
<ide> Property.def("With negative size of -100 days, dates should be in range of +/- 100 days")
<ide> .forAll(arbitrary)
<ide> .suchThat(d -> d.isAfter(median.minusDays(100)) && d.isBefore(median.plusDays(100)))
<del> .check(100, 1000);
<del> }
<del>
<del> @Test
<del> public void shouldIgnoreNegativeSignInRangeOfDates(){
<del> final LocalDateTime median = LocalDateTime.now();
<del> final Arbitrary<LocalDateTime> arbitrary = Arbitrary.localDateTime(median, ChronoUnit.DAYS);
<del>
<del> Property.def("With size of 100 days, dates should be in range of +/- 100 days")
<del> .forAll(arbitrary)
<del> .suchThat(d -> d.isAfter(median.minusDays(100)) && d.isBefore(median.plusDays(100)))
<ide> .check(-100, 1000);
<ide> }
<ide>
<ide> @Test
<ide> public void shouldGenerateTwoDifferentSuccessiveDates(){
<del> final LocalDateTime end = LocalDateTime.now().minusDays(2);
<del> final LocalDateTime start = LocalDateTime.now().minusDays(3);
<del>
<ide> final Arbitrary<LocalDateTime> dates = Arbitrary.localDateTime();
<ide> final LocalDateTime firstDate = dates.apply(100).apply(RANDOM);
<ide> final LocalDateTime secondDate = dates.apply(100).apply(RANDOM);
|
|
Java
|
apache-2.0
|
01a5c2dad071c0883d36cb3757fb3db53bf0c6f0
| 0 |
IHTSDO/OTF-Mapping-Service,IHTSDO/OTF-Mapping-Service,IHTSDO/OTF-Mapping-Service,IHTSDO/OTF-Mapping-Service,IHTSDO/OTF-Mapping-Service,IHTSDO/OTF-Mapping-Service
|
/*
* Copyright 2019 West Coast Informatics, LLC
*/
package org.ihtsdo.otf.mapping.mojo;
import java.text.SimpleDateFormat;
import java.util.HashSet;
import java.util.Set;
import org.apache.log4j.Logger;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.ihtsdo.otf.mapping.helpers.MapRefsetPattern;
import org.ihtsdo.otf.mapping.helpers.MapUserRole;
import org.ihtsdo.otf.mapping.helpers.RelationStyle;
import org.ihtsdo.otf.mapping.helpers.ReportFrequency;
import org.ihtsdo.otf.mapping.helpers.ReportQueryType;
import org.ihtsdo.otf.mapping.helpers.ReportResultType;
import org.ihtsdo.otf.mapping.helpers.ReportTimePeriod;
import org.ihtsdo.otf.mapping.helpers.WorkflowType;
import org.ihtsdo.otf.mapping.jpa.MapAdviceJpa;
import org.ihtsdo.otf.mapping.jpa.MapProjectJpa;
import org.ihtsdo.otf.mapping.jpa.MapRelationJpa;
import org.ihtsdo.otf.mapping.jpa.MapUserJpa;
import org.ihtsdo.otf.mapping.jpa.services.ContentServiceJpa;
import org.ihtsdo.otf.mapping.jpa.services.MappingServiceJpa;
import org.ihtsdo.otf.mapping.jpa.services.ReportServiceJpa;
import org.ihtsdo.otf.mapping.jpa.services.SecurityServiceJpa;
import org.ihtsdo.otf.mapping.jpa.services.WorkflowServiceJpa;
import org.ihtsdo.otf.mapping.model.MapAdvice;
import org.ihtsdo.otf.mapping.model.MapProject;
import org.ihtsdo.otf.mapping.model.MapRelation;
import org.ihtsdo.otf.mapping.reports.ReportDefinition;
import org.ihtsdo.otf.mapping.reports.ReportDefinitionJpa;
import org.ihtsdo.otf.mapping.services.ContentService;
import org.ihtsdo.otf.mapping.services.MappingService;
import org.ihtsdo.otf.mapping.services.ReportService;
import org.ihtsdo.otf.mapping.services.SecurityService;
import org.ihtsdo.otf.mapping.services.WorkflowService;
/**
* Mojo for generating sample data for a demo of map application.
*
* See admin/loader/pom.xml for a sample execution.
*
* @goal generate-demo-data
*/
public class GenerateDemoDataMojo extends AbstractMojo {
/** The security service. */
SecurityService securityService = null;
/** The content service. */
ContentService contentService = null;
/** The mapping service. */
MappingService mappingService = null;
/** The workflow service. */
WorkflowService workflowService = null;
/** The report service. */
ReportService reportService = null;
/* see superclass */
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
getLog().info("Generate demo data");
try {
securityService = new SecurityServiceJpa();
contentService = new ContentServiceJpa();
mappingService = new MappingServiceJpa();
workflowService = new WorkflowServiceJpa();
reportService = new ReportServiceJpa();
loadDemoData();
getLog().info("Finished");
} catch (Exception e) {
e.printStackTrace();
throw new MojoExecutionException("Ad-hoc mojo failed to complete", e);
} finally {
try {
securityService.close();
contentService.close();
mappingService.close();
workflowService.close();
} catch (Exception e) {
e.printStackTrace();
throw new MojoExecutionException(
"Ad-hoc mojo failed to close services.", e);
}
}
}
/**
* Load sample data.
*
* @throws Exception the exception
*/
private void loadDemoData() throws Exception {
//
// Add lead users
//
Logger.getLogger(getClass()).info("Add new lead users");
MapUserJpa lead1 = (MapUserJpa) securityService.getMapUser("lead1");
if (lead1 == null) {
lead1 = makeMapUser("lead1", "Lead1");
lead1 = (MapUserJpa) securityService.addMapUser(lead1);
}
MapUserJpa lead2 = (MapUserJpa) securityService.getMapUser("lead2");
if (lead2 == null) {
lead2 = makeMapUser("lead2", "Lead2");
lead2 = (MapUserJpa) securityService.addMapUser(lead2);
}
MapUserJpa lead3 = (MapUserJpa) securityService.getMapUser("lead3");
if (lead3 == null) {
lead3 = makeMapUser("lead3", "Lead3");
lead3 = (MapUserJpa) securityService.addMapUser(lead3);
}
//
// Add specialist users
//
Logger.getLogger(getClass()).info("Add new specialist users");
MapUserJpa specialist1 =
(MapUserJpa) securityService.getMapUser("specialist1");
if (specialist1 == null) {
specialist1 = makeMapUser("specialist1", "Specialist1");
specialist1 = (MapUserJpa) securityService.addMapUser(specialist1);
}
MapUserJpa specialist2 =
(MapUserJpa) securityService.getMapUser("specialist2");
if (specialist2 == null) {
specialist2 = makeMapUser("specialist2", "Specialist2");
specialist2 = (MapUserJpa) securityService.addMapUser(specialist2);
}
MapUserJpa specialist3 =
(MapUserJpa) securityService.getMapUser("specialist3");
if (specialist3 == null) {
specialist3 = makeMapUser("specialist3", "Specialist3");
specialist3 = (MapUserJpa) securityService.addMapUser(specialist3);
}
//
// Mapping relationships
//
final Set<MapRelation> mapRelations = new HashSet<>();
for (final String rel : new String[] {
"exact", "partial", "narrower", "broader", "none"
}) {
final String ucRel = rel.substring(0, 1).toUpperCase() + rel.substring(1);
final MapRelation relation = new MapRelationJpa();
relation.setAbbreviation(ucRel);
relation.setAllowableForNullTarget(false);
relation.setComputed(false);
if (rel.equals("none")) {
relation.setAllowableForNullTarget(true);
relation.setComputed(true);
}
relation.setName(ucRel + " match");
relation.setTerminologyId(rel);
mappingService.addMapRelation(relation);
mapRelations.add(relation);
}
//
// Mapping Advice
//
final Set<MapAdvice> mapAdvices = new HashSet<>();
for (final String adv : new String[] {
"Test advice 1", "Test advice 2", "Test advice 3", "Null target advice"
}) {
final MapAdvice advice = new MapAdviceJpa();
advice.setAllowableForNullTarget(false);
advice.setComputed(false);
if (adv.contains("Null")) {
advice.setAllowableForNullTarget(true);
advice.setComputed(true);
}
advice.setDetail(adv);
advice.setName(adv);
mappingService.addMapAdvice(advice);
mapAdvices.add(advice);
}
//
// Create project Allergy to SNOMED project
//
Logger.getLogger(getClass())
.info("Create project ALLERGY to SNOMEDCT with REVIEW");
MapProject project1 = new MapProjectJpa();
project1.setDestinationTerminology("SNOMEDCT");
project1.setDestinationTerminologyVersion("20140731");
project1.setGroupStructure(true);
project1.setMapRefsetPattern(MapRefsetPattern.ComplexMap);
project1.setName("ALLERGY to SNOMEDCT with REVIEW");
project1.setProjectSpecificAlgorithmHandlerClass(
"org.ihtsdo.otf.mapping.jpa.handlers.AllergyProjectSpecificAlgorithmHandler");
project1.setPropagatedFlag(false);
project1.setPublic(true);
project1.setRefSetId("12345");
project1.setRefSetName("Allergy to SNOMED Refset");
project1.setSourceTerminology("ALLERGY");
project1.setSourceTerminologyVersion("latest");
project1.setWorkflowType(WorkflowType.REVIEW_PROJECT);
project1.setMapRelationStyle(RelationStyle.RELATIONSHIP_STYLE);
project1.getScopeConcepts().add("root");
project1.setScopeDescendantsFlag(true);
project1.setMapRelations(mapRelations);
project1.getMapLeads().add(lead1);
project1.getMapLeads().add(lead2);
project1.getMapSpecialists().add(specialist1);
project1.getMapSpecialists().add(specialist2);
project1.getMapSpecialists().add(specialist3);
// Add project
Logger.getLogger(getClass()).info(" add " + project1);
project1 = mappingService.addMapProject(project1);
Logger.getLogger(getClass()).info(" compute workflow");
workflowService.computeWorkflow(project1);
// Create project MEDICATION to RXNORM with REVIEW
Logger.getLogger(getClass())
.info("Create project MEDICATION to RXNORM with REVIEW");
MapProject project2 = new MapProjectJpa();
project2.setDestinationTerminology("RXNORM");
project2.setDestinationTerminologyVersion("2016AA");
project2.setGroupStructure(true);
project2.setMapRefsetPattern(MapRefsetPattern.ComplexMap);
project2.setName("MEDICATION to RXNORM with REVIEW");
project2.setProjectSpecificAlgorithmHandlerClass(
"org.ihtsdo.otf.mapping.jpa.handlers.MedicationProjectSpecificAlgorithmHandler");
project2.setPropagatedFlag(false);
project2.setPublic(true);
project2.setRefSetId("23456");
project2.setRefSetName("Medication to RXNORM Mapping");
project2.setSourceTerminology("MEDICATION");
project2.setSourceTerminologyVersion("latest");
project2.setWorkflowType(WorkflowType.REVIEW_PROJECT);
project2.setMapRelationStyle(RelationStyle.RELATIONSHIP_STYLE);
project2.getScopeConcepts().add("root");
project2.setScopeDescendantsFlag(true);
project2.setMapRelations(mapRelations);
project2.getMapLeads().add(lead1);
project2.getMapLeads().add(lead2);
project2.getMapSpecialists().add(specialist1);
project2.getMapSpecialists().add(specialist2);
project2.getMapSpecialists().add(specialist3);
// Add project
Logger.getLogger(getClass()).info(" add " + project2);
project2 = mappingService.addMapProject(project2);
Logger.getLogger(getClass()).info(" compute workflow");
workflowService.computeWorkflow(project2);
// Create project SNOMED to ICD10
Logger.getLogger(getClass())
.info("Create project SNOMEDCT to ICD10 with NON-LEGACY");
MapProject project3 = new MapProjectJpa();
project3.setDestinationTerminology("ICD10");
project3.setDestinationTerminologyVersion("2016");
project3.setGroupStructure(true);
project3.setMapRefsetPattern(MapRefsetPattern.ExtendedMap);
project3.setName("SNOMEDCT to ICD10 with NON-LEGACY");
project3.setProjectSpecificAlgorithmHandlerClass(
"org.ihtsdo.otf.mapping.jpa.handlers.ICD10ProjectSpecificAlgorithmHandler");
project3.setPropagatedFlag(false);
project3.setPublic(true);
project3.setRefSetId("3333333");
project3.setRefSetName("SNOMEDCT to ICD10");
project3.setSourceTerminology("SNOMEDCT");
project3.setSourceTerminologyVersion("20140731");
project3.setWorkflowType(WorkflowType.CONFLICT_PROJECT);
project3.setMapRelationStyle(RelationStyle.MAP_CATEGORY_STYLE);
project3.getScopeConcepts().add("404684003");
project3.setScopeDescendantsFlag(true);
project3.setMapRelations(mapRelations);
project3.setMapAdvices(mapAdvices);
project3.getMapLeads().add(lead1);
project3.getMapLeads().add(lead2);
project3.getMapSpecialists().add(specialist1);
project3.getMapSpecialists().add(specialist2);
project3.getMapSpecialists().add(specialist3);
// Add project
Logger.getLogger(getClass()).info(" add " + project3);
project3 = mappingService.addMapProject(project3);
Logger.getLogger(getClass()).info(" compute workflow");
workflowService.computeWorkflow(project3);
// Create project SNOMED to ICD10CM
Logger.getLogger(getClass())
.info("Create project SNOMEDCT to ICD10CM with NON-LEGACY");
MapProject project4 = new MapProjectJpa();
project4.setDestinationTerminology("ICD10CM");
project4.setDestinationTerminologyVersion("2016");
project4.setGroupStructure(true);
project4.setMapRefsetPattern(MapRefsetPattern.ExtendedMap);
project4.setName("SNOMEDCT to ICD10CM with NON-LEGACY");
project4.setProjectSpecificAlgorithmHandlerClass(
"org.ihtsdo.otf.mapping.jpa.handlers.ICD10ProjectSpecificAlgorithmHandler");
project4.setPropagatedFlag(false);
project4.setPublic(true);
project4.setRefSetId("3333333");
project4.setRefSetName("SNOMEDCT to ICD10CM");
project4.setSourceTerminology("SNOMEDCT");
project4.setSourceTerminologyVersion("20140731");
project4.setWorkflowType(WorkflowType.CONFLICT_PROJECT);
project4.setMapRelationStyle(RelationStyle.MAP_CATEGORY_STYLE);
project4.getScopeConcepts().add("404684003");
project4.setScopeDescendantsFlag(true);
project4.setMapRelations(mapRelations);
project4.setMapAdvices(mapAdvices);
project4.getMapLeads().add(lead1);
project4.getMapLeads().add(lead2);
project4.getMapSpecialists().add(specialist1);
project4.getMapSpecialists().add(specialist2);
project4.getMapSpecialists().add(specialist3);
// Add project
Logger.getLogger(getClass()).info(" add " + project4);
project4 = mappingService.addMapProject(project4);
Logger.getLogger(getClass()).info(" compute workflow");
workflowService.computeWorkflow(project4);
//
// Cross-project steps
//
// Start editing cycle
SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("yyyyMMdd");
project1.setEditingCycleBeginDate(DATE_FORMAT.parse("20160101"));
mappingService.updateMapProject(project1);
project2.setEditingCycleBeginDate(DATE_FORMAT.parse("20160101"));
mappingService.updateMapProject(project2);
    project3.setEditingCycleBeginDate(DATE_FORMAT.parse("20160101"));
    mappingService.updateMapProject(project3);
    project4.setEditingCycleBeginDate(DATE_FORMAT.parse("20160101"));
    mappingService.updateMapProject(project4);
// Reports
ReportDefinition def1 = new ReportDefinitionJpa();
def1.setDescription("Specialist productivity report.");
def1.setDiffReport(false);
def1.setFrequency(ReportFrequency.DAILY);
def1.setName("Specialist productivity");
def1.setQACheck(false);
def1.setQuery(
"select distinct mu.userName value, mr.conceptId itemId, mr.conceptName itemName "
+ "from map_records_AUD mr, map_projects mp, map_users mu "
+ "where mp.id = :MAP_PROJECT_ID: "
+ " and mr.lastModified >= :EDITING_CYCLE_BEGIN_DATE: "
+ " and mr.mapProjectId = mp.id "
+ " and mr.workflowStatus IN ('REVIEW_NEEDED','EDITING_DONE','QA_RESOLVED') "
+ " and mu.userName not in ('loader','qa') "
+ " and mr.owner_id = mu.id " + " and mr.revtype != 2 "
+ "group by mu.userName, mr.lastModified " + "ORDER BY 1,2");
def1.setQueryType(ReportQueryType.SQL);
def1.setResultType(ReportResultType.CONCEPT);
def1.setRoleRequired(MapUserRole.SPECIALIST);
def1.setTimePeriod(null);
reportService.addReportDefinition(def1);
ReportDefinition def2 = new ReportDefinitionJpa();
def2.setDescription("Lead productivity report.");
def2.setDiffReport(false);
def2.setFrequency(ReportFrequency.DAILY);
def2.setName("Lead productivity");
def2.setQACheck(false);
def2.setQuery(
"select distinct mu.userName value, mr.conceptId itemId, mr.conceptName itemName "
+ "from map_records_AUD mr, map_projects mp, map_users mu "
+ "where mp.id = :MAP_PROJECT_ID: "
+ " and mr.lastModified >= :EDITING_CYCLE_BEGIN_DATE: "
+ " and mr.mapProjectId = mp.id "
+ " and mr.workflowStatus IN ('READY_FOR_PUBLICATION') "
+ " and mu.userName != 'loader' " + " and mr.owner_id = mu.id "
+ " and mr.lastModified <= :TIMESTAMP: "
+ " and mr.revtype != 2 " + " and mr.conceptId IN "
+ " (select conceptid from map_records_AUD mr2 "
+ " where mapProjectId = :MAP_PROJECT_ID: "
+ " and workflowStatus in ('CONFLICT_RESOLVED','REVIEW_RESOLVED') "
+ " and mr.owner_id = mr2.owner_id) "
+ "group by mu.userName, mr.lastModified " + "ORDER BY 1,2");
def2.setQueryType(ReportQueryType.SQL);
def2.setResultType(ReportResultType.CONCEPT);
def2.setRoleRequired(MapUserRole.LEAD);
def2.setTimePeriod(null);
reportService.addReportDefinition(def2);
// Daily specialist report
ReportDefinition def3 = new ReportDefinitionJpa();
def3.setDescription("Daily specialist productivity report.");
def3.setDiffReport(true);
def3.setFrequency(ReportFrequency.DAILY);
def3.setName("Daily specialist productivity");
def3.setQACheck(false);
def3.setQuery(null);
def3.setQueryType(ReportQueryType.NONE);
def3.setResultType(ReportResultType.CONCEPT);
def3.setRoleRequired(MapUserRole.SPECIALIST);
def3.setTimePeriod(ReportTimePeriod.DAILY);
def3.setDiffReportDefinitionName("Specialist productivity");
reportService.addReportDefinition(def3);
// Daily lead report
ReportDefinition def4 = new ReportDefinitionJpa();
def4.setDescription("Daily lead productivity report.");
def4.setDiffReport(true);
def4.setFrequency(ReportFrequency.DAILY);
def4.setName("Daily lead productivity");
def4.setQACheck(false);
def4.setQuery(null);
def4.setQueryType(ReportQueryType.NONE);
def4.setResultType(ReportResultType.CONCEPT);
def4.setRoleRequired(MapUserRole.LEAD);
def4.setTimePeriod(ReportTimePeriod.DAILY);
    def4.setDiffReportDefinitionName("Lead productivity");
reportService.addReportDefinition(def4);
// specialist productivity, lead productivity
// QA checks
ReportDefinition qa1 = new ReportDefinitionJpa();
qa1.setDescription(
"Sample QA check to identify mappings involving the word 'peanut'");
qa1.setDiffReport(false);
qa1.setFrequency(ReportFrequency.ON_DEMAND);
qa1.setName("Peanut records");
qa1.setQACheck(true);
qa1.setQuery(
"select distinct mr.conceptName value, mr.conceptId itemId, mr.conceptName itemName "
+ "from map_records mr "
+ "where mr.mapProjectId = :MAP_PROJECT_ID: "
+ " and mr.conceptName like '%eanut%'");
qa1.setQueryType(ReportQueryType.SQL);
qa1.setResultType(ReportResultType.CONCEPT);
qa1.setRoleRequired(MapUserRole.SPECIALIST);
qa1.setTimePeriod(null);
reportService.addReportDefinition(qa1);
// Add report definitions to the project(s)
project1.getReportDefinitions().add(def1);
project1.getReportDefinitions().add(def2);
project1.getReportDefinitions().add(def3);
project1.getReportDefinitions().add(def4);
project1.getReportDefinitions().add(qa1);
mappingService.updateMapProject(project1);
project2.getReportDefinitions().add(def1);
project2.getReportDefinitions().add(def2);
project2.getReportDefinitions().add(def3);
project2.getReportDefinitions().add(def4);
project2.getReportDefinitions().add(qa1);
mappingService.updateMapProject(project2);
project3.getReportDefinitions().add(def1);
project3.getReportDefinitions().add(def2);
project3.getReportDefinitions().add(def3);
project3.getReportDefinitions().add(def4);
project3.getReportDefinitions().add(qa1);
mappingService.updateMapProject(project3);
project4.getReportDefinitions().add(def1);
project4.getReportDefinitions().add(def2);
project4.getReportDefinitions().add(def3);
project4.getReportDefinitions().add(def4);
project4.getReportDefinitions().add(qa1);
mappingService.updateMapProject(project4);
// Generate the reports
reportService.generateDailyReports(project1, lead1);
reportService.generateDailyReports(project2, lead1);
reportService.generateDailyReports(project3, lead1);
reportService.generateDailyReports(project4, lead1);
// TODO: add qa check for "invalid codes"
}
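
  /**
   * Illustrative sketch only, added for clarity and not referenced anywhere: roughly what the
   * "invalid codes" QA check mentioned in the TODO above might look like, modeled on the
   * "Peanut records" definition. The SQL text is a placeholder assumption, not a working query.
   *
   * @return the sketched report definition
   */
  @SuppressWarnings("unused")
  private ReportDefinition makeInvalidCodesQaCheckSketch() {
    final ReportDefinition qa = new ReportDefinitionJpa();
    qa.setDescription(
        "QA check flagging map entries whose target code is absent from the destination terminology.");
    qa.setDiffReport(false);
    qa.setFrequency(ReportFrequency.ON_DEMAND);
    qa.setName("Invalid codes");
    qa.setQACheck(true);
    // Placeholder query text - a real check would join map entries to the destination concepts
    qa.setQuery("select ... /* placeholder: report targetId values with no matching concept */");
    qa.setQueryType(ReportQueryType.SQL);
    qa.setResultType(ReportResultType.CONCEPT);
    qa.setRoleRequired(MapUserRole.LEAD);
    qa.setTimePeriod(null);
    return qa;
  }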
/**
* Make user.
*
* @param userName the user name
* @param name the name
* @return the user
*/
@SuppressWarnings("static-method")
private MapUserJpa makeMapUser(String userName, String name) {
final MapUserJpa user = new MapUserJpa();
user.setUserName(userName);
user.setName(name);
user.setEmail(userName + "@example.com");
user.setApplicationRole(MapUserRole.VIEWER);
return user;
}
}
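
/*
 * Illustrative sketch added for clarity; not part of the original mojo. The class can also be
 * driven outside Maven for local experiments, assuming the database referenced by the
 * application's configuration has already been created and loaded - a precondition this sketch
 * does not verify. The normal path is the "generate-demo-data" goal run through the
 * admin/loader pom execution mentioned in the class Javadoc.
 */
class GenerateDemoDataSketch {

  public static void main(String[] args) throws Exception {
    final GenerateDemoDataMojo mojo = new GenerateDemoDataMojo();
    // Creates users, map relations/advices, the demo projects and their report definitions
    mojo.execute();
  }
}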
|
admin/mojo/src/main/java/org/ihtsdo/otf/mapping/mojo/GenerateDemoDataMojo.java
|
/*
* Copyright 2015 West Coast Informatics, LLC
*/
package org.ihtsdo.otf.mapping.mojo;
import java.text.SimpleDateFormat;
import java.util.HashSet;
import java.util.Set;
import org.apache.log4j.Logger;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.ihtsdo.otf.mapping.helpers.MapRefsetPattern;
import org.ihtsdo.otf.mapping.helpers.MapUserRole;
import org.ihtsdo.otf.mapping.helpers.RelationStyle;
import org.ihtsdo.otf.mapping.helpers.ReportFrequency;
import org.ihtsdo.otf.mapping.helpers.ReportQueryType;
import org.ihtsdo.otf.mapping.helpers.ReportResultType;
import org.ihtsdo.otf.mapping.helpers.ReportTimePeriod;
import org.ihtsdo.otf.mapping.helpers.WorkflowType;
import org.ihtsdo.otf.mapping.jpa.MapAdviceJpa;
import org.ihtsdo.otf.mapping.jpa.MapProjectJpa;
import org.ihtsdo.otf.mapping.jpa.MapRelationJpa;
import org.ihtsdo.otf.mapping.jpa.MapUserJpa;
import org.ihtsdo.otf.mapping.jpa.services.ContentServiceJpa;
import org.ihtsdo.otf.mapping.jpa.services.MappingServiceJpa;
import org.ihtsdo.otf.mapping.jpa.services.ReportServiceJpa;
import org.ihtsdo.otf.mapping.jpa.services.SecurityServiceJpa;
import org.ihtsdo.otf.mapping.jpa.services.WorkflowServiceJpa;
import org.ihtsdo.otf.mapping.model.MapAdvice;
import org.ihtsdo.otf.mapping.model.MapProject;
import org.ihtsdo.otf.mapping.model.MapRelation;
import org.ihtsdo.otf.mapping.reports.ReportDefinition;
import org.ihtsdo.otf.mapping.reports.ReportDefinitionJpa;
import org.ihtsdo.otf.mapping.services.ContentService;
import org.ihtsdo.otf.mapping.services.MappingService;
import org.ihtsdo.otf.mapping.services.ReportService;
import org.ihtsdo.otf.mapping.services.SecurityService;
import org.ihtsdo.otf.mapping.services.WorkflowService;
/**
* Mojo for generating sample data for a demo of map application.
*
* See admin/loader/pom.xml for a sample execution.
*
* @goal generate-demo-data
*/
public class GenerateDemoDataMojo extends AbstractMojo {
/** The security service. */
SecurityService securityService = null;
/** The content service. */
ContentService contentService = null;
/** The mapping service. */
MappingService mappingService = null;
/** The workflow service. */
WorkflowService workflowService = null;
/** The report service. */
ReportService reportService = null;
/* see superclass */
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
getLog().info("Generate demo data");
try {
securityService = new SecurityServiceJpa();
contentService = new ContentServiceJpa();
mappingService = new MappingServiceJpa();
workflowService = new WorkflowServiceJpa();
reportService = new ReportServiceJpa();
loadDemoData();
getLog().info("Finished");
} catch (Exception e) {
e.printStackTrace();
throw new MojoExecutionException("Ad-hoc mojo failed to complete", e);
} finally {
try {
securityService.close();
contentService.close();
mappingService.close();
workflowService.close();
} catch (Exception e) {
e.printStackTrace();
throw new MojoExecutionException(
"Ad-hoc mojo failed to close services.", e);
}
}
}
/**
* Load sample data.
*
* @throws Exception the exception
*/
private void loadDemoData() throws Exception {
//
// Add lead users
//
/* Logger.getLogger(getClass()).info("Add new lead users");
MapUserJpa lead1 = (MapUserJpa) securityService.getMapUser("lead1");
if (lead1 == null) {
lead1 = makeMapUser("lead1", "Lead1");
lead1 = (MapUserJpa) securityService.addMapUser(lead1);
}
MapUserJpa lead2 = (MapUserJpa) securityService.getMapUser("lead2");
if (lead2 == null) {
lead2 = makeMapUser("lead2", "Lead2");
lead2 = (MapUserJpa) securityService.addMapUser(lead2);
}
MapUserJpa lead3 = (MapUserJpa) securityService.getMapUser("lead3");
if (lead3 == null) {
lead3 = makeMapUser("lead3", "Lead3");
lead3 = (MapUserJpa) securityService.addMapUser(lead3);
}
*/
//
// Add specialist users
//
/* Logger.getLogger(getClass()).info("Add new specialist users");
MapUserJpa specialist1 =
(MapUserJpa) securityService.getMapUser("specialist1");
if (specialist1 == null) {
specialist1 = makeMapUser("specialist1", "Specialist1");
specialist1 = (MapUserJpa) securityService.addMapUser(specialist1);
}
MapUserJpa specialist2 =
(MapUserJpa) securityService.getMapUser("specialist2");
if (specialist2 == null) {
specialist2 = makeMapUser("specialist2", "Specialist2");
specialist2 = (MapUserJpa) securityService.addMapUser(specialist2);
}
MapUserJpa specialist3 =
(MapUserJpa) securityService.getMapUser("specialist3");
if (specialist3 == null) {
specialist3 = makeMapUser("specialist3", "Specialist3");
specialist3 = (MapUserJpa) securityService.addMapUser(specialist3);
}
*/
//
// Mapping relationships
//
/* final Set<MapRelation> mapRelations = new HashSet<>();
for (final String rel : new String[] {
"exact", "partial", "narrower", "broader", "none"
}) {
final String ucRel = rel.substring(0, 1).toUpperCase() + rel.substring(1);
final MapRelation relation = new MapRelationJpa();
relation.setAbbreviation(ucRel);
relation.setAllowableForNullTarget(false);
relation.setComputed(false);
if (rel.equals("none")) {
relation.setAllowableForNullTarget(true);
relation.setComputed(true);
}
relation.setName(ucRel + " match");
relation.setTerminologyId(rel);
mappingService.addMapRelation(relation);
mapRelations.add(relation);
}
*/
//
// Mapping Advice
//
/* final Set<MapAdvice> mapAdvices = new HashSet<>();
for (final String adv : new String[] {
"Test advice 1", "Test advice 2", "Test advice 3", "Null target advice"
}) {
final MapAdvice advice = new MapAdviceJpa();
advice.setAllowableForNullTarget(false);
advice.setComputed(false);
if (adv.contains("Null")) {
advice.setAllowableForNullTarget(true);
advice.setComputed(true);
}
advice.setDetail(adv);
advice.setName(adv);
mappingService.addMapAdvice(advice);
mapAdvices.add(advice);
}
*/
//
// Create project SNOMEDCT to MedDRA project
//
Logger.getLogger(getClass())
.info("Create project SNOMEDCT to MedDRA with REVIEW");
MapProject project1 = new MapProjectJpa();
project1.setDestinationTerminology("MEDDRA");
project1.setDestinationTerminologyVersion("latest");
project1.setGroupStructure(true);
project1.setMapRefsetPattern(MapRefsetPattern.ComplexMap);
project1.setName("SNOMEDCT to MedDRA with REVIEW");
project1.setProjectSpecificAlgorithmHandlerClass(
"org.ihtsdo.otf.mapping.jpa.handlers.DefaultProjectSpecificAlgorithmHandler");
project1.setPropagatedFlag(false);
project1.setPublic(false);
project1.setTeamBased(true);
project1.setRefSetId("12345");
project1.setRefSetName("SNOMED to MedDRA Refset");
project1.setSourceTerminology("SNOMEDCT");
project1.setSourceTerminologyVersion("latest");
project1.setWorkflowType(WorkflowType.CONFLICT_PROJECT);
project1.setMapRelationStyle(RelationStyle.NONE);
project1.getScopeConcepts().add("404684003");
project1.setScopeDescendantsFlag(false);
/* project1.setMapRelations(mapRelations);
project1.getMapLeads().add(lead1);
project1.getMapLeads().add(lead2);
project1.getMapSpecialists().add(specialist1);
project1.getMapSpecialists().add(specialist2);
project1.getMapSpecialists().add(specialist3);
*/
// Add project
Logger.getLogger(getClass()).info(" add " + project1);
project1 = mappingService.addMapProject(project1);
Logger.getLogger(getClass()).info(" compute workflow");
workflowService.computeWorkflow(project1);
//
// Create project SNOMEDCT to MedDRA project
//
Logger.getLogger(getClass())
.info("Create project MedDRA to SNOMEDCT with REVIEW");
MapProject project2 = new MapProjectJpa();
project2.setDestinationTerminology("SNOMEDCT");
project2.setDestinationTerminologyVersion("latest");
project2.setGroupStructure(true);
project2.setMapRefsetPattern(MapRefsetPattern.ComplexMap);
project2.setName("MedDRA to SNOMEDCT with REVIEW");
project2.setProjectSpecificAlgorithmHandlerClass(
"org.ihtsdo.otf.mapping.jpa.handlers.DefaultProjectSpecificAlgorithmHandler");
project2.setPropagatedFlag(false);
project2.setPublic(false);
project2.setTeamBased(true);
project2.setRefSetId("67890");
project2.setRefSetName("MedDRA to SNOMEDCT Refset");
project2.setSourceTerminology("MEDDRA");
project2.setSourceTerminologyVersion("latest");
project2.setWorkflowType(WorkflowType.CONFLICT_PROJECT);
project2.setMapRelationStyle(RelationStyle.NONE);
//project2.getScopeConcepts().add("root");
project2.setScopeDescendantsFlag(false);
/* project2.setMapRelations(mapRelations);
project2.getMapLeads().add(lead1);
project2.getMapLeads().add(lead2);
project2.getMapSpecialists().add(specialist1);
project2.getMapSpecialists().add(specialist2);
project2.getMapSpecialists().add(specialist3);
*/
// Add project
Logger.getLogger(getClass()).info(" add " + project2);
project2 = mappingService.addMapProject(project2);
// TODO: Figure out why hanging. Until then, commented out.
/*
Logger.getLogger(getClass()).info(" compute workflow");
workflowService.computeWorkflow(project2);
*/
//
// Cross-project steps
//
// Start editing cycle
SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("yyyyMMdd");
project1.setEditingCycleBeginDate(DATE_FORMAT.parse("20190116"));
project2.setEditingCycleBeginDate(DATE_FORMAT.parse("20190116"));
mappingService.updateMapProject(project1);
mappingService.updateMapProject(project2);
// Reports
/*
ReportDefinition def1 = new ReportDefinitionJpa();
def1.setDescription("Specialist productivity report.");
def1.setDiffReport(false);
def1.setFrequency(ReportFrequency.DAILY);
def1.setName("Specialist productivity");
def1.setQACheck(false);
def1.setQuery(
"select distinct mu.userName value, mr.conceptId itemId, mr.conceptName itemName "
+ "from map_records_AUD mr, map_projects mp, map_users mu "
+ "where mp.id = :MAP_PROJECT_ID: "
+ " and mr.lastModified >= :EDITING_CYCLE_BEGIN_DATE: "
+ " and mr.mapProjectId = mp.id "
+ " and mr.workflowStatus IN ('REVIEW_NEEDED','EDITING_DONE','QA_RESOLVED') "
+ " and mu.userName not in ('loader','qa') "
+ " and mr.owner_id = mu.id " + " and mr.revtype != 2 "
+ "group by mu.userName, mr.lastModified " + "ORDER BY 1,2");
def1.setQueryType(ReportQueryType.SQL);
def1.setResultType(ReportResultType.CONCEPT);
def1.setRoleRequired(MapUserRole.SPECIALIST);
def1.setTimePeriod(null);
reportService.addReportDefinition(def1);
ReportDefinition def2 = new ReportDefinitionJpa();
def2.setDescription("Lead productivity report.");
def2.setDiffReport(false);
def2.setFrequency(ReportFrequency.DAILY);
def2.setName("Lead productivity");
def2.setQACheck(false);
def2.setQuery(
"select distinct mu.userName value, mr.conceptId itemId, mr.conceptName itemName "
+ "from map_records_AUD mr, map_projects mp, map_users mu "
+ "where mp.id = :MAP_PROJECT_ID: "
+ " and mr.lastModified >= :EDITING_CYCLE_BEGIN_DATE: "
+ " and mr.mapProjectId = mp.id "
+ " and mr.workflowStatus IN ('READY_FOR_PUBLICATION') "
+ " and mu.userName != 'loader' " + " and mr.owner_id = mu.id "
+ " and mr.lastModified <= :TIMESTAMP: "
+ " and mr.revtype != 2 " + " and mr.conceptId IN "
+ " (select conceptid from map_records_AUD mr2 "
+ " where mapProjectId = :MAP_PROJECT_ID: "
+ " and workflowStatus in ('CONFLICT_RESOLVED','REVIEW_RESOLVED') "
+ " and mr.owner_id = mr2.owner_id) "
+ "group by mu.userName, mr.lastModified " + "ORDER BY 1,2");
def2.setQueryType(ReportQueryType.SQL);
def2.setResultType(ReportResultType.CONCEPT);
def2.setRoleRequired(MapUserRole.LEAD);
def2.setTimePeriod(null);
reportService.addReportDefinition(def2);
// Daily specialist report
ReportDefinition def3 = new ReportDefinitionJpa();
def3.setDescription("Daily specialist productivity report.");
def3.setDiffReport(true);
def3.setFrequency(ReportFrequency.DAILY);
def3.setName("Daily specialist productivity");
def3.setQACheck(false);
def3.setQuery(null);
def3.setQueryType(ReportQueryType.NONE);
def3.setResultType(ReportResultType.CONCEPT);
def3.setRoleRequired(MapUserRole.SPECIALIST);
def3.setTimePeriod(ReportTimePeriod.DAILY);
def3.setDiffReportDefinitionName("Specialist productivity");
reportService.addReportDefinition(def3);
// Daily lead report
ReportDefinition def4 = new ReportDefinitionJpa();
def4.setDescription("Daily lead productivity report.");
def4.setDiffReport(true);
def4.setFrequency(ReportFrequency.DAILY);
def4.setName("Daily lead productivity");
def4.setQACheck(false);
def4.setQuery(null);
def4.setQueryType(ReportQueryType.NONE);
def4.setResultType(ReportResultType.CONCEPT);
def4.setRoleRequired(MapUserRole.LEAD);
def4.setTimePeriod(ReportTimePeriod.DAILY);
    def4.setDiffReportDefinitionName("Lead productivity");
reportService.addReportDefinition(def4);
*/
/*
// specialist productivity, lead productivity
// QA checks
ReportDefinition qa1 = new ReportDefinitionJpa();
qa1.setDescription(
"Sample QA check to identify mappings involving the word 'peanut'");
qa1.setDiffReport(false);
qa1.setFrequency(ReportFrequency.ON_DEMAND);
qa1.setName("Peanut records");
qa1.setQACheck(true);
qa1.setQuery(
"select distinct mr.conceptName value, mr.conceptId itemId, mr.conceptName itemName "
+ "from map_records mr "
+ "where mr.mapProjectId = :MAP_PROJECT_ID: "
+ " and mr.conceptName like '%eanut%'");
qa1.setQueryType(ReportQueryType.SQL);
qa1.setResultType(ReportResultType.CONCEPT);
qa1.setRoleRequired(MapUserRole.SPECIALIST);
qa1.setTimePeriod(null);
reportService.addReportDefinition(qa1);
// Add report definitions to the project(s)
project1.getReportDefinitions().add(def1);
project1.getReportDefinitions().add(def2);
project1.getReportDefinitions().add(def3);
project1.getReportDefinitions().add(def4);
project1.getReportDefinitions().add(qa1);
*/
mappingService.updateMapProject(project1);
// Generate the reports
/*reportService.generateDailyReports(project1, lead1);
// Add report definitions to the project(s)
project2.getReportDefinitions().add(def1);
project2.getReportDefinitions().add(def2);
project2.getReportDefinitions().add(def3);
project2.getReportDefinitions().add(def4);
project2.getReportDefinitions().add(qa1);
*/
mappingService.updateMapProject(project2);
// Generate the reports
/* reportService.generateDailyReports(project2, lead1);
*/
// TODO: add qa check for "invalid codes"
}
/**
* Make user.
*
* @param userName the user name
* @param name the name
* @return the user
*/
@SuppressWarnings("static-method")
private MapUserJpa makeMapUser(String userName, String name) {
final MapUserJpa user = new MapUserJpa();
user.setUserName(userName);
user.setName(name);
user.setEmail(userName + "@example.com");
user.setApplicationRole(MapUserRole.VIEWER);
return user;
}
}
|
Rolling back modifications to GenerateDemoDataMojo.java so it can load the demo database, instead of the MedDRA data.
|
admin/mojo/src/main/java/org/ihtsdo/otf/mapping/mojo/GenerateDemoDataMojo.java
|
Rolling back modifications to GenerateDemoDataMojo.java so it can load the demo database, instead of the MedDRA data.
|
<ide><path>dmin/mojo/src/main/java/org/ihtsdo/otf/mapping/mojo/GenerateDemoDataMojo.java
<ide> /*
<del> * Copyright 2015 West Coast Informatics, LLC
<add> * Copyright 2019 West Coast Informatics, LLC
<ide> */
<ide> package org.ihtsdo.otf.mapping.mojo;
<ide>
<ide> //
<ide> // Add lead users
<ide> //
<del>/* Logger.getLogger(getClass()).info("Add new lead users");
<add> Logger.getLogger(getClass()).info("Add new lead users");
<ide> MapUserJpa lead1 = (MapUserJpa) securityService.getMapUser("lead1");
<ide> if (lead1 == null) {
<ide> lead1 = makeMapUser("lead1", "Lead1");
<ide> lead3 = makeMapUser("lead3", "Lead3");
<ide> lead3 = (MapUserJpa) securityService.addMapUser(lead3);
<ide> }
<del>*/
<add>
<ide> //
<ide> // Add specialist users
<ide> //
<del>/* Logger.getLogger(getClass()).info("Add new specialist users");
<add> Logger.getLogger(getClass()).info("Add new specialist users");
<ide> MapUserJpa specialist1 =
<ide> (MapUserJpa) securityService.getMapUser("specialist1");
<ide> if (specialist1 == null) {
<ide> specialist3 = makeMapUser("specialist3", "Specialist3");
<ide> specialist3 = (MapUserJpa) securityService.addMapUser(specialist3);
<ide> }
<del>*/
<add>
<ide> //
<ide> // Mapping relationships
<ide> //
<del>/* final Set<MapRelation> mapRelations = new HashSet<>();
<add> final Set<MapRelation> mapRelations = new HashSet<>();
<ide> for (final String rel : new String[] {
<ide> "exact", "partial", "narrower", "broader", "none"
<ide> }) {
<ide> mappingService.addMapRelation(relation);
<ide> mapRelations.add(relation);
<ide> }
<del>*/
<add>
<ide> //
<ide> // Mapping Advice
<ide> //
<del>/* final Set<MapAdvice> mapAdvices = new HashSet<>();
<add> final Set<MapAdvice> mapAdvices = new HashSet<>();
<ide> for (final String adv : new String[] {
<ide> "Test advice 1", "Test advice 2", "Test advice 3", "Null target advice"
<ide> }) {
<ide> mappingService.addMapAdvice(advice);
<ide> mapAdvices.add(advice);
<ide> }
<del>*/
<del> //
<del> // Create project SNOMEDCT to MedDRA project
<add>
<add> //
<add> // Create project Allergy to SNOMED project
<ide> //
<ide> Logger.getLogger(getClass())
<del> .info("Create project SNOMEDCT to MedDRA with REVIEW");
<add> .info("Create project ALLERGY to SNOMEDCT with REVIEW");
<ide> MapProject project1 = new MapProjectJpa();
<del> project1.setDestinationTerminology("MEDDRA");
<del> project1.setDestinationTerminologyVersion("latest");
<add> project1.setDestinationTerminology("SNOMEDCT");
<add> project1.setDestinationTerminologyVersion("20140731");
<ide> project1.setGroupStructure(true);
<ide> project1.setMapRefsetPattern(MapRefsetPattern.ComplexMap);
<del> project1.setName("SNOMEDCT to MedDRA with REVIEW");
<add> project1.setName("ALLERGY to SNOMEDCT with REVIEW");
<ide> project1.setProjectSpecificAlgorithmHandlerClass(
<del> "org.ihtsdo.otf.mapping.jpa.handlers.DefaultProjectSpecificAlgorithmHandler");
<add> "org.ihtsdo.otf.mapping.jpa.handlers.AllergyProjectSpecificAlgorithmHandler");
<ide> project1.setPropagatedFlag(false);
<del> project1.setPublic(false);
<del> project1.setTeamBased(true);
<add> project1.setPublic(true);
<ide> project1.setRefSetId("12345");
<del> project1.setRefSetName("SNOMED to MedDRA Refset");
<del> project1.setSourceTerminology("SNOMEDCT");
<add> project1.setRefSetName("Allergy to SNOMED Refset");
<add> project1.setSourceTerminology("ALLERGY");
<ide> project1.setSourceTerminologyVersion("latest");
<del> project1.setWorkflowType(WorkflowType.CONFLICT_PROJECT);
<del> project1.setMapRelationStyle(RelationStyle.NONE);
<del> project1.getScopeConcepts().add("404684003");
<del> project1.setScopeDescendantsFlag(false);
<del> /* project1.setMapRelations(mapRelations);
<add> project1.setWorkflowType(WorkflowType.REVIEW_PROJECT);
<add> project1.setMapRelationStyle(RelationStyle.RELATIONSHIP_STYLE);
<add> project1.getScopeConcepts().add("root");
<add> project1.setScopeDescendantsFlag(true);
<add> project1.setMapRelations(mapRelations);
<ide> project1.getMapLeads().add(lead1);
<ide> project1.getMapLeads().add(lead2);
<ide> project1.getMapSpecialists().add(specialist1);
<ide> project1.getMapSpecialists().add(specialist2);
<ide> project1.getMapSpecialists().add(specialist3);
<del>*/
<add>
<ide> // Add project
<ide> Logger.getLogger(getClass()).info(" add " + project1);
<ide> project1 = mappingService.addMapProject(project1);
<ide> Logger.getLogger(getClass()).info(" compute workflow");
<ide> workflowService.computeWorkflow(project1);
<ide>
<del> //
<del> // Create project SNOMEDCT to MedDRA project
<del> //
<add> // Create project MEDICATION to RXNORM with REVIEW
<ide> Logger.getLogger(getClass())
<del> .info("Create project MedDRA to SNOMEDCT with REVIEW");
<add> .info("Create project MEDICATION to RXNORM with REVIEW");
<ide> MapProject project2 = new MapProjectJpa();
<del> project2.setDestinationTerminology("SNOMEDCT");
<del> project2.setDestinationTerminologyVersion("latest");
<add> project2.setDestinationTerminology("RXNORM");
<add> project2.setDestinationTerminologyVersion("2016AA");
<ide> project2.setGroupStructure(true);
<ide> project2.setMapRefsetPattern(MapRefsetPattern.ComplexMap);
<del> project2.setName("MedDRA to SNOMEDCT with REVIEW");
<add> project2.setName("MEDICATION to RXNORM with REVIEW");
<ide> project2.setProjectSpecificAlgorithmHandlerClass(
<del> "org.ihtsdo.otf.mapping.jpa.handlers.DefaultProjectSpecificAlgorithmHandler");
<add> "org.ihtsdo.otf.mapping.jpa.handlers.MedicationProjectSpecificAlgorithmHandler");
<ide> project2.setPropagatedFlag(false);
<del> project2.setPublic(false);
<del> project2.setTeamBased(true);
<del> project2.setRefSetId("67890");
<del> project2.setRefSetName("MedDRA to SNOMEDCT Refset");
<del> project2.setSourceTerminology("MEDDRA");
<add> project2.setPublic(true);
<add> project2.setRefSetId("23456");
<add> project2.setRefSetName("Medication to RXNORM Mapping");
<add> project2.setSourceTerminology("MEDICATION");
<ide> project2.setSourceTerminologyVersion("latest");
<del> project2.setWorkflowType(WorkflowType.CONFLICT_PROJECT);
<del> project2.setMapRelationStyle(RelationStyle.NONE);
<del> //project2.getScopeConcepts().add("root");
<del> project2.setScopeDescendantsFlag(false);
<del>/* project2.setMapRelations(mapRelations);
<add> project2.setWorkflowType(WorkflowType.REVIEW_PROJECT);
<add> project2.setMapRelationStyle(RelationStyle.RELATIONSHIP_STYLE);
<add> project2.getScopeConcepts().add("root");
<add> project2.setScopeDescendantsFlag(true);
<add> project2.setMapRelations(mapRelations);
<ide> project2.getMapLeads().add(lead1);
<ide> project2.getMapLeads().add(lead2);
<ide> project2.getMapSpecialists().add(specialist1);
<ide> project2.getMapSpecialists().add(specialist2);
<ide> project2.getMapSpecialists().add(specialist3);
<del>*/
<add>
<ide> // Add project
<ide> Logger.getLogger(getClass()).info(" add " + project2);
<ide> project2 = mappingService.addMapProject(project2);
<del>
<del> // TODO: Figure out why hanging. Until then, commented out.
<del> /*
<ide> Logger.getLogger(getClass()).info(" compute workflow");
<ide> workflowService.computeWorkflow(project2);
<del> */
<add>
<add> // Add project
<add> Logger.getLogger(getClass()).info(" add " + project1);
<add> project1 = mappingService.addMapProject(project1);
<add> Logger.getLogger(getClass()).info(" compute workflow");
<add> workflowService.computeWorkflow(project1);
<add>
<add> // Create project SNOMED to ICD10
<add> Logger.getLogger(getClass())
<add> .info("Create project SNOMEDCT to ICD10 with NON-LEGACY");
<add> MapProject project3 = new MapProjectJpa();
<add> project3.setDestinationTerminology("ICD10");
<add> project3.setDestinationTerminologyVersion("2016");
<add> project3.setGroupStructure(true);
<add> project3.setMapRefsetPattern(MapRefsetPattern.ExtendedMap);
<add> project3.setName("SNOMEDCT to ICD10 with NON-LEGACY");
<add> project3.setProjectSpecificAlgorithmHandlerClass(
<add> "org.ihtsdo.otf.mapping.jpa.handlers.ICD10ProjectSpecificAlgorithmHandler");
<add> project3.setPropagatedFlag(false);
<add> project3.setPublic(true);
<add> project3.setRefSetId("3333333");
<add> project3.setRefSetName("SNOMEDCT to ICD10");
<add> project3.setSourceTerminology("SNOMEDCT");
<add> project3.setSourceTerminologyVersion("20140731");
<add> project3.setWorkflowType(WorkflowType.CONFLICT_PROJECT);
<add> project3.setMapRelationStyle(RelationStyle.MAP_CATEGORY_STYLE);
<add> project3.getScopeConcepts().add("404684003");
<add> project3.setScopeDescendantsFlag(true);
<add> project3.setMapRelations(mapRelations);
<add> project3.setMapAdvices(mapAdvices);
<add> project3.getMapLeads().add(lead1);
<add> project3.getMapLeads().add(lead2);
<add> project3.getMapSpecialists().add(specialist1);
<add> project3.getMapSpecialists().add(specialist2);
<add> project3.getMapSpecialists().add(specialist3);
<add>
<add> // Add project
<add> Logger.getLogger(getClass()).info(" add " + project3);
<add> project3 = mappingService.addMapProject(project3);
<add> Logger.getLogger(getClass()).info(" compute workflow");
<add> workflowService.computeWorkflow(project3);
<add>
<add> // Create project SNOMED to ICD10CM
<add> Logger.getLogger(getClass())
<add> .info("Create project SNOMEDCT to ICD10CM with NON-LEGACY");
<add> MapProject project4 = new MapProjectJpa();
<add> project4.setDestinationTerminology("ICD10CM");
<add> project4.setDestinationTerminologyVersion("2016");
<add> project4.setGroupStructure(true);
<add> project4.setMapRefsetPattern(MapRefsetPattern.ExtendedMap);
<add> project4.setName("SNOMEDCT to ICD10CM with NON-LEGACY");
<add> project4.setProjectSpecificAlgorithmHandlerClass(
<add> "org.ihtsdo.otf.mapping.jpa.handlers.ICD10ProjectSpecificAlgorithmHandler");
<add> project4.setPropagatedFlag(false);
<add> project4.setPublic(true);
<add> project4.setRefSetId("3333333");
<add> project4.setRefSetName("SNOMEDCT to ICD10CM");
<add> project4.setSourceTerminology("SNOMEDCT");
<add> project4.setSourceTerminologyVersion("20140731");
<add> project4.setWorkflowType(WorkflowType.CONFLICT_PROJECT);
<add> project4.setMapRelationStyle(RelationStyle.MAP_CATEGORY_STYLE);
<add> project4.getScopeConcepts().add("404684003");
<add> project4.setScopeDescendantsFlag(true);
<add> project4.setMapRelations(mapRelations);
<add> project4.setMapAdvices(mapAdvices);
<add> project4.getMapLeads().add(lead1);
<add> project4.getMapLeads().add(lead2);
<add> project4.getMapSpecialists().add(specialist1);
<add> project4.getMapSpecialists().add(specialist2);
<add> project4.getMapSpecialists().add(specialist3);
<add>
<add> // Add project
<add> Logger.getLogger(getClass()).info(" add " + project4);
<add> project4 = mappingService.addMapProject(project4);
<add> Logger.getLogger(getClass()).info(" compute workflow");
<add> workflowService.computeWorkflow(project4);
<ide>
<ide> //
<ide> // Cross-project steps
<ide>
<ide> // Start editing cycle
<ide> SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("yyyyMMdd");
<del> project1.setEditingCycleBeginDate(DATE_FORMAT.parse("20190116"));
<del> project2.setEditingCycleBeginDate(DATE_FORMAT.parse("20190116"));
<add> project1.setEditingCycleBeginDate(DATE_FORMAT.parse("20160101"));
<ide> mappingService.updateMapProject(project1);
<add> project2.setEditingCycleBeginDate(DATE_FORMAT.parse("20160101"));
<ide> mappingService.updateMapProject(project2);
<add> 	project3.setEditingCycleBeginDate(DATE_FORMAT.parse("20160101"));
<add> mappingService.updateMapProject(project3);
<add> 	project4.setEditingCycleBeginDate(DATE_FORMAT.parse("20160101"));
<add> mappingService.updateMapProject(project4);
<ide>
<ide> // Reports
<del> /*
<ide> ReportDefinition def1 = new ReportDefinitionJpa();
<del>
<ide> def1.setDescription("Specialist productivity report.");
<ide> def1.setDiffReport(false);
<ide> def1.setFrequency(ReportFrequency.DAILY);
<ide> def4.setTimePeriod(ReportTimePeriod.DAILY);
<ide> 	def4.setDiffReportDefinitionName("Lead productivity");
<ide> reportService.addReportDefinition(def4);
<del>*/
<del>
<del>/*
<add>
<ide> // specialist productivity, lead productivity
<ide>
<ide> // QA checks
<ide> project1.getReportDefinitions().add(def3);
<ide> project1.getReportDefinitions().add(def4);
<ide> project1.getReportDefinitions().add(qa1);
<del>*/
<ide> mappingService.updateMapProject(project1);
<ide>
<del> // Generate the reports
<del> /*reportService.generateDailyReports(project1, lead1);
<del>
<del> // Add report definitions to the project(s)
<ide> project2.getReportDefinitions().add(def1);
<ide> project2.getReportDefinitions().add(def2);
<ide> project2.getReportDefinitions().add(def3);
<ide> project2.getReportDefinitions().add(def4);
<ide> project2.getReportDefinitions().add(qa1);
<del> */
<ide> mappingService.updateMapProject(project2);
<ide>
<add> project3.getReportDefinitions().add(def1);
<add> project3.getReportDefinitions().add(def2);
<add> project3.getReportDefinitions().add(def3);
<add> project3.getReportDefinitions().add(def4);
<add> project3.getReportDefinitions().add(qa1);
<add> mappingService.updateMapProject(project3);
<add>
<add> project4.getReportDefinitions().add(def1);
<add> project4.getReportDefinitions().add(def2);
<add> project4.getReportDefinitions().add(def3);
<add> project4.getReportDefinitions().add(def4);
<add> project4.getReportDefinitions().add(qa1);
<add> mappingService.updateMapProject(project4);
<add>
<ide> // Generate the reports
<del>/* reportService.generateDailyReports(project2, lead1);
<del>*/
<add> reportService.generateDailyReports(project1, lead1);
<add> reportService.generateDailyReports(project2, lead1);
<add> reportService.generateDailyReports(project3, lead1);
<add> reportService.generateDailyReports(project4, lead1);
<add>
<ide> // TODO: add qa check for "invalid codes"
<ide> }
<ide>
|
|
Java
|
apache-2.0
|
error: pathspec 'xmpbox/src/test/java/org/apache/xmpbox/schema/SchemaTester.java' did not match any file(s) known to git
|
78547f80e417a66b393fe6a5c24e68d5fe6d5c3b
| 1 |
apache/pdfbox,kalaspuffar/pdfbox,apache/pdfbox,kalaspuffar/pdfbox
|
/*****************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
****************************************************************************/
package org.apache.xmpbox.schema;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.List;
import java.util.Random;
import org.apache.xmpbox.XMPMetadata;
import org.apache.xmpbox.type.AbstractSimpleProperty;
import org.apache.xmpbox.type.ArrayProperty;
import org.apache.xmpbox.type.Cardinality;
import org.apache.xmpbox.type.TypeMapping;
import org.apache.xmpbox.type.AbstractTypeTester;
import org.apache.xmpbox.type.Types;
class SchemaTester extends AbstractTypeTester {
private XMPMetadata xmp;
private XMPSchema xmpSchema;
private String fieldName;
private Types type;
private Cardinality cardinality;
    private TypeMapping typeMapping;
private XMPSchema getSchema() {
return xmpSchema;
}
private Class<? extends XMPSchema> getSchemaClass() {
return getSchema().getClass();
}
public SchemaTester(XMPMetadata xmp, XMPSchema schema, String fieldName, Types type, Cardinality card)
{
this.xmp = xmp;
this.xmpSchema = schema;
this.typeMapping = xmp.getTypeMapping();
this.fieldName = fieldName;
this.type = type;
this.cardinality = card;
}
public void testInitializedToNull() throws Exception
{
XMPSchema schema = getSchema();
// default method
assertNull(schema.getProperty(fieldName));
// accessor
if (cardinality == Cardinality.Simple)
{
String getter = calculateSimpleGetter(fieldName);
Method get = getSchemaClass().getMethod(getter);
Object result = get.invoke(schema);
assertNull(result);
}
else
{
// arrays
String getter = calculateArrayGetter(fieldName);
Method get = getSchemaClass().getMethod(getter);
Object result = get.invoke(schema);
assertNull(result);
}
}
public void testSettingValue() throws Exception
{
internalTestSettingValue();
}
public void testRandomSettingValue() throws Exception
{
initializeSeed(new Random());
for (int i=0; i < RAND_LOOP_COUNT;i++)
{
internalTestSettingValue();
}
}
private void internalTestSettingValue() throws Exception
{
if (cardinality != Cardinality.Simple)
{
return;
}
XMPSchema schema = getSchema();
// only test simple properties
Object value = getJavaValue(type);
AbstractSimpleProperty property = schema.instanciateSimple(fieldName, value);
schema.addProperty(property);
String qn = getPropertyQualifiedName(fieldName);
assertNotNull(schema.getProperty(fieldName));
// check other properties not modified
List<Field> fields = getXmpFields(getSchemaClass());
for (Field field : fields)
{
// do not check the current name
String fqn = getPropertyQualifiedName(field.get(null).toString());
if (!fqn.equals(qn))
{
assertNull(schema.getProperty(fqn));
}
}
}
public void testSettingValueInArray() throws Exception
{
internalTestSettingValueInArray();
}
public void testRandomSettingValueInArray() throws Exception
{
initializeSeed(new Random());
for (int i=0; i < RAND_LOOP_COUNT;i++)
{
internalTestSettingValueInArray();
}
}
private void internalTestSettingValueInArray() throws Exception
{
if (cardinality == Cardinality.Simple)
{
return;
}
XMPSchema schema = getSchema();
// only test array properties
Object value = getJavaValue(type);
AbstractSimpleProperty property = schema.instanciateSimple(fieldName, value);
switch (cardinality)
{
case Seq:
schema.addUnqualifiedSequenceValue(property.getPropertyName(), property);
break;
case Bag:
schema.addBagValue(property.getPropertyName(), property);
break;
default:
throw new Exception("Unexpected case in test : " + cardinality.name());
}
String qn = getPropertyQualifiedName(fieldName);
assertNotNull(schema.getProperty(fieldName));
// check other properties not modified
List<Field> fields = getXmpFields(getSchemaClass());
for (Field field : fields)
{
// do not check the current name
String fqn = getPropertyQualifiedName(field.get(null).toString());
if (!fqn.equals(qn))
{
assertNull(schema.getProperty(fqn));
}
}
}
public void testPropertySetterSimple() throws Exception
{
internalTestPropertySetterSimple();
}
public void testRandomPropertySetterSimple() throws Exception
{
initializeSeed(new Random());
for (int i=0; i < RAND_LOOP_COUNT;i++)
{
internalTestPropertySetterSimple();
}
}
private void internalTestPropertySetterSimple() throws Exception
{
if (cardinality != Cardinality.Simple)
{
return;
}
XMPSchema schema = getSchema();
String setter = calculateSimpleSetter(fieldName) + "Property";
Object value = getJavaValue(type);
AbstractSimpleProperty asp = typeMapping.instanciateSimpleProperty(schema.getNamespace(), schema
.getPrefix(), fieldName, value, type);
Method set = getSchemaClass().getMethod(setter, type.getImplementingClass());
set.invoke(schema, asp);
// check property set
AbstractSimpleProperty stored = (AbstractSimpleProperty) schema.getProperty(fieldName);
assertEquals(value, stored.getValue());
// check getter
String getter = calculateSimpleGetter(fieldName) + "Property";
Method get = getSchemaClass().getMethod(getter);
Object result = get.invoke(schema);
assertTrue(type.getImplementingClass().isAssignableFrom(result.getClass()));
assertEquals(asp, result);
}
public void testPropertySetterInArray() throws Exception
{
internalTestPropertySetterInArray();
}
public void testRandomPropertySetterInArray() throws Exception
{
initializeSeed(new Random());
for (int i=0; i < RAND_LOOP_COUNT;i++)
{
internalTestPropertySetterInArray();
}
}
private void internalTestPropertySetterInArray() throws Exception
{
if (cardinality == Cardinality.Simple)
{
return;
}
XMPSchema schema = getSchema();
// add value
String setter = "add" + calculateFieldNameForMethod(fieldName);
// TypeDescription<AbstractSimpleProperty> td =
// typeMapping.getSimpleDescription(type);
Object value1 = getJavaValue(type);
Method set = getSchemaClass().getMethod(setter, getJavaType(type));
set.invoke(schema, value1);
// retrieve complex property
String getter = calculateArrayGetter(fieldName) + "Property";
Method getcp = getSchemaClass().getMethod(getter);
Object ocp = getcp.invoke(schema);
assertTrue(ocp instanceof ArrayProperty);
ArrayProperty cp = (ArrayProperty) ocp;
// check size is ok (1)
assertEquals(1, cp.getContainer().getAllProperties().size());
// add a new one
Object value2 = getJavaValue(type);
set.invoke(schema, value2);
assertEquals(2, cp.getContainer().getAllProperties().size());
// remove the first
String remover = "remove" + calculateFieldNameForMethod(fieldName);
Method remove = getSchemaClass().getMethod(remover, getJavaType(type));
remove.invoke(schema, value1);
assertEquals(1, cp.getContainer().getAllProperties().size());
}
protected String getPropertyQualifiedName(String name)
{
StringBuilder sb = new StringBuilder();
sb.append(getSchema().getPrefix()).append(":").append(name);
return sb.toString();
}
}
|
xmpbox/src/test/java/org/apache/xmpbox/schema/SchemaTester.java
|
PDFBOX-5017: new class for schema testing derived from AbstractSchemaTester (which will be removed)
git-svn-id: c3ad59981690829a43dc34c293c4e2cd04bcd994@1883479 13f79535-47bb-0310-9956-ffa450edef68
|
xmpbox/src/test/java/org/apache/xmpbox/schema/SchemaTester.java
|
PDFBOX-5017: new class for schema testing derived from AbstractSchemaTester (which will be removed)
|
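For orientation only: the diff below adds the class verbatim, but neither the commit message nor the diff shows how such a tester is driven. The following is a minimal, hypothetical JUnit 5 sketch of how SchemaTester could be exercised. The test class name and the choice of the Dublin Core "source" property (assumed here to be a simple Text field) are illustrative assumptions, not part of this commit, and the xmpbox factory calls (XMPMetadata.createXMPMetadata, createAndAddDublinCoreSchema) are used as commonly documented.

package org.apache.xmpbox.schema;

import org.apache.xmpbox.XMPMetadata;
import org.apache.xmpbox.type.Cardinality;
import org.apache.xmpbox.type.Types;
import org.junit.jupiter.api.Test;

// Hypothetical usage sketch; not part of the commit below.
class SchemaTesterUsageSketch
{
    @Test
    void dublinCoreSourceRoundTrips() throws Exception
    {
        // Empty metadata tree plus a schema instance to exercise.
        XMPMetadata xmp = XMPMetadata.createXMPMetadata();
        DublinCoreSchema dc = xmp.createAndAddDublinCoreSchema();

        // "source" is assumed to be a simple Text property of the Dublin Core schema.
        SchemaTester tester = new SchemaTester(xmp, dc, "source", Types.Text, Cardinality.Simple);

        // First check the uninitialized state, then round-trip a generated value.
        tester.testInitializedToNull();
        tester.testSettingValue();
    }
}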
<ide><path>mpbox/src/test/java/org/apache/xmpbox/schema/SchemaTester.java
<add>/*****************************************************************************
<add> * Licensed to the Apache Software Foundation (ASF) under one
<add> * or more contributor license agreements. See the NOTICE file
<add> * distributed with this work for additional information
<add> * regarding copyright ownership. The ASF licenses this file
<add> * to you under the Apache License, Version 2.0 (the
<add> * "License"); you may not use this file except in compliance
<add> * with the License. You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing,
<add> * software distributed under the License is distributed on an
<add> * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
<add> * KIND, either express or implied. See the License for the
<add> * specific language governing permissions and limitations
<add> * under the License.
<add> *
<add> ****************************************************************************/
<add>
<add>package org.apache.xmpbox.schema;
<add>
<add>import static org.junit.jupiter.api.Assertions.assertEquals;
<add>import static org.junit.jupiter.api.Assertions.assertNotNull;
<add>import static org.junit.jupiter.api.Assertions.assertNull;
<add>import static org.junit.jupiter.api.Assertions.assertTrue;
<add>
<add>import java.lang.reflect.Field;
<add>import java.lang.reflect.Method;
<add>import java.util.List;
<add>import java.util.Random;
<add>
<add>import org.apache.xmpbox.XMPMetadata;
<add>import org.apache.xmpbox.type.AbstractSimpleProperty;
<add>import org.apache.xmpbox.type.ArrayProperty;
<add>import org.apache.xmpbox.type.Cardinality;
<add>import org.apache.xmpbox.type.TypeMapping;
<add>import org.apache.xmpbox.type.AbstractTypeTester;
<add>import org.apache.xmpbox.type.Types;
<add>
<add>class SchemaTester extends AbstractTypeTester {
<add>
<add> private XMPMetadata xmp;
<add>
<add> private XMPSchema xmpSchema;
<add>
<add> private String fieldName;
<add>
<add> private Types type;
<add>
<add> private Cardinality cardinality;
<add>
<add> 	private TypeMapping typeMapping;
<add>
<add> private XMPSchema getSchema() {
<add> return xmpSchema;
<add> }
<add>
<add> private Class<? extends XMPSchema> getSchemaClass() {
<add> return getSchema().getClass();
<add> }
<add>
<add> public SchemaTester(XMPMetadata xmp, XMPSchema schema, String fieldName, Types type, Cardinality card)
<add> {
<add> this.xmp = xmp;
<add> this.xmpSchema = schema;
<add> this.typeMapping = xmp.getTypeMapping();
<add> this.fieldName = fieldName;
<add> this.type = type;
<add> this.cardinality = card;
<add> }
<add>
<add> public void testInitializedToNull() throws Exception
<add> {
<add> XMPSchema schema = getSchema();
<add> // default method
<add> assertNull(schema.getProperty(fieldName));
<add> // accessor
<add> if (cardinality == Cardinality.Simple)
<add> {
<add> String getter = calculateSimpleGetter(fieldName);
<add> Method get = getSchemaClass().getMethod(getter);
<add> Object result = get.invoke(schema);
<add> assertNull(result);
<add> }
<add> else
<add> {
<add> // arrays
<add> String getter = calculateArrayGetter(fieldName);
<add> Method get = getSchemaClass().getMethod(getter);
<add> Object result = get.invoke(schema);
<add> assertNull(result);
<add> }
<add>
<add> }
<add>
<add> public void testSettingValue() throws Exception
<add> {
<add> internalTestSettingValue();
<add> }
<add>
<add> public void testRandomSettingValue() throws Exception
<add> {
<add> initializeSeed(new Random());
<add> for (int i=0; i < RAND_LOOP_COUNT;i++)
<add> {
<add> internalTestSettingValue();
<add> }
<add> }
<add>
<add> private void internalTestSettingValue() throws Exception
<add> {
<add> if (cardinality != Cardinality.Simple)
<add> {
<add> return;
<add> }
<add> XMPSchema schema = getSchema();
<add> // only test simple properties
<add> Object value = getJavaValue(type);
<add> AbstractSimpleProperty property = schema.instanciateSimple(fieldName, value);
<add> schema.addProperty(property);
<add> String qn = getPropertyQualifiedName(fieldName);
<add> assertNotNull(schema.getProperty(fieldName));
<add> // check other properties not modified
<add> List<Field> fields = getXmpFields(getSchemaClass());
<add> for (Field field : fields)
<add> {
<add> // do not check the current name
<add> String fqn = getPropertyQualifiedName(field.get(null).toString());
<add> if (!fqn.equals(qn))
<add> {
<add> assertNull(schema.getProperty(fqn));
<add> }
<add> }
<add> }
<add>
<add> public void testSettingValueInArray() throws Exception
<add> {
<add> internalTestSettingValueInArray();
<add> }
<add>
<add> public void testRandomSettingValueInArray() throws Exception
<add> {
<add> initializeSeed(new Random());
<add> for (int i=0; i < RAND_LOOP_COUNT;i++)
<add> {
<add> internalTestSettingValueInArray();
<add> }
<add> }
<add>
<add> private void internalTestSettingValueInArray() throws Exception
<add> {
<add> if (cardinality == Cardinality.Simple)
<add> {
<add> return;
<add> }
<add> XMPSchema schema = getSchema();
<add> // only test array properties
<add> Object value = getJavaValue(type);
<add> AbstractSimpleProperty property = schema.instanciateSimple(fieldName, value);
<add> switch (cardinality)
<add> {
<add> case Seq:
<add> schema.addUnqualifiedSequenceValue(property.getPropertyName(), property);
<add> break;
<add> case Bag:
<add> schema.addBagValue(property.getPropertyName(), property);
<add> break;
<add> default:
<add> throw new Exception("Unexpected case in test : " + cardinality.name());
<add> }
<add> String qn = getPropertyQualifiedName(fieldName);
<add> assertNotNull(schema.getProperty(fieldName));
<add> // check other properties not modified
<add> List<Field> fields = getXmpFields(getSchemaClass());
<add> for (Field field : fields)
<add> {
<add> // do not check the current name
<add> String fqn = getPropertyQualifiedName(field.get(null).toString());
<add> if (!fqn.equals(qn))
<add> {
<add> assertNull(schema.getProperty(fqn));
<add> }
<add> }
<add> }
<add>
<add> public void testPropertySetterSimple() throws Exception
<add> {
<add> internalTestPropertySetterSimple();
<add> }
<add>
<add> public void testRandomPropertySetterSimple() throws Exception
<add> {
<add> initializeSeed(new Random());
<add> for (int i=0; i < RAND_LOOP_COUNT;i++)
<add> {
<add> internalTestPropertySetterSimple();
<add> }
<add> }
<add>
<add> private void internalTestPropertySetterSimple() throws Exception
<add> {
<add> if (cardinality != Cardinality.Simple)
<add> {
<add> return;
<add> }
<add> XMPSchema schema = getSchema();
<add> String setter = calculateSimpleSetter(fieldName) + "Property";
<add> Object value = getJavaValue(type);
<add> AbstractSimpleProperty asp = typeMapping.instanciateSimpleProperty(schema.getNamespace(), schema
<add> .getPrefix(), fieldName, value, type);
<add> Method set = getSchemaClass().getMethod(setter, type.getImplementingClass());
<add> set.invoke(schema, asp);
<add> // check property set
<add> AbstractSimpleProperty stored = (AbstractSimpleProperty) schema.getProperty(fieldName);
<add> assertEquals(value, stored.getValue());
<add> // check getter
<add> String getter = calculateSimpleGetter(fieldName) + "Property";
<add> Method get = getSchemaClass().getMethod(getter);
<add> Object result = get.invoke(schema);
<add> assertTrue(type.getImplementingClass().isAssignableFrom(result.getClass()));
<add> assertEquals(asp, result);
<add> }
<add>
<add> public void testPropertySetterInArray() throws Exception
<add> {
<add> internalTestPropertySetterInArray();
<add> }
<add>
<add> public void testRandomPropertySetterInArray() throws Exception
<add> {
<add> initializeSeed(new Random());
<add> for (int i=0; i < RAND_LOOP_COUNT;i++)
<add> {
<add> internalTestPropertySetterInArray();
<add> }
<add> }
<add>
<add> private void internalTestPropertySetterInArray() throws Exception
<add> {
<add> if (cardinality == Cardinality.Simple)
<add> {
<add> return;
<add> }
<add> XMPSchema schema = getSchema();
<add> // add value
<add> String setter = "add" + calculateFieldNameForMethod(fieldName);
<add> // TypeDescription<AbstractSimpleProperty> td =
<add> // typeMapping.getSimpleDescription(type);
<add> Object value1 = getJavaValue(type);
<add> Method set = getSchemaClass().getMethod(setter, getJavaType(type));
<add> set.invoke(schema, value1);
<add> // retrieve complex property
<add> String getter = calculateArrayGetter(fieldName) + "Property";
<add> Method getcp = getSchemaClass().getMethod(getter);
<add> Object ocp = getcp.invoke(schema);
<add> assertTrue(ocp instanceof ArrayProperty);
<add> ArrayProperty cp = (ArrayProperty) ocp;
<add> // check size is ok (1)
<add> assertEquals(1, cp.getContainer().getAllProperties().size());
<add> // add a new one
<add> Object value2 = getJavaValue(type);
<add> set.invoke(schema, value2);
<add> assertEquals(2, cp.getContainer().getAllProperties().size());
<add> // remove the first
<add> String remover = "remove" + calculateFieldNameForMethod(fieldName);
<add> Method remove = getSchemaClass().getMethod(remover, getJavaType(type));
<add> remove.invoke(schema, value1);
<add> assertEquals(1, cp.getContainer().getAllProperties().size());
<add>
<add> }
<add>
<add> protected String getPropertyQualifiedName(String name)
<add> {
<add> StringBuilder sb = new StringBuilder();
<add> sb.append(getSchema().getPrefix()).append(":").append(name);
<add> return sb.toString();
<add> }
<add>
<add>}
|
|
JavaScript
|
mit
|
ad9d1cc3d745bdb9c3193fe7a7b44c4935dffd12
| 0 |
baishuiz/AirUI,baishuiz/AirUI
|
Air.Module("AirUI.ui.Imageclip", function(require){
var imgCamera = require('AirUI.UI.imgCamera');
var offset = require('AirUI.UI.offset');
var dragAble = require('AirUI.UI.dragAble');
var defaultOptions = {
};
function createImageElement(imgURL, width){
var imgElement = document.createElement('img');
imgElement.src = imgURL;
imgElement.width = width;
return imgElement;
}
function createCameraCar(width, height, x, y){
var car = document.createElement('div');
car.style.position = 'static';
car.style.width = width + 'px';
car.style.height = height + 'px';
car.style.top = y + 'px';
car.style.left = x + 'px';
car.style.border = "1px dashed #fff";
car.style.cursor = 'move'
car.style.boxSizing = 'border-box';
car.style.webkitBoxSizing = 'border-box';
car.style.mozBoxSizing = 'border-box';
car.style.boxSizing = 'border-box';
return car;
}
function createMask(){
var mask = document.createElement('div');
mask.style.opacity = 0.8;
mask.style.background = 'black';
mask.style.width = '100%';
mask.style.height = '100%';
mask.style.position = 'absolute';
mask.style.left = 0;
mask.style.top = 0;
return mask;
}
function ImageClip(imgURL, options){
var imgContainer = options.imgContainer;
var scale = options.scale;
var imgElement = createImageElement(imgURL, imgContainer.offsetWidth);
var cameraWidth = options.width * scale;
var cameraHeight = options.height * scale;
var cameraWidthWithoutBorder = cameraWidth - 2;
var cameraHeightWithoutBorder = cameraHeight - 2;
var cameraCarX = options.x || 0;
var cameraCarY = options.y || 0;
var cameraCar = createCameraCar(cameraWidth, cameraHeight, cameraCarX, cameraCarY);
cameraCar.appendChild(imgElement);
        // The clip has a 1px border, so the camera needs to use the value minus 2
var camera = imgCamera(imgElement,cameraWidthWithoutBorder, cameraHeightWithoutBorder );
camera.translateTo({x:cameraCarX,y:cameraCarY,width:cameraWidthWithoutBorder, height:cameraHeightWithoutBorder});
dragAble(cameraCar).startDrag({position:'absolute',range:imgContainer , callBack:{
moving : cameraMovingCallbac
}})
imgContainer.appendChild(createMask());
imgContainer.appendChild(cameraCar);
function cameraMovingCallbac(e){
camera.translateTo({x:cameraCar.offsetLeft,y:cameraCar.offsetTop,width:cameraWidthWithoutBorder,height:cameraHeightWithoutBorder});
}
this.getOffset = function(){
var result = {
x: Math.floor(cameraCar.offsetLeft / scale),
y: Math.floor(cameraCar.offsetTop / scale)
};
return result;
}
}
return ImageClip;
});
|
src/module/UI/Imageclip.js
|
Air.Module("AirUI.ui.Imageclip", function(require){
var imgCamera = require('AirUI.UI.imgCamera');
var offset = require('AirUI.UI.offset');
var dragAble = require('AirUI.UI.dragAble');
var defaultOptions = {
};
function createImageElement(imgURL, width){
var imgElement = document.createElement('img');
imgElement.src = imgURL;
imgElement.width = width;
return imgElement;
}
function createCameraCar(width, height, x, y){
var car = document.createElement('div');
car.style.position = 'static';
car.style.width = width + 'px';
car.style.height = height + 'px';
car.style.top = y + 'px';
car.style.left = x + 'px';
car.style.border = "1px dashed #fff";
car.style.cursor = 'move'
return car;
}
function createMask(){
var mask = document.createElement('div');
mask.style.opacity = 0.8;
mask.style.background = 'black';
mask.style.width = '100%';
mask.style.height = '100%';
mask.style.position = 'absolute';
mask.style.left = 0;
mask.style.top = 0;
return mask;
}
function ImageClip(imgURL, options){
var imgContainer = options.imgContainer;
var scale = options.scale;
var imgElement = createImageElement(imgURL, imgContainer.offsetWidth);
var cameraWidth = options.width * scale;
var cameraHeight = options.height * scale;
var cameraCarX = options.x || 0;
var cameraCarY = options.y || 0;
var cameraCar = createCameraCar(cameraWidth, cameraHeight, cameraCarX, cameraCarY);
cameraCar.appendChild(imgElement);
var camera = imgCamera(imgElement,cameraWidth, cameraHeight );
camera.translateTo({x:cameraCarX,y:cameraCarY,width:cameraWidth, height:cameraHeight});
dragAble(cameraCar).startDrag({position:'absolute',range:imgContainer , callBack:{
moving : cameraMovingCallbac
}})
imgContainer.appendChild(createMask());
imgContainer.appendChild(cameraCar);
function cameraMovingCallbac(e){
camera.translateTo({x:cameraCar.offsetLeft,y:cameraCar.offsetTop,width:cameraWidth,height:cameraHeight});
}
this.getOffset = function(){
var result = {
x: Math.floor(cameraCar.offsetLeft / scale),
y: Math.floor(cameraCar.offsetTop / scale)
};
return result;
}
}
return ImageClip;
});
|
remove border width
|
src/module/UI/Imageclip.js
|
remove border width
|
<ide><path>rc/module/UI/Imageclip.js
<ide> car.style.left = x + 'px';
<ide> car.style.border = "1px dashed #fff";
<ide> car.style.cursor = 'move'
<add> car.style.boxSizing = 'border-box';
<add> car.style.webkitBoxSizing = 'border-box';
<add> car.style.mozBoxSizing = 'border-box';
<add> car.style.boxSizing = 'border-box';
<ide> return car;
<ide> }
<ide>
<ide> var imgElement = createImageElement(imgURL, imgContainer.offsetWidth);
<ide> var cameraWidth = options.width * scale;
<ide> var cameraHeight = options.height * scale;
<add> var cameraWidthWithoutBorder = cameraWidth - 2;
<add> var cameraHeightWithoutBorder = cameraHeight - 2;
<ide> var cameraCarX = options.x || 0;
<ide> var cameraCarY = options.y || 0;
<ide> var cameraCar = createCameraCar(cameraWidth, cameraHeight, cameraCarX, cameraCarY);
<ide>
<ide>
<ide> cameraCar.appendChild(imgElement);
<del> var camera = imgCamera(imgElement,cameraWidth, cameraHeight );
<add> 		// The clip has a 1px border, so the camera needs to use the value minus 2
<add> var camera = imgCamera(imgElement,cameraWidthWithoutBorder, cameraHeightWithoutBorder );
<ide>
<del> camera.translateTo({x:cameraCarX,y:cameraCarY,width:cameraWidth, height:cameraHeight});
<add> camera.translateTo({x:cameraCarX,y:cameraCarY,width:cameraWidthWithoutBorder, height:cameraHeightWithoutBorder});
<ide> dragAble(cameraCar).startDrag({position:'absolute',range:imgContainer , callBack:{
<ide> moving : cameraMovingCallbac
<ide> }})
<ide> imgContainer.appendChild(createMask());
<ide> imgContainer.appendChild(cameraCar);
<ide> function cameraMovingCallbac(e){
<del> camera.translateTo({x:cameraCar.offsetLeft,y:cameraCar.offsetTop,width:cameraWidth,height:cameraHeight});
<add> camera.translateTo({x:cameraCar.offsetLeft,y:cameraCar.offsetTop,width:cameraWidthWithoutBorder,height:cameraHeightWithoutBorder});
<ide> }
<ide>
<ide> this.getOffset = function(){
|
|
JavaScript
|
mit
|
0c465487903fad39c5812e53ffa1b83f3895ea34
| 0 |
danielgindi/jquery-backstretch,srobbin/jquery-backstretch,danielgindi/jquery-backstretch,jquery-backstretch/jquery-backstretch,jquery-backstretch/jquery-backstretch
|
/*
* Backstretch
* http://srobbin.com/jquery-plugins/backstretch/
*
* Copyright (c) 2013 Scott Robbin
* Licensed under the MIT license.
*/
;(function ($, window, undefined) {
'use strict';
/** @const */
var YOUTUBE_REGEXP = /^.*(youtu\.be\/|v\/|u\/\w\/|embed\/|watch\?v=|\&v=)([^#\&\?]*).*/i;
/* PLUGIN DEFINITION
* ========================= */
$.fn.backstretch = function (images, options) {
var args = arguments;
/*
* Scroll the page one pixel to get the right window height on iOS
* Pretty harmless for everyone else
*/
if ($(window).scrollTop() === 0 ) {
window.scrollTo(0, 0);
}
var returnValues;
this.each(function (eachIndex) {
var $this = $(this)
, obj = $this.data('backstretch');
// Do we already have an instance attached to this element?
if (obj) {
// Is this a method they're trying to execute?
if (typeof args[0] === 'string' &&
typeof obj[args[0]] === 'function') {
// Call the method
var returnValue = obj[args[0]].apply(obj, Array.prototype.slice.call(args, 1));
if (returnValue === obj) { // If a method is chaining
returnValue = undefined;
}
if (returnValue !== undefined) {
returnValues = returnValues || [];
returnValues[eachIndex] = returnValue;
}
return; // Nothing further to do
}
// Merge the old options with the new
options = $.extend(obj.options, options);
// Remove the old instance
if ( obj.hasOwnProperty('destroy') ) {
obj.destroy(true);
}
}
// We need at least one image
if (!images || (images && images.length === 0)) {
var cssBackgroundImage = $this.css('background-image');
if (cssBackgroundImage && cssBackgroundImage !== 'none') {
images = [ { url: $this.css('backgroundImage').replace(/url\(|\)|"|'/g,"") } ];
} else {
$.error('No images were supplied for Backstretch, or element must have a CSS-defined background image.');
}
}
obj = new Backstretch(this, images, options || {});
$this.data('backstretch', obj);
});
return returnValues ? returnValues.length === 1 ? returnValues[0] : returnValues : this;
};
// If no element is supplied, we'll attach to body
$.backstretch = function (images, options) {
// Return the instance
return $('body')
.backstretch(images, options)
.data('backstretch');
};
// Custom selector
$.expr[':'].backstretch = function(elem) {
return $(elem).data('backstretch') !== undefined;
};
/* DEFAULTS
* ========================= */
$.fn.backstretch.defaults = {
duration: 5000 // Amount of time in between slides (if slideshow)
, transition: 'fade' // Type of transition between slides
, transitionDuration: 0 // Duration of transition between slides
, animateFirst: true // Animate the transition of first image of slideshow in?
, alignX: 0.5 // The x-alignment for the image, can be 'left'|'center'|'right' or any number between 0.0 and 1.0
, alignY: 0.5 // The y-alignment for the image, can be 'top'|'center'|'bottom' or any number between 0.0 and 1.0
, paused: false // Whether the images should slide after given duration
, start: 0 // Index of the first image to show
, preload: 2 // How many images preload at a time?
, preloadSize: 1 // How many images can we preload in parallel?
, resolutionRefreshRate: 2500 // How long to wait before switching resolution?
, resolutionChangeRatioThreshold: 0.1 // How much a change should it be before switching resolution?
};
/* STYLES
*
* Baked-in styles that we'll apply to our elements.
* In an effort to keep the plugin simple, these are not exposed as options.
* That said, anyone can override these in their own stylesheet.
* ========================= */
var styles = {
wrap: {
left: 0
, top: 0
, overflow: 'hidden'
, margin: 0
, padding: 0
, height: '100%'
, width: '100%'
, zIndex: -999999
}
, itemWrapper: {
position: 'absolute'
, display: 'none'
, margin: 0
, padding: 0
, border: 'none'
, width: '100%'
, height: '100%'
, zIndex: -999999
}
, item: {
position: 'absolute'
, margin: 0
, padding: 0
, border: 'none'
, width: '100%'
, height: '100%'
, maxWidth: 'none'
}
};
/* Given an array of different options for an image,
* choose the optimal image for the container size.
*
* Given an image template (a string with {{ width }} and/or
* {{height}} inside) and a container object, returns the
* image url with the exact values for the size of that
* container.
*
* Returns an array of urls optimized for the specified resolution.
*
*/
var optimalSizeImages = (function () {
/* Sorts the array of image sizes based on width */
var widthInsertSort = function (arr) {
for (var i = 1; i < arr.length; i++) {
var tmp = arr[i],
j = i;
while (arr[j - 1] && parseInt(arr[j - 1].width, 10) > parseInt(tmp.width, 10)) {
arr[j] = arr[j - 1];
--j;
}
arr[j] = tmp;
}
return arr;
};
/* Given an array of various sizes of the same image and a container width,
* return the best image.
*/
var selectBest = function (containerWidth, imageSizes) {
var devicePixelRatio = window.devicePixelRatio || 1;
var lastAllowedImage = 0;
var testWidth;
for (var j = 0, image; j < imageSizes.length; j++) {
image = imageSizes[j];
// In case a new image was pushed in, process it:
if (typeof image === 'string') {
image = imageSizes[j] = { url: image };
}
if (image.pixelRatio && image.pixelRatio !== 'auto' && parseFloat(image.pixelRatio) !== devicePixelRatio) {
// We disallowed choosing this image for current device pixel ratio,
// So skip this one.
continue;
}
// Mark this one as the last one we investigated
// which does not violate device pixel ratio rules.
// We may choose this one later if there's no match.
lastAllowedImage = j;
// For most images, we match the specified width against element width,
// And enforcing a limit depending on the "pixelRatio" property if specified.
// But if a pixelRatio="auto", then we consider the width as the physical width of the image,
// And match it while considering the device's pixel ratio.
testWidth = containerWidth;
if (image.pixelRatio === 'auto') {
containerWidth *= devicePixelRatio;
}
// Stop when the width of the image is larger or equal to the container width
if (image.width >= testWidth) {
break;
}
}
// Use the image located at where we stopped
return imageSizes[Math.min(j, lastAllowedImage)];
};
var replaceTagsInUrl = function (url, templateReplacer) {
if (typeof url === 'string') {
url = url.replace(/{{(width|height)}}/g, templateReplacer);
} else if (url instanceof Array) {
for (var i = 0; i < url.length; i++) {
if (url[i].src) {
url[i].src = replaceTagsInUrl(url[i].src, templateReplacer);
} else {
url[i] = replaceTagsInUrl(url[i], templateReplacer);
}
}
}
return url;
};
return function ($container, images) {
var containerWidth = $container.width(),
containerHeight = $container.height();
var chosenImages = [];
var templateReplacer = function (match, key) {
if (key === 'width') {
return containerWidth;
}
if (key === 'height') {
return containerHeight;
}
return match;
};
for (var i = 0; i < images.length; i++) {
if ($.isArray(images[i])) {
images[i] = widthInsertSort(images[i]);
var chosen = selectBest(containerWidth, images[i]);
chosenImages.push(chosen);
} else {
// In case a new image was pushed in, process it:
if (typeof images[i] === 'string') {
images[i] = { url: images[i] };
}
var item = $.extend({}, images[i]);
item.url = replaceTagsInUrl(item.url, templateReplacer);
chosenImages.push(item);
}
}
return chosenImages;
};
})();
var isVideoSource = function (source) {
return YOUTUBE_REGEXP.test(source.url) || source.isVideo;
};
/* Preload images */
var preload = (function (sources, startAt, count, batchSize, callback) {
// Plugin cache
var cache = [];
// Wrapper for cache
var caching = function(image){
for (var i = 0; i < cache.length; i++) {
if (cache[i].src === image.src) {
return cache[i];
}
}
cache.push(image);
return image;
};
// Execute callback
var exec = function(sources, callback, last){
if (typeof callback === 'function') {
callback.call(sources, last);
}
};
// Closure to hide cache
return function preload (sources, startAt, count, batchSize, callback){
// Check input data
if (typeof sources === 'undefined') {
return;
}
if (!$.isArray(sources)) {
sources = [sources];
}
if (arguments.length < 5 && typeof arguments[arguments.length - 1] === 'function') {
callback = arguments[arguments.length - 1];
}
startAt = (typeof startAt === 'function' || !startAt) ? 0 : startAt;
count = (typeof count === 'function' || !count || count < 0) ? sources.length : Math.min(count, sources.length);
batchSize = (typeof batchSize === 'function' || !batchSize) ? 1 : batchSize;
if (startAt >= sources.length) {
startAt = 0;
count = 0;
}
if (batchSize < 0) {
batchSize = count;
}
batchSize = Math.min(batchSize, count);
var next = sources.slice(startAt + batchSize, count - batchSize);
sources = sources.slice(startAt, batchSize);
count = sources.length;
// If sources array is empty
if (!count) {
exec(sources, callback, true);
return;
}
// Image loading callback
var countLoaded = 0;
var loaded = function() {
countLoaded++;
if (countLoaded !== count) {
return;
}
exec(sources, callback, !next);
preload(next, 0, 0, batchSize, callback);
};
// Loop sources to preload
var image;
for (var i = 0; i < sources.length; i++) {
if (isVideoSource(sources[i])) {
// Do not preload videos. There are issues with that.
// First - we need to keep an instance of the preloaded and use that exactly, not a copy.
// Second - there are memory issues.
// If there will be a requirement from users - I'll try to implement this.
continue;
} else {
image = new Image();
image.src = sources[i].url;
image = caching(image);
if (image.complete) {
loaded();
} else {
$(image).on('load error', loaded);
}
}
}
};
})();
/* Process images array */
var processImagesArray = function (images) {
var processed = [];
for (var i = 0; i < images.length; i++) {
if (typeof images[i] === 'string') {
processed.push({ url: images[i] });
}
else if ($.isArray(images[i])) {
processed.push(processImagesArray(images[i]));
}
else {
processed.push(processOptions(images[i]));
}
}
return processed;
};
/* Process options */
var processOptions = function (options, required) {
// Convert old options
// centeredX/centeredY are deprecated
if (options.centeredX || options.centeredY) {
if (window.console && window.console.log) {
window.console.log('jquery.backstretch: `centeredX`/`centeredY` is deprecated, please use `alignX`/`alignY`');
}
if (options.centeredX) {
options.alignX = 0.5;
}
if (options.centeredY) {
options.alignY = 0.5;
}
}
// Deprecated spec
if (options.speed !== undefined) {
if (window.console && window.console.log) {
window.console.log('jquery.backstretch: `speed` is deprecated, please use `transitionDuration`');
}
options.transitionDuration = options.speed;
options.transition = 'fade';
}
// Typo
if (options.resolutionChangeRatioTreshold !== undefined) {
window.console.log('jquery.backstretch: `treshold` is a typo!');
options.resolutionChangeRatioThreshold = options.resolutionChangeRatioTreshold;
}
// Current spec that needs processing
if (options.fadeFirst !== undefined) {
options.animateFirst = options.fadeFirst;
}
if (options.fade !== undefined) {
options.transitionDuration = options.fade;
options.transition = 'fade';
}
return processAlignOptions(options);
};
/* Process align options */
var processAlignOptions = function (options, required) {
if (options.alignX === 'left') {
options.alignX = 0.0;
}
else if (options.alignX === 'center') {
options.alignX = 0.5;
}
else if (options.alignX === 'right') {
options.alignX = 1.0;
}
else {
if (options.alignX !== undefined || required) {
options.alignX = parseFloat(options.alignX);
if (isNaN(options.alignX)) {
options.alignX = 0.5;
}
}
}
if (options.alignY === 'top') {
options.alignY = 0.0;
}
else if (options.alignY === 'center') {
options.alignY = 0.5;
}
else if (options.alignY === 'bottom') {
options.alignY = 1.0;
}
else {
if (options.alignX !== undefined || required) {
options.alignY = parseFloat(options.alignY);
if (isNaN(options.alignY)) {
options.alignY = 0.5;
}
}
}
return options;
};
/* CLASS DEFINITION
* ========================= */
var Backstretch = function (container, images, options) {
this.options = $.extend({}, $.fn.backstretch.defaults, options || {});
this.firstShow = true;
// Process options
processOptions(this.options, true);
/* In its simplest form, we allow Backstretch to be called on an image path.
* e.g. $.backstretch('/path/to/image.jpg')
* So, we need to turn this back into an array.
*/
this.images = processImagesArray($.isArray(images) ? images : [images]);
/**
* Paused-Option
*/
if (this.options.paused) {
this.paused = true;
}
/**
* Start-Option (Index)
*/
if (this.options.start >= this.images.length)
{
this.options.start = this.images.length - 1;
}
if (this.options.start < 0)
{
this.options.start = 0;
}
// Convenience reference to know if the container is body.
this.isBody = container === document.body;
/* We're keeping track of a few different elements
*
* Container: the element that Backstretch was called on.
* Wrap: a DIV that we place the image into, so we can hide the overflow.
* Root: Convenience reference to help calculate the correct height.
*/
var $window = $(window);
this.$container = $(container);
this.$root = this.isBody ? supportsFixedPosition ? $window : $(document) : this.$container;
this.originalImages = this.images;
this.images = optimalSizeImages(
this.options.alwaysTestWindowResolution ? $window : this.$root,
this.originalImages);
/**
* Pre-Loading.
* This is the first image, so we will preload a minimum of 1 images.
*/
preload(this.images, this.options.start || 0, this.options.preload || 1);
// Don't create a new wrap if one already exists (from a previous instance of Backstretch)
var $existing = this.$container.children(".backstretch").first();
this.$wrap = $existing.length ? $existing :
$('<div class="backstretch"></div>')
.css(this.options.bypassCss ? {} : styles.wrap)
.appendTo(this.$container);
if (!this.options.bypassCss) {
// Non-body elements need some style adjustments
if (!this.isBody) {
// If the container is statically positioned, we need to make it relative,
// and if no zIndex is defined, we should set it to zero.
var position = this.$container.css('position')
, zIndex = this.$container.css('zIndex');
this.$container.css({
position: position === 'static' ? 'relative' : position
, zIndex: zIndex === 'auto' ? 0 : zIndex
});
// Needs a higher z-index
this.$wrap.css({zIndex: -999998});
}
// Fixed or absolute positioning?
this.$wrap.css({
position: this.isBody && supportsFixedPosition ? 'fixed' : 'absolute'
});
}
// Set the first image
this.index = this.options.start;
this.show(this.index);
// Listen for resize
$window.on('resize.backstretch', $.proxy(this.resize, this))
.on('orientationchange.backstretch', $.proxy(function () {
// Need to do this in order to get the right window height
if (this.isBody && window.pageYOffset === 0) {
window.scrollTo(0, 1);
this.resize();
}
}, this));
};
var performTransition = function (options) {
var transition = options.transition || 'fade';
// Look for multiple options
if (typeof transition === 'string' && transition.indexOf('|') > -1) {
transition = transition.split('|');
}
if (transition instanceof Array) {
transition = transition[Math.round(Math.random() * (transition.length - 1))];
}
switch (transition.toString().toLowerCase()) {
default:
case 'fade':
options['new'].fadeIn({
duration: options.duration,
complete: options.complete,
easing: options.easing || undefined
});
break;
case 'pushleft':
case 'push_left':
case 'pushright':
case 'push_right':
case 'pushup':
case 'push_up':
case 'pushdown':
case 'push_down':
case 'coverleft':
case 'cover_left':
case 'coverright':
case 'cover_right':
case 'coverup':
case 'cover_up':
case 'coverdown':
case 'cover_down':
var transitionParts = transition.match(/^(cover|push)_?(.*)$/);
var animProp = transitionParts[2] === 'left' ? 'right' :
transitionParts[2] === 'right' ? 'left' :
transitionParts[2] === 'down' ? 'top' :
transitionParts[2] === 'up' ? 'bottom' :
'right';
var newCssStart = {
'display': ''
}, newCssAnim = {};
newCssStart[animProp] = '-100%';
newCssAnim[animProp] = 0;
options['new']
.css(newCssStart)
.animate(newCssAnim, {
duration: options.duration,
complete: function () {
options['new'].css(animProp, '');
options.complete.apply(this, arguments);
},
easing: options.easing || undefined
});
if (transitionParts[1] === 'push' && options['old']) {
var oldCssAnim = {};
oldCssAnim[animProp] = '100%';
options['old']
.animate(oldCssAnim, {
duration: options.duration,
complete: function () {
options['old'].css('display', 'none');
},
easing: options.easing || undefined
});
}
break;
}
};
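/* Usage sketch (added for illustration; not part of the original plugin source).
 * performTransition is internal, but the `transition` value it receives comes from
 * the public options: a single name, a 'name1|name2' string (one is picked at random),
 * or an array of names. The image paths below are placeholders.
 *
 *   $.backstretch(['a.jpg', 'b.jpg'], {
 *     transition: 'push_left|cover_down|fade',
 *     transitionDuration: 500,
 *     transitionEasing: 'swing'
 *   });
 */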
/* PUBLIC METHODS
* ========================= */
Backstretch.prototype = {
resize: function () {
try {
// Check for a better suited image after the resize
var $resTest = this.options.alwaysTestWindowResolution ? $(window) : this.$root;
var newContainerWidth = $resTest.width();
var newContainerHeight = $resTest.height();
var changeRatioW = newContainerWidth / (this._lastResizeContainerWidth || 0);
var changeRatioH = newContainerHeight / (this._lastResizeContainerHeight || 0);
var resolutionChangeRatioThreshold = this.options.resolutionChangeRatioThreshold || 0.0;
// check for big changes in container size
if ((newContainerWidth !== this._lastResizeContainerWidth ||
newContainerHeight !== this._lastResizeContainerHeight) &&
((Math.abs(changeRatioW - 1) >= resolutionChangeRatioThreshold || isNaN(changeRatioW)) ||
(Math.abs(changeRatioH - 1) >= resolutionChangeRatioThreshold || isNaN(changeRatioH)))) {
this._lastResizeContainerWidth = newContainerWidth;
this._lastResizeContainerHeight = newContainerHeight;
// Big change: rebuild the entire images array
this.images = optimalSizeImages($resTest, this.originalImages);
// Preload them (they will be automatically inserted on the next cycle)
if (this.options.preload) {
preload(this.images, (this.index + 1) % this.images.length, this.options.preload);
}
// In case there is no cycle and the new source is different than the current
if (this.images.length === 1 &&
this._currentImage !== this.images[0]) {
// Wait a little and update the image being shown
var that = this;
clearTimeout(that._selectAnotherResolutionTimeout);
that._selectAnotherResolutionTimeout = setTimeout(function () {
that.show(0);
}, this.options.resolutionRefreshRate);
}
}
var bgCSS = {left: 0, top: 0, right: 'auto', bottom: 'auto'}
, rootWidth = this.isBody ? this.$root.width() : this.$root.innerWidth()
, rootHeight = this.isBody ? ( window.innerHeight ? window.innerHeight : this.$root.height() ) : this.$root.innerHeight()
, bgWidth = rootWidth
, bgHeight = bgWidth / this.$itemWrapper.data('ratio')
, evt = $.Event('backstretch.resize', {
relatedTarget: this.$container[0]
})
, bgOffset
, alignX = this._currentImage.alignX === undefined ? this.options.alignX : this._currentImage.alignX
, alignY = this._currentImage.alignY === undefined ? this.options.alignY : this._currentImage.alignY;
// Make adjustments based on image ratio
if (bgHeight >= rootHeight) {
bgCSS.top = -(bgHeight - rootHeight) * alignY;
} else {
bgHeight = rootHeight;
bgWidth = bgHeight * this.$itemWrapper.data('ratio');
bgOffset = (bgWidth - rootWidth) / 2;
bgCSS.left = -(bgWidth - rootWidth) * alignX;
}
if (!this.options.bypassCss) {
this.$wrap
.css({width: rootWidth, height: rootHeight})
.find('>.backstretch-item').not('.deleteable')
.each(function () {
var $wrapper = $(this);
$wrapper.find('img,video,iframe')
.css({width: bgWidth, height: bgHeight})
.css(bgCSS);
});
}
this.$container.trigger(evt, this);
} catch(err) {
// IE7 seems to trigger resize before the image is loaded.
// This try/catch block is a hack to let it fail gracefully.
}
return this;
}
// Show the slide at a certain position
, show: function (newIndex, overrideOptions) {
// Validate index
if (Math.abs(newIndex) > this.images.length - 1) {
return;
}
// Vars
var that = this
, $oldItemWrapper = that.$wrap.find('>.backstretch-item').addClass('deleteable')
, oldVideoWrapper = that.videoWrapper
, evtOptions = { relatedTarget: that.$container[0] };
// Trigger the "before" event
that.$container.trigger($.Event('backstretch.before', evtOptions), [that, newIndex]);
// Set the new frame index
this.index = newIndex;
var selectedImage = that.images[newIndex];
// Pause the slideshow
clearTimeout(that._cycleTimeout);
// New image
delete that.videoWrapper; // Current item may not be a video
var isVideo = isVideoSource(selectedImage);
if (isVideo) {
that.videoWrapper = new VideoWrapper(selectedImage);
that.$item = that.videoWrapper.$video.css('pointer-events', 'none');
} else {
that.$item = $('<img />');
}
that.$itemWrapper = $('<div class="backstretch-item">')
.append(that.$item);
if (this.options.bypassCss) {
that.$itemWrapper.css({
'display': 'none'
});
} else {
that.$itemWrapper.css(styles.itemWrapper);
that.$item.css(styles.item);
}
that.$item.bind(isVideo ? 'canplay' : 'load', function (e) {
var $this = $(this)
, $wrapper = $this.parent()
, options = $wrapper.data('options');
if (overrideOptions) {
options = $.extend({}, options, overrideOptions);
}
var imgWidth = this.naturalWidth || this.videoWidth || this.width
, imgHeight = this.naturalHeight || this.videoHeight || this.height;
// Save the ratio
$wrapper.data('ratio', imgWidth / imgHeight);
var getOption = function (opt) {
return options[opt] !== undefined ?
options[opt] :
that.options[opt];
};
var transition = getOption('transition');
var transitionEasing = getOption('transitionEasing');
var transitionDuration = getOption('transitionDuration');
// Show the image, then delete the old one
var bringInNextImage = function () {
if (oldVideoWrapper) {
oldVideoWrapper.stop();
oldVideoWrapper.destroy();
}
$oldItemWrapper.remove();
// Resume the slideshow
if (!that.paused && that.images.length > 1) {
that.cycle();
}
// Now we can clear the background on the element, to spare memory
if (!that.options.bypassCss && !that.isBody) {
that.$container.css('background', 'none');
}
// Trigger the "after" and "show" events
// "show" is being deprecated
$(['after', 'show']).each(function () {
that.$container.trigger($.Event('backstretch.' + this, evtOptions), [that, newIndex]);
});
if (isVideo) {
that.videoWrapper.play();
}
};
if ((that.firstShow && !that.options.animateFirst) || !transitionDuration || !transition) {
// Avoid transition on first show or if there's no transitionDuration value
$wrapper.show();
bringInNextImage();
} else {
performTransition({
'new': $wrapper,
old: $oldItemWrapper,
transition: transition,
duration: transitionDuration,
easing: transitionEasing,
complete: bringInNextImage
});
}
that.firstShow = false;
// Resize
that.resize();
});
that.$itemWrapper.appendTo(that.$wrap);
that.$item.attr('alt', selectedImage.alt || '');
that.$itemWrapper.data('options', selectedImage);
if (!isVideo) {
that.$item.attr('src', selectedImage.url);
}
that._currentImage = selectedImage;
return that;
}
, current: function () {
return this.index;
}
, next: function () {
var args = Array.prototype.slice.call(arguments, 0);
args.unshift(this.index < this.images.length - 1 ? this.index + 1 : 0);
return this.show.apply(this, args);
}
, prev: function () {
var args = Array.prototype.slice.call(arguments, 0);
args.unshift(this.index === 0 ? this.images.length - 1 : this.index - 1);
return this.show.apply(this, args);
}
, pause: function () {
// Pause the slideshow
this.paused = true;
if (this.videoWrapper) {
this.videoWrapper.pause();
}
return this;
}
, resume: function () {
// Resume the slideshow
this.paused = false;
if (this.videoWrapper) {
this.videoWrapper.play();
}
this.cycle();
return this;
}
, cycle: function () {
// Start/resume the slideshow
if(this.images.length > 1) {
// Clear the timeout, just in case
clearTimeout(this._cycleTimeout);
var duration = (this._currentImage && this._currentImage.duration) || this.options.duration;
var isVideo = isVideoSource(this._currentImage);
var callNext = function () {
this.$item.off('.cycle');
// Check for paused slideshow
if (!this.paused) {
this.next();
}
};
// Special video handling
if (isVideo) {
// Leave video at last frame
if (!this._currentImage.loop) {
var lastFrameTimeout = 0;
this.$item
.on('playing.cycle', function () {
var player = $(this).data('player');
clearTimeout(lastFrameTimeout);
lastFrameTimeout = setTimeout(function () {
player.pause();
player.$video.trigger('ended');
}, (player.getDuration() - player.getCurrentTime()) * 1000);
})
.on('ended.cycle', function () {
clearTimeout(lastFrameTimeout);
});
}
// On error go to next
this.$item.on('error.cycle initerror.cycle', $.proxy(callNext, this));
}
if (isVideo && !this._currentImage.duration) {
// It's a video - playing until end
this.$item.on('ended.cycle', $.proxy(callNext, this));
} else {
// Cycling according to specified duration
this._cycleTimeout = setTimeout($.proxy(callNext, this), duration);
}
}
return this;
}
, destroy: function (preserveBackground) {
// Stop the resize events
$(window).off('resize.backstretch orientationchange.backstretch');
// Stop any videos
if (this.videoWrapper) {
this.videoWrapper.destroy();
}
// Clear the timeout
clearTimeout(this._cycleTimeout);
// Remove Backstretch
if(!preserveBackground) {
this.$wrap.remove();
}
this.$container.removeData('backstretch');
}
};
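/* Usage sketch (added for illustration; not part of the original plugin source).
 * The public methods above can be reached through the stored instance or by passing
 * a method name back into the plugin; the selector and call order are only examples.
 *
 *   var instance = $('body').data('backstretch');
 *   instance.pause();                       // stop cycling
 *   instance.next();                        // advance one slide
 *   $('body').backstretch('resume');        // method-call form
 *   $('body').backstretch('destroy');       // remove the wrap and unbind
 */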
/**
* Video Abstraction Layer
*
* Static methods:
* > VideoWrapper.loadYoutubeAPI() -> Call in order to load the Youtube API.
* A 'youtube_api_load' event will be triggered on $(window) when the API is loaded.
*
* Generic:
* > player.type -> type of the video
* > player.video / player.$video -> contains the element holding the video
* > player.play() -> plays the video
* > player.pause() -> pauses the video
* > player.setCurrentTime(position) -> seeks to a position by seconds
*
* Youtube:
* > player.ytId will contain the youtube ID if the source is a youtube url
* > player.ytReady is a flag telling whether the youtube source is ready for playback
* */
var VideoWrapper = function () { this.init.apply(this, arguments); };
/**
* @param {Object} options
* @param {String|Array<String>|Array<{{src: String, type: String?}}>} options.url
* @param {Boolean} options.loop=false
* @param {Boolean?} options.mute=true
* @param {String?} options.poster
* loop, mute, poster
*/
VideoWrapper.prototype.init = function (options) {
var that = this;
var $video;
var setVideoElement = function () {
that.$video = $video;
that.video = $video[0];
};
// Determine video type
var videoType = 'video';
if (!(options.url instanceof Array) &&
YOUTUBE_REGEXP.test(options.url)) {
videoType = 'youtube';
}
that.type = videoType;
if (videoType === 'youtube') {
// Try to load the API in the meantime
VideoWrapper.loadYoutubeAPI();
that.ytId = options.url.match(YOUTUBE_REGEXP)[2];
var src = 'https://www.youtube.com/embed/' + that.ytId +
'?rel=0&autoplay=0&showinfo=0&controls=0&modestbranding=1' +
'&cc_load_policy=0&disablekb=1&iv_load_policy=3&loop=0' +
'&enablejsapi=1&origin=' + encodeURIComponent(window.location.origin);
that.__ytStartMuted = !!options.mute || options.mute === undefined;
$video = $('<iframe />')
.attr({ 'src_to_load': src })
.css({ 'border': 0, 'margin': 0, 'padding': 0 })
.data('player', that);
if (options.loop) {
$video.on('ended.loop', function () {
if (!that.__manuallyStopped) {
that.play();
}
});
}
that.ytReady = false;
setVideoElement();
if (window['YT']) {
that._initYoutube();
$video.trigger('initsuccess');
} else {
$(window).one('youtube_api_load', function () {
that._initYoutube();
$video.trigger('initsuccess');
});
}
}
else {
// Traditional <video> tag with multiple sources
$video = $('<video>')
.prop('autoplay', false)
.prop('controls', false)
.prop('loop', !!options.loop)
.prop('muted', !!options.mute || options.mute === undefined)
// Let the first frames be available before playback, as we do transitions
.prop('preload', 'auto')
.prop('poster', options.poster || '');
var sources = (options.url instanceof Array) ? options.url : [options.url];
for (var i = 0; i < sources.length; i++) {
var sourceItem = sources[i];
if (typeof(sourceItem) === 'string') {
sourceItem = { src: sourceItem };
}
$('<source>')
.attr('src', sourceItem.src)
// Make sure to not specify type if unknown -
// so the browser will try to autodetect.
.attr('type', sourceItem.type || null)
.appendTo($video);
}
if (!$video[0].canPlayType || !sources.length) {
$video.trigger('initerror');
} else {
$video.trigger('initsuccess');
}
setVideoElement();
}
};
VideoWrapper.prototype._initYoutube = function () {
var that = this;
var YT = window['YT'];
that.$video
.attr('src', that.$video.attr('src_to_load'))
.removeAttr('src_to_load');
// It won't init if it's not in the DOM, so we emulate that
var hasParent = !!that.$video[0].parentNode;
if (!hasParent) {
var $tmpParent = $('<div>').css('display', 'none !important').appendTo(document.body);
that.$video.appendTo($tmpParent);
}
var player = new YT.Player(that.video, {
events: {
'onReady': function () {
if (that.__ytStartMuted) {
player.mute();
}
if (!hasParent) {
// Restore parent to old state - without interrupting any changes
if (that.$video[0].parentNode === $tmpParent[0]) {
that.$video.detach();
}
$tmpParent.remove();
}
that.ytReady = true;
that._updateYoutubeSize();
that.$video.trigger('canplay');
},
'onStateChange': function (event) {
switch (event.data) {
case YT.PlayerState.PLAYING:
that.$video.trigger('playing');
break;
case YT.PlayerState.ENDED:
that.$video.trigger('ended');
break;
case YT.PlayerState.PAUSED:
that.$video.trigger('pause');
break;
case YT.PlayerState.BUFFERING:
that.$video.trigger('waiting');
break;
case YT.PlayerState.CUED:
that.$video.trigger('canplay');
break;
}
},
'onPlaybackQualityChange': function () {
that._updateYoutubeSize();
that.$video.trigger('resize');
},
'onError': function (err) {
that.hasError = true;
that.$video.trigger({ 'type': 'error', 'error': err });
}
}
});
that.ytPlayer = player;
return that;
};
VideoWrapper.prototype._updateYoutubeSize = function () {
var that = this;
switch (that.ytPlayer.getPlaybackQuality() || 'medium') {
case 'small':
that.video.videoWidth = 426;
that.video.videoHeight = 240;
break;
case 'medium':
that.video.videoWidth = 640;
that.video.videoHeight = 360;
break;
default:
case 'large':
that.video.videoWidth = 854;
that.video.videoHeight = 480;
break;
case 'hd720':
that.video.videoWidth = 1280;
that.video.videoHeight = 720;
break;
case 'hd1080':
that.video.videoWidth = 1920;
that.video.videoHeight = 1080;
break;
case 'highres':
that.video.videoWidth = 2560;
that.video.videoHeight = 1440;
break;
}
return that;
};
VideoWrapper.prototype.play = function () {
var that = this;
that.__manuallyStopped = false;
if (that.type === 'youtube') {
if (that.ytReady) {
that.$video.trigger('play');
that.ytPlayer.playVideo();
}
} else {
that.video.play();
}
return that;
};
VideoWrapper.prototype.pause = function () {
var that = this;
that.__manuallyStopped = false;
if (that.type === 'youtube') {
if (that.ytReady) {
that.ytPlayer.pauseVideo();
}
} else {
that.video.pause();
}
return that;
};
VideoWrapper.prototype.stop = function () {
var that = this;
that.__manuallyStopped = true;
if (that.type === 'youtube') {
if (that.ytReady) {
that.ytPlayer.pauseVideo();
that.ytPlayer.seekTo(0);
}
} else {
that.video.pause();
that.video.currentTime = 0;
}
return that;
};
VideoWrapper.prototype.destroy = function () {
var that = this;
if (that.ytPlayer) {
that.ytPlayer.destroy();
}
that.$video.remove();
return that;
};
VideoWrapper.prototype.getCurrentTime = function (seconds) {
var that = this;
if (that.type === 'youtube') {
if (that.ytReady) {
return that.ytPlayer.getCurrentTime();
}
} else {
return that.video.currentTime;
}
return 0;
};
VideoWrapper.prototype.setCurrentTime = function (seconds) {
var that = this;
if (that.type === 'youtube') {
if (that.ytReady) {
that.ytPlayer.seekTo(seconds, true);
}
} else {
that.video.currentTime = seconds;
}
return that;
};
VideoWrapper.prototype.getDuration = function () {
var that = this;
if (that.type === 'youtube') {
if (that.ytReady) {
return that.ytPlayer.getDuration();
}
} else {
return that.video.duration;
}
return 0;
};
/**
* This will load the youtube API (if not loaded yet)
* Use $(window).one('youtube_api_load', ...) to listen for API loaded event
*/
VideoWrapper.loadYoutubeAPI = function () {
if (window['YT']) {
return;
}
if (!$('script[src*=www\\.youtube\\.com\\/iframe_api]').length) {
$('<script type="text/javascript" src="https://www.youtube.com/iframe_api">').appendTo('body');
}
var ytAPILoadInt = setInterval(function () {
if (window['YT'] && window['YT'].loaded) {
$(window).trigger('youtube_api_load');
clearInterval(ytAPILoadInt);
}
}, 50);
};
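/* Usage sketch (added for illustration; not part of the original plugin source).
 * VideoWrapper is normally created by Backstretch from a slide entry, but a minimal
 * stand-alone use, assuming the placeholder URLs exist, would look like:
 *
 *   var player = new VideoWrapper({
 *     url: ['clip.webm', { src: 'clip.mp4', type: 'video/mp4' }],
 *     loop: false,
 *     mute: true,
 *     poster: 'poster.jpg'
 *   });
 *   player.$video.appendTo('body');
 *   player.play();   // or pause(), stop(), setCurrentTime(10)
 *
 * With a YouTube URL the iframe branch is used instead, and playback only starts
 * once `ytReady` becomes true.
 */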
/* SUPPORTS FIXED POSITION?
*
* Based on code from jQuery Mobile 1.1.0
* http://jquerymobile.com/
*
* In a nutshell, we need to figure out if fixed positioning is supported.
* Unfortunately, this is very difficult to do on iOS, and usually involves
* injecting content, scrolling the page, etc.. It's ugly.
* jQuery Mobile uses this workaround. It's not ideal, but works.
*
* Modified to detect IE6
* ========================= */
var supportsFixedPosition = (function () {
var ua = navigator.userAgent
, platform = navigator.platform
// Rendering engine is Webkit, and capture major version
, wkmatch = ua.match( /AppleWebKit\/([0-9]+)/ )
, wkversion = !!wkmatch && wkmatch[ 1 ]
, ffmatch = ua.match( /Fennec\/([0-9]+)/ )
, ffversion = !!ffmatch && ffmatch[ 1 ]
, operammobilematch = ua.match( /Opera Mobi\/([0-9]+)/ )
, omversion = !!operammobilematch && operammobilematch[ 1 ]
, iematch = ua.match( /MSIE ([0-9]+)/ )
, ieversion = !!iematch && iematch[ 1 ];
return !(
// iOS 4.3 and older : Platform is iPhone/Pad/Touch and Webkit version is less than 534 (ios5)
((platform.indexOf( "iPhone" ) > -1 || platform.indexOf( "iPad" ) > -1 || platform.indexOf( "iPod" ) > -1 ) && wkversion && wkversion < 534) ||
// Opera Mini
(window.operamini && ({}).toString.call( window.operamini ) === "[object OperaMini]") ||
(operammobilematch && omversion < 7458) ||
//Android lte 2.1: Platform is Android and Webkit version is less than 533 (Android 2.2)
(ua.indexOf( "Android" ) > -1 && wkversion && wkversion < 533) ||
// Firefox Mobile before 6.0 -
(ffversion && ffversion < 6) ||
// WebOS less than 3
("palmGetResource" in window && wkversion && wkversion < 534) ||
// MeeGo
(ua.indexOf( "MeeGo" ) > -1 && ua.indexOf( "NokiaBrowser/8.5.0" ) > -1) ||
// IE6
(ieversion && ieversion <= 6)
);
}());
}(jQuery, window));
|
src/jquery.backstretch.js
|
/*
* Backstretch
* http://srobbin.com/jquery-plugins/backstretch/
*
* Copyright (c) 2013 Scott Robbin
* Licensed under the MIT license.
*/
;(function ($, window, undefined) {
'use strict';
/** @const */
var YOUTUBE_REGEXP = /^.*(youtu\.be\/|v\/|u\/\w\/|embed\/|watch\?v=|\&v=)([^#\&\?]*).*/i;
/* PLUGIN DEFINITION
* ========================= */
$.fn.backstretch = function (images, options) {
var args = arguments;
/*
* Scroll the page one pixel to get the right window height on iOS
* Pretty harmless for everyone else
*/
if ($(window).scrollTop() === 0 ) {
window.scrollTo(0, 0);
}
var returnValues;
this.each(function (eachIndex) {
var $this = $(this)
, obj = $this.data('backstretch');
// Do we already have an instance attached to this element?
if (obj) {
// Is this a method they're trying to execute?
if (typeof args[0] === 'string' &&
typeof obj[args[0]] === 'function') {
// Call the method
var returnValue = obj[args[0]].apply(obj, Array.prototype.slice.call(args, 1));
if (returnValue === obj) { // If a method is chaining
returnValue = undefined;
}
if (returnValue !== undefined) {
returnValues = returnValues || [];
returnValues[eachIndex] = returnValue;
}
return; // Nothing further to do
}
// Merge the old options with the new
options = $.extend(obj.options, options);
// Remove the old instance
if ( obj.hasOwnProperty('destroy') ) {
obj.destroy(true);
}
}
// We need at least one image
if (!images || (images && images.length === 0)) {
var cssBackgroundImage = $this.css('background-image');
if (cssBackgroundImage && cssBackgroundImage !== 'none') {
images = [ { url: $this.css('backgroundImage').replace(/url\(|\)|"|'/g,"") } ];
} else {
$.error('No images were supplied for Backstretch, or element must have a CSS-defined background image.');
}
}
obj = new Backstretch(this, images, options || {});
$this.data('backstretch', obj);
});
return returnValues ? returnValues.length === 1 ? returnValues[0] : returnValues : this;
};
// If no element is supplied, we'll attach to body
$.backstretch = function (images, options) {
// Return the instance
return $('body')
.backstretch(images, options)
.data('backstretch');
};
// Custom selector
$.expr[':'].backstretch = function(elem) {
return $(elem).data('backstretch') !== undefined;
};
/* DEFAULTS
* ========================= */
$.fn.backstretch.defaults = {
duration: 5000 // Amount of time in between slides (if slideshow)
, transition: 'fade' // Type of transition between slides
, transitionDuration: 0 // Duration of transition between slides
, animateFirst: true // Animate the transition of first image of slideshow in?
, alignX: 0.5 // The x-alignment for the image, can be 'left'|'center'|'right' or any number between 0.0 and 1.0
, alignY: 0.5 // The y-alignment for the image, can be 'top'|'center'|'bottom' or any number between 0.0 and 1.0
, paused: false // Whether the images should slide after given duration
, start: 0 // Index of the first image to show
, preload: 2 // How many images to preload at a time?
, preloadSize: 1 // How many images can we preload in parallel?
, resolutionRefreshRate: 2500 // How long to wait before switching resolution?
, resolutionChangeRatioThreshold: 0.1 // How big a change is needed before switching resolution?
};
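/* Usage sketch (added for illustration; not part of the original plugin source).
 * Any of the defaults above can be overridden per call; alignX/alignY accept the
 * keywords or a number between 0.0 and 1.0. The image paths are placeholders.
 *
 *   $.backstretch(['one.jpg', 'two.jpg'], {
 *     duration: 8000,           // ms between slides
 *     transitionDuration: 400,
 *     alignX: 'left',
 *     alignY: 0.25,
 *     start: 1,                 // begin on the second image
 *     preload: 2
 *   });
 */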
/* STYLES
*
* Baked-in styles that we'll apply to our elements.
* In an effort to keep the plugin simple, these are not exposed as options.
* That said, anyone can override these in their own stylesheet.
* ========================= */
var styles = {
wrap: {
left: 0
, top: 0
, overflow: 'hidden'
, margin: 0
, padding: 0
, height: '100%'
, width: '100%'
, zIndex: -999999
}
, itemWrapper: {
position: 'absolute'
, display: 'none'
, margin: 0
, padding: 0
, border: 'none'
, width: '100%'
, height: '100%'
, zIndex: -999999
}
, item: {
position: 'absolute'
, margin: 0
, padding: 0
, border: 'none'
, width: '100%'
, height: '100%'
, maxWidth: 'none'
}
};
/* Given an array of different options for an image,
* choose the optimal image for the container size.
*
* Given an image template (a string with {{ width }} and/or
* {{height}} inside) and a container object, returns the
* image url with the exact values for the size of that
* container.
*
* Returns an array of urls optimized for the specified resolution.
*
*/
var optimalSizeImages = (function () {
/* Sorts the array of image sizes based on width */
var widthInsertSort = function (arr) {
for (var i = 1; i < arr.length; i++) {
var tmp = arr[i],
j = i;
while (arr[j - 1] && parseInt(arr[j - 1].width, 10) > parseInt(tmp.width, 10)) {
arr[j] = arr[j - 1];
--j;
}
arr[j] = tmp;
}
return arr;
};
/* Given an array of various sizes of the same image and a container width,
* return the best image.
*/
var selectBest = function (containerWidth, imageSizes) {
var devicePixelRatio = window.devicePixelRatio || 1;
var lastAllowedImage = 0;
var testWidth;
for (var j = 0, image; j < imageSizes.length; j++) {
image = imageSizes[j];
// In case a new image was pushed in, process it:
if (typeof image === 'string') {
image = imageSizes[j] = { url: image };
}
if (image.pixelRatio && parseFloat(image.pixelRatio) !== devicePixelRatio) {
// We disallowed choosing this image for current device pixel ratio,
// So skip this one.
continue;
}
// Mark this one as the last one we investigated
// which does not violate device pixel ratio rules.
// We may choose this one later if there's no match.
lastAllowedImage = j;
// For most images, we match the specified width against element width,
// And enforcing a limit depending on the "pixelRatio" property if specified.
// But if a pixelRatio="auto", then we consider the width as the physical width of the image,
// And match it while considering the device's pixel ratio.
testWidth = containerWidth;
if (image.pixelRatio === 'auto') {
testWidth *= devicePixelRatio;
}
// Stop when the width of the image is larger or equal to the container width
if (image.width >= testWidth) {
break;
}
}
// Use the image located at where we stopped
return imageSizes[Math.min(j, lastAllowedImage)];
};
var replaceTagsInUrl = function (url, templateReplacer) {
if (typeof url === 'string') {
url = url.replace(/{{(width|height)}}/g, templateReplacer);
} else if (url instanceof Array) {
for (var i = 0; i < url.length; i++) {
if (url[i].src) {
url[i].src = replaceTagsInUrl(url[i].src, templateReplacer);
} else {
url[i] = replaceTagsInUrl(url[i], templateReplacer);
}
}
}
return url;
};
return function ($container, images) {
var containerWidth = $container.width(),
containerHeight = $container.height();
var chosenImages = [];
var templateReplacer = function (match, key) {
if (key === 'width') {
return containerWidth;
}
if (key === 'height') {
return containerHeight;
}
return match;
};
for (var i = 0; i < images.length; i++) {
if ($.isArray(images[i])) {
images[i] = widthInsertSort(images[i]);
var chosen = selectBest(containerWidth, images[i]);
chosenImages.push(chosen);
} else {
// In case a new image was pushed in, process it:
if (typeof images[i] === 'string') {
images[i] = { url: images[i] };
}
var item = $.extend({}, images[i]);
item.url = replaceTagsInUrl(item.url, templateReplacer);
chosenImages.push(item);
}
}
return chosenImages;
};
})();
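/* Usage sketch (added for illustration; not part of the original plugin source).
 * The images array handled above may mix plain URLs, {{width}}/{{height}} templates,
 * and per-resolution alternatives; the URLs and sizes below are placeholders.
 *
 *   $.backstretch([
 *     'plain.jpg',
 *     { url: 'resize.php?w={{width}}&h={{height}}' },   // template form
 *     [                                                 // alternatives, chosen by container width
 *       { url: 'small.jpg',  width: 600 },
 *       { url: 'medium.jpg', width: 1200 },
 *       { url: 'retina.jpg', width: 1200, pixelRatio: 2 }
 *     ]
 *   ]);
 */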
var isVideoSource = function (source) {
return YOUTUBE_REGEXP.test(source.url) || source.isVideo;
};
/* Preload images */
var preload = (function (sources, startAt, count, batchSize, callback) {
// Plugin cache
var cache = [];
// Wrapper for cache
var caching = function(image){
for (var i = 0; i < cache.length; i++) {
if (cache[i].src === image.src) {
return cache[i];
}
}
cache.push(image);
return image;
};
// Execute callback
var exec = function(sources, callback, last){
if (typeof callback === 'function') {
callback.call(sources, last);
}
};
// Closure to hide cache
return function preload (sources, startAt, count, batchSize, callback){
// Check input data
if (typeof sources === 'undefined') {
return;
}
if (!$.isArray(sources)) {
sources = [sources];
}
if (arguments.length < 5 && typeof arguments[arguments.length - 1] === 'function') {
callback = arguments[arguments.length - 1];
}
startAt = (typeof startAt === 'function' || !startAt) ? 0 : startAt;
count = (typeof count === 'function' || !count || count < 0) ? sources.length : Math.min(count, sources.length);
batchSize = (typeof batchSize === 'function' || !batchSize) ? 1 : batchSize;
if (startAt >= sources.length) {
startAt = 0;
count = 0;
}
if (batchSize < 0) {
batchSize = count;
}
batchSize = Math.min(batchSize, count);
var next = sources.slice(startAt + batchSize, count - batchSize);
sources = sources.slice(startAt, batchSize);
count = sources.length;
// If sources array is empty
if (!count) {
exec(sources, callback, true);
return;
}
// Image loading callback
var countLoaded = 0;
var loaded = function() {
countLoaded++;
if (countLoaded !== count) {
return;
}
exec(sources, callback, !next);
preload(next, 0, 0, batchSize, callback);
};
// Loop sources to preload
var image;
for (var i = 0; i < sources.length; i++) {
if (isVideoSource(sources[i])) {
// Do not preload videos. There are issues with that.
// First - we need to keep the preloaded instance and use exactly that, not a copy.
// Second - there are memory issues.
// If users request this, I'll try to implement it.
continue;
} else {
image = new Image();
image.src = sources[i].url;
image = caching(image);
if (image.complete) {
loaded();
} else {
$(image).on('load error', loaded);
}
}
}
};
})();
/* Process images array */
var processImagesArray = function (images) {
var processed = [];
for (var i = 0; i < images.length; i++) {
if (typeof images[i] === 'string') {
processed.push({ url: images[i] });
}
else if ($.isArray(images[i])) {
processed.push(processImagesArray(images[i]));
}
else {
processed.push(processOptions(images[i]));
}
}
return processed;
};
/* Process options */
var processOptions = function (options, required) {
// Convert old options
// centeredX/centeredY are deprecated
if (options.centeredX || options.centeredY) {
if (window.console && window.console.log) {
window.console.log('jquery.backstretch: `centeredX`/`centeredY` is deprecated, please use `alignX`/`alignY`');
}
if (options.centeredX) {
options.alignX = 0.5;
}
if (options.centeredY) {
options.alignY = 0.5;
}
}
// Deprecated spec
if (options.speed !== undefined) {
if (window.console && window.console.log) {
window.console.log('jquery.backstretch: `speed` is deprecated, please use `transitionDuration`');
}
options.transitionDuration = options.speed;
options.transition = 'fade';
}
// Typo
if (options.resolutionChangeRatioTreshold !== undefined) {
window.console.log('jquery.backstretch: `treshold` is a typo!');
options.resolutionChangeRatioThreshold = options.resolutionChangeRatioTreshold;
}
// Current spec that needs processing
if (options.fadeFirst !== undefined) {
options.animateFirst = options.fadeFirst;
}
if (options.fade !== undefined) {
options.transitionDuration = options.fade;
options.transition = 'fade';
}
return processAlignOptions(options);
};
/* Process align options */
var processAlignOptions = function (options, required) {
if (options.alignX === 'left') {
options.alignX = 0.0;
}
else if (options.alignX === 'center') {
options.alignX = 0.5;
}
else if (options.alignX === 'right') {
options.alignX = 1.0;
}
else {
if (options.alignX !== undefined || required) {
options.alignX = parseFloat(options.alignX);
if (isNaN(options.alignX)) {
options.alignX = 0.5;
}
}
}
if (options.alignY === 'top') {
options.alignY = 0.0;
}
else if (options.alignY === 'center') {
options.alignY = 0.5;
}
else if (options.alignY === 'bottom') {
options.alignY = 1.0;
}
else {
if (options.alignY !== undefined || required) {
options.alignY = parseFloat(options.alignY);
if (isNaN(options.alignY)) {
options.alignY = 0.5;
}
}
}
return options;
};
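/* Conversion sketch (added for illustration; not part of the original plugin source).
 * processOptions/processAlignOptions above map deprecated spellings onto the current
 * ones, so legacy option objects keep working; roughly:
 *
 *   { centeredX: true, speed: 300 }
 *     // becomes -> { alignX: 0.5, transitionDuration: 300, transition: 'fade' }
 *
 *   { fade: 750, resolutionChangeRatioTreshold: 0.2 }
 *     // becomes -> { transitionDuration: 750, transition: 'fade',
 *     //              resolutionChangeRatioThreshold: 0.2 }
 */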
/* CLASS DEFINITION
* ========================= */
var Backstretch = function (container, images, options) {
this.options = $.extend({}, $.fn.backstretch.defaults, options || {});
this.firstShow = true;
// Process options
processOptions(this.options, true);
/* In its simplest form, we allow Backstretch to be called on an image path.
* e.g. $.backstretch('/path/to/image.jpg')
* So, we need to turn this back into an array.
*/
this.images = processImagesArray($.isArray(images) ? images : [images]);
/**
* Paused-Option
*/
if (this.options.paused) {
this.paused = true;
}
/**
* Start-Option (Index)
*/
if (this.options.start >= this.images.length)
{
this.options.start = this.images.length - 1;
}
if (this.options.start < 0)
{
this.options.start = 0;
}
// Convenience reference to know if the container is body.
this.isBody = container === document.body;
/* We're keeping track of a few different elements
*
* Container: the element that Backstretch was called on.
* Wrap: a DIV that we place the image into, so we can hide the overflow.
* Root: Convenience reference to help calculate the correct height.
*/
var $window = $(window);
this.$container = $(container);
this.$root = this.isBody ? supportsFixedPosition ? $window : $(document) : this.$container;
this.originalImages = this.images;
this.images = optimalSizeImages(
this.options.alwaysTestWindowResolution ? $window : this.$root,
this.originalImages);
/**
* Pre-Loading.
* This is the first image, so we will preload a minimum of one image.
*/
preload(this.images, this.options.start || 0, this.options.preload || 1);
// Don't create a new wrap if one already exists (from a previous instance of Backstretch)
var $existing = this.$container.children(".backstretch").first();
this.$wrap = $existing.length ? $existing :
$('<div class="backstretch"></div>')
.css(this.options.bypassCss ? {} : styles.wrap)
.appendTo(this.$container);
if (!this.options.bypassCss) {
// Non-body elements need some style adjustments
if (!this.isBody) {
// If the container is statically positioned, we need to make it relative,
// and if no zIndex is defined, we should set it to zero.
var position = this.$container.css('position')
, zIndex = this.$container.css('zIndex');
this.$container.css({
position: position === 'static' ? 'relative' : position
, zIndex: zIndex === 'auto' ? 0 : zIndex
});
// Needs a higher z-index
this.$wrap.css({zIndex: -999998});
}
// Fixed or absolute positioning?
this.$wrap.css({
position: this.isBody && supportsFixedPosition ? 'fixed' : 'absolute'
});
}
// Set the first image
this.index = this.options.start;
this.show(this.index);
// Listen for resize
$window.on('resize.backstretch', $.proxy(this.resize, this))
.on('orientationchange.backstretch', $.proxy(function () {
// Need to do this in order to get the right window height
if (this.isBody && window.pageYOffset === 0) {
window.scrollTo(0, 1);
this.resize();
}
}, this));
};
var performTransition = function (options) {
var transition = options.transition || 'fade';
// Look for multiple options
if (typeof transition === 'string' && transition.indexOf('|') > -1) {
transition = transition.split('|');
}
if (transition instanceof Array) {
transition = transition[Math.round(Math.random() * (transition.length - 1))];
}
switch (transition.toString().toLowerCase()) {
default:
case 'fade':
options['new'].fadeIn({
duration: options.duration,
complete: options.complete,
easing: options.easing || undefined
});
break;
case 'pushleft':
case 'push_left':
case 'pushright':
case 'push_right':
case 'pushup':
case 'push_up':
case 'pushdown':
case 'push_down':
case 'coverleft':
case 'cover_left':
case 'coverright':
case 'cover_right':
case 'coverup':
case 'cover_up':
case 'coverdown':
case 'cover_down':
var transitionParts = transition.match(/^(cover|push)_?(.*)$/);
var animProp = transitionParts[2] === 'left' ? 'right' :
transitionParts[2] === 'right' ? 'left' :
transitionParts[2] === 'down' ? 'top' :
transitionParts[2] === 'up' ? 'bottom' :
'right';
var newCssStart = {
'display': ''
}, newCssAnim = {};
newCssStart[animProp] = '-100%';
newCssAnim[animProp] = 0;
options['new']
.css(newCssStart)
.animate(newCssAnim, {
duration: options.duration,
complete: function () {
options['new'].css(animProp, '');
options.complete.apply(this, arguments);
},
easing: options.easing || undefined
});
if (transitionParts[1] === 'push' && options['old']) {
var oldCssAnim = {};
oldCssAnim[animProp] = '100%';
options['old']
.animate(oldCssAnim, {
duration: options.duration,
complete: function () {
options['old'].css('display', 'none');
},
easing: options.easing || undefined
});
}
break;
}
};
/* PUBLIC METHODS
* ========================= */
Backstretch.prototype = {
resize: function () {
try {
// Check for a better suited image after the resize
var $resTest = this.options.alwaysTestWindowResolution ? $(window) : this.$root;
var newContainerWidth = $resTest.width();
var newContainerHeight = $resTest.height();
var changeRatioW = newContainerWidth / (this._lastResizeContainerWidth || 0);
var changeRatioH = newContainerHeight / (this._lastResizeContainerHeight || 0);
var resolutionChangeRatioThreshold = this.options.resolutionChangeRatioThreshold || 0.0;
// check for big changes in container size
if ((newContainerWidth !== this._lastResizeContainerWidth ||
newContainerHeight !== this._lastResizeContainerHeight) &&
((Math.abs(changeRatioW - 1) >= resolutionChangeRatioThreshold || isNaN(changeRatioW)) ||
(Math.abs(changeRatioH - 1) >= resolutionChangeRatioThreshold || isNaN(changeRatioH)))) {
this._lastResizeContainerWidth = newContainerWidth;
this._lastResizeContainerHeight = newContainerHeight;
// Big change: rebuild the entire images array
this.images = optimalSizeImages($resTest, this.originalImages);
// Preload them (they will be automatically inserted on the next cycle)
if (this.options.preload) {
preload(this.images, (this.index + 1) % this.images.length, this.options.preload);
}
// In case there is no cycle and the new source is different than the current
if (this.images.length === 1 &&
this._currentImage !== this.images[0]) {
// Wait a little and update the image being shown
var that = this;
clearTimeout(that._selectAnotherResolutionTimeout);
that._selectAnotherResolutionTimeout = setTimeout(function () {
that.show(0);
}, this.options.resolutionRefreshRate);
}
}
var bgCSS = {left: 0, top: 0, right: 'auto', bottom: 'auto'}
, rootWidth = this.isBody ? this.$root.width() : this.$root.innerWidth()
, rootHeight = this.isBody ? ( window.innerHeight ? window.innerHeight : this.$root.height() ) : this.$root.innerHeight()
, bgWidth = rootWidth
, bgHeight = bgWidth / this.$itemWrapper.data('ratio')
, evt = $.Event('backstretch.resize', {
relatedTarget: this.$container[0]
})
, bgOffset
, alignX = this._currentImage.alignX === undefined ? this.options.alignX : this._currentImage.alignX
, alignY = this._currentImage.alignY === undefined ? this.options.alignY : this._currentImage.alignY;
// Make adjustments based on image ratio
if (bgHeight >= rootHeight) {
bgCSS.top = -(bgHeight - rootHeight) * alignY;
} else {
bgHeight = rootHeight;
bgWidth = bgHeight * this.$itemWrapper.data('ratio');
bgOffset = (bgWidth - rootWidth) / 2;
bgCSS.left = -(bgWidth - rootWidth) * alignX;
}
if (!this.options.bypassCss) {
this.$wrap
.css({width: rootWidth, height: rootHeight})
.find('>.backstretch-item').not('.deleteable')
.each(function () {
var $wrapper = $(this);
$wrapper.find('img,video,iframe')
.css({width: bgWidth, height: bgHeight})
.css(bgCSS);
});
}
this.$container.trigger(evt, this);
} catch(err) {
// IE7 seems to trigger resize before the image is loaded.
// This try/catch block is a hack to let it fail gracefully.
}
return this;
}
// Show the slide at a certain position
, show: function (newIndex, overrideOptions) {
// Validate index
if (Math.abs(newIndex) > this.images.length - 1) {
return;
}
// Vars
var that = this
, $oldItemWrapper = that.$wrap.find('>.backstretch-item').addClass('deleteable')
, oldVideoWrapper = that.videoWrapper
, evtOptions = { relatedTarget: that.$container[0] };
// Trigger the "before" event
that.$container.trigger($.Event('backstretch.before', evtOptions), [that, newIndex]);
// Set the new frame index
this.index = newIndex;
var selectedImage = that.images[newIndex];
// Pause the slideshow
clearTimeout(that._cycleTimeout);
// New image
delete that.videoWrapper; // Current item may not be a video
var isVideo = isVideoSource(selectedImage);
if (isVideo) {
that.videoWrapper = new VideoWrapper(selectedImage);
that.$item = that.videoWrapper.$video.css('pointer-events', 'none');
} else {
that.$item = $('<img />');
}
that.$itemWrapper = $('<div class="backstretch-item">')
.append(that.$item);
if (this.options.bypassCss) {
that.$itemWrapper.css({
'display': 'none'
});
} else {
that.$itemWrapper.css(styles.itemWrapper);
that.$item.css(styles.item);
}
that.$item.bind(isVideo ? 'canplay' : 'load', function (e) {
var $this = $(this)
, $wrapper = $this.parent()
, options = $wrapper.data('options');
if (overrideOptions) {
options = $.extend({}, options, overrideOptions);
}
var imgWidth = this.naturalWidth || this.videoWidth || this.width
, imgHeight = this.naturalHeight || this.videoHeight || this.height;
// Save the ratio
$wrapper.data('ratio', imgWidth / imgHeight);
var getOption = function (opt) {
return options[opt] !== undefined ?
options[opt] :
that.options[opt];
};
var transition = getOption('transition');
var transitionEasing = getOption('transitionEasing');
var transitionDuration = getOption('transitionDuration');
// Show the image, then delete the old one
var bringInNextImage = function () {
if (oldVideoWrapper) {
oldVideoWrapper.stop();
oldVideoWrapper.destroy();
}
$oldItemWrapper.remove();
// Resume the slideshow
if (!that.paused && that.images.length > 1) {
that.cycle();
}
// Now we can clear the background on the element, to spare memory
if (!that.options.bypassCss && !that.isBody) {
that.$container.css('background', 'none');
}
// Trigger the "after" and "show" events
// "show" is being deprecated
$(['after', 'show']).each(function () {
that.$container.trigger($.Event('backstretch.' + this, evtOptions), [that, newIndex]);
});
if (isVideo) {
that.videoWrapper.play();
}
};
if ((that.firstShow && !that.options.animateFirst) || !transitionDuration || !transition) {
// Avoid transition on first show or if there's no transitionDuration value
$wrapper.show();
bringInNextImage();
} else {
performTransition({
'new': $wrapper,
old: $oldItemWrapper,
transition: transition,
duration: transitionDuration,
easing: transitionEasing,
complete: bringInNextImage
});
}
that.firstShow = false;
// Resize
that.resize();
});
that.$itemWrapper.appendTo(that.$wrap);
that.$item.attr('alt', selectedImage.alt || '');
that.$itemWrapper.data('options', selectedImage);
if (!isVideo) {
that.$item.attr('src', selectedImage.url);
}
that._currentImage = selectedImage;
return that;
}
, current: function () {
return this.index;
}
, next: function () {
var args = Array.prototype.slice.call(arguments, 0);
args.unshift(this.index < this.images.length - 1 ? this.index + 1 : 0);
return this.show.apply(this, args);
}
, prev: function () {
var args = Array.prototype.slice.call(arguments, 0);
args.unshift(this.index === 0 ? this.images.length - 1 : this.index - 1);
return this.show.apply(this, args);
}
, pause: function () {
// Pause the slideshow
this.paused = true;
if (this.videoWrapper) {
this.videoWrapper.pause();
}
return this;
}
, resume: function () {
// Resume the slideshow
this.paused = false;
if (this.videoWrapper) {
this.videoWrapper.play();
}
this.cycle();
return this;
}
, cycle: function () {
// Start/resume the slideshow
if(this.images.length > 1) {
// Clear the timeout, just in case
clearTimeout(this._cycleTimeout);
var duration = (this._currentImage && this._currentImage.duration) || this.options.duration;
var isVideo = isVideoSource(this._currentImage);
var callNext = function () {
this.$item.off('.cycle');
// Check for paused slideshow
if (!this.paused) {
this.next();
}
};
// Special video handling
if (isVideo) {
// Leave video at last frame
if (!this._currentImage.loop) {
var lastFrameTimeout = 0;
this.$item
.on('playing.cycle', function () {
var player = $(this).data('player');
clearTimeout(lastFrameTimeout);
lastFrameTimeout = setTimeout(function () {
player.pause();
player.$video.trigger('ended');
}, (player.getDuration() - player.getCurrentTime()) * 1000);
})
.on('ended.cycle', function () {
clearTimeout(lastFrameTimeout);
});
}
// On error go to next
this.$item.on('error.cycle initerror.cycle', $.proxy(callNext, this));
}
if (isVideo && !this._currentImage.duration) {
// It's a video - playing until end
this.$item.on('ended.cycle', $.proxy(callNext, this));
} else {
// Cycling according to specified duration
this._cycleTimeout = setTimeout($.proxy(callNext, this), duration);
}
}
return this;
}
, destroy: function (preserveBackground) {
// Stop the resize events
$(window).off('resize.backstretch orientationchange.backstretch');
// Stop any videos
if (this.videoWrapper) {
this.videoWrapper.destroy();
}
// Clear the timeout
clearTimeout(this._cycleTimeout);
// Remove Backstretch
if(!preserveBackground) {
this.$wrap.remove();
}
this.$container.removeData('backstretch');
}
};
/**
* Video Abstraction Layer
*
* Static methods:
* > VideoWrapper.loadYoutubeAPI() -> Call in order to load the Youtube API.
* A 'youtube_api_load' event will be triggered on $(window) when the API is loaded.
*
* Generic:
* > player.type -> type of the video
* > player.video / player.$video -> contains the element holding the video
* > player.play() -> plays the video
* > player.pause() -> pauses the video
* > player.setCurrentTime(position) -> seeks to a position by seconds
*
* Youtube:
* > player.ytId will contain the youtube ID if the source is a youtube url
* > player.ytReady is a flag telling whether the youtube source is ready for playback
* */
var VideoWrapper = function () { this.init.apply(this, arguments); };
/**
* @param {Object} options
* @param {String|Array<String>|Array<{{src: String, type: String?}}>} options.url
* @param {Boolean} options.loop=false
* @param {Boolean?} options.mute=true
* @param {String?} options.poster
* loop, mute, poster
*/
VideoWrapper.prototype.init = function (options) {
var that = this;
var $video;
var setVideoElement = function () {
that.$video = $video;
that.video = $video[0];
};
// Determine video type
var videoType = 'video';
if (!(options.url instanceof Array) &&
YOUTUBE_REGEXP.test(options.url)) {
videoType = 'youtube';
}
that.type = videoType;
if (videoType === 'youtube') {
// Try to load the API in the meantime
VideoWrapper.loadYoutubeAPI();
that.ytId = options.url.match(YOUTUBE_REGEXP)[2];
var src = 'https://www.youtube.com/embed/' + that.ytId +
'?rel=0&autoplay=0&showinfo=0&controls=0&modestbranding=1' +
'&cc_load_policy=0&disablekb=1&iv_load_policy=3&loop=0' +
'&enablejsapi=1&origin=' + encodeURIComponent(window.location.origin);
that.__ytStartMuted = !!options.mute || options.mute === undefined;
$video = $('<iframe />')
.attr({ 'src_to_load': src })
.css({ 'border': 0, 'margin': 0, 'padding': 0 })
.data('player', that);
if (options.loop) {
$video.on('ended.loop', function () {
if (!that.__manuallyStopped) {
that.play();
}
});
}
that.ytReady = false;
setVideoElement();
if (window['YT']) {
that._initYoutube();
$video.trigger('initsuccess');
} else {
$(window).one('youtube_api_load', function () {
that._initYoutube();
$video.trigger('initsuccess');
});
}
}
else {
// Traditional <video> tag with multiple sources
$video = $('<video>')
.prop('autoplay', false)
.prop('controls', false)
.prop('loop', !!options.loop)
.prop('muted', !!options.mute || options.mute === undefined)
// Let the first frames be available before playback, as we do transitions
.prop('preload', 'auto')
.prop('poster', options.poster || '');
var sources = (options.url instanceof Array) ? options.url : [options.url];
for (var i = 0; i < sources.length; i++) {
var sourceItem = sources[i];
if (typeof(sourceItem) === 'string') {
sourceItem = { src: sourceItem };
}
$('<source>')
.attr('src', sourceItem.src)
// Make sure to not specify type if unknown -
// so the browser will try to autodetect.
.attr('type', sourceItem.type || null)
.appendTo($video);
}
if (!$video[0].canPlayType || !sources.length) {
$video.trigger('initerror');
} else {
$video.trigger('initsuccess');
}
setVideoElement();
}
};
VideoWrapper.prototype._initYoutube = function () {
var that = this;
var YT = window['YT'];
that.$video
.attr('src', that.$video.attr('src_to_load'))
.removeAttr('src_to_load');
// It won't init if it's not in the DOM, so we emulate that
var hasParent = !!that.$video[0].parentNode;
if (!hasParent) {
var $tmpParent = $('<div>').css('display', 'none !important').appendTo(document.body);
that.$video.appendTo($tmpParent);
}
var player = new YT.Player(that.video, {
events: {
'onReady': function () {
if (that.__ytStartMuted) {
player.mute();
}
if (!hasParent) {
// Restore parent to old state - without interrupting any changes
if (that.$video[0].parentNode === $tmpParent[0]) {
that.$video.detach();
}
$tmpParent.remove();
}
that.ytReady = true;
that._updateYoutubeSize();
that.$video.trigger('canplay');
},
'onStateChange': function (event) {
switch (event.data) {
case YT.PlayerState.PLAYING:
that.$video.trigger('playing');
break;
case YT.PlayerState.ENDED:
that.$video.trigger('ended');
break;
case YT.PlayerState.PAUSED:
that.$video.trigger('pause');
break;
case YT.PlayerState.BUFFERING:
that.$video.trigger('waiting');
break;
case YT.PlayerState.CUED:
that.$video.trigger('canplay');
break;
}
},
'onPlaybackQualityChange': function () {
that._updateYoutubeSize();
that.$video.trigger('resize');
},
'onError': function (err) {
that.hasError = true;
that.$video.trigger({ 'type': 'error', 'error': err });
}
}
});
that.ytPlayer = player;
return that;
};
VideoWrapper.prototype._updateYoutubeSize = function () {
var that = this;
switch (that.ytPlayer.getPlaybackQuality() || 'medium') {
case 'small':
that.video.videoWidth = 426;
that.video.videoHeight = 240;
break;
case 'medium':
that.video.videoWidth = 640;
that.video.videoHeight = 360;
break;
default:
case 'large':
that.video.videoWidth = 854;
that.video.videoHeight = 480;
break;
case 'hd720':
that.video.videoWidth = 1280;
that.video.videoHeight = 720;
break;
case 'hd1080':
that.video.videoWidth = 1920;
that.video.videoHeight = 1080;
break;
case 'highres':
that.video.videoWidth = 2560;
that.video.videoHeight = 1440;
break;
}
return that;
};
VideoWrapper.prototype.play = function () {
var that = this;
that.__manuallyStopped = false;
if (that.type === 'youtube') {
if (that.ytReady) {
that.$video.trigger('play');
that.ytPlayer.playVideo();
}
} else {
that.video.play();
}
return that;
};
VideoWrapper.prototype.pause = function () {
var that = this;
that.__manuallyStopped = false;
if (that.type === 'youtube') {
if (that.ytReady) {
that.ytPlayer.pauseVideo();
}
} else {
that.video.pause();
}
return that;
};
VideoWrapper.prototype.stop = function () {
var that = this;
that.__manuallyStopped = true;
if (that.type === 'youtube') {
if (that.ytReady) {
that.ytPlayer.pauseVideo();
that.ytPlayer.seekTo(0);
}
} else {
that.video.pause();
that.video.currentTime = 0;
}
return that;
};
VideoWrapper.prototype.destroy = function () {
var that = this;
if (that.ytPlayer) {
that.ytPlayer.destroy();
}
that.$video.remove();
return that;
};
VideoWrapper.prototype.getCurrentTime = function (seconds) {
var that = this;
if (that.type === 'youtube') {
if (that.ytReady) {
return that.ytPlayer.getCurrentTime();
}
} else {
return that.video.currentTime;
}
return 0;
};
VideoWrapper.prototype.setCurrentTime = function (seconds) {
var that = this;
if (that.type === 'youtube') {
if (that.ytReady) {
that.ytPlayer.seekTo(seconds, true);
}
} else {
that.video.currentTime = seconds;
}
return that;
};
VideoWrapper.prototype.getDuration = function () {
var that = this;
if (that.type === 'youtube') {
if (that.ytReady) {
return that.ytPlayer.getDuration();
}
} else {
return that.video.duration;
}
return 0;
};
/**
* This will load the youtube API (if not loaded yet)
* Use $(window).one('youtube_api_load', ...) to listen for API loaded event
*/
VideoWrapper.loadYoutubeAPI = function () {
if (window['YT']) {
return;
}
if (!$('script[src*=www\\.youtube\\.com\\/iframe_api]').length) {
$('<script type="text/javascript" src="https://www.youtube.com/iframe_api">').appendTo('body');
}
var ytAPILoadInt = setInterval(function () {
if (window['YT'] && window['YT'].loaded) {
$(window).trigger('youtube_api_load');
clearInterval(ytAPILoadInt);
}
}, 50);
};
/* SUPPORTS FIXED POSITION?
*
* Based on code from jQuery Mobile 1.1.0
* http://jquerymobile.com/
*
* In a nutshell, we need to figure out if fixed positioning is supported.
* Unfortunately, this is very difficult to do on iOS, and usually involves
* injecting content, scrolling the page, etc.. It's ugly.
* jQuery Mobile uses this workaround. It's not ideal, but works.
*
* Modified to detect IE6
* ========================= */
var supportsFixedPosition = (function () {
var ua = navigator.userAgent
, platform = navigator.platform
// Rendering engine is Webkit, and capture major version
, wkmatch = ua.match( /AppleWebKit\/([0-9]+)/ )
, wkversion = !!wkmatch && wkmatch[ 1 ]
, ffmatch = ua.match( /Fennec\/([0-9]+)/ )
, ffversion = !!ffmatch && ffmatch[ 1 ]
, operammobilematch = ua.match( /Opera Mobi\/([0-9]+)/ )
, omversion = !!operammobilematch && operammobilematch[ 1 ]
, iematch = ua.match( /MSIE ([0-9]+)/ )
, ieversion = !!iematch && iematch[ 1 ];
return !(
// iOS 4.3 and older : Platform is iPhone/Pad/Touch and Webkit version is less than 534 (ios5)
((platform.indexOf( "iPhone" ) > -1 || platform.indexOf( "iPad" ) > -1 || platform.indexOf( "iPod" ) > -1 ) && wkversion && wkversion < 534) ||
// Opera Mini
(window.operamini && ({}).toString.call( window.operamini ) === "[object OperaMini]") ||
(operammobilematch && omversion < 7458) ||
//Android lte 2.1: Platform is Android and Webkit version is less than 533 (Android 2.2)
(ua.indexOf( "Android" ) > -1 && wkversion && wkversion < 533) ||
// Firefox Mobile before 6.0 -
(ffversion && ffversion < 6) ||
// WebOS less than 3
("palmGetResource" in window && wkversion && wkversion < 534) ||
// MeeGo
(ua.indexOf( "MeeGo" ) > -1 && ua.indexOf( "NokiaBrowser/8.5.0" ) > -1) ||
// IE6
(ieversion && ieversion <= 6)
);
}());
}(jQuery, window));
|
Fixed pixelRatio=auto bug
|
src/jquery.backstretch.js
|
Fixed pixelRatio=auto bug
|
<ide><path>src/jquery.backstretch.js
<ide> image = imageSizes[j] = { url: image };
<ide> }
<ide>
<del> if (image.pixelRatio && parseFloat(image.pixelRatio) !== devicePixelRatio) {
<add> if (image.pixelRatio && image.pixelRatio !== 'auto' && parseFloat(image.pixelRatio) !== devicePixelRatio) {
<ide> // We disallowed choosing this image for current device pixel ratio,
<ide> // So skip this one.
<ide> continue;
|
|
Java
|
apache-2.0
|
68942fb9379bfe4b0dd0d7669a783f137694c11c
| 0 |
DataSketches/sketches-core
|
/*
* Copyright 2017, Yahoo! Inc. Licensed under the terms of the
* Apache License 2.0. See LICENSE file at the project root for terms.
*/
package com.yahoo.sketches.hll;
import static com.yahoo.sketches.hll.TgtHllType.HLL_4;
import static com.yahoo.sketches.hll.TgtHllType.HLL_6;
import static com.yahoo.sketches.hll.TgtHllType.HLL_8;
import static java.lang.Math.min;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
import org.testng.annotations.Test;
import com.yahoo.memory.Memory;
import com.yahoo.sketches.SketchesArgumentException;
/**
* @author Lee Rhodes
*/
@SuppressWarnings("unused")
public class UnionTest {
static final String LS = System.getProperty("line.separator");
static final int[] nArr = new int[] {1, 3, 10, 30, 100, 300, 1000, 3000, 10000, 30000};
// n1,... lgK,... tgtHll, Mode Ooo Est
static final String hdrFmt =
"%6s%6s%6s" + "%8s%5s%5s%5s" + "%7s%6s" + "%7s%6s%6s" +"%3s%2s%2s"+ "%13s%12s";
static final String hdr = String.format(hdrFmt,
"n1", "n2", "tot",
"lgMaxK", "lgK1", "lgK2", "lgKR",
"tgt1", "tgt2",
"Mode1", "Mode2", "ModeR",
"1", "2", "R",
"Est", "Err%");
/**
* The task here is to check the transition boundaries as the sketch morphs between LIST to
* SET to HLL modes. The transition points vary as a function of lgConfigK. In addition,
* this checks that the union operation is operating properly based on the order the
* sketches are presented to the union.
*/
@Test
public void checkUnions() {
//HLL_4: t=0, HLL_6: t=1, HLL_8: t=2
int t1 = 2; //type = HLL_8
int t2 = 2;
int rt = 2; //result type
println("TgtR: " + TgtHllType.values()[rt].toString());
int lgK1 = 7;
int lgK2 = 7;
int lgMaxK = 7;
int n1 = 7;
int n2 = 7;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
n1 = 8;
n2 = 7;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
n1 = 7;
n2 = 8;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
n1 = 8;
n2 = 8;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
n1 = 7;
n2 = 14;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
println("++END BASE GROUP++");
int i = 0;
for (i = 7; i <= 13; i++)
{
lgK1 = i;
lgK2 = i;
lgMaxK = i;
{
n1 = ((1 << (i - 3)) * 3)/4; //compute the transition point
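// Added note: for lgK = 7 this evaluates to ((1 << 4) * 3) / 4 = 12; the +/- 2
// adjustments below probe just below and above that computed boundary.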
n2 = n1;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
n1 += 2;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
n1 -= 2;
n2 += 2;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
n1 += 2;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
}
println("--END MINOR GROUP--");
lgK1 = i;
lgK2 = i + 1;
lgMaxK = i;
{
n1 = ((1 << (i - 3)) * 3)/4;
n2 = n1;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
n1 += 2;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
n1 -= 2;
n2 += 2;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
n1 += 2;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
}
println("--END MINOR GROUP--");
lgK1 = i + 1;
lgK2 = i;
lgMaxK = i;
{
n1 = ((1 << (i - 3)) * 3)/4;
n2 = n1;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
n1 += 2;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
n1 -= 2;
n2 += 2;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
n1 += 2;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
}
println("--END MINOR GROUP--");
lgK1 = i + 1;
lgK2 = i + 1;
lgMaxK = i;
{
n1 = ((1 << (i - 3)) * 3)/4;
n2 = n1;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
n1 += 2;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
n1 -= 2;
n2 += 2;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
n1 += 2;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
}
println("++END MAJOR GROUP++");
} //End for
}
@Test
public void check() { //n1=8, n2=7, lgK1=lgK2=lgMaxK=7, all HLL_8
basicUnion(8, 7, 7, 7, 7, 2, 2, 2);
}
private static void basicUnion(int n1, int n2, int lgK1, int lgK2,
int lgMaxK, int t1, int t2, int rt) {
long v = 0;
int tot = n1 + n2;
TgtHllType type1 = TgtHllType.values()[t1];
String t1str = type1.toString();
TgtHllType type2 = TgtHllType.values()[t2];
String t2str = type2.toString();
TgtHllType resultType = TgtHllType.values()[rt];
//String rtStr = resultType.toString();
HllSketch h1 = new HllSketch(lgK1, type1);
HllSketch h2 = new HllSketch(lgK2, type2);
int lgControlK = min(min(lgK1, lgK2), lgMaxK); //min of all 3
HllSketch control = new HllSketch(lgControlK, resultType);
String dataFmt = "%6d%6d%6d," + "%7d%5d%5d%5d," + "%6s%6s," + "%6s%6s%6s,"
+"%2s%2s%2s," + "%12f%12f%%";
for (long i = 0; i < n1; i++) {
h1.update(v + i);
control.update(v + i);
}
v += n1;
for (long i = 0; i < n2; i++) {
h2.update(v + i);
control.update(v + i);
}
v += n2;
String h1SketchStr = ("H1 SKETCH: \n" + h1.toString());
String h2SketchStr = ("H2 SKETCH: \n" + h2.toString());
Union union = newUnion(lgMaxK);
union.update(h1);
String uH1SketchStr = ("Union after H1: \n" + union.getResult(resultType).toString());
//println(uH1SketchStr);
union.update(h2);
HllSketch result = union.getResult(resultType);
int lgKR = result.getLgConfigK();
String uSketchStr =("Union after H2: \n" + result.toString());
double uEst = result.getEstimate();
double uUb = result.getUpperBound(2);
double uLb = result.getLowerBound(2);
double rerr = ((uEst/tot) - 1.0) * 100;
String mode1 = h1.getCurrentMode().toString();
String mode2 = h2.getCurrentMode().toString();
String modeR = result.getCurrentMode().toString();
//Control
String cSketchStr = ("CONTROL SKETCH: \n" + control.toString());
double controlEst = control.getEstimate();
double controlUb = control.getUpperBound(2);
double controlLb = control.getLowerBound(2);
String h1ooo = h1.isOutOfOrderFlag() ? "T" : "F";
String h2ooo = h2.isOutOfOrderFlag() ? "T" : "F";
String resultooo = result.isOutOfOrderFlag() ? "T" : "F";
String row = String.format(dataFmt,
n1, n2, tot,
lgMaxK, lgK1, lgK2, lgKR,
t1str, t2str,
mode1, mode2, modeR,
h1ooo, h2ooo, resultooo,
uEst, rerr);
println(h1SketchStr);
println(h2SketchStr);
println(uH1SketchStr);
println(uSketchStr);
println(cSketchStr);
println(hdr);
println(row);
assertTrue((controlUb - controlEst) <= (uUb - uEst));
assertTrue((controlEst - controlLb) <= (uEst - uLb));
}
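// Editor's note: minimal illustrative sketch, not part of the original test. It shows the
// Union usage pattern that basicUnion() exercises: two sketches over disjoint value ranges
// are merged through a Union and read back as a single estimate. The lgK and n values here
// are arbitrary examples.
private static void minimalUnionExample() {
  HllSketch a = new HllSketch(10);
  HllSketch b = new HllSketch(10);
  for (long i = 0; i < 1000; i++) { a.update(i); }
  for (long i = 1000; i < 2000; i++) { b.update(i); }
  Union u = newUnion(10);
  u.update(a);
  u.update(b);
  println("Merged estimate (expected near 2000): " + u.getResult().getEstimate());
}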
@Test
public void checkToFromUnion1() {
for (int i = 0; i < 10; i++) {
int n = nArr[i];
for (int lgK = 4; lgK <= 13; lgK++) {
toFrom1(lgK, HLL_4, n);
toFrom1(lgK, HLL_6, n);
toFrom1(lgK, HLL_8, n);
}
println("=======");
}
}
private static void toFrom1(int lgK, TgtHllType tgtHllType, int n) {
Union srcU = newUnion(lgK);
HllSketch srcSk = new HllSketch(lgK, tgtHllType);
for (int i = 0; i < n; i++) {
srcSk.update(i);
}
println("n: " + n + ", lgK: " + lgK + ", type: " + tgtHllType);
//printSketch(src, "SRC");
srcU.update(srcSk);
byte[] byteArr = srcU.toCompactByteArray();
Memory mem = Memory.wrap(byteArr);
Union dstU = Union.heapify(mem);
assertFalse(dstU.isSameResource(mem));
assertEquals(dstU.getEstimate(), srcU.getEstimate(), 0.0);
}
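// Editor's note: illustrative helper, not part of the original test. toFrom1() above
// round-trips a Union through its compact image: serialize to bytes, wrap the bytes as
// Memory, then heapify an independent on-heap copy whose estimate must match the source.
// The same round trip can be written as a small helper:
private static Union roundTripViaCompactImage(Union src) {
  byte[] image = src.toCompactByteArray();
  return Union.heapify(Memory.wrap(image));
}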
@Test
public void checkToFromUnion2() {
for (int i = 0; i < 10; i++) {
int n = nArr[i];
for (int lgK = 4; lgK <= 13; lgK++) {
toFrom2(lgK, HLL_4, n);
toFrom2(lgK, HLL_6, n);
toFrom2(lgK, HLL_8, n);
}
println("=======");
}
}
private static void toFrom2(int lgK, TgtHllType tgtHllType, int n) {
Union srcU = newUnion(lgK);
HllSketch srcSk = new HllSketch(lgK, tgtHllType);
for (int i = 0; i < n; i++) {
srcSk.update(i);
}
println("n: " + n + ", lgK: " + lgK + ", type: " + tgtHllType);
//printSketch(src, "SRC");
srcU.update(srcSk);
byte[] byteArr = srcU.toCompactByteArray();
Union dstU = Union.heapify(byteArr);
assertEquals(dstU.getEstimate(), srcU.getEstimate(), 0.0);
}
@Test
public void checkCompositeEst() {
Union u = newUnion(12);
assertEquals(u.getCompositeEstimate(), 0, .03);
for (int i = 1; i <= 15; i++) { u.update(i); }
assertEquals(u.getCompositeEstimate(), 15, 15 *.03);
for (int i = 15; i <= 1000; i++) { u.update(i); }
assertEquals(u.getCompositeEstimate(), 1000, 1000 * .03);
}
@Test
public void checkMisc() {
try {
Union u = newUnion(HllUtil.MIN_LOG_K - 1);
fail();
} catch (SketchesArgumentException e) {
//expected
}
try {
Union u = newUnion(HllUtil.MAX_LOG_K + 1);
fail();
} catch (SketchesArgumentException e) {
//expected
}
Union u = newUnion(7);
HllSketch sk = u.getResult();
assertTrue(sk.isEmpty());
}
@Test
public void checkHeapify() {
Union u = newUnion(16);
for (int i = 0; i < (1 << 20); i++) {
u.update(i);
}
double est1 = u.getEstimate();
byte[] byteArray = u.toUpdatableByteArray();
Union u2 = Union.heapify(byteArray);
assertEquals(u2.getEstimate(), est1, 0.0);
}
@Test //for lgK <= 12
public void checkUbLb() {
int lgK = 4;
int n = 1 << 20;
boolean oooFlag = false;
println("LgK="+lgK+", UB3, " + ((getBound(lgK, true, oooFlag, 3, n) / n) - 1));
println("LgK="+lgK+", UB2, " + ((getBound(lgK, true, oooFlag, 2, n) / n) - 1));
println("LgK="+lgK+", UB1, " + ((getBound(lgK, true, oooFlag, 1, n) / n) - 1));
println("LgK="+lgK+", LB1, " + ((getBound(lgK, false, oooFlag, 1, n) / n) - 1));
println("LgK="+lgK+", LB2, " + ((getBound(lgK, false, oooFlag, 2, n) / n) - 1));
println("LgK="+lgK+", LB3, " + ((getBound(lgK, false, oooFlag, 3, n) / n) - 1));
}
@Test
public void checkEmptyCouponMisc() {
int lgK = 8;
Union union = newUnion(lgK);
for (int i = 0; i < 20; i++) { union.update(i); } //SET mode
union.couponUpdate(0);
assertEquals(union.getEstimate(), 20.0, 0.001);
assertEquals(union.getTgtHllType(), TgtHllType.HLL_8);
assertFalse(union.isMemory());
assertFalse(union.isOffHeap());
int bytes = union.getUpdatableSerializationBytes();
assertTrue(bytes <= Union.getMaxSerializationBytes(lgK));
assertFalse(union.isCompact());
}
@Test
public void checkUnionWithWrap() {
int lgConfigK = 4;
TgtHllType type = TgtHllType.HLL_4;
int n = 2;
HllSketch sk = new HllSketch(lgConfigK, type);
for (int i = 0; i < n; i++) { sk.update(i); }
double est = sk.getEstimate();
byte[] skByteArr = sk.toCompactByteArray();
HllSketch sk2 = HllSketch.wrap(Memory.wrap(skByteArr));
assertEquals(sk2.getEstimate(), est, 0.0);
Union union = newUnion(lgConfigK);
union.update(HllSketch.wrap(Memory.wrap(skByteArr)));
assertEquals(union.getEstimate(), est, 0.0);
}
@Test
public void checkUnionWithWrap2() {
int lgConfigK = 10;
int n = 128;
HllSketch sk1 = new HllSketch(lgConfigK);
for (int i = 0; i < n; i++) { sk1.update(i); }
double est1 = sk1.getEstimate();
byte[] byteArr1 = sk1.toCompactByteArray();
Union union = newUnion(lgConfigK);
union.update(HllSketch.wrap(Memory.wrap(byteArr1)));
double est2 = union.getEstimate();
assertEquals(est2, est1);
}
@Test
public void checkConversions() {
int lgK = 4;
HllSketch sk1 = new HllSketch(lgK, TgtHllType.HLL_8);
HllSketch sk2 = new HllSketch(lgK, TgtHllType.HLL_8);
int u = 1 << 20;
for (int i = 0; i < u; i++) {
sk1.update(i);
sk2.update(i + u);
}
Union union = new Union(lgK);
union.update(sk1);
union.update(sk2);
HllSketch rsk1 = union.getResult(TgtHllType.HLL_8);
HllSketch rsk2 = union.getResult(TgtHllType.HLL_6);
HllSketch rsk3 = union.getResult(TgtHllType.HLL_4);
double est1 = rsk1.getEstimate();
double est2 = rsk2.getEstimate();
double est3 = rsk3.getEstimate();
//println("Est1: " + est1);
//println("Est2: " + est2);
//println("Est3: " + est3);
//println("Result HLL8: " + rsk1.toString(true, true, true, false));
//println("Result HLL4: " + rsk3.toString(true, true, true, false));
assertEquals(est2, est1, 0.0);
assertEquals(est3, est1, 0.0);
}
private static Union newUnion(int lgK) {
return new Union(lgK);
}
private static double getBound(int lgK, boolean ub, boolean oooFlag, int numStdDev, double est) {
double re = RelativeErrorTables.getRelErr(ub, oooFlag, lgK, numStdDev);
return est / (1.0 + re);
}
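// Editor's note: worked example, not part of the original test. getBound() above inverts a
// relative error re from RelativeErrorTables: bound = est / (1.0 + re). For est = 1 << 20
// (1,048,576), re = +0.05 gives about 998,644 and re = -0.05 gives about 1,103,764. The sign
// convention of re is an assumption based on how the table is used in checkUbLb().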
@Test
public void printlnTest() {
println("PRINTING: "+this.getClass().getName());
}
/**
* @param s value to print
*/
static void println(String s) {
print(s + LS);
}
/**
* @param s value to print
*/
static void print(String s) {
System.out.print(s); //disable here
}
}
|
src/test/java/com/yahoo/sketches/hll/UnionTest.java
|
/*
* Copyright 2017, Yahoo! Inc. Licensed under the terms of the
* Apache License 2.0. See LICENSE file at the project root for terms.
*/
package com.yahoo.sketches.hll;
import static com.yahoo.sketches.hll.TgtHllType.HLL_4;
import static com.yahoo.sketches.hll.TgtHllType.HLL_6;
import static com.yahoo.sketches.hll.TgtHllType.HLL_8;
import static java.lang.Math.min;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
import org.testng.annotations.Test;
import com.yahoo.memory.Memory;
import com.yahoo.sketches.SketchesArgumentException;
/**
* @author Lee Rhodes
*/
@SuppressWarnings("unused")
public class UnionTest {
static final String LS = System.getProperty("line.separator");
static final int[] nArr = new int[] {1, 3, 10, 30, 100, 300, 1000, 3000, 10000, 30000};
@Test
public void checkUnions() {
//HLL_4=0, HLL_6=1, HLL_8=2
// n1,... lgK,... tgtHll, Mode Ooo Est
String hdrFmt = "%6s%6s%6s" + "%7s%5s%5s%5s" + "%6s%6s" + "%6s%6s%6s" +"%2s%1s%1s"+ "%12s%12s";
String hdr = String.format(hdrFmt,
"n1", "n2", "tot",
"lgMaxK", "lgK1", "lgK2", "lgKR",
"tgt1", "tgt2",
"Mode1", "Mode2", "ModeR",
"1", "2", "R",
"Est", "Err%");
int t1 = 2;
int t2 = 2;
int rt = 2; //result type
println("TgtR: " + TgtHllType.values()[rt].toString());
println(hdr);
int lgK1 = 7;
int lgK2 = 7;
int lgMaxK = 7;
int n1 = 7;
int n2 = 7;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
n1 = 8;
n2 = 7;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
n1 = 7;
n2 = 8;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
n1 = 8;
n2 = 8;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
n1 = 7;
n2 = 14;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
println("++");
int i = 0;
for (i = 7; i <= 13; i++)
{
lgK1 = i;
lgK2 = i;
lgMaxK = i;
{
n1 = ((1 << (i - 3)) * 3)/4;
n2 = n1;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
n1 += 2;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
n1 -= 2;
n2 += 2;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
n1 += 2;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
}
println("-");
lgK1 = i;
lgK2 = i+1;
lgMaxK = i;
{
n1 = ((1 << (i - 3)) * 3)/4;
n2 = n1;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
n1 += 2;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
n1 -= 2;
n2 += 2;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
n1 += 2;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
}
println("-");
lgK1 = i + 1;
lgK2 = i;
lgMaxK = i;
{
n1 = ((1 << (i - 3)) * 3)/4;
n2 = n1;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
n1 += 2;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
n1 -= 2;
n2 += 2;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
n1 += 2;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
}
println("-");
lgK1 = i + 1;
lgK2 = i + 1;
lgMaxK = i;
{
n1 = ((1 << (i - 3)) * 3)/4;
n2 = n1;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
n1 += 2;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
n1 -= 2;
n2 += 2;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
n1 += 2;
basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
}
println("++");
}
}
@Test
public void check() { //n1=8, n2=7, lgK1=lgK2=lgMaxK=7, all HLL_8
basicUnion(8, 7, 7, 7, 7, 2, 2, 2);
}
private static void basicUnion(int n1, int n2, int lgK1, int lgK2,
int lgMaxK, int t1, int t2, int rt) {
long v = 0;
int tot = n1 + n2;
TgtHllType type1 = TgtHllType.values()[t1];
String t1str = type1.toString();
TgtHllType type2 = TgtHllType.values()[t2];
String t2str = type2.toString();
TgtHllType resultType = TgtHllType.values()[rt];
//String rtStr = resultType.toString();
HllSketch h1 = new HllSketch(lgK1, type1);
HllSketch h2 = new HllSketch(lgK2, type2);
int lgControlK = min(min(lgK1, lgK2), lgMaxK); //min of all 3
HllSketch control = new HllSketch(lgControlK, resultType);
String fmt = "%6d%6d%6d," + "%7d%5d%5d%5d," + "%6s%6s," + "%6s%6s%6s,"
+"%2s%2s%2s," + "%12f%12f%%";
for (long i = 0; i < n1; i++) {
h1.update(v + i);
control.update(v + i);
}
v += n1;
for (long i = 0; i < n2; i++) {
h2.update(v + i);
control.update(v + i);
}
v += n2;
String h1SketchStr = ("H1 SKETCH: \n" + h1.toString());
String h2SketchStr = ("H2 SKETCH: \n" + h2.toString());
Union union = newUnion(lgMaxK);
union.update(h1);
String uH1SketchStr = ("Union after H1: \n" + union.getResult(resultType).toString());
//println(uH1SketchStr);
union.update(h2);
HllSketch result = union.getResult(resultType);
int lgKR = result.getLgConfigK();
String uSketchStr =("Union after H2: \n" + result.toString());
double uEst = result.getEstimate();
double uUb = result.getUpperBound(2);
double uLb = result.getLowerBound(2);
double rerr = ((uEst/tot) - 1.0) * 100;
String mode1 = h1.getCurrentMode().toString();
String mode2 = h2.getCurrentMode().toString();
String modeR = result.getCurrentMode().toString();
//Control
String cSketchStr = ("CONTROL SKETCH: \n" + control.toString());
double controlEst = control.getEstimate();
double controlUb = control.getUpperBound(2);
double controlLb = control.getLowerBound(2);
String h1ooo = h1.isOutOfOrderFlag() ? "T" : "F";
String h2ooo = h2.isOutOfOrderFlag() ? "T" : "F";
String resultooo = result.isOutOfOrderFlag() ? "T" : "F";
String row = String.format(fmt,
n1, n2, tot,
lgMaxK, lgK1, lgK2, lgKR,
t1str, t2str,
mode1, mode2, modeR,
h1ooo, h2ooo, resultooo,
uEst, rerr);
println(row);
println(h1SketchStr);
println(h2SketchStr);
println(uH1SketchStr);
println(uSketchStr);
println(cSketchStr);
assertTrue((controlUb - controlEst) <= (uUb - uEst));
assertTrue((controlEst - controlLb) <= (uEst - uLb));
}
@Test
public void checkToFromUnion1() {
for (int i = 0; i < 10; i++) {
int n = nArr[i];
for (int lgK = 4; lgK <= 13; lgK++) {
toFrom1(lgK, HLL_4, n);
toFrom1(lgK, HLL_6, n);
toFrom1(lgK, HLL_8, n);
}
println("=======");
}
}
private static void toFrom1(int lgK, TgtHllType tgtHllType, int n) {
Union srcU = newUnion(lgK);
HllSketch srcSk = new HllSketch(lgK, tgtHllType);
for (int i = 0; i < n; i++) {
srcSk.update(i);
}
println("n: " + n + ", lgK: " + lgK + ", type: " + tgtHllType);
//printSketch(src, "SRC");
srcU.update(srcSk);
byte[] byteArr = srcU.toCompactByteArray();
Memory mem = Memory.wrap(byteArr);
Union dstU = Union.heapify(mem);
assertFalse(dstU.isSameResource(mem));
assertEquals(dstU.getEstimate(), srcU.getEstimate(), 0.0);
}
@Test
public void checkToFromUnion2() {
for (int i = 0; i < 10; i++) {
int n = nArr[i];
for (int lgK = 4; lgK <= 13; lgK++) {
toFrom2(lgK, HLL_4, n);
toFrom2(lgK, HLL_6, n);
toFrom2(lgK, HLL_8, n);
}
println("=======");
}
}
private static void toFrom2(int lgK, TgtHllType tgtHllType, int n) {
Union srcU = newUnion(lgK);
HllSketch srcSk = new HllSketch(lgK, tgtHllType);
for (int i = 0; i < n; i++) {
srcSk.update(i);
}
println("n: " + n + ", lgK: " + lgK + ", type: " + tgtHllType);
//printSketch(src, "SRC");
srcU.update(srcSk);
byte[] byteArr = srcU.toCompactByteArray();
Union dstU = Union.heapify(byteArr);
assertEquals(dstU.getEstimate(), srcU.getEstimate(), 0.0);
}
@Test
public void checkCompositeEst() {
Union u = newUnion(12);
assertEquals(u.getCompositeEstimate(), 0, .03);
for (int i = 1; i <= 15; i++) { u.update(i); }
assertEquals(u.getCompositeEstimate(), 15, 15 *.03);
for (int i = 15; i <= 1000; i++) { u.update(i); }
assertEquals(u.getCompositeEstimate(), 1000, 1000 * .03);
}
@Test
public void checkMisc() {
try {
Union u = newUnion(HllUtil.MIN_LOG_K - 1);
fail();
} catch (SketchesArgumentException e) {
//expected
}
try {
Union u = newUnion(HllUtil.MAX_LOG_K + 1);
fail();
} catch (SketchesArgumentException e) {
//expected
}
Union u = newUnion(7);
HllSketch sk = u.getResult();
assertTrue(sk.isEmpty());
}
@Test
public void checkHeapify() {
Union u = newUnion(16);
for (int i = 0; i < (1 << 20); i++) {
u.update(i);
}
double est1 = u.getEstimate();
byte[] byteArray = u.toUpdatableByteArray();
Union u2 = Union.heapify(byteArray);
assertEquals(u2.getEstimate(), est1, 0.0);
}
@Test //for lgK <= 12
public void checkUbLb() {
int lgK = 4;
int n = 1 << 20;
boolean oooFlag = false;
println("LgK="+lgK+", UB3, " + ((getBound(lgK, true, oooFlag, 3, n) / n) - 1));
println("LgK="+lgK+", UB2, " + ((getBound(lgK, true, oooFlag, 2, n) / n) - 1));
println("LgK="+lgK+", UB1, " + ((getBound(lgK, true, oooFlag, 1, n) / n) - 1));
println("LgK="+lgK+", LB1, " + ((getBound(lgK, false, oooFlag, 1, n) / n) - 1));
println("LgK="+lgK+", LB2, " + ((getBound(lgK, false, oooFlag, 2, n) / n) - 1));
println("LgK="+lgK+", LB3, " + ((getBound(lgK, false, oooFlag, 3, n) / n) - 1));
}
@Test
public void checkEmptyCouponMisc() {
int lgK = 8;
Union union = newUnion(lgK);
for (int i = 0; i < 20; i++) { union.update(i); } //SET mode
union.couponUpdate(0);
assertEquals(union.getEstimate(), 20.0, 0.001);
assertEquals(union.getTgtHllType(), TgtHllType.HLL_8);
assertFalse(union.isMemory());
assertFalse(union.isOffHeap());
int bytes = union.getUpdatableSerializationBytes();
assertTrue(bytes <= Union.getMaxSerializationBytes(lgK));
assertFalse(union.isCompact());
}
@Test
public void checkUnionWithWrap() {
int lgConfigK = 4;
TgtHllType type = TgtHllType.HLL_4;
int n = 2;
HllSketch sk = new HllSketch(lgConfigK, type);
for (int i = 0; i < n; i++) { sk.update(i); }
double est = sk.getEstimate();
byte[] skByteArr = sk.toCompactByteArray();
HllSketch sk2 = HllSketch.wrap(Memory.wrap(skByteArr));
assertEquals(sk2.getEstimate(), est, 0.0);
Union union = newUnion(lgConfigK);
union.update(HllSketch.wrap(Memory.wrap(skByteArr)));
assertEquals(union.getEstimate(), est, 0.0);
}
@Test
public void checkUnionWithWrap2() {
int lgConfigK = 10;
int n = 128;
HllSketch sk1 = new HllSketch(lgConfigK);
for (int i = 0; i < n; i++) { sk1.update(i); }
double est1 = sk1.getEstimate();
byte[] byteArr1 = sk1.toCompactByteArray();
Union union = newUnion(lgConfigK);
union.update(HllSketch.wrap(Memory.wrap(byteArr1)));
double est2 = union.getEstimate();
assertEquals(est2, est1);
}
@Test
public void checkConversions() {
int lgK = 4;
HllSketch sk1 = new HllSketch(lgK, TgtHllType.HLL_8);
HllSketch sk2 = new HllSketch(lgK, TgtHllType.HLL_8);
int u = 1 << 20;
for (int i = 0; i < u; i++) {
sk1.update(i);
sk2.update(i + u);
}
Union union = new Union(lgK);
union.update(sk1);
union.update(sk2);
HllSketch rsk1 = union.getResult(TgtHllType.HLL_8);
HllSketch rsk2 = union.getResult(TgtHllType.HLL_6);
HllSketch rsk3 = union.getResult(TgtHllType.HLL_4);
double est1 = rsk1.getEstimate();
double est2 = rsk2.getEstimate();
double est3 = rsk3.getEstimate();
//println("Est1: " + est1);
//println("Est2: " + est2);
//println("Est3: " + est3);
//println("Result HLL8: " + rsk1.toString(true, true, true, false));
//println("Result HLL4: " + rsk3.toString(true, true, true, false));
assertEquals(est2, est1, 0.0);
assertEquals(est3, est1, 0.0);
}
private static Union newUnion(int lgK) {
return new Union(lgK);
}
private static double getBound(int lgK, boolean ub, boolean oooFlag, int numStdDev, double est) {
double re = RelativeErrorTables.getRelErr(ub, oooFlag, lgK, numStdDev);
return est / (1.0 + re);
}
@Test
public void printlnTest() {
println("PRINTING: "+this.getClass().getName());
}
/**
* @param s value to print
*/
static void println(String s) {
print(s + LS);
}
/**
* @param s value to print
*/
static void print(String s) {
//System.out.print(s); //disable here
}
}
|
tweek UnionTest
|
src/test/java/com/yahoo/sketches/hll/UnionTest.java
|
tweek UnionTest
|
<ide><path>rc/test/java/com/yahoo/sketches/hll/UnionTest.java
<ide>
<ide> static final int[] nArr = new int[] {1, 3, 10, 30, 100, 300, 1000, 3000, 10000, 30000};
<ide>
<add> // n1,... lgK,... tgtHll, Mode Ooo Est
<add> static final String hdrFmt =
<add> "%6s%6s%6s" + "%8s%5s%5s%5s" + "%7s%6s" + "%7s%6s%6s" +"%3s%2s%2s"+ "%13s%12s";
<add> static final String hdr = String.format(hdrFmt,
<add> "n1", "n2", "tot",
<add> "lgMaxK", "lgK1", "lgK2", "lgKR",
<add> "tgt1", "tgt2",
<add> "Mode1", "Mode2", "ModeR",
<add> "1", "2", "R",
<add> "Est", "Err%");
<add>
<add> /**
<add> * The task here is to check the transition boundaries as the sketch morphs from LIST to
<add> * SET to HLL modes. The transition points vary as a function of lgConfigK. In addition,
<add> * this checks that the union operation is operating properly based on the order the
<add> * sketches are presented to the union.
<add> */
<ide> @Test
<ide> public void checkUnions() {
<del> //HLL_4=0, HLL_6=1, HLL_8=2
<del> // n1,... lgK,... tgtHll, Mode Ooo Est
<del> String hdrFmt = "%6s%6s%6s" + "%7s%5s%5s%5s" + "%6s%6s" + "%6s%6s%6s" +"%2s%1s%1s"+ "%12s%12s";
<del> String hdr = String.format(hdrFmt,
<del> "n1", "n2", "tot",
<del> "lgMaxK", "lgK1", "lgK2", "lgKR",
<del> "tgt1", "tgt2",
<del> "Mode1", "Mode2", "ModeR",
<del> "1", "2", "R",
<del> "Est", "Err%");
<del>
<del> int t1 = 2;
<add>
<add> //HLL_4: t=0, HLL_6: t=1, HLL_8: t=2
<add> int t1 = 2; //type = HLL_8
<ide> int t2 = 2;
<ide> int rt = 2; //result type
<ide> println("TgtR: " + TgtHllType.values()[rt].toString());
<del> println(hdr);
<ide>
<ide> int lgK1 = 7;
<ide> int lgK2 = 7;
<ide> n1 = 7;
<ide> n2 = 14;
<ide> basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
<del> println("++");
<add> println("++END BASE GROUP++");
<ide>
<ide> int i = 0;
<ide> for (i = 7; i <= 13; i++)
<ide> lgK2 = i;
<ide> lgMaxK = i;
<ide> {
<del> n1 = ((1 << (i - 3)) * 3)/4;
<add> n1 = ((1 << (i - 3)) * 3)/4; //compute the transition point
<ide> n2 = n1;
<ide> basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
<ide> n1 += 2;
<ide> n1 += 2;
<ide> basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
<ide> }
<del> println("-");
<add> println("--END MINOR GROUP--");
<ide> lgK1 = i;
<del> lgK2 = i+1;
<add> lgK2 = i + 1;
<ide> lgMaxK = i;
<ide> {
<ide> n1 = ((1 << (i - 3)) * 3)/4;
<ide> n1 += 2;
<ide> basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
<ide> }
<del> println("-");
<add> println("--END MINOR GROUP--");
<ide> lgK1 = i + 1;
<ide> lgK2 = i;
<ide> lgMaxK = i;
<ide> n1 += 2;
<ide> basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
<ide> }
<del> println("-");
<add> println("--END MINOR GROUP--");
<ide> lgK1 = i + 1;
<ide> lgK2 = i + 1;
<ide> lgMaxK = i;
<ide> n1 += 2;
<ide> basicUnion(n1, n2, lgK1, lgK2, lgMaxK, t1, t2, rt);
<ide> }
<del> println("++");
<del> }
<add> println("++END MAJOR GROUP++");
<add> } //End for
<ide> }
<ide>
<ide> @Test
<ide> HllSketch h2 = new HllSketch(lgK2, type2);
<ide> int lgControlK = min(min(lgK1, lgK2), lgMaxK); //min of all 3
<ide> HllSketch control = new HllSketch(lgControlK, resultType);
<del> String fmt = "%6d%6d%6d," + "%7d%5d%5d%5d," + "%6s%6s," + "%6s%6s%6s,"
<add> String dataFmt = "%6d%6d%6d," + "%7d%5d%5d%5d," + "%6s%6s," + "%6s%6s%6s,"
<ide> +"%2s%2s%2s," + "%12f%12f%%";
<ide>
<ide> for (long i = 0; i < n1; i++) {
<ide> String h1ooo = h1.isOutOfOrderFlag() ? "T" : "F";
<ide> String h2ooo = h2.isOutOfOrderFlag() ? "T" : "F";
<ide> String resultooo = result.isOutOfOrderFlag() ? "T" : "F";
<del> String row = String.format(fmt,
<add> String row = String.format(dataFmt,
<ide> n1, n2, tot,
<ide> lgMaxK, lgK1, lgK2, lgKR,
<ide> t1str, t2str,
<ide> mode1, mode2, modeR,
<ide> h1ooo, h2ooo, resultooo,
<ide> uEst, rerr);
<del> println(row);
<ide> println(h1SketchStr);
<ide> println(h2SketchStr);
<ide> println(uH1SketchStr);
<ide> println(uSketchStr);
<ide> println(cSketchStr);
<del>
<add> println(hdr);
<add> println(row);
<ide> assertTrue((controlUb - controlEst) <= (uUb - uEst));
<ide> assertTrue((controlEst - controlLb) <= (uEst - uLb));
<ide> }
<ide> * @param s value to print
<ide> */
<ide> static void print(String s) {
<del> //System.out.print(s); //disable here
<add> System.out.print(s); //disable here
<ide> }
<ide>
<ide> }
|
|
Java
|
apache-2.0
|
52a919c4007f18c54bf628f64c7aaafbaa1d6dbd
| 0 |
openmash/mashmesh,openmash/mashmesh
|
package com.sheepdog.mashmesh.models;
import com.googlecode.objectify.Objectify;
import com.googlecode.objectify.ObjectifyFactory;
import com.googlecode.objectify.ObjectifyService;
public class OfyService {
static {
factory().register(UserProfile.class);
factory().register(VolunteerProfile.class);
factory().register(RideRecord.class);
}
public static Objectify ofy() {
return ObjectifyService.begin();
}
public static ObjectifyFactory factory() {
return ObjectifyService.factory();
}
}
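// Editor's note: illustrative sketch, not part of the original file. The static block above
// must register every entity kind before it is used; this commit adds RideRecord because
// Objectify rejects saves and loads of unregistered classes. A hypothetical caller (assuming
// the Objectify 3.x API implied by ObjectifyService.begin(), and a RideRecord entity whose
// constructor and fields are not shown here) would look like:
//
//   RideRecord record = new RideRecord();   // hypothetical entity construction
//   OfyService.ofy().put(record);           // would fail if RideRecord.class were unregistered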
|
src/main/java/com/sheepdog/mashmesh/models/OfyService.java
|
package com.sheepdog.mashmesh.models;
import com.googlecode.objectify.Objectify;
import com.googlecode.objectify.ObjectifyFactory;
import com.googlecode.objectify.ObjectifyService;
public class OfyService {
static {
factory().register(UserProfile.class);
factory().register(VolunteerProfile.class);
}
public static Objectify ofy() {
return ObjectifyService.begin();
}
public static ObjectifyFactory factory() {
return ObjectifyService.factory();
}
}
|
Fix datastore access: RideRecord was unregistered
|
src/main/java/com/sheepdog/mashmesh/models/OfyService.java
|
Fix datastore access: RideRecord was unregistered
|
<ide><path>rc/main/java/com/sheepdog/mashmesh/models/OfyService.java
<ide> static {
<ide> factory().register(UserProfile.class);
<ide> factory().register(VolunteerProfile.class);
<add> factory().register(RideRecord.class);
<ide> }
<ide>
<ide> public static Objectify ofy() {
|
|
Java
|
bsd-2-clause
|
cfee3b79e963e97517ba174d51c5da2bfae7c4b9
| 0 |
JordanMartinez/RichTextFX,FXMisc/RichTextFX,TomasMikula/RichTextFX,TomasMikula/RichTextFX,FXMisc/RichTextFX,cemartins/RichTextFX,JFormDesigner/RichTextFX,JFormDesigner/RichTextFX,afester/RichTextFX,afester/RichTextFX,JordanMartinez/RichTextFX
|
package org.fxmisc.richtext;
import static org.fxmisc.richtext.PopupAlignment.*;
import static org.fxmisc.richtext.TwoDimensional.Bias.*;
import static org.reactfx.EventStreams.invalidationsOf;
import static org.reactfx.EventStreams.merge;
import static org.reactfx.EventStreams.valuesOf;
import static org.reactfx.util.Tuples.*;
import java.time.Duration;
import java.util.Optional;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.function.IntFunction;
import java.util.function.IntSupplier;
import java.util.function.IntUnaryOperator;
import java.util.function.UnaryOperator;
import java.util.stream.Stream;
import javafx.beans.binding.Binding;
import javafx.beans.binding.Bindings;
import javafx.beans.binding.BooleanBinding;
import javafx.beans.binding.ObjectBinding;
import javafx.beans.property.BooleanProperty;
import javafx.beans.property.ObjectProperty;
import javafx.beans.property.SimpleBooleanProperty;
import javafx.beans.property.SimpleObjectProperty;
import javafx.beans.value.ObservableBooleanValue;
import javafx.beans.value.ObservableValue;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.collections.ObservableSet;
import javafx.css.StyleableObjectProperty;
import javafx.event.Event;
import javafx.geometry.BoundingBox;
import javafx.geometry.Bounds;
import javafx.geometry.Insets;
import javafx.geometry.Point2D;
import javafx.scene.Node;
import javafx.scene.control.IndexRange;
import javafx.scene.layout.Background;
import javafx.scene.layout.BackgroundFill;
import javafx.scene.layout.CornerRadii;
import javafx.scene.layout.Region;
import javafx.scene.paint.Color;
import javafx.scene.paint.Paint;
import javafx.scene.text.Font;
import javafx.scene.text.Text;
import javafx.scene.text.TextFlow;
import javafx.stage.PopupWindow;
import org.fxmisc.flowless.Cell;
import org.fxmisc.flowless.VirtualFlow;
import org.fxmisc.flowless.VirtualFlowHit;
import org.fxmisc.flowless.Virtualized;
import org.fxmisc.flowless.VirtualizedScrollPane;
import org.fxmisc.richtext.CssProperties.EditableProperty;
import org.fxmisc.richtext.CssProperties.FontProperty;
import org.fxmisc.undo.UndoManager;
import org.fxmisc.undo.UndoManagerFactory;
import org.reactfx.EventStream;
import org.reactfx.EventStreams;
import org.reactfx.Guard;
import org.reactfx.Suspendable;
import org.reactfx.SuspendableEventStream;
import org.reactfx.SuspendableNo;
import org.reactfx.collection.LiveList;
import org.reactfx.collection.SuspendableList;
import org.reactfx.util.Tuple2;
import org.reactfx.value.SuspendableVal;
import org.reactfx.value.SuspendableVar;
import org.reactfx.value.Val;
import org.reactfx.value.Var;
/**
* Text editing control. Accepts user input (keyboard, mouse) and
* provides API to assign style to text ranges. It is suitable for
* syntax highlighting and rich-text editors.
*
* <p>Subclassing is allowed to define the type of style, e.g. inline
* style or style classes.</p>
*
* <p>Note: Scroll bars no longer appear when the content spans outside
* of the viewport. To add scroll bars, the area needs to be embedded in
* a {@link VirtualizedScrollPane}. {@link AreaFactory} is provided to make
* this more convenient.</p>
*
* <h3>Overriding keyboard shortcuts</h3>
*
* {@code StyledTextArea} comes with {@link #onKeyTypedProperty()} and
* {@link #onKeyPressedProperty()} handlers installed to handle keyboard input.
* Ordinary character input is handled by the {@code onKeyTyped} handler and
* control key combinations (including Enter and Tab) are handled by the
* {@code onKeyPressed} handler. To add or override some keyboard shortcuts,
* but keep the rest in place, you would combine the default event handler with
* a new one that adds or overrides some of the default key combinations. This
* is how to bind {@code Ctrl+S} to the {@code save()} operation:
* <pre>
* {@code
* import static javafx.scene.input.KeyCode.*;
* import static javafx.scene.input.KeyCombination.*;
* import static org.fxmisc.wellbehaved.event.EventPattern.*;
*
* import org.fxmisc.wellbehaved.event.EventHandlerHelper;
*
* EventHandler<? super KeyEvent> ctrlS = EventHandlerHelper
* .on(keyPressed(S, CONTROL_DOWN)).act(event -> save())
* .create();
*
* EventHandlerHelper.install(area.onKeyPressedProperty(), ctrlS);
* }
* </pre>
*
* @param <S> type of style that can be applied to text.
* @param <PS> type of style that can be applied to paragraphs.
*/
public class StyledTextArea<S, PS> extends Region
implements
TextEditingArea<S, PS>,
EditActions<S, PS>,
ClipboardActions<S, PS>,
NavigationActions<S, PS>,
UndoActions<S>,
TwoDimensional,
Virtualized {
/**
* Index range [0, 0).
*/
public static final IndexRange EMPTY_RANGE = new IndexRange(0, 0);
/**
* Private helper method.
*/
private static int clamp(int min, int val, int max) {
return val < min ? min
: val > max ? max
: val;
}
/* ********************************************************************** *
* *
* Properties *
* *
* Properties affect behavior and/or appearance of this control. *
* *
* They are readable and writable by the client code and never change by *
* other means, i.e. they contain either the default value or the value *
* set by the client code. *
* *
* ********************************************************************** */
/**
* Background fill for highlighted text.
*/
private final StyleableObjectProperty<Paint> highlightFill
= new CssProperties.HighlightFillProperty(this, Color.DODGERBLUE);
/**
* Text color for highlighted text.
*/
private final StyleableObjectProperty<Paint> highlightTextFill
= new CssProperties.HighlightTextFillProperty(this, Color.WHITE);
// editable property
private final BooleanProperty editable = new EditableProperty<>(this);
@Override public final boolean isEditable() { return editable.get(); }
@Override public final void setEditable(boolean value) { editable.set(value); }
@Override public final BooleanProperty editableProperty() { return editable; }
// wrapText property
private final BooleanProperty wrapText = new SimpleBooleanProperty(this, "wrapText");
@Override public final boolean isWrapText() { return wrapText.get(); }
@Override public final void setWrapText(boolean value) { wrapText.set(value); }
@Override public final BooleanProperty wrapTextProperty() { return wrapText; }
// undo manager
private UndoManager undoManager;
@Override
public UndoManager getUndoManager() { return undoManager; }
@Override
public void setUndoManager(UndoManagerFactory undoManagerFactory) {
undoManager.close();
undoManager = preserveStyle
? createRichUndoManager(undoManagerFactory)
: createPlainUndoManager(undoManagerFactory);
}
// font property
/**
* The default font to use where font is not specified otherwise.
*/
private final StyleableObjectProperty<Font> font = new FontProperty<>(this);
public final StyleableObjectProperty<Font> fontProperty() { return font; }
public final void setFont(Font value) { font.setValue(value); }
public final Font getFont() { return font.getValue(); }
/**
* Popup window that will be positioned by this text area relative to the
* caret or selection. Use {@link #popupAlignmentProperty()} to specify
* how the popup should be positioned relative to the caret or selection.
* Use {@link #popupAnchorOffsetProperty()} or
* {@link #popupAnchorAdjustmentProperty()} to further adjust the position.
*/
private final ObjectProperty<PopupWindow> popupWindow = new SimpleObjectProperty<>();
public void setPopupWindow(PopupWindow popup) { popupWindow.set(popup); }
public PopupWindow getPopupWindow() { return popupWindow.get(); }
public ObjectProperty<PopupWindow> popupWindowProperty() { return popupWindow; }
/** @deprecated Use {@link #setPopupWindow(PopupWindow)}. */
@Deprecated
public void setPopupAtCaret(PopupWindow popup) { popupWindow.set(popup); }
/** @deprecated Use {@link #getPopupWindow()}. */
@Deprecated
public PopupWindow getPopupAtCaret() { return popupWindow.get(); }
/** @deprecated Use {@link #popupWindowProperty()}. */
@Deprecated
public ObjectProperty<PopupWindow> popupAtCaretProperty() { return popupWindow; }
/**
* Specifies further offset (in pixels) of the popup window from the
* position specified by {@link #popupAlignmentProperty()}.
*
* <p>If {@link #popupAnchorAdjustmentProperty()} is also specified, then
* it overrides the offset set by this property.
*/
private final ObjectProperty<Point2D> popupAnchorOffset = new SimpleObjectProperty<>();
public void setPopupAnchorOffset(Point2D offset) { popupAnchorOffset.set(offset); }
public Point2D getPopupAnchorOffset() { return popupAnchorOffset.get(); }
public ObjectProperty<Point2D> popupAnchorOffsetProperty() { return popupAnchorOffset; }
/**
* Specifies how to adjust the popup window's anchor point. The given
* operator is invoked with the screen position calculated according to
* {@link #popupAlignmentProperty()} and should return a new screen
* position. This position will be used as the popup window's anchor point.
*
* <p>Setting this property overrides {@link #popupAnchorOffsetProperty()}.
*/
private final ObjectProperty<UnaryOperator<Point2D>> popupAnchorAdjustment = new SimpleObjectProperty<>();
public void setPopupAnchorAdjustment(UnaryOperator<Point2D> f) { popupAnchorAdjustment.set(f); }
public UnaryOperator<Point2D> getPopupAnchorAdjustment() { return popupAnchorAdjustment.get(); }
public ObjectProperty<UnaryOperator<Point2D>> popupAnchorAdjustmentProperty() { return popupAnchorAdjustment; }
/**
* Defines where the popup window given in {@link #popupWindowProperty()}
* is anchored, i.e. where its anchor point is positioned. This position
* can further be adjusted by {@link #popupAnchorOffsetProperty()} or
* {@link #popupAnchorAdjustmentProperty()}.
*/
private final ObjectProperty<PopupAlignment> popupAlignment = new SimpleObjectProperty<>(CARET_TOP);
public void setPopupAlignment(PopupAlignment pos) { popupAlignment.set(pos); }
public PopupAlignment getPopupAlignment() { return popupAlignment.get(); }
public ObjectProperty<PopupAlignment> popupAlignmentProperty() { return popupAlignment; }
/**
* Defines how long the mouse has to stay still over the text before a
* {@link MouseOverTextEvent} of type {@code MOUSE_OVER_TEXT_BEGIN} is
* fired on this text area. When set to {@code null}, no
* {@code MouseOverTextEvent}s are fired on this text area.
*
* <p>Default value is {@code null}.
*/
private final ObjectProperty<Duration> mouseOverTextDelay = new SimpleObjectProperty<>(null);
public void setMouseOverTextDelay(Duration delay) { mouseOverTextDelay.set(delay); }
public Duration getMouseOverTextDelay() { return mouseOverTextDelay.get(); }
public ObjectProperty<Duration> mouseOverTextDelayProperty() { return mouseOverTextDelay; }
private final ObjectProperty<IntFunction<? extends Node>> paragraphGraphicFactory = new SimpleObjectProperty<>(null);
public void setParagraphGraphicFactory(IntFunction<? extends Node> factory) { paragraphGraphicFactory.set(factory); }
public IntFunction<? extends Node> getParagraphGraphicFactory() { return paragraphGraphicFactory.get(); }
public ObjectProperty<IntFunction<? extends Node>> paragraphGraphicFactoryProperty() { return paragraphGraphicFactory; }
/**
* Indicates whether the initial style should also be used for plain text
* inserted into this text area. When {@code false}, the style immediately
* preceding the insertion position is used. Default value is {@code false}.
*/
public BooleanProperty useInitialStyleForInsertionProperty() { return content.useInitialStyleForInsertion; }
public void setUseInitialStyleForInsertion(boolean value) { content.useInitialStyleForInsertion.set(value); }
public boolean getUseInitialStyleForInsertion() { return content.useInitialStyleForInsertion.get(); }
private Optional<Tuple2<Codec<S>, Codec<PS>>> styleCodecs = Optional.empty();
/**
* Sets codecs to encode/decode style information to/from binary format.
* Providing codecs enables clipboard actions to retain the style information.
*/
public void setStyleCodecs(Codec<S> textStyleCodec, Codec<PS> paragraphStyleCodec) {
styleCodecs = Optional.of(t(textStyleCodec, paragraphStyleCodec));
}
@Override
public Optional<Tuple2<Codec<S>, Codec<PS>>> getStyleCodecs() {
return styleCodecs;
}
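// Editor's note: illustrative sketch, not part of the original source. A caller that wants
// clipboard actions to retain styling would install codecs for both the text style S and the
// paragraph style PS, for example (assuming String styles and a STRING_CODEC constant on the
// Codec helper, which is an assumption about this codebase):
//
//   area.setStyleCodecs(Codec.STRING_CODEC, Codec.STRING_CODEC);
//
// Without codecs the style information is not carried through clipboard operations.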
/**
* The <em>estimated</em> scrollX value. This can be set in order to scroll the content.
* Value is only accurate when area does not wrap lines and uses the same font size
* throughout the entire area.
*/
public Var<Double> estimatedScrollXProperty() { return virtualFlow.estimatedScrollXProperty(); }
public double getEstimatedScrollX() { return virtualFlow.estimatedScrollXProperty().getValue(); }
public void setEstimatedScrollX(double value) { virtualFlow.estimatedScrollXProperty().setValue(value); }
/**
* The <em>estimated</em> scrollY value. This can be set in order to scroll the content.
* Value is only accurate when area does not wrap lines and uses the same font size
* throughout the entire area.
*/
public Var<Double> estimatedScrollYProperty() { return virtualFlow.estimatedScrollYProperty(); }
public double getEstimatedScrollY() { return virtualFlow.estimatedScrollYProperty().getValue(); }
public void setEstimatedScrollY(double value) { virtualFlow.estimatedScrollYProperty().setValue(value); }
/* ********************************************************************** *
* *
* Observables *
* *
* Observables are "dynamic" (i.e. changing) characteristics of this *
* control. They are not directly settable by the client code, but change *
* in response to user input and/or API actions. *
* *
* ********************************************************************** */
// text
private final SuspendableVal<String> text;
@Override public final String getText() { return text.getValue(); }
@Override public final ObservableValue<String> textProperty() { return text; }
// rich text
@Override public final StyledDocument<S, PS> getDocument() { return content.snapshot(); }
// length
private final SuspendableVal<Integer> length;
@Override public final int getLength() { return length.getValue(); }
@Override public final ObservableValue<Integer> lengthProperty() { return length; }
// caret position
private final Var<Integer> internalCaretPosition = Var.newSimpleVar(0);
private final SuspendableVal<Integer> caretPosition = internalCaretPosition.suspendable();
@Override public final int getCaretPosition() { return caretPosition.getValue(); }
@Override public final ObservableValue<Integer> caretPositionProperty() { return caretPosition; }
// selection anchor
private final SuspendableVar<Integer> anchor = Var.newSimpleVar(0).suspendable();
@Override public final int getAnchor() { return anchor.getValue(); }
@Override public final ObservableValue<Integer> anchorProperty() { return anchor; }
// selection
private final Var<IndexRange> internalSelection = Var.newSimpleVar(EMPTY_RANGE);
private final SuspendableVal<IndexRange> selection = internalSelection.suspendable();
@Override public final IndexRange getSelection() { return selection.getValue(); }
@Override public final ObservableValue<IndexRange> selectionProperty() { return selection; }
// selected text
private final SuspendableVal<String> selectedText;
@Override public final String getSelectedText() { return selectedText.getValue(); }
@Override public final ObservableValue<String> selectedTextProperty() { return selectedText; }
// current paragraph index
private final SuspendableVal<Integer> currentParagraph;
@Override public final int getCurrentParagraph() { return currentParagraph.getValue(); }
@Override public final ObservableValue<Integer> currentParagraphProperty() { return currentParagraph; }
// caret column
private final SuspendableVal<Integer> caretColumn;
@Override public final int getCaretColumn() { return caretColumn.getValue(); }
@Override public final ObservableValue<Integer> caretColumnProperty() { return caretColumn; }
// paragraphs
private final SuspendableList<Paragraph<S, PS>> paragraphs;
@Override public ObservableList<Paragraph<S, PS>> getParagraphs() {
return paragraphs;
}
// beingUpdated
private final SuspendableNo beingUpdated = new SuspendableNo();
public ObservableBooleanValue beingUpdatedProperty() { return beingUpdated; }
public boolean isBeingUpdated() { return beingUpdated.get(); }
// total width estimate
/**
* The <em>estimated</em> width of the entire document. Accurate when area does not wrap lines and
* uses the same font size throughout the entire area. Value is only supposed to be <em>set</em> by
* the skin, not the user.
*/
public Val<Double> totalWidthEstimateProperty() { return virtualFlow.totalWidthEstimateProperty(); }
public double getTotalWidthEstimate() { return virtualFlow.totalWidthEstimateProperty().getValue(); }
// total height estimate
/**
* The <em>estimated</em> height of the entire document. Accurate when area does not wrap lines and
* uses the same font size throughout the entire area. Value is only supposed to be <em>set</em> by
* the skin, not the user.
*/
public Val<Double> totalHeightEstimateProperty() { return virtualFlow.totalHeightEstimateProperty(); }
public double getTotalHeightEstimate() { return virtualFlow.totalHeightEstimateProperty().getValue(); }
/* ********************************************************************** *
* *
* Event streams *
* *
* ********************************************************************** */
// text changes
private final SuspendableEventStream<PlainTextChange> plainTextChanges;
@Override
public final EventStream<PlainTextChange> plainTextChanges() { return plainTextChanges; }
// rich text changes
private final SuspendableEventStream<RichTextChange<S, PS>> richTextChanges;
@Override
public final EventStream<RichTextChange<S, PS>> richChanges() { return richTextChanges; }
/* ********************************************************************** *
* *
* Private fields *
* *
* ********************************************************************** */
private final Binding<Boolean> caretVisible;
// TODO: this is initialized but never used. Should it be removed?
private final Val<UnaryOperator<Point2D>> _popupAnchorAdjustment;
private final VirtualFlow<Paragraph<S, PS>, Cell<Paragraph<S, PS>, ParagraphBox<S, PS>>> virtualFlow;
private final VirtualizedScrollPane<VirtualFlow> virtualizedScrollPane;
// used for two-level navigation, where on the higher level are
// paragraphs and on the lower level are lines within a paragraph
private final TwoLevelNavigator navigator;
private boolean followCaretRequested = false;
private Position selectionStart2D;
private Position selectionEnd2D;
/**
* content model
*/
private final EditableStyledDocument<S, PS> content;
/**
* Style used by default when no other style is provided.
*/
private final S initialStyle;
/**
* Style used by default when no other style is provided.
*/
private final PS initialParagraphStyle;
/**
* Style applicator used by the default skin.
*/
private final BiConsumer<? super TextExt, S> applyStyle;
/**
* Style applicator used by the default skin.
*/
private final BiConsumer<TextFlow, PS> applyParagraphStyle;
/**
* Indicates whether style should be preserved on undo/redo,
* copy/paste and text move.
* TODO: Currently, only undo/redo respect this flag.
*/
private final boolean preserveStyle;
private final Suspendable omniSuspendable;
/* ********************************************************************** *
* *
* Constructors *
* *
* ********************************************************************** */
/**
* Creates a text area with empty text content.
*
* @param initialStyle style to use in places where no other style is
* specified (yet).
* @param applyStyle function that, given a {@link Text} node and
* a style, applies the style to the text node. This function is
* used by the default skin to apply style to text nodes.
* @param initialParagraphStyle style to use in places where no other style is
* specified (yet).
* @param applyParagraphStyle function that, given a {@link TextFlow} node and
* a style, applies the style to the paragraph node. This function is
* used by the default skin to apply style to paragraph nodes.
*/
public StyledTextArea(S initialStyle, BiConsumer<? super TextExt, S> applyStyle, PS initialParagraphStyle, BiConsumer<TextFlow, PS> applyParagraphStyle) {
this(initialStyle, applyStyle, initialParagraphStyle, applyParagraphStyle, true);
}
public <C> StyledTextArea(S initialStyle, BiConsumer<? super TextExt, S> applyStyle,
PS initialParagraphStyle, BiConsumer<TextFlow, PS> applyParagraphStyle,
boolean preserveStyle) {
this.initialStyle = initialStyle;
this.initialParagraphStyle = initialParagraphStyle;
this.applyStyle = applyStyle;
this.applyParagraphStyle = applyParagraphStyle;
this.preserveStyle = preserveStyle;
content = new EditableStyledDocument<>(initialStyle, initialParagraphStyle);
paragraphs = LiveList.suspendable(content.getParagraphs());
text = Val.suspendable(content.textProperty());
length = Val.suspendable(content.lengthProperty());
plainTextChanges = content.plainTextChanges().pausable();
richTextChanges = content.richChanges().pausable();
undoManager = preserveStyle
? createRichUndoManager(UndoManagerFactory.unlimitedHistoryFactory())
: createPlainUndoManager(UndoManagerFactory.unlimitedHistoryFactory());
Val<Position> caretPosition2D = Val.create(
() -> content.offsetToPosition(internalCaretPosition.getValue(), Forward),
internalCaretPosition, paragraphs);
currentParagraph = caretPosition2D.map(Position::getMajor).suspendable();
caretColumn = caretPosition2D.map(Position::getMinor).suspendable();
selectionStart2D = position(0, 0);
selectionEnd2D = position(0, 0);
internalSelection.addListener(obs -> {
IndexRange sel = internalSelection.getValue();
selectionStart2D = offsetToPosition(sel.getStart(), Forward);
selectionEnd2D = sel.getLength() == 0
? selectionStart2D
: selectionStart2D.offsetBy(sel.getLength(), Backward);
});
selectedText = Val.create(
() -> content.getText(internalSelection.getValue()),
internalSelection, content.getParagraphs()).suspendable();
omniSuspendable = Suspendable.combine(
beingUpdated, // must be first, to be the last one to release
text,
length,
caretPosition,
anchor,
selection,
selectedText,
currentParagraph,
caretColumn,
// add streams after properties, to be released before them
plainTextChanges,
richTextChanges,
// paragraphs to be released first
paragraphs);
this.setBackground(new Background(new BackgroundFill(Color.WHITE, CornerRadii.EMPTY, Insets.EMPTY)));
getStyleClass().add("styled-text-area");
// CONSTRUCT THE SKIN
// keeps track of currently used non-empty cells
@SuppressWarnings("unchecked")
ObservableSet<ParagraphBox<S, PS>> nonEmptyCells = FXCollections.observableSet();
// Initialize content
virtualFlow = VirtualFlow.createVertical(
getParagraphs(),
par -> {
Cell<Paragraph<S, PS>, ParagraphBox<S, PS>> cell = createCell(
par,
applyStyle,
initialParagraphStyle,
applyParagraphStyle);
nonEmptyCells.add(cell.getNode());
return cell.beforeReset(() -> nonEmptyCells.remove(cell.getNode()))
.afterUpdateItem(p -> nonEmptyCells.add(cell.getNode()));
});
virtualizedScrollPane = new VirtualizedScrollPane<>(virtualFlow);
getChildren().add(virtualizedScrollPane);
// initialize navigator
IntSupplier cellCount = () -> getParagraphs().size();
IntUnaryOperator cellLength = i -> virtualFlow.getCell(i).getNode().getLineCount();
navigator = new TwoLevelNavigator(cellCount, cellLength);
// follow the caret every time the caret position or paragraphs change
EventStream<?> caretPosDirty = invalidationsOf(caretPositionProperty());
EventStream<?> paragraphsDirty = invalidationsOf(getParagraphs());
EventStream<?> selectionDirty = invalidationsOf(selectionProperty());
// need to reposition popup even when caret hasn't moved, but selection has changed (been deselected)
EventStream<?> caretDirty = merge(caretPosDirty, paragraphsDirty, selectionDirty);
caretDirty.subscribe(x -> requestFollowCaret());
// whether or not to animate the caret
BooleanBinding blinkCaret = focusedProperty()
.and(editableProperty())
.and(disabledProperty().not());
// The caret is visible in periodic intervals,
// but only when blinkCaret is true.
caretVisible = EventStreams.valuesOf(blinkCaret)
.flatMap(blink -> blink
? booleanPulse(Duration.ofMillis(500))
: valuesOf(Val.constant(false)))
.toBinding(false);
// Adjust popup anchor by either a user-provided function,
// or user-provided offset, or don't adjust at all.
Val<UnaryOperator<Point2D>> userOffset = Val.map(
popupAnchorOffsetProperty(),
offset -> anchor -> anchor.add(offset));
_popupAnchorAdjustment =
Val.orElse(
popupAnchorAdjustmentProperty(),
userOffset)
.orElseConst(UnaryOperator.identity());
// dispatch MouseOverTextEvents when mouseOverTextDelay is not null
EventStreams.valuesOf(mouseOverTextDelayProperty())
.flatMap(delay -> delay != null
? mouseOverTextEvents(nonEmptyCells, delay)
: EventStreams.never())
.subscribe(evt -> Event.fireEvent(this, evt));
new StyledTextAreaBehavior(this, virtualFlow);
getChildren().add(virtualFlow);
}
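// Editor's note: illustrative sketch, not part of the original source. A minimal
// instantiation using plain String styles (the String types are an assumption; any S/PS
// pair works) could be:
//
//   StyledTextArea<String, String> area = new StyledTextArea<>(
//       "", (text, style) -> text.setStyle(style),
//       "", (paragraph, style) -> paragraph.setStyle(style));
//
// where the two BiConsumers apply the style strings as inline CSS to the Text node and the
// paragraph TextFlow, mirroring what the default skin expects of applyStyle and
// applyParagraphStyle.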
/* ********************************************************************** *
* *
* Queries *
* *
* Queries are parameterized observables. *
* *
* ********************************************************************** */
/**
* Returns caret bounds relative to the viewport, i.e. the visual bounds
* of the embedded VirtualFlow.
*/
Optional<Bounds> getCaretBounds() {
return virtualFlow.getCellIfVisible(getCurrentParagraph())
.map(c -> {
Bounds cellBounds = c.getNode().getCaretBounds();
return virtualFlow.cellToViewport(c, cellBounds);
});
}
/**
* Returns x coordinate of the caret in the current paragraph.
*/
ParagraphBox.CaretOffsetX getCaretOffsetX() {
int idx = getCurrentParagraph();
return getCell(idx).getCaretOffsetX();
}
double getViewportHeight() {
return virtualFlow.getHeight();
}
CharacterHit hit(ParagraphBox.CaretOffsetX x, TwoDimensional.Position targetLine) {
int parIdx = targetLine.getMajor();
ParagraphBox<S, PS> cell = virtualFlow.getCell(parIdx).getNode();
CharacterHit parHit = cell.hitTextLine(x, targetLine.getMinor());
return parHit.offset(getParagraphOffset(parIdx));
}
CharacterHit hit(ParagraphBox.CaretOffsetX x, double y) {
VirtualFlowHit<Cell<Paragraph<S, PS>, ParagraphBox<S, PS>>> hit = virtualFlow.hit(0.0, y);
if(hit.isBeforeCells()) {
return CharacterHit.insertionAt(0);
} else if(hit.isAfterCells()) {
return CharacterHit.insertionAt(getLength());
} else {
int parIdx = hit.getCellIndex();
int parOffset = getParagraphOffset(parIdx);
ParagraphBox<S, PS> cell = hit.getCell().getNode();
Point2D cellOffset = hit.getCellOffset();
CharacterHit parHit = cell.hitText(x, cellOffset.getY());
return parHit.offset(parOffset);
}
}
CharacterHit hit(double x, double y) {
VirtualFlowHit<Cell<Paragraph<S, PS>, ParagraphBox<S, PS>>> hit = virtualFlow.hit(x, y);
if(hit.isBeforeCells()) {
return CharacterHit.insertionAt(0);
} else if(hit.isAfterCells()) {
return CharacterHit.insertionAt(getLength());
} else {
int parIdx = hit.getCellIndex();
int parOffset = getParagraphOffset(parIdx);
ParagraphBox<S, PS> cell = hit.getCell().getNode();
Point2D cellOffset = hit.getCellOffset();
CharacterHit parHit = cell.hit(cellOffset);
return parHit.offset(parOffset);
}
}
/**
* Returns the current line as a two-level index.
* The major number is the paragraph index, the minor
* number is the line number within the paragraph.
*
* <p>This method has a side-effect of bringing the current
* paragraph to the viewport if it is not already visible.
*/
TwoDimensional.Position currentLine() {
int parIdx = getCurrentParagraph();
Cell<Paragraph<S, PS>, ParagraphBox<S, PS>> cell = virtualFlow.getCell(parIdx);
int lineIdx = cell.getNode().getCurrentLineIndex();
return _position(parIdx, lineIdx);
}
TwoDimensional.Position _position(int par, int line) {
return navigator.position(par, line);
}
@Override
public final String getText(int start, int end) {
return content.getText(start, end);
}
@Override
public String getText(int paragraph) {
return paragraphs.get(paragraph).toString();
}
public Paragraph<S, PS> getParagraph(int index) {
return paragraphs.get(index);
}
@Override
public StyledDocument<S, PS> subDocument(int start, int end) {
return content.subSequence(start, end);
}
@Override
public StyledDocument<S, PS> subDocument(int paragraphIndex) {
return content.subDocument(paragraphIndex);
}
/**
* Returns the selection range in the given paragraph.
*/
public IndexRange getParagraphSelection(int paragraph) {
int startPar = selectionStart2D.getMajor();
int endPar = selectionEnd2D.getMajor();
if(paragraph < startPar || paragraph > endPar) {
return EMPTY_RANGE;
}
int start = paragraph == startPar ? selectionStart2D.getMinor() : 0;
int end = paragraph == endPar ? selectionEnd2D.getMinor() : paragraphs.get(paragraph).length();
// force selectionProperty() to be valid
getSelection();
return new IndexRange(start, end);
}
/**
* Returns the style of the character with the given index.
* If {@code index} points to a line terminator character,
* the last style used in the paragraph terminated by that
* line terminator is returned.
*/
public S getStyleOfChar(int index) {
return content.getStyleOfChar(index);
}
/**
* Returns the style at the given position. That is the style of the
* character immediately preceding {@code position}, except when
* {@code position} points to a paragraph boundary, in which case it
* is the style at the beginning of the latter paragraph.
*
* <p>In other words, most of the time {@code getStyleAtPosition(p)}
* is equivalent to {@code getStyleOfChar(p-1)}, except when {@code p}
* points to a paragraph boundary, in which case it is equivalent to
* {@code getStyleOfChar(p)}.
*/
public S getStyleAtPosition(int position) {
return content.getStyleAtPosition(position);
}
/**
* Returns the range of homogeneous style that includes the given position.
* If {@code position} points to a boundary between two styled ranges, then
* the range preceding {@code position} is returned. If {@code position}
* points to a boundary between two paragraphs, then the first styled range
* of the latter paragraph is returned.
*/
public IndexRange getStyleRangeAtPosition(int position) {
return content.getStyleRangeAtPosition(position);
}
/**
* Returns the styles in the given character range.
*/
public StyleSpans<S> getStyleSpans(int from, int to) {
return content.getStyleSpans(from, to);
}
/**
* Returns the styles in the given character range.
*/
public StyleSpans<S> getStyleSpans(IndexRange range) {
return getStyleSpans(range.getStart(), range.getEnd());
}
/**
* Returns the style of the character with the given index in the given
* paragraph. If {@code index} is beyond the end of the paragraph, the
* style at the end of line is returned. If {@code index} is negative, it
* is the same as if it was 0.
*/
public S getStyleOfChar(int paragraph, int index) {
return content.getStyleOfChar(paragraph, index);
}
/**
* Returns the style at the given position in the given paragraph.
* This is equivalent to {@code getStyleOfChar(paragraph, position-1)}.
*/
public S getStyleAtPosition(int paragraph, int position) {
return content.getStyleOfChar(paragraph, position);
}
/**
* Returns the range of homogeneous style that includes the given position
* in the given paragraph. If {@code position} points to a boundary between
* two styled ranges, then the range preceding {@code position} is returned.
*/
public IndexRange getStyleRangeAtPosition(int paragraph, int position) {
return content.getStyleRangeAtPosition(paragraph, position);
}
/**
* Returns styles of the whole paragraph.
*/
public StyleSpans<S> getStyleSpans(int paragraph) {
return content.getStyleSpans(paragraph);
}
/**
* Returns the styles in the given character range of the given paragraph.
*/
public StyleSpans<S> getStyleSpans(int paragraph, int from, int to) {
return content.getStyleSpans(paragraph, from, to);
}
/**
* Returns the styles in the given character range of the given paragraph.
*/
public StyleSpans<S> getStyleSpans(int paragraph, IndexRange range) {
return getStyleSpans(paragraph, range.getStart(), range.getEnd());
}
@Override
public Position position(int row, int col) {
return content.position(row, col);
}
@Override
public Position offsetToPosition(int charOffset, Bias bias) {
return content.offsetToPosition(charOffset, bias);
}
/* ********************************************************************** *
* *
* Actions *
* *
* Actions change the state of this control. They typically cause a *
* change of one or more observables and/or produce an event. *
* *
* ********************************************************************** */
void scrollBy(Point2D deltas) {
virtualFlow.scrollXBy(deltas.getX());
virtualFlow.scrollYBy(deltas.getY());
}
void show(double y) {
virtualFlow.show(y);
}
void showCaretAtBottom() {
int parIdx = getCurrentParagraph();
Cell<Paragraph<S, PS>, ParagraphBox<S, PS>> cell = virtualFlow.getCell(parIdx);
Bounds caretBounds = cell.getNode().getCaretBounds();
double y = caretBounds.getMaxY();
virtualFlow.showAtOffset(parIdx, getViewportHeight() - y);
}
void showCaretAtTop() {
int parIdx = getCurrentParagraph();
Cell<Paragraph<S, PS>, ParagraphBox<S, PS>> cell = virtualFlow.getCell(parIdx);
Bounds caretBounds = cell.getNode().getCaretBounds();
double y = caretBounds.getMinY();
virtualFlow.showAtOffset(parIdx, -y);
}
void requestFollowCaret() {
followCaretRequested = true;
requestLayout();
}
private void followCaret() {
int parIdx = getCurrentParagraph();
Cell<Paragraph<S, PS>, ParagraphBox<S, PS>> cell = virtualFlow.getCell(parIdx);
Bounds caretBounds = cell.getNode().getCaretBounds();
double graphicWidth = cell.getNode().getGraphicPrefWidth();
Bounds region = extendLeft(caretBounds, graphicWidth);
virtualFlow.show(parIdx, region);
}
/**
* Sets style for the given character range.
*/
public void setStyle(int from, int to, S style) {
try(Guard g = omniSuspendable.suspend()) {
content.setStyle(from, to, style);
}
}
/**
* Sets style for the whole paragraph.
*/
public void setStyle(int paragraph, S style) {
try(Guard g = omniSuspendable.suspend()) {
content.setStyle(paragraph, style);
}
}
/**
* Sets style for the given range relative in the given paragraph.
*/
public void setStyle(int paragraph, int from, int to, S style) {
try(Guard g = omniSuspendable.suspend()) {
content.setStyle(paragraph, from, to, style);
}
}
/**
* Set multiple style ranges at once. This is equivalent to
* <pre>
* for(StyleSpan{@code <S>} span: styleSpans) {
* setStyle(from, from + span.getLength(), span.getStyle());
* from += span.getLength();
* }
* </pre>
* but the actual implementation is more efficient.
*/
public void setStyleSpans(int from, StyleSpans<? extends S> styleSpans) {
try(Guard g = omniSuspendable.suspend()) {
content.setStyleSpans(from, styleSpans);
}
}
/**
* Set multiple style ranges of a paragraph at once. This is equivalent to
* <pre>
* for(StyleSpan{@code <S>} span: styleSpans) {
* setStyle(paragraph, from, from + span.getLength(), span.getStyle());
* from += span.getLength();
* }
* </pre>
* but the actual implementation is more efficient.
*/
public void setStyleSpans(int paragraph, int from, StyleSpans<? extends S> styleSpans) {
try(Guard g = omniSuspendable.suspend()) {
content.setStyleSpans(paragraph, from, styleSpans);
}
}
/**
* Sets style for the whole paragraph.
*/
public void setParagraphStyle(int paragraph, PS paragraphStyle) {
try(Guard g = omniSuspendable.suspend()) {
content.setParagraphStyle(paragraph, paragraphStyle);
}
}
/**
* Resets the style of the given range to the initial style.
*/
public void clearStyle(int from, int to) {
setStyle(from, to, initialStyle);
}
/**
* Resets the style of the given paragraph to the initial style.
*/
public void clearStyle(int paragraph) {
setStyle(paragraph, initialStyle);
}
/**
* Resets the style of the given range in the given paragraph
* to the initial style.
*/
public void clearStyle(int paragraph, int from, int to) {
setStyle(paragraph, from, to, initialStyle);
}
/**
* Resets the style of the given paragraph to the initial style.
*/
public void clearParagraphStyle(int paragraph) {
setParagraphStyle(paragraph, initialParagraphStyle);
}
@Override
public void replaceText(int start, int end, String text) {
StyledDocument<S, PS> doc = ReadOnlyStyledDocument.fromString(
text, content.getStyleForInsertionAt(start), content.getParagraphStyleForInsertionAt(start));
replace(start, end, doc);
}
@Override
public void replace(int start, int end, StyledDocument<S, PS> replacement) {
try(Guard g = omniSuspendable.suspend()) {
start = clamp(0, start, getLength());
end = clamp(0, end, getLength());
content.replace(start, end, replacement);
int newCaretPos = start + replacement.length();
selectRange(newCaretPos, newCaretPos);
}
}
@Override
public void selectRange(int anchor, int caretPosition) {
try(Guard g = suspend(
this.caretPosition, currentParagraph,
caretColumn, this.anchor,
selection, selectedText)) {
this.internalCaretPosition.setValue(clamp(0, caretPosition, getLength()));
this.anchor.setValue(clamp(0, anchor, getLength()));
this.internalSelection.setValue(IndexRange.normalize(getAnchor(), getCaretPosition()));
}
}
@Override
public void positionCaret(int pos) {
try(Guard g = suspend(caretPosition, currentParagraph, caretColumn)) {
internalCaretPosition.setValue(pos);
}
}
/* ********************************************************************** *
* *
* Layout *
* *
* ********************************************************************** */
@Override
protected void layoutChildren() {
virtualizedScrollPane.resize(getWidth(), getHeight());
if(followCaretRequested) {
followCaretRequested = false;
followCaret();
}
// position popup
PopupWindow popup = getPopupWindow();
PopupAlignment alignment = getPopupAlignment();
UnaryOperator<Point2D> adjustment = _popupAnchorAdjustment.getValue();
if(popup != null) {
positionPopup(popup, alignment, adjustment);
}
}
/* ********************************************************************** *
* *
* Private methods *
* *
* ********************************************************************** */
private Cell<Paragraph<S, PS>, ParagraphBox<S, PS>> createCell(
Paragraph<S, PS> paragraph,
BiConsumer<? super TextExt, S> applyStyle,
PS initialParagraphStyle,
BiConsumer<TextFlow, PS> applyParagraphStyle) {
ParagraphBox<S, PS> box = new ParagraphBox<>(paragraph, applyStyle, applyParagraphStyle);
box.highlightFillProperty().bind(highlightFill);
box.highlightTextFillProperty().bind(highlightTextFill);
box.wrapTextProperty().bind(wrapTextProperty());
box.graphicFactoryProperty().bind(paragraphGraphicFactoryProperty());
box.graphicOffset.bind(virtualFlow.breadthOffsetProperty());
Val<Boolean> hasCaret = Val.combine(
box.indexProperty(),
currentParagraphProperty(),
(bi, cp) -> bi.intValue() == cp.intValue());
// caret is visible only in the paragraph with the caret
Val<Boolean> cellCaretVisible = Val.combine(hasCaret, caretVisible, (a, b) -> a && b);
box.caretVisibleProperty().bind(cellCaretVisible);
// bind cell's caret position to area's caret column,
// when the cell is the one with the caret
box.caretPositionProperty().bind(hasCaret.flatMap(has -> has
? caretColumnProperty()
: Val.constant(0)));
// keep paragraph selection updated
ObjectBinding<IndexRange> cellSelection = Bindings.createObjectBinding(() -> {
int idx = box.getIndex();
return idx != -1
? getParagraphSelection(idx)
: StyledTextArea.EMPTY_RANGE;
}, selectionProperty(), box.indexProperty());
box.selectionProperty().bind(cellSelection);
return new Cell<Paragraph<S, PS>, ParagraphBox<S, PS>>() {
@Override
public ParagraphBox<S, PS> getNode() {
return box;
}
@Override
public void updateIndex(int index) {
box.setIndex(index);
}
@Override
public void dispose() {
box.highlightFillProperty().unbind();
box.highlightTextFillProperty().unbind();
box.wrapTextProperty().unbind();
box.graphicFactoryProperty().unbind();
box.graphicOffset.unbind();
box.caretVisibleProperty().unbind();
box.caretPositionProperty().unbind();
box.selectionProperty().unbind();
cellSelection.dispose();
}
};
}
private ParagraphBox<S, PS> getCell(int index) {
return virtualFlow.getCell(index).getNode();
}
private EventStream<MouseOverTextEvent> mouseOverTextEvents(ObservableSet<ParagraphBox<S, PS>> cells, Duration delay) {
return merge(cells, c -> c.stationaryIndices(delay).map(e -> e.unify(
l -> l.map((pos, charIdx) -> MouseOverTextEvent.beginAt(c.localToScreen(pos), getParagraphOffset(c.getIndex()) + charIdx)),
r -> MouseOverTextEvent.end())));
}
private int getParagraphOffset(int parIdx) {
return position(parIdx, 0).toOffset();
}
private void positionPopup(
PopupWindow popup,
PopupAlignment alignment,
UnaryOperator<Point2D> adjustment) {
Optional<Bounds> bounds = Optional.empty();
switch(alignment.getAnchorObject()) {
case CARET: bounds = getCaretBoundsOnScreen(); break;
case SELECTION: bounds = getSelectionBoundsOnScreen(); break;
}
bounds.ifPresent(b -> {
double x = 0, y = 0;
switch(alignment.getHorizontalAlignment()) {
case LEFT: x = b.getMinX(); break;
case H_CENTER: x = (b.getMinX() + b.getMaxX()) / 2; break;
case RIGHT: x = b.getMaxX(); break;
}
switch(alignment.getVerticalAlignment()) {
case TOP: y = b.getMinY(); break;
case V_CENTER: y = (b.getMinY() + b.getMaxY()) / 2; break;
case BOTTOM: y = b.getMaxY(); break;
}
Point2D anchor = adjustment.apply(new Point2D(x, y));
popup.setAnchorX(anchor.getX());
popup.setAnchorY(anchor.getY());
});
}
private Optional<Bounds> getCaretBoundsOnScreen() {
return virtualFlow.getCellIfVisible(getCurrentParagraph())
.map(c -> c.getNode().getCaretBoundsOnScreen());
}
private Optional<Bounds> getSelectionBoundsOnScreen() {
IndexRange selection = getSelection();
if(selection.getLength() == 0) {
return getCaretBoundsOnScreen();
}
Bounds[] bounds = virtualFlow.visibleCells().stream()
.map(c -> c.getNode().getSelectionBoundsOnScreen())
.filter(Optional::isPresent)
.map(Optional::get)
.toArray(Bounds[]::new);
if(bounds.length == 0) {
return Optional.empty();
}
double minX = Stream.of(bounds).mapToDouble(Bounds::getMinX).min().getAsDouble();
double maxX = Stream.of(bounds).mapToDouble(Bounds::getMaxX).max().getAsDouble();
double minY = Stream.of(bounds).mapToDouble(Bounds::getMinY).min().getAsDouble();
double maxY = Stream.of(bounds).mapToDouble(Bounds::getMaxY).max().getAsDouble();
return Optional.of(new BoundingBox(minX, minY, maxX-minX, maxY-minY));
}
private static Bounds extendLeft(Bounds b, double w) {
if(w == 0) {
return b;
} else {
return new BoundingBox(
b.getMinX() - w, b.getMinY(),
b.getWidth() + w, b.getHeight());
}
}
private static EventStream<Boolean> booleanPulse(Duration duration) {
return EventStreams.ticks(duration).accumulate(true, (b, x) -> !b);
}
private UndoManager createPlainUndoManager(UndoManagerFactory factory) {
Consumer<PlainTextChange> apply = change -> replaceText(change.getPosition(), change.getPosition() + change.getRemoved().length(), change.getInserted());
BiFunction<PlainTextChange, PlainTextChange, Optional<PlainTextChange>> merge = (change1, change2) -> change1.mergeWith(change2);
return factory.create(plainTextChanges(), PlainTextChange::invert, apply, merge);
}
private UndoManager createRichUndoManager(UndoManagerFactory factory) {
Consumer<RichTextChange<S, PS>> apply = change -> replace(change.getPosition(), change.getPosition() + change.getRemoved().length(), change.getInserted());
BiFunction<RichTextChange<S, PS>, RichTextChange<S, PS>, Optional<RichTextChange<S, PS>>> merge = (change1, change2) -> change1.mergeWith(change2);
return factory.create(richChanges(), RichTextChange::invert, apply, merge);
}
private Guard suspend(Suspendable... suspendables) {
return Suspendable.combine(beingUpdated, Suspendable.combine(suspendables)).suspend();
}
}
|
richtextfx/src/main/java/org/fxmisc/richtext/StyledTextArea.java
|
package org.fxmisc.richtext;
import static org.fxmisc.richtext.PopupAlignment.*;
import static org.fxmisc.richtext.TwoDimensional.Bias.*;
import static org.reactfx.EventStreams.invalidationsOf;
import static org.reactfx.EventStreams.merge;
import static org.reactfx.EventStreams.valuesOf;
import static org.reactfx.util.Tuples.*;
import java.time.Duration;
import java.util.Optional;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.function.IntFunction;
import java.util.function.IntSupplier;
import java.util.function.IntUnaryOperator;
import java.util.function.UnaryOperator;
import java.util.stream.Stream;
import javafx.beans.binding.Binding;
import javafx.beans.binding.Bindings;
import javafx.beans.binding.BooleanBinding;
import javafx.beans.binding.ObjectBinding;
import javafx.beans.property.BooleanProperty;
import javafx.beans.property.ObjectProperty;
import javafx.beans.property.SimpleBooleanProperty;
import javafx.beans.property.SimpleObjectProperty;
import javafx.beans.value.ObservableBooleanValue;
import javafx.beans.value.ObservableValue;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.collections.ObservableSet;
import javafx.css.StyleableObjectProperty;
import javafx.event.Event;
import javafx.geometry.BoundingBox;
import javafx.geometry.Bounds;
import javafx.geometry.Insets;
import javafx.geometry.Point2D;
import javafx.scene.Node;
import javafx.scene.control.IndexRange;
import javafx.scene.layout.Background;
import javafx.scene.layout.BackgroundFill;
import javafx.scene.layout.CornerRadii;
import javafx.scene.layout.Region;
import javafx.scene.paint.Color;
import javafx.scene.paint.Paint;
import javafx.scene.text.Font;
import javafx.scene.text.Text;
import javafx.scene.text.TextFlow;
import javafx.stage.PopupWindow;
import org.fxmisc.flowless.Cell;
import org.fxmisc.flowless.VirtualFlow;
import org.fxmisc.flowless.VirtualFlowHit;
import org.fxmisc.flowless.Virtualized;
import org.fxmisc.flowless.VirtualizedScrollPane;
import org.fxmisc.richtext.CssProperties.EditableProperty;
import org.fxmisc.richtext.CssProperties.FontProperty;
import org.fxmisc.undo.UndoManager;
import org.fxmisc.undo.UndoManagerFactory;
import org.reactfx.EventStream;
import org.reactfx.EventStreams;
import org.reactfx.Guard;
import org.reactfx.Subscription;
import org.reactfx.Suspendable;
import org.reactfx.SuspendableEventStream;
import org.reactfx.SuspendableNo;
import org.reactfx.collection.LiveList;
import org.reactfx.collection.SuspendableList;
import org.reactfx.util.Tuple2;
import org.reactfx.value.SuspendableVal;
import org.reactfx.value.SuspendableVar;
import org.reactfx.value.Val;
import org.reactfx.value.Var;
/**
* Text editing control. Accepts user input (keyboard, mouse) and
* provides API to assign style to text ranges. It is suitable for
* syntax highlighting and rich-text editors.
*
* <p>Subclassing is allowed to define the type of style, e.g. inline
* style or style classes.</p>
*
* <p>Note: Scroll bars no longer appear when the content spans outside
* of the viewport. To add scroll bars, the area needs to be embedded in
* a {@link VirtualizedScrollPane}. {@link AreaFactory} is provided to make
* this more convenient.</p>
*
* <h3>Overriding keyboard shortcuts</h3>
*
* {@code StyledTextArea} comes with {@link #onKeyTypedProperty()} and
* {@link #onKeyPressedProperty()} handlers installed to handle keyboard input.
* Ordinary character input is handled by the {@code onKeyTyped} handler and
* control key combinations (including Enter and Tab) are handled by the
* {@code onKeyPressed} handler. To add or override some keyboard shortcuts,
* but keep the rest in place, you would combine the default event handler with
* a new one that adds or overrides some of the default key combinations. This
* is how to bind {@code Ctrl+S} to the {@code save()} operation:
* <pre>
* {@code
* import static javafx.scene.input.KeyCode.*;
* import static javafx.scene.input.KeyCombination.*;
* import static org.fxmisc.wellbehaved.event.EventPattern.*;
*
* import org.fxmisc.wellbehaved.event.EventHandlerHelper;
*
* EventHandler<? super KeyEvent> ctrlS = EventHandlerHelper
* .on(keyPressed(S, CONTROL_DOWN)).act(event -> save())
* .create();
*
* EventHandlerHelper.install(area.onKeyPressedProperty(), ctrlS);
* }
* </pre>
*
* @param <S> type of style that can be applied to text.
* @param <PS> type of style that can be applied to paragraphs.
*/
public class StyledTextArea<S, PS> extends Region
implements
TextEditingArea<S, PS>,
EditActions<S, PS>,
ClipboardActions<S, PS>,
NavigationActions<S, PS>,
UndoActions<S>,
TwoDimensional,
Virtualized {
/**
* Index range [0, 0).
*/
public static final IndexRange EMPTY_RANGE = new IndexRange(0, 0);
/**
* Private helper method.
*/
private static int clamp(int min, int val, int max) {
return val < min ? min
: val > max ? max
: val;
}
/* ********************************************************************** *
* *
* Properties *
* *
* Properties affect behavior and/or appearance of this control. *
* *
* They are readable and writable by the client code and never change by *
* other means, i.e. they contain either the default value or the value *
* set by the client code. *
* *
* ********************************************************************** */
/**
* Background fill for highlighted text.
*/
private final StyleableObjectProperty<Paint> highlightFill
= new CssProperties.HighlightFillProperty(this, Color.DODGERBLUE);
/**
* Text color for highlighted text.
*/
private final StyleableObjectProperty<Paint> highlightTextFill
= new CssProperties.HighlightTextFillProperty(this, Color.WHITE);
// editable property
private final BooleanProperty editable = new EditableProperty<>(this);
@Override public final boolean isEditable() { return editable.get(); }
@Override public final void setEditable(boolean value) { editable.set(value); }
@Override public final BooleanProperty editableProperty() { return editable; }
// wrapText property
private final BooleanProperty wrapText = new SimpleBooleanProperty(this, "wrapText");
@Override public final boolean isWrapText() { return wrapText.get(); }
@Override public final void setWrapText(boolean value) { wrapText.set(value); }
@Override public final BooleanProperty wrapTextProperty() { return wrapText; }
// undo manager
private UndoManager undoManager;
@Override
public UndoManager getUndoManager() { return undoManager; }
@Override
public void setUndoManager(UndoManagerFactory undoManagerFactory) {
undoManager.close();
undoManager = preserveStyle
? createRichUndoManager(undoManagerFactory)
: createPlainUndoManager(undoManagerFactory);
}
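/*
 * Illustrative usage (hypothetical client code): swapping in a fresh undo
 * manager. UndoManagerFactory.unlimitedHistoryFactory() is the same factory
 * the constructor uses by default; the `area` variable and the undo() call on
 * org.fxmisc.undo.UndoManager are assumptions of this sketch.
 *
 *     area.setUndoManager(UndoManagerFactory.unlimitedHistoryFactory());
 *     // ... after some edits, revert the most recent change:
 *     area.getUndoManager().undo();
 */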
// font property
/**
* The default font to use where font is not specified otherwise.
*/
private final StyleableObjectProperty<Font> font = new FontProperty<>(this);
public final StyleableObjectProperty<Font> fontProperty() { return font; }
public final void setFont(Font value) { font.setValue(value); }
public final Font getFont() { return font.getValue(); }
/**
* Popup window that will be positioned by this text area relative to the
* caret or selection. Use {@link #popupAlignmentProperty()} to specify
* how the popup should be positioned relative to the caret or selection.
* Use {@link #popupAnchorOffsetProperty()} or
* {@link #popupAnchorAdjustmentProperty()} to further adjust the position.
*/
private final ObjectProperty<PopupWindow> popupWindow = new SimpleObjectProperty<>();
public void setPopupWindow(PopupWindow popup) { popupWindow.set(popup); }
public PopupWindow getPopupWindow() { return popupWindow.get(); }
public ObjectProperty<PopupWindow> popupWindowProperty() { return popupWindow; }
/** @deprecated Use {@link #setPopupWindow(PopupWindow)}. */
@Deprecated
public void setPopupAtCaret(PopupWindow popup) { popupWindow.set(popup); }
/** @deprecated Use {@link #getPopupWindow()}. */
@Deprecated
public PopupWindow getPopupAtCaret() { return popupWindow.get(); }
/** @deprecated Use {@link #popupWindowProperty()}. */
@Deprecated
public ObjectProperty<PopupWindow> popupAtCaretProperty() { return popupWindow; }
/**
* Specifies further offset (in pixels) of the popup window from the
* position specified by {@link #popupAlignmentProperty()}.
*
* <p>If {@link #popupAnchorAdjustmentProperty()} is also specified, then
* it overrides the offset set by this property.
*/
private final ObjectProperty<Point2D> popupAnchorOffset = new SimpleObjectProperty<>();
public void setPopupAnchorOffset(Point2D offset) { popupAnchorOffset.set(offset); }
public Point2D getPopupAnchorOffset() { return popupAnchorOffset.get(); }
public ObjectProperty<Point2D> popupAnchorOffsetProperty() { return popupAnchorOffset; }
/**
* Specifies how to adjust the popup window's anchor point. The given
* operator is invoked with the screen position calculated according to
* {@link #popupAlignmentProperty()} and should return a new screen
* position. This position will be used as the popup window's anchor point.
*
* <p>Setting this property overrides {@link #popupAnchorOffsetProperty()}.
*/
private final ObjectProperty<UnaryOperator<Point2D>> popupAnchorAdjustment = new SimpleObjectProperty<>();
public void setPopupAnchorAdjustment(UnaryOperator<Point2D> f) { popupAnchorAdjustment.set(f); }
public UnaryOperator<Point2D> getPopupAnchorAdjustment() { return popupAnchorAdjustment.get(); }
public ObjectProperty<UnaryOperator<Point2D>> popupAnchorAdjustmentProperty() { return popupAnchorAdjustment; }
/**
* Defines where the popup window given in {@link #popupWindowProperty()}
* is anchored, i.e. where its anchor point is positioned. This position
* can further be adjusted by {@link #popupAnchorOffsetProperty()} or
* {@link #popupAnchorAdjustmentProperty()}.
*/
private final ObjectProperty<PopupAlignment> popupAlignment = new SimpleObjectProperty<>(CARET_TOP);
public void setPopupAlignment(PopupAlignment pos) { popupAlignment.set(pos); }
public PopupAlignment getPopupAlignment() { return popupAlignment.get(); }
public ObjectProperty<PopupAlignment> popupAlignmentProperty() { return popupAlignment; }
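/*
 * Illustrative usage (hypothetical client code): anchoring a popup to the
 * caret. The popup content, the offset value and the `area` variable are
 * assumptions of this sketch; the setters used are the ones declared above.
 *
 *     Popup popup = new Popup();
 *     popup.getContent().add(new Label("completion hint"));
 *     area.setPopupWindow(popup);
 *     area.setPopupAlignment(PopupAlignment.CARET_TOP);
 *     area.setPopupAnchorOffset(new Point2D(4, 0));
 *     popup.show(area.getScene().getWindow());
 */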
/**
* Defines how long the mouse has to stay still over the text before a
* {@link MouseOverTextEvent} of type {@code MOUSE_OVER_TEXT_BEGIN} is
* fired on this text area. When set to {@code null}, no
* {@code MouseOverTextEvent}s are fired on this text area.
*
* <p>Default value is {@code null}.
*/
private final ObjectProperty<Duration> mouseOverTextDelay = new SimpleObjectProperty<>(null);
public void setMouseOverTextDelay(Duration delay) { mouseOverTextDelay.set(delay); }
public Duration getMouseOverTextDelay() { return mouseOverTextDelay.get(); }
public ObjectProperty<Duration> mouseOverTextDelayProperty() { return mouseOverTextDelay; }
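/*
 * Illustrative usage (hypothetical client code): reacting once the mouse has
 * rested over the text for one second. The accessor name getCharacterIndex()
 * is assumed from MouseOverTextEvent; the `area` variable is assumed as well.
 *
 *     area.setMouseOverTextDelay(Duration.ofSeconds(1));
 *     area.addEventHandler(MouseOverTextEvent.MOUSE_OVER_TEXT_BEGIN, e -> {
 *         int charIdx = e.getCharacterIndex();
 *         // look up information about the character/word at charIdx and show it
 *     });
 */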
private final ObjectProperty<IntFunction<? extends Node>> paragraphGraphicFactory = new SimpleObjectProperty<>(null);
public void setParagraphGraphicFactory(IntFunction<? extends Node> factory) { paragraphGraphicFactory.set(factory); }
public IntFunction<? extends Node> getParagraphGraphicFactory() { return paragraphGraphicFactory.get(); }
public ObjectProperty<IntFunction<? extends Node>> paragraphGraphicFactoryProperty() { return paragraphGraphicFactory; }
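/*
 * Illustrative usage (hypothetical client code): a paragraph graphic that
 * shows line numbers. The lambda variant relies only on the property declared
 * above; LineNumberFactory is the RichTextFX helper commonly used for this
 * and is assumed to be available in this build.
 *
 *     area.setParagraphGraphicFactory(i -> new Label(String.valueOf(i + 1)));
 *     // or, with the bundled helper:
 *     area.setParagraphGraphicFactory(LineNumberFactory.get(area));
 */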
/**
* Indicates whether the initial style should also be used for plain text
* inserted into this text area. When {@code false}, the style immediately
* preceding the insertion position is used. Default value is {@code false}.
*/
public BooleanProperty useInitialStyleForInsertionProperty() { return content.useInitialStyleForInsertion; }
public void setUseInitialStyleForInsertion(boolean value) { content.useInitialStyleForInsertion.set(value); }
public boolean getUseInitialStyleForInsertion() { return content.useInitialStyleForInsertion.get(); }
private Optional<Tuple2<Codec<S>, Codec<PS>>> styleCodecs = Optional.empty();
/**
* Sets codecs to encode/decode style information to/from binary format.
* Providing codecs enables clipboard actions to retain the style information.
*/
public void setStyleCodecs(Codec<S> textStyleCodec, Codec<PS> paragraphStyleCodec) {
styleCodecs = Optional.of(t(textStyleCodec, paragraphStyleCodec));
}
@Override
public Optional<Tuple2<Codec<S>, Codec<PS>>> getStyleCodecs() {
return styleCodecs;
}
/**
* The <em>estimated</em> scrollX value. This can be set in order to scroll the content.
* Value is only accurate when area does not wrap lines and uses the same font size
* throughout the entire area.
*/
public Var<Double> estimatedScrollXProperty() { return virtualFlow.estimatedScrollXProperty(); }
public double getEstimatedScrollX() { return virtualFlow.estimatedScrollXProperty().getValue(); }
public void setEstimatedScrollX(double value) { virtualFlow.estimatedScrollXProperty().setValue(value); }
/**
* The <em>estimated</em> scrollY value. This can be set in order to scroll the content.
* Value is only accurate when area does not wrap lines and uses the same font size
* throughout the entire area.
*/
public Var<Double> estimatedScrollYProperty() { return virtualFlow.estimatedScrollYProperty(); }
public double getEstimatedScrollY() { return virtualFlow.estimatedScrollYProperty().getValue(); }
public void setEstimatedScrollY(double value) { virtualFlow.estimatedScrollYProperty().setValue(value); }
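/*
 * Illustrative usage (hypothetical client code): programmatic scrolling via
 * the estimated scroll values, e.g. jumping back to the top of the document.
 *
 *     area.setEstimatedScrollY(0.0);
 */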
/* ********************************************************************** *
* *
* Observables *
* *
* Observables are "dynamic" (i.e. changing) characteristics of this *
* control. They are not directly settable by the client code, but change *
* in response to user input and/or API actions. *
* *
* ********************************************************************** */
// text
private final SuspendableVal<String> text;
@Override public final String getText() { return text.getValue(); }
@Override public final ObservableValue<String> textProperty() { return text; }
// rich text
@Override public final StyledDocument<S, PS> getDocument() { return content.snapshot(); }
// length
private final SuspendableVal<Integer> length;
@Override public final int getLength() { return length.getValue(); }
@Override public final ObservableValue<Integer> lengthProperty() { return length; }
// caret position
private final Var<Integer> internalCaretPosition = Var.newSimpleVar(0);
private final SuspendableVal<Integer> caretPosition = internalCaretPosition.suspendable();
@Override public final int getCaretPosition() { return caretPosition.getValue(); }
@Override public final ObservableValue<Integer> caretPositionProperty() { return caretPosition; }
// selection anchor
private final SuspendableVar<Integer> anchor = Var.newSimpleVar(0).suspendable();
@Override public final int getAnchor() { return anchor.getValue(); }
@Override public final ObservableValue<Integer> anchorProperty() { return anchor; }
// selection
private final Var<IndexRange> internalSelection = Var.newSimpleVar(EMPTY_RANGE);
private final SuspendableVal<IndexRange> selection = internalSelection.suspendable();
@Override public final IndexRange getSelection() { return selection.getValue(); }
@Override public final ObservableValue<IndexRange> selectionProperty() { return selection; }
// selected text
private final SuspendableVal<String> selectedText;
@Override public final String getSelectedText() { return selectedText.getValue(); }
@Override public final ObservableValue<String> selectedTextProperty() { return selectedText; }
// current paragraph index
private final SuspendableVal<Integer> currentParagraph;
@Override public final int getCurrentParagraph() { return currentParagraph.getValue(); }
@Override public final ObservableValue<Integer> currentParagraphProperty() { return currentParagraph; }
// caret column
private final SuspendableVal<Integer> caretColumn;
@Override public final int getCaretColumn() { return caretColumn.getValue(); }
@Override public final ObservableValue<Integer> caretColumnProperty() { return caretColumn; }
// paragraphs
private final SuspendableList<Paragraph<S, PS>> paragraphs;
@Override public ObservableList<Paragraph<S, PS>> getParagraphs() {
return paragraphs;
}
// beingUpdated
private final SuspendableNo beingUpdated = new SuspendableNo();
public ObservableBooleanValue beingUpdatedProperty() { return beingUpdated; }
public boolean isBeingUpdated() { return beingUpdated.get(); }
// total width estimate
/**
* The <em>estimated</em> width of the entire document. Accurate when area does not wrap lines and
* uses the same font size throughout the entire area. Value is only supposed to be <em>set</em> by
* the skin, not the user.
*/
public Val<Double> totalWidthEstimateProperty() { return virtualFlow.totalWidthEstimateProperty(); }
public double getTotalWidthEstimate() { return virtualFlow.totalWidthEstimateProperty().getValue(); }
// total height estimate
/**
* The <em>estimated</em> height of the entire document. Accurate when area does not wrap lines and
* uses the same font size throughout the entire area. Value is only supposed to be <em>set</em> by
* the skin, not the user.
*/
public Val<Double> totalHeightEstimateProperty() { return virtualFlow.totalHeightEstimateProperty(); }
public double getTotalHeightEstimate() { return virtualFlow.totalHeightEstimateProperty().getValue(); }
/* ********************************************************************** *
* *
* Event streams *
* *
* ********************************************************************** */
// text changes
private final SuspendableEventStream<PlainTextChange> plainTextChanges;
@Override
public final EventStream<PlainTextChange> plainTextChanges() { return plainTextChanges; }
// rich text changes
private final SuspendableEventStream<RichTextChange<S, PS>> richTextChanges;
@Override
public final EventStream<RichTextChange<S, PS>> richChanges() { return richTextChanges; }
/* ********************************************************************** *
* *
* Private fields *
* *
* ********************************************************************** */
private final StyledTextAreaBehavior behavior;
private Subscription subscriptions = () -> {};
private final Binding<Boolean> caretVisible;
// popup anchor adjustment resolved from the user-provided function/offset;
// read in layoutChildren() when positioning the popup window
private final Val<UnaryOperator<Point2D>> _popupAnchorAdjustment;
private final VirtualFlow<Paragraph<S, PS>, Cell<Paragraph<S, PS>, ParagraphBox<S, PS>>> virtualFlow;
private final VirtualizedScrollPane<VirtualFlow> virtualizedScrollPane;
// used for two-level navigation, where on the higher level are
// paragraphs and on the lower level are lines within a paragraph
private final TwoLevelNavigator navigator;
private boolean followCaretRequested = false;
private Position selectionStart2D;
private Position selectionEnd2D;
/**
* content model
*/
private final EditableStyledDocument<S, PS> content;
/**
* Style used by default when no other style is provided.
*/
private final S initialStyle;
/**
* Style used by default when no other style is provided.
*/
private final PS initialParagraphStyle;
/**
* Style applicator used by the default skin.
*/
private final BiConsumer<? super TextExt, S> applyStyle;
/**
* Style applicator used by the default skin.
*/
private final BiConsumer<TextFlow, PS> applyParagraphStyle;
/**
* Indicates whether style should be preserved on undo/redo,
* copy/paste and text move.
* TODO: Currently, only undo/redo respect this flag.
*/
private final boolean preserveStyle;
private final Suspendable omniSuspendable;
/* ********************************************************************** *
* *
* Constructors *
* *
* ********************************************************************** */
/**
* Creates a text area with empty text content.
*
* @param initialStyle style to use in places where no other style is
* specified (yet).
* @param applyStyle function that, given a {@link Text} node and
* a style, applies the style to the text node. This function is
* used by the default skin to apply style to text nodes.
* @param initialParagraphStyle style to use in places where no other style is
* specified (yet).
* @param applyParagraphStyle function that, given a {@link TextFlow} node and
* a style, applies the style to the paragraph node. This function is
* used by the default skin to apply style to paragraph nodes.
*/
public StyledTextArea(S initialStyle, BiConsumer<? super TextExt, S> applyStyle, PS initialParagraphStyle, BiConsumer<TextFlow, PS> applyParagraphStyle) {
this(initialStyle, applyStyle, initialParagraphStyle, applyParagraphStyle, true);
}
public <C> StyledTextArea(S initialStyle, BiConsumer<? super TextExt, S> applyStyle,
PS initialParagraphStyle, BiConsumer<TextFlow, PS> applyParagraphStyle,
boolean preserveStyle) {
this.initialStyle = initialStyle;
this.initialParagraphStyle = initialParagraphStyle;
this.applyStyle = applyStyle;
this.applyParagraphStyle = applyParagraphStyle;
this.preserveStyle = preserveStyle;
content = new EditableStyledDocument<>(initialStyle, initialParagraphStyle);
paragraphs = LiveList.suspendable(content.getParagraphs());
text = Val.suspendable(content.textProperty());
length = Val.suspendable(content.lengthProperty());
plainTextChanges = content.plainTextChanges().pausable();
richTextChanges = content.richChanges().pausable();
undoManager = preserveStyle
? createRichUndoManager(UndoManagerFactory.unlimitedHistoryFactory())
: createPlainUndoManager(UndoManagerFactory.unlimitedHistoryFactory());
Val<Position> caretPosition2D = Val.create(
() -> content.offsetToPosition(internalCaretPosition.getValue(), Forward),
internalCaretPosition, paragraphs);
currentParagraph = caretPosition2D.map(Position::getMajor).suspendable();
caretColumn = caretPosition2D.map(Position::getMinor).suspendable();
selectionStart2D = position(0, 0);
selectionEnd2D = position(0, 0);
internalSelection.addListener(obs -> {
IndexRange sel = internalSelection.getValue();
selectionStart2D = offsetToPosition(sel.getStart(), Forward);
selectionEnd2D = sel.getLength() == 0
? selectionStart2D
: selectionStart2D.offsetBy(sel.getLength(), Backward);
});
selectedText = Val.create(
() -> content.getText(internalSelection.getValue()),
internalSelection, content.getParagraphs()).suspendable();
omniSuspendable = Suspendable.combine(
beingUpdated, // must be first, to be the last one to release
text,
length,
caretPosition,
anchor,
selection,
selectedText,
currentParagraph,
caretColumn,
// add streams after properties, to be released before them
plainTextChanges,
richTextChanges,
// paragraphs to be released first
paragraphs);
this.setBackground(new Background(new BackgroundFill(Color.WHITE, CornerRadii.EMPTY, Insets.EMPTY)));
getStyleClass().add("styled-text-area");
// CONSTRUCT THE SKIN
// keeps track of currently used non-empty cells
@SuppressWarnings("unchecked")
ObservableSet<ParagraphBox<S, PS>> nonEmptyCells = FXCollections.observableSet();
// Initialize content
virtualFlow = VirtualFlow.createVertical(
getParagraphs(),
par -> {
Cell<Paragraph<S, PS>, ParagraphBox<S, PS>> cell = createCell(
par,
applyStyle,
initialParagraphStyle,
applyParagraphStyle);
nonEmptyCells.add(cell.getNode());
return cell.beforeReset(() -> nonEmptyCells.remove(cell.getNode()))
.afterUpdateItem(p -> nonEmptyCells.add(cell.getNode()));
});
virtualizedScrollPane = new VirtualizedScrollPane<>(virtualFlow);
getChildren().add(virtualizedScrollPane);
// initialize navigator
IntSupplier cellCount = () -> getParagraphs().size();
IntUnaryOperator cellLength = i -> virtualFlow.getCell(i).getNode().getLineCount();
navigator = new TwoLevelNavigator(cellCount, cellLength);
// follow the caret every time the caret position or paragraphs change
EventStream<?> caretPosDirty = invalidationsOf(caretPositionProperty());
EventStream<?> paragraphsDirty = invalidationsOf(getParagraphs());
EventStream<?> selectionDirty = invalidationsOf(selectionProperty());
// need to reposition popup even when caret hasn't moved, but selection has changed (been deselected)
EventStream<?> caretDirty = merge(caretPosDirty, paragraphsDirty, selectionDirty);
subscribeTo(caretDirty, x -> requestFollowCaret());
// whether or not to animate the caret
BooleanBinding blinkCaret = focusedProperty()
.and(editableProperty())
.and(disabledProperty().not());
manageBinding(blinkCaret);
// The caret is visible in periodic intervals,
// but only when blinkCaret is true.
caretVisible = EventStreams.valuesOf(blinkCaret)
.flatMap(blink -> blink
? booleanPulse(Duration.ofMillis(500))
: valuesOf(Val.constant(false)))
.toBinding(false);
manageBinding(caretVisible);
// Adjust popup anchor by either a user-provided function,
// or user-provided offset, or don't adjust at all.
Val<UnaryOperator<Point2D>> userOffset = Val.map(
popupAnchorOffsetProperty(),
offset -> anchor -> anchor.add(offset));
_popupAnchorAdjustment =
Val.orElse(
popupAnchorAdjustmentProperty(),
userOffset)
.orElseConst(UnaryOperator.identity());
// dispatch MouseOverTextEvents when mouseOverTextDelay is not null
EventStreams.valuesOf(mouseOverTextDelayProperty())
.flatMap(delay -> delay != null
? mouseOverTextEvents(nonEmptyCells, delay)
: EventStreams.never())
.subscribe(evt -> Event.fireEvent(this, evt));
behavior = new StyledTextAreaBehavior(this, virtualFlow);
getChildren().add(virtualFlow);
}
/* ********************************************************************** *
* *
* Queries *
* *
* Queries are parameterized observables. *
* *
* ********************************************************************** */
/**
* Returns caret bounds relative to the viewport, i.e. the visual bounds
* of the embedded VirtualFlow.
*/
Optional<Bounds> getCaretBounds() {
return virtualFlow.getCellIfVisible(getCurrentParagraph())
.map(c -> {
Bounds cellBounds = c.getNode().getCaretBounds();
return virtualFlow.cellToViewport(c, cellBounds);
});
}
/**
* Returns x coordinate of the caret in the current paragraph.
*/
ParagraphBox.CaretOffsetX getCaretOffsetX() {
int idx = getCurrentParagraph();
return getCell(idx).getCaretOffsetX();
}
double getViewportHeight() {
return virtualFlow.getHeight();
}
CharacterHit hit(ParagraphBox.CaretOffsetX x, TwoDimensional.Position targetLine) {
int parIdx = targetLine.getMajor();
ParagraphBox<S, PS> cell = virtualFlow.getCell(parIdx).getNode();
CharacterHit parHit = cell.hitTextLine(x, targetLine.getMinor());
return parHit.offset(getParagraphOffset(parIdx));
}
CharacterHit hit(ParagraphBox.CaretOffsetX x, double y) {
VirtualFlowHit<Cell<Paragraph<S, PS>, ParagraphBox<S, PS>>> hit = virtualFlow.hit(0.0, y);
if(hit.isBeforeCells()) {
return CharacterHit.insertionAt(0);
} else if(hit.isAfterCells()) {
return CharacterHit.insertionAt(getLength());
} else {
int parIdx = hit.getCellIndex();
int parOffset = getParagraphOffset(parIdx);
ParagraphBox<S, PS> cell = hit.getCell().getNode();
Point2D cellOffset = hit.getCellOffset();
CharacterHit parHit = cell.hitText(x, cellOffset.getY());
return parHit.offset(parOffset);
}
}
CharacterHit hit(double x, double y) {
VirtualFlowHit<Cell<Paragraph<S, PS>, ParagraphBox<S, PS>>> hit = virtualFlow.hit(x, y);
if(hit.isBeforeCells()) {
return CharacterHit.insertionAt(0);
} else if(hit.isAfterCells()) {
return CharacterHit.insertionAt(getLength());
} else {
int parIdx = hit.getCellIndex();
int parOffset = getParagraphOffset(parIdx);
ParagraphBox<S, PS> cell = hit.getCell().getNode();
Point2D cellOffset = hit.getCellOffset();
CharacterHit parHit = cell.hit(cellOffset);
return parHit.offset(parOffset);
}
}
/**
* Returns the current line as a two-level index.
* The major number is the paragraph index, the minor
* number is the line number within the paragraph.
*
* <p>This method has a side-effect of bringing the current
* paragraph to the viewport if it is not already visible.
*/
TwoDimensional.Position currentLine() {
int parIdx = getCurrentParagraph();
Cell<Paragraph<S, PS>, ParagraphBox<S, PS>> cell = virtualFlow.getCell(parIdx);
int lineIdx = cell.getNode().getCurrentLineIndex();
return _position(parIdx, lineIdx);
}
TwoDimensional.Position _position(int par, int line) {
return navigator.position(par, line);
}
@Override
public final String getText(int start, int end) {
return content.getText(start, end);
}
@Override
public String getText(int paragraph) {
return paragraphs.get(paragraph).toString();
}
public Paragraph<S, PS> getParagraph(int index) {
return paragraphs.get(index);
}
@Override
public StyledDocument<S, PS> subDocument(int start, int end) {
return content.subSequence(start, end);
}
@Override
public StyledDocument<S, PS> subDocument(int paragraphIndex) {
return content.subDocument(paragraphIndex);
}
/**
* Returns the selection range in the given paragraph.
*/
public IndexRange getParagraphSelection(int paragraph) {
int startPar = selectionStart2D.getMajor();
int endPar = selectionEnd2D.getMajor();
if(paragraph < startPar || paragraph > endPar) {
return EMPTY_RANGE;
}
int start = paragraph == startPar ? selectionStart2D.getMinor() : 0;
int end = paragraph == endPar ? selectionEnd2D.getMinor() : paragraphs.get(paragraph).length();
// force selectionProperty() to be valid
getSelection();
return new IndexRange(start, end);
}
/**
* Returns the style of the character with the given index.
* If {@code index} points to a line terminator character,
* the last style used in the paragraph terminated by that
* line terminator is returned.
*/
public S getStyleOfChar(int index) {
return content.getStyleOfChar(index);
}
/**
* Returns the style at the given position. That is the style of the
* character immediately preceding {@code position}, except when
* {@code position} points to a paragraph boundary, in which case it
* is the style at the beginning of the latter paragraph.
*
* <p>In other words, most of the time {@code getStyleAtPosition(p)}
* is equivalent to {@code getStyleOfChar(p-1)}, except when {@code p}
* points to a paragraph boundary, in which case it is equivalent to
* {@code getStyleOfChar(p)}.
*/
public S getStyleAtPosition(int position) {
return content.getStyleAtPosition(position);
}
/**
* Returns the range of homogeneous style that includes the given position.
* If {@code position} points to a boundary between two styled ranges, then
* the range preceding {@code position} is returned. If {@code position}
* points to a boundary between two paragraphs, then the first styled range
* of the latter paragraph is returned.
*/
public IndexRange getStyleRangeAtPosition(int position) {
return content.getStyleRangeAtPosition(position);
}
/**
* Returns the styles in the given character range.
*/
public StyleSpans<S> getStyleSpans(int from, int to) {
return content.getStyleSpans(from, to);
}
/**
* Returns the styles in the given character range.
*/
public StyleSpans<S> getStyleSpans(IndexRange range) {
return getStyleSpans(range.getStart(), range.getEnd());
}
/**
* Returns the style of the character with the given index in the given
* paragraph. If {@code index} is beyond the end of the paragraph, the
* style at the end of line is returned. If {@code index} is negative, it
* is the same as if it was 0.
*/
public S getStyleOfChar(int paragraph, int index) {
return content.getStyleOfChar(paragraph, index);
}
/**
* Returns the style at the given position in the given paragraph.
* This is equivalent to {@code getStyleOfChar(paragraph, position-1)}.
*/
public S getStyleAtPosition(int paragraph, int position) {
return content.getStyleAtPosition(paragraph, position);
}
/**
* Returns the range of homogeneous style that includes the given position
* in the given paragraph. If {@code position} points to a boundary between
* two styled ranges, then the range preceding {@code position} is returned.
*/
public IndexRange getStyleRangeAtPosition(int paragraph, int position) {
return content.getStyleRangeAtPosition(paragraph, position);
}
/**
* Returns styles of the whole paragraph.
*/
public StyleSpans<S> getStyleSpans(int paragraph) {
return content.getStyleSpans(paragraph);
}
/**
* Returns the styles in the given character range of the given paragraph.
*/
public StyleSpans<S> getStyleSpans(int paragraph, int from, int to) {
return content.getStyleSpans(paragraph, from, to);
}
/**
* Returns the styles in the given character range of the given paragraph.
*/
public StyleSpans<S> getStyleSpans(int paragraph, IndexRange range) {
return getStyleSpans(paragraph, range.getStart(), range.getEnd());
}
@Override
public Position position(int row, int col) {
return content.position(row, col);
}
@Override
public Position offsetToPosition(int charOffset, Bias bias) {
return content.offsetToPosition(charOffset, bias);
}
/* ********************************************************************** *
* *
* Actions *
* *
* Actions change the state of this control. They typically cause a *
* change of one or more observables and/or produce an event. *
* *
* ********************************************************************** */
void scrollBy(Point2D deltas) {
virtualFlow.scrollXBy(deltas.getX());
virtualFlow.scrollYBy(deltas.getY());
}
void show(double y) {
virtualFlow.show(y);
}
void showCaretAtBottom() {
int parIdx = getCurrentParagraph();
Cell<Paragraph<S, PS>, ParagraphBox<S, PS>> cell = virtualFlow.getCell(parIdx);
Bounds caretBounds = cell.getNode().getCaretBounds();
double y = caretBounds.getMaxY();
virtualFlow.showAtOffset(parIdx, getViewportHeight() - y);
}
void showCaretAtTop() {
int parIdx = getCurrentParagraph();
Cell<Paragraph<S, PS>, ParagraphBox<S, PS>> cell = virtualFlow.getCell(parIdx);
Bounds caretBounds = cell.getNode().getCaretBounds();
double y = caretBounds.getMinY();
virtualFlow.showAtOffset(parIdx, -y);
}
void requestFollowCaret() {
followCaretRequested = true;
requestLayout();
}
private void followCaret() {
int parIdx = getCurrentParagraph();
Cell<Paragraph<S, PS>, ParagraphBox<S, PS>> cell = virtualFlow.getCell(parIdx);
Bounds caretBounds = cell.getNode().getCaretBounds();
double graphicWidth = cell.getNode().getGraphicPrefWidth();
Bounds region = extendLeft(caretBounds, graphicWidth);
virtualFlow.show(parIdx, region);
}
/**
* Sets style for the given character range.
*/
public void setStyle(int from, int to, S style) {
try(Guard g = omniSuspendable.suspend()) {
content.setStyle(from, to, style);
}
}
/**
* Sets style for the whole paragraph.
*/
public void setStyle(int paragraph, S style) {
try(Guard g = omniSuspendable.suspend()) {
content.setStyle(paragraph, style);
}
}
/**
* Sets style for the given range relative in the given paragraph.
*/
public void setStyle(int paragraph, int from, int to, S style) {
try(Guard g = omniSuspendable.suspend()) {
content.setStyle(paragraph, from, to, style);
}
}
/**
* Set multiple style ranges at once. This is equivalent to
* <pre>
* for(StyleSpan{@code <S>} span: styleSpans) {
* setStyle(from, from + span.getLength(), span.getStyle());
* from += span.getLength();
* }
* </pre>
* but the actual implementation is more efficient.
*/
public void setStyleSpans(int from, StyleSpans<? extends S> styleSpans) {
try(Guard g = omniSuspendable.suspend()) {
content.setStyleSpans(from, styleSpans);
}
}
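/*
 * Illustrative usage (hypothetical client code): applying spans produced by a
 * lexer pass. StyleSpansBuilder is assumed to be the RichTextFX helper with
 * add(style, length) and create(); the token loop and style values are
 * assumptions of this sketch.
 *
 *     StyleSpansBuilder<Collection<String>> spansBuilder = new StyleSpansBuilder<>();
 *     for(Token t : tokens) {
 *         spansBuilder.add(Collections.singleton(t.styleClass()), t.length());
 *     }
 *     area.setStyleSpans(0, spansBuilder.create());
 */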
/**
* Set multiple style ranges of a paragraph at once. This is equivalent to
* <pre>
* for(StyleSpan{@code <S>} span: styleSpans) {
* setStyle(paragraph, from, from + span.getLength(), span.getStyle());
* from += span.getLength();
* }
* </pre>
* but the actual implementation is more efficient.
*/
public void setStyleSpans(int paragraph, int from, StyleSpans<? extends S> styleSpans) {
try(Guard g = omniSuspendable.suspend()) {
content.setStyleSpans(paragraph, from, styleSpans);
}
}
/**
* Sets style for the whole paragraph.
*/
public void setParagraphStyle(int paragraph, PS paragraphStyle) {
try(Guard g = omniSuspendable.suspend()) {
content.setParagraphStyle(paragraph, paragraphStyle);
}
}
/**
* Resets the style of the given range to the initial style.
*/
public void clearStyle(int from, int to) {
setStyle(from, to, initialStyle);
}
/**
* Resets the style of the given paragraph to the initial style.
*/
public void clearStyle(int paragraph) {
setStyle(paragraph, initialStyle);
}
/**
* Resets the style of the given range in the given paragraph
* to the initial style.
*/
public void clearStyle(int paragraph, int from, int to) {
setStyle(paragraph, from, to, initialStyle);
}
/**
* Resets the style of the given paragraph to the initial style.
*/
public void clearParagraphStyle(int paragraph) {
setParagraphStyle(paragraph, initialParagraphStyle);
}
@Override
public void replaceText(int start, int end, String text) {
StyledDocument<S, PS> doc = ReadOnlyStyledDocument.fromString(
text, content.getStyleForInsertionAt(start), content.getParagraphStyleForInsertionAt(start));
replace(start, end, doc);
}
@Override
public void replace(int start, int end, StyledDocument<S, PS> replacement) {
try(Guard g = omniSuspendable.suspend()) {
start = clamp(0, start, getLength());
end = clamp(0, end, getLength());
content.replace(start, end, replacement);
int newCaretPos = start + replacement.length();
selectRange(newCaretPos, newCaretPos);
}
}
@Override
public void selectRange(int anchor, int caretPosition) {
try(Guard g = suspend(
this.caretPosition, currentParagraph,
caretColumn, this.anchor,
selection, selectedText)) {
this.internalCaretPosition.setValue(clamp(0, caretPosition, getLength()));
this.anchor.setValue(clamp(0, anchor, getLength()));
this.internalSelection.setValue(IndexRange.normalize(getAnchor(), getCaretPosition()));
}
}
@Override
public void positionCaret(int pos) {
try(Guard g = suspend(caretPosition, currentParagraph, caretColumn)) {
internalCaretPosition.setValue(pos);
}
}
/* ********************************************************************** *
* *
* Public API *
* *
* ********************************************************************** */
public void dispose() {
subscriptions.unsubscribe();
behavior.dispose();
virtualFlow.dispose();
}
/* ********************************************************************** *
* *
* Layout *
* *
* ********************************************************************** */
@Override
protected void layoutChildren() {
virtualizedScrollPane.resize(getWidth(), getHeight());
if(followCaretRequested) {
followCaretRequested = false;
followCaret();
}
// position popup
PopupWindow popup = getPopupWindow();
PopupAlignment alignment = getPopupAlignment();
UnaryOperator<Point2D> adjustment = _popupAnchorAdjustment.getValue();
if(popup != null) {
positionPopup(popup, alignment, adjustment);
}
}
/* ********************************************************************** *
* *
* Private methods *
* *
* ********************************************************************** */
private Cell<Paragraph<S, PS>, ParagraphBox<S, PS>> createCell(
Paragraph<S, PS> paragraph,
BiConsumer<? super TextExt, S> applyStyle,
PS initialParagraphStyle,
BiConsumer<TextFlow, PS> applyParagraphStyle) {
ParagraphBox<S, PS> box = new ParagraphBox<>(paragraph, applyStyle, applyParagraphStyle);
box.highlightFillProperty().bind(highlightFill);
box.highlightTextFillProperty().bind(highlightTextFill);
box.wrapTextProperty().bind(wrapTextProperty());
box.graphicFactoryProperty().bind(paragraphGraphicFactoryProperty());
box.graphicOffset.bind(virtualFlow.breadthOffsetProperty());
Val<Boolean> hasCaret = Val.combine(
box.indexProperty(),
currentParagraphProperty(),
(bi, cp) -> bi.intValue() == cp.intValue());
// caret is visible only in the paragraph with the caret
Val<Boolean> cellCaretVisible = Val.combine(hasCaret, caretVisible, (a, b) -> a && b);
box.caretVisibleProperty().bind(cellCaretVisible);
// bind cell's caret position to area's caret column,
// when the cell is the one with the caret
box.caretPositionProperty().bind(hasCaret.flatMap(has -> has
? caretColumnProperty()
: Val.constant(0)));
// keep paragraph selection updated
ObjectBinding<IndexRange> cellSelection = Bindings.createObjectBinding(() -> {
int idx = box.getIndex();
return idx != -1
? getParagraphSelection(idx)
: StyledTextArea.EMPTY_RANGE;
}, selectionProperty(), box.indexProperty());
box.selectionProperty().bind(cellSelection);
return new Cell<Paragraph<S, PS>, ParagraphBox<S, PS>>() {
@Override
public ParagraphBox<S, PS> getNode() {
return box;
}
@Override
public void updateIndex(int index) {
box.setIndex(index);
}
@Override
public void dispose() {
box.highlightFillProperty().unbind();
box.highlightTextFillProperty().unbind();
box.wrapTextProperty().unbind();
box.graphicFactoryProperty().unbind();
box.graphicOffset.unbind();
box.caretVisibleProperty().unbind();
box.caretPositionProperty().unbind();
box.selectionProperty().unbind();
cellSelection.dispose();
}
};
}
private ParagraphBox<S, PS> getCell(int index) {
return virtualFlow.getCell(index).getNode();
}
private EventStream<MouseOverTextEvent> mouseOverTextEvents(ObservableSet<ParagraphBox<S, PS>> cells, Duration delay) {
return merge(cells, c -> c.stationaryIndices(delay).map(e -> e.unify(
l -> l.map((pos, charIdx) -> MouseOverTextEvent.beginAt(c.localToScreen(pos), getParagraphOffset(c.getIndex()) + charIdx)),
r -> MouseOverTextEvent.end())));
}
private int getParagraphOffset(int parIdx) {
return position(parIdx, 0).toOffset();
}
private void positionPopup(
PopupWindow popup,
PopupAlignment alignment,
UnaryOperator<Point2D> adjustment) {
Optional<Bounds> bounds = Optional.empty();
switch(alignment.getAnchorObject()) {
case CARET: bounds = getCaretBoundsOnScreen(); break;
case SELECTION: bounds = getSelectionBoundsOnScreen(); break;
}
bounds.ifPresent(b -> {
double x = 0, y = 0;
switch(alignment.getHorizontalAlignment()) {
case LEFT: x = b.getMinX(); break;
case H_CENTER: x = (b.getMinX() + b.getMaxX()) / 2; break;
case RIGHT: x = b.getMaxX(); break;
}
switch(alignment.getVerticalAlignment()) {
case TOP: y = b.getMinY(); break;
case V_CENTER: y = (b.getMinY() + b.getMaxY()) / 2; break;
case BOTTOM: y = b.getMaxY(); break;
}
Point2D anchor = adjustment.apply(new Point2D(x, y));
popup.setAnchorX(anchor.getX());
popup.setAnchorY(anchor.getY());
});
}
private Optional<Bounds> getCaretBoundsOnScreen() {
return virtualFlow.getCellIfVisible(getCurrentParagraph())
.map(c -> c.getNode().getCaretBoundsOnScreen());
}
private Optional<Bounds> getSelectionBoundsOnScreen() {
IndexRange selection = getSelection();
if(selection.getLength() == 0) {
return getCaretBoundsOnScreen();
}
Bounds[] bounds = virtualFlow.visibleCells().stream()
.map(c -> c.getNode().getSelectionBoundsOnScreen())
.filter(Optional::isPresent)
.map(Optional::get)
.toArray(Bounds[]::new);
if(bounds.length == 0) {
return Optional.empty();
}
double minX = Stream.of(bounds).mapToDouble(Bounds::getMinX).min().getAsDouble();
double maxX = Stream.of(bounds).mapToDouble(Bounds::getMaxX).max().getAsDouble();
double minY = Stream.of(bounds).mapToDouble(Bounds::getMinY).min().getAsDouble();
double maxY = Stream.of(bounds).mapToDouble(Bounds::getMaxY).max().getAsDouble();
return Optional.of(new BoundingBox(minX, minY, maxX-minX, maxY-minY));
}
private <T> void subscribeTo(EventStream<T> src, Consumer<T> consumer) {
manageSubscription(src.subscribe(consumer));
}
private void manageSubscription(Subscription subscription) {
subscriptions = subscriptions.and(subscription);
}
private void manageBinding(Binding<?> binding) {
subscriptions = subscriptions.and(binding::dispose);
}
private static Bounds extendLeft(Bounds b, double w) {
if(w == 0) {
return b;
} else {
return new BoundingBox(
b.getMinX() - w, b.getMinY(),
b.getWidth() + w, b.getHeight());
}
}
private static EventStream<Boolean> booleanPulse(Duration duration) {
return EventStreams.ticks(duration).accumulate(true, (b, x) -> !b);
}
private UndoManager createPlainUndoManager(UndoManagerFactory factory) {
Consumer<PlainTextChange> apply = change -> replaceText(change.getPosition(), change.getPosition() + change.getRemoved().length(), change.getInserted());
BiFunction<PlainTextChange, PlainTextChange, Optional<PlainTextChange>> merge = (change1, change2) -> change1.mergeWith(change2);
return factory.create(plainTextChanges(), PlainTextChange::invert, apply, merge);
}
private UndoManager createRichUndoManager(UndoManagerFactory factory) {
Consumer<RichTextChange<S, PS>> apply = change -> replace(change.getPosition(), change.getPosition() + change.getRemoved().length(), change.getInserted());
BiFunction<RichTextChange<S, PS>, RichTextChange<S, PS>, Optional<RichTextChange<S, PS>>> merge = (change1, change2) -> change1.mergeWith(change2);
return factory.create(richChanges(), RichTextChange::invert, apply, merge);
}
private Guard suspend(Suspendable... suspendables) {
return Suspendable.combine(beingUpdated, Suspendable.combine(suspendables)).suspend();
}
}
|
Removed `dispose()` and its dependencies. This should be reimplemented when Issue #152 is implemented.
|
richtextfx/src/main/java/org/fxmisc/richtext/StyledTextArea.java
|
Removed `dispose()` and its dependencies. This should be reimplemented when Issue #152 is implemented.
|
<ide><path>richtextfx/src/main/java/org/fxmisc/richtext/StyledTextArea.java
<ide> import org.reactfx.EventStream;
<ide> import org.reactfx.EventStreams;
<ide> import org.reactfx.Guard;
<del>import org.reactfx.Subscription;
<ide> import org.reactfx.Suspendable;
<ide> import org.reactfx.SuspendableEventStream;
<ide> import org.reactfx.SuspendableNo;
<ide> * *
<ide> * ********************************************************************** */
<ide>
<del> private final StyledTextAreaBehavior behavior;
<del>
<del> private Subscription subscriptions = () -> {};
<del>
<ide> private final Binding<Boolean> caretVisible;
<ide>
<ide> // TODO: this is initialized but never used. Should it be removed?
<ide> EventStream<?> selectionDirty = invalidationsOf(selectionProperty());
<ide> // need to reposition popup even when caret hasn't moved, but selection has changed (been deselected)
<ide> EventStream<?> caretDirty = merge(caretPosDirty, paragraphsDirty, selectionDirty);
<del> subscribeTo(caretDirty, x -> requestFollowCaret());
<add> caretDirty.subscribe(x -> requestFollowCaret());
<ide>
<ide> // whether or not to animate the caret
<ide> BooleanBinding blinkCaret = focusedProperty()
<ide> .and(editableProperty())
<ide> .and(disabledProperty().not());
<del> manageBinding(blinkCaret);
<ide>
<ide> // The caret is visible in periodic intervals,
<ide> // but only when blinkCaret is true.
<ide> ? booleanPulse(Duration.ofMillis(500))
<ide> : valuesOf(Val.constant(false)))
<ide> .toBinding(false);
<del> manageBinding(caretVisible);
<ide>
<ide> // Adjust popup anchor by either a user-provided function,
<ide> // or user-provided offset, or don't adjust at all.
<ide> : EventStreams.never())
<ide> .subscribe(evt -> Event.fireEvent(this, evt));
<ide>
<del> behavior = new StyledTextAreaBehavior(this, virtualFlow);
<add> new StyledTextAreaBehavior(this, virtualFlow);
<ide> getChildren().add(virtualFlow);
<ide> }
<ide>
<ide> internalCaretPosition.setValue(pos);
<ide> }
<ide> }
<del>
<del> /* ********************************************************************** *
<del> * *
<del> * Public API *
<del> * *
<del> * ********************************************************************** */
<del>
<del> public void dispose() {
<del> subscriptions.unsubscribe();
<del> behavior.dispose();
<del> virtualFlow.dispose();
<del> }
<del>
<ide>
<ide> /* ********************************************************************** *
<ide> * *
<ide> return Optional.of(new BoundingBox(minX, minY, maxX-minX, maxY-minY));
<ide> }
<ide>
<del> private <T> void subscribeTo(EventStream<T> src, Consumer<T> consumer) {
<del> manageSubscription(src.subscribe(consumer));
<del> }
<del>
<del> private void manageSubscription(Subscription subscription) {
<del> subscriptions = subscriptions.and(subscription);
<del> }
<del>
<del> private void manageBinding(Binding<?> binding) {
<del> subscriptions = subscriptions.and(binding::dispose);
<del> }
<del>
<ide> private static Bounds extendLeft(Bounds b, double w) {
<ide> if(w == 0) {
<ide> return b;
|
|
Java
|
mit
|
ea7fd08e0a9939ec2e9110a873d558ad179f3dcf
| 0 |
n0nick/awesomeRB
|
/**
*
* RBTree
*
* An implementation of a Red Black Tree with
* non-negative, distinct integer values
*
*/
public class RBTree {
//TODO document
private RBNode root;
//TODO document
public RBNode getRoot() {
return this.root;
}
//TODO document
public void setRoot(RBNode root) {
this.root = root;
}
/**
* public boolean empty()
*
* returns true if and only if the tree is empty
*
* preconditions: none postcondition: return true iff the data structure
* does not contain any item
*/
public boolean empty() {
return root == null;
}
/**
* public boolean contains(int i)
*
* returns true if and only if the tree contains i
*
* preconditions: none
* postcondition: returns true iff i is in the tree
*/
public boolean contains(int i) {
if (!empty()) {
return root.contains(i);
} else {
return false;
}
}
/**
* public void insert(int i)
*
* inserts the integer i into the binary tree; the tree
* must remain valid (keep its invariants).
*
* precondition: none
* postcondition: contains(i) == true (that is, i is in the list)
*/
public void insert(int i) {
RBNode newNode = new RBNode(i);
if (root == null) {
setRoot(newNode);
} else {
this.redBlackInsert(newNode);
}
}
//TODO document
public void redBlackInsert(RBNode newNode) {
RBNode y;
root.insert(newNode);
newNode.setRed();
while ((newNode != root) && (!newNode.getParent().isBlack())) {
if (newNode.getParent() == newNode.getGrandParent().getLeftChild()) {
y = newNode.getGrandParent().getLeftChild();
if (!y.isBlack()) {
newNode.getParent().setBlack();
y.setBlack();
newNode.getGrandParent().setRed();
newNode = newNode.getGrandParent();
} else if (newNode == newNode.getParent().getRightChild()) {
newNode = newNode.getParent();
leftRotate(newNode);
}
newNode.getParent().setBlack();
newNode.getGrandParent().setRed();
rightRotate(newNode.getGrandParent());
} else {
}
}
}
/**
* public void delete(int i)
*
* deletes the integer i from the binary tree, if it is there;
* the tree must remain valid (keep its invariants).
*
* precondition: none
* postcondition: contains(i) == false (that is, i is in the list)
*/
public void delete(int i) {
return; // to be replaced by student code
}
/**
* public int min()
*
* Returns the smallest key in the tree. If the tree
* is empty, returns -1;
*
* precondition: none
* postcondition: none
*/
public int min() {
return 42; // to be replaced by student code
}
/**
* public int max()
*
* Returns the largest key in the tree. If the tree
* is empty, returns -1;
*
* precondition: none
* postcondition: none
*/
public int max() {
return 42; // to be replaced by student code
}
/**
* public int[] toIntArray()
*
* returns an int[] array containing the values stored in the tree,
* in ascending order.
*
* preconditions: none
* postconditions: returns an array containing exactly the tree's elements in
* ascending order.
*/
public int[] toIntArray() {
int[] arr = new int[42]; //
return arr; // to be replaced by student code
}
/**
* public boolean isValid()
*
* Returns true if and only if the tree is a valid red-black tree.
*
* precondition: none
* postcondition: none
*
*/
public boolean isValid() {
return false; // should be replaced by student code
}
/**
* public int maxDepth()
*
* Returns the maximum depth of a node in the tree. If the tree
* is empty, returns -1;
*
* precondition: none
* postcondition: none
*/
public int maxDepth() {
return 42; // to be replaced by student code
}
/**
* public int minLeafDepth()
*
* Returns the minimum depth of a leaf in the tree. If the tree
* is empty, returns -1;
*
* precondition: none
* postcondition: none
*/
public int minLeafDepth() {
return 42; // to be replaced by student code
}
/**
* public int size()
*
* Returns the number of nodes in the tree.
*
* precondition: none
* postcondition: none
*/
public int size() {
return 42; // to be replaced by student code
}
//TODO document
public String toString() {
return root.toString();
}
/**
* precondition: x != null, x.right != null
* postcondition: rotates x to the left
* @param x
*/
private void leftRotate(RBNode x) {
RBNode y = x.getRightChild();
x.setRightChild(y.getLeftChild());
if (y.getLeftChild() != null) {
y.getLeftChild().setParent(x);
}
y.setParent(x.getParent());
if (x.getParent() == null) {
this.setRoot(y);
} else if (x == x.getParent().getLeftChild()) {
x.getParent().setLeftChild(y);
} else {
x.getParent().setRightChild(y);
}
y.setLeftChild(x);
x.setParent(y);
}
/**
* precondition: x != null, x.left != null
* postcondition: rotates x to the right
* @param x
*/
private void rightRotate(RBNode x) {
RBNode y = x.getLeftChild();
x.setLeftChild(y.getRightChild());
if (y.getRightChild() != null) {
y.getRightChild().setParent(x);
}
y.setParent(x.getParent());
if (x.getParent() == null) {
this.setRoot(y);
} else if (x == x.getParent().getRightChild()) {
x.getParent().setRightChild(y);
} else {
x.getParent().setLeftChild(y);
}
y.setRightChild(x);
x.setParent(y);
}
/**
* public class RBNode
*
* If you wish to implement classes other than RBTree
* (for example RBNode), do it in this file, not in
* another file
*
*/
//TODO document everything!
public class RBNode {
private int key;
private boolean isBlack;
private RBNode leftChild;
private RBNode rightChild;
private RBNode parent;
public RBNode(int key, boolean isBlack) {
this.key = key;
this.isBlack = isBlack;
}
public RBNode(int key) {
this(key, true);
}
public RBNode getParent() {
return this.parent;
}
public RBNode getGrandParent() {
return getParent().getParent();
}
public void setParent(RBNode parent) {
this.parent = parent;
}
public int getKey() {
return key;
}
public void setKey(int key) {
this.key = key;
}
public boolean isBlack() {
return isBlack;
}
public void setBlack() {
this.isBlack = true;
}
public boolean isRed() {
return !isBlack();
}
public void setRed() {
this.isBlack = false;
}
public RBNode getLeftChild() {
return leftChild;
}
public void setLeftChild(RBNode leftChild) {
this.leftChild = leftChild;
if (leftChild != null) {
leftChild.setParent(this);
}
}
public RBNode getRightChild() {
return rightChild;
}
public void setRightChild(RBNode rightChild) {
this.rightChild = rightChild;
if (rightChild != null) {
rightChild.setParent(this);
}
}
public boolean isLeaf() {
return !hasLeftChild() && !hasRightChild();
}
public boolean hasLeftChild() {
return leftChild != null;
}
public boolean hasRightChild() {
return rightChild != null;
}
public boolean contains(int i) {
if (getKey() == i) {
return true;
} else {
if (i < getKey() && hasLeftChild()) {
return getLeftChild().contains(i);
} else if (hasRightChild()) {
return getRightChild().contains(i);
}
}
return false;
}
public void insert(RBNode newNode) {
if (newNode.getKey() < this.getKey()) {
if (this.hasLeftChild()) {
this.getLeftChild().insert(newNode);
} else {
this.setLeftChild(newNode);
}
} else if (newNode.getKey() > this.getKey()) {
if (this.hasRightChild()) {
this.getRightChild().insert(newNode);
} else {
this.setRightChild(newNode);
}
}
}
// TODO: remove this
public String toString() {
String st;
st = "[ " + getKey() + " [";
st += hasLeftChild() ? getLeftChild().toString() : "x";
st += "] [";
st += hasRightChild() ? getRightChild().toString() : "x";
st += " ]]";
return st;
}
}
/**
* @original author Shai Vardi
* Modified for semester 2011/2012 a
*/
}
|
src/RBTree.java
|
/**
*
* RBTree
*
* An implementation of a Red Black Tree with
* non-negative, distinct integer values
*
*/
public class RBTree {
//TODO document
private RBNode root;
//TODO document
public RBNode getRoot() {
return this.root;
}
//TODO document
public void setRoot(RBNode root) {
this.root = root;
}
/**
* public boolean empty()
*
* returns true if and only if the tree is empty
*
* preconditions: none postcondition: return true iff the data structure
* does not contain any item
*/
public boolean empty() {
return root == null;
}
/**
* public boolean contains(int i)
*
* returns true if and only if the tree contains i
*
* preconditions: none
* postcondition: returns true iff i is in the tree
*/
public boolean contains(int i) {
if (!empty()) {
return root.contains(i);
} else {
return false;
}
}
/**
* public void insert(int i)
*
* inserts the integer i into the binary tree; the tree
* must remain valid (keep its invariants).
*
* precondition: none
* postcondition: contains(i) == true (that is, i is in the list)
*/
public void insert(int i) {
RBNode newNode = new RBNode(i);
if (root == null) {
setRoot(newNode);
} else {
this.redBlackInsert(newNode);
}
}
//TODO document
public void redBlackInsert(RBNode newNode) {
RBNode y;
root.insert(newNode);
newNode.setRed();
while ((newNode != root) && (!newNode.getParent().isBlack())) {
if (newNode.getParent() == newNode.getGrandParent().getLeftChild()) {
y = newNode.getGrandParent().getLeftChild();
if (!y.isBlack()) {
newNode.getParent().setBlack();
y.setBlack();
newNode.getGrandParent().setRed();
newNode = newNode.getGrandParent();
} else if (newNode == newNode.getParent().getRightChild()) {
newNode = newNode.getParent();
leftRotate(newNode);
}
newNode.getParent().setBlack();
newNode.getGrandParent().setRed();
rightRotate(newNode.getGrandParent());
} else {
}
}
}
/**
* public void delete(int i)
*
* deletes the integer i from the binary tree, if it is there;
* the tree must remain valid (keep its invariants).
*
* precondition: none
* postcondition: contains(i) == false (that is, i is in the list)
*/
public void delete(int i) {
return; // to be replaced by student code
}
/**
* public int min()
*
* Returns the smallest key in the tree. If the tree
* is empty, returns -1;
*
* precondition: none
* postcondition: none
*/
public int min() {
return 42; // to be replaced by student code
}
/**
* public int max()
*
* Returns the largest key in the tree. If the tree
* is empty, returns -1;
*
* precondition: none
* postcondition: none
*/
public int max() {
return 42; // to be replaced by student code
}
/**
* public int[] toIntArray()
*
* returns an int[] array containing the values stored in the tree,
* in ascending order.
*
* preconditions: none
* postconditions: returns an array containing exactly the tree's elements in
* ascending order.
*/
public int[] toIntArray() {
int[] arr = new int[42]; //
return arr; // to be replaced by student code
}
/**
* public boolean isValid()
*
* Returns true if and only if the tree is a valid red-black tree.
*
* precondition: none
* postcondition: none
*
*/
public boolean isValid() {
return false; // should be replaced by student code
}
/**
* public int maxDepth()
*
* Returns the maximum depth of a node in the tree. If the tree
* is empty, returns -1;
*
* precondition: none
* postcondition: none
*/
public int maxDepth() {
return 42; // to be replaced by student code
}
/**
* public int minLeafDepth()
*
* Returns the minimum depth of a leaf in the tree. If the tree
* is empty, returns -1;
*
* precondition: none
* postcondition: none
*/
public int minLeafDepth() {
return 42; // to be replaced by student code
}
/**
* public int size()
*
* Returns the number of nodes in the tree.
*
* precondition: none
* postcondition: none
*/
public int size() {
return 42; // to be replaced by student code
}
//TODO document
public String toString() {
return root.toString();
}
/**
* precondition: x != null, x.right != null
* postcondition: rotates x to the left
* @param x
*/
private void leftRotate(RBNode x) {
RBNode y = x.getRightChild();
x.setRightChild(y.getLeftChild());
if (y.getLeftChild() != null) {
y.getLeftChild().setParent(x);
}
y.setParent(x.getParent());
if (x.getParent() == null) {
this.setRoot(y);
} else if (x == x.getParent().getLeftChild()) {
x.getParent().setLeftChild(y);
} else {
x.getParent().setRightChild(y);
}
y.setLeftChild(x);
x.setParent(y);
}
/**
* precondition: x != null, x.left != null
* postcondition: rotates x to the right
* @param x
*/
private void rightRotate(RBNode x) {
RBNode y = x.getLeftChild();
x.setLeftChild(y.getRightChild());
if (y.getRightChild() != null) {
y.getRightChild().setParent(x);
}
y.setParent(x.getParent());
if (x.getParent() == null) {
this.setRoot(y);
} else if (x == x.getParent().getRightChild()) {
x.getParent().setRightChild(y);
} else {
x.getParent().setLeftChild(y);
}
y.setRightChild(x);
x.setParent(y);
}
/**
* public class RBNode
*
* If you wish to implement classes other than RBTree
* (for example RBNode), do it in this file, not in
* another file
*
*/
//TODO document everything!
public class RBNode {
private int key;
private boolean isBlack;
private RBNode leftChild;
private RBNode rightChild;
private RBNode parent;
public RBNode(int key, boolean isBlack) {
this.key = key;
this.isBlack = isBlack;
}
public RBNode(int key) {
this(key, true);
}
public RBNode getParent() {
return this.parent;
}
public RBNode getGrandParent() {
return getParent().getParent();
}
public void setParent(RBNode parent) {
this.parent = parent;
}
public int getKey() {
return key;
}
public void setKey(int key) {
this.key = key;
}
public boolean isBlack() {
return isBlack;
}
public void setBlack() {
this.isBlack = true;
}
public boolean isRed() {
return !isBlack();
}
public void setRed() {
this.isBlack = false;
}
public RBNode getLeftChild() {
return leftChild;
}
public void setLeftChild(RBNode leftChild) {
this.leftChild = leftChild;
if (leftChild != null) {
leftChild.setParent(this);
}
}
public RBNode getRightChild() {
return rightChild;
}
public void setRightChild(RBNode rightChild) {
this.rightChild = rightChild;
if (rightChild != null) {
rightChild.setParent(this);
}
}
public boolean isLeaf() {
return !hasLeftChild() && !hasRightChild();
}
public boolean hasLeftChild() {
return leftChild != null;
}
public boolean hasRightChild() {
return rightChild != null;
}
public boolean contains(int i) {
if (getKey() == i) {
return true;
} else {
if (i < getKey() && hasLeftChild()) {
return getLeftChild().contains(i);
} else if (hasRightChild()) {
return getRightChild().contains(i);
}
}
return false;
}
// TODO: ask about keys being unique
public void insert(RBNode newNode) {
if (newNode.getKey() < this.getKey()) {
if (this.hasLeftChild()) {
this.getLeftChild().insert(newNode);
} else {
this.setLeftChild(newNode);
}
} else if (newNode.getKey() > this.getKey()) {
if (this.hasRightChild()) {
this.getRightChild().insert(newNode);
} else {
this.setRightChild(newNode);
}
}
}
// TODO: remove this
public String toString() {
String st;
st = "[ " + getKey() + " [";
st += hasLeftChild() ? getLeftChild().toString() : "x";
st += "] [";
st += hasRightChild() ? getRightChild().toString() : "x";
st += " ]]";
return st;
}
}
/**
* @original author Shai Vardi
* Modified for semester 2011/2012 a
*/
}
|
no longer need to ask about value uniqueness
|
src/RBTree.java
|
no longer need to ask about value uniqueness
|
<ide><path>src/RBTree.java
<ide> return false;
<ide> }
<ide>
<del> // TODO: ask about keys being unique
<ide> public void insert(RBNode newNode) {
<ide> if (newNode.getKey() < this.getKey()) {
<ide> if (this.hasLeftChild()) {
|
|
JavaScript
|
mit
|
51abd511b8b09bedcaeb04c4106e2cc85756c1e4
| 0 |
SockDrawer/SockBot
|
'use strict';
const utils = require('./utils');
const fs = require('fs'),
yaml = require('js-yaml');
/**
* Default configuration options
*
* @readonly
* @type {object}
*/
const defaultConfig = {
/**
* Core configuration options
*
* @type {object}
*/
core: {
/**
* Username the bot will log in as
*
* @default
* @type {string}
*/
username: '',
/**
* Password the bot will log in with
*
* @default
* @type {string}
*/
password: '',
/**
* User the bot will consider owner
*
* Owner promotes the user to virtual trust level 9 (above forum admins)
*
* @default
* @type {string}
*/
owner: 'accalia',
/**
* Base URL for the discourse instance to log into
*
* Is case sensitive
*
* @default
* @type {string}
*/
forum: 'https://what.thedailywtf.com',
/**
* Users to ignore.
*
* Ignoring users demotes them internally to virtual trust level 0. Forum staff cannot be ignored.
*
* @default
* @type {string[]}
*/
ignoreUsers: [
'blakeyrat', 'PaulaBean'
],
/**
* Discourse categories to ignore
*
* Posts from ignored categories will not trigger bot.
*
* @default
* @type {Number[]}
*/
ignoreCategories: [8, 23],
/**
* Cooldown timer for users that map to virtual trust level 1 or lower
*
* @default
* @type {Number}
*/
cooldownPeriod: 3.6E6,
/**
* Switch to handle `acted` type channel messages.
*
* This type of message is often not needed for bot operation and generates a fair bit of traffic.
* Disabling reduces load on the host forum.
*
* @default
* @type {boolean}
*/
handleActedMessage: false,
/**
* Set whether to poll for messages.
*
* If the bot only needs to handle notifications, set this to `false` to reduce load on the host forum.
*
* Note: Setting this to `false` will cause notifications to be polled less frequently;
* leave `true` if you want a more responsive bot
*
* @default
* @type {boolean}
*/
pollMessages: true,
/**
* Set whether to poll for notifications.
*
* For bots, this will normally be left `true`.
* For cyberparts, set this to `false` to stop the bot marking notifications as read.
*
* @default
* @type {boolean}
*/
pollNotifications: true
},
/**
* Plugin configuration.
*
* See `Plugin Configuration` for details
*
* @type {object}
*/
plugins: {}
};
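/*
 * For illustration only: a minimal YAML file that the loader below could
 * parse into the shape documented above. All values shown here (username,
 * owner, forum URL) are hypothetical placeholders, not real defaults.
 *
 *     core:
 *       username: 'mybot'
 *       password: 'supersecret'
 *       owner: 'botadmin'
 *       forum: 'https://discourse.example.com'
 *       pollMessages: true
 *       pollNotifications: true
 *     plugins: {}
 */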
/**
* Read and parse configuration file from disc
*
* @param {string} path Path of file to read
* @param {configComplete} callback Completion callback
*/
function readFile(path, callback) {
if (!path || typeof path !== 'string') {
callback(new Error('Path must be a string'));
return;
}
fs.readFile(path, (err, data) => {
if (err) {
return callback(err);
}
// Remove UTF-8 BOM if present
if (data.length >= 3 && data[0] === 0xef &&
data[1] === 0xbb && data[2] === 0xbf) {
data = data.slice(3);
}
try {
callback(null, yaml.safeLoad(data));
} catch (e) {
callback(e);
}
});
}
/**
* Load configuration from disc
*
* @param {string} path Configuration file path
* @param {configComplete} callback Completion callback
*/
exports.loadConfiguration = function loadConfiguration(path, callback) {
readFile(path, (err, config) => {
if (err) {
callback(err);
return;
}
try {
const cfg = utils.mergeObjects(true, defaultConfig, config);
exports.core = cfg.core;
exports.plugins = cfg.plugins;
callback(null, cfg);
} catch (e) {
callback(e);
}
});
};
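/*
 * A minimal usage sketch, not part of the module: it assumes this file is
 * required as './config' and that a 'config.yml' file exists; both paths
 * are hypothetical.
 *
 *     const config = require('./config');
 *     config.loadConfiguration('config.yml', (err, cfg) => {
 *         if (err) {
 *             return console.error('Failed to load configuration:', err);
 *         }
 *         console.log('Bot will log in as', cfg.core.username);
 *     });
 */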
/**
* Configuration Loaded Callback
*
* @callback
* @name configComplete
* @param {Exception} [err=null] Error encountered processing request
* @param {Object} config Loaded Configuration
*/
const config = JSON.parse(JSON.stringify(defaultConfig));
/**
* Current core configuration
*
* Set by internals. Do not edit
*
* @readonly
*/
exports.core = config.core;
/**
* Current plugin configuration
*
* Set by internals. Do not edit
*
* @readonly
*/
exports.plugins = config.plugins;
/**
* Current logged in user
*
* Set by internals. Do not edit
*
* @readonly
*/
exports.user = {};
/* istanbul ignore else */
if (typeof GLOBAL.describe === 'function') {
exports.internals = {
readFile: readFile,
defaultConfig: defaultConfig
};
}
|
lib/config.js
|
'use strict';
const utils = require('./utils');
const fs = require('fs'),
yaml = require('js-yaml');
/**
* Default configuration options
*
* @readonly
* @type {object}
*/
const defaultConfig = {
/**
* Core configuration options
*
* @type {object}
*/
core: {
/**
* Username the bot will log in as
*
* @default
* @type {string}
*/
username: '',
/**
* Password the bot will log in with
*
* @default
* @type {string}
*/
password: '',
/**
* User the bot will consider owner
*
* Owner promotes the user to virtual trust level 9 (above forum admins)
*
* @default
* @type {string}
*/
owner: 'accalia',
/**
* Base URL for the discourse instance to log into
*
* Is case sensitive
*
* @default
* @type {string}
*/
forum: 'https://what.thedailywtf.com',
/**
* Users to ignore.
*
* Ignoring users demotes them internally to virtual trust level 0. Forum staff cannot be ignored.
*
* @default
* @type {string[]}
*/
ignoreUsers: [
'blakeyrat', 'PaulaBean'
],
/**
* Discourse categories to ignore
*
* Posts from ignored categories will not trigger bot.
*
* @default
* @type {Number[]}
*/
ignoreCategories: [8, 23],
/**
* Cooldown timer for users that map to virtual trust level 1 or lower
*
* @default
* @type {Number}
*/
cooldownPeriod: 3.6E6,
/**
* Switch to handle `acted` type channel messages.
*
* This type of message is often not needed for bot operation and generates a fair bit of traffic.
* Disabling reduces load on the host forum.
*
* @default
* @type {boolean}
*/
handleActedMessage: false,
/**
* Set whether to poll for messages.
*
* If the bot only needs to handle notifications, set this to `false` to reduce load on the host forum.
*
* Note: Setting this to `false` will cause notifications to be polled less frequently;
* leave `true` if you want a more responsive bot
*
* @default
* @type {boolean}
*/
pollMessages: true,
/**
* Set whether to poll for notifications.
*
* For bots, this will normally be left `true`.
* For cyberparts, set this to `false` to stop the bot marking notifications as read.
*
* @default
* @type {boolean}
*/
pollNotifications: true
},
/**
* Plugin configuration.
*
* See `Plugin Configuration` for details
*
* @type {object}
*/
plugins: {}
};
/**
* Read and parse configuration file from disc
*
* @param {string} path Path of file to read
* @param {configComplete} callback Completion callback
*/
function readFile(path, callback) {
if (!path || typeof path !== 'string') {
callback(new Error('Path must be a string'));
return;
}
fs.readFile(path, (err, data) => {
if (err) {
return callback(err);
}
// Remove UTF-8 BOM if present
if (data.length >= 3 && data[0] === 0xef &&
data[1] === 0xbb && data[2] === 0xbf) {
data = data.slice(3);
}
try {
callback(null, yaml.safeLoad(data));
} catch (e) {
callback(e);
}
});
}
/**
* Load configuration from disc
*
* @param {string} path Configuration file path
* @param {configComplete} callback Completion callback
*/
exports.loadConfiguration = function loadConfiguration(path, callback) {
readFile(path, (err, config) => {
if (err) {
callback(err);
return;
}
try {
const cfg = utils.mergeObjects(true, defaultConfig, config);
exports.core = cfg.core;
exports.plugins = cfg.plugins;
callback(null, cfg);
} catch (e) {
callback(e);
}
});
};
/**
* Configuration Loaded Callback
*
* @callback
* @name configComplete
* @param {Exception} [err=null] Error encountered processing request
* @param {Object} config Loaded Configuration
*/
const config = JSON.parse(JSON.stringify(defaultConfig));
/**
* Current core configuration
*
* Set by internals. Do not edit
*
* @readonly
*/
exports.core = config.core;
/**
* Current plugin configuration
*
* Set by internals. Do not edit
*
* @readonly
*/
exports.plugins = config.plugins;
/**
* Current logged in user
*
* Set by internals. Do not edit
*
* @readonly
*/
exports.user = {};
/* istanbul ignore else */
if (typeof GLOBAL.describe === 'function') {
exports.internals = {
readFile: readFile,
defaultConfig: defaultConfig
};
}
|
Eslint warning: remove trailing space from docs... done!
|
lib/config.js
|
Eslint warning: remove trailing space from docs... done!
|
<ide><path>lib/config.js
<ide> /**
<ide> * Base URL for the discourse instance to log into
<ide> *
<del> * Is case sensitive
<add> * Is case sensitive
<ide> *
<ide> * @default
<ide> * @type {string}
|
|
Java
|
apache-2.0
|
13c4e18436064ed0fc1634acec221a6e14c3dbdd
| 0 |
allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community
|
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.diagnostic;
import com.intellij.featureStatistics.fusCollectors.LifecycleUsageTriggerCollector;
import com.intellij.internal.DebugAttachDetector;
import com.intellij.internal.statistic.eventLog.EventLogGroup;
import com.intellij.internal.statistic.eventLog.events.EventFields;
import com.intellij.internal.statistic.eventLog.events.EventId;
import com.intellij.internal.statistic.eventLog.events.EventId1;
import com.intellij.internal.statistic.service.fus.collectors.CounterUsagesCollector;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.util.concurrency.AppExecutorUtil;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
final class IdeHeartbeatEventReporter implements Disposable {
private static final int UI_RESPONSE_LOGGING_INTERVAL_MS = 100_000;
private static final int TOLERABLE_UI_LATENCY = 100;
private final ScheduledExecutorService myExecutor = AppExecutorUtil.createBoundedScheduledExecutorService("IDE Heartbeat", 1);
private final ScheduledFuture<?> myThread;
private volatile long myPreviousLoggedUIResponse = 0;
IdeHeartbeatEventReporter() {
boolean isDebugEnabled = DebugAttachDetector.isDebugEnabled();
ApplicationManager.getApplication().getMessageBus().connect(this)
.subscribe(IdePerformanceListener.TOPIC, new IdePerformanceListener() {
@Override
public void uiFreezeFinished(long durationMs, @Nullable File reportDir) {
if (!isDebugEnabled) {
LifecycleUsageTriggerCollector.onFreeze(durationMs);
}
}
@Override
public void uiResponded(long latencyMs) {
final long currentTime = System.currentTimeMillis();
if (currentTime - myPreviousLoggedUIResponse >= UI_RESPONSE_LOGGING_INTERVAL_MS) {
myPreviousLoggedUIResponse = currentTime;
UILatencyLogger.LATENCY.log(latencyMs);
}
if (latencyMs >= TOLERABLE_UI_LATENCY && !isDebugEnabled) {
UILatencyLogger.LAGGING.log(latencyMs);
}
}
});
myThread = myExecutor.scheduleWithFixedDelay(
IdeHeartbeatEventReporter::recordHeartbeat,
UI_RESPONSE_LOGGING_INTERVAL_MS, UI_RESPONSE_LOGGING_INTERVAL_MS, TimeUnit.MILLISECONDS
);
}
private static void recordHeartbeat() {
UILatencyLogger.HEARTBEAT.log();
}
@Override
public void dispose() {
if (myThread != null) {
myThread.cancel(true);
}
myExecutor.shutdownNow();
}
public static final class UILatencyLogger extends CounterUsagesCollector {
private static final EventLogGroup GROUP = new EventLogGroup("performance", 58);
private static final EventId HEARTBEAT = GROUP.registerEvent("heartbeat");
private static final EventId1<Long> LATENCY = GROUP.registerEvent("ui.latency", EventFields.DurationMs);
private static final EventId1<Long> LAGGING = GROUP.registerEvent("ui.lagging", EventFields.DurationMs);
@Override
public EventLogGroup getGroup() {
return GROUP;
}
}
}
|
platform/platform-impl/src/com/intellij/diagnostic/IdeHeartbeatEventReporter.java
|
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.diagnostic;
import com.intellij.featureStatistics.fusCollectors.LifecycleUsageTriggerCollector;
import com.intellij.internal.DebugAttachDetector;
import com.intellij.internal.statistic.eventLog.EventLogGroup;
import com.intellij.internal.statistic.eventLog.events.EventFields;
import com.intellij.internal.statistic.eventLog.events.EventId1;
import com.intellij.internal.statistic.service.fus.collectors.CounterUsagesCollector;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import org.jetbrains.annotations.Nullable;
import java.io.File;
final class IdeHeartbeatEventReporter implements Disposable {
private static final int UI_RESPONSE_LOGGING_INTERVAL_MS = 100_000;
private static final int TOLERABLE_UI_LATENCY = 100;
private volatile long myPreviousLoggedUIResponse = 0;
IdeHeartbeatEventReporter() {
boolean isDebugEnabled = DebugAttachDetector.isDebugEnabled();
ApplicationManager.getApplication().getMessageBus().connect(this)
.subscribe(IdePerformanceListener.TOPIC, new IdePerformanceListener() {
@Override
public void uiFreezeFinished(long durationMs, @Nullable File reportDir) {
if (!isDebugEnabled) {
LifecycleUsageTriggerCollector.onFreeze(durationMs);
}
}
@Override
public void uiResponded(long latencyMs) {
final long currentTime = System.currentTimeMillis();
if (currentTime - myPreviousLoggedUIResponse >= UI_RESPONSE_LOGGING_INTERVAL_MS) {
myPreviousLoggedUIResponse = currentTime;
UILatencyLogger.LATENCY.log(latencyMs);
}
if (latencyMs >= TOLERABLE_UI_LATENCY && !isDebugEnabled) {
UILatencyLogger.LAGGING.log(latencyMs);
}
}
});
}
@Override
public void dispose() {
}
public static final class UILatencyLogger extends CounterUsagesCollector {
private static final EventLogGroup GROUP = new EventLogGroup("performance", 57);
private static final EventId1<Long> LATENCY = GROUP.registerEvent("ui.latency", EventFields.DurationMs);
private static final EventId1<Long> LAGGING = GROUP.registerEvent("ui.lagging", EventFields.DurationMs);
@Override
public EventLogGroup getGroup() {
return GROUP;
}
}
}
|
FUS: record heartbeat events from regular thread (IDEA-259780)
GitOrigin-RevId: 5489a326fca769650b79c17add68b54850cb70f7
|
platform/platform-impl/src/com/intellij/diagnostic/IdeHeartbeatEventReporter.java
|
FUS: record heartbeat events from regular thread (IDEA-259780)
|
<ide><path>platform/platform-impl/src/com/intellij/diagnostic/IdeHeartbeatEventReporter.java
<ide> import com.intellij.internal.DebugAttachDetector;
<ide> import com.intellij.internal.statistic.eventLog.EventLogGroup;
<ide> import com.intellij.internal.statistic.eventLog.events.EventFields;
<add>import com.intellij.internal.statistic.eventLog.events.EventId;
<ide> import com.intellij.internal.statistic.eventLog.events.EventId1;
<ide> import com.intellij.internal.statistic.service.fus.collectors.CounterUsagesCollector;
<ide> import com.intellij.openapi.Disposable;
<ide> import com.intellij.openapi.application.ApplicationManager;
<add>import com.intellij.util.concurrency.AppExecutorUtil;
<ide> import org.jetbrains.annotations.Nullable;
<ide>
<ide> import java.io.File;
<add>import java.util.concurrent.ScheduledExecutorService;
<add>import java.util.concurrent.ScheduledFuture;
<add>import java.util.concurrent.TimeUnit;
<ide>
<ide> final class IdeHeartbeatEventReporter implements Disposable {
<ide> private static final int UI_RESPONSE_LOGGING_INTERVAL_MS = 100_000;
<ide> private static final int TOLERABLE_UI_LATENCY = 100;
<add>
<add> private final ScheduledExecutorService myExecutor = AppExecutorUtil.createBoundedScheduledExecutorService("IDE Heartbeat", 1);
<add> private final ScheduledFuture<?> myThread;
<ide>
<ide> private volatile long myPreviousLoggedUIResponse = 0;
<ide>
<ide> }
<ide> }
<ide> });
<add>
<add> myThread = myExecutor.scheduleWithFixedDelay(
<add> IdeHeartbeatEventReporter::recordHeartbeat,
<add> UI_RESPONSE_LOGGING_INTERVAL_MS, UI_RESPONSE_LOGGING_INTERVAL_MS, TimeUnit.MILLISECONDS
<add> );
<ide> }
<ide>
<add> private static void recordHeartbeat() {
<add> UILatencyLogger.HEARTBEAT.log();
<add> }
<ide>
<ide> @Override
<ide> public void dispose() {
<del>
<add> if (myThread != null) {
<add> myThread.cancel(true);
<add> }
<add> myExecutor.shutdownNow();
<ide> }
<ide>
<ide> public static final class UILatencyLogger extends CounterUsagesCollector {
<del> private static final EventLogGroup GROUP = new EventLogGroup("performance", 57);
<add> private static final EventLogGroup GROUP = new EventLogGroup("performance", 58);
<ide>
<add> private static final EventId HEARTBEAT = GROUP.registerEvent("heartbeat");
<ide> private static final EventId1<Long> LATENCY = GROUP.registerEvent("ui.latency", EventFields.DurationMs);
<ide> private static final EventId1<Long> LAGGING = GROUP.registerEvent("ui.lagging", EventFields.DurationMs);
<ide>
|
|
JavaScript
|
mit
|
e1656ea47c584ba65338a16302335a85f9be6180
| 0 |
four43/node-crisp-cache
|
var assert = require('assert'),
async = require('async'),
CacheEntry = require('../lib/CacheEntry'),
CrispCache = require('../main'),
seed = require('seed-random'),
sinon = require('sinon');
var data = {
hello: "world",
foo: "bar",
arr: [1, 2, 3],
hash: {key: "value", nested: [4, 5, 6]}
};
function fetcher(key, callback) {
setTimeout(function () {
return callback(null, data[key]);
}, 1);
}
function fetcherBad(key, callback) {
callback(new Error("There was a problem with the fetcher"));
}
describe("CrispCache", function () {
describe("Setup Sanity", function () {
it("Should complain if we have no fetcher", function () {
assert.throws(
function () {
new CrispCache();
},
"Should complain that we don't have a fetcher!"
);
});
it("Should not setup LRU", function () {
var crispCache = new CrispCache({
fetcher: function (key) {
}
});
assert.equal(crispCache._lru, undefined);
});
});
describe("Get - Basic", function () {
var clock,
crispCacheBasic,
fetcherSpy;
beforeEach(function () {
fetcherSpy = sinon.spy(fetcher);
crispCacheBasic = new CrispCache({
fetcher: fetcherSpy,
defaultStaleTtl: 300,
defaultExpiresTtl: 500
})
});
afterEach(function () {
if (clock) {
clock.restore();
}
});
it("Should fetch a key", function (done) {
crispCacheBasic.get('hello', function (err, value) {
assert.equal(err, null);
assert.equal(value, 'world');
done();
});
});
it("Should not fetch a missing key", function (done) {
crispCacheBasic.get('hello', {skipFetch: true}, function (err, value) {
assert.equal(err, null);
assert.equal(value, undefined);
assert.equal(fetcherSpy.callCount, 0);
done();
});
});
it("Should fetch a key from cache", function (done) {
crispCacheBasic.get('hello', function (err, value) {
assert.equal(err, null);
assert.equal(value, 'world');
crispCacheBasic.get('hello', function (err, value) {
assert.equal(value, 'world');
assert.ok(fetcherSpy.calledOnce);
done();
});
});
});
it("Should fetch a stale key", function (done) {
clock = sinon.useFakeTimers();
crispCacheBasic.get('hello', function (err, value) {
assert.equal(err, null);
assert.equal(value, 'world');
clock.tick(301);
crispCacheBasic.get('hello', function (err, value) {
assert.equal(err, null);
assert.equal(value, 'world');
assert.equal(fetcherSpy.callCount, 1);
done();
});
clock.tick(10);
});
clock.tick(10);
});
it("Should re-fetch an expired key", function (done) {
clock = sinon.useFakeTimers();
crispCacheBasic.get('hello', function (err, value) {
assert.equal(err, null);
assert.equal(value, 'world');
clock.tick(1000);
crispCacheBasic.get('hello', function (err, value) {
assert.equal(err, null);
assert.equal(value, 'world');
assert.equal(fetcherSpy.callCount, 2);
done();
});
clock.tick(10);
});
clock.tick(10);
});
it("Should not re-fetch an expired key", function (done) {
clock = sinon.useFakeTimers();
crispCacheBasic.get('hello', function (err, value) {
assert.equal(err, null);
assert.equal(value, 'world');
clock.tick(1000);
crispCacheBasic.get('hello', {skipFetch: true}, function (err, value) {
assert.equal(err, null);
assert.equal(value, undefined);
assert.equal(fetcherSpy.callCount, 1);
done();
});
clock.tick(10);
});
clock.tick(10);
});
});
describe("Get - Advanced", function () {
var clock,
crispCacheBasic,
fetcherSpy;
beforeEach(function () {
fetcherSpy = sinon.spy(fetcher);
crispCacheBasic = new CrispCache({
fetcher: fetcherSpy,
defaultStaleTtl: 300,
defaultExpiresTtl: 500
})
});
afterEach(function () {
if (clock) {
clock.restore();
}
seed.resetGlobal();
});
it("Should fetch a key - force fetch", function (done) {
crispCacheBasic.get('hello', function (err, value) {
assert.equal(err, null);
assert.equal(value, 'world');
crispCacheBasic.get('hello', {forceFetch: true}, function (err, value) {
assert.equal(fetcherSpy.callCount, 2);
assert.equal(err, null);
assert.equal(value, 'world');
done();
});
});
});
it("Should only fetch once for 2 cache misses (locking)", function (done) {
clock = sinon.useFakeTimers();
async.parallel([
function (callback) {
crispCacheBasic.get('hello', callback);
},
function (callback) {
crispCacheBasic.get('hello', callback);
clock.tick(1000);
}
],
function (err, results) {
assert.equal(err, null);
assert.equal(results[0], 'world');
assert.equal(results[1], 'world');
assert.equal(fetcherSpy.callCount, 1);
done();
});
});
it("Should propagate the error from the fetcher", function (done) {
crispCacheBasic = new CrispCache({
fetcher: fetcherBad
});
crispCacheBasic.get('hello', function (err, value) {
assert.ok(err);
assert.equal(err.message, "There was a problem with the fetcher");
assert.equal(value, undefined);
done();
});
});
it("Should assign varying staleTTLs based on variance", function (done) {
seed('foo', {global: true});
crispCacheBasic = new CrispCache({
fetcher: function () {
},
defaultStaleTtl: 300,
staleTtlVariance: 50,
defaultExpiresTtl: 500
});
crispCacheBasic.set('a', 'hello', function (err, result) {
assert.ok(crispCacheBasic.cache['a'].staleTtl >= 250 && crispCacheBasic.cache['a'].staleTtl <= 350);
assert.equal(crispCacheBasic.cache['a'].expiresTtl, 500);
crispCacheBasic.set('b', 'world', function (err, result) {
assert.ok(crispCacheBasic.cache['b'].staleTtl >= 250 && crispCacheBasic.cache['b'].staleTtl <= 350);
assert.equal(crispCacheBasic.cache['b'].expiresTtl, 500);
assert.notEqual(crispCacheBasic.cache['a'].staleTtl, crispCacheBasic.cache['b'].staleTtl);
done();
});
});
});
it("Should assign varying expireTTLs based on variance", function (done) {
seed('foo', {global: true});
crispCacheBasic = new CrispCache({
fetcher: function () {
},
defaultStaleTtl: 300,
defaultExpiresTtl: 500,
expiresTtlVariance: 100
});
crispCacheBasic.set('a', 'hello', function (err, result) {
assert.ok(crispCacheBasic.cache['a'].expiresTtl >= 400 && crispCacheBasic.cache['a'].expiresTtl <= 600);
assert.equal(crispCacheBasic.cache['a'].staleTtl, 300);
crispCacheBasic.set('b', 'world', function (err, result) {
assert.ok(crispCacheBasic.cache['b'].expiresTtl >= 400 && crispCacheBasic.cache['b'].expiresTtl <= 600);
assert.equal(crispCacheBasic.cache['b'].staleTtl, 300);
assert.notEqual(crispCacheBasic.cache['a'].expiresTtl, crispCacheBasic.cache['b'].expiresTtl);
done();
});
});
});
it("Should assign varying expireTTLs and staleTTLs based on variance", function (done) {
seed('foo', {global: true});
crispCacheBasic = new CrispCache({
fetcher: function () {
},
defaultStaleTtl: 300,
defaultExpiresTtl: 500,
ttlVariance: 100
});
crispCacheBasic.set('a', 'hello', function (err, result) {
assert.ok(crispCacheBasic.cache['a'].staleTtl >= 200 && crispCacheBasic.cache['a'].staleTtl <= 400);
assert.ok(crispCacheBasic.cache['a'].expiresTtl >= 400 && crispCacheBasic.cache['a'].expiresTtl <= 600);
crispCacheBasic.set('b', 'world', function (err, result) {
assert.ok(crispCacheBasic.cache['b'].staleTtl >= 200 && crispCacheBasic.cache['b'].staleTtl <= 400);
assert.ok(crispCacheBasic.cache['a'].expiresTtl >= 400 && crispCacheBasic.cache['a'].expiresTtl <= 600);
assert.notEqual(crispCacheBasic.cache['a'].staleTtl, crispCacheBasic.cache['b'].staleTtl);
assert.notEqual(crispCacheBasic.cache['a'].expiresTtl, crispCacheBasic.cache['b'].expiresTtl);
done();
});
});
});
});
describe("Get - Events", function() {
var crispCache,
hitSpy,
missSpy;
beforeEach(function () {
hitSpy = sinon.spy(function(obj) {
return obj;
});
missSpy = sinon.spy(function(obj) {
return obj;
});
crispCache = new CrispCache({
fetcher: fetcher,
defaultStaleTtl: 300,
defaultExpiresTtl: 500
});
crispCache.on('hit', hitSpy);
crispCache.on('miss', missSpy);
});
it("Should emit hit when getting from cache", function (done) {
crispCache.get('hello', function (err, value) {
assert.equal(missSpy.callCount, 1);
assert.equal(hitSpy.callCount, 0);
crispCache.get('hello', function (err, value) {
assert.equal(missSpy.callCount, 1);
assert.equal(hitSpy.callCount, 1);
done();
});
});
});
it("Should emit events with correct values", function (done) {
crispCache.get('hello', function (err, value) {
assert.equal(missSpy.callCount, 1);
assert.ok(missSpy.returned({ key: 'hello' }));
assert.equal(hitSpy.callCount, 0);
crispCache.get('hello', function (err, value) {
assert.equal(missSpy.callCount, 1);
assert.equal(hitSpy.callCount, 1);
assert.ok(hitSpy.lastCall.returnValue.entry instanceof CacheEntry);
assert.equal(hitSpy.lastCall.returnValue.entry.value, 'world');
done();
});
});
});
it("Should emit miss twice on force fetch", function (done) {
crispCache.get('hello', function (err, value) {
assert.equal(missSpy.callCount, 1);
assert.equal(hitSpy.callCount, 0);
crispCache.get('hello', {forceFetch: true}, function (err, value) {
assert.equal(missSpy.callCount, 2);
assert.equal(hitSpy.callCount, 0);
done();
});
});
});
});
describe("Set - Basic", function () {
var crispCacheBasic;
beforeEach(function () {
crispCacheBasic = new CrispCache({
fetcher: function (key, callback) {
callback(null, 'fetcher value')
},
defaultStaleTtl: 300,
defaultExpiresTtl: 500
})
});
it("Should set a key to the cache", function (done) {
crispCacheBasic.set("testA", "The Value", function (err, success) {
crispCacheBasic.get('testA', function (err, value) {
assert.equal(value, 'The Value');
done();
});
})
});
it("Should skip cache with TTL of 0", function (done) {
crispCacheBasic.set("testExpires", "The Value", {expiresTtl: 0}, function (err, success) {
//This isn't great but the only way to really make sure it wasn't set to the cache at all.
assert.equal(crispCacheBasic.cache['testA'], undefined);
crispCacheBasic.get('testA', function (err, value) {
assert.equal(value, 'fetcher value');
done();
});
})
});
});
describe("Set - Advanced", function () {
var clock,
crispCacheBasic;
beforeEach(function () {
crispCacheBasic = new CrispCache({
fetcher: function (key, callback) {
callback(null, 'fetcher', {staleTtl: 123, expiresTtl: 456})
},
defaultStaleTtl: 300,
defaultExpiresTtl: 500
})
});
afterEach(function () {
if (clock) {
clock.restore();
}
});
it("Should set with different TTL", function (done) {
clock = sinon.useFakeTimers();
crispCacheBasic.get('testA', function (err, value) {
assert.equal(err, null);
assert.equal(value, 'fetcher');
assert.equal(crispCacheBasic.cache['testA'].staleTtl, 123);
assert.equal(crispCacheBasic.cache['testA'].expiresTtl, 456);
done();
});
});
it("Should set with different TTL for existing entry", function (done) {
clock = sinon.useFakeTimers();
crispCacheBasic.set('testA', 'hello', {staleTtl: 200, expiresTtl: 300}, function (err, value) {
clock.tick(301);
crispCacheBasic.get('testA', function (err, value) {
assert.equal(err, null);
assert.equal(value, 'fetcher');
assert.equal(crispCacheBasic.cache['testA'].staleTtl, 123);
assert.equal(crispCacheBasic.cache['testA'].expiresTtl, 456);
done();
});
})
});
});
describe("Del - Basic", function () {
var crispCacheBasic,
fetcherSpy;
beforeEach(function () {
fetcherSpy = sinon.spy(fetcher);
crispCacheBasic = new CrispCache({
fetcher: fetcherSpy,
defaultStaleTtl: 300,
defaultExpiresTtl: 500
})
});
it("Should delete a key", function (done) {
async.waterfall([
function (callback) {
return crispCacheBasic.get('hello', callback);
},
function (value, callback) {
assert.equal(value, 'world');
return crispCacheBasic.del('hello', callback);
},
function (value, callback) {
assert.equal(true, value);
crispCacheBasic.get('hello', {skipFetch: true}, callback);
}
], function (err, value) {
assert.equal(err, null);
assert.equal(value, undefined);
assert.equal(fetcherSpy.callCount, 1);
done();
});
});
});
var staleCheckSpy;
describe("StaleCheck - Auto refresh cache", function () {
var clock,
crispCacheBasic,
fetcherSpy;
beforeEach(function () {
clock = sinon.useFakeTimers();
fetcherSpy = sinon.spy(fetcher);
if (!CrispCache.prototype._staleCheck_orig) {
CrispCache.prototype._staleCheck_orig = CrispCache.prototype._staleCheck;
}
staleCheckSpy = sinon.spy(CrispCache.prototype._staleCheck_orig);
CrispCache.prototype._staleCheck = staleCheckSpy;
crispCacheBasic = new CrispCache({
fetcher: fetcherSpy,
defaultStaleTtl: 300,
defaultExpiresTtl: 500,
staleCheckInterval: 100
});
});
afterEach(function () {
if (clock) {
clock.restore();
}
if (CrispCache.prototype._staleCheck_orig) {
CrispCache.prototype._staleCheck = CrispCache.prototype._staleCheck_orig;
}
});
it("Should update the cache without get", function (done) {
async.waterfall([
function (callback) {
crispCacheBasic.get('hello', callback);
clock.tick(10);
},
function (value, callback) {
assert.equal(value, 'world');
clock.tick(401);
callback();
},
function (callback) {
assert.equal(staleCheckSpy.callCount, 4);
clock.tick(10);
assert.equal(fetcherSpy.callCount, 2);
crispCacheBasic.get('hello', callback);
clock.tick(10);
}
],
function (err, value) {
assert.equal(err, null);
assert.equal(value, 'world');
done();
});
});
});
var delSpy;
describe("ExpiresCheck - Auto clean cache", function () {
var clock,
crispCacheBasic,
fetcherSpy;
beforeEach(function () {
clock = sinon.useFakeTimers();
if (!CrispCache.prototype._del_orig) {
CrispCache.prototype._del_orig = CrispCache.prototype.del;
}
delSpy = sinon.spy(CrispCache.prototype._del_orig);
CrispCache.prototype.del = delSpy;
crispCacheBasic = new CrispCache({
fetcher: fetcher,
defaultStaleTtl: 300,
defaultExpiresTtl: 500,
evictCheckInterval: 100
});
});
afterEach(function () {
if (clock) {
clock.restore();
}
});
it("Should expire the cache without asking", function (done) {
async.waterfall([
function (callback) {
crispCacheBasic.get('hello', callback);
clock.tick(10);
},
function (value, callback) {
assert.equal(value, 'world');
clock.tick(600);
callback();
},
function (callback) {
assert.equal(delSpy.callCount, 1);
assert.equal(Object.keys(crispCacheBasic.cache).length, 0);
clock.tick(10);
crispCacheBasic.get('hello', {skipFetch: true}, callback);
}
],
function (err, value) {
assert.equal(err, null);
assert.equal(value, undefined);
done();
});
});
});
describe("LRU Integration", function () {
var clock,
crispCacheBasic,
fetcherSpy;
beforeEach(function () {
clock = sinon.useFakeTimers();
crispCacheBasic = new CrispCache({
fetcher: fetcher,
maxSize: 10,
defaultExpiresTtl: 50,
evictCheckInterval: 100
});
});
it("Should increase LRU size", function (done) {
crispCacheBasic.set("testA", "The Value", {size: 3}, function (err, success) {
assert.equal(crispCacheBasic._lru.size, 3);
done();
})
});
it("Should increase LRU size, multiple", function (done) {
crispCacheBasic.set("testA", "The Value", {size: 3}, function (err, success) {
crispCacheBasic.set("testB", "The Value B", {size: 2}, function (err, success) {
assert.equal(crispCacheBasic._lru.size, 5);
done();
});
})
});
it("Should increase LRU size, replace", function (done) {
crispCacheBasic.set("testA", "The Value", {size: 3}, function (err, success) {
crispCacheBasic.set("testA", "The Value B", {size: 4}, function (err, success) {
assert.equal(crispCacheBasic._lru.size, 4);
done();
});
})
});
it("Should update LRU", function (done) {
async.waterfall([
function (callback) {
crispCacheBasic.set("testA", "The Value A", {size: 2}, callback);
},
function (result, callback) {
crispCacheBasic.set("testB", "The Value B", {size: 8}, callback);
},
function (result, callback) {
crispCacheBasic.get("testA", callback);
}
],
function (err, result) {
assert.equal(result, "The Value A");
assert.equal(crispCacheBasic._lru.head.key, 'testA');
done();
});
});
it("Should update LRU without size", function (done) {
crispCacheBasic.set("testA", "The Value A", function (err, result) {
assert.ok(err);
done();
});
});
it("Should remove LRU via crispCache", function (done) {
async.waterfall([
function (callback) {
crispCacheBasic.set("testA", "The Value A", {size: 2}, callback);
},
function (result, callback) {
crispCacheBasic.del("testA", callback);
}
],
function (err, result) {
assert.equal(crispCacheBasic._lru.size, 0);
done();
});
});
it("Should remove LRU", function (done) {
async.waterfall([
function (callback) {
crispCacheBasic.set("testA", "The Value A", {size: 2}, callback);
},
function (result, callback) {
crispCacheBasic.set("testB", "The Value B", {size: 8}, callback);
},
function (result, callback) {
crispCacheBasic.set("testC", "The Value C", {size: 5}, callback);
},
function (result, callback) {
assert.equal(crispCacheBasic._lru.size, 5);
callback();
},
function (callback) {
crispCacheBasic.get("testA", {skipFetch: true}, callback);
},
function (result, callback) {
assert.equal(result, null);
callback();
},
function (callback) {
crispCacheBasic.get("testB", {skipFetch: true}, callback);
}
],
function (err, result) {
assert.equal(result, null);
done();
});
});
it('Should auto-evict entries from LRU cache', function(done) {
crispCacheBasic._lru.del = sinon.spy(crispCacheBasic._lru.del);
crispCacheBasic.set('foo', 'bar', {size: 1}, function(err) {
assert.ifError(err);
clock.tick(101);
assert(crispCacheBasic._lru.del.calledWith('foo'), 'Should evicted expired entry');
done();
});
});
});
});
|
test/crisp-cache-test.js
|
var assert = require('assert'),
async = require('async'),
CrispCache = require('../main'),
seed = require('seed-random'),
sinon = require('sinon');
var data = {
hello: "world",
foo: "bar",
arr: [1, 2, 3],
hash: {key: "value", nested: [4, 5, 6]}
};
function fetcher(key, callback) {
setTimeout(function () {
return callback(null, data[key]);
}, 1);
}
function fetcherBad(key, callback) {
callback(new Error("There was a problem with the fetcher"));
}
describe("CrispCache", function () {
describe("Setup Sanity", function () {
it("Should complain if we have no fetcher", function () {
assert.throws(
function () {
new CrispCache();
},
"Should complain that we don't have a fetcher!"
);
});
it("Should not setup LRU", function () {
var crispCache = new CrispCache({
fetcher: function (key) {
}
});
assert.equal(crispCache._lru, undefined);
});
});
describe("Get - Basic", function () {
var clock,
crispCacheBasic,
fetcherSpy;
beforeEach(function () {
fetcherSpy = sinon.spy(fetcher);
crispCacheBasic = new CrispCache({
fetcher: fetcherSpy,
defaultStaleTtl: 300,
defaultExpiresTtl: 500
})
});
afterEach(function () {
if (clock) {
clock.restore();
}
});
it("Should fetch a key", function (done) {
crispCacheBasic.get('hello', function (err, value) {
assert.equal(err, null);
assert.equal(value, 'world');
done();
});
});
it("Should not fetch a missing key", function (done) {
crispCacheBasic.get('hello', {skipFetch: true}, function (err, value) {
assert.equal(err, null);
assert.equal(value, undefined);
assert.equal(fetcherSpy.callCount, 0);
done();
});
});
it("Should fetch a key from cache", function (done) {
crispCacheBasic.get('hello', function (err, value) {
assert.equal(err, null);
assert.equal(value, 'world');
crispCacheBasic.get('hello', function (err, value) {
assert.equal(value, 'world');
assert.ok(fetcherSpy.calledOnce);
done();
});
});
});
it("Should fetch a stale key", function (done) {
clock = sinon.useFakeTimers();
crispCacheBasic.get('hello', function (err, value) {
assert.equal(err, null);
assert.equal(value, 'world');
clock.tick(301);
crispCacheBasic.get('hello', function (err, value) {
assert.equal(err, null);
assert.equal(value, 'world');
assert.equal(fetcherSpy.callCount, 1);
done();
});
clock.tick(10);
});
clock.tick(10);
});
it("Should re-fetch an expired key", function (done) {
clock = sinon.useFakeTimers();
crispCacheBasic.get('hello', function (err, value) {
assert.equal(err, null);
assert.equal(value, 'world');
clock.tick(1000);
crispCacheBasic.get('hello', function (err, value) {
assert.equal(err, null);
assert.equal(value, 'world');
assert.equal(fetcherSpy.callCount, 2);
done();
});
clock.tick(10);
});
clock.tick(10);
});
it("Should not re-fetch an expired key", function (done) {
clock = sinon.useFakeTimers();
crispCacheBasic.get('hello', function (err, value) {
assert.equal(err, null);
assert.equal(value, 'world');
clock.tick(1000);
crispCacheBasic.get('hello', {skipFetch: true}, function (err, value) {
assert.equal(err, null);
assert.equal(value, undefined);
assert.equal(fetcherSpy.callCount, 1);
done();
});
clock.tick(10);
});
clock.tick(10);
});
});
describe("Get - Advanced", function () {
var clock,
crispCacheBasic,
fetcherSpy;
beforeEach(function () {
fetcherSpy = sinon.spy(fetcher);
crispCacheBasic = new CrispCache({
fetcher: fetcherSpy,
defaultStaleTtl: 300,
defaultExpiresTtl: 500
})
});
afterEach(function () {
if (clock) {
clock.restore();
}
seed.resetGlobal();
});
it("Should fetch a key - force fetch", function (done) {
crispCacheBasic.get('hello', function (err, value) {
assert.equal(err, null);
assert.equal(value, 'world');
crispCacheBasic.get('hello', {forceFetch: true}, function (err, value) {
assert.equal(fetcherSpy.callCount, 2);
assert.equal(err, null);
assert.equal(value, 'world');
done();
});
});
});
it("Should only fetch once for 2 cache misses (locking)", function (done) {
clock = sinon.useFakeTimers();
async.parallel([
function (callback) {
crispCacheBasic.get('hello', callback);
},
function (callback) {
crispCacheBasic.get('hello', callback);
clock.tick(1000);
}
],
function (err, results) {
assert.equal(err, null);
assert.equal(results[0], 'world');
assert.equal(results[1], 'world');
assert.equal(fetcherSpy.callCount, 1);
done();
});
});
it("Should propagate the error from the fetcher", function (done) {
crispCacheBasic = new CrispCache({
fetcher: fetcherBad
});
crispCacheBasic.get('hello', function (err, value) {
assert.ok(err);
assert.equal(err.message, "There was a problem with the fetcher");
assert.equal(value, undefined);
done();
});
});
it("Should assign varying staleTTLs based on variance", function (done) {
seed('foo', {global: true});
crispCacheBasic = new CrispCache({
fetcher: function () {
},
defaultStaleTtl: 300,
staleTtlVariance: 50,
defaultExpiresTtl: 500
});
crispCacheBasic.set('a', 'hello', function (err, result) {
assert.ok(crispCacheBasic.cache['a'].staleTtl >= 250 && crispCacheBasic.cache['a'].staleTtl <= 350);
assert.equal(crispCacheBasic.cache['a'].expiresTtl, 500);
crispCacheBasic.set('b', 'world', function (err, result) {
assert.ok(crispCacheBasic.cache['b'].staleTtl >= 250 && crispCacheBasic.cache['b'].staleTtl <= 350);
assert.equal(crispCacheBasic.cache['b'].expiresTtl, 500);
assert.notEqual(crispCacheBasic.cache['a'].staleTtl, crispCacheBasic.cache['b'].staleTtl);
done();
});
});
});
it("Should assign varying expireTTLs based on variance", function (done) {
seed('foo', {global: true});
crispCacheBasic = new CrispCache({
fetcher: function () {
},
defaultStaleTtl: 300,
defaultExpiresTtl: 500,
expiresTtlVariance: 100
});
crispCacheBasic.set('a', 'hello', function (err, result) {
assert.ok(crispCacheBasic.cache['a'].expiresTtl >= 400 && crispCacheBasic.cache['a'].expiresTtl <= 600);
assert.equal(crispCacheBasic.cache['a'].staleTtl, 300);
crispCacheBasic.set('b', 'world', function (err, result) {
assert.ok(crispCacheBasic.cache['b'].expiresTtl >= 400 && crispCacheBasic.cache['b'].expiresTtl <= 600);
assert.equal(crispCacheBasic.cache['b'].staleTtl, 300);
assert.notEqual(crispCacheBasic.cache['a'].expiresTtl, crispCacheBasic.cache['b'].expiresTtl);
done();
});
});
});
it("Should assign varying expireTTLs and staleTTLs based on variance", function (done) {
seed('foo', {global: true});
crispCacheBasic = new CrispCache({
fetcher: function () {
},
defaultStaleTtl: 300,
defaultExpiresTtl: 500,
ttlVariance: 100
});
crispCacheBasic.set('a', 'hello', function (err, result) {
assert.ok(crispCacheBasic.cache['a'].staleTtl >= 200 && crispCacheBasic.cache['a'].staleTtl <= 400);
assert.ok(crispCacheBasic.cache['a'].expiresTtl >= 400 && crispCacheBasic.cache['a'].expiresTtl <= 600);
crispCacheBasic.set('b', 'world', function (err, result) {
assert.ok(crispCacheBasic.cache['b'].staleTtl >= 200 && crispCacheBasic.cache['b'].staleTtl <= 400);
assert.ok(crispCacheBasic.cache['b'].expiresTtl >= 400 && crispCacheBasic.cache['b'].expiresTtl <= 600);
assert.notEqual(crispCacheBasic.cache['a'].staleTtl, crispCacheBasic.cache['b'].staleTtl);
assert.notEqual(crispCacheBasic.cache['a'].expiresTtl, crispCacheBasic.cache['b'].expiresTtl);
done();
});
});
});
});
describe("Get - Events", function() {
var crispCache,
hitSpy,
missSpy;
beforeEach(function () {
hitSpy = sinon.spy(function(obj) {
return obj;
});
missSpy = sinon.spy(function(obj) {
return obj;
});
crispCache = new CrispCache({
fetcher: fetcher,
defaultStaleTtl: 300,
defaultExpiresTtl: 500
});
crispCache.on('hit', hitSpy);
crispCache.on('miss', missSpy);
});
it("Should emit hit when getting from cache", function (done) {
crispCache.get('hello', function (err, value) {
assert.equal(missSpy.callCount, 1);
assert.equal(hitSpy.callCount, 0);
crispCache.get('hello', function (err, value) {
assert.equal(missSpy.callCount, 1);
assert.equal(hitSpy.callCount, 1);
done();
});
});
});
it("Should emit miss twice on force fetch", function (done) {
crispCache.get('hello', function (err, value) {
assert.equal(missSpy.callCount, 1);
assert.equal(hitSpy.callCount, 0);
crispCache.get('hello', {forceFetch: true}, function (err, value) {
assert.equal(missSpy.callCount, 2);
assert.equal(hitSpy.callCount, 0);
done();
});
});
});
});
describe("Set - Basic", function () {
var crispCacheBasic;
beforeEach(function () {
crispCacheBasic = new CrispCache({
fetcher: function (key, callback) {
callback(null, 'fetcher value')
},
defaultStaleTtl: 300,
defaultExpiresTtl: 500
})
});
it("Should set a key to the cache", function (done) {
crispCacheBasic.set("testA", "The Value", function (err, success) {
crispCacheBasic.get('testA', function (err, value) {
assert.equal(value, 'The Value');
done();
});
})
});
it("Should skip cache with TTL of 0", function (done) {
crispCacheBasic.set("testExpires", "The Value", {expiresTtl: 0}, function (err, success) {
//Inspecting the internal cache map isn't great, but it's the only way to make sure the entry was never cached at all.
assert.equal(crispCacheBasic.cache['testExpires'], undefined);
crispCacheBasic.get('testExpires', function (err, value) {
assert.equal(value, 'fetcher value');
done();
});
})
});
});
describe("Set - Advanced", function () {
var clock,
crispCacheBasic;
beforeEach(function () {
crispCacheBasic = new CrispCache({
fetcher: function (key, callback) {
callback(null, 'fetcher', {staleTtl: 123, expiresTtl: 456})
},
defaultStaleTtl: 300,
defaultExpiresTtl: 500
})
});
afterEach(function () {
if (clock) {
clock.restore();
}
});
it("Should set with different TTL", function (done) {
clock = sinon.useFakeTimers();
crispCacheBasic.get('testA', function (err, value) {
assert.equal(err, null);
assert.equal(value, 'fetcher');
assert.equal(crispCacheBasic.cache['testA'].staleTtl, 123);
assert.equal(crispCacheBasic.cache['testA'].expiresTtl, 456);
done();
});
});
it("Should set with different TTL for existing entry", function (done) {
clock = sinon.useFakeTimers();
crispCacheBasic.set('testA', 'hello', {staleTtl: 200, expiresTtl: 300}, function (err, value) {
clock.tick(301);
crispCacheBasic.get('testA', function (err, value) {
assert.equal(err, null);
assert.equal(value, 'fetcher');
assert.equal(crispCacheBasic.cache['testA'].staleTtl, 123);
assert.equal(crispCacheBasic.cache['testA'].expiresTtl, 456);
done();
});
})
});
});
describe("Del - Basic", function () {
var crispCacheBasic,
fetcherSpy;
beforeEach(function () {
fetcherSpy = sinon.spy(fetcher);
crispCacheBasic = new CrispCache({
fetcher: fetcherSpy,
defaultStaleTtl: 300,
defaultExpiresTtl: 500
})
});
it("Should delete a key", function (done) {
async.waterfall([
function (callback) {
return crispCacheBasic.get('hello', callback);
},
function (value, callback) {
assert.equal(value, 'world');
return crispCacheBasic.del('hello', callback);
},
function (value, callback) {
assert.equal(true, value);
crispCacheBasic.get('hello', {skipFetch: true}, callback);
}
], function (err, value) {
assert.equal(err, null);
assert.equal(value, undefined);
assert.equal(fetcherSpy.callCount, 1);
done();
});
});
});
var staleCheckSpy;
describe("StaleCheck - Auto refresh cache", function () {
var clock,
crispCacheBasic,
fetcherSpy;
beforeEach(function () {
clock = sinon.useFakeTimers();
fetcherSpy = sinon.spy(fetcher);
if (!CrispCache.prototype._staleCheck_orig) {
CrispCache.prototype._staleCheck_orig = CrispCache.prototype._staleCheck;
}
staleCheckSpy = sinon.spy(CrispCache.prototype._staleCheck_orig);
CrispCache.prototype._staleCheck = staleCheckSpy;
crispCacheBasic = new CrispCache({
fetcher: fetcherSpy,
defaultStaleTtl: 300,
defaultExpiresTtl: 500,
staleCheckInterval: 100
});
});
afterEach(function () {
if (clock) {
clock.restore();
}
if (CrispCache.prototype._staleCheck_orig) {
CrispCache.prototype._staleCheck = CrispCache.prototype._staleCheck_orig;
}
});
it("Should update the cache without get", function (done) {
async.waterfall([
function (callback) {
crispCacheBasic.get('hello', callback);
clock.tick(10);
},
function (value, callback) {
assert.equal(value, 'world');
clock.tick(401);
callback();
},
function (callback) {
assert.equal(staleCheckSpy.callCount, 4);
clock.tick(10);
assert.equal(fetcherSpy.callCount, 2);
crispCacheBasic.get('hello', callback);
clock.tick(10);
}
],
function (err, value) {
assert.equal(err, null);
assert.equal(value, 'world');
done();
});
});
});
var delSpy;
describe("ExpiresCheck - Auto clean cache", function () {
var clock,
crispCacheBasic,
fetcherSpy;
beforeEach(function () {
clock = sinon.useFakeTimers();
if (!CrispCache.prototype._del_orig) {
CrispCache.prototype._del_orig = CrispCache.prototype.del;
}
delSpy = sinon.spy(CrispCache.prototype._del_orig);
CrispCache.prototype.del = delSpy;
crispCacheBasic = new CrispCache({
fetcher: fetcher,
defaultStaleTtl: 300,
defaultExpiresTtl: 500,
evictCheckInterval: 100
});
});
afterEach(function () {
if (clock) {
clock.restore();
}
});
it("Should expire the cache without asking", function (done) {
async.waterfall([
function (callback) {
crispCacheBasic.get('hello', callback);
clock.tick(10);
},
function (value, callback) {
assert.equal(value, 'world');
clock.tick(600);
callback();
},
function (callback) {
assert.equal(delSpy.callCount, 1);
assert.equal(Object.keys(crispCacheBasic.cache).length, 0);
clock.tick(10);
crispCacheBasic.get('hello', {skipFetch: true}, callback);
}
],
function (err, value) {
assert.equal(err, null);
assert.equal(value, undefined);
done();
});
});
});
describe("LRU Integration", function () {
var clock,
crispCacheBasic,
fetcherSpy;
beforeEach(function () {
clock = sinon.useFakeTimers();
crispCacheBasic = new CrispCache({
fetcher: fetcher,
maxSize: 10,
defaultExpiresTtl: 50,
evictCheckInterval: 100
});
});
it("Should increase LRU size", function (done) {
crispCacheBasic.set("testA", "The Value", {size: 3}, function (err, success) {
assert.equal(crispCacheBasic._lru.size, 3);
done();
})
});
it("Should increase LRU size, multiple", function (done) {
crispCacheBasic.set("testA", "The Value", {size: 3}, function (err, success) {
crispCacheBasic.set("testB", "The Value B", {size: 2}, function (err, success) {
assert.equal(crispCacheBasic._lru.size, 5);
done();
});
})
});
it("Should increase LRU size, replace", function (done) {
crispCacheBasic.set("testA", "The Value", {size: 3}, function (err, success) {
crispCacheBasic.set("testA", "The Value B", {size: 4}, function (err, success) {
assert.equal(crispCacheBasic._lru.size, 4);
done();
});
})
});
it("Should update LRU", function (done) {
async.waterfall([
function (callback) {
crispCacheBasic.set("testA", "The Value A", {size: 2}, callback);
},
function (result, callback) {
crispCacheBasic.set("testB", "The Value B", {size: 8}, callback);
},
function (result, callback) {
crispCacheBasic.get("testA", callback);
}
],
function (err, result) {
assert.equal(result, "The Value A");
assert.equal(crispCacheBasic._lru.head.key, 'testA');
done();
});
});
it("Should update LRU without size", function (done) {
crispCacheBasic.set("testA", "The Value A", function (err, result) {
assert.ok(err);
done();
});
});
it("Should remove LRU via crispCache", function (done) {
async.waterfall([
function (callback) {
crispCacheBasic.set("testA", "The Value A", {size: 2}, callback);
},
function (result, callback) {
crispCacheBasic.del("testA", callback);
}
],
function (err, result) {
assert.equal(crispCacheBasic._lru.size, 0);
done();
});
});
it("Should remove LRU", function (done) {
async.waterfall([
function (callback) {
crispCacheBasic.set("testA", "The Value A", {size: 2}, callback);
},
function (result, callback) {
crispCacheBasic.set("testB", "The Value B", {size: 8}, callback);
},
function (result, callback) {
crispCacheBasic.set("testC", "The Value C", {size: 5}, callback);
},
function (result, callback) {
assert.equal(crispCacheBasic._lru.size, 5);
callback();
},
function (callback) {
crispCacheBasic.get("testA", {skipFetch: true}, callback);
},
function (result, callback) {
assert.equal(result, null);
callback();
},
function (callback) {
crispCacheBasic.get("testB", {skipFetch: true}, callback);
}
],
function (err, result) {
assert.equal(result, null);
done();
});
});
it('Should auto-evict entries from LRU cache', function(done) {
crispCacheBasic._lru.del = sinon.spy(crispCacheBasic._lru.del);
crispCacheBasic.set('foo', 'bar', {size: 1}, function(err) {
assert.ifError(err);
clock.tick(101);
assert(crispCacheBasic._lru.del.calledWith('foo'), 'Should have evicted expired entry');
done();
});
});
});
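// A small sketch of the LRU sizing options covered above (placeholder names, not an original
// test): maxSize caps the summed per-entry sizes and each set() declares its own size, so the
// least recently used entries are evicted once the cap is exceeded.
function lruSizingExample(done) {
var lruCache = new CrispCache({
fetcher: function (key, cb) { cb(null, 'value for ' + key); },
maxSize: 10,
defaultExpiresTtl: 50
});
lruCache.set('big-entry', 'payload', {size: 8}, function (err) {
lruCache.set('small-entry', 'payload', {size: 3}, function (err2) {
// Total size is now 11 > maxSize, so 'big-entry' (least recently used) gets evicted,
// mirroring the "Should remove LRU" case above.
done(err || err2);
});
});
}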
});
|
Added value check for event arguments
|
test/crisp-cache-test.js
|
Added value check for event arguments
|
<ide><path>test/crisp-cache-test.js
<ide> var assert = require('assert'),
<ide> async = require('async'),
<add> CacheEntry = require('../lib/CacheEntry'),
<ide> CrispCache = require('../main'),
<ide> seed = require('seed-random'),
<ide> sinon = require('sinon');
<ide> });
<ide> });
<ide>
<add> it("Should emit events with correct values", function (done) {
<add> crispCache.get('hello', function (err, value) {
<add> assert.equal(missSpy.callCount, 1);
<add> assert.ok(missSpy.returned({ key: 'hello' }));
<add> assert.equal(hitSpy.callCount, 0);
<add> crispCache.get('hello', function (err, value) {
<add> assert.equal(missSpy.callCount, 1);
<add> assert.equal(hitSpy.callCount, 1);
<add> assert.ok(hitSpy.lastCall.returnValue.entry instanceof CacheEntry);
<add> assert.equal(hitSpy.lastCall.returnValue.entry.value, 'world');
<add> done();
<add> });
<add> });
<add> });
<add>
<ide> it("Should emit miss twice on force fetch", function (done) {
<ide> crispCache.get('hello', function (err, value) {
<ide> assert.equal(missSpy.callCount, 1);
|
|
Java
|
apache-2.0
|
65245025c2fd67428a3e1f1143847cd52ca97161
| 0 |
thymeleaf/thymeleaf,thymeleaf/thymeleaf
|
/*
* =============================================================================
*
* Copyright (c) 2011-2016, The THYMELEAF team (http://www.thymeleaf.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* =============================================================================
*/
package org.thymeleaf.context;
import java.lang.reflect.Array;
import java.util.AbstractList;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.thymeleaf.IEngineConfiguration;
import org.thymeleaf.engine.TemplateData;
import org.thymeleaf.inline.IInliner;
import org.thymeleaf.inline.NoOpInliner;
import org.thymeleaf.model.IProcessableElementTag;
import org.thymeleaf.util.Validate;
/**
* <p>
* Basic <b>web</b> implementation of the {@link IEngineContext} interface, based on the Servlet API.
* </p>
* <p>
* This is the context implementation that will be used by default for web processing. Note that <b>this is an
* internal implementation, and there is no reason for users' code to directly reference or use it instead
* of its implemented interfaces</b>.
* </p>
* <p>
* This class is NOT thread-safe. Thread-safety is not a requirement for context implementations.
* </p>
*
* @author Daniel Fernández
*
* @since 3.0.0
*
*/
public class WebEngineContext extends AbstractEngineContext implements IEngineContext, IWebContext {
/*
* ---------------------------------------------------------------------------
* THIS MAP FORWARDS ALL OPERATIONS TO THE UNDERLYING REQUEST, EXCEPT
* FOR THE param (request parameters), session (session attributes) AND
* application (servlet context attributes) VARIABLES.
*
* NOTE that, even if attributes are leveled so that above level 0 they are
* considered local and thus disappear after lowering the level, attributes
* directly set on the request object are considered global and therefore
* valid even when the level decreased (though they can be overridden). This
* is so for better simulating the effect of directly working against the
* request object, and for better integration with JSP or any other template
* engines or view-layer technologies that expect the HttpServletRequest to
* be the 'only source of truth' for context variables.
* ---------------------------------------------------------------------------
*/
private static final String PARAM_VARIABLE_NAME = "param";
private static final String SESSION_VARIABLE_NAME = "session";
private static final String APPLICATION_VARIABLE_NAME = "application";
private final HttpServletRequest request;
private final HttpServletResponse response;
private final HttpSession session;
private final ServletContext servletContext;
private final RequestAttributesVariablesMap requestAttributesVariablesMap;
private final Map<String,Object> requestParametersVariablesMap;
private final Map<String,Object> sessionAttributesVariablesMap;
private final Map<String,Object> applicationAttributesVariablesMap;
/**
* <p>
* Creates a new instance of this {@link IEngineContext} implementation binding engine execution to
* the Servlet API.
* </p>
* <p>
* Note that implementations of {@link IEngineContext} are not meant to be used in order to call
* the template engine (use implementations of {@link IContext} such as {@link Context} or {@link WebContext}
* instead). This is therefore mostly an <b>internal</b> implementation, and users should have no reason
* to ever call this constructor except in very specific integration/extension scenarios.
* </p>
*
* @param configuration the configuration instance being used.
* @param templateData the template data for the template to be processed.
* @param templateResolutionAttributes the template resolution attributes.
* @param request the servlet request object.
* @param response the servlet response object.
* @param servletContext the servlet context object.
* @param locale the locale.
* @param variables the context variables, probably coming from another {@link IContext} implementation.
*/
public WebEngineContext(
final IEngineConfiguration configuration,
final TemplateData templateData,
final Map<String,Object> templateResolutionAttributes,
final HttpServletRequest request, final HttpServletResponse response,
final ServletContext servletContext,
final Locale locale,
final Map<String, Object> variables) {
super(configuration, templateResolutionAttributes, locale);
Validate.notNull(request, "Request cannot be null in web variables map");
Validate.notNull(response, "Response cannot be null in web variables map");
Validate.notNull(servletContext, "Servlet Context cannot be null in web variables map");
this.request = request;
this.response = response;
this.session = request.getSession(false);
this.servletContext = servletContext;
this.requestAttributesVariablesMap =
new RequestAttributesVariablesMap(configuration, templateData, templateResolutionAttributes, this.request, locale, variables);
this.requestParametersVariablesMap = new RequestParametersMap(this.request);
this.applicationAttributesVariablesMap = new ServletContextAttributesMap(this.servletContext);
this.sessionAttributesVariablesMap = new SessionAttributesMap(this.session);
}
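/*
 * Usage sketch (illustrative only): as the constructor Javadoc notes, templates are normally
 * processed through an IContext implementation such as WebContext rather than through this
 * internal class. Assuming an already configured TemplateEngine instance named 'templateEngine'
 * (a placeholder, as is 'currentUser'), a servlet would typically do something like:
 *
 *   final WebContext ctx = new WebContext(request, response, servletContext, request.getLocale());
 *   ctx.setVariable("user", currentUser);
 *   templateEngine.process("home", ctx, response.getWriter());
 */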
public HttpServletRequest getRequest() {
return this.request;
}
public HttpServletResponse getResponse() {
return this.response;
}
public HttpSession getSession() {
return this.session;
}
public ServletContext getServletContext() {
return this.servletContext;
}
public boolean containsVariable(final String name) {
if (SESSION_VARIABLE_NAME.equals(name)) {
return this.sessionAttributesVariablesMap != null;
}
if (PARAM_VARIABLE_NAME.equals(name)) {
return true;
}
return APPLICATION_VARIABLE_NAME.equals(name) || this.requestAttributesVariablesMap.containsVariable(name);
}
public Object getVariable(final String key) {
if (SESSION_VARIABLE_NAME.equals(key)) {
return this.sessionAttributesVariablesMap;
}
if (PARAM_VARIABLE_NAME.equals(key)) {
return this.requestParametersVariablesMap;
}
if (APPLICATION_VARIABLE_NAME.equals(key)) {
return this.applicationAttributesVariablesMap;
}
return this.requestAttributesVariablesMap.getVariable(key);
}
public Set<String> getVariableNames() {
// Note this set will NOT include 'param', 'session' or 'application', as they are considered special
// ways to access attributes/parameters in these Servlet API structures
return this.requestAttributesVariablesMap.getVariableNames();
}
public void setVariable(final String name, final Object value) {
if (SESSION_VARIABLE_NAME.equals(name) ||
PARAM_VARIABLE_NAME.equals(name) ||
APPLICATION_VARIABLE_NAME.equals(name)) {
throw new IllegalArgumentException(
"Cannot set variable called '" + name + "' into web variables map: such name is a reserved word");
}
this.requestAttributesVariablesMap.setVariable(name, value);
}
public void setVariables(final Map<String, Object> variables) {
if (variables == null || variables.isEmpty()) {
return;
}
// First perform reserved word check on every variable name to be inserted
for (final String name : variables.keySet()) {
if (SESSION_VARIABLE_NAME.equals(name) ||
PARAM_VARIABLE_NAME.equals(name) ||
APPLICATION_VARIABLE_NAME.equals(name)) {
throw new IllegalArgumentException(
"Cannot set variable called '" + name + "' into web variables map: such name is a reserved word");
}
}
this.requestAttributesVariablesMap.setVariables(variables);
}
public void removeVariable(final String name) {
if (SESSION_VARIABLE_NAME.equals(name) ||
PARAM_VARIABLE_NAME.equals(name) ||
APPLICATION_VARIABLE_NAME.equals(name)) {
throw new IllegalArgumentException(
"Cannot remove variable called '" + name + "' in web variables map: such name is a reserved word");
}
this.requestAttributesVariablesMap.removeVariable(name);
}
public boolean isVariableLocal(final String name) {
return this.requestAttributesVariablesMap.isVariableLocal(name);
}
public boolean hasSelectionTarget() {
return this.requestAttributesVariablesMap.hasSelectionTarget();
}
public Object getSelectionTarget() {
return this.requestAttributesVariablesMap.getSelectionTarget();
}
public void setSelectionTarget(final Object selectionTarget) {
this.requestAttributesVariablesMap.setSelectionTarget(selectionTarget);
}
public IInliner getInliner() {
return this.requestAttributesVariablesMap.getInliner();
}
public void setInliner(final IInliner inliner) {
this.requestAttributesVariablesMap.setInliner(inliner);
}
public TemplateData getTemplateData() {
return this.requestAttributesVariablesMap.getTemplateData();
}
public void setTemplateData(final TemplateData templateData) {
this.requestAttributesVariablesMap.setTemplateData(templateData);
}
public List<TemplateData> getTemplateStack() {
return this.requestAttributesVariablesMap.getTemplateStack();
}
public void setElementTag(final IProcessableElementTag elementTag) {
this.requestAttributesVariablesMap.setElementTag(elementTag);
}
public List<IProcessableElementTag> getElementStack() {
return this.requestAttributesVariablesMap.getElementStack();
}
public List<IProcessableElementTag> getElementStackAbove(final int contextLevel) {
return this.requestAttributesVariablesMap.getElementStackAbove(contextLevel);
}
public int level() {
return this.requestAttributesVariablesMap.level();
}
public void increaseLevel() {
this.requestAttributesVariablesMap.increaseLevel();
}
public void decreaseLevel() {
this.requestAttributesVariablesMap.decreaseLevel();
}
public String getStringRepresentationByLevel() {
// Request parameters, session and servlet context can be safely ignored here
return this.requestAttributesVariablesMap.getStringRepresentationByLevel();
}
@Override
public String toString() {
// Request parameters, session and servlet context can be safely ignored here
return this.requestAttributesVariablesMap.toString();
}
static Object resolveLazy(final Object variable) {
/*
* Check the possibility that this variable is a lazy one, in which case we should not return it directly
* but instead make sure it is initialized and return its value.
*/
if (variable != null && variable instanceof ILazyContextVariable) {
return ((ILazyContextVariable)variable).getValue();
}
return variable;
}
private static final class SessionAttributesMap extends NoOpMapImpl {
private final HttpSession session;
SessionAttributesMap(final HttpSession session) {
super();
this.session = session;
}
@Override
public int size() {
if (this.session == null) {
return 0;
}
int size = 0;
final Enumeration<String> attributeNames = this.session.getAttributeNames();
while (attributeNames.hasMoreElements()) {
attributeNames.nextElement();
size++;
}
return size;
}
@Override
public boolean isEmpty() {
if (this.session == null) {
return true;
}
final Enumeration<String> attributeNames = this.session.getAttributeNames();
return !attributeNames.hasMoreElements();
}
@Override
public boolean containsKey(final Object key) {
// Even if not completely correct to return 'true' for entries that might not exist, this is needed
// in order to avoid Spring's MapAccessor throwing an exception when trying to access an element
// that doesn't exist -- in the case of request parameters, session and servletContext attributes most
// developers would expect null to be returned in such case, and that's what this 'true' will cause.
return true;
}
@Override
public boolean containsValue(final Object value) {
// It wouldn't be consistent to have an 'ad hoc' implementation of #containsKey() but a 100% correct
// implementation of #containsValue(), so we are leaving this as unsupported.
throw new UnsupportedOperationException("Map does not support #containsValue()");
}
@Override
public Object get(final Object key) {
if (this.session == null) {
return null;
}
return resolveLazy(this.session.getAttribute(key != null? key.toString() : null));
}
@Override
public Set<String> keySet() {
if (this.session == null) {
return Collections.emptySet();
}
final Set<String> keySet = new LinkedHashSet<String>(5);
final Enumeration<String> attributeNames = this.session.getAttributeNames();
while (attributeNames.hasMoreElements()) {
keySet.add(attributeNames.nextElement());
}
return keySet;
}
@Override
public Collection<Object> values() {
if (this.session == null) {
return Collections.emptySet();
}
final List<Object> values = new ArrayList<Object>(5);
final Enumeration<String> attributeNames = this.session.getAttributeNames();
while (attributeNames.hasMoreElements()) {
values.add(this.session.getAttribute(attributeNames.nextElement()));
}
return values;
}
@Override
public Set<Entry<String,Object>> entrySet() {
if (this.session == null) {
return Collections.emptySet();
}
final Set<Entry<String,Object>> entrySet = new LinkedHashSet<Entry<String, Object>>(5);
final Enumeration<String> attributeNames = this.session.getAttributeNames();
while (attributeNames.hasMoreElements()) {
final String key = attributeNames.nextElement();
final Object value = this.session.getAttribute(key);
entrySet.add(new MapEntry(key, value));
}
return entrySet;
}
}
private static final class ServletContextAttributesMap extends NoOpMapImpl {
private final ServletContext servletContext;
ServletContextAttributesMap(final ServletContext servletContext) {
super();
this.servletContext = servletContext;
}
@Override
public int size() {
int size = 0;
final Enumeration<String> attributeNames = this.servletContext.getAttributeNames();
while (attributeNames.hasMoreElements()) {
attributeNames.nextElement();
size++;
}
return size;
}
@Override
public boolean isEmpty() {
final Enumeration<String> attributeNames = this.servletContext.getAttributeNames();
return !attributeNames.hasMoreElements();
}
@Override
public boolean containsKey(final Object key) {
// Even if not completely correct to return 'true' for entries that might not exist, this is needed
// in order to avoid Spring's MapAccessor throwing an exception when trying to access an element
// that doesn't exist -- in the case of request parameters, session and servletContext attributes most
// developers would expect null to be returned in such case, and that's what this 'true' will cause.
return true;
}
@Override
public boolean containsValue(final Object value) {
// It wouldn't be consistent to have an 'ad hoc' implementation of #containsKey() but a 100% correct
// implementation of #containsValue(), so we are leaving this as unsupported.
throw new UnsupportedOperationException("Map does not support #containsValue()");
}
@Override
public Object get(final Object key) {
return resolveLazy(this.servletContext.getAttribute(key != null? key.toString() : null));
}
@Override
public Set<String> keySet() {
final Set<String> keySet = new LinkedHashSet<String>(5);
final Enumeration<String> attributeNames = this.servletContext.getAttributeNames();
while (attributeNames.hasMoreElements()) {
keySet.add(attributeNames.nextElement());
}
return keySet;
}
@Override
public Collection<Object> values() {
final List<Object> values = new ArrayList<Object>(5);
final Enumeration<String> attributeNames = this.servletContext.getAttributeNames();
while (attributeNames.hasMoreElements()) {
values.add(this.servletContext.getAttribute(attributeNames.nextElement()));
}
return values;
}
@Override
public Set<Map.Entry<String,Object>> entrySet() {
final Set<Map.Entry<String,Object>> entrySet = new LinkedHashSet<Map.Entry<String, Object>>(5);
final Enumeration<String> attributeNames = this.servletContext.getAttributeNames();
while (attributeNames.hasMoreElements()) {
final String key = attributeNames.nextElement();
final Object value = this.servletContext.getAttribute(key);
entrySet.add(new MapEntry(key, value));
}
return entrySet;
}
}
private static final class RequestParametersMap extends NoOpMapImpl {
private final HttpServletRequest request;
RequestParametersMap(final HttpServletRequest request) {
super();
this.request = request;
}
@Override
public int size() {
return this.request.getParameterMap().size();
}
@Override
public boolean isEmpty() {
return this.request.getParameterMap().isEmpty();
}
@Override
public boolean containsKey(final Object key) {
// Even if not completely correct to return 'true' for entries that might not exist, this is needed
// in order to avoid Spring's MapAccessor throwing an exception when trying to access an element
// that doesn't exist -- in the case of request parameters, session and servletContext attributes most
// developers would expect null to be returned in such case, and that's what this 'true' will cause.
return true;
}
@Override
public boolean containsValue(final Object value) {
// It wouldn't be consistent to have an 'ad hoc' implementation of #containsKey() but a 100% correct
// implementation of #containsValue(), so we are leaving this as unsupported.
throw new UnsupportedOperationException("Map does not support #containsValue()");
}
@Override
public Object get(final Object key) {
final String[] parameterValues = this.request.getParameterValues(key != null? key.toString() : null);
if (parameterValues == null) {
return null;
}
return new RequestParameterValues(parameterValues);
}
@Override
public Set<String> keySet() {
return this.request.getParameterMap().keySet();
}
@Override
public Collection<Object> values() {
return this.request.getParameterMap().values();
}
@Override
public Set<Map.Entry<String,Object>> entrySet() {
return this.request.getParameterMap().entrySet();
}
}
private static final class RequestAttributesVariablesMap extends AbstractEngineContext implements IEngineContext {
private static final int DEFAULT_ELEMENT_HIERARCHY_SIZE = 20;
private static final int DEFAULT_LEVELS_SIZE = 10;
private static final int DEFAULT_LEVELARRAYS_SIZE = 5;
private final HttpServletRequest request;
private int level = 0;
private int index = 0;
private int[] levels;
private String[][] names;
private Object[][] oldValues;
private Object[][] newValues;
private int[] levelSizes;
private SelectionTarget[] selectionTargets;
private IInliner[] inliners;
private TemplateData[] templateDatas;
private IProcessableElementTag[] elementTags;
private SelectionTarget lastSelectionTarget = null;
private IInliner lastInliner = null;
private TemplateData lastTemplateData = null;
private final List<TemplateData> templateStack;
RequestAttributesVariablesMap(
final IEngineConfiguration configuration,
final TemplateData templateData,
final Map<String,Object> templateResolutionAttributes,
final HttpServletRequest request,
final Locale locale,
final Map<String, Object> variables) {
super(configuration, templateResolutionAttributes, locale);
this.request = request;
this.levels = new int[DEFAULT_LEVELS_SIZE];
this.names = new String[DEFAULT_LEVELS_SIZE][];
this.oldValues = new Object[DEFAULT_LEVELS_SIZE][];
this.newValues = new Object[DEFAULT_LEVELS_SIZE][];
this.levelSizes = new int[DEFAULT_LEVELS_SIZE];
this.selectionTargets = new SelectionTarget[DEFAULT_LEVELS_SIZE];
this.inliners = new IInliner[DEFAULT_LEVELS_SIZE];
this.templateDatas = new TemplateData[DEFAULT_LEVELS_SIZE];
this.elementTags = new IProcessableElementTag[DEFAULT_ELEMENT_HIERARCHY_SIZE];
Arrays.fill(this.levels, Integer.MAX_VALUE);
Arrays.fill(this.names, null);
Arrays.fill(this.oldValues, null);
Arrays.fill(this.newValues, null);
Arrays.fill(this.levelSizes, 0);
Arrays.fill(this.selectionTargets, null);
Arrays.fill(this.inliners, null);
Arrays.fill(this.templateDatas, null);
Arrays.fill(this.elementTags, null);
this.levels[0] = 0;
this.templateDatas[0] = templateData;
this.lastTemplateData = templateData;
this.templateStack = new ArrayList<TemplateData>(DEFAULT_LEVELS_SIZE);
this.templateStack.add(templateData);
if (variables != null) {
setVariables(variables);
}
}
public boolean containsVariable(final String name) {
return this.request.getAttribute(name) != null;
}
public Object getVariable(final String key) {
return resolveLazy(this.request.getAttribute(key));
}
public Set<String> getVariableNames() {
// --------------------------
// Note this method relies on HttpServletRequest#getAttributeNames(), which is an extremely slow and
// inefficient method in implementations like Apache Tomcat's. So the uses of this method should be
// very controlled and reduced to the minimum. Specifically, any call that executes e.g. for every
// expression evaluation should be disallowed. Only sporadic uses should be done.
// Note also it would not be a good idea to cache the attribute names coming from the request if we
// want to keep complete independence of the HttpServletRequest object, so that it can be modified
// from the outside (e.g. from other libraries like Tiles) with Thymeleaf perfectly integrating with
// those modifications.
// --------------------------
final Set<String> variableNames = new HashSet<String>(10);
final Enumeration<String> attributeNamesEnum = this.request.getAttributeNames();
while (attributeNamesEnum.hasMoreElements()) {
variableNames.add(attributeNamesEnum.nextElement());
}
return variableNames;
}
private int searchNameInIndex(final String name, final int idx) {
int n = this.levelSizes[idx];
if (name == null) {
while (n-- != 0) {
if (this.names[idx][n] == null) {
return n;
}
}
return -1;
}
while (n-- != 0) {
if (name.equals(this.names[idx][n])) {
return n;
}
}
return -1;
}
public void setVariable(final String name, final Object value) {
ensureLevelInitialized(true);
if (this.level > 0) {
// We will only take care of new/old values if we are not on level 0
int levelIndex = searchNameInIndex(name,this.index);
if (levelIndex >= 0) {
// There already is a registered movement for this key - we should modify it instead of creating a new one
this.newValues[this.index][levelIndex] = value;
} else {
if (this.names[this.index].length == this.levelSizes[this.index]) {
// We need to grow the arrays for this level
this.names[this.index] = Arrays.copyOf(this.names[this.index], this.names[this.index].length + DEFAULT_LEVELARRAYS_SIZE);
this.newValues[this.index] = Arrays.copyOf(this.newValues[this.index], this.newValues[this.index].length + DEFAULT_LEVELARRAYS_SIZE);
this.oldValues[this.index] = Arrays.copyOf(this.oldValues[this.index], this.oldValues[this.index].length + DEFAULT_LEVELARRAYS_SIZE);
}
levelIndex = this.levelSizes[this.index]; // We will add at the end
this.names[this.index][levelIndex] = name;
/*
* Per construction, according to the Servlet API, an attribute set to null and a non-existing
* attribute are exactly the same. So we don't really have a reason to worry about the attribute
* already existing or not when it was set to null.
*/
this.oldValues[this.index][levelIndex] = this.request.getAttribute(name);
this.newValues[this.index][levelIndex] = value;
this.levelSizes[this.index]++;
}
}
// No matter if value is null or not. Value null will be equivalent to .removeAttribute()
this.request.setAttribute(name, value);
}
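/*
 * Behaviour sketch (illustrative, placeholder names): above level 0, setVariable() records the
 * request attribute's previous value so that decreaseLevel() can restore it, which is what makes
 * these variables local to a level. Roughly:
 *
 *   vmap.increaseLevel();
 *   vmap.setVariable("user", localUser);   // request attribute temporarily overridden
 *   // ... nested processing sees the local value ...
 *   vmap.decreaseLevel();                  // previous "user" request attribute value restored
 */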
public void setVariables(final Map<String, Object> variables) {
if (variables == null || variables.isEmpty()) {
return;
}
for (final Map.Entry<String,Object> entry : variables.entrySet()) {
setVariable(entry.getKey(), entry.getValue());
}
}
public void removeVariable(final String name) {
setVariable(name, null);
}
public boolean isVariableLocal(final String name) {
if (this.level == 0) {
// We are at level 0, so we cannot have local variables at all
return false;
}
int n = this.index + 1;
while (n-- > 1) { // variables at n == 0 are not local!
final int idx = searchNameInIndex(name, n);
if (idx >= 0) {
return this.newValues[n][idx] != null;
}
}
return false;
}
public boolean hasSelectionTarget() {
if (this.lastSelectionTarget != null) {
return true;
}
int n = this.index + 1;
while (n-- != 0) {
if (this.selectionTargets[n] != null) {
return true;
}
}
return false;
}
public Object getSelectionTarget() {
if (this.lastSelectionTarget != null) {
return this.lastSelectionTarget.selectionTarget;
}
int n = this.index + 1;
while (n-- != 0) {
if (this.selectionTargets[n] != null) {
this.lastSelectionTarget = this.selectionTargets[n];
return this.lastSelectionTarget.selectionTarget;
}
}
return null;
}
public void setSelectionTarget(final Object selectionTarget) {
ensureLevelInitialized(false);
this.lastSelectionTarget = new SelectionTarget(selectionTarget);
this.selectionTargets[this.index] = this.lastSelectionTarget;
}
public IInliner getInliner() {
if (this.lastInliner != null) {
if (this.lastInliner == NoOpInliner.INSTANCE) {
return null;
}
return this.lastInliner;
}
int n = this.index + 1;
while (n-- != 0) {
if (this.inliners[n] != null) {
this.lastInliner = this.inliners[n];
if (this.lastInliner == NoOpInliner.INSTANCE) {
return null;
}
return this.lastInliner;
}
}
return null;
}
public void setInliner(final IInliner inliner) {
ensureLevelInitialized(false);
// We use NoOpInliner.INSTANCE in order to signal when inlining has actually been disabled
this.lastInliner = (inliner == null? NoOpInliner.INSTANCE : inliner);
this.inliners[this.index] = this.lastInliner;
}
public TemplateData getTemplateData() {
if (this.lastTemplateData != null) {
return this.lastTemplateData;
}
int n = this.index + 1;
while (n-- != 0) {
if (this.templateDatas[n] != null) {
this.lastTemplateData = this.templateDatas[n];
return this.lastTemplateData;
}
}
return null;
}
public void setTemplateData(final TemplateData templateData) {
Validate.notNull(templateData, "Template Data cannot be null");
ensureLevelInitialized(false);
this.lastTemplateData = templateData;
this.templateDatas[this.index] = this.lastTemplateData;
this.templateStack.clear();
}
public List<TemplateData> getTemplateStack() {
if (!this.templateStack.isEmpty()) {
// It would have been empty if we had just decreased a level or added a new template
return Collections.unmodifiableList(new ArrayList<TemplateData>(this.templateStack));
}
for (int i = 0; i <= this.index; i++) {
if (this.templateDatas[i] != null) {
this.templateStack.add(this.templateDatas[i]);
}
}
return Collections.unmodifiableList(new ArrayList<TemplateData>(this.templateStack));
}
public void setElementTag(final IProcessableElementTag elementTag) {
if (this.elementTags.length <= this.level) {
this.elementTags = Arrays.copyOf(this.elementTags, Math.max(this.level, this.elementTags.length + DEFAULT_ELEMENT_HIERARCHY_SIZE));
}
this.elementTags[this.level] = elementTag;
}
public List<IProcessableElementTag> getElementStack() {
final List<IProcessableElementTag> elementStack = new ArrayList<IProcessableElementTag>(this.level);
for (int i = 0; i <= this.level && i < this.elementTags.length; i++) {
if (this.elementTags[i] != null) {
elementStack.add(this.elementTags[i]);
}
}
return Collections.unmodifiableList(elementStack);
}
public List<IProcessableElementTag> getElementStackAbove(final int contextLevel) {
final List<IProcessableElementTag> elementStack = new ArrayList<IProcessableElementTag>(this.level);
for (int i = contextLevel + 1; i <= this.level && i < this.elementTags.length; i++) {
if (this.elementTags[i] != null) {
elementStack.add(this.elementTags[i]);
}
}
return Collections.unmodifiableList(elementStack);
}
private void ensureLevelInitialized(final boolean initVariables) {
// First, check if the current index already signals the current level (in which case, everything is OK)
if (this.levels[this.index] != this.level) {
// The current level still had no index assigned -- we must do it, and maybe even grow structures
this.index++; // This new index will be the one for our level
if (this.levels.length == this.index) {
this.levels = Arrays.copyOf(this.levels, this.levels.length + DEFAULT_LEVELS_SIZE);
Arrays.fill(this.levels, this.index, this.levels.length, Integer.MAX_VALUE); // We fill the new places with MAX_VALUE
this.names = Arrays.copyOf(this.names, this.names.length + DEFAULT_LEVELS_SIZE);
this.newValues = Arrays.copyOf(this.newValues, this.newValues.length + DEFAULT_LEVELS_SIZE);
this.oldValues = Arrays.copyOf(this.oldValues, this.oldValues.length + DEFAULT_LEVELS_SIZE);
this.levelSizes = Arrays.copyOf(this.levelSizes, this.levelSizes.length + DEFAULT_LEVELS_SIZE);
// No need to initialize new places in this.levelSizes as copyOf already fills with zeroes
this.selectionTargets = Arrays.copyOf(this.selectionTargets, this.selectionTargets.length + DEFAULT_LEVELS_SIZE);
this.inliners = Arrays.copyOf(this.inliners, this.inliners.length + DEFAULT_LEVELS_SIZE);
this.templateDatas = Arrays.copyOf(this.templateDatas, this.templateDatas.length + DEFAULT_LEVELS_SIZE);
}
this.levels[this.index] = this.level;
}
if (this.level > 0) {
// We will only take care of new/old values if we are not on level 0
if (initVariables && this.names[this.index] == null) {
// the arrays for this level have still not been created
this.names[this.index] = new String[DEFAULT_LEVELARRAYS_SIZE];
Arrays.fill(this.names[this.index], null);
this.newValues[this.index] = new Object[DEFAULT_LEVELARRAYS_SIZE];
Arrays.fill(this.newValues[this.index], null);
this.oldValues[this.index] = new Object[DEFAULT_LEVELARRAYS_SIZE];
Arrays.fill(this.oldValues[this.index], null);
this.levelSizes[this.index] = 0;
}
}
}
public int level() {
return this.level;
}
public void increaseLevel() {
this.level++;
}
public void decreaseLevel() {
Validate.isTrue(this.level > 0, "Cannot decrease variable map level below 0");
if (this.levels[this.index] == this.level) {
this.levels[this.index] = Integer.MAX_VALUE;
if (this.names[this.index] != null && this.levelSizes[this.index] > 0) {
// There were movements at this level, so we have to revert them
int n = this.levelSizes[this.index];
while (n-- != 0) {
final String name = this.names[this.index][n];
final Object newValue = this.newValues[this.index][n];
final Object oldValue = this.oldValues[this.index][n];
final Object currentValue = this.request.getAttribute(name);
if (newValue == currentValue) {
// Only if the value matches, in order to avoid modifying values that have been set directly
// into the request.
this.request.setAttribute(name,oldValue);
}
}
this.levelSizes[this.index] = 0;
}
this.selectionTargets[this.index] = null;
this.inliners[this.index] = null;
this.templateDatas[this.index] = null;
this.index--;
// These might not belong to this level, but just in case...
this.lastSelectionTarget = null;
this.lastInliner = null;
this.lastTemplateData = null;
this.templateStack.clear();
}
if (this.level < this.elementTags.length) {
this.elementTags[this.level] = null;
}
this.level--;
}
public String getStringRepresentationByLevel() {
final StringBuilder strBuilder = new StringBuilder();
strBuilder.append('{');
final Map<String,Object> oldValuesSum = new LinkedHashMap<String, Object>();
int n = this.index + 1;
while (n-- != 1) {
final Map<String,Object> levelVars = new LinkedHashMap<String, Object>();
if (this.names[n] != null && this.levelSizes[n] > 0) {
for (int i = 0; i < this.levelSizes[n]; i++) {
final String name = this.names[n][i];
final Object newValue = this.newValues[n][i];
final Object oldValue = this.oldValues[n][i];
if (newValue == oldValue) {
// This is a no-op!
continue;
}
if (!oldValuesSum.containsKey(name)) {
// This means that, either the value in the request is the same as the newValue, or it was modified
// directly at the request and we need to discard this entry.
if (newValue != this.request.getAttribute(name)) {
continue;
}
} else {
// This means that, either the old value in the map is the same as the newValue, or it was modified
// directly at the request and we need to discard this entry.
if (newValue != oldValuesSum.get(name)) {
continue;
}
}
levelVars.put(name, newValue);
oldValuesSum.put(name, oldValue);
}
}
if (!levelVars.isEmpty() || this.selectionTargets[n] != null || this.inliners[n] != null) {
if (strBuilder.length() > 1) {
strBuilder.append(',');
}
strBuilder.append(this.levels[n]).append(":");
if (!levelVars.isEmpty() || n == 0) {
strBuilder.append(levelVars);
}
if (this.selectionTargets[n] != null) {
strBuilder.append("<").append(this.selectionTargets[n].selectionTarget).append(">");
}
if (this.inliners[n] != null) {
strBuilder.append("[").append(this.inliners[n].getName()).append("]");
}
if (this.templateDatas[n] != null) {
strBuilder.append("(").append(this.templateDatas[n].getTemplate()).append(")");
}
}
}
final Map<String,Object> requestAttributes = new LinkedHashMap<String, Object>();
final Enumeration<String> attrNames = this.request.getAttributeNames();
while (attrNames.hasMoreElements()) {
final String name = attrNames.nextElement();
if (oldValuesSum.containsKey(name)) {
final Object oldValue = oldValuesSum.get(name);
if (oldValue != null) {
requestAttributes.put(name, oldValuesSum.get(name));
}
oldValuesSum.remove(name);
} else {
requestAttributes.put(name, this.request.getAttribute(name));
}
}
for (Map.Entry<String,Object> oldValuesSumEntry : oldValuesSum.entrySet()) {
final String name = oldValuesSumEntry.getKey();
if (!requestAttributes.containsKey(name)) {
final Object oldValue = oldValuesSumEntry.getValue();
if (oldValue != null) {
requestAttributes.put(name, oldValue);
}
}
}
if (strBuilder.length() > 1) {
strBuilder.append(',');
}
strBuilder.append(this.levels[n]).append(":");
strBuilder.append(requestAttributes.toString());
if (this.selectionTargets[0] != null) {
strBuilder.append("<").append(this.selectionTargets[0].selectionTarget).append(">");
}
if (this.inliners[0] != null) {
strBuilder.append("[").append(this.inliners[0].getName()).append("]");
}
if (this.templateDatas[0] != null) {
strBuilder.append("(").append(this.templateDatas[0].getTemplate()).append(")");
}
strBuilder.append("}[");
strBuilder.append(this.level);
strBuilder.append(']');
return strBuilder.toString();
}
@Override
public String toString() {
final Map<String,Object> equivalentMap = new LinkedHashMap<String, Object>();
final Enumeration<String> attributeNamesEnum = this.request.getAttributeNames();
while (attributeNamesEnum.hasMoreElements()) {
final String name = attributeNamesEnum.nextElement();
equivalentMap.put(name, this.request.getAttribute(name));
}
final String textInliningStr = (getInliner() != null? "[" + getInliner().getName() + "]" : "" );
final String templateDataStr = "(" + getTemplateData().getTemplate() + ")";
return equivalentMap.toString() + (hasSelectionTarget()? "<" + getSelectionTarget() + ">" : "") + textInliningStr + templateDataStr;
}
/*
* This class works as a wrapper for the selection target, in order to differentiate whether we
* have set a selection target, we have not, or we have set it but it's null
*/
private static final class SelectionTarget {
final Object selectionTarget;
SelectionTarget(final Object selectionTarget) {
super();
this.selectionTarget = selectionTarget;
}
}
}
private abstract static class NoOpMapImpl implements Map<String,Object> {
protected NoOpMapImpl() {
super();
}
public int size() {
return 0;
}
public boolean isEmpty() {
return true;
}
public boolean containsKey(final Object key) {
return false;
}
public boolean containsValue(final Object value) {
return false;
}
public Object get(final Object key) {
return null;
}
public Object put(final String key, final Object value) {
throw new UnsupportedOperationException("Cannot add new entry: map is immutable");
}
public Object remove(final Object key) {
throw new UnsupportedOperationException("Cannot remove entry: map is immutable");
}
public void putAll(final Map<? extends String, ? extends Object> m) {
throw new UnsupportedOperationException("Cannot add new entry: map is immutable");
}
public void clear() {
throw new UnsupportedOperationException("Cannot clear: map is immutable");
}
public Set<String> keySet() {
return Collections.emptySet();
}
public Collection<Object> values() {
return Collections.emptyList();
}
public Set<Entry<String,Object>> entrySet() {
return Collections.emptySet();
}
static final class MapEntry implements Map.Entry<String,Object> {
private final String key;
private final Object value;
MapEntry(final String key, final Object value) {
super();
this.key = key;
this.value = value;
}
public String getKey() {
return this.key;
}
public Object getValue() {
return this.value;
}
public Object setValue(final Object value) {
throw new UnsupportedOperationException("Cannot set value: map is immutable");
}
}
}
private static final class RequestParameterValues extends AbstractList<String> {
private final String[] parameterValues;
public final int length;
RequestParameterValues(final String[] parameterValues) {
this.parameterValues = parameterValues;
this.length = this.parameterValues.length;
}
@Override
public int size() {
return this.length;
}
@Override
public Object[] toArray() {
return this.parameterValues.clone();
}
@Override
public <T> T[] toArray(final T[] arr) {
if (arr.length < this.length) {
final T[] copy = (T[]) Array.newInstance(arr.getClass().getComponentType(), this.length);
System.arraycopy(this.parameterValues, 0, copy, 0, this.length);
return copy;
}
System.arraycopy(this.parameterValues, 0, arr, 0, this.length);
if (arr.length > this.length) {
arr[this.length] = null;
}
return arr;
}
@Override
public String get(final int index) {
return this.parameterValues[index];
}
@Override
public int indexOf(final Object obj) {
final String[] a = this.parameterValues;
if (obj == null) {
for (int i = 0; i < a.length; i++) {
if (a[i] == null) {
return i;
}
}
} else {
for (int i = 0; i < a.length; i++) {
if (obj.equals(a[i])) {
return i;
}
}
}
return -1;
}
@Override
public boolean contains(final Object obj) {
return indexOf(obj) != -1;
}
@Override
public String toString() {
// This toString() method is responsible for outputting non-indexed request parameters in the
// way most people expect, i.e. return parameterValues[0] when accessed without index and parameter is
// single-valued (${param.a}), returning ArrayList#toString() when accessed without index and parameter
// is multi-valued, and finally return the specific value when accessed with index (${param.a[0]})
if (this.length == 0) {
return "";
}
if (this.length == 1) {
return this.parameterValues[0];
}
return super.toString();
}
}
}
|
src/main/java/org/thymeleaf/context/WebEngineContext.java
|
/*
* =============================================================================
*
* Copyright (c) 2011-2016, The THYMELEAF team (http://www.thymeleaf.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* =============================================================================
*/
package org.thymeleaf.context;
import java.lang.reflect.Array;
import java.util.AbstractList;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.thymeleaf.IEngineConfiguration;
import org.thymeleaf.engine.TemplateData;
import org.thymeleaf.inline.IInliner;
import org.thymeleaf.inline.NoOpInliner;
import org.thymeleaf.model.IProcessableElementTag;
import org.thymeleaf.util.Validate;
/**
* <p>
* Basic <b>web</b> implementation of the {@link IEngineContext} interface, based on the Servlet API.
* </p>
* <p>
* This is the context implementation that will be used by default for web processing. Note that <b>this is an
* internal implementation, and there is no reason for users' code to directly reference or use it instead
* of its implemented interfaces</b>.
* </p>
* <p>
* This class is NOT thread-safe. Thread-safety is not a requirement for context implementations.
* </p>
*
* @author Daniel Fernández
*
* @since 3.0.0
*
*/
public class WebEngineContext extends AbstractEngineContext implements IEngineContext, IWebContext {
/*
* ---------------------------------------------------------------------------
* THIS MAP FORWARDS ALL OPERATIONS TO THE UNDERLYING REQUEST, EXCEPT
* FOR THE param (request parameters), session (session attributes) AND
* application (servlet context attributes) VARIABLES.
*
* NOTE that, even if attributes are leveled so that above level 0 they are
* considered local and thus disappear after lowering the level, attributes
* directly set on the request object are considered global and therefore
* valid even when the level decreased (though they can be overridden). This
* is so for better simulating the effect of directly working against the
* request object, and for better integration with JSP or any other template
* engines or view-layer technologies that expect the HttpServletRequest to
* be the 'only source of truth' for context variables.
* ---------------------------------------------------------------------------
*/
private static final String PARAM_VARIABLE_NAME = "param";
private static final String SESSION_VARIABLE_NAME = "session";
private static final String APPLICATION_VARIABLE_NAME = "application";
private final HttpServletRequest request;
private final HttpServletResponse response;
private final HttpSession session;
private final ServletContext servletContext;
private final RequestAttributesVariablesMap requestAttributesVariablesMap;
private final Map<String,Object> requestParametersVariablesMap;
private final Map<String,Object> sessionAttributesVariablesMap;
private final Map<String,Object> applicationAttributesVariablesMap;
/**
* <p>
* Creates a new instance of this {@link IEngineContext} implementation binding engine execution to
* the Servlet API.
* </p>
* <p>
* Note that implementations of {@link IEngineContext} are not meant to be used in order to call
* the template engine (use implementations of {@link IContext} such as {@link Context} or {@link WebContext}
* instead). This is therefore mostly an <b>internal</b> implementation, and users should have no reason
* to ever call this constructor except in very specific integration/extension scenarios.
* </p>
*
* @param configuration the configuration instance being used.
* @param templateData the template data for the template to be processed.
* @param templateResolutionAttributes the template resolution attributes.
* @param request the servlet request object.
* @param response the servlet response object.
* @param servletContext the servlet context object.
* @param locale the locale.
* @param variables the context variables, probably coming from another {@link IContext} implementation.
*/
public WebEngineContext(
final IEngineConfiguration configuration,
final TemplateData templateData,
final Map<String,Object> templateResolutionAttributes,
final HttpServletRequest request, final HttpServletResponse response,
final ServletContext servletContext,
final Locale locale,
final Map<String, Object> variables) {
super(configuration, templateResolutionAttributes, locale);
Validate.notNull(request, "Request cannot be null in web variables map");
Validate.notNull(response, "Response cannot be null in web variables map");
Validate.notNull(servletContext, "Servlet Context cannot be null in web variables map");
this.request = request;
this.response = response;
this.session = request.getSession(false);
this.servletContext = servletContext;
this.requestAttributesVariablesMap =
new RequestAttributesVariablesMap(configuration, templateData, templateResolutionAttributes, this.request, locale, variables);
this.requestParametersVariablesMap = new RequestParametersMap(this.request);
this.applicationAttributesVariablesMap = new ServletContextAttributesMap(this.servletContext);
this.sessionAttributesVariablesMap = new SessionAttributesMap(this.session);
}
public HttpServletRequest getRequest() {
return this.request;
}
public HttpServletResponse getResponse() {
return this.response;
}
public HttpSession getSession() {
return this.session;
}
public ServletContext getServletContext() {
return this.servletContext;
}
public boolean containsVariable(final String name) {
if (SESSION_VARIABLE_NAME.equals(name)) {
return this.sessionAttributesVariablesMap != null;
}
if (PARAM_VARIABLE_NAME.equals(name)) {
return true;
}
return APPLICATION_VARIABLE_NAME.equals(name) || this.requestAttributesVariablesMap.containsVariable(name);
}
public Object getVariable(final String key) {
if (SESSION_VARIABLE_NAME.equals(key)) {
return this.sessionAttributesVariablesMap;
}
if (PARAM_VARIABLE_NAME.equals(key)) {
return this.requestParametersVariablesMap;
}
if (APPLICATION_VARIABLE_NAME.equals(key)) {
return this.applicationAttributesVariablesMap;
}
return this.requestAttributesVariablesMap.getVariable(key);
}
public Set<String> getVariableNames() {
// Note this set will NOT include 'param', 'session' or 'application', as they are considered special
// ways to access attributes/parameters in these Servlet API structures
return this.requestAttributesVariablesMap.getVariableNames();
}
public void setVariable(final String name, final Object value) {
if (SESSION_VARIABLE_NAME.equals(name) ||
PARAM_VARIABLE_NAME.equals(name) ||
APPLICATION_VARIABLE_NAME.equals(name)) {
throw new IllegalArgumentException(
"Cannot set variable called '" + name + "' into web variables map: such name is a reserved word");
}
this.requestAttributesVariablesMap.setVariable(name, value);
}
public void setVariables(final Map<String, Object> variables) {
if (variables == null || variables.isEmpty()) {
return;
}
// First perform reserved word check on every variable name to be inserted
for (final String name : variables.keySet()) {
if (SESSION_VARIABLE_NAME.equals(name) ||
PARAM_VARIABLE_NAME.equals(name) ||
APPLICATION_VARIABLE_NAME.equals(name)) {
throw new IllegalArgumentException(
"Cannot set variable called '" + name + "' into web variables map: such name is a reserved word");
}
}
this.requestAttributesVariablesMap.setVariables(variables);
}
public void removeVariable(final String name) {
if (SESSION_VARIABLE_NAME.equals(name) ||
PARAM_VARIABLE_NAME.equals(name) ||
APPLICATION_VARIABLE_NAME.equals(name)) {
throw new IllegalArgumentException(
"Cannot remove variable called '" + name + "' in web variables map: such name is a reserved word");
}
this.requestAttributesVariablesMap.removeVariable(name);
}
public boolean isVariableLocal(final String name) {
return this.requestAttributesVariablesMap.isVariableLocal(name);
}
public boolean hasSelectionTarget() {
return this.requestAttributesVariablesMap.hasSelectionTarget();
}
public Object getSelectionTarget() {
return this.requestAttributesVariablesMap.getSelectionTarget();
}
public void setSelectionTarget(final Object selectionTarget) {
this.requestAttributesVariablesMap.setSelectionTarget(selectionTarget);
}
public IInliner getInliner() {
return this.requestAttributesVariablesMap.getInliner();
}
public void setInliner(final IInliner inliner) {
this.requestAttributesVariablesMap.setInliner(inliner);
}
public TemplateData getTemplateData() {
return this.requestAttributesVariablesMap.getTemplateData();
}
public void setTemplateData(final TemplateData templateData) {
this.requestAttributesVariablesMap.setTemplateData(templateData);
}
public List<TemplateData> getTemplateStack() {
return this.requestAttributesVariablesMap.getTemplateStack();
}
public void setElementTag(final IProcessableElementTag elementTag) {
this.requestAttributesVariablesMap.setElementTag(elementTag);
}
public List<IProcessableElementTag> getElementStack() {
return this.requestAttributesVariablesMap.getElementStack();
}
public List<IProcessableElementTag> getElementStackAbove(final int contextLevel) {
return this.requestAttributesVariablesMap.getElementStackAbove(contextLevel);
}
public int level() {
return this.requestAttributesVariablesMap.level();
}
public void increaseLevel() {
this.requestAttributesVariablesMap.increaseLevel();
}
public void decreaseLevel() {
this.requestAttributesVariablesMap.decreaseLevel();
}
public String getStringRepresentationByLevel() {
// Request parameters, session and servlet context can be safely ignored here
return this.requestAttributesVariablesMap.getStringRepresentationByLevel();
}
@Override
public String toString() {
// Request parameters, session and servlet context can be safely ignored here
return this.requestAttributesVariablesMap.toString();
}
static Object resolveLazy(final Object variable) {
/*
* Check the possibility that this variable is a lazy one, in which case we should not return it directly
* but instead make sure it is initialized and return its value.
*/
if (variable != null && variable instanceof ILazyContextVariable) {
return ((ILazyContextVariable)variable).getValue();
}
return variable;
}
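    /*
     * Read-only Map view over the HttpSession attributes, backing the 'session' variable. A null
     * session (i.e. no session has been created yet) is treated as an empty map, and all mutating
     * operations are unsupported (see NoOpMapImpl).
     */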
private static final class SessionAttributesMap extends NoOpMapImpl {
private final HttpSession session;
SessionAttributesMap(final HttpSession session) {
super();
this.session = session;
}
@Override
public int size() {
if (this.session == null) {
return 0;
}
int size = 0;
final Enumeration<String> attributeNames = this.session.getAttributeNames();
while (attributeNames.hasMoreElements()) {
attributeNames.nextElement();
size++;
}
return size;
}
@Override
public boolean isEmpty() {
if (this.session == null) {
return true;
}
final Enumeration<String> attributeNames = this.session.getAttributeNames();
return attributeNames.hasMoreElements();
}
@Override
public boolean containsKey(final Object key) {
// Even if not completely correct to return 'true' for entries that might not exist, this is needed
// in order to avoid Spring's MapAccessor throwing an exception when trying to access an element
// that doesn't exist -- in the case of request parameters, session and servletContext attributes most
// developers would expect null to be returned in such case, and that's what this 'true' will cause.
return true;
}
@Override
public boolean containsValue(final Object value) {
// It wouldn't be consistent to have an 'ad hoc' implementation of #containsKey() but a 100% correct
// implementation of #containsValue(), so we are leaving this as unsupported.
throw new UnsupportedOperationException("Map does not support #containsValue()");
}
@Override
public Object get(final Object key) {
if (this.session == null) {
return null;
}
return resolveLazy(this.session.getAttribute(key != null? key.toString() : null));
}
@Override
public Set<String> keySet() {
if (this.session == null) {
return Collections.emptySet();
}
final Set<String> keySet = new LinkedHashSet<String>(5);
final Enumeration<String> attributeNames = this.session.getAttributeNames();
while (attributeNames.hasMoreElements()) {
keySet.add(attributeNames.nextElement());
}
return keySet;
}
@Override
public Collection<Object> values() {
if (this.session == null) {
return Collections.emptySet();
}
final List<Object> values = new ArrayList<Object>(5);
final Enumeration<String> attributeNames = this.session.getAttributeNames();
while (attributeNames.hasMoreElements()) {
values.add(this.session.getAttribute(attributeNames.nextElement()));
}
return values;
}
@Override
public Set<Entry<String,Object>> entrySet() {
if (this.session == null) {
return Collections.emptySet();
}
final Set<Entry<String,Object>> entrySet = new LinkedHashSet<Entry<String, Object>>(5);
final Enumeration<String> attributeNames = this.session.getAttributeNames();
while (attributeNames.hasMoreElements()) {
final String key = attributeNames.nextElement();
final Object value = this.session.getAttribute(key);
entrySet.add(new MapEntry(key, value));
}
return entrySet;
}
}
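    /*
     * Read-only Map view over the ServletContext attributes, backing the 'application' variable.
     */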
private static final class ServletContextAttributesMap extends NoOpMapImpl {
private final ServletContext servletContext;
ServletContextAttributesMap(final ServletContext servletContext) {
super();
this.servletContext = servletContext;
}
@Override
public int size() {
int size = 0;
final Enumeration<String> attributeNames = this.servletContext.getAttributeNames();
while (attributeNames.hasMoreElements()) {
attributeNames.nextElement();
size++;
}
return size;
}
@Override
public boolean isEmpty() {
final Enumeration<String> attributeNames = this.servletContext.getAttributeNames();
return attributeNames.hasMoreElements();
}
@Override
public boolean containsKey(final Object key) {
// Even if not completely correct to return 'true' for entries that might not exist, this is needed
// in order to avoid Spring's MapAccessor throwing an exception when trying to access an element
// that doesn't exist -- in the case of request parameters, session and servletContext attributes most
// developers would expect null to be returned in such case, and that's what this 'true' will cause.
return true;
}
@Override
public boolean containsValue(final Object value) {
// It wouldn't be consistent to have an 'ad hoc' implementation of #containsKey() but a 100% correct
// implementation of #containsValue(), so we are leaving this as unsupported.
throw new UnsupportedOperationException("Map does not support #containsValue()");
}
@Override
public Object get(final Object key) {
return resolveLazy(this.servletContext.getAttribute(key != null? key.toString() : null));
}
@Override
public Set<String> keySet() {
final Set<String> keySet = new LinkedHashSet<String>(5);
final Enumeration<String> attributeNames = this.servletContext.getAttributeNames();
while (attributeNames.hasMoreElements()) {
keySet.add(attributeNames.nextElement());
}
return keySet;
}
@Override
public Collection<Object> values() {
final List<Object> values = new ArrayList<Object>(5);
final Enumeration<String> attributeNames = this.servletContext.getAttributeNames();
while (attributeNames.hasMoreElements()) {
values.add(this.servletContext.getAttribute(attributeNames.nextElement()));
}
return values;
}
@Override
public Set<Map.Entry<String,Object>> entrySet() {
final Set<Map.Entry<String,Object>> entrySet = new LinkedHashSet<Map.Entry<String, Object>>(5);
final Enumeration<String> attributeNames = this.servletContext.getAttributeNames();
while (attributeNames.hasMoreElements()) {
final String key = attributeNames.nextElement();
final Object value = this.servletContext.getAttribute(key);
entrySet.add(new MapEntry(key, value));
}
return entrySet;
}
}
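    /*
     * Read-only Map view over the request parameters, backing the 'param' variable. Parameter values
     * are wrapped in RequestParameterValues so that both non-indexed and indexed access work as expected.
     */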
private static final class RequestParametersMap extends NoOpMapImpl {
private final HttpServletRequest request;
RequestParametersMap(final HttpServletRequest request) {
super();
this.request = request;
}
@Override
public int size() {
return this.request.getParameterMap().size();
}
@Override
public boolean isEmpty() {
return this.request.getParameterMap().isEmpty();
}
@Override
public boolean containsKey(final Object key) {
// Even if not completely correct to return 'true' for entries that might not exist, this is needed
// in order to avoid Spring's MapAccessor throwing an exception when trying to access an element
// that doesn't exist -- in the case of request parameters, session and servletContext attributes most
// developers would expect null to be returned in such case, and that's what this 'true' will cause.
return true;
}
@Override
public boolean containsValue(final Object value) {
// It wouldn't be consistent to have an 'ad hoc' implementation of #containsKey() but a 100% correct
// implementation of #containsValue(), so we are leaving this as unsupported.
throw new UnsupportedOperationException("Map does not support #containsValue()");
}
@Override
public Object get(final Object key) {
final String[] parameterValues = this.request.getParameterValues(key != null? key.toString() : null);
if (parameterValues == null) {
return null;
}
return new RequestParameterValues(parameterValues);
}
@Override
public Set<String> keySet() {
return this.request.getParameterMap().keySet();
}
@Override
public Collection<Object> values() {
return this.request.getParameterMap().values();
}
@Override
public Set<Map.Entry<String,Object>> entrySet() {
return this.request.getParameterMap().entrySet();
}
}
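    /*
     * Leveled variable map backed directly by the HttpServletRequest attributes. For every level above 0
     * it records, per attribute it touches, the previous and the new value, so that decreaseLevel() can
     * restore the request to its earlier state. Attributes written to the request from outside this map
     * are left untouched (note the identity check against the current request value in decreaseLevel()).
     */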
private static final class RequestAttributesVariablesMap extends AbstractEngineContext implements IEngineContext {
private static final int DEFAULT_ELEMENT_HIERARCHY_SIZE = 20;
private static final int DEFAULT_LEVELS_SIZE = 10;
private static final int DEFAULT_LEVELARRAYS_SIZE = 5;
private final HttpServletRequest request;
private int level = 0;
private int index = 0;
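        /*
         * Parallel arrays indexed by 'index': levels[i] holds the context level that slot i refers to,
         * while names/oldValues/newValues/levelSizes (plus selectionTargets/inliners/templateDatas) hold
         * the data recorded for that level. 'index' points at the slot assigned to the current level
         * once ensureLevelInitialized() has run for it.
         */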
private int[] levels;
private String[][] names;
private Object[][] oldValues;
private Object[][] newValues;
private int[] levelSizes;
private SelectionTarget[] selectionTargets;
private IInliner[] inliners;
private TemplateData[] templateDatas;
private IProcessableElementTag[] elementTags;
private SelectionTarget lastSelectionTarget = null;
private IInliner lastInliner = null;
private TemplateData lastTemplateData = null;
private final List<TemplateData> templateStack;
RequestAttributesVariablesMap(
final IEngineConfiguration configuration,
final TemplateData templateData,
final Map<String,Object> templateResolutionAttributes,
final HttpServletRequest request,
final Locale locale,
final Map<String, Object> variables) {
super(configuration, templateResolutionAttributes, locale);
this.request = request;
this.levels = new int[DEFAULT_LEVELS_SIZE];
this.names = new String[DEFAULT_LEVELS_SIZE][];
this.oldValues = new Object[DEFAULT_LEVELS_SIZE][];
this.newValues = new Object[DEFAULT_LEVELS_SIZE][];
this.levelSizes = new int[DEFAULT_LEVELS_SIZE];
this.selectionTargets = new SelectionTarget[DEFAULT_LEVELS_SIZE];
this.inliners = new IInliner[DEFAULT_LEVELS_SIZE];
this.templateDatas = new TemplateData[DEFAULT_LEVELS_SIZE];
this.elementTags = new IProcessableElementTag[DEFAULT_ELEMENT_HIERARCHY_SIZE];
Arrays.fill(this.levels, Integer.MAX_VALUE);
Arrays.fill(this.names, null);
Arrays.fill(this.oldValues, null);
Arrays.fill(this.newValues, null);
Arrays.fill(this.levelSizes, 0);
Arrays.fill(this.selectionTargets, null);
Arrays.fill(this.inliners, null);
Arrays.fill(this.templateDatas, null);
Arrays.fill(this.elementTags, null);
this.levels[0] = 0;
this.templateDatas[0] = templateData;
this.lastTemplateData = templateData;
this.templateStack = new ArrayList<TemplateData>(DEFAULT_LEVELS_SIZE);
this.templateStack.add(templateData);
if (variables != null) {
setVariables(variables);
}
}
public boolean containsVariable(final String name) {
return this.request.getAttribute(name) != null;
}
public Object getVariable(final String key) {
return resolveLazy(this.request.getAttribute(key));
}
public Set<String> getVariableNames() {
// --------------------------
// Note this method relies on HttpServletRequest#getAttributeNames(), which is an extremely slow and
            // inefficient method in implementations like Apache Tomcat's, so use of this method should be
            // carefully controlled and kept to a minimum. In particular, calling it for every expression
            // evaluation should be avoided; only sporadic uses are acceptable.
// Note also it would not be a good idea to cache the attribute names coming from the request if we
// want to keep complete independence of the HttpServletRequest object, so that it can be modified
// from the outside (e.g. from other libraries like Tiles) with Thymeleaf perfectly integrating with
// those modifications.
// --------------------------
final Set<String> variableNames = new HashSet<String>(10);
final Enumeration<String> attributeNamesEnum = this.request.getAttributeNames();
while (attributeNamesEnum.hasMoreElements()) {
variableNames.add(attributeNamesEnum.nextElement());
}
return variableNames;
}
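        /*
         * Linear scan, most recently added first, of the attribute names recorded at parallel-array
         * slot 'idx'; returns the position within that slot's arrays, or -1 if the name is not found.
         */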
private int searchNameInIndex(final String name, final int idx) {
int n = this.levelSizes[idx];
if (name == null) {
while (n-- != 0) {
if (this.names[idx][n] == null) {
return n;
}
}
return -1;
}
while (n-- != 0) {
if (name.equals(this.names[idx][n])) {
return n;
}
}
return -1;
}
public void setVariable(final String name, final Object value) {
ensureLevelInitialized(true);
if (this.level > 0) {
// We will only take care of new/old values if we are not on level 0
int levelIndex = searchNameInIndex(name,this.index);
if (levelIndex >= 0) {
// There already is a registered movement for this key - we should modify it instead of creating a new one
this.newValues[this.index][levelIndex] = value;
} else {
if (this.names[this.index].length == this.levelSizes[this.index]) {
// We need to grow the arrays for this level
this.names[this.index] = Arrays.copyOf(this.names[this.index], this.names[this.index].length + DEFAULT_LEVELARRAYS_SIZE);
this.newValues[this.index] = Arrays.copyOf(this.newValues[this.index], this.newValues[this.index].length + DEFAULT_LEVELARRAYS_SIZE);
this.oldValues[this.index] = Arrays.copyOf(this.oldValues[this.index], this.oldValues[this.index].length + DEFAULT_LEVELARRAYS_SIZE);
}
levelIndex = this.levelSizes[this.index]; // We will add at the end
this.names[this.index][levelIndex] = name;
/*
* Per construction, according to the Servlet API, an attribute set to null and a non-existing
* attribute are exactly the same. So we don't really have a reason to worry about the attribute
* already existing or not when it was set to null.
*/
this.oldValues[this.index][levelIndex] = this.request.getAttribute(name);
this.newValues[this.index][levelIndex] = value;
this.levelSizes[this.index]++;
}
}
            // It does not matter whether the value is null or not: a null value is equivalent to calling removeAttribute()
this.request.setAttribute(name, value);
}
public void setVariables(final Map<String, Object> variables) {
if (variables == null || variables.isEmpty()) {
return;
}
for (final Map.Entry<String,Object> entry : variables.entrySet()) {
setVariable(entry.getKey(), entry.getValue());
}
}
public void removeVariable(final String name) {
setVariable(name, null);
}
public boolean isVariableLocal(final String name) {
if (this.level == 0) {
// We are at level 0, so we cannot have local variables at all
return false;
}
int n = this.index + 1;
while (n-- > 1) { // variables at n == 0 are not local!
final int idx = searchNameInIndex(name, n);
if (idx >= 0) {
return this.newValues[n][idx] != null;
}
}
return false;
}
public boolean hasSelectionTarget() {
if (this.lastSelectionTarget != null) {
return true;
}
int n = this.index + 1;
while (n-- != 0) {
if (this.selectionTargets[n] != null) {
return true;
}
}
return false;
}
public Object getSelectionTarget() {
if (this.lastSelectionTarget != null) {
return this.lastSelectionTarget.selectionTarget;
}
int n = this.index + 1;
while (n-- != 0) {
if (this.selectionTargets[n] != null) {
this.lastSelectionTarget = this.selectionTargets[n];
return this.lastSelectionTarget.selectionTarget;
}
}
return null;
}
public void setSelectionTarget(final Object selectionTarget) {
ensureLevelInitialized(false);
this.lastSelectionTarget = new SelectionTarget(selectionTarget);
this.selectionTargets[this.index] = this.lastSelectionTarget;
}
public IInliner getInliner() {
if (this.lastInliner != null) {
if (this.lastInliner == NoOpInliner.INSTANCE) {
return null;
}
return this.lastInliner;
}
int n = this.index + 1;
while (n-- != 0) {
if (this.inliners[n] != null) {
this.lastInliner = this.inliners[n];
if (this.lastInliner == NoOpInliner.INSTANCE) {
return null;
}
return this.lastInliner;
}
}
return null;
}
public void setInliner(final IInliner inliner) {
ensureLevelInitialized(false);
            // We use NoOpInliner.INSTANCE in order to signal when inlining has actually been disabled
this.lastInliner = (inliner == null? NoOpInliner.INSTANCE : inliner);
this.inliners[this.index] = this.lastInliner;
}
public TemplateData getTemplateData() {
if (this.lastTemplateData != null) {
return this.lastTemplateData;
}
int n = this.index + 1;
while (n-- != 0) {
if (this.templateDatas[n] != null) {
this.lastTemplateData = this.templateDatas[n];
return this.lastTemplateData;
}
}
return null;
}
public void setTemplateData(final TemplateData templateData) {
Validate.notNull(templateData, "Template Data cannot be null");
ensureLevelInitialized(false);
this.lastTemplateData = templateData;
this.templateDatas[this.index] = this.lastTemplateData;
this.templateStack.clear();
}
public List<TemplateData> getTemplateStack() {
if (!this.templateStack.isEmpty()) {
                // It would have been emptied if we had just decreased a level or set new template data
return Collections.unmodifiableList(new ArrayList<TemplateData>(this.templateStack));
}
for (int i = 0; i <= this.index; i++) {
if (this.templateDatas[i] != null) {
this.templateStack.add(this.templateDatas[i]);
}
}
return Collections.unmodifiableList(new ArrayList<TemplateData>(this.templateStack));
}
public void setElementTag(final IProcessableElementTag elementTag) {
if (this.elementTags.length <= this.level) {
this.elementTags = Arrays.copyOf(this.elementTags, Math.max(this.level, this.elementTags.length + DEFAULT_ELEMENT_HIERARCHY_SIZE));
}
this.elementTags[this.level] = elementTag;
}
public List<IProcessableElementTag> getElementStack() {
final List<IProcessableElementTag> elementStack = new ArrayList<IProcessableElementTag>(this.level);
for (int i = 0; i <= this.level && i < this.elementTags.length; i++) {
if (this.elementTags[i] != null) {
elementStack.add(this.elementTags[i]);
}
}
return Collections.unmodifiableList(elementStack);
}
public List<IProcessableElementTag> getElementStackAbove(final int contextLevel) {
final List<IProcessableElementTag> elementStack = new ArrayList<IProcessableElementTag>(this.level);
for (int i = contextLevel + 1; i <= this.level && i < this.elementTags.length; i++) {
if (this.elementTags[i] != null) {
elementStack.add(this.elementTags[i]);
}
}
return Collections.unmodifiableList(elementStack);
}
private void ensureLevelInitialized(final boolean initVariables) {
// First, check if the current index already signals the current level (in which case, everything is OK)
if (this.levels[this.index] != this.level) {
// The current level still had no index assigned -- we must do it, and maybe even grow structures
this.index++; // This new index will be the one for our level
if (this.levels.length == this.index) {
this.levels = Arrays.copyOf(this.levels, this.levels.length + DEFAULT_LEVELS_SIZE);
Arrays.fill(this.levels, this.index, this.levels.length, Integer.MAX_VALUE); // We fill the new places with MAX_VALUE
this.names = Arrays.copyOf(this.names, this.names.length + DEFAULT_LEVELS_SIZE);
this.newValues = Arrays.copyOf(this.newValues, this.newValues.length + DEFAULT_LEVELS_SIZE);
this.oldValues = Arrays.copyOf(this.oldValues, this.oldValues.length + DEFAULT_LEVELS_SIZE);
this.levelSizes = Arrays.copyOf(this.levelSizes, this.levelSizes.length + DEFAULT_LEVELS_SIZE);
// No need to initialize new places in this.levelSizes as copyOf already fills with zeroes
this.selectionTargets = Arrays.copyOf(this.selectionTargets, this.selectionTargets.length + DEFAULT_LEVELS_SIZE);
this.inliners = Arrays.copyOf(this.inliners, this.inliners.length + DEFAULT_LEVELS_SIZE);
this.templateDatas = Arrays.copyOf(this.templateDatas, this.templateDatas.length + DEFAULT_LEVELS_SIZE);
}
this.levels[this.index] = this.level;
}
if (this.level > 0) {
// We will only take care of new/old values if we are not on level 0
if (initVariables && this.names[this.index] == null) {
// the arrays for this level have still not been created
this.names[this.index] = new String[DEFAULT_LEVELARRAYS_SIZE];
Arrays.fill(this.names[this.index], null);
this.newValues[this.index] = new Object[DEFAULT_LEVELARRAYS_SIZE];
Arrays.fill(this.newValues[this.index], null);
this.oldValues[this.index] = new Object[DEFAULT_LEVELARRAYS_SIZE];
Arrays.fill(this.oldValues[this.index], null);
this.levelSizes[this.index] = 0;
}
}
}
public int level() {
return this.level;
}
public void increaseLevel() {
this.level++;
}
public void decreaseLevel() {
Validate.isTrue(this.level > 0, "Cannot decrease variable map level below 0");
if (this.levels[this.index] == this.level) {
this.levels[this.index] = Integer.MAX_VALUE;
if (this.names[this.index] != null && this.levelSizes[this.index] > 0) {
// There were movements at this level, so we have to revert them
int n = this.levelSizes[this.index];
while (n-- != 0) {
final String name = this.names[this.index][n];
final Object newValue = this.newValues[this.index][n];
final Object oldValue = this.oldValues[this.index][n];
final Object currentValue = this.request.getAttribute(name);
if (newValue == currentValue) {
// Only if the value matches, in order to avoid modifying values that have been set directly
// into the request.
this.request.setAttribute(name,oldValue);
}
}
this.levelSizes[this.index] = 0;
}
this.selectionTargets[this.index] = null;
this.inliners[this.index] = null;
this.templateDatas[this.index] = null;
this.index--;
// These might not belong to this level, but just in case...
this.lastSelectionTarget = null;
this.lastInliner = null;
this.lastTemplateData = null;
this.templateStack.clear();
}
if (this.level < this.elementTags.length) {
this.elementTags[this.level] = null;
}
this.level--;
}
public String getStringRepresentationByLevel() {
final StringBuilder strBuilder = new StringBuilder();
strBuilder.append('{');
final Map<String,Object> oldValuesSum = new LinkedHashMap<String, Object>();
int n = this.index + 1;
while (n-- != 1) {
final Map<String,Object> levelVars = new LinkedHashMap<String, Object>();
if (this.names[n] != null && this.levelSizes[n] > 0) {
for (int i = 0; i < this.levelSizes[n]; i++) {
final String name = this.names[n][i];
final Object newValue = this.newValues[n][i];
final Object oldValue = this.oldValues[n][i];
if (newValue == oldValue) {
// This is a no-op!
continue;
}
if (!oldValuesSum.containsKey(name)) {
                            // This means that either the value in the request is the same as the newValue, or it
                            // was modified directly at the request and we need to discard this entry.
if (newValue != this.request.getAttribute(name)) {
continue;
}
} else {
                            // This means that either the old value in the map is the same as the newValue, or it
                            // was modified directly at the request and we need to discard this entry.
if (newValue != oldValuesSum.get(name)) {
continue;
}
}
levelVars.put(name, newValue);
oldValuesSum.put(name, oldValue);
}
}
if (!levelVars.isEmpty() || this.selectionTargets[n] != null || this.inliners[n] != null) {
if (strBuilder.length() > 1) {
strBuilder.append(',');
}
strBuilder.append(this.levels[n]).append(":");
if (!levelVars.isEmpty() || n == 0) {
strBuilder.append(levelVars);
}
if (this.selectionTargets[n] != null) {
strBuilder.append("<").append(this.selectionTargets[n].selectionTarget).append(">");
}
if (this.inliners[n] != null) {
strBuilder.append("[").append(this.inliners[n].getName()).append("]");
}
if (this.templateDatas[n] != null) {
strBuilder.append("(").append(this.templateDatas[n].getTemplate()).append(")");
}
}
}
final Map<String,Object> requestAttributes = new LinkedHashMap<String, Object>();
final Enumeration<String> attrNames = this.request.getAttributeNames();
while (attrNames.hasMoreElements()) {
final String name = attrNames.nextElement();
if (oldValuesSum.containsKey(name)) {
final Object oldValue = oldValuesSum.get(name);
if (oldValue != null) {
requestAttributes.put(name, oldValuesSum.get(name));
}
oldValuesSum.remove(name);
} else {
requestAttributes.put(name, this.request.getAttribute(name));
}
}
for (Map.Entry<String,Object> oldValuesSumEntry : oldValuesSum.entrySet()) {
final String name = oldValuesSumEntry.getKey();
if (!requestAttributes.containsKey(name)) {
final Object oldValue = oldValuesSumEntry.getValue();
if (oldValue != null) {
requestAttributes.put(name, oldValue);
}
}
}
if (strBuilder.length() > 1) {
strBuilder.append(',');
}
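            // After the loop above n == 0, i.e. the slot corresponding to level 0 (the request itself)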
strBuilder.append(this.levels[n]).append(":");
strBuilder.append(requestAttributes.toString());
if (this.selectionTargets[0] != null) {
strBuilder.append("<").append(this.selectionTargets[0].selectionTarget).append(">");
}
if (this.inliners[0] != null) {
strBuilder.append("[").append(this.inliners[0].getName()).append("]");
}
if (this.templateDatas[0] != null) {
strBuilder.append("(").append(this.templateDatas[0].getTemplate()).append(")");
}
strBuilder.append("}[");
strBuilder.append(this.level);
strBuilder.append(']');
return strBuilder.toString();
}
@Override
public String toString() {
final Map<String,Object> equivalentMap = new LinkedHashMap<String, Object>();
final Enumeration<String> attributeNamesEnum = this.request.getAttributeNames();
while (attributeNamesEnum.hasMoreElements()) {
final String name = attributeNamesEnum.nextElement();
equivalentMap.put(name, this.request.getAttribute(name));
}
final String textInliningStr = (getInliner() != null? "[" + getInliner().getName() + "]" : "" );
final String templateDataStr = "(" + getTemplateData().getTemplate() + ")";
return equivalentMap.toString() + (hasSelectionTarget()? "<" + getSelectionTarget() + ">" : "") + textInliningStr + templateDataStr;
}
/*
* This class works as a wrapper for the selection target, in order to differentiate whether we
* have set a selection target, we have not, or we have set it but it's null
*/
private static final class SelectionTarget {
final Object selectionTarget;
SelectionTarget(final Object selectionTarget) {
super();
this.selectionTarget = selectionTarget;
}
}
}
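    /*
     * Base Map implementation for the param/session/application wrappers: read operations default to
     * 'empty' results and all mutating operations throw UnsupportedOperationException, as these maps
     * are read-only views over Servlet API structures.
     */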
private abstract static class NoOpMapImpl implements Map<String,Object> {
protected NoOpMapImpl() {
super();
}
public int size() {
return 0;
}
public boolean isEmpty() {
return true;
}
public boolean containsKey(final Object key) {
return false;
}
public boolean containsValue(final Object value) {
return false;
}
public Object get(final Object key) {
return null;
}
public Object put(final String key, final Object value) {
throw new UnsupportedOperationException("Cannot add new entry: map is immutable");
}
public Object remove(final Object key) {
throw new UnsupportedOperationException("Cannot remove entry: map is immutable");
}
public void putAll(final Map<? extends String, ? extends Object> m) {
throw new UnsupportedOperationException("Cannot add new entry: map is immutable");
}
public void clear() {
throw new UnsupportedOperationException("Cannot clear: map is immutable");
}
public Set<String> keySet() {
return Collections.emptySet();
}
public Collection<Object> values() {
return Collections.emptyList();
}
public Set<Entry<String,Object>> entrySet() {
return Collections.emptySet();
}
static final class MapEntry implements Map.Entry<String,Object> {
private final String key;
private final Object value;
MapEntry(final String key, final Object value) {
super();
this.key = key;
this.value = value;
}
public String getKey() {
return this.key;
}
public Object getValue() {
return this.value;
}
public Object setValue(final Object value) {
throw new UnsupportedOperationException("Cannot set value: map is immutable");
}
}
}
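    /*
     * Immutable List view over the values of a single request parameter; see toString() below for how
     * non-indexed access to single-valued parameters is handled.
     */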
private static final class RequestParameterValues extends AbstractList<String> {
private final String[] parameterValues;
public final int length;
RequestParameterValues(final String[] parameterValues) {
this.parameterValues = parameterValues;
this.length = this.parameterValues.length;
}
@Override
public int size() {
return this.length;
}
@Override
public Object[] toArray() {
return this.parameterValues.clone();
}
@Override
public <T> T[] toArray(final T[] arr) {
if (arr.length < this.length) {
final T[] copy = (T[]) Array.newInstance(arr.getClass().getComponentType(), this.length);
System.arraycopy(this.parameterValues, 0, copy, 0, this.length);
return copy;
}
System.arraycopy(this.parameterValues, 0, arr, 0, this.length);
if (arr.length > this.length) {
arr[this.length] = null;
}
return arr;
}
@Override
public String get(final int index) {
return this.parameterValues[index];
}
@Override
public int indexOf(final Object obj) {
final String[] a = this.parameterValues;
if (obj == null) {
for (int i = 0; i < a.length; i++) {
if (a[i] == null) {
return i;
}
}
} else {
for (int i = 0; i < a.length; i++) {
if (obj.equals(a[i])) {
return i;
}
}
}
return -1;
}
@Override
public boolean contains(final Object obj) {
return indexOf(obj) != -1;
}
@Override
public String toString() {
            // This toString() method is responsible for outputting non-indexed request parameters in the
            // way most people expect, i.e. returning parameterValues[0] when the parameter is single-valued
            // and accessed without an index (${param.a}), returning the default List toString() when it is
            // multi-valued and accessed without an index, and returning the specific value when accessed
            // with an index (${param.a[0]})
if (this.length == 0) {
return "";
}
if (this.length == 1) {
return this.parameterValues[0];
}
return super.toString();
}
}
}
|
Fix WebEngineContext application/session isEmpty()
|
src/main/java/org/thymeleaf/context/WebEngineContext.java
|
Fix WebEngineContext application/session isEmpty()
|
<ide><path>src/main/java/org/thymeleaf/context/WebEngineContext.java
<ide> return true;
<ide> }
<ide> final Enumeration<String> attributeNames = this.session.getAttributeNames();
<del> return attributeNames.hasMoreElements();
<add> return !attributeNames.hasMoreElements();
<ide> }
<ide>
<ide> @Override
<ide> @Override
<ide> public boolean isEmpty() {
<ide> final Enumeration<String> attributeNames = this.servletContext.getAttributeNames();
<del> return attributeNames.hasMoreElements();
<add> return !attributeNames.hasMoreElements();
<ide> }
<ide>
<ide> @Override
|