code (string, lengths 10–174k) | nl (string, lengths 3–129k)
---|---
public boolean isLoadMoreEnabled(){
return mLoadMoreEnabled;
}
| Returns whether the load-more function is enabled |
protected void scrollAndUpdateCoords(final MouseEvent e){
if (GUI.debugFX) {
}
final int x=(int)e.getX();
final int y=(int)e.getY();
updateCoords(x,y);
}
| Scroll to the visible rectangle and update the coordinates box on screen |
private void loadColumns(){
ArrayList<MReportColumn> list=new ArrayList<MReportColumn>();
String sql="SELECT * FROM PA_ReportColumn WHERE PA_ReportColumnSet_ID=? AND IsActive='Y' ORDER BY SeqNo";
PreparedStatement pstmt=null;
try {
pstmt=DB.prepareStatement(sql,get_TrxName());
pstmt.setInt(1,getPA_ReportColumnSet_ID());
ResultSet rs=pstmt.executeQuery();
while (rs.next()) list.add(new MReportColumn(getCtx(),rs,null));
rs.close();
pstmt.close();
pstmt=null;
}
catch ( Exception e) {
log.log(Level.SEVERE,sql,e);
}
finally {
try {
if (pstmt != null) pstmt.close();
}
catch ( Exception e) {
}
pstmt=null;
}
m_columns=new MReportColumn[list.size()];
list.toArray(m_columns);
log.finest("ID=" + getPA_ReportColumnSet_ID() + " - Size="+ list.size());
}
| Load contained columns |
private void validateFixedPartitionAttributesAgainstTotalNumberBuckets(){
for ( FixedPartitionAttributesImpl fpa : this.pr.getFixedPartitionAttributesImpl()) {
int numBuckets=0;
Set<FixedPartitionAttributesImpl> allFPAs=new HashSet<FixedPartitionAttributesImpl>(this.pr.getRegionAdvisor().adviseAllFixedPartitionAttributes());
allFPAs.add(fpa);
for ( FixedPartitionAttributes samefpa : allFPAs) {
numBuckets=numBuckets + samefpa.getNumBuckets();
}
if (numBuckets > this.pr.getTotalNumberOfBuckets()) {
Object[] prms=new Object[]{this.pr.getName(),numBuckets,this.pr.getTotalNumberOfBuckets()};
throw new IllegalStateException(LocalizedStrings.PartitionedRegionConfigValidator_FOR_REGION_0_SUM_OF_NUM_BUCKETS_1_FOR_DIFFERENT_PRIMARY_PARTITIONS_SHOULD_NOT_BE_GREATER_THAN_TOTAL_NUM_BUCKETS_2.toString(prms));
}
}
}
| validate that for all partitions defined across all datastores, sum of num-buckets is not more than total-num-buckets defined |
public static List<SimpleOrderedMap<String>> formatForResponseHeader(List<ToleratedUpdateError> errs){
List<SimpleOrderedMap<String>> result=new ArrayList<>(errs.size());
for ( ToleratedUpdateError e : errs) {
result.add(e.getSimpleMap());
}
return result;
}
| returns a list of maps of simple objects suitable for putting in a SolrQueryResponse header |
public static void mount(FileSystem fs,Path mountPoint,boolean readonly,boolean log,Map<String,String> mountOptions) throws IOException {
if (readonly) fs=new ReadOnlyFileSystem(fs);
Fuse.mount(new FuseFileSystemProvider(fs,log).log(log),mountPoint,false,log,mountOptions);
}
| Mounts a filesystem. |
protected SVGStyleSheetProcessingInstruction(){
}
| Creates a new ProcessingInstruction object. |
public boolean equals(int i){
return right.equals(i);
}
| Check if the expression is equal to a value |
@Override public void onChangedAdded(final RPObject object,final RPObject changes){
super.onChangedAdded(object,changes);
if (changes.has("hidden")) {
hidden=true;
}
}
| Called when the object's attribute(s) have been added or changed. |
public void dismiss(){
dismissInternal(false);
}
| Dismiss the fragment and its bottom sheet. If the fragment was added to the back stack, all back stack state up to and including this entry will be popped. Otherwise, a new transaction will be committed to remove this fragment. |
public final void testValidateFails(){
DomainNameValidator domainNameValidator=new DomainNameValidator("foo");
assertFalse(domainNameValidator.validate("foo.t.t.c"));
assertFalse(domainNameValidator.validate("foo,com"));
assertFalse(domainNameValidator.validate("foo"));
assertFalse(domainNameValidator.validate("foo.123"));
assertFalse(domainNameValidator.validate(".com"));
assertFalse(domainNameValidator.validate("foo.a"));
assertFalse(domainNameValidator.validate("foo.com/users"));
assertFalse(domainNameValidator.validate("-foo.com"));
assertFalse(domainNameValidator.validate("foo-.com"));
assertFalse(domainNameValidator.validate("sub.-foo.com"));
assertFalse(domainNameValidator.validate("sub.foo-.com"));
}
| Tests the validate method with inputs that are expected to fail validation. |
private void initializeLiveAttributes(){
in=createLiveAnimatedString(null,SVG_IN_ATTRIBUTE);
surfaceScale=createLiveAnimatedNumber(null,SVG_SURFACE_SCALE_ATTRIBUTE,1f);
diffuseConstant=createLiveAnimatedNumber(null,SVG_DIFFUSE_CONSTANT_ATTRIBUTE,1f);
}
| Initializes the live attribute values of this element. |
public SimpleIntegerProperty qosProperty(){
return this.qos;
}
| Quality of service property. |
public void waitResponse(SipTransactionContext ctx,long timeout){
ctx.waitResponse(timeout);
SipMessage message=ctx.getMessageReceived();
if (!(message instanceof SipRequest) || !ctx.isSipResponse()) {
return;
}
String method=((SipRequest)message).getMethod();
SipResponse response=ctx.getSipResponse();
if (response == null) {
return;
}
if (!Request.REGISTER.equals(method)) {
WarningHeader warn=(WarningHeader)response.getHeader(WarningHeader.NAME);
if (Response.FORBIDDEN == ctx.getStatusCode() && warn == null) {
mNetworkInterface.getRegistrationManager().restart();
}
}
if (!Request.REGISTER.equals(method)) {
return;
}
KeepAliveManager keepAliveManager=mSipInterface.getKeepAliveManager();
if (keepAliveManager == null) {
return;
}
ListIterator<ViaHeader> iterator=response.getViaHeaders();
if (!iterator.hasNext()) {
return;
}
ViaHeader respViaHeader=iterator.next();
String keepStr=respViaHeader.getParameter("keep");
if (keepStr == null) {
return;
}
try {
long viaKeep=Integer.parseInt(keepStr) * SECONDS_TO_MILLISECONDS_CONVERSION_RATE;
if (viaKeep > 0) {
keepAliveManager.setPeriod(viaKeep);
}
else if (viaKeep == 0) {
keepAliveManager.setPeriod(mRcsSettings.getSipKeepAlivePeriod());
}
}
catch ( NumberFormatException e) {
keepAliveManager.setPeriod(mRcsSettings.getSipKeepAlivePeriod());
}
}
| Wait for a response |
protected void flow(Env<AttrContext> env,Queue<Env<AttrContext>> results){
try {
if (shouldStop(CompileState.FLOW)) return;
if (relax || compileStates.isDone(env,CompileState.FLOW)) {
results.add(env);
return;
}
if (verboseCompilePolicy) printNote("[flow " + env.enclClass.sym + "]");
JavaFileObject prev=log.useSource(env.enclClass.sym.sourcefile != null ? env.enclClass.sym.sourcefile : env.toplevel.sourcefile);
try {
make.at(Position.FIRSTPOS);
TreeMaker localMake=make.forToplevel(env.toplevel);
flow.analyzeTree(env,localMake);
compileStates.put(env,CompileState.FLOW);
if (shouldStop(CompileState.FLOW)) return;
results.add(env);
}
finally {
log.useSource(prev);
}
}
finally {
if (!taskListener.isEmpty()) {
TaskEvent e=new TaskEvent(TaskEvent.Kind.ANALYZE,env.toplevel,env.enclClass.sym);
taskListener.finished(e);
}
}
}
| Perform dataflow checks on an attributed parse tree. |
private static void trial_search(ICollectionSearch<String> collection,int num){
TrialSuite searchTS=new TrialSuite();
System.out.println("Search trial:" + collection);
for (int t=0; t < NUM_TRIALS; t++) {
int found=0;
long before=System.currentTimeMillis();
for (int w=0; w < fullWords.length; w++) {
if (collection.exists(fullWords[w])) {
found++;
}
}
long after=System.currentTimeMillis();
searchTS.addTrial(num,before,after);
if (num != found) {
System.err.println("Error in search: proper number of elements not found:" + found);
}
}
System.out.println(searchTS.computeTable());
}
| Carry out a search for all known words within the given collection and produce a report on execution times. <p> It is assumed that the table does not change during the search execution; otherwise the individual trials executed within this method would be biased. |
int runTest(Configuration conf,String[] args,OwlTest test) throws Exception {
conf.setInt(MRReasoningUtils.STEP_PROP,0);
conf.setInt(MRReasoningUtils.SCHEMA_UPDATE_PROP,0);
conf.setBoolean(MRReasoningUtils.DEBUG_FLAG,true);
conf.setBoolean(MRReasoningUtils.OUTPUT_FLAG,true);
Repository repo=MRReasoningUtils.getRepository(conf);
repo.initialize();
RepositoryConnection conn=repo.getConnection();
conn.clear();
conn.add(new StringReader(test.premise),"",RDFFormat.RDFXML);
conn.close();
repo.shutDown();
ReasoningDriver reasoner=new ReasoningDriver();
int result=ToolRunner.run(conf,reasoner,args);
test.success=(result == 0);
if (test.types.contains(TEST_INCONSISTENCY)) {
test.success=test.success && reasoner.hasInconsistencies();
}
if (test.types.contains(TEST_CONSISTENCY)) {
test.success=test.success && !reasoner.hasInconsistencies();
}
if (test.types.contains(TEST_NONENTAILMENT) || test.types.contains(TEST_ENTAILMENT)) {
System.out.println("Reading inferred triples...");
Schema schema=MRReasoningUtils.loadSchema(conf);
FileSystem fs=FileSystem.get(conf);
Path path=MRReasoningUtils.getOutputPath(conf,"final");
OutputCollector inferred=new OutputCollector();
NTriplesParser parser=new NTriplesParser();
parser.setRDFHandler(inferred);
if (fs.isDirectory(path)) {
for ( FileStatus status : fs.listStatus(path)) {
String s=status.getPath().getName();
if (s.startsWith(MRReasoningUtils.INCONSISTENT_OUT) || s.startsWith(MRReasoningUtils.DEBUG_OUT)) {
continue;
}
BufferedReader br=new BufferedReader(new InputStreamReader(fs.open(status.getPath())));
parser.parse(br,"");
br.close();
}
}
MRReasoningUtils.deleteIfExists(conf,"final");
test.inferred.addAll(inferred.triples);
if (test.types.contains(TEST_ENTAILMENT)) {
for ( Statement st : test.expected) {
Fact fact=new Fact(st);
if (!test.inferred.contains(st) && !triviallyTrue(fact.getTriple(),schema) && !schema.containsTriple(fact.getTriple())) {
test.error.add(st);
}
}
}
if (test.types.contains(TEST_NONENTAILMENT)) {
for ( Statement st : test.unexpected) {
Fact fact=new Fact(st);
if (test.inferred.contains(st) || schema.containsTriple(fact.getTriple())) {
test.error.add(st);
}
}
}
test.success=test.success && test.error.isEmpty();
}
conf.setBoolean(MRReasoningUtils.DEBUG_FLAG,false);
MRReasoningUtils.clean(conf);
return result;
}
| Verify that we can infer the correct triples or detect an inconsistency. |
synchronized void discardQueue(){
queue.removeAllElements();
for (Enumeration e=running.elements(); e.hasMoreElements(); ) {
ResourceThread t=(ResourceThread)e.nextElement();
t.cancel();
}
running.removeAllElements();
bgImageCompsSelected.removeAllElements();
bgImageCompsUnselected.removeAllElements();
bgImageCompsPressed.removeAllElements();
threadCount=0;
cssCount=-1;
started=false;
}
| Discards the entire queue and signals the running threads to cancel. This will be triggered if the user cancelled the page or moved to another page. |
public final void testPSSParameterSpec0204(){
try {
new PSSParameterSpec("SHA-1","MGF1",MGF1ParameterSpec.SHA1,-20,1);
fail("Expected IAE not thrown");
}
catch ( IllegalArgumentException e) {
}
}
| Test #4 for <code>PSSParameterSpec(String,String,AlgorithmParameterSpec,int,int)</code> ctor<br> Assertion: throws <code>IllegalArgumentException</code> if <code>saltLen</code> is less than 0 |
public void init(Table t,Graph g,Class tupleType){
if (m_table != null) {
throw new IllegalStateException("This TupleManager has already been initialized");
}
m_table=t;
m_graph=g;
m_tupleType=tupleType;
m_tuples=null;
}
| Initialize this TupleManager for use with a given Table. |
public void testLoadingWithLocalSubstitutions() throws Exception {
Properties props=new Properties();
props.setProperty("a","y");
props.setProperty("b1","a${a}");
props.setProperty("b2","a$a");
System.setProperty("a","x");
int count=TungstenProperties.substituteSystemValues(props);
Assert.assertEquals("substitution count",1,count);
Assert.assertEquals("y",props.getProperty("a"));
Assert.assertEquals("ay",props.getProperty("b1"));
Assert.assertEquals("a$a",props.getProperty("b2"));
}
| Tests ability to load properties with substitutions from local properties, showing that local substitutions override system property values. |
public void testStartWithOneExpandedWarDeployed() throws Exception {
if (getContainer().getId().startsWith("geronimo")) {
return;
}
File artifactDir=new File(getTestData().targetDir).getParentFile();
Expand expandTask=(Expand)new AntUtils().createProject().createTask("unwar");
expandTask.setDest(new File(artifactDir,"expanded-war"));
expandTask.setSrc(new File(getTestData().getTestDataFileFor("expanded-war")));
expandTask.execute();
Deployable war=new DefaultDeployableFactory().createDeployable(getContainer().getId(),new File(artifactDir,"expanded-war").getPath(),DeployableType.WAR);
getLocalContainer().getConfiguration().addDeployable(war);
URL warPingURL=new URL("http://localhost:" + getTestData().port + "/expanded-war"+ "/index.html");
startAndStop(warPingURL);
}
| Test start with one expanded WAR. |
private void updateUnmanagedVolumeAndInitiatorCounts(StoragePort sp,boolean countMetaMembers,StringMap dbMetrics){
Long volumeCount=0L;
Long initiatorCount=0L;
URIQueryResultList queryResult=new URIQueryResultList();
_dbClient.queryByConstraint(AlternateIdConstraint.Factory.getUnManagedMaskByPort(sp.getId().toString()),queryResult);
Iterator<URI> maskIt=queryResult.iterator();
while (maskIt.hasNext()) {
UnManagedExportMask umask=_dbClient.queryObject(UnManagedExportMask.class,maskIt.next());
if (umask != null && umask.getInactive() == false && !checkForMatchingExportMask(umask.getMaskName(),umask.getNativeId(),umask.getStorageSystemUri())) {
StringSet unmanagedVolumeUris=umask.getUnmanagedVolumeUris();
Long unmanagedVolumes=(unmanagedVolumeUris != null ? unmanagedVolumeUris.size() : 0L);
if (countMetaMembers && unmanagedVolumeUris != null) {
unmanagedVolumes=0L;
for ( String unmanagedVolumeUri : unmanagedVolumeUris) {
UnManagedVolume uVolume=_dbClient.queryObject(UnManagedVolume.class,URI.create(unmanagedVolumeUri));
Long metaMemberCount=getUnManagedVolumeMetaMemberCount(uVolume);
unmanagedVolumes+=(metaMemberCount != null ? metaMemberCount : 1L);
}
}
Long unmanagedInitiators=0L;
ZoneInfoMap zoneInfoMap=umask.getZoningMap();
if (!zoneInfoMap.isEmpty()) {
for ( ZoneInfo info : zoneInfoMap.values()) {
if (info.getPortWwn().equals(sp.getPortNetworkId())) {
unmanagedInitiators+=1L;
}
}
}
else {
unmanagedInitiators+=1L;
}
_log.info(String.format("Port %s UnManagedExportMask %s " + "unmanagedVolumes %d unmanagedInitiators %d",sp.getPortName(),umask.getMaskName(),unmanagedVolumes,unmanagedInitiators));
volumeCount+=unmanagedVolumes;
initiatorCount+=unmanagedInitiators;
}
}
MetricsKeys.putLong(MetricsKeys.unmanagedInitiatorCount,initiatorCount,dbMetrics);
MetricsKeys.putLong(MetricsKeys.unmanagedVolumeCount,volumeCount,dbMetrics);
}
| Updates the volumes and initiators that are mapped to the port by UnManagedExportMasks. Note that if there is also a corresponding (managed) ExportMask the unmanaged information is not used. (COP-16349). This is called only from the processing of the port metrics. |
public static void closeBufferedReader(BufferedReader stream,String tag){
if (stream != null) {
try {
stream.close();
}
catch ( IOException e) {
Log.e(tag,"Exception occured when closing BufferedReader." + e);
}
}
}
| Closes the BufferedReader passed in. |
public ListBasedSpreadsheetPanel(SpreadsheetService service,URL listFeedUrl){
this.service=service;
this.listFeedUrl=listFeedUrl;
model=new ListTableModel();
initializeGui();
}
| Creates a list-based spreadsheet editing panel. |
public void sort(){
if (!sorted) {
Collections.sort(samples);
sorted=true;
}
}
| sort the data set in increasing order |
@Override protected Revision readRevision() throws IOException, ArticleReaderException {
Revision rev=super.readRevision();
this.readRevisionCounter++;
return rev;
}
| Reads a single revision from an article. |
public boolean isMandatory(){
return m_combo.isMandatory();
}
| Is it mandatory |
public DenseDoubleMatrix3D(int slices,int rows,int columns){
setUp(slices,rows,columns);
this.elements=new double[slices * rows * columns];
}
| Constructs a matrix with a given number of slices, rows and columns. All entries are initially <tt>0</tt>. |
public void update(byte in){
M[mOff++]=in;
if (mOff == 16) {
processCheckSum(M);
processBlock(M);
mOff=0;
}
}
| update the message digest with a single byte. |
public void shutdown(){
setShutdownInProgress(true);
try {
servSock.close();
}
catch ( IOException e) {
}
thread.interrupt();
try {
thread.join();
}
catch ( InterruptedException e) {
}
debug("Main Monitor Thread Shut Down");
while (true) {
SlaveConnection sc=null;
synchronized (allSlaves) {
if (allSlaves.isEmpty()) break;
sc=(SlaveConnection)(allSlaves.removeFirst());
}
debug("Shutting Down Slave" + sc);
if (sc != null) sc.shutdown(state);
debug("Shut Down Slave" + sc);
}
synchronized (allSlaves) {
notifyMonitor(allSlaves);
}
pool.killAll();
debug("Shut Down Completed");
}
| Shuts down the slave monitor (also shuts down all slaves). |
public static List<org.oscm.vo.VOEventDefinition> convertToApiVOEventDefinition(List<org.oscm.internal.vo.VOEventDefinition> oldVO){
if (oldVO == null) {
return null;
}
List<org.oscm.vo.VOEventDefinition> newVO=new ArrayList<org.oscm.vo.VOEventDefinition>();
for ( org.oscm.internal.vo.VOEventDefinition tmp : oldVO) {
newVO.add(convertToApi(tmp));
}
return newVO;
}
| Convert list of VOEventDefinition. |
public boolean canTickRandomly(){
return false;
}
| Called to check if this block can perform random tick updates. |
private void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException {
stream.defaultReadObject();
this.titlePaint=SerialUtilities.readPaint(stream);
this.subtitlePaint=SerialUtilities.readPaint(stream);
this.chartBackgroundPaint=SerialUtilities.readPaint(stream);
this.legendBackgroundPaint=SerialUtilities.readPaint(stream);
this.legendItemPaint=SerialUtilities.readPaint(stream);
this.plotBackgroundPaint=SerialUtilities.readPaint(stream);
this.plotOutlinePaint=SerialUtilities.readPaint(stream);
this.labelLinkPaint=SerialUtilities.readPaint(stream);
this.baselinePaint=SerialUtilities.readPaint(stream);
this.domainGridlinePaint=SerialUtilities.readPaint(stream);
this.rangeGridlinePaint=SerialUtilities.readPaint(stream);
this.crosshairPaint=SerialUtilities.readPaint(stream);
this.axisLabelPaint=SerialUtilities.readPaint(stream);
this.tickLabelPaint=SerialUtilities.readPaint(stream);
this.itemLabelPaint=SerialUtilities.readPaint(stream);
this.shadowPaint=SerialUtilities.readPaint(stream);
this.thermometerPaint=SerialUtilities.readPaint(stream);
this.wallPaint=SerialUtilities.readPaint(stream);
this.errorIndicatorPaint=SerialUtilities.readPaint(stream);
this.gridBandPaint=SerialUtilities.readPaint(stream);
this.gridBandAlternatePaint=SerialUtilities.readPaint(stream);
}
| Provides serialization support. |
private Map loadJavaAPIMasterDataMap(final String className,final String methodName,final String parametertype[],final String parametervalue[]) throws ApplicationRuntimeException {
Map dataMap=new HashMap();
try {
if (parametertype.length != parametervalue.length) throw new ApplicationRuntimeException("Number of parameter types and parameter values doesnt match");
final Class cls=Class.forName(className);
final Method method=cls.getMethod(methodName,loadMethodParameter(parametertype));
dataMap=(HashMap)method.invoke(cls.newInstance(),loadMethodArguments(parametertype,parametervalue));
}
catch ( final Exception e) {
LOGGER.error("Error occurred in EgovMasterDataCaching loadJavaAPIMasterDataMap",e);
throw new ApplicationRuntimeException("Error occurred in EgovMasterDataCaching loadJavaAPIMasterDataMap",e);
}
return dataMap;
}
| This method loads the data for type Java API. |
public boolean hasSaveDestination(){
return processLocation != null;
}
| Returns true iff either a file or a repository location is defined. |
@Override public boolean isActive(){
return amIActive;
}
| Used by the Whitebox GUI to tell if this plugin is still running. |
@Override public boolean isLoggable(LogRecord record){
return super.isLoggable(record);
}
| Check if this <tt>Handler</tt> would actually log a given <tt>LogRecord</tt> into its internal buffer. <p> This method checks if the <tt>LogRecord</tt> has an appropriate level and whether it satisfies any <tt>Filter</tt>. However it does <b>not</b> check whether the <tt>LogRecord</tt> would result in a "push" of the buffer contents. It will return false if the <tt>LogRecord</tt> is null. <p> |
@Override public void endCamera(){
if (!manipulatingCamera) {
throw new RuntimeException("Cannot call endCamera() " + "without first calling beginCamera()");
}
camera.set(modelview);
cameraInv.set(modelviewInv);
manipulatingCamera=false;
}
| Record the current settings into the camera matrix, and set the matrix mode back to the current transformation matrix. <P> Note that this will destroy any settings to scale(), translate(), or whatever, because the final camera matrix will be copied (not multiplied) into the modelview. |
public static MPrintFormatItem createFromColumn(MPrintFormat format,int AD_Column_ID,int seqNo){
MPrintFormatItem pfi=new MPrintFormatItem(format.getCtx(),0,format.get_TrxName());
pfi.setAD_PrintFormat_ID(format.getAD_PrintFormat_ID());
pfi.setClientOrg(format);
pfi.setAD_Column_ID(AD_Column_ID);
pfi.setPrintFormatType(PRINTFORMATTYPE_Field);
String sql="SELECT c.ColumnName,e.Name,e.PrintName, " + "c.AD_Reference_ID,c.IsKey,c.SeqNo " + "FROM AD_Column c, AD_Element e "+ "WHERE c.AD_Column_ID=?"+ " AND c.AD_Element_ID=e.AD_Element_ID";
Language language=format.getLanguage();
boolean trl=!Env.isMultiLingualDocument(format.getCtx()) && !language.isBaseLanguage();
if (trl) sql="SELECT c.ColumnName,e.Name,e.PrintName, " + "c.AD_Reference_ID,c.IsKey,c.SeqNo " + "FROM AD_Column c, AD_Element_Trl e "+ "WHERE c.AD_Column_ID=?"+ " AND c.AD_Element_ID=e.AD_Element_ID"+ " AND e.AD_Language=?";
PreparedStatement pstmt=null;
ResultSet rs=null;
try {
pstmt=DB.prepareStatement(sql,format.get_TrxName());
pstmt.setInt(1,AD_Column_ID);
if (trl) pstmt.setString(2,language.getAD_Language());
rs=pstmt.executeQuery();
if (rs.next()) {
String ColumnName=rs.getString(1);
pfi.setName(rs.getString(2));
pfi.setPrintName(rs.getString(3));
int displayType=rs.getInt(4);
if (DisplayType.isNumeric(displayType)) pfi.setFieldAlignmentType(FIELDALIGNMENTTYPE_TrailingRight);
else if (displayType == DisplayType.Text || displayType == DisplayType.Memo) pfi.setFieldAlignmentType(FIELDALIGNMENTTYPE_Block);
else pfi.setFieldAlignmentType(FIELDALIGNMENTTYPE_LeadingLeft);
boolean isKey="Y".equals(rs.getString(5));
if (isKey || ColumnName.startsWith("Created") || ColumnName.startsWith("Updated")|| ColumnName.equals("AD_Client_ID")|| ColumnName.equals("AD_Org_ID")|| ColumnName.equals("IsActive")|| displayType == DisplayType.Button || displayType == DisplayType.Binary || displayType == DisplayType.ID || displayType == DisplayType.Image || displayType == DisplayType.RowID || seqNo == 0) {
pfi.setIsPrinted(false);
pfi.setSeqNo(0);
}
else {
pfi.setIsPrinted(true);
pfi.setSeqNo(seqNo);
}
int idSeqNo=rs.getInt(6);
if (idSeqNo > 0) {
pfi.setIsOrderBy(true);
pfi.setSortNo(idSeqNo);
}
}
else s_log.log(Level.SEVERE,"Not Found AD_Column_ID=" + AD_Column_ID + " Trl="+ trl+ " "+ language.getAD_Language());
}
catch ( SQLException e) {
s_log.log(Level.SEVERE,sql,e);
}
finally {
DB.close(rs,pstmt);
rs=null;
pstmt=null;
}
if (!pfi.save()) return null;
return pfi;
}
| Create Print Format Item from Column |
private synchronized void rebuildJournal() throws IOException {
if (journalWriter != null) {
journalWriter.close();
}
Writer writer=new BufferedWriter(new FileWriter(journalFileTmp),IO_BUFFER_SIZE);
writer.write(MAGIC);
writer.write("\n");
writer.write(VERSION_1);
writer.write("\n");
writer.write(Integer.toString(appVersion));
writer.write("\n");
writer.write(Integer.toString(valueCount));
writer.write("\n");
writer.write("\n");
for ( Entry entry : lruEntries.values()) {
if (entry.currentEditor != null) {
writer.write(DIRTY + ' ' + entry.key+ '\n');
}
else {
writer.write(CLEAN + ' ' + entry.key+ entry.getLengths()+ '\n');
}
}
writer.close();
journalFileTmp.renameTo(journalFile);
journalWriter=new BufferedWriter(new FileWriter(journalFile,true),IO_BUFFER_SIZE);
}
| Creates a new journal that omits redundant information. This replaces the current journal if it exists. |
private void checkFailedOrCancelledCount(final State current){
Operation.CompletionHandler handler=null;
QueryTask.QuerySpecification spec=QueryTaskUtils.buildChildServiceTaskStatusQuerySpec(this.getSelfLink(),ImageDatastoreSweeperService.State.class,TaskState.TaskStage.FAILED,TaskState.TaskStage.CANCELLED);
this.sendQuery(spec,handler);
}
| Triggers a query to retrieve the "child" ImageDatastoreSweeperService instances in FAILED or CANCELLED state. |
public void newMethod() throws IOException {
writeCode(CALL_NEW_METHOD);
}
| SWFActions interface |
public void updateDrawerIfNeeded(){
if (muninFoo.getMasters().size() == 0) drawerHelper.reset();
}
| When deleting a node / master, we should reinit the drawer if there's nothing to show |
public void removeModule(final MemoryModule module){
Preconditions.checkNotNull(module,"IE00758: Module argument can not be null");
if (!addressSpaceModules.remove(module)) {
throw new IllegalStateException("IE00759: Module was not part of this process");
}
moduleAddresses.remove(module.getBaseAddress().getAddress());
moduleByAddress.remove(module.getBaseAddress().getAddress());
for ( final ProcessManagerListener listener : listeners) {
try {
listener.removedModule(module);
}
catch ( final Exception exception) {
CUtilityFunctions.logException(exception);
}
}
}
| Removes a module from the address space of the target process. |
void update(long index,Instant instant,Type type){
this.index=index;
this.type=type;
clock.set(instant);
}
| Updates the state machine context. |
protected void validatePatch(State current,State patch){
checkState(current.taskInfo.stage.ordinal() < TaskState.TaskStage.FINISHED.ordinal(),"Invalid stage update. Can not patch anymore when in final stage [%s]",current.taskInfo.stage);
if (patch.taskInfo != null) {
checkState(patch.taskInfo.stage != null,"Invalid stage update. 'stage' can not be null if taskInfo is provided");
checkState(patch.taskInfo.stage.ordinal() >= current.taskInfo.stage.ordinal(),"Invalid stage update. Can not revert to %s from %s",patch.taskInfo.stage,current.taskInfo.stage);
if (patch.taskInfo.subStage != null && current.taskInfo.subStage != null) {
checkState(patch.taskInfo.subStage.ordinal() >= current.taskInfo.subStage.ordinal(),"Invalid stage update. 'subStage' cannot move back.");
}
}
checkArgument(patch.imageWatermarkTime == null,"imageWatermarkTime cannot be changed.");
}
| Validate patch correctness. |
public static IgfsDirectoryInfo createDirectory(IgniteUuid id,@Nullable Map<String,IgfsListingEntry> listing,@Nullable Map<String,String> props,long createTime,long modificationTime){
return new IgfsDirectoryInfo(id,listing,props,createTime,modificationTime);
}
| Create directory. |
public void disconnect(LogConnection client) throws ReplicatorException {
client.release();
}
| Disconnect from the log. Adapters must call this to free resources and avoid leaks. |
public Terrain(int terrainSize,float scale,float minY,float maxY,String heightMapFile,String textureFile,int textInc) throws Exception {
this.terrainSize=terrainSize;
gameItems=new GameItem[terrainSize * terrainSize];
BufferedImage heightMapImage=ImageIO.read(getClass().getResourceAsStream(heightMapFile));
verticesPerCol=heightMapImage.getWidth() - 1;
verticesPerRow=heightMapImage.getHeight() - 1;
heightMapMesh=new HeightMapMesh(minY,maxY,heightMapImage,textureFile,textInc);
boundingBoxes=new Rectangle2D.Float[terrainSize][terrainSize];
for (int row=0; row < terrainSize; row++) {
for (int col=0; col < terrainSize; col++) {
float xDisplacement=(col - ((float)terrainSize - 1) / (float)2) * scale * HeightMapMesh.getXLength();
float zDisplacement=(row - ((float)terrainSize - 1) / (float)2) * scale * HeightMapMesh.getZLength();
GameItem terrainBlock=new GameItem(heightMapMesh.getMesh());
terrainBlock.setScale(scale);
terrainBlock.setPosition(xDisplacement,0,zDisplacement);
gameItems[row * terrainSize + col]=terrainBlock;
boundingBoxes[row][col]=getBoundingBox(terrainBlock);
}
}
}
| A Terrain is composed of blocks; each block is a GameItem constructed from a HeightMap. |
protected Reader createReader(InputStream in) throws IOException {
return new BufferedReader(new InputStreamReader(in));
}
| Factory method to create a Reader from the given InputStream. |
public void popReferenceCenter(DrawContext dc){
if (dc == null) {
String message=Logging.getMessage("nullValue.DrawContextIsNull");
Logging.logger().severe(message);
throw new IllegalArgumentException(message);
}
if (dc.getGL() == null) {
String message=Logging.getMessage("nullValue.DrawingContextGLIsNull");
Logging.logger().severe(message);
throw new IllegalStateException(message);
}
GL2 gl=dc.getGL().getGL2();
OGLStackHandler ogsh=new OGLStackHandler();
try {
ogsh.pushAttrib(gl,GL2.GL_TRANSFORM_BIT);
gl.glMatrixMode(GL2.GL_MODELVIEW);
gl.glPopMatrix();
}
finally {
ogsh.pop(gl);
}
}
| Removes the model-view matrix on top of the matrix stack, and restores the original matrix. |
public ReaderToTextPane(Reader input,JTextPane output){
this(input,output,Color.BLACK);
}
| Sets up the thread, using black as the color for displaying the text. |
public static byte flags(boolean isDir,boolean isFile){
byte res=isDir ? FLAG_DIR : 0;
if (isFile) res|=FLAG_FILE;
return res;
}
| Create flags value. |
public void addCollider(Triple colliderTrip){
colliders.add(colliderTrip);
}
| Add another collider operation to the GraphChange. |
public Set<String> addSpriteFrames(HashMap<String,Object> dictionary,CCTexture2D texture){
@SuppressWarnings("unchecked") HashMap<String,Object> metadataDict=(HashMap<String,Object>)dictionary.get("metadata");
@SuppressWarnings("unchecked") HashMap<String,Object> framesDict=(HashMap<String,Object>)dictionary.get("frames");
int format=0;
if (metadataDict != null) format=(Integer)metadataDict.get("format");
if (!(format >= 0 && format <= 3)) {
ccMacros.CCLOGERROR("CCSpriteFrameCache","Unsupported Zwoptex plist file format.");
}
for ( Entry<String,Object> frameDictEntry : framesDict.entrySet()) {
@SuppressWarnings("unchecked") HashMap<String,Object> frameDict=(HashMap<String,Object>)frameDictEntry.getValue();
CCSpriteFrame spriteFrame=null;
if (format == 0) {
float x=((Number)frameDict.get("x")).floatValue();
float y=((Number)frameDict.get("y")).floatValue();
float w=((Number)frameDict.get("width")).floatValue();
float h=((Number)frameDict.get("height")).floatValue();
float ox=((Number)frameDict.get("offsetX")).floatValue();
float oy=((Number)frameDict.get("offsetY")).floatValue();
int ow=0;
int oh=0;
try {
ow=((Number)frameDict.get("originalWidth")).intValue();
oh=((Number)frameDict.get("originalHeight")).intValue();
}
catch ( Exception e) {
ccMacros.CCLOG("cocos2d","WARNING: originalWidth/Height not found on the CCSpriteFrame. AnchorPoint won't work as expected. Regenerate the .plist");
}
ow=Math.abs(ow);
oh=Math.abs(oh);
spriteFrame=CCSpriteFrame.frame(texture,CGRect.make(x,y,w,h),false,CGPoint.make(ox,oy),CGSize.make(ow,oh));
}
else if (format == 1 || format == 2) {
CGRect frame=GeometryUtil.CGRectFromString((String)frameDict.get("frame"));
boolean rotated=false;
if (format == 2) rotated=(Boolean)frameDict.get("rotated");
CGPoint offset=GeometryUtil.CGPointFromString((String)frameDict.get("offset"));
CGSize sourceSize=GeometryUtil.CGSizeFromString((String)frameDict.get("sourceSize"));
spriteFrame=CCSpriteFrame.frame(texture,frame,rotated,offset,sourceSize);
}
else if (format == 3) {
CGSize spriteSize=GeometryUtil.CGSizeFromString((String)frameDict.get("spriteSize"));
CGPoint spriteOffset=GeometryUtil.CGPointFromString((String)frameDict.get("spriteOffset"));
CGSize spriteSourceSize=GeometryUtil.CGSizeFromString((String)frameDict.get("spriteSourceSize"));
CGRect textureRect=GeometryUtil.CGRectFromString((String)frameDict.get("textureRect"));
boolean textureRotated=(Boolean)frameDict.get("textureRotated");
spriteFrame=CCSpriteFrame.frame(texture,CGRect.make(textureRect.origin.x,textureRect.origin.y,spriteSize.width,spriteSize.height),textureRotated,spriteOffset,spriteSourceSize);
}
spriteFrames.put(frameDictEntry.getKey(),spriteFrame);
}
return framesDict.keySet();
}
| Adds multiple Sprite Frames with a dictionary. The texture will be associated with the created sprite frames. |
@Override protected EClass eStaticClass(){
return StextPackage.Literals.DEF_ROOT;
}
| <!-- begin-user-doc --> <!-- end-user-doc --> |
protected void startNode(int node) throws org.xml.sax.SAXException {
if (m_contentHandler instanceof NodeConsumer) {
}
switch (m_dtm.getNodeType(node)) {
case DTM.COMMENT_NODE:
{
XMLString data=m_dtm.getStringValue(node);
if (m_contentHandler instanceof LexicalHandler) {
LexicalHandler lh=((LexicalHandler)this.m_contentHandler);
data.dispatchAsComment(lh);
}
}
break;
case DTM.DOCUMENT_FRAGMENT_NODE:
break;
case DTM.DOCUMENT_NODE:
this.m_contentHandler.startDocument();
break;
case DTM.ELEMENT_NODE:
DTM dtm=m_dtm;
for (int nsn=dtm.getFirstNamespaceNode(node,true); DTM.NULL != nsn; nsn=dtm.getNextNamespaceNode(node,nsn,true)) {
String prefix=dtm.getNodeNameX(nsn);
this.m_contentHandler.startPrefixMapping(prefix,dtm.getNodeValue(nsn));
}
String ns=dtm.getNamespaceURI(node);
if (null == ns) ns="";
org.xml.sax.helpers.AttributesImpl attrs=new org.xml.sax.helpers.AttributesImpl();
for (int i=dtm.getFirstAttribute(node); i != DTM.NULL; i=dtm.getNextAttribute(i)) {
attrs.addAttribute(dtm.getNamespaceURI(i),dtm.getLocalName(i),dtm.getNodeName(i),"CDATA",dtm.getNodeValue(i));
}
this.m_contentHandler.startElement(ns,m_dtm.getLocalName(node),m_dtm.getNodeName(node),attrs);
break;
case DTM.PROCESSING_INSTRUCTION_NODE:
{
String name=m_dtm.getNodeName(node);
if (name.equals("xslt-next-is-raw")) {
nextIsRaw=true;
}
else {
this.m_contentHandler.processingInstruction(name,m_dtm.getNodeValue(node));
}
}
break;
case DTM.CDATA_SECTION_NODE:
{
boolean isLexH=(m_contentHandler instanceof LexicalHandler);
LexicalHandler lh=isLexH ? ((LexicalHandler)this.m_contentHandler) : null;
if (isLexH) {
lh.startCDATA();
}
dispatachChars(node);
{
if (isLexH) {
lh.endCDATA();
}
}
}
break;
case DTM.TEXT_NODE:
{
if (nextIsRaw) {
nextIsRaw=false;
m_contentHandler.processingInstruction(javax.xml.transform.Result.PI_DISABLE_OUTPUT_ESCAPING,"");
dispatachChars(node);
m_contentHandler.processingInstruction(javax.xml.transform.Result.PI_ENABLE_OUTPUT_ESCAPING,"");
}
else {
dispatachChars(node);
}
}
break;
case DTM.ENTITY_REFERENCE_NODE:
{
if (m_contentHandler instanceof LexicalHandler) {
((LexicalHandler)this.m_contentHandler).startEntity(m_dtm.getNodeName(node));
}
else {
}
}
break;
default :
}
}
| Start processing given node |
public static Document parse(Reader reader,boolean validating,boolean namespaceAware) throws SAXException, IOException, ParserConfigurationException {
return parse(reader,validating,namespaceAware,false);
}
| Creates a DocumentBuilder and uses it to parse the XML text read from the given reader, allowing parser validation and namespace awareness to be controlled. Documents are not allowed to contain DOCTYPE declarations. |
public PolynomialGF2mSmallM[] modPolynomialToFracton(PolynomialGF2mSmallM g){
int dg=g.degree >> 1;
int[] a0=normalForm(g.coefficients);
int[] a1=mod(coefficients,g.coefficients);
int[] b0={0};
int[] b1={1};
while (computeDegree(a1) > dg) {
int[][] q=div(a0,a1);
a0=a1;
a1=q[1];
int[] b2=add(b0,modMultiply(q[0],b1,g.coefficients));
b0=b1;
b1=b2;
}
return new PolynomialGF2mSmallM[]{new PolynomialGF2mSmallM(field,a1),new PolynomialGF2mSmallM(field,b1)};
}
| Compute a polynomial pair (a,b) from this polynomial and the given polynomial g with the property b*this = a mod g and deg(a)<=deg(g)/2. |
private boolean checkOrgInStoredList(long orgId,List<Organisation_Users> org) throws Exception {
for ( Organisation_Users orgUsers : org) {
if (orgUsers.getOrganisation().getOrganisation_id().equals(orgId)) {
return true;
}
}
return false;
}
| Checks if an orgId is already stored in the user's organisations list |
public void paintProgressBarBorder(SynthContext context,Graphics g,int x,int y,int w,int h){
}
| Paints the border of a progress bar. |
public IdItem(CstType type){
if (type == null) {
throw new NullPointerException("type == null");
}
this.type=type;
}
| Constructs an instance. |
public synchronized Record first(){
if (rrs.size() == 0) throw new IllegalStateException("rrset is empty");
return (Record)rrs.get(0);
}
| Returns the first record |
private List<String> readLines(Reader reader){
List<String> lines=new ArrayList<String>();
BufferedReader bufferedReader=new BufferedReader(reader);
String line;
try {
while ((line=bufferedReader.readLine()) != null) {
lines.add(line);
}
}
catch ( IOException e) {
String message=resource == null ? "Unable to parse lines" : "Unable to parse " + resource.getLocation() + " ("+ resource.getLocationOnDisk()+ ")";
throw new CassandraMigrationException(message,e);
}
return lines;
}
| Parses the textual data provided by this reader into a list of lines. |
@Override public String toString(){
return "CUsurfObject[" + "nativePointer=0x" + Long.toHexString(getNativePointer()) + "]";
}
| Returns a String representation of this object. |
public boolean hasScheme(){
return super.hasAttribute(SCHEME);
}
| Returns whether it has the default scheme of the contained category elements. |
public TaskHandle enqueuePollTask(JobReference jobRef){
return enqueuer.enqueue(getQueue(QUEUE),createCommonPollTask(jobRef).method(Method.GET));
}
| Enqueue a task to poll for the success or failure of the referenced BigQuery job. |
public boolean findDeadlock(){
long[] tids;
if (findDeadlocksMethodName.equals("findDeadlockedThreads") && tmbean.isSynchronizerUsageSupported()) {
tids=tmbean.findDeadlockedThreads();
if (tids == null) {
return false;
}
System.out.println("Deadlock found :-");
ThreadInfo[] infos=tmbean.getThreadInfo(tids,true,true);
for ( ThreadInfo ti : infos) {
printThreadInfo(ti);
printMonitorInfo(ti);
printLockInfo(ti.getLockedSynchronizers());
System.out.println();
}
}
else {
tids=tmbean.findMonitorDeadlockedThreads();
if (tids == null) {
return false;
}
ThreadInfo[] infos=tmbean.getThreadInfo(tids,Integer.MAX_VALUE);
for ( ThreadInfo ti : infos) {
printThreadInfo(ti);
}
}
return true;
}
| Checks if any threads are deadlocked. If any, print the thread dump information. |
@SuppressWarnings("unchecked") public void writeDouble(double x) throws SQLException {
attribs.add(Double.valueOf(x));
}
| Writes a <code>double</code> in the Java programming language to this <code>SQLOutputImpl</code> object. The driver converts it to an SQL <code>DOUBLE</code> before returning it to the database. |
ElemTemplateElement popElemTemplateElement(){
return (ElemTemplateElement)m_elems.pop();
}
| Pop the current ElemTemplateElement from the top of the stack. |
@DSGenerator(tool_name="Doppelganger",tool_version="2.0",generated_on="2013-12-30 12:59:16.602 -0500",hash_original_method="B28E495795E9250815D18C95A5C9BAB3",hash_generated_method="BE47B1335DEB2B492ACCC080BDC577FE") public boolean decodeContentLocation(int startIndex){
return decodeTextString(startIndex);
}
| Decode the "Content location" type for WSP pdu |
OneStepIterator(Compiler compiler,int opPos,int analysis) throws javax.xml.transform.TransformerException {
super(compiler,opPos,analysis);
int firstStepPos=OpMap.getFirstChildPos(opPos);
m_axis=WalkerFactory.getAxisFromStep(compiler,firstStepPos);
}
| Create a OneStepIterator object. |
protected int nextInDoctype() throws IOException, XMLException {
switch (current) {
case 0x9:
case 0xA:
case 0xD:
case 0x20:
do {
nextChar();
}
while (current != -1 && XMLUtilities.isXMLSpace((char)current));
return LexicalUnits.S;
case '>':
nextChar();
context=TOP_LEVEL_CONTEXT;
return LexicalUnits.END_CHAR;
case 'S':
return readIdentifier("YSTEM",LexicalUnits.SYSTEM_IDENTIFIER,LexicalUnits.NAME);
case 'P':
return readIdentifier("UBLIC",LexicalUnits.PUBLIC_IDENTIFIER,LexicalUnits.NAME);
case '"':
attrDelimiter='"';
return readString();
case '\'':
attrDelimiter='\'';
return readString();
case '[':
nextChar();
context=DTD_DECLARATIONS_CONTEXT;
inDTD=true;
return LexicalUnits.LSQUARE_BRACKET;
default :
return readName(LexicalUnits.NAME);
}
}
| Returns the next lexical unit in the context of a doctype. |
public static void error(String msg,Throwable ex){
error(msg,ex,true);
}
| Version that automatically pops a message. |
public void replace(int offset,int length,View[] views){
synchronized (stats) {
for (int i=0; i < length; i++) {
ChildState cs=stats.remove(offset);
float csSpan=cs.getMajorSpan();
cs.getChildView().setParent(null);
if (csSpan != 0) {
majorRequirementChange(cs,-csSpan);
}
}
LayoutQueue q=getLayoutQueue();
if (views != null) {
for (int i=0; i < views.length; i++) {
ChildState s=createChildState(views[i]);
stats.add(offset + i,s);
q.addTask(s);
}
}
q.addTask(flushTask);
}
}
| Calls the superclass to update the child views, and updates the status records for the children. This is expected to be called while a write lock is held on the model so that interaction with the layout thread will not happen (i.e. the layout thread acquires a read lock before doing anything). |
@Override public boolean onInterceptTouchEvent(MotionEvent ev){
try {
return super.onInterceptTouchEvent(ev);
}
catch ( IllegalArgumentException e) {
e.printStackTrace();
return false;
}
}
| PDFViewPager uses PhotoView, so this bugfix should be added. Issue explained in https://github.com/chrisbanes/PhotoView |
public static float[] toFloatArray(Number[] array){
float[] result=new float[array.length];
for (int i=0; i < array.length; i++) {
result[i]=array[i].floatValue();
}
return result;
}
| Converts the given Number array to an array of floats. |
public void addZoomListener(ZoomListener listener){
if (mPinchZoom != null) {
mPinchZoom.addZoomListener(listener);
}
}
| Adds a new zoom listener. |
private <T>T testAppend(T curVal,T newVal,boolean append) throws IgniteCheckedException, EntryProcessorException {
GridRestCommandHandler hnd=new GridCacheCommandHandler(((IgniteKernal)grid()).context());
String key=UUID.randomUUID().toString();
GridRestCacheRequest req=new GridRestCacheRequest();
req.command(append ? GridRestCommand.CACHE_APPEND : GridRestCommand.CACHE_PREPEND);
req.key(key);
req.value(newVal);
assertFalse("Expects failure due to no value in cache.",(Boolean)hnd.handleAsync(req).get().getResponse());
T res;
try {
jcache().put(key,curVal);
assertTrue((Boolean)hnd.handleAsync(req).get().getResponse());
}
finally {
res=(T)jcache().getAndRemove(key);
}
return res;
}
| Test cache handler append/prepend commands with specified environment. |
protected void onPauseWebRender(){
onPauseWebRender(false);
}
| Pause the web rendering engine (reduces memory leaks etc.) |
public static long absoluteDayFromDateValue(long dateValue){
long y=yearFromDateValue(dateValue);
int m=monthFromDateValue(dateValue);
int d=dayFromDateValue(dateValue);
if (m <= 2) {
y--;
m+=12;
}
long a=((y * 2922L) >> 3) + DAYS_OFFSET[m - 3] + d - 719484;
if (y <= 1582 && ((y < 1582) || (m * 100 + d < 1005))) {
a+=13;
}
else if (y < 1901 || y > 2099) {
a+=(y / 400) - (y / 100) + 15;
}
return a;
}
| Calculate the absolute day from a date value. |
private static long createLongSeed(final byte[] seed){
if (seed == null || seed.length != SEED_SIZE_BYTES) {
throw new IllegalArgumentException("Java RNG requires a 64-bit (8-byte) seed.");
}
long value=0;
for (int i=0; i < 0 + 8; i++) {
final byte b=seed[i];
value<<=8;
value+=b & 0xff;
}
return value;
}
| Helper method to convert seed bytes into the long value required by the super class. |
@DSGenerator(tool_name="Doppelganger",tool_version="2.0",generated_on="2013-12-30 12:56:25.336 -0500",hash_original_method="F6756F2BA503B7A43E41A22536745AD1",hash_generated_method="F6756F2BA503B7A43E41A22536745AD1") Values initializeValues(Thread current){
return current.localValues=new Values();
}
| Creates Values instance for this thread and variable type. |
public Matrix3f rotateY(float ang){
return rotateY(ang,this);
}
| Apply rotation about the Y axis to this matrix by rotating the given amount of radians. <p> When used with a right-handed coordinate system, the produced rotation will rotate a vector counter-clockwise around the rotation axis, when viewing along the negative axis direction towards the origin. When used with a left-handed coordinate system, the rotation is clockwise. <p> If <code>M</code> is <code>this</code> matrix and <code>R</code> the rotation matrix, then the new matrix will be <code>M * R</code>. So when transforming a vector <code>v</code> with the new matrix by using <code>M * R * v</code> , the rotation will be applied first! <p> Reference: <a href="http://en.wikipedia.org/wiki/Rotation_matrix#Basic_rotations">http://en.wikipedia.org</a> |
public void clear(){
DelaunayTriangle t;
for (int i=0; i < 3; i++) {
t=neighbors[i];
if (t != null) {
t.clearNeighbor(this);
}
}
clearNeighbors();
points[0]=points[1]=points[2]=null;
}
| Clears all references to all other triangles and points |
public <CTX>String sanitize(@Nullable String html,@Nullable HtmlChangeListener<CTX> listener,@Nullable CTX context){
if (html == null) {
return "";
}
StringBuilder out=new StringBuilder(html.length());
HtmlSanitizer.sanitize(html,apply(HtmlStreamRenderer.create(out,Handler.DO_NOTHING),listener,context),preprocessor);
return out.toString();
}
| A convenience function that sanitizes a string of HTML and reports the names of rejected element and attributes to listener. |
private void analyze(File file,int burnin) throws TraceException {
if (file.isFile()) {
try {
String name=file.getCanonicalPath();
report(name,burnin);
}
catch ( IOException e) {
}
}
else {
File[] files=file.listFiles();
for ( File f : files) {
if (f.isDirectory()) {
analyze(f,burnin);
}
else if (f.getName().endsWith(".trees")) {
analyze(f,burnin);
}
}
}
}
| Recursively analyzes log files. |
@Override public boolean isCatalogAtStart(){
debugCodeCall("isCatalogAtStart");
return true;
}
| Returns whether the catalog is at the beginning. |
public final void negate(){
x=-x;
y=-y;
}
| Negate the point's coordinates |
public void testLeakAsyncFileChannel() throws IOException {
Path dir=wrap(createTempDir());
OutputStream file=Files.newOutputStream(dir.resolve("stillopen"));
file.write(5);
file.close();
AsynchronousFileChannel leak=AsynchronousFileChannel.open(dir.resolve("stillopen"));
try {
dir.getFileSystem().close();
fail("should have gotten exception");
}
catch ( Exception e) {
assertTrue(e.getMessage().contains("file handle leaks"));
}
leak.close();
}
| Test leaks via AsynchronousFileChannel.open |
public GuacamoleException(Throwable cause){
super(cause);
}
| Creates a new GuacamoleException with the given cause. |
public CoordinateLayout(){
this.width=-1;
this.height=-1;
}
| Allows creating a coordinate layout that disables the scaling feature |
public boolean isError(){
return error != null;
}
| Determines if the task completed with an error. |
private void findAll(QueryKraken query,Object[] args,Result<Iterable<Cursor>> result){
try {
TableKraken table=query.table();
TableKelp tableKelp=table.getTableKelp();
TablePod tablePod=table.getTablePod();
if (query.isStaticNode()) {
RowCursor cursor=tableKelp.cursor();
query.fillKey(cursor,args);
int hash=query.calculateHash(cursor);
if (tablePod.getNode(hash).isSelfCopy() || true) {
query.findAll(result,args);
return;
}
else {
result.ok(null);
return;
}
}
query.findAll(result,args);
}
catch ( Exception e) {
result.fail(e);
}
}
| Query implementation for multiple results with the parsed query. |
public AnnotationVisitor visitParameterAnnotation(int parameter,String desc,boolean visible){
if (mv != null) {
return mv.visitParameterAnnotation(parameter,desc,visible);
}
return null;
}
| Visits an annotation of a parameter of this method. |
public SnackbarWrapper dismiss(){
snackbar.dismiss();
return this;
}
| Dismiss the Snackbar. |
protected void commit() throws MailboxException {
try {
if (getSession().hasPendingChanges()) {
getSession().save();
}
}
catch ( RepositoryException e) {
throw new MailboxException("Unable to commit",e);
}
}
| Just call save on the underlying JCR Session, because level 1 JCR implementation does not offer Transactions |
public void generate(Queue<Pair<Env<AttrContext>,JCClassDecl>> queue){
generate(queue,null);
}
| Generates the source or class file for a list of classes. The decision to generate a source file or a class file is based upon the compiler's options. Generation stops if an error occurs while writing files. |
public AggregateFuture(Future<T>... futures){
for ( Future<T> f : futures) components.add(f);
}
| <p> Construct from a set of futures which is copied internally. </p> |