code (string, lengths 10 to 174k) | nl (string, lengths 3 to 129k) |
---|---|
protected Pair(ThrowableSet caught,ThrowableSet uncaught){
this.caught=caught;
this.uncaught=uncaught;
}
| Constructs a <code>ThrowableSet.Pair</code>. |
public void unscheduleAllTimers(){
assert false : "Not implemented. Use unscheduleAllSelectors";
}
| Unschedules all timers. You should NEVER call this method unless you know what you are doing. |
private boolean isCacheValid(){
long now=CurrentTime.currentTime();
if ((now - _lastTime < 100) && !CurrentTime.isTest()) return true;
long oldLastModified=_lastModified;
long oldLength=_length;
long newLastModified=getBacking().getLastModified();
long newLength=getBacking().length();
_lastTime=now;
if (newLastModified == oldLastModified && newLength == oldLength) {
_lastTime=now;
return true;
}
else {
_changeSequence.incrementAndGet();
clearCache();
_zipEntryCache.clear();
_lastModified=newLastModified;
_length=newLength;
_lastTime=now;
return false;
}
}
| Returns true if the cached file metadata for the path is still valid; otherwise refreshes the cached last-modified time and length and returns false. |
public ASN1Primitive toASN1Primitive(){
ASN1EncodableVector v=new ASN1EncodableVector();
if (keyidentifier != null) {
v.add(new DERTaggedObject(false,0,keyidentifier));
}
if (certissuer != null) {
v.add(new DERTaggedObject(false,1,certissuer));
}
if (certserno != null) {
v.add(new DERTaggedObject(false,2,certserno));
}
return new DERSequence(v);
}
| Produce an object suitable for an ASN1OutputStream. |
public void removeTagDefinition(int tagId){
getTagInfo().delete(tagId);
}
| Removes a tag definition for given defined tag constant. |
ItemTypeDescription(BaseEntry owner){
this.owner=owner;
}
| Creates an ItemTypeDescription. |
@DSSafe(DSCat.SAFE_LIST) @DSGenerator(tool_name="Doppelganger",tool_version="2.0",generated_on="2014-08-13 13:14:17.028 -0400",hash_original_method="0BA5737D802F499B64A85853275E3368",hash_generated_method="B9078954865C695972CAFFD384122A35") public int hashCode(){
return rp == null ? m : m + rp.hashCode();
}
| Returns the hashcode value for this finite field. |
public void providesSingletonInScope(){
isProvidingSingletonInScope=true;
}
| to provide a singleton using the binding's scope and reuse it inside the binding's scope |
public static boolean isRowIndexMin(AggregateUnaryOperator uaggOp){
return (uaggOp.aggOp.increOp.fn instanceof Builtin && (((Builtin)(uaggOp.aggOp.increOp.fn)).bFunc == Builtin.BuiltinCode.MININDEX));
}
| Returns true if uaggOp is of type RowIndexMin. |
@NotNull private static Direction extractDirection(int directionKey){
if (directionKey == 0) {
return Out;
}
else if (directionKey == 1) {
return NullableOut;
}
else if (directionKey == 2) {
return Pure;
}
else {
int paramKey=directionKey - 3;
int paramId=paramKey / 8;
int subDirectionId=paramKey % 8;
if (subDirectionId <= 1) {
return new In(paramId,subDirectionId);
}
else {
int valueId=subDirectionId - 2;
return new InOut(paramId,Value.values()[valueId]);
}
}
}
| Converts int to Direction object. |
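A minimal arithmetic sketch of the key decoding above, using a hypothetical directionKey value; Direction, In and InOut are project-specific types, so this sketch only prints the decoded fields:
public class DirectionKeyDemo {
    public static void main(String[] args) {
        int directionKey = 12;               // hypothetical packed key (>= 3, so parameter-specific)
        int paramKey = directionKey - 3;     // 9
        int paramId = paramKey / 8;          // 1 -> parameter index
        int subDirectionId = paramKey % 8;   // 1 -> <= 1, so extractDirection would return In(1, 1)
        System.out.println("paramId=" + paramId + ", subDirectionId=" + subDirectionId);
    }
}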
public static Object invoke(String apiName,ApiProtocol apiProtocol){
Class<?> classname;
Object classObject;
Constructor constructor;
Method method;
Object result=null;
Api api=ApiRoute.apiMap.get(apiName);
if (api == null) {
return ErrorHandler.error(StatusCode.API_NOT_FOUND);
}
if (apiProtocol.getBuild() < api.getBuild()) {
return ErrorHandler.error(StatusCode.VERSION_IS_TOO_LOW);
}
if (api.getHttpMethod() != null && !api.getHttpMethod().contains(apiProtocol.getMethod().toString().toLowerCase())) {
return ErrorHandler.error(StatusCode.REQUEST_MODE_ERROR);
}
try {
classname=Class.forName(Config.getString("resource.package.name") + "." + api.getResource());
constructor=classname.getConstructor(ApiProtocol.class);
classObject=constructor.newInstance(apiProtocol);
}
catch ( NoSuchMethodException e) {
logger.error(e.getMessage());
return ErrorHandler.error(StatusCode.API_SERVER_ERROR);
}
catch ( ClassNotFoundException e) {
logger.error(e.getMessage());
return ErrorHandler.error(StatusCode.API_SERVER_ERROR);
}
catch ( InvocationTargetException e) {
logger.error(e.getMessage());
return ErrorHandler.error(StatusCode.API_SERVER_ERROR);
}
catch ( InstantiationException e) {
logger.error(e.getMessage());
return ErrorHandler.error(StatusCode.API_SERVER_ERROR);
}
catch ( IllegalAccessException e) {
logger.error(e.getMessage());
return ErrorHandler.error(StatusCode.API_SERVER_ERROR);
}
try {
method=classname.getMethod(apiProtocol.getMethod().toString().toLowerCase());
}
catch ( NoSuchMethodException e) {
logger.error(e.getMessage());
return ErrorHandler.error(StatusCode.API_SERVER_ERROR);
}
try {
result=method.invoke(classObject);
}
catch ( InvocationTargetException e) {
e.printStackTrace();
logger.error(e.getMessage());
}
catch ( IllegalAccessException e) {
logger.error(e.toString());
}
return result;
}
| Invokes the API resource method identified by apiName; the request apiProtocol must observe the routeMap regulations. |
public static Object[] findMethodWithListParameters(Object service,String methodName,Object[] args){
Method method=null;
try {
method=service.getClass().getMethod(methodName,ConversionUtils.convertParams(args));
log.debug("Exact method found (skipping list): {}",methodName);
return new Object[]{method,args};
}
catch ( NoSuchMethodException nsme) {
log.debug("Method not found using exact parameter types");
}
List<Method> methods=ConversionUtils.findMethodsByNameAndNumParams(service,methodName,1);
log.debug("Found {} methods",methods.size());
if (methods.isEmpty()) {
return new Object[]{null,null};
}
else if (methods.size() > 1) {
log.debug("Multiple methods found with same name and parameter count.");
log.debug("Parameter conversion will be attempted in order.");
}
ArrayList<Object> argsList=new ArrayList<Object>();
if (args != null) {
for ( Object element : args) {
argsList.add(element);
}
}
args=new Object[]{argsList};
Object[] params=null;
for (int i=0; i < methods.size(); i++) {
try {
method=methods.get(i);
params=ConversionUtils.convertParams(args,method.getParameterTypes());
if (argsList.size() > 0 && (argsList.get(0) instanceof IConnection) && (!(params[0] instanceof IConnection))) {
continue;
}
return new Object[]{method,params};
}
catch ( Exception ex) {
log.debug("Parameter conversion failed",ex);
}
}
return nullReturn;
}
| Returns (method, params) for the given service, or (null, null) if no method was found. |
private void buildHierarchy(Relation<V> database,Clustering<SubspaceModel> clustering,List<Cluster<SubspaceModel>> clusters,int dimensionality){
StringBuilder msg=LOG.isDebugging() ? new StringBuilder() : null;
final int db_dim=RelationUtil.dimensionality(database);
Hierarchy<Cluster<SubspaceModel>> hier=clustering.getClusterHierarchy();
for (int i=0; i < clusters.size() - 1; i++) {
Cluster<SubspaceModel> c_i=clusters.get(i);
final Subspace s_i=c_i.getModel().getSubspace();
int subspaceDim_i=dimensionality - s_i.dimensionality();
NumberVector ci_centroid=ProjectedCentroid.make(s_i.getDimensions(),database,c_i.getIDs());
long[] pv1=s_i.getDimensions();
for (int j=i + 1; j < clusters.size(); j++) {
Cluster<SubspaceModel> c_j=clusters.get(j);
final Subspace s_j=c_j.getModel().getSubspace();
int subspaceDim_j=dimensionality - s_j.dimensionality();
if (subspaceDim_i < subspaceDim_j) {
if (msg != null) {
msg.append("\n l_i=").append(subspaceDim_i).append(" pv_i=[").append(BitsUtil.toStringLow(s_i.getDimensions(),db_dim)).append(']');
msg.append("\n l_j=").append(subspaceDim_j).append(" pv_j=[").append(BitsUtil.toStringLow(s_j.getDimensions(),db_dim)).append(']');
}
if (s_j.dimensionality() == 0) {
if (hier.numParents(c_i) == 0) {
clustering.addChildCluster(c_j,c_i);
if (msg != null) {
msg.append("\n [").append(BitsUtil.toStringLow(s_j.getDimensions(),db_dim));
msg.append("] is parent of [").append(BitsUtil.toStringLow(s_i.getDimensions(),db_dim));
msg.append(']');
}
}
}
else {
NumberVector cj_centroid=ProjectedCentroid.make(c_j.getModel().getDimensions(),database,c_j.getIDs());
long[] pv2=s_j.getDimensions();
long[] commonPreferenceVector=BitsUtil.andCMin(pv1,pv2);
int subspaceDim=subspaceDimensionality(ci_centroid,cj_centroid,pv1,pv2,commonPreferenceVector);
double d=weightedDistance(ci_centroid,cj_centroid,commonPreferenceVector);
if (msg != null) {
msg.append("\n dist = ").append(subspaceDim);
}
if (subspaceDim == subspaceDim_j) {
if (msg != null) {
msg.append("\n d = ").append(d);
}
if (d <= 2 * epsilon) {
if (hier.numParents(c_i) == 0 || !isParent(database,c_j,hier.iterParents(c_i),db_dim)) {
clustering.addChildCluster(c_j,c_i);
if (msg != null) {
msg.append("\n [").append(BitsUtil.toStringLow(s_j.getDimensions(),db_dim));
msg.append("] is parent of [");
msg.append(BitsUtil.toStringLow(s_i.getDimensions(),db_dim));
msg.append(']');
}
}
}
else {
throw new RuntimeException("Should never happen: d = " + d);
}
}
}
}
}
}
if (msg != null) {
LOG.debug(msg.toString());
}
}
| Builds the cluster hierarchy. |
public EchoRequestMessage(EchoRequestMessage other){
if (other.isSetHeader()) {
this.header=new AsyncMessageHeader(other.header);
}
}
| Performs a deep copy on <i>other</i>. |
@Override public void run(){
amIActive=true;
String inputFilesString=null;
String[] imageFiles=null;
String outputName=null;
String workingDirectory=null;
WhiteboxRasterInfo[] images=null;
int nCols=0;
int nRows=0;
double z;
int numImages;
int progress=0;
int col, row;
int a, i, j;
double[] imageAverages;
double[] imageTotals;
double[] imageNumPixels;
double[][] data;
double[] noDataValues;
String pathSep=File.separator;
boolean standardizedPCA=false;
int numberOfComponentImagesToCreate=0;
if (args.length <= 0) {
showFeedback("Plugin parameters have not been set.");
return;
}
inputFilesString=args[0];
outputName=args[1];
if (outputName.toLowerCase().contains(".dep")) {
outputName=outputName.replace(".dep","");
}
standardizedPCA=Boolean.parseBoolean(args[2]);
if (args[3].toLowerCase().contains("not")) {
numberOfComponentImagesToCreate=9999999;
}
else {
numberOfComponentImagesToCreate=Integer.parseInt(args[3]);
}
try {
imageFiles=inputFilesString.split(";");
numImages=imageFiles.length;
images=new WhiteboxRasterInfo[numImages];
imageAverages=new double[numImages];
imageTotals=new double[numImages];
imageNumPixels=new double[numImages];
noDataValues=new double[numImages];
data=new double[numImages][];
for (i=0; i < numImages; i++) {
images[i]=new WhiteboxRasterInfo(imageFiles[i]);
noDataValues[i]=images[i].getNoDataValue();
if (i == 0) {
nCols=images[i].getNumberColumns();
nRows=images[i].getNumberRows();
File file=new File(imageFiles[i]);
workingDirectory=file.getParent();
}
else {
if (images[i].getNumberColumns() != nCols || images[i].getNumberRows() != nRows) {
showFeedback("All input images must have the same dimensions (rows and columns).");
return;
}
}
}
for (row=0; row < nRows; row++) {
for (i=0; i < numImages; i++) {
data[i]=images[i].getRowValues(row);
}
for (col=0; col < nCols; col++) {
for (i=0; i < numImages; i++) {
if (data[i][col] != noDataValues[i]) {
imageTotals[i]+=data[i][col];
imageNumPixels[i]++;
}
}
}
if (cancelOp) {
cancelOperation();
return;
}
progress=(int)(100f * row / (nRows - 1));
updateProgress("Calculating image means:",progress);
}
for (i=0; i < numImages; i++) {
imageAverages[i]=imageTotals[i] / imageNumPixels[i];
}
double[] imageTotalDeviation=new double[numImages];
double[][] covariances=new double[numImages][numImages];
double[][] correlationMatrix=new double[numImages][numImages];
for (row=0; row < nRows; row++) {
for (i=0; i < numImages; i++) {
data[i]=images[i].getRowValues(row);
}
for (col=0; col < nCols; col++) {
for (i=0; i < numImages; i++) {
if (data[i][col] != noDataValues[i]) {
imageTotalDeviation[i]+=(data[i][col] - imageAverages[i]) * (data[i][col] - imageAverages[i]);
for (a=0; a < numImages; a++) {
if (data[a][col] != noDataValues[a]) {
covariances[i][a]+=(data[i][col] - imageAverages[i]) * (data[a][col] - imageAverages[a]);
}
}
}
}
}
if (cancelOp) {
cancelOperation();
return;
}
progress=(int)(100f * row / (nRows - 1));
updateProgress("Calculating covariances:",progress);
}
for (i=0; i < numImages; i++) {
for (a=0; a < numImages; a++) {
correlationMatrix[i][a]=covariances[i][a] / (Math.sqrt(imageTotalDeviation[i] * imageTotalDeviation[a]));
}
}
for (i=0; i < numImages; i++) {
for (a=0; a < numImages; a++) {
covariances[i][a]=covariances[i][a] / (imageNumPixels[i] - 1);
}
}
Matrix cov=null;
if (!standardizedPCA) {
cov=new Matrix(covariances);
}
else {
cov=new Matrix(correlationMatrix);
}
EigenvalueDecomposition eigen=cov.eig();
double[] eigenvalues;
Matrix eigenvectors;
SortedSet<PrincipalComponent> principalComponents;
eigenvalues=eigen.getRealEigenvalues();
eigenvectors=eigen.getV();
double[][] vecs=eigenvectors.getArray();
int numComponents=eigenvectors.getColumnDimension();
principalComponents=new TreeSet<PrincipalComponent>();
for (i=0; i < numComponents; i++) {
double[] eigenvector=new double[numComponents];
for (j=0; j < numComponents; j++) {
eigenvector[j]=vecs[j][i];
}
principalComponents.add(new PrincipalComponent(eigenvalues[i],eigenvector));
}
double totalEigenvalue=0;
for (i=0; i < numComponents; i++) {
totalEigenvalue+=eigenvalues[i];
}
double[][] explainedVarianceArray=new double[numComponents][2];
j=0;
for ( PrincipalComponent pc : principalComponents) {
explainedVarianceArray[j][0]=pc.eigenValue / totalEigenvalue * 100.0;
if (j == 0) {
explainedVarianceArray[j][1]=explainedVarianceArray[j][0];
}
else {
explainedVarianceArray[j][1]=explainedVarianceArray[j][0] + explainedVarianceArray[j - 1][1];
}
j++;
}
DecimalFormat df1=new DecimalFormat("0.00");
DecimalFormat df2=new DecimalFormat("0.0000");
DecimalFormat df3=new DecimalFormat("0.000000");
String ret="Principal Component Analysis Report:\n\n";
ret+="Component\tExplained Var.\tCum. %\tEigenvalue\tEigenvector\n";
j=0;
for ( PrincipalComponent pc : principalComponents) {
String explainedVariance=df1.format(explainedVarianceArray[j][0]);
String explainedCumVariance=df1.format(explainedVarianceArray[j][1]);
double[] eigenvector=pc.eigenVector.clone();
ret+=(j + 1) + "\t" + explainedVariance+ "\t"+ explainedCumVariance+ "\t"+ df2.format(pc.eigenValue)+ "\t";
String eigenvec="[";
for (i=0; i < numComponents; i++) {
if (i < numComponents - 1) {
eigenvec+=df3.format(eigenvector[i]) + ", ";
}
else {
eigenvec+=df3.format(eigenvector[i]);
}
}
eigenvec+="]";
ret+=eigenvec + "\n";
if (j < numberOfComponentImagesToCreate) {
String outputHeader=workingDirectory + pathSep + outputName+ "_comp"+ (j + 1)+ ".dep";
WhiteboxRaster output=new WhiteboxRaster(outputHeader,"rw",imageFiles[0],WhiteboxRaster.DataType.FLOAT,0);
output.setDataScale(DataScale.CONTINUOUS);
for (row=0; row < nRows; row++) {
for (i=0; i < numImages; i++) {
data[i]=images[i].getRowValues(row);
}
for (col=0; col < nCols; col++) {
if (data[0][col] != noDataValues[0]) {
z=0;
for (i=0; i < numImages; i++) {
z+=data[i][col] * eigenvector[i];
}
output.setValue(row,col,z);
}
else {
output.setValue(row,col,noDataValues[0]);
}
}
if (cancelOp) {
cancelOperation();
return;
}
progress=(int)(100f * row / (nRows - 1));
updateProgress("Creating component images:",progress);
}
output.addMetadataEntry("Created by the " + getDescriptiveName() + " tool.");
output.addMetadataEntry("Created on " + new Date());
output.addMetadataEntry("Principal Component Num.: " + (j + 1));
output.addMetadataEntry("Eigenvalue: " + pc.eigenValue);
eigenvec="[";
for (i=0; i < numComponents; i++) {
if (i < numComponents - 1) {
eigenvec+=eigenvector[i] + ", ";
}
else {
eigenvec+=eigenvector[i];
}
}
eigenvec+="]";
output.addMetadataEntry("Eigenvector: " + eigenvec);
if (!standardizedPCA) {
output.addMetadataEntry("PCA Type: unstandardized");
}
else {
output.addMetadataEntry("PCA Type: standardized");
}
output.close();
}
j++;
}
ret+="\nFactor Loadings:\n";
ret+="\t\tComponent\n\t";
for (i=0; i < numComponents; i++) {
ret+=(i + 1) + "\t";
}
ret+="\n";
double loading=0;
if (!standardizedPCA) {
for (i=0; i < numImages; i++) {
ret+="band" + (i + 1) + "\t";
for ( PrincipalComponent pc : principalComponents) {
double[] eigenvector=pc.eigenVector.clone();
double ev=pc.eigenValue;
loading=(eigenvector[i] * Math.sqrt(ev)) / Math.sqrt(covariances[i][i]);
ret+=df1.format(loading) + "\t";
}
ret+="\n";
}
}
else {
for (i=0; i < numImages; i++) {
ret+="band" + (i + 1) + "\t";
for ( PrincipalComponent pc : principalComponents) {
double[] eigenvector=pc.eigenVector.clone();
double ev=pc.eigenValue;
loading=(eigenvector[i] * Math.sqrt(ev));
ret+=df1.format(loading) + "\t";
}
ret+="\n";
}
}
for (i=0; i < numImages; i++) {
images[i].close();
}
returnData(ret);
ScreePlot plot=new ScreePlot(explainedVarianceArray);
returnData(plot);
if (numComponents > 3) {
for (i=2; i >= 0; i--) {
if (i < numberOfComponentImagesToCreate) {
String outputHeader=workingDirectory + pathSep + outputName+ "_comp"+ (i + 1)+ ".dep";
returnData(outputHeader);
}
}
}
else {
for (i=numComponents - 1; i >= 0; i--) {
if (i < numberOfComponentImagesToCreate) {
String outputHeader=workingDirectory + pathSep + outputName+ "_comp"+ (i + 1)+ ".dep";
returnData(outputHeader);
}
}
}
}
catch ( OutOfMemoryError oe) {
myHost.showFeedback("An out-of-memory error has occurred during operation.");
}
catch ( Exception e) {
myHost.showFeedback("An error has occurred during operation. See log file for details.");
myHost.logException("Error in " + getDescriptiveName(),e);
}
finally {
updateProgress("Progress: ",0);
amIActive=false;
myHost.pluginComplete();
}
}
| Used to execute this plugin tool. |
public int tileXToX(int tx){
return tx * tileWidth + tileGridXOffset;
}
| Converts a horizontal tile index into the X coordinate of its upper left pixel. This is a convenience method. No attempt is made to detect out-of-range indices. |
public boolean match(Element e,String pseudoE){
Node n=e.getParentNode();
if (n != null && n.getNodeType() == Node.ELEMENT_NODE) {
return ((ExtendedSelector)getAncestorSelector()).match((Element)n,null) && ((ExtendedSelector)getSimpleSelector()).match(e,pseudoE);
}
return false;
}
| Tests whether this selector matches the given element. |
@Override protected void finalize() throws Throwable {
mView=null;
super.finalize();
}
| dealloc methods |
public void addIncomingEdge(final EdgeType edge){
Preconditions.checkNotNull(edge,"Error: Edge argument can't be null");
m_incomingEdges.add(edge);
}
| Adds an incoming edge to the node. |
public static String[] delimitedListToStringArray(String str,String delimiter){
return delimitedListToStringArray(str,delimiter,null);
}
| Take a String which is a delimited list and convert it to a String array. <p>A single delimiter can consist of more than one character: it will still be considered a single delimiter string, rather than a bunch of potential delimiter characters - in contrast to <code>tokenizeToStringArray</code>. |
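A short usage sketch of the whole-string delimiter behaviour described above; the inputs and the import location are hypothetical, and the expected output follows from the description rather than from code shown here:
import static org.example.StringUtilsHolder.delimitedListToStringArray; // hypothetical home of the method above

public class DelimitedListDemo {
    public static void main(String[] args) {
        // "::" is treated as one delimiter string, not as two independent ':' characters
        String[] parts = delimitedListToStringArray("alpha::beta::gamma", "::");
        for (String part : parts) {
            System.out.println(part);        // expected: alpha, beta, gamma on separate lines
        }
    }
}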
public static <K,V>boolean compare(Map<K,V> mapA,Map<K,V> mapB){
if (mapA == mapB) return true;
if (mapA == null || mapB == null || mapA.size() != mapB.size()) return false;
for ( K key : mapA.keySet()) {
V value1=mapA.get(key);
V value2=mapB.get(key);
if (value1 == null && value2 == null) {
continue;
}
else if (value1 == null || value2 == null) {
return false;
}
if (!value1.equals(value2)) {
return false;
}
}
return true;
}
| Compares maps for value-equality: 1. Check that the maps are the same size 2. Get the set of keys from one map 3. For each key from that set you retrieved, check that the value retrieved from each map for that key is the same (if the key is absent from one map, that's a total failure of equality). |
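A brief usage sketch of the value-equality check above, using hypothetical maps; it assumes the compare method shown above is declared in the same class:
import java.util.HashMap;
import java.util.Map;

public class MapCompareDemo {
    public static void main(String[] args) {
        Map<String, Integer> a = new HashMap<>();
        a.put("x", 1);
        a.put("y", null);                    // null values are handled explicitly by the comparison
        Map<String, Integer> b = new HashMap<>();
        b.put("y", null);
        b.put("x", 1);
        System.out.println(compare(a, b));   // true: same size, same key/value pairs
        b.put("x", 2);
        System.out.println(compare(a, b));   // false: the value for "x" differs
    }
    // compare(...) as defined above is assumed to be declared here
}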
public static final XPATHErrorResources loadResourceBundle(String className) throws MissingResourceException {
Locale locale=Locale.getDefault();
String suffix=getResourceSuffix(locale);
try {
return (XPATHErrorResources)ResourceBundle.getBundle(className + suffix,locale);
}
catch ( MissingResourceException e) {
try {
return (XPATHErrorResources)ResourceBundle.getBundle(className,new Locale("en","US"));
}
catch ( MissingResourceException e2) {
throw new MissingResourceException("Could not load any resource bundles.",className,"");
}
}
}
| Return a named ResourceBundle for a particular locale. This method mimics the behavior of ResourceBundle.getBundle(). |
private void createTheUserForSingleUserMode(){
if (!isInSingleUserMode()) {
return;
}
try {
final User user=dbLogic.getOrCreateUser(singleUserName);
if (user == null) {
logger.log(Level.SEVERE,"Could not create single user user");
return;
}
if (!user.getIsSingleUser()) {
user.setIsSingleUser(true);
}
if (!user.getIsAdmin()) {
user.setIsAdmin(true);
}
dbLogic.commit();
}
catch ( final PersistenceException e) {
logger.log(Level.SEVERE,"Could not create single user user",e);
}
}
| Creates the user for single user mode. |
@SuppressWarnings("unused") public static byte[] select_position_hri(byte n){
byte[] result=new byte[3];
result[0]=GS;
result[1]=72;
result[2]=n;
return result;
}
| select position_hri Selects the print position of Human Readable Interpretation (HRI) characters when printing a barcode, using n as follows: |
public LinearComponentExtracter(Collection lines){
this.lines=lines;
}
| Constructs a LinearComponentExtracter with a list in which to store the LineStrings found. |
public QueueNode(Object contents){
m_Contents=contents;
next(null);
}
| Creates a queue node with the given contents |
@Override protected EClass eStaticClass(){
return MappingPackage.Literals.SOURCE;
}
| <!-- begin-user-doc --> <!-- end-user-doc --> |
public static void iterate(Iterator iterator,Command command) throws Exception {
while (iterator.hasNext()) {
command.execute(iterator.next());
}
}
| Iterate passing each Command each Object that is being iterated |
public Object runSafely(Catbert.FastStack stack) throws Exception {
return Wizard.getInstance().searchByExactPerson(getPerson(stack));
}
| Returns all Airings in the database that refer to content that is NOT a music file and includes the specified person in the list of people involved (i.e. actors, directors, producers, etc.) |
public void addPostalAddress(final String addressItem){
if (postalAddress == null) {
postalAddress=new ArrayList<String>();
}
postalAddress.add(addressItem);
}
| Adds an address item to the complete address. |
public final void writeInt(int val) throws IOException {
Memory.pokeInt(scratch,0,val,ByteOrder.BIG_ENDIAN);
write(scratch,0,SizeOf.INT);
}
| Writes a big-endian 32-bit integer to this file, starting at the current file pointer. |
public InvitationProjectsExample(InvitationProjectsObject sample){
oredCriteria=new ArrayList<Criteria>();
Criteria criteria=this.or();
if (sample.getInvitationId() != null) {
criteria.andInvitationIdEqualTo(sample.getInvitationId());
}
if (sample.getProjectId() != null) {
criteria.andProjectIdEqualTo(sample.getProjectId());
}
}
| This method was generated by MyBatis Generator. This method corresponds to the database table invitation_projects |
public double calcScoreWithReversedParent(int nNode,int nCandidateParent) throws Exception {
ParentSet oParentSet=m_BayesNet.getParentSet(nNode);
ParentSet oParentSet2=m_BayesNet.getParentSet(nCandidateParent);
Instances instances=m_BayesNet.m_Instances;
if (!oParentSet.contains(nCandidateParent)) {
return -1e100;
}
int iParent=oParentSet.deleteParent(nCandidateParent,instances);
oParentSet2.addParent(nNode,instances);
double fAccuracy=calcScore(m_BayesNet);
oParentSet2.deleteLastParent(instances);
oParentSet.addParent(nCandidateParent,iParent,instances);
return fAccuracy;
}
| Calc Node Score With Arrow reversed |
private boolean checkTaskCanExecute(){
FileDownloadStatusFailReason failReason=null;
String url=getUrl();
if (mTaskParamInfo == null) {
failReason=new OnFileDownloadStatusFailReason(url,"init param is null pointer !",OnFileDownloadStatusFailReason.TYPE_NULL_POINTER);
}
if (failReason == null && !UrlUtil.isUrl(url)) {
failReason=new OnFileDownloadStatusFailReason(url,"url illegal !",OnFileDownloadStatusFailReason.TYPE_URL_ILLEGAL);
}
if (failReason == null && !FileUtil.isFilePath(mTaskParamInfo.getFilePath())) {
failReason=new OnFileDownloadStatusFailReason(url,"saveDir illegal !",OnFileDownloadStatusFailReason.TYPE_FILE_SAVE_PATH_ILLEGAL);
}
if (failReason == null && (!FileUtil.canWrite(mTaskParamInfo.getTempFilePath()) || !FileUtil.canWrite(mTaskParamInfo.getFilePath()))) {
failReason=new OnFileDownloadStatusFailReason(url,"savePath can not write !",OnFileDownloadStatusFailReason.TYPE_STORAGE_SPACE_CAN_NOT_WRITE);
}
if (failReason == null) {
DownloadFileInfo downloadFileInfo=getDownloadFile();
if (downloadFileInfo != null) {
if (downloadFileInfo.getStatus() == Status.DOWNLOAD_STATUS_COMPLETED) {
mFinishState=new FinishState(Status.DOWNLOAD_STATUS_COMPLETED);
return false;
}
else if (downloadFileInfo.getDownloadedSizeLong() == downloadFileInfo.getFileSizeLong()) {
boolean isSucceed=DownloadFileUtil.tryToRenameTempFileToSaveFile(downloadFileInfo);
if (isSucceed) {
mFinishState=new FinishState(Status.DOWNLOAD_STATUS_COMPLETED);
return false;
}
}
}
}
if (failReason == null) {
try {
String checkPath=null;
File file=new File(mTaskParamInfo.getFilePath());
if (file != null) {
checkPath=file.getParentFile().getAbsolutePath();
}
if (!FileUtil.isFilePath(checkPath)) {
failReason=new OnFileDownloadStatusFailReason(url,"file save path illegal !",OnFileDownloadStatusFailReason.TYPE_FILE_SAVE_PATH_ILLEGAL);
}
else {
long freeSize=FileUtil.getAvailableSpace(checkPath);
long needDownloadSize=mTaskParamInfo.getFileTotalSize() - mTaskParamInfo.getStartPosInTotal();
if (freeSize == -1 || needDownloadSize > freeSize) {
failReason=new OnFileDownloadStatusFailReason(url,"storage space is full or" + " storage can not " + "write !",OnFileDownloadStatusFailReason.TYPE_STORAGE_SPACE_IS_FULL);
}
}
}
catch ( Exception e) {
e.printStackTrace();
failReason=new OnFileDownloadStatusFailReason(url,e);
}
}
if (failReason != null) {
mFinishState=new FinishState(Status.DOWNLOAD_STATUS_ERROR,failReason);
return false;
}
return true;
}
| check whether the task can execute |
@Override protected void onPause(){
super.onPause();
Log.d(TAG,"onPause() - another activity is taking focus (this activity " + "is about to be \"paused\")");
}
| Hook method called when an Activity loses focus but is still visible in background. May be followed by onStop() or onResume(). Delegate more CPU intensive operation to onStop for seamless transition to next activity. Save persistent state (onSaveInstanceState()) in case app is killed. Often used to release exclusive resources. |
public static void clearAll(){
SharedPreferences sp=context.getSharedPreferences(APP_ID,Context.MODE_PRIVATE);
SharedPreferences.Editor editor=sp.edit();
editor.clear().commit();
}
| Clear all cache |
@Override public void printMessage(final String message){
printMessage(message,preferenceStore.getBoolean(UIPreferenceConstants.CONSOLE_SHOW_ON_NEW_MESSAGE));
}
| Prints a message to the console. The console is not raised. |
private void computeMinDistancePolygonRings(PlanarPolygon3D poly,Polygon ringPoly,boolean flip){
computeMinDistancePolygonLine(poly,ringPoly.getExteriorRing(),flip);
if (isDone) return;
int nHole=ringPoly.getNumInteriorRing();
for (int i=0; i < nHole; i++) {
computeMinDistancePolygonLine(poly,ringPoly.getInteriorRingN(i),flip);
if (isDone) return;
}
}
| Compute distance between a polygon and the rings of another. |
private void recursiveCompatibleEnvironemntCollector(IN4JSSourceContainerAware sourceContainer,Collection<String> collection,Predicate<IN4JSProject> predicate,List<IN4JSProject> allRuntimeEnv){
IN4JSProject project=(extractProject(sourceContainer));
if (predicate.test(project)) {
com.google.common.base.Optional<String> oExtendedProjectId=project.getExtendedRuntimeEnvironmentId();
if (!oExtendedProjectId.isPresent()) {
return;
}
String extendedProjectId=oExtendedProjectId.get();
collection.add(extendedProjectId);
allRuntimeEnv.stream().filter(null).findFirst().ifPresent(null);
}
}
| recursively searches given source container for provided runtime environments |
@Override public KeyParameter deriveKey(CharSequence password) throws KeyCrypterException {
byte[] passwordBytes=null;
try {
passwordBytes=convertToByteArray(password);
byte[] salt=new byte[0];
if (scryptParameters.getSalt() != null) {
salt=scryptParameters.getSalt().toByteArray();
}
else {
log.warn("You are using a ScryptParameters with no salt. Your encryption may be vulnerable to a dictionary attack.");
}
byte[] keyBytes=SCrypt.scrypt(passwordBytes,salt,(int)scryptParameters.getN(),scryptParameters.getR(),scryptParameters.getP(),KEY_LENGTH);
return new KeyParameter(keyBytes);
}
catch ( Exception e) {
throw new KeyCrypterException("Could not generate key from password and salt.",e);
}
finally {
if (passwordBytes != null) {
java.util.Arrays.fill(passwordBytes,(byte)0);
}
}
}
| Generate AES key. This is a very slow operation compared to encrypt/decrypt, so it is normally worth caching the result. |
protected boolean reverseArcMakesSense(BayesNet bayesNet,Instances instances,int iAttributeHead,int iAttributeTail){
if (iAttributeHead == iAttributeTail) {
return false;
}
if (!isArc(bayesNet,iAttributeHead,iAttributeTail)) {
return false;
}
int nNodes=instances.numAttributes();
boolean[] bDone=new boolean[nNodes];
for (int iNode=0; iNode < nNodes; iNode++) {
bDone[iNode]=false;
}
bayesNet.getParentSet(iAttributeTail).addParent(iAttributeHead,instances);
for (int iNode=0; iNode < nNodes; iNode++) {
boolean bFound=false;
for (int iNode2=0; !bFound && iNode2 < nNodes; iNode2++) {
if (!bDone[iNode2]) {
ParentSet parentSet=bayesNet.getParentSet(iNode2);
boolean bHasNoParents=true;
for (int iParent=0; iParent < parentSet.getNrOfParents(); iParent++) {
if (!bDone[parentSet.getParent(iParent)]) {
if (!(iNode2 == iAttributeHead && parentSet.getParent(iParent) == iAttributeTail)) {
bHasNoParents=false;
}
}
}
if (bHasNoParents) {
bDone[iNode2]=true;
bFound=true;
}
}
}
if (!bFound) {
bayesNet.getParentSet(iAttributeTail).deleteLastParent(instances);
return false;
}
}
bayesNet.getParentSet(iAttributeTail).deleteLastParent(instances);
return true;
}
| reverseArcMakesSense checks whether the arc from iAttributeTail to iAttributeHead exists and reversing does not introduce a cycle |
public Jerry each(JerryFunction function){
for (int i=0; i < nodes.length; i++) {
Node node=nodes[i];
Jerry $this=new Jerry(this,node);
if (function.onNode($this,i) == false) {
break;
}
}
return this;
}
| Iterates over a jQuery object, executing a function for each matched element. |
@Override public void eSet(int featureID,Object newValue){
switch (featureID) {
case TypesPackage.ARRAY_TYPE_SPECIFIER__SIZE:
setSize((Integer)newValue);
return;
}
super.eSet(featureID,newValue);
}
| <!-- begin-user-doc --> <!-- end-user-doc --> |
public static StringBuffer replaceString(StringBuffer buffer,String[] parameters){
int placeHolderPosition=-1;
String placeHolder=null;
for (int i=0; i < parameters.length; i++) {
if (parameters[i] == null) {
break;
}
placeHolder="%" + (i + 1) + "%";
placeHolderPosition=buffer.indexOf(placeHolder);
if (placeHolderPosition != -1) {
buffer.replace(placeHolderPosition,placeHolderPosition + placeHolder.length(),parameters[i]);
}
else {
break;
}
}
return buffer;
}
| Replaces the placeholders with the given parameters. |
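A small usage sketch of the placeholder substitution above, with hypothetical inputs; it assumes replaceString is declared in the same class:
public class ReplaceStringDemo {
    public static void main(String[] args) {
        StringBuffer template = new StringBuffer("Hello %1%, you have %2% new messages");
        String[] parameters = {"Bob", "3"};
        // %1% is replaced by "Bob" and %2% by "3"; a null parameter would stop the substitution early
        System.out.println(replaceString(template, parameters));
        // expected output: Hello Bob, you have 3 new messages
    }
    // replaceString(...) as defined above is assumed to be declared here
}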
public static Map<String,Integer> parseBlastOutputFile(String pathToFile) throws IOException {
Map<String,Integer> pirsfIdHitNumberMap=new HashMap<String,Integer>();
File blastOutputFile=new File(pathToFile);
if (blastOutputFile == null) {
throw new NullPointerException("Blast output file resource is null");
}
if (!blastOutputFile.exists()) {
throw new IllegalStateException(blastOutputFile.getName() + " does not exist");
}
if (!blastOutputFile.canRead()) {
throw new IllegalStateException(blastOutputFile.getName() + " is not readable");
}
BufferedReader reader=null;
try {
reader=new BufferedReader(new FileReader(blastOutputFile));
String readline;
Long currentProteinId=null;
String currentModelId=null;
while ((readline=reader.readLine()) != null) {
long proteinId;
String modelId;
String[] columns=readline.split("\t");
if (columns != null && columns.length == 12) {
proteinId=Long.parseLong(columns[0]);
if (currentProteinId == null) {
currentProteinId=proteinId;
}
else if (currentProteinId != proteinId) {
currentProteinId=proteinId;
currentModelId=null;
}
String matchId=columns[1];
String[] dividedMatchId=matchId.split("-");
if (dividedMatchId != null && dividedMatchId.length == 2) {
if (currentModelId == null) {
currentModelId=dividedMatchId[1];
String key=currentProteinId.toString() + '-' + currentModelId;
pirsfIdHitNumberMap.put(key,1);
}
else {
modelId=dividedMatchId[1];
if (currentModelId.equals(modelId)) {
String key=currentProteinId.toString() + '-' + currentModelId;
if (pirsfIdHitNumberMap.containsKey(key)) {
Integer numOfHits=pirsfIdHitNumberMap.get(key);
numOfHits++;
pirsfIdHitNumberMap.put(key,numOfHits);
}
else {
LOGGER.warn("Could not increment number of hits for this protein Id and best match model Id");
}
}
}
}
}
else {
LOGGER.warn("Skip line - wrong number of columns in blast output file for line: " + readline);
}
}
}
finally {
if (reader != null) {
reader.close();
}
}
return pirsfIdHitNumberMap;
}
| Parses all PIRSF IDs out of the BLAST standard output (line by line) and counts the occurrence of each ID. The result is stored in a hash map. <p/> Example blast results file: <p/> 3 A1FUJ0 41.40 314 180 3 3 315 20 330 3.5e-55 218.4 3 Q0I9F4-SF000729 35.99 364 225 3 7 363 5 367 4.5e-55 218.0 3 Q011Y4 43.01 286 157 4 6 288 10 292 4.5e-55 218.0 3 Q3BSJ0-SF000729 38.29 363 208 6 7 356 1 360 5.9e-55 217.6 4 Q2S6F1 46.64 461 241 2 6 462 55 514 4e-117 424.5 4 Q3SIV0-SF000210 44.52 456 250 1 6 458 10 465 7e-109 397.1 4 Q01YS3-SF000210 46.07 458 241 2 5 462 4 455 2e-106 389.0 4 Q4IBF8-SF000210 43.79 475 248 5 6 462 5 478 6e-106 387.5 4 O69236-SF000350 34.79 457 292 2 6 462 85 535 7.1e-75 284.3 4 Q9F4C3-SF000350 34.57 457 293 3 6 462 85 535 6.0e-74 281.2 4 Q3Y2B1-SF000210 36.60 459 268 6 3 457 2 441 1.7e-73 279.6 4 Q2BTS5-SF000210 36.78 454 266 7 9 458 7 443 1.7e-73 279.6 <p/> Required output after parsing (HashMap): <p/> 3-SF000729 -> 2 4-SF000210 -> 5 <p/> Note: We are only interested in the number of hits for the FIRST (lowest e-value therefore best) blast match for each protein Id, so no need to record number of hits for SF000350 match. |
public Expirer(){
expirerThread=new Thread(this,"ExpiringMapExpirer-" + expirerCount++);
expirerThread.setDaemon(true);
}
| Creates a new instance of Expirer. |
public void init(StreamImpl source){
_isDisableClose=false;
_isDisableCloseSource=false;
if (_source != null && _source != source) {
try {
close();
}
catch ( IOException e) {
}
}
if (source == null) throw new IllegalArgumentException();
if (_tempWrite == null) {
_tempWrite=TempBuffer.create();
_writeBuffer=_tempWrite.buffer();
}
_source=source;
_position=0;
_writeLength=0;
_isFlushRequired=false;
_isFlushOnNewline=source.getFlushOnNewline();
_newline="\n";
_newlineBytes=LF_BYTES;
_writeEncoding=null;
_writeEncodingName="ISO-8859-1";
}
| Initializes the stream with a given source. |
public boolean removeEntry(Entry e,int dataSetIndex){
if (e == null || dataSetIndex >= mDataSets.size()) return false;
boolean removed=mDataSets.get(dataSetIndex).removeEntry(e.getXIndex());
if (removed) {
float val=e.getVal();
mYValCount-=1;
mYValueSum-=val;
calcMinMax(mDataSets);
}
return removed;
}
| Removes the given Entry object from the DataSet at the specified index. |
public static boolean isConnected(Context context){
NetworkInfo info=getNetworkInfo(context);
return (info != null && info.isConnected());
}
| Check if there is any connectivity |
public static void logError(final Logger logger,final Error e){
logger.logError(Level.SEVERE,"Unexpected Error",e);
}
| Logs an error. |
public static boolean isTimeSeries(Class<? extends TaggedLogAPIEntity> clazz){
TimeSeries ts=clazz.getAnnotation(TimeSeries.class);
return ts != null && ts.value();
}
| Check whether the entity class is time series, false by default |
public ErrorMessage(int status,String message){
this.status=status;
this.message=message;
}
| Creates a new ErrorMessage object. |
public NTRUSigningPublicKeyParameters(InputStream is,NTRUSigningParameters params) throws IOException {
super(false);
h=IntegerPolynomial.fromBinary(is,params.N,params.q);
this.params=params;
}
| Reads a polynomial <code>h</code> from an input stream and constructs a new public key |
public void add(Item item){
Node oldfirst=first;
first=new Node();
first.item=item;
first.next=oldfirst;
n++;
}
| Adds the item to this bag. |
public CipherParameters generateDerivedMacParameters(int keySize){
return generateDerivedParameters(keySize);
}
| Generate a key parameter for use with a MAC derived from the password, salt, and iteration count we are currently initialised with. |
public void testAD_OrgBP_ID_Issue() throws Exception {
MBPartner bp=new MBPartner(getCtx(),50004,getTrxName());
final int old_org_id=bp.getAD_OrgBP_ID_Int();
int new_org_id=50005;
if (old_org_id == new_org_id) {
new_org_id=12;
}
bp.setAD_OrgBP_ID(new_org_id);
bp.getAD_OrgBP_ID_Int();
bp.saveEx();
}
| BF [ 2859125 ] Can't set AD_OrgBP_ID https://sourceforge.net/tracker/index.php?func=detail&aid=2859125&group_id=176962&atid=879332# |
public Builder permitDiskReads(){
return disable(DETECT_DISK_READ);
}
| Disable detection of disk reads. |
@Override public double distance(NumberVector v1,NumberVector v2){
final double pcc=PearsonCorrelation.coefficient(v1,v2);
return 1 - pcc * pcc;
}
| Computes the squared Pearson correlation distance for two given feature vectors. The squared Pearson correlation distance is computed from the Pearson correlation coefficient <code>r</code> as: <code>1-r</code><sup> <code>2</code></sup>. Hence, possible values of this distance are between 0 and 1. |
public final void replaceAll(Object from,Object to,boolean onlyIfSameObject){
final int width=this.width;
final int height=this.height;
Object[] fieldx=null;
for (int x=0; x < width; x++) {
fieldx=field[x];
for (int y=0; y < height; y++) {
Object obj=fieldx[y];
if ((obj == null && from == null) || (onlyIfSameObject && obj == from) || (!onlyIfSameObject && obj != null && obj.equals(from))) fieldx[y]=to;
}
}
}
| Replace instances of one value to another. Equality is measured as follows. (1) if onlyIfSameObject is true, then objects must be "== from" to one another to be considered equal. (2) if onlyIfSameObject is false, then objects in the field must be "equals(from)". In either case, null is considered equal to null. |
private void highlightExpression(boolean highlight){
expressionHighlighted=highlight;
if (highlight) {
expressionPanel.setBorder(BorderFactory.createMatteBorder(1,1,1,1,SwingTools.RAPIDMINER_ORANGE));
expressionPanel.setBackground(Color.LIGHT_GRAY);
}
else {
expressionPanel.setBorder(BorderFactory.createMatteBorder(1,1,1,1,Color.GRAY));
expressionPanel.setBackground(defaultBackground);
}
}
| Highlights the part to select, if the user wants to insert the evaluated expression of the macro |
@Override protected EClass eStaticClass(){
return ImPackage.Literals.SYMBOL_TABLE_ENTRY;
}
| <!-- begin-user-doc --> <!-- end-user-doc --> |
protected void fail(String string){
lastPrint=0;
if (string.length() > 100) {
char[] data=string.toCharArray();
for (int i=0; i < data.length; i++) {
char c=data[i];
if (c >= 128 || c < 32) {
data[i]=(char)('a' + (c & 15));
string=null;
}
}
if (string == null) {
string=new String(data);
}
}
println(string);
throw new AssertionError(string);
}
| Called if the test reached a point that was not expected. |
public static void main(String[] args){
Log.printLine("Starting NetworkExample3...");
try {
int num_user=2;
Calendar calendar=Calendar.getInstance();
boolean trace_flag=false;
CloudSim.init(num_user,calendar,trace_flag);
Datacenter datacenter0=createDatacenter("Datacenter_0");
Datacenter datacenter1=createDatacenter("Datacenter_1");
DatacenterBroker broker1=createBroker(1);
int brokerId1=broker1.getId();
DatacenterBroker broker2=createBroker(2);
int brokerId2=broker2.getId();
vmlist1=new ArrayList<Vm>();
vmlist2=new ArrayList<Vm>();
int vmid=0;
long size=10000;
int mips=250;
int ram=512;
long bw=1000;
int pesNumber=1;
String vmm="Xen";
Vm vm1=new Vm(vmid,brokerId1,mips,pesNumber,ram,bw,size,vmm,new CloudletSchedulerTimeShared());
Vm vm2=new Vm(vmid,brokerId2,mips,pesNumber,ram,bw,size,vmm,new CloudletSchedulerTimeShared());
vmlist1.add(vm1);
vmlist2.add(vm2);
broker1.submitVmList(vmlist1);
broker2.submitVmList(vmlist2);
cloudletList1=new ArrayList<Cloudlet>();
cloudletList2=new ArrayList<Cloudlet>();
int id=0;
long length=40000;
long fileSize=300;
long outputSize=300;
UtilizationModel utilizationModel=new UtilizationModelFull();
Cloudlet cloudlet1=new Cloudlet(id,length,pesNumber,fileSize,outputSize,utilizationModel,utilizationModel,utilizationModel);
cloudlet1.setUserId(brokerId1);
Cloudlet cloudlet2=new Cloudlet(id,length,pesNumber,fileSize,outputSize,utilizationModel,utilizationModel,utilizationModel);
cloudlet2.setUserId(brokerId2);
cloudletList1.add(cloudlet1);
cloudletList2.add(cloudlet2);
broker1.submitCloudletList(cloudletList1);
broker2.submitCloudletList(cloudletList2);
NetworkTopology.buildNetworkTopology("topology.brite");
int briteNode=0;
NetworkTopology.mapNode(datacenter0.getId(),briteNode);
briteNode=2;
NetworkTopology.mapNode(datacenter1.getId(),briteNode);
briteNode=3;
NetworkTopology.mapNode(broker1.getId(),briteNode);
briteNode=4;
NetworkTopology.mapNode(broker2.getId(),briteNode);
CloudSim.startSimulation();
List<Cloudlet> newList1=broker1.getCloudletReceivedList();
List<Cloudlet> newList2=broker2.getCloudletReceivedList();
CloudSim.stopSimulation();
Log.print("=============> User " + brokerId1 + " ");
printCloudletList(newList1);
Log.print("=============> User " + brokerId2 + " ");
printCloudletList(newList2);
Log.printLine("NetworkExample3 finished!");
}
catch ( Exception e) {
e.printStackTrace();
Log.printLine("The simulation has been terminated due to an unexpected error");
}
}
| Creates main() to run this example |
public static synchronized ComponentUI createUI(JComponent label){
if (instance == null) {
instance=new StyledLabelUI(StyleUtil.getStyle());
}
return instance;
}
| Create StyledLabelUI for a label. |
@Override public CompilerPhase newExecution(IR ir){
return this;
}
| Return this instance of this phase. This phase contains no per-compilation instance fields. |
@DSGenerator(tool_name="Doppelganger",tool_version="2.0",generated_on="2013-12-30 12:32:22.053 -0500",hash_original_method="EEDFE0453AAAF2A630A301400F88A0DB",hash_generated_method="E90D1453F700D1C14BCE902B1840D5A8") public static void register(){
DdmServer.registerHandler(CHUNK_HELO,mInstance);
DdmServer.registerHandler(CHUNK_FEAT,mInstance);
}
| Register for the messages we're interested in. |
public static TypeReference newTypeParameterBoundReference(int sort,int paramIndex,int boundIndex){
return new TypeReference((sort << 24) | (paramIndex << 16) | (boundIndex << 8));
}
| Returns a reference to a type parameter bound of a generic class or method. |
private void createGui(){
for ( final IResultsPanel panel : m_factory) {
m_tabbedPane.addTab(panel.getTitle(),panel.getComponent());
}
add(m_tabbedPane,BorderLayout.CENTER);
m_tabbedPane.setVisible(true);
}
| Creates the sub-components of the panel. |
public CacheServerImpl(AdminDistributedSystemImpl system,GemFireVM vm) throws AdminException {
super(system,vm);
this.config=new CacheServerConfigImpl(vm);
}
| Creates a new <code>CacheServerImpl</code> that represents an existing dedicated cache server in a given distributed system. |
public static AVList extractRasterParameters(Dataset ds,AVList params,boolean quickReadingMode) throws IllegalArgumentException, WWRuntimeException {
if (null == params) {
params=new AVListImpl();
}
if (!gdalIsAvailable.get()) {
String message=Logging.getMessage("gdal.GDALNotAvailable");
Logging.logger().finest(message);
throw new WWRuntimeException(message);
}
if (null == ds) {
String message=Logging.getMessage("nullValue.DataSetIsNull");
Logging.logger().finest(message);
throw new IllegalArgumentException(message);
}
int width=ds.getRasterXSize();
if (0 >= width) {
String message=Logging.getMessage("generic.InvalidWidth",width);
Logging.logger().finest(message);
throw new IllegalArgumentException(message);
}
params.setValue(AVKey.WIDTH,width);
int height=ds.getRasterYSize();
if (0 >= height) {
String message=Logging.getMessage("generic.InvalidHeight",height);
Logging.logger().finest(message);
throw new IllegalArgumentException(message);
}
params.setValue(AVKey.HEIGHT,height);
int bandCount=ds.getRasterCount();
if (0 >= bandCount) {
String message=Logging.getMessage("generic.UnexpectedBandCount",bandCount);
Logging.logger().finest(message);
throw new WWRuntimeException(message);
}
params.setValue(AVKey.NUM_BANDS,bandCount);
Band band=ds.GetRasterBand(1);
if (null != band) {
if (band.GetOverviewCount() > 0) {
params.setValue(AVKey.RASTER_HAS_OVERVIEWS,Boolean.TRUE);
}
int dataType=band.getDataType();
if (dataType == gdalconst.GDT_Int16 || dataType == gdalconst.GDT_CInt16) {
params.setValue(AVKey.PIXEL_FORMAT,AVKey.ELEVATION);
params.setValue(AVKey.DATA_TYPE,AVKey.INT16);
}
else if (dataType == gdalconst.GDT_Int32 || dataType == gdalconst.GDT_CInt32) {
params.setValue(AVKey.PIXEL_FORMAT,AVKey.ELEVATION);
params.setValue(AVKey.DATA_TYPE,AVKey.INT32);
}
else if (dataType == gdalconst.GDT_Float32 || dataType == gdalconst.GDT_CFloat32) {
params.setValue(AVKey.PIXEL_FORMAT,AVKey.ELEVATION);
params.setValue(AVKey.DATA_TYPE,AVKey.FLOAT32);
}
else if (dataType == gdalconst.GDT_Byte) {
int colorInt=band.GetColorInterpretation();
if (colorInt == gdalconst.GCI_GrayIndex && bandCount < 3) {
params.setValue(AVKey.IMAGE_COLOR_FORMAT,AVKey.GRAYSCALE);
}
else {
params.setValue(AVKey.IMAGE_COLOR_FORMAT,AVKey.COLOR);
}
params.setValue(AVKey.PIXEL_FORMAT,AVKey.IMAGE);
params.setValue(AVKey.DATA_TYPE,AVKey.INT8);
}
else if (dataType == gdalconst.GDT_UInt16) {
params.setValue(AVKey.IMAGE_COLOR_FORMAT,((bandCount >= 3) ? AVKey.COLOR : AVKey.GRAYSCALE));
params.setValue(AVKey.PIXEL_FORMAT,AVKey.IMAGE);
params.setValue(AVKey.DATA_TYPE,AVKey.INT16);
}
else if (dataType == gdalconst.GDT_UInt32) {
params.setValue(AVKey.IMAGE_COLOR_FORMAT,((bandCount >= 3) ? AVKey.COLOR : AVKey.GRAYSCALE));
params.setValue(AVKey.PIXEL_FORMAT,AVKey.IMAGE);
params.setValue(AVKey.DATA_TYPE,AVKey.INT32);
}
else {
String msg=Logging.getMessage("generic.UnrecognizedDataType",dataType);
Logging.logger().severe(msg);
throw new WWRuntimeException(msg);
}
if ("GTiff".equalsIgnoreCase(ds.GetDriver().getShortName()) && params.hasKey(AVKey.FILE) && AVKey.ELEVATION.equals(params.getValue(AVKey.PIXEL_FORMAT))&& !params.hasKey(AVKey.ELEVATION_UNIT)) {
GeotiffReader reader=null;
try {
File src=(File)params.getValue(AVKey.FILE);
AVList tiffParams=new AVListImpl();
reader=new GeotiffReader(src);
reader.copyMetadataTo(tiffParams);
WWUtil.copyValues(tiffParams,params,new String[]{AVKey.ELEVATION_UNIT,AVKey.ELEVATION_MIN,AVKey.ELEVATION_MAX,AVKey.MISSING_DATA_SIGNAL},false);
}
catch ( Throwable t) {
Logging.logger().finest(WWUtil.extractExceptionReason(t));
}
finally {
if (null != reader) reader.dispose();
}
}
extractMinMaxSampleValues(ds,band,params);
if (AVKey.ELEVATION.equals(params.getValue(AVKey.PIXEL_FORMAT)) && (!params.hasKey(AVKey.ELEVATION_MIN) || !params.hasKey(AVKey.ELEVATION_MAX) || !params.hasKey(AVKey.MISSING_DATA_SIGNAL)) && !quickReadingMode) {
double[] minmax=new double[2];
band.ComputeRasterMinMax(minmax);
if (ElevationsUtil.isKnownMissingSignal(minmax[0])) {
params.setValue(AVKey.MISSING_DATA_SIGNAL,minmax[0]);
if (setNoDataValue(band,minmax[0])) {
band.ComputeRasterMinMax(minmax);
params.setValue(AVKey.ELEVATION_MIN,minmax[0]);
params.setValue(AVKey.ELEVATION_MAX,minmax[1]);
}
}
else {
params.setValue(AVKey.ELEVATION_MIN,minmax[0]);
params.setValue(AVKey.ELEVATION_MAX,minmax[1]);
}
}
}
String proj_wkt=null;
if (params.hasKey(AVKey.SPATIAL_REFERENCE_WKT)) {
proj_wkt=params.getStringValue(AVKey.SPATIAL_REFERENCE_WKT);
}
if (WWUtil.isEmpty(proj_wkt)) {
proj_wkt=ds.GetProjectionRef();
}
if (WWUtil.isEmpty(proj_wkt)) {
proj_wkt=ds.GetProjection();
}
SpatialReference srs=null;
if (!WWUtil.isEmpty(proj_wkt)) {
params.setValue(AVKey.SPATIAL_REFERENCE_WKT,proj_wkt);
srs=new SpatialReference(proj_wkt);
}
double[] gt=new double[6];
ds.GetGeoTransform(gt);
if (gt[GDAL.GT_5_PIXEL_HEIGHT] > 0) {
gt[GDAL.GT_5_PIXEL_HEIGHT]=-gt[GDAL.GT_5_PIXEL_HEIGHT];
}
java.awt.geom.Point2D[] corners=GDAL.computeCornersFromGeotransform(gt,width,height);
double minX=GDAL.getMinX(corners);
double minY=GDAL.getMinY(corners);
double maxX=GDAL.getMaxX(corners);
double maxY=GDAL.getMaxY(corners);
double rotX=gt[GDAL.GT_2_ROTATION_X];
double rotY=gt[GDAL.GT_4_ROTATION_Y];
double pixelWidth=gt[GDAL.GT_1_PIXEL_WIDTH];
double pixelHeight=gt[GDAL.GT_5_PIXEL_HEIGHT];
params.setValue(AVKey.PIXEL_WIDTH,pixelWidth);
params.setValue(AVKey.PIXEL_HEIGHT,pixelHeight);
if (minX == 0d && pixelWidth == 1d && rotX == 0d && maxY == 0d && rotY == 0d && pixelHeight == 1d) {
params.setValue(AVKey.COORDINATE_SYSTEM,AVKey.COORDINATE_SYSTEM_SCREEN);
}
else if (Angle.isValidLongitude(minX) && Angle.isValidLatitude(maxY) && Angle.isValidLongitude(maxX)&& Angle.isValidLatitude(minY)) {
if (null == srs) {
srs=createGeographicSRS();
}
else if (srs.IsGeographic() == 0) {
String msg=Logging.getMessage("generic.UnexpectedCoordinateSystem",srs.ExportToWkt());
Logging.logger().warning(msg);
srs=createGeographicSRS();
}
}
if (null != srs) {
Sector sector=null;
if (!params.hasKey(AVKey.SPATIAL_REFERENCE_WKT)) {
params.setValue(AVKey.SPATIAL_REFERENCE_WKT,srs.ExportToWkt());
}
if (srs.IsLocal() == 1) {
params.setValue(AVKey.COORDINATE_SYSTEM,AVKey.COORDINATE_SYSTEM_UNKNOWN);
String msg=Logging.getMessage("generic.UnknownCoordinateSystem",proj_wkt);
Logging.logger().severe(msg);
return params;
}
GDAL.Area area=new GDAL.Area(srs,ds);
if (null != area) {
params.setValue(AVKey.GDAL_AREA,area);
sector=area.getSector();
if (null != sector) {
params.setValue(AVKey.SECTOR,sector);
LatLon origin=new LatLon(sector.getMaxLatitude(),sector.getMinLongitude());
params.setValue(AVKey.ORIGIN,origin);
}
}
if (srs.IsGeographic() == 1) {
params.setValue(AVKey.COORDINATE_SYSTEM,AVKey.COORDINATE_SYSTEM_GEOGRAPHIC);
}
else if (srs.IsProjected() == 1) {
params.setValue(AVKey.COORDINATE_SYSTEM,AVKey.COORDINATE_SYSTEM_PROJECTED);
String projection=srs.GetAttrValue("PROJCS|PROJECTION");
String unit=srs.GetAttrValue("PROJCS|UNIT");
if (null != unit) {
unit=unit.toLowerCase();
if ("meter".equals(unit) || "meters".equals(unit) || "metre".equals(unit)|| "metres".equals(unit)) {
params.setValue(AVKey.PROJECTION_UNITS,AVKey.UNIT_METER);
}
else if ("foot".equals(unit) || "feet".equals(unit)) {
params.setValue(AVKey.PROJECTION_UNITS,AVKey.UNIT_FOOT);
}
else {
Logging.logger().warning(Logging.getMessage("generic.UnknownProjectionUnits",unit));
}
}
if (null != projection && 0 < projection.length()) {
params.setValue(AVKey.PROJECTION_NAME,projection);
}
}
else if (srs.IsLocal() == 1) {
params.setValue(AVKey.COORDINATE_SYSTEM,AVKey.COORDINATE_SYSTEM_SCREEN);
}
else {
params.setValue(AVKey.COORDINATE_SYSTEM,AVKey.COORDINATE_SYSTEM_UNKNOWN);
String msg=Logging.getMessage("generic.UnknownCoordinateSystem",proj_wkt);
Logging.logger().severe(msg);
}
}
if (!params.hasKey(AVKey.COORDINATE_SYSTEM)) {
params.setValue(AVKey.COORDINATE_SYSTEM,AVKey.COORDINATE_SYSTEM_UNKNOWN);
}
return params;
}
| Extracts raster parameters to an AVList |
public void measure(int reactTag,Callback callback){
mOperationsQueue.enqueueMeasure(reactTag,callback);
}
| Determines the location on screen, width, and height of the given view relative to the root view and returns the values via an async callback. |
public void add(Make make){
if (make == this) throw new IllegalArgumentException("Can't add self as a dependency.");
int p=_makeList.indexOf(make);
if (p >= 0) {
Make oldMake=_makeList.get(p);
if (oldMake != make) _makeList.add(p,make);
}
else _makeList.add(make);
}
| Adds a dependency. |
public void defineDictionary(String name,Map<String,Object> mapping){
dictionaries.put(name,mapping);
}
| Define a map for this group. <p> Not thread safe...do not keep adding these while you reference them.</p> |
public synchronized static void removeComp(String component){
skinSpecs.remove(component);
}
| Remove the specified component from the SkinSpecs map. |
protected void initLST(){
logger.fine("initializing Library Selection Table (LST)");
try {
if (lst == null) {
if (libraryBeanName != null) {
LibraryBean libraryBean=null;
BeanContext beanContext=getBeanContext();
if (beanContext == null) {
return;
}
for ( Object obj : beanContext) {
if (obj instanceof LibraryBean) {
LibraryBean lb=(LibraryBean)obj;
if (libraryBeanName.equals(lb.getName())) {
if (logger.isLoggable(Level.FINE)) {
logger.fine(getName() + ": setting library bean to " + lb.getName());
}
libraryBean=lb;
break;
}
}
}
if (libraryBean != null) {
lst=libraryBean.getLibrarySelectionTable();
warehouse=libraryBean.getWarehouse();
warehouse.setProperties(getPropertyPrefix(),props);
searchByFeatures=true;
box=null;
logger.fine("VPFLayer.initLST(libraryBean)");
}
else {
if (logger.isLoggable(Level.FINE)) {
logger.fine("Couldn't find libraryBean " + libraryBeanName + " to read VPF data");
}
}
}
else {
if (dataPaths == null) {
logger.info("VPFLayer|" + getName() + ": path not set");
}
else {
logger.fine("VPFLayer.initLST(dataPaths)");
lst=new LibrarySelectionTable(dataPaths);
lst.setCutoffScale(cutoffScale);
}
}
}
}
catch ( com.bbn.openmap.io.FormatException f) {
throw new java.lang.IllegalArgumentException(f.getMessage());
}
}
| initialize the library selection table. |
protected boolean isShortClassName(){
return useShortClassName;
}
| <p>Gets whether to output short or long class names.</p> |
public synchronized void disconnect(){
if (connection != null && connection.isOpen()) {
connection.close();
transaction=null;
setTransaction(null);
}
}
| Disconnects this <tt>ModbusSerialMaster</tt> from the slave. |
public InputStream openClassfile(String classname){
String jarname="/" + classname.replace('.','/') + ".class";
return thisClass.getResourceAsStream(jarname);
}
| Obtains a class file by <code>getResourceAsStream()</code>. |
public void notifyAdapter(){
try {
if (mSidePanelHandler.profileAdapter != null) mSidePanelHandler.profileAdapter.notifyDataSetChanged();
mSidePanelHandler.reloadProfileAdapter();
}
catch ( NullPointerException e) {
e.printStackTrace();
}
}
| Notify the side panel adapters, check for null if they're not available yet (rare case) |
public void dropAllCTFItems(Player player){
int amount;
for ( String ctfItemName : ctfItemNames) {
amount=player.getNumberOfEquipped(ctfItemName);
if (amount > 0) {
player.drop(ctfItemName,amount);
}
}
}
| Drops anything the player is carrying that is CTF-related. TODO: probably belongs elsewhere - more general support class. |
@Override protected void configure(){
bind(AuditDAO.class).to(AuditDAOImpl.class).in(Singleton.class);
bind(EventsDAO.class).to(EventsDAOImpl.class).in(Singleton.class);
bind(StateMachinesDAO.class).to(StateMachinesDAOImpl.class).in(Singleton.class);
bind(StatesDAO.class).to(StatesDAOImpl.class).in(Singleton.class);
final SessionFactoryProvider sessionFactoryProvider=new SessionFactoryProvider();
requestInjection(sessionFactoryProvider);
bind(SessionFactory.class).toProvider(sessionFactoryProvider).in(Singleton.class);
final TransactionInterceptor transactionInterceptor=new TransactionInterceptor(sessionFactoryProvider);
bindInterceptor(Matchers.not(Matchers.inPackage(MessageDao.class.getPackage())),Matchers.annotatedWith(Transactional.class),transactionInterceptor);
}
| Performs concrete bindings for interfaces |
@Override public void onBackup(ParcelFileDescriptor oldState,BackupDataOutput data,ParcelFileDescriptor newState) throws IOException {
mDispatcher.performBackup(oldState,data,newState);
}
| Run the backup process on each of the configured handlers. |
public Iterator lookupName(Identifier name){
List methodList=(List)lookupMap.get(name);
if (methodList == null) {
return Collections.emptyIterator();
}
return methodList.iterator();
}
| Returns an Iterator of all methods contained in the MethodSet which have a given name. |
@Deprecated public boolean isBackingStoreEnabled(){
return scrollMode == BACKINGSTORE_SCROLL_MODE;
}
| Returns <code>true</code> if this viewport is maintaining an offscreen image of its contents. |
public static boolean wildmatch(String str,String pat){
return wildmatch(str,pat,0,str.length(),0,pat.length());
}
| Match a string against a shell-style pattern. The only pattern characters recognized are <code>?</code>, standing for any one character, and <code>*</code>, standing for any string of characters, including the empty string. |
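A short usage sketch of the documented pattern language; the inputs are hypothetical and the expected results follow from the description above, since the two-argument method delegates to an overload not shown here:
public class WildmatchDemo {
    public static void main(String[] args) {
        // '*' matches any run of characters (including none), '?' matches exactly one character
        System.out.println(wildmatch("report.txt", "*.txt"));      // expected: true
        System.out.println(wildmatch("report.txt", "report.??"));  // expected: false, "txt" is three characters
        System.out.println(wildmatch("a", "?"));                   // expected: true
    }
    // wildmatch(...) as defined above is assumed to be declared here
}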
@DSGenerator(tool_name="Doppelganger",tool_version="2.0",generated_on="2014-02-25 10:37:58.963 -0500",hash_original_method="C2CB13972600F76BB48639CA034B1C26",hash_generated_method="BE93D2D64BA5AB54040206A47E6A751C") final synchronized void _sendDo(int option) throws IOException {
if (debug || debugoptions) {
System.err.println("DO: " + TelnetOption.getOption(option));
}
_output_.write(_COMMAND_DO);
_output_.write(option);
_output_.flush();
}
| Sends a DO. <p> |
public static Address fromP2SHScript(NetworkParameters params,Script scriptPubKey){
checkArgument(scriptPubKey.isPayToScriptHash(),"Not a P2SH script");
return fromP2SHHash(params,scriptPubKey.getPubKeyHash());
}
| Returns an Address that represents the script hash extracted from the given scriptPubKey |
public boolean isStandardPeriod(){
return PERIODTYPE_StandardCalendarPeriod.equals(getPeriodType());
}
| Standard Period |
@Override public boolean eIsSet(int featureID){
switch (featureID) {
case UmplePackage.INVARIANT___ANONYMOUS_INVARIANT_11:
return anonymous_invariant_1_1 != null && !anonymous_invariant_1_1.isEmpty();
case UmplePackage.INVARIANT___ANONYMOUS_INVARIANT_21:
return anonymous_invariant_2_1 != null && !anonymous_invariant_2_1.isEmpty();
}
return super.eIsSet(featureID);
}
| <!-- begin-user-doc --> <!-- end-user-doc --> |
@Override public void announceSeeds(){
BufferedReader reader=new BufferedReader(textSource.obtainReader());
try {
announceSeedsFromReader(reader);
}
finally {
IOUtils.closeQuietly(reader);
}
}
| Announce all seeds from configured source to SeedListeners (including nonseed lines mixed in). |
private boolean indexExists(String table) throws SQLException {
return indexExists(table,null);
}
| Checks if some index (besides the PRIMARY-Index) exists in a given table. |
public boolean hasAllSource(){
boolean yes=false;
int expect=getSourceExpectedCount();
int have=getSourceCount();
if (expect == -1 && isSwdLoading()) yes=false;
else if (expect == have) yes=true;
else yes=false;
return yes;
}
| This method returns true once we have all the scripts that we expect to ever have. We can get the information about how many scripts we should get from two sources: 1) we may get an InSwfInfo message from the player which contains this value, and 2) we may get an InNumScript message which contains a script count. A small caveat, of course, is that in case 1 we may also not get a value if the swd has not been fully processed by the player yet. |
public void removeVariables(Set<String> variables){
for ( String var : variables) {
range.remove(var);
}
}
| Remove the variables from the value range |
public ContentObserver(Handler handler){
mHandler=handler;
}
| Creates a content observer. |
private int instrumentNotNull(File dir,final InstrumentationClassFinder finder){
int instrumented=0;
final File[] files=dir.listFiles();
for (int i=0; i < files.length; i++) {
File file=files[i];
final String name=file.getName();
if (name.endsWith(".class")) {
final String path=file.getPath();
log("Adding @NotNull assertions to " + path,Project.MSG_VERBOSE);
try {
final FileInputStream inputStream=new FileInputStream(file);
try {
ClassReader reader=new FailSafeClassReader(inputStream);
int version=getClassFileVersion(reader);
if (version >= Opcodes.V1_5 && !shouldBeSkippedByAnnotationPattern(reader)) {
ClassWriter writer=new InstrumenterClassWriter(getAsmClassWriterFlags(version),finder);
if (NotNullVerifyingInstrumenter.processClassFile(reader,writer)) {
final FileOutputStream fileOutputStream=new FileOutputStream(path);
try {
fileOutputStream.write(writer.toByteArray());
instrumented++;
}
finally {
fileOutputStream.close();
}
}
}
}
finally {
inputStream.close();
}
}
catch ( IOException e) {
log("Failed to instrument @NotNull assertion for " + path + ": "+ e.getMessage(),Project.MSG_WARN);
}
catch ( Exception e) {
fireError("@NotNull instrumentation failed for " + path + ": "+ e.toString());
}
}
else if (file.isDirectory()) {
instrumented+=instrumentNotNull(file,finder);
}
}
return instrumented;
}
| Instrument classes with NotNull annotations |
public void run(){
List<SsaInsn> useList=ssaMeth.getUseListForRegister(regV);
for ( SsaInsn insn : useList) {
nextFunction=NextFunction.DONE;
if (insn instanceof PhiInsn) {
PhiInsn phi=(PhiInsn)insn;
for ( SsaBasicBlock pred : phi.predBlocksForReg(regV,ssaMeth)) {
blockN=pred;
nextFunction=NextFunction.LIVE_OUT_AT_BLOCK;
handleTailRecursion();
}
}
else {
blockN=insn.getBlock();
statementIndex=blockN.getInsns().indexOf(insn);
if (statementIndex < 0) {
throw new RuntimeException("insn not found in it's own block");
}
nextFunction=NextFunction.LIVE_IN_AT_STATEMENT;
handleTailRecursion();
}
}
int nextLiveOutBlock;
while ((nextLiveOutBlock=liveOutBlocks.nextSetBit(0)) >= 0) {
blockN=ssaMeth.getBlocks().get(nextLiveOutBlock);
liveOutBlocks.clear(nextLiveOutBlock);
nextFunction=NextFunction.LIVE_OUT_AT_BLOCK;
handleTailRecursion();
}
}
| From Appel algorithm 19.17. |
private boolean isAtom(){
return "application".equals(type) && "atom+xml".equals(subType);
}
| Returns true if this is an atom content type. |
@Override public void eUnset(int featureID){
switch (featureID) {
case RegularExpressionPackage.REGULAR_EXPRESSION_FLAGS__FLAGS:
getFlags().clear();
return;
}
super.eUnset(featureID);
}
| <!-- begin-user-doc --> <!-- end-user-doc --> |
protected boolean calculateSize(){
p_width=0;
p_height=0;
if (m_barcode == null) return true;
p_width=m_barcode.getWidth();
p_height=m_barcode.getHeight();
if (p_width * p_height == 0) return true;
m_scaleFactor=1f;
if (p_maxWidth != 0 && p_width > p_maxWidth) m_scaleFactor=p_maxWidth / p_width;
if (p_maxHeight != 0 && p_height > p_maxHeight && p_maxHeight / p_height < m_scaleFactor) m_scaleFactor=p_maxHeight / p_height;
p_width=(float)m_scaleFactor * p_width;
p_height=(float)m_scaleFactor * p_height;
return true;
}
| Layout and Calculate Size Set p_width & p_height |
private void tryGrow(Object[] array,int oldCap){
lock.unlock();
Object[] newArray=null;
if (allocationSpinLock == 0 && UNSAFE.compareAndSwapInt(this,allocationSpinLockOffset,0,1)) {
try {
int newCap=oldCap + ((oldCap < 64) ? (oldCap + 2) : (oldCap >> 1));
if (newCap - MAX_ARRAY_SIZE > 0) {
int minCap=oldCap + 1;
if (minCap < 0 || minCap > MAX_ARRAY_SIZE) throw new OutOfMemoryError();
newCap=MAX_ARRAY_SIZE;
}
if (newCap > oldCap && queue == array) newArray=new Object[newCap];
}
finally {
allocationSpinLock=0;
}
}
if (newArray == null) Thread.yield();
lock.lock();
if (newArray != null && queue == array) {
queue=newArray;
System.arraycopy(array,0,newArray,0,oldCap);
}
}
| Tries to grow array to accommodate at least one more element (but normally expand by about 50%), giving up (allowing retry) on contention (which we expect to be rare). Call only while holding lock. |
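A tiny sketch of the growth rule used above (roughly double plus two for small arrays, otherwise grow by about 50 percent), evaluated for a few hypothetical capacities:
public class GrowthRuleDemo {
    public static void main(String[] args) {
        int[] oldCaps = {10, 32, 64, 100};
        for (int oldCap : oldCaps) {
            int newCap = oldCap + ((oldCap < 64) ? (oldCap + 2) : (oldCap >> 1));
            System.out.println(oldCap + " -> " + newCap);   // 10 -> 22, 32 -> 66, 64 -> 96, 100 -> 150
        }
    }
}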