code | nl
---|---|
public boolean isEmpty(){
return (m_index == -1);
}
| Tests if this stack is empty. |
@DSSafe(DSCat.SAFE_LIST) @DSGenerator(tool_name="Doppelganger",tool_version="2.0",generated_on="2014-09-03 14:59:52.028 -0400",hash_original_method="95976D81CB90569BB8CC4A3AB0B84125",hash_generated_method="24F0FD499D929F2BE98900B33186C3D7") @Override public String toString(){
return new String(toByteArray());
}
| Gets the current contents of this byte stream as a string. |
public Cursor newCursor(String storeName) throws UnknownStoreException {
IStore<ByteArray,byte[]> store=getStore(storeName);
int cursorId=rpcService.getTransactionId();
Cursor cursor=new Cursor(cursorId,store.entries());
cursorMap.put(Integer.valueOf(cursorId),cursor);
return cursor;
}
| Allocate a new cursor for the given store name |
@Override public void run(){
amIActive=true;
String inputHeader=null;
String outputHeader=null;
int row, col;
double z;
int progress=0;
int i, a;
long counter=0;
int loopNum=0;
int[] dX={1,1,1,0,-1,-1,-1,0};
int[] dY={-1,0,1,1,1,0,-1,-1};
int[][] elements={{0,1,4,5,6,7},{0,1,2,5,6,7},{0,1,2,3,6,7},{0,1,2,3,4,7},{0,1,2,3,4,5},{1,2,3,4,5,6},{2,3,4,5,6,7},{0,3,4,5,6,7}};
double[] neighbours=new double[8];
boolean patternMatch=false;
int numIterations=10;
if (args.length <= 0) {
showFeedback("Plugin parameters have not been set.");
return;
}
inputHeader=args[0];
outputHeader=args[1];
numIterations=Integer.parseInt(args[2]);
if ((inputHeader == null) || (outputHeader == null)) {
showFeedback("One or more of the input parameters have not been set properly.");
return;
}
try {
WhiteboxRaster image=new WhiteboxRaster(inputHeader,"r");
int nRows=image.getNumberRows();
int nCols=image.getNumberColumns();
double noData=image.getNoDataValue();
WhiteboxRaster output=new WhiteboxRaster(outputHeader,"rw",inputHeader,WhiteboxRaster.DataType.FLOAT,noData);
output.setPreferredPalette("black_white.pal");
double[] data=null;
for (row=0; row < nRows; row++) {
data=image.getRowValues(row);
for (col=0; col < nCols; col++) {
if (data[col] > 0) {
output.setValue(row,col,1);
}
else if (data[col] == noData) {
output.setValue(row,col,noData);
}
else {
output.setValue(row,col,0);
}
}
if (cancelOp) {
cancelOperation();
return;
}
progress=(int)(100f * row / (nRows - 1));
updateProgress(progress);
}
image.close();
output.flush();
for (int k=0; k < numIterations; k++) {
loopNum++;
updateProgress("Loop Number " + loopNum + ":",0);
counter=0;
for (row=0; row < nRows; row++) {
for (col=0; col < nCols; col++) {
z=output.getValue(row,col);
if (z == 1 && z != noData) {
for (i=0; i < 8; i++) {
neighbours[i]=output.getValue(row + dY[i],col + dX[i]);
}
for (a=0; a < 8; a++) {
patternMatch=true;
for (i=0; i < elements[a].length; i++) {
if (neighbours[elements[a][i]] != 0) {
patternMatch=false;
}
}
if (patternMatch) {
output.setValue(row,col,0);
counter++;
}
}
}
}
if (cancelOp) {
cancelOperation();
return;
}
progress=(int)(100f * row / (nRows - 1));
updateProgress(progress);
}
if (counter == 0) {
break;
}
}
output.addMetadataEntry("Created by the " + getDescriptiveName() + " tool.");
output.addMetadataEntry("Created on " + new Date());
output.close();
returnData(outputHeader);
}
catch ( OutOfMemoryError oe) {
myHost.showFeedback("An out-of-memory error has occurred during operation.");
}
catch ( Exception e) {
myHost.showFeedback("An error has occurred during operation. See log file for details.");
myHost.logException("Error in " + getDescriptiveName(),e);
}
finally {
updateProgress("Progress: ",0);
amIActive=false;
myHost.pluginComplete();
}
}
| Used to execute this plugin tool. |
public static void main(final String[] args){
DOMTestCase.doMain(characterdatareplacedataend.class,args);
}
| Runs this test from the command line. |
public ToolMetaBuilder start(final ToolMeta meta){
return new ToolMetaBuilder(meta);
}
| Returns new builder of item meta data, based on given one. |
public void loadAttributes(Element e3){
if (e3 != null) {
List<Element> l=e3.getChildren("keyvaluepair");
for ( Element fn : l) {
String key=fn.getChild("key").getText();
String value=fn.getChild("value").getText();
this.putAttribute(key,value);
}
}
}
| Loads attribute key/value pairs from a JDOM element. |
public static boolean neededFor(int uc){
return (uc >= UCS4_MIN) && (uc <= UCS4_MAX);
}
| Tells whether or not the given UCS-4 character must be represented as a surrogate pair in UTF-16. |
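A quick standard-library illustration of the same idea (assuming UCS4_MIN/UCS4_MAX cover the supplementary range U+10000..U+10FFFF, which is not shown in this snippet): code points outside the BMP need two UTF-16 code units.
public class SurrogateCheckDemo {
    public static void main(String[] args) {
        int smiley = 0x1F600; // U+1F600, a supplementary code point (outside the BMP)
        int latinA = 0x0041;  // 'A', inside the BMP
        System.out.println(Character.toChars(smiley).length); // 2 -> needs a surrogate pair
        System.out.println(Character.toChars(latinA).length); // 1 -> single char
        System.out.println(Character.isSupplementaryCodePoint(smiley)); // true
    }
}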
public Coordinate centre(){
if (isNull()) return null;
return new Coordinate((getMinX() + getMaxX()) / 2.0,(getMinY() + getMaxY()) / 2.0);
}
| Computes the coordinate of the centre of this envelope (as long as it is non-null). |
public synchronized void stopPreview(){
if (autoFocusManager != null) {
autoFocusManager.stop();
autoFocusManager=null;
}
if (camera != null && previewing) {
camera.stopPreview();
previewCallback.setHandler(null,0);
previewing=false;
}
}
| Tells the camera to stop drawing preview frames. |
private CIMObjectPath createOrSelectSLOBasedStorageGroup(StorageSystem storage,URI exportMaskURI,Collection<Initiator> initiators,VolumeURIHLU[] volumeURIHLUs,String parentGroupName,Map<StorageGroupPolicyLimitsParam,CIMObjectPath> newlyCreatedChildVolumeGroups,TaskCompleter taskCompleter) throws Exception {
List<CIMObjectPath> childVolumeGroupsToBeAddedToParentGroup=new ArrayList<CIMObjectPath>();
String groupName=null;
CIMObjectPath groupPath=null;
ExportMask mask=_dbClient.queryObject(ExportMask.class,exportMaskURI);
ListMultimap<StorageGroupPolicyLimitsParam,VolumeURIHLU> policyToVolumeGroup=ArrayListMultimap.create();
WBEMClient client=_helper.getConnection(storage).getCimClient();
for ( VolumeURIHLU volumeUriHLU : volumeURIHLUs) {
policyToVolumeGroup.put(new StorageGroupPolicyLimitsParam(volumeUriHLU,storage,_helper),volumeUriHLU);
}
_log.info("{} Groups generated based on grouping volumes by fast policy",policyToVolumeGroup.size());
for ( Entry<StorageGroupPolicyLimitsParam,Collection<VolumeURIHLU>> policyToVolumeGroupEntry : policyToVolumeGroup.asMap().entrySet()) {
List<CIMObjectPath> childVolumeGroupsToBeAdded=new ArrayList<CIMObjectPath>();
StorageGroupPolicyLimitsParam storageGroupPolicyLimitsParam=policyToVolumeGroupEntry.getKey();
ListMultimap<String,VolumeURIHLU> expectedVolumeHluMap=ControllerUtils.getVolumeNativeGuids(policyToVolumeGroupEntry.getValue(),_dbClient);
Map<String,Set<String>> existingGroupPaths;
_log.info("Running Storage Group Selection Process");
existingGroupPaths=_helper.findAnyStorageGroupsCanBeReUsed(storage,expectedVolumeHluMap,storageGroupPolicyLimitsParam);
_log.info("Existing Storage Groups Found :" + Joiner.on("\t").join(existingGroupPaths.keySet()));
if (existingGroupPaths.size() > 0) {
childVolumeGroupsToBeAdded.addAll(_helper.constructMaskingGroupPathsFromNames(existingGroupPaths.keySet(),storage));
}
Set<String> volumesInExistingStorageGroups=_helper.constructVolumeNativeGuids(existingGroupPaths.values());
_log.debug("Volumes part of existing reusable Storage Groups {}",Joiner.on("\t").join(volumesInExistingStorageGroups));
Set<String> diff=Sets.difference(expectedVolumeHluMap.asMap().keySet(),volumesInExistingStorageGroups);
_log.debug("Remaining Volumes, for which new Storage Group needs to be created",Joiner.on("\t").join(diff));
if (!diff.isEmpty()) {
VolumeURIHLU[] volumeURIHLU=ControllerUtils.constructVolumeUriHLUs(diff,expectedVolumeHluMap);
groupName=generateStorageGroupName(storage,mask,initiators,storageGroupPolicyLimitsParam);
_log.debug("Group Name Created :",groupName);
groupPath=createVolumeGroup(storage,groupName,volumeURIHLU,taskCompleter,true);
_log.info("{} Volume Group created on Array {}",storage.getSerialNumber());
}
if (null != groupPath) {
newlyCreatedChildVolumeGroups.put(storageGroupPolicyLimitsParam,groupPath);
childVolumeGroupsToBeAdded.add(groupPath);
}
childVolumeGroupsToBeAddedToParentGroup.addAll(childVolumeGroupsToBeAdded);
}
parentGroupName=_helper.generateGroupName(_helper.getExistingStorageGroupsFromArray(storage),parentGroupName);
CIMObjectPath cascadedGroupPath=createCascadedVolumeGroup(storage,parentGroupName,childVolumeGroupsToBeAddedToParentGroup,taskCompleter);
for ( Entry<StorageGroupPolicyLimitsParam,CIMObjectPath> createdChildVolumeGroupEntry : newlyCreatedChildVolumeGroups.entrySet()) {
CIMObjectPath childGroupPath=createdChildVolumeGroupEntry.getValue();
StorageGroupPolicyLimitsParam storageGroupPolicyLimitsParam=createdChildVolumeGroupEntry.getKey();
if (storageGroupPolicyLimitsParam.isHostIOLimitBandwidthSet()) {
_helper.updateHostIOLimitBandwidth(client,childGroupPath,storageGroupPolicyLimitsParam.getHostIOLimitBandwidth());
}
if (storageGroupPolicyLimitsParam.isHostIOLimitIOPsSet()) {
_helper.updateHostIOLimitIOPs(client,childGroupPath,storageGroupPolicyLimitsParam.getHostIOLimitIOPs());
}
}
return cascadedGroupPath;
}
| This is used only for VMAX3. |
public static boolean matchesExtension(String filename,String ext){
return filename.endsWith("." + ext);
}
| Returns true if the given filename ends with the given extension. One should provide a <i>pure</i> extension, without '.'. |
@Override public void eSet(int featureID,Object newValue){
switch (featureID) {
case BasePackage.DOMAIN_ELEMENT__DOMAIN_ID:
setDomainID((String)newValue);
return;
}
super.eSet(featureID,newValue);
}
| <!-- begin-user-doc --> <!-- end-user-doc --> |
public boolean isPreserveHourOfDayAcrossDaylightSavings(){
return preserveHourOfDayAcrossDaylightSavings;
}
| If intervals are a day or greater, this property (set to true) will cause the firing of the trigger to always occur at the same time of day, (the time of day of the startTime) regardless of daylight saving time transitions. Default value is false. <p> For example, without the property set, your trigger may have a start time of 9:00 am on March 1st, and a repeat interval of 2 days. But after the daylight saving transition occurs, the trigger may start firing at 8:00 am every other day. </p> <p> If however, the time of day does not exist on a given day to fire (e.g. 2:00 am in the United States on the days of daylight saving transition), the trigger will go ahead and fire one hour off on that day, and then resume the normal hour on other days. If you wish for the trigger to never fire at the "wrong" hour, then you should set the property skipDayIfHourDoesNotExist. </p> |
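For context, a minimal sketch of how this flag is typically set when building a calendar-interval trigger, assuming the Quartz 2.x builder API; the trigger name, group, and start time are made up for illustration.
import java.util.Date;
import org.quartz.CalendarIntervalScheduleBuilder;
import org.quartz.Trigger;
import org.quartz.TriggerBuilder;

public class DstTriggerSketch {
    public static Trigger buildTrigger(Date startAtNineAm) {
        // Fire every 2 days at the start time's hour, even across DST transitions.
        return TriggerBuilder.newTrigger()
            .withIdentity("exampleTrigger", "exampleGroup") // hypothetical names
            .startAt(startAtNineAm)
            .withSchedule(CalendarIntervalScheduleBuilder.calendarIntervalSchedule()
                .withIntervalInDays(2)
                .preserveHourOfDayAcrossDaylightSavings(true))
            .build();
    }
}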
public static final Uri buildFolderUri(long folderId){
return ContentUris.withAppendedId(CONTENT_URI_DEFAULT_FOLDER,folderId);
}
| Builds a URI that points to a specific folder. |
public synchronized static void closeAll(){
for ( HyperGraph graph : dbs.values()) {
if (graph.isOpen()) try {
graph.close();
}
catch ( Throwable t) {
System.err.println("Problem closing HyperGraphDB instance at " + graph.getLocation() + ", stack trace follows...");
t.printStackTrace(System.err);
}
}
dbs.clear();
}
| <p> Close all currently open <code>HyperGraph</code> instances. This is generally done by a HyperGraphDB internal shutdown hook registered with the JVM. But if you need more control over the shutdown sequence, this method will gracefully do so. </p> |
public void add(Permission permission){
if (!(permission instanceof DelegationPermission)) throw new IllegalArgumentException("invalid permission: " + permission);
if (isReadOnly()) throw new SecurityException("attempt to add a Permission to a readonly PermissionCollection");
synchronized (this) {
perms.add(0,permission);
}
}
| Adds a permission to the DelegationPermissions. The key for the hash is the name. |
public boolean empty(){
return isEmpty();
}
| Returns whether the stack is empty or not. |
private void showFeedback(String message){
if (myHost != null) {
myHost.showFeedback(message);
}
else {
System.out.println(message);
}
}
| Used to communicate feedback pop-up messages between a plugin tool and the main Whitebox user-interface. |
private static boolean findActivity(ArrayList<AppInfo> apps,ComponentName component,UserHandleCompat user){
final int N=apps.size();
for (int i=0; i < N; i++) {
final AppInfo info=apps.get(i);
if (info.user.equals(user) && info.componentName.equals(component)) {
return true;
}
}
return false;
}
| Returns whether <em>apps</em> contains <em>component</em>. |
private void checkAccessTime(ClientIdentification.Agent agent,final DigestURL url){
if (!url.isLocal()) {
String host=url.getHost();
final Long lastAccess=accessTime.get(host);
long wait=0;
if (lastAccess != null) wait=Math.max(0,agent.minimumDelta + lastAccess.longValue() - System.currentTimeMillis());
if (wait > 0) {
final long untilTime=System.currentTimeMillis() + wait;
cleanupAccessTimeTable(untilTime);
if (System.currentTimeMillis() < untilTime) {
long frcdslp=untilTime - System.currentTimeMillis();
LoaderDispatcher.log.info("Forcing sleep of " + frcdslp + " ms for host "+ host);
try {
Thread.sleep(frcdslp);
}
catch ( final InterruptedException ee) {
}
}
}
}
}
| Check access time: this is a double-check (we may have already checked in the balancer) to make sure that we don't DoS the target by mistake. |
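The politeness idea described here (enforce a minimum gap between requests to the same host) can be sketched independently of the YaCy classes; the class, field names, and delta value below are illustrative assumptions, not part of the original code.
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Minimal per-host politeness sketch: sleep until minimumDelta ms have passed
// since the last access to the same host.
public class HostThrottle {
    private final Map<String, Long> lastAccess = new ConcurrentHashMap<>();
    private final long minimumDelta; // e.g. 500 ms (illustrative)

    public HostThrottle(long minimumDeltaMillis) {
        this.minimumDelta = minimumDeltaMillis;
    }

    public void awaitTurn(String host) throws InterruptedException {
        Long last = lastAccess.get(host);
        if (last != null) {
            long wait = minimumDelta + last - System.currentTimeMillis();
            if (wait > 0) {
                Thread.sleep(wait);
            }
        }
        lastAccess.put(host, System.currentTimeMillis());
    }
}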
@SuppressForbidden(reason="System.out required: command line tool") public static void main(java.lang.String[] args) throws Exception {
if (args.length < 1) {
return;
}
args[0]=args[0].toUpperCase(Locale.ROOT);
backward=args[0].charAt(0) == '-';
int qq=(backward) ? 1 : 0;
boolean storeorig=false;
if (args[0].charAt(qq) == '0') {
storeorig=true;
qq++;
}
multi=args[0].charAt(qq) == 'M';
if (multi) {
qq++;
}
String charset=System.getProperty("egothor.stemmer.charset","UTF-8");
char optimizer[]=new char[args[0].length() - qq];
for (int i=0; i < optimizer.length; i++) {
optimizer[i]=args[0].charAt(qq + i);
}
for (int i=1; i < args.length; i++) {
Diff diff=new Diff();
allocTrie();
System.out.println(args[i]);
try (LineNumberReader in=new LineNumberReader(Files.newBufferedReader(Paths.get(args[i]),Charset.forName(charset)))){
for (String line=in.readLine(); line != null; line=in.readLine()) {
try {
line=line.toLowerCase(Locale.ROOT);
StringTokenizer st=new StringTokenizer(line);
String stem=st.nextToken();
if (storeorig) {
trie.add(stem,"-a");
}
while (st.hasMoreTokens()) {
String token=st.nextToken();
if (token.equals(stem) == false) {
trie.add(token,diff.exec(token,stem));
}
}
}
catch ( java.util.NoSuchElementException x) {
}
}
}
Optimizer o=new Optimizer();
Optimizer2 o2=new Optimizer2();
Lift l=new Lift(true);
Lift e=new Lift(false);
Gener g=new Gener();
for (int j=0; j < optimizer.length; j++) {
String prefix;
switch (optimizer[j]) {
case 'G':
trie=trie.reduce(g);
prefix="G: ";
break;
case 'L':
trie=trie.reduce(l);
prefix="L: ";
break;
case 'E':
trie=trie.reduce(e);
prefix="E: ";
break;
case '2':
trie=trie.reduce(o2);
prefix="2: ";
break;
case '1':
trie=trie.reduce(o);
prefix="1: ";
break;
default :
continue;
}
trie.printInfo(System.out,prefix + " ");
}
try (DataOutputStream os=new DataOutputStream(new BufferedOutputStream(Files.newOutputStream(Paths.get(args[i] + ".out"))))){
os.writeUTF(args[0]);
trie.store(os);
}
}
}
| Entry point to the Compile application. <p> This program takes any number of arguments: the first is the name of the desired stemming algorithm to use (a list is available in the package description), all of the rest should be the path or paths to a file or files containing a stemmer table to compile. |
public ConnectionConfig(jmri.jmrix.NetworkPortAdapter p){
super(p);
}
| Ctor for an object being created during load process; Swing init is deferred. |
@Override public NotificationChain eInverseRemove(InternalEObject otherEnd,int featureID,NotificationChain msgs){
switch (featureID) {
case N4JSPackage.PROPERTY_METHOD_DECLARATION__BODY:
return basicSetBody(null,msgs);
case N4JSPackage.PROPERTY_METHOD_DECLARATION__LOK:
return basicSet_lok(null,msgs);
case N4JSPackage.PROPERTY_METHOD_DECLARATION__FPARS:
return ((InternalEList<?>)getFpars()).basicRemove(otherEnd,msgs);
case N4JSPackage.PROPERTY_METHOD_DECLARATION__RETURN_TYPE_REF:
return basicSetReturnTypeRef(null,msgs);
case N4JSPackage.PROPERTY_METHOD_DECLARATION__TYPE_VARS:
return ((InternalEList<?>)getTypeVars()).basicRemove(otherEnd,msgs);
case N4JSPackage.PROPERTY_METHOD_DECLARATION__DECLARED_TYPE_REF:
return basicSetDeclaredTypeRef(null,msgs);
case N4JSPackage.PROPERTY_METHOD_DECLARATION__BOGUS_TYPE_REF:
return basicSetBogusTypeRef(null,msgs);
}
return super.eInverseRemove(otherEnd,featureID,msgs);
}
| <!-- begin-user-doc --> <!-- end-user-doc --> |
public void characters(StylesheetHandler handler,char ch[],int start,int length) throws org.xml.sax.SAXException {
handler.error(XSLTErrorResources.ER_CHARS_NOT_ALLOWED,null,null);
}
| Receive notification of character data inside an element. |
private static boolean isDeprecatedSSLProtocol(){
return ("com.sun.net.ssl.internal.www.protocol".equals(System.getProperty("java.protocol.handler.pkgs")));
}
| Return true if the protocol handler property java.protocol.handler.pkgs is set to Sun's deprecated com.sun.net.ssl.internal.www.protocol handler, false otherwise. |
public static String formatTimestamp(Timestamp timestamp,String format){
if (null != timestamp) {
SimpleDateFormat sdf=new SimpleDateFormat(format);
return sdf.format(timestamp);
}
else {
return "";
}
}
| Formats the given timestamp using the supplied format pattern (e.g. HH:mm). |
public int compareTo(cp_info constant_pool[],cp_info cp,cp_info cp_constant_pool[]){
int i;
if (tag != cp.tag) return tag - cp.tag;
CONSTANT_Methodref_info cu=(CONSTANT_Methodref_info)cp;
i=constant_pool[class_index].compareTo(constant_pool,cp_constant_pool[cu.class_index],cp_constant_pool);
if (i != 0) return i;
return constant_pool[name_and_type_index].compareTo(constant_pool,cp_constant_pool[cu.name_and_type_index],cp_constant_pool);
}
| Compares this entry with another cp_info object (which may reside in a different constant pool). |
protected boolean isRelevantToUser(EReference reference){
return TypesPackage.Literals.SYNTAX_RELATED_TELEMENT__AST_ELEMENT != reference && TypesPackage.Literals.TSTRUCT_MEMBER__DEFINED_MEMBER != reference;
}
| Returns <code>true</code> if the reference should be presented to the user. |
public void startPreview(){
if (camera != null && !previewing) {
camera.startPreview();
previewing=true;
}
}
| Asks the camera hardware to begin drawing preview frames to the screen. |
public final void flushLayoutCache(){
synchronized (mCachedXmlBlockIds) {
final int num=mCachedXmlBlockIds.length;
for (int i=0; i < num; i++) {
mCachedXmlBlockIds[i]=-0;
XmlBlock oldBlock=mCachedXmlBlocks[i];
if (oldBlock != null) {
oldBlock.close();
}
mCachedXmlBlocks[i]=null;
}
}
}
| Call this to remove all cached loaded layout resources from the Resources object. Only intended for use with performance testing tools. |
public TableRowElement(){
super("tr",new ListElement());
}
| Construct an empty row |
public Set<Map.Entry<String,Object>> valueSet(){
return mValues.entrySet();
}
| Returns a set of all of the keys and values |
@SuppressWarnings("unchecked") @protection static <E>Set<E> dynamicallyCastSet(Set<?> set,Class<E> type){
return dynamicallyCastCollection(set,type,Set.class);
}
| Dynamically check that the members of the set are all instances of the given type (or null). |
@Override public NotificationChain eInverseRemove(InternalEObject otherEnd,int featureID,NotificationChain msgs){
switch (featureID) {
case SRuntimePackage.COMPOSITE_SLOT__SLOTS:
return ((InternalEList<?>)getSlots()).basicRemove(otherEnd,msgs);
}
return super.eInverseRemove(otherEnd,featureID,msgs);
}
| <!-- begin-user-doc --> <!-- end-user-doc --> |
public static String padRight(String s,int n){
return String.format("%1$-" + n + "s",s);
}
| Pad white space to the right of the string to the given length |
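A tiny usage sketch of the format string used above: %1$-Ns left-justifies the first argument in a field of width N, so the padding ends up on the right.
public class PadRightDemo {
    public static void main(String[] args) {
        // Equivalent to padRight("abc", 6): left-justify "abc" in a 6-character field.
        System.out.println("[" + String.format("%1$-6s", "abc") + "]"); // prints [abc   ]
    }
}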
public static boolean isCompressed(String filename){
if (filename == null) {
return false;
}
File test=new File(filename);
if (test.exists()) {
return isCompressed(test);
}
int len=filename.length();
return len > 2 && (filename.substring(len - GZIP_EXTENTION.length()).equalsIgnoreCase(GZIP_EXTENTION) || filename.substring(len - COMPRESS_EXTENTION.length()).equals(COMPRESS_EXTENTION) || filename.substring(len - BZIP2_EXTENTION.length()).equals(BZIP2_EXTENTION));
}
| Is a file compressed? (The magic number in the first 2 bytes is used to detect the compression.) |
boolean shouldMerge(){
return mergeState.segmentInfo.maxDoc() > 0;
}
| True if any merging should happen |
public void addDaemonThreadCount(int daemonThreadCount){
this.totalDaemonThreadCount+=daemonThreadCount;
}
| increases the daemon thread count by the given number. |
@DSGenerator(tool_name="Doppelganger",tool_version="2.0",generated_on="2013-12-30 12:56:50.929 -0500",hash_original_method="F21E97E0131CF96F90294027D325F2F1",hash_generated_method="6C0974834F4ED5E3334D2C54E96A4062") public boolean isUnshared(){
return unshared;
}
| Indicates whether this field is unshared. |
public boolean isEqualNode(Node arg){
if (arg == this) {
return true;
}
if (arg.getNodeType() != getNodeType()) {
return false;
}
if (getNodeName() == null) {
if (arg.getNodeName() != null) {
return false;
}
}
else if (!getNodeName().equals(arg.getNodeName())) {
return false;
}
if (getLocalName() == null) {
if (arg.getLocalName() != null) {
return false;
}
}
else if (!getLocalName().equals(arg.getLocalName())) {
return false;
}
if (getNamespaceURI() == null) {
if (arg.getNamespaceURI() != null) {
return false;
}
}
else if (!getNamespaceURI().equals(arg.getNamespaceURI())) {
return false;
}
if (getPrefix() == null) {
if (arg.getPrefix() != null) {
return false;
}
}
else if (!getPrefix().equals(arg.getPrefix())) {
return false;
}
if (getNodeValue() == null) {
if (arg.getNodeValue() != null) {
return false;
}
}
else if (!getNodeValue().equals(arg.getNodeValue())) {
return false;
}
return true;
}
| Tests whether two nodes are equal. <br>This method tests for equality of nodes, not sameness (i.e., whether the two nodes are references to the same object) which can be tested with <code>Node.isSameNode</code>. All nodes that are the same will also be equal, though the reverse may not be true. <br>Two nodes are equal if and only if the following conditions are satisfied: The two nodes are of the same type.The following string attributes are equal: <code>nodeName</code>, <code>localName</code>, <code>namespaceURI</code>, <code>prefix</code>, <code>nodeValue</code> , <code>baseURI</code>. This is: they are both <code>null</code>, or they have the same length and are character for character identical. The <code>attributes</code> <code>NamedNodeMaps</code> are equal. This is: they are both <code>null</code>, or they have the same length and for each node that exists in one map there is a node that exists in the other map and is equal, although not necessarily at the same index.The <code>childNodes</code> <code>NodeLists</code> are equal. This is: they are both <code>null</code>, or they have the same length and contain equal nodes at the same index. This is true for <code>Attr</code> nodes as for any other type of node. Note that normalization can affect equality; to avoid this, nodes should be normalized before being compared. <br>For two <code>DocumentType</code> nodes to be equal, the following conditions must also be satisfied: The following string attributes are equal: <code>publicId</code>, <code>systemId</code>, <code>internalSubset</code>.The <code>entities</code> <code>NamedNodeMaps</code> are equal.The <code>notations</code> <code>NamedNodeMaps</code> are equal. <br>On the other hand, the following do not affect equality: the <code>ownerDocument</code> attribute, the <code>specified</code> attribute for <code>Attr</code> nodes, the <code>isWhitespaceInElementContent</code> attribute for <code>Text</code> nodes, as well as any user data or event listeners registered on the nodes. |
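A small JDK-only usage sketch of the contract described above (element names are arbitrary): structurally identical elements from different documents compare equal, a renamed element does not, and equality is distinct from sameness.
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;

public class IsEqualNodeDemo {
    public static void main(String[] args) throws Exception {
        DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
        Document docA = builder.newDocument();
        Document docB = builder.newDocument();

        Element a = docA.createElement("item");
        Element b = docB.createElement("item");
        Element c = docB.createElement("other");

        System.out.println(a.isEqualNode(b)); // true: same type, name, value, (empty) attributes and children
        System.out.println(a.isEqualNode(c)); // false: node names differ
        System.out.println(a.isSameNode(b));  // false: equal but not the same object
    }
}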
public static void loadComments(Element commentNode,ICommentHolder parent,Comment.Type type){
loadComments(commentNode,parent,0,false,false,type);
}
| Extract comments recursively. |
@Override public void merge(Descriptor other){
for ( DescriptorTag tag : descriptorTagFactory.getAllTags()) {
Identifier identifier=tag.getIdentifier();
if (identifier != null) {
MergeStrategy strategy=getMergeStrategy(tag.getTagName());
Descriptor left=baseDescriptor;
Descriptor right=other;
List<Element> itemsL=new ArrayList<Element>(left.getTags(tag));
List<Element> itemsR=new ArrayList<Element>(right.getTags(tag));
try {
for ( Element itemL : itemsL) {
DescriptorElement lElement=(DescriptorElement)itemL;
DescriptorElement rElement=(DescriptorElement)right.getTagByIdentifier(tag.getTagName(),identifier.getIdentifier(lElement));
if (rElement != null) {
strategy.inBoth(left,lElement,rElement);
}
else {
strategy.inLeft(left,lElement);
}
}
for ( Element itemR : itemsR) {
DescriptorElement rElement=(DescriptorElement)itemR;
DescriptorElement lElement=(DescriptorElement)left.getTagByIdentifier(tag.getTagName(),identifier.getIdentifier(rElement));
if (lElement == null) {
strategy.inRight(left,rElement);
}
}
}
catch ( Exception ex) {
throw new CargoException("Element Merging Exception",ex);
}
}
else {
Descriptor left=baseDescriptor;
Descriptor right=other;
List<Element> itemsL=left.getTags(tag);
List<Element> itemsR=new ArrayList<Element>(right.getTags(tag));
if (tag.isMultipleAllowed()) {
for ( Element itemR : itemsR) {
DescriptorElement rightElement=(DescriptorElement)itemR;
left.addElement(tag,rightElement,left.getRootElement());
}
}
else {
MergeStrategy strategy=getMergeStrategy(tag.getTagName());
DescriptorElement leftElement=itemsL.size() == 0 ? null : (DescriptorElement)itemsL.get(0);
DescriptorElement rightElement=itemsR.size() == 0 ? null : (DescriptorElement)itemsR.get(0);
try {
if (leftElement != null && rightElement != null) {
strategy.inBoth(left,leftElement,rightElement);
}
else if (leftElement != null) {
strategy.inLeft(left,leftElement);
}
else if (rightElement != null) {
strategy.inRight(left,rightElement);
}
}
catch ( Exception ex) {
throw new CargoException("Element Merging Exception",ex);
}
}
}
}
}
| Merge this descriptor onto another. |
@Override public Object eGet(int featureID,boolean resolve,boolean coreType){
switch (featureID) {
case UmplePackage.EXTERNAL_DEFINITION___INTERFACE_1:
return isInterface_1();
case UmplePackage.EXTERNAL_DEFINITION___NAME_1:
return getName_1();
case UmplePackage.EXTERNAL_DEFINITION___CLASS_CONTENT_1:
return getClassContent_1();
}
return super.eGet(featureID,resolve,coreType);
}
| <!-- begin-user-doc --> <!-- end-user-doc --> |
public NotificationChain basicSetScope(Scope newScope,NotificationChain msgs){
Scope oldScope=scope;
scope=newScope;
if (eNotificationRequired()) {
ENotificationImpl notification=new ENotificationImpl(this,Notification.SET,StextPackage.STATE_SPECIFICATION__SCOPE,oldScope,newScope);
if (msgs == null) msgs=notification;
else msgs.add(notification);
}
return msgs;
}
| <!-- begin-user-doc --> <!-- end-user-doc --> |
public static Container encloseIn(Layout l,Component... cmp){
Container cnt=new Container(l);
for ( Component c : cmp) {
cnt.addComponent(c);
}
return cnt;
}
| Short-hand for enclosing multiple components in a container, typically using a box layout. |
public static Stopwatch createUnstarted(Ticker ticker){
return new Stopwatch(ticker);
}
| Creates (but does not start) a new stopwatch, using the specified time source. |
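A brief usage sketch, assuming Guava's Stopwatch and Ticker types; the fake ticker is an illustrative test double, not part of the snippet above.
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import com.google.common.base.Stopwatch;
import com.google.common.base.Ticker;

public class StopwatchDemo {
    public static void main(String[] args) {
        // A controllable time source, handy for deterministic tests.
        final AtomicLong nanos = new AtomicLong();
        Ticker fakeTicker = new Ticker() {
            @Override public long read() { return nanos.get(); }
        };

        Stopwatch stopwatch = Stopwatch.createUnstarted(fakeTicker);
        stopwatch.start();
        nanos.addAndGet(TimeUnit.MILLISECONDS.toNanos(250)); // simulate 250 ms passing
        stopwatch.stop();

        System.out.println(stopwatch.elapsed(TimeUnit.MILLISECONDS)); // 250
    }
}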
@DSGenerator(tool_name="Doppelganger",tool_version="2.0",generated_on="2013-12-30 12:55:36.567 -0500",hash_original_method="55B8B2C74AE8EFAFCF07CA3753E23BB0",hash_generated_method="A30D64FD09E2B0376DF0127CD24EFA6B") public String debugDump(){
String superstring=super.debugDump();
stringRepresentation="";
sprint(SIPResponse.class.getCanonicalName());
sprint("{");
if (statusLine != null) {
sprint(statusLine.debugDump());
}
sprint(superstring);
sprint("}");
return stringRepresentation;
}
| Print formatting function. Indent and parenthesize for pretty printing. Note -- use the encode method for formatting the message. Hack here to XMLize. |
public String toString(){
int iMax=length() - 1;
if (iMax == -1) {
return "[]";
}
StringBuilder b=new StringBuilder((17 + 2) * (iMax + 1));
b.append('[');
for (int i=0; ; i++) {
b.append(longBitsToDouble(longs.get(i)));
if (i == iMax) {
return b.append(']').toString();
}
b.append(',').append(' ');
}
}
| Returns the String representation of the current values of array. |
private void createProxyAndRegionForClient(){
PoolFactory pf=PoolManager.createFactory();
pf.addServer("localhost",PORT);
pf.setMinConnections(0);
pf.setPingInterval(10000);
pf.setThreadLocalConnections(true);
pf.setReadTimeout(2000);
pf.setSocketBufferSize(32768);
proxy=(PoolImpl)pf.create("junitPool");
AttributesFactory factory=new AttributesFactory();
factory.setScope(Scope.DISTRIBUTED_ACK);
factory.setPoolName("junitPool");
RegionAttributes attrs=factory.createRegionAttributes();
cache.createVMRegion(regionName,attrs);
}
| Initializes proxy object and creates region for client |
public boolean isFieldAlignLeading(){
return getFieldAlignmentType().equals(FIELDALIGNMENTTYPE_LeadingLeft);
}
| Field Align Leading |
public static List<String> changeCase(List<String> list){
if (list != null) {
List<String> result=new ArrayList<String>();
for ( String element : list) {
result.add(changeCase(element));
}
return result;
}
return null;
}
| Creates a new list of endpoints modified to upper or lower case based on the endpoint type. Endpoints of type WWN are changed to be upper case. All other types are changed to lower case. |
public MessageHandler(GlowNetworkServer connectionManager){
this.connectionManager=connectionManager;
}
| Creates a new network event handler. |
public void addRange(BytesRef minPrefixCoded,BytesRef maxPrefixCoded){
throw new UnsupportedOperationException();
}
| Override this method if you want to receive the already prefix-encoded range bounds. You can directly build classical (inclusive) range queries from them. |
public void encode(DerOutputStream out) throws IOException {
out.write(tag);
out.putLength(length);
if (length > 0) {
byte[] value=new byte[length];
synchronized (data) {
buffer.reset();
if (buffer.read(value) != length) {
throw new IOException("short DER value read (encode)");
}
out.write(value);
}
}
}
| Encode an ASN1/DER encoded datum onto a DER output stream. |
public Entry proxyAuth(final String auth,final String ip){
final Entry entry=proxyAuth(auth);
if (entry != null) {
entry.updateLastAccess(false);
this.ipUsers.put(ip,entry.getUserName());
}
return entry;
}
| Use ProxyAuth String to authenticate user and save IP/username for ipAuth. |
@SubscribeEvent public static void playerClone(PlayerEvent.Clone event){
final IMaxHealth oldMaxHealth=getMaxHealth(event.getOriginal());
final IMaxHealth newMaxHealth=getMaxHealth(event.getEntityPlayer());
if (newMaxHealth != null && oldMaxHealth != null) {
newMaxHealth.setBonusMaxHealth(oldMaxHealth.getBonusMaxHealth());
}
}
| Copy the player's bonus max health when they respawn after dying or returning from the end. |
private TokenImpl(int type){
fType=type;
fData=null;
}
| Creates a new token according to the given specification which does not have any data attached to it. |
public void create(){
try {
ManagementFactory.getFactory().registerMBeanObject(JMX_NAME,this);
start();
if (logger.isInfoEnabled()) logger.info(JMX_NAME + " registrado con \351xito.");
}
catch ( Exception e) {
logger.error("Error al intentar registrar: " + JMX_NAME + ". Abortado registro.",e);
}
}
| Creates the MBean. |
public int hashCode(){
return value.hashCode() ^ locale.hashCode();
}
| Returns a hashcode for this text attribute. |
public static boolean isSameLength(final double[] array1,final double[] array2){
if (array1 == null && array2 != null && array2.length > 0 || array2 == null && array1 != null && array1.length > 0 || array1 != null && array2 != null && array1.length != array2.length) {
return false;
}
return true;
}
| <p>Checks whether two arrays are the same length, treating <code>null</code> arrays as length <code>0</code>.</p> |
public static void processStatementScheduleMultiple(EPStatementAgentInstanceHandle handle,Object callbackObject,EPServicesContext services){
if (InstrumentationHelper.ENABLED) {
InstrumentationHelper.get().qTimeCP(handle,services.getSchedulingService().getTime());
}
handle.getStatementAgentInstanceLock().acquireWriteLock();
try {
if (!handle.isDestroyed()) {
if (handle.isHasVariables()) {
services.getVariableService().setLocalVersion();
}
if (callbackObject instanceof ArrayDeque) {
ArrayDeque<ScheduleHandleCallback> callbackList=(ArrayDeque<ScheduleHandleCallback>)callbackObject;
for ( ScheduleHandleCallback callback : callbackList) {
callback.scheduledTrigger(services.getEngineLevelExtensionServicesContext());
}
}
else {
ScheduleHandleCallback callback=(ScheduleHandleCallback)callbackObject;
callback.scheduledTrigger(services.getEngineLevelExtensionServicesContext());
}
handle.internalDispatch();
}
}
catch ( RuntimeException ex) {
services.getExceptionHandlingService().handleException(ex,handle,ExceptionHandlerExceptionType.PROCESS,null);
}
finally {
if (handle.isHasTableAccess()) {
services.getTableService().getTableExprEvaluatorContext().releaseAcquiredLocks();
}
handle.getStatementAgentInstanceLock().releaseWriteLock();
if (InstrumentationHelper.ENABLED) {
InstrumentationHelper.get().aTimeCP();
}
}
}
| Processing multiple schedule matches for a statement. |
public CategoryListItem(Category category,CategoryListItem parentItem){
name=category.getName();
fullName=parentItem == null ? name : parentItem.fullName + " / " + name;
key=category.getKey();
List<Category> subCategories=category.getSubCategories();
int cntSubCategories=subCategories == null ? 0 : subCategories.size();
if (cntSubCategories == 0) {
subItems=new CategoryListItem[0];
}
else {
subItems=new CategoryListItem[cntSubCategories];
for (int i=0; i < cntSubCategories; i++) {
subItems[i]=new CategoryListItem(subCategories.get(i),this);
}
subItems[cntSubCategories - 1]=new CategoryListItem(name,fullName,key,null);
}
}
| Converts a Category object into a CategoryListItem. |
public void sortFromTo(int from,int to){
countSortFromTo(from,to);
}
| Sorts the specified range of the receiver into ascending order. The sorting algorithm is countsort. |
private void writeQNameAttribute(java.lang.String namespace,java.lang.String attName,javax.xml.namespace.QName qname,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
java.lang.String attributeNamespace=qname.getNamespaceURI();
java.lang.String attributePrefix=xmlWriter.getPrefix(attributeNamespace);
if (attributePrefix == null) {
attributePrefix=registerPrefix(xmlWriter,attributeNamespace);
}
java.lang.String attributeValue;
if (attributePrefix.trim().length() > 0) {
attributeValue=attributePrefix + ":" + qname.getLocalPart();
}
else {
attributeValue=qname.getLocalPart();
}
if (namespace.equals("")) {
xmlWriter.writeAttribute(attName,attributeValue);
}
else {
registerPrefix(xmlWriter,namespace);
xmlWriter.writeAttribute(namespace,attName,attributeValue);
}
}
| Util method to write an attribute without the ns prefix |
public BetterWeakReference(T r,ReferenceQueue<? super T> q){
super(r,q);
}
| Constructs a new weak reference to the given referent. The newly created reference is registered with the given reference queue. |
public String addBinary(String a,String b){
int m=a.length();
int n=b.length();
int carry=0;
StringBuilder res=new StringBuilder();
int i=0;
while (i < m || i < n) {
int p=i < m ? a.charAt(m - 1 - i) - '0' : 0;
int q=i < n ? b.charAt(n - 1 - i) - '0' : 0;
int temp=p + q + carry;
carry=temp / 2;
res.insert(0,temp % 2);
i++;
}
return carry == 0 ? res.toString() : "1" + res.toString();
}
| Math, String. From right to left, do it digit-by-digit. Get the current digits of a and b and add them up. Also use an integer to store the carry from the previous addition. Store the sum in the result and update the carry for each round. Stop when the end of the longer string is reached. Remember to check the carry before returning: if carry is 1, it should still be inserted into the result. |
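Two worked examples of the digit-by-digit addition described above, cross-checked with the standard library (the class enclosing addBinary is not shown, so the calls appear only in comments).
public class AddBinaryDemo {
    public static void main(String[] args) {
        // Expected results of the method above:
        // addBinary("11", "1")      -> "100"   (3 + 1 = 4)
        // addBinary("1010", "1011") -> "10101" (10 + 11 = 21)
        System.out.println(Integer.toBinaryString(Integer.parseInt("11", 2) + Integer.parseInt("1", 2)));      // 100
        System.out.println(Integer.toBinaryString(Integer.parseInt("1010", 2) + Integer.parseInt("1011", 2))); // 10101
    }
}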
protected void init(DerValue encoding,int rep_type) throws Asn1Exception, IOException, RealmException {
DerValue der, subDer;
msgType=(encoding.getTag() & (byte)0x1F);
if (msgType != Krb5.KRB_ENC_AS_REP_PART && msgType != Krb5.KRB_ENC_TGS_REP_PART) {
throw new Asn1Exception(Krb5.ASN1_BAD_ID);
}
der=encoding.getData().getDerValue();
if (der.getTag() != DerValue.tag_Sequence) {
throw new Asn1Exception(Krb5.ASN1_BAD_ID);
}
key=EncryptionKey.parse(der.getData(),(byte)0x00,false);
lastReq=LastReq.parse(der.getData(),(byte)0x01,false);
subDer=der.getData().getDerValue();
if ((subDer.getTag() & (byte)0x1F) == (byte)0x02) {
nonce=subDer.getData().getBigInteger().intValue();
}
else {
throw new Asn1Exception(Krb5.ASN1_BAD_ID);
}
keyExpiration=KerberosTime.parse(der.getData(),(byte)0x03,true);
flags=TicketFlags.parse(der.getData(),(byte)0x04,false);
authtime=KerberosTime.parse(der.getData(),(byte)0x05,false);
starttime=KerberosTime.parse(der.getData(),(byte)0x06,true);
endtime=KerberosTime.parse(der.getData(),(byte)0x07,false);
renewTill=KerberosTime.parse(der.getData(),(byte)0x08,true);
Realm srealm=Realm.parse(der.getData(),(byte)0x09,false);
sname=PrincipalName.parse(der.getData(),(byte)0x0A,false,srealm);
if (der.getData().available() > 0) {
caddr=HostAddresses.parse(der.getData(),(byte)0x0B,true);
}
}
| Initializes an EncKDCRepPart object. |
public boolean hasMoreTokens(){
return super.hasMoreTokens();
}
| Check whether the object has more tokens. |
private static Unit insertGotoBefore(Chain unitChain,Unit node,Unit target){
Unit newGoto=Jimple.v().newGotoStmt(target);
unitChain.insertBefore(newGoto,node);
newGoto.redirectJumpsToThisTo(node);
return newGoto;
}
| Inserts a Jimple <code>Goto</code> to <code>target</code>, directly before <code>node</code> in the given <code>unitChain</code>.<br> As we use <code>JGoto</code> the chain must contain Jimple-stmts. |
public static boolean isPrimitive(String signature){
char t=signature.charAt(0);
return t == 'Z' || t == 'B' || t == 'C' || t == 'S' || t == 'I' || t == 'J' || t == 'F' || t == 'D';
}
| <table> <tr><th>Type Signature</th><th>Java Type</th></tr> <tr>Z<td></td><td>boolean</td></tr> <tr>B<td></td><td>byte</td></tr> <tr>C<td></td><td>char</td></tr> <tr>S<td></td><td>short</td></tr> <tr>I<td></td><td>int</td></tr> <tr>J<td></td><td>long</td></tr> <tr>F<td></td><td>float</td></tr> <tr>D<td></td><td>double</td></tr> </table> |
private void inflateViewContainerLayout(){
setContentView(R.layout.activity_main);
ButterKnife.inject(this);
mainFlow=mainView.getFlow();
}
| Inflate the view container layout and inject our view components |
public MimeTypeParseException(String s){
super(s);
}
| Constructs a MimeTypeParseException with the specified detail message. |
private void buildLines(List<SuperMatchBucket> buckets){
final Set<CondensedLine> unsortedLines=new HashSet<CondensedLine>();
for ( SuperMatchBucket bucket : buckets) {
boolean bucketFoundAHome=false;
for ( CondensedLine line : unsortedLines) {
bucketFoundAHome=line.addSuperMatchesSameTypeWithoutOverlap(bucket);
if (bucketFoundAHome) {
break;
}
}
if (!bucketFoundAHome) {
unsortedLines.add(new CondensedLine(bucket));
}
numSuperMatchBlobs+=bucket.getSupermatches().size();
}
lines=new TreeSet<CondensedLine>(unsortedLines);
}
| Considering each bucket in turn, attempt to add the bucket to an existing line (same type, no overlap), minimising the number of lines by creating a new line only when no existing line can accept it. |
public Color(Color color){
if (color == null) {
throw new IllegalArgumentException(Logger.logMessage(Logger.ERROR,"Color","constructor","missingColor"));
}
this.red=color.red;
this.green=color.green;
this.blue=color.blue;
this.alpha=color.alpha;
}
| Constructs a color with the components of a specified color. |
public BiosCommandResult doCreateReplicationPolicy(StorageSystem system,String name,String source_root_path,String target_host,String target_path,IsilonSyncPolicy.Action action,String description,String schedule){
try {
_log.info("IsilonFileStorageDevice doCreateReplicationPolicy {} - start",source_root_path);
IsilonApi isi=getIsilonDevice(system);
IsilonSyncPolicy policy=new IsilonSyncPolicy(name,source_root_path,target_path,target_host,action);
if (schedule != null && !schedule.isEmpty()) {
policy.setSchedule(schedule);
}
if (description != null && !description.isEmpty()) {
policy.setDescription(description);
}
policy.setEnabled(false);
String policyId=isi.createReplicationPolicy(policy);
_log.info("IsilonFileStorageDevice doCreateReplicationPolicy {} with policyId {} - complete",name,policyId);
return BiosCommandResult.createSuccessfulResult();
}
catch ( IsilonException e) {
return BiosCommandResult.createErrorResult(e);
}
}
| Call to Isilon Device to Create Replication Session |
@Override public void deliver(WriteStream os,OutHttp2 outHttp) throws IOException {
outHttp.writeGoAway();
}
| Deliver the message |
private int preFillGrid(Direction direction,int dy,int emptyTop,RecyclerView.Recycler recycler,RecyclerView.State state){
int newFirstVisiblePosition=firstChildPositionForRow(mFirstVisibleRow);
SparseArray<View> viewCache=new SparseArray<>(getChildCount());
int startLeftOffset=getPaddingLeft();
int startTopOffset=getPaddingTop() + emptyTop;
if (getChildCount() != 0) {
startTopOffset=getDecoratedTop(getChildAt(0));
if (mFirstVisiblePosition != newFirstVisiblePosition) {
switch (direction) {
case UP:
double previousTopRowHeight=sizeForChildAtPosition(mFirstVisiblePosition - 1).getHeight();
startTopOffset-=previousTopRowHeight;
break;
case DOWN:
double topRowHeight=sizeForChildAtPosition(mFirstVisiblePosition).getHeight();
startTopOffset+=topRowHeight;
break;
}
}
for (int i=0; i < getChildCount(); i++) {
int position=mFirstVisiblePosition + i;
final View child=getChildAt(i);
viewCache.put(position,child);
}
for (int i=0; i < viewCache.size(); i++) {
final View cachedView=viewCache.valueAt(i);
detachView(cachedView);
}
}
mFirstVisiblePosition=newFirstVisiblePosition;
int leftOffset=startLeftOffset;
int topOffset=startTopOffset + mPendingScrollPositionOffset;
int nextPosition=mFirstVisiblePosition;
while (nextPosition >= 0 && nextPosition < state.getItemCount()) {
boolean isViewCached=true;
View view=viewCache.get(nextPosition);
if (view == null) {
view=recycler.getViewForPosition(nextPosition);
isViewCached=false;
}
if (mIsFirstViewHeader && nextPosition == HEADER_POSITION) {
measureChildWithMargins(view,0,0);
mHeaderViewSize=new Size(view.getMeasuredWidth(),view.getMeasuredHeight());
}
Size viewSize=sizeForChildAtPosition(nextPosition);
if ((leftOffset + viewSize.getWidth()) > getContentWidth()) {
leftOffset=startLeftOffset;
Size previousViewSize=sizeForChildAtPosition(nextPosition - 1);
topOffset+=previousViewSize.getHeight();
}
boolean isAtEndOfContent;
switch (direction) {
case DOWN:
isAtEndOfContent=topOffset >= getContentHeight() + dy;
break;
default :
isAtEndOfContent=topOffset >= getContentHeight();
break;
}
if (isAtEndOfContent) break;
if (isViewCached) {
attachView(view);
viewCache.remove(nextPosition);
}
else {
addView(view);
measureChildWithMargins(view,0,0);
int right=leftOffset + viewSize.getWidth();
int bottom=topOffset + viewSize.getHeight();
layoutDecorated(view,leftOffset,topOffset,right,bottom);
}
leftOffset+=viewSize.getWidth();
nextPosition++;
}
for (int i=0; i < viewCache.size(); i++) {
final View removingView=viewCache.valueAt(i);
recycler.recycleView(removingView);
}
int pixelsFilled=0;
if (getChildCount() > 0) {
pixelsFilled=getChildAt(getChildCount() - 1).getBottom();
}
return pixelsFilled;
}
| Find first visible position, scrap all children, and then layout all visible views returning the number of pixels laid out, which could be greater than the entire view (useful for scroll functions). |
@Override public boolean equals(Object o){
if (!(o instanceof AttributeWeights)) {
return false;
}
else {
AttributeWeights other=(AttributeWeights)o;
return this.weightMap.equals(other.weightMap);
}
}
| Returns true if both objects have the same weight map. |
private static PipelineOp convertNamedSolutionSetScan(PipelineOp left,final NamedSubqueryInclude nsi,final Set<IVariable<?>> doneSet,final AST2BOpContext ctx){
@SuppressWarnings("rawtypes") final Map<IConstraint,Set<IVariable<IV>>> needsMaterialization=new LinkedHashMap<IConstraint,Set<IVariable<IV>>>();
final IConstraint[] joinConstraints=getJoinConstraints(getJoinConstraints(nsi),needsMaterialization);
final String name=nsi.getName();
final INamedSolutionSetRef namedSolutionSet=NamedSolutionSetRefUtility.newInstance(ctx.getNamespace(),ctx.getTimestamp(),name,IVariable.EMPTY);
left=new NestedLoopJoinOp(leftOrEmpty(left),new NV(BOp.Annotations.BOP_ID,ctx.nextId()),new NV(BOp.Annotations.EVALUATION_CONTEXT,BOpEvaluationContext.CONTROLLER),new NV(PipelineOp.Annotations.SHARED_STATE,true),new NV(NestedLoopJoinOp.Annotations.NAMED_SET_REF,namedSolutionSet),new NV(NestedLoopJoinOp.Annotations.CONSTRAINTS,joinConstraints));
left=addMaterializationSteps3(left,doneSet,needsMaterialization,nsi.getQueryHints(),ctx);
return left;
}
| If the cardinality of the exogenous solutions is low, then we can SCAN the named solution set and use an inner loop to test each solution read from the named solution set against each exogenous solution. <p> Note: This code path MUST NOT change the order of the solutions read from the named solution set. We rely on that guarantee to provide fast ordered SLICEs from a pre-computed named solution set. |
public Map<String,StoredException> loadAll(){
Map<String,StoredException> storedExceptions=new HashMap<>();
if (context != null) {
String[] fileNames=context.fileList();
if (fileNames != null) {
for ( String fileName : fileNames) {
if (utils.isNotBlank(fileName) && fileName.startsWith("FoamStoredException")) {
StoredException storedException=loadStoredExceptionData(fileName);
if (storedException != null) {
storedExceptions.put(fileName,storedException);
}
}
}
}
}
return storedExceptions;
}
| Load and return all StoredException data. These represent crashes that have not yet been reported. |
public void uploadConfigDir(Path dir,String configName) throws IOException {
zkClient.uploadToZK(dir,CONFIGS_ZKNODE + "/" + configName,UPLOAD_FILENAME_EXCLUDE_PATTERN);
}
| Upload files from a given path to a config in Zookeeper |
public static final boolean isConversationAbort(byte esmClass){
return isMessageType(esmClass,SMPPConstant.ESMCLS_CONV_ABORT);
}
| Message Type. |
public static boolean request(final long size,final boolean force){
if (size < 1024) return true;
return getStrategy().request(size,force,shortStatus);
}
| Check whether a specified amount of bytes (of memory) can be requested. |
static MPrintFont create(Font font){
MPrintFont pf=new MPrintFont(Env.getCtx(),0,null);
StringBuffer name=new StringBuffer(font.getName());
if (font.isBold()) name.append(" bold");
if (font.isItalic()) name.append(" italic");
name.append(" ").append(font.getSize());
pf.setName(name.toString());
pf.setFont(font);
pf.saveEx();
return pf;
}
| Create Font in Database and save |
@Override public boolean isActive(){
return amIActive;
}
| Used by the Whitebox GUI to tell if this plugin is still running. |
@SuppressWarnings({"PMD.LooseCoupling","unchecked"}) protected void createDefaultState(){
Class<?> c=getStateClass();
try {
Object newState=(T)c.newInstance();
this.state=(T)newState;
}
catch ( Exception e) {
throw new UncheckedException(e);
}
}
| Creates a default state representation for this actor |
@TransactionAttribute(TransactionAttributeType.REQUIRED) public boolean restart(boolean isRestartAPP){
final String messageKey="mail_bes_notification_connection_success";
boolean isSuspendedByApp=false;
if (!besDAO.isBESAvalible()) {
if (isRestartAPP) {
sendMailToAppAdmin("mail_bes_notification_error_app_admin");
}
return false;
}
List<ServiceInstance> serviceInstances=instanceDAO.getInstancesSuspendedbyApp();
for ( ServiceInstance instance : serviceInstances) {
String actionLink=getResumeLinkForInstance(instance);
if (actionLink == null || actionLink.isEmpty()) {
isSuspendedByApp=true;
continue;
}
sendActionMail(true,instance,messageKey,null,actionLink,false);
instance.setSuspendedByApp(false);
}
configService.setAPPSuspend(Boolean.valueOf(isSuspendedByApp).toString());
return true;
}
| If BES is available process failed serviceInstances and reset APP_SUSPEND. |
public AdempiereThemeInnova(){
setDefault();
s_theme=this;
s_name=NAME;
}
| Adempiere default Theme Blue Metal |
protected void determineCoverageGoals(){
List<MethodCoverageTestFitness> goals=new MethodCoverageFactory().getCoverageGoals();
for ( MethodCoverageTestFitness goal : goals) {
methodCoverageMap.put(goal.getClassName() + "." + goal.getMethod(),goal);
if (Properties.TEST_ARCHIVE) TestsArchive.instance.addGoalToCover(this,goal);
}
}
| Initialize the set of known coverage goals |
@DSGenerator(tool_name="Doppelganger",tool_version="2.0",generated_on="2014-09-03 15:00:04.128 -0400",hash_original_method="7855450641F7BEAB9B605C4390783013",hash_generated_method="4D3E123755D01FEB460C17473C4BE80F") protected void engineUpdate(byte input){
oneByte[0]=input;
SHA1Impl.updateHash(buffer,oneByte,0,0);
messageLength++;
}
| Supplements a byte to current message. <BR> The method overrides "engineUpdate(byte)" in class MessageDigestSpi. <BR> |
public Allele(String seq,int start,int end,byte[] nt){
super(seq,start,end);
if (nt == null) {
throw new NullPointerException();
}
mNt=nt;
}
| Construct an Allele |
public static ShareIntentNoBuilder from(@NonNull Context context){
checkNotNull(context);
ShareIntentBuilder builder=new ShareIntentBuilder(context);
return new ShareIntentNoBuilder(builder);
}
| Create the first in a series of type-safe builder wrappers to create a share intent or to launch a share using that intent. |
private void testIsoDayOfWeek() throws Exception {
assertEquals(1,getIsoDayOfWeek(parse("2008-09-29")));
assertEquals(2,getIsoDayOfWeek(parse("2008-09-30")));
assertEquals(3,getIsoDayOfWeek(parse("2008-10-01")));
assertEquals(4,getIsoDayOfWeek(parse("2008-10-02")));
assertEquals(5,getIsoDayOfWeek(parse("2008-10-03")));
assertEquals(6,getIsoDayOfWeek(parse("2008-10-04")));
assertEquals(7,getIsoDayOfWeek(parse("2008-10-05")));
}
| Test if day of week is returned as Monday = 1 to Sunday = 7. |
public int indexOf(int elem){
if (null == m_map) return -1;
for (int i=0; i < m_firstFree; i++) {
int node=m_map[i];
if (node == elem) return i;
}
return -1;
}
| Searches for the first occurence of the given argument, beginning the search at index, and testing for equality using the equals method. |
private void stateChange(int nextState){
if (state == nextState) return;
if (newState == nextState) {
state=nextState;
phaser.arrive();
log("%d: state change: %s %s%n",getId(),toStateName(nextState),phaserToString(phaser));
return;
}
throw new RuntimeException("current " + state + " next "+ nextState+ " new state "+ newState);
}
| Change the state if it matches newState. |
private boolean same(double d1,double d2,double tolerance){
return (Math.abs(d1 - d2) < tolerance);
}
| Tests two doubles for 'near enough' equality. |
public AssignmentWizard(String action,String description,Assignment assignment,int fieldFlags,String helpId,String pageTwoHelpId){
super();
this.assignment=assignment;
assignmentPage=new AssignmentWizardPage(action,description,fieldFlags,helpId);
typePage=new TypingWizardPage(action,description,pageTwoHelpId);
}
| Constructs the wizard that assigns values to constants, I believe it also constructs the wizard that overrides definitions. (LL) The last argument is meaningful only for the wizard that assigns values to constants. |
static WordInfo[] doWordAnalysis(GVTGlyphVector gv,AttributedCharacterIterator aci,int numWords,FontRenderContext frc){
int numGlyphs=gv.getNumGlyphs();
int[] glyphWords=new int[numGlyphs];
int[] wordMap=allocWordMap(null,10);
int maxWord=0;
int aciIdx=aci.getBeginIndex();
for (int i=0; i < numGlyphs; i++) {
int cnt=gv.getCharacterCount(i,i);
aci.setIndex(aciIdx);
Integer integer=(Integer)aci.getAttribute(WORD_LIMIT);
int minWord=integer.intValue() - numWords;
if (minWord > maxWord) {
maxWord=minWord;
wordMap=allocWordMap(wordMap,maxWord + 1);
}
aciIdx++;
for (int c=1; c < cnt; c++) {
aci.setIndex(aciIdx);
integer=(Integer)aci.getAttribute(WORD_LIMIT);
int cWord=integer.intValue() - numWords;
if (cWord > maxWord) {
maxWord=cWord;
wordMap=allocWordMap(wordMap,maxWord + 1);
}
if (cWord < minWord) {
wordMap[minWord]=cWord;
minWord=cWord;
}
else if (cWord > minWord) {
wordMap[cWord]=minWord;
}
aciIdx++;
}
glyphWords[i]=minWord;
}
int words=0;
WordInfo[] cWordMap=new WordInfo[maxWord + 1];
for (int i=0; i <= maxWord; i++) {
int nw=wordMap[i];
if (nw == -1) {
cWordMap[i]=new WordInfo(words++);
}
else {
int word=nw;
nw=wordMap[i];
while (nw != -1) {
word=nw;
nw=wordMap[word];
}
wordMap[i]=word;
cWordMap[i]=cWordMap[word];
}
}
wordMap=null;
WordInfo[] wordInfos=new WordInfo[words];
for (int i=0; i <= maxWord; i++) {
WordInfo wi=cWordMap[i];
wordInfos[wi.getIndex()]=cWordMap[i];
}
aciIdx=aci.getBeginIndex();
int aciEnd=aci.getEndIndex();
char ch=aci.setIndex(aciIdx);
int aciWordStart=aciIdx;
GVTFont gvtFont=(GVTFont)aci.getAttribute(GVT_FONT);
float lineHeight=1.0f;
Float lineHeightFloat=(Float)aci.getAttribute(LINE_HEIGHT);
if (lineHeightFloat != null) lineHeight=lineHeightFloat.floatValue();
int runLimit=aci.getRunLimit(szAtts);
WordInfo prevWI=null;
float[] lastAdvAdj=new float[numGlyphs];
float[] advAdj=new float[numGlyphs];
boolean[] hideLast=new boolean[numGlyphs];
boolean[] hide=new boolean[numGlyphs];
boolean[] space=new boolean[numGlyphs];
float[] glyphPos=gv.getGlyphPositions(0,numGlyphs + 1,null);
for (int i=0; i < numGlyphs; i++) {
char pch=ch;
ch=aci.setIndex(aciIdx);
Integer integer=(Integer)aci.getAttribute(WORD_LIMIT);
WordInfo theWI=cWordMap[integer.intValue() - numWords];
if (theWI.getFlowLine() == null) theWI.setFlowLine(aci.getAttribute(FLOW_LINE_BREAK));
if (prevWI == null) {
prevWI=theWI;
}
else if (prevWI != theWI) {
GVTLineMetrics lm=gvtFont.getLineMetrics(aci,aciWordStart,aciIdx,frc);
prevWI.addLineMetrics(gvtFont,lm);
prevWI.addLineHeight(lineHeight);
aciWordStart=aciIdx;
prevWI=theWI;
}
int chCnt=gv.getCharacterCount(i,i);
if (chCnt == 1) {
char nch;
float kern;
switch (ch) {
case SOFT_HYPHEN:
hideLast[i]=true;
nch=aci.next();
aci.previous();
kern=gvtFont.getHKern(pch,nch);
advAdj[i]=-(glyphPos[2 * i + 2] - glyphPos[2 * i] + kern);
break;
case ZERO_WIDTH_JOINER:
hide[i]=true;
break;
case ZERO_WIDTH_SPACE:
hide[i]=true;
break;
case SPACE:
space[i]=true;
nch=aci.next();
aci.previous();
kern=gvtFont.getHKern(pch,nch);
lastAdvAdj[i]=-(glyphPos[2 * i + 2] - glyphPos[2 * i] + kern);
default :
}
}
aciIdx+=chCnt;
if ((aciIdx > runLimit) && (aciIdx < aciEnd)) {
GVTLineMetrics lm=gvtFont.getLineMetrics(aci,aciWordStart,runLimit,frc);
prevWI.addLineMetrics(gvtFont,lm);
prevWI.addLineHeight(lineHeight);
prevWI=null;
aciWordStart=aciIdx;
aci.setIndex(aciIdx);
gvtFont=(GVTFont)aci.getAttribute(GVT_FONT);
Float f=(Float)aci.getAttribute(LINE_HEIGHT);
lineHeight=f.floatValue();
runLimit=aci.getRunLimit(szAtts);
}
}
GVTLineMetrics lm=gvtFont.getLineMetrics(aci,aciWordStart,runLimit,frc);
prevWI.addLineMetrics(gvtFont,lm);
prevWI.addLineHeight(lineHeight);
int[] wordGlyphCounts=new int[words];
for (int i=0; i < numGlyphs; i++) {
int word=glyphWords[i];
int cWord=cWordMap[word].getIndex();
glyphWords[i]=cWord;
wordGlyphCounts[cWord]++;
}
cWordMap=null;
int[][] wordGlyphs=new int[words][];
int[] wordGlyphGroupsCounts=new int[words];
for (int i=0; i < numGlyphs; i++) {
int cWord=glyphWords[i];
int[] wgs=wordGlyphs[cWord];
if (wgs == null) {
wgs=wordGlyphs[cWord]=new int[wordGlyphCounts[cWord]];
wordGlyphCounts[cWord]=0;
}
int cnt=wordGlyphCounts[cWord];
wgs[cnt]=i;
if (cnt == 0) {
wordGlyphGroupsCounts[cWord]++;
}
else {
if (wgs[cnt - 1] != i - 1) wordGlyphGroupsCounts[cWord]++;
}
wordGlyphCounts[cWord]++;
}
for (int i=0; i < words; i++) {
int cnt=wordGlyphGroupsCounts[i];
GlyphGroupInfo[] wordGlyphGroups=new GlyphGroupInfo[cnt];
if (cnt == 1) {
int[] glyphs=wordGlyphs[i];
int start=glyphs[0];
int end=glyphs[glyphs.length - 1];
wordGlyphGroups[0]=new GlyphGroupInfo(gv,start,end,hide,hideLast[end],glyphPos,advAdj,lastAdvAdj,space);
}
else {
int glyphGroup=0;
int[] glyphs=wordGlyphs[i];
int prev=glyphs[0];
int start=prev;
for (int j=1; j < glyphs.length; j++) {
if (prev + 1 != glyphs[j]) {
int end=glyphs[j - 1];
wordGlyphGroups[glyphGroup]=new GlyphGroupInfo(gv,start,end,hide,hideLast[end],glyphPos,advAdj,lastAdvAdj,space);
start=glyphs[j];
glyphGroup++;
}
prev=glyphs[j];
}
int end=glyphs[glyphs.length - 1];
wordGlyphGroups[glyphGroup]=new GlyphGroupInfo(gv,start,end,hide,hideLast[end],glyphPos,advAdj,lastAdvAdj,space);
}
wordInfos[i].setGlyphGroups(wordGlyphGroups);
}
return wordInfos;
}
| This returns an array of glyph numbers for each glyph group in each word: ret[word][glyphGroup][glyphNum]. |
@DSGenerator(tool_name="Doppelganger",tool_version="2.0",generated_on="2013-12-30 12:31:04.328 -0500",hash_original_method="D53B73924CE25BA79BE5C682EDA80315",hash_generated_method="76103A06C46A7D3BD84888DC632CF17F") public static Sampler CLAMP_NEAREST(RenderScript rs){
if (rs.mSampler_CLAMP_NEAREST == null) {
Builder b=new Builder(rs);
b.setMinification(Value.NEAREST);
b.setMagnification(Value.NEAREST);
b.setWrapS(Value.CLAMP);
b.setWrapT(Value.CLAMP);
rs.mSampler_CLAMP_NEAREST=b.create();
}
return rs.mSampler_CLAMP_NEAREST;
}
| Retrieve a sampler with min and mag set to nearest and wrap modes set to clamp. |