code
stringlengths 67
466k
| docstring
stringlengths 1
13.2k
|
---|---|
public int getState(Var var) {
Integer state = config.get(var);
if (state == null) {
throw new RuntimeException("VarConfig does not contain var: " + var);
}
return state;
} | Gets the state (in this config) for a given variable. |
public int getState(Var var, int defaultState) {
Integer state = config.get(var);
if (state == null) {
return defaultState;
} else {
return state;
}
} | Gets the state (in this config) for a given variable if it exists, or the default otherwise. |
public VarConfig getSubset(VarSet subsetVars) {
if (!vars.isSuperset(subsetVars)) {
throw new IllegalStateException("This config does not contain all the given variables.");
}
return getIntersection(subsetVars);
} | Gets a new variable configuration that contains only a subset of the variables. |
public VarConfig getIntersection(Iterable<Var> otherVars) {
VarConfig subset = new VarConfig();
for (Var v : otherVars) {
Integer state = config.get(v);
if (state != null) {
subset.put(v, state);
}
}
return subset;
} | Gets a new variable configuration that keeps only variables in otherVars. |
@Override
public void accum(FgModel model, int i, Accumulator ac) {
try {
accumWithException(model, i, ac);
} catch(Throwable t) {
log.error("Skipping example " + i + " due to throwable: " + t.getMessage());
t.printStackTrace();
}
} | Assumed by caller to be threadsafe. |
/**
 * Validates this command, resolves the SmartFox sender (falling back to a
 * fake user when the real one cannot be found), builds the response params
 * and dispatches them to the client request handler.
 *
 * @return always Boolean.TRUE
 */
@SuppressWarnings("unchecked")
@Override
public Boolean execute() {
validateCommand();
// NOTE(review): a missing sender is tolerated — a fake user stands in.
User sender = CommandUtil.getSfsUser(user, api);
sender = sender != null ? sender : newFakeUser();
ISFSObject params = createResponseParams();
getRequestHandler().handleClientRequest(sender, params);
return Boolean.TRUE;
} | /* (non-Javadoc)
@see com.lagente.core.command.BaseCommand#execute() |
/**
 * Creates the smartfox object to respond to the client, assembling the
 * configured addition, include/exclude variable lists and data, and binding
 * a ParamTransformer to the current context.
 *
 * @return smartfox parameter object
 */
private ISFSObject createResponseParams() {
return ResponseParamsBuilder.create()
.addition(addition)
.excludedVars(excludedVars)
.includedVars(includedVars)
.transformer(new ParamTransformer(context))
.data(data)
.build();
} | Create smartfox object to response to client
@return smartfox parameter object |
/**
 * Verifies the authenticity of a message using a base64 encoded digital
 * signature.
 *
 * NOTE(review): any exception other than UnsupportedEncodingException
 * (e.g. a malformed signature) is swallowed and reported as a failed
 * verification — confirm this best-effort behavior is intended.
 *
 * @param message the original message to verify
 * @param signature the base64 encoded digital signature
 * @return true if the original message is verified by the digital signature
 */
public boolean verify(String message, String signature) {
try {
return verifier.verify(message.getBytes(charsetName), Base64.decodeBase64(signature));
} catch (UnsupportedEncodingException e) {
// An unknown charset is a configuration error, not a verification failure.
throw new SignatureException("unsupported encoding: charsetName=" + charsetName, e);
} catch (Exception e) {
// Treat any other verification error as "not authentic".
return false;
}
} | Verifies the authenticity of a message using a base64 encoded digital
signature.
@param message the original message to verify
@param signature the base64 encoded digital signature
@return true if the original message is verified by the digital signature |
/**
 * Gets the i'th example, consulting the cache first and populating it on a
 * miss.
 *
 * NOTE(review): the lookup and insert are individually synchronized but the
 * factory call between them is not, so two threads can both build example i
 * and the second put wins. Benign if the factory is deterministic — confirm.
 */
public LFgExample get(int i) {
LFgExample ex;
synchronized (cache) {
ex = cache.get(i);
}
if (ex == null) {
ex = exampleFactory.get(i);
synchronized (cache) {
cache.put(i, ex);
}
}
return ex;
} | Gets the i'th example. |
/**
 * Updates all the start/end fields, treating the current node as the root:
 * leaf i is assigned the span [i, i+1), then internal node spans are
 * recomputed bottom-up via a post-order traversal.
 */
public void updateStartEnd() {
ArrayList<IntBinaryTree> leaves = getLeaves();
for (int i=0; i<leaves.size(); i++) {
IntBinaryTree leaf = leaves.get(i);
leaf.start = i;
leaf.end = i+1;
}
postOrderTraversal(new UpdateStartEnd());
} | Updates all the start end fields, treating the current node as the root. |
public ArrayList<IntBinaryTree> getLeaves() {
LeafCollector leafCollector = new LeafCollector();
postOrderTraversal(leafCollector);
return leafCollector.leaves;
} | Gets the leaves of this tree. |
/**
 * Signs a message.
 *
 * NOTE(review): the base64 output is pure ASCII, so constructing the result
 * String with the platform default charset is safe here.
 *
 * @param message the message to sign
 * @return a base64 encoded version of the signature
 * @throws SignatureException if charsetName is not a supported encoding
 */
public String sign(String message) {
try {
final byte[] signature = signer.sign(message.getBytes(charsetName));
return new String(Base64.encodeBase64(signature, false));
} catch (UnsupportedEncodingException e) {
throw new SignatureException("unsupported encoding: charsetName=" + charsetName, e);
}
} | Signs a message.
@param message the message to sign
@return a base64 encoded version of the signature |
/**
 * Sends one ping command immediately, then schedules recurring pings on the
 * server's shared task scheduler.
 */
@Override
public void ping() {
sendPingCommand();
TaskScheduler scheduler = SmartFoxServer
.getInstance()
.getTaskScheduler();
schedule(scheduler);
} | /*
(non-Javadoc)
@see com.tvd12.ezyfox.core.command.SchedulePing#ping() |
/**
 * Schedules the ping task to run forever at a fixed rate.
 *
 * NOTE(review): delayTime and period are narrowed to int here, so values
 * beyond Integer.MAX_VALUE ms would truncate — confirm the scheduler API
 * requires int.
 *
 * @param scheduler TaskScheduler object
 */
private void schedule(TaskScheduler scheduler) {
scheduledFuture = scheduler.scheduleAtFixedRate(
createTask(), (int)delayTime, (int)period, TimeUnit.MILLISECONDS);
} | Schedule forever
@param scheduler TaskScheduler object |
public boolean isSuperset(SmallSet<E> other) {
if (this.list.size() < other.list.size()) {
return false;
}
int j = 0;
for (int i=0; i<list.size() && j<other.list.size(); i++) {
E e1 = this.list.get(i);
E e2 = other.list.get(j);
int diff = e1.compareTo(e2);
if (diff == 0) {
// Equal entries. Just continue.
j++;
continue;
} else if (diff > 0) {
// e1 is greater than e2, which means e2 must not appear in this.list.
return false;
} else {
// e1 is less than e2, so e2 might appear later in this.list.
continue;
}
}
if (j == other.list.size()) {
return true;
} else {
return false;
}
} | TODO: containsAll should call this and it should become private. |
/**
 * Gets a new SmallSet containing the difference of this set with the other.
 * Relies on both backing lists being sorted (see Sort.diffSortedLists).
 *
 * NOTE(review): the initial capacity this.size() - other.size() can be
 * negative when other is the larger set; presumably the SmallSet
 * constructor tolerates that — confirm.
 */
public SmallSet<E> diff(SmallSet<E> other) {
SmallSet<E> tmp = new SmallSet<E>(this.size() - other.size());
Sort.diffSortedLists(this.list, other.list, tmp.list);
return tmp;
} | Gets a new SmallSet containing the difference of this set with the other. |
public ArrayList<BinaryTree> getLeaves() {
LeafCollector leafCollector = new LeafCollector();
postOrderTraversal(leafCollector);
return leafCollector.leaves;
} | Gets the leaves of this tree in left-to-right order. |
public ArrayList<BinaryTree> getLexicalLeaves() {
LexicalLeafCollector leafCollector = new LexicalLeafCollector();
postOrderTraversal(leafCollector);
return leafCollector.leaves;
} | Gets the lexical leaves of this tree in left-to-right order. |
public void intern() {
symbol = symbol.intern();
if (leftChild != null) {
leftChild.intern();
}
if (rightChild != null) {
rightChild.intern();
}
} | Intern all the strings. |
/**
 * If the key isn't in the dictionary, the makeDefault function is called to
 * create a new value, which is stored under the key and returned.
 *
 * Uses containsKey rather than a null check so that explicitly stored null
 * values are returned as-is instead of being recomputed.
 */
@Override
public V get(Object key) {
if (super.containsKey(key)) {
return super.get(key);
} else {
// Safe in practice only if callers pass keys of type K, as Map.get's
// signature forces Object here.
@SuppressWarnings("unchecked")
K k = (K) key;
V v = makeDefault.apply(k);
put(k, v);
return v;
}
} | If the key isn't in the dictionary, the makeDefault function will be called to create a new value
which will be added |
public static Object deserialize(byte[] bytes, boolean gzipOnSerialize) {
try {
InputStream is = new ByteArrayInputStream(bytes);
if (gzipOnSerialize) {
is = new GZIPInputStream(is);
}
ObjectInputStream in = new ObjectInputStream(is);
Object inObj = in.readObject();
in.close();
return inObj;
} catch (IOException e) {
throw new RuntimeException(e);
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
} | Deserialize and ungzip an object. |
public static byte[] serialize(Serializable obj, boolean gzipOnSerialize) {
try {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ObjectOutputStream out;
if (gzipOnSerialize) {
out = new ObjectOutputStream(new GZIPOutputStream(baos));
} else {
out = new ObjectOutputStream(baos);
}
out.writeObject(obj);
out.flush();
out.close();
return baos.toByteArray();
} catch (IOException e) {
throw new RuntimeException(e);
}
} | Serializes and gzips an object. |
/**
 * Forward pass: y_i = log(x_i), computed element-wise on a copy so the
 * input tensor is not mutated.
 */
@Override
public Tensor forward() {
Tensor x = modInX.getOutput();
y = new Tensor(x); // copy
y.log();
return y;
} | Foward pass: y_i = log(x_i) |
/**
 * Backward pass: dG/dx_i += dG/dy_i * dy_i/dx_i = dG/dy_i * (1 / x_i).
 * Divides a copy of yAdj so the output adjoint is not mutated.
 */
@Override
public void backward() {
Tensor x = modInX.getOutput();
Tensor tmp = new Tensor(yAdj); // copy
tmp.elemDivide(x);
modInX.getOutputAdj().elemAdd(tmp);
} | Backward pass:
dG/dx_i += dG/dy_i dy_i/dx_i = dG/dy_i (1 / x_i) |
/**
 * Returns the shared row-then-column comparator, cast to the caller's
 * element type. The double cast is safe only if COMPARATOR never inspects
 * the row value's concrete type.
 *
 * @param <R> the type of the value
 * @return the comparator, not null
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
static final <R> Comparator<Cell<R>> comparator() {
return (Comparator<Cell<R>>) (Comparator) COMPARATOR;
} | Compare by row then column.
@param <R> the type of the value
@return the comparator, not null |
/**
 * Schedules the task to run exactly once ("one shot") after delayTime ms.
 *
 * NOTE(review): delayTime is narrowed to int — delays beyond
 * Integer.MAX_VALUE ms would truncate; confirm the scheduler API needs int.
 *
 * @param scheduler TaskScheduler object
 */
private void scheduleOneTime(TaskScheduler scheduler) {
scheduledFuture = scheduler.schedule(runnable, (int)delayTime, TimeUnit.MILLISECONDS);
} | Schedule one short
@param scheduler TaskScheduler object |
/**
 * Gets a property by key, unchecked-cast to the caller's expected type.
 * Returns null when the key is absent (HashMap-style semantics of the
 * backing properties map — TODO confirm the map type).
 */
@SuppressWarnings("unchecked")
@Override
public <T> T getProperty(Object key) {
return (T) properties.get(key);
} | /* (non-Javadoc)
@see com.tvd12.ezyfox.core.entities.ApiProperties#getProperty(java.lang.Object) |
/**
 * Gets a property by key with a checked cast: throws ClassCastException at
 * the call site (rather than later) if the stored value is not of the
 * requested class.
 */
@Override
public <T> T getProperty(Object key, Class<T> clazz) {
return clazz.cast(getProperty(key));
} | /* (non-Javadoc)
@see com.tvd12.ezyfox.core.entities.ApiProperties#getProperty(java.lang.Object, java.lang.Class) |
public void writeTreesInOneLineFormat(File outFile) throws IOException {
BufferedWriter writer = new BufferedWriter(new FileWriter(outFile));
for (BinaryTree tree : this) {
writer.write(tree.getAsOneLineString());
writer.write("\n");
}
writer.close();
} | Writes the trees to a file.
@param outFile The output file.
@throws IOException |
/**
 * Sets a bean field reflectively, converting the given value to the field's
 * declared type via str2Obj.
 *
 * NOTE(review): getDeclaredField only finds fields declared directly on the
 * bean's class, not inherited ones — confirm that is intended. The raw
 * Class types are pre-generics style.
 *
 * @param beanObj the bean whose field to set
 * @param fieldName the name of the declared field
 * @param value the raw value to convert and assign
 * @throws Exception if the field is missing or conversion/assignment fails
 */
public static void setBeanProperty(Object beanObj, String fieldName,
Object value) throws Exception {
Class cls = beanObj.getClass();
Field field = cls.getDeclaredField(fieldName);
Class fieldCls = field.getType();
field.setAccessible(true);
Object fieldObj = str2Obj(fieldCls, value);
field.set(beanObj, fieldObj);
} | /*
@param beanObj
@param fieldName
@param value |
public SFSDataWrapper transform(Object value) {
if(value == null)
return transformNullValue(value);
return transformNotNullValue(value);
} | Transform the value to SFSDataWrapper object
@param value the value
@return a SFSDataWrapper object |
/**
 * Transform a java pojo object array to sfsarray. An empty array is encoded
 * as SFSDataType.NULL.
 *
 * NOTE(review): the cast to Object[] assumes a non-primitive array;
 * presumably callers route primitive arrays to the dedicated primitive
 * transformers — confirm.
 *
 * @param value the pojo object array
 * @return a SFSDataWrapper object
 */
protected SFSDataWrapper transformArrayObject(Object value) {
int length = ArrayUtils.getLength(value);
if(length == 0)
return new SFSDataWrapper(SFSDataType.NULL, null);
ISFSArray sfsarray = new SFSArray();
for(Object obj : (Object[])value)
sfsarray.add(transform(obj));
return new SFSDataWrapper(SFSDataType.SFS_ARRAY, sfsarray);
} | Transform a java pojo object array to sfsarray
@param value the pojo object array
@return a SFSDataWrapper object |
/**
 * Transform a java pojo object to sfsobject. Uses the params class cached
 * on the context when available, otherwise builds a fresh one reflectively.
 *
 * @param value pojo java object
 * @return a SFSDataWrapper object
 */
protected SFSDataWrapper transformObject(Object value) {
ResponseParamsClass struct = null;
if(context != null) struct = context.getResponseParamsClass(value.getClass());
if(struct == null) struct = new ResponseParamsClass(value.getClass());
ISFSObject sfsObject = new ResponseParamSerializer().object2params(struct, value);
return new SFSDataWrapper(SFSDataType.SFS_OBJECT, sfsObject);
} | Transform a java pojo object to sfsobject
@param value pojo java object
@return a SFSDataWrapper object |
/**
 * Transform a collection of values to a SFSDataWrapper object, dispatching
 * on the runtime class of the FIRST element only.
 *
 * NOTE(review): a heterogeneous collection will be mistyped — presumably
 * callers only pass homogeneous collections; confirm. An empty collection
 * is encoded as SFSDataType.NULL.
 *
 * @param value the collection of values
 * @return a SFSDataWrapper object
 * @throws IllegalArgumentException when the element type is unsupported
 */
@SuppressWarnings("unchecked")
protected SFSDataWrapper transformCollection(Object value) {
Collection<?> collection = (Collection<?>)value;
if(collection.isEmpty())
return new SFSDataWrapper(SFSDataType.NULL, value);
Iterator<?> it = collection.iterator();
Object firstItem = it.next();
// Arrays and pojo elements get recursive/structured handling.
if(firstItem.getClass().isArray())
return transformArrayCollection(collection);
if(isObject(firstItem.getClass()))
return transformObjectCollection((Collection<?>)value);
// Primitive-wrapper element types map to the corresponding SFS array type.
if(firstItem instanceof Boolean)
return new SFSDataWrapper(SFSDataType.BOOL_ARRAY, value);
if(firstItem instanceof Byte)
return new SFSDataWrapper(SFSDataType.BYTE_ARRAY, collectionToPrimitiveByteArray((Collection<Byte>)value));
if(firstItem instanceof Character)
return new SFSDataWrapper(SFSDataType.BYTE_ARRAY, charCollectionToPrimitiveByteArray((Collection<Character>)value));
if(firstItem instanceof Double)
return new SFSDataWrapper(SFSDataType.DOUBLE_ARRAY, value);
if(firstItem instanceof Float)
return new SFSDataWrapper(SFSDataType.FLOAT_ARRAY, value);
if(firstItem instanceof Integer)
return new SFSDataWrapper(SFSDataType.INT_ARRAY, value);
if(firstItem instanceof Long)
return new SFSDataWrapper(SFSDataType.LONG_ARRAY, value);
if(firstItem instanceof Short)
return new SFSDataWrapper(SFSDataType.SHORT_ARRAY, value);
if(firstItem instanceof String)
return new SFSDataWrapper(SFSDataType.UTF_STRING_ARRAY, value);
throw new IllegalArgumentException("Can not transform value of " + value.getClass());
} | Transform a collection of value to SFSDataWrapper object
@param value the collection of value
@return a SFSDataWrapper object |
protected Transformer findTransformer(Class<?> clazz) {
Transformer answer = transformers.get(clazz);
if(answer == null)
throw new IllegalArgumentException("Can not transform value of " + clazz);
return answer;
} | Find transformer of a type
@param clazz the type
@return a transformer reference |
/**
 * Registers transformers for the boxed primitive wrapper types.
 *
 * NOTE(review): Character is narrowed to a byte (SFS has no char type), so
 * code points above 0xFF truncate — confirm acceptable for the protocol.
 */
protected void initWithWrapperType() {
// =========== wrapper type ==============
transformers.put(Boolean.class, new Transformer() {
@Override
public SFSDataWrapper transform(Object value) {
return new SFSDataWrapper(SFSDataType.BOOL, value);
}
});
transformers.put(Byte.class, new Transformer() {
@Override
public SFSDataWrapper transform(Object value) {
return new SFSDataWrapper(SFSDataType.BYTE, value);
}
});
transformers.put(Character.class, new Transformer() {
@Override
public SFSDataWrapper transform(Object value) {
return new SFSDataWrapper(SFSDataType.BYTE, (byte)((Character)value).charValue());
}
});
transformers.put(Double.class, new Transformer() {
@Override
public SFSDataWrapper transform(Object value) {
return new SFSDataWrapper(SFSDataType.DOUBLE, value);
}
});
transformers.put(Float.class, new Transformer() {
@Override
public SFSDataWrapper transform(Object value) {
return new SFSDataWrapper(SFSDataType.FLOAT, value);
}
});
transformers.put(Integer.class, new Transformer() {
@Override
public SFSDataWrapper transform(Object value) {
return new SFSDataWrapper(SFSDataType.INT, value);
}
});
transformers.put(Long.class, new Transformer() {
@Override
public SFSDataWrapper transform(Object value) {
return new SFSDataWrapper(SFSDataType.LONG, value);
}
});
transformers.put(Short.class, new Transformer() {
@Override
public SFSDataWrapper transform(Object value) {
return new SFSDataWrapper(SFSDataType.SHORT, value);
}
});
} | Add transformers of wrapper type to the map |
/**
 * Registers transformers for arrays of primitive values: byte[] passes
 * through unchanged, char[] is converted to byte[], and the remaining
 * primitive arrays are boxed into collections.
 */
protected void initWithPrimitiveTypeArray() {
// =========== primitve array type ==============
transformers.put(boolean[].class, new Transformer() {
@Override
public SFSDataWrapper transform(Object value) {
return new SFSDataWrapper(SFSDataType.BOOL_ARRAY, primitiveArrayToBoolCollection((boolean[])value));
}
});
transformers.put(byte[].class, new Transformer() {
@Override
public SFSDataWrapper transform(Object value) {
return new SFSDataWrapper(SFSDataType.BYTE_ARRAY, value);
}
});
transformers.put(char[].class, new Transformer() {
@Override
public SFSDataWrapper transform(Object value) {
return new SFSDataWrapper(SFSDataType.BYTE_ARRAY, charArrayToByteArray((char[])value));
}
});
transformers.put(double[].class, new Transformer() {
@Override
public SFSDataWrapper transform(Object value) {
return new SFSDataWrapper(SFSDataType.DOUBLE_ARRAY, primitiveArrayToDoubleCollection((double[])value));
}
});
transformers.put(float[].class, new Transformer() {
@Override
public SFSDataWrapper transform(Object value) {
return new SFSDataWrapper(SFSDataType.FLOAT_ARRAY, primitiveArrayToFloatCollection((float[])value));
}
});
transformers.put(int[].class, new Transformer() {
@Override
public SFSDataWrapper transform(Object value) {
return new SFSDataWrapper(SFSDataType.INT_ARRAY, primitiveArrayToIntCollection((int[])value));
}
});
transformers.put(long[].class, new Transformer() {
@Override
public SFSDataWrapper transform(Object value) {
return new SFSDataWrapper(SFSDataType.LONG_ARRAY, primitiveArrayToLongCollection((long[])value));
}
});
transformers.put(short[].class, new Transformer() {
@Override
public SFSDataWrapper transform(Object value) {
return new SFSDataWrapper(SFSDataType.SHORT_ARRAY, primitiveArrayToShortCollection((short[])value));
}
});
} | Add transformers of array of primitive values to the map |
/**
 * Registers transformers for arrays of wrapper values: Byte[]/Character[]
 * are flattened to primitive byte[], the rest become collections.
 *
 * NOTE(review): method name has a typo ("TypArray"); kept as-is since
 * callers elsewhere may reference it.
 */
protected void initWithWrapperTypArray() {
// =========== wrapper array type ==============
transformers.put(Boolean[].class, new Transformer() {
@Override
public SFSDataWrapper transform(Object value) {
return new SFSDataWrapper(SFSDataType.BOOL_ARRAY, wrapperArrayToCollection((Boolean[])value));
}
});
transformers.put(Byte[].class, new Transformer() {
@Override
public SFSDataWrapper transform(Object value) {
return new SFSDataWrapper(SFSDataType.BYTE_ARRAY, toPrimitiveByteArray((Byte[])value));
}
});
transformers.put(Character[].class, new Transformer() {
@Override
public SFSDataWrapper transform(Object value) {
return new SFSDataWrapper(SFSDataType.BYTE_ARRAY, charWrapperArrayToPrimitiveByteArray((Character[])value));
}
});
transformers.put(Double[].class, new Transformer() {
@Override
public SFSDataWrapper transform(Object value) {
return new SFSDataWrapper(SFSDataType.DOUBLE_ARRAY, wrapperArrayToCollection((Double[])value));
}
});
transformers.put(Float[].class, new Transformer() {
@Override
public SFSDataWrapper transform(Object value) {
return new SFSDataWrapper(SFSDataType.FLOAT_ARRAY, wrapperArrayToCollection((Float[])value));
}
});
transformers.put(Integer[].class, new Transformer() {
@Override
public SFSDataWrapper transform(Object value) {
return new SFSDataWrapper(SFSDataType.INT_ARRAY, wrapperArrayToCollection((Integer[])value));
}
});
transformers.put(Long[].class, new Transformer() {
@Override
public SFSDataWrapper transform(Object value) {
return new SFSDataWrapper(SFSDataType.LONG_ARRAY, wrapperArrayToCollection((Long[])value));
}
});
transformers.put(Short[].class, new Transformer() {
@Override
public SFSDataWrapper transform(Object value) {
return new SFSDataWrapper(SFSDataType.SHORT_ARRAY, wrapperArrayToCollection((Short[])value));
}
});
} | Add transformers of array of wrapper values to the map |
/**
 * Registers the transformer for String and for String[] (converted to a
 * collection) to the map.
 */
protected void initWithStringType() {
transformers.put(String.class, new Transformer() {
@Override
public SFSDataWrapper transform(Object value) {
return new SFSDataWrapper(SFSDataType.UTF_STRING, value);
}
});
transformers.put(String[].class, new Transformer() {
@Override
public SFSDataWrapper transform(Object value) {
return new SFSDataWrapper(SFSDataType.UTF_STRING_ARRAY, stringArrayToCollection((String[])value));
}
});
} | Add transformer of string and transformer of array of strings to the map |
/**
 * Add the edge (and its endpoint nodes) to the graph if not already there.
 * Predecessor/successor indexes are updated only when the edge is new, so
 * duplicate adds are no-ops.
 *
 * @param e the directed edge to add
 */
public void addEdge(DiEdge e) {
if (edges.add(e)) {
int s = e.get1();
int t = e.get2();
addNode(s);
addNode(t);
predecessors.get(t).add(s);
successors.get(s).add(t);
}
} | Add the edge (and the nodes) to the graph if they aren't already there
@param s
@param t |
/**
 * Builds the line graph ("edge graph") of this digraph: one node per edge
 * of this graph (node id = the edge's index in the returned bimap), with an
 * edge from the node for (i,j) to the node for (j,k). When selfAvoiding is
 * true, pairs with i == k are skipped. Nodes are added in edge-list order;
 * edges are created in source order, following successor pointers.
 *
 * @param selfAvoiding if true, omit edges that would return to the source node
 * @return the edge graph paired with the edge-to-node-id bimap
 */
public Pair<IntDiGraph, IntObjectBimap<DiEdge>> edgeGraph(boolean selfAvoiding) {
IntDiGraph g = new IntDiGraph();
IntObjectBimap<DiEdge> edgeToNodeMap = new IntObjectBimap<>(g.edges.size());
edgeToNodeMap.startGrowth();
// First pass: one node per edge, ids assigned in edge-list order.
for (DiEdge e : edges) {
g.addNode(edgeToNodeMap.lookupIndex(e));
}
edgeToNodeMap.stopGrowth();
// loop over edges
for (Indexed<DiEdge> e : enumerate(edges)) {
int newS = e.index();
int oldS = e.get().get1();
int oldT = e.get().get2();
// loop over successors
for (int oldV : successors.get(oldT)) {
// skip if self avoiding and s == v
if (selfAvoiding && oldS == oldV) {
continue;
}
int newT = edgeToNodeMap.lookupIndex(edge(oldT, oldV));
g.addEdge(newS, newT);
}
}
return new Pair<>(g, edgeToNodeMap);
} | Returns a new graph whose nodes are the edges of this graph with and edge
from (i,j) to (s,t) if j == s and i != t; if selfAvoiding is false, then
i = t is allowed; the id of the nodes correspond to the position in the edgeList of this graph;
the nodes are added in that order; edges are created in the order of the source nodes following successor pointers |
/**
 * Translates a SmartFox private-message server event into an
 * ApiPrivateMessageImpl and notifies the registered handlers.
 *
 * NOTE(review): sfsUser and recipient are dereferenced unconditionally
 * while sfsRoom is null-checked — presumably this event type always
 * carries both users; confirm.
 */
@Override
public void handleServerEvent(ISFSEvent event) throws SFSException {
Zone sfsZone = (Zone) event.getParameter(SFSEventParam.ZONE);
Room sfsRoom = (Room) event.getParameter(SFSEventParam.ROOM);
User sfsUser = (User) event.getParameter(SFSEventParam.USER);
User recipient = (User)event.getParameter(SFSEventParam.RECIPIENT);
String message = (String)event.getParameter(SFSEventParam.MESSAGE);
ISFSObject params = (ISFSObject)event.getParameter(SFSEventParam.OBJECT);
ApiPrivateMessageImpl messageObject = new ApiPrivateMessageImpl();
messageObject.setContent(message);
messageObject.setZone((ApiZone)sfsZone.getProperty(APIKey.ZONE));
messageObject.setRoom((sfsRoom != null)
? (ApiRoom)sfsRoom.getProperty(APIKey.ROOM) : null);
messageObject.setSender((ApiUser)sfsUser.getProperty(APIKey.USER));
messageObject.setRecipient((ApiUser)recipient.getProperty(APIKey.USER));
notifyToHandlers(messageObject, params);
} | /* (non-Javadoc)
@see com.smartfoxserver.v2.extensions.IServerEventHandler#handleServerEvent(com.smartfoxserver.v2.core.ISFSEvent) |
/**
 * Sends the object message to the resolved recipients in the target room.
 *
 * @return always Boolean.TRUE
 */
@SuppressWarnings("unchecked")
@Override
public Boolean execute() {
User sfsSender = CommandUtil.getSfsUser(sender, api);
api.sendObjectMessage(getSfsRoom(), sfsSender, getMessage(), getSFSRecipients());
return Boolean.TRUE;
} | /* (non-Javadoc)
@see com.tvd12.ezyfox.core.command.BaseCommand#execute() |
/**
 * Gets the smartfox room reference: by id when no room name is set,
 * otherwise by name.
 *
 * @return smartfox room reference
 */
private Room getSfsRoom() {
if(StringUtils.isEmpty(roomName))
return extension.getParentZone().getRoomById(roomId);
return CommandUtil.getSfsRoom(roomName, extension);
} | Get smartfox room reference by name
@return smartfox room reference |
/**
 * Creates the smartfox parameter object, trying in order: a POJO message
 * object (serialized via its registered params class), a JSON string, a
 * plain string (stored under APIKey.MESSAGE), or an empty object.
 *
 * NOTE(review): when messageObject is set but has no registered params
 * class, the method silently falls through to the other sources — confirm
 * that is intended.
 *
 * @return smartfox parameter object
 */
private ISFSObject getMessage() {
if(messageObject != null) {
MessageParamsClass clazz = context.getMessageParamsClass(messageObject.getClass());
if(clazz != null)
return new ResponseParamSerializer().object2params(clazz.getUnwrapper(), messageObject);
}
if(jsonMessage != null)
return SFSObject.newFromJsonData(jsonMessage);
if(messageString == null)
return new SFSObject();
ISFSObject answer = new SFSObject();
answer.putUtfString(APIKey.MESSAGE, messageString);
return answer;
} | Create smartfox parameter object from a POJO object or json string or string
@return smartfox parameter object |
private Collection<User> getSFSRecipients() {
List<User> answer = new ArrayList<>();
for(String recipient : recipients) {
User sfsUser = CommandUtil.getSfsUser(recipient, api);
if(sfsUser != null)
answer.add(sfsUser);
}
return answer;
} | Get list of smartfox users (recipients)
@return list of smartfox users |
/**
 * Adds each user's name to the recipient set.
 *
 * @param recipients the users to message
 * @return this, for chaining
 */
@Override
public SendObjectMessage recipients(Collection<? extends ApiBaseUser> recipients) {
for(ApiBaseUser user : recipients) {
this.recipients.add(user.getName());
}
return this;
} | /* (non-Javadoc)
@see com.tvd12.ezyfox.core.command.SendObjectMessage#recipients(java.util.List) |
/**
 * Adds all the given recipient names (deduplicated via a set).
 *
 * @param recipients the recipient names to add
 * @return this, for chaining
 */
@Override
public SendObjectMessage recipients(Iterable<String> recipients) {
this.recipients.addAll(Sets.newHashSet(recipients));
return this;
} | /* (non-Javadoc)
@see com.tvd12.ezyfox.core.command.SendObjectMessage#recipients(java.lang.Iterable) |
/**
 * Invokes the listener's execute method, passing the context, the room's
 * API wrapper (stored under APIKey.ROOM) and the user agent.
 */
@Override
protected void invokeExecuteMethod(Method method, Object listener, Object userAgent) {
ReflectMethodUtil.invokeExecuteMethod(
method,
listener,
context, getRoom().getProperty(APIKey.ROOM), userAgent);
} | /*
(non-Javadoc)
@see com.tvd12.ezyfox.sfs2x.clienthandler.ClientEventHandler#invokeExecuteMethod(java.lang.reflect.Method, java.lang.Object, java.lang.Object) |
/**
 * Computes a maximum branching (arborescence) of the given complete graph,
 * rooted at <code>root</code>, using Tarjan's algorithm, and stores the
 * result in inverted form: invertedMaxBranching[child] = parent.
 *
 * @param graph complete weighted digraph to search
 * @param root the fixed root node (receives no incoming edge)
 * @param invertedMaxBranching output array, indexed by child node
 */
public void getMaxBranching(CompleteGraph graph, int root,
int[] invertedMaxBranching) {
// Number of nodes.
int numNodes = graph.getNumberOfNodes();
// Branching.
SparseGraph maxBranching = new SparseGraph();
/*
 * Weighted edges used to allow weight modification and avoid any impact
 * in the original graph.
 */
SimpleWeightedEdge[][] edges = new SimpleWeightedEdge[numNodes][numNodes];
// Disjoint sets for the strongly connected components (SCC).
DisjointSets sPartition = new DisjointSets(numNodes);
// Disjoint sets for the weakly connected components (WCC).
DisjointSets wPartition = new DisjointSets(numNodes);
/*
 * Priority queues for each strongly connected component. In the
 * beginning, each SCC is composed by exactly one node.
 */
ArrayList<PriorityQueue<SimpleWeightedEdge>> incomingEdges = new ArrayList<PriorityQueue<SimpleWeightedEdge>>(
numNodes);
// Unique incoming edges for each SCC. Initially, no SCC has such edge.
SimpleWeightedEdge[] enter = new SimpleWeightedEdge[numNodes];
/*
 * List of root components, i.e., SCCs that have no incoming edges
 * (enter[scc] == null). In the beginning, every SCC is a root
 * component.
 */
LinkedList<Integer> rootComponents = new LinkedList<Integer>();
// Root node of each root component.
int[] min = new int[numNodes];
for (int node = 0; node < numNodes; ++node) {
// Every SCC is a root component.
rootComponents.add(node);
/*
 * The head of its root component is its only node. This array is
 * called min in Tarjan's paper.
 */
min[node] = node;
// Create a priority queue for each SCC.
PriorityQueue<SimpleWeightedEdge> sccPriorityQueue = new PriorityQueue<SimpleWeightedEdge>();
incomingEdges.add(sccPriorityQueue);
// No incoming edge is considered (nor created) for the root node.
if (node != root) {
/*
 * Create and add all incoming edges of <code>node</code> to its
 * SCC priority queue.
 */
for (int from = 0; from < numNodes; ++from) {
if (from == node)
// Skip autocycle edges.
continue;
// Create an weighted edge and add it to the priority queue.
edges[from][node] = new SimpleWeightedEdge(from, node,
graph.getEdgeWeight(from, node));
sccPriorityQueue.add(edges[from][node]);
}
}
}
// Root component with no available incoming edges.
LinkedList<Integer> doneRootComponents = new LinkedList<Integer>();
while (!rootComponents.isEmpty()) {
// Get some arbitrary root component.
int sccTo = rootComponents.pop();
// Maximum edge entering the root component 'sccTo'.
SimpleWeightedEdge maxInEdge = incomingEdges.get(sccTo).poll();
if (maxInEdge == null) {
// No edge left to consider in this component.
doneRootComponents.add(sccTo);
continue;
}
// SCC component of edge 'e' from node: e = (from, to).
int sccFrom = sPartition.find(maxInEdge.from);
if (sccFrom == sccTo) {
// Skip, for now, this component.
rootComponents.add(sccTo);
continue;
}
// Include the selected edge in the current branching.
maxBranching.addEdge(maxInEdge.from, maxInEdge.to);
// SCC component of edge 'e' from node, where e = (from, to).
int wssFrom = wPartition.find(maxInEdge.from);
// SCC component of edge 'e' to node, where e = (from, to).
int wssTo = wPartition.find(maxInEdge.to);
// Edge connects two weakly connected components.
if (wssFrom != wssTo) {
wPartition.union(wssFrom, wssTo);
enter[sccTo] = maxInEdge;
continue;
}
/*
 * Edge is within the same WCC, thus it inclusion will create a new
 * SCC by uniting some old SCCs (the ones on the path from e.to to
 * e.from).
 */
double minEdgeWeight = Double.POSITIVE_INFINITY;
int minScc = -1;
SimpleWeightedEdge tmpEdge = maxInEdge;
while (tmpEdge != null) {
if (tmpEdge.weight < minEdgeWeight) {
minEdgeWeight = tmpEdge.weight;
minScc = sPartition.find(tmpEdge.to);
}
tmpEdge = enter[sPartition.find(tmpEdge.from)];
}
// Increment incoming edges weight.
double inc = minEdgeWeight - maxInEdge.weight;
for (SimpleWeightedEdge e : incomingEdges.get(sccTo))
e.weight += inc;
// Set the head of the current SCC.
min[sccTo] = min[minScc];
// Include all used SCCs in the current SCC.
tmpEdge = enter[sccFrom];
while (tmpEdge != null) {
/*
 * Increment incoming edges weight and include them in the
 * current SCC priority queue.
 */
int tmpSccTo = sPartition.find(tmpEdge.to);
inc = minEdgeWeight - tmpEdge.weight;
for (SimpleWeightedEdge e : incomingEdges.get(tmpSccTo)) {
e.weight += inc;
incomingEdges.get(sccTo).add(e);
}
// Remove the priority queue of this SCC.
incomingEdges.set(tmpSccTo, null);
sPartition.union(sccTo, tmpSccTo);
// Next edge.
tmpEdge = enter[sPartition.find(tmpEdge.from)];
}
// Include the new SCC to be considered in the future.
rootComponents.add(sccTo);
}
// Invert the maximum branching.
boolean[] visited = new boolean[numNodes];
for (int scc : doneRootComponents)
invertMaximumBranching(min[scc], maxBranching, visited,
invertedMaxBranching);
}
/**
 * Walk through the given branching from <code>node</code> and store the
 * inverted branching in <code>invertedMaxBranching</code>.
 *
 * In fact, the given branching can include cycles. But it is only necessary
 * to disconsider the last edges of each cycle to get the real branching.
 * Thus, we use the array <code>visited</code>.
 *
 * @param node
 * @param branching
 * @param visited
 * @param invertedMaxBranching
 */
private void invertMaximumBranching(int node, SparseGraph branching,
boolean[] visited, int[] invertedMaxBranching) {
visited[node] = true;
Set<Integer> toNodes = branching.getOutEdges(node);
if (toNodes == null)
return;
for (int to : toNodes) {
if (visited[to])
continue;
invertedMaxBranching[to] = node;
invertMaximumBranching(to, branching, visited, invertedMaxBranching);
}
} | Fill <code>maxBranching</code> with a maximum branching of the given
complete graph <code>graph</code> and rooted in the given node.
@param graph
@param root
@param invertedMaxBranching |
/**
 * Returns a trigger DAG G' = (V', E'): vertices correspond to indexes in
 * the schedule and there is an edge (i, j) in E' if popping s_i triggered
 * s_j. Items still pending at the end of the schedule get an edge to a
 * virtual node numbered s.size() + g.index(node).
 *
 * @param g the underlying graph whose successor lists define triggering
 * @param s the pop schedule
 * @return the trigger DAG
 */
public static IntDiGraph buildTriggers(IntDiGraph g, Schedule s) {
// return trigger DAG
IntDiGraph d = new IntDiGraph();
// map from node to triggering indexes
DefaultDict<Integer, List<Integer>> currentTriggers = new DefaultDict<>(i -> new LinkedList<Integer>());
for (Indexed<Integer> s_j : enumerate(s)) {
// add in arcs from triggers
for (int i : currentTriggers.get(s_j.get())) {
d.addEdge(i, s_j.index());
}
// remove s_j from the agenda
currentTriggers.remove(s_j.get());
// record that j is triggering consequents
for (int s_k : g.getSuccessors(s_j.get())) {
currentTriggers.get(s_k).add(s_j.index());
}
}
// add a link to the unpopped version of each node still on the agenda
// the integer will be the length of the trajectory plus an index into
// the set of nodes
for (Entry<Integer, List<Integer>> item : currentTriggers.entrySet()) {
int s_k = item.getKey();
List<Integer> triggers = item.getValue();
for (int j : triggers) {
d.addEdge(j, s.size() + g.index(s_k));
}
}
return d;
} | return a DAG, G' = V', E' such that vertecies correspond to indexes in
the schedule and there is an edge (i,j) \in E' if s_i triggered s_j |
public static <T> Iterator<T> cycle(Iterator<T> itr, int times) {
// if we repeat 0, then it is as if the itr were empty, so don't take the items
final List<T> items = (times != 0) ? Lists.newLinkedList(iterable(itr)) : Collections.emptyList();
return new Iterator<T>() {
private Iterator<T> currentItr = Collections.emptyIterator();
private int ncalls = 0;
private Iterator<T> getItr() {
// if this is the first call or we've gotten to the end of a round
if (!currentItr.hasNext() && ncalls < times) {
currentItr = items.iterator();
ncalls++;
}
return currentItr;
}
@Override
public boolean hasNext() {
return getItr().hasNext();
}
@Override
public T next() {
return getItr().next();
}
};
} | Returns an iterator cycles over the elements in items repeat times;
if repeat < 0, then this cycles indefinitely
if repeat == 0, then the iterator is an empty iterator |
/**
 * Forward pass: for each link variable e_{p,c},
 *   b(e_{p,c} = True)  = d_{p,c}
 *   b(e_{p,c} = False) = 1 - d_{p,c}   (in semiring s)
 * Non-link variables are left null in y.varBeliefs; factor beliefs are
 * dropped (empty array).
 */
@Override
public Beliefs forward() {
Tensor dep = depIn.getOutput();
int n = dep.getDims()[1];
Beliefs origB = inf.getOutput();
y = new Beliefs(s);
y.varBeliefs = new VarTensor[origB.varBeliefs.length];
y.facBeliefs = new VarTensor[0];
for (int v=0; v<origB.varBeliefs.length; v++) {
Var var = origB.varBeliefs[v].getVars().get(0);
if (var instanceof LinkVar) {
LinkVar link = (LinkVar) var;
int p = link.getParent();
int c = link.getChild();
int pp = EdgeScores.getTensorParent(p, c);
assert p < n && c < n;
// Initialize the belief tensor.
y.varBeliefs[v] = new VarTensor(s, origB.varBeliefs[v].getVars(), s.zero());
// Set the marginal b(e_{p,c} = True) and b(e_{p,c} = False).
y.varBeliefs[v].setValue(LinkVar.TRUE, dep.get(pp, c));
y.varBeliefs[v].setValue(LinkVar.FALSE, s.minus(s.one(), dep.get(pp, c)));
}
}
return y;
} | Forward pass:
b(e_{p,c} = True) = d_{p,c}
b(e_{p,c} = False) = 1.0 - d_{p,c}
Abbreviated:
t_i = m_i
f_i = 1.0 - m_i
/**
 * Backward pass: dG/dm_i += dG/dt_i * dt_i/dm_i + dG/df_i * df_i/dm_i
 *                        =  dG/dt_i - dG/df_i
 * Only link-variable beliefs contribute; null belief adjoints are skipped.
 */
@Override
public void backward() {
Tensor depAdj = depIn.getOutputAdj();
int n = depAdj.getDims()[1];
for (int v=0; v<yAdj.varBeliefs.length; v++) {
if (yAdj.varBeliefs[v] != null) {
Var var = y.varBeliefs[v].getVars().get(0);
if (var instanceof LinkVar) {
LinkVar link = (LinkVar) var;
int p = link.getParent();
int c = link.getChild();
int pp = EdgeScores.getTensorParent(p, c);
assert p < n && c < n;
// Add the adjoint of b(e_{p,c} = True).
depAdj.add(yAdj.varBeliefs[v].getValue(LinkVar.TRUE), pp, c);
// Add the adjoint of b(e_{p,c} = False)
depAdj.subtract(yAdj.varBeliefs[v].getValue(LinkVar.FALSE), pp, c);
}
}
}
} | Backward pass:
dG/dm_i += dG/dt_i*dt_i/dm_i + dG/df_i*df_i/dm_i
= dG/dt_i - dG/df_i
/**
 * Copies every entry of the given map into this parameter set via
 * {@link #set(Object, Object)}.
 *
 * @param values the key/value pairs to store
 */
@Override
public void setAll(Map<Object, Object> values) {
    // Iterate entries directly instead of keySet()+get(): one lookup fewer
    // per key and safe for maps with expensive lookups.
    for (Map.Entry<Object, Object> entry : values.entrySet())
        set(entry.getKey(), entry.getValue());
}
/* (non-Javadoc)
* @see com.tvd12.ezyfox.core.transport.Parameters#set(java.lang.Object, java.lang.Object)
*/
@Override
public Object set(Object key, Object value) {
    // Keys are stored by their string form; values are passed through the
    // shared TRANSFORMER before being stored in the underlying ISFSObject.
    data.put(key.toString(), TRANSFORMER.transform(value));
    // Returns the original (untransformed) value, per the Parameters contract.
    return value;
}
/* (non-Javadoc)
* @see com.tvd12.ezyfox.core.transport.Parameters#get(java.lang.Object)
*/
/**
 * Looks up the raw value stored under the given key.
 *
 * @param key the key (matched by its string form)
 * @return the unwrapped value, or null when the key is absent
 */
@SuppressWarnings("unchecked")
@Override
public <T> T get(Object key) {
    // Null-guard: the original called .getObject() unconditionally and threw
    // a NullPointerException for absent keys; return null instead.
    return (T) (data.get(key.toString()) == null
            ? null
            : data.get(key.toString()).getObject());
}
/* (non-Javadoc)
* @see com.tvd12.ezyfox.core.transport.Parameters#get(java.lang.Object, java.lang.Class)
*/
/**
 * Looks up the value stored under the given key, fetched as the given type.
 */
@SuppressWarnings("unchecked")
@Override
public <T> T get(Object key, Class<T> clazz) {
    String name = (String) key;
    Object value = doGet(name, clazz);
    return (T) value;
}
/**
 * Resolves the fetcher registered for the requested type and applies it.
 *
 * @throws IllegalArgumentException when no fetcher is registered for clazz
 */
private Object doGet(String key, Class<?> clazz) {
    ValueFetcher fetcher = FETCHERS.get(clazz);
    if (fetcher == null)
        throw new IllegalArgumentException("has no value with " + clazz + " and key " + key);
    return fetcher.get(key, data);
}
/* (non-Javadoc)
* @see com.tvd12.ezyfox.core.transport.Parameters#keys()
*/
@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
public Set<Object> keys() {
    // Exposes the underlying ISFSObject's key set directly; the raw cast
    // widens the element type (presumably String) to Object -- TODO confirm.
    return (Set)data.getKeys();
}
/* (non-Javadoc)
* @see com.tvd12.ezyfox.core.transport.Parameters#values()
*/
@Override
public List<Object> values() {
    // Not supported by this implementation.
    throw new UnsupportedOperationException();
}
/* (non-Javadoc)
* @see com.tvd12.ezyfox.core.transport.Parameters#toMap()
*/
@Override
public Map<Object, Object> toMap() {
    // Not supported by this implementation.
    throw new UnsupportedOperationException();
}
/** Strategy for extracting a value of one specific type from an ISFSObject. */
public static interface ValueFetcher {
    Object get(String key, ISFSObject data);
}
/**
 * Builds the default mapping from requested Java types to {@link ValueFetcher}
 * strategies that pull correspondingly-typed values out of an ISFSObject.
 * Covers primitive scalars, wrapper types, String, their array forms, and the
 * Parameters/Arraymeters wrapper types.
 */
public static Map<Class<?>, ValueFetcher> defaultFetchers() {
    Map<Class<?>, ValueFetcher> answer = new HashMap<>();
    // ===== Primitive scalar fetchers =====
    answer.put(boolean.class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return data.getBool(key);
        }
    });
    answer.put(byte.class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return data.getByte(key);
        }
    });
    answer.put(char.class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            // Chars travel as bytes; narrow the byte back to char.
            return (char)data.getByte(key).byteValue();
        }
    });
    answer.put(double.class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return data.getDouble(key);
        }
    });
    answer.put(float.class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return data.getFloat(key);
        }
    });
    answer.put(int.class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return data.getInt(key);
        }
    });
    answer.put(long.class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return data.getLong(key);
        }
    });
    answer.put(short.class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return data.getShort(key);
        }
    });
    // ===== Wrapper scalar and String fetchers =====
    answer.put(Boolean.class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return data.getBool(key);
        }
    });
    answer.put(Byte.class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return data.getByte(key);
        }
    });
    answer.put(Character.class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return (char)data.getByte(key).byteValue();
        }
    });
    answer.put(Double.class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return data.getDouble(key);
        }
    });
    answer.put(Float.class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return data.getFloat(key);
        }
    });
    answer.put(Integer.class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return data.getInt(key);
        }
    });
    answer.put(Long.class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return data.getLong(key);
        }
    });
    answer.put(Short.class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return data.getShort(key);
        }
    });
    answer.put(String.class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return data.getUtfString(key);
        }
    });
    // ===== Wrapper-type array fetchers (collections converted to arrays) =====
    answer.put(Boolean[].class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return collectionToWrapperBoolArray(data.getBoolArray(key));
        }
    });
    answer.put(Byte[].class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return toByteWrapperArray(data.getByteArray(key));
        }
    });
    answer.put(Character[].class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return toCharWrapperArray(data.getByteArray(key));
        }
    });
    answer.put(Double[].class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return collectionToWrapperDoubleArray(data.getDoubleArray(key));
        }
    });
    answer.put(Float[].class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return collectionToWrapperFloatArray(data.getFloatArray(key));
        }
    });
    answer.put(Integer[].class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return collectionToWrapperIntArray(data.getIntArray(key));
        }
    });
    answer.put(Long[].class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return collectionToWrapperLongArray(data.getLongArray(key));
        }
    });
    answer.put(Short[].class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return collectionToWrapperShortArray(data.getShortArray(key));
        }
    });
    answer.put(String[].class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return collectionToStringArray(data.getUtfStringArray(key));
        }
    });
    // ===== Primitive array fetchers =====
    answer.put(boolean[].class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return collectionToPrimitiveBoolArray(data.getBoolArray(key));
        }
    });
    answer.put(byte[].class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return data.getByteArray(key);
        }
    });
    answer.put(char[].class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return byteArrayToCharArray(data.getByteArray(key));
        }
    });
    answer.put(double[].class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return collectionToPrimitiveDoubleArray(data.getDoubleArray(key));
        }
    });
    answer.put(float[].class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return collectionToPrimitiveFloatArray(data.getFloatArray(key));
        }
    });
    answer.put(int[].class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return collectionToPrimitiveIntArray(data.getIntArray(key));
        }
    });
    answer.put(long[].class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return collectionToPrimitiveLongArray(data.getLongArray(key));
        }
    });
    answer.put(short[].class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return collectionToPrimitiveShortArray(data.getShortArray(key));
        }
    });
    // ===== Nested parameter-object fetchers =====
    answer.put(Parameters.class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return new SfsParameters(data.getSFSObject(key));
        }
    });
    answer.put(Parameters[].class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            ISFSArray array = data.getSFSArray(key);
            Parameters[] answer = new SfsParameters[array.size()];
            for(int i = 0 ; i < array.size() ; i++)
                answer[i] = new SfsParameters(array.getSFSObject(i));
            return answer;
        }
    });
    answer.put(Arraymeters.class, new ValueFetcher() {
        public Object get(String key, ISFSObject data) {
            return new SfsArrayParameters(data.getSFSArray(key));
        }
    });
    return answer;
}
} | /* (non-Javadoc)
@see com.tvd12.ezyfox.core.transport.Parameters#setAll(java.util.Map) |
/**
 * Returns whether the parents array encodes a valid dependency tree: every
 * token headed, wall-rooted, connected, acyclic, and (optionally) projective.
 *
 * @param parents The parents array.
 * @param isProjective Whether to additionally require projectivity.
 * @param isSingleHeaded Whether the wall must head exactly one token.
 * @return True iff the array encodes a valid dependency tree.
 */
public static boolean isDepTree(int[] parents, boolean isProjective, boolean isSingleHeaded) {
    // Every token must have a head; the parents-array encoding already
    // rules out multiple heads per token.
    if (ParentsArray.countChildrenOf(parents, ParentsArray.EMPTY_POSITION) != 0) {
        return false;
    }
    // The wall must head at least one token, and exactly one when single-headed.
    int numWallChildren = ParentsArray.countChildrenOf(parents, ParentsArray.WALL_POSITION);
    if (numWallChildren < 1 || (isSingleHeaded && numWallChildren != 1)) {
        return false;
    }
    // Reject cycles and disconnected graphs.
    if (!ParentsArray.isConnectedAndAcyclic(parents)) {
        return false;
    }
    // Optionally require projectivity.
    if (isProjective && !ParentsArray.isProjective(parents)) {
        return false;
    }
    return true;
} | Returns whether this is a valid depedency tree: a directed acyclic graph
with a single root which covers all the tokens. |
/**
 * Whether the directed graph (including an implicit wall node) denoted by
 * this parents array is fully connected. Since each node has at most one
 * head, a connected graph here is necessarily also acyclic.
 */
public static boolean isConnectedAndAcyclic(int[] parents) {
    int numVisited = 0;
    // 1-indexed array indicating whether each node (including the wall at position 0) has been visited.
    boolean[] visited = new boolean[parents.length+1];
    Arrays.fill(visited, false);
    // Visit the nodes reachable from the wall in a pre-order traversal.
    IntStack stack = new IntStack();
    stack.push(-1); // Start at the wall node (-1, stored at visited[0]).
    while (stack.size() > 0) {
        // Pop off the current node from the stack.
        int cur = stack.pop();
        if (visited[cur+1] == true) {
            continue;
        }
        // Mark it as visited.
        visited[cur+1] = true;
        numVisited++;
        // Push the current node's unvisited children onto the stack.
        for (int i=0; i<parents.length; i++) {
            if (parents[i] == cur && visited[i+1] == false) {
                stack.push(i);
            }
        }
    }
    // Connected iff every token plus the wall was reached.
    return numVisited == parents.length + 1;
} | Whether the directed graph (including an implicit wall node) denoted by this parents array is
fully connected. If a singly-headed directed graph is connected it must also be acyclic. |
public static ArrayList<Integer> getSiblingsOf(int[] parents, int idx) {
int parent = parents[idx];
ArrayList<Integer> siblings = new ArrayList<Integer>();
for (int i=0; i<parents.length; i++) {
if (parents[i] == parent) {
siblings.add(i);
}
}
return siblings;
} | Gets the siblings of the specified word.
@param parents The parents array.
@param idx The word for which to extract siblings.
@return The indices of the siblings. |
/**
 * Checks whether a dependency structure encoded as a parents array contains a
 * cycle: from every token we walk toward the wall, and any ancestor chain
 * longer than n-1 links can only arise from a cycle.
 *
 * @param parents parents[i] is the head of word i; -1 denotes the wall node.
 * @return True if the structure contains a cycle, False otherwise.
 */
public static boolean containsCycle(int[] parents) {
    for (int start = 0; start < parents.length; start++) {
        int steps = 0;
        for (int anc = parents[start]; anc != ParentsArray.WALL_POSITION; anc = parents[anc]) {
            if (++steps > parents.length - 1) {
                return true;
            }
        }
    }
    return false;
} | Checks if a dependency tree represented as a parents array contains a cycle.
@param parents
A parents array where parents[i] contains the index of the
parent of the word at position i, with parents[i] = -1
indicating that the parent of word i is the wall node.
@return True if the tree specified by the parents array contains a cycle,
False otherwise. |
/**
 * Checks that a dependency tree represented as a parents array is projective:
 * every word strictly inside an arc's span must attach within that span, and
 * no word outside the span may attach strictly inside it.
 *
 * @param parents parents[i] is the head of word i; -1 denotes the wall node.
 * @return True if the tree is projective, False otherwise.
 */
public static boolean isProjective(int[] parents) {
    for (int i=0; i<parents.length; i++) {
        // Treat the wall as position n so that span arithmetic is uniform.
        int pari = (parents[i] == ParentsArray.WALL_POSITION) ? parents.length : parents[i];
        int minI = i < pari ? i : pari;
        int maxI = i > pari ? i : pari;
        for (int j=0; j<parents.length; j++) {
            if (j == i) {
                continue;
            }
            int parj = (parents[j] == ParentsArray.WALL_POSITION) ? parents.length : parents[j];
            if (minI < j && j < maxI) {
                // j lies strictly inside the arc's span: its head must stay in the span.
                if (!(minI <= parj && parj <= maxI)) {
                    return false;
                }
            } else {
                // j lies outside the span: its head must not reach strictly inside it.
                if (!(parj <= minI || parj >= maxI)) {
                    return false;
                }
            }
        }
    }
    return true;
} | Checks that a dependency tree represented as a parents array is projective.
@param parents
A parents array where parents[i] contains the index of the
parent of the word at position i, with parents[i] = -1
indicating that the parent of word i is the wall node.
@return True if the tree specified by the parents array is projective,
False otherwise. |
public static int countChildrenOf(int[] parents, int parent) {
int count = 0;
for (int i=0; i<parents.length; i++) {
if (parents[i] == parent) {
count++;
}
}
return count;
} | Counts of the number of children in a dependency tree for the given
parent index.
@param parents
A parents array where parents[i] contains the index of the
parent of the word at position i, with parents[i] = -1
indicating that the parent of word i is the wall node.
@param parent The parent for which the children should be counted.
@return The number of entries in <code>parents</code> that equal
<code>parent</code>. |
/**
 * Gets the children of the specified parent: all positions whose head is
 * {@code parent}, in increasing order.
 *
 * @param parents A parents array.
 * @param parent The parent for which the children should be extracted.
 * @return The indices of the children.
 */
public static IntArrayList getChildrenOf(int[] parents, int parent) {
    IntArrayList children = new IntArrayList();
    int n = parents.length;
    for (int child = 0; child < n; child++) {
        if (parents[child] == parent) {
            children.add(child);
        }
    }
    return children;
} | Gets the children of the specified parent.
@param parents A parents array.
@param parent The parent for which the children should be extracted.
@return The indices of the children. |
public static boolean isAncestor(int idx1, int idx2, int[] parents) {
int anc = parents[idx2];
while (anc != -1) {
if (anc == idx1) {
return true;
}
anc = parents[anc];
}
return false;
} | Checks whether idx1 is the ancestor of idx2. If idx1 is the parent of
idx2 this will return true, but if idx1 == idx2, it will return false.
@param idx1 The ancestor position.
@param idx2 The descendent position.
@param parents The parents array.
@return Whether idx is the ancestor of idx2. |
/**
 * Gets the shortest dependency path between two tokens: up from start to the
 * least common ancestor, then down to end, inclusive of both endpoints.
 * For parents=[1, -1, 1], the path from x0 to x2 is
 * [(0, UP), (1, DOWN), (2, NONE)].
 *
 * @param start The position of the start token (-1 for the wall).
 * @param end The position of the end token (-1 for the wall).
 * @param parents The parents array.
 * @return The path as (position, direction) pairs, or null if no path exists
 *         (e.g. a cycle, or no route to the wall).
 */
public static List<Pair<Integer,Dir>> getDependencyPath(int start, int end, int[] parents) {
    int n = parents.length;
    if (start < -1 || start >= n || end < -1 || end >= n) {
        throw new IllegalArgumentException(String.format("Invalid start/end: %d/%d", start, end));
    }
    // Build a hash set of the ancestors of end, including end and the
    // wall node.
    IntHashSet endAncSet = new IntHashSet();
    IntArrayList endAncList = new IntArrayList();
    int curPos = end;
    // NOTE(review): -2 appears to be a sentinel for an unattached parent
    // (cf. ParentsArray.EMPTY_POSITION) -- TODO confirm.
    while (curPos != ParentsArray.WALL_POSITION && curPos != -2 && !endAncSet.contains(curPos)) {
        endAncSet.add(curPos);
        endAncList.add(curPos);
        curPos = parents[curPos];
    }
    if (curPos != -1) {
        // No path to the wall. Possibly a cycle.
        return null;
    }
    endAncSet.add(curPos); // Don't forget the wall node.
    endAncList.add(curPos);
    // Create the dependency path.
    List<Pair<Integer,Dir>> path = new ArrayList<Pair<Integer,Dir>>();
    // Add all the "edges" from the start up to the one pointing at the LCA.
    IntHashSet startAncSet = new IntHashSet();
    curPos = start;
    while (!endAncSet.contains(curPos) && curPos != -2 && !startAncSet.contains(curPos)) {
        path.add(new Pair<Integer,Dir>(curPos, Dir.UP));
        startAncSet.add(curPos);
        curPos = parents[curPos];
    }
    if (!endAncSet.contains(curPos)) {
        // No path to any nodes in endAncSet or a cycle.
        return null;
    }
    // Least common ancestor.
    int lca = curPos;
    // Add all the edges from the LCA (inclusive) down to, but excluding, end.
    int lcaIndex = endAncList.lookupIndex(lca);
    for (int i = lcaIndex; i > 0; i--) {
        path.add(new Pair<Integer,Dir>(endAncList.get(i), Dir.DOWN));
    }
    // TODO: Update unit tests to reflect this change.
    path.add(new Pair<Integer,Dir>(end, Dir.NONE));
    return path;
} | Gets the shortest dependency path between two tokens.
<p>
For the tree: x0 <-- x1 --> x2, represented by parents=[1, -1, 1] the
dependency path from x0 to x2 would be a list [(0, UP), (1, DOWN), (2, NONE)]
</p>
@param start The position of the start token.
@param end The position of the end token.
@param parents The parents array.
@return The path as a list of pairs containing the word positions and the
direction of the edge, inclusive of the start and end.
Or null if there is no path. |
/**
 * Gets a topological sort of the graph reachable from root using an
 * iterative Tarjan (1976) style depth-first search. Dependencies of a node
 * always appear before the node itself in the returned order.
 *
 * @param root The root of the graph.
 * @param deps Functional description of the graph's dependencies.
 * @return The topological sort (dependencies first, root last).
 * @throws IllegalStateException if the graph contains a cycle.
 */
public static <T> List<T> toposort(T root, Deps<T> deps) {
    List<T> order = new ArrayList<T>();
    HashSet<T> done = new HashSet<T>();
    Stack<T> todo = new Stack<T>();
    // Nodes on the current DFS path; revisiting one of these implies a cycle.
    HashSet<T> ancestors = new HashSet<T>();
    // Run a Tarjan (1976) style topological sort.
    todo.push(root);
    while (!todo.isEmpty()) {
        T x = todo.peek();
        // Whether all x's descendents are done.
        boolean ready = true;
        for (T y : deps.getDeps(x)) {
            if (!done.contains(y)) {
                ready = false;
                todo.push(y);
            }
        }
        if (ready) {
            // All dependencies emitted: emit x (at most once) and leave the path.
            todo.pop();
            ancestors.remove(x);
            if (done.add(x)) {
                order.add(x);
            }
        } else {
            if (ancestors.contains(x)) {
                throw new IllegalStateException("Graph is not a DAG. Cycle involves node: " + x);
            }
            ancestors.add(x);
        }
    }
    return order;
} | Gets a topological sort for the graph.
@param root The root of the graph.
@param deps Functional description of the graph's dependencies.
@return The topological sort. |
/**
 * Gets a topological sort for the graph, where the depth-first search is cut
 * off by an input set given as a list.
 *
 * @param inputs The input modules (must contain no duplicates).
 * @param root The root of the graph.
 * @param deps Functional description of the graph's dependencies.
 * @param isFullCut Whether the input set is a full cut of the graph.
 * @return The topological sort.
 * @throws IllegalStateException if the input list contains duplicates.
 */
public static <T> List<T> toposort(List<T> inputs,
        T root, final Deps<T> deps, boolean isFullCut) {
    // Convert the list to a set, rejecting duplicate modules.
    final HashSet<T> inputSet = new HashSet<T>(inputs);
    if (inputs.size() != inputSet.size()) {
        throw new IllegalStateException("Multiple copies of module in inputs list: " + inputs);
    }
    return toposort(inputSet, root, deps, isFullCut);
} | Gets a topological sort for the graph, where the depth-first search is cutoff by an input set.
@param inputs The input set which is excluded from the graph.
@param root The root of the graph.
@param deps Functional description of the graph's dependencies.
@param isFullCut Whether the input set is a full cut of the graph.
@return The topological sort. |
/**
 * Gets a topological sort for the graph, where the depth-first search is cut
 * off by an input set.
 *
 * @param inputSet The input set which is excluded from the graph.
 * @param root The root of the graph.
 * @param deps Functional description of the graph's dependencies.
 * @param isFullCut Whether the input set is a full cut of the graph.
 * @return The topological sort.
 */
public static <T> List<T> toposort(final Set<T> inputSet, T root, final Deps<T> deps, boolean isFullCut) {
    // Validate the inputs: they must lie below the root, and (optionally)
    // form a full cut of the graph.
    checkAreDescendentsOf(inputSet, root, deps);
    if (isFullCut) {
        checkIsFullCut(inputSet, root, deps);
    }
    // Sort with the input set pruned out of the dependency structure.
    return Toposort.toposort(root, getCutoffDeps(inputSet, deps));
} | Gets a topological sort for the graph, where the depth-first search is cutoff by an input set.
@param inputSet The input set which is excluded from the graph.
@param root The root of the graph.
@param deps Functional description of the graph's dependencies.
@param isFullCut Whether the input set is a full cut of the graph.
@return The topological sort. |
/**
 * Gets a new Deps view of the graph in which every node of the input set is
 * removed from each node's dependency list.
 */
public static <T> Deps<T> getCutoffDeps(final Set<T> inputSet, final Deps<T> deps) {
    return new Deps<T>() {
        @Override
        public Collection<T> getDeps(T node) {
            // Copy the node's dependencies, then filter out the input set.
            HashSet<T> remaining = new HashSet<T>(deps.getDeps(node));
            remaining.removeAll(inputSet);
            return remaining;
        }
    };
} | Gets a new Deps graph where each node in the input set is removed from the graph.
/**
 * Checks that the given inputSet consists only of descendents of the root.
 *
 * @throws IllegalStateException if some input module is unreachable from root.
 */
public static <T> void checkAreDescendentsOf(Set<T> inputSet, T root, Deps<T> deps) {
    // DFS from the root; every module in the input set must be reachable.
    HashSet<T> reachable = new HashSet<T>();
    dfs(root, reachable, deps);
    if (!reachable.containsAll(inputSet)) {
        throw new IllegalStateException("Input set contains modules which are not descendents of the output module: " + inputSet);
    }
} | Checks that the given inputSet consists of only descendents of the root.
/**
 * Checks that the given inputSet defines a full cut through the graph rooted
 * at the given root.
 *
 * @throws IllegalStateException if the DFS finds leaves outside the input set.
 */
public static <T> void checkIsFullCut(Set<T> inputSet, T root, Deps<T> deps) {
    // Pre-mark the input set as visited: if it is a full cut, the DFS from
    // the root discovers no further leaves.
    HashSet<T> visited = new HashSet<T>(inputSet);
    HashSet<T> extraLeaves = dfs(root, visited, deps);
    if (!extraLeaves.isEmpty()) {
        throw new IllegalStateException("Input set is not a valid leaf set for the given output module. Extra leaves: " + extraLeaves);
    }
} | Checks that the given inputSet defines a full cut through the graph rooted at the given root.
/**
 * Iterative DFS from root that marks nodes in {@code visited} and collects
 * the leaves (nodes with no dependencies) not already marked as visited.
 * TODO: detect cycles.
 *
 * @param root The node at which to start the search.
 * @param visited Nodes to treat as already seen; updated in place.
 * @param deps Functional description of the graph's dependencies.
 * @return The set of newly discovered leaves.
 */
public static <T> HashSet<T> dfs(T root, Set<T> visited, Deps<T> deps) {
    HashSet<T> leaves = new HashSet<T>();
    Stack<T> stack = new Stack<T>();
    stack.push(root);
    while (!stack.isEmpty()) {
        T p = stack.pop();
        if (!visited.add(p)) {
            // Already seen.
            continue;
        }
        // Hoisted: the original called deps.getDeps(p) twice per node, which
        // is wasteful (and fragile) for user-supplied Deps implementations.
        Collection<T> children = deps.getDeps(p);
        if (children.isEmpty()) {
            leaves.add(p);
        } else {
            stack.addAll(children);
        }
    }
    return leaves;
} | TODO: detect cycles.
// Gets the leaves of the graph rooted at root, discovered via DFS.
// NOTE(review): the result is a HashSet, so no DFS *order* is preserved.
public static <T> HashSet<T> getLeaves(T root, Deps<T> deps) {
    return dfs(root, new HashSet<T>(), deps);
} | Gets the leaves in DFS order.
/**
 * Adds a directed edge from node {@code from} to node {@code to}.
 *
 * @param from source node
 * @param to target node
 */
public void addEdge(int from, int to) {
    // Lazily create the (sorted) adjacency set for this source node.
    Set<Integer> targets = edges.get(from);
    if (targets == null) {
        targets = new TreeSet<Integer>();
        edges.put(from, targets);
    }
    targets.add(to);
} | Add an edge from node <code>from</code> to node <code>to</code>.
@param from
@param to |
/**
 * Builds and returns a new sparse graph with every edge of this graph
 * reversed: each (u -> v) becomes (v -> u).
 */
public SparseGraph reversed() {
    SparseGraph result = new SparseGraph();
    for (Entry<Integer, Set<Integer>> adjacency : edges.entrySet()) {
        Integer source = adjacency.getKey();
        for (Integer target : adjacency.getValue()) {
            result.addEdge(target, source);
        }
    }
    return result;
} | Build and return a sparse graph by reversing all edges in this graph.
@return |
/**
 * Registers one ServerEventHandler for every configured SFS event type.
 * Event types are resolved from the string form of each configured key.
 */
protected void addServerEventHandlers() {
    for (Entry<Object, Class<?>> entry : getServerEventHandlers().entrySet()) {
        SFSEventType eventType = SFSEventType.valueOf(entry.getKey().toString());
        ServerEventHandler eventHandler =
                createServerEventHandler(eventType, entry.getValue());
        addEventHandler(eventType, eventHandler);
    }
} | Add server event handlers
/**
 * Creates a server event handler instance for the given type and handler
 * class, passing the application context to its constructor.
 *
 * @param type event type
 * @param clazz handler class
 * @return the constructed ServerEventHandler
 * @throws RuntimeException if the handler cannot be instantiated
 */
private ServerEventHandler createServerEventHandler(
        SFSEventType type, Class<?> clazz) {
    try {
        Object handler = ReflectClassUtil.newInstance(
                clazz, BaseAppContext.class, context);
        return (ServerEventHandler) handler;
    } catch (ExtensionException e) {
        // A handler that cannot be built is fatal: log, then rethrow unchecked.
        getLogger().error("Error when create server event handlers", e);
        throw new RuntimeException("Can not create event handler of class "
                + clazz, e);
    }
} | Create server event handler by type and handler class
@param type event type
@param clazz handler class
@return a ServerEventHandler object |
/**
 * Installs an EzyFox system-controller filter chain for every system request
 * type on the parent zone.
 */
public void addSysControllerFilters() {
    for (SystemRequest request : SystemRequest.values()) {
        ISystemFilterChain chain = new SysControllerFilterChain();
        chain.addFilter("EzyFoxFilterChain#" + request,
                new BaseSysControllerFilter(appContext(), request));
        getParentZone().setFilterChain(request, chain);
    }
} | Add System Controller Filters
/**
 * Propagates a server event to the handler: instantiates the handler class
 * and invokes its handle method with the app context and the zone agent.
 *
 * @param handler structure of handler class
 * @param zoneAgent the zone agent
 */
protected void notifyHandler(ServerHandlerClass handler, Object zoneAgent) {
    ReflectMethodUtil.invokeHandleMethod(handler.getHandleMethod(),
        handler.newInstance(), context, zoneAgent);
} | Propagate event to handler
@param handler structure of handler class
@param zoneAgent the zone agent |
/**
 * Gets the link var for the given 1-indexed parent and child positions.
 * Parent 0 denotes the wall node, whose links live in rootVars; both backing
 * arrays are 0-indexed, hence the -1 shifts.
 *
 * @param parent The parent word position (1-indexed), or 0 for the wall node.
 * @param child The child word position (1-indexed).
 * @return The link variable.
 */
public LinkVar getLinkVar1Idx(int parent, int child) {
    return (parent == 0) ? rootVars[child - 1] : childVars[parent - 1][child - 1];
} | Get the link var corresponding to the specified parent and child position.
@param parent The parent word position (1-indexed), or 0 to indicate the wall node.
@param child The child word position (1-indexed).
@return The link variable. |
/**
 * Gets the link var for the given 0-indexed parent and child positions.
 * Parent -1 denotes the wall node, whose links live in rootVars.
 *
 * @param parent The parent word position, or -1 for the wall node.
 * @param child The child word position.
 * @return The link variable.
 */
public LinkVar getLinkVar(int parent, int child) {
    return (parent == -1) ? rootVars[child] : childVars[parent][child];
} | Get the link var corresponding to the specified parent and child position.
@param parent The parent word position, or -1 to indicate the wall node.
@param child The child word position.
@return The link variable. |
// Returns this single-cell table's one cell as an immutable singleton set.
@Override
public ImmutableSet<Cell<V>> cells() {
    return ImmutableSet.<Cell<V>>of(cell);
} | -----------------------------------------------------------------------
/**
 * Converts a compacted number back to its real value: recover the magnitude
 * from the stored natural log, then reapply the stored sign.
 */
@Override
public double toReal(double x) {
    double magnitude = FastMath.exp(natlog(x));
    return (sign(x) == POSITIVE) ? magnitude : -magnitude;
} | Converts a compacted number to its real value.
/**
 * Converts a real value to its compacted representation: the sign flag plus
 * the natural log of the magnitude.
 */
@Override
public double fromReal(double x) {
    if (x < 0) {
        return compact(NEGATIVE, FastMath.log(-x));
    }
    return compact(POSITIVE, FastMath.log(x));
} | Converts a real value to its compacted representation.
// Packs the sign flag together with the bits of the natural log: the log's
// raw bits are masked by FLOAT_MASK and OR'ed with the sign bit.
public static final double compact(long sign, double natlog) {
    return Double.longBitsToDouble(sign | (FLOAT_MASK & Double.doubleToRawLongBits(natlog)));
} | Gets the compacted version from the sign and natural log.
/**
 * Verifies the authenticity of a message using a digital signature, caching
 * the constructed verifier per (keystore, alias) pair.
 *
 * @param keyStoreChooser the keystore chooser
 * @param publicKeyChooserByAlias the public key chooser
 * @param message the message whose signature is checked
 * @param signature the digital signature
 * @return true if the authenticity of the message is verified by the
 *         digital signature
 * @throws SignatureException if no public key is registered for the chosen
 *         keystore/alias pair
 */
public boolean verify(KeyStoreChooser keyStoreChooser, PublicKeyChooserByAlias publicKeyChooserByAlias, byte[] message,
    byte[] signature) {
    // Fast path: reuse a previously built verifier for this keystore/alias.
    Verifier verifier = cache.get(cacheKey(keyStoreChooser, publicKeyChooserByAlias));
    if (verifier != null) {
        return verifier.verify(message, signature);
    }
    VerifierImpl verifierImpl = new VerifierImpl();
    verifierImpl.setAlgorithm(algorithm);
    verifierImpl.setProvider(provider);
    PublicKey publicKey = publicKeyRegistryByAlias.get(keyStoreChooser, publicKeyChooserByAlias);
    if (publicKey == null) {
        throw new SignatureException("public key not found in registry: keyStoreName="
            + keyStoreChooser.getKeyStoreName() + ", alias=" + publicKeyChooserByAlias.getAlias());
    }
    verifierImpl.setPublicKey(publicKey);
    // NOTE(review): concurrent callers may race and build duplicate verifiers;
    // harmless only if cache is a concurrent map -- confirm cache's type.
    cache.put(cacheKey(keyStoreChooser, publicKeyChooserByAlias), verifierImpl);
    return verifierImpl.verify(message, signature);
} | Verifies the authenticity of a message using a digital signature.
@param keyStoreChooser the keystore chooser
@param publicKeyChooserByAlias the public key chooser
@param message the message to sign
@param signature the digital signature
@return true if the authenticity of the message is verified by the
digital signature |
/**
 * Backward pass for an elementwise difference (per the stated adjoints,
 * dy_i/dx_i = 1 and dy_i/dw_i = -1):
 *   dG/dx_i += dG/dy_i
 *   dG/dw_i -= dG/dy_i
 */
@Override
public void backward() {
    modInX.getOutputAdj().elemAdd(yAdj);
    modInW.getOutputAdj().elemSubtract(yAdj);
} | Backward pass:
dG/dx_i += dG/dy_i dy_i/dx_i = dG/dy_i
dG/dw_i += dG/dy_i dy_i/dw_i = - dG/dy_i |
/**
 * Encrypts or decrypts a message using the configured symmetric key and mode.
 *
 * @param message in ENCRYPT mode the clear text to encrypt; in DECRYPT mode
 *        a base64 encoded version of the raw bytes to decrypt
 * @return in ENCRYPT mode a base64 encoded version of the encrypted message;
 *         in DECRYPT mode the clear text; null for an unrecognized mode
 * @throws SymmetricEncryptionException on any runtime error
 */
public String encrypt(String message) {
    try {
        // Use the explicitly configured JCE provider when one is set.
        final Cipher cipher = (((provider == null) || (provider.length() == 0))
            ? Cipher.getInstance(cipherAlgorithm)
            : Cipher.getInstance(cipherAlgorithm, provider));
        switch (mode) {
        case ENCRYPT:
            cipher.init(Cipher.ENCRYPT_MODE, keySpec, initializationVectorSpec);
            byte[] encryptedMessage = cipher.doFinal(message.getBytes(charsetName));
            // chunkOutput controls whether the base64 output is split into chunks.
            return new String(Base64.encodeBase64(encryptedMessage, chunkOutput));
        case DECRYPT:
            cipher.init(Cipher.DECRYPT_MODE, keySpec, initializationVectorSpec);
            byte[] decodedMessage = Base64.decodeBase64(message);
            return new String(cipher.doFinal(decodedMessage), charsetName);
        default:
            return null;
        }
    } catch (Exception e) {
        throw new SymmetricEncryptionException("error encrypting/decrypting message; mode=" + mode, e);
    }
} | Encrypts/decrypts a message using the underlying symmetric key and mode.
@param message if in encryption mode, the clear-text message to encrypt,
otherwise a base64 encoded version of the raw byte array
containing the message to decrypt
@return if in encryption mode, returns a base64 encoded version of the
encrypted message, otherwise returns the clear-text message
@throws SymmetricEncryptionException on runtime errors
@see #setMode(Mode) |
/**
 * Gets the number of observation function features: the sum of the alphabet
 * sizes over all factor templates.
 */
public int getNumObsFeats() {
    int total = 0;
    for (FactorTemplate template : fts) {
        total += template.getAlphabet().size();
    }
    return total;
} | Gets the number of observation function features.
/**
 * Encrypts or decrypts a message with the supplied key material, based on
 * the configured mode of operation.
 *
 * @param key the raw symmetric key bytes
 * @param initializationVector the initialization vector
 * @param message in ENCRYPT mode the clear text; in DECRYPT mode the cipher text
 * @return the encrypted or decrypted message
 * @throws SymmetricEncryptionException on an invalid mode or any runtime error
 */
public byte[] encrypt(byte[] key, byte[] initializationVector, byte[] message) {
    try {
        IvParameterSpec initializationVectorSpec = new IvParameterSpec(initializationVector);
        final SecretKeySpec skey = new SecretKeySpec(key, keyAlgorithm);
        // Use the explicitly configured JCE provider when one is set.
        final Cipher cipher = (((provider == null) || (provider.length() == 0))
            ? Cipher.getInstance(cipherAlgorithm)
            : Cipher.getInstance(cipherAlgorithm, provider));
        switch (mode) {
        case ENCRYPT:
            cipher.init(Cipher.ENCRYPT_MODE, skey, initializationVectorSpec);
            break;
        case DECRYPT:
            cipher.init(Cipher.DECRYPT_MODE, skey, initializationVectorSpec);
            break;
        default:
            // NOTE(review): this throw occurs inside the try, so it is caught
            // by the catch below and re-wrapped in a second
            // SymmetricEncryptionException -- likely unintended nesting.
            throw new SymmetricEncryptionException("error encrypting/decrypting message: invalid mode; mode=" + mode);
        }
        return cipher.doFinal(message);
    } catch (Exception e) {
        throw new SymmetricEncryptionException("error encrypting/decrypting message; mode=" + mode, e);
    }
} | Encrypts/decrypts a message based on the underlying mode of operation.
@param key the encryption key
@param initializationVector the initialization vector
@param message if in encryption mode, the clear-text message, otherwise
the message to decrypt
@return if in encryption mode, the encrypted message, otherwise the
decrypted message
@throws SymmetricEncryptionException on runtime errors
@see #setMode(Mode) |
/**
 * Executes the add-buddy command: adds the target user to the owner's buddy
 * list and fires the configured client/server notification events.
 *
 * @return the new buddy, or null if adding it to the buddy list failed
 */
@SuppressWarnings("unchecked")
public ApiBuddy execute() {
    User sfsOwner = api.getUserByName(owner);
    ApiUser targetUser = getUser(target);
    ApiUser ownerUser = (ApiUser) sfsOwner.getProperty(APIKey.USER);
    ISFSBuddyResponseApi responseAPI = SmartFoxServer.getInstance().getAPIManager()
        .getBuddyApi().getResponseAPI();
    ISFSEventManager eventManager = SmartFoxServer.getInstance().getEventManager();
    BuddyList buddyList = sfsOwner.getZone()
        .getBuddyListManager().getBuddyList(owner);
    BuddyListManager buddyListManager = sfsOwner.getZone().getBuddyListManager();
    // Fail fast when the zone's buddy list manager is disabled.
    checkBuddyManagerIsActive(buddyListManager, sfsOwner);
    sfsOwner.updateLastRequestTime();
    // Build the buddy and wire it to its owner and list before insertion.
    ApiBuddyImpl buddy = new ApiBuddyImpl(target, temp);
    buddy.setOwner(ownerUser);
    buddy.setParentBuddyList(buddyList);
    if(targetUser != null) buddy.setUser(targetUser);
    try {
        buddyList.addBuddy(buddy);
        // Optionally notify the owner's client of the new buddy.
        if (fireClientEvent)
            responseAPI.notifyAddBuddy(buddy, sfsOwner);
        // Optionally dispatch a server-side BUDDY_ADD event.
        if (fireServerEvent) {
            Map<ISFSEventParam, Object> evtParams = new HashMap<>();
            evtParams.put(SFSEventParam.ZONE, sfsOwner.getZone());
            evtParams.put(SFSEventParam.USER, sfsOwner);
            evtParams.put(SFSBuddyEventParam.BUDDY, buddy);
            eventManager.dispatchEvent(new SFSEvent(SFSEventType.BUDDY_ADD, evtParams));
        }
    } catch (SFSBuddyListException e) {
        // Report the failure to the client (when enabled) and signal via null.
        if (fireClientEvent) {
            api.getResponseAPI().notifyRequestError(e, sfsOwner, SystemRequest.AddBuddy);
        }
        return null;
    }
    return buddy;
} | Execute to add a buddy to list
/**
 * Verifies that the zone's buddy list manager is active.
 *
 * @param buddyListManager manager object
 * @param sfsOwner buddy's owner
 * @throws IllegalStateException when the manager is not active
 */
private void checkBuddyManagerIsActive(BuddyListManager buddyListManager, User sfsOwner) {
    if (buddyListManager.isActive()) {
        return;
    }
    // String.format is varargs; no explicit Object[] wrapper is needed.
    throw new IllegalStateException(
            String.format("BuddyList operation failure. BuddyListManager is not active. Zone: %s, Sender: %s", sfsOwner.getZone(), sfsOwner));
} | Check whether buddy manager is active
@param buddyListManager manager object
@param sfsOwner buddy's owner |
/**
 * Gets the user-agent reference for the named user.
 *
 * @param name name of user agent
 * @return the user agent object, or null when the user is not connected
 */
public ApiUser getUser(String name) {
    User sfsUser = CommandUtil.getSfsUser(name, api);
    return (sfsUser == null) ? null : (ApiUser) sfsUser.getProperty(APIKey.USER);
} | Get user agent reference
@param name name of user agent
@return user agent object |
/**
 * Peeks at the character {@code offset} positions ahead of the current read
 * position without consuming any input; offset == 0 peeks at the current
 * position. Data read ahead is buffered for subsequent calls.
 *
 * @param offset non-negative lookahead distance from the current position
 * @return the character at the requested position, or EOF if unavailable
 * @throws IOException if the underlying reader fails
 */
public int peek(int offset) throws IOException {
    // A negative count marks a previously observed end-of-stream -- TODO confirm.
    if(this.count < 0) {
        return EOF;
    }
    if(offset < 0) {
        throw new java.lang.IllegalArgumentException("offset must be >= 0");
    }
    // Absolute position of the requested character within the buffer.
    int i = this.index + offset;
    if(i < this.count) {
        return this.buffer[i];
    }
    // Need more data: first compact the buffer by shifting the unread
    // portion down to the start.
    if(this.index > 0) {
        if(this.count - this.index > 0) {
            System.arraycopy(this.buffer, this.index, this.buffer, 0, this.count - this.index);
        }
        this.count = this.count - this.index;
        this.index = 0;
    }
    // Grow the buffer when the requested offset does not fit. Fixed to >=:
    // the largest valid index is buffer.length - 1, so offset == buffer.length
    // also requires growth (the original > check returned EOF too early there).
    if(offset >= this.buffer.length) {
        // New capacity: the smallest multiple of the current size covering offset.
        int bufferSize = (offset / this.buffer.length + 1) * this.buffer.length;
        char[] cbuf = new char[bufferSize];
        System.arraycopy(this.buffer, 0, cbuf, 0, this.count);
        this.buffer = cbuf;
    }
    // Refill with a single read into the remaining space.
    // NOTE(review): one read may not reach the requested offset even when the
    // stream has more data, yielding a spurious EOF; consider looping.
    int length = this.reader.read(this.buffer, this.count, this.buffer.length - this.count);
    if(length > 0) {
        this.count = this.count + length;
    }
    // After compaction index == 0, so the requested position is simply offset.
    if(offset < this.count) {
        return this.buffer[offset];
    }
    return EOF;
} | 获取当前位置以后的数�? offset >= 0
offset = 0时就是当前位置的数据
该方法只是预�? 不会改变数据指针
并且该方法会缓存预读的数�? * @param offset
@return int
@throws IOException |
/**
 * Propagates the event to the handler: builds the handler instance, injects
 * its configuration, then dispatches with the application context.
 */
@Override
protected void notifyHandler(ServerHandlerClass handler) {
    Object handlerInstance = handler.newInstance();
    assignDataToHandler(handler, handlerInstance);
    ReflectMethodUtil.invokeHandleMethod(
            handler.getHandleMethod(), handlerInstance, context);
} | /* (non-Javadoc)
@see com.tvd12.ezyfox.sfs2x.serverhandler.ServerBaseEventHandler#notifyHandler(com.tvd12.ezyfox.core.structure.ServerHandlerClass) |
/**
 * Maps the extension's configuration properties onto the handler object.
 *
 * @param handler structure of handler class
 * @param instance a handler instance
 */
protected void assignDataToHandler(ServerHandlerClass handler, Object instance) {
    // Skip injection entirely when the extension has no configuration properties.
    if (getParentExtension().getConfigProperties() == null) {
        return;
    }
    new ConfigPropertyDeserializer().deserialize(
            handler.getPropertiesClassWrapper(),
            instance,
            getParentExtension().getConfigProperties());
} | Map configuration properties to handler object
@param handler structure of handler class
@param instance a handler instance |
/**
 * Forward pass: y = - \sum_{x_i \in x*} b(x_i), where x* is the gold
 * variable assignment restricted to PREDICTED variables.
 */
public Tensor forward() {
    VarTensor[] varBeliefs = inf.getOutput().varBeliefs;
    double expectedRecall = s.zero();
    for (Var var : vc.getVars()) {
        if (var.getType() == VarType.PREDICTED) {
            VarTensor marg = varBeliefs[var.getId()];
            // Subtract (in algebra s) the belief assigned to the gold state.
            expectedRecall = s.minus(expectedRecall, marg.getValue(vc.getState(var)));
        }
    }
    y = Tensor.getScalarTensor(s, expectedRecall);
    return y;
} | Forward pass: y = - \sum_{x_i \in x*} b(x_i), where x* is the gold variable assignment.
/**
 * Backward pass: dG/db(x_i) = dG/dy * dy/db(x_i) = - dG/dy for each gold
 * assignment x_i of a PREDICTED variable.
 */
public void backward() {
    double expectedRecallAdj = yAdj.getValue(0);
    VarTensor[] varBeliefsAdjs = inf.getOutputAdj().varBeliefs;
    // Fill in the non-zero adjoints with the adjoint of the expected recall.
    for (Var var : vc.getVars()) {
        if (var.getType() == VarType.PREDICTED) {
            // dy/db(x_i) = -1, so subtract the incoming adjoint.
            varBeliefsAdjs[var.getId()].subtractValue(vc.getState(var), expectedRecallAdj);
        }
    }
} | Backward pass: dG/db(x_i) = dG/dy dy/db(x_i) = - dG/dy, \forall x_i \in x*.
/**
 * Forward pass: y_i = exp(x_i), interpreting x's entries as
 * log-probabilities and converting them into algebra s.
 */
@Override
public VarTensor forward() {
    VarTensor x = modInX.getOutput();
    y = new VarTensor(s, x.getVars());
    for (int c = 0; c < y.size(); c++) {
        y.setValue(c, s.fromLogProb(x.getValue(c)));
    }
    return y;
} | Foward pass: y_i = exp(x_i)
/**
 * Backward pass: dG/dx_i += dG/dy_i * dy_i/dx_i = dG/dy_i * exp(x_i).
 * Since y = exp(x), dy/dx = y, so the adjoint is yAdj elementwise-multiplied
 * by y.
 */
@Override
public void backward() {
    VarTensor tmp = new VarTensor(yAdj); // copy
    tmp.elemMultiply(y);
    // Convert to the real algebra before accumulating into x's adjoint --
    // presumably the input module stores adjoints in reals; TODO confirm.
    tmp = tmp.copyAndConvertAlgebra(RealAlgebra.getInstance());
    VarTensor xAdj = modInX.getOutputAdj();
    xAdj.elemAdd(tmp);
} | Backward pass:
/**
 * Handles a client request: logs it, resolves the sender's user agent, and
 * notifies every registered listener. Any failure is routed through the
 * handler-exception processor.
 */
@Override
public void handleClientRequest(User user, ISFSObject params) {
    try {
        debugLogRequestInfo(user, params);
        ApiUser apiUser = getUserAgent(user);
        for (RequestResponseClass listener : listeners) {
            // Arguments evaluate left-to-right, so the agent is validated
            // immediately before each notification, as in the original.
            notifyListener(listener, params, user, checkUserAgent(listener, apiUser));
        }
    } catch (Exception e) {
        processHandlerException(e, user);
    }
} | Handle request from client
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.