Dataset schema

| column       | type          | values / lengths |
| ------------ | ------------- | ---------------- |
| lang         | stringclasses | 1 value          |
| license      | stringclasses | 13 values        |
| stderr       | stringlengths | 0 to 350         |
| commit       | stringlengths | 40 to 40         |
| returncode   | int64         | 0 to 128         |
| repos        | stringlengths | 7 to 45.1k       |
| new_contents | stringlengths | 0 to 1.87M       |
| new_file     | stringlengths | 6 to 292         |
| old_contents | stringlengths | 0 to 1.87M       |
| message      | stringlengths | 6 to 9.26k       |
| old_file     | stringlengths | 6 to 292         |
| subject      | stringlengths | 0 to 4.45k       |
Example records

Record 1 (sambalmueslie/herold)

lang: Java
license: apache-2.0
commit: 77e086a0ece043398f521b6bd8626142cd8aefba
returncode: 0
repos: sambalmueslie/herold
new_contents:
```java
package de.sambalmueslie.herold;

public interface HeroldDataCenter {

  /**
   * Create a new model for a specified type.
   *
   * @param elementType
   *            the type
   */
  <T extends DataModelElement> DataModel<T> createModel(Class<T> elementType);

  /**
   * Remove all model in the data center.
   */
  void removeAllModel();

  /**
   * Remove all model of a specified type.
   *
   * @param elementType
   *            the element type
   */
  <T extends DataModelElement> void removeAllModel(Class<T> elementType);

  /**
   * Remove a specified model for an element type.
   *
   * @param elementType
   *            the type
   * @param model
   *            the model to remove
   */
  <T extends DataModelElement> void removeModel(Class<T> elementType, DataModel<T> model);
}
```
new_file: src/main/java/de/sambalmueslie/herold/HeroldDataCenter.java
old_contents:
```java
package de.sambalmueslie.herold;

public interface HeroldDataCenter {

  /**
   * Create a new model for a specified type.
   *
   * @param elementType
   *            the type
   */
  <T extends DataModelElement> void createModel(Class<T> elementType);

  /**
   * Remove all model in the data center.
   */
  void removeAllModel();

  /**
   * Remove all model of a specified type.
   *
   * @param elementType
   *            the element type
   */
  <T extends DataModelElement> void removeAllModel(Class<T> elementType);

  /**
   * Remove a specified model for an element type.
   *
   * @param elementType
   *            the type
   * @param model
   *            the model to remove
   */
  <T extends DataModelElement> void removeModel(Class<T> elementType, DataModel<T> model);
}
```
message: finish data center and model controller
old_file: src/main/java/de/sambalmueslie/herold/HeroldDataCenter.java
subject: finish data center and model controller
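The only change between old_contents and new_contents in this record is the return type of createModel, from void to DataModel&lt;T&gt;, so the caller receives the model it just created. Below is a minimal sketch of using the revised interface; the helper class is an illustrative assumption and presumes the herold types are public and on the classpath.

```java
import de.sambalmueslie.herold.DataModel;
import de.sambalmueslie.herold.DataModelElement;
import de.sambalmueslie.herold.HeroldDataCenter;

class DataCenterUsageSketch {
  static <T extends DataModelElement> void roundTrip(HeroldDataCenter center, Class<T> type) {
    // New signature: the created model is returned directly...
    DataModel<T> model = center.createModel(type);
    // ...so the caller can later remove exactly that instance.
    center.removeModel(type, model);
  }
}
```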
Record 2 (apache/hadoop)

lang: Java
license: apache-2.0
commit: 43e5218a869ca5c2bdc457375461ae89cdf97315
returncode: 0
repos: JingchengDu/hadoop,wwjiang007/hadoop,apache/hadoop,wwjiang007/hadoop,JingchengDu/hadoop,mapr/hadoop-common,mapr/hadoop-common,apache/hadoop,wwjiang007/hadoop,apache/hadoop,apache/hadoop,JingchengDu/hadoop,mapr/hadoop-common,apache/hadoop,wwjiang007/hadoop,mapr/hadoop-common,wwjiang007/hadoop,mapr/hadoop-common,JingchengDu/hadoop,JingchengDu/hadoop,mapr/hadoop-common,mapr/hadoop-common,JingchengDu/hadoop,apache/hadoop,JingchengDu/hadoop,wwjiang007/hadoop,wwjiang007/hadoop,apache/hadoop
new_contents:
```java
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs.tools.offlineEditsViewer;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Stack;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hdfs.util.XMLUtils;
import org.apache.hadoop.hdfs.util.XMLUtils.InvalidXmlException;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.OpInstanceCache;
import org.apache.hadoop.hdfs.tools.offlineEditsViewer.OfflineEditsViewer;
import org.apache.hadoop.hdfs.util.XMLUtils.Stanza;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;
import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;

/**
 * OfflineEditsXmlLoader walks an EditsVisitor over an OEV XML file
 */
@InterfaceAudience.Private
@InterfaceStability.Unstable
class OfflineEditsXmlLoader extends DefaultHandler implements OfflineEditsLoader {
  private final boolean fixTxIds;
  private final OfflineEditsVisitor visitor;
  private final InputStreamReader fileReader;
  private ParseState state;
  private Stanza stanza;
  private Stack<Stanza> stanzaStack;
  private FSEditLogOpCodes opCode;
  private StringBuffer cbuf;
  private long nextTxId;
  private final OpInstanceCache opCache = new OpInstanceCache();

  enum ParseState {
    EXPECT_EDITS_TAG,
    EXPECT_VERSION,
    EXPECT_RECORD,
    EXPECT_OPCODE,
    EXPECT_DATA,
    HANDLE_DATA,
    EXPECT_END,
  }

  public OfflineEditsXmlLoader(OfflineEditsVisitor visitor,
      File inputFile, OfflineEditsViewer.Flags flags) throws FileNotFoundException {
    this.visitor = visitor;
    this.fileReader = new InputStreamReader(new FileInputStream(inputFile), Charsets.UTF_8);
    this.fixTxIds = flags.getFixTxIds();
  }

  /**
   * Loads edits file, uses visitor to process all elements
   */
  @Override
  public void loadEdits() throws IOException {
    try {
      XMLReader xr = XMLReaderFactory.createXMLReader();
      xr.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
      xr.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false);
      xr.setFeature("http://xml.org/sax/features/external-general-entities", false);
      xr.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
      xr.setContentHandler(this);
      xr.setErrorHandler(this);
      xr.setDTDHandler(null);
      xr.parse(new InputSource(fileReader));
      visitor.close(null);
    } catch (SAXParseException e) {
      System.out.println("XML parsing error: " + "\n" +
          "Line: " + e.getLineNumber() + "\n" +
          "URI: " + e.getSystemId() + "\n" +
          "Message: " + e.getMessage());
      visitor.close(e);
      throw new IOException(e.toString());
    } catch (SAXException e) {
      visitor.close(e);
      throw new IOException(e.toString());
    } catch (RuntimeException e) {
      visitor.close(e);
      throw e;
    } finally {
      fileReader.close();
    }
  }

  @Override
  public void startDocument() {
    state = ParseState.EXPECT_EDITS_TAG;
    stanza = null;
    stanzaStack = new Stack<Stanza>();
    opCode = null;
    cbuf = new StringBuffer();
    nextTxId = -1;
  }

  @Override
  public void endDocument() {
    if (state != ParseState.EXPECT_END) {
      throw new InvalidXmlException("expecting </EDITS>");
    }
  }

  @Override
  public void startElement (String uri, String name, String qName, Attributes atts) {
    switch (state) {
    case EXPECT_EDITS_TAG:
      if (!name.equals("EDITS")) {
        throw new InvalidXmlException("you must put " +
            "<EDITS> at the top of the XML file! " +
            "Got tag " + name + " instead");
      }
      state = ParseState.EXPECT_VERSION;
      break;
    case EXPECT_VERSION:
      if (!name.equals("EDITS_VERSION")) {
        throw new InvalidXmlException("you must put " +
            "<EDITS_VERSION> at the top of the XML file! " +
            "Got tag " + name + " instead");
      }
      break;
    case EXPECT_RECORD:
      if (!name.equals("RECORD")) {
        throw new InvalidXmlException("expected a <RECORD> tag");
      }
      state = ParseState.EXPECT_OPCODE;
      break;
    case EXPECT_OPCODE:
      if (!name.equals("OPCODE")) {
        throw new InvalidXmlException("expected an <OPCODE> tag");
      }
      break;
    case EXPECT_DATA:
      if (!name.equals("DATA")) {
        throw new InvalidXmlException("expected a <DATA> tag");
      }
      stanza = new Stanza();
      state = ParseState.HANDLE_DATA;
      break;
    case HANDLE_DATA:
      Stanza parent = stanza;
      Stanza child = new Stanza();
      stanzaStack.push(parent);
      stanza = child;
      parent.addChild(name, child);
      break;
    case EXPECT_END:
      throw new InvalidXmlException("not expecting anything after </EDITS>");
    }
  }

  @Override
  public void endElement (String uri, String name, String qName) {
    String str = XMLUtils.unmangleXmlString(cbuf.toString(), false).trim();
    cbuf = new StringBuffer();
    switch (state) {
    case EXPECT_EDITS_TAG:
      throw new InvalidXmlException("expected <EDITS/>");
    case EXPECT_VERSION:
      if (!name.equals("EDITS_VERSION")) {
        throw new InvalidXmlException("expected </EDITS_VERSION>");
      }
      try {
        int version = Integer.parseInt(str);
        visitor.start(version);
      } catch (IOException e) {
        // Can't throw IOException from a SAX method, sigh.
        throw new RuntimeException(e);
      }
      state = ParseState.EXPECT_RECORD;
      break;
    case EXPECT_RECORD:
      if (name.equals("EDITS")) {
        state = ParseState.EXPECT_END;
      } else if (!name.equals("RECORD")) {
        throw new InvalidXmlException("expected </EDITS> or </RECORD>");
      }
      break;
    case EXPECT_OPCODE:
      if (!name.equals("OPCODE")) {
        throw new InvalidXmlException("expected </OPCODE>");
      }
      opCode = FSEditLogOpCodes.valueOf(str);
      state = ParseState.EXPECT_DATA;
      break;
    case EXPECT_DATA:
      throw new InvalidXmlException("expected <DATA/>");
    case HANDLE_DATA:
      stanza.setValue(str);
      if (stanzaStack.empty()) {
        if (!name.equals("DATA")) {
          throw new InvalidXmlException("expected </DATA>");
        }
        state = ParseState.EXPECT_RECORD;
        FSEditLogOp op = opCache.get(opCode);
        opCode = null;
        try {
          op.decodeXml(stanza);
          stanza = null;
        } finally {
          if (stanza != null) {
            System.err.println("fromXml error decoding opcode " + opCode +
                "\n" + stanza.toString());
            stanza = null;
          }
        }
        if (fixTxIds) {
          if (nextTxId <= 0) {
            nextTxId = op.getTransactionId();
            if (nextTxId <= 0) {
              nextTxId = 1;
            }
          }
          op.setTransactionId(nextTxId);
          nextTxId++;
        }
        try {
          visitor.visitOp(op);
        } catch (IOException e) {
          // Can't throw IOException from a SAX method, sigh.
          throw new RuntimeException(e);
        }
        state = ParseState.EXPECT_RECORD;
      } else {
        stanza = stanzaStack.pop();
      }
      break;
    case EXPECT_END:
      throw new InvalidXmlException("not expecting anything after </EDITS>");
    }
  }

  @Override
  public void characters (char ch[], int start, int length) {
    cbuf.append(ch, start, length);
  }
}
```
new_file: hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java
old_contents:
```java
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs.tools.offlineEditsViewer;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Stack;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hdfs.util.XMLUtils;
import org.apache.hadoop.hdfs.util.XMLUtils.InvalidXmlException;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.OpInstanceCache;
import org.apache.hadoop.hdfs.tools.offlineEditsViewer.OfflineEditsViewer;
import org.apache.hadoop.hdfs.util.XMLUtils.Stanza;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;
import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;

/**
 * OfflineEditsXmlLoader walks an EditsVisitor over an OEV XML file
 */
@InterfaceAudience.Private
@InterfaceStability.Unstable
class OfflineEditsXmlLoader extends DefaultHandler implements OfflineEditsLoader {
  private final boolean fixTxIds;
  private final OfflineEditsVisitor visitor;
  private final InputStreamReader fileReader;
  private ParseState state;
  private Stanza stanza;
  private Stack<Stanza> stanzaStack;
  private FSEditLogOpCodes opCode;
  private StringBuffer cbuf;
  private long nextTxId;
  private final OpInstanceCache opCache = new OpInstanceCache();

  enum ParseState {
    EXPECT_EDITS_TAG,
    EXPECT_VERSION,
    EXPECT_RECORD,
    EXPECT_OPCODE,
    EXPECT_DATA,
    HANDLE_DATA,
    EXPECT_END,
  }

  public OfflineEditsXmlLoader(OfflineEditsVisitor visitor,
      File inputFile, OfflineEditsViewer.Flags flags) throws FileNotFoundException {
    this.visitor = visitor;
    this.fileReader = new InputStreamReader(new FileInputStream(inputFile), Charsets.UTF_8);
    this.fixTxIds = flags.getFixTxIds();
  }

  /**
   * Loads edits file, uses visitor to process all elements
   */
  @Override
  public void loadEdits() throws IOException {
    try {
      XMLReader xr = XMLReaderFactory.createXMLReader();
      xr.setContentHandler(this);
      xr.setErrorHandler(this);
      xr.setDTDHandler(null);
      xr.parse(new InputSource(fileReader));
      visitor.close(null);
    } catch (SAXParseException e) {
      System.out.println("XML parsing error: " + "\n" +
          "Line: " + e.getLineNumber() + "\n" +
          "URI: " + e.getSystemId() + "\n" +
          "Message: " + e.getMessage());
      visitor.close(e);
      throw new IOException(e.toString());
    } catch (SAXException e) {
      visitor.close(e);
      throw new IOException(e.toString());
    } catch (RuntimeException e) {
      visitor.close(e);
      throw e;
    } finally {
      fileReader.close();
    }
  }

  @Override
  public void startDocument() {
    state = ParseState.EXPECT_EDITS_TAG;
    stanza = null;
    stanzaStack = new Stack<Stanza>();
    opCode = null;
    cbuf = new StringBuffer();
    nextTxId = -1;
  }

  @Override
  public void endDocument() {
    if (state != ParseState.EXPECT_END) {
      throw new InvalidXmlException("expecting </EDITS>");
    }
  }

  @Override
  public void startElement (String uri, String name, String qName, Attributes atts) {
    switch (state) {
    case EXPECT_EDITS_TAG:
      if (!name.equals("EDITS")) {
        throw new InvalidXmlException("you must put " +
            "<EDITS> at the top of the XML file! " +
            "Got tag " + name + " instead");
      }
      state = ParseState.EXPECT_VERSION;
      break;
    case EXPECT_VERSION:
      if (!name.equals("EDITS_VERSION")) {
        throw new InvalidXmlException("you must put " +
            "<EDITS_VERSION> at the top of the XML file! " +
            "Got tag " + name + " instead");
      }
      break;
    case EXPECT_RECORD:
      if (!name.equals("RECORD")) {
        throw new InvalidXmlException("expected a <RECORD> tag");
      }
      state = ParseState.EXPECT_OPCODE;
      break;
    case EXPECT_OPCODE:
      if (!name.equals("OPCODE")) {
        throw new InvalidXmlException("expected an <OPCODE> tag");
      }
      break;
    case EXPECT_DATA:
      if (!name.equals("DATA")) {
        throw new InvalidXmlException("expected a <DATA> tag");
      }
      stanza = new Stanza();
      state = ParseState.HANDLE_DATA;
      break;
    case HANDLE_DATA:
      Stanza parent = stanza;
      Stanza child = new Stanza();
      stanzaStack.push(parent);
      stanza = child;
      parent.addChild(name, child);
      break;
    case EXPECT_END:
      throw new InvalidXmlException("not expecting anything after </EDITS>");
    }
  }

  @Override
  public void endElement (String uri, String name, String qName) {
    String str = XMLUtils.unmangleXmlString(cbuf.toString(), false).trim();
    cbuf = new StringBuffer();
    switch (state) {
    case EXPECT_EDITS_TAG:
      throw new InvalidXmlException("expected <EDITS/>");
    case EXPECT_VERSION:
      if (!name.equals("EDITS_VERSION")) {
        throw new InvalidXmlException("expected </EDITS_VERSION>");
      }
      try {
        int version = Integer.parseInt(str);
        visitor.start(version);
      } catch (IOException e) {
        // Can't throw IOException from a SAX method, sigh.
        throw new RuntimeException(e);
      }
      state = ParseState.EXPECT_RECORD;
      break;
    case EXPECT_RECORD:
      if (name.equals("EDITS")) {
        state = ParseState.EXPECT_END;
      } else if (!name.equals("RECORD")) {
        throw new InvalidXmlException("expected </EDITS> or </RECORD>");
      }
      break;
    case EXPECT_OPCODE:
      if (!name.equals("OPCODE")) {
        throw new InvalidXmlException("expected </OPCODE>");
      }
      opCode = FSEditLogOpCodes.valueOf(str);
      state = ParseState.EXPECT_DATA;
      break;
    case EXPECT_DATA:
      throw new InvalidXmlException("expected <DATA/>");
    case HANDLE_DATA:
      stanza.setValue(str);
      if (stanzaStack.empty()) {
        if (!name.equals("DATA")) {
          throw new InvalidXmlException("expected </DATA>");
        }
        state = ParseState.EXPECT_RECORD;
        FSEditLogOp op = opCache.get(opCode);
        opCode = null;
        try {
          op.decodeXml(stanza);
          stanza = null;
        } finally {
          if (stanza != null) {
            System.err.println("fromXml error decoding opcode " + opCode +
                "\n" + stanza.toString());
            stanza = null;
          }
        }
        if (fixTxIds) {
          if (nextTxId <= 0) {
            nextTxId = op.getTransactionId();
            if (nextTxId <= 0) {
              nextTxId = 1;
            }
          }
          op.setTransactionId(nextTxId);
          nextTxId++;
        }
        try {
          visitor.visitOp(op);
        } catch (IOException e) {
          // Can't throw IOException from a SAX method, sigh.
          throw new RuntimeException(e);
        }
        state = ParseState.EXPECT_RECORD;
      } else {
        stanza = stanzaStack.pop();
      }
      break;
    case EXPECT_END:
      throw new InvalidXmlException("not expecting anything after </EDITS>");
    }
  }

  @Override
  public void characters (char ch[], int start, int length) {
    cbuf.append(ch, start, length);
  }
}
```
message: HDFS-16410. Insecure Xml parsing in OfflineEditsXmlLoader (#3854) Contributed by Ashutosh Gupta
old_file: hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java
subject: HDFS-16410. Insecure Xml parsing in OfflineEditsXmlLoader (#3854)
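The substance of this fix is the four feature flags added to the XMLReader in loadEdits(), which reject inline DTDs and disable external entity resolution before parsing untrusted XML (the standard XXE hardening for SAX). Here is a self-contained sketch of the same pattern outside Hadoop; the sample XML string is invented for illustration.

```java
import java.io.StringReader;

import org.xml.sax.InputSource;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;

public class HardenedSaxParse {
  public static void main(String[] args) throws Exception {
    XMLReader xr = XMLReaderFactory.createXMLReader();
    // Refuse inline DTDs outright; a document containing <!DOCTYPE ...> now fails fast.
    xr.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
    // Belt and braces: also disable external DTD loading and entity expansion.
    xr.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false);
    xr.setFeature("http://xml.org/sax/features/external-general-entities", false);
    xr.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
    xr.setContentHandler(new DefaultHandler());
    xr.parse(new InputSource(new StringReader("<EDITS><RECORD/></EDITS>")));
    System.out.println("parsed safely");
  }
}
```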
Record 3 (Mediator/HollowCraft)

lang: Java
license: bsd-3-clause
commit: ed4415b131bdecb1835f204305a76aad8b42bb9c
returncode: 0
repos: Mediator/HollowCraft,Mediator/HollowCraft,Mediator/HollowCraft
new_contents:
```java
package org.opencraft.server.task;

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;

/*
 * OpenCraft License
 *
 * Copyright (c) 2009 Graham Edgecombe.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *     * Redistributions of source code must retain the above copyright notice,
 *       this list of conditions and the following disclaimer.
 *
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *
 *     * Neither the name of the OpenCraft nor the names of its
 *       contributors may be used to endorse or promote products derived from
 *       this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/**
 * Manages the task queue.
 * @author Graham Edgecombe
 *
 */
public class TaskQueue {

  /**
   * The task queue singleton.
   */
  private static final TaskQueue INSTANCE = new TaskQueue();

  /**
   * Gets the task queue instance.
   * @return The task queue instance.
   */
  public static TaskQueue getTaskQueue() {
    return INSTANCE;
  }

  /**
   * The scheduled executor service backing this task queue.
   */
  private ScheduledExecutorService service = Executors.newScheduledThreadPool(1);

  /**
   * Default private constructor.
   */
  private TaskQueue() {
    /* empty */
  }

  /**
   * Pushes a task onto the task queue.
   * @param task The task to be executed.
   */
  public void push(final Task task) {
    service.submit(new Runnable() {
      public void run() {
        task.execute();
      }
    });
  }
}
```
new_file: src/org/opencraft/server/task/TaskQueue.java
old_contents:
```java
package org.opencraft.server.task;

/*
 * OpenCraft License
 *
 * Copyright (c) 2009 Graham Edgecombe.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *     * Redistributions of source code must retain the above copyright notice,
 *       this list of conditions and the following disclaimer.
 *
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *
 *     * Neither the name of the OpenCraft nor the names of its
 *       contributors may be used to endorse or promote products derived from
 *       this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/**
 * Manages the task queue.
 * @author Graham Edgecombe
 *
 */
public class TaskQueue {

  /**
   * The task queue singleton.
   */
  private static final TaskQueue INSTANCE = new TaskQueue();

  /**
   * Gets the task queue instance.
   * @return The task queue instance.
   */
  public static TaskQueue getTaskQueue() {
    return INSTANCE;
  }
}
```
message: - More work on the task system. git-svn-id: be99ed0070bb28ac5919ad28b1e3fed0cd356292@8 ed70fe5f-b892-4f76-b3ae-cefddca5e843
old_file: src/org/opencraft/server/task/TaskQueue.java
subject: - More work on the task system.
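The new TaskQueue backs push() with a single-threaded ScheduledExecutorService, so submitted tasks run one at a time in submission order. A self-contained sketch of that pattern follows; it simplifies the original's Task type to a plain Runnable, which is an assumption for illustration.

```java
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class SingleThreadQueueSketch {
  // One worker thread: submissions are serialized, so tasks never overlap.
  private final ScheduledExecutorService service = Executors.newScheduledThreadPool(1);

  public void push(Runnable task) {
    service.submit(task);
  }

  public static void main(String[] args) throws InterruptedException {
    SingleThreadQueueSketch queue = new SingleThreadQueueSketch();
    queue.push(() -> System.out.println("first"));
    queue.push(() -> System.out.println("second")); // always prints after "first"
    queue.service.shutdown();
    queue.service.awaitTermination(1, TimeUnit.SECONDS);
  }
}
```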
Record 4 (konklone/campyre)

lang: Java
license: bsd-3-clause
commit: dd039cb81c35eb3f7165873d8c906e173cedc2cf
returncode: 0
repos: konklone/campyre
new_contents:
```java
package campyre.java;

import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.io.UnsupportedEncodingException;

import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.impl.cookie.DateParseException;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

public class Room implements Comparable<Room> {
  public String id, name, topic;
  public boolean full = false;
  public Campfire campfire;
  public ArrayList<User> initialUsers = null;

  // For those times when you don't need a whole Room's details,
  // You just have the ID and need a Room function (e.g. uploading a file)
  public Room(Campfire campfire, String id) {
    this.campfire = campfire;
    this.id = id;
  }

  protected Room(Campfire campfire, JSONObject json) throws JSONException {
    this.campfire = campfire;
    this.id = json.getString("id");
    this.name = json.getString("name");
    this.topic = json.getString("topic");

    if (json.has("full"))
      this.full = json.getBoolean("full");

    if (json.has("users")) {
      initialUsers = new ArrayList<User>();
      JSONArray users = json.getJSONArray("users");
      int length = users.length();
      for (int i=0; i<length; i++)
        initialUsers.add(new User(campfire, users.getJSONObject(i)));
    }
  }

  public static Room find(Campfire campfire, String id) throws CampfireException {
    try {
      return new Room(campfire, new CampfireRequest(campfire).getOne(Campfire.roomPath(id), "room"));
    } catch(JSONException e) {
      throw new CampfireException(e, "Problem loading room from the API.");
    }
  }

  public static ArrayList<Room> all(Campfire campfire) throws CampfireException {
    ArrayList<Room> rooms = new ArrayList<Room>();
    try {
      JSONArray roomList = new CampfireRequest(campfire).getList(Campfire.roomsPath(), "rooms");
      int length = roomList.length();
      for (int i=0; i<length; i++)
        rooms.add(new Room(campfire, roomList.getJSONObject(i)));
      Collections.sort(rooms);
    } catch(JSONException e) {
      throw new CampfireException(e, "Problem loading room list from the API.");
    }
    return rooms;
  }

  // convenience function
  public void join() throws CampfireException {
    joinRoom(campfire, id);
  }

  public static void joinRoom(Campfire campfire, String roomId) throws CampfireException {
    String url = Campfire.joinPath(roomId);
    HttpResponse response = new CampfireRequest(campfire).post(url);
    int statusCode = response.getStatusLine().getStatusCode();
    switch(statusCode) {
    case HttpStatus.SC_OK:
      return; // okay!
    case HttpStatus.SC_MOVED_TEMPORARILY:
      throw new CampfireException("Unknown room.");
    default:
      throw new CampfireException("Unknown error trying to join the room.");
    }
  }

  public Message speak(String body) throws CampfireException {
    String type = (body.contains("\n")) ? "PasteMessage" : "TextMessage";
    String url = Campfire.speakPath(id);
    try {
      body = new String(body.getBytes("UTF-8"), "ISO-8859-1");
      String request = new JSONObject().put("message", new JSONObject().put("type", type).put("body", body)).toString();
      HttpResponse response = new CampfireRequest(campfire).post(url, request);
      int statusCode = response.getStatusLine().getStatusCode();
      if (statusCode == HttpStatus.SC_CREATED) {
        String responseBody = CampfireRequest.responseBody(response);
        return new Message(new JSONObject(responseBody).getJSONObject("message"));
      } else
        throw new CampfireException("Campfire error, message was not sent.");
    } catch(JSONException e) {
      throw new CampfireException(e, "Couldn't create JSON object while speaking.");
    } catch (DateParseException e) {
      throw new CampfireException(e, "Couldn't parse date from created message while speaking.");
    } catch (UnsupportedEncodingException e) {
      throw new CampfireException(e, "Problem converting special characters for transmission.");
    }
  }

  public void uploadImage(InputStream stream, String filename, String mimeType) throws CampfireException {
    new CampfireRequest(campfire).uploadFile(Campfire.uploadPath(id), stream, filename, mimeType);
  }

  public String toString() {
    return name;
  }

  @Override
  public int compareTo(Room another) {
    return name.compareTo(another.name);
  }
}
```
new_file: src/campyre/java/Room.java
old_contents:
```java
package campyre.java;

import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.io.UnsupportedEncodingException;

import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.impl.cookie.DateParseException;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

public class Room implements Comparable<Room> {
  public String id, name, topic;
  public boolean full = false;
  public Campfire campfire;
  public ArrayList<User> initialUsers = null;

  // For those times when you don't need a whole Room's details,
  // You just have the ID and need a Room function (e.g. uploading a file)
  public Room(Campfire campfire, String id) {
    this.campfire = campfire;
    this.id = id;
  }

  protected Room(Campfire campfire, JSONObject json) throws JSONException {
    this.campfire = campfire;
    this.id = json.getString("id");
    this.name = json.getString("name");
    this.topic = json.getString("topic");

    if (json.has("full"))
      this.full = json.getBoolean("full");

    if (json.has("users")) {
      initialUsers = new ArrayList<User>();
      JSONArray users = json.getJSONArray("users");
      int length = users.length();
      for (int i=0; i<length; i++)
        initialUsers.add(new User(campfire, users.getJSONObject(i)));
    }
  }

  public static Room find(Campfire campfire, String id) throws CampfireException {
    try {
      return new Room(campfire, new CampfireRequest(campfire).getOne(Campfire.roomPath(id), "room"));
    } catch(JSONException e) {
      throw new CampfireException(e, "Problem loading room from the API.");
    }
  }

  public static ArrayList<Room> all(Campfire campfire) throws CampfireException {
    ArrayList<Room> rooms = new ArrayList<Room>();
    try {
      JSONArray roomList = new CampfireRequest(campfire).getList(Campfire.roomsPath(), "rooms");
      int length = roomList.length();
      for (int i=0; i<length; i++)
        rooms.add(new Room(campfire, roomList.getJSONObject(i)));
      Collections.sort(rooms);
    } catch(JSONException e) {
      throw new CampfireException(e, "Problem loading room list from the API.");
    }
    return rooms;
  }

  // convenience function
  public void join() throws CampfireException {
    joinRoom(campfire, id);
  }

  public static void joinRoom(Campfire campfire, String roomId) throws CampfireException {
    String url = Campfire.joinPath(roomId);
    HttpResponse response = new CampfireRequest(campfire).post(url);
    int statusCode = response.getStatusLine().getStatusCode();
    switch(statusCode) {
    case HttpStatus.SC_OK:
      return; // okay!
    case HttpStatus.SC_MOVED_TEMPORARILY:
      throw new CampfireException("Unknown room.");
    default:
      throw new CampfireException("Unknown error trying to join the room.");
    }
  }

  public Message speak(String body) throws CampfireException {
    String type = (body.contains("\n")) ? "PasteMessage" : "TextMessage";
    String url = Campfire.speakPath(id);
    try {
      body = new String(body.getBytes("UTF-8"), "ISO-8859-1");
      String request = new JSONObject().put("message", new JSONObject().put("type", type).put("body", body)).toString();
      HttpResponse response = new CampfireRequest(campfire).post(url, request);
      int statusCode = response.getStatusLine().getStatusCode();
      if (statusCode == HttpStatus.SC_CREATED) {
        String responseBody = CampfireRequest.responseBody(response);
        return new Message(new JSONObject(responseBody).getJSONObject("message"));
      } else
        throw new CampfireException("Campfire error, message was not sent.");
    } catch(JSONException e) {
      throw new CampfireException(e, "Couldn't create JSON object while speaking.");
    } catch (DateParseException e) {
      throw new CampfireException(e, "Couldn't parse date from created message while speaking.");
    } catch (UnsupportedEncodingException e) {
      throw new CampfireException(e, "Cannot convert from UTF-8 to ISO-8859-1");
    }
  }

  public void uploadImage(InputStream stream, String filename, String mimeType) throws CampfireException {
    new CampfireRequest(campfire).uploadFile(Campfire.uploadPath(id), stream, filename, mimeType);
  }

  public String toString() {
    return name;
  }

  @Override
  public int compareTo(Room another) {
    return name.compareTo(another.name);
  }
}
```
message: More user friendly error message
old_file: src/campyre/java/Room.java
subject: More user friendly error message
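Beyond the friendlier message, the speak() method shown above relies on a charset round trip: it encodes the body as UTF-8 bytes and reinterprets them as ISO-8859-1, so a Latin-1-oriented transport ends up transmitting the original UTF-8 bytes unchanged. A small self-contained sketch of why that works:

```java
import java.io.UnsupportedEncodingException;

public class CharsetRoundTrip {
  public static void main(String[] args) throws UnsupportedEncodingException {
    String body = "caf\u00e9"; // contains a non-ASCII character
    // Reinterpret the UTF-8 bytes as ISO-8859-1: one char per byte, nothing lost.
    String wire = new String(body.getBytes("UTF-8"), "ISO-8859-1");
    // A transport that writes 'wire' as ISO-8859-1 emits the original UTF-8 bytes,
    // so the receiver can decode them back to the original string:
    String decoded = new String(wire.getBytes("ISO-8859-1"), "UTF-8");
    System.out.println(decoded.equals(body)); // true
  }
}
```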
Record 5 (VEDAGroup/webstart-maven-plugin)

lang: Java
license: mit
commit: af3aac558dfbfc4b1ef016d8dd5d9d2b2c8956ea
returncode: 0
repos: VEDAGroup/webstart-maven-plugin

new_contents:
```java
package org.codehaus.mojo.webstart; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.metadata.ArtifactMetadataSource; import org.apache.maven.artifact.resolver.ArtifactNotFoundException; import org.apache.maven.artifact.resolver.ArtifactResolutionException; import org.apache.maven.artifact.resolver.ArtifactResolutionResult; import org.apache.maven.artifact.resolver.filter.ScopeArtifactFilter; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.MojoFailureException; import org.apache.maven.project.MavenProject; import org.codehaus.mojo.webstart.generator.GeneratorExtraConfig; import org.codehaus.mojo.webstart.generator.JarResourcesGenerator; import org.codehaus.mojo.webstart.generator.VersionXmlGenerator; import org.codehaus.plexus.util.FileUtils; import org.codehaus.plexus.util.StringUtils; import java.io.File; import java.io.IOException; import java.net.MalformedURLException; import java.util.*; /** * This MOJO is tailored for use within a Maven web application project that uses * the JnlpDownloadServlet to serve up the JNLP application. * * @author Kevin Stembridge * @version $Id$ * @goal jnlp-download-servlet * @requiresDependencyResolution runtime * @requiresProject * @inheritedByDefault true * @since 1.0-alpha-2 */ public class JnlpDownloadServletMojo extends AbstractBaseJnlpMojo { /** * Maven project. * * @parameter default-value="${project}" * @required * @readonly */ private MavenProject project; /** * The project's artifact metadata source, used to resolve transitive dependencies. * * @component * @required * @readonly */ private ArtifactMetadataSource artifactMetadataSource; /** * The name of the directory into which the jnlp file and other * artifacts will be stored after processing. This directory will be created * directly within the root of the WAR produced by the enclosing project. * * @parameter default-value="webstart" */ private String outputDirectoryName; /** * The collection of JnlpFile configuration elements. Each one represents a * JNLP file that is to be generated and deployed within the enclosing * project's WAR artifact. At least one JnlpFile must be specified. * * @parameter * @required */ private List/*JnlpFile*/ jnlpFiles; /** * The configurable collection of jars that are common to all jnlpFile elements declared in * plugin configuration. These jars will be output as jar elements in the resources section of * every generated JNLP file and bundled into the specified output directory of the artifact * produced by the project. 
* * @parameter */ private List/*JarResource*/ commonJarResources; /** * {@inheritDoc} */ public MavenProject getProject() { return this.project; } /** * {@inheritDoc} */ public void execute() throws MojoExecutionException, MojoFailureException { checkConfiguration(); try { copyResources( getResourcesDirectory(), getWorkDirectory() ); } catch ( IOException e ) { throw new MojoExecutionException( "An error occurred attempting to copy " + "resources to the working directory.", e ); } if ( this.commonJarResources != null ) { retrieveJarResources( this.commonJarResources ); } for ( Iterator itr = this.jnlpFiles.iterator(); itr.hasNext(); ) { JnlpFile jnlpFile = (JnlpFile) itr.next(); retrieveJarResources( jnlpFile.getJarResources() ); } signOrRenameJars(); packJars(); for ( Iterator itr = this.jnlpFiles.iterator(); itr.hasNext(); ) { generateJnlpFile( (JnlpFile) itr.next(), getLibPath() ); } generateVersionXml(); copyWorkingDirToOutputDir(); } /** * Confirms that all plugin configuration provided by the user * in the pom.xml file is valid. * * @throws MojoExecutionException if any user configuration is invalid. */ private void checkConfiguration() throws MojoExecutionException { if ( this.jnlpFiles.isEmpty() ) { throw new MojoExecutionException( "Configuration error: At least one <jnlpFile> element must be specified" ); } if ( this.jnlpFiles.size() == 1 && StringUtils.isEmpty( ( (JnlpFile) this.jnlpFiles.get( 0 ) ).getOutputFilename() ) ) { getLog().debug( "Jnlp output file name not specified in single set of jnlpFiles. " + "Using default output file name: launch.jnlp." ); ( (JnlpFile) this.jnlpFiles.get( 0 ) ).setOutputFilename( "launch.jnlp" ); } for ( Iterator itr = this.jnlpFiles.iterator(); itr.hasNext(); ) { checkJnlpFileConfiguration( (JnlpFile) itr.next() ); } checkForDuplicateJarResources(); checkCommonJarResources(); checkForUniqueJnlpFilenames(); checkPack200(); } /** * Checks the validity of a single jnlpFile configuration element. * * @param jnlpFile The configuration element to be checked. * @throws MojoExecutionException if the config element is invalid. */ private void checkJnlpFileConfiguration( JnlpFile jnlpFile ) throws MojoExecutionException { if ( StringUtils.isEmpty( jnlpFile.getOutputFilename() ) ) { throw new MojoExecutionException( "Configuration error: An outputFilename must be specified for each jnlpFile element" ); } if ( jnlpFile.getTemplateFilename() == null ) { getLog().info( "No templateFilename found for " + jnlpFile.getOutputFilename() + ". Will use the default template." ); } else { File templateFile = new File( getTemplateDirectory(), jnlpFile.getTemplateFilename() ); if ( !templateFile.isFile() ) { throw new MojoExecutionException( "The specified JNLP template does not exist: [" + templateFile + "]" ); } } checkJnlpJarResources( jnlpFile ); } /** * Checks the collection of jarResources configured for a given jnlpFile element. * * @param jnlpFile The configuration element whose jarResources are to be checked. * @throws MojoExecutionException if any config is invalid. 
*/ private void checkJnlpJarResources( JnlpFile jnlpFile ) throws MojoExecutionException { List jnlpJarResources = jnlpFile.getJarResources(); if ( jnlpJarResources == null || jnlpJarResources.isEmpty() ) { throw new MojoExecutionException( "Configuration error: A non-empty <jarResources> element must be specified in the plugin " + "configuration for the JNLP file named [" + jnlpFile.getOutputFilename() + "]" ); } Iterator itr = jnlpJarResources.iterator(); List/*JarResource*/ jarsWithMainClass = new ArrayList(); while ( itr.hasNext() ) { JarResource jarResource = (JarResource) itr.next(); checkMandatoryJarResourceFields( jarResource ); if ( jarResource.getMainClass() != null ) { jnlpFile.setMainClass( jarResource.getMainClass() ); jarsWithMainClass.add( jarResource ); } } if ( jarsWithMainClass.isEmpty() ) { throw new MojoExecutionException( "Configuration error: Exactly one <jarResource> element must " + "be declared with a <mainClass> element in the configuration for JNLP file [" + jnlpFile.getOutputFilename() + "]" ); } if ( jarsWithMainClass.size() > 1 ) { throw new MojoExecutionException( "Configuration error: More than one <jarResource> element has been declared " + "with a <mainClass> element in the configuration for JNLP file [" + jnlpFile.getOutputFilename() + "]" ); } } /** * Checks that any jarResources defined in the jnlpFile elements are not also defined in * commonJarResources. * * @throws MojoExecutionException if a duplicate is found. */ private void checkForDuplicateJarResources() throws MojoExecutionException { if ( this.commonJarResources == null || this.commonJarResources.isEmpty() ) { return; } for ( Iterator jnlpFileItr = this.jnlpFiles.iterator(); jnlpFileItr.hasNext(); ) { JnlpFile jnlpFile = (JnlpFile) jnlpFileItr.next(); List jnlpJarResources = jnlpFile.getJarResources(); for ( Iterator jarResourceItr = jnlpJarResources.iterator(); jarResourceItr.hasNext(); ) { JarResource jarResource = (JarResource) jarResourceItr.next(); if ( this.commonJarResources.contains( jarResource ) ) { String message = "Configuration Error: The jar resource element for artifact " + jarResource + " defined in common jar resources is duplicated in the jar " + "resources configuration of the jnlp file identified by the template file " + jnlpFile.getTemplateFilename() + "."; throw new MojoExecutionException( message ); } } } } /** * Checks the configuration of common jar resources. Specifying common jar * resources is optional but if present, each jar resource must have the * same mandatory fields as jar resources configured directly within a * jnlpFile element, but it must not have a configured mainClass element. * * @throws MojoExecutionException if the config is invalid. */ private void checkCommonJarResources() throws MojoExecutionException { if ( this.commonJarResources == null ) { return; } for ( Iterator itr = this.commonJarResources.iterator(); itr.hasNext(); ) { JarResource jarResource = (JarResource) itr.next(); checkMandatoryJarResourceFields( jarResource ); if ( jarResource.getMainClass() != null ) { throw new MojoExecutionException( "Configuration Error: A mainClass must not be specified " + "on a JarResource in the commonJarResources collection." ); } } } /** * Checks mandatory files of the given jar resource (says groupId, artificatId or version). 
* * @param jarResource jar resource to check * @throws MojoExecutionException if one of the mandatory field is missing */ private void checkMandatoryJarResourceFields( JarResource jarResource ) throws MojoExecutionException { if ( StringUtils.isEmpty( jarResource.getGroupId() ) || StringUtils.isEmpty( jarResource.getArtifactId() ) || StringUtils.isEmpty( jarResource.getVersion() ) ) { throw new MojoExecutionException( "Configuration error: groupId, artifactId or version missing for jarResource[" + jarResource + "]." ); } } /** * Confirms that each jnlpFile element is configured with a unique JNLP file name. * * @throws MojoExecutionException if the config is invalid. */ private void checkForUniqueJnlpFilenames() throws MojoExecutionException { Set filenames = new HashSet( this.jnlpFiles.size() ); for ( Iterator itr = this.jnlpFiles.iterator(); itr.hasNext(); ) { JnlpFile jnlpFile = (JnlpFile) itr.next(); if ( !filenames.add( jnlpFile.getOutputFilename() ) ) { throw new MojoExecutionException( "Configuration error: Unique JNLP filenames must be provided. " + "The following file name appears more than once [" + jnlpFile.getOutputFilename() + "]." ); } } } /** * Resolve the artifacts represented by the given collection of JarResources and * copy them to the working directory if a newer copy of the file doesn't already * exist there. Transitive dependencies will also be retrieved. * <p/> * Transitive dependencies are added to the list specified as parameter. TODO fix that. * * @param jarResources list of jar resources to retrieve * @throws MojoExecutionException if something bas occurs while retrieving resources */ private void retrieveJarResources( List jarResources ) throws MojoExecutionException { Set jarResourceArtifacts = new HashSet(); try { //for each configured JarResource, create and resolve the corresponding artifact and //check it for the mainClass if specified for ( Iterator itr = jarResources.iterator(); itr.hasNext(); ) { JarResource jarResource = (JarResource) itr.next(); Artifact artifact = createArtifact( jarResource ); getArtifactResolver().resolve( artifact, getRemoteRepositories(), getLocalRepository() ); jarResource.setArtifact( artifact ); checkForMainClass( jarResource ); jarResourceArtifacts.add( artifact ); } if ( !isExcludeTransitive() ) { retrieveTransitiveDependencies( jarResourceArtifacts, jarResources ); } //for each JarResource, copy its artifact to the lib directory if necessary for ( Iterator itr = jarResources.iterator(); itr.hasNext(); ) { JarResource jarResource = (JarResource) itr.next(); Artifact artifact = jarResource.getArtifact(); boolean copied = copyJarAsUnprocessedToDirectoryIfNecessary( artifact.getFile(), getLibDirectory() ); if ( copied ) { String name = artifact.getFile().getName(); if ( getLog().isDebugEnabled() ) { getLog().debug( "Adding " + name + " to modifiedJnlpArtifacts list." ); } getModifiedJnlpArtifacts().add( name.substring( 0, name.lastIndexOf( '.' 
) ) ); } if ( jarResource.isOutputJarVersion() ) { // Create and set a version-less href for this jarResource String hrefValue = buildHrefValue( artifact ); jarResource.setHrefValue( hrefValue ); } } } catch ( ArtifactResolutionException e ) { throw new MojoExecutionException( "Unable to resolve an artifact", e ); } catch ( ArtifactNotFoundException e ) { throw new MojoExecutionException( "Unable to find an artifact", e ); } catch ( IOException e ) { throw new MojoExecutionException( "Unable to copy an artifact to the working directory", e ); } } /** * Creates from the given jar resource the underlying artifact. * * @param jarResource the jar resource * @return the created artifact from the given jar resource */ private Artifact createArtifact( JarResource jarResource ) { if ( jarResource.getClassifier() == null ) { return getArtifactFactory().createArtifact( jarResource.getGroupId(), jarResource.getArtifactId(), jarResource.getVersion(), Artifact.SCOPE_RUNTIME, "jar" ); } else { return getArtifactFactory().createArtifactWithClassifier( jarResource.getGroupId(), jarResource.getArtifactId(), jarResource.getVersion(), "jar", jarResource.getClassifier() ); } } /** * If the given jarResource is configured with a main class, the underlying artifact * is checked to see if it actually contains the specified class. * * @param jarResource the jar resources to test * @throws IllegalStateException if the jarResource's underlying artifact has not yet been resolved. * @throws MojoExecutionException if could not chek that the jar resource with a main class has really it */ private void checkForMainClass( JarResource jarResource ) throws MojoExecutionException { String mainClass = jarResource.getMainClass(); if ( mainClass == null ) { return; } Artifact artifact = jarResource.getArtifact(); if ( artifact == null ) { throw new IllegalStateException( "Implementation Error: The given jarResource cannot be checked for " + "a main class until the underlying artifact has been resolved: [" + jarResource + "]" ); } try { if ( !artifactContainsClass( artifact, mainClass ) ) { throw new MojoExecutionException( "The jar specified by the following jarResource does not contain the declared main class:" + jarResource ); } } catch ( MalformedURLException e ) { throw new MojoExecutionException( "Attempting to find main class [" + mainClass + "] in [" + artifact + "]", e ); } } private void retrieveTransitiveDependencies( Set jarResourceArtifacts, List jarResources ) throws ArtifactResolutionException, ArtifactNotFoundException { // this restricts to runtime and compile scope ScopeArtifactFilter artifactFilter = new ScopeArtifactFilter( Artifact.SCOPE_RUNTIME ); ArtifactResolutionResult result = getArtifactResolver().resolveTransitively( jarResourceArtifacts, getProject().getArtifact(), null, //managedVersions getLocalRepository(), getRemoteRepositories(), this.artifactMetadataSource, artifactFilter ); Set transitiveResolvedArtifacts = result.getArtifacts(); if ( getLog().isDebugEnabled() ) { getLog().debug( "transitively resolved artifacts = " + transitiveResolvedArtifacts ); getLog().debug( "jarResources = " + jarResources ); getLog().debug( "jarResourceArtifacts = " + jarResourceArtifacts ); } //for each transitive dependency, wrap it in a JarResource and add it to the collection of //existing jar resources for ( Iterator itr = transitiveResolvedArtifacts.iterator(); itr.hasNext(); ) { Artifact resolvedArtifact = (Artifact) itr.next(); // this whole double check is ugly as well as this method changing the input 
variable // we should really improve the way we collect the jarResources if ( !jarResourceArtifacts.contains( resolvedArtifact ) ) { JarResource newJarResource = new JarResource( resolvedArtifact ); if ( !jarResources.contains( newJarResource ) && newJarResource.getType().equals( "jar" ) ) { newJarResource.setOutputJarVersion( true ); jarResources.add( newJarResource ); } } } } private void generateJnlpFile( JnlpFile jnlpFile, String libPath ) throws MojoExecutionException { File jnlpOutputFile = new File( getWorkDirectory(), jnlpFile.getOutputFilename() ); Set jarResources = new LinkedHashSet(); jarResources.addAll( jnlpFile.getJarResources() ); if ( this.commonJarResources != null && !this.commonJarResources.isEmpty() ) { for ( Iterator itr = this.commonJarResources.iterator(); itr.hasNext(); ) { JarResource jarResource = (JarResource) itr.next(); jarResources.add( jarResource ); } jarResources.addAll( this.commonJarResources ); } JarResourcesGenerator jnlpGenerator = new JarResourcesGenerator( getProject(), getTemplateDirectory(), "default-jnlp-servlet-template.vm", jnlpOutputFile, jnlpFile.getTemplateFilename(), jarResources, jnlpFile.getMainClass(), getWebstartJarURLForVelocity(), libPath, getEncoding() ); jnlpGenerator.setExtraConfig( getGeneratorExtraConfig() ); try { jnlpGenerator.generate(); } catch ( Exception e ) { throw new MojoExecutionException( "The following error occurred attempting to generate " + "the JNLP deployment descriptor: " + e, e ); } } private GeneratorExtraConfig getGeneratorExtraConfig() { return new GeneratorExtraConfig() { public String getJnlpSpec() { return "1.0+"; } public String getOfflineAllowed() { return "false"; } public String getAllPermissions() { return "true"; } public String getJ2seVersion() { return "1.5+"; } public String getJnlpCodeBase() { return getCodebase(); } }; } /** * Generates a version.xml file for all the jarResources configured either in jnlpFile elements * or in the commonJarResources element. * * @throws MojoExecutionException if could not generate the xml version file */ private void generateVersionXml() throws MojoExecutionException { Set/*JarResource*/ jarResources = new LinkedHashSet(); //combine the jar resources from commonJarResources and each JnlpFile config for ( Iterator itr = this.jnlpFiles.iterator(); itr.hasNext(); ) { JnlpFile jnlpFile = (JnlpFile) itr.next(); jarResources.addAll( jnlpFile.getJarResources() ); } if ( this.commonJarResources != null ) { jarResources.addAll( this.commonJarResources ); } VersionXmlGenerator generator = new VersionXmlGenerator( getEncoding() ); generator.generate( getLibDirectory(), jarResources ); } /** * Builds the string to be entered in the href attribute of the jar * resource element in the generated JNLP file. This will be equal * to the artifact file name with the version number stripped out. * * @param artifact The underlying artifact of the jar resource. * @return The href string for the given artifact, never null. */ private String buildHrefValue( Artifact artifact ) { StringBuffer sbuf = new StringBuffer(); sbuf.append( artifact.getArtifactId() ); if ( StringUtils.isNotEmpty( artifact.getClassifier() ) ) { sbuf.append( "-" ).append( artifact.getClassifier() ); } sbuf.append( "." ).append( artifact.getArtifactHandler().getExtension() ); return sbuf.toString(); } /** * Copies the contents of the working directory to the output directory. 
* @throws MojoExecutionException if could not copy files */ private void copyWorkingDirToOutputDir() throws MojoExecutionException { File outputDir = new File( getProject().getBuild().getDirectory(), getProject().getBuild().getFinalName() + File.separator + this.outputDirectoryName ); if ( !outputDir.exists() ) { if ( getLog().isInfoEnabled() ) { getLog().info( "Creating JNLP output directory: " + outputDir.getAbsolutePath() ); } if ( !outputDir.mkdirs() ) { throw new MojoExecutionException( "Unable to create the output directory for the jnlp bundle" ); } } try { FileUtils.copyDirectoryStructure( getWorkDirectory(), outputDir ); } catch ( IOException e ) { throw new MojoExecutionException( "An error occurred attempting to copy a file to the JNLP output directory.", e ); } } }
```

new_file: webstart-maven-plugin/src/main/java/org/codehaus/mojo/webstart/JnlpDownloadServletMojo.java

old_contents:
```java
package org.codehaus.mojo.webstart; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.metadata.ArtifactMetadataSource; import org.apache.maven.artifact.resolver.ArtifactNotFoundException; import org.apache.maven.artifact.resolver.ArtifactResolutionException; import org.apache.maven.artifact.resolver.ArtifactResolutionResult; import org.apache.maven.artifact.resolver.filter.ScopeArtifactFilter; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.MojoFailureException; import org.apache.maven.project.MavenProject; import org.codehaus.mojo.webstart.generator.GeneratorExtraConfig; import org.codehaus.mojo.webstart.generator.JarResourcesGenerator; import org.codehaus.mojo.webstart.generator.VersionXmlGenerator; import org.codehaus.plexus.util.FileUtils; import org.codehaus.plexus.util.StringUtils; import java.io.File; import java.io.IOException; import java.net.MalformedURLException; import java.util.*; /** * This MOJO is tailored for use within a Maven web application project that uses * the JnlpDownloadServlet to serve up the JNLP application. * * @author Kevin Stembridge * @version $Id$ * @goal jnlp-download-servlet * @requiresDependencyResolution runtime * @requiresProject * @inheritedByDefault true * @since 1.0-alpha-2 */ public class JnlpDownloadServletMojo extends AbstractBaseJnlpMojo { /** * Maven project. * * @parameter default-value="${project}" * @required * @readonly */ private MavenProject project; /** * The project's artifact metadata source, used to resolve transitive dependencies. * * @component * @required * @readonly */ private ArtifactMetadataSource artifactMetadataSource; /** * The name of the directory into which the jnlp file and other * artifacts will be stored after processing. This directory will be created * directly within the root of the WAR produced by the enclosing project. * * @parameter default-value="webstart" */ private String outputDirectoryName; /** * The collection of JnlpFile configuration elements. Each one represents a * JNLP file that is to be generated and deployed within the enclosing * project's WAR artifact. At least one JnlpFile must be specified. * * @parameter * @required */ private List/*JnlpFile*/ jnlpFiles; /** * The configurable collection of jars that are common to all jnlpFile elements declared in * plugin configuration. These jars will be output as jar elements in the resources section of * every generated JNLP file and bundled into the specified output directory of the artifact * produced by the project. 
* * @parameter */ private List/*JarResource*/ commonJarResources; /** * {@inheritDoc} */ public MavenProject getProject() { return this.project; } /** * {@inheritDoc} */ public void execute() throws MojoExecutionException, MojoFailureException { checkConfiguration(); try { copyResources( getResourcesDirectory(), getWorkDirectory() ); } catch ( IOException e ) { throw new MojoExecutionException( "An error occurred attempting to copy " + "resources to the working directory.", e ); } if ( this.commonJarResources != null ) { retrieveJarResources( this.commonJarResources ); } for ( Iterator itr = this.jnlpFiles.iterator(); itr.hasNext(); ) { JnlpFile jnlpFile = (JnlpFile) itr.next(); retrieveJarResources( jnlpFile.getJarResources() ); } signOrRenameJars(); packJars(); for ( Iterator itr = this.jnlpFiles.iterator(); itr.hasNext(); ) { generateJnlpFile( (JnlpFile) itr.next(), getLibPath() ); } generateVersionXml(); copyWorkingDirToOutputDir(); } /** * Confirms that all plugin configuration provided by the user * in the pom.xml file is valid. * * @throws MojoExecutionException if any user configuration is invalid. */ private void checkConfiguration() throws MojoExecutionException { if ( this.jnlpFiles.isEmpty() ) { throw new MojoExecutionException( "Configuration error: At least one <jnlpFile> element must be specified" ); } if ( this.jnlpFiles.size() == 1 && StringUtils.isEmpty( ( (JnlpFile) this.jnlpFiles.get( 0 ) ).getOutputFilename() ) ) { getLog().debug( "Jnlp output file name not specified in single set of jnlpFiles. " + "Using default output file name: launch.jnlp." ); ( (JnlpFile) this.jnlpFiles.get( 0 ) ).setOutputFilename( "launch.jnlp" ); } for ( Iterator itr = this.jnlpFiles.iterator(); itr.hasNext(); ) { checkJnlpFileConfiguration( (JnlpFile) itr.next() ); } checkForDuplicateJarResources(); checkCommonJarResources(); checkForUniqueJnlpFilenames(); checkPack200(); } /** * Checks the validity of a single jnlpFile configuration element. * * @param jnlpFile The configuration element to be checked. * @throws MojoExecutionException if the config element is invalid. */ private void checkJnlpFileConfiguration( JnlpFile jnlpFile ) throws MojoExecutionException { if ( StringUtils.isEmpty( jnlpFile.getOutputFilename() ) ) { throw new MojoExecutionException( "Configuration error: An outputFilename must be specified for each jnlpFile element" ); } if ( jnlpFile.getTemplateFilename() == null ) { getLog().info( "No templateFilename found for " + jnlpFile.getOutputFilename() + ". Will use the default template." ); } else { File templateFile = new File( getTemplateDirectory(), jnlpFile.getTemplateFilename() ); if ( !templateFile.isFile() ) { throw new MojoExecutionException( "The specified JNLP template does not exist: [" + templateFile + "]" ); } } checkJnlpJarResources( jnlpFile ); } /** * Checks the collection of jarResources configured for a given jnlpFile element. * * @param jnlpFile The configuration element whose jarResources are to be checked. * @throws MojoExecutionException if any config is invalid. 
*/ private void checkJnlpJarResources( JnlpFile jnlpFile ) throws MojoExecutionException { List jnlpJarResources = jnlpFile.getJarResources(); if ( jnlpJarResources == null || jnlpJarResources.isEmpty() ) { throw new MojoExecutionException( "Configuration error: A non-empty <jarResources> element must be specified in the plugin " + "configuration for the JNLP file named [" + jnlpFile.getOutputFilename() + "]" ); } Iterator itr = jnlpJarResources.iterator(); List/*JarResource*/ jarsWithMainClass = new ArrayList(); while ( itr.hasNext() ) { JarResource jarResource = (JarResource) itr.next(); checkMandatoryJarResourceFields( jarResource ); if ( jarResource.getMainClass() != null ) { jnlpFile.setMainClass( jarResource.getMainClass() ); jarsWithMainClass.add( jarResource ); } } if ( jarsWithMainClass.isEmpty() ) { throw new MojoExecutionException( "Configuration error: Exactly one <jarResource> element must " + "be declared with a <mainClass> element in the configuration for JNLP file [" + jnlpFile.getOutputFilename() + "]" ); } if ( jarsWithMainClass.size() > 1 ) { throw new MojoExecutionException( "Configuration error: More than one <jarResource> element has been declared " + "with a <mainClass> element in the configuration for JNLP file [" + jnlpFile.getOutputFilename() + "]" ); } } /** * Checks that any jarResources defined in the jnlpFile elements are not also defined in * commonJarResources. * * @throws MojoExecutionException if a duplicate is found. */ private void checkForDuplicateJarResources() throws MojoExecutionException { if ( this.commonJarResources == null || this.commonJarResources.isEmpty() ) { return; } for ( Iterator jnlpFileItr = this.jnlpFiles.iterator(); jnlpFileItr.hasNext(); ) { JnlpFile jnlpFile = (JnlpFile) jnlpFileItr.next(); List jnlpJarResources = jnlpFile.getJarResources(); for ( Iterator jarResourceItr = jnlpJarResources.iterator(); jarResourceItr.hasNext(); ) { JarResource jarResource = (JarResource) jarResourceItr.next(); if ( this.commonJarResources.contains( jarResource ) ) { String message = "Configuration Error: The jar resource element for artifact " + jarResource + " defined in common jar resources is duplicated in the jar " + "resources configuration of the jnlp file identified by the template file " + jnlpFile.getTemplateFilename() + "."; throw new MojoExecutionException( message ); } } } } /** * Checks the configuration of common jar resources. Specifying common jar * resources is optional but if present, each jar resource must have the * same mandatory fields as jar resources configured directly within a * jnlpFile element, but it must not have a configured mainClass element. * * @throws MojoExecutionException if the config is invalid. */ private void checkCommonJarResources() throws MojoExecutionException { if ( this.commonJarResources == null ) { return; } for ( Iterator itr = this.commonJarResources.iterator(); itr.hasNext(); ) { JarResource jarResource = (JarResource) itr.next(); checkMandatoryJarResourceFields( jarResource ); if ( jarResource.getMainClass() != null ) { throw new MojoExecutionException( "Configuration Error: A mainClass must not be specified " + "on a JarResource in the commonJarResources collection." ); } } } /** * Checks the mandatory fields of the given jar resource (i.e. groupId, artifactId or version).
* * @param jarResource jar resource to check * @throws MojoExecutionException if one of the mandatory fields is missing */ private void checkMandatoryJarResourceFields( JarResource jarResource ) throws MojoExecutionException { if ( StringUtils.isEmpty( jarResource.getGroupId() ) || StringUtils.isEmpty( jarResource.getArtifactId() ) || StringUtils.isEmpty( jarResource.getVersion() ) ) { throw new MojoExecutionException( "Configuration error: groupId, artifactId or version missing for jarResource[" + jarResource + "]." ); } } /** * Confirms that each jnlpFile element is configured with a unique JNLP file name. * * @throws MojoExecutionException if the config is invalid. */ private void checkForUniqueJnlpFilenames() throws MojoExecutionException { Set filenames = new HashSet( this.jnlpFiles.size() ); for ( Iterator itr = this.jnlpFiles.iterator(); itr.hasNext(); ) { JnlpFile jnlpFile = (JnlpFile) itr.next(); if ( !filenames.add( jnlpFile.getOutputFilename() ) ) { throw new MojoExecutionException( "Configuration error: Unique JNLP filenames must be provided. " + "The following file name appears more than once [" + jnlpFile.getOutputFilename() + "]." ); } } } /** * Resolves the artifacts represented by the given collection of JarResources and * copies them to the working directory if a newer copy of the file doesn't already * exist there. Transitive dependencies will also be retrieved. * <p/> * Transitive dependencies are added to the list specified as a parameter. TODO fix that. * * @param jarResources list of jar resources to retrieve * @throws MojoExecutionException if something bad occurs while retrieving resources */ private void retrieveJarResources( List jarResources ) throws MojoExecutionException { Set jarResourceArtifacts = new HashSet(); try { //for each configured JarResource, create and resolve the corresponding artifact and //check it for the mainClass if specified for ( Iterator itr = jarResources.iterator(); itr.hasNext(); ) { JarResource jarResource = (JarResource) itr.next(); Artifact artifact = createArtifact( jarResource ); getArtifactResolver().resolve( artifact, getRemoteRepositories(), getLocalRepository() ); jarResource.setArtifact( artifact ); checkForMainClass( jarResource ); jarResourceArtifacts.add( artifact ); } if ( !isExcludeTransitive() ) { retrieveTransitiveDependencies( jarResourceArtifacts, jarResources ); } //for each JarResource, copy its artifact to the lib directory if necessary for ( Iterator itr = jarResources.iterator(); itr.hasNext(); ) { JarResource jarResource = (JarResource) itr.next(); Artifact artifact = jarResource.getArtifact(); boolean copied = copyJarAsUnprocessedToDirectoryIfNecessary( artifact.getFile(), getLibDirectory() ); if ( copied ) { String name = artifact.getFile().getName(); if ( getLog().isDebugEnabled() ) { getLog().debug( "Adding " + name + " to modifiedJnlpArtifacts list." ); } getModifiedJnlpArtifacts().add( name.substring( 0, name.lastIndexOf( '.'
) ) ); } if ( jarResource.isOutputJarVersion() ) { // Create and set a version-less href for this jarResource String hrefValue = buildHrefValue( artifact ); jarResource.setHrefValue( hrefValue ); } } } catch ( ArtifactResolutionException e ) { throw new MojoExecutionException( "Unable to resolve an artifact", e ); } catch ( ArtifactNotFoundException e ) { throw new MojoExecutionException( "Unable to find an artifact", e ); } catch ( IOException e ) { throw new MojoExecutionException( "Unable to copy an artifact to the working directory", e ); } } /** * Creates the underlying artifact from the given jar resource. * * @param jarResource the jar resource * @return the created artifact from the given jar resource */ private Artifact createArtifact( JarResource jarResource ) { if ( jarResource.getClassifier() == null ) { return getArtifactFactory().createArtifact( jarResource.getGroupId(), jarResource.getArtifactId(), jarResource.getVersion(), Artifact.SCOPE_RUNTIME, "jar" ); } else { return getArtifactFactory().createArtifactWithClassifier( jarResource.getGroupId(), jarResource.getArtifactId(), jarResource.getVersion(), "jar", jarResource.getClassifier() ); } } /** * If the given jarResource is configured with a main class, the underlying artifact * is checked to see if it actually contains the specified class. * * @param jarResource the jar resource to test * @throws IllegalStateException if the jarResource's underlying artifact has not yet been resolved. * @throws MojoExecutionException if it cannot be verified that the jar resource actually contains its declared main class */ private void checkForMainClass( JarResource jarResource ) throws MojoExecutionException { String mainClass = jarResource.getMainClass(); if ( mainClass == null ) { return; } Artifact artifact = jarResource.getArtifact(); if ( artifact == null ) { throw new IllegalStateException( "Implementation Error: The given jarResource cannot be checked for " + "a main class until the underlying artifact has been resolved: [" + jarResource + "]" ); } try { if ( !artifactContainsClass( artifact, mainClass ) ) { throw new MojoExecutionException( "The jar specified by the following jarResource does not contain the declared main class:" + jarResource ); } } catch ( MalformedURLException e ) { throw new MojoExecutionException( "Attempting to find main class [" + mainClass + "] in [" + artifact + "]", e ); } } private void retrieveTransitiveDependencies( Set jarResourceArtifacts, List jarResources ) throws ArtifactResolutionException, ArtifactNotFoundException { // this restricts to runtime and compile scope ScopeArtifactFilter artifactFilter = new ScopeArtifactFilter( Artifact.SCOPE_RUNTIME ); ArtifactResolutionResult result = getArtifactResolver().resolveTransitively( jarResourceArtifacts, getProject().getArtifact(), null, //managedVersions getLocalRepository(), getRemoteRepositories(), this.artifactMetadataSource, artifactFilter ); Set transitiveResolvedArtifacts = result.getArtifacts(); if ( getLog().isDebugEnabled() ) { getLog().debug( "transitively resolved artifacts = " + transitiveResolvedArtifacts ); getLog().debug( "jarResources = " + jarResources ); getLog().debug( "jarResourceArtifacts = " + jarResourceArtifacts ); } //for each transitive dependency, wrap it in a JarResource and add it to the collection of //existing jar resources for ( Iterator itr = transitiveResolvedArtifacts.iterator(); itr.hasNext(); ) { Artifact resolvedArtifact = (Artifact) itr.next(); // this whole double check is ugly, as is the fact that this method mutates its input
variable // we should really improve the way we collect the jarResources if ( !jarResourceArtifacts.contains( resolvedArtifact ) ) { JarResource newJarResource = new JarResource( resolvedArtifact ); if ( !jarResources.contains( newJarResource ) && !newJarResource.getType().equals( "pom" ) ) { newJarResource.setOutputJarVersion( true ); jarResources.add( newJarResource ); } } } } private void generateJnlpFile( JnlpFile jnlpFile, String libPath ) throws MojoExecutionException { File jnlpOutputFile = new File( getWorkDirectory(), jnlpFile.getOutputFilename() ); Set jarResources = new LinkedHashSet(); jarResources.addAll( jnlpFile.getJarResources() ); if ( this.commonJarResources != null && !this.commonJarResources.isEmpty() ) { jarResources.addAll( this.commonJarResources ); } JarResourcesGenerator jnlpGenerator = new JarResourcesGenerator( getProject(), getTemplateDirectory(), "default-jnlp-servlet-template.vm", jnlpOutputFile, jnlpFile.getTemplateFilename(), jarResources, jnlpFile.getMainClass(), getWebstartJarURLForVelocity(), libPath, getEncoding() ); jnlpGenerator.setExtraConfig( getGeneratorExtraConfig() ); try { jnlpGenerator.generate(); } catch ( Exception e ) { throw new MojoExecutionException( "The following error occurred attempting to generate " + "the JNLP deployment descriptor: " + e, e ); } } private GeneratorExtraConfig getGeneratorExtraConfig() { return new GeneratorExtraConfig() { public String getJnlpSpec() { return "1.0+"; } public String getOfflineAllowed() { return "false"; } public String getAllPermissions() { return "true"; } public String getJ2seVersion() { return "1.5+"; } public String getJnlpCodeBase() { return getCodebase(); } }; } /** * Generates a version.xml file for all the jarResources configured either in jnlpFile elements * or in the commonJarResources element. * * @throws MojoExecutionException if the version.xml file could not be generated */ private void generateVersionXml() throws MojoExecutionException { Set/*JarResource*/ jarResources = new LinkedHashSet(); //combine the jar resources from commonJarResources and each JnlpFile config for ( Iterator itr = this.jnlpFiles.iterator(); itr.hasNext(); ) { JnlpFile jnlpFile = (JnlpFile) itr.next(); jarResources.addAll( jnlpFile.getJarResources() ); } if ( this.commonJarResources != null ) { jarResources.addAll( this.commonJarResources ); } VersionXmlGenerator generator = new VersionXmlGenerator( getEncoding() ); generator.generate( getLibDirectory(), jarResources ); } /** * Builds the string to be entered in the href attribute of the jar * resource element in the generated JNLP file. This will be equal * to the artifact file name with the version number stripped out. * * @param artifact The underlying artifact of the jar resource. * @return The href string for the given artifact, never null. */ private String buildHrefValue( Artifact artifact ) { StringBuffer sbuf = new StringBuffer(); sbuf.append( artifact.getArtifactId() ); if ( StringUtils.isNotEmpty( artifact.getClassifier() ) ) { sbuf.append( "-" ).append( artifact.getClassifier() ); } sbuf.append( "." ).append( artifact.getArtifactHandler().getExtension() ); return sbuf.toString(); } /** * Copies the contents of the working directory to the output directory.
* @throws MojoExecutionException if the files could not be copied */ private void copyWorkingDirToOutputDir() throws MojoExecutionException { File outputDir = new File( getProject().getBuild().getDirectory(), getProject().getBuild().getFinalName() + File.separator + this.outputDirectoryName ); if ( !outputDir.exists() ) { if ( getLog().isInfoEnabled() ) { getLog().info( "Creating JNLP output directory: " + outputDir.getAbsolutePath() ); } if ( !outputDir.mkdirs() ) { throw new MojoExecutionException( "Unable to create the output directory for the jnlp bundle" ); } } try { FileUtils.copyDirectoryStructure( getWorkDirectory(), outputDir ); } catch ( IOException e ) { throw new MojoExecutionException( "An error occurred attempting to copy a file to the JNLP output directory.", e ); } } }
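For context on the version-less href convention above, here is a minimal, self-contained sketch of the buildHrefValue logic with invented sample values: the href keeps the artifactId, optional classifier and extension, while the version is stripped so the JnlpDownloadServlet can resolve the concrete versioned jar (via version.xml) at request time.

public class HrefValueSketch
{
    // Mirrors buildHrefValue( Artifact ) above; the parameters stand in for the
    // artifact's artifactId, classifier and extension (assumed example values).
    static String buildHref( String artifactId, String classifier, String extension )
    {
        StringBuilder sbuf = new StringBuilder();
        sbuf.append( artifactId );
        if ( classifier != null && classifier.length() > 0 )
        {
            sbuf.append( "-" ).append( classifier );
        }
        sbuf.append( "." ).append( extension );
        return sbuf.toString();
    }

    public static void main( String[] args )
    {
        System.out.println( buildHref( "my-app", null, "jar" ) );    // prints: my-app.jar
        System.out.println( buildHref( "my-app", "linux", "jar" ) ); // prints: my-app-linux.jar
    }
}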
MWEBSTART-138 The number of signed artifacts differs from the number of modified artifacts (Patch from Farrukh) git-svn-id: 77b324aa2a79f5be5a5588838f60aad7515134dd@16042 52ab4f32-60fc-0310-b215-8acea882cd1b
webstart-maven-plugin/src/main/java/org/codehaus/mojo/webstart/JnlpDownloadServletMojo.java
MWEBSTART-138 The number of signed artifacts differs from the number of modified artifacts (Patch from Farrukh)
Java
mit
0c42ba1ecdfc60d9f7a414f3056c9862b9ca72a3
0
InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service
package com.worth.ifs.user.service; import com.worth.ifs.BaseServiceSecurityTest; import com.worth.ifs.commons.service.ServiceResult; import com.worth.ifs.token.domain.Token; import com.worth.ifs.token.security.TokenLookupStrategies; import com.worth.ifs.token.security.TokenPermissionRules; import com.worth.ifs.user.resource.UserResource; import com.worth.ifs.user.resource.UserRoleType; import com.worth.ifs.user.security.UserPermissionRules; import com.worth.ifs.user.transactional.UserService; import org.junit.Before; import org.junit.Test; import org.springframework.security.access.method.P; import java.util.List; import java.util.Set; import static com.worth.ifs.commons.service.ServiceResult.serviceSuccess; import static com.worth.ifs.user.builder.UserResourceBuilder.newUserResource; import static org.mockito.Matchers.eq; import static org.mockito.Matchers.isA; import static org.mockito.Mockito.*; /** * Testing how the secured methods in UserService interact with Spring Security */ public class UserServiceSecurityTest extends BaseServiceSecurityTest<UserService> { private UserPermissionRules userRules; private TokenPermissionRules tokenRules; private TokenLookupStrategies tokenLookupStrategies; @Before public void lookupPermissionRules() { userRules = getMockPermissionRulesBean(UserPermissionRules.class); tokenRules = getMockPermissionRulesBean(TokenPermissionRules.class); tokenLookupStrategies = getMockPermissionEntityLookupStrategiesBean(TokenLookupStrategies.class); } @Test public void testFindAll() { service.findAll(); assertViewMultipleUsersExpectations(); } @Test public void testFindAssignableUsers() { service.findAssignableUsers(123L); assertViewMultipleUsersExpectations(); } @Test public void testFindByEmail() { assertAccessDenied(() -> service.findByEmail("[email protected]"), () -> { assertViewSingleUserExpectations(); }); } @Test public void testGetUserById() { assertAccessDenied(() -> service.getUserById(123L), () -> { assertViewSingleUserExpectations(); }); } @Test public void testGetUserByUid() { // this method must remain unsecured because it is the way in which we get a user onto the // SecurityContext in the first place for permission checking service.getUserResourceByUid("asdf"); verifyNoMoreInteractionsWithRules(); } @Test public void testChangePassword() { Token token = new Token(); when(tokenLookupStrategies.getTokenByHash("hash")).thenReturn(token); assertAccessDenied(() -> service.changePassword("hash", "newpassword"), () -> { verify(tokenRules).systemRegistrationUserCanUseTokensToResetPaswords(token, getLoggedInUser()); verifyNoMoreInteractionsWithRules(); }); } @Test public void testSendPasswordResetNotification() { UserResource user = newUserResource().build(); assertAccessDenied(() -> service.sendPasswordResetNotification(user), () -> { verify(userRules).usersCanChangeTheirOwnPassword(user, getLoggedInUser()); verify(userRules).systemRegistrationUserCanChangePasswordsForUsers(user, getLoggedInUser()); verifyNoMoreInteractionsWithRules(); }); } @Test public void testFindRelatedUsers() { service.findRelatedUsers(123L); assertViewMultipleUsersExpectations(); } private void assertViewSingleUserExpectations() { assertViewXUsersExpectations(1); } private void assertViewMultipleUsersExpectations() { assertViewXUsersExpectations(2); } private void assertViewXUsersExpectations(int numberOfUsers) { verify(userRules, times(numberOfUsers)).anyUserCanViewThemselves(isA(UserResource.class), eq(getLoggedInUser())); verify(userRules, 
times(numberOfUsers)).assessorsCanViewConsortiumUsersOnApplicationsTheyAreAssessing(isA(UserResource.class), eq(getLoggedInUser())); verify(userRules, times(numberOfUsers)).compAdminsCanViewEveryone(isA(UserResource.class), eq(getLoggedInUser())); verify(userRules, times(numberOfUsers)).consortiumMembersCanViewOtherConsortiumMembers(isA(UserResource.class), eq(getLoggedInUser())); verify(userRules, times(numberOfUsers)).systemRegistrationUserCanViewEveryone(isA(UserResource.class), eq(getLoggedInUser())); verifyNoMoreInteractionsWithRules(); } private void verifyNoMoreInteractionsWithRules() { verifyNoMoreInteractions(tokenRules); verifyNoMoreInteractions(userRules); } @Override protected Class<? extends UserService> getServiceClass() { return TestUserService.class; } /** * Test class for use in Service Security tests. */ public static class TestUserService implements UserService { @Override public ServiceResult<UserResource> getUserResourceByUid(String uid) { return serviceSuccess(newUserResource().build()); } @Override public ServiceResult<UserResource> getUserById(Long id) { return serviceSuccess(newUserResource().build()); } @Override public ServiceResult<List<UserResource>> findAll() { return serviceSuccess(newUserResource().build(2)); } @Override public ServiceResult<List<UserResource>> findByProcessRole(UserRoleType roleType) { return serviceSuccess(newUserResource().build(2)); } @Override public ServiceResult<UserResource> findByEmail(String email) { return serviceSuccess(newUserResource().build()); } @Override public ServiceResult<UserResource> findInactiveByEmail(String email) { return serviceSuccess(newUserResource().build()); } @Override public ServiceResult<Set<UserResource>> findAssignableUsers(Long applicationId) { return serviceSuccess(newUserResource().buildSet(2)); } @Override public ServiceResult<Set<UserResource>> findRelatedUsers(Long applicationId) { return serviceSuccess(newUserResource().buildSet(2)); } @Override public ServiceResult<Void> sendPasswordResetNotification(@P("user") UserResource user) { return null; } @Override public ServiceResult<Void> changePassword(@P("hash") String hash, String password) { return null; } } }
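As background for the times(numberOfUsers) assertions above, here is a minimal, self-contained Mockito sketch of count-based verification followed by verifyNoMoreInteractions; the Greeter collaborator is invented for illustration.

import static org.mockito.Mockito.*;

public class TimesVerificationSketch {

    interface Greeter { String greet(String name); } // hypothetical collaborator

    public static void main(String[] args) {
        Greeter greeter = mock(Greeter.class);
        greeter.greet("a");
        greeter.greet("b");
        // Passes only if greet(..) was invoked exactly twice, with any String argument.
        verify(greeter, times(2)).greet(anyString());
        // Passes because both recorded invocations have now been verified.
        verifyNoMoreInteractions(greeter);
    }
}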
ifs-data-service/src/test/java/com/worth/ifs/user/service/UserServiceSecurityTest.java
package com.worth.ifs.user.service; import com.worth.ifs.BaseServiceSecurityTest; import com.worth.ifs.commons.service.ServiceResult; import com.worth.ifs.token.domain.Token; import com.worth.ifs.token.security.TokenLookupStrategies; import com.worth.ifs.token.security.TokenPermissionRules; import com.worth.ifs.user.resource.UserResource; import com.worth.ifs.user.resource.UserRoleType; import com.worth.ifs.user.security.UserPermissionRules; import com.worth.ifs.user.transactional.UserService; import org.junit.Before; import org.junit.Test; import org.springframework.security.access.method.P; import java.util.List; import java.util.Set; import static com.worth.ifs.commons.service.ServiceResult.serviceSuccess; import static com.worth.ifs.user.builder.UserResourceBuilder.newUserResource; import static org.mockito.Matchers.eq; import static org.mockito.Matchers.isA; import static org.mockito.Mockito.*; /** * Testing how the secured methods in UserService interact with Spring Security */ public class UserServiceSecurityTest extends BaseServiceSecurityTest<UserService> { private UserPermissionRules userRules; private TokenPermissionRules tokenRules; private TokenLookupStrategies tokenLookupStrategies; @Before public void lookupPermissionRules() { userRules = getMockPermissionRulesBean(UserPermissionRules.class); tokenRules = getMockPermissionRulesBean(TokenPermissionRules.class); tokenLookupStrategies = getMockPermissionEntityLookupStrategiesBean(TokenLookupStrategies.class); } @Test public void testFindAll() { service.findAll(); assertViewMultipleUsersExpectations(); } @Test public void testFindAssignableUsers() { service.findAssignableUsers(123L); assertViewMultipleUsersExpectations(); } @Test public void testFindByEmail() { assertAccessDenied(() -> service.findByEmail("[email protected]"), () -> { assertViewSingleUserExpectations(); }); } @Test public void testGetUserById() { assertAccessDenied(() -> service.getUserById(123L), () -> { assertViewSingleUserExpectations(); }); } @Test public void testGetUserByUid() { // this method must remain unsecured because it is the way in which we get a user onto the // SecurityContext in the first place for permission checking service.getUserResourceByUid("asdf"); verifyNoMoreInteractionsWithRules(); } @Test public void testChangePassword() { Token token = new Token(); when(tokenLookupStrategies.getTokenByHash("hash")).thenReturn(token); assertAccessDenied(() -> service.changePassword("hash", "newpassword"), () -> { verify(tokenRules).systemRegistrationUserCanUseTokensToResetPaswords(token, getLoggedInUser()); verifyNoMoreInteractionsWithRules(); }); } @Test public void testSendPasswordResetNotification() { UserResource user = newUserResource().build(); assertAccessDenied(() -> service.sendPasswordResetNotification(user), () -> { verify(userRules).usersCanChangeTheirOwnPassword(user, getLoggedInUser()); verify(userRules).systemRegistrationUserCanChangePasswordsForUsers(user, getLoggedInUser()); verifyNoMoreInteractionsWithRules(); }); } @Test public void testFindRelatedUsers() { service.findRelatedUsers(123L); assertViewMultipleUsersExpectations(); } private void assertViewSingleUserExpectations() { assertViewXUsersExpectations(1); } private void assertViewMultipleUsersExpectations() { assertViewXUsersExpectations(2); } private void assertViewXUsersExpectations(int numberOfUsers) { verify(userRules, times(numberOfUsers)).anyUserCanViewThemselves(isA(UserResource.class), eq(getLoggedInUser())); verify(userRules, 
times(numberOfUsers)).assessorsCanViewConsortiumUsersOnApplicationsTheyAreAssessing(isA(UserResource.class), eq(getLoggedInUser())); verify(userRules, times(numberOfUsers)).compAdminsCanViewEveryone(isA(UserResource.class), eq(getLoggedInUser())); verify(userRules, times(numberOfUsers)).consortiumMembersCanViewOtherConsortiumMembers(isA(UserResource.class), eq(getLoggedInUser())); verify(userRules, times(numberOfUsers)).systemRegistrationUserCanViewEveryone(isA(UserResource.class), eq(getLoggedInUser())); verify(userRules, times(numberOfUsers)).projectFinanceUsersCanViewEveryone(isA(UserResource.class), eq(getLoggedInUser())); verifyNoMoreInteractionsWithRules(); } private void verifyNoMoreInteractionsWithRules() { verifyNoMoreInteractions(tokenRules); verifyNoMoreInteractions(userRules); } @Override protected Class<? extends UserService> getServiceClass() { return TestUserService.class; } /** * Test class for use in Service Security tests. */ public static class TestUserService implements UserService { @Override public ServiceResult<UserResource> getUserResourceByUid(String uid) { return serviceSuccess(newUserResource().build()); } @Override public ServiceResult<UserResource> getUserById(Long id) { return serviceSuccess(newUserResource().build()); } @Override public ServiceResult<List<UserResource>> findAll() { return serviceSuccess(newUserResource().build(2)); } @Override public ServiceResult<List<UserResource>> findByProcessRole(UserRoleType roleType) { return serviceSuccess(newUserResource().build(2)); } @Override public ServiceResult<UserResource> findByEmail(String email) { return serviceSuccess(newUserResource().build()); } @Override public ServiceResult<UserResource> findInactiveByEmail(String email) { return serviceSuccess(newUserResource().build()); } @Override public ServiceResult<Set<UserResource>> findAssignableUsers(Long applicationId) { return serviceSuccess(newUserResource().buildSet(2)); } @Override public ServiceResult<Set<UserResource>> findRelatedUsers(Long applicationId) { return serviceSuccess(newUserResource().buildSet(2)); } @Override public ServiceResult<Void> sendPasswordResetNotification(@P("user") UserResource user) { return null; } @Override public ServiceResult<Void> changePassword(@P("hash") String hash, String password) { return null; } } }
remove exp not needed until merge Former-commit-id: ab54da0ad42b89277e3a36b344b7a61f366ff858
ifs-data-service/src/test/java/com/worth/ifs/user/service/UserServiceSecurityTest.java
remove exp not needed until merge
Java
mit
0680ceccb0e84ea24e29f0f67d9dfb949db49635
0
SpongePowered/Sponge,SpongePowered/Sponge,SpongePowered/Sponge
/* * This file is part of Sponge, licensed under the MIT License (MIT). * * Copyright (c) SpongePowered <https://www.spongepowered.org> * Copyright (c) contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package org.spongepowered.common.mixin.core.server.players; import io.netty.channel.local.LocalAddress; import net.kyori.adventure.audience.Audience; import net.kyori.adventure.identity.Identity; import net.kyori.adventure.text.Component; import net.minecraft.core.BlockPos; import net.minecraft.core.RegistryAccess; import net.minecraft.nbt.CompoundTag; import net.minecraft.network.Connection; import net.minecraft.network.chat.ChatType; import net.minecraft.network.chat.MutableComponent; import net.minecraft.network.chat.TextComponent; import net.minecraft.network.chat.TranslatableComponent; import net.minecraft.network.protocol.Packet; import net.minecraft.network.protocol.game.ClientboundDisconnectPacket; import net.minecraft.network.protocol.game.ClientboundLoginPacket; import net.minecraft.network.protocol.game.ClientboundPlayerInfoPacket; import net.minecraft.resources.ResourceKey; import net.minecraft.server.MinecraftServer; import net.minecraft.server.ServerScoreboard; import net.minecraft.server.bossevents.CustomBossEvents; import net.minecraft.server.level.ServerLevel; import net.minecraft.server.network.ServerGamePacketListenerImpl; import net.minecraft.server.players.IpBanList; import net.minecraft.server.players.PlayerList; import net.minecraft.server.players.UserBanList; import net.minecraft.server.players.UserWhiteList; import net.minecraft.world.level.GameType; import net.minecraft.world.level.Level; import net.minecraft.world.level.border.BorderChangeListener; import net.minecraft.world.level.border.WorldBorder; import net.minecraft.world.level.dimension.DimensionType; import org.apache.logging.log4j.Logger; import org.objectweb.asm.Opcodes; import org.spongepowered.api.Sponge; import org.spongepowered.api.adventure.Audiences; import org.spongepowered.api.entity.living.player.User; import org.spongepowered.api.entity.living.player.server.ServerPlayer; import org.spongepowered.api.event.Cause; import org.spongepowered.api.event.EventContext; import org.spongepowered.api.event.SpongeEventFactory; import org.spongepowered.api.event.entity.living.player.RespawnPlayerEvent; import org.spongepowered.api.event.network.ServerSideConnectionEvent; import org.spongepowered.api.network.ServerSideConnection; import org.spongepowered.api.profile.GameProfile; import 
org.spongepowered.api.service.ban.Ban; import org.spongepowered.api.service.permission.PermissionService; import org.spongepowered.api.service.permission.Subject; import org.spongepowered.api.world.server.ServerLocation; import org.spongepowered.asm.mixin.Final; import org.spongepowered.asm.mixin.Mixin; import org.spongepowered.asm.mixin.Mutable; import org.spongepowered.asm.mixin.Shadow; import org.spongepowered.asm.mixin.injection.At; import org.spongepowered.asm.mixin.injection.Inject; import org.spongepowered.asm.mixin.injection.Redirect; import org.spongepowered.asm.mixin.injection.Slice; import org.spongepowered.asm.mixin.injection.callback.CallbackInfo; import org.spongepowered.asm.mixin.injection.callback.CallbackInfoReturnable; import org.spongepowered.common.SpongeCommon; import org.spongepowered.common.SpongeServer; import org.spongepowered.common.accessor.network.protocol.game.ClientboundRespawnPacketAccessor; import org.spongepowered.common.adventure.SpongeAdventure; import org.spongepowered.common.bridge.client.server.IntegratedPlayerListBridge; import org.spongepowered.common.bridge.data.VanishableBridge; import org.spongepowered.common.bridge.network.ConnectionBridge; import org.spongepowered.common.bridge.server.level.ServerPlayerBridge; import org.spongepowered.common.bridge.server.ServerScoreboardBridge; import org.spongepowered.common.bridge.server.players.PlayerListBridge; import org.spongepowered.common.bridge.server.level.ServerLevelBridge; import org.spongepowered.common.bridge.world.level.storage.PrimaryLevelDataBridge; import org.spongepowered.common.entity.player.LoginPermissions; import org.spongepowered.common.entity.player.SpongeUserView; import org.spongepowered.common.event.tracking.PhaseContext; import org.spongepowered.common.event.tracking.PhaseTracker; import org.spongepowered.common.event.tracking.context.transaction.EffectTransactor; import org.spongepowered.common.event.tracking.context.transaction.TransactionalCaptureSupplier; import org.spongepowered.common.event.tracking.context.transaction.effect.BroadcastInventoryChangesEffect; import org.spongepowered.common.event.tracking.context.transaction.inventory.PlayerInventoryTransaction; import org.spongepowered.common.profile.SpongeGameProfile; import org.spongepowered.common.server.PerWorldBorderListener; import org.spongepowered.common.service.server.ban.SpongeIPBanList; import org.spongepowered.common.service.server.ban.SpongeUserBanList; import org.spongepowered.common.service.server.whitelist.SpongeUserWhiteList; import org.spongepowered.common.util.Constants; import org.spongepowered.common.util.NetworkUtil; import org.spongepowered.common.util.VecHelper; import org.spongepowered.math.vector.Vector3d; import java.net.InetAddress; import java.net.SocketAddress; import java.net.UnknownHostException; import java.text.SimpleDateFormat; import java.time.Instant; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.UUID; import java.util.concurrent.CompletableFuture; import javax.annotation.Nullable; @Mixin(PlayerList.class) public abstract class PlayerListMixin implements PlayerListBridge { // @formatter:off @Shadow @Final private static Logger LOGGER; @Shadow @Final private static SimpleDateFormat BAN_DATE_FORMAT; @Shadow @Final private MinecraftServer server; @Shadow private int viewDistance; @Shadow @Final @Mutable private UserBanList bans; @Shadow @Final @Mutable private IpBanList ipBans; @Shadow @Final @Mutable private UserWhiteList 
whitelist; @Shadow @Final private List<net.minecraft.server.level.ServerPlayer> players; @Shadow @Final protected int maxPlayers; @Shadow @Final private Map<UUID, net.minecraft.server.level.ServerPlayer> playersByUUID; @Shadow public abstract MinecraftServer shadow$getServer(); @Shadow @Nullable public abstract CompoundTag shadow$load(net.minecraft.server.level.ServerPlayer playerIn); @Shadow public abstract boolean shadow$canBypassPlayerLimit(com.mojang.authlib.GameProfile param0); // @formatter:on private boolean impl$isGameMechanicRespawn = false; ResourceKey<Level> impl$newDestination = null; ResourceKey<Level> impl$originalDestination = null; @Inject(method = "<init>", at = @At("RETURN")) private void impl$setSpongeLists(final CallbackInfo callbackInfo) { this.bans = new SpongeUserBanList(PlayerList.USERBANLIST_FILE); this.ipBans = new SpongeIPBanList(PlayerList.IPBANLIST_FILE); this.whitelist = new SpongeUserWhiteList(PlayerList.WHITELIST_FILE); } @Override public void bridge$setOriginalDestinationDimension(final ResourceKey<Level> dimension) { this.impl$originalDestination = dimension; } @Override public void bridge$setNewDestinationDimension(final ResourceKey<Level> dimension) { this.impl$newDestination = dimension; } @Override public CompletableFuture<net.minecraft.network.chat.Component> bridge$canPlayerLogin(final SocketAddress param0, final com.mojang.authlib.GameProfile param1) { if (this instanceof IntegratedPlayerListBridge) { return ((IntegratedPlayerListBridge) this).bridge$canPlayerLoginClient(param0, param1); } return this.impl$canPlayerLoginServer(param0, param1); } protected final CompletableFuture<net.minecraft.network.chat.Component> impl$canPlayerLoginServer(final SocketAddress param0, final com.mojang.authlib.GameProfile param1) { final SpongeGameProfile profile = SpongeGameProfile.basicOf(param1); return Sponge.server().serviceProvider().banService().find(profile).thenCompose(profileBanOpt -> { if (profileBanOpt.isPresent()) { final Ban.Profile var0 = profileBanOpt.get(); final MutableComponent var1 = new TranslatableComponent("multiplayer.disconnect.banned.reason", var0.reason().orElse(Component.empty())); if (var0.expirationDate().isPresent()) { var1.append(new TranslatableComponent("multiplayer.disconnect.banned.expiration", BAN_DATE_FORMAT.format(var0.expirationDate().get()))); } return CompletableFuture.completedFuture(var1); } if (param0 instanceof LocalAddress) { // don't bother looking up IP bans on local address return CompletableFuture.completedFuture(null); } final InetAddress address; try { address = InetAddress.getByName(NetworkUtil.getHostString(param0)); } catch (final UnknownHostException ex) { return CompletableFuture.completedFuture(new TextComponent(ex.getMessage())); // no } return Sponge.server().serviceProvider().banService().find(address).thenCompose(ipBanOpt -> { if (ipBanOpt.isPresent()) { final Ban.IP var2 = ipBanOpt.get(); final MutableComponent var3 = new TranslatableComponent("multiplayer.disconnect.banned_ip.reason", var2.reason().orElse(Component.empty())); if (var2.expirationDate().isPresent()) { var3.append(new TranslatableComponent("multiplayer.disconnect.banned_ip.expiration", BAN_DATE_FORMAT.format(var2.expirationDate().get()))); } return CompletableFuture.completedFuture(var3); } return CompletableFuture.supplyAsync(() -> { if (!Sponge.server().isWhitelistEnabled()) { return true; } final PermissionService permissionService = Sponge.server().serviceProvider().permissionService(); Subject subject = 
permissionService.userSubjects().subject(param1.getId().toString()).orElse(null); if (subject == null) { subject = permissionService.defaults(); } return subject.hasPermission(LoginPermissions.BYPASS_WHITELIST_PERMISSION); }, SpongeCommon.server()).thenCompose(w -> { if (w) { return CompletableFuture.completedFuture(null); } return Sponge.server().serviceProvider().whitelistService().isWhitelisted(profile).<net.minecraft.network.chat.Component>thenApply(whitelisted -> { if (!whitelisted) { return new TranslatableComponent("multiplayer.disconnect.not_whitelisted"); } return null; }); }); }); }).thenApplyAsync(component -> { if (component != null) { return component; } if (this.players.size() >= this.maxPlayers && !this.shadow$canBypassPlayerLimit(param1)) { return new TranslatableComponent("multiplayer.disconnect.server_full"); } return null; }, SpongeCommon.server()); } @Redirect(method = "placeNewPlayer", at = @At(value = "INVOKE", target = "Lnet/minecraft/server/players/PlayerList;load(Lnet/minecraft/server/level/ServerPlayer;)Lnet/minecraft/nbt/CompoundTag;" ) ) private CompoundTag impl$setPlayerDataForNewPlayers(final PlayerList playerList, final net.minecraft.server.level.ServerPlayer playerIn) { final CompoundTag compound = this.shadow$load(playerIn); if (compound == null) { ((SpongeServer) SpongeCommon.server()).getPlayerDataManager().setPlayerInfo(playerIn.getUUID(), Instant.now(), Instant.now()); } return compound; } @Redirect(method = "placeNewPlayer", at = @At(value = "INVOKE", target = "Lnet/minecraft/server/MinecraftServer;getLevel(Lnet/minecraft/resources/ResourceKey;)Lnet/minecraft/server/level/ServerLevel;" ) ) private net.minecraft.server.level.ServerLevel impl$onInitPlayer_getWorld(final MinecraftServer minecraftServer, final ResourceKey<Level> dimension, final Connection networkManager, final net.minecraft.server.level.ServerPlayer mcPlayer ) { @Nullable final net.minecraft.network.chat.Component kickReason = ((ConnectionBridge) networkManager).bridge$getKickReason(); final Component disconnectMessage; if (kickReason != null) { disconnectMessage = SpongeAdventure.asAdventure(kickReason); } else { disconnectMessage = Component.text("You are not allowed to log in to this server."); } net.minecraft.server.level.ServerLevel mcWorld = minecraftServer.getLevel(dimension); if (mcWorld == null) { SpongeCommon.logger().warn("The player '{}' was located in a world that isn't loaded or doesn't exist. This is not safe so " + "the player will be moved to the spawn of the default world.", mcPlayer.getGameProfile().getName()); mcWorld = minecraftServer.overworld(); final BlockPos spawnPoint = mcWorld.getSharedSpawnPos(); mcPlayer.setPos(spawnPoint.getX() + 0.5, spawnPoint.getY() + 0.5, spawnPoint.getZ() + 0.5); } mcPlayer.setLevel(mcWorld); final ServerPlayer player = (ServerPlayer) mcPlayer; final ServerLocation location = player.serverLocation(); final Vector3d rotation = player.rotation(); // player.connection() cannot be used here, because it is still null at this point final ServerSideConnection connection = (ServerSideConnection) networkManager.getPacketListener(); // The user is not yet in the player list, so we need to make special provision.
final User user = SpongeUserView.createLoginEventUser(player); final Cause cause = Cause.of(EventContext.empty(), connection, user); final ServerSideConnectionEvent.Login event = SpongeEventFactory.createServerSideConnectionEventLogin(cause, disconnectMessage, disconnectMessage, location, location, rotation, rotation, connection, user); if (kickReason != null) { event.setCancelled(true); } if (SpongeCommon.post(event)) { this.impl$disconnectClient(networkManager, event.message(), player.profile()); return null; } final ServerLocation toLocation = event.toLocation(); final Vector3d toRotation = event.toRotation(); mcPlayer.absMoveTo(toLocation.x(), toLocation.y(), toLocation.z(), (float) toRotation.y(), (float) toRotation.x()); return (net.minecraft.server.level.ServerLevel) toLocation.world(); } @Inject(method = "placeNewPlayer", cancellable = true, at = @At( value = "INVOKE", target = "Lnet/minecraft/server/MinecraftServer;getLevel(Lnet/minecraft/resources/ResourceKey;)Lnet/minecraft/server/level/ServerLevel;", shift = At.Shift.AFTER ) ) private void impl$onInitPlayer_BeforeSetWorld(final Connection p_72355_1_, final net.minecraft.server.level.ServerPlayer p_72355_2_, final CallbackInfo ci) { if (!p_72355_1_.isConnected()) { ci.cancel(); } } @Redirect(method = "placeNewPlayer", at = @At(value = "INVOKE", target = "Lorg/apache/logging/log4j/Logger;info(Ljava/lang/String;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;)V", remap = false ) ) private void impl$onInitPlayer_printPlayerWorldInJoinFeedback( final Logger logger, final String message, final Object p0, final Object p1, final Object p2, final Object p3, final Object p4, final Object p5, final Connection manager, final net.minecraft.server.level.ServerPlayer entity) { logger.info("{}[{}] logged in to world '{}' with entity id {} at ({}, {}, {})", p0, p1, ((org.spongepowered.api.world.server.ServerWorld) entity.getLevel()).key(), p2, p3, p4, p5); } @Redirect(method = "placeNewPlayer", slice = @Slice( from = @At( value = "INVOKE", target = "Lnet/minecraft/server/MinecraftServer;invalidateStatus()V"), to = @At( value = "FIELD", opcode = Opcodes.GETSTATIC, target = "Lnet/minecraft/ChatFormatting;YELLOW:Lnet/minecraft/ChatFormatting;" ) ), at = @At( value = "INVOKE", remap = false, target = "Ljava/lang/String;equalsIgnoreCase(Ljava/lang/String;)Z" ) ) private boolean impl$onInitPlayer_dontClassSpongeNameAsModified(final String currentName, final String originalName) { if (originalName.equals(Constants.GameProfile.DUMMY_NAME)) { return true; } return currentName.equalsIgnoreCase(originalName); } @Redirect(method = "placeNewPlayer", at = @At( value = "INVOKE", target = "Lnet/minecraft/server/players/PlayerList;broadcastMessage(Lnet/minecraft/network/chat/Component;Lnet/minecraft/network/chat/ChatType;Ljava/util/UUID;)V" ) ) private void impl$onInitPlayer_delaySendMessage( final PlayerList playerList, final net.minecraft.network.chat.Component message, final ChatType p_232641_2_, final UUID p_232641_3_, final Connection manager, final net.minecraft.server.level.ServerPlayer playerIn ) { // Don't send here, will be done later. We cache the expected message. 
((ServerPlayerBridge) playerIn).bridge$setConnectionMessageToSend(message); } @Redirect(method = "placeNewPlayer", at = @At(value = "NEW", target = "net/minecraft/network/protocol/game/ClientboundLoginPacket")) private ClientboundLoginPacket impl$usePerWorldViewDistance(final int p_i242082_1_, final GameType p_i242082_2_, final GameType p_i242082_3_, final long p_i242082_4_, final boolean p_i242082_6_, final Set<ResourceKey<Level>> p_i242082_7_, final RegistryAccess.RegistryHolder p_i242082_8_, final DimensionType p_i242082_9_, final ResourceKey<Level> p_i242082_10_, final int p_i242082_11_, final int p_i242082_12_, final boolean p_i242082_13_, final boolean p_i242082_14_, final boolean p_i242082_15_, final boolean p_i242082_16_, final Connection p_72355_1_, final net.minecraft.server.level.ServerPlayer player) { return new ClientboundLoginPacket(p_i242082_1_, p_i242082_2_, p_i242082_3_, p_i242082_4_, p_i242082_6_, p_i242082_7_, p_i242082_8_, p_i242082_9_, p_i242082_10_, p_i242082_11_, ((PrimaryLevelDataBridge) player.getLevel().getLevelData()).bridge$viewDistance().orElse(this.viewDistance), p_i242082_13_, p_i242082_14_, p_i242082_15_, p_i242082_16_); } @Redirect(method = "placeNewPlayer", at = @At(value = "INVOKE", target = "Lnet/minecraft/server/MinecraftServer;getCustomBossEvents()Lnet/minecraft/server/bossevents/CustomBossEvents;")) private CustomBossEvents impl$getPerWorldBossBarManager( final MinecraftServer minecraftServer, final Connection netManager, final net.minecraft.server.level.ServerPlayer playerIn) { return ((ServerLevelBridge) playerIn.getLevel()).bridge$getBossBarManager(); } @Redirect(method = "placeNewPlayer", at = @At(value = "INVOKE", target = "Lnet/minecraft/server/players/PlayerList;updateEntireScoreboard(Lnet/minecraft/server/ServerScoreboard;Lnet/minecraft/server/level/ServerPlayer;)V")) private void impl$sendScoreboard(final PlayerList playerList, final ServerScoreboard scoreboardIn, final net.minecraft.server.level.ServerPlayer playerIn) { ((ServerPlayerBridge)playerIn).bridge$initScoreboard(); } @Redirect( method = "placeNewPlayer", at = @At( value = "INVOKE", target = "Lnet/minecraft/server/players/PlayerList;broadcastAll(Lnet/minecraft/network/protocol/Packet;)V" ) ) private void impl$sendScoreboard(final PlayerList playerList, final Packet<?> addPlayer, final Connection playerConnection, final net.minecraft.server.level.ServerPlayer serverPlayer ) { if (((VanishableBridge) serverPlayer).bridge$isVanished()) { return; } playerList.broadcastAll(addPlayer); } @Redirect( method = "placeNewPlayer", at = @At( value = "INVOKE", target = "Lnet/minecraft/server/network/ServerGamePacketListenerImpl;send(Lnet/minecraft/network/protocol/Packet;)V" ), slice = @Slice( from = @At( value = "INVOKE", target = "Ljava/util/List;size()I", remap = false ), to = @At( value = "INVOKE", target = "Lnet/minecraft/server/level/ServerLevel;addNewPlayer(Lnet/minecraft/server/level/ServerPlayer;)V" ) ) ) private void impl$onlySendAddPlayerForUnvanishedPlayers(ServerGamePacketListenerImpl connection, Packet<?> packet) { ClientboundPlayerInfoPacket pkt = (ClientboundPlayerInfoPacket) packet; // size is always 1 VanishableBridge p = (VanishableBridge) this.playersByUUID.get(pkt.getEntries().get(0).getProfile().getId()); // Effectively, don't notify new players of vanished players if (p.bridge$isVanished()) { return; } connection.send(packet); } @Inject(method = "placeNewPlayer", at = @At(value = "RETURN")) private void impl$onInitPlayer_join(final Connection networkManager, final 
net.minecraft.server.level.ServerPlayer mcPlayer, final CallbackInfo ci) { final ServerPlayer player = (ServerPlayer) mcPlayer; final ServerSideConnection connection = player.connection(); final Cause cause = Cause.of(EventContext.empty(), connection, player); final Audience audience = Audiences.onlinePlayers(); final Component joinComponent = SpongeAdventure.asAdventure(((ServerPlayerBridge) mcPlayer).bridge$getConnectionMessageToSend()); final ServerSideConnectionEvent.Join event = SpongeEventFactory.createServerSideConnectionEventJoin(cause, audience, Optional.of(audience), joinComponent, joinComponent, connection, player, false); SpongeCommon.post(event); if (!event.isMessageCancelled()) { event.audience().ifPresent(audience1 -> audience1.sendMessage(Identity.nil(), event.message())); } ((ServerPlayerBridge) mcPlayer).bridge$setConnectionMessageToSend(null); final PhaseContext<?> context = PhaseTracker.SERVER.getPhaseContext(); PhaseTracker.SERVER.pushCause(event); final TransactionalCaptureSupplier transactor = context.getTransactor(); transactor.logPlayerInventoryChange(mcPlayer, PlayerInventoryTransaction.EventCreator.STANDARD); try (EffectTransactor ignored = BroadcastInventoryChangesEffect.transact(transactor)) { mcPlayer.inventoryMenu.broadcastChanges(); // in case plugins modified it } } @Redirect(method = "remove", at = @At(value = "INVOKE", target = "Lnet/minecraft/server/MinecraftServer;getCustomBossEvents()Lnet/minecraft/server/bossevents/CustomBossEvents;")) private CustomBossEvents impl$getPerWorldBossBarManager(final MinecraftServer minecraftServer, final net.minecraft.server.level.ServerPlayer playerIn) { return ((ServerLevelBridge) playerIn.getLevel()).bridge$getBossBarManager(); } @Inject(method = "remove", at = @At("HEAD")) private void impl$RemovePlayerReferenceFromScoreboard(final net.minecraft.server.level.ServerPlayer player, final CallbackInfo ci) { ((ServerScoreboardBridge) ((ServerPlayer) player).scoreboard()).bridge$removePlayer(player, false); } @Redirect(method = "setLevel", at = @At( value = "INVOKE", target = "Lnet/minecraft/world/level/border/WorldBorder;addListener(Lnet/minecraft/world/level/border/BorderChangeListener;)V" ) ) private void impl$usePerWorldBorderListener(final WorldBorder worldBorder, final BorderChangeListener listener, final ServerLevel serverWorld) { worldBorder.addListener(new PerWorldBorderListener(serverWorld)); } @Redirect(method = "load", at = @At( value = "INVOKE", target = "Lnet/minecraft/server/level/ServerPlayer;load(Lnet/minecraft/nbt/CompoundTag;)V" ) ) private void impl$setSpongePlayerDataForSinglePlayer(final net.minecraft.server.level.ServerPlayer entity, final CompoundTag compound) { entity.load(compound); ((SpongeServer) this.shadow$getServer()).getPlayerDataManager().readPlayerData(compound, entity.getUUID(), null); } @SuppressWarnings("OptionalUsedAsFieldOrParameterType") @Redirect( method = "respawn", at = @At( value = "INVOKE", target = "Ljava/util/Optional;isPresent()Z", remap = false ), slice = @Slice( from = @At(value = "INVOKE", target = "Ljava/util/Optional;empty()Ljava/util/Optional;", remap = false), to = @At(value = "INVOKE", target = "Lnet/minecraft/server/MinecraftServer;isDemo()Z") ) ) private boolean impl$flagIfRespawnLocationIsGameMechanic(final Optional<?> optional) { this.impl$isGameMechanicRespawn = optional.isPresent(); return this.impl$isGameMechanicRespawn; } @Redirect(method = "respawn", at = @At( value = "INVOKE", target = 
"Lnet/minecraft/server/network/ServerGamePacketListenerImpl;send(Lnet/minecraft/network/protocol/Packet;)V", ordinal = 1 ) ) private void impl$callRespawnPlayerRecreateEvent( final ServerGamePacketListenerImpl serverPlayNetHandler, final Packet<?> packetIn, final net.minecraft.server.level.ServerPlayer originalPlayer, final boolean keepAllPlayerData) { final net.minecraft.server.level.ServerPlayer recreatedPlayer = serverPlayNetHandler.player; final Vector3d originalPosition = VecHelper.toVector3d(originalPlayer.position()); final Vector3d destinationPosition = VecHelper.toVector3d(recreatedPlayer.position()); final org.spongepowered.api.world.server.ServerWorld originalWorld = (org.spongepowered.api.world.server.ServerWorld) originalPlayer.level; final org.spongepowered.api.world.server.ServerWorld originalDestinationWorld = (org.spongepowered.api.world.server.ServerWorld) this.server.getLevel(this.impl$originalDestination == null ? Level.OVERWORLD : this.impl$originalDestination); final org.spongepowered.api.world.server.ServerWorld destinationWorld = (org.spongepowered.api.world.server.ServerWorld) this.server.getLevel(this.impl$newDestination == null ? Level.OVERWORLD : this.impl$newDestination); final RespawnPlayerEvent.Recreate event = SpongeEventFactory.createRespawnPlayerEventRecreate(PhaseTracker.getCauseStackManager().currentCause(), destinationPosition, originalWorld, originalPosition, destinationWorld, originalDestinationWorld, destinationPosition, (ServerPlayer) originalPlayer, (ServerPlayer) recreatedPlayer, this.impl$isGameMechanicRespawn, !keepAllPlayerData); SpongeCommon.post(event); recreatedPlayer.setPos(event.destinationPosition().x(), event.destinationPosition().y(), event.destinationPosition().z()); this.impl$isGameMechanicRespawn = false; this.impl$originalDestination = null; this.impl$newDestination = null; final ServerLevel targetWorld = (ServerLevel) event.destinationWorld(); ((ServerPlayerBridge) recreatedPlayer).bridge$sendChangeDimension( targetWorld.dimensionType(), ((ClientboundRespawnPacketAccessor) packetIn).accessor$dimension(), ((ClientboundRespawnPacketAccessor) packetIn).accessor$seed(), recreatedPlayer.gameMode.getGameModeForPlayer(), recreatedPlayer.gameMode.getPreviousGameModeForPlayer(), targetWorld.isDebug(), targetWorld.isFlat(), keepAllPlayerData ); } @Inject(method = "respawn", at = @At("RETURN")) private void impl$callRespawnPlayerPostEvent(final net.minecraft.server.level.ServerPlayer player, final boolean keepAllPlayerData, final CallbackInfoReturnable<net.minecraft.server.level.ServerPlayer> cir) { final org.spongepowered.api.world.server.ServerWorld originalWorld = (org.spongepowered.api.world.server.ServerWorld) player.level; final org.spongepowered.api.world.server.ServerWorld originalDestinationWorld = (org.spongepowered.api.world.server.ServerWorld) this.server.getLevel(this.impl$originalDestination == null ? Level.OVERWORLD : this.impl$originalDestination); final org.spongepowered.api.world.server.ServerWorld destinationWorld = (org.spongepowered.api.world.server.ServerWorld) this.server.getLevel(this.impl$newDestination == null ? 
Level.OVERWORLD : this.impl$newDestination); final RespawnPlayerEvent.Post event = SpongeEventFactory.createRespawnPlayerEventPost(PhaseTracker.getCauseStackManager().currentCause(), destinationWorld, originalWorld, originalDestinationWorld, (ServerPlayer) cir.getReturnValue()); SpongeCommon.post(event); } @Redirect(method = "sendLevelInfo", at = @At(value = "INVOKE", target = "Lnet/minecraft/server/MinecraftServer;overworld()Lnet/minecraft/server/level/ServerLevel;")) private ServerLevel impl$usePerWorldWorldBorder(final MinecraftServer minecraftServer, final net.minecraft.server.level.ServerPlayer playerIn, final ServerLevel worldIn) { return worldIn; } private void impl$disconnectClient(final Connection netManager, final Component disconnectMessage, final @Nullable GameProfile profile) { final net.minecraft.network.chat.Component reason = SpongeAdventure.asVanilla(disconnectMessage); try { PlayerListMixin.LOGGER.info("Disconnecting " + (profile != null ? profile.toString() + " (" + netManager.getRemoteAddress().toString() + ")" : netManager.getRemoteAddress() + ": " + reason.getString())); netManager.send(new ClientboundDisconnectPacket(reason)); netManager.disconnect(reason); } catch (final Exception exception) { PlayerListMixin.LOGGER.error("Error whilst disconnecting player", exception); } } @Inject(method = "saveAll()V", at = @At("RETURN")) private void impl$saveDirtyUsersOnSaveAll(final CallbackInfo ci) { ((SpongeServer) SpongeCommon.server()).userManager().saveDirtyUsers(); } }
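The login gate in impl$canPlayerLoginServer above chains its ban, IP-ban and whitelist checks with thenCompose/thenApplyAsync, treating a null Component as "allowed so far" and letting the first non-null denial flow through unchanged. A minimal, self-contained sketch of that short-circuit pattern follows; the check names and denial strings are invented.

import java.util.concurrent.CompletableFuture;

public class LoginCheckSketch {

    // Each stage yields null for "allowed so far", or a denial message.
    static CompletableFuture<String> banCheck(final String name) {
        return CompletableFuture.completedFuture("badguy".equals(name) ? "You are banned" : null);
    }

    static CompletableFuture<String> whitelistCheck(final String name) {
        return CompletableFuture.completedFuture("guest".equals(name) ? "Not whitelisted" : null);
    }

    static CompletableFuture<String> canLogin(final String name) {
        return banCheck(name).thenCompose(denial -> denial != null
            ? CompletableFuture.completedFuture(denial) // short-circuit: keep the first denial
            : whitelistCheck(name));                    // otherwise run the next async check
    }

    public static void main(final String[] args) {
        System.out.println(canLogin("badguy").join()); // You are banned
        System.out.println(canLogin("guest").join());  // Not whitelisted
        System.out.println(canLogin("alice").join());  // null -> login allowed
    }
}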
src/mixins/java/org/spongepowered/common/mixin/core/server/players/PlayerListMixin.java
/* * This file is part of Sponge, licensed under the MIT License (MIT). * * Copyright (c) SpongePowered <https://www.spongepowered.org> * Copyright (c) contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package org.spongepowered.common.mixin.core.server.players; import io.netty.channel.local.LocalAddress; import net.kyori.adventure.audience.Audience; import net.kyori.adventure.identity.Identity; import net.kyori.adventure.text.Component; import net.minecraft.core.BlockPos; import net.minecraft.core.RegistryAccess; import net.minecraft.nbt.CompoundTag; import net.minecraft.network.Connection; import net.minecraft.network.chat.ChatType; import net.minecraft.network.chat.MutableComponent; import net.minecraft.network.chat.TextComponent; import net.minecraft.network.chat.TranslatableComponent; import net.minecraft.network.protocol.Packet; import net.minecraft.network.protocol.game.ClientboundDisconnectPacket; import net.minecraft.network.protocol.game.ClientboundLoginPacket; import net.minecraft.network.protocol.game.ClientboundPlayerInfoPacket; import net.minecraft.resources.ResourceKey; import net.minecraft.server.MinecraftServer; import net.minecraft.server.ServerScoreboard; import net.minecraft.server.bossevents.CustomBossEvents; import net.minecraft.server.level.ServerLevel; import net.minecraft.server.network.ServerGamePacketListenerImpl; import net.minecraft.server.players.IpBanList; import net.minecraft.server.players.PlayerList; import net.minecraft.server.players.UserBanList; import net.minecraft.server.players.UserWhiteList; import net.minecraft.world.level.GameType; import net.minecraft.world.level.Level; import net.minecraft.world.level.border.BorderChangeListener; import net.minecraft.world.level.border.WorldBorder; import net.minecraft.world.level.dimension.DimensionType; import org.apache.logging.log4j.Logger; import org.objectweb.asm.Opcodes; import org.spongepowered.api.Sponge; import org.spongepowered.api.adventure.Audiences; import org.spongepowered.api.entity.living.player.User; import org.spongepowered.api.entity.living.player.server.ServerPlayer; import org.spongepowered.api.event.Cause; import org.spongepowered.api.event.EventContext; import org.spongepowered.api.event.SpongeEventFactory; import org.spongepowered.api.event.entity.living.player.RespawnPlayerEvent; import org.spongepowered.api.event.network.ServerSideConnectionEvent; import org.spongepowered.api.network.ServerSideConnection; import org.spongepowered.api.profile.GameProfile; import 
org.spongepowered.api.service.ban.Ban; import org.spongepowered.api.service.permission.PermissionService; import org.spongepowered.api.service.permission.Subject; import org.spongepowered.api.world.server.ServerLocation; import org.spongepowered.asm.mixin.Final; import org.spongepowered.asm.mixin.Mixin; import org.spongepowered.asm.mixin.Mutable; import org.spongepowered.asm.mixin.Shadow; import org.spongepowered.asm.mixin.injection.At; import org.spongepowered.asm.mixin.injection.Inject; import org.spongepowered.asm.mixin.injection.Redirect; import org.spongepowered.asm.mixin.injection.Slice; import org.spongepowered.asm.mixin.injection.callback.CallbackInfo; import org.spongepowered.asm.mixin.injection.callback.CallbackInfoReturnable; import org.spongepowered.common.SpongeCommon; import org.spongepowered.common.SpongeServer; import org.spongepowered.common.accessor.network.protocol.game.ClientboundRespawnPacketAccessor; import org.spongepowered.common.adventure.SpongeAdventure; import org.spongepowered.common.bridge.client.server.IntegratedPlayerListBridge; import org.spongepowered.common.bridge.data.VanishableBridge; import org.spongepowered.common.bridge.network.ConnectionBridge; import org.spongepowered.common.bridge.server.level.ServerPlayerBridge; import org.spongepowered.common.bridge.server.ServerScoreboardBridge; import org.spongepowered.common.bridge.server.players.PlayerListBridge; import org.spongepowered.common.bridge.server.level.ServerLevelBridge; import org.spongepowered.common.bridge.world.level.storage.PrimaryLevelDataBridge; import org.spongepowered.common.entity.player.LoginPermissions; import org.spongepowered.common.entity.player.SpongeUserView; import org.spongepowered.common.event.tracking.PhaseContext; import org.spongepowered.common.event.tracking.PhaseTracker; import org.spongepowered.common.event.tracking.context.transaction.EffectTransactor; import org.spongepowered.common.event.tracking.context.transaction.TransactionalCaptureSupplier; import org.spongepowered.common.event.tracking.context.transaction.effect.BroadcastInventoryChangesEffect; import org.spongepowered.common.event.tracking.context.transaction.inventory.PlayerInventoryTransaction; import org.spongepowered.common.profile.SpongeGameProfile; import org.spongepowered.common.server.PerWorldBorderListener; import org.spongepowered.common.service.server.ban.SpongeIPBanList; import org.spongepowered.common.service.server.ban.SpongeUserBanList; import org.spongepowered.common.service.server.whitelist.SpongeUserWhiteList; import org.spongepowered.common.util.Constants; import org.spongepowered.common.util.NetworkUtil; import org.spongepowered.common.util.VecHelper; import org.spongepowered.math.vector.Vector3d; import java.net.InetAddress; import java.net.SocketAddress; import java.net.UnknownHostException; import java.text.SimpleDateFormat; import java.time.Instant; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.UUID; import java.util.concurrent.CompletableFuture; import javax.annotation.Nullable; @Mixin(PlayerList.class) public abstract class PlayerListMixin implements PlayerListBridge { // @formatter:off @Shadow @Final private static Logger LOGGER; @Shadow @Final private static SimpleDateFormat BAN_DATE_FORMAT; @Shadow @Final private MinecraftServer server; @Shadow private int viewDistance; @Shadow @Final @Mutable private UserBanList bans; @Shadow @Final @Mutable private IpBanList ipBans; @Shadow @Final @Mutable private UserWhiteList whitelist; @Shadow @Final 
private List<net.minecraft.server.level.ServerPlayer> players; @Shadow @Final protected int maxPlayers; @Shadow public abstract MinecraftServer shadow$getServer(); @Shadow @Nullable public abstract CompoundTag shadow$load(net.minecraft.server.level.ServerPlayer playerIn); @Shadow public abstract boolean shadow$canBypassPlayerLimit(com.mojang.authlib.GameProfile param0); // @formatter:on private boolean impl$isGameMechanicRespawn = false; ResourceKey<Level> impl$newDestination = null; ResourceKey<Level> impl$originalDestination = null; @Inject(method = "<init>", at = @At("RETURN")) private void impl$setSpongeLists(final CallbackInfo callbackInfo) { this.bans = new SpongeUserBanList(PlayerList.USERBANLIST_FILE); this.ipBans = new SpongeIPBanList(PlayerList.IPBANLIST_FILE); this.whitelist = new SpongeUserWhiteList(PlayerList.WHITELIST_FILE); } @Override public void bridge$setOriginalDestinationDimension(final ResourceKey<Level> dimension) { this.impl$originalDestination = dimension; } @Override public void bridge$setNewDestinationDimension(final ResourceKey<Level> dimension) { this.impl$newDestination = dimension; } @Override public CompletableFuture<net.minecraft.network.chat.Component> bridge$canPlayerLogin(final SocketAddress param0, final com.mojang.authlib.GameProfile param1) { if (this instanceof IntegratedPlayerListBridge) { return ((IntegratedPlayerListBridge) this).bridge$canPlayerLoginClient(param0, param1); } return this.impl$canPlayerLoginServer(param0, param1); } protected final CompletableFuture<net.minecraft.network.chat.Component> impl$canPlayerLoginServer(final SocketAddress param0, final com.mojang.authlib.GameProfile param1) { final SpongeGameProfile profile = SpongeGameProfile.basicOf(param1); return Sponge.server().serviceProvider().banService().find(profile).thenCompose(profileBanOpt -> { if (profileBanOpt.isPresent()) { final Ban.Profile var0 = profileBanOpt.get(); final MutableComponent var1 = new TranslatableComponent("multiplayer.disconnect.banned.reason", var0.reason().orElse(Component.empty())); if (var0.expirationDate().isPresent()) { var1.append(new TranslatableComponent("multiplayer.disconnect.banned.expiration", BAN_DATE_FORMAT.format(var0.expirationDate().get()))); } return CompletableFuture.completedFuture(var1); } if (param0 instanceof LocalAddress) { // don't bother looking up IP bans on local address return CompletableFuture.completedFuture(null); } final InetAddress address; try { address = InetAddress.getByName(NetworkUtil.getHostString(param0)); } catch (final UnknownHostException ex) { return CompletableFuture.completedFuture(new TextComponent(ex.getMessage())); // no } return Sponge.server().serviceProvider().banService().find(address).thenCompose(ipBanOpt -> { if (ipBanOpt.isPresent()) { final Ban.IP var2 = ipBanOpt.get(); final MutableComponent var3 = new TranslatableComponent("multiplayer.disconnect.banned_ip.reason", var2.reason().orElse(Component.empty())); if (var2.expirationDate().isPresent()) { var3.append(new TranslatableComponent("multiplayer.disconnect.banned_ip.expiration", BAN_DATE_FORMAT.format(var2.expirationDate().get()))); } return CompletableFuture.completedFuture(var3); } return CompletableFuture.supplyAsync(() -> { if (!Sponge.server().isWhitelistEnabled()) { return true; } final PermissionService permissionService = Sponge.server().serviceProvider().permissionService(); Subject subject = permissionService.userSubjects().subject(param1.getId().toString()).orElse(null); if (subject == null) { subject = permissionService.defaults(); 
} return subject.hasPermission(LoginPermissions.BYPASS_WHITELIST_PERMISSION); }, SpongeCommon.server()).thenCompose(w -> { if (w) { return CompletableFuture.completedFuture(null); } return Sponge.server().serviceProvider().whitelistService().isWhitelisted(profile).<net.minecraft.network.chat.Component>thenApply(whitelisted -> { if (!whitelisted) { return new TranslatableComponent("multiplayer.disconnect.not_whitelisted"); } return null; }); }); }); }).thenApplyAsync(component -> { if (component != null) { return component; } if (this.players.size() >= this.maxPlayers && !this.shadow$canBypassPlayerLimit(param1)) { return new TranslatableComponent("multiplayer.disconnect.server_full"); } return null; }, SpongeCommon.server()); } @Redirect(method = "placeNewPlayer", at = @At(value = "INVOKE", target = "Lnet/minecraft/server/players/PlayerList;load(Lnet/minecraft/server/level/ServerPlayer;)Lnet/minecraft/nbt/CompoundTag;" ) ) private CompoundTag impl$setPlayerDataForNewPlayers(final PlayerList playerList, final net.minecraft.server.level.ServerPlayer playerIn) { final CompoundTag compound = this.shadow$load(playerIn); if (compound == null) { ((SpongeServer) SpongeCommon.server()).getPlayerDataManager().setPlayerInfo(playerIn.getUUID(), Instant.now(), Instant.now()); } return compound; } @Redirect(method = "placeNewPlayer", at = @At(value = "INVOKE", target = "Lnet/minecraft/server/MinecraftServer;getLevel(Lnet/minecraft/resources/ResourceKey;)Lnet/minecraft/server/level/ServerLevel;" ) ) private net.minecraft.server.level.ServerLevel impl$onInitPlayer_getWorld(final MinecraftServer minecraftServer, final ResourceKey<Level> dimension, final Connection networkManager, final net.minecraft.server.level.ServerPlayer mcPlayer ) { @Nullable final net.minecraft.network.chat.Component kickReason = ((ConnectionBridge) networkManager).bridge$getKickReason(); final Component disconnectMessage; if (kickReason != null) { disconnectMessage = SpongeAdventure.asAdventure(kickReason); } else { disconnectMessage = Component.text("You are not allowed to log in to this server."); } net.minecraft.server.level.ServerLevel mcWorld = minecraftServer.getLevel(dimension); if (mcWorld == null) { SpongeCommon.logger().warn("The player '{}' was located in a world that isn't loaded or doesn't exist. This is not safe so " + "the player will be moved to the spawn of the default world.", mcPlayer.getGameProfile().getName()); mcWorld = minecraftServer.overworld(); final BlockPos spawnPoint = mcWorld.getSharedSpawnPos(); mcPlayer.setPos(spawnPoint.getX() + 0.5, spawnPoint.getY() + 0.5, spawnPoint.getZ() + 0.5); } mcPlayer.setLevel(mcWorld); final ServerPlayer player = (ServerPlayer) mcPlayer; final ServerLocation location = player.serverLocation(); final Vector3d rotation = player.rotation(); // player.connection() cannot be used here, because it's still be null at this point final ServerSideConnection connection = (ServerSideConnection) networkManager.getPacketListener(); // The user is not yet in the player list, so we need to make special provision. 
final User user = SpongeUserView.createLoginEventUser(player); final Cause cause = Cause.of(EventContext.empty(), connection, user); final ServerSideConnectionEvent.Login event = SpongeEventFactory.createServerSideConnectionEventLogin(cause, disconnectMessage, disconnectMessage, location, location, rotation, rotation, connection, user); if (kickReason != null) { event.setCancelled(true); } if (SpongeCommon.post(event)) { this.impl$disconnectClient(networkManager, event.message(), player.profile()); return null; } final ServerLocation toLocation = event.toLocation(); final Vector3d toRotation = event.toRotation(); mcPlayer.absMoveTo(toLocation.x(), toLocation.y(), toLocation.z(), (float) toRotation.y(), (float) toRotation.x()); return (net.minecraft.server.level.ServerLevel) toLocation.world(); } @Inject(method = "placeNewPlayer", cancellable = true, at = @At( value = "INVOKE", target = "Lnet/minecraft/server/MinecraftServer;getLevel(Lnet/minecraft/resources/ResourceKey;)Lnet/minecraft/server/level/ServerLevel;", shift = At.Shift.AFTER ) ) private void impl$onInitPlayer_BeforeSetWorld(final Connection p_72355_1_, final net.minecraft.server.level.ServerPlayer p_72355_2_, final CallbackInfo ci) { if (!p_72355_1_.isConnected()) { ci.cancel(); } } @Redirect(method = "placeNewPlayer", at = @At(value = "INVOKE", target = "Lorg/apache/logging/log4j/Logger;info(Ljava/lang/String;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;)V", remap = false ) ) private void impl$onInitPlayer_printPlayerWorldInJoinFeedback( final Logger logger, final String message, final Object p0, final Object p1, final Object p2, final Object p3, final Object p4, final Object p5, final Connection manager, final net.minecraft.server.level.ServerPlayer entity) { logger.info("{}[{}] logged in to world '{}' with entity id {} at ({}, {}, {})", p0, p1, ((org.spongepowered.api.world.server.ServerWorld) entity.getLevel()).key(), p2, p3, p4, p5); } @Redirect(method = "placeNewPlayer", slice = @Slice( from = @At( value = "INVOKE", target = "Lnet/minecraft/server/MinecraftServer;invalidateStatus()V"), to = @At( value = "FIELD", opcode = Opcodes.GETSTATIC, target = "Lnet/minecraft/ChatFormatting;YELLOW:Lnet/minecraft/ChatFormatting;" ) ), at = @At( value = "INVOKE", remap = false, target = "Ljava/lang/String;equalsIgnoreCase(Ljava/lang/String;)Z" ) ) private boolean impl$onInitPlayer_dontClassSpongeNameAsModified(final String currentName, final String originalName) { if (originalName.equals(Constants.GameProfile.DUMMY_NAME)) { return true; } return currentName.equalsIgnoreCase(originalName); } @Redirect(method = "placeNewPlayer", at = @At( value = "INVOKE", target = "Lnet/minecraft/server/players/PlayerList;broadcastMessage(Lnet/minecraft/network/chat/Component;Lnet/minecraft/network/chat/ChatType;Ljava/util/UUID;)V" ) ) private void impl$onInitPlayer_delaySendMessage( final PlayerList playerList, final net.minecraft.network.chat.Component message, final ChatType p_232641_2_, final UUID p_232641_3_, final Connection manager, final net.minecraft.server.level.ServerPlayer playerIn ) { // Don't send here, will be done later. We cache the expected message. 
((ServerPlayerBridge) playerIn).bridge$setConnectionMessageToSend(message); } @Redirect(method = "placeNewPlayer", at = @At(value = "NEW", target = "net/minecraft/network/protocol/game/ClientboundLoginPacket")) private ClientboundLoginPacket impl$usePerWorldViewDistance(final int p_i242082_1_, final GameType p_i242082_2_, final GameType p_i242082_3_, final long p_i242082_4_, final boolean p_i242082_6_, final Set<ResourceKey<Level>> p_i242082_7_, final RegistryAccess.RegistryHolder p_i242082_8_, final DimensionType p_i242082_9_, final ResourceKey<Level> p_i242082_10_, final int p_i242082_11_, final int p_i242082_12_, final boolean p_i242082_13_, final boolean p_i242082_14_, final boolean p_i242082_15_, final boolean p_i242082_16_, final Connection p_72355_1_, final net.minecraft.server.level.ServerPlayer player) { return new ClientboundLoginPacket(p_i242082_1_, p_i242082_2_, p_i242082_3_, p_i242082_4_, p_i242082_6_, p_i242082_7_, p_i242082_8_, p_i242082_9_, p_i242082_10_, p_i242082_11_, ((PrimaryLevelDataBridge) player.getLevel().getLevelData()).bridge$viewDistance().orElse(this.viewDistance), p_i242082_13_, p_i242082_14_, p_i242082_15_, p_i242082_16_); } @Redirect(method = "placeNewPlayer", at = @At(value = "INVOKE", target = "Lnet/minecraft/server/MinecraftServer;getCustomBossEvents()Lnet/minecraft/server/bossevents/CustomBossEvents;")) private CustomBossEvents impl$getPerWorldBossBarManager( final MinecraftServer minecraftServer, final Connection netManager, final net.minecraft.server.level.ServerPlayer playerIn) { return ((ServerLevelBridge) playerIn.getLevel()).bridge$getBossBarManager(); } @Redirect(method = "placeNewPlayer", at = @At(value = "INVOKE", target = "Lnet/minecraft/server/players/PlayerList;updateEntireScoreboard(Lnet/minecraft/server/ServerScoreboard;Lnet/minecraft/server/level/ServerPlayer;)V")) private void impl$sendScoreboard(final PlayerList playerList, final ServerScoreboard scoreboardIn, final net.minecraft.server.level.ServerPlayer playerIn) { ((ServerPlayerBridge)playerIn).bridge$initScoreboard(); } @Redirect( method = "placeNewPlayer", at = @At( value = "INVOKE", target = "Lnet/minecraft/server/players/PlayerList;broadcastAll(Lnet/minecraft/network/protocol/Packet;)V" ) ) private void impl$sendScoreboard(final PlayerList playerList, final Packet<?> addPlayer, final Connection playerConnection, final net.minecraft.server.level.ServerPlayer serverPlayer ) { if (((VanishableBridge) serverPlayer).bridge$isVanished()) { return; } playerList.broadcastAll(addPlayer); } @Redirect( method = "placeNewPlayer", at = @At( value = "NEW", target = "Lnet/minecraft/network/protocol/game/ClientboundPlayerInfoPacket;<init>(Lnet/minecraft/network/protocol/game/ClientboundPlayerInfoPacket$Action;[Lnet/minecraft/server/level/ServerPlayer;)V" ), slice = @Slice( from = @At( value = "INVOKE", target = "Ljava/util/List;size()I", remap = false ), to = @At( value = "INVOKE", target = "Lnet/minecraft/server/level/ServerLevel;addNewPlayer(Lnet/minecraft/server/level/ServerPlayer;)V" ) ) ) private ClientboundPlayerInfoPacket impl$onlySendAddPlayerForUnvanishedPlayers( ClientboundPlayerInfoPacket.Action addPlayer, net.minecraft.server.level.ServerPlayer[] players ) { if (players.length == 0) { return null; } // Effectively, don't notify new players of vanished players if (((VanishableBridge) players[0]).bridge$isVanished()) { return null; } return new ClientboundPlayerInfoPacket(addPlayer, players); } @Redirect( method = "placeNewPlayer", at = @At( value = "INVOKE", target = 
"Lnet/minecraft/server/network/ServerGamePacketListenerImpl;send(Lnet/minecraft/network/protocol/Packet;)V" ), slice = @Slice( from = @At( value = "INVOKE", target = "Ljava/util/List;size()I", remap = false ), to = @At( value = "INVOKE", target = "Lnet/minecraft/server/level/ServerLevel;addNewPlayer(Lnet/minecraft/server/level/ServerPlayer;)V" ) ) ) private void impl$onlySendAddPlayerForUnvanishedPlayers(ServerGamePacketListenerImpl connection, Packet<?> packet) { // Since the redirect above can technically make the packet null, we don't // want to send a null packet and cause an NPE if (packet == null) { return; } connection.send(packet); } @Inject(method = "placeNewPlayer", at = @At(value = "RETURN")) private void impl$onInitPlayer_join(final Connection networkManager, final net.minecraft.server.level.ServerPlayer mcPlayer, final CallbackInfo ci) { final ServerPlayer player = (ServerPlayer) mcPlayer; final ServerSideConnection connection = player.connection(); final Cause cause = Cause.of(EventContext.empty(), connection, player); final Audience audience = Audiences.onlinePlayers(); final Component joinComponent = SpongeAdventure.asAdventure(((ServerPlayerBridge) mcPlayer).bridge$getConnectionMessageToSend()); final ServerSideConnectionEvent.Join event = SpongeEventFactory.createServerSideConnectionEventJoin(cause, audience, Optional.of(audience), joinComponent, joinComponent, connection, player, false); SpongeCommon.post(event); if (!event.isMessageCancelled()) { event.audience().ifPresent(audience1 -> audience1.sendMessage(Identity.nil(), event.message())); } ((ServerPlayerBridge) mcPlayer).bridge$setConnectionMessageToSend(null); final PhaseContext<?> context = PhaseTracker.SERVER.getPhaseContext(); PhaseTracker.SERVER.pushCause(event); final TransactionalCaptureSupplier transactor = context.getTransactor(); transactor.logPlayerInventoryChange(mcPlayer, PlayerInventoryTransaction.EventCreator.STANDARD); try (EffectTransactor ignored = BroadcastInventoryChangesEffect.transact(transactor)) { mcPlayer.inventoryMenu.broadcastChanges(); // in case plugins modified it } } @Redirect(method = "remove", at = @At(value = "INVOKE", target = "Lnet/minecraft/server/MinecraftServer;getCustomBossEvents()Lnet/minecraft/server/bossevents/CustomBossEvents;")) private CustomBossEvents impl$getPerWorldBossBarManager(final MinecraftServer minecraftServer, final net.minecraft.server.level.ServerPlayer playerIn) { return ((ServerLevelBridge) playerIn.getLevel()).bridge$getBossBarManager(); } @Inject(method = "remove", at = @At("HEAD")) private void impl$RemovePlayerReferenceFromScoreboard(final net.minecraft.server.level.ServerPlayer player, final CallbackInfo ci) { ((ServerScoreboardBridge) ((ServerPlayer) player).scoreboard()).bridge$removePlayer(player, false); } @Redirect(method = "setLevel", at = @At( value = "INVOKE", target = "Lnet/minecraft/world/level/border/WorldBorder;addListener(Lnet/minecraft/world/level/border/BorderChangeListener;)V" ) ) private void impl$usePerWorldBorderListener(final WorldBorder worldBorder, final BorderChangeListener listener, final ServerLevel serverWorld) { worldBorder.addListener(new PerWorldBorderListener(serverWorld)); } @Redirect(method = "load", at = @At( value = "INVOKE", target = "Lnet/minecraft/server/level/ServerPlayer;load(Lnet/minecraft/nbt/CompoundTag;)V" ) ) private void impl$setSpongePlayerDataForSinglePlayer(final net.minecraft.server.level.ServerPlayer entity, final CompoundTag compound) { entity.load(compound); ((SpongeServer) 
this.shadow$getServer()).getPlayerDataManager().readPlayerData(compound, entity.getUUID(), null); } @SuppressWarnings("OptionalUsedAsFieldOrParameterType") @Redirect( method = "respawn", at = @At( value = "INVOKE", target = "Ljava/util/Optional;isPresent()Z", remap = false ), slice = @Slice( from = @At(value = "INVOKE", target = "Ljava/util/Optional;empty()Ljava/util/Optional;", remap = false), to = @At(value = "INVOKE", target = "Lnet/minecraft/server/MinecraftServer;isDemo()Z") ) ) private boolean impl$flagIfRespawnLocationIsGameMechanic(final Optional<?> optional) { this.impl$isGameMechanicRespawn = optional.isPresent(); return this.impl$isGameMechanicRespawn; } @Redirect(method = "respawn", at = @At( value = "INVOKE", target = "Lnet/minecraft/server/network/ServerGamePacketListenerImpl;send(Lnet/minecraft/network/protocol/Packet;)V", ordinal = 1 ) ) private void impl$callRespawnPlayerRecreateEvent( final ServerGamePacketListenerImpl serverPlayNetHandler, final Packet<?> packetIn, final net.minecraft.server.level.ServerPlayer originalPlayer, final boolean keepAllPlayerData) { final net.minecraft.server.level.ServerPlayer recreatedPlayer = serverPlayNetHandler.player; final Vector3d originalPosition = VecHelper.toVector3d(originalPlayer.position()); final Vector3d destinationPosition = VecHelper.toVector3d(recreatedPlayer.position()); final org.spongepowered.api.world.server.ServerWorld originalWorld = (org.spongepowered.api.world.server.ServerWorld) originalPlayer.level; final org.spongepowered.api.world.server.ServerWorld originalDestinationWorld = (org.spongepowered.api.world.server.ServerWorld) this.server.getLevel(this.impl$originalDestination == null ? Level.OVERWORLD : this.impl$originalDestination); final org.spongepowered.api.world.server.ServerWorld destinationWorld = (org.spongepowered.api.world.server.ServerWorld) this.server.getLevel(this.impl$newDestination == null ? 
Level.OVERWORLD : this.impl$newDestination); final RespawnPlayerEvent.Recreate event = SpongeEventFactory.createRespawnPlayerEventRecreate(PhaseTracker.getCauseStackManager().currentCause(), destinationPosition, originalWorld, originalPosition, destinationWorld, originalDestinationWorld, destinationPosition, (ServerPlayer) originalPlayer, (ServerPlayer) recreatedPlayer, this.impl$isGameMechanicRespawn, !keepAllPlayerData); SpongeCommon.post(event); recreatedPlayer.setPos(event.destinationPosition().x(), event.destinationPosition().y(), event.destinationPosition().z()); this.impl$isGameMechanicRespawn = false; this.impl$originalDestination = null; this.impl$newDestination = null; final ServerLevel targetWorld = (ServerLevel) event.destinationWorld(); ((ServerPlayerBridge) recreatedPlayer).bridge$sendChangeDimension( targetWorld.dimensionType(), ((ClientboundRespawnPacketAccessor) packetIn).accessor$dimension(), ((ClientboundRespawnPacketAccessor) packetIn).accessor$seed(), recreatedPlayer.gameMode.getGameModeForPlayer(), recreatedPlayer.gameMode.getPreviousGameModeForPlayer(), targetWorld.isDebug(), targetWorld.isFlat(), keepAllPlayerData ); } @Inject(method = "respawn", at = @At("RETURN")) private void impl$callRespawnPlayerPostEvent(final net.minecraft.server.level.ServerPlayer player, final boolean keepAllPlayerData, final CallbackInfoReturnable<net.minecraft.server.level.ServerPlayer> cir) { final org.spongepowered.api.world.server.ServerWorld originalWorld = (org.spongepowered.api.world.server.ServerWorld) player.level; final org.spongepowered.api.world.server.ServerWorld originalDestinationWorld = (org.spongepowered.api.world.server.ServerWorld) this.server.getLevel(this.impl$originalDestination == null ? Level.OVERWORLD : this.impl$originalDestination); final org.spongepowered.api.world.server.ServerWorld destinationWorld = (org.spongepowered.api.world.server.ServerWorld) this.server.getLevel(this.impl$newDestination == null ? Level.OVERWORLD : this.impl$newDestination); final RespawnPlayerEvent.Post event = SpongeEventFactory.createRespawnPlayerEventPost(PhaseTracker.getCauseStackManager().currentCause(), destinationWorld, originalWorld, originalDestinationWorld, (ServerPlayer) cir.getReturnValue()); SpongeCommon.post(event); } @Redirect(method = "sendLevelInfo", at = @At(value = "INVOKE", target = "Lnet/minecraft/server/MinecraftServer;overworld()Lnet/minecraft/server/level/ServerLevel;")) private ServerLevel impl$usePerWorldWorldBorder(final MinecraftServer minecraftServer, final net.minecraft.server.level.ServerPlayer playerIn, final ServerLevel worldIn) { return worldIn; } private void impl$disconnectClient(final Connection netManager, final Component disconnectMessage, final @Nullable GameProfile profile) { final net.minecraft.network.chat.Component reason = SpongeAdventure.asVanilla(disconnectMessage); try { PlayerListMixin.LOGGER.info("Disconnecting " + (profile != null ? profile.toString() + " (" + netManager.getRemoteAddress().toString() + ")" : netManager.getRemoteAddress() + ": " + reason.getString())); netManager.send(new ClientboundDisconnectPacket(reason)); netManager.disconnect(reason); } catch (final Exception exception) { PlayerListMixin.LOGGER.error("Error whilst disconnecting player", exception); } } @Inject(method = "saveAll()V", at = @At("RETURN")) private void impl$saveDirtyUsersOnSaveAll(final CallbackInfo ci) { ((SpongeServer) SpongeCommon.server()).userManager().saveDirtyUsers(); } }
fix mixin error on server startup: impl was trying to redirect a method whose arg was redirected by another redirect
src/mixins/java/org/spongepowered/common/mixin/core/server/players/PlayerListMixin.java
fix mixin error on server startup
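The conflict named in the commit message above is a general Mixin limitation: a @Redirect cannot capture an invocation when one of that invocation's argument expressions was itself produced by a call that another @Redirect already wrapped. Below is a minimal sketch of the colliding shape, not the commit's actual code; SomeClass, join, and the handler names are invented for illustration:

import net.minecraft.network.chat.Component;
import org.apache.logging.log4j.Logger;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Redirect;

// Hypothetical target bytecode inside SomeClass#join:
//     logger.info(message, player.getDisplayName())
@Mixin(SomeClass.class)
abstract class ArgumentRedirectMixin {
    // First redirect wraps the call whose return value feeds the log argument...
    @Redirect(method = "join", at = @At(value = "INVOKE",
            target = "Lnet/minecraft/server/level/ServerPlayer;getDisplayName()Lnet/minecraft/network/chat/Component;"))
    private Component impl$replaceName(final net.minecraft.server.level.ServerPlayer player) {
        return player.getDisplayName();
    }
}

@Mixin(SomeClass.class)
abstract class CallRedirectMixin {
    // ...so this second redirect targets an invocation whose argument was already
    // consumed by the redirect above, and Mixin rejects the combination at apply time.
    @Redirect(method = "join", at = @At(value = "INVOKE", remap = false,
            target = "Lorg/apache/logging/log4j/Logger;info(Ljava/lang/String;Ljava/lang/Object;)V"))
    private void impl$replaceLog(final Logger logger, final String message, final Object arg) {
        logger.info(message, arg);
    }
}

A common way out of this clash (the new contents of this record are not shown in this excerpt, so this is only the plausible pattern) is to fold both behaviors into a single redirect handler so that only one injector touches the call site.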
Java
mit
1a25377c2ed8bc7cfc566f8a1e2a644c35787cb6
0
jenkinsci/codebeamer-result-trend-updater-plugin,jenkinsci/codebeamer-result-trend-updater-plugin
/* * Copyright (c) 2015 Intland Software ([email protected]) */ package com.intland.jenkins.collector; import com.intland.jenkins.api.CodebeamerApiClient; import com.intland.jenkins.collector.dto.CodebeamerDto; import hudson.Plugin; import hudson.PluginWrapper; import hudson.model.*; import hudson.scm.ChangeLogSet; import hudson.scm.ChangeLogSet.Entry; import jenkins.model.Jenkins; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; import java.io.PrintStream; import java.util.Collection; import java.util.Iterator; import static org.mockito.Matchers.anyString; import static org.powermock.api.mockito.PowerMockito.mockStatic; import static org.powermock.api.mockito.PowerMockito.when; @RunWith(PowerMockRunner.class) @PrepareForTest({Jenkins.class, Project.class}) public class CodebeamerCollectorTests { @Mock CodebeamerApiClient apiClient; @Mock ItemGroup<Item> projectParent; @Mock PluginWrapper pluginWrapper; @Mock AbstractProject project; @Mock BuildListener listener; @Mock AbstractBuild build; @Mock PrintStream logger; @Mock Jenkins jenkins; @Mock Plugin plugin; @Mock Node builtOn; @Mock User user; @Before public void setUp() throws Exception { MockitoAnnotations.initMocks(this); } @Test public void testMarkupCreationWithoutPlugins() throws Exception { mockJenkins(); String expectedMarkup = "[{JenkinsBuildTrends}]\n" + "//DO NOT MODIFY! \n//Generated by plugin version: 10.x.x at: 1970-01-01 01:01:15\n" + "!2 %%(color: #ff0000;)Build #0 (1970-01-01 01:01:15)%!\n" + "[{Table\n\n|__Duration__\n|[1 min, 15 sec|http://localhost:8080/jenkins/myproject/01/buildTimeTrend] @ Jenkins\n\n" + "| \n__Test Result__ \n|__[0/0|http://localhost:8080/jenkins/myproject/01/testReport/] failures__ \n\n" + "|__[Tested changes|http://localhost:8080/jenkins/myproject/01/changes]__\n|__\n" + "* [#1000|ISSUE:1000] \\\\* bullet 1 \\\\* second row (admin)\n" + "* [#1000|ISSUE:1000] \\\\* bullet 1 \\\\* second row (admin)\n \n\n" + "|__Repository__\n|Unsupported SCM\n}] \n"; long currentTime = 75000l; CodebeamerDto codebeamerDto = CodebeamerCollector.collectCodebeamerData(build, listener, apiClient, currentTime); Assert.assertEquals(expectedMarkup, codebeamerDto.getMarkup()); } private void mockJenkins() { mockStatic(Jenkins.class); when(Jenkins.getInstance()).thenReturn(jenkins); when(build.getProject()).thenReturn(project); when(project.getParent()).thenReturn(projectParent); when(jenkins.getPlugin(anyString())).thenReturn(plugin); when(plugin.getWrapper()).thenReturn(pluginWrapper); when(build.getBuiltOn()).thenReturn(builtOn); when(listener.getLogger()).thenReturn(logger); when(user.toString()).thenReturn("admin"); when(builtOn.getDisplayName()).thenReturn("Jenkins"); when(project.getUrl()).thenReturn("myproject/"); when(project.getShortUrl()).thenReturn("01/"); when(pluginWrapper.getVersion()).thenReturn("10.x.x"); when(jenkins.getRootUrl()).thenReturn("http://localhost:8080/jenkins/"); when(build.getUrl()).thenReturn("myproject/01/"); DummyEntry entry = new DummyEntry("#1000\n* bullet 1\n* second row\n", user); ChangeLogSet changeLogSet = new DummyChangelog(entry, entry); when(build.getChangeSet()).thenReturn(changeLogSet); } } class DummyChangelog extends ChangeLogSet { private Entry[] entries; private int i = 0; public DummyChangelog(Entry... 
entries) { super(null, null); this.entries = entries; } @Override public boolean isEmptySet() { return false; } @Override public Iterator iterator() { return new Iterator() { @Override public boolean hasNext() { return i < entries.length ; } @Override public Object next() { return entries[i++]; } @Override public void remove() { } }; } } class DummyEntry extends Entry { private String message; private User user; public DummyEntry(String message, User user) { this.message = message; this.user = user; } @Override public String getMsg() { return message; } @Override public User getAuthor() { return user; } @Override public Collection<String> getAffectedPaths() { return null; } }
src/test/java/com/intland/jenkins/collector/CodebeamerCollectorTests.java
/* * Copyright (c) 2015 Intland Software ([email protected]) */ package com.intland.jenkins.collector; import com.intland.jenkins.api.CodebeamerApiClient; import com.intland.jenkins.collector.dto.CodebeamerDto; import hudson.Plugin; import hudson.PluginWrapper; import hudson.model.*; import hudson.scm.ChangeLogSet; import hudson.scm.ChangeLogSet.Entry; import jenkins.model.Jenkins; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; import java.io.PrintStream; import java.util.Collection; import java.util.Iterator; import static org.mockito.Matchers.anyString; import static org.powermock.api.mockito.PowerMockito.mockStatic; import static org.powermock.api.mockito.PowerMockito.when; @RunWith(PowerMockRunner.class) @PrepareForTest({Jenkins.class, Project.class}) public class CodebeamerCollectorTests { @Mock CodebeamerApiClient apiClient; @Mock ItemGroup<Item> projectParent; @Mock PluginWrapper pluginWrapper; @Mock AbstractProject project; @Mock BuildListener listener; @Mock AbstractBuild build; @Mock PrintStream logger; @Mock Jenkins jenkins; @Mock Plugin plugin; @Mock Node builtOn; @Mock User user; @Before public void setUp() throws Exception { MockitoAnnotations.initMocks(this); } @Test public void testMarkupCreationWithoutPlugins() throws Exception { mockJenkins(); String expectedMarkup = "[{JenkinsBuildTrends}]\n" + "//DO NOT MODIFY! \n//Generated by plugin version: 10.x.x at: 1970-01-01 01:01:15\n" + "!2 %%(color: #ff0000;)Build #0 (1970-01-01 01:01:15)%!\n" + "[{Table\n\n|__Duration__\n|[1 min, 15 sec|http://localhost:8080/jenkins/myproject/01/buildTimeTrend] @ Jenkins\n\n" + "| \n__Test Result__ \n|__[0/0|http://localhost:8080/jenkins/myproject/01/testReport/] failures__ \n\n" + "|__[Tested changes|http://localhost:8080/jenkins/myproject/01/changes]__\n|__\n" + "* [#1000|ISSUE:1000] \\\\* bullet 1 \\\\* second row (admin)\n" + "* [#1000|ISSUE:1000] \\\\* bullet 1 \\\\* second row (admin)\n \n\n" + "|__Repository__\n|Unsupported SCM\n}] \n"; long currentTime = 75000l; CodebeamerDto codebeamerDto = CodebeamerCollector.collectCodebeamerData(build, listener, apiClient, currentTime); Assert.assertEquals(expectedMarkup, codebeamerDto.getMarkup()); } private void mockJenkins() { mockStatic(Jenkins.class); when(Jenkins.getInstance()).thenReturn(jenkins); when(build.getProject()).thenReturn(project); when(project.getParent()).thenReturn(projectParent); when(jenkins.getPlugin(anyString())).thenReturn(plugin); when(plugin.getWrapper()).thenReturn(pluginWrapper); when(build.getBuiltOn()).thenReturn(builtOn); when(listener.getLogger()).thenReturn(logger); when(user.toString()).thenReturn("admin"); when(builtOn.getDisplayName()).thenReturn("Jenkins"); when(project.getUrl()).thenReturn("myproject/"); when(project.getShortUrl()).thenReturn("01/"); when(pluginWrapper.getVersion()).thenReturn("10.x.x"); when(jenkins.getRootUrl()).thenReturn("http://localhost:8080/jenkins/"); when(build.getUrl()).thenReturn("myproject/01/"); DummyEntry entry = new DummyEntry("#1000\n* bullet 1\n* second row\n", user); ChangeLogSet changeLogSet = new DummyChangelog(entry, entry); when(build.getChangeSet()).thenReturn(changeLogSet); } } class DummyChangelog extends ChangeLogSet { private Entry[] entries; private int i = 0; public DummyChangelog(Entry... 
entries) { super(null, null); this.entries = entries; } @Override public boolean isEmptySet() { return false; } @Override public Iterator iterator() { return new Iterator() { @Override public boolean hasNext() { return i < entries.length ; } @Override public Object next() { return entries[i++]; } }; } } class DummyEntry extends Entry { private String message; private User user; public DummyEntry(String message, User user) { this.message = message; this.user = user; } @Override public String getMsg() { return message; } @Override public User getAuthor() { return user; } @Override public Collection<String> getAffectedPaths() { return null; } }
override remove() for iterator as well / CI issue
src/test/java/com/intland/jenkins/collector/CodebeamerCollectorTests.java
override remove() for iterator as well / CI issue
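The CI issue this commit works around is most likely a pre-Java-8 toolchain: java.util.Iterator only gained a default remove() implementation (which throws UnsupportedOperationException) in Java 8, so an anonymous Iterator that omits the method fails to compile on older JDKs. A self-contained sketch of the pattern, using an invented FixedIterator name that is not part of the commit above:

import java.util.Iterator;

// Illustrative only; mirrors the anonymous iterator in DummyChangelog.
final class FixedIterator<T> implements Iterator<T> {
    private final T[] items;
    private int i = 0;

    FixedIterator(final T[] items) { this.items = items; }

    @Override public boolean hasNext() { return i < items.length; }

    @Override public T next() { return items[i++]; }

    // Mandatory on Java 7 and earlier; from Java 8 on, a default implementation
    // throwing UnsupportedOperationException makes this override optional.
    @Override public void remove() { throw new UnsupportedOperationException(); }
}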
Java
epl-1.0
6aec5bebc9210e1e812a305600f89852b778e9e4
0
css-iter/cs-studio,ControlSystemStudio/cs-studio,ControlSystemStudio/cs-studio,css-iter/cs-studio,ESSICS/cs-studio,ControlSystemStudio/cs-studio,ESSICS/cs-studio,css-iter/cs-studio,ControlSystemStudio/cs-studio,ESSICS/cs-studio,css-iter/cs-studio,ControlSystemStudio/cs-studio,ESSICS/cs-studio,css-iter/cs-studio,css-iter/cs-studio,ESSICS/cs-studio,ESSICS/cs-studio,css-iter/cs-studio,ControlSystemStudio/cs-studio,ESSICS/cs-studio
package org.csstudio.swt.xygraph.figures; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import org.csstudio.swt.xygraph.linearscale.AbstractScale.LabelSide; import org.csstudio.swt.xygraph.linearscale.LinearScale.Orientation; import org.csstudio.swt.xygraph.undo.OperationsManager; import org.csstudio.swt.xygraph.undo.ZoomCommand; import org.csstudio.swt.xygraph.undo.ZoomType; import org.csstudio.swt.xygraph.util.Log10; import org.csstudio.swt.xygraph.util.XYGraphMediaFactory; import org.eclipse.draw2d.ColorConstants; import org.eclipse.draw2d.Figure; import org.eclipse.draw2d.Graphics; import org.eclipse.draw2d.Label; import org.eclipse.draw2d.SWTGraphics; import org.eclipse.draw2d.geometry.Dimension; import org.eclipse.draw2d.geometry.Rectangle; import org.eclipse.swt.SWT; import org.eclipse.swt.graphics.Color; import org.eclipse.swt.graphics.Font; import org.eclipse.swt.graphics.FontData; import org.eclipse.swt.graphics.GC; import org.eclipse.swt.graphics.Image; /** * XY-Graph Figure. * @author Xihui Chen * @author Kay Kasemir (performStagger) */ public class XYGraph extends Figure{ private static final int GAP = 2; public final static Color WHITE_COLOR = ColorConstants.white; public final static Color BLACK_COLOR = ColorConstants.black; public final static Color[] DEFAULT_TRACES_COLOR = new Color[]{ ColorConstants.red, ColorConstants.blue, ColorConstants.darkGreen, ColorConstants.orange, ColorConstants.darkBlue, ColorConstants.cyan, ColorConstants.green, ColorConstants.yellow, ColorConstants.black }; private int traceNum = 0; private boolean transparent = true; private boolean showLegend = true; private Map<Axis, Legend> legendMap; /** Graph title. Should never be <code>null</code> because * otherwise the ToolbarArmedXYGraph's GraphConfigPage * can crash. */ private String title = ""; private Color titleColor; private Label titleLabel; private List<Axis> xAxisList; private List<Axis> yAxisList; private PlotArea plotArea; // TODO Clients can set these to null. Should these be 'final'? Or provider getter? public Axis primaryXAxis; public Axis primaryYAxis; private OperationsManager operationsManager; private ZoomType zoomType; /** * Constructor. 
*/ public XYGraph() { setOpaque(!transparent); legendMap = new LinkedHashMap<Axis, Legend>(); titleLabel = new Label(); setTitleFont(XYGraphMediaFactory.getInstance().getFont( new FontData("Arial", 12, SWT.BOLD))); //titleLabel.setVisible(false); xAxisList = new ArrayList<Axis>(); yAxisList = new ArrayList<Axis>(); plotArea = new PlotArea(this); add(titleLabel); add(plotArea); primaryYAxis = new Axis("Y-Axis", true); primaryYAxis.setOrientation(Orientation.VERTICAL); primaryYAxis.setTickLableSide(LabelSide.Primary); primaryYAxis.setAutoScaleThreshold(0.1); addAxis(primaryYAxis); primaryXAxis = new Axis("X-Axis", false); primaryXAxis.setOrientation(Orientation.HORIZONTAL); primaryXAxis.setTickLableSide(LabelSide.Primary); addAxis(primaryXAxis); operationsManager = new OperationsManager(); } @Override public boolean isOpaque() { return false; } @Override protected void layout() { Rectangle clientArea = getClientArea().getCopy(); boolean hasRightYAxis = false; boolean hasTopXAxis = false; if(titleLabel != null && titleLabel.isVisible() && !(titleLabel.getText().length() <= 0)){ Dimension titleSize = titleLabel.getPreferredSize(); titleLabel.setBounds(new Rectangle(clientArea.x + clientArea.width/2 - titleSize.width/2, clientArea.y, titleSize.width, titleSize.height)); clientArea.y += titleSize.height + GAP; clientArea.height -= titleSize.height + GAP; } if(showLegend){ List<Integer> rowHPosList = new ArrayList<Integer>(); List<Dimension> legendSizeList = new ArrayList<Dimension>(); List<Integer> rowLegendNumList = new ArrayList<Integer>(); List<Legend> legendList = new ArrayList<Legend>(); Object[] yAxes = legendMap.keySet().toArray(); int hPos = 0; int rowLegendNum = 0; for(int i = 0; i< yAxes.length; i++){ Legend legend = legendMap.get(yAxes[i]); if(legend != null && legend.isVisible()){ legendList.add(legend); Dimension legendSize = legend.getPreferredSize(clientArea.width, clientArea.height); legendSizeList.add(legendSize); if((hPos+legendSize.width + GAP) > clientArea.width){ if(rowLegendNum ==0) break; rowHPosList.add(clientArea.x + (clientArea.width-hPos)/2); rowLegendNumList.add(rowLegendNum); rowLegendNum = 1; hPos = legendSize.width + GAP; clientArea.height -=legendSize.height +GAP; if(i==yAxes.length-1){ hPos =legendSize.width + GAP; rowLegendNum = 1; rowHPosList.add(clientArea.x + (clientArea.width-hPos)/2); rowLegendNumList.add(rowLegendNum); clientArea.height -=legendSize.height +GAP; } }else{ hPos+=legendSize.width + GAP; rowLegendNum++; if(i==yAxes.length-1){ rowHPosList.add(clientArea.x + (clientArea.width-hPos)/2); rowLegendNumList.add(rowLegendNum); clientArea.height -=legendSize.height +GAP; } } } } int lm = 0; int vPos = clientArea.y + clientArea.height + GAP; for(int i=0; i<rowLegendNumList.size(); i++){ hPos = rowHPosList.get(i); for(int j=0; j<rowLegendNumList.get(i); j++){ legendList.get(lm).setBounds(new Rectangle( hPos, vPos, legendSizeList.get(lm).width, legendSizeList.get(lm).height)); hPos += legendSizeList.get(lm).width + GAP; lm++; } vPos += legendSizeList.get(lm-1).height + GAP; } } for(int i=xAxisList.size()-1; i>=0; i--){ Axis xAxis = xAxisList.get(i); Dimension xAxisSize = xAxis.getPreferredSize(clientArea.width, clientArea.height); if(xAxis.getTickLablesSide() == LabelSide.Primary){ xAxis.setBounds(new Rectangle(clientArea.x, clientArea.y + clientArea.height - xAxisSize.height, xAxisSize.width, xAxisSize.height)); clientArea.height -= xAxisSize.height; }else{ hasTopXAxis = true; xAxis.setBounds(new Rectangle(clientArea.x, clientArea.y+1, xAxisSize.width, 
xAxisSize.height)); clientArea.y += xAxisSize.height ; clientArea.height -= xAxisSize.height; } } for(int i=yAxisList.size()-1; i>=0; i--){ Axis yAxis = yAxisList.get(i); Dimension yAxisSize = yAxis.getPreferredSize(clientArea.width, clientArea.height + (hasTopXAxis? 2:1) *yAxis.getMargin()); if(yAxis.getTickLablesSide() == LabelSide.Primary){ // on the left yAxis.setBounds(new Rectangle(clientArea.x, clientArea.y - (hasTopXAxis? yAxis.getMargin():0), yAxisSize.width, yAxisSize.height)); clientArea.x += yAxisSize.width; clientArea.width -= yAxisSize.width; }else{ // on the right hasRightYAxis = true; yAxis.setBounds(new Rectangle(clientArea.x + clientArea.width - yAxisSize.width -1, clientArea.y- (hasTopXAxis? yAxis.getMargin():0), yAxisSize.width, yAxisSize.height)); clientArea.width -= yAxisSize.width; } } //re-adjust xAxis boundss for(int i=xAxisList.size()-1; i>=0; i--){ Axis xAxis = xAxisList.get(i); xAxis.getBounds().x = clientArea.x - xAxis.getMargin()-1; if(hasRightYAxis) xAxis.getBounds().width = clientArea.width + 2*xAxis.getMargin(); else xAxis.getBounds().width = clientArea.width + xAxis.getMargin(); } if(plotArea != null && plotArea.isVisible()){ Rectangle plotAreaBound = new Rectangle( primaryXAxis.getBounds().x + primaryXAxis.getMargin(), primaryYAxis.getBounds().y + primaryYAxis.getMargin(), primaryXAxis.getBounds().width - 2*primaryXAxis.getMargin(), primaryYAxis.getBounds().height - 2*primaryYAxis.getMargin() ); plotArea.setBounds(plotAreaBound); } super.layout(); } /** * @param zoomType the zoomType to set */ public void setZoomType(ZoomType zoomType) { this.zoomType = zoomType; plotArea.setZoomType(zoomType); for(Axis axis : xAxisList) axis.setZoomType(zoomType); for(Axis axis : yAxisList) axis.setZoomType(zoomType); } /** * @return the zoomType */ public ZoomType getZoomType() { return zoomType; } /** * @param title the title to set */ public void setTitle(String title) { this.title = title.trim(); titleLabel.setText(title); } /** * @param showTitle true if title should be shown; false otherwise. */ public void setShowTitle(boolean showTitle){ titleLabel.setVisible(showTitle); revalidate(); } /** * @return true if title should be shown; false otherwise. */ public boolean isShowTitle(){ return titleLabel.isVisible(); } /** * @param showLegend true if legend should be shown; false otherwise. */ public void setShowLegend(boolean showLegend){ this.showLegend = showLegend; for(Axis yAxis : legendMap.keySet()){ Legend legend = legendMap.get(yAxis); legend.setVisible(showLegend); } revalidate(); } /** * @return the showLegend */ public boolean isShowLegend() { return showLegend; } /**Add an axis to the graph * @param axis */ public void addAxis(Axis axis){ if(axis.isHorizontal()) xAxisList.add(axis); else yAxisList.add(axis); plotArea.addGrid(new Grid(axis)); add(axis); axis.setXyGraph(this); revalidate(); } /**Remove an axis from the graph * @param axis * @return true if this axis exists. 
*/ public boolean removeAxis(Axis axis){ remove(axis); plotArea.removeGrid(axis.getGrid()); revalidate(); if(axis.isHorizontal()) return xAxisList.remove(axis); else return yAxisList.remove(axis); } /**Add a trace * @param trace */ public void addTrace(Trace trace){ if (trace.getTraceColor() == null) { // Cycle through default colors trace.setTraceColor(DEFAULT_TRACES_COLOR[traceNum % DEFAULT_TRACES_COLOR.length]); ++traceNum; } if(legendMap.containsKey(trace.getYAxis())) legendMap.get(trace.getYAxis()).addTrace(trace); else{ legendMap.put(trace.getYAxis(), new Legend()); legendMap.get(trace.getYAxis()).addTrace(trace); add(legendMap.get(trace.getYAxis())); } plotArea.addTrace(trace); trace.setXYGraph(this); trace.dataChanged(null); revalidate(); } /**Remove a trace. * @param trace */ public void removeTrace(Trace trace){ if(legendMap.containsKey(trace.getYAxis())){ legendMap.get(trace.getYAxis()).removeTrace(trace); if(legendMap.get(trace.getYAxis()).getTraceList().size() <=0){ remove(legendMap.remove(trace.getYAxis())); } } plotArea.removeTrace(trace); revalidate(); } /**Add an annotation * @param annotation */ public void addAnnotation(Annotation annotation){ plotArea.addAnnotation(annotation); } /**Remove an annotation * @param annotation */ public void removeAnnotation(Annotation annotation){ plotArea.removeAnnotation(annotation); } /** * @param titleFont the titleFont to set */ public void setTitleFont(Font titleFont) { titleLabel.setFont(titleFont); } /** * @return the title font. */ public Font getTitleFont(){ return titleLabel.getFont(); } /** * @param titleColor the titleColor to set */ public void setTitleColor(Color titleColor) { this.titleColor = titleColor; titleLabel.setForegroundColor(titleColor); } /** * {@inheritDoc} */ public void paintFigure(final Graphics graphics) { if (!transparent) { graphics.fillRectangle(getClientArea()); } super.paintFigure(graphics); } /** * @param transparent the transparent to set */ public void setTransparent(boolean transparent) { this.transparent = transparent; repaint(); } /** * @return the transparent */ public boolean isTransparent() { return transparent; } /** TODO This allows clients to change the traces via getPlotArea().getTraceList() and then add/remove/clear/..., * circumventing the designated addTrace()/removeTrace(). * Can it be non-public? * @return the plotArea, which contains all the elements drawn inside it. */ public PlotArea getPlotArea() { return plotArea; } /** * @return the image of the XYFigure */ public Image getImage(){ Image image = new Image(null, bounds.width + 6, bounds.height + 6); GC gc = new GC(image); SWTGraphics graphics = new SWTGraphics(gc); graphics.translate(-bounds.x + 3, -bounds.y + 3); graphics.setForegroundColor(getForegroundColor()); graphics.setBackgroundColor(getBackgroundColor()); paint(graphics); gc.dispose(); return image; } /** * @return the titleColor */ public Color getTitleColor() { if(titleColor == null) return getForegroundColor(); return titleColor; } /** * @return the title */ public String getTitle() { return title; } /** * @return the operationsManager */ public OperationsManager getOperationsManager() { return operationsManager; } /** * @return the xAxisList */ public List<Axis> getXAxisList() { return xAxisList; } /** * @return the yAxisList */ public List<Axis> getYAxisList() { return yAxisList; } /** * @return the all the axis include xAxes and yAxes. * yAxisList is appended to xAxisList in the returned list. 
*/ public List<Axis> getAxisList(){ List<Axis> list = new ArrayList<Axis>(); list.addAll(xAxisList); list.addAll(yAxisList); return list; } /** * @return the legendMap */ public Map<Axis, Legend> getLegendMap() { return legendMap; } /** * Perform forced autoscale to all axes. */ public void performAutoScale(){ ZoomCommand command = new ZoomCommand("Auto Scale", xAxisList, yAxisList); command.savePreviousStates(); for(Axis axis : xAxisList){ axis.performAutoScale(true); } for(Axis axis : yAxisList){ axis.performAutoScale(true); } command.saveAfterStates(); operationsManager.addCommand(command); } /** Stagger all axes: Autoscale each axis so that traces on various * axes don't overlap */ public void performStagger() { final ZoomCommand command = new ZoomCommand("Stagger Axes", null, yAxisList); command.savePreviousStates(); for(Axis axis : yAxisList){ axis.performAutoScale(true); } // Arrange all so they don't overlap by assigning 1/Nth of // the vertical range to each one final int N = yAxisList.size(); for (int i=0; i<N; ++i) { final Axis yaxis = yAxisList.get(i); if (yaxis.isAutoScale()) continue; // takes care of itself double low = yaxis.getRange().getLower(); double high = yaxis.getRange().getUpper(); if (yaxis.isLogScaleEnabled()) { low = Log10.log10(low); high = Log10.log10(high); } double range = high - low; // Fudge factor to get some extra space range = 1.1*range; // Shift it down according to its index, using a total of N*range. low -= (N-i-1)*range; high += i*range; if (yaxis.isLogScaleEnabled()) { low = Log10.pow10(low); high = Log10.pow10(high); } // Sanity check for empty traces if (low < high) yaxis.setRange(low, high); } command.saveAfterStates(); operationsManager.addCommand(command); } }
applications/plugins/org.csstudio.swt.xygraph/src/org/csstudio/swt/xygraph/figures/XYGraph.java
package org.csstudio.swt.xygraph.figures; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import org.csstudio.swt.xygraph.linearscale.AbstractScale.LabelSide; import org.csstudio.swt.xygraph.linearscale.LinearScale.Orientation; import org.csstudio.swt.xygraph.undo.OperationsManager; import org.csstudio.swt.xygraph.undo.ZoomCommand; import org.csstudio.swt.xygraph.undo.ZoomType; import org.csstudio.swt.xygraph.util.Log10; import org.csstudio.swt.xygraph.util.XYGraphMediaFactory; import org.eclipse.draw2d.ColorConstants; import org.eclipse.draw2d.Figure; import org.eclipse.draw2d.Graphics; import org.eclipse.draw2d.Label; import org.eclipse.draw2d.SWTGraphics; import org.eclipse.draw2d.geometry.Dimension; import org.eclipse.draw2d.geometry.Rectangle; import org.eclipse.swt.SWT; import org.eclipse.swt.graphics.Color; import org.eclipse.swt.graphics.Font; import org.eclipse.swt.graphics.FontData; import org.eclipse.swt.graphics.GC; import org.eclipse.swt.graphics.Image; /** * XY-Graph Figure. * @author Xihui Chen * @author Kay Kasemir (performStagger) */ public class XYGraph extends Figure{ private static final int GAP = 2; public final static Color WHITE_COLOR = ColorConstants.white; public final static Color BLACK_COLOR = ColorConstants.black; public final static Color[] DEFAULT_TRACES_COLOR = new Color[]{ ColorConstants.red, ColorConstants.blue, ColorConstants.darkGreen, ColorConstants.orange, ColorConstants.darkBlue, ColorConstants.cyan, ColorConstants.green, ColorConstants.yellow, ColorConstants.black }; private int traceNum = 0; private boolean transparent = true; private boolean showLegend = true; private Map<Axis, Legend> legendMap; /** Graph title. Should never be <code>null</code> because * otherwise the ToolbarArmedXYGraph's GraphConfigPage * can crash. */ private String title = ""; private Color titleColor; private Label titleLabel; private List<Axis> xAxisList; private List<Axis> yAxisList; private PlotArea plotArea; // TODO Clients can set these to null. Should these be 'final'? Or provider getter? public Axis primaryXAxis; public Axis primaryYAxis; private OperationsManager operationsManager; private ZoomType zoomType; /** * Constructor. 
*/ public XYGraph() { setOpaque(!transparent); legendMap = new LinkedHashMap<Axis, Legend>(); titleLabel = new Label(); setTitleFont(XYGraphMediaFactory.getInstance().getFont( new FontData("Arial", 12, SWT.BOLD))); //titleLabel.setVisible(false); xAxisList = new ArrayList<Axis>(); yAxisList = new ArrayList<Axis>(); plotArea = new PlotArea(this); add(titleLabel); add(plotArea); primaryYAxis = new Axis("Y-Axis", true); primaryYAxis.setOrientation(Orientation.VERTICAL); primaryYAxis.setTickLableSide(LabelSide.Primary); primaryYAxis.setAutoScaleThreshold(0.1); addAxis(primaryYAxis); primaryXAxis = new Axis("X-Axis", false); primaryXAxis.setOrientation(Orientation.HORIZONTAL); primaryXAxis.setTickLableSide(LabelSide.Primary); addAxis(primaryXAxis); operationsManager = new OperationsManager(); } @Override public boolean isOpaque() { return false; } @Override protected void layout() { Rectangle clientArea = getClientArea().getCopy(); boolean hasRightYAxis = false; boolean hasTopXAxis = false; if(titleLabel != null && titleLabel.isVisible() && !(titleLabel.getText().length() <= 0)){ Dimension titleSize = titleLabel.getPreferredSize(); titleLabel.setBounds(new Rectangle(clientArea.x + clientArea.width/2 - titleSize.width/2, clientArea.y, titleSize.width, titleSize.height)); clientArea.y += titleSize.height + GAP; clientArea.height -= titleSize.height + GAP; } if(showLegend){ List<Integer> rowHPosList = new ArrayList<Integer>(); List<Dimension> legendSizeList = new ArrayList<Dimension>(); List<Integer> rowLegendNumList = new ArrayList<Integer>(); List<Legend> legendList = new ArrayList<Legend>(); Object[] yAxes = legendMap.keySet().toArray(); int hPos = 0; int rowLegendNum = 0; for(int i = 0; i< yAxes.length; i++){ Legend legend = legendMap.get(yAxes[i]); if(legend != null && legend.isVisible()){ legendList.add(legend); Dimension legendSize = legend.getPreferredSize(clientArea.width, clientArea.height); legendSizeList.add(legendSize); if((hPos+legendSize.width + GAP) > clientArea.width){ if(rowLegendNum ==0) break; rowHPosList.add(clientArea.x + (clientArea.width-hPos)/2); rowLegendNumList.add(rowLegendNum); rowLegendNum = 1; hPos = legendSize.width + GAP; clientArea.height -=legendSize.height +GAP; if(i==yAxes.length-1){ hPos =legendSize.width + GAP; rowLegendNum = 1; rowHPosList.add(clientArea.x + (clientArea.width-hPos)/2); rowLegendNumList.add(rowLegendNum); clientArea.height -=legendSize.height +GAP; } }else{ hPos+=legendSize.width + GAP; rowLegendNum++; if(i==yAxes.length-1){ rowHPosList.add(clientArea.x + (clientArea.width-hPos)/2); rowLegendNumList.add(rowLegendNum); clientArea.height -=legendSize.height +GAP; } } } } int lm = 0; int vPos = clientArea.y + clientArea.height + GAP; for(int i=0; i<rowLegendNumList.size(); i++){ hPos = rowHPosList.get(i); for(int j=0; j<rowLegendNumList.get(i); j++){ legendList.get(lm).setBounds(new Rectangle( hPos, vPos, legendSizeList.get(lm).width, legendSizeList.get(lm).height)); hPos += legendSizeList.get(lm).width + GAP; lm++; } vPos += legendSizeList.get(lm-1).height + GAP; } } for(int i=xAxisList.size()-1; i>=0; i--){ Axis xAxis = xAxisList.get(i); Dimension xAxisSize = xAxis.getPreferredSize(clientArea.width, clientArea.height); if(xAxis.getTickLablesSide() == LabelSide.Primary){ xAxis.setBounds(new Rectangle(clientArea.x, clientArea.y + clientArea.height - xAxisSize.height, xAxisSize.width, xAxisSize.height)); clientArea.height -= xAxisSize.height; }else{ hasTopXAxis = true; xAxis.setBounds(new Rectangle(clientArea.x, clientArea.y+1, xAxisSize.width, 
xAxisSize.height)); clientArea.y += xAxisSize.height ; clientArea.height -= xAxisSize.height; } } for(int i=yAxisList.size()-1; i>=0; i--){ Axis yAxis = yAxisList.get(i); Dimension yAxisSize = yAxis.getPreferredSize(clientArea.width, clientArea.height + (hasTopXAxis? 2:1) *yAxis.getMargin()); if(yAxis.getTickLablesSide() == LabelSide.Primary){ // on the left yAxis.setBounds(new Rectangle(clientArea.x, clientArea.y - (hasTopXAxis? yAxis.getMargin():0), yAxisSize.width, yAxisSize.height)); clientArea.x += yAxisSize.width; clientArea.width -= yAxisSize.width; }else{ // on the right hasRightYAxis = true; yAxis.setBounds(new Rectangle(clientArea.x + clientArea.width - yAxisSize.width -1, clientArea.y- (hasTopXAxis? yAxis.getMargin():0), yAxisSize.width, yAxisSize.height)); clientArea.width -= yAxisSize.width; } } //re-adjust xAxis boundss for(int i=xAxisList.size()-1; i>=0; i--){ Axis xAxis = xAxisList.get(i); xAxis.getBounds().x = clientArea.x - xAxis.getMargin()-1; if(hasRightYAxis) xAxis.getBounds().width = clientArea.width + 2*xAxis.getMargin(); else xAxis.getBounds().width = clientArea.width + xAxis.getMargin(); } if(plotArea != null && plotArea.isVisible()){ Rectangle plotAreaBound = new Rectangle( primaryXAxis.getBounds().x + primaryXAxis.getMargin(), primaryYAxis.getBounds().y + primaryYAxis.getMargin(), primaryXAxis.getBounds().width - 2*primaryXAxis.getMargin(), primaryYAxis.getBounds().height - 2*primaryYAxis.getMargin() ); plotArea.setBounds(plotAreaBound); } super.layout(); } /** * @param zoomType the zoomType to set */ public void setZoomType(ZoomType zoomType) { this.zoomType = zoomType; plotArea.setZoomType(zoomType); for(Axis axis : xAxisList) axis.setZoomType(zoomType); for(Axis axis : yAxisList) axis.setZoomType(zoomType); } /** * @return the zoomType */ public ZoomType getZoomType() { return zoomType; } /** * @param title the title to set */ public void setTitle(String title) { this.title = title.trim(); titleLabel.setText(title); } /** * @param showTitle true if title should be shown; false otherwise. */ public void setShowTitle(boolean showTitle){ titleLabel.setVisible(showTitle); revalidate(); } /** * @return true if title should be shown; false otherwise. */ public boolean isShowTitle(){ return titleLabel.isVisible(); } /** * @param showLegend true if legend should be shown; false otherwise. */ public void setShowLegend(boolean showLegend){ this.showLegend = showLegend; for(Axis yAxis : legendMap.keySet()){ Legend legend = legendMap.get(yAxis); legend.setVisible(showLegend); } revalidate(); } /** * @return the showLegend */ public boolean isShowLegend() { return showLegend; } /**Add an axis to the graph * @param axis */ public void addAxis(Axis axis){ if(axis.isHorizontal()) xAxisList.add(axis); else yAxisList.add(axis); plotArea.addGrid(new Grid(axis)); add(axis); axis.setXyGraph(this); revalidate(); } /**Remove an axis from the graph * @param axis * @return true if this axis exists. 
*/ public boolean removeAxis(Axis axis){ remove(axis); plotArea.removeGrid(axis.getGrid()); revalidate(); if(axis.isHorizontal()) return xAxisList.remove(axis); else return yAxisList.remove(axis); } /**Add a trace * @param trace */ public void addTrace(Trace trace){ if (trace.getTraceColor() == null) { // Cycle through default colors trace.setTraceColor(DEFAULT_TRACES_COLOR[traceNum % DEFAULT_TRACES_COLOR.length]); ++traceNum; } if(legendMap.containsKey(trace.getYAxis())) legendMap.get(trace.getYAxis()).addTrace(trace); else{ legendMap.put(trace.getYAxis(), new Legend()); legendMap.get(trace.getYAxis()).addTrace(trace); add(legendMap.get(trace.getYAxis())); } plotArea.addTrace(trace); trace.setXYGraph(this); trace.dataChanged(null); revalidate(); } /**Remove a trace. * @param trace */ public void removeTrace(Trace trace){ if(legendMap.containsKey(trace.getYAxis())){ legendMap.get(trace.getYAxis()).removeTrace(trace); if(legendMap.get(trace.getYAxis()).getTraceList().size() <=0){ remove(legendMap.remove(trace.getYAxis())); } } plotArea.removeTrace(trace); revalidate(); } /**Add an annotation * @param annotation */ public void addAnnotation(Annotation annotation){ plotArea.addAnnotation(annotation); } /**Remove an annotation * @param annotation */ public void removeAnnotation(Annotation annotation){ plotArea.removeAnnotation(annotation); } /** * @param titleFont the titleFont to set */ public void setTitleFont(Font titleFont) { titleLabel.setFont(titleFont); } /** * @return the title font. */ public Font getTitleFont(){ return titleLabel.getFont(); } /** * @param titleColor the titleColor to set */ public void setTitleColor(Color titleColor) { this.titleColor = titleColor; titleLabel.setForegroundColor(titleColor); } /** * {@inheritDoc} */ public void paintFigure(final Graphics graphics) { if (!transparent) { graphics.fillRectangle(getClientArea()); } super.paintFigure(graphics); } /** * @param transparent the transparent to set */ public void setTransparent(boolean transparent) { this.transparent = transparent; repaint(); } /** * @return the transparent */ public boolean isTransparent() { return transparent; } /** TODO This allows clients to change the traces via getPlotArea().getTraceList() and then add/remove/clear/..., * circumventing the designated addTrace()/removeTrace(). * Can it be non-public? * @return the plotArea, which contains all the elements drawn inside it. */ public PlotArea getPlotArea() { return plotArea; } /** * @return the image of the XYFigure */ public Image getImage(){ Image image = new Image(null, bounds.width + 6, bounds.height + 6); GC gc = new GC(image); SWTGraphics graphics = new SWTGraphics(gc); graphics.translate(-bounds.x + 3, -bounds.y + 3); graphics.setForegroundColor(getForegroundColor()); graphics.setBackgroundColor(getBackgroundColor()); paint(graphics); gc.dispose(); return image; } /** * @return the titleColor */ public Color getTitleColor() { if(titleColor == null) return getForegroundColor(); return titleColor; } /** * @return the title */ public String getTitle() { return title; } /** * @return the operationsManager */ public OperationsManager getOperationsManager() { return operationsManager; } /** * @return the xAxisList */ public List<Axis> getXAxisList() { return xAxisList; } /** * @return the yAxisList */ public List<Axis> getYAxisList() { return yAxisList; } /** * @return the all the axis include xAxes and yAxes. * yAxisList is appended to xAxisList in the returned list. 
*/ public List<Axis> getAxisList(){ List<Axis> list = new ArrayList<Axis>(); list.addAll(xAxisList); list.addAll(yAxisList); return list; } /** * @return the legendMap */ public Map<Axis, Legend> getLegendMap() { return legendMap; } /** * Perform forced autoscale to all axes. */ public void performAutoScale(){ ZoomCommand command = new ZoomCommand("Auto Scale", xAxisList, yAxisList); command.savePreviousStates(); for(Axis axis : xAxisList){ axis.performAutoScale(true); } for(Axis axis : yAxisList){ axis.performAutoScale(true); } command.saveAfterStates(); operationsManager.addCommand(command); } /** Stagger all axes: Autoscale each axis so that traces on various * axes don't overlap */ public void performStagger() { final ZoomCommand command = new ZoomCommand("Stagger Axes", null, yAxisList); command.savePreviousStates(); for(Axis axis : yAxisList){ axis.performAutoScale(true); } // Arrange all so they don't overlap by assigning 1/Nth of // the vertical range to each one final int N = yAxisList.size(); for (int i=0; i<N; ++i) { final Axis yaxis = yAxisList.get(i); if (yaxis.isAutoScale()) continue; // takes care of itself double low = yaxis.getRange().getLower(); double high = yaxis.getRange().getUpper(); if (yaxis.isLogScaleEnabled()) { low = Log10.log10(low); high = Log10.log10(high); } double range = high - low; // Fudge factor to get some extra space range = 1.1*range; // Shift it down according to its index, using a total of N*range. low -= (N-i-1)*range; high += i*range; if (yaxis.isLogScaleEnabled()) { low = Log10.pow10(low); high = Log10.pow10(high); } yaxis.setRange(low, high); } command.saveAfterStates(); operationsManager.addCommand(command); } }
tweak
applications/plugins/org.csstudio.swt.xygraph/src/org/csstudio/swt/xygraph/figures/XYGraph.java
tweak
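For context on the XYGraph record above, the range arithmetic inside performStagger can be read in isolation. Below is a minimal sketch, assuming linear (non-log) axes; staggeredRange is a hypothetical helper name introduced here for illustration, not part of the XYGraph API:

import java.util.Arrays;

public class StaggerSketch {
    // Each of n axes keeps its autoscaled span but is shifted into its own
    // slot of the shared vertical space, so traces on different axes render
    // in disjoint bands; the 1.1 factor is the fudge factor from performStagger.
    static double[] staggeredRange(double low, double high, int i, int n) {
        double range = 1.1 * (high - low);   // padded span of one slot
        return new double[] {
            low - (n - i - 1) * range,       // push down past the slots above
            high + i * range                 // push up past the slots below
        };
    }

    public static void main(String[] args) {
        // Axis 0 of 2 with data in [0, 1] ends up spanning [-1.1, 1.0];
        // axis 1 spans [0.0, 2.1], so the two traces occupy separate bands.
        System.out.println(Arrays.toString(staggeredRange(0, 1, 0, 2)));
        System.out.println(Arrays.toString(staggeredRange(0, 1, 1, 2)));
    }
}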
Java
lgpl-2.1
49fec2725c6999ea2808feb305382593c0548d49
0
mebigfatguy/fb-contrib,rblasch/fb-contrib,ThrawnCA/fb-contrib,ThrawnCA/fb-contrib,rblasch/fb-contrib,rblasch/fb-contrib,rblasch/fb-contrib,mebigfatguy/fb-contrib,ThrawnCA/fb-contrib,mebigfatguy/fb-contrib,ThrawnCA/fb-contrib
/* * fb-contrib - Auxiliary detectors for Java programs * Copyright (C) 2005-2016 Dave Brosius * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package com.mebigfatguy.fbcontrib.detect; import java.util.ArrayDeque; import java.util.BitSet; import java.util.Deque; import java.util.Iterator; import org.apache.bcel.classfile.Code; import org.apache.bcel.classfile.CodeException; import org.apache.bcel.classfile.Method; import org.apache.bcel.generic.Type; import com.mebigfatguy.fbcontrib.utils.BugType; import com.mebigfatguy.fbcontrib.utils.ToString; import edu.umd.cs.findbugs.BugInstance; import edu.umd.cs.findbugs.BugReporter; import edu.umd.cs.findbugs.BytecodeScanningDetector; import edu.umd.cs.findbugs.OpcodeStack; import edu.umd.cs.findbugs.ba.ClassContext; /** * looks for relatively large if blocks of code, where you unconditionally return from them, and then follow that with an unconditional return of a small block. * This places the bulk of the logic to the right indentation-wise, making it more difficult to read than needed. It would be better to invert the logic of the * if block, and immediately return, allowing the bulk of the logic to be move to the left, for easier reading. 
*/ public class BuryingLogic extends BytecodeScanningDetector { private static final String BURY_LOGIC_LOW_RATIO_PROPERTY = "fb-contrib.bl.low_ratio"; private static final String BURY_LOGIC_NORMAL_RATIO_PROPERTY = "fb-contrib.bl.normal_ratio"; private static final double LOW_BUG_RATIO_LIMIT = 12.0; private static final double NORMAL_BUG_RATIO_LIMIT = 20.0; private BugReporter bugReporter; private OpcodeStack stack; private Deque<IfBlock> ifBlocks; private IfBlock activeUnconditional; private boolean isReported; private double lowBugRatioLimit; private double normalBugRatioLimit; private BitSet catchPCs; private BitSet gotoBranchPCs; public BuryingLogic(BugReporter bugReporter) { this.bugReporter = bugReporter; String lowRatio = System.getProperty(BURY_LOGIC_LOW_RATIO_PROPERTY); try { if (lowRatio == null) { lowBugRatioLimit = LOW_BUG_RATIO_LIMIT; } else { lowBugRatioLimit = Double.parseDouble(lowRatio); if (lowBugRatioLimit <= 0) { lowBugRatioLimit = LOW_BUG_RATIO_LIMIT; } } } catch (NumberFormatException e) { lowBugRatioLimit = LOW_BUG_RATIO_LIMIT; } String normalRatio = System.getProperty(BURY_LOGIC_NORMAL_RATIO_PROPERTY); try { if (normalRatio == null) { normalBugRatioLimit = NORMAL_BUG_RATIO_LIMIT; } else { normalBugRatioLimit = Double.parseDouble(normalRatio); if (normalBugRatioLimit <= 0) { normalBugRatioLimit = NORMAL_BUG_RATIO_LIMIT; } } } catch (NumberFormatException e) { normalBugRatioLimit = NORMAL_BUG_RATIO_LIMIT; } } @Override public void visitClassContext(ClassContext classContext) { try { stack = new OpcodeStack(); ifBlocks = new ArrayDeque<>(); gotoBranchPCs = new BitSet(); super.visitClassContext(classContext); } finally { stack = null; ifBlocks = null; catchPCs = null; gotoBranchPCs = null; } } @Override public void visitCode(Code obj) { Method m = getMethod(); if (m.getReturnType() == Type.VOID) { return; } stack.resetForMethodEntry(this); ifBlocks.clear(); activeUnconditional = null; isReported = false; CodeException[] ces = obj.getExceptionTable(); if ((ces == null) || (ces.length == 0)) { catchPCs = null; } else { catchPCs = new BitSet(); for (CodeException ce : ces) { catchPCs.set(ce.getHandlerPC()); } } gotoBranchPCs.clear(); super.visitCode(obj); } @Override public void sawOpcode(int seen) { if (isReported) { return; } try { int removed = 0; if (!ifBlocks.isEmpty()) { Iterator<IfBlock> it = ifBlocks.iterator(); while (it.hasNext()) { IfBlock block = it.next(); if ((getPC() >= block.getEnd())) { it.remove(); removed++; } } } if (removed > 1) { activeUnconditional = null; } if (isBranch(seen)) { if (activeUnconditional != null) { activeUnconditional = null; } int target = getBranchTarget(); if (getBranchOffset() > 0) { if ((seen == GOTO) || (seen == GOTO_W)) { gotoBranchPCs.set(target); } else if ((catchPCs == null) || !catchPCs.get(getNextPC())) { ifBlocks.addLast(new IfBlock(getNextPC(), target)); } } else { removeLoopBlocks(target); } } else if (isReturn(seen)) { if ((activeUnconditional != null) && !gotoBranchPCs.get(activeUnconditional.getEnd())) { int ifSize = activeUnconditional.getEnd() - activeUnconditional.getStart(); int elseSize = getPC() - activeUnconditional.getEnd(); double ratio = (double) ifSize / (double) elseSize; if (ratio > lowBugRatioLimit) { bugReporter .reportBug(new BugInstance(this, BugType.BL_BURYING_LOGIC.name(), ratio > normalBugRatioLimit ? 
NORMAL_PRIORITY : LOW_PRIORITY) .addClass(this).addMethod(this).addSourceLineRange(this, activeUnconditional.getStart(), activeUnconditional.getEnd())); isReported = true; } } else if (!ifBlocks.isEmpty() && (getNextPC() == ifBlocks.getFirst().getEnd())) { activeUnconditional = ifBlocks.getFirst(); } } } finally { stack.sawOpcode(this, seen); } } private void removeLoopBlocks(int target) { Iterator<IfBlock> it = ifBlocks.descendingIterator(); while (it.hasNext()) { if (it.next().getStart() >= target) { it.remove(); } else { return; } } } static class IfBlock { private int start; private int end; public IfBlock(int s, int e) { start = s; end = e; } public int getStart() { return start; } public int getEnd() { return end; } @Override public String toString() { return ToString.build(this); } } }
src/com/mebigfatguy/fbcontrib/detect/BuryingLogic.java
/* * fb-contrib - Auxiliary detectors for Java programs * Copyright (C) 2005-2016 Dave Brosius * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package com.mebigfatguy.fbcontrib.detect; import java.util.ArrayDeque; import java.util.BitSet; import java.util.Deque; import java.util.Iterator; import org.apache.bcel.classfile.Code; import org.apache.bcel.classfile.CodeException; import org.apache.bcel.classfile.Method; import org.apache.bcel.generic.Type; import com.mebigfatguy.fbcontrib.utils.BugType; import com.mebigfatguy.fbcontrib.utils.ToString; import edu.umd.cs.findbugs.BugInstance; import edu.umd.cs.findbugs.BugReporter; import edu.umd.cs.findbugs.BytecodeScanningDetector; import edu.umd.cs.findbugs.OpcodeStack; import edu.umd.cs.findbugs.ba.ClassContext; /** * looks for relatively large if blocks of code, where you unconditionally return from them, and then follow that with an unconditional return of a small block. * This places the bulk of the logic to the right indentation-wise, making it more difficult to read than needed. It would be better to invert the logic of the * if block, and immediately return, allowing the bulk of the logic to be move to the left, for easier reading. 
*/ public class BuryingLogic extends BytecodeScanningDetector { private static final String BURY_LOGIC_LOW_RATIO_PROPERTY = "fb-contrib.bl.low_ratio"; private static final String BURY_LOGIC_NORMAL_RATIO_PROPERTY = "fb-contrib.bl.normal_ratio"; private static final double LOW_BUG_RATIO_LIMIT = 12.0; private static final double NORMAL_BUG_RATIO_LIMIT = 20.0; private BugReporter bugReporter; private OpcodeStack stack; private Deque<IfBlock> ifBlocks; private IfBlock activeUnconditional; private boolean isReported; private double lowBugRatioLimit; private double normalBugRatioLimit; private BitSet catchPCs; private BitSet gotoBranchPCs; public BuryingLogic(BugReporter bugReporter) { this.bugReporter = bugReporter; String lowRatio = System.getProperty(BURY_LOGIC_LOW_RATIO_PROPERTY); try { if (lowRatio == null) { lowBugRatioLimit = LOW_BUG_RATIO_LIMIT; } else { lowBugRatioLimit = Double.parseDouble(lowRatio); if (lowBugRatioLimit <= 0) { lowBugRatioLimit = LOW_BUG_RATIO_LIMIT; } } } catch (Exception e) { lowBugRatioLimit = LOW_BUG_RATIO_LIMIT; } String normalRatio = System.getProperty(BURY_LOGIC_NORMAL_RATIO_PROPERTY); try { if (normalRatio == null) { normalBugRatioLimit = NORMAL_BUG_RATIO_LIMIT; } else { normalBugRatioLimit = Double.parseDouble(normalRatio); if (normalBugRatioLimit <= 0) { normalBugRatioLimit = NORMAL_BUG_RATIO_LIMIT; } } } catch (Exception e) { normalBugRatioLimit = NORMAL_BUG_RATIO_LIMIT; } } @Override public void visitClassContext(ClassContext classContext) { try { stack = new OpcodeStack(); ifBlocks = new ArrayDeque<>(); gotoBranchPCs = new BitSet(); super.visitClassContext(classContext); } finally { stack = null; ifBlocks = null; catchPCs = null; gotoBranchPCs = null; } } @Override public void visitCode(Code obj) { Method m = getMethod(); if (m.getReturnType() == Type.VOID) { return; } stack.resetForMethodEntry(this); ifBlocks.clear(); activeUnconditional = null; isReported = false; CodeException[] ces = obj.getExceptionTable(); if ((ces == null) || (ces.length == 0)) { catchPCs = null; } else { catchPCs = new BitSet(); for (CodeException ce : ces) { catchPCs.set(ce.getHandlerPC()); } } gotoBranchPCs.clear(); super.visitCode(obj); } @Override public void sawOpcode(int seen) { if (isReported) { return; } try { int removed = 0; if (!ifBlocks.isEmpty()) { Iterator<IfBlock> it = ifBlocks.iterator(); while (it.hasNext()) { IfBlock block = it.next(); if ((getPC() >= block.getEnd())) { it.remove(); removed++; } } } if (removed > 1) { activeUnconditional = null; } if (isBranch(seen)) { if (activeUnconditional != null) { activeUnconditional = null; } int target = getBranchTarget(); if (getBranchOffset() > 0) { if ((seen == GOTO) || (seen == GOTO_W)) { gotoBranchPCs.set(target); } else if ((catchPCs == null) || !catchPCs.get(getNextPC())) { ifBlocks.addLast(new IfBlock(getNextPC(), target)); } } else { removeLoopBlocks(target); } } else if (isReturn(seen)) { if ((activeUnconditional != null) && !gotoBranchPCs.get(activeUnconditional.getEnd())) { int ifSize = activeUnconditional.getEnd() - activeUnconditional.getStart(); int elseSize = getPC() - activeUnconditional.getEnd(); double ratio = (double) ifSize / (double) elseSize; if (ratio > lowBugRatioLimit) { bugReporter .reportBug(new BugInstance(this, BugType.BL_BURYING_LOGIC.name(), ratio > normalBugRatioLimit ? 
NORMAL_PRIORITY : LOW_PRIORITY) .addClass(this).addMethod(this).addSourceLineRange(this, activeUnconditional.getStart(), activeUnconditional.getEnd())); isReported = true; } } else if (!ifBlocks.isEmpty() && (getNextPC() == ifBlocks.getFirst().getEnd())) { activeUnconditional = ifBlocks.getFirst(); } } } finally { stack.sawOpcode(this, seen); } } private void removeLoopBlocks(int target) { Iterator<IfBlock> it = ifBlocks.descendingIterator(); while (it.hasNext()) { if (it.next().getStart() >= target) { it.remove(); } else { return; } } } static class IfBlock { private int start; private int end; public IfBlock(int s, int e) { start = s; end = e; } public int getStart() { return start; } public int getEnd() { return end; } @Override public String toString() { return ToString.build(this); } } }
catch the exceptions expected
src/com/mebigfatguy/fbcontrib/detect/BuryingLogic.java
catch the exceptions expected
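The fb-contrib diff above narrows two catch (Exception e) blocks to catch (NumberFormatException e), matching the commit subject. A minimal sketch of the resulting property-parsing pattern; ratioLimit is a hypothetical helper, not a method of BuryingLogic, though the property key and default come from the record:

public class NarrowCatchSketch {
    // Parse a tuning property, falling back to the default when the value is
    // missing, non-numeric, or non-positive -- catching only the exception
    // that Double.parseDouble is documented to throw.
    static double ratioLimit(String property, double defaultValue) {
        String raw = System.getProperty(property);
        if (raw == null) {
            return defaultValue;
        }
        try {
            double parsed = Double.parseDouble(raw);
            return parsed > 0 ? parsed : defaultValue;  // reject non-positive limits
        } catch (NumberFormatException e) {             // the expected failure only
            return defaultValue;
        }
    }

    public static void main(String[] args) {
        System.out.println(ratioLimit("fb-contrib.bl.low_ratio", 12.0));
    }
}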
Java
lgpl-2.1
ed8d76c90b55bde9755095cfb635b0d4cc18411e
0
kurtwalker/pdi-agile-bi-plugin,mdamour1976/pdi-agile-bi-plugin,mkambol/pdi-agile-bi-plugin,pedrofvteixeira/pdi-agile-bi-plugin,mkambol/pdi-agile-bi-plugin,pentaho-nbaker/pdi-agile-bi-plugin,pentaho-nbaker/pdi-agile-bi-plugin,bmorrise/pdi-agile-bi-plugin,rmansoor/pdi-agile-bi-plugin,pedrofvteixeira/pdi-agile-bi-plugin,kurtwalker/pdi-agile-bi-plugin,bmorrise/pdi-agile-bi-plugin,rmansoor/pdi-agile-bi-plugin,rmansoor/pdi-agile-bi-plugin,mdamour1976/pdi-agile-bi-plugin
package org.pentaho.agilebi.pdi.visualizations.prpt; import java.io.File; import java.util.Locale; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Display; import org.pentaho.agilebi.pdi.PDIMessages; import org.pentaho.agilebi.pdi.modeler.ModelerException; import org.pentaho.agilebi.pdi.modeler.ModelerWorkspace; import org.pentaho.agilebi.pdi.modeler.ModelerWorkspaceUtil; import org.pentaho.agilebi.pdi.modeler.XulUI; import org.pentaho.agilebi.pdi.perspective.AgileBiVisualizationPerspective; import org.pentaho.agilebi.pdi.perspective.AbstractPerspective.XulTabAndPanel; import org.pentaho.agilebi.pdi.visualizations.AbstractVisualization; import org.pentaho.agilebi.pdi.visualizations.IVisualization; import org.pentaho.agilebi.pdi.wizard.EmbeddedWizard; import org.pentaho.di.core.Const; import org.pentaho.di.core.EngineMetaInterface; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.gui.SpoonFactory; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.ui.core.dialog.ErrorDialog; import org.pentaho.di.ui.spoon.Spoon; import org.pentaho.di.ui.spoon.SpoonPerspectiveManager; import org.pentaho.reporting.engine.classic.core.ClassicEngineBoot; import org.pentaho.reporting.engine.classic.core.MasterReport; import org.pentaho.reporting.libraries.base.util.ObjectUtilities; import org.pentaho.reporting.libraries.fonts.LibFontBoot; import org.pentaho.reporting.libraries.resourceloader.LibLoaderBoot; import org.pentaho.reporting.libraries.resourceloader.Resource; import org.pentaho.reporting.libraries.resourceloader.ResourceManager; import org.pentaho.ui.xul.XulDomContainer; import org.pentaho.ui.xul.XulException; import org.pentaho.ui.xul.components.WaitBoxRunnable; import org.pentaho.ui.xul.components.XulWaitBox; import org.pentaho.ui.xul.dom.Document; import org.pentaho.ui.xul.swt.SwtXulLoader; import org.pentaho.ui.xul.swt.SwtXulRunner; import org.w3c.dom.Node; public class PRPTVisualization extends AbstractVisualization { public PRPTVisualization(){ super(); System.setProperty("org.jpedal.suppressViewerPopups", "true"); } private static Log logger = LogFactory.getLog(PRPTVisualization.class); public void createVisualizationFromModel(String modelFileLocation, String modelId) { // TODO Auto-generated method stub } public boolean accepts(String fileName) { return fileName.endsWith(".prpt"); } public boolean acceptsXml(String nodeName) { return false; } public String[] getFileTypeDisplayNames(Locale locale) { return new String[]{BaseMessages.getString(IVisualization.class, "prpt_file_type_name")}; } public String getRootNodeName() { return null; } public String[] getSupportedExtensions() { return new String[]{"prpt"}; } public boolean open(Node transNode, String fname, boolean importfile) { try{ if(ClassicEngineBoot.getInstance().isBootDone() == false){ ObjectUtilities.setClassLoader(getClass().getClassLoader()); ObjectUtilities.setClassLoaderSource(ObjectUtilities.CLASS_CONTEXT); LibLoaderBoot.getInstance().start(); LibFontBoot.getInstance().start(); ClassicEngineBoot.getInstance().start(); } MasterReport masterReport = null; try { ResourceManager theResourceManager = new ResourceManager(); theResourceManager.registerDefaults(); File theReportFile = new File(fname); Resource theResource = theResourceManager.createDirectly(theReportFile, MasterReport.class); masterReport = (MasterReport) theResource.getResource(); } catch(Exception e){ logger.error(e); } 
XulTabAndPanel tabAndPanel = AgileBiVisualizationPerspective.getInstance().createTab(); AgileBiVisualizationPerspective.getInstance().setNameForTab(tabAndPanel.tab, fname); try { SpoonPerspectiveManager.getInstance().activatePerspective(AgileBiVisualizationPerspective.class); } catch (KettleException e) { logger.error(e); return false; } SwtXulLoader theXulLoader = new SwtXulLoader(); theXulLoader.registerClassLoader(getClass().getClassLoader()); theXulLoader.register("PRPT", "org.pentaho.agilebi.pdi.visualizations.xul.PrptViewerTag"); XulDomContainer theXulContainer = theXulLoader.loadXul("org/pentaho/agilebi/pdi/visualizations/prpt/prptVisualization.xul", new PDIMessages(IVisualization.class)); PRPTMeta meta = new PRPTMeta(); meta.setTab(tabAndPanel.tab); AgileBiVisualizationPerspective.getInstance().setMetaForTab(tabAndPanel.tab, meta); PRPTVisualizationController controller = new PRPTVisualizationController(meta, masterReport); theXulContainer.addEventHandler(controller); Composite theMainBox = (Composite) theXulContainer.getDocumentRoot().getElementById("mainContainer").getManagedObject(); SwtXulRunner theRunner = new SwtXulRunner(); theRunner.addContainer(theXulContainer); theRunner.initialize(); theMainBox.setParent((Composite) tabAndPanel.panel.getManagedObject()); ((Composite) tabAndPanel.panel.getManagedObject()).layout(true); AgileBiVisualizationPerspective.getInstance().setSelectedMeta(meta); return true; } catch(Exception e){ e.printStackTrace(); } return false; } public boolean save(EngineMetaInterface meta, String fname, boolean isExport) { try{ PRPTMeta prptMeta = (PRPTMeta) meta; prptMeta.save(fname); if(fname.endsWith(".prpt") == false){ fname +=".prpt"; } File f = new File(fname); String fullPath = f.getAbsolutePath(); Spoon spoon = ((Spoon)SpoonFactory.getInstance()); spoon.getProperties().addLastFile("Model", fullPath, null, false, null); AgileBiVisualizationPerspective.getInstance().setNameForTab(prptMeta.getTab(), fname); spoon.addMenuLast(); return true; } catch(ModelerException e){ logger.error(e); return false; } } public void syncMetaName(EngineMetaInterface meta, String name) { } public void createVisualizationFromModel(final ModelerWorkspace model) { XulWaitBox box; try { Spoon spoon = ((Spoon)SpoonFactory.getInstance()); Document document = spoon.getMainSpoonContainer().getDocumentRoot(); box = (XulWaitBox) document.createElement("waitbox"); box.setIndeterminate(true); box.setMaximum(10); box.setCanCancel(false); box.setTitle(BaseMessages.getString(XulUI.class, "wait_dialog_title")); box.setMessage(BaseMessages.getString(XulUI.class, "wait_dialog_message")); box.setCancelLabel(BaseMessages.getString(XulUI.class, "wait_dialog_btn")); box.setDialogParent(((Spoon)SpoonFactory.getInstance()).getShell()); box.setRunnable(new WaitBoxRunnable(box){ boolean canceled = false; @Override public void run() { try { ObjectUtilities.setClassLoader(getClass().getClassLoader()); ObjectUtilities.setClassLoaderSource(ObjectUtilities.CLASS_CONTEXT); if(ClassicEngineBoot.getInstance().isBootDone() == false){ ClassicEngineBoot engineBoot = ClassicEngineBoot.getInstance(); engineBoot.start(); } model.setAutoModel(false); EmbeddedWizard wizard = new EmbeddedWizard(model); waitBox.stop(); wizard.run(null); } catch (final Exception e) { logger.error(e); Display.getDefault().asyncExec(new Runnable(){ public void run() { new ErrorDialog(((Spoon) SpoonFactory.getInstance()).getShell(), "Error", "Error creating visualization", e); } }); } waitBox.stop(); } @Override public void cancel() { 
canceled =true; } }); box.start(); } catch (XulException e1) { logger.error(e1); new ErrorDialog(((Spoon) SpoonFactory.getInstance()).getShell(), "Error", "Error creating visualization", e1); } } public void createVisualizationFromMasterReport(MasterReport rpt){ try{ if(ClassicEngineBoot.getInstance().isBootDone() == false){ LibLoaderBoot.getInstance().start(); LibFontBoot.getInstance().start(); ClassicEngineBoot.getInstance().start(); } XulTabAndPanel tabAndPanel = AgileBiVisualizationPerspective.getInstance().createTab(); AgileBiVisualizationPerspective.getInstance().setNameForTab(tabAndPanel.tab, "Untitled Report"); try { SpoonPerspectiveManager.getInstance().activatePerspective(AgileBiVisualizationPerspective.class); } catch (KettleException e) { logger.error(e); return; } SwtXulLoader theXulLoader = new SwtXulLoader(); theXulLoader.registerClassLoader(getClass().getClassLoader()); theXulLoader.register("PRPT", "org.pentaho.agilebi.pdi.visualizations.xul.PrptViewerTag"); XulDomContainer theXulContainer = theXulLoader.loadXul("org/pentaho/agilebi/pdi/visualizations/prpt/prptVisualization.xul"); Composite theMainBox = (Composite) theXulContainer.getDocumentRoot().getElementById("mainContainer").getManagedObject(); PRPTMeta meta = new PRPTMeta(); AgileBiVisualizationPerspective.getInstance().setMetaForTab(tabAndPanel.tab, meta); meta.setTab(tabAndPanel.tab); PRPTVisualizationController controller = new PRPTVisualizationController(meta, rpt); theXulContainer.addEventHandler(controller); SwtXulRunner theRunner = new SwtXulRunner(); theRunner.addContainer(theXulContainer); theRunner.initialize(); theMainBox.setParent((Composite) tabAndPanel.panel.getManagedObject()); ((Composite) tabAndPanel.panel.getManagedObject()).layout(true); AgileBiVisualizationPerspective.getInstance().setSelectedMeta(meta); } catch(Exception e){ e.printStackTrace(); } } }
src/org/pentaho/agilebi/pdi/visualizations/prpt/PRPTVisualization.java
package org.pentaho.agilebi.pdi.visualizations.prpt; import java.io.File; import java.util.Locale; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Display; import org.pentaho.agilebi.pdi.PDIMessages; import org.pentaho.agilebi.pdi.modeler.ModelerException; import org.pentaho.agilebi.pdi.modeler.ModelerWorkspace; import org.pentaho.agilebi.pdi.modeler.ModelerWorkspaceUtil; import org.pentaho.agilebi.pdi.modeler.XulUI; import org.pentaho.agilebi.pdi.perspective.AgileBiVisualizationPerspective; import org.pentaho.agilebi.pdi.perspective.AbstractPerspective.XulTabAndPanel; import org.pentaho.agilebi.pdi.visualizations.AbstractVisualization; import org.pentaho.agilebi.pdi.visualizations.IVisualization; import org.pentaho.agilebi.pdi.wizard.EmbeddedWizard; import org.pentaho.di.core.Const; import org.pentaho.di.core.EngineMetaInterface; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.gui.SpoonFactory; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.ui.core.dialog.ErrorDialog; import org.pentaho.di.ui.spoon.Spoon; import org.pentaho.di.ui.spoon.SpoonPerspectiveManager; import org.pentaho.reporting.engine.classic.core.ClassicEngineBoot; import org.pentaho.reporting.engine.classic.core.MasterReport; import org.pentaho.reporting.libraries.base.util.ObjectUtilities; import org.pentaho.reporting.libraries.fonts.LibFontBoot; import org.pentaho.reporting.libraries.resourceloader.LibLoaderBoot; import org.pentaho.reporting.libraries.resourceloader.Resource; import org.pentaho.reporting.libraries.resourceloader.ResourceManager; import org.pentaho.ui.xul.XulDomContainer; import org.pentaho.ui.xul.XulException; import org.pentaho.ui.xul.components.WaitBoxRunnable; import org.pentaho.ui.xul.components.XulWaitBox; import org.pentaho.ui.xul.dom.Document; import org.pentaho.ui.xul.swt.SwtXulLoader; import org.pentaho.ui.xul.swt.SwtXulRunner; import org.w3c.dom.Node; public class PRPTVisualization extends AbstractVisualization { public PRPTVisualization(){ super(); System.setProperty("org.jpedal.suppressViewerPopups", "true"); } private static Log logger = LogFactory.getLog(PRPTVisualization.class); public void createVisualizationFromModel(String modelFileLocation, String modelId) { // TODO Auto-generated method stub } public boolean accepts(String fileName) { return fileName.endsWith(".prpt"); } public boolean acceptsXml(String nodeName) { return false; } public String[] getFileTypeDisplayNames(Locale locale) { return new String[]{BaseMessages.getString(IVisualization.class, "prpt_file_type_name")}; } public String getRootNodeName() { return null; } public String[] getSupportedExtensions() { return new String[]{"prpt"}; } public boolean open(Node transNode, String fname, boolean importfile) { try{ if(ClassicEngineBoot.getInstance().isBootDone() == false){ LibLoaderBoot.getInstance().start(); LibFontBoot.getInstance().start(); ClassicEngineBoot.getInstance().start(); } MasterReport masterReport = null; try { ResourceManager theResourceManager = new ResourceManager(); theResourceManager.registerDefaults(); File theReportFile = new File(fname); Resource theResource = theResourceManager.createDirectly(theReportFile, MasterReport.class); masterReport = (MasterReport) theResource.getResource(); } catch(Exception e){ logger.error(e); } XulTabAndPanel tabAndPanel = AgileBiVisualizationPerspective.getInstance().createTab(); 
AgileBiVisualizationPerspective.getInstance().setNameForTab(tabAndPanel.tab, fname); try { SpoonPerspectiveManager.getInstance().activatePerspective(AgileBiVisualizationPerspective.class); } catch (KettleException e) { logger.error(e); return false; } SwtXulLoader theXulLoader = new SwtXulLoader(); theXulLoader.registerClassLoader(getClass().getClassLoader()); theXulLoader.register("PRPT", "org.pentaho.agilebi.pdi.visualizations.xul.PrptViewerTag"); XulDomContainer theXulContainer = theXulLoader.loadXul("org/pentaho/agilebi/pdi/visualizations/prpt/prptVisualization.xul", new PDIMessages(IVisualization.class)); PRPTMeta meta = new PRPTMeta(); meta.setTab(tabAndPanel.tab); AgileBiVisualizationPerspective.getInstance().setMetaForTab(tabAndPanel.tab, meta); PRPTVisualizationController controller = new PRPTVisualizationController(meta, masterReport); theXulContainer.addEventHandler(controller); Composite theMainBox = (Composite) theXulContainer.getDocumentRoot().getElementById("mainContainer").getManagedObject(); SwtXulRunner theRunner = new SwtXulRunner(); theRunner.addContainer(theXulContainer); theRunner.initialize(); theMainBox.setParent((Composite) tabAndPanel.panel.getManagedObject()); ((Composite) tabAndPanel.panel.getManagedObject()).layout(true); AgileBiVisualizationPerspective.getInstance().setSelectedMeta(meta); return true; } catch(Exception e){ e.printStackTrace(); } return false; } public boolean save(EngineMetaInterface meta, String fname, boolean isExport) { try{ PRPTMeta prptMeta = (PRPTMeta) meta; prptMeta.save(fname); if(fname.endsWith(".prpt") == false){ fname +=".prpt"; } File f = new File(fname); String fullPath = f.getAbsolutePath(); Spoon spoon = ((Spoon)SpoonFactory.getInstance()); spoon.getProperties().addLastFile("Model", fullPath, null, false, null); AgileBiVisualizationPerspective.getInstance().setNameForTab(prptMeta.getTab(), fname); spoon.addMenuLast(); return true; } catch(ModelerException e){ logger.error(e); return false; } } public void syncMetaName(EngineMetaInterface meta, String name) { } public void createVisualizationFromModel(final ModelerWorkspace model) { XulWaitBox box; try { Spoon spoon = ((Spoon)SpoonFactory.getInstance()); Document document = spoon.getMainSpoonContainer().getDocumentRoot(); box = (XulWaitBox) document.createElement("waitbox"); box.setIndeterminate(true); box.setMaximum(10); box.setCanCancel(false); box.setTitle(BaseMessages.getString(XulUI.class, "wait_dialog_title")); box.setMessage(BaseMessages.getString(XulUI.class, "wait_dialog_message")); box.setCancelLabel(BaseMessages.getString(XulUI.class, "wait_dialog_btn")); box.setDialogParent(((Spoon)SpoonFactory.getInstance()).getShell()); box.setRunnable(new WaitBoxRunnable(box){ boolean canceled = false; @Override public void run() { try { ObjectUtilities.setClassLoader(getClass().getClassLoader()); ObjectUtilities.setClassLoaderSource(ObjectUtilities.CLASS_CONTEXT); if(ClassicEngineBoot.getInstance().isBootDone() == false){ ClassicEngineBoot engineBoot = ClassicEngineBoot.getInstance(); engineBoot.start(); } model.setAutoModel(false); EmbeddedWizard wizard = new EmbeddedWizard(model); waitBox.stop(); wizard.run(null); } catch (final Exception e) { logger.error(e); Display.getDefault().asyncExec(new Runnable(){ public void run() { new ErrorDialog(((Spoon) SpoonFactory.getInstance()).getShell(), "Error", "Error creating visualization", e); } }); } waitBox.stop(); } @Override public void cancel() { canceled =true; } }); box.start(); } catch (XulException e1) { logger.error(e1); new 
ErrorDialog(((Spoon) SpoonFactory.getInstance()).getShell(), "Error", "Error creating visualization", e1); } } public void createVisualizationFromMasterReport(MasterReport rpt){ try{ if(ClassicEngineBoot.getInstance().isBootDone() == false){ LibLoaderBoot.getInstance().start(); LibFontBoot.getInstance().start(); ClassicEngineBoot.getInstance().start(); } XulTabAndPanel tabAndPanel = AgileBiVisualizationPerspective.getInstance().createTab(); AgileBiVisualizationPerspective.getInstance().setNameForTab(tabAndPanel.tab, "Untitled Report"); try { SpoonPerspectiveManager.getInstance().activatePerspective(AgileBiVisualizationPerspective.class); } catch (KettleException e) { logger.error(e); return; } SwtXulLoader theXulLoader = new SwtXulLoader(); theXulLoader.registerClassLoader(getClass().getClassLoader()); theXulLoader.register("PRPT", "org.pentaho.agilebi.pdi.visualizations.xul.PrptViewerTag"); XulDomContainer theXulContainer = theXulLoader.loadXul("org/pentaho/agilebi/pdi/visualizations/prpt/prptVisualization.xul"); Composite theMainBox = (Composite) theXulContainer.getDocumentRoot().getElementById("mainContainer").getManagedObject(); PRPTMeta meta = new PRPTMeta(); AgileBiVisualizationPerspective.getInstance().setMetaForTab(tabAndPanel.tab, meta); meta.setTab(tabAndPanel.tab); PRPTVisualizationController controller = new PRPTVisualizationController(meta, rpt); theXulContainer.addEventHandler(controller); SwtXulRunner theRunner = new SwtXulRunner(); theRunner.addContainer(theXulContainer); theRunner.initialize(); theMainBox.setParent((Composite) tabAndPanel.panel.getManagedObject()); ((Composite) tabAndPanel.panel.getManagedObject()).layout(true); AgileBiVisualizationPerspective.getInstance().setSelectedMeta(meta); } catch(Exception e){ e.printStackTrace(); } } }
[PDI-2840] fix for classloader issue opening PRPT when the reporting engine is not booted
src/org/pentaho/agilebi/pdi/visualizations/prpt/PRPTVisualization.java
[PDI-2840] fix for classloader issue opening PRPT when the reporting engine is not booted
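The PDI-2840 fix above works by pinning LibLoader's resource classloader to the plugin's own loader before booting the reporting engine. A condensed sketch of that boot guard, assuming the Pentaho reporting jars from the record's import list are on the classpath; bootReportingEngineIfNeeded is a hypothetical name for logic that is inline in open() in the record:

import org.pentaho.reporting.engine.classic.core.ClassicEngineBoot;
import org.pentaho.reporting.libraries.base.util.ObjectUtilities;
import org.pentaho.reporting.libraries.fonts.LibFontBoot;
import org.pentaho.reporting.libraries.resourceloader.LibLoaderBoot;

public class ReportingBootGuard {
    // Every engine call below appears verbatim in the record's open() method;
    // only the extraction into a named method is illustrative.
    void bootReportingEngineIfNeeded() {
        if (!ClassicEngineBoot.getInstance().isBootDone()) {
            // Pin resource lookups to this classloader *before* boot so the
            // engine resolves its modules inside the plugin's loader.
            ObjectUtilities.setClassLoader(getClass().getClassLoader());
            ObjectUtilities.setClassLoaderSource(ObjectUtilities.CLASS_CONTEXT);
            LibLoaderBoot.getInstance().start();
            LibFontBoot.getInstance().start();
            ClassicEngineBoot.getInstance().start();
        }
    }
}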
Java
apache-2.0
444b41a0a4f4dccc0cd3fc1b503d7e25d25388f0
0
ok2c/httpclient,UlrichColby/httpcomponents-client,apache/httpcomponents-client
/* * ==================================================================== * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. * */ package org.apache.http.impl.client; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Queue; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.http.FormattedHeader; import org.apache.http.Header; import org.apache.http.HttpHost; import org.apache.http.HttpResponse; import org.apache.http.annotation.Immutable; import org.apache.http.auth.AuthOption; import org.apache.http.auth.AuthScheme; import org.apache.http.auth.AuthSchemeProvider; import org.apache.http.auth.AuthScope; import org.apache.http.auth.Credentials; import org.apache.http.auth.MalformedChallengeException; import org.apache.http.client.AuthCache; import org.apache.http.client.AuthenticationStrategy; import org.apache.http.client.CredentialsProvider; import org.apache.http.client.config.AuthSchemes; import org.apache.http.client.config.RequestConfig; import org.apache.http.client.protocol.HttpClientContext; import org.apache.http.config.Lookup; import org.apache.http.protocol.HTTP; import org.apache.http.protocol.HttpContext; import org.apache.http.util.Args; import org.apache.http.util.CharArrayBuffer; @Immutable abstract class AuthenticationStrategyImpl implements AuthenticationStrategy { private final Log log = LogFactory.getLog(getClass()); private static final List<String> DEFAULT_SCHEME_PRIORITY = Collections.unmodifiableList(Arrays.asList( AuthSchemes.SPNEGO, AuthSchemes.KERBEROS, AuthSchemes.NTLM, AuthSchemes.DIGEST, AuthSchemes.BASIC)); private final int challengeCode; private final String headerName; AuthenticationStrategyImpl(final int challengeCode, final String headerName) { super(); this.challengeCode = challengeCode; this.headerName = headerName; } @Override public boolean isAuthenticationRequested( final HttpHost authhost, final HttpResponse response, final HttpContext context) { Args.notNull(response, "HTTP response"); final int status = response.getStatusLine().getStatusCode(); return status == this.challengeCode; } @Override public Map<String, Header> getChallenges( final HttpHost authhost, final HttpResponse response, final HttpContext context) throws MalformedChallengeException { Args.notNull(response, "HTTP response"); final Header[] headers = 
response.getHeaders(this.headerName); final Map<String, Header> map = new HashMap<String, Header>(headers.length); for (final Header header : headers) { final CharArrayBuffer buffer; int pos; if (header instanceof FormattedHeader) { buffer = ((FormattedHeader) header).getBuffer(); pos = ((FormattedHeader) header).getValuePos(); } else { final String s = header.getValue(); if (s == null) { throw new MalformedChallengeException("Header value is null"); } buffer = new CharArrayBuffer(s.length()); buffer.append(s); pos = 0; } while (pos < buffer.length() && HTTP.isWhitespace(buffer.charAt(pos))) { pos++; } final int beginIndex = pos; while (pos < buffer.length() && !HTTP.isWhitespace(buffer.charAt(pos))) { pos++; } final int endIndex = pos; final String s = buffer.substring(beginIndex, endIndex); map.put(s.toLowerCase(Locale.ROOT), header); } return map; } abstract Collection<String> getPreferredAuthSchemes(RequestConfig config); @Override public Queue<AuthOption> select( final Map<String, Header> challenges, final HttpHost authhost, final HttpResponse response, final HttpContext context) throws MalformedChallengeException { Args.notNull(challenges, "Map of auth challenges"); Args.notNull(authhost, "Host"); Args.notNull(response, "HTTP response"); Args.notNull(context, "HTTP context"); final HttpClientContext clientContext = HttpClientContext.adapt(context); final Queue<AuthOption> options = new LinkedList<AuthOption>(); final Lookup<AuthSchemeProvider> registry = clientContext.getAuthSchemeRegistry(); if (registry == null) { this.log.debug("Auth scheme registry not set in the context"); return options; } final CredentialsProvider credsProvider = clientContext.getCredentialsProvider(); if (credsProvider == null) { this.log.debug("Credentials provider not set in the context"); return options; } final RequestConfig config = clientContext.getRequestConfig(); Collection<String> authPrefs = getPreferredAuthSchemes(config); if (authPrefs == null) { authPrefs = DEFAULT_SCHEME_PRIORITY; } if (this.log.isDebugEnabled()) { this.log.debug("Authentication schemes in the order of preference: " + authPrefs); } for (final String id: authPrefs) { final Header challenge = challenges.get(id.toLowerCase(Locale.ROOT)); if (challenge != null) { final AuthSchemeProvider authSchemeProvider = registry.lookup(id); if (authSchemeProvider == null) { if (this.log.isWarnEnabled()) { this.log.warn("Authentication scheme " + id + " not supported"); // Try again } continue; } final AuthScheme authScheme = authSchemeProvider.create(context); authScheme.processChallenge(challenge); final AuthScope authScope = new AuthScope( authhost.getHostName(), authhost.getPort(), authScheme.getRealm(), authScheme.getSchemeName()); final Credentials credentials = credsProvider.getCredentials(authScope); if (credentials != null) { options.add(new AuthOption(authScheme, credentials)); } } else { if (this.log.isDebugEnabled()) { this.log.debug("Challenge for " + id + " authentication scheme not available"); // Try again } } } return options; } @Override public void authSucceeded( final HttpHost authhost, final AuthScheme authScheme, final HttpContext context) { Args.notNull(authhost, "Host"); Args.notNull(authScheme, "Auth scheme"); Args.notNull(context, "HTTP context"); final HttpClientContext clientContext = HttpClientContext.adapt(context); if (isCachable(authScheme)) { AuthCache authCache = clientContext.getAuthCache(); if (authCache == null) { authCache = new BasicAuthCache(); clientContext.setAuthCache(authCache); } if 
(this.log.isDebugEnabled()) { this.log.debug("Caching '" + authScheme.getSchemeName() + "' auth scheme for " + authhost); } authCache.put(authhost, authScheme); } } protected boolean isCachable(final AuthScheme authScheme) { if (authScheme == null || !authScheme.isComplete()) { return false; } final String schemeName = authScheme.getSchemeName(); return schemeName.equalsIgnoreCase(AuthSchemes.BASIC) || schemeName.equalsIgnoreCase(AuthSchemes.DIGEST); } @Override public void authFailed( final HttpHost authhost, final AuthScheme authScheme, final HttpContext context) { Args.notNull(authhost, "Host"); Args.notNull(context, "HTTP context"); final HttpClientContext clientContext = HttpClientContext.adapt(context); final AuthCache authCache = clientContext.getAuthCache(); if (authCache != null) { if (this.log.isDebugEnabled()) { this.log.debug("Clearing cached auth scheme for " + authhost); } authCache.remove(authhost); } } }
httpclient/src/main/java/org/apache/http/impl/client/AuthenticationStrategyImpl.java
/* * ==================================================================== * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. * */ package org.apache.http.impl.client; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Queue; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.http.FormattedHeader; import org.apache.http.Header; import org.apache.http.HttpHost; import org.apache.http.HttpResponse; import org.apache.http.annotation.Immutable; import org.apache.http.auth.AuthOption; import org.apache.http.auth.AuthScheme; import org.apache.http.auth.AuthSchemeProvider; import org.apache.http.auth.AuthScope; import org.apache.http.auth.Credentials; import org.apache.http.auth.MalformedChallengeException; import org.apache.http.client.AuthCache; import org.apache.http.client.AuthenticationStrategy; import org.apache.http.client.CredentialsProvider; import org.apache.http.client.config.AuthSchemes; import org.apache.http.client.config.RequestConfig; import org.apache.http.client.protocol.HttpClientContext; import org.apache.http.config.Lookup; import org.apache.http.protocol.HTTP; import org.apache.http.protocol.HttpContext; import org.apache.http.util.Args; import org.apache.http.util.CharArrayBuffer; @Immutable abstract class AuthenticationStrategyImpl implements AuthenticationStrategy { private final Log log = LogFactory.getLog(getClass()); private static final List<String> DEFAULT_SCHEME_PRIORITY = Collections.unmodifiableList(Arrays.asList(AuthSchemes.SPNEGO, AuthSchemes.KERBEROS, AuthSchemes.NTLM, AuthSchemes.DIGEST, AuthSchemes.BASIC)); private final int challengeCode; private final String headerName; AuthenticationStrategyImpl(final int challengeCode, final String headerName) { super(); this.challengeCode = challengeCode; this.headerName = headerName; } @Override public boolean isAuthenticationRequested( final HttpHost authhost, final HttpResponse response, final HttpContext context) { Args.notNull(response, "HTTP response"); final int status = response.getStatusLine().getStatusCode(); return status == this.challengeCode; } @Override public Map<String, Header> getChallenges( final HttpHost authhost, final HttpResponse response, final HttpContext context) throws MalformedChallengeException { Args.notNull(response, "HTTP response"); final Header[] headers = 
response.getHeaders(this.headerName); final Map<String, Header> map = new HashMap<String, Header>(headers.length); for (final Header header : headers) { final CharArrayBuffer buffer; int pos; if (header instanceof FormattedHeader) { buffer = ((FormattedHeader) header).getBuffer(); pos = ((FormattedHeader) header).getValuePos(); } else { final String s = header.getValue(); if (s == null) { throw new MalformedChallengeException("Header value is null"); } buffer = new CharArrayBuffer(s.length()); buffer.append(s); pos = 0; } while (pos < buffer.length() && HTTP.isWhitespace(buffer.charAt(pos))) { pos++; } final int beginIndex = pos; while (pos < buffer.length() && !HTTP.isWhitespace(buffer.charAt(pos))) { pos++; } final int endIndex = pos; final String s = buffer.substring(beginIndex, endIndex); map.put(s.toLowerCase(Locale.ROOT), header); } return map; } abstract Collection<String> getPreferredAuthSchemes(RequestConfig config); @Override public Queue<AuthOption> select( final Map<String, Header> challenges, final HttpHost authhost, final HttpResponse response, final HttpContext context) throws MalformedChallengeException { Args.notNull(challenges, "Map of auth challenges"); Args.notNull(authhost, "Host"); Args.notNull(response, "HTTP response"); Args.notNull(context, "HTTP context"); final HttpClientContext clientContext = HttpClientContext.adapt(context); final Queue<AuthOption> options = new LinkedList<AuthOption>(); final Lookup<AuthSchemeProvider> registry = clientContext.getAuthSchemeRegistry(); if (registry == null) { this.log.debug("Auth scheme registry not set in the context"); return options; } final CredentialsProvider credsProvider = clientContext.getCredentialsProvider(); if (credsProvider == null) { this.log.debug("Credentials provider not set in the context"); return options; } final RequestConfig config = clientContext.getRequestConfig(); Collection<String> authPrefs = getPreferredAuthSchemes(config); if (authPrefs == null) { authPrefs = DEFAULT_SCHEME_PRIORITY; } if (this.log.isDebugEnabled()) { this.log.debug("Authentication schemes in the order of preference: " + authPrefs); } for (final String id: authPrefs) { final Header challenge = challenges.get(id.toLowerCase(Locale.ROOT)); if (challenge != null) { final AuthSchemeProvider authSchemeProvider = registry.lookup(id); if (authSchemeProvider == null) { if (this.log.isWarnEnabled()) { this.log.warn("Authentication scheme " + id + " not supported"); // Try again } continue; } final AuthScheme authScheme = authSchemeProvider.create(context); authScheme.processChallenge(challenge); final AuthScope authScope = new AuthScope( authhost.getHostName(), authhost.getPort(), authScheme.getRealm(), authScheme.getSchemeName()); final Credentials credentials = credsProvider.getCredentials(authScope); if (credentials != null) { options.add(new AuthOption(authScheme, credentials)); } } else { if (this.log.isDebugEnabled()) { this.log.debug("Challenge for " + id + " authentication scheme not available"); // Try again } } } return options; } @Override public void authSucceeded( final HttpHost authhost, final AuthScheme authScheme, final HttpContext context) { Args.notNull(authhost, "Host"); Args.notNull(authScheme, "Auth scheme"); Args.notNull(context, "HTTP context"); final HttpClientContext clientContext = HttpClientContext.adapt(context); if (isCachable(authScheme)) { AuthCache authCache = clientContext.getAuthCache(); if (authCache == null) { authCache = new BasicAuthCache(); clientContext.setAuthCache(authCache); } if 
(this.log.isDebugEnabled()) { this.log.debug("Caching '" + authScheme.getSchemeName() + "' auth scheme for " + authhost); } authCache.put(authhost, authScheme); } } protected boolean isCachable(final AuthScheme authScheme) { if (authScheme == null || !authScheme.isComplete()) { return false; } final String schemeName = authScheme.getSchemeName(); return schemeName.equalsIgnoreCase(AuthSchemes.BASIC) || schemeName.equalsIgnoreCase(AuthSchemes.DIGEST); } @Override public void authFailed( final HttpHost authhost, final AuthScheme authScheme, final HttpContext context) { Args.notNull(authhost, "Host"); Args.notNull(context, "HTTP context"); final HttpClientContext clientContext = HttpClientContext.adapt(context); final AuthCache authCache = clientContext.getAuthCache(); if (authCache != null) { if (this.log.isDebugEnabled()) { this.log.debug("Clearing cached auth scheme for " + authhost); } authCache.remove(authhost); } } }
Align scheme names
httpclient/src/main/java/org/apache/http/impl/client/AuthenticationStrategyImpl.java
Align scheme names
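The strategy in the record above only falls back to DEFAULT_SCHEME_PRIORITY when the RequestConfig carries no preference, so callers can reorder schemes per request. A minimal sketch using the HttpClient 4.x RequestConfig builder (setTargetPreferredAuthSchemes is the standard 4.3+ builder method); the URL is a placeholder:

import java.util.Arrays;

import org.apache.http.client.config.AuthSchemes;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.HttpGet;

public class PreferredSchemesSketch {
    public static void main(String[] args) {
        // Prefer DIGEST over BASIC for this request; select() above then
        // walks these ids instead of the built-in priority list.
        RequestConfig config = RequestConfig.custom()
                .setTargetPreferredAuthSchemes(
                        Arrays.asList(AuthSchemes.DIGEST, AuthSchemes.BASIC))
                .build();
        HttpGet get = new HttpGet("http://example.com/protected");
        get.setConfig(config);
    }
}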
Java
apache-2.0
d298bb3262e50116122fe8f3d83f1a78646bb77f
0
NLeSC/Xenon,NLeSC/Xenon
/** * Copyright 2013 Netherlands eScience Center * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package nl.esciencecenter.xenon.adaptors; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.PrintWriter; import java.util.Arrays; import nl.esciencecenter.xenon.JobException; import nl.esciencecenter.xenon.Xenon; import nl.esciencecenter.xenon.XenonException; import nl.esciencecenter.xenon.XenonFactory; import nl.esciencecenter.xenon.XenonTestWatcher; import nl.esciencecenter.xenon.credentials.Credentials; import nl.esciencecenter.xenon.files.Files; import nl.esciencecenter.xenon.files.OpenOption; import nl.esciencecenter.xenon.files.Path; import nl.esciencecenter.xenon.jobs.InvalidJobDescriptionException; import nl.esciencecenter.xenon.jobs.Job; import nl.esciencecenter.xenon.jobs.JobCanceledException; import nl.esciencecenter.xenon.jobs.JobDescription; import nl.esciencecenter.xenon.jobs.JobStatus; import nl.esciencecenter.xenon.jobs.Jobs; import nl.esciencecenter.xenon.jobs.Scheduler; import nl.esciencecenter.xenon.jobs.Streams; import nl.esciencecenter.xenon.jobs.UnsupportedJobDescriptionException; import nl.esciencecenter.xenon.util.Utils; import org.junit.After; import org.junit.Before; import org.junit.FixMethodOrder; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TestWatcher; import org.junit.runners.MethodSorters; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * */ @FixMethodOrder(MethodSorters.NAME_ASCENDING) public abstract class GenericScheduleJobTestParent { private static final Logger logger = LoggerFactory.getLogger(GenericScheduleJobTestParent.class); private static String TEST_ROOT; protected static JobTestConfig config; protected Xenon xenon; protected Files files; protected Jobs jobs; protected Credentials credentials; protected Scheduler scheduler; protected Job job; protected Path testDir; @Rule public TestWatcher watcher = new XenonTestWatcher(); public Path resolve(Path root, String path) throws XenonException { return files.newPath(root.getFileSystem(), root.getRelativePath().resolve(path)); } // MUST be invoked by a @BeforeClass method of the subclass! public static void prepareClass(JobTestConfig testConfig) { config = testConfig; TEST_ROOT = "xenon_test_" + config.getAdaptorName() + "_" + System.currentTimeMillis(); } // MUST be invoked by a @AfterClass method of the subclass! 
public static void cleanupClass() throws Exception { logger.info("GenericJobAdaptorTest.cleanupClass() attempting to remove: " + TEST_ROOT); Xenon xenon = XenonFactory.newXenon(null); Files files = xenon.files(); Credentials credentials = xenon.credentials(); Path cwd = config.getWorkingDir(files, credentials); Path root = files.newPath(cwd.getFileSystem(), cwd.getRelativePath().resolve(TEST_ROOT)); if (files.exists(root)) { Utils.recursiveDelete(files, root); } XenonFactory.endXenon(xenon); } @Before public void prepare() throws Exception { // This is not an adaptor option, so it will throw an exception! //Map<String, String> properties = new HashMap<>(); //properties.put(SshAdaptor.POLLING_DELAY, "100"); xenon = XenonFactory.newXenon(null); files = xenon.files(); jobs = xenon.jobs(); credentials = xenon.credentials(); scheduler = config.getDefaultScheduler(jobs, credentials); job = null; } @After public void cleanup() throws XenonException { jobs.close(scheduler); // XenonFactory.endXenon(xenon); XenonFactory.endAll(); } protected String getWorkingDir(String testName) { return TEST_ROOT + "/" + testName; } protected Path initJobDirectory(String workingDir) throws XenonException, Exception { Path cwd = config.getWorkingDir(files, credentials); Path root = resolve(cwd, workingDir); files.createDirectories(root); return root; } protected void checkJobDone(JobStatus status) throws JobException { assertNotNull(status); assertTrue("Job exceeded deadline!", status.isDone()); if (status.hasException()) { throw new JobException("Job failed!", status.getException()); } if (!status.getState().equals("unknown")) { assertNotNull("Job exit code is null; status: " + status, status.getExitCode()); } } protected void checkJobOutput(Job job, Path root) throws XenonException, IOException { checkJobOutput(job, root, null, null); } protected void checkJobOutput(Job job, Path root, String expectedStdout) throws XenonException, IOException { checkJobOutput(job, root, expectedStdout, null); } protected void checkJobOutput(Job job, Path root, String expectedStdout, String expectedWindowsStdout) throws XenonException, IOException { if (job.getJobDescription().getStdout() != null) { String tmpout = readFile(root, job.getJobDescription().getStdout()); logger.info("STDOUT: " + tmpout); assertNotNull(tmpout); if (expectedStdout != null) { if (config.targetIsWindows()) { if (expectedWindowsStdout == null) { assertFalse(tmpout.isEmpty()); } else { assertTrue(tmpout.startsWith(expectedWindowsStdout)); } } else { assertEquals(expectedStdout, tmpout); } } } if (job.getJobDescription().getStderr() != null) { String tmperr = readFile(root, job.getJobDescription().getStderr()); logger.info("STDERR: " + tmperr); assertNotNull(tmperr); assertTrue(tmperr.isEmpty()); } } protected String readFile(Path root, String filename) throws XenonException, IOException { Path filePath = resolve(root, filename); return readFully(filePath); } protected String readFully(Path p) throws XenonException, IOException { long end = System.currentTimeMillis() + 60*1000; while (!files.exists(p) && System.currentTimeMillis() < end) { try { Thread.sleep(1000); } catch (InterruptedException e) { // ignore } } InputStream in = files.newInputStream(p); String result = Utils.readToString(in); try { in.close(); } catch (Exception e) { // ignored } return result; } protected void writeFully(Path p, String message) throws IOException, XenonException { OutputStream out = files.newOutputStream(p, OpenOption.CREATE, OpenOption.APPEND, OpenOption.WRITE); 
writeFully(out, message); } protected void writeFully(OutputStream out, String message) throws IOException { out.write(message.getBytes()); out.close(); } protected void cleanupJobRecursive(Path root) { XenonException cleanupFailed = null; try { Utils.recursiveDelete(files, root); } catch (XenonException ex) { cleanupFailed = ex; } try { files.close(root.getFileSystem()); } catch (XenonException ex) { cleanupFailed = ex; } if (cleanupFailed != null) { throw new AssertionError(cleanupFailed); } } /** * Remove job root folder, stdout, stderr and other provided paths. * @param job job to cleanup files for (null if not applicable) * @param root job working directory to remove * @param otherPaths other paths to remove * @throws XenonException if resolving path or delete fails */ protected void cleanupJob(Job job, Path root, Path... otherPaths) throws XenonException { XenonException cleanupFailed = null; Path[] allPaths = Arrays.copyOf(otherPaths, otherPaths.length + 2); if (job != null) { JobDescription description = job.getJobDescription(); if (description.getStdout() != null) allPaths[otherPaths.length] = resolve(root, description.getStdout()); if (description.getStderr() != null) allPaths[otherPaths.length + 1] = resolve(root, description.getStderr()); } for (Path p : allPaths) { if (p != null) { try { if (files.exists(p)) { files.delete(p); } } catch (XenonException ex) { cleanupFailed = ex; logger.warn("cleanupJob failed to delete file {}", p); } } } if (root != null) { try { files.delete(root); } catch (XenonException ex) { cleanupFailed = ex; } files.close(root.getFileSystem()); } if (cleanupFailed != null) { throw new AssertionError(cleanupFailed); } } /** Run a job with in given directory, and compare the output with expected output. * @param workingDir directory to run in * @param description job description. Include stdout in the description if comparing with expectedOutput * @param expectedOutput output that stdout should match, exactly. Provide null to only check that stderr is empty and stdout exists, if provided. */ protected void runJob(String workingDir, JobDescription description, String expectedOutput) throws Exception { Path root = initJobDirectory(workingDir); try { job = jobs.submitJob(scheduler, description); JobStatus status = jobs.waitUntilDone(job, config.getJobTimeout(0)); checkJobDone(status); checkJobOutput(job, root, expectedOutput); } finally { cleanupJob(job, root); } } /** * Job description to set environment value and print it. * Does not set stderr and stdout files. * * @param workDir directory to run in * @param value value of the environement variable * @return generated job description */ protected JobDescription printEnvJobDescription(String workDir, String value) { JobDescription description = new JobDescription(); description.setExecutable("/usr/bin/printenv"); description.setArguments("SOME_VARIABLE"); description.addEnvironment("SOME_VARIABLE", value); description.setWorkingDirectory(workDir); return description; } /** * Job description to print a message. * Does not set stderr and stdout files. In Windows, this prints the hostname, not the message. * * @param workingDir directory to run in * @param message message to print, if not in Windows. 
* @return generated job description */ protected JobDescription echoJobDescription(String workingDir, String message) { JobDescription description = new JobDescription(); if (config.targetIsWindows()) { description.setExecutable("hostname"); } else { description.setExecutable("/bin/echo"); description.setArguments("-n", message); } description.setWorkingDirectory(workingDir); return description; } /** * Job description to echo a message. * Does not set stderr and stdout files. In Windows, this prints the hostname, not the message. * * @param workingDir directory to run in * @param message message to print, if not in Windows. * @return generated job description */ protected JobDescription catJobDescription(String workingDir, String message) { JobDescription description = new JobDescription(); if (config.targetIsWindows()) { description.setExecutable("hostname"); } else { description.setExecutable("/bin/cat"); } description.setWorkingDirectory(workingDir); return description; } /** * Job description that takes approximately a fixed time. * Does not set stderr and stdout files. * * @param workingDir directory to run in * @param seconds number of seconds the job should take. * @return generated job description */ protected JobDescription timedJobDescription(String workingDir, int seconds) { JobDescription description = new JobDescription(); if (config.targetIsWindows()) { description.setExecutable("ping"); description.setArguments("-n", Integer.toString(seconds + 1), "127.0.0.1"); } else { description.setExecutable("/bin/sleep"); description.setArguments(Integer.toString(seconds)); } description.setWorkingDirectory(workingDir); return description; } /** * Job description prints the contents of a file * Does not set stderr and stdout files. * * @param workingDir directory to run in * @param stdin path to stdin to write contents to * @param message message to store as contents * @return generated job description * @throws XenonException file cannot be created or written to * @throws IOException file stream cannot be written to */ protected JobDescription catJobDescription(String workingDir, Path stdin, String message) throws XenonException, IOException { writeFully(stdin, message); JobDescription description = new JobDescription(); if (config.targetIsWindows()) { description.setExecutable("c:\\Windows\\System32\\more.com"); } else { description.setExecutable("/bin/cat"); } description.setWorkingDirectory(workingDir); description.setStdin("stdin.txt"); return description; } protected JobDescription nonExistingJobDescription(String workingDir) { JobDescription description = new JobDescription(); description.setExecutable("non-existing-executable"); description.setWorkingDirectory(workingDir); return description; } @Test public void test30_interactiveJobSubmit() throws Exception { if (!scheduler.supportsInteractive()) { return; } String message = "Hello World! 
test30"; JobDescription description = catJobDescription(null, message); description.setInteractive(true); logger.info("Submitting interactive job to " + scheduler.getScheme() + "://" + scheduler.getLocation()); job = jobs.submitJob(scheduler, description); logger.info("Interactive job submitted to " + scheduler.getScheme() + "://" + scheduler.getLocation()); Streams streams = jobs.getStreams(job); PrintWriter w = new PrintWriter(streams.getStdin()); w.print(message); w.flush(); w.close(); String out = Utils.readToString(streams.getStdout()); String err = Utils.readToString(streams.getStderr()); // Wait for 30 sec for the job to complete JobStatus status = jobs.waitUntilDone(job, config.getJobTimeout(30)); checkJobDone(status); assertNotNull(out); assertNotNull(err); if (config.targetIsWindows()) { assertTrue(out.length() > 0); } else { assertEquals(message, out); } assertEquals(0, err.length()); } @Test public void test31_batchJobSubmitWithPolling() throws Exception { String message = "Hello World! test31"; String workingDir = getWorkingDir("test31"); Path root = initJobDirectory(workingDir); try { JobDescription description = echoJobDescription(workingDir, message); description.setStdout("stdout.txt"); description.setStderr("stderr.txt"); job = jobs.submitJob(scheduler, description); long deadline = System.currentTimeMillis() + config.getJobTimeout(0); JobStatus status = jobs.getJobStatus(job); while (!status.isDone()) { Thread.sleep(config.getPollDelay()); assertTrue("Job exceeded deadline!", System.currentTimeMillis() < deadline); status = jobs.getJobStatus(job); } checkJobDone(status); checkJobOutput(job, root, message); } finally { cleanupJob(job, root); } } @Test public void test32_batchJobSubmitWithWait() throws Exception { String message = "Hello World! test32"; String workingDir = getWorkingDir("test32"); Path root = initJobDirectory(workingDir); try { JobDescription description = echoJobDescription(workingDir, message); description.setStdout("stdout.txt"); description.setStderr("stderr.txt"); job = jobs.submitJob(scheduler, description); JobStatus status = jobs.waitUntilRunning(job, config.getQueueWaitTime()); if (status.isRunning()) { status = jobs.waitUntilDone(job, config.getUpdateTime()); } checkJobDone(status); checkJobOutput(job, root, message); } finally { cleanupJob(job, root); } } protected void submitToQueueWithPolling(String testName, String queueName, int jobCount) throws Exception { logger.info("STARTING TEST submitToQueueWithPolling(" + testName + ", " + queueName + ", " + jobCount); String workingDir = getWorkingDir(testName); Path root = initJobDirectory(workingDir); Job[] j = new Job[jobCount]; try { for (int i = 0; i < j.length; i++) { JobDescription description = timedJobDescription(workingDir, 1); description.setQueueName(queueName); description.setStdout("stdout" + i + ".txt"); description.setStderr("stderr" + i + ".txt"); j[i] = jobs.submitJob(scheduler, description); } // Bit hard to determine realistic deadline here ? 
long deadline = System.currentTimeMillis() + jobCount * config.getJobTimeout(1); boolean done = false; while (!done) { JobStatus[] status = jobs.getJobStatuses(j); int count = 0; for (int i = 0; i < j.length; i++) { if (j[i] != null) { if (status[i].isDone()) { if (status[i].hasException()) { throw new JobException("Job " + i + " failed", status[i].getException()); } logger.info("Job " + i + " done."); j[i] = null; } else { count++; } } } if (count == 0) { done = true; } else { Thread.sleep(config.getPollDelay()); assertTrue("Job exceeded deadline!", System.currentTimeMillis() < deadline); } } } finally { cleanupJobRecursive(root); } } @Test public void test33a_testMultiBatchJobSubmitWithPolling() throws Exception { for (String queue : config.getQueueNames()) { submitToQueueWithPolling("test33a_" + queue, queue, 1); } } @Test public void test33b_testMultiBatchJobSubmitWithPolling() throws Exception { logger.info("STARTING TEST test33b"); for (String queue : config.getQueueNames()) { submitToQueueWithPolling("test33b_" + queue, queue, 10); } } @Test public void test34_batchJobSubmitWithKill() throws Exception { String workingDir = getWorkingDir("test34"); Path root = initJobDirectory(workingDir); Job[] tmpJobs = new Job[4]; try { // Start uninteresting jobs, to make sure there is something on the queue. for (int i = 0; i < tmpJobs.length; i++) { tmpJobs[i] = jobs.submitJob(scheduler, timedJobDescription(null, 10)); } JobDescription description = timedJobDescription(workingDir, 60); description.setStdout("stdout.txt"); description.setStderr("stderr.txt"); // We immediately kill the job. Hopefully it isn't running yet! job = jobs.submitJob(scheduler, description); JobStatus status = jobs.cancelJob(job); if (status.isRunning()) { // Wait until the job is killed. if (!status.isDone()) { status = jobs.waitUntilDone(job, config.getUpdateTime()); } } assertTrue("Failed to kill job! Expected status done, but job status is " + status, status.isDone()); assertTrue("Job cancellation not registered: job status is " + status, status.hasException()); Exception e = status.getException(); assertTrue("Did not expect " + e + ": " + e.getMessage(), e instanceof JobCanceledException); } finally { for (Job tmpJob : tmpJobs) { if (tmpJob != null) jobs.cancelJob(tmpJob); } cleanupJob(job, root); } } @Test public void test35_batchJobSubmitWithKill2() throws Exception { String workingDir = getWorkingDir("test35"); Path root = initJobDirectory(workingDir); try { JobDescription description = timedJobDescription(workingDir, 60); description.setStdout("stdout.txt"); description.setStderr("stderr.txt"); job = jobs.submitJob(scheduler, description); // Wait for job to run before killing it! JobStatus status = jobs.waitUntilRunning(job, config.getQueueWaitTime()); assertTrue("Job failed to start! Expected status running, but job status is " + status, status.isRunning()); status = jobs.cancelJob(job); // Wait until the job is killed. We assume it takes less than a minute! if (!status.isDone()) { status = jobs.waitUntilDone(job, config.getUpdateTime()); } assertTrue("Failed to kill job! Expected status done, but job status is " + status, status.isDone()); assertTrue("Expected status with exception, but job status is " + status, status.hasException()); assertTrue(status.getException() instanceof JobCanceledException); } finally { cleanupJob(job, root); } } @Test public void test36a_batchJobSubmitWithInput() throws Exception { String message = "Hello World! 
test36a"; String workingDir = getWorkingDir("test36a"); Path root = initJobDirectory(workingDir); Path stdin = resolve(root, "stdin.txt"); try { JobDescription description = catJobDescription(workingDir, stdin, message); description.setStdout("stdout.txt"); description.setStderr("stderr.txt"); job = jobs.submitJob(scheduler, description); JobStatus status = jobs.waitUntilDone(job, config.getJobTimeout(0)); checkJobDone(status); checkJobOutput(job, root, message, message); } finally { cleanupJob(job, root, stdin); } } @Test public void test36b_batchJobSubmitWithInput() throws Exception { String message = "Hello World! test36b"; String workingDir = getWorkingDir("test36b"); Path root = initJobDirectory(workingDir); Path stdin = resolve(root, "stdin.txt"); try { JobDescription description = catJobDescription(workingDir, stdin, message); description.setStdout("stdout.txt"); description.setStderr("stderr.txt"); job = jobs.submitJob(scheduler, description); JobStatus status = jobs.waitUntilRunning(job, config.getQueueWaitTime()); if (status.isRunning()) { status = jobs.waitUntilDone(job, config.getUpdateTime()); } checkJobDone(status); checkJobOutput(job, root, message, message); } finally { cleanupJob(job, root, stdin); } } @Test public void test37a_batchJobSubmitWithoutWorkDir() throws Exception { job = jobs.submitJob(scheduler, timedJobDescription(null, 1)); JobStatus status = jobs.waitUntilDone(job, config.getJobTimeout(1)); checkJobDone(status); } @Test public void test37b_batchJobSubmitWithRelativeWorkDir() throws Exception { String workingDir = "test37b"; String message = "some message " + workingDir; JobDescription description = echoJobDescription(workingDir, message); description.setStdout("stdout.txt"); runJob(workingDir, description, message); } @Test public void test37c_batchJobSubmitWithAbsoluteWorkDir() throws Exception { String workingDir = getWorkingDir("test37c"); String message = "some message " + workingDir; JobDescription description = echoJobDescription(workingDir, message); description.setStdout("stdout.txt"); runJob(workingDir, description, message); } @Test public void test37d_batchJobSubmitWithIncorrectWorkingDir() throws Exception { //note that we are _not_ creating this directory, making it invalid String workingDir = getWorkingDir("test37d"); //submitting this job will either: // 1) throw an InvalidJobDescription when we submit the job // 2) produce an error when the job is run. try { job = jobs.submitJob(scheduler, timedJobDescription(workingDir, 1)); // wait extra second for timed job JobStatus status = jobs.waitUntilDone(job, config.getJobTimeout(1)); assertTrue("Job exceeded deadline! Expected status done, got " + status, status.isDone()); //option (2) assertTrue(status.hasException()); } catch (InvalidJobDescriptionException e) { //Submit failed, as expected (1) } } @Test public void test37e_batchJobSubmitWithWorkDirWithSpaces() throws Exception { //note the space in the path String workingDir = getWorkingDir("test 37b"); String message = "some message " + workingDir; JobDescription description = echoJobDescription(workingDir, message); description.setStdout("stdout.txt"); runJob(workingDir, description, message); } //@Test public void test38_multipleBatchJobSubmitWithInput() throws Exception { String message = "Hello World! 
test38"; String workingDir = getWorkingDir("test38"); Path root = initJobDirectory(workingDir); Path stdin = resolve(root, "stdin.txt"); try { JobDescription description = catJobDescription(workingDir, stdin, message); description.setProcessesPerNode(2); job = jobs.submitJob(scheduler, description); JobStatus status = jobs.waitUntilDone(job, config.getJobTimeout(0)); checkJobDone(status); for (int i = 0; i < 2; i++) { String outString = readFile(root, "stdout.txt." + i); String errString = readFile(root, "stderr.txt." + i); assertNotNull(outString); // Line ending may differ assertTrue(outString.startsWith(message)); assertNotNull(errString); assertEquals(0, errString.length()); } } finally { cleanupJobRecursive(root); } } @Test public void test39_multipleBatchJobSubmitWithExceptions() throws Exception { // NOTE: This test assumes that an exception is thrown when the status of a job is requested twice after the job is done! // This may not be true for all schedulers. if (config.supportsStatusAfterDone()) { return; } Job[] j = new Job[] { jobs.submitJob(scheduler, timedJobDescription(null, 1)), jobs.submitJob(scheduler, timedJobDescription(null, 2)), }; long deadline = System.currentTimeMillis() + config.getJobTimeout(1) + config.getJobTimeout(2); JobStatus[] s = null; while (System.currentTimeMillis() < deadline) { s = jobs.getJobStatuses(j); if (s[0].hasException() && s[1].hasException()) { break; } Thread.sleep(config.getPollDelay()); } assertNotNull("Job status could not be retrieved", s); assertTrue("Job exceeded deadline!", s[0].hasException() && s[1].hasException()); } @Test public void test40_batchJobSubmitWithExitcode() throws Exception { job = jobs.submitJob(scheduler, timedJobDescription(null, 1)); JobStatus status = jobs.waitUntilDone(job, config.getJobTimeout(1)); checkJobDone(status); if (!status.getState().equals("unknown")) { assertEquals(0, status.getExitCode().longValue()); } } @Test public void test40_batchJobSubmitWithNonZeroExitcode() throws Exception { //run an ls with a non existing file. This should make ls return exitcode 2 JobDescription description = new JobDescription(); if (config.targetIsWindows()) { // Will always exit! 
description.setExecutable("timeout"); description.setArguments("1"); } else { description.setExecutable("/bin/cat"); description.setArguments("non.existing.file"); } job = jobs.submitJob(scheduler, description); JobStatus status = jobs.waitUntilDone(job, config.getJobTimeout(1)); checkJobDone(status); if (!status.getState().equals("unknown")) { assertNotEquals(0, status.getExitCode().longValue()); } } @Test public void test41_batchJobSubmitWithEnvironmentVariable() throws Exception { if (!config.supportsEnvironmentVariables() || config.targetIsWindows()) { return; } String envValue = "some_value"; String workingDir = getWorkingDir("test41"); Path root = initJobDirectory(workingDir); try { //echo the given variable, to see if the va JobDescription description = printEnvJobDescription(workingDir, envValue); description.setStdout("stdout.txt"); job = jobs.submitJob(scheduler, description); JobStatus status = jobs.waitUntilDone(job, config.getJobTimeout(0)); checkJobDone(status); checkJobOutput(job, root, envValue + "\n"); } finally { cleanupJob(job, root); } } @Test public void test41b_batchJobSubmitWithEnvironmentVariable() throws Exception { if (config.supportsEnvironmentVariables() || config.targetIsWindows()) { return; } try { job = jobs.submitJob(scheduler, printEnvJobDescription(null, "some_value")); jobs.waitUntilDone(job, config.getUpdateTime()); fail("Job description not supposed to be supported."); } catch (UnsupportedJobDescriptionException e) { // do nothing } } @Test public void test42a_batchJob_parallel_Exception() throws Exception { if (config.supportsParallelJobs()) { return; } JobDescription description = echoJobDescription(null, "some message"); description.setNodeCount(2); description.setProcessesPerNode(2); try { jobs.submitJob(scheduler, description); fail("Submit did not throw exception, which was expected!"); } catch (InvalidJobDescriptionException e) { // do nothing } } @Test public void test43_submit_JobDescriptionShouldBeCopied_Success() throws Exception { String workingDir = getWorkingDir("test43"); Path root = initJobDirectory(workingDir); try { JobDescription description = nonExistingJobDescription(workingDir); description.setStdout("stdout.txt"); job = jobs.submitJob(scheduler, description); description.setStdout("aap.txt"); assertNotEquals("Job description should have been copied!", job.getJobDescription().getStdout(), description.getStdout()); JobStatus status = jobs.cancelJob(job); if (!status.isDone()) { jobs.waitUntilDone(job, config.getUpdateTime()); } } finally { cleanupJobRecursive(root); } } @Test public void test44_submit_JobDescriptionShouldBeSame() throws Exception { String workingDir = getWorkingDir("test44"); Path root = initJobDirectory(workingDir); Path stdin = resolve(root, "stdin.txt"); try { JobDescription description = catJobDescription(workingDir, stdin, "my message"); description.setStdout("stdout.txt"); description.setStderr("stderr.txt"); job = jobs.submitJob(scheduler, description); JobStatus status = jobs.cancelJob(job); if (!status.isDone()) { jobs.waitUntilDone(job, config.getUpdateTime()); } JobDescription submitted = job.getJobDescription(); assertEquals("stdout.txt", submitted.getStdout()); assertEquals("stderr.txt", submitted.getStderr()); assertEquals("stdin.txt", submitted.getStdin()); assertEquals(workingDir, submitted.getWorkingDirectory()); } finally { cleanupJob(job, root, stdin); } } @Test(expected = IllegalArgumentException.class) public void test45_batchJobSubmitWithIllegalWaitUntilRunning() throws Exception { String 
workingDir = getWorkingDir("test45"); Path root = initJobDirectory(workingDir); try { JobDescription description = timedJobDescription(workingDir, 5); description.setStdout("stdout.txt"); description.setStderr("stderr.txt"); job = jobs.submitJob(scheduler, description); // Should throw exception jobs.waitUntilRunning(job, -1); } finally { jobs.waitUntilDone(job, 0); cleanupJob(job, root); } } @Test public void test46a_batchJobSubmitWithPollingWaitUntilDone() throws Exception { String workingDir = getWorkingDir("test46"); Path root = initJobDirectory(workingDir); try { JobDescription description = timedJobDescription(workingDir, 10); long start = System.currentTimeMillis(); job = jobs.submitJob(scheduler, description); JobStatus status = jobs.waitUntilDone(job, 1000); while (status.isRunning()) { status = jobs.waitUntilDone(job, 1000); } long end = System.currentTimeMillis(); checkJobDone(status); // We expect the job to have lasted at least 10000 milliseconds, which would require 9 or more times polling. assertTrue((end-start) >= 10000); } finally { cleanupJob(job, root); } } @Test public void test46b_batchJobSubmitWithPollingWaitUntilDone() throws Exception { String workingDir = getWorkingDir("test46"); Path root = initJobDirectory(workingDir); try { JobDescription description = timedJobDescription(workingDir, 10); job = jobs.submitJob(scheduler, description); JobStatus status = jobs.waitUntilDone(job, 1000); int count = 1; while (status.isRunning()) { status = jobs.waitUntilDone(job, 1000); count++; } checkJobDone(status); // We expect the job to have lasted at least 10000 milliseconds, which would require 9 or more times polling. assertTrue(count >= 9); } finally { cleanupJob(job, root); } } @Test public void test46c_batchJobSubmitWithPollingWaitUntilDone() throws Exception { String workingDir = getWorkingDir("test46"); Path root = initJobDirectory(workingDir); try { JobDescription description = timedJobDescription(workingDir, 10); job = jobs.submitJob(scheduler, description); JobStatus status = jobs.waitUntilDone(job, 1000); while (status.isRunning()) { long now = System.currentTimeMillis(); status = jobs.waitUntilDone(job, 1000); long diff = System.currentTimeMillis() - now; // The wait should have lasted at least 1000 millis if (status.isRunning()) { assertTrue(diff >= 1000); } } checkJobDone(status); } finally { cleanupJob(job, root); } } @Test public void test47_batchJobSubmitWithSingleWaitUntilDone() throws Exception { String workingDir = getWorkingDir("test46"); Path root = initJobDirectory(workingDir); try { JobDescription description = timedJobDescription(workingDir, 10); long start = System.currentTimeMillis(); job = jobs.submitJob(scheduler, description); JobStatus status = jobs.waitUntilDone(job, 0); // Should wait until the job is finished, however long it takes. long end = System.currentTimeMillis(); // Job must be in done state checkJobDone(status); // We expect the job to have lasted at least 10000 milliseconds, which would require 9 or more times polling. assertTrue((end-start) >= 10000); } finally { cleanupJob(job, root); } } @Test public void test48_batchJobSubmitWithSingleWaitUntilRunning() throws Exception { String workingDir = getWorkingDir("test46"); Path root = initJobDirectory(workingDir); try { JobDescription description = timedJobDescription(workingDir, 5); job = jobs.submitJob(scheduler, description); JobStatus status = jobs.waitUntilRunning(job, 0); // Should wait until the job is finished, however long it takes. 
assertTrue(status.isRunning()); status = jobs.waitUntilDone(job, 0); // Job must be in done state checkJobDone(status); } finally { cleanupJob(job, root); } } @Test(expected = IllegalArgumentException.class) public void test49_batchJobSubmitWithIllegalWaitUntilDone() throws Exception { String workingDir = getWorkingDir("test46"); Path root = initJobDirectory(workingDir); try { JobDescription description = timedJobDescription(workingDir, 1); job = jobs.submitJob(scheduler, description); // Should throw exception! jobs.waitUntilDone(job, -1); } finally { jobs.cancelJob(job); cleanupJob(job, root); } } }
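The two static lifecycle hooks in the listing above (prepareClass and cleanupClass) only take effect when a concrete subclass forwards them from its own @BeforeClass and @AfterClass methods, as their inline comments demand. A minimal sketch of that wiring, assuming a hypothetical LocalJobTestConfig implementation of JobTestConfig (neither the subclass nor the config class appears in the listing):

import org.junit.AfterClass;
import org.junit.BeforeClass;

public class LocalScheduleJobTest extends GenericScheduleJobTestParent {

    @BeforeClass
    public static void prepareLocalScheduleJobTest() throws Exception {
        // Forward the adaptor-specific config to the parent;
        // LocalJobTestConfig is an assumed JobTestConfig implementation.
        prepareClass(new LocalJobTestConfig());
    }

    @AfterClass
    public static void cleanupLocalScheduleJobTest() throws Exception {
        // Removes the per-run TEST_ROOT directory the tests created.
        cleanupClass();
    }
}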
src/integrationTest/java/nl/esciencecenter/xenon/adaptors/GenericScheduleJobTestParent.java
/** * Copyright 2013 Netherlands eScience Center * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package nl.esciencecenter.xenon.adaptors; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.PrintWriter; import java.util.Arrays; import nl.esciencecenter.xenon.JobException; import nl.esciencecenter.xenon.Xenon; import nl.esciencecenter.xenon.XenonException; import nl.esciencecenter.xenon.XenonFactory; import nl.esciencecenter.xenon.XenonTestWatcher; import nl.esciencecenter.xenon.credentials.Credentials; import nl.esciencecenter.xenon.files.Files; import nl.esciencecenter.xenon.files.OpenOption; import nl.esciencecenter.xenon.files.Path; import nl.esciencecenter.xenon.jobs.InvalidJobDescriptionException; import nl.esciencecenter.xenon.jobs.Job; import nl.esciencecenter.xenon.jobs.JobCanceledException; import nl.esciencecenter.xenon.jobs.JobDescription; import nl.esciencecenter.xenon.jobs.JobStatus; import nl.esciencecenter.xenon.jobs.Jobs; import nl.esciencecenter.xenon.jobs.Scheduler; import nl.esciencecenter.xenon.jobs.Streams; import nl.esciencecenter.xenon.jobs.UnsupportedJobDescriptionException; import nl.esciencecenter.xenon.util.Utils; import org.junit.After; import org.junit.Before; import org.junit.FixMethodOrder; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TestWatcher; import org.junit.runners.MethodSorters; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * */ @FixMethodOrder(MethodSorters.NAME_ASCENDING) public abstract class GenericScheduleJobTestParent { private static final Logger logger = LoggerFactory.getLogger(GenericScheduleJobTestParent.class); private static String TEST_ROOT; protected static JobTestConfig config; protected Xenon xenon; protected Files files; protected Jobs jobs; protected Credentials credentials; protected Scheduler scheduler; protected Job job; protected Path testDir; @Rule public TestWatcher watcher = new XenonTestWatcher(); public Path resolve(Path root, String path) throws XenonException { return files.newPath(root.getFileSystem(), root.getRelativePath().resolve(path)); } // MUST be invoked by a @BeforeClass method of the subclass! public static void prepareClass(JobTestConfig testConfig) { config = testConfig; TEST_ROOT = "xenon_test_" + config.getAdaptorName() + "_" + System.currentTimeMillis(); } // MUST be invoked by a @AfterClass method of the subclass! 
public static void cleanupClass() throws Exception { logger.info("GenericJobAdaptorTest.cleanupClass() attempting to remove: " + TEST_ROOT); Xenon xenon = XenonFactory.newXenon(null); Files files = xenon.files(); Credentials credentials = xenon.credentials(); Path cwd = config.getWorkingDir(files, credentials); Path root = files.newPath(cwd.getFileSystem(), cwd.getRelativePath().resolve(TEST_ROOT)); if (files.exists(root)) { Utils.recursiveDelete(files, root); } XenonFactory.endXenon(xenon); } @Before public void prepare() throws Exception { // This is not an adaptor option, so it will throw an exception! //Map<String, String> properties = new HashMap<>(); //properties.put(SshAdaptor.POLLING_DELAY, "100"); xenon = XenonFactory.newXenon(null); files = xenon.files(); jobs = xenon.jobs(); credentials = xenon.credentials(); scheduler = config.getDefaultScheduler(jobs, credentials); job = null; } @After public void cleanup() throws XenonException { jobs.close(scheduler); // XenonFactory.endXenon(xenon); XenonFactory.endAll(); } protected String getWorkingDir(String testName) { return TEST_ROOT + "/" + testName; } protected Path initJobDirectory(String workingDir) throws XenonException, Exception { Path cwd = config.getWorkingDir(files, credentials); Path root = resolve(cwd, workingDir); files.createDirectories(root); return root; } protected void checkJobDone(JobStatus status) throws JobException { assertNotNull(status); assertTrue("Job exceeded deadline!", status.isDone()); if (status.hasException()) { throw new JobException("Job failed!", status.getException()); } if (!status.getState().equals("unknown")) { assertNotNull("Job exit code is null; status: " + status, status.getExitCode()); } } protected void checkJobOutput(Job job, Path root) throws XenonException, IOException { checkJobOutput(job, root, null, null); } protected void checkJobOutput(Job job, Path root, String expectedStdout) throws XenonException, IOException { checkJobOutput(job, root, expectedStdout, null); } protected void checkJobOutput(Job job, Path root, String expectedStdout, String expectedWindowsStdout) throws XenonException, IOException { if (job.getJobDescription().getStdout() != null) { String tmpout = readFile(root, job.getJobDescription().getStdout()); logger.info("STDOUT: " + tmpout); assertNotNull(tmpout); if (expectedStdout != null) { if (config.targetIsWindows()) { if (expectedWindowsStdout == null) { assertFalse(tmpout.isEmpty()); } else { assertTrue(tmpout.startsWith(expectedWindowsStdout)); } } else { assertEquals(expectedStdout, tmpout); } } } if (job.getJobDescription().getStderr() != null) { String tmperr = readFile(root, job.getJobDescription().getStderr()); logger.info("STDERR: " + tmperr); assertNotNull(tmperr); assertTrue(tmperr.isEmpty()); } } protected String readFile(Path root, String filename) throws XenonException, IOException { Path filePath = resolve(root, filename); return readFully(filePath); } protected String readFully(Path p) throws XenonException, IOException { long end = System.currentTimeMillis() + 60*1000; while (!files.exists(p) && System.currentTimeMillis() < end) { try { Thread.sleep(1000); } catch (InterruptedException e) { // ignore } } InputStream in = files.newInputStream(p); String result = Utils.readToString(in); try { in.close(); } catch (Exception e) { // ignored } return result; } protected void writeFully(Path p, String message) throws IOException, XenonException { OutputStream out = files.newOutputStream(p, OpenOption.CREATE, OpenOption.APPEND, OpenOption.WRITE); 
writeFully(out, message); } protected void writeFully(OutputStream out, String message) throws IOException { out.write(message.getBytes()); out.close(); } protected void cleanupJobRecursive(Path root) { XenonException cleanupFailed = null; try { Utils.recursiveDelete(files, root); } catch (XenonException ex) { cleanupFailed = ex; } try { files.close(root.getFileSystem()); } catch (XenonException ex) { cleanupFailed = ex; } if (cleanupFailed != null) { throw new AssertionError(cleanupFailed); } } /** * Remove job root folder, stdout, stderr and other provided paths. * @param job job to cleanup files for (null if not applicable) * @param root job working directory to remove * @param otherPaths other paths to remove * @throws XenonException if resolving path or delete fails */ protected void cleanupJob(Job job, Path root, Path... otherPaths) throws XenonException { XenonException cleanupFailed = null; Path[] allPaths = Arrays.copyOf(otherPaths, otherPaths.length + 2); if (job != null) { JobDescription description = job.getJobDescription(); if (description.getStdout() != null) allPaths[otherPaths.length] = resolve(root, description.getStdout()); if (description.getStderr() != null) allPaths[otherPaths.length + 1] = resolve(root, description.getStderr()); } for (Path p : allPaths) { if (p != null) { try { if (files.exists(p)) { files.delete(p); } } catch (XenonException ex) { cleanupFailed = ex; logger.warn("cleanupJob failed to delete file {}", p); } } } if (root != null) { try { files.delete(root); } catch (XenonException ex) { cleanupFailed = ex; } files.close(root.getFileSystem()); } if (cleanupFailed != null) { throw new AssertionError(cleanupFailed); } } /** Run a job with in given directory, and compare the output with expected output. * @param workingDir directory to run in * @param description job description. Include stdout in the description if comparing with expectedOutput * @param expectedOutput output that stdout should match, exactly. Provide null to only check that stderr is empty and stdout exists, if provided. */ protected void runJob(String workingDir, JobDescription description, String expectedOutput) throws Exception { Path root = initJobDirectory(workingDir); try { job = jobs.submitJob(scheduler, description); JobStatus status = jobs.waitUntilDone(job, config.getJobTimeout(0)); checkJobDone(status); checkJobOutput(job, root, expectedOutput); } finally { cleanupJob(job, root); } } /** * Job description to set environment value and print it. * Does not set stderr and stdout files. * * @param workDir directory to run in * @param value value of the environement variable * @return generated job description */ protected JobDescription printEnvJobDescription(String workDir, String value) { JobDescription description = new JobDescription(); description.setExecutable("/usr/bin/printenv"); description.setArguments("SOME_VARIABLE"); description.addEnvironment("SOME_VARIABLE", value); description.setWorkingDirectory(workDir); return description; } /** * Job description to print a message. * Does not set stderr and stdout files. In Windows, this prints the hostname, not the message. * * @param workingDir directory to run in * @param message message to print, if not in Windows. 
* @return generated job description */ protected JobDescription echoJobDescription(String workingDir, String message) { JobDescription description = new JobDescription(); if (config.targetIsWindows()) { description.setExecutable("hostname"); } else { description.setExecutable("/bin/echo"); description.setArguments("-n", message); } description.setWorkingDirectory(workingDir); return description; } /** * Job description to echo a message. * Does not set stderr and stdout files. In Windows, this prints the hostname, not the message. * * @param workingDir directory to run in * @param message message to print, if not in Windows. * @return generated job description */ protected JobDescription catJobDescription(String workingDir, String message) { JobDescription description = new JobDescription(); if (config.targetIsWindows()) { description.setExecutable("hostname"); } else { description.setExecutable("/bin/cat"); } description.setWorkingDirectory(workingDir); return description; } /** * Job description that takes approximately a fixed time. * Does not set stderr and stdout files. * * @param workingDir directory to run in * @param seconds number of seconds the job should take. * @return generated job description */ protected JobDescription timedJobDescription(String workingDir, int seconds) { JobDescription description = new JobDescription(); if (config.targetIsWindows()) { description.setExecutable("ping"); description.setArguments("-n", Integer.toString(seconds + 1), "127.0.0.1"); } else { description.setExecutable("/bin/sleep"); description.setArguments(Integer.toString(seconds)); } description.setWorkingDirectory(workingDir); return description; } /** * Job description prints the contents of a file * Does not set stderr and stdout files. * * @param workingDir directory to run in * @param stdin path to stdin to write contents to * @param message message to store as contents * @return generated job description * @throws XenonException file cannot be created or written to * @throws IOException file stream cannot be written to */ protected JobDescription catJobDescription(String workingDir, Path stdin, String message) throws XenonException, IOException { writeFully(stdin, message); JobDescription description = new JobDescription(); if (config.targetIsWindows()) { description.setExecutable("c:\\Windows\\System32\\more.com"); } else { description.setExecutable("/bin/cat"); } description.setWorkingDirectory(workingDir); description.setStdin("stdin.txt"); return description; } protected JobDescription nonExistingJobDescription(String workingDir) { JobDescription description = new JobDescription(); description.setExecutable("non-existing-executable"); description.setWorkingDirectory(workingDir); return description; } @Test public void test30_interactiveJobSubmit() throws Exception { if (!scheduler.supportsInteractive()) { return; } String message = "Hello World! 
test30"; JobDescription description = catJobDescription(null, message); description.setInteractive(true); logger.info("Submitting interactive job to " + scheduler.getScheme() + "://" + scheduler.getLocation()); job = jobs.submitJob(scheduler, description); logger.info("Interactive job submitted to " + scheduler.getScheme() + "://" + scheduler.getLocation()); Streams streams = jobs.getStreams(job); PrintWriter w = new PrintWriter(streams.getStdin()); w.print(message); w.flush(); w.close(); String out = Utils.readToString(streams.getStdout()); String err = Utils.readToString(streams.getStderr()); // Wait for 30 sec for the job to complete JobStatus status = jobs.waitUntilDone(job, config.getJobTimeout(30)); checkJobDone(status); assertNotNull(out); assertNotNull(err); if (config.targetIsWindows()) { assertTrue(out.length() > 0); } else { assertEquals(message, out); } assertEquals(0, err.length()); } @Test public void test31_batchJobSubmitWithPolling() throws Exception { String message = "Hello World! test31"; String workingDir = getWorkingDir("test31"); Path root = initJobDirectory(workingDir); try { JobDescription description = echoJobDescription(workingDir, message); description.setStdout("stdout.txt"); description.setStderr("stderr.txt"); job = jobs.submitJob(scheduler, description); long deadline = System.currentTimeMillis() + config.getJobTimeout(0); JobStatus status = jobs.getJobStatus(job); while (!status.isDone()) { Thread.sleep(config.getPollDelay()); assertTrue("Job exceeded deadline!", System.currentTimeMillis() < deadline); status = jobs.getJobStatus(job); } checkJobDone(status); checkJobOutput(job, root, message); } finally { cleanupJob(job, root); } } @Test public void test32_batchJobSubmitWithWait() throws Exception { String message = "Hello World! test32"; String workingDir = getWorkingDir("test32"); Path root = initJobDirectory(workingDir); try { JobDescription description = echoJobDescription(workingDir, message); description.setStdout("stdout.txt"); description.setStderr("stderr.txt"); job = jobs.submitJob(scheduler, description); JobStatus status = jobs.waitUntilRunning(job, config.getQueueWaitTime()); if (status.isRunning()) { status = jobs.waitUntilDone(job, config.getUpdateTime()); } checkJobDone(status); checkJobOutput(job, root, message); } finally { cleanupJob(job, root); } } protected void submitToQueueWithPolling(String testName, String queueName, int jobCount) throws Exception { logger.info("STARTING TEST submitToQueueWithPolling(" + testName + ", " + queueName + ", " + jobCount); String workingDir = getWorkingDir(testName); Path root = initJobDirectory(workingDir); Job[] j = new Job[jobCount]; try { for (int i = 0; i < j.length; i++) { JobDescription description = timedJobDescription(workingDir, 1); description.setQueueName(queueName); description.setStdout("stdout" + i + ".txt"); description.setStderr("stderr" + i + ".txt"); j[i] = jobs.submitJob(scheduler, description); } // Bit hard to determine realistic deadline here ? 
long deadline = System.currentTimeMillis() + jobCount * config.getJobTimeout(1); boolean done = false; while (!done) { JobStatus[] status = jobs.getJobStatuses(j); int count = 0; for (int i = 0; i < j.length; i++) { if (j[i] != null) { if (status[i].isDone()) { if (status[i].hasException()) { throw new JobException("Job " + i + " failed", status[i].getException()); } logger.info("Job " + i + " done."); j[i] = null; } else { count++; } } } if (count == 0) { done = true; } else { Thread.sleep(config.getPollDelay()); assertTrue("Job exceeded deadline!", System.currentTimeMillis() < deadline); } } } finally { cleanupJobRecursive(root); } } @Test public void test33a_testMultiBatchJobSubmitWithPolling() throws Exception { for (String queue : config.getQueueNames()) { submitToQueueWithPolling("test33a_" + queue, queue, 1); } } @Test public void test33b_testMultiBatchJobSubmitWithPolling() throws Exception { logger.info("STARTING TEST test33b"); for (String queue : config.getQueueNames()) { submitToQueueWithPolling("test33b_" + queue, queue, 10); } } @Test public void test34_batchJobSubmitWithKill() throws Exception { String workingDir = getWorkingDir("test34"); Path root = initJobDirectory(workingDir); Job[] tmpJobs = new Job[4]; try { // Start uninteresting jobs, to make sure there is something on the queue. for (int i = 0; i < tmpJobs.length; i++) { tmpJobs[i] = jobs.submitJob(scheduler, timedJobDescription(null, 10)); } JobDescription description = timedJobDescription(workingDir, 60); description.setStdout("stdout.txt"); description.setStderr("stderr.txt"); // We immediately kill the job. Hopefully it isn't running yet! job = jobs.submitJob(scheduler, description); JobStatus status = jobs.cancelJob(job); if (status.isRunning()) { // Wait until the job is killed. if (!status.isDone()) { status = jobs.waitUntilDone(job, config.getUpdateTime()); } } assertTrue("Failed to kill job! Expected status done, but job status is " + status, status.isDone()); assertTrue("Job cancellation not registered: job status is " + status, status.hasException()); Exception e = status.getException(); assertTrue("Did not expect " + e + ": " + e.getMessage(), e instanceof JobCanceledException); } finally { for (Job tmpJob : tmpJobs) { if (tmpJob != null) jobs.cancelJob(tmpJob); } cleanupJob(job, root); } } @Test public void test35_batchJobSubmitWithKill2() throws Exception { String workingDir = getWorkingDir("test35"); Path root = initJobDirectory(workingDir); try { JobDescription description = timedJobDescription(workingDir, 60); description.setStdout("stdout.txt"); description.setStderr("stderr.txt"); job = jobs.submitJob(scheduler, description); // Wait for job to run before killing it! JobStatus status = jobs.waitUntilRunning(job, config.getQueueWaitTime()); assertTrue("Job failed to start! Expected status running, but job status is " + status, status.isRunning()); status = jobs.cancelJob(job); // Wait until the job is killed. We assume it takes less than a minute! if (!status.isDone()) { status = jobs.waitUntilDone(job, config.getUpdateTime()); } assertTrue("Failed to kill job! Expected status done, but job status is " + status, status.isDone()); assertTrue("Expected status with exception, but job status is " + status, status.hasException()); assertTrue(status.getException() instanceof JobCanceledException); } finally { cleanupJob(job, root); } } @Test public void test36a_batchJobSubmitWithInput() throws Exception { String message = "Hello World! 
test36a"; String workingDir = getWorkingDir("test36a"); Path root = initJobDirectory(workingDir); Path stdin = resolve(root, "stdin.txt"); try { JobDescription description = catJobDescription(workingDir, stdin, message); description.setStdout("stdout.txt"); description.setStderr("stderr.txt"); job = jobs.submitJob(scheduler, description); JobStatus status = jobs.waitUntilDone(job, config.getJobTimeout(0)); checkJobDone(status); checkJobOutput(job, root, message, message); } finally { cleanupJob(job, root, stdin); } } @Test public void test36b_batchJobSubmitWithInput() throws Exception { String message = "Hello World! test36b"; String workingDir = getWorkingDir("test36b"); Path root = initJobDirectory(workingDir); Path stdin = resolve(root, "stdin.txt"); try { JobDescription description = catJobDescription(workingDir, stdin, message); description.setStdout("stdout.txt"); description.setStderr("stderr.txt"); job = jobs.submitJob(scheduler, description); JobStatus status = jobs.waitUntilRunning(job, config.getQueueWaitTime()); if (status.isRunning()) { status = jobs.waitUntilDone(job, config.getUpdateTime()); } checkJobDone(status); checkJobOutput(job, root, message, message); } finally { cleanupJob(job, root, stdin); } } @Test public void test37a_batchJobSubmitWithoutWorkDir() throws Exception { job = jobs.submitJob(scheduler, timedJobDescription(null, 1)); JobStatus status = jobs.waitUntilDone(job, config.getJobTimeout(1)); checkJobDone(status); } @Test public void test37b_batchJobSubmitWithRelativeWorkDir() throws Exception { String workingDir = "test37b"; String message = "some message " + workingDir; JobDescription description = echoJobDescription(workingDir, message); description.setStdout("stdout.txt"); runJob(workingDir, description, message); } @Test public void test37c_batchJobSubmitWithAbsoluteWorkDir() throws Exception { String workingDir = getWorkingDir("test37c"); String message = "some message " + workingDir; JobDescription description = echoJobDescription(workingDir, message); description.setStdout("stdout.txt"); runJob(workingDir, description, message); } @Test public void test37d_batchJobSubmitWithIncorrectWorkingDir() throws Exception { //note that we are _not_ creating this directory, making it invalid String workingDir = getWorkingDir("test37d"); //submitting this job will either: // 1) throw an InvalidJobDescription when we submit the job // 2) produce an error when the job is run. try { job = jobs.submitJob(scheduler, timedJobDescription(workingDir, 1)); // wait extra second for timed job JobStatus status = jobs.waitUntilDone(job, config.getJobTimeout(1)); assertTrue("Job exceeded deadline! Expected status done, got " + status, status.isDone()); //option (2) assertTrue(status.hasException()); } catch (InvalidJobDescriptionException e) { //Submit failed, as expected (1) } } @Test public void test37e_batchJobSubmitWithWorkDirWithSpaces() throws Exception { //note the space in the path String workingDir = getWorkingDir("test 37b"); String message = "some message " + workingDir; JobDescription description = echoJobDescription(workingDir, message); description.setStdout("stdout.txt"); runJob(workingDir, description, message); } //@Test public void test38_multipleBatchJobSubmitWithInput() throws Exception { String message = "Hello World! 
test38"; String workingDir = getWorkingDir("test38"); Path root = initJobDirectory(workingDir); Path stdin = resolve(root, "stdin.txt"); try { JobDescription description = catJobDescription(workingDir, stdin, message); description.setProcessesPerNode(2); job = jobs.submitJob(scheduler, description); JobStatus status = jobs.waitUntilDone(job, config.getJobTimeout(0)); checkJobDone(status); for (int i = 0; i < 2; i++) { String outString = readFile(root, "stdout.txt." + i); String errString = readFile(root, "stderr.txt." + i); assertNotNull(outString); // Line ending may differ assertTrue(outString.startsWith(message)); assertNotNull(errString); assertEquals(0, errString.length()); } } finally { cleanupJobRecursive(root); } } @Test public void test39_multipleBatchJobSubmitWithExceptions() throws Exception { // NOTE: This test assumes that an exception is thrown when the status of a job is requested twice after the job is done! // This may not be true for all schedulers. if (config.supportsStatusAfterDone()) { return; } Job[] j = new Job[] { jobs.submitJob(scheduler, timedJobDescription(null, 1)), jobs.submitJob(scheduler, timedJobDescription(null, 2)), }; long deadline = System.currentTimeMillis() + config.getJobTimeout(1) + config.getJobTimeout(2); JobStatus[] s = null; while (System.currentTimeMillis() < deadline) { s = jobs.getJobStatuses(j); if (s[0].hasException() && s[1].hasException()) { break; } Thread.sleep(config.getPollDelay()); } assertNotNull("Job status could not be retrieved", s); assertTrue("Job exceeded deadline!", s[0].hasException() && s[1].hasException()); } @Test public void test40_batchJobSubmitWithExitcode() throws Exception { job = jobs.submitJob(scheduler, timedJobDescription(null, 1)); JobStatus status = jobs.waitUntilDone(job, config.getJobTimeout(1)); checkJobDone(status); if (!status.getState().equals("unknown")) { assertEquals(0, status.getExitCode().longValue()); } } @Test public void test40_batchJobSubmitWithNonZeroExitcode() throws Exception { //run an ls with a non existing file. This should make ls return exitcode 2 JobDescription description = new JobDescription(); if (config.targetIsWindows()) { // Will always exit! 
description.setExecutable("timeout"); description.setArguments("1"); } else { description.setExecutable("/bin/cat"); description.setArguments("non.existing.file"); } job = jobs.submitJob(scheduler, description); JobStatus status = jobs.waitUntilDone(job, config.getJobTimeout(1)); checkJobDone(status); if (!status.getState().equals("unknown")) { assertNotEquals(0, status.getExitCode().longValue()); } } @Test public void test41_batchJobSubmitWithEnvironmentVariable() throws Exception { if (!config.supportsEnvironmentVariables() || config.targetIsWindows()) { return; } String envValue = "some_value"; String workingDir = getWorkingDir("test41"); Path root = initJobDirectory(workingDir); try { //echo the given variable, to see if the va JobDescription description = printEnvJobDescription(workingDir, envValue); description.setStdout("stdout.txt"); job = jobs.submitJob(scheduler, description); JobStatus status = jobs.waitUntilDone(job, config.getJobTimeout(0)); checkJobDone(status); checkJobOutput(job, root, envValue + "\n"); } finally { cleanupJob(job, root); } } @Test public void test41b_batchJobSubmitWithEnvironmentVariable() throws Exception { if (config.supportsEnvironmentVariables() || config.targetIsWindows()) { return; } try { job = jobs.submitJob(scheduler, printEnvJobDescription(null, "some_value")); jobs.waitUntilDone(job, config.getUpdateTime()); fail("Job description not supposed to be supported."); } catch (UnsupportedJobDescriptionException e) { // do nothing } } @Test public void test42a_batchJob_parallel_Exception() throws Exception { if (config.supportsParallelJobs()) { return; } JobDescription description = echoJobDescription(null, "some message"); description.setNodeCount(2); description.setProcessesPerNode(2); try { jobs.submitJob(scheduler, description); fail("Submit did not throw exception, which was expected!"); } catch (InvalidJobDescriptionException e) { // do nothing } } @Test public void test43_submit_JobDescriptionShouldBeCopied_Success() throws Exception { String workingDir = getWorkingDir("test43"); Path root = initJobDirectory(workingDir); try { JobDescription description = nonExistingJobDescription(workingDir); description.setStdout("stdout.txt"); job = jobs.submitJob(scheduler, description); description.setStdout("aap.txt"); assertNotEquals("Job description should have been copied!", job.getJobDescription().getStdout(), description.getStdout()); JobStatus status = jobs.cancelJob(job); if (!status.isDone()) { jobs.waitUntilDone(job, config.getUpdateTime()); } } finally { cleanupJobRecursive(root); } } @Test public void test44_submit_JobDescriptionShouldBeSame() throws Exception { String workingDir = getWorkingDir("test44"); Path root = initJobDirectory(workingDir); Path stdin = resolve(root, "stdin.txt"); try { JobDescription description = catJobDescription(workingDir, stdin, "my message"); description.setStdout("stdout.txt"); description.setStderr("stderr.txt"); job = jobs.submitJob(scheduler, description); JobStatus status = jobs.cancelJob(job); if (!status.isDone()) { jobs.waitUntilDone(job, config.getUpdateTime()); } JobDescription submitted = job.getJobDescription(); assertEquals("stdout.txt", submitted.getStdout()); assertEquals("stderr.txt", submitted.getStderr()); assertEquals("stdin.txt", submitted.getStdin()); assertEquals(workingDir, submitted.getWorkingDirectory()); } finally { cleanupJob(job, root, stdin); } } @Test(expected = IllegalArgumentException.class) public void test45_batchJobSubmitWithIllegalWaitUntilRunning() throws Exception { String 
workingDir = getWorkingDir("test45"); Path root = initJobDirectory(workingDir); try { JobDescription description = timedJobDescription(workingDir, 5); description.setStdout("stdout.txt"); description.setStderr("stderr.txt"); job = jobs.submitJob(scheduler, description); // Should throw exception jobs.waitUntilRunning(job, -1); } finally { jobs.cancelJob(job); jobs.waitUntilDone(job, 0); cleanupJob(job, root); } } @Test public void test46a_batchJobSubmitWithPollingWaitUntilDone() throws Exception { String workingDir = getWorkingDir("test46"); Path root = initJobDirectory(workingDir); try { JobDescription description = timedJobDescription(workingDir, 10); long start = System.currentTimeMillis(); job = jobs.submitJob(scheduler, description); JobStatus status = jobs.waitUntilDone(job, 1000); while (status.isRunning()) { status = jobs.waitUntilDone(job, 1000); } long end = System.currentTimeMillis(); checkJobDone(status); // We expect the job to have lasted at least 10000 milliseconds, which would require 9 or more times polling. assertTrue((end-start) >= 10000); } finally { cleanupJob(job, root); } } @Test public void test46b_batchJobSubmitWithPollingWaitUntilDone() throws Exception { String workingDir = getWorkingDir("test46"); Path root = initJobDirectory(workingDir); try { JobDescription description = timedJobDescription(workingDir, 10); job = jobs.submitJob(scheduler, description); JobStatus status = jobs.waitUntilDone(job, 1000); int count = 1; while (status.isRunning()) { status = jobs.waitUntilDone(job, 1000); count++; } checkJobDone(status); // We expect the job to have lasted at least 10000 milliseconds, which would require 9 or more times polling. assertTrue(count >= 9); } finally { cleanupJob(job, root); } } @Test public void test46c_batchJobSubmitWithPollingWaitUntilDone() throws Exception { String workingDir = getWorkingDir("test46"); Path root = initJobDirectory(workingDir); try { JobDescription description = timedJobDescription(workingDir, 10); job = jobs.submitJob(scheduler, description); JobStatus status = jobs.waitUntilDone(job, 1000); while (status.isRunning()) { long now = System.currentTimeMillis(); status = jobs.waitUntilDone(job, 1000); long diff = System.currentTimeMillis() - now; // The wait should have lasted at least 1000 millis if (status.isRunning()) { assertTrue(diff >= 1000); } } checkJobDone(status); } finally { cleanupJob(job, root); } } @Test public void test47_batchJobSubmitWithSingleWaitUntilDone() throws Exception { String workingDir = getWorkingDir("test46"); Path root = initJobDirectory(workingDir); try { JobDescription description = timedJobDescription(workingDir, 10); long start = System.currentTimeMillis(); job = jobs.submitJob(scheduler, description); JobStatus status = jobs.waitUntilDone(job, 0); // Should wait until the job is finished, however long it takes. long end = System.currentTimeMillis(); // Job must be in done state checkJobDone(status); // We expect the job to have lasted at least 10000 milliseconds, which would require 9 or more times polling. assertTrue((end-start) >= 10000); } finally { cleanupJob(job, root); } } @Test public void test48_batchJobSubmitWithSingleWaitUntilRunning() throws Exception { String workingDir = getWorkingDir("test46"); Path root = initJobDirectory(workingDir); try { JobDescription description = timedJobDescription(workingDir, 5); job = jobs.submitJob(scheduler, description); JobStatus status = jobs.waitUntilRunning(job, 0); // Should wait until the job is finished, however long it takes. 
assert(status.isRunning()); status = jobs.waitUntilDone(job, 0); // Job must be in done state checkJobDone(status); } finally { cleanupJob(job, root); } } @Test(expected = IllegalArgumentException.class) public void test49_batchJobSubmitWithIllegalWaitUntilDone() throws Exception { String workingDir = getWorkingDir("test46"); Path root = initJobDirectory(workingDir); try { JobDescription description = timedJobDescription(workingDir, 1); job = jobs.submitJob(scheduler, description); // Should throw exception! jobs.waitUntilDone(job, -1); } finally { jobs.cancelJob(job); jobs.waitUntilDone(job, 0); cleanupJob(job, root); } } }
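The test46a-test46c variants above probe the same contract from three angles (elapsed time, poll count, per-poll duration): a bounded waitUntilDone(job, 1000) must block for roughly the given timeout and return while the job is still running, so callers can poll in a loop. A minimal self-contained sketch of that polling contract follows; PollUntilDone and its names are illustrative stand-ins, not part of the Xenon API.

```java
import java.util.concurrent.TimeoutException;
import java.util.function.BooleanSupplier;

/** Illustrative polling helper; not part of the Xenon API. */
final class PollUntilDone {

    /**
     * Polls {@code isDone} every {@code pollMillis} until it returns true,
     * or until {@code maxMillis} has elapsed. A maxMillis of 0 means
     * "wait indefinitely", mirroring the waitUntilDone(job, 0) convention
     * exercised by test47 and the cleanup code above.
     */
    static void await(BooleanSupplier isDone, long pollMillis, long maxMillis)
            throws InterruptedException, TimeoutException {
        if (pollMillis <= 0 || maxMillis < 0) {
            throw new IllegalArgumentException("pollMillis must be positive and maxMillis non-negative");
        }
        long deadline = System.currentTimeMillis() + maxMillis;
        while (!isDone.getAsBoolean()) {
            if (maxMillis > 0 && System.currentTimeMillis() >= deadline) {
                throw new TimeoutException("job did not finish in " + maxMillis + " ms");
            }
            Thread.sleep(pollMillis); // each round costs ~pollMillis, as test46c asserts
        }
    }

    public static void main(String[] args) throws Exception {
        long end = System.currentTimeMillis() + 3000; // fake 3-second "job"
        await(() -> System.currentTimeMillis() >= end, 500, 10_000);
        System.out.println("done");
    }
}
```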
Fixed tests for waitUntilDone and waitUntilRunning. Screwed up job cleanup after test
src/integrationTest/java/nl/esciencecenter/xenon/adaptors/GenericScheduleJobTestParent.java
Fixed tests for waitUntilDone and waitUntilRunning. Screwed up job cleanup after test
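test45 and test49 in the file above both pin down the same negative-timeout contract via JUnit 4's expected-exception mechanism. A compact, standalone restatement of that check; validateTimeout is a hypothetical stand-in for the argument validation that waitUntilDone/waitUntilRunning perform internally, not a Xenon method.

```java
import org.junit.Test;

// Standalone JUnit 4 sketch of the negative-timeout contract tested above.
public class TimeoutContractTest {

    static void validateTimeout(long timeout) {
        if (timeout < 0) {
            throw new IllegalArgumentException("timeout must be >= 0, was " + timeout);
        }
    }

    @Test(expected = IllegalArgumentException.class)
    public void negativeTimeoutIsRejected() {
        validateTimeout(-1); // should throw, completing the test
    }

    @Test
    public void zeroMeansWaitIndefinitely() {
        validateTimeout(0); // 0 is the documented "wait indefinitely" value
    }
}
```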
Java
apache-2.0
e6984296888103a33b4aacf0650f2a5bb9b17919
0
wildfly-security-incubator/undertow,wildfly-security-incubator/undertow,wildfly-security-incubator/undertow
/* * JBoss, Home of Professional Open Source. * Copyright 2014 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.undertow.server.protocol.framed; import static org.xnio.IoUtils.safeClose; import java.io.IOException; import java.net.InetSocketAddress; import java.net.SocketAddress; import java.nio.ByteBuffer; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Deque; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.ListIterator; import java.util.Set; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.LinkedBlockingDeque; import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicIntegerFieldUpdater; import io.undertow.UndertowOptions; import org.xnio.Buffers; import org.xnio.ChannelExceptionHandler; import org.xnio.ChannelListener; import org.xnio.ChannelListener.Setter; import org.xnio.ChannelListeners; import org.xnio.IoUtils; import org.xnio.Option; import org.xnio.OptionMap; import io.undertow.connector.ByteBufferPool; import io.undertow.connector.PooledByteBuffer; import org.xnio.StreamConnection; import org.xnio.XnioIoThread; import org.xnio.XnioWorker; import org.xnio.channels.CloseableChannel; import org.xnio.channels.ConnectedChannel; import org.xnio.channels.StreamSinkChannel; import org.xnio.channels.StreamSourceChannel; import io.undertow.UndertowMessages; import io.undertow.conduits.IdleTimeoutConduit; import io.undertow.util.ReferenceCountedPooled; import io.undertow.websockets.core.WebSocketLogger; import org.xnio.channels.SuspendableWriteChannel; /** * A {@link org.xnio.channels.ConnectedChannel} which can be used to send and receive Frames. * <p> * This provides a common base for framed protocols such as websockets and SPDY * * @author Stuart Douglas */ public abstract class AbstractFramedChannel<C extends AbstractFramedChannel<C, R, S>, R extends AbstractFramedStreamSourceChannel<C, R, S>, S extends AbstractFramedStreamSinkChannel<C, R, S>> implements ConnectedChannel { /** * The maximum number of buffers we will queue before suspending reads and * waiting for the buffers to be consumed * * TODO: make the configurable */ private final int maxQueuedBuffers; private final StreamConnection channel; private final IdleTimeoutConduit idleTimeoutConduit; private final ChannelListener.SimpleSetter<C> closeSetter; private final ChannelListener.SimpleSetter<C> receiveSetter; private final ByteBufferPool bufferPool; /** * Frame priority implementation. This is used to determine the order in which frames get sent */ private final FramePriority<C, R, S> framePriority; /** * List of frames that are ready to send */ private final List<S> pendingFrames = new LinkedList<>(); /** * Frames that are not yet read to send. */ private final Deque<S> heldFrames = new ArrayDeque<>(); /** * new frames to be sent. 
These will be added to either the pending or held frames list * depending on the {@link #framePriority} implementation in use. */ private final Deque<S> newFrames = new LinkedBlockingDeque<>(); private volatile long frameDataRemaining; private volatile R receiver; private boolean receivesSuspended = true; @SuppressWarnings("unused") private volatile int readsBroken = 0; @SuppressWarnings("unused") private volatile int writesBroken = 0; private static final AtomicIntegerFieldUpdater<AbstractFramedChannel> readsBrokenUpdater = AtomicIntegerFieldUpdater.newUpdater(AbstractFramedChannel.class, "readsBroken"); private static final AtomicIntegerFieldUpdater<AbstractFramedChannel> writesBrokenUpdater = AtomicIntegerFieldUpdater.newUpdater(AbstractFramedChannel.class, "writesBroken"); private ReferenceCountedPooled readData = null; private final List<ChannelListener<C>> closeTasks = new CopyOnWriteArrayList<>(); private volatile boolean flushingSenders = false; private final Set<AbstractFramedStreamSourceChannel<C, R, S>> receivers = new HashSet<>(); @SuppressWarnings("unused") private volatile int outstandingBuffers; private volatile AtomicIntegerFieldUpdater<AbstractFramedChannel> outstandingBuffersUpdater = AtomicIntegerFieldUpdater.newUpdater(AbstractFramedChannel.class, "outstandingBuffers"); private final LinkedBlockingDeque<Runnable> taskRunQueue = new LinkedBlockingDeque<>(); private final OptionMap settings; /** * If this is true then the flush() method must be called to queue writes. This is provided to support batching */ private volatile boolean requireExplicitFlush = false; private final ReferenceCountedPooled.FreeNotifier freeNotifier = new ReferenceCountedPooled.FreeNotifier() { @Override public void freed() { int res = outstandingBuffersUpdater.decrementAndGet(AbstractFramedChannel.this); if(!receivesSuspended && res == maxQueuedBuffers - 1) { synchronized (AbstractFramedChannel.this) { if(outstandingBuffersUpdater.get(AbstractFramedChannel.this) < maxQueuedBuffers) { channel.getSourceChannel().resumeReads(); } } } } }; private static final ChannelListener<AbstractFramedChannel> DRAIN_LISTENER = new ChannelListener<AbstractFramedChannel>() { @Override public void handleEvent(AbstractFramedChannel channel) { try { AbstractFramedStreamSourceChannel stream = channel.receive(); if(stream != null) { WebSocketLogger.REQUEST_LOGGER.debugf("Draining channel %s as no receive listener has been set", stream); stream.getReadSetter().set(ChannelListeners.drainListener(Long.MAX_VALUE, null, null)); stream.wakeupReads(); } } catch (IOException e) { IoUtils.safeClose(channel); } } }; /** * Create a new {@link io.undertow.server.protocol.framed.AbstractFramedChannel} * 8 * @param connectedStreamChannel The {@link org.xnio.channels.ConnectedStreamChannel} over which the WebSocket Frames should get send and received. * Be aware that it already must be "upgraded". * @param bufferPool The {@link ByteBufferPool} which will be used to acquire {@link ByteBuffer}'s from. 
* @param framePriority * @param settings The settings */ protected AbstractFramedChannel(final StreamConnection connectedStreamChannel, ByteBufferPool bufferPool, FramePriority<C, R, S> framePriority, final PooledByteBuffer readData, OptionMap settings) { this.framePriority = framePriority; this.maxQueuedBuffers = settings.get(UndertowOptions.MAX_QUEUED_READ_BUFFERS, 10); this.settings = settings; if (readData != null) { if(readData.getBuffer().hasRemaining()) { this.readData = new ReferenceCountedPooled(readData, 1); } else { readData.close(); } } if(bufferPool == null) { throw UndertowMessages.MESSAGES.argumentCannotBeNull("bufferPool"); } if(connectedStreamChannel == null) { throw UndertowMessages.MESSAGES.argumentCannotBeNull("connectedStreamChannel"); } IdleTimeoutConduit idle = createIdleTimeoutChannel(connectedStreamChannel); connectedStreamChannel.getSourceChannel().setConduit(idle); connectedStreamChannel.getSinkChannel().setConduit(idle); this.idleTimeoutConduit = idle; this.channel = connectedStreamChannel; this.bufferPool = bufferPool; closeSetter = new ChannelListener.SimpleSetter<>(); receiveSetter = new ChannelListener.SimpleSetter<>(); channel.getSourceChannel().getReadSetter().set(null); channel.getSourceChannel().suspendReads(); channel.getSourceChannel().getReadSetter().set(new FrameReadListener()); connectedStreamChannel.getSinkChannel().getWriteSetter().set(new FrameWriteListener()); FrameCloseListener closeListener = new FrameCloseListener(); connectedStreamChannel.getSinkChannel().getCloseSetter().set(closeListener); connectedStreamChannel.getSourceChannel().getCloseSetter().set(closeListener); } protected IdleTimeoutConduit createIdleTimeoutChannel(StreamConnection connectedStreamChannel) { return new IdleTimeoutConduit(connectedStreamChannel.getSinkChannel().getConduit(), connectedStreamChannel.getSourceChannel().getConduit()); } void runInIoThread(Runnable task) { this.taskRunQueue.add(task); getIoThread().execute(new Runnable() { @Override public void run() { while (!taskRunQueue.isEmpty()) { taskRunQueue.poll().run(); } } }); } /** * Get the buffer pool for this connection. * * @return the buffer pool for this connection */ public ByteBufferPool getBufferPool() { return bufferPool; } @Override public SocketAddress getLocalAddress() { return channel.getLocalAddress(); } @Override public <A extends SocketAddress> A getLocalAddress(Class<A> type) { return channel.getLocalAddress(type); } @Override public XnioWorker getWorker() { return channel.getWorker(); } @Override public XnioIoThread getIoThread() { return channel.getIoThread(); } @Override public boolean supportsOption(Option<?> option) { return channel.supportsOption(option); } @Override public <T> T getOption(Option<T> option) throws IOException { return channel.getOption(option); } @Override public <T> T setOption(Option<T> option, T value) throws IOException { return channel.setOption(option, value); } @Override public boolean isOpen() { return channel.isOpen(); } @Override public SocketAddress getPeerAddress() { return channel.getPeerAddress(); } @Override public <A extends SocketAddress> A getPeerAddress(Class<A> type) { return channel.getPeerAddress(type); } /** * Get the source address of the Channel. * * @return the source address of the Channel */ public InetSocketAddress getSourceAddress() { return getPeerAddress(InetSocketAddress.class); } /** * Get the destination address of the Channel. 
* * @return the destination address of the Channel */ public InetSocketAddress getDestinationAddress() { return getLocalAddress(InetSocketAddress.class); } /** * receive method, returns null if no frame is ready. Otherwise returns a * channel that can be used to read the frame contents. * <p> * Calling this method can also have the side effect of making additional data available to * existing source channels. In general if you suspend receives or don't have some other way * of calling this method then it can prevent frame channels for being fully consumed. */ public synchronized R receive() throws IOException { if (isLastFrameReceived() && receiver == null) { //we have received the last frame, we just shut down and return //it would probably make more sense to have the last channel responsible for this //however it is much simpler just to have it here if(readData != null) { readData.close(); readData = null; } channel.getSourceChannel().suspendReads(); channel.getSourceChannel().shutdownReads(); return null; } ReferenceCountedPooled pooled = this.readData; boolean hasData; if (pooled == null) { pooled = allocateReferenceCountedBuffer(); if (pooled == null) { return null; } hasData = false; } else if(pooled.isFreed()) { //we attempt to re-used an existing buffer if(!pooled.tryUnfree()) { pooled = allocateReferenceCountedBuffer(); if (pooled == null) { return null; } } else { pooled.getBuffer().limit(pooled.getBuffer().capacity()); } hasData = false; } else { hasData = pooled.getBuffer().hasRemaining(); } boolean forceFree = false; int read = 0; try { if (!hasData) { pooled.getBuffer().clear(); read = channel.getSourceChannel().read(pooled.getBuffer()); if (read == 0) { //no data, we just free the buffer forceFree = true; return null; } else if (read == -1) { try { channel.getSourceChannel().shutdownReads(); } catch (IOException e) { if (WebSocketLogger.REQUEST_LOGGER.isDebugEnabled()) { WebSocketLogger.REQUEST_LOGGER.debugf(e, "Connection closed with IOException when attempting to shut down reads"); } // nothing we can do here.. 
close safeClose(channel.getSourceChannel()); throw e; } forceFree = true; lastDataRead(); return null; } pooled.getBuffer().flip(); } if (frameDataRemaining > 0) { if (frameDataRemaining >= pooled.getBuffer().remaining()) { frameDataRemaining -= pooled.getBuffer().remaining(); if(receiver != null) { receiver.dataReady(null, pooled); } else { //we are dropping a frame pooled.close(); } readData = null; if(frameDataRemaining == 0) { receiver = null; } return null; } else { ByteBuffer buf = pooled.getBuffer().duplicate(); buf.limit((int) (buf.position() + frameDataRemaining)); pooled.getBuffer().position((int) (pooled.getBuffer().position() + frameDataRemaining)); frameDataRemaining = 0; PooledByteBuffer frameData = pooled.createView(buf); if(receiver != null) { receiver.dataReady(null, frameData); } else{ //we are dropping the frame frameData.close(); } receiver = null; } //if we read data into a frame we just return immediately, even if there is more remaining //see https://issues.jboss.org/browse/UNDERTOW-410 //basically if we don't do this we loose some message ordering semantics //as the second message may be processed before the first one //this is problematic for HTTPS, where the read listener may also be invoked by a queued task //and not by the selector mechanism return null; } FrameHeaderData data = parseFrame(pooled.getBuffer()); if (data != null) { PooledByteBuffer frameData; if (data.getFrameLength() >= pooled.getBuffer().remaining()) { frameDataRemaining = data.getFrameLength() - pooled.getBuffer().remaining(); frameData = pooled.createView(pooled.getBuffer().duplicate()); pooled.getBuffer().position(pooled.getBuffer().limit()); } else { ByteBuffer buf = pooled.getBuffer().duplicate(); buf.limit((int) (buf.position() + data.getFrameLength())); pooled.getBuffer().position((int) (pooled.getBuffer().position() + data.getFrameLength())); frameData = pooled.createView(buf); } AbstractFramedStreamSourceChannel<?, ?, ?> existing = data.getExistingChannel(); if (existing != null) { if (data.getFrameLength() > frameData.getBuffer().remaining()) { receiver = (R) existing; } existing.dataReady(data, frameData); return null; } else { boolean moreData = data.getFrameLength() > frameData.getBuffer().remaining(); R newChannel = createChannel(data, frameData); if (newChannel != null) { if(!newChannel.isComplete()) { receivers.add(newChannel); } if (moreData) { receiver = newChannel; } } else { frameData.close(); } return newChannel; } } return null; } catch (IOException|RuntimeException e) { //something has code wrong with parsing, close the read side //we don't close the write side, as the underlying implementation will most likely want to send an error markReadsBroken(e); forceFree = true; throw e; }finally { //if the receive caused the channel to break the close listener may be have been called //which will make readData null if (readData != null) { if (!pooled.getBuffer().hasRemaining() || forceFree) { if(pooled.getBuffer().limit() * 2 > pooled.getBuffer().capacity() || forceFree) { //if we have used more than half the buffer we don't allow it to be re-aquired readData = null; } //even though this is freed we may un-free it if we get a new packet //this prevents many small reads resulting in a large number of allocated buffers pooled.close(); } } } } private ReferenceCountedPooled allocateReferenceCountedBuffer() { if(maxQueuedBuffers > 0) { int expect; do { expect = outstandingBuffersUpdater.get(this); if (expect == maxQueuedBuffers) { synchronized (this) { //we need to re-read in a sync 
block, to prevent races expect = outstandingBuffersUpdater.get(this); if (expect == maxQueuedBuffers) { channel.getSourceChannel().suspendReads(); return null; } } } } while (!outstandingBuffersUpdater.compareAndSet(this, expect, expect + 1)); } PooledByteBuffer buf = bufferPool.allocate(); return this.readData = new ReferenceCountedPooled(buf, 1, maxQueuedBuffers > 0 ? freeNotifier : null); } /** * Method than is invoked when read() returns -1. */ protected void lastDataRead() { } /** * Method that creates the actual stream source channel implementation that is in use. * * @param frameHeaderData The header data, as returned by {@link #parseFrame(java.nio.ByteBuffer)} * @param frameData Any additional data for the frame that has already been read. This may not be the complete frame contents * @return A new stream source channel */ protected abstract R createChannel(FrameHeaderData frameHeaderData, PooledByteBuffer frameData) throws IOException; /** * Attempts to parse an incoming frame header from the data in the buffer. * * @param data The data that has been read from the channel * @return The frame header data, or <code>null</code> if the data was incomplete * @throws IOException If the data could not be parsed. */ protected abstract FrameHeaderData parseFrame(ByteBuffer data) throws IOException; protected synchronized void recalculateHeldFrames() throws IOException { if (!heldFrames.isEmpty()) { framePriority.frameAdded(null, pendingFrames, heldFrames); flushSenders(); } } /** * Flushes all ready stream sink conduits to the channel. * <p> * Frames will be batched up, to allow them all to be written out via a gathering * write. The {@link #framePriority} implementation will be invoked to decide which * frames are eligible for sending and in what order. */ protected synchronized void flushSenders() { if(flushingSenders) { throw UndertowMessages.MESSAGES.recursiveCallToFlushingSenders(); } flushingSenders = true; try { int toSend = 0; while (!newFrames.isEmpty()) { S frame = newFrames.poll(); if (framePriority.insertFrame(frame, pendingFrames)) { if (!heldFrames.isEmpty()) { framePriority.frameAdded(frame, pendingFrames, heldFrames); } } else { heldFrames.add(frame); } } boolean finalFrame = false; ListIterator<S> it = pendingFrames.listIterator(); while (it.hasNext()) { S sender = it.next(); if (sender.isReadyForFlush()) { ++toSend; } else { break; } if (sender.isLastFrame()) { finalFrame = true; } } if (toSend == 0) { //if there is nothing to send we just attempt a flush on the underlying channel try { if(channel.getSinkChannel().flush()) { channel.getSinkChannel().suspendWrites(); } } catch (IOException e) { safeClose(channel); markWritesBroken(e); } return; } ByteBuffer[] data = new ByteBuffer[toSend * 3]; int j = 0; it = pendingFrames.listIterator(); try { while (j < toSend) { S next = it.next(); //todo: rather than adding empty buffers just store the offsets SendFrameHeader frameHeader = next.getFrameHeader(); PooledByteBuffer frameHeaderByteBuffer = frameHeader.getByteBuffer(); data[j * 3] = frameHeaderByteBuffer != null ? frameHeaderByteBuffer.getBuffer() : Buffers.EMPTY_BYTE_BUFFER; data[(j * 3) + 1] = next.getBuffer() == null ? 
Buffers.EMPTY_BYTE_BUFFER : next.getBuffer(); data[(j * 3) + 2] = next.getFrameFooter(); ++j; } long toWrite = Buffers.remaining(data); long res; do { res = channel.getSinkChannel().write(data); toWrite -= res; } while (res > 0 && toWrite > 0); int max = toSend; while (max > 0) { S sinkChannel = pendingFrames.get(0); PooledByteBuffer frameHeaderByteBuffer = sinkChannel.getFrameHeader().getByteBuffer(); if (frameHeaderByteBuffer != null && frameHeaderByteBuffer.getBuffer().hasRemaining() || sinkChannel.getBuffer() != null && sinkChannel.getBuffer().hasRemaining() || sinkChannel.getFrameFooter().hasRemaining()) { break; } sinkChannel.flushComplete(); pendingFrames.remove(sinkChannel); max--; } if (!pendingFrames.isEmpty() || !channel.getSinkChannel().flush()) { channel.getSinkChannel().resumeWrites(); } else { channel.getSinkChannel().suspendWrites(); } if (pendingFrames.isEmpty() && finalFrame) { //all data has been sent. Close gracefully channel.getSinkChannel().shutdownWrites(); if (!channel.getSinkChannel().flush()) { channel.getSinkChannel().setWriteListener(ChannelListeners.flushingChannelListener(null, null)); channel.getSinkChannel().resumeWrites(); } } } catch (IOException e) { safeClose(channel); markWritesBroken(e); } } finally { flushingSenders = false; if(!newFrames.isEmpty()) { runInIoThread(new Runnable() { @Override public void run() { flushSenders(); } }); } } } void awaitWritable() throws IOException { this.channel.getSinkChannel().awaitWritable(); } void awaitWritable(long time, TimeUnit unit) throws IOException { this.channel.getSinkChannel().awaitWritable(time, unit); } /** * Queues a new frame to be sent, and attempts a flush if this is the first frame in the new frame queue. * <p> * Depending on the {@link FramePriority} implementation in use the channel may or may not be added to the actual * pending queue * * @param channel The channel */ protected void queueFrame(final S channel) throws IOException { assert !newFrames.contains(channel); if (isWritesBroken() || !this.channel.getSinkChannel().isOpen() || channel.isBroken() || !channel.isOpen()) { IoUtils.safeClose(channel); throw UndertowMessages.MESSAGES.channelIsClosed(); } newFrames.add(channel); if (!requireExplicitFlush || channel.isBufferFull()) { flush(); } } public void flush() { if (!flushingSenders) { if(channel.getIoThread() == Thread.currentThread()) { flushSenders(); } else { runInIoThread(new Runnable() { @Override public void run() { flushSenders(); } }); } } } /** * Returns true if the protocol specific final frame has been received. * * @return <code>true</code> If the last frame has been received */ protected abstract boolean isLastFrameReceived(); /** * @return <code>true</code> If the last frame has been sent */ protected abstract boolean isLastFrameSent(); /** * Method that is invoked when the read side of the channel is broken. This generally happens on a protocol error. */ protected abstract void handleBrokenSourceChannel(Throwable e); /** * Method that is invoked when then write side of a channel is broken. This generally happens on a protocol error. */ protected abstract void handleBrokenSinkChannel(Throwable e); /** * Return the {@link org.xnio.ChannelListener.Setter} which will holds the {@link org.xnio.ChannelListener} that gets notified once a frame was * received. 
*/ public Setter<C> getReceiveSetter() { return receiveSetter; } /** * Suspend the receive of new frames via {@link #receive()} */ public synchronized void suspendReceives() { receivesSuspended = true; if (receiver == null) { channel.getSourceChannel().suspendReads(); } } /** * Resume the receive of new frames via {@link #receive()} */ public synchronized void resumeReceives() { receivesSuspended = false; if (readData != null && !readData.isFreed()) { channel.getSourceChannel().wakeupReads(); } else { channel.getSourceChannel().resumeReads(); } } public boolean isReceivesResumed() { return !receivesSuspended; } /** * Forcibly closes the {@link io.undertow.server.protocol.framed.AbstractFramedChannel}. */ @Override public void close() throws IOException { safeClose(channel); if(readData != null) { readData.close(); readData = null; } } @Override public Setter<? extends AbstractFramedChannel> getCloseSetter() { return closeSetter; } /** * Called when a source sub channel fails to fulfil its contract, and leaves the channel in an inconsistent state. * <p> * The underlying read side will be forcibly closed. * * @param cause The possibly null cause */ @SuppressWarnings({"unchecked", "rawtypes"}) protected void markReadsBroken(Throwable cause) { if (readsBrokenUpdater.compareAndSet(this, 0, 1)) { handleBrokenSourceChannel(cause); safeClose(channel.getSourceChannel()); closeSubChannels(); } } /** * Method that is called when the channel is being forcibly closed, and all sub stream sink/source * channels should also be forcibly closed. */ protected abstract void closeSubChannels(); /** * Called when a sub channel fails to fulfil its contract, and leaves the channel in an inconsistent state. * <p> * The underlying channel will be closed, and any sub channels that have writes resumed will have their * listeners notified. It is expected that these listeners will then attempt to use the channel, and their standard * error handling logic will take over. 
* * @param cause The possibly null cause */ @SuppressWarnings({"unchecked", "rawtypes"}) protected void markWritesBroken(Throwable cause) { if (writesBrokenUpdater.compareAndSet(this, 0, 1)) { handleBrokenSinkChannel(cause); safeClose(channel.getSinkChannel()); synchronized (this) { for (final S channel : pendingFrames) { channel.markBroken(); } pendingFrames.clear(); for (final S channel : newFrames) { channel.markBroken(); } newFrames.clear(); for (final S channel : heldFrames) { channel.markBroken(); } heldFrames.clear(); } } } protected boolean isWritesBroken() { return writesBrokenUpdater.get(this) != 0; } protected boolean isReadsBroken() { return readsBrokenUpdater.get(this) != 0; } void resumeWrites() { channel.getSinkChannel().resumeWrites(); } void suspendWrites() { channel.getSinkChannel().suspendWrites(); } void wakeupWrites() { channel.getSinkChannel().wakeupWrites(); } StreamSourceChannel getSourceChannel() { return channel.getSourceChannel(); } void notifyFrameReadComplete(AbstractFramedStreamSourceChannel<C, R, S> channel) { synchronized (AbstractFramedChannel.this) { if (isLastFrameReceived()) { safeClose(AbstractFramedChannel.this.channel.getSourceChannel()); } } } void notifyClosed(AbstractFramedStreamSourceChannel<C, R, S> channel) { synchronized (AbstractFramedChannel.this) { receivers.remove(channel); } } /** * {@link org.xnio.ChannelListener} which delegates the read notification to the appropriate listener */ private final class FrameReadListener implements ChannelListener<StreamSourceChannel> { @SuppressWarnings({"unchecked", "rawtypes"}) @Override public void handleEvent(final StreamSourceChannel channel) { //clear the task queue before reading while (!taskRunQueue.isEmpty()) { taskRunQueue.poll().run(); } final R receiver = AbstractFramedChannel.this.receiver; if ((isLastFrameReceived() || receivesSuspended) && receiver == null) { channel.suspendReads(); return; } else { ChannelListener listener = receiveSetter.get(); if(listener == null) { listener = DRAIN_LISTENER; } WebSocketLogger.REQUEST_LOGGER.tracef("Invoking receive listener", receiver); ChannelListeners.invokeChannelListener(AbstractFramedChannel.this, listener); } if (readData != null && !readData.isFreed() && channel.isOpen()) { try { runInIoThread(new Runnable() { @Override public void run() { ChannelListeners.invokeChannelListener(channel, FrameReadListener.this); } }); } catch (RejectedExecutionException e) { IoUtils.safeClose(AbstractFramedChannel.this); } } } } private class FrameWriteListener implements ChannelListener<StreamSinkChannel> { @Override public void handleEvent(final StreamSinkChannel channel) { flushSenders(); } } /** * close listener, just goes through and activates any sub channels to make sure their listeners are invoked */ private class FrameCloseListener implements ChannelListener<CloseableChannel> { private boolean sinkClosed; private boolean sourceClosed; @Override public void handleEvent(final CloseableChannel c) { if(c instanceof StreamSinkChannel) { sinkClosed = true; } else if(c instanceof StreamSourceChannel) { sourceClosed = true; } if(!sourceClosed || !sinkClosed) { return; //both sides need to be closed } else if(readData != null && !readData.isFreed()) { //we make sure there is no data left to receive, if there is then we invoke the receive listener runInIoThread(new Runnable() { @Override public void run() { while (readData != null && !readData.isFreed()) { int rem = readData.getBuffer().remaining(); ChannelListener listener = receiveSetter.get(); if(listener == 
null) { listener = DRAIN_LISTENER; } ChannelListeners.invokeChannelListener(AbstractFramedChannel.this, listener); if(!AbstractFramedChannel.this.isOpen()) { break; } if (readData != null && rem == readData.getBuffer().remaining()) { break;//make sure we are making progress } } handleEvent(c); } }); return; } if (Thread.currentThread() != c.getIoThread()) { runInIoThread(new Runnable() { @Override public void run() { ChannelListeners.invokeChannelListener(c, FrameCloseListener.this); } }); return; } R receiver = AbstractFramedChannel.this.receiver; try { if (receiver != null && receiver.isOpen() && receiver.isReadResumed()) { ChannelListeners.invokeChannelListener(receiver, ((SimpleSetter) receiver.getReadSetter()).get()); } synchronized (AbstractFramedChannel.this) { for (final S channel : pendingFrames) { //if this was a clean shutdown there should not be any senders channel.markBroken(); } for (final S channel : newFrames) { //if this was a clean shutdown there should not be any senders channel.markBroken(); } for (final S channel : heldFrames) { //if this was a clean shutdown there should not be any senders channel.markBroken(); } for(AbstractFramedStreamSourceChannel<C, R, S> r : new ArrayList<>(receivers)) { IoUtils.safeClose(r); } } } finally { try { for (ChannelListener<C> task : closeTasks) { ChannelListeners.invokeChannelListener((C) AbstractFramedChannel.this, task); } } finally { synchronized (AbstractFramedChannel.this) { closeSubChannels(); if (readData != null) { readData.close(); readData = null; } } ChannelListeners.invokeChannelListener((C) AbstractFramedChannel.this, closeSetter.get()); } } } } public void setIdleTimeout(long timeout) { idleTimeoutConduit.setIdleTimeout(timeout); } public long getIdleTimeout() { return idleTimeoutConduit.getIdleTimeout(); } protected FramePriority<C, R, S> getFramePriority() { return framePriority; } public void addCloseTask(final ChannelListener<C> task) { closeTasks.add(task); } @Override public String toString() { return getClass().getSimpleName() + " peer " + channel.getPeerAddress() + " local " + channel.getLocalAddress() + "[ " + (receiver == null ? "No Receiver" : receiver.toString()) + " " + pendingFrames.toString() + " -- " + heldFrames.toString() + " -- " + newFrames.toString() + "]"; } protected StreamConnection getUnderlyingConnection() { return channel; } protected ChannelExceptionHandler<SuspendableWriteChannel> writeExceptionHandler() { return new ChannelExceptionHandler<SuspendableWriteChannel>() { @Override public void handleException(SuspendableWriteChannel channel, IOException exception) { markWritesBroken(exception); } }; } public boolean isRequireExplicitFlush() { return requireExplicitFlush; } public void setRequireExplicitFlush(boolean requireExplicitFlush) { this.requireExplicitFlush = requireExplicitFlush; } protected OptionMap getSettings() { return settings; } }
core/src/main/java/io/undertow/server/protocol/framed/AbstractFramedChannel.java
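The new_contents above cap the number of queued read buffers: allocateReferenceCountedBuffer bumps outstandingBuffers in a CAS loop and suspends reads at the limit, while freeNotifier resumes reads the moment the count drops back below maxQueuedBuffers. The same bookkeeping in isolation, as a hedged sketch using a plain AtomicInteger; BufferPermits is an invented name, not Undertow API.

```java
import java.util.concurrent.atomic.AtomicInteger;

/** Illustrative bounded-permit counter, similar in spirit to the
 *  outstandingBuffers bookkeeping above; not Undertow code. */
final class BufferPermits {
    private final int max;
    private final AtomicInteger outstanding = new AtomicInteger();

    BufferPermits(int max) { this.max = max; }

    /** CAS loop: returns false (caller should suspend reads) when at the cap. */
    boolean tryAcquire() {
        int cur;
        do {
            cur = outstanding.get();
            if (cur == max) {
                return false; // at the limit, back off
            }
        } while (!outstanding.compareAndSet(cur, cur + 1));
        return true;
    }

    /** Returns true exactly when the release crossed max -> max - 1,
     *  i.e. the moment reads can be resumed. */
    boolean release() {
        return outstanding.decrementAndGet() == max - 1;
    }

    public static void main(String[] args) {
        BufferPermits permits = new BufferPermits(2);
        System.out.println(permits.tryAcquire()); // true
        System.out.println(permits.tryAcquire()); // true
        System.out.println(permits.tryAcquire()); // false: would exceed cap
        System.out.println(permits.release());    // true: reads may resume
    }
}
```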
/* * JBoss, Home of Professional Open Source. * Copyright 2014 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.undertow.server.protocol.framed; import static org.xnio.IoUtils.safeClose; import java.io.IOException; import java.net.InetSocketAddress; import java.net.SocketAddress; import java.nio.ByteBuffer; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Deque; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.ListIterator; import java.util.Set; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.LinkedBlockingDeque; import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicIntegerFieldUpdater; import io.undertow.UndertowOptions; import org.xnio.Buffers; import org.xnio.ChannelExceptionHandler; import org.xnio.ChannelListener; import org.xnio.ChannelListener.Setter; import org.xnio.ChannelListeners; import org.xnio.IoUtils; import org.xnio.Option; import org.xnio.OptionMap; import io.undertow.connector.ByteBufferPool; import io.undertow.connector.PooledByteBuffer; import org.xnio.StreamConnection; import org.xnio.XnioIoThread; import org.xnio.XnioWorker; import org.xnio.channels.CloseableChannel; import org.xnio.channels.ConnectedChannel; import org.xnio.channels.StreamSinkChannel; import org.xnio.channels.StreamSourceChannel; import io.undertow.UndertowMessages; import io.undertow.conduits.IdleTimeoutConduit; import io.undertow.util.ReferenceCountedPooled; import io.undertow.websockets.core.WebSocketLogger; import org.xnio.channels.SuspendableWriteChannel; /** * A {@link org.xnio.channels.ConnectedChannel} which can be used to send and receive Frames. * <p> * This provides a common base for framed protocols such as websockets and SPDY * * @author Stuart Douglas */ public abstract class AbstractFramedChannel<C extends AbstractFramedChannel<C, R, S>, R extends AbstractFramedStreamSourceChannel<C, R, S>, S extends AbstractFramedStreamSinkChannel<C, R, S>> implements ConnectedChannel { /** * The maximum number of buffers we will queue before suspending reads and * waiting for the buffers to be consumed * * TODO: make the configurable */ private final int maxQueuedBuffers; private final StreamConnection channel; private final IdleTimeoutConduit idleTimeoutConduit; private final ChannelListener.SimpleSetter<C> closeSetter; private final ChannelListener.SimpleSetter<C> receiveSetter; private final ByteBufferPool bufferPool; /** * Frame priority implementation. This is used to determine the order in which frames get sent */ private final FramePriority<C, R, S> framePriority; /** * List of frames that are ready to send */ private final List<S> pendingFrames = new LinkedList<>(); /** * Frames that are not yet read to send. */ private final Deque<S> heldFrames = new ArrayDeque<>(); /** * new frames to be sent. 
These will be added to either the pending or held frames list * depending on the {@link #framePriority} implementation in use. */ private final Deque<S> newFrames = new LinkedBlockingDeque<>(); private volatile long frameDataRemaining; private volatile R receiver; private boolean receivesSuspended = true; @SuppressWarnings("unused") private volatile int readsBroken = 0; @SuppressWarnings("unused") private volatile int writesBroken = 0; private static final AtomicIntegerFieldUpdater<AbstractFramedChannel> readsBrokenUpdater = AtomicIntegerFieldUpdater.newUpdater(AbstractFramedChannel.class, "readsBroken"); private static final AtomicIntegerFieldUpdater<AbstractFramedChannel> writesBrokenUpdater = AtomicIntegerFieldUpdater.newUpdater(AbstractFramedChannel.class, "writesBroken"); private ReferenceCountedPooled readData = null; private final List<ChannelListener<C>> closeTasks = new CopyOnWriteArrayList<>(); private volatile boolean flushingSenders = false; private final Set<AbstractFramedStreamSourceChannel<C, R, S>> receivers = new HashSet<>(); @SuppressWarnings("unused") private volatile int outstandingBuffers; private volatile AtomicIntegerFieldUpdater<AbstractFramedChannel> outstandingBuffersUpdater = AtomicIntegerFieldUpdater.newUpdater(AbstractFramedChannel.class, "outstandingBuffers"); private final LinkedBlockingDeque<Runnable> taskRunQueue = new LinkedBlockingDeque<>(); private final OptionMap settings; /** * If this is true then the flush() method must be called to queue writes. This is provided to support batching */ private volatile boolean requireExplicitFlush = false; private final ReferenceCountedPooled.FreeNotifier freeNotifier = new ReferenceCountedPooled.FreeNotifier() { @Override public void freed() { int res = outstandingBuffersUpdater.decrementAndGet(AbstractFramedChannel.this); if(!receivesSuspended && res == maxQueuedBuffers - 1) { synchronized (AbstractFramedChannel.this) { if(outstandingBuffersUpdater.get(AbstractFramedChannel.this) < maxQueuedBuffers) { channel.getSourceChannel().resumeReads(); } } } } }; private static final ChannelListener<AbstractFramedChannel> DRAIN_LISTENER = new ChannelListener<AbstractFramedChannel>() { @Override public void handleEvent(AbstractFramedChannel channel) { try { AbstractFramedStreamSourceChannel stream = channel.receive(); if(stream != null) { WebSocketLogger.REQUEST_LOGGER.debugf("Draining channel %s as no receive listener has been set", stream); stream.getReadSetter().set(ChannelListeners.drainListener(Long.MAX_VALUE, null, null)); stream.wakeupReads(); } } catch (IOException e) { IoUtils.safeClose(channel); } } }; /** * Create a new {@link io.undertow.server.protocol.framed.AbstractFramedChannel} * 8 * @param connectedStreamChannel The {@link org.xnio.channels.ConnectedStreamChannel} over which the WebSocket Frames should get send and received. * Be aware that it already must be "upgraded". * @param bufferPool The {@link ByteBufferPool} which will be used to acquire {@link ByteBuffer}'s from. 
* @param framePriority * @param settings The settings */ protected AbstractFramedChannel(final StreamConnection connectedStreamChannel, ByteBufferPool bufferPool, FramePriority<C, R, S> framePriority, final PooledByteBuffer readData, OptionMap settings) { this.framePriority = framePriority; this.maxQueuedBuffers = settings.get(UndertowOptions.MAX_QUEUED_READ_BUFFERS, 10); this.settings = settings; if (readData != null) { if(readData.getBuffer().hasRemaining()) { this.readData = new ReferenceCountedPooled(readData, 1); } else { readData.close(); } } if(bufferPool == null) { throw UndertowMessages.MESSAGES.argumentCannotBeNull("bufferPool"); } if(connectedStreamChannel == null) { throw UndertowMessages.MESSAGES.argumentCannotBeNull("connectedStreamChannel"); } IdleTimeoutConduit idle = createIdleTimeoutChannel(connectedStreamChannel); connectedStreamChannel.getSourceChannel().setConduit(idle); connectedStreamChannel.getSinkChannel().setConduit(idle); this.idleTimeoutConduit = idle; this.channel = connectedStreamChannel; this.bufferPool = bufferPool; closeSetter = new ChannelListener.SimpleSetter<>(); receiveSetter = new ChannelListener.SimpleSetter<>(); channel.getSourceChannel().getReadSetter().set(null); channel.getSourceChannel().suspendReads(); channel.getSourceChannel().getReadSetter().set(new FrameReadListener()); connectedStreamChannel.getSinkChannel().getWriteSetter().set(new FrameWriteListener()); FrameCloseListener closeListener = new FrameCloseListener(); connectedStreamChannel.getSinkChannel().getCloseSetter().set(closeListener); connectedStreamChannel.getSourceChannel().getCloseSetter().set(closeListener); } protected IdleTimeoutConduit createIdleTimeoutChannel(StreamConnection connectedStreamChannel) { return new IdleTimeoutConduit(connectedStreamChannel.getSinkChannel().getConduit(), connectedStreamChannel.getSourceChannel().getConduit()); } void runInIoThread(Runnable task) { this.taskRunQueue.add(task); getIoThread().execute(new Runnable() { @Override public void run() { while (!taskRunQueue.isEmpty()) { taskRunQueue.poll().run(); } } }); } /** * Get the buffer pool for this connection. * * @return the buffer pool for this connection */ public ByteBufferPool getBufferPool() { return bufferPool; } @Override public SocketAddress getLocalAddress() { return channel.getLocalAddress(); } @Override public <A extends SocketAddress> A getLocalAddress(Class<A> type) { return channel.getLocalAddress(type); } @Override public XnioWorker getWorker() { return channel.getWorker(); } @Override public XnioIoThread getIoThread() { return channel.getIoThread(); } @Override public boolean supportsOption(Option<?> option) { return channel.supportsOption(option); } @Override public <T> T getOption(Option<T> option) throws IOException { return channel.getOption(option); } @Override public <T> T setOption(Option<T> option, T value) throws IOException { return channel.setOption(option, value); } @Override public boolean isOpen() { return channel.isOpen(); } @Override public SocketAddress getPeerAddress() { return channel.getPeerAddress(); } @Override public <A extends SocketAddress> A getPeerAddress(Class<A> type) { return channel.getPeerAddress(type); } /** * Get the source address of the Channel. * * @return the source address of the Channel */ public InetSocketAddress getSourceAddress() { return getPeerAddress(InetSocketAddress.class); } /** * Get the destination address of the Channel. 
* * @return the destination address of the Channel */ public InetSocketAddress getDestinationAddress() { return getLocalAddress(InetSocketAddress.class); } /** * receive method, returns null if no frame is ready. Otherwise returns a * channel that can be used to read the frame contents. * <p> * Calling this method can also have the side effect of making additional data available to * existing source channels. In general if you suspend receives or don't have some other way * of calling this method then it can prevent frame channels for being fully consumed. */ public synchronized R receive() throws IOException { if (isLastFrameReceived() && receiver == null) { //we have received the last frame, we just shut down and return //it would probably make more sense to have the last channel responsible for this //however it is much simpler just to have it here if(readData != null) { readData.close(); readData = null; } channel.getSourceChannel().suspendReads(); channel.getSourceChannel().shutdownReads(); return null; } ReferenceCountedPooled pooled = this.readData; boolean hasData; if (pooled == null) { pooled = allocateReferenceCountedBuffer(); if (pooled == null) { return null; } hasData = false; } else if(pooled.isFreed()) { //we attempt to re-used an existing buffer if(!pooled.tryUnfree()) { pooled = allocateReferenceCountedBuffer(); if (pooled == null) { return null; } } else { pooled.getBuffer().limit(pooled.getBuffer().capacity()); } hasData = false; } else { hasData = pooled.getBuffer().hasRemaining(); } boolean forceFree = false; int read = 0; try { if (!hasData) { pooled.getBuffer().clear(); read = channel.getSourceChannel().read(pooled.getBuffer()); if (read == 0) { //no data, we just free the buffer forceFree = true; return null; } else if (read == -1) { try { channel.getSourceChannel().shutdownReads(); } catch (IOException e) { if (WebSocketLogger.REQUEST_LOGGER.isDebugEnabled()) { WebSocketLogger.REQUEST_LOGGER.debugf(e, "Connection closed with IOException when attempting to shut down reads"); } // nothing we can do here.. 
close safeClose(channel.getSourceChannel()); throw e; } forceFree = true; lastDataRead(); return null; } pooled.getBuffer().flip(); } if (frameDataRemaining > 0) { if (frameDataRemaining >= pooled.getBuffer().remaining()) { frameDataRemaining -= pooled.getBuffer().remaining(); if(receiver != null) { receiver.dataReady(null, pooled); } else { //we are dropping a frame pooled.close(); } readData = null; if(frameDataRemaining == 0) { receiver = null; } return null; } else { ByteBuffer buf = pooled.getBuffer().duplicate(); buf.limit((int) (buf.position() + frameDataRemaining)); pooled.getBuffer().position((int) (pooled.getBuffer().position() + frameDataRemaining)); frameDataRemaining = 0; PooledByteBuffer frameData = pooled.createView(buf); if(receiver != null) { receiver.dataReady(null, frameData); } else{ //we are dropping the frame frameData.close(); } receiver = null; } //if we read data into a frame we just return immediately, even if there is more remaining //see https://issues.jboss.org/browse/UNDERTOW-410 //basically if we don't do this we loose some message ordering semantics //as the second message may be processed before the first one //this is problematic for HTTPS, where the read listener may also be invoked by a queued task //and not by the selector mechanism return null; } FrameHeaderData data = parseFrame(pooled.getBuffer()); if (data != null) { PooledByteBuffer frameData; if (data.getFrameLength() >= pooled.getBuffer().remaining()) { frameDataRemaining = data.getFrameLength() - pooled.getBuffer().remaining(); frameData = pooled.createView(pooled.getBuffer().duplicate()); pooled.getBuffer().position(pooled.getBuffer().limit()); } else { ByteBuffer buf = pooled.getBuffer().duplicate(); buf.limit((int) (buf.position() + data.getFrameLength())); pooled.getBuffer().position((int) (pooled.getBuffer().position() + data.getFrameLength())); frameData = pooled.createView(buf); } AbstractFramedStreamSourceChannel<?, ?, ?> existing = data.getExistingChannel(); if (existing != null) { if (data.getFrameLength() > frameData.getBuffer().remaining()) { receiver = (R) existing; } existing.dataReady(data, frameData); return null; } else { boolean moreData = data.getFrameLength() > frameData.getBuffer().remaining(); R newChannel = createChannel(data, frameData); if (newChannel != null) { if(!newChannel.isComplete()) { receivers.add(newChannel); } if (moreData) { receiver = newChannel; } } else { frameData.close(); } return newChannel; } } return null; } catch (IOException|RuntimeException e) { //something has code wrong with parsing, close the read side //we don't close the write side, as the underlying implementation will most likely want to send an error markReadsBroken(e); forceFree = true; throw e; }finally { //if the receive caused the channel to break the close listener may be have been called //which will make readData null if (readData != null) { if (!pooled.getBuffer().hasRemaining() || forceFree) { if(pooled.getBuffer().limit() * 2 > pooled.getBuffer().capacity() || forceFree) { //if we have used more than half the buffer we don't allow it to be re-aquired readData = null; } //even though this is freed we may un-free it if we get a new packet //this prevents many small reads resulting in a large number of allocated buffers pooled.close(); } } } } private ReferenceCountedPooled allocateReferenceCountedBuffer() { if(maxQueuedBuffers > 0) { int expect; do { expect = outstandingBuffersUpdater.get(this); if (expect == maxQueuedBuffers) { synchronized (this) { //we need to re-read in a sync 
block, to prevent races expect = outstandingBuffersUpdater.get(this); if (expect == maxQueuedBuffers) { channel.getSourceChannel().suspendReads(); return null; } } } } while (!outstandingBuffersUpdater.compareAndSet(this, expect, expect + 1)); } PooledByteBuffer buf = bufferPool.allocate(); return this.readData = new ReferenceCountedPooled(buf, 1, maxQueuedBuffers > 0 ? freeNotifier : null); } /** * Method than is invoked when read() returns -1. */ protected void lastDataRead() { } /** * Method that creates the actual stream source channel implementation that is in use. * * @param frameHeaderData The header data, as returned by {@link #parseFrame(java.nio.ByteBuffer)} * @param frameData Any additional data for the frame that has already been read. This may not be the complete frame contents * @return A new stream source channel */ protected abstract R createChannel(FrameHeaderData frameHeaderData, PooledByteBuffer frameData) throws IOException; /** * Attempts to parse an incoming frame header from the data in the buffer. * * @param data The data that has been read from the channel * @return The frame header data, or <code>null</code> if the data was incomplete * @throws IOException If the data could not be parsed. */ protected abstract FrameHeaderData parseFrame(ByteBuffer data) throws IOException; protected synchronized void recalculateHeldFrames() throws IOException { if (!heldFrames.isEmpty()) { framePriority.frameAdded(null, pendingFrames, heldFrames); flushSenders(); } } /** * Flushes all ready stream sink conduits to the channel. * <p> * Frames will be batched up, to allow them all to be written out via a gathering * write. The {@link #framePriority} implementation will be invoked to decide which * frames are eligible for sending and in what order. */ protected synchronized void flushSenders() { if(flushingSenders) { throw UndertowMessages.MESSAGES.recursiveCallToFlushingSenders(); } flushingSenders = true; try { int toSend = 0; while (!newFrames.isEmpty()) { S frame = newFrames.poll(); if (framePriority.insertFrame(frame, pendingFrames)) { if (!heldFrames.isEmpty()) { framePriority.frameAdded(frame, pendingFrames, heldFrames); } } else { heldFrames.add(frame); } } boolean finalFrame = false; ListIterator<S> it = pendingFrames.listIterator(); while (it.hasNext()) { S sender = it.next(); if (sender.isReadyForFlush()) { ++toSend; } else { break; } if (sender.isLastFrame()) { finalFrame = true; } } if (toSend == 0) { //if there is nothing to send we just attempt a flush on the underlying channel try { if(channel.getSinkChannel().flush()) { channel.getSinkChannel().suspendWrites(); } } catch (IOException e) { safeClose(channel); markWritesBroken(e); } return; } ByteBuffer[] data = new ByteBuffer[toSend * 3]; int j = 0; it = pendingFrames.listIterator(); try { while (j < toSend) { S next = it.next(); //todo: rather than adding empty buffers just store the offsets SendFrameHeader frameHeader = next.getFrameHeader(); PooledByteBuffer frameHeaderByteBuffer = frameHeader.getByteBuffer(); data[j * 3] = frameHeaderByteBuffer != null ? frameHeaderByteBuffer.getBuffer() : Buffers.EMPTY_BYTE_BUFFER; data[(j * 3) + 1] = next.getBuffer() == null ? 
Buffers.EMPTY_BYTE_BUFFER : next.getBuffer(); data[(j * 3) + 2] = next.getFrameFooter(); ++j; } long toWrite = Buffers.remaining(data); long res; do { res = channel.getSinkChannel().write(data); toWrite -= res; } while (res > 0 && toWrite > 0); int max = toSend; while (max > 0) { S sinkChannel = pendingFrames.get(0); PooledByteBuffer frameHeaderByteBuffer = sinkChannel.getFrameHeader().getByteBuffer(); if (frameHeaderByteBuffer != null && frameHeaderByteBuffer.getBuffer().hasRemaining() || sinkChannel.getBuffer() != null && sinkChannel.getBuffer().hasRemaining() || sinkChannel.getFrameFooter().hasRemaining()) { break; } sinkChannel.flushComplete(); pendingFrames.remove(sinkChannel); max--; } if (!pendingFrames.isEmpty() || !channel.getSinkChannel().flush()) { channel.getSinkChannel().resumeWrites(); } else { channel.getSinkChannel().suspendWrites(); } if (pendingFrames.isEmpty() && finalFrame) { //all data has been sent. Close gracefully channel.getSinkChannel().shutdownWrites(); if (!channel.getSinkChannel().flush()) { channel.getSinkChannel().setWriteListener(ChannelListeners.flushingChannelListener(null, null)); channel.getSinkChannel().resumeWrites(); } } } catch (IOException e) { safeClose(channel); markWritesBroken(e); } } finally { flushingSenders = false; if(!newFrames.isEmpty()) { runInIoThread(new Runnable() { @Override public void run() { flushSenders(); } }); } } } void awaitWritable() throws IOException { this.channel.getSinkChannel().awaitWritable(); } void awaitWritable(long time, TimeUnit unit) throws IOException { this.channel.getSinkChannel().awaitWritable(time, unit); } /** * Queues a new frame to be sent, and attempts a flush if this is the first frame in the new frame queue. * <p> * Depending on the {@link FramePriority} implementation in use the channel may or may not be added to the actual * pending queue * * @param channel The channel */ protected void queueFrame(final S channel) throws IOException { assert !newFrames.contains(channel); if (isWritesBroken() || !this.channel.getSinkChannel().isOpen() || channel.isBroken() || !channel.isOpen()) { IoUtils.safeClose(channel); throw UndertowMessages.MESSAGES.channelIsClosed(); } newFrames.add(channel); if (!requireExplicitFlush || channel.isBufferFull()) { flush(); } } public void flush() { if (!flushingSenders) { if(channel.getIoThread() == Thread.currentThread()) { flushSenders(); } else { runInIoThread(new Runnable() { @Override public void run() { flushSenders(); } }); } } } /** * Returns true if the protocol specific final frame has been received. * * @return <code>true</code> If the last frame has been received */ protected abstract boolean isLastFrameReceived(); /** * @return <code>true</code> If the last frame has been sent */ protected abstract boolean isLastFrameSent(); /** * Method that is invoked when the read side of the channel is broken. This generally happens on a protocol error. */ protected abstract void handleBrokenSourceChannel(Throwable e); /** * Method that is invoked when then write side of a channel is broken. This generally happens on a protocol error. */ protected abstract void handleBrokenSinkChannel(Throwable e); /** * Return the {@link org.xnio.ChannelListener.Setter} which will holds the {@link org.xnio.ChannelListener} that gets notified once a frame was * received. 
*/ public Setter<C> getReceiveSetter() { return receiveSetter; } /** * Suspend the receive of new frames via {@link #receive()} */ public synchronized void suspendReceives() { receivesSuspended = true; if (receiver == null) { channel.getSourceChannel().suspendReads(); } } /** * Resume the receive of new frames via {@link #receive()} */ public synchronized void resumeReceives() { receivesSuspended = false; if (readData != null && !readData.isFreed()) { channel.getSourceChannel().wakeupReads(); } else { channel.getSourceChannel().resumeReads(); } } public boolean isReceivesResumed() { return !receivesSuspended; } /** * Forcibly closes the {@link io.undertow.server.protocol.framed.AbstractFramedChannel}. */ @Override public void close() throws IOException { safeClose(channel); if(readData != null) { readData.close(); readData = null; } } @Override public Setter<? extends AbstractFramedChannel> getCloseSetter() { return closeSetter; } /** * Called when a source sub channel fails to fulfil its contract, and leaves the channel in an inconsistent state. * <p> * The underlying read side will be forcibly closed. * * @param cause The possibly null cause */ @SuppressWarnings({"unchecked", "rawtypes"}) protected void markReadsBroken(Throwable cause) { if (readsBrokenUpdater.compareAndSet(this, 0, 1)) { handleBrokenSourceChannel(cause); safeClose(channel.getSourceChannel()); closeSubChannels(); } } /** * Method that is called when the channel is being forcibly closed, and all sub stream sink/source * channels should also be forcibly closed. */ protected abstract void closeSubChannels(); /** * Called when a sub channel fails to fulfil its contract, and leaves the channel in an inconsistent state. * <p> * The underlying channel will be closed, and any sub channels that have writes resumed will have their * listeners notified. It is expected that these listeners will then attempt to use the channel, and their standard * error handling logic will take over. 
* * @param cause The possibly null cause */ @SuppressWarnings({"unchecked", "rawtypes"}) protected void markWritesBroken(Throwable cause) { if (writesBrokenUpdater.compareAndSet(this, 0, 1)) { handleBrokenSinkChannel(cause); safeClose(channel.getSinkChannel()); synchronized (this) { for (final S channel : pendingFrames) { channel.markBroken(); } pendingFrames.clear(); for (final S channel : newFrames) { channel.markBroken(); } newFrames.clear(); for (final S channel : heldFrames) { channel.markBroken(); } heldFrames.clear(); } } } protected boolean isWritesBroken() { return writesBrokenUpdater.get(this) != 0; } protected boolean isReadsBroken() { return readsBrokenUpdater.get(this) != 0; } void resumeWrites() { channel.getSinkChannel().resumeWrites(); } void suspendWrites() { channel.getSinkChannel().suspendWrites(); } void wakeupWrites() { channel.getSinkChannel().wakeupWrites(); } StreamSourceChannel getSourceChannel() { return channel.getSourceChannel(); } void notifyFrameReadComplete(AbstractFramedStreamSourceChannel<C, R, S> channel) { synchronized (AbstractFramedChannel.this) { if (isLastFrameReceived()) { safeClose(AbstractFramedChannel.this.channel.getSourceChannel()); } } } void notifyClosed(AbstractFramedStreamSourceChannel<C, R, S> channel) { synchronized (AbstractFramedChannel.this) { receivers.remove(channel); } } /** * {@link org.xnio.ChannelListener} which delegates the read notification to the appropriate listener */ private final class FrameReadListener implements ChannelListener<StreamSourceChannel> { @SuppressWarnings({"unchecked", "rawtypes"}) @Override public void handleEvent(final StreamSourceChannel channel) { //clear the task queue before reading while (!taskRunQueue.isEmpty()) { taskRunQueue.poll().run(); } final R receiver = AbstractFramedChannel.this.receiver; if ((isLastFrameReceived() || receivesSuspended) && receiver == null) { channel.suspendReads(); return; } else { ChannelListener listener = receiveSetter.get(); if(listener == null) { listener = DRAIN_LISTENER; } WebSocketLogger.REQUEST_LOGGER.tracef("Invoking receive listener", receiver); ChannelListeners.invokeChannelListener(AbstractFramedChannel.this, listener); } if (readData != null && !readData.isFreed() && channel.isOpen()) { try { runInIoThread(new Runnable() { @Override public void run() { ChannelListeners.invokeChannelListener(channel, FrameReadListener.this); } }); } catch (RejectedExecutionException e) { IoUtils.safeClose(AbstractFramedChannel.this); } } } } private class FrameWriteListener implements ChannelListener<StreamSinkChannel> { @Override public void handleEvent(final StreamSinkChannel channel) { flushSenders(); } } /** * close listener, just goes through and activates any sub channels to make sure their listeners are invoked */ private class FrameCloseListener implements ChannelListener<CloseableChannel> { private boolean sinkClosed; private boolean sourceClosed; @Override public void handleEvent(final CloseableChannel c) { if(c instanceof StreamSinkChannel) { sinkClosed = true; } else if(c instanceof StreamSourceChannel) { sourceClosed = true; } if(!sourceClosed || !sinkClosed) { return; //both sides need to be closed } else if(readData != null && !readData.isFreed()) { //we make sure there is no data left to receive, if there is then we invoke the receive listener runInIoThread(new Runnable() { @Override public void run() { while (readData != null && !readData.isFreed()) { int rem = readData.getBuffer().remaining(); ChannelListener listener = receiveSetter.get(); if(listener == 
null) { listener = DRAIN_LISTENER; } ChannelListeners.invokeChannelListener(AbstractFramedChannel.this, listener); if(!AbstractFramedChannel.this.isOpen()) { break; } if (readData != null && rem == readData.getBuffer().remaining()) { break;//make sure we are making progress } } handleEvent(c); } }); return; } if (Thread.currentThread() != c.getIoThread()) { runInIoThread(new Runnable() { @Override public void run() { ChannelListeners.invokeChannelListener(c, FrameCloseListener.this); } }); return; } R receiver = AbstractFramedChannel.this.receiver; try { if (receiver != null && receiver.isOpen() && receiver.isReadResumed()) { ChannelListeners.invokeChannelListener(receiver, ((SimpleSetter) receiver.getReadSetter()).get()); } synchronized (AbstractFramedChannel.this) { for (final S channel : pendingFrames) { //if this was a clean shutdown there should not be any senders channel.markBroken(); } for (final S channel : newFrames) { //if this was a clean shutdown there should not be any senders channel.markBroken(); } for (final S channel : heldFrames) { //if this was a clean shutdown there should not be any senders channel.markBroken(); } for(AbstractFramedStreamSourceChannel<C, R, S> r : new ArrayList<>(receivers)) { IoUtils.safeClose(r); } } } finally { try { for (ChannelListener<C> task : closeTasks) { ChannelListeners.invokeChannelListener((C) AbstractFramedChannel.this, task); } } finally { synchronized (AbstractFramedChannel.this) { closeSubChannels(); if (readData != null) { readData.close(); readData = null; } } ChannelListeners.invokeChannelListener((C) AbstractFramedChannel.this, closeSetter.get()); } } } } public void setIdleTimeout(long timeout) { idleTimeoutConduit.setIdleTimeout(timeout); } public long getIdleTimeout() { return idleTimeoutConduit.getIdleTimeout(); } protected FramePriority<C, R, S> getFramePriority() { return framePriority; } public void addCloseTask(final ChannelListener<C> task) { closeTasks.add(task); } @Override public String toString() { return getClass().getSimpleName() + "[ " + (receiver == null ? "No Receiver" : receiver.toString()) + " " + pendingFrames.toString() + " -- " + heldFrames.toString() + " -- " + newFrames.toString() + "]"; } protected StreamConnection getUnderlyingConnection() { return channel; } protected ChannelExceptionHandler<SuspendableWriteChannel> writeExceptionHandler() { return new ChannelExceptionHandler<SuspendableWriteChannel>() { @Override public void handleException(SuspendableWriteChannel channel, IOException exception) { markWritesBroken(exception); } }; } public boolean isRequireExplicitFlush() { return requireExplicitFlush; } public void setRequireExplicitFlush(boolean requireExplicitFlush) { this.requireExplicitFlush = requireExplicitFlush; } protected OptionMap getSettings() { return settings; } }
Improve toString()
core/src/main/java/io/undertow/server/protocol/framed/AbstractFramedChannel.java
Improve toString()
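The commit above improves AbstractFramedChannel#toString() to dump the receiver and the three frame queues for debugging. Below is a minimal, self-contained sketch of the same null-safe diagnostic pattern; FakeFramedChannel and its fields are hypothetical stand-ins, not Undertow types.

import java.util.ArrayList;
import java.util.List;

// Hypothetical stand-in for a framed channel; not an Undertow class.
class FakeFramedChannel {
    Object receiver; // may legitimately be null when nothing is being received
    final List<String> pendingFrames = new ArrayList<>();
    final List<String> heldFrames = new ArrayList<>();
    final List<String> newFrames = new ArrayList<>();

    @Override
    public String toString() {
        // Same null-safe shape as the improved toString() above:
        // class name, receiver (or a placeholder), then the three queues.
        return getClass().getSimpleName() + "[ "
                + (receiver == null ? "No Receiver" : receiver.toString()) + " "
                + pendingFrames + " -- " + heldFrames + " -- " + newFrames + "]";
    }

    public static void main(String[] args) {
        FakeFramedChannel c = new FakeFramedChannel();
        c.pendingFrames.add("frame-1");
        // Prints: FakeFramedChannel[ No Receiver [frame-1] -- [] -- []]
        System.out.println(c);
    }
}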
Java
apache-2.0
585830f3f27c3a1cc49b6f9215378367453183e6
0
hakan42/perrypedia-release-calendar,hakan42/perrypedia-release-calendar
package com.gurkensalat.calendar.perrypedia.releasecalendar; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.Table; import javax.persistence.Transient; @Entity @Table(name = "WIKI_PAGE") public class WikiPage { @Transient public static final String VALID = "Y"; @Id @GeneratedValue(strategy = GenerationType.AUTO) private long id; private String seriesPrefix; private int issueNumber; private String sourcePageId; private String sourcePageTitle; private String sourcePageValid; private String fullPageId; private String fullPageTitle; private String fullPageValid; private String valid; public WikiPage() { } public long getId() { return id; } public void setId(long id) { this.id = id; } public String getSeriesPrefix() { return seriesPrefix; } public void setSeriesPrefix(String seriesPrefix) { this.seriesPrefix = seriesPrefix; } public int getIssueNumber() { return issueNumber; } public void setIssueNumber(int issueNumber) { this.issueNumber = issueNumber; } public String getSourcePageId() { return sourcePageId; } public void setSourcePageId(String sourcePageId) { this.sourcePageId = sourcePageId; } public String getSourcePageTitle() { return sourcePageTitle; } public void setSourcePageTitle(String sourcePageTitle) { this.sourcePageTitle = sourcePageTitle; } public String getSourcePageValid() { return sourcePageValid; } public void setSourcePageValid(String sourcePageValid) { this.sourcePageValid = sourcePageValid; } public String getFullPageId() { return fullPageId; } public void setFullPageId(String fullPageId) { this.fullPageId = fullPageId; } public String getFullPageTitle() { return fullPageTitle; } public void setFullPageTitle(String fullPageTitle) { this.fullPageTitle = fullPageTitle; } public String getFullPageValid() { return fullPageValid; } public void setFullPageValid(String fullPageValid) { this.fullPageValid = fullPageValid; } public String getValid() { return valid; } public void setValid(String valid) { this.valid = valid; } /** * {@inheritDoc} */ @Override public String toString() { // @formatter:off return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) .append("id", getId()) .append("seriesPrefix", getSeriesPrefix()) .append("number", getIssueNumber()) .append("valid", getValid()) .toString(); // @formatter:on } }
src/main/java/com/gurkensalat/calendar/perrypedia/releasecalendar/WikiPage.java
package com.gurkensalat.calendar.perrypedia.releasecalendar; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.Table; import javax.persistence.Transient; @Entity @Table(name = "WIKI_PAGE") public class WikiPage { @Transient public static final String VALID = "Y"; @Id @GeneratedValue(strategy = GenerationType.AUTO) private long id; private String seriesPrefix; private int issueNumber; private String sourcePageId; private String sourcePageTitle; private String sourceValid; private String fullPageId; private String fullPageTitle; private String fullPageValid; private String valid; public WikiPage() { } public long getId() { return id; } public void setId(long id) { this.id = id; } public String getSeriesPrefix() { return seriesPrefix; } public void setSeriesPrefix(String seriesPrefix) { this.seriesPrefix = seriesPrefix; } public int getIssueNumber() { return issueNumber; } public void setIssueNumber(int issueNumber) { this.issueNumber = issueNumber; } public String getSourcePageId() { return sourcePageId; } public void setSourcePageId(String sourcePageId) { this.sourcePageId = sourcePageId; } public String getSourcePageTitle() { return sourcePageTitle; } public void setSourcePageTitle(String sourcePageTitle) { this.sourcePageTitle = sourcePageTitle; } public String getSourceValid() { return sourceValid; } public void setSourceValid(String sourceValid) { this.sourceValid = sourceValid; } public String getFullPageId() { return fullPageId; } public void setFullPageId(String fullPageId) { this.fullPageId = fullPageId; } public String getFullPageTitle() { return fullPageTitle; } public void setFullPageTitle(String fullPageTitle) { this.fullPageTitle = fullPageTitle; } public String getFullPageValid() { return fullPageValid; } public void setFullPageValid(String fullPageValid) { this.fullPageValid = fullPageValid; } public String getValid() { return valid; } public void setValid(String valid) { this.valid = valid; } /** * {@inheritDoc} */ @Override public String toString() { // @formatter:off return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) .append("id", getId()) .append("seriesPrefix", getSeriesPrefix()) .append("number", getIssueNumber()) .append("valid", getValid()) .toString(); // @formatter:on } }
Clarify attribute name
src/main/java/com/gurkensalat/calendar/perrypedia/releasecalendar/WikiPage.java
Clarify attribute name
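WikiPage#toString() in the record above delegates to commons-lang3's ToStringBuilder with SHORT_PREFIX_STYLE. A small runnable sketch of the output that style produces; the demo class and its field values are invented for illustration, only the builder usage mirrors the entity.

import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;

public class WikiPageToStringDemo {
    // Invented demo values.
    private final long id = 42L;
    private final String seriesPrefix = "PR";
    private final int issueNumber = 2750;
    private final String valid = "Y";

    @Override
    public String toString() {
        return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
                .append("id", id)
                .append("seriesPrefix", seriesPrefix)
                .append("number", issueNumber)
                .append("valid", valid)
                .toString();
    }

    public static void main(String[] args) {
        // Prints roughly: WikiPageToStringDemo[id=42,seriesPrefix=PR,number=2750,valid=Y]
        System.out.println(new WikiPageToStringDemo());
    }
}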
Java
apache-2.0
42392432beb6761809b12bb49f8638603f992fd8
0
tresvecesseis/ExoPlayer,tresvecesseis/ExoPlayer,tresvecesseis/ExoPlayer
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.exoplayer.extractor; import com.google.android.exoplayer.C; import com.google.android.exoplayer.SampleHolder; import com.google.android.exoplayer.upstream.Allocation; import com.google.android.exoplayer.upstream.Allocator; import com.google.android.exoplayer.upstream.DataSource; import com.google.android.exoplayer.util.Assertions; import com.google.android.exoplayer.util.ParsableByteArray; import java.io.EOFException; import java.io.IOException; import java.nio.ByteBuffer; import java.util.concurrent.LinkedBlockingDeque; /** * A rolling buffer of sample data and corresponding sample information. */ /* package */ final class RollingSampleBuffer { private static final int INITIAL_SCRATCH_SIZE = 32; private final Allocator allocator; private final int allocationLength; private final InfoQueue infoQueue; private final LinkedBlockingDeque<Allocation> dataQueue; private final SampleExtrasHolder extrasHolder; private final ParsableByteArray scratch; // Accessed only by the consuming thread. private long totalBytesDropped; // Accessed only by the loading thread. private long totalBytesWritten; private Allocation lastAllocation; private int lastAllocationOffset; /** * @param allocator An {@link Allocator} from which allocations for sample data can be obtained. */ public RollingSampleBuffer(Allocator allocator) { this.allocator = allocator; allocationLength = allocator.getIndividualAllocationLength(); infoQueue = new InfoQueue(); dataQueue = new LinkedBlockingDeque<>(); extrasHolder = new SampleExtrasHolder(); scratch = new ParsableByteArray(INITIAL_SCRATCH_SIZE); lastAllocationOffset = allocationLength; } // Called by the consuming thread, but only when there is no loading thread. /** * Clears the buffer, returning all allocations to the allocator. */ public void clear() { infoQueue.clear(); while (!dataQueue.isEmpty()) { allocator.release(dataQueue.remove()); } totalBytesDropped = 0; totalBytesWritten = 0; lastAllocation = null; lastAllocationOffset = allocationLength; } /** * Returns the current absolute write index. */ public int getWriteIndex() { return infoQueue.getWriteIndex(); } /** * Discards samples from the write side of the buffer. * * @param discardFromIndex The absolute index of the first sample to be discarded. */ public void discardUpstreamSamples(int discardFromIndex) { totalBytesWritten = infoQueue.discardUpstreamSamples(discardFromIndex); dropUpstreamFrom(totalBytesWritten); } /** * Discards data from the write side of the buffer. Data is discarded from the specified absolute * position. Any allocations that are fully discarded are returned to the allocator. * * @param absolutePosition The absolute position (inclusive) from which to discard data. */ private void dropUpstreamFrom(long absolutePosition) { int relativePosition = (int) (absolutePosition - totalBytesDropped); // Calculate the index of the allocation containing the position, and the offset within it. 
int allocationIndex = relativePosition / allocationLength; int allocationOffset = relativePosition % allocationLength; // We want to discard any allocations after the one at allocationIndex. int allocationDiscardCount = dataQueue.size() - allocationIndex - 1; if (allocationOffset == 0) { // If the allocation at allocationIndex is empty, we should discard that one too. allocationDiscardCount++; } // Discard the allocations. for (int i = 0; i < allocationDiscardCount; i++) { allocator.release(dataQueue.removeLast()); } // Update lastAllocation and lastAllocationOffset to reflect the new position. lastAllocation = dataQueue.peekLast(); lastAllocationOffset = allocationOffset == 0 ? allocationLength : allocationOffset; } // Called by the consuming thread. /** * Returns the current absolute read index. */ public int getReadIndex() { return infoQueue.getReadIndex(); } /** * Fills {@code holder} with information about the current sample, but does not write its data. * <p> * The fields set are {@link SampleHolder#size}, {@link SampleHolder#timeUs} and * {@link SampleHolder#flags}. * * @param holder The holder into which the current sample information should be written. * @return True if the holder was filled. False if there is no current sample. */ public boolean peekSample(SampleHolder holder) { return infoQueue.peekSample(holder, extrasHolder); } /** * Skips the current sample. */ public void skipSample() { long nextOffset = infoQueue.moveToNextSample(); dropDownstreamTo(nextOffset); } /** * Attempts to skip to the keyframe before the specified time, if it's present in the buffer. * * @param timeUs The seek time. * @return True if the skip was successful. False otherwise. */ public boolean skipToKeyframeBefore(long timeUs) { long nextOffset = infoQueue.skipToKeyframeBefore(timeUs); if (nextOffset == -1) { return false; } dropDownstreamTo(nextOffset); return true; } /** * Reads the current sample, advancing the read index to the next sample. * * @param sampleHolder The holder into which the current sample should be written. * @return True if a sample was read. False if there is no current sample. */ public boolean readSample(SampleHolder sampleHolder) { // Write the sample information into the holder and extrasHolder. boolean haveSample = infoQueue.peekSample(sampleHolder, extrasHolder); if (!haveSample) { return false; } // Read encryption data if the sample is encrypted. if (sampleHolder.isEncrypted()) { readEncryptionData(sampleHolder, extrasHolder); } // Write the sample data into the holder. sampleHolder.ensureSpaceForWrite(sampleHolder.size); readData(extrasHolder.offset, sampleHolder.data, sampleHolder.size); // Advance the read head. long nextOffset = infoQueue.moveToNextSample(); dropDownstreamTo(nextOffset); return true; } /** * Reads encryption data for the current sample. * <p> * The encryption data is written into {@code sampleHolder.cryptoInfo}, and * {@code sampleHolder.size} is adjusted to subtract the number of bytes that were read. The * same value is added to {@code extrasHolder.offset}. * * @param sampleHolder The holder into which the encryption data should be written. * @param extrasHolder The extras holder whose offset should be read and subsequently adjusted. */ private void readEncryptionData(SampleHolder sampleHolder, SampleExtrasHolder extrasHolder) { long offset = extrasHolder.offset; // Read the signal byte.
readData(offset, scratch.data, 1); offset++; byte signalByte = scratch.data[0]; boolean subsampleEncryption = (signalByte & 0x80) != 0; int ivSize = signalByte & 0x7F; // Read the initialization vector. if (sampleHolder.cryptoInfo.iv == null) { sampleHolder.cryptoInfo.iv = new byte[16]; } readData(offset, sampleHolder.cryptoInfo.iv, ivSize); offset += ivSize; // Read the subsample count, if present. int subsampleCount; if (subsampleEncryption) { readData(offset, scratch.data, 2); offset += 2; scratch.setPosition(0); subsampleCount = scratch.readUnsignedShort(); } else { subsampleCount = 1; } // Write the clear and encrypted subsample sizes. int[] clearDataSizes = sampleHolder.cryptoInfo.numBytesOfClearData; if (clearDataSizes == null || clearDataSizes.length < subsampleCount) { clearDataSizes = new int[subsampleCount]; } int[] encryptedDataSizes = sampleHolder.cryptoInfo.numBytesOfEncryptedData; if (encryptedDataSizes == null || encryptedDataSizes.length < subsampleCount) { encryptedDataSizes = new int[subsampleCount]; } if (subsampleEncryption) { int subsampleDataLength = 6 * subsampleCount; ensureCapacity(scratch, subsampleDataLength); readData(offset, scratch.data, subsampleDataLength); offset += subsampleDataLength; scratch.setPosition(0); for (int i = 0; i < subsampleCount; i++) { clearDataSizes[i] = scratch.readUnsignedShort(); encryptedDataSizes[i] = scratch.readUnsignedIntToInt(); } } else { clearDataSizes[0] = 0; encryptedDataSizes[0] = sampleHolder.size - (int) (offset - extrasHolder.offset); } // Populate the cryptoInfo. sampleHolder.cryptoInfo.set(subsampleCount, clearDataSizes, encryptedDataSizes, extrasHolder.encryptionKeyId, sampleHolder.cryptoInfo.iv, C.CRYPTO_MODE_AES_CTR); // Adjust the offset and size to take into account the bytes read. int bytesRead = (int) (offset - extrasHolder.offset); extrasHolder.offset += bytesRead; sampleHolder.size -= bytesRead; } /** * Reads data from the front of the rolling buffer. * * @param absolutePosition The absolute position from which data should be read. * @param target The buffer into which data should be written. * @param length The number of bytes to read. */ private void readData(long absolutePosition, ByteBuffer target, int length) { int remaining = length; while (remaining > 0) { dropDownstreamTo(absolutePosition); int positionInAllocation = (int) (absolutePosition - totalBytesDropped); int toCopy = Math.min(remaining, allocationLength - positionInAllocation); Allocation allocation = dataQueue.peek(); target.put(allocation.data, allocation.translateOffset(positionInAllocation), toCopy); absolutePosition += toCopy; remaining -= toCopy; } } /** * Reads data from the front of the rolling buffer. * * @param absolutePosition The absolute position from which data should be read. * @param target The array into which data should be written. * @param length The number of bytes to read. */ // TODO: Consider reducing duplication of this method and the one above. 
private void readData(long absolutePosition, byte[] target, int length) { int bytesRead = 0; while (bytesRead < length) { dropDownstreamTo(absolutePosition); int positionInAllocation = (int) (absolutePosition - totalBytesDropped); int toCopy = Math.min(length - bytesRead, allocationLength - positionInAllocation); Allocation allocation = dataQueue.peek(); System.arraycopy(allocation.data, allocation.translateOffset(positionInAllocation), target, bytesRead, toCopy); absolutePosition += toCopy; bytesRead += toCopy; } } /** * Discard any allocations that hold data prior to the specified absolute position, returning * them to the allocator. * * @param absolutePosition The absolute position up to which allocations can be discarded. */ private void dropDownstreamTo(long absolutePosition) { int relativePosition = (int) (absolutePosition - totalBytesDropped); int allocationIndex = relativePosition / allocationLength; for (int i = 0; i < allocationIndex; i++) { allocator.release(dataQueue.remove()); totalBytesDropped += allocationLength; } } /** * Ensure that the passed {@link ParsableByteArray} is of at least the specified limit. */ private static void ensureCapacity(ParsableByteArray byteArray, int limit) { if (byteArray.limit() < limit) { byteArray.reset(new byte[limit], limit); } } // Called by the loading thread. /** * Returns the current write position in the rolling buffer. * * @return The current write position. */ public long getWritePosition() { return totalBytesWritten; } /** * Appends data to the rolling buffer. * * @param dataSource The source from which to read. * @param length The maximum length of the read. * @param allowEndOfInput True if encountering the end of the input having appended no data is * allowed, and should result in {@link C#RESULT_END_OF_INPUT} being returned. False if it * should be considered an error, causing an {@link EOFException} to be thrown. * @return The number of bytes appended, or {@link C#RESULT_END_OF_INPUT} if the input has ended. * @throws IOException If an error occurs reading from the source. */ public int appendData(DataSource dataSource, int length, boolean allowEndOfInput) throws IOException { length = prepareForAppend(length); int bytesAppended = dataSource.read(lastAllocation.data, lastAllocation.translateOffset(lastAllocationOffset), length); if (bytesAppended == C.RESULT_END_OF_INPUT) { if (allowEndOfInput) { return C.RESULT_END_OF_INPUT; } throw new EOFException(); } lastAllocationOffset += bytesAppended; totalBytesWritten += bytesAppended; return bytesAppended; } /** * Appends data to the rolling buffer. * * @param input The source from which to read. * @param length The maximum length of the read. * @param allowEndOfInput True if encountering the end of the input having appended no data is * allowed, and should result in {@link C#RESULT_END_OF_INPUT} being returned. False if it * should be considered an error, causing an {@link EOFException} to be thrown. * @return The number of bytes appended, or {@link C#RESULT_END_OF_INPUT} if the input has ended. * @throws IOException If an error occurs reading from the source. * @throws InterruptedException If the thread has been interrupted. 
*/ public int appendData(ExtractorInput input, int length, boolean allowEndOfInput) throws IOException, InterruptedException { length = prepareForAppend(length); int bytesAppended = input.read(lastAllocation.data, lastAllocation.translateOffset(lastAllocationOffset), length); if (bytesAppended == C.RESULT_END_OF_INPUT) { if (allowEndOfInput) { return C.RESULT_END_OF_INPUT; } throw new EOFException(); } lastAllocationOffset += bytesAppended; totalBytesWritten += bytesAppended; return bytesAppended; } /** * Appends data to the rolling buffer. * * @param buffer A buffer containing the data to append. * @param length The length of the data to append. */ public void appendData(ParsableByteArray buffer, int length) { while (length > 0) { int thisAppendLength = prepareForAppend(length); buffer.readBytes(lastAllocation.data, lastAllocation.translateOffset(lastAllocationOffset), thisAppendLength); lastAllocationOffset += thisAppendLength; totalBytesWritten += thisAppendLength; length -= thisAppendLength; } } /** * Indicates the end point for the current sample, making it available for consumption. * * @param sampleTimeUs The sample timestamp. * @param flags Flags that accompany the sample. See {@link SampleHolder#flags}. * @param position The position of the sample data in the rolling buffer. * @param size The size of the sample, in bytes. * @param encryptionKey The encryption key associated with the sample, or null. */ public void commitSample(long sampleTimeUs, int flags, long position, int size, byte[] encryptionKey) { infoQueue.commitSample(sampleTimeUs, flags, position, size, encryptionKey); } /** * Prepares the rolling sample buffer for an append of up to {@code length} bytes, returning the * number of bytes that can actually be appended. */ private int prepareForAppend(int length) { if (lastAllocationOffset == allocationLength) { lastAllocationOffset = 0; lastAllocation = allocator.allocate(); dataQueue.add(lastAllocation); } return Math.min(length, allocationLength - lastAllocationOffset); } /** * Holds information about the samples in the rolling buffer. */ private static final class InfoQueue { private static final int SAMPLE_CAPACITY_INCREMENT = 1000; private int capacity; private long[] offsets; private int[] sizes; private int[] flags; private long[] timesUs; private byte[][] encryptionKeys; private int queueSize; private int absoluteReadIndex; private int relativeReadIndex; private int relativeWriteIndex; public InfoQueue() { capacity = SAMPLE_CAPACITY_INCREMENT; offsets = new long[capacity]; timesUs = new long[capacity]; flags = new int[capacity]; sizes = new int[capacity]; encryptionKeys = new byte[capacity][]; } // Called by the consuming thread, but only when there is no loading thread. /** * Clears the queue. */ public void clear() { absoluteReadIndex = 0; relativeReadIndex = 0; relativeWriteIndex = 0; queueSize = 0; } /** * Returns the current absolute write index. */ public int getWriteIndex() { return absoluteReadIndex + queueSize; } /** * Discards samples from the write side of the buffer. * * @param discardFromIndex The absolute index of the first sample to be discarded. * @return The reduced total number of bytes written, after the samples have been discarded. 
*/ public long discardUpstreamSamples(int discardFromIndex) { int discardCount = getWriteIndex() - discardFromIndex; Assertions.checkArgument(0 <= discardCount && discardCount <= queueSize); if (discardCount == 0) { if (absoluteReadIndex == 0) { // queueSize == absoluteReadIndex == 0, so nothing has been written to the queue. return 0; } int lastWriteIndex = (relativeWriteIndex == 0 ? capacity : relativeWriteIndex) - 1; return offsets[lastWriteIndex] + sizes[lastWriteIndex]; } queueSize -= discardCount; relativeWriteIndex = (relativeWriteIndex + capacity - discardCount) % capacity; return offsets[relativeWriteIndex]; } // Called by the consuming thread. /** * Returns the current absolute read index. */ public int getReadIndex() { return absoluteReadIndex; } /** * Fills {@code holder} with information about the current sample, but does not write its data. * The {@code extrasHolder} offset is set to the absolute position of the sample's data in * the rolling buffer. * <p> * Populates {@link SampleHolder#size}, {@link SampleHolder#timeUs}, {@link SampleHolder#flags} * and the {@code extrasHolder}. * * @param holder The holder into which the current sample information should be written. * @param extrasHolder The holder into which extra sample information should be written. * @return True if the holders were filled. False if there is no current sample. */ public synchronized boolean peekSample(SampleHolder holder, SampleExtrasHolder extrasHolder) { if (queueSize == 0) { return false; } holder.timeUs = timesUs[relativeReadIndex]; holder.size = sizes[relativeReadIndex]; holder.flags = flags[relativeReadIndex]; extrasHolder.offset = offsets[relativeReadIndex]; extrasHolder.encryptionKeyId = encryptionKeys[relativeReadIndex]; return true; } /** * Advances the read index to the next sample. * * @return The absolute position of the first byte in the rolling buffer that may still be * required after advancing the index. Data prior to this position can be dropped. */ public synchronized long moveToNextSample() { queueSize--; int lastReadIndex = relativeReadIndex++; absoluteReadIndex++; if (relativeReadIndex == capacity) { // Wrap around. relativeReadIndex = 0; } return queueSize > 0 ? offsets[relativeReadIndex] : (sizes[lastReadIndex] + offsets[lastReadIndex]); } /** * Attempts to locate the keyframe before the specified time, if it's present in the buffer. * * @param timeUs The seek time. * @return The offset of the keyframe's data if the keyframe was present. -1 otherwise. */ public synchronized long skipToKeyframeBefore(long timeUs) { if (queueSize == 0 || timeUs < timesUs[relativeReadIndex]) { return -1; } int lastWriteIndex = (relativeWriteIndex == 0 ? capacity : relativeWriteIndex) - 1; long lastTimeUs = timesUs[lastWriteIndex]; if (timeUs > lastTimeUs) { return -1; } // TODO: This can be optimized further using binary search, although the fact that the array // is cyclic means we'd need to implement the binary search ourselves. int sampleCount = 0; int sampleCountToKeyframe = -1; int searchIndex = relativeReadIndex; while (searchIndex != relativeWriteIndex) { if (timesUs[searchIndex] > timeUs) { // We've gone too far. break; } else if ((flags[searchIndex] & C.SAMPLE_FLAG_SYNC) != 0) { // We've found a keyframe, and we're still before the seek position.
sampleCountToKeyframe = sampleCount; } searchIndex = (searchIndex + 1) % capacity; sampleCount++; } if (sampleCountToKeyframe == -1) { return -1; } queueSize -= sampleCountToKeyframe; relativeReadIndex = (relativeReadIndex + sampleCountToKeyframe) % capacity; absoluteReadIndex += sampleCountToKeyframe; return offsets[relativeReadIndex]; } // Called by the loading thread. public synchronized void commitSample(long timeUs, int sampleFlags, long offset, int size, byte[] encryptionKey) { timesUs[relativeWriteIndex] = timeUs; offsets[relativeWriteIndex] = offset; sizes[relativeWriteIndex] = size; flags[relativeWriteIndex] = sampleFlags; encryptionKeys[relativeWriteIndex] = encryptionKey; // Increment the write index. queueSize++; if (queueSize == capacity) { // Increase the capacity. int newCapacity = capacity + SAMPLE_CAPACITY_INCREMENT; long[] newOffsets = new long[newCapacity]; long[] newTimesUs = new long[newCapacity]; int[] newFlags = new int[newCapacity]; int[] newSizes = new int[newCapacity]; byte[][] newEncryptionKeys = new byte[newCapacity][]; int beforeWrap = capacity - relativeReadIndex; System.arraycopy(offsets, relativeReadIndex, newOffsets, 0, beforeWrap); System.arraycopy(timesUs, relativeReadIndex, newTimesUs, 0, beforeWrap); System.arraycopy(flags, relativeReadIndex, newFlags, 0, beforeWrap); System.arraycopy(sizes, relativeReadIndex, newSizes, 0, beforeWrap); System.arraycopy(encryptionKeys, relativeReadIndex, newEncryptionKeys, 0, beforeWrap); int afterWrap = relativeReadIndex; System.arraycopy(offsets, 0, newOffsets, beforeWrap, afterWrap); System.arraycopy(timesUs, 0, newTimesUs, beforeWrap, afterWrap); System.arraycopy(flags, 0, newFlags, beforeWrap, afterWrap); System.arraycopy(sizes, 0, newSizes, beforeWrap, afterWrap); System.arraycopy(encryptionKeys, 0, newEncryptionKeys, beforeWrap, afterWrap); offsets = newOffsets; timesUs = newTimesUs; flags = newFlags; sizes = newSizes; encryptionKeys = newEncryptionKeys; relativeReadIndex = 0; relativeWriteIndex = capacity; queueSize = capacity; capacity = newCapacity; } else { relativeWriteIndex++; if (relativeWriteIndex == capacity) { // Wrap around. relativeWriteIndex = 0; } } } } /** * Holds additional sample information not held by {@link SampleHolder}. */ private static final class SampleExtrasHolder { public long offset; public byte[] encryptionKeyId; } }
library/src/main/java/com/google/android/exoplayer/extractor/RollingSampleBuffer.java
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.exoplayer.extractor; import com.google.android.exoplayer.C; import com.google.android.exoplayer.SampleHolder; import com.google.android.exoplayer.upstream.Allocation; import com.google.android.exoplayer.upstream.Allocator; import com.google.android.exoplayer.upstream.DataSource; import com.google.android.exoplayer.util.Assertions; import com.google.android.exoplayer.util.ParsableByteArray; import java.io.EOFException; import java.io.IOException; import java.nio.ByteBuffer; import java.util.concurrent.LinkedBlockingDeque; /** * A rolling buffer of sample data and corresponding sample information. */ /* package */ final class RollingSampleBuffer { private static final int INITIAL_SCRATCH_SIZE = 32; private final Allocator allocator; private final int allocationLength; private final InfoQueue infoQueue; private final LinkedBlockingDeque<Allocation> dataQueue; private final SampleExtrasHolder extrasHolder; private final ParsableByteArray scratch; // Accessed only by the consuming thread. private long totalBytesDropped; // Accessed only by the loading thread. private long totalBytesWritten; private Allocation lastAllocation; private int lastAllocationOffset; /** * @param allocator An {@link Allocator} from which allocations for sample data can be obtained. */ public RollingSampleBuffer(Allocator allocator) { this.allocator = allocator; allocationLength = allocator.getIndividualAllocationLength(); infoQueue = new InfoQueue(); dataQueue = new LinkedBlockingDeque<>(); extrasHolder = new SampleExtrasHolder(); scratch = new ParsableByteArray(INITIAL_SCRATCH_SIZE); lastAllocationOffset = allocationLength; } // Called by the consuming thread, but only when there is no loading thread. /** * Clears the buffer, returning all allocations to the allocator. */ public void clear() { infoQueue.clear(); while (!dataQueue.isEmpty()) { allocator.release(dataQueue.remove()); } totalBytesDropped = 0; totalBytesWritten = 0; lastAllocation = null; lastAllocationOffset = allocationLength; } /** * Returns the current absolute write index. */ public int getWriteIndex() { return infoQueue.getWriteIndex(); } /** * Discards samples from the write side of the buffer. * * @param discardFromIndex The absolute index of the first sample to be discarded. */ public void discardUpstreamSamples(int discardFromIndex) { totalBytesWritten = infoQueue.discardUpstreamSamples(discardFromIndex); dropUpstreamFrom(totalBytesWritten); } /** * Discards data from the write side of the buffer. Data is discarded from the specified absolute * position. Any allocations that are fully discarded are returned to the allocator. * * @param absolutePosition The absolute position (inclusive) from which to discard data. */ private void dropUpstreamFrom(long absolutePosition) { int relativePosition = (int) (absolutePosition - totalBytesDropped); // Calculate the index of the allocation containing the position, and the offset within it. 
int allocationIndex = relativePosition / allocationLength; int allocationOffset = relativePosition % allocationLength; // We want to discard any allocations after the one at allocationIndex. int allocationDiscardCount = dataQueue.size() - allocationIndex - 1; if (allocationOffset == 0) { // If the allocation at allocationIndex is empty, we should discard that one too. allocationDiscardCount++; } // Discard the allocations. for (int i = 0; i < allocationDiscardCount; i++) { allocator.release(dataQueue.removeLast()); } // Update lastAllocation and lastAllocationOffset to reflect the new position. lastAllocation = dataQueue.peekLast(); lastAllocationOffset = allocationOffset == 0 ? allocationLength : allocationOffset; } // Called by the consuming thread. /** * Returns the current absolute read index. */ public int getReadIndex() { return infoQueue.getReadIndex(); } /** * Fills {@code holder} with information about the current sample, but does not write its data. * <p> * The fields set are {@link SampleHolder#size}, {@link SampleHolder#timeUs} and * {@link SampleHolder#flags}. * * @param holder The holder into which the current sample information should be written. * @return True if the holder was filled. False if there is no current sample. */ public boolean peekSample(SampleHolder holder) { return infoQueue.peekSample(holder, extrasHolder); } /** * Skips the current sample. */ public void skipSample() { long nextOffset = infoQueue.moveToNextSample(); dropDownstreamTo(nextOffset); } /** * Attempts to skip to the keyframe before the specified time, if it's present in the buffer. * * @param timeUs The seek time. * @return True if the skip was successful. False otherwise. */ public boolean skipToKeyframeBefore(long timeUs) { long nextOffset = infoQueue.skipToKeyframeBefore(timeUs); if (nextOffset == -1) { return false; } dropDownstreamTo(nextOffset); return true; } /** * Reads the current sample, advancing the read index to the next sample. * * @param sampleHolder The holder into which the current sample should be written. * @return True if a sample was read. False if there is no current sample. */ public boolean readSample(SampleHolder sampleHolder) { // Write the sample information into the holder and extrasHolder. boolean haveSample = infoQueue.peekSample(sampleHolder, extrasHolder); if (!haveSample) { return false; } // Read encryption data if the sample is encrypted. if (sampleHolder.isEncrypted()) { readEncryptionData(sampleHolder, extrasHolder); } // Write the sample data into the holder. sampleHolder.ensureSpaceForWrite(sampleHolder.size); readData(extrasHolder.offset, sampleHolder.data, sampleHolder.size); // Advance the read head. long nextOffset = infoQueue.moveToNextSample(); dropDownstreamTo(nextOffset); return true; } /** * Reads encryption data for the current sample. * <p> * The encryption data is written into {@code sampleHolder.cryptoInfo}, and * {@code sampleHolder.size} is adjusted to subtract the number of bytes that were read. The * same value is added to {@code extrasHolder.offset}. * * @param sampleHolder The holder into which the encryption data should be written. * @param extrasHolder The extras holder whose offset should be read and subsequently adjusted. */ private void readEncryptionData(SampleHolder sampleHolder, SampleExtrasHolder extrasHolder) { long offset = extrasHolder.offset; // Read the signal byte.
readData(offset, scratch.data, 1); offset++; byte signalByte = scratch.data[0]; boolean subsampleEncryption = (signalByte & 0x80) != 0; int ivSize = signalByte & 0x7F; // Read the initialization vector. if (sampleHolder.cryptoInfo.iv == null) { sampleHolder.cryptoInfo.iv = new byte[16]; } readData(offset, sampleHolder.cryptoInfo.iv, ivSize); offset += ivSize; // Read the subsample count, if present. int subsampleCount; if (subsampleEncryption) { readData(offset, scratch.data, 2); offset += 2; scratch.setPosition(0); subsampleCount = scratch.readUnsignedShort(); } else { subsampleCount = 1; } // Write the clear and encrypted subsample sizes. int[] clearDataSizes = sampleHolder.cryptoInfo.numBytesOfClearData; if (clearDataSizes == null || clearDataSizes.length < subsampleCount) { clearDataSizes = new int[subsampleCount]; } int[] encryptedDataSizes = sampleHolder.cryptoInfo.numBytesOfEncryptedData; if (encryptedDataSizes == null || encryptedDataSizes.length < subsampleCount) { encryptedDataSizes = new int[subsampleCount]; } if (subsampleEncryption) { int subsampleDataLength = 6 * subsampleCount; ensureCapacity(scratch, subsampleDataLength); readData(offset, scratch.data, subsampleDataLength); offset += subsampleDataLength; scratch.setPosition(0); for (int i = 0; i < subsampleCount; i++) { clearDataSizes[i] = scratch.readUnsignedShort(); encryptedDataSizes[i] = scratch.readUnsignedIntToInt(); } } else { clearDataSizes[0] = 0; encryptedDataSizes[0] = sampleHolder.size - (int) (offset - extrasHolder.offset); } // Populate the cryptoInfo. sampleHolder.cryptoInfo.set(subsampleCount, clearDataSizes, encryptedDataSizes, extrasHolder.encryptionKeyId, sampleHolder.cryptoInfo.iv, C.CRYPTO_MODE_AES_CTR); // Adjust the offset and size to take into account the bytes read. int bytesRead = (int) (offset - extrasHolder.offset); extrasHolder.offset += bytesRead; sampleHolder.size -= bytesRead; } /** * Reads data from the front of the rolling buffer. * * @param absolutePosition The absolute position from which data should be read. * @param target The buffer into which data should be written. * @param length The number of bytes to read. */ private void readData(long absolutePosition, ByteBuffer target, int length) { int remaining = length; while (remaining > 0) { dropDownstreamTo(absolutePosition); int positionInAllocation = (int) (absolutePosition - totalBytesDropped); int toCopy = Math.min(remaining, allocationLength - positionInAllocation); Allocation allocation = dataQueue.peek(); target.put(allocation.data, allocation.translateOffset(positionInAllocation), toCopy); absolutePosition += toCopy; remaining -= toCopy; } } /** * Reads data from the front of the rolling buffer. * * @param absolutePosition The absolute position from which data should be read. * @param target The array into which data should be written. * @param length The number of bytes to read. */ // TODO: Consider reducing duplication of this method and the one above. 
private void readData(long absolutePosition, byte[] target, int length) { int bytesRead = 0; while (bytesRead < length) { dropDownstreamTo(absolutePosition); int positionInAllocation = (int) (absolutePosition - totalBytesDropped); int toCopy = Math.min(length - bytesRead, allocationLength - positionInAllocation); Allocation allocation = dataQueue.peek(); System.arraycopy(allocation.data, allocation.translateOffset(positionInAllocation), target, bytesRead, toCopy); absolutePosition += toCopy; bytesRead += toCopy; } } /** * Discard any allocations that hold data prior to the specified absolute position, returning * them to the allocator. * * @param absolutePosition The absolute position up to which allocations can be discarded. */ private void dropDownstreamTo(long absolutePosition) { int relativePosition = (int) (absolutePosition - totalBytesDropped); int allocationIndex = relativePosition / allocationLength; for (int i = 0; i < allocationIndex; i++) { allocator.release(dataQueue.remove()); totalBytesDropped += allocationLength; } } /** * Ensure that the passed {@link ParsableByteArray} is of at least the specified limit. */ private static void ensureCapacity(ParsableByteArray byteArray, int limit) { if (byteArray.limit() < limit) { byteArray.reset(new byte[limit], limit); } } // Called by the loading thread. /** * Returns the current write position in the rolling buffer. * * @return The current write position. */ public long getWritePosition() { return totalBytesWritten; } /** * Appends data to the rolling buffer. * * @param dataSource The source from which to read. * @param length The maximum length of the read. * @param allowEndOfInput True if encountering the end of the input having appended no data is * allowed, and should result in {@link C#RESULT_END_OF_INPUT} being returned. False if it * should be considered an error, causing an {@link EOFException} to be thrown. * @return The number of bytes appended, or {@link C#RESULT_END_OF_INPUT} if the input has ended. * @throws IOException If an error occurs reading from the source. */ public int appendData(DataSource dataSource, int length, boolean allowEndOfInput) throws IOException { length = prepareForAppend(length); int bytesAppended = dataSource.read(lastAllocation.data, lastAllocation.translateOffset(lastAllocationOffset), length); if (bytesAppended == C.RESULT_END_OF_INPUT) { if (allowEndOfInput) { return C.RESULT_END_OF_INPUT; } throw new EOFException(); } lastAllocationOffset += bytesAppended; totalBytesWritten += bytesAppended; return bytesAppended; } /** * Appends data to the rolling buffer. * * @param input The source from which to read. * @param length The maximum length of the read. * @param allowEndOfInput True if encountering the end of the input having appended no data is * allowed, and should result in {@link C#RESULT_END_OF_INPUT} being returned. False if it * should be considered an error, causing an {@link EOFException} to be thrown. * @return The number of bytes appended, or {@link C#RESULT_END_OF_INPUT} if the input has ended. * @throws IOException If an error occurs reading from the source. * @throws InterruptedException If the thread has been interrupted. 
*/ public int appendData(ExtractorInput input, int length, boolean allowEndOfInput) throws IOException, InterruptedException { length = prepareForAppend(length); int bytesAppended = input.read(lastAllocation.data, lastAllocation.translateOffset(lastAllocationOffset), length); if (bytesAppended == C.RESULT_END_OF_INPUT) { if (allowEndOfInput) { return C.RESULT_END_OF_INPUT; } throw new EOFException(); } lastAllocationOffset += bytesAppended; totalBytesWritten += bytesAppended; return bytesAppended; } /** * Appends data to the rolling buffer. * * @param buffer A buffer containing the data to append. * @param length The length of the data to append. */ public void appendData(ParsableByteArray buffer, int length) { while (length > 0) { int thisAppendLength = prepareForAppend(length); buffer.readBytes(lastAllocation.data, lastAllocation.translateOffset(lastAllocationOffset), thisAppendLength); lastAllocationOffset += thisAppendLength; totalBytesWritten += thisAppendLength; length -= thisAppendLength; } } /** * Indicates the end point for the current sample, making it available for consumption. * * @param sampleTimeUs The sample timestamp. * @param flags Flags that accompany the sample. See {@link SampleHolder#flags}. * @param position The position of the sample data in the rolling buffer. * @param size The size of the sample, in bytes. * @param encryptionKey The encryption key associated with the sample, or null. */ public void commitSample(long sampleTimeUs, int flags, long position, int size, byte[] encryptionKey) { infoQueue.commitSample(sampleTimeUs, flags, position, size, encryptionKey); } /** * Prepares the rolling sample buffer for an append of up to {@code length} bytes, returning the * number of bytes that can actually be appended. */ private int prepareForAppend(int length) { if (lastAllocationOffset == allocationLength) { lastAllocationOffset = 0; lastAllocation = allocator.allocate(); dataQueue.add(lastAllocation); } return Math.min(length, allocationLength - lastAllocationOffset); } /** * Holds information about the samples in the rolling buffer. */ private static final class InfoQueue { private static final int SAMPLE_CAPACITY_INCREMENT = 1000; private int capacity; private long[] offsets; private int[] sizes; private int[] flags; private long[] timesUs; private byte[][] encryptionKeys; private int queueSize; private int absoluteReadIndex; private int relativeReadIndex; private int relativeWriteIndex; public InfoQueue() { capacity = SAMPLE_CAPACITY_INCREMENT; offsets = new long[capacity]; timesUs = new long[capacity]; flags = new int[capacity]; sizes = new int[capacity]; encryptionKeys = new byte[capacity][]; } // Called by the consuming thread, but only when there is no loading thread. /** * Clears the queue. */ public void clear() { absoluteReadIndex = 0; relativeReadIndex = 0; relativeWriteIndex = 0; queueSize = 0; } /** * Returns the current absolute write index. */ public int getWriteIndex() { return absoluteReadIndex + queueSize; } /** * Discards samples from the write side of the buffer. * * @param discardFromIndex The absolute index of the first sample to be discarded. * @return The reduced total number of bytes written, after the samples have been discarded. 
*/ public long discardUpstreamSamples(int discardFromIndex) { int discardCount = getWriteIndex() - discardFromIndex; Assertions.checkArgument(0 <= discardCount && discardCount <= queueSize); if (discardCount == 0) { if (absoluteReadIndex == 0) { // queueSize == absoluteReadIndex == 0, so nothing has been written to the queue. return 0; } int lastWriteIndex = (relativeWriteIndex == 0 ? capacity : relativeWriteIndex) - 1; return offsets[lastWriteIndex] + sizes[lastWriteIndex]; } queueSize -= discardCount; relativeWriteIndex = (relativeWriteIndex + capacity - discardCount) % capacity; return offsets[relativeWriteIndex]; } // Called by the consuming thread. /** * Returns the current absolute read index. */ public int getReadIndex() { return absoluteReadIndex; } /** * Fills {@code holder} with information about the current sample, but does not write its data. * The first entry in {@code offsetHolder} is filled with the absolute position of the sample's * data in the rolling buffer. * <p> * The fields set are {SampleHolder#size}, {SampleHolder#timeUs}, {SampleHolder#flags} and * {@code offsetHolder[0]}. * * @param holder The holder into which the current sample information should be written. * @param extrasHolder The holder into which extra sample information should be written. * @return True if the holders were filled. False if there is no current sample. */ public synchronized boolean peekSample(SampleHolder holder, SampleExtrasHolder extrasHolder) { if (queueSize == 0) { return false; } holder.timeUs = timesUs[relativeReadIndex]; holder.size = sizes[relativeReadIndex]; holder.flags = flags[relativeReadIndex]; extrasHolder.offset = offsets[relativeReadIndex]; extrasHolder.encryptionKeyId = encryptionKeys[relativeReadIndex]; return true; } /** * Advances the read index to the next sample. * * @return The absolute position of the first byte in the rolling buffer that may still be * required after advancing the index. Data prior to this position can be dropped. */ public synchronized long moveToNextSample() { queueSize--; int lastReadIndex = relativeReadIndex++; absoluteReadIndex++; if (relativeReadIndex == capacity) { // Wrap around. relativeReadIndex = 0; } return queueSize > 0 ? offsets[relativeReadIndex] : (sizes[lastReadIndex] + offsets[lastReadIndex]); } /** * Attempts to locate the keyframe before the specified time, if it's present in the buffer. * * @param timeUs The seek time. * @return The offset of the keyframe's data if the keyframe was present. -1 otherwise. */ public synchronized long skipToKeyframeBefore(long timeUs) { if (queueSize == 0 || timeUs < timesUs[relativeReadIndex]) { return -1; } int lastWriteIndex = (relativeWriteIndex == 0 ? capacity : relativeWriteIndex) - 1; long lastTimeUs = timesUs[lastWriteIndex]; if (timeUs > lastTimeUs) { return -1; } // TODO: This can be optimized further using binary search, although the fact that the array // is cyclic means we'd need to implement the binary search ourselves. int sampleCount = 0; int sampleCountToKeyframe = -1; int searchIndex = relativeReadIndex; while (searchIndex != relativeWriteIndex) { if (timesUs[searchIndex] > timeUs) { // We've gone too far. break; } else if ((flags[searchIndex] & C.SAMPLE_FLAG_SYNC) != 0) { // We've found a keyframe, and we're still before the seek position. 
sampleCountToKeyframe = sampleCount; } searchIndex = (searchIndex + 1) % capacity; sampleCount++; } if (sampleCountToKeyframe == -1) { return -1; } queueSize -= sampleCountToKeyframe; relativeReadIndex = (relativeReadIndex + sampleCountToKeyframe) % capacity; absoluteReadIndex += sampleCountToKeyframe; return offsets[relativeReadIndex]; } // Called by the loading thread. public synchronized void commitSample(long timeUs, int sampleFlags, long offset, int size, byte[] encryptionKey) { timesUs[relativeWriteIndex] = timeUs; offsets[relativeWriteIndex] = offset; sizes[relativeWriteIndex] = size; flags[relativeWriteIndex] = sampleFlags; encryptionKeys[relativeWriteIndex] = encryptionKey; // Increment the write index. queueSize++; if (queueSize == capacity) { // Increase the capacity. int newCapacity = capacity + SAMPLE_CAPACITY_INCREMENT; long[] newOffsets = new long[newCapacity]; long[] newTimesUs = new long[newCapacity]; int[] newFlags = new int[newCapacity]; int[] newSizes = new int[newCapacity]; byte[][] newEncryptionKeys = new byte[newCapacity][]; int beforeWrap = capacity - relativeReadIndex; System.arraycopy(offsets, relativeReadIndex, newOffsets, 0, beforeWrap); System.arraycopy(timesUs, relativeReadIndex, newTimesUs, 0, beforeWrap); System.arraycopy(flags, relativeReadIndex, newFlags, 0, beforeWrap); System.arraycopy(sizes, relativeReadIndex, newSizes, 0, beforeWrap); System.arraycopy(encryptionKeys, relativeReadIndex, newEncryptionKeys, 0, beforeWrap); int afterWrap = relativeReadIndex; System.arraycopy(offsets, 0, newOffsets, beforeWrap, afterWrap); System.arraycopy(timesUs, 0, newTimesUs, beforeWrap, afterWrap); System.arraycopy(flags, 0, newFlags, beforeWrap, afterWrap); System.arraycopy(sizes, 0, newSizes, beforeWrap, afterWrap); System.arraycopy(encryptionKeys, 0, newEncryptionKeys, beforeWrap, afterWrap); offsets = newOffsets; timesUs = newTimesUs; flags = newFlags; sizes = newSizes; encryptionKeys = newEncryptionKeys; relativeReadIndex = 0; relativeWriteIndex = capacity; queueSize = capacity; capacity = newCapacity; } else { relativeWriteIndex++; if (relativeWriteIndex == capacity) { // Wrap around. relativeWriteIndex = 0; } } } } /** * Holds additional sample information not held by {@link SampleHolder}. */ private static final class SampleExtrasHolder { public long offset; public byte[] encryptionKeyId; } }
Add missing @links. ------------- Created by MOE: https://github.com/google/moe MOE_MIGRATED_REVID=117840707
library/src/main/java/com/google/android/exoplayer/extractor/RollingSampleBuffer.java
Add missing @links. ------------- Created by MOE: https://github.com/google/moe MOE_MIGRATED_REVID=117840707
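RollingSampleBuffer above translates an absolute stream position into an allocation index and an offset within that allocation using integer division and modulo against the fixed allocation length (see dropUpstreamFrom() and dropDownstreamTo()). A tiny standalone sketch of that arithmetic; the 64-byte allocation length and the positions are arbitrary demo values, not taken from the record.

// Standalone arithmetic demo; values are arbitrary, not from the buffer above.
public class AllocationMathDemo {
    public static void main(String[] args) {
        final int allocationLength = 64;    // stand-in for allocator.getIndividualAllocationLength()
        final long totalBytesDropped = 128; // pretend two allocations were already released

        long absolutePosition = 200;                                         // position in the stream
        int relativePosition = (int) (absolutePosition - totalBytesDropped); // 72
        int allocationIndex = relativePosition / allocationLength;           // 1 -> second live allocation
        int allocationOffset = relativePosition % allocationLength;          // 8 -> byte 8 within it

        // Prints: allocation #1, offset 8
        System.out.println("allocation #" + allocationIndex + ", offset " + allocationOffset);
    }
}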
Java
apache-2.0
ef51cb31341db1c65811f262a18d1c06f3c9411f
0
hackugyo/ThoughtsCalendar_Android
package jp.ne.hatena.hackugyo.thoughtscalendar.ui.fragment; import java.util.ArrayList; import jp.ne.hatena.hackugyo.thoughtscalendar.CustomApplication; import jp.ne.hatena.hackugyo.thoughtscalendar.R; import android.annotation.SuppressLint; import android.net.Uri; import android.os.Build; import android.provider.CalendarContract; import android.provider.CalendarContract.Calendars; import android.provider.CalendarContract.Instances; import android.support.v4.app.Fragment; import android.support.v4.content.CursorLoader; public class PlaceHolderFragmentHelper { public static final ArrayList<String> sCalendarOwners = CustomApplication.getStringArrayById(R.array.list_calendar_owners); private PlaceHolderFragmentHelper() { } @SuppressLint("NewApi") static CursorLoader getCursorLoader(Fragment fragment, int begin, int end) { Uri content_by_day_uri; String[] instance_projection; String sort_order; String selection; String[] selectionArgs = new String[] { sCalendarOwners.get(fragment.getArguments().getInt(PlaceholderFragment.ARG_SECTION_NUMBER, 1) - 1) }; if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) { content_by_day_uri = CalendarContract.Instances.CONTENT_BY_DAY_URI; instance_projection = new String[] { Instances._ID, Instances.EVENT_ID, Instances.BEGIN, Instances.END, Instances.TITLE, Instances.EVENT_LOCATION, Instances.DESCRIPTION, Instances.OWNER_ACCOUNT }; sort_order = Instances.BEGIN + " ASC, " + Instances.END + " DESC, " + Instances.TITLE + " ASC"; selection = "((" + Calendars.OWNER_ACCOUNT + " = ?))"; } else { final String authority = "com.android.calendar"; content_by_day_uri = Uri.parse("content://" + authority + "/instances/whenbyday"); instance_projection = new String[] { "_id", "event_id", "begin", "end", "title", "eventLocation", "description", "ownerAccount" }; sort_order = "begin ASC, end DESC, title ASC"; selection = "((" + "ownerAccount" + " = ?))"; } Uri baseUri = buildQueryUri(begin, end, content_by_day_uri); return new CursorLoader(fragment.getActivity(), baseUri, instance_projection, selection, selectionArgs, sort_order); } private static Uri buildQueryUri(int start, int end, Uri content_by_day_uri) { StringBuilder path = new StringBuilder(); path.append(start); path.append('/'); path.append(end); Uri uri = Uri.withAppendedPath(content_by_day_uri, path.toString()); return uri; } @SuppressLint("InlinedApi") static String[] getBindFrom() { String[] from; if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) { from = new String[] { Instances.TITLE, Instances.BEGIN, Instances.EVENT_LOCATION, Instances.DESCRIPTION, Instances.EVENT_ID }; } else { from = new String[] { "title", "begin", "eventLocation", "description", "event_id" }; } return from; } }
src/jp/ne/hatena/hackugyo/thoughtscalendar/ui/fragment/PlaceHolderFragmentHelper.java
package jp.ne.hatena.hackugyo.thoughtscalendar.ui.fragment; import java.util.ArrayList; import jp.ne.hatena.hackugyo.thoughtscalendar.CustomApplication; import jp.ne.hatena.hackugyo.thoughtscalendar.R; import android.annotation.SuppressLint; import android.net.Uri; import android.os.Build; import android.provider.CalendarContract; import android.provider.CalendarContract.Calendars; import android.provider.CalendarContract.Instances; import android.support.v4.app.Fragment; import android.support.v4.content.CursorLoader; public class PlaceholderFragmentHelper { public static final ArrayList<String> sCalendarOwners = CustomApplication.getStringArrayById(R.array.list_calendar_owners); private PlaceholderFragmentHelper() { } @SuppressLint("NewApi") static CursorLoader getCursorLoader(Fragment fragment, int begin, int end) { Uri content_by_day_uri; String[] instance_projection; String sort_order; String selection; String[] selectionArgs = new String[] { sCalendarOwners.get(fragment.getArguments().getInt(PlaceholderFragment.ARG_SECTION_NUMBER, 1) - 1) }; if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) { content_by_day_uri = CalendarContract.Instances.CONTENT_BY_DAY_URI; instance_projection = new String[] { Instances._ID, Instances.EVENT_ID, Instances.BEGIN, Instances.END, Instances.TITLE, Instances.EVENT_LOCATION, Instances.DESCRIPTION, Instances.OWNER_ACCOUNT }; sort_order = Instances.BEGIN + " ASC, " + Instances.END + " DESC, " + Instances.TITLE + " ASC"; selection = "((" + Calendars.OWNER_ACCOUNT + " = ?))"; } else { final String authority = "com.android.calendar"; content_by_day_uri = Uri.parse("content://" + authority + "/instances/whenbyday"); instance_projection = new String[] { "_id", "event_id", "begin", "end", "title", "eventLocation", "description", "ownerAccount" }; sort_order = "begin ASC, end DESC, title ASC"; selection = "((" + "ownerAccount" + " = ?))"; } Uri baseUri = buildQueryUri(begin, end, content_by_day_uri); return new CursorLoader(fragment.getActivity(), baseUri, instance_projection, selection, selectionArgs, sort_order); } private static Uri buildQueryUri(int start, int end, Uri content_by_day_uri) { StringBuilder path = new StringBuilder(); path.append(start); path.append('/'); path.append(end); Uri uri = Uri.withAppendedPath(content_by_day_uri, path.toString()); return uri; } @SuppressLint("InlinedApi") static String[] getBindFrom() { String[] from; if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) { from = new String[] { Instances.TITLE, Instances.BEGIN, Instances.EVENT_LOCATION, Instances.DESCRIPTION, Instances.EVENT_ID }; } else { from = new String[] { "title", "begin", "eventLocation", "description", "event_id" }; } return from; } }
Make the class name match the file name
src/jp/ne/hatena/hackugyo/thoughtscalendar/ui/fragment/PlaceHolderFragmentHelper.java
Make the class name match the file name
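The helper above only builds the CursorLoader; a minimal sketch of how a fragment in the same package might consume it, assuming the standard support-v4 LoaderManager/LoaderCallbacks API (the fragment name, loader id, and Julian day range below are illustrative, not part of the record):

import android.database.Cursor;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.LoaderManager;
import android.support.v4.content.Loader;

public class CalendarListFragment extends Fragment
        implements LoaderManager.LoaderCallbacks<Cursor> {

    private static final int LOADER_ID = 0; // illustrative id

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        // This fragment must be created with setArguments(...) so the
        // helper can read ARG_SECTION_NUMBER from getArguments().
        getLoaderManager().initLoader(LOADER_ID, null, this);
    }

    @Override
    public Loader<Cursor> onCreateLoader(int id, Bundle args) {
        int beginDay = 2456658;      // hypothetical Julian start day
        int endDay = beginDay + 7;   // one-week window
        return PlaceHolderFragmentHelper.getCursorLoader(this, beginDay, endDay);
    }

    @Override
    public void onLoadFinished(Loader<Cursor> loader, Cursor data) {
        // Bind the cursor to an adapter here; columns follow getBindFrom().
    }

    @Override
    public void onLoaderReset(Loader<Cursor> loader) {
        // Release the adapter's cursor reference.
    }
}

Note that CONTENT_BY_DAY_URI expects Julian day numbers in the appended path segments, which is why begin and end are plain ints rather than millisecond timestamps.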
Java
apache-2.0
1595a326da90081cd447d366453ee114072fcb29
0
twitter-forks/bazel,meteorcloudy/bazel,bazelbuild/bazel,twitter-forks/bazel,twitter-forks/bazel,perezd/bazel,meteorcloudy/bazel,perezd/bazel,meteorcloudy/bazel,ButterflyNetwork/bazel,meteorcloudy/bazel,katre/bazel,twitter-forks/bazel,bazelbuild/bazel,meteorcloudy/bazel,ButterflyNetwork/bazel,katre/bazel,cushon/bazel,cushon/bazel,twitter-forks/bazel,twitter-forks/bazel,cushon/bazel,ButterflyNetwork/bazel,perezd/bazel,bazelbuild/bazel,bazelbuild/bazel,twitter-forks/bazel,perezd/bazel,katre/bazel,ButterflyNetwork/bazel,katre/bazel,meteorcloudy/bazel,cushon/bazel,katre/bazel,perezd/bazel,perezd/bazel,ButterflyNetwork/bazel,perezd/bazel,cushon/bazel,bazelbuild/bazel,ButterflyNetwork/bazel,katre/bazel,bazelbuild/bazel,cushon/bazel,meteorcloudy/bazel
// Copyright 2020 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.buildtool; import static com.google.common.truth.Truth.assertThat; import static com.google.devtools.build.lib.testutil.MoreAsserts.assertNoEvents; import static org.junit.Assert.assertThrows; import static org.junit.Assert.fail; import com.google.common.collect.Iterables; import com.google.common.eventbus.Subscribe; import com.google.devtools.build.lib.actions.BuildFailedException; import com.google.devtools.build.lib.actions.MutableActionGraph; import com.google.devtools.build.lib.analysis.AnalysisFailureEvent; import com.google.devtools.build.lib.analysis.ViewCreationFailedException; import com.google.devtools.build.lib.buildeventstream.BuildEventStreamProtos.BuildEventId.TargetCompletedId; import com.google.devtools.build.lib.buildtool.util.GoogleBuildIntegrationTestCase; import com.google.devtools.build.lib.runtime.BlazeModule; import com.google.devtools.build.lib.runtime.BlazeRuntime; import com.google.devtools.build.lib.runtime.CommandEnvironment; import com.google.testing.junit.testparameterinjector.TestParameter; import com.google.testing.junit.testparameterinjector.TestParameterInjector; import java.util.ArrayList; import java.util.List; import org.junit.Test; import org.junit.runner.RunWith; /** Tests for action conflicts. 
*/ @RunWith(TestParameterInjector.class) public class OutputArtifactConflictTest extends GoogleBuildIntegrationTestCase { static class AnalysisFailureEventListener extends BlazeModule { private final List<TargetCompletedId> eventIds = new ArrayList<>(); private final List<String> failedTargetNames = new ArrayList<>(); @Override public void beforeCommand(CommandEnvironment env) { env.getEventBus().register(this); } @Subscribe public void onAnalysisFailure(AnalysisFailureEvent event) { eventIds.add(event.getEventId().getTargetCompleted()); failedTargetNames.add(event.getFailedTarget().getLabel().toString()); } } private final AnalysisFailureEventListener eventListener = new AnalysisFailureEventListener(); @Override protected BlazeRuntime.Builder getRuntimeBuilder() throws Exception { return super.getRuntimeBuilder().addBlazeModule(eventListener); } @Test public void testArtifactPrefix( @TestParameter boolean keepGoing, @TestParameter boolean modifyBuildFile) throws Exception { if (modifyBuildFile) { write("x/BUILD", "cc_library(name = 'y', srcs = ['y.cc'])"); } else { write("x/BUILD", "cc_binary(name = 'y', srcs = ['y.cc'], malloc = '//base:system_malloc')"); } write("x/y/BUILD", "cc_library(name = 'y')"); write("x/y.cc", "int main() { return 0; }"); if (modifyBuildFile) { buildTarget("//x/y", "//x:y"); write("x/BUILD", "cc_binary(name = 'y', srcs = ['y.cc'], malloc = '//base:system_malloc')"); } else { buildTarget("//x/y"); } assertNoEvents(events.errors()); assertThat(eventListener.failedTargetNames).isEmpty(); if (keepGoing) { runtimeWrapper.addOptions("--keep_going"); } try { // Skyframe full should throw an error here even if we just build //x:y. However, because our // testing infrastructure sets up lots of symlinks, Skyframe invalidates the //x/y action, and // so would not find a conflict here without re-evaluating //x/y. Note that in a real client, // requesting the //x/y target would not be necessary to throw an exception. buildTarget("//x:y", "//x/y"); fail(); } catch (BuildFailedException | ViewCreationFailedException e) { // Expected. } events.assertContainsError("output path 'blaze-out/"); // Skip over config key string ... 
events.assertContainsError( "/bin/x/y' (belonging to //x:y) is a prefix of output path 'blaze-out"); assertThat(Iterables.size(events.errors())).isGreaterThan(1); if (keepGoing) { assertThat(eventListener.failedTargetNames).containsExactly("//x:y", "//x/y:y"); } else { assertThat(eventListener.failedTargetNames).containsAnyOf("//x:y", "//x/y:y"); } } @Test public void testAspectArtifactSharesPrefixWithTargetArtifact( @TestParameter boolean keepGoing, @TestParameter boolean modifyBuildFile) throws Exception { if (modifyBuildFile) { write("x/BUILD", "genrule(name = 'y', outs = ['y.out'], cmd = 'touch $@')"); } else { write("x/BUILD", "genrule(name = 'y', outs = ['y.bad'], cmd = 'touch $@')"); } write("x/y/BUILD", "cc_library(name = 'y')"); write( "x/aspect.bzl", "def _aspect_impl(target, ctx):", " if not getattr(ctx.rule.attr, 'outs', None):", " return struct(output_groups = {})", " conflict_outputs = list()", " for out in ctx.rule.attr.outs:", " if out.name[1:] == '.bad':", " aspect_out = ctx.actions.declare_file(out.name[:1])", " conflict_outputs.append(aspect_out)", " cmd = 'echo %s > %s' % (out.name, aspect_out.path)", " ctx.actions.run_shell(", " outputs = [aspect_out],", " command = cmd,", " )", " return [OutputGroupInfo(", " files = depset(conflict_outputs)", " )]", "", "my_aspect = aspect(implementation = _aspect_impl)"); if (modifyBuildFile) { buildTarget("//x/y", "//x:y"); write("x/BUILD", "genrule(name = 'y', outs = ['y.bad'], cmd = 'touch $@')"); } else { buildTarget("//x/y"); } assertNoEvents(events.errors()); assertThat(eventListener.failedTargetNames).isEmpty(); addOptions("--aspects=//x:aspect.bzl%my_aspect", "--output_groups=files"); if (keepGoing) { addOptions("--keep_going"); } try { // Skyframe full should throw an error here even if we just build //x:y. However, because our // testing infrastructure sets up lots of symlinks, Skyframe invalidates the //x/y action, and // so would not find a conflict here without re-evaluating //x/y. Note that in a real client, // requesting the //x/y target would not be necessary to throw an exception. buildTarget("//x:y", "//x/y"); fail(); } catch (BuildFailedException | ViewCreationFailedException e) { // Expected. } events.assertContainsError("output path 'blaze-out/"); // Skip over config key string ... events.assertContainsError( "/bin/x/y' (belonging to //x:y) is a prefix of output path 'blaze-out"); // When an aspect artifact's path is in aa prefix conflict with a target artifact's path, the // target artifact is created and only the aspect fails analysis. 
assertThat(Iterables.size(events.errors())).isGreaterThan(1); assertThat(eventListener.failedTargetNames).containsExactly("//x:y"); assertThat(eventListener.eventIds.get(0).getAspect()).isEqualTo("//x:aspect.bzl%my_aspect"); } @Test public void testAspectArtifactPrefix( @TestParameter boolean keepGoing, @TestParameter boolean modifyBuildFile) throws Exception { if (modifyBuildFile) { write( "x/BUILD", "genrule(name = 'y', outs = ['y.out'], cmd = 'touch $@')", "genrule(name = 'ydir', outs = ['y.dir'], cmd = 'touch $@')"); } else { write( "x/BUILD", "genrule(name = 'y', outs = ['y.bad'], cmd = 'touch $@')", "genrule(name = 'ydir', outs = ['y.dir'], cmd = 'touch $@')"); } write( "x/aspect.bzl", "def _aspect_impl(target, ctx):", " if not getattr(ctx.rule.attr, 'outs', None):", " return struct(output_groups = {})", " conflict_outputs = list()", " for out in ctx.rule.attr.outs:", " if out.name[1:] == '.bad':", " aspect_out = ctx.actions.declare_file(out.name[:1])", " conflict_outputs.append(aspect_out)", " cmd = 'echo %s > %s' % (out.name, aspect_out.path)", " ctx.actions.run_shell(", " outputs = [aspect_out],", " command = cmd,", " )", " elif out.name[1:] == '.dir':", " aspect_out = ctx.actions.declare_file(out.name[:1] + '/' + out.name)", " conflict_outputs.append(aspect_out)", " out_dir = aspect_out.path[:len(aspect_out.path) - len(out.name) + 1]", " cmd = 'mkdir %s && echo %s > %s' % (out_dir, out.name, aspect_out.path)", " ctx.actions.run_shell(", " outputs = [aspect_out],", " command = cmd,", " )", " return [OutputGroupInfo(", " files = depset(conflict_outputs)", " )]", "", "my_aspect = aspect(implementation = _aspect_impl)"); if (modifyBuildFile) { buildTarget("//x:y", "//x:ydir"); write( "x/BUILD", "genrule(name = 'y', outs = ['y.bad'], cmd = 'touch $@')", "genrule(name = 'ydir', outs = ['y.dir'], cmd = 'touch $@')"); } else { buildTarget("//x:y"); } assertNoEvents(events.errors()); assertThat(eventListener.failedTargetNames).isEmpty(); addOptions("--aspects=//x:aspect.bzl%my_aspect", "--output_groups=files"); if (keepGoing) { addOptions("--keep_going"); } try { // Skyframe full should throw an error here even if we just build //x:y. However, because our // testing infrastructure sets up lots of symlinks, Skyframe invalidates the //x/y action, and // so would not find a conflict here without re-evaluating //x/y. Note that in a real client, // requesting the //x/y target would not be necessary to throw an exception. buildTarget("//x:y", "//x:ydir"); fail(); } catch (BuildFailedException | ViewCreationFailedException e) { // Expected. } events.assertContainsError("output path 'blaze-out/"); // Skip over config key string ... events.assertContainsError( "/bin/x/y' (belonging to //x:y) is a prefix of output path 'blaze-out"); assertThat(events.errors()).hasSize(1); assertThat(eventListener.eventIds.get(0).getAspect()).isEqualTo("//x:aspect.bzl%my_aspect"); if (keepGoing) { assertThat(eventListener.failedTargetNames).containsExactly("//x:y", "//x:ydir"); } else { assertThat(eventListener.failedTargetNames).containsAnyOf("//x:y", "//x:ydir"); } } @Test public void testInvalidatedConflict() throws Exception { write("x/BUILD", "cc_binary(name = 'y', srcs = ['y.cc'], malloc = '//base:system_malloc')"); write("x/y/BUILD", "cc_library(name = 'y')"); write("x/y.cc", "int main() { return 0; }"); try { buildTarget("//x:y", "//x/y"); fail(); } catch (BuildFailedException | ViewCreationFailedException e) { // Expected. 
} assertThat(eventListener.failedTargetNames).containsAnyOf("//x:y", "//x/y:y"); write("x/BUILD", "# no conflict"); events.clear(); buildTarget("//x/y"); events.assertNoWarningsOrErrors(); } @Test public void testNewTargetConflict() throws Exception { write("x/BUILD", "cc_binary(name = 'y', srcs = ['y.cc'], malloc = '//base:system_malloc')"); write("x/y/BUILD", "cc_library(name = 'y')"); write("x/y.cc", "int main() { return 0; }"); buildTarget("//x/y"); events.assertNoWarningsOrErrors(); try { buildTarget("//x:y", "//x/y"); fail(); } catch (BuildFailedException | ViewCreationFailedException e) { // Expected. } assertThat(eventListener.failedTargetNames).containsAnyOf("//x:y", "//x/y:y"); } @Test public void testTwoOverlappingBuildsHasNoConflict() throws Exception { write("x/BUILD", "cc_binary(name = 'y', srcs = ['y.cc'], malloc = '//base:system_malloc')"); write("x/y/BUILD", "cc_library(name = 'y')"); write("x/y.cc", "int main() { return 0; }"); buildTarget("//x/y"); events.assertNoWarningsOrErrors(); buildTarget("//x:y"); events.assertNoWarningsOrErrors(); // Verify that together they fail, even though no new targets have been analyzed try { buildTarget("//x:y", "//x/y"); fail(); } catch (BuildFailedException | ViewCreationFailedException e) { // Expected. } events.clear(); // Verify that they still don't fail individually, so no state remains buildTarget("//x/y"); events.assertNoWarningsOrErrors(); buildTarget("//x:y"); events.assertNoWarningsOrErrors(); } @Test public void testFailingTargetsDoNotCauseActionConflicts() throws Exception { write( "x/bad_rule.bzl", "def _impl(ctx):", " return list().this_method_does_not_exist()", "bad_rule = rule(_impl, attrs = {'deps': attr.label_list()})"); write( "x/BUILD", "load('//x:bad_rule.bzl', 'bad_rule')", "cc_binary(name = 'y', srcs = ['y.cc'], malloc = '//base:system_malloc')", "bad_rule(name = 'bad', deps = [':y'])"); write("x/y/BUILD", "cc_library(name = 'y')"); write("x/y.cc", "int main() { return 0; }"); runtimeWrapper.addOptions("--keep_going"); try { buildTarget("//x:y", "//x/y"); fail(); } catch (ViewCreationFailedException e) { fail("Unexpected artifact prefix conflict: " + e); } catch (BuildFailedException e) { // Expected. } } // Regression test for b/184944522. @Test public void testConflictErrorAndAnalysisError() throws Exception { write( "conflict/BUILD", "cc_library(name='x', srcs=['foo.cc'])", "cc_binary(name='_objs/x/foo.pic.o', srcs=['bar.cc'], " + "malloc = '//base:system_malloc')"); write("conflict/foo.cc", "int main() { return 0; }"); write("conflict/bar.cc", "int main() { return 0; }"); write("x/BUILD", "sh_library(name = 'x', deps = ['//y:y'])"); write("y/BUILD", "sh_library(name = 'y', visibility = ['//visibility:private'])"); runtimeWrapper.addOptions("--keep_going"); assertThrows( BuildFailedException.class, () -> buildTarget("//x:x", "//conflict:x", "//conflict:_objs/x/foo.pic.o")); events.assertContainsError( "file 'conflict/_objs/x/foo.pic.o' is generated by these conflicting actions:"); // When two targets have conflicting artifacts, the first target named on the commandline "wins" // and is successfully built. All other targets fail analysis for conflicting with the first. 
assertThat(eventListener.failedTargetNames) .containsExactly("//x:x", "//conflict:_objs/x/foo.pic.o"); } @Test public void testMultipleConflictErrors() throws Exception { write( "conflict/BUILD", "cc_library(name='x', srcs=['foo.cc'])", "cc_binary(name='_objs/x/foo.pic.o', srcs=['bar.cc'], " + "malloc = '//base:system_malloc')"); write("x/BUILD", "cc_binary(name = 'y', srcs = ['y.cc'], malloc = '//base:system_malloc')"); write("x/y.cc", "int main() { return 0; }"); write("conflict/foo.cc", "int main() { return 0; }"); write("conflict/bar.cc", "int main() { return 0; }"); write("x/y/BUILD", "cc_library(name = 'y')"); runtimeWrapper.addOptions("--keep_going"); assertThrows( BuildFailedException.class, () -> buildTarget("//x/y", "//x:y", "//conflict:x", "//conflict:_objs/x/foo.pic.o")); events.assertContainsError( "file 'conflict/_objs/x/foo.pic.o' is generated by these conflicting actions:"); events.assertContainsError( "/bin/x/y' (belonging to //x:y) is a prefix of output path 'blaze-out"); // When targets have conflicting artifacts, one of them "wins" and is successfully built. All // other targets fail analysis for conflicting with the first. assertThat(eventListener.failedTargetNames).containsAtLeast("//x:y", "//x/y:y"); assertThat(eventListener.failedTargetNames).hasSize(3); assertThat(eventListener.failedTargetNames) .containsAnyOf("//conflict:x", "//conflict:_objs/x/foo.pic.o"); } @Test public void repeatedConflictBuild() throws Exception { write( "foo/conflict.bzl", "def _conflict_impl(ctx):", " conflict_output = ctx.actions.declare_file('conflict_output')", " other = ctx.actions.declare_file('other' + ctx.attr.other_name)", " ctx.actions.run_shell(", " outputs = [conflict_output, other],", " command = 'touch %s %s' % (conflict_output.path, other.path)", " )", " return DefaultInfo(files = depset([conflict_output, other]))", "", "my_rule = rule(", " implementation=_conflict_impl,", " attrs = { 'other_name': attr.string() },", ")"); write( "foo/BUILD", "load('//foo:conflict.bzl', 'my_rule')", "my_rule(name = 'first', other_name = '1')", "my_rule(name = 'second', other_name = '2')"); ViewCreationFailedException e = assertThrows( ViewCreationFailedException.class, () -> buildTarget("//foo:first", "//foo:second")); assertThat(e) .hasCauseThat() .hasCauseThat() .isInstanceOf(MutableActionGraph.ActionConflictException.class); assertThat(eventListener.failedTargetNames).containsAnyOf("//foo:first", "//foo:second"); eventListener.failedTargetNames.clear(); e = assertThrows( ViewCreationFailedException.class, () -> buildTarget("//foo:first", "//foo:second")); assertThat(e) .hasCauseThat() .hasCauseThat() .isInstanceOf(MutableActionGraph.ActionConflictException.class); assertThat(eventListener.failedTargetNames).containsAnyOf("//foo:first", "//foo:second"); } }
src/test/java/com/google/devtools/build/lib/buildtool/OutputArtifactConflictTest.java
// Copyright 2020 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.buildtool; import static com.google.common.truth.Truth.assertThat; import static com.google.devtools.build.lib.testutil.MoreAsserts.assertNoEvents; import static org.junit.Assert.assertThrows; import static org.junit.Assert.fail; import com.google.common.collect.Iterables; import com.google.common.eventbus.Subscribe; import com.google.devtools.build.lib.actions.BuildFailedException; import com.google.devtools.build.lib.actions.MutableActionGraph; import com.google.devtools.build.lib.analysis.AnalysisFailureEvent; import com.google.devtools.build.lib.analysis.ViewCreationFailedException; import com.google.devtools.build.lib.buildeventstream.BuildEventStreamProtos.BuildEventId.TargetCompletedId; import com.google.devtools.build.lib.buildtool.util.GoogleBuildIntegrationTestCase; import com.google.devtools.build.lib.runtime.BlazeModule; import com.google.devtools.build.lib.runtime.BlazeRuntime; import com.google.devtools.build.lib.runtime.CommandEnvironment; import com.google.testing.junit.testparameterinjector.TestParameter; import com.google.testing.junit.testparameterinjector.TestParameterInjector; import java.util.ArrayList; import java.util.List; import org.junit.Test; import org.junit.runner.RunWith; /** Tests for action conflicts. 
*/ @RunWith(TestParameterInjector.class) public class OutputArtifactConflictTest extends GoogleBuildIntegrationTestCase { static class AnalysisFailureEventListener extends BlazeModule { private final List<TargetCompletedId> eventIds = new ArrayList<>(); private final List<String> failedTargetNames = new ArrayList<>(); @Override public void beforeCommand(CommandEnvironment env) { env.getEventBus().register(this); } @Subscribe public void onAnalysisFailure(AnalysisFailureEvent event) { eventIds.add(event.getEventId().getTargetCompleted()); failedTargetNames.add(event.getFailedTarget().getLabel().toString()); } } private final AnalysisFailureEventListener eventListener = new AnalysisFailureEventListener(); @Override protected BlazeRuntime.Builder getRuntimeBuilder() throws Exception { return super.getRuntimeBuilder().addBlazeModule(eventListener); } @Test public void testArtifactPrefix( @TestParameter boolean keepGoing, @TestParameter boolean modifyBuildFile) throws Exception { if (modifyBuildFile) { write("x/BUILD", "cc_library(name = 'y', srcs = ['y.cc'])"); } else { write("x/BUILD", "cc_binary(name = 'y', srcs = ['y.cc'], malloc = '//base:system_malloc')"); } write("x/y/BUILD", "cc_library(name = 'y')"); write("x/y.cc", "int main() { return 0; }"); if (modifyBuildFile) { buildTarget("//x/y", "//x:y"); write("x/BUILD", "cc_binary(name = 'y', srcs = ['y.cc'], malloc = '//base:system_malloc')"); } else { buildTarget("//x/y"); } assertNoEvents(events.errors()); assertThat(eventListener.failedTargetNames).isEmpty(); if (keepGoing) { runtimeWrapper.addOptions("--keep_going"); } try { // Skyframe full should throw an error here even if we just build //x:y. However, because our // testing infrastructure sets up lots of symlinks, Skyframe invalidates the //x/y action, and // so would not find a conflict here without re-evaluating //x/y. Note that in a real client, // requesting the //x/y target would not be necessary to throw an exception. buildTarget("//x:y", "//x/y"); fail(); } catch (BuildFailedException | ViewCreationFailedException e) { // Expected. } events.assertContainsError("output path 'blaze-out/"); // Skip over config key string ... 
events.assertContainsError( "/bin/x/y' (belonging to //x:y) is a prefix of output path 'blaze-out"); assertThat(Iterables.size(events.errors())).isGreaterThan(1); if (keepGoing) { assertThat(eventListener.failedTargetNames).containsExactly("//x:y", "//x/y:y"); } else { assertThat(eventListener.failedTargetNames).containsAnyOf("//x:y", "//x/y:y"); } } @Test public void testAspectArtifactSharesPrefixWithTargetArtifact( @TestParameter boolean keepGoing, @TestParameter boolean modifyBuildFile) throws Exception { if (modifyBuildFile) { write("x/BUILD", "genrule(name = 'y', outs = ['y.out'], cmd = 'touch $@')"); } else { write("x/BUILD", "genrule(name = 'y', outs = ['y.bad'], cmd = 'touch $@')"); } write("x/y/BUILD", "cc_library(name = 'y')"); write( "x/aspect.bzl", "def _aspect_impl(target, ctx):", " if not getattr(ctx.rule.attr, 'outs', None):", " return struct(output_groups = {})", " conflict_outputs = list()", " for out in ctx.rule.attr.outs:", " if out.name[1:] == '.bad':", " aspect_out = ctx.actions.declare_file(out.name[:1])", " conflict_outputs.append(aspect_out)", " cmd = 'echo %s > %s' % (out.name, aspect_out.path)", " ctx.actions.run_shell(", " outputs = [aspect_out],", " command = cmd,", " )", " return [OutputGroupInfo(", " files = depset(conflict_outputs)", " )]", "", "my_aspect = aspect(implementation = _aspect_impl)"); if (modifyBuildFile) { buildTarget("//x/y", "//x:y"); write("x/BUILD", "genrule(name = 'y', outs = ['y.bad'], cmd = 'touch $@')"); } else { buildTarget("//x/y"); } assertNoEvents(events.errors()); assertThat(eventListener.failedTargetNames).isEmpty(); addOptions("--aspects=//x:aspect.bzl%my_aspect", "--output_groups=files"); if (keepGoing) { addOptions("--keep_going"); } try { // Skyframe full should throw an error here even if we just build //x:y. However, because our // testing infrastructure sets up lots of symlinks, Skyframe invalidates the //x/y action, and // so would not find a conflict here without re-evaluating //x/y. Note that in a real client, // requesting the //x/y target would not be necessary to throw an exception. buildTarget("//x:y", "//x/y"); fail(); } catch (BuildFailedException | ViewCreationFailedException e) { // Expected. } events.assertContainsError("output path 'blaze-out/"); // Skip over config key string ... events.assertContainsError( "/bin/x/y' (belonging to //x:y) is a prefix of output path 'blaze-out"); // When an aspect artifact's path is in aa prefix conflict with a target artifact's path, the // target artifact is created and only the aspect fails analysis. 
assertThat(Iterables.size(events.errors())).isGreaterThan(1); assertThat(eventListener.failedTargetNames).containsExactly("//x:y"); assertThat(eventListener.eventIds.get(0).getAspect()).isEqualTo("//x:aspect.bzl%my_aspect"); } @Test public void testAspectArtifactPrefix( @TestParameter boolean keepGoing, @TestParameter boolean modifyBuildFile) throws Exception { if (modifyBuildFile) { write( "x/BUILD", "genrule(name = 'y', outs = ['y.out'], cmd = 'touch $@')", "genrule(name = 'ydir', outs = ['y.dir'], cmd = 'touch $@')"); } else { write( "x/BUILD", "genrule(name = 'y', outs = ['y.bad'], cmd = 'touch $@')", "genrule(name = 'ydir', outs = ['y.dir'], cmd = 'touch $@')"); } write( "x/aspect.bzl", "def _aspect_impl(target, ctx):", " if not getattr(ctx.rule.attr, 'outs', None):", " return struct(output_groups = {})", " conflict_outputs = list()", " for out in ctx.rule.attr.outs:", " if out.name[1:] == '.bad':", " aspect_out = ctx.actions.declare_file(out.name[:1])", " conflict_outputs.append(aspect_out)", " cmd = 'echo %s > %s' % (out.name, aspect_out.path)", " ctx.actions.run_shell(", " outputs = [aspect_out],", " command = cmd,", " )", " elif out.name[1:] == '.dir':", " aspect_out = ctx.actions.declare_file(out.name[:1] + '/' + out.name)", " conflict_outputs.append(aspect_out)", " out_dir = aspect_out.path[:len(aspect_out.path) - len(out.name) + 1]", " cmd = 'mkdir %s && echo %s > %s' % (out_dir, out.name, aspect_out.path)", " ctx.actions.run_shell(", " outputs = [aspect_out],", " command = cmd,", " )", " return [OutputGroupInfo(", " files = depset(conflict_outputs)", " )]", "", "my_aspect = aspect(implementation = _aspect_impl)"); if (modifyBuildFile) { buildTarget("//x:y", "//x:ydir"); write( "x/BUILD", "genrule(name = 'y', outs = ['y.bad'], cmd = 'touch $@')", "genrule(name = 'ydir', outs = ['y.dir'], cmd = 'touch $@')"); } else { buildTarget("//x:y"); } assertNoEvents(events.errors()); assertThat(eventListener.failedTargetNames).isEmpty(); addOptions("--aspects=//x:aspect.bzl%my_aspect", "--output_groups=files"); if (keepGoing) { addOptions("--keep_going"); } try { // Skyframe full should throw an error here even if we just build //x:y. However, because our // testing infrastructure sets up lots of symlinks, Skyframe invalidates the //x/y action, and // so would not find a conflict here without re-evaluating //x/y. Note that in a real client, // requesting the //x/y target would not be necessary to throw an exception. buildTarget("//x:y", "//x:ydir"); fail(); } catch (BuildFailedException | ViewCreationFailedException e) { // Expected. } events.assertContainsError("output path 'blaze-out/"); // Skip over config key string ... events.assertContainsError( "/bin/x/y' (belonging to //x:y) is a prefix of output path 'blaze-out"); assertThat(events.errors()).hasSize(1); assertThat(eventListener.eventIds.get(0).getAspect()).isEqualTo("//x:aspect.bzl%my_aspect"); if (keepGoing) { assertThat(eventListener.failedTargetNames).containsExactly("//x:y", "//x:ydir"); } else { assertThat(eventListener.failedTargetNames).containsAnyOf("//x:y", "//x:ydir"); } } @Test public void testInvalidatedConflict() throws Exception { write("x/BUILD", "cc_binary(name = 'y', srcs = ['y.cc'], malloc = '//base:system_malloc')"); write("x/y/BUILD", "cc_library(name = 'y')"); write("x/y.cc", "int main() { return 0; }"); try { buildTarget("//x:y", "//x/y"); fail(); } catch (BuildFailedException | ViewCreationFailedException e) { // Expected. 
} assertThat(eventListener.failedTargetNames).containsAnyOf("//x:y", "//x/y:y"); write("x/BUILD", "# no conflict"); events.clear(); buildTarget("//x/y"); events.assertNoWarningsOrErrors(); } @Test public void testNewTargetConflict() throws Exception { write("x/BUILD", "cc_binary(name = 'y', srcs = ['y.cc'], malloc = '//base:system_malloc')"); write("x/y/BUILD", "cc_library(name = 'y')"); write("x/y.cc", "int main() { return 0; }"); buildTarget("//x/y"); events.assertNoWarningsOrErrors(); try { buildTarget("//x:y", "//x/y"); fail(); } catch (BuildFailedException | ViewCreationFailedException e) { // Expected. } assertThat(eventListener.failedTargetNames).containsAnyOf("//x:y", "//x/y:y"); } @Test public void testTwoOverlappingBuildsHasNoConflict() throws Exception { write("x/BUILD", "cc_binary(name = 'y', srcs = ['y.cc'], malloc = '//base:system_malloc')"); write("x/y/BUILD", "cc_library(name = 'y')"); write("x/y.cc", "int main() { return 0; }"); buildTarget("//x/y"); events.assertNoWarningsOrErrors(); buildTarget("//x:y"); events.assertNoWarningsOrErrors(); // Verify that together they fail, even though no new targets have been analyzed try { buildTarget("//x:y", "//x/y"); fail(); } catch (BuildFailedException | ViewCreationFailedException e) { // Expected. } events.clear(); // Verify that they still don't fail individually, so no state remains buildTarget("//x/y"); events.assertNoWarningsOrErrors(); buildTarget("//x:y"); events.assertNoWarningsOrErrors(); } @Test public void testFailingTargetsDoNotCauseActionConflicts() throws Exception { write( "x/bad_rule.bzl", "def _impl(ctx):", " return list().this_method_does_not_exist()", "bad_rule = rule(_impl, attrs = {'deps': attr.label_list()})"); write( "x/BUILD", "load('//x:bad_rule.bzl', 'bad_rule')", "cc_binary(name = 'y', srcs = ['y.cc'], malloc = '//base:system_malloc')", "bad_rule(name = 'bad', deps = [':y'])"); write("x/y/BUILD", "cc_library(name = 'y')"); write("x/y.cc", "int main() { return 0; }"); runtimeWrapper.addOptions("--keep_going"); try { buildTarget("//x:y", "//x/y"); fail(); } catch (ViewCreationFailedException e) { fail("Unexpected artifact prefix conflict: " + e); } catch (BuildFailedException e) { // Expected. } } // Regression test for b/184944522. @Test public void testConflictErrorAndAnalysisError() throws Exception { write( "conflict/BUILD", "cc_library(name='x', srcs=['foo.cc'])", "cc_binary(name='_objs/x/foo.pic.o', srcs=['bar.cc'], " + "malloc = '//base:system_malloc')"); write("conflict/foo.cc", "int main() { return 0; }"); write("conflict/bar.cc", "int main() { return 0; }"); write("x/BUILD", "sh_library(name = 'x', deps = ['//y:y'])"); write("y/BUILD", "sh_library(name = 'y', visibility = ['//visibility:private'])"); runtimeWrapper.addOptions("--keep_going"); assertThrows( BuildFailedException.class, () -> buildTarget("//x:x", "//conflict:x", "//conflict:_objs/x/foo.pic.o")); events.assertContainsError( "file 'conflict/_objs/x/foo.pic.o' is generated by these conflicting actions:"); // When two targets have conflicting artifacts, the first target named on the commandline "wins" // and is successfully built. All other targets fail analysis for conflicting with the first. 
assertThat(eventListener.failedTargetNames) .containsExactly("//x:x", "//conflict:_objs/x/foo.pic.o"); } @Test public void testMultipleConflictErrors() throws Exception { write( "conflict/BUILD", "cc_library(name='x', srcs=['foo.cc'])", "cc_binary(name='_objs/x/foo.pic.o', srcs=['bar.cc'], " + "malloc = '//base:system_malloc')"); write("x/BUILD", "cc_binary(name = 'y', srcs = ['y.cc'], malloc = '//base:system_malloc')"); write("x/y.cc", "int main() { return 0; }"); write("conflict/foo.cc", "int main() { return 0; }"); write("conflict/bar.cc", "int main() { return 0; }"); write("x/y/BUILD", "cc_library(name = 'y')"); runtimeWrapper.addOptions("--keep_going"); assertThrows( BuildFailedException.class, () -> buildTarget("//x/y", "//x:y", "//conflict:x", "//conflict:_objs/x/foo.pic.o")); events.assertContainsError( "file 'conflict/_objs/x/foo.pic.o' is generated by these conflicting actions:"); events.assertContainsError( "/bin/x/y' (belonging to //x:y) is a prefix of output path 'blaze-out"); // When two targets have conflicting artifacts, the first target named on the commandline "wins" // and is successfully built. All other targets fail analysis for conflicting with the first. assertThat(eventListener.failedTargetNames) .containsExactly("//x:y", "//x/y:y", "//conflict:_objs/x/foo.pic.o"); } @Test public void repeatedConflictBuild() throws Exception { write( "foo/conflict.bzl", "def _conflict_impl(ctx):", " conflict_output = ctx.actions.declare_file('conflict_output')", " other = ctx.actions.declare_file('other' + ctx.attr.other_name)", " ctx.actions.run_shell(", " outputs = [conflict_output, other],", " command = 'touch %s %s' % (conflict_output.path, other.path)", " )", " return DefaultInfo(files = depset([conflict_output, other]))", "", "my_rule = rule(", " implementation=_conflict_impl,", " attrs = { 'other_name': attr.string() },", ")"); write( "foo/BUILD", "load('//foo:conflict.bzl', 'my_rule')", "my_rule(name = 'first', other_name = '1')", "my_rule(name = 'second', other_name = '2')"); ViewCreationFailedException e = assertThrows( ViewCreationFailedException.class, () -> buildTarget("//foo:first", "//foo:second")); assertThat(e) .hasCauseThat() .hasCauseThat() .isInstanceOf(MutableActionGraph.ActionConflictException.class); assertThat(eventListener.failedTargetNames).containsAnyOf("//foo:first", "//foo:second"); eventListener.failedTargetNames.clear(); e = assertThrows( ViewCreationFailedException.class, () -> buildTarget("//foo:first", "//foo:second")); assertThat(e) .hasCauseThat() .hasCauseThat() .isInstanceOf(MutableActionGraph.ActionConflictException.class); assertThat(eventListener.failedTargetNames).containsAnyOf("//foo:first", "//foo:second"); } }
"Deflake" OutputArtifactConflictTest#testMultipleConflictErrors: there is no reason that //conflict:x should be inserted into the action-conflict-checking map before //conflict:_objs/x/foo.pic.o, even though historically it always is. PiperOrigin-RevId: 368474386
src/test/java/com/google/devtools/build/lib/buildtool/OutputArtifactConflictTest.java
"Deflake" OutputArtifactConflictTest#testMultipleConflictErrors: there is no reason that //conflict:x should be inserted into the action-conflict-checking map before //conflict:_objs/x/foo.pic.o, even though historically it always is.
Java
apache-2.0
2348a94614afbf3d72940dbf859727966ed4160d
0
Distelli/graphql-apigen
package com.distelli.graphql.apigen; import graphql.language.Definition; import graphql.language.EnumTypeDefinition; import graphql.language.EnumValueDefinition; import graphql.language.FieldDefinition; import graphql.language.InputObjectTypeDefinition; import graphql.language.InputValueDefinition; import graphql.language.InterfaceTypeDefinition; import graphql.language.ListType; import graphql.language.NonNullType; import graphql.language.ObjectTypeDefinition; import graphql.language.OperationTypeDefinition; import graphql.language.ScalarTypeDefinition; import graphql.language.SchemaDefinition; import graphql.language.Type; import graphql.language.TypeName; import graphql.language.UnionTypeDefinition; import graphql.language.Value; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeSet; public class STModel { private static Map<String, String> BUILTINS = new HashMap<String, String>(){{ put("Int", null); put("Long", null); put("Float", null); put("String", null); put("Boolean", null); put("ID", null); put("BigInteger", "java.math.BigInteger"); put("BigDecimal", "java.math.BigDecimal"); put("Byte", null); put("Short", null); put("Char", null); }}; private static Map<String, String> RENAME = new HashMap<String, String>(){{ put("Int", "Integer"); put("ID", "String"); put("Char", "Character"); put("Float", "Double"); }}; public static class Builder { private TypeEntry typeEntry; private Map<String, TypeEntry> referenceTypes; public Builder withTypeEntry(TypeEntry typeEntry) { this.typeEntry = typeEntry; return this; } public Builder withReferenceTypes(Map<String, TypeEntry> referenceTypes) { this.referenceTypes = referenceTypes; return this; } public STModel build() { return new STModel(this); } } public static class DataResolver { public String fieldName; public String fieldType; public int listDepth; } public static class Interface { public String type; } public static class Arg { public String name; public String type; public String graphQLType; public String defaultValue; public Arg(String name, String type) { this.name = name; this.type = type; } public String getUcname() { return ucFirst(name); } } // Field of Interface, Object, InputObject, UnionType (no names), Enum (no types) public static class Field { public String name; public String type; public DataResolver dataResolver; public String graphQLType; public List<Arg> args; public String defaultValue; public Field(String name, String type) { this.name = name; this.type = type; } public String getUcname() { return ucFirst(name); } } private TypeEntry typeEntry; private Map<String, TypeEntry> referenceTypes; private List<Field> fields; public List<Interface> interfaces; private List<String> imports; private Field idField; private boolean gotIdField = false; private STModel(Builder builder) { this.typeEntry = builder.typeEntry; this.referenceTypes = builder.referenceTypes; } public void validate() { // TODO: Validate that any Object "implements" actually implements // the interface so we can error before compile time... // these throw if there are any inconsistencies... 
getFields(); getImports(); getInterfaces(); } public boolean isObjectType() { return typeEntry.getDefinition() instanceof ObjectTypeDefinition; } public boolean isInterfaceType() { return typeEntry.getDefinition() instanceof InterfaceTypeDefinition; } public boolean isEnumType() { return typeEntry.getDefinition() instanceof EnumTypeDefinition; } public boolean isScalarType() { return typeEntry.getDefinition() instanceof ScalarTypeDefinition; } public boolean isUnionType() { return typeEntry.getDefinition() instanceof UnionTypeDefinition; } public boolean isInputObjectType() { return typeEntry.getDefinition() instanceof InputObjectTypeDefinition; } public boolean isSchemaType() { return typeEntry.getDefinition() instanceof SchemaDefinition; } public String getPackageName() { return typeEntry.getPackageName(); } public String getName() { return typeEntry.getName(); } public String getUcname() { return ucFirst(getName()); } private static String ucFirst(String name) { if ( null == name || name.length() < 1 ) return name; return name.substring(0, 1).toUpperCase() + name.substring(1); } private static String lcFirst(String name) { if ( null == name || name.length() < 1 ) return name; return name.substring(0, 1).toLowerCase() + name.substring(1); } public synchronized Field getIdField() { if ( ! gotIdField ) { for ( Field field : getFields() ) { if ( "id".equals(field.name) ) { idField = field; break; } } gotIdField = true; } return idField; } public List<Interface> getInterfaces() { interfaces = new ArrayList<>(); if (!isObjectType()) { return interfaces; } ObjectTypeDefinition objectTypeDefinition = (ObjectTypeDefinition) typeEntry.getDefinition(); List<Type> interfaceTypes = objectTypeDefinition.getImplements(); for (Type anInterfaceType : interfaceTypes) { Interface anInterface = new Interface(); anInterface.type = toJavaTypeName(anInterfaceType); interfaces.add(anInterface); } return interfaces; } public List<DataResolver> getDataResolvers() { Map<String, DataResolver> resolvers = new LinkedHashMap<>(); for ( Field field : getFields() ) { DataResolver resolver = field.dataResolver; if ( null == resolver ) continue; resolvers.put(resolver.fieldType, resolver); } return new ArrayList<>(resolvers.values()); } public synchronized List<String> getImports() { if ( null == imports ) { Definition def = typeEntry.getDefinition(); Set<String> names = new TreeSet<String>(); if ( isObjectType() ) { addImports(names, (ObjectTypeDefinition)def); } else if ( isInterfaceType() ) { addImports(names, (InterfaceTypeDefinition)def); } else if ( isInputObjectType() ) { addImports(names, (InputObjectTypeDefinition)def); } else if ( isUnionType() ) { addImports(names, (UnionTypeDefinition)def); } else if ( isEnumType() ) { addImports(names, (EnumTypeDefinition)def); } else if ( isSchemaType() ) { addImports(names, (SchemaDefinition)def); } imports = new ArrayList<>(names); } return imports; } public synchronized List<Field> getFields() { if ( null == fields ) { Definition def = typeEntry.getDefinition(); if ( isObjectType() ) { fields = getFields((ObjectTypeDefinition)def); } else if ( isInterfaceType() ) { fields = getFields((InterfaceTypeDefinition)def); } else if ( isInputObjectType() ) { fields = getFields((InputObjectTypeDefinition)def); } else if ( isUnionType() ) { fields = getFields((UnionTypeDefinition)def); } else if ( isEnumType() ) { fields = getFields((EnumTypeDefinition)def); } else if ( isSchemaType() ) { fields = getFields((SchemaDefinition)def); } else { fields = Collections.emptyList(); } } 
return fields; } private List<Field> getFields(ObjectTypeDefinition def) { List<Field> fields = new ArrayList<Field>(); for ( FieldDefinition fieldDef : def.getFieldDefinitions() ) { Field field = new Field(fieldDef.getName(), toJavaTypeName(fieldDef.getType())); field.graphQLType = toGraphQLType(fieldDef.getType()); field.dataResolver = toDataResolver(fieldDef.getType()); field.args = toArgs(fieldDef.getInputValueDefinitions()); fields.add(field); } return fields; } private List<Field> getFields(InterfaceTypeDefinition def) { List<Field> fields = new ArrayList<Field>(); for ( FieldDefinition fieldDef : def.getFieldDefinitions() ) { Field field = new Field(fieldDef.getName(), toJavaTypeName(fieldDef.getType())); field.args = toArgs(fieldDef.getInputValueDefinitions()); fields.add(field); } return fields; } private List<Field> getFields(InputObjectTypeDefinition def) { List<Field> fields = new ArrayList<Field>(); for ( InputValueDefinition fieldDef : def.getInputValueDefinitions() ) { Field field = new Field(fieldDef.getName(), toJavaTypeName(fieldDef.getType())); field.graphQLType = toGraphQLType(fieldDef.getType()); field.defaultValue = toJavaValue(fieldDef.getDefaultValue()); fields.add(field); } return fields; } private List<Field> getFields(UnionTypeDefinition def) { List<Field> fields = new ArrayList<Field>(); for ( Type type : def.getMemberTypes() ) { fields.add(new Field(null, toJavaTypeName(type))); } return fields; } private List<Field> getFields(EnumTypeDefinition def) { List<Field> fields = new ArrayList<Field>(); for ( EnumValueDefinition fieldDef : def.getEnumValueDefinitions() ) { fields.add(new Field(fieldDef.getName(), null)); } return fields; } private List<Field> getFields(SchemaDefinition def) { List<Field> fields = new ArrayList<Field>(); for ( OperationTypeDefinition fieldDef : def.getOperationTypeDefinitions() ) { fields.add(new Field(fieldDef.getName(), toJavaTypeName(fieldDef.getType()))); } return fields; } private List<Arg> toArgs(List<InputValueDefinition> defs) { List<Arg> result = new ArrayList<>(); for ( InputValueDefinition def : defs ) { Arg arg = new Arg(def.getName(), toJavaTypeName(def.getType())); arg.graphQLType = toGraphQLType(def.getType()); arg.defaultValue = toJavaValue(def.getDefaultValue()); result.add(arg); } return result; } private String toJavaValue(Value value) { // TODO: Implement me! 
return null; } private DataResolver toDataResolver(Type type) { if ( type instanceof ListType ) { DataResolver resolver = toDataResolver(((ListType)type).getType()); if ( null == resolver ) return null; resolver.listDepth++; return resolver; } else if ( type instanceof NonNullType ) { return toDataResolver(((NonNullType)type).getType()); } else if ( type instanceof TypeName ) { String typeName = ((TypeName)type).getName(); if ( BUILTINS.containsKey(typeName) ) return null; TypeEntry typeEntry = referenceTypes.get(typeName); if ( !typeEntry.hasIdField() ) return null; DataResolver resolver = new DataResolver(); resolver.fieldType = typeName + ".Resolver"; resolver.fieldName = "_" + lcFirst(typeName) + "Resolver"; return resolver; } else { throw new UnsupportedOperationException("Unknown Type="+type.getClass().getName()); } } private String toGraphQLType(Type type) { if ( type instanceof ListType ) { return "new GraphQLList(" + toGraphQLType(((ListType)type).getType()) + ")"; } else if ( type instanceof NonNullType ) { return toGraphQLType(((NonNullType)type).getType()); } else if ( type instanceof TypeName ) { String name = ((TypeName)type).getName(); if ( BUILTINS.containsKey(name) ) { return "Scalars.GraphQL" + name; } return "new GraphQLTypeReference(\""+name+"\")"; } else { throw new UnsupportedOperationException("Unknown Type="+type.getClass().getName()); } } private String toJavaTypeName(Type type) { if ( type instanceof ListType ) { return "List<" + toJavaTypeName(((ListType)type).getType()) + ">"; } else if ( type instanceof NonNullType ) { return toJavaTypeName(((NonNullType)type).getType()); } else if ( type instanceof TypeName ) { String name = ((TypeName)type).getName(); String rename = RENAME.get(name); // TODO: scalar type directive to get implementation class... if ( null != rename ) return rename; return name; } else { throw new UnsupportedOperationException("Unknown Type="+type.getClass().getName()); } } private void addImports(Collection<String> imports, ObjectTypeDefinition def) { for ( FieldDefinition fieldDef : def.getFieldDefinitions() ) { addImports(imports, fieldDef.getType()); } } private void addImports(Collection<String> imports, InterfaceTypeDefinition def) { for ( FieldDefinition fieldDef : def.getFieldDefinitions() ) { addImports(imports, fieldDef.getType()); } } private void addImports(Collection<String> imports, InputObjectTypeDefinition def) { for ( InputValueDefinition fieldDef : def.getInputValueDefinitions() ) { addImports(imports, fieldDef.getType()); } } private void addImports(Collection<String> imports, UnionTypeDefinition def) { for ( Type type : def.getMemberTypes() ) { addImports(imports, type); } } private void addImports(Collection<String> imports, EnumTypeDefinition def) { // No imports should be necessary... 
} private void addImports(Collection<String> imports, SchemaDefinition def) { for ( OperationTypeDefinition fieldDef : def.getOperationTypeDefinitions() ) { addImports(imports, fieldDef.getType()); } } private void addImports(Collection<String> imports, Type type) { if ( type instanceof ListType ) { imports.add("java.util.List"); addImports(imports, ((ListType)type).getType()); } else if ( type instanceof NonNullType ) { addImports(imports, ((NonNullType)type).getType()); } else if ( type instanceof TypeName ) { String name = ((TypeName)type).getName(); if ( BUILTINS.containsKey(name) ) { String importName = BUILTINS.get(name); if ( null == importName ) return; imports.add(importName); } else { TypeEntry refEntry = referenceTypes.get(name); // TODO: scalar name may be different... should read annotations for scalars. if ( null == refEntry ) { throw new RuntimeException("Unknown type '"+name+"' was not defined in the schema"); } else { imports.add(refEntry.getPackageName() + "." + name); } } } else { throw new RuntimeException("Unknown Type="+type.getClass().getName()); } } }
apigen/src/main/java/com/distelli/graphql/apigen/STModel.java
package com.distelli.graphql.apigen; import graphql.language.Definition; import graphql.language.EnumTypeDefinition; import graphql.language.EnumValueDefinition; import graphql.language.FieldDefinition; import graphql.language.InputObjectTypeDefinition; import graphql.language.InputValueDefinition; import graphql.language.InterfaceTypeDefinition; import graphql.language.ListType; import graphql.language.NonNullType; import graphql.language.ObjectTypeDefinition; import graphql.language.OperationTypeDefinition; import graphql.language.ScalarTypeDefinition; import graphql.language.SchemaDefinition; import graphql.language.Type; import graphql.language.TypeName; import graphql.language.UnionTypeDefinition; import graphql.language.Value; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeSet; public class STModel { private static Map<String, String> BUILTINS = new HashMap<String, String>(){{ put("Int", null); put("Long", null); put("Float", null); put("String", null); put("Boolean", null); put("ID", null); put("BigInteger", "java.math.BigInteger"); put("BigDecimal", "java.math.BigDecimal"); put("Byte", null); put("Short", null); put("Char", null); }}; private static Map<String, String> RENAME = new HashMap<String, String>(){{ put("Int", "Integer"); put("ID", "String"); put("Char", "Character"); }}; public static class Builder { private TypeEntry typeEntry; private Map<String, TypeEntry> referenceTypes; public Builder withTypeEntry(TypeEntry typeEntry) { this.typeEntry = typeEntry; return this; } public Builder withReferenceTypes(Map<String, TypeEntry> referenceTypes) { this.referenceTypes = referenceTypes; return this; } public STModel build() { return new STModel(this); } } public static class DataResolver { public String fieldName; public String fieldType; public int listDepth; } public static class Interface { public String type; } public static class Arg { public String name; public String type; public String graphQLType; public String defaultValue; public Arg(String name, String type) { this.name = name; this.type = type; } public String getUcname() { return ucFirst(name); } } // Field of Interface, Object, InputObject, UnionType (no names), Enum (no types) public static class Field { public String name; public String type; public DataResolver dataResolver; public String graphQLType; public List<Arg> args; public String defaultValue; public Field(String name, String type) { this.name = name; this.type = type; } public String getUcname() { return ucFirst(name); } } private TypeEntry typeEntry; private Map<String, TypeEntry> referenceTypes; private List<Field> fields; public List<Interface> interfaces; private List<String> imports; private Field idField; private boolean gotIdField = false; private STModel(Builder builder) { this.typeEntry = builder.typeEntry; this.referenceTypes = builder.referenceTypes; } public void validate() { // TODO: Validate that any Object "implements" actually implements // the interface so we can error before compile time... // these throw if there are any inconsistencies... 
getFields(); getImports(); getInterfaces(); } public boolean isObjectType() { return typeEntry.getDefinition() instanceof ObjectTypeDefinition; } public boolean isInterfaceType() { return typeEntry.getDefinition() instanceof InterfaceTypeDefinition; } public boolean isEnumType() { return typeEntry.getDefinition() instanceof EnumTypeDefinition; } public boolean isScalarType() { return typeEntry.getDefinition() instanceof ScalarTypeDefinition; } public boolean isUnionType() { return typeEntry.getDefinition() instanceof UnionTypeDefinition; } public boolean isInputObjectType() { return typeEntry.getDefinition() instanceof InputObjectTypeDefinition; } public boolean isSchemaType() { return typeEntry.getDefinition() instanceof SchemaDefinition; } public String getPackageName() { return typeEntry.getPackageName(); } public String getName() { return typeEntry.getName(); } public String getUcname() { return ucFirst(getName()); } private static String ucFirst(String name) { if ( null == name || name.length() < 1 ) return name; return name.substring(0, 1).toUpperCase() + name.substring(1); } private static String lcFirst(String name) { if ( null == name || name.length() < 1 ) return name; return name.substring(0, 1).toLowerCase() + name.substring(1); } public synchronized Field getIdField() { if ( ! gotIdField ) { for ( Field field : getFields() ) { if ( "id".equals(field.name) ) { idField = field; break; } } gotIdField = true; } return idField; } public List<Interface> getInterfaces() { interfaces = new ArrayList<>(); if (!isObjectType()) { return interfaces; } ObjectTypeDefinition objectTypeDefinition = (ObjectTypeDefinition) typeEntry.getDefinition(); List<Type> interfaceTypes = objectTypeDefinition.getImplements(); for (Type anInterfaceType : interfaceTypes) { Interface anInterface = new Interface(); anInterface.type = toJavaTypeName(anInterfaceType); interfaces.add(anInterface); } return interfaces; } public List<DataResolver> getDataResolvers() { Map<String, DataResolver> resolvers = new LinkedHashMap<>(); for ( Field field : getFields() ) { DataResolver resolver = field.dataResolver; if ( null == resolver ) continue; resolvers.put(resolver.fieldType, resolver); } return new ArrayList<>(resolvers.values()); } public synchronized List<String> getImports() { if ( null == imports ) { Definition def = typeEntry.getDefinition(); Set<String> names = new TreeSet<String>(); if ( isObjectType() ) { addImports(names, (ObjectTypeDefinition)def); } else if ( isInterfaceType() ) { addImports(names, (InterfaceTypeDefinition)def); } else if ( isInputObjectType() ) { addImports(names, (InputObjectTypeDefinition)def); } else if ( isUnionType() ) { addImports(names, (UnionTypeDefinition)def); } else if ( isEnumType() ) { addImports(names, (EnumTypeDefinition)def); } else if ( isSchemaType() ) { addImports(names, (SchemaDefinition)def); } imports = new ArrayList<>(names); } return imports; } public synchronized List<Field> getFields() { if ( null == fields ) { Definition def = typeEntry.getDefinition(); if ( isObjectType() ) { fields = getFields((ObjectTypeDefinition)def); } else if ( isInterfaceType() ) { fields = getFields((InterfaceTypeDefinition)def); } else if ( isInputObjectType() ) { fields = getFields((InputObjectTypeDefinition)def); } else if ( isUnionType() ) { fields = getFields((UnionTypeDefinition)def); } else if ( isEnumType() ) { fields = getFields((EnumTypeDefinition)def); } else if ( isSchemaType() ) { fields = getFields((SchemaDefinition)def); } else { fields = Collections.emptyList(); } } 
return fields; } private List<Field> getFields(ObjectTypeDefinition def) { List<Field> fields = new ArrayList<Field>(); for ( FieldDefinition fieldDef : def.getFieldDefinitions() ) { Field field = new Field(fieldDef.getName(), toJavaTypeName(fieldDef.getType())); field.graphQLType = toGraphQLType(fieldDef.getType()); field.dataResolver = toDataResolver(fieldDef.getType()); field.args = toArgs(fieldDef.getInputValueDefinitions()); fields.add(field); } return fields; } private List<Field> getFields(InterfaceTypeDefinition def) { List<Field> fields = new ArrayList<Field>(); for ( FieldDefinition fieldDef : def.getFieldDefinitions() ) { Field field = new Field(fieldDef.getName(), toJavaTypeName(fieldDef.getType())); field.args = toArgs(fieldDef.getInputValueDefinitions()); fields.add(field); } return fields; } private List<Field> getFields(InputObjectTypeDefinition def) { List<Field> fields = new ArrayList<Field>(); for ( InputValueDefinition fieldDef : def.getInputValueDefinitions() ) { Field field = new Field(fieldDef.getName(), toJavaTypeName(fieldDef.getType())); field.graphQLType = toGraphQLType(fieldDef.getType()); field.defaultValue = toJavaValue(fieldDef.getDefaultValue()); fields.add(field); } return fields; } private List<Field> getFields(UnionTypeDefinition def) { List<Field> fields = new ArrayList<Field>(); for ( Type type : def.getMemberTypes() ) { fields.add(new Field(null, toJavaTypeName(type))); } return fields; } private List<Field> getFields(EnumTypeDefinition def) { List<Field> fields = new ArrayList<Field>(); for ( EnumValueDefinition fieldDef : def.getEnumValueDefinitions() ) { fields.add(new Field(fieldDef.getName(), null)); } return fields; } private List<Field> getFields(SchemaDefinition def) { List<Field> fields = new ArrayList<Field>(); for ( OperationTypeDefinition fieldDef : def.getOperationTypeDefinitions() ) { fields.add(new Field(fieldDef.getName(), toJavaTypeName(fieldDef.getType()))); } return fields; } private List<Arg> toArgs(List<InputValueDefinition> defs) { List<Arg> result = new ArrayList<>(); for ( InputValueDefinition def : defs ) { Arg arg = new Arg(def.getName(), toJavaTypeName(def.getType())); arg.graphQLType = toGraphQLType(def.getType()); arg.defaultValue = toJavaValue(def.getDefaultValue()); result.add(arg); } return result; } private String toJavaValue(Value value) { // TODO: Implement me! 
return null; } private DataResolver toDataResolver(Type type) { if ( type instanceof ListType ) { DataResolver resolver = toDataResolver(((ListType)type).getType()); if ( null == resolver ) return null; resolver.listDepth++; return resolver; } else if ( type instanceof NonNullType ) { return toDataResolver(((NonNullType)type).getType()); } else if ( type instanceof TypeName ) { String typeName = ((TypeName)type).getName(); if ( BUILTINS.containsKey(typeName) ) return null; TypeEntry typeEntry = referenceTypes.get(typeName); if ( !typeEntry.hasIdField() ) return null; DataResolver resolver = new DataResolver(); resolver.fieldType = typeName + ".Resolver"; resolver.fieldName = "_" + lcFirst(typeName) + "Resolver"; return resolver; } else { throw new UnsupportedOperationException("Unknown Type="+type.getClass().getName()); } } private String toGraphQLType(Type type) { if ( type instanceof ListType ) { return "new GraphQLList(" + toGraphQLType(((ListType)type).getType()) + ")"; } else if ( type instanceof NonNullType ) { return toGraphQLType(((NonNullType)type).getType()); } else if ( type instanceof TypeName ) { String name = ((TypeName)type).getName(); if ( BUILTINS.containsKey(name) ) { return "Scalars.GraphQL" + name; } return "new GraphQLTypeReference(\""+name+"\")"; } else { throw new UnsupportedOperationException("Unknown Type="+type.getClass().getName()); } } private String toJavaTypeName(Type type) { if ( type instanceof ListType ) { return "List<" + toJavaTypeName(((ListType)type).getType()) + ">"; } else if ( type instanceof NonNullType ) { return toJavaTypeName(((NonNullType)type).getType()); } else if ( type instanceof TypeName ) { String name = ((TypeName)type).getName(); String rename = RENAME.get(name); // TODO: scalar type directive to get implementation class... if ( null != rename ) return rename; return name; } else { throw new UnsupportedOperationException("Unknown Type="+type.getClass().getName()); } } private void addImports(Collection<String> imports, ObjectTypeDefinition def) { for ( FieldDefinition fieldDef : def.getFieldDefinitions() ) { addImports(imports, fieldDef.getType()); } } private void addImports(Collection<String> imports, InterfaceTypeDefinition def) { for ( FieldDefinition fieldDef : def.getFieldDefinitions() ) { addImports(imports, fieldDef.getType()); } } private void addImports(Collection<String> imports, InputObjectTypeDefinition def) { for ( InputValueDefinition fieldDef : def.getInputValueDefinitions() ) { addImports(imports, fieldDef.getType()); } } private void addImports(Collection<String> imports, UnionTypeDefinition def) { for ( Type type : def.getMemberTypes() ) { addImports(imports, type); } } private void addImports(Collection<String> imports, EnumTypeDefinition def) { // No imports should be necessary... 
} private void addImports(Collection<String> imports, SchemaDefinition def) { for ( OperationTypeDefinition fieldDef : def.getOperationTypeDefinitions() ) { addImports(imports, fieldDef.getType()); } } private void addImports(Collection<String> imports, Type type) { if ( type instanceof ListType ) { imports.add("java.util.List"); addImports(imports, ((ListType)type).getType()); } else if ( type instanceof NonNullType ) { addImports(imports, ((NonNullType)type).getType()); } else if ( type instanceof TypeName ) { String name = ((TypeName)type).getName(); if ( BUILTINS.containsKey(name) ) { String importName = BUILTINS.get(name); if ( null == importName ) return; imports.add(importName); } else { TypeEntry refEntry = referenceTypes.get(name); // TODO: scalar name may be different... should read annotations for scalars. if ( null == refEntry ) { throw new RuntimeException("Unknown type '"+name+"' was not defined in the schema"); } else { imports.add(refEntry.getPackageName() + "." + name); } } } else { throw new RuntimeException("Unknown Type="+type.getClass().getName()); } } }
Mapping GraphQL Float data type to Double in Java According to the graphql-java specification, the Float type should be mapped to Double. https://github.com/graphql-java/graphql-java/blob/master/src/main/java/graphql/Scalars.java#L122 The problem is that when a getXXX method is defined as Float in the GraphQL IDL, we get a ClassCastException because the graphql parser stores the value as a Double.
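A minimal sketch of the failure described in this commit message (the result map and the "price" field are hypothetical, not from the commit): graphql-java's Scalars.GraphQLFloat is backed by java.lang.Double, so a getter generated for an IDL Float field must be typed Double, not Float.

import java.util.Map;

class FloatMappingSketch {
    static void demo(Map<String, Object> resultData) {
        // graphql-java coerces IDL Float values to java.lang.Double internally.
        Double ok = (Double) resultData.get("price");  // fine
        Float bad = (Float) resultData.get("price");   // ClassCastException at runtime
    }
}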
apigen/src/main/java/com/distelli/graphql/apigen/STModel.java
Mapping GraphQL Float data type to Double in Java
Java
apache-2.0
b1cab871f6b5730875be61babddb12128041e9a8
0
cwenao/DSAA
/**
 * Company
 * Copyright (C) 2014-2017 All Rights Reserved.
 */
package com.cwenao.datastructure;

import com.cwenao.util.DrawGraphForSearch;
import com.cwenao.util.Vertexes;

import java.util.ArrayDeque;
import java.util.Queue;

/**
 * @author cwenao
 * @version $Id BreadthFirstSearchPath.java, v 0.1 2017-07-07 8:17 cwenao Exp $$
 */
public class BreadthFirstSearchPath {

    private static final int maxVertexes = 9;

    private static Vertexes[] vertexes;

    private static int[][] adjacent;

    /**
     * Breadth-first traversal from the given start vertex; each vertex is
     * printed and marked visited when it is first discovered.
     */
    public static void entrySearch(Integer start) {
        Queue<Integer> queue = new ArrayDeque<>();
        printVertex(start);
        vertexes[start].setVisited(true);
        queue.add(start);

        while (!queue.isEmpty()) {
            int x = queue.poll();
            for (int i = 0; i < maxVertexes; i++) {
                if (adjacent[x][i] == 1 && !vertexes[i].getVisited()) {
                    printVertex(i);
                    vertexes[i].setVisited(true);
                    queue.offer(i);
                }
            }
        }
    }

    private static void printVertex(int i) {
        System.out.println(vertexes[i].getVertex() + " ");
    }

    public static void printAdjacent(int[][] adjacent) {
        for (int[] line : adjacent) {
            for (int i : line) {
                System.out.print(i + " ");
            }
            System.out.println();
        }
    }

    public static void main(String[] args) {
        DrawGraphForSearch drawGraphForSearch = new DrawGraphForSearch(maxVertexes);
        char[] ver = {'A','B','C','D','E','F','G','H','I'};
        vertexes = drawGraphForSearch.getVertexes();
        for (int i = 0; i < vertexes.length; i++) {
            Vertexes vertexesX = new Vertexes(ver[i]);
            vertexes[i] = vertexesX;
        }
        drawGraphForSearch.addEdge(0,1);
        drawGraphForSearch.addEdge(0,2);
        drawGraphForSearch.addEdge(1,2);
        drawGraphForSearch.addEdge(2,3);
        drawGraphForSearch.addEdge(2,5);
        drawGraphForSearch.addEdge(3,5);
        drawGraphForSearch.addEdge(5,8);

        drawGraphForSearch.setVertexes(vertexes);

        adjacent = drawGraphForSearch.getAdjacent();
        printAdjacent(adjacent);
        entrySearch(0);
    }
}
data-structure/src/main/java/com/cwenao/datastructure/BreadthFirstSearchPath.java
/** * Company * Copyright (C) 2014-2017 All Rights Reserved. */ package com.cwenao.datastructure; import com.cwenao.util.DrawGraphForSearch; import com.cwenao.util.Vertexes; import java.util.ArrayDeque; import java.util.Queue; /** * @author cwenao * @version $Id BreadthFirstSearchPath.java, v 0.1 2017-07-07 8:17 cwenao Exp $$ */ public class BreadthFirstSearchPath { private static final int maxVertexes = 5; private static Vertexes[] vertexes; private static int[][] adjacent; public static void entrySearar (Integer start) { Queue queue = new ArrayDeque(); queue.add(start); while (!queue.isEmpty()) { int x = (int) queue.poll(); printVertexe(x); for(int i =1;i<maxVertexes;i++) { if (adjacent[x][i] != 1 && vertexes[x].getVisited() == false) { vertexes[x].setVisited(true); queue.offer(i); } } } } private static void printVertexe(int i) { System.out.println(vertexes[i].getVertex() + " "); } public static void main(String[] args) { DrawGraphForSearch drawGraphForSearch = new DrawGraphForSearch(maxVertexes); char[] ver = {'A','B','C','D','E'}; //,'D','E','F','G','H','I' vertexes = drawGraphForSearch.getVertexes(); for(int i=0;i<vertexes.length;i++) { Vertexes vertexesX = new Vertexes(ver[i]); vertexes[i] = vertexesX; } drawGraphForSearch.addEdge(0,1); drawGraphForSearch.addEdge(0,2); drawGraphForSearch.addEdge(2,3); drawGraphForSearch.addEdge(3,5); drawGraphForSearch.addEdge(5,8); drawGraphForSearch.setVertexes(vertexes); adjacent = drawGraphForSearch.getAdjacent(); entrySearar(0); } }
Update the breadth-first search: fix the neighbor and visited checks and extend the sample graph to all nine vertices
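To make the fix above concrete, a self-contained sketch (array-based; names are illustrative, not from the commit) of breadth-first traversal over an adjacency matrix. The key invariant, which the old code violated by testing adjacent[x][i] != 1 and flagging the dequeued vertex instead of the discovered one, is mark-on-enqueue: a vertex is marked visited the moment it is queued, so it can never be queued twice.

import java.util.ArrayDeque;
import java.util.Queue;

class BfsSketch {
    static void bfs(int[][] adjacent, int start) {
        boolean[] visited = new boolean[adjacent.length];
        Queue<Integer> queue = new ArrayDeque<>();
        visited[start] = true;          // mark on enqueue, including the start vertex
        queue.add(start);
        while (!queue.isEmpty()) {
            int x = queue.poll();
            System.out.println(x);      // visit in breadth-first order
            for (int i = 0; i < adjacent.length; i++) {
                if (adjacent[x][i] == 1 && !visited[i]) {
                    visited[i] = true;  // never enqueue a vertex twice
                    queue.offer(i);
                }
            }
        }
    }
}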
data-structure/src/main/java/com/cwenao/datastructure/BreadthFirstSearchPath.java
Update the breadth-first search
Java
apache-2.0
7daa7f7430786a88c857d09e8d4caf7710b6e32a
0
noemus/kotlin-eclipse,noemus/kotlin-eclipse
package org.jetbrains.kotlin.parser; import java.io.File; import org.eclipse.core.resources.IFile; import org.jetbrains.jet.CompilerModeProvider; import org.jetbrains.jet.OperationModeProvider; import org.jetbrains.jet.lang.parsing.JetParser; import org.jetbrains.jet.lang.parsing.JetParserDefinition; import org.jetbrains.jet.lang.psi.JetFile; import org.jetbrains.jet.plugin.JetFileType; import org.jetbrains.jet.plugin.JetLanguage; import com.intellij.core.JavaCoreApplicationEnvironment; import com.intellij.core.JavaCoreProjectEnvironment; import com.intellij.lang.ASTNode; import com.intellij.lang.PsiBuilder; import com.intellij.lang.PsiBuilderFactory; import com.intellij.openapi.Disposable; import com.intellij.openapi.project.Project; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.PsiFile; import com.intellij.psi.PsiManager; public class KotlinParser { private final static Disposable DISPOSABLE = new Disposable() { @Override public void dispose() { } }; private final File file; private final JavaCoreApplicationEnvironment applicationEnvironment; private final Project project; private ASTNode tree; public KotlinParser(File file) { this.file = file; this.tree = null; applicationEnvironment = new JavaCoreApplicationEnvironment(DISPOSABLE); applicationEnvironment.registerFileType(JetFileType.INSTANCE, "kt"); applicationEnvironment.registerFileType(JetFileType.INSTANCE, "jet"); applicationEnvironment.registerParserDefinition(new JetParserDefinition()); applicationEnvironment.getApplication().registerService(OperationModeProvider.class, new CompilerModeProvider()); JavaCoreProjectEnvironment projectEnvironment = new JavaCoreProjectEnvironment(DISPOSABLE, applicationEnvironment); project = projectEnvironment.getProject(); } public KotlinParser(IFile iFile) { this(new File(iFile.getRawLocation().toOSString())); } public static ASTNode parse(IFile iFile) { return new KotlinParser(iFile).parse(); } public ASTNode parse() { JetParser jetParser = new JetParser(project); tree = jetParser.parse(null, createPsiBuilder(getNode(file)), getPsiFile(file)); return tree; } public ASTNode getTree() { return tree; } private PsiBuilder createPsiBuilder(ASTNode chameleon) { return PsiBuilderFactory.getInstance().createBuilder(project, chameleon, null, JetLanguage.INSTANCE, chameleon.getChars()); } private PsiFile getPsiFile(File file) { VirtualFile fileByPath = applicationEnvironment.getLocalFileSystem().findFileByPath(file.getAbsolutePath()); return PsiManager.getInstance(project).findFile(fileByPath); } private ASTNode getNode(File file) { JetFile jetFile = (JetFile) getPsiFile(file); return jetFile.getNode(); } }
kotlin-eclipse-core/src/org/jetbrains/kotlin/parser/KotlinParser.java
package org.jetbrains.kotlin.parser; import java.io.File; import org.eclipse.core.resources.IFile; import org.jetbrains.jet.CompilerModeProvider; import org.jetbrains.jet.OperationModeProvider; import org.jetbrains.jet.lang.parsing.JetParser; import org.jetbrains.jet.lang.parsing.JetParserDefinition; import org.jetbrains.jet.lang.psi.JetFile; import org.jetbrains.jet.plugin.JetFileType; import org.jetbrains.jet.plugin.JetLanguage; import com.intellij.core.JavaCoreApplicationEnvironment; import com.intellij.core.JavaCoreProjectEnvironment; import com.intellij.lang.ASTNode; import com.intellij.lang.PsiBuilder; import com.intellij.lang.PsiBuilderFactory; import com.intellij.openapi.Disposable; import com.intellij.openapi.project.Project; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.PsiFile; import com.intellij.psi.PsiManager; public class KotlinParser { private final static Disposable DISPOSABLE = new Disposable() { @Override public void dispose() { } }; private final File file; private final static JavaCoreApplicationEnvironment applicationEnvironment; private final static Project project; private ASTNode tree; static { applicationEnvironment = new JavaCoreApplicationEnvironment(DISPOSABLE); applicationEnvironment.registerFileType(JetFileType.INSTANCE, "kt"); applicationEnvironment.registerFileType(JetFileType.INSTANCE, "jet"); applicationEnvironment.registerParserDefinition(new JetParserDefinition()); applicationEnvironment.getApplication().registerService(OperationModeProvider.class, new CompilerModeProvider()); JavaCoreProjectEnvironment projectEnvironment = new JavaCoreProjectEnvironment(DISPOSABLE, applicationEnvironment); project = projectEnvironment.getProject(); } public KotlinParser(File file) { this.file = file; this.tree = null; } public KotlinParser(IFile iFile) { this(new File(iFile.getRawLocation().toOSString())); } public static ASTNode parse(IFile iFile) { return new KotlinParser(iFile).parse(); } public ASTNode parse() { JetParser jetParser = new JetParser(project); tree = jetParser.parse(null, createPsiBuilder(getNode(file)), getPsiFile(file)); return tree; } public ASTNode getTree() { return tree; } private PsiBuilder createPsiBuilder(ASTNode chameleon) { return PsiBuilderFactory.getInstance().createBuilder(project, chameleon, null, JetLanguage.INSTANCE, chameleon.getChars()); } private PsiFile getPsiFile(File file) { VirtualFile fileByPath = applicationEnvironment.getLocalFileSystem().findFileByPath(file.getAbsolutePath()); return PsiManager.getInstance(project).findFile(fileByPath); } private ASTNode getNode(File file) { JetFile jetFile = (JetFile) getPsiFile(file); return jetFile.getNode(); } }
Refresh the application environment every time we parse a file
kotlin-eclipse-core/src/org/jetbrains/kotlin/parser/KotlinParser.java
Refresh the application environment every time we parse a file
Java
apache-2.0
85fc3fecec7386db5c0c7793c64c4773e6990019
0
sirkkalap/DependencyCheck,sirkkalap/DependencyCheck,sirkkalap/DependencyCheck,sirkkalap/DependencyCheck,sirkkalap/DependencyCheck
/** * Contains classes for working with the CPE Lucene Index. */ package org.owasp.dependencycheck.data.cpe;
dependency-check-core/src/main/java/org/owasp/dependencycheck/data/cpe/package-info.java
/** * <html> * <head> * <title>org.owasp.dependencycheck.data.cpe</title> * </head> * <body> * Contains classes for working with the CPE Lucene Index. * </body> * </html> */ package org.owasp.dependencycheck.data.cpe;
updated package-info
dependency-check-core/src/main/java/org/owasp/dependencycheck/data/cpe/package-info.java
updated package-info
Java
apache-2.0
71484947b0bfcb9dda6e4fb2df9305975cfd6820
0
ConSol/sakuli,ConSol/sakuli,ConSol/sakuli,ConSol/sakuli,ConSol/sakuli,ConSol/sakuli
/* * Sakuli - Testing and Monitoring-Tool for Websites and common UIs. * * Copyright 2013 - 2017 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sakuli.services.cipher; import org.apache.commons.codec.binary.Base64; import org.sakuli.exceptions.SakuliCipherException; import javax.crypto.Cipher; import javax.crypto.IllegalBlockSizeException; import javax.crypto.spec.IvParameterSpec; import javax.crypto.spec.SecretKeySpec; import java.nio.charset.StandardCharsets; import java.security.InvalidParameterException; /** * Abstract class for the supported ciphers modules * * @author tschneck * Date: 6/28/17 */ public abstract class AbstractCipher implements CipherService { private static String IV_KEY = "IVcon17SakSoENVS"; private static String ALGORITHM = "AES/CBC/PKCS5Padding"; /** * Converts a String input to a byte array */ static byte[] convertStringToBytes(String s) { if (s == null) { throw new InvalidParameterException("can't convert null String to byte array"); } return s.getBytes(StandardCharsets.UTF_8); } /** * Encrypts the secret into a encrypted {@link String}, based on the MAC address of the first network interface of a machine. * Therewith it should be secured, that an encrypted secret is only valid on one physical machine. * * @param strToEncrypt the secret * @return a encrypted String, which is coupled to one physical machine * @throws SakuliCipherException if the encryption fails. */ public String encrypt(String strToEncrypt) throws SakuliCipherException { try { Cipher cipher = Cipher.getInstance(ALGORITHM); cipher.init(Cipher.ENCRYPT_MODE, getKey(), getIV()); return Base64.encodeBase64String(cipher.doFinal(strToEncrypt.getBytes())); } catch (Exception e) { throw new SakuliCipherException(e, getPreLogOutput()); } } abstract String getPreLogOutput(); /** * Decrypts a String to the secret. The decryption must be take place on the same physical machine like the encryption, see {@link #encrypt(String)}. * * @param strToDecrypt String to encrypt * @return the decrypted secret * @throws SakuliCipherException if the decryption fails. */ public String decrypt(String strToDecrypt) throws SakuliCipherException { try { Cipher cipher = Cipher.getInstance(ALGORITHM); cipher.init(Cipher.DECRYPT_MODE, getKey(), getIV()); return new String(cipher.doFinal(Base64.decodeBase64(strToDecrypt))); } catch (IllegalBlockSizeException e) { throw new SakuliCipherException("Maybe this secret hasn't been encrypted correctly! Maybe encrypt it again!", getPreLogOutput(), e); } catch (Exception e) { throw new SakuliCipherException(e, getPreLogOutput()); } } /** * build the initialization vector * * @return byte array wrapped {@link IvParameterSpec} */ protected IvParameterSpec getIV() { return new IvParameterSpec(AbstractCipher.convertStringToBytes(IV_KEY)); } /** * @return the expected master key for the encryption */ protected abstract SecretKeySpec getKey(); }
src/core/src/main/java/org/sakuli/services/cipher/AbstractCipher.java
/* * Sakuli - Testing and Monitoring-Tool for Websites and common UIs. * * Copyright 2013 - 2017 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sakuli.services.cipher; import org.apache.commons.codec.binary.Base64; import org.sakuli.exceptions.SakuliCipherException; import javax.crypto.Cipher; import javax.crypto.IllegalBlockSizeException; import javax.crypto.spec.IvParameterSpec; import javax.crypto.spec.SecretKeySpec; import java.nio.charset.Charset; import java.security.InvalidParameterException; /** * Abstract class for the supported ciphers modules * * @author tschneck * Date: 6/28/17 */ public abstract class AbstractCipher implements CipherService { private static String IV_KEY = "IVcon17SakSoENVS"; private static String ALGORITHM = "AES/CBC/PKCS5Padding"; /** * Converts a String input to a byte array */ static byte[] convertStringToBytes(String s) { if (s == null) { throw new InvalidParameterException("can't convert null String to byte array"); } return s.getBytes(Charset.defaultCharset()); } /** * Encrypts the secret into a encrypted {@link String}, based on the MAC address of the first network interface of a machine. * Therewith it should be secured, that an encrypted secret is only valid on one physical machine. * * @param strToEncrypt the secret * @return a encrypted String, which is coupled to one physical machine * @throws SakuliCipherException if the encryption fails. */ public String encrypt(String strToEncrypt) throws SakuliCipherException { try { Cipher cipher = Cipher.getInstance(ALGORITHM); cipher.init(Cipher.ENCRYPT_MODE, getKey(), getIV()); return Base64.encodeBase64String(cipher.doFinal(strToEncrypt.getBytes())); } catch (Exception e) { throw new SakuliCipherException(e, getPreLogOutput()); } } abstract String getPreLogOutput(); /** * Decrypts a String to the secret. The decryption must be take place on the same physical machine like the encryption, see {@link #encrypt(String)}. * * @param strToDecrypt String to encrypt * @return the decrypted secret * @throws SakuliCipherException if the decryption fails. */ public String decrypt(String strToDecrypt) throws SakuliCipherException { try { Cipher cipher = Cipher.getInstance(ALGORITHM); cipher.init(Cipher.DECRYPT_MODE, getKey(), getIV()); return new String(cipher.doFinal(Base64.decodeBase64(strToDecrypt))); } catch (IllegalBlockSizeException e) { throw new SakuliCipherException("Maybe this secret hasn't been encrypted correctly! Maybe encrypt it again!", getPreLogOutput(), e); } catch (Exception e) { throw new SakuliCipherException(e, getPreLogOutput()); } } /** * build the initialization vector * * @return byte array wrapped {@link IvParameterSpec} */ protected IvParameterSpec getIV() { return new IvParameterSpec(AbstractCipher.convertStringToBytes(IV_KEY)); } /** * @return the expected master key for the encryption */ protected abstract SecretKeySpec getKey(); }
#197 use UTF-8 for the string -> byte array conversion
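An illustrative sketch of the motivation (not part of the commit): Charset.defaultCharset() depends on the JVM and platform, so bytes derived from it can differ between machines, whereas StandardCharsets.UTF_8 is deterministic everywhere. For the ASCII-only IV in this class the two usually coincide, but any non-ASCII input diverges.

import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;

class CharsetSketch {
    public static void main(String[] args) {
        String secret = "pässwörd";  // deliberately non-ASCII
        byte[] platform = secret.getBytes(Charset.defaultCharset()); // e.g. 8 bytes under windows-1252
        byte[] utf8 = secret.getBytes(StandardCharsets.UTF_8);       // always 10 bytes
        System.out.println(platform.length + " vs " + utf8.length);
    }
}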
src/core/src/main/java/org/sakuli/services/cipher/AbstractCipher.java
#197 use UTF-8 for the string -> byte array conversion
Java
apache-2.0
4784b2d405b68ed203661f7fee9a500962c21a8d
0
apache/reef,gyeongin/reef,dkm2110/Microsoft-cisl,apache/incubator-reef,zerg-junior/incubator-reef,shulmanb/reef,nachocano/incubator-reef,apache/reef,afchung/reef,apache/incubator-reef,yunseong/reef,yunseong/reef,dougmsft/reef,yunseong/reef,yunseong/reef,jwang98052/incubator-reef,singlis/reef,beysims/reef,afchung/reef,dongjoon-hyun/incubator-reef,dongjoon-hyun/reef,dongjoon-hyun/reef,nachocano/incubator-reef,taegeonum/incubator-reef,zerg-junior/incubator-reef,dkm2110/veyor,swlsw/incubator-reef,afchung/reef,dongjoon-hyun/incubator-reef,tcNickolas/incubator-reef,shravanmn/reef,apache/incubator-reef,jsjason/incubator-reef,tcNickolas/reef,dkm2110/Microsoft-cisl,DifferentSC/incubator-reef,tcNickolas/reef,yingdachen/incubator-reef,dkm2110/Microsoft-cisl,shravanmn/reef,jsjason/incubator-reef,shravanmn/reef,markusweimer/incubator-reef,singlis/reef,yingdachen/incubator-reef,shulmanb/reef,gyeongin/reef,shravanmn/reef,markusweimer/incubator-reef,singlis/reef,dongjoon-hyun/reef,nachocano/incubator-reef,tcNickolas/incubator-reef,markusweimer/reef,beysims/reef,markusweimer/incubator-reef,dougmsft/reef,jwang98052/reef,dkm2110/Microsoft-cisl,nachocano/incubator-reef,DifferentSC/incubator-reef,afchung/reef,jwang98052/incubator-reef,beysims/reef,dafrista/incubator-reef,yunseong/incubator-reef,markusweimer/incubator-reef,afchung/incubator-reef,zerg-junior/incubator-reef,dkm2110/veyor,singlis/reef,beysims/reef,shravanmn/reef,dkm2110/Microsoft-cisl,afchung/reef,anupam128/reef,yunseong/reef,markusweimer/incubator-reef,dougmsft/reef,jwang98052/incubator-reef,tcNickolas/reef,yunseong/incubator-reef,afchung/incubator-reef,markusweimer/reef,yunseong/incubator-reef,yunseong/incubator-reef,tcNickolas/reef,taegeonum/incubator-reef,jsjason/incubator-reef,singlis/reef,dongjoon-hyun/incubator-reef,dkm2110/veyor,dafrista/incubator-reef,apache/reef,tcNickolas/incubator-reef,jwang98052/incubator-reef,markusweimer/incubator-reef,dongjoon-hyun/incubator-reef,DifferentSC/incubator-reef,bgchun/incubator-reef,afchung/reef,apache/reef,bgchun/incubator-reef,taegeonum/incubator-reef,apache/incubator-reef,markusweimer/reef,anupam128/reef,dafrista/incubator-reef,shravanmn/reef,yunseong/reef,DifferentSC/incubator-reef,apache/incubator-reef,jsjason/incubator-reef,markusweimer/reef,DifferentSC/incubator-reef,taegeonum/incubator-reef,anupam128/reef,yingdachen/incubator-reef,nachocano/incubator-reef,motus/reef,dongjoon-hyun/reef,dkm2110/veyor,shulmanb/reef,taegeonum/incubator-reef,shravanmn/reef,apache/reef,dongjoon-hyun/incubator-reef,dkm2110/veyor,swlsw/incubator-reef,yunseong/reef,dongjoon-hyun/incubator-reef,tcNickolas/reef,afchung/reef,dkm2110/Microsoft-cisl,tcNickolas/incubator-reef,motus/reef,jsjason/incubator-reef,tcNickolas/reef,apache/reef,yunseong/incubator-reef,swlsw/incubator-reef,jwang98052/reef,beysims/reef,DifferentSC/incubator-reef,markusweimer/reef,zerg-junior/incubator-reef,jwang98052/incubator-reef,dafrista/incubator-reef,nachocano/incubator-reef,yunseong/incubator-reef,dougmsft/reef,yingdachen/incubator-reef,dougmsft/reef,jsjason/incubator-reef,motus/reef,markusweimer/incubator-reef,tcNickolas/reef,jwang98052/incubator-reef,shulmanb/reef,markusweimer/reef,dongjoon-hyun/reef,yingdachen/incubator-reef,jwang98052/reef,DifferentSC/incubator-reef,jwang98052/reef,dafrista/incubator-reef,jsjason/incubator-reef,afchung/incubator-reef,shulmanb/reef,bgchun/incubator-reef,gyeongin/reef,jwang98052/reef,apache/reef,afchung/incubator-reef,swlsw/incubator-reef,anupam128/reef,afchung/incubator-reef,dougmsft/reef,zerg-junior/incubator-r
eef,dkm2110/Microsoft-cisl,singlis/reef,apache/incubator-reef,tcNickolas/incubator-reef,taegeonum/incubator-reef,yingdachen/incubator-reef,dkm2110/veyor,zerg-junior/incubator-reef,motus/reef,shulmanb/reef,gyeongin/reef,yingdachen/incubator-reef,tcNickolas/incubator-reef,motus/reef,dongjoon-hyun/reef,dkm2110/veyor,anupam128/reef,dongjoon-hyun/reef,gyeongin/reef,swlsw/incubator-reef,swlsw/incubator-reef,motus/reef,gyeongin/reef,singlis/reef,jwang98052/reef,zerg-junior/incubator-reef,bgchun/incubator-reef,dougmsft/reef,anupam128/reef,markusweimer/reef,afchung/incubator-reef,dafrista/incubator-reef,jwang98052/reef,tcNickolas/incubator-reef,anupam128/reef,shulmanb/reef,jwang98052/incubator-reef,yunseong/incubator-reef,motus/reef,swlsw/incubator-reef,nachocano/incubator-reef,afchung/incubator-reef,apache/incubator-reef,bgchun/incubator-reef,dafrista/incubator-reef,dongjoon-hyun/incubator-reef,gyeongin/reef
/* * Copyright 2013 Microsoft. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.microsoft.tang.util.walk; import com.microsoft.tang.Configuration; import com.microsoft.tang.types.Node; import com.microsoft.tang.types.ClassNode; import com.microsoft.tang.types.PackageNode; import com.microsoft.tang.types.NamedParameterNode; /** * Build a Graphviz representation of the configuration graph. * @author sergiym */ public final class GraphVisitorGraphviz extends AbstractTypedNodeVisitor implements EdgeVisitor { /** Legend for the configuration graph in Graphviz format */ private final static String LEGEND = " subgraph Legend {\n" + " label=\"Legend\";\n" + " PackageNode [shape=folder];\n" + " ClassNode [shape=box];\n" + " NamedParameterNode [shape=oval];\n" + " legend1l [shape=point, label=\"\"];\n" + " legend1r [shape=point, label=\"\"];\n" + " legend2l [shape=point, label=\"\"];\n" + " legend2r [shape=point, label=\"\"];\n" + " legend1l -> legend1r [style=dashed, dir=back, arrowtail=empty, label=\"implements\"];\n" + " legend2l -> legend2r [style=solid, dir=back, arrowtail=diamond, label=\"contains\"];\n" + " }\n"; /** Accumulate string representation of the graph here. */ private final transient StringBuilder mGraphStr = new StringBuilder( "digraph ConfigMain {\n" + LEGEND + " rankdir=LR;\n"); /** * @return TANG configuration represented as a Graphviz DOT string. */ @Override public String toString() { return this.mGraphStr.toString() + "}\n"; } /** * Process current class configuration node. * @param aNode Current configuration node. * @return true to proceed with the next node, false to cancel. */ @Override public boolean visit(final ClassNode aNode) { this.mGraphStr .append(" \"node_") .append(aNode.getName()) .append("\" [label=\"") .append(aNode.getName()) .append("\", shape=box];\n"); for (final Object implNode : aNode.getKnownImplementations()) { this.mGraphStr .append(" \"node_") .append(aNode.getName()) .append("\" -> \"node_") .append(((ClassNode) implNode).getName()) .append("\" [style=dashed, dir=back, arrowtail=empty];\n"); } return true; } /** * Process current package configuration node. * @param aNode Current configuration node. * @return true to proceed with the next node, false to cancel. */ @Override public boolean visit(final PackageNode aNode) { this.mGraphStr .append(" \"node_") .append(aNode.getName()) .append("\" [label=\"") .append(aNode.getFullName()) .append("\", shape=folder];\n"); return true; } /** * Process current configuration node for the named parameter. * @param aNode Current configuration node. * @return true to proceed with the next node, false to cancel. */ @Override public boolean visit(final NamedParameterNode aNode) { this.mGraphStr .append(" \"node_") .append(aNode.getName()) .append("\" [label=\"") .append(aNode.getSimpleArgName()) // parameter type, e.g. "Integer" .append("\\n") .append(aNode.getName()) // short name, e.g. "NumberOfThreads" .append(" = ") .append(aNode.getDefaultInstanceAsString()) // default value, e.g. 
"4" .append("\", shape=oval];\n"); return true; } /** * Process current edge of the configuration graph. * @param aNodeFrom Current configuration node. * @param aNodeTo Destination configuration node. * @return true to proceed with the next node, false to cancel. */ @Override public boolean visit(final Node aNodeFrom, final Node aNodeTo) { this.mGraphStr .append(" \"node_") .append(aNodeFrom.getName()) .append("\" -> \"node_") .append(aNodeTo.getName()) .append("\" [style=solid, dir=back, arrowtail=diamond];\n"); return true; } /** * Produce a Graphviz DOT string for a given TANG configuration. * @param config TANG configuration object. * @return configuration graph represented as a string in Graphviz DOT format. */ public static String getGraphvizStr(final Configuration config) { final GraphVisitorGraphviz visitor = new GraphVisitorGraphviz(); Walk.preorder(visitor, visitor, config); return visitor.toString(); } }
tang/src/main/java/com/microsoft/tang/util/walk/GraphVisitorGraphviz.java
/* * Copyright 2013 Microsoft. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.microsoft.tang.util.walk; import com.microsoft.tang.Configuration; import com.microsoft.tang.types.Node; import com.microsoft.tang.types.ClassNode; import com.microsoft.tang.types.PackageNode; import com.microsoft.tang.types.NamedParameterNode; /** * Build a Graphviz representation of the configuration graph. * @author sergiym */ public final class GraphVisitorGraphviz extends AbstractTypedNodeVisitor implements EdgeVisitor { /** Legend for the configuration graph in Graphviz format */ private final static String LEGEND = " subgraph Legend {\n" + " label=\"Legend\";" + " PackageNode [shape=folder];\n" + " ClassNode [shape=box];\n" + " NamedParameterNode [shape=oval];\n" + " legend1l [shape=point, label=\"\"];\n" + " legend1r [shape=point, label=\"\"];\n" + " legend2l [shape=point, label=\"\"];\n" + " legend2r [shape=point, label=\"\"];\n" + " legend1l -> legend1r [style=dashed, dir=back, arrowtail=empty, label=\"implements\"];\n" + " legend2l -> legend2r [style=solid, dir=back, arrowtail=diamond, label=\"contains\"];\n" + " }\n"; /** Accumulate string representation of the graph here. */ private final transient StringBuilder mGraphStr = new StringBuilder("digraph G {\n" + LEGEND); /** * @return TANG configuration represented as a Graphviz DOT string. */ @Override public String toString() { return this.mGraphStr.toString() + "}\n"; } /** * Process current class configuration node. * @param aNode Current configuration node. * @return true to proceed with the next node, false to cancel. */ @Override public boolean visit(final ClassNode aNode) { this.mGraphStr .append(" \"node_") .append(aNode.getName()) .append("\" [label=\"") .append(aNode.getName()) .append("\", shape=box];\n"); for (final Object implNode : aNode.getKnownImplementations()) { this.mGraphStr .append(" \"node_") .append(aNode.getName()) .append("\" -> \"node_") .append(((ClassNode) implNode).getName()) .append("\" [style=dashed, dir=back, arrowtail=empty];\n"); } return true; } /** * Process current package configuration node. * @param aNode Current configuration node. * @return true to proceed with the next node, false to cancel. */ @Override public boolean visit(final PackageNode aNode) { this.mGraphStr .append(" \"node_") .append(aNode.getName()) .append("\" [label=\"") .append(aNode.getFullName()) .append("\", shape=folder];\n"); return true; } /** * Process current configuration node for the named parameter. * @param aNode Current configuration node. * @return true to proceed with the next node, false to cancel. */ @Override public boolean visit(final NamedParameterNode aNode) { this.mGraphStr .append(" \"node_") .append(aNode.getName()) .append("\" [label=\"") .append(aNode.getSimpleArgName()) // parameter type, e.g. "Integer" .append("\\n") .append(aNode.getName()) // short name, e.g. "NumberOfThreads" .append(" = ") .append(aNode.getDefaultInstanceAsString()) // default value, e.g. 
"4" .append("\", shape=oval];\n"); return true; } /** * Process current edge of the configuration graph. * @param aNodeFrom Current configuration node. * @param aNodeTo Destination configuration node. * @return true to proceed with the next node, false to cancel. */ @Override public boolean visit(final Node aNodeFrom, final Node aNodeTo) { this.mGraphStr .append(" \"node_") .append(aNodeFrom.getName()) .append("\" -> \"node_") .append(aNodeTo.getName()) .append("\" [style=solid, dir=back, arrowtail=diamond];\n"); return true; } /** * Produce a Graphviz DOT string for a given TANG configuration. * @param config TANG configuration object. * @return configuration graph represented as a string in Graphviz DOT format. */ public static String getGraphvizStr(final Configuration config) { final GraphVisitorGraphviz visitor = new GraphVisitorGraphviz(); Walk.preorder(visitor, visitor, config); return visitor.toString(); } }
Final? version of the Graphviz layout
tang/src/main/java/com/microsoft/tang/util/walk/GraphVisitorGraphviz.java
Final? version of the Graphviz layout
Java
apache-2.0
efb6b541373d92d014cbbe220c69ca6caf9713cf
0
google/error-prone,cushon/error-prone,cushon/error-prone,cushon/error-prone,google/error-prone,cushon/error-prone
/* * Copyright 2016 The Error Prone Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.errorprone.bugpatterns; import com.google.errorprone.BugCheckerRefactoringTestHelper; import com.google.errorprone.CompilationTestHelper; import com.sun.tools.javac.main.Main.Result; import org.junit.Ignore; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Unit test for {@link RestrictedApiChecker} */ @RunWith(JUnit4.class) public class RestrictedApiCheckerTest { private final CompilationTestHelper helper; private final BugCheckerRefactoringTestHelper refactoringTest; public RestrictedApiCheckerTest() { this(RestrictedApiChecker.class); } protected RestrictedApiCheckerTest(Class<? extends BugChecker> checker) { helper = CompilationTestHelper.newInstance(checker, RestrictedApiCheckerTest.class) .addSourceFile("Allowlist.java") .addSourceFile("RestrictedApiMethods.java") .matchAllDiagnostics(); refactoringTest = BugCheckerRefactoringTestHelper.newInstance(checker, RestrictedApiCheckerTest.class); } @Test public void testNormalCallAllowed() { helper .addSourceLines( "Testcase.java", "package com.google.errorprone.bugpatterns.testdata;", "class Testcase {", " void foo(RestrictedApiMethods m) {", " m.normalMethod();", " m.accept(m::normalMethod);", " }", "}") .doTest(); } @Test public void testRestrictedCallProhibited() { helper .addSourceLines( "Testcase.java", "package com.google.errorprone.bugpatterns.testdata;", "class Testcase {", " void foo(RestrictedApiMethods m) {", " // BUG: Diagnostic contains: lorem", " m.restrictedMethod();", " // BUG: Diagnostic contains: lorem", " m.accept(m::restrictedMethod);", " }", "}") .expectResult(Result.ERROR) .doTest(); } @Test public void testRestrictedCallProhibited_inherited() { helper .addSourceLines( "Testcase.java", "package com.google.errorprone.bugpatterns.testdata;", "class Testcase {", " void foo(RestrictedApiMethods.Subclass m) {", " // BUG: Diagnostic contains: lorem", " m.restrictedMethod();", " // BUG: Diagnostic contains: ipsum", " m.dontCallMe();", " // BUG: Diagnostic contains: lorem", " m.accept(m::restrictedMethod);", " // BUG: Diagnostic contains: ipsum", " m.accept(m::dontCallMe);", " }", "}") .expectResult(Result.ERROR) .doTest(); } @Test public void testRestrictedCallAllowedOnWhitelistedPath() { helper .addSourceLines( "testsuite/Testcase.java", "package com.google.errorprone.bugpatterns.testdata;", "class Testcase {", " void foo(RestrictedApiMethods m) {", " m.restrictedMethod();", " m.accept(m::restrictedMethod);", " }", "}") .expectResult(Result.OK) .doTest(); } @Test public void testRestrictedStaticCallProhibited() { helper .addSourceLines( "Testcase.java", "package com.google.errorprone.bugpatterns.testdata;", "class Testcase {", " void foo() {", " // BUG: Diagnostic contains: lorem", " RestrictedApiMethods.restrictedStaticMethod();", " // BUG: Diagnostic contains: lorem", " RestrictedApiMethods.accept(RestrictedApiMethods::restrictedStaticMethod);", " }", "}") 
.expectResult(Result.ERROR) .doTest(); } @Test public void testRestrictedConstructorProhibited() { helper .addSourceLines( "Testcase.java", "package com.google.errorprone.bugpatterns.testdata;", "class Testcase {", " void foo() {", " // BUG: Diagnostic contains: lorem", " new RestrictedApiMethods(0);", " // BUG: Diagnostic contains: lorem", " RestrictedApiMethods.accept(RestrictedApiMethods::new);", " }", "}") .expectResult(Result.ERROR) .doTest(); } @Test public void testRestrictedConstructorViaAnonymousClassProhibited() { helper .addSourceLines( "Testcase.java", "package com.google.errorprone.bugpatterns.testdata;", "class Testcase {", " void foo() {", " // BUG: Diagnostic contains: lorem", " new RestrictedApiMethods() {};", " }", "}") .expectResult(Result.ERROR) .doTest(); } @Test public void testRestrictedConstructorViaAnonymousClassAllowed() { helper .addSourceLines( "Testcase.java", "package com.google.errorprone.bugpatterns.testdata;", "class Testcase {", " @Allowlist ", " void foo() {", " new RestrictedApiMethods() {};", " }", "}") .expectResult(Result.OK) .doTest(); } @Test public void testRestrictedCallAnonymousClassFromInterface() { helper .addSourceLines( "Testcase.java", "package com.google.errorprone.bugpatterns.testdata;", "class Testcase {", " void foo() {", " new IFaceWithRestriction() {", " @Override", " public void dontCallMe() {}", " }", " // BUG: Diagnostic contains: ipsum", " .dontCallMe();", " }", "}") .expectResult(Result.ERROR) .doTest(); } @Test public void testImplicitRestrictedConstructorProhibited() { helper .addSourceLines( "Testcase.java", "package com.google.errorprone.bugpatterns.testdata;", "class Testcase extends RestrictedApiMethods {", " // BUG: Diagnostic contains: lorem", " public Testcase() {}", "}") .expectResult(Result.ERROR) .doTest(); } @Ignore("Doesn't work yet") @Test public void testImplicitRestrictedConstructorProhibited_implicitConstructor() { helper .addSourceLines( "Testcase.java", "package com.google.errorprone.bugpatterns.testdata;", " // BUG: Diagnostic contains: lorem", "class Testcase extends RestrictedApiMethods {}") .expectResult(Result.ERROR) .doTest(); } @Test public void testAllowWithWarning() { helper .addSourceLines( "Testcase.java", "package com.google.errorprone.bugpatterns.testdata;", "class Testcase {", " @AllowlistWithWarning", " void foo(RestrictedApiMethods m) {", " // BUG: Diagnostic contains: lorem", " m.restrictedMethod();", " // BUG: Diagnostic contains: lorem", " m.accept(m::restrictedMethod);", " }", "}") .expectResult(Result.OK) .doTest(); } @Test public void testAllowWithoutWarning() { helper .addSourceLines( "Testcase.java", "package com.google.errorprone.bugpatterns.testdata;", "class Testcase {", " @Allowlist", " void foo(RestrictedApiMethods m) {", " m.restrictedMethod();", " m.accept(m::restrictedMethod);", " }", "}") .expectResult(Result.OK) .doTest(); } // Regression test for b/36160747 @Test public void testAllowAllDefinitionsInFile() { helper .addSourceLines( "Testcase.java", "", "package separate.test;", "", "import com.google.errorprone.annotations.RestrictedApi;", "import java.lang.annotation.ElementType;", "import java.lang.annotation.Target;", "", "class Testcase {", " @Allowlist", " void caller() {", " restrictedMethod();", " }", " @RestrictedApi(", " explanation=\"test\",", " whitelistAnnotations = {Allowlist.class},", " link = \"foo\"", " )", " void restrictedMethod() {", " }", " @Target({ElementType.METHOD, ElementType.CONSTRUCTOR})", " @interface Allowlist {}", "}") .doTest(); } }
core/src/test/java/com/google/errorprone/bugpatterns/RestrictedApiCheckerTest.java
/* * Copyright 2016 The Error Prone Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.errorprone.bugpatterns; import com.google.errorprone.BugCheckerRefactoringTestHelper; import com.google.errorprone.CompilationTestHelper; import com.sun.tools.javac.main.Main.Result; import org.junit.Ignore; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Unit test for {@link RestrictedApiChecker} */ @RunWith(JUnit4.class) public class RestrictedApiCheckerTest { private final CompilationTestHelper helper; private final BugCheckerRefactoringTestHelper refactoringTest; public RestrictedApiCheckerTest() { this(RestrictedApiChecker.class); } protected RestrictedApiCheckerTest(Class<? extends BugChecker> checker) { helper = CompilationTestHelper.newInstance(checker, RestrictedApiCheckerTest.class) .addSourceFile("RestrictedApiMethods.java") .matchAllDiagnostics(); refactoringTest = BugCheckerRefactoringTestHelper.newInstance(checker, RestrictedApiCheckerTest.class); } @Test public void testNormalCallAllowed() { helper .addSourceLines( "Testcase.java", "package com.google.errorprone.bugpatterns.testdata;", "class Testcase {", " void foo(RestrictedApiMethods m) {", " m.normalMethod();", " m.accept(m::normalMethod);", " }", "}") .doTest(); } @Test public void testRestrictedCallProhibited() { helper .addSourceLines( "Testcase.java", "package com.google.errorprone.bugpatterns.testdata;", "class Testcase {", " void foo(RestrictedApiMethods m) {", " // BUG: Diagnostic contains: lorem", " m.restrictedMethod();", " // BUG: Diagnostic contains: lorem", " m.accept(m::restrictedMethod);", " }", "}") .expectResult(Result.ERROR) .doTest(); } @Test public void testRestrictedCallProhibited_inherited() { helper .addSourceLines( "Testcase.java", "package com.google.errorprone.bugpatterns.testdata;", "class Testcase {", " void foo(RestrictedApiMethods.Subclass m) {", " // BUG: Diagnostic contains: lorem", " m.restrictedMethod();", " // BUG: Diagnostic contains: ipsum", " m.dontCallMe();", " // BUG: Diagnostic contains: lorem", " m.accept(m::restrictedMethod);", " // BUG: Diagnostic contains: ipsum", " m.accept(m::dontCallMe);", " }", "}") .expectResult(Result.ERROR) .doTest(); } @Test public void testRestrictedCallAllowedOnWhitelistedPath() { helper .addSourceLines( "testsuite/Testcase.java", "package com.google.errorprone.bugpatterns.testdata;", "class Testcase {", " void foo(RestrictedApiMethods m) {", " m.restrictedMethod();", " m.accept(m::restrictedMethod);", " }", "}") .expectResult(Result.OK) .doTest(); } @Test public void testRestrictedStaticCallProhibited() { helper .addSourceLines( "Testcase.java", "package com.google.errorprone.bugpatterns.testdata;", "class Testcase {", " void foo() {", " // BUG: Diagnostic contains: lorem", " RestrictedApiMethods.restrictedStaticMethod();", " // BUG: Diagnostic contains: lorem", " RestrictedApiMethods.accept(RestrictedApiMethods::restrictedStaticMethod);", " }", "}") .expectResult(Result.ERROR) .doTest(); } @Test public void 
testRestrictedConstructorProhibited() { helper .addSourceLines( "Testcase.java", "package com.google.errorprone.bugpatterns.testdata;", "class Testcase {", " void foo() {", " // BUG: Diagnostic contains: lorem", " new RestrictedApiMethods(0);", " // BUG: Diagnostic contains: lorem", " RestrictedApiMethods.accept(RestrictedApiMethods::new);", " }", "}") .expectResult(Result.ERROR) .doTest(); } @Test public void testRestrictedConstructorViaAnonymousClassProhibited() { helper .addSourceLines( "Testcase.java", "package com.google.errorprone.bugpatterns.testdata;", "class Testcase {", " void foo() {", " // BUG: Diagnostic contains: lorem", " new RestrictedApiMethods() {};", " }", "}") .expectResult(Result.ERROR) .doTest(); } @Test public void testRestrictedConstructorViaAnonymousClassAllowed() { helper .addSourceLines( "Testcase.java", "package com.google.errorprone.bugpatterns.testdata;", "class Testcase {", " @Allowlist ", " void foo() {", " new RestrictedApiMethods() {};", " }", "}") .expectResult(Result.OK) .doTest(); } @Test public void testRestrictedCallAnonymousClassFromInterface() { helper .addSourceLines( "Testcase.java", "package com.google.errorprone.bugpatterns.testdata;", "class Testcase {", " void foo() {", " new IFaceWithRestriction() {", " @Override", " public void dontCallMe() {}", " }", " // BUG: Diagnostic contains: ipsum", " .dontCallMe();", " }", "}") .expectResult(Result.ERROR) .doTest(); } @Test public void testImplicitRestrictedConstructorProhibited() { helper .addSourceLines( "Testcase.java", "package com.google.errorprone.bugpatterns.testdata;", "class Testcase extends RestrictedApiMethods {", " // BUG: Diagnostic contains: lorem", " public Testcase() {}", "}") .expectResult(Result.ERROR) .doTest(); } @Ignore("Doesn't work yet") @Test public void testImplicitRestrictedConstructorProhibited_implicitConstructor() { helper .addSourceLines( "Testcase.java", "package com.google.errorprone.bugpatterns.testdata;", " // BUG: Diagnostic contains: lorem", "class Testcase extends RestrictedApiMethods {}") .expectResult(Result.ERROR) .doTest(); } @Test public void testAllowWithWarning() { helper .addSourceLines( "Testcase.java", "package com.google.errorprone.bugpatterns.testdata;", "class Testcase {", " @AllowlistWithWarning", " void foo(RestrictedApiMethods m) {", " // BUG: Diagnostic contains: lorem", " m.restrictedMethod();", " // BUG: Diagnostic contains: lorem", " m.accept(m::restrictedMethod);", " }", "}") .expectResult(Result.OK) .doTest(); } @Test public void testAllowWithoutWarning() { helper .addSourceLines( "Testcase.java", "package com.google.errorprone.bugpatterns.testdata;", "class Testcase {", " @Allowlist", " void foo(RestrictedApiMethods m) {", " m.restrictedMethod();", " m.accept(m::restrictedMethod);", " }", "}") .expectResult(Result.OK) .doTest(); } // Regression test for b/36160747 @Test public void testAllowAllDefinitionsInFile() { helper .addSourceLines( "Testcase.java", "", "package separate.test;", "", "import com.google.errorprone.annotations.RestrictedApi;", "import java.lang.annotation.ElementType;", "import java.lang.annotation.Target;", "", "class Testcase {", " @Allowlist", " void caller() {", " restrictedMethod();", " }", " @RestrictedApi(", " explanation=\"test\",", " whitelistAnnotations = {Allowlist.class},", " link = \"foo\"", " )", " void restrictedMethod() {", " }", " @Target({ElementType.METHOD, ElementType.CONSTRUCTOR})", " @interface Allowlist {}", "}") .doTest(); } }
Explicitly add a test source that was previously being discovered on the sourcepath. PiperOrigin-RevId: 348524001
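Distilled from the diff above (the helper calls appear verbatim in the new test constructor; the wrapper class here is only for illustration), the pattern is to register every auxiliary test source explicitly instead of letting the compiler find it on the sourcepath:

import com.google.errorprone.CompilationTestHelper;
import com.google.errorprone.bugpatterns.RestrictedApiChecker;

class ExplicitSourcesSketch {
    private final CompilationTestHelper helper =
        CompilationTestHelper.newInstance(RestrictedApiChecker.class, ExplicitSourcesSketch.class)
            .addSourceFile("Allowlist.java")            // previously discovered implicitly on the sourcepath
            .addSourceFile("RestrictedApiMethods.java")
            .matchAllDiagnostics();
}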
core/src/test/java/com/google/errorprone/bugpatterns/RestrictedApiCheckerTest.java
Explicitly add a test source
Java
apache-2.0
791daa3850dfff7a81892ca4d9b1c5312d0cf561
0
buckett/sakai-gitflow,ktakacs/sakai,duke-compsci290-spring2016/sakai,joserabal/sakai,kwedoff1/sakai,lorenamgUMU/sakai,OpenCollabZA/sakai,maurercw/gradebookNG,clhedrick/sakai,duke-compsci290-spring2016/sakai,joserabal/sakai,willkara/sakai,colczr/sakai,willkara/sakai,ouit0408/sakai,pushyamig/sakai,pushyamig/sakai,steveswinsburg/gradebookNG,Fudan-University/sakai,buckett/sakai-gitflow,conder/sakai,lorenamgUMU/sakai,ouit0408/sakai,ktakacs/sakai,rodriguezdevera/sakai,rodriguezdevera/sakai,OpenCollabZA/sakai,bzhouduke123/sakai,frasese/sakai,frasese/sakai,rodriguezdevera/sakai,buckett/sakai-gitflow,rodriguezdevera/sakai,OpenCollabZA/sakai,zqian/sakai,duke-compsci290-spring2016/sakai,pushyamig/sakai,buckett/sakai-gitflow,ktakacs/sakai,conder/sakai,clhedrick/sakai,clhedrick/sakai,duke-compsci290-spring2016/sakai,Fudan-University/sakai,rodriguezdevera/sakai,liubo404/sakai,liubo404/sakai,Fudan-University/sakai,ktakacs/sakai,joserabal/sakai,kwedoff1/sakai,bzhouduke123/sakai,colczr/sakai,steveswinsburg/gradebookNG,joserabal/sakai,liubo404/sakai,liubo404/sakai,steveswinsburg/gradebookNG,ouit0408/sakai,zqian/sakai,kwedoff1/sakai,buckett/sakai-gitflow,conder/sakai,OpenCollabZA/sakai,rodriguezdevera/sakai,frasese/sakai,kwedoff1/sakai,colczr/sakai,zqian/sakai,buckett/sakai-gitflow,bzhouduke123/sakai,willkara/sakai,willkara/sakai,clhedrick/sakai,pushyamig/sakai,joserabal/sakai,colczr/sakai,duke-compsci290-spring2016/sakai,Fudan-University/sakai,ktakacs/sakai,OpenCollabZA/sakai,pushyamig/sakai,OpenCollabZA/sakai,conder/sakai,Fudan-University/sakai,frasese/sakai,duke-compsci290-spring2016/sakai,bzhouduke123/sakai,lorenamgUMU/sakai,clhedrick/sakai,lorenamgUMU/sakai,buckett/sakai-gitflow,zqian/sakai,OpenCollabZA/sakai,bzhouduke123/sakai,frasese/sakai,liubo404/sakai,pushyamig/sakai,liubo404/sakai,lorenamgUMU/sakai,maurercw/gradebookNG,bzhouduke123/sakai,kwedoff1/sakai,duke-compsci290-spring2016/sakai,maurercw/gradebookNG,zqian/sakai,Fudan-University/sakai,joserabal/sakai,ktakacs/sakai,colczr/sakai,OpenCollabZA/sakai,buckett/sakai-gitflow,clhedrick/sakai,ktakacs/sakai,lorenamgUMU/sakai,conder/sakai,clhedrick/sakai,conder/sakai,Fudan-University/sakai,joserabal/sakai,ouit0408/sakai,colczr/sakai,joserabal/sakai,pushyamig/sakai,ouit0408/sakai,ouit0408/sakai,willkara/sakai,ktakacs/sakai,willkara/sakai,zqian/sakai,kwedoff1/sakai,zqian/sakai,colczr/sakai,liubo404/sakai,bzhouduke123/sakai,pushyamig/sakai,bzhouduke123/sakai,Fudan-University/sakai,rodriguezdevera/sakai,lorenamgUMU/sakai,ouit0408/sakai,frasese/sakai,zqian/sakai,frasese/sakai,liubo404/sakai,duke-compsci290-spring2016/sakai,conder/sakai,willkara/sakai,lorenamgUMU/sakai,conder/sakai,clhedrick/sakai,colczr/sakai,ouit0408/sakai,frasese/sakai,kwedoff1/sakai,rodriguezdevera/sakai,kwedoff1/sakai,willkara/sakai
package org.sakaiproject.gradebookng.business.helpers; import au.com.bytecode.opencsv.CSVReader; import lombok.extern.apachecommons.CommonsLog; import org.apache.commons.lang.StringUtils; import org.apache.poi.ss.usermodel.Row; import org.apache.poi.ss.usermodel.Sheet; import org.apache.poi.ss.usermodel.Workbook; import org.apache.poi.ss.usermodel.WorkbookFactory; import org.sakaiproject.entity.api.ResourceProperties; import org.sakaiproject.gradebookng.business.model.ImportColumn; import org.sakaiproject.gradebookng.business.model.ImportedGrade; import org.sakaiproject.gradebookng.business.model.ImportedGradeItem; import org.sakaiproject.gradebookng.business.model.ImportedGradeWrapper; import org.sakaiproject.gradebookng.business.model.ProcessedGradeItem; import org.sakaiproject.gradebookng.business.model.ProcessedGradeItemDetail; import org.sakaiproject.gradebookng.business.model.ProcessedGradeItemStatus; import org.sakaiproject.gradebookng.tool.model.AssignmentStudentGradeInfo; import org.sakaiproject.gradebookng.tool.model.GradeInfo; import org.sakaiproject.gradebookng.tool.model.StudentGradeInfo; import org.sakaiproject.service.gradebook.shared.Assignment; import org.sakaiproject.util.BaseResourcePropertiesEdit; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.text.MessageFormat; import java.text.ParseException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; /** * Created by chmaurer on 1/21/15. */ @CommonsLog public class ImportGradesHelper extends BaseImportHelper { private static final String IMPORT_USER_ID="Student ID"; private static final String IMPORT_USER_NAME="Student Name"; protected static final String ASSIGNMENT_HEADER_PATTERN = "{0} [{1}]"; protected static final String ASSIGNMENT_HEADER_COMMENT_PATTERN = "*/ {0} Comments */"; protected static final String HEADER_STANDARD_PATTERN = "{0}"; /** * Parse a CSV into a list of ImportedGrade objects. Returns list if ok, or null if error * @param is InputStream of the data to parse * @return */ public static ImportedGradeWrapper parseCsv(InputStream is) { //manually parse method so we can support arbitrary columns CSVReader reader = new CSVReader(new InputStreamReader(is)); String [] nextLine; int lineCount = 0; List<ImportedGrade> list = new ArrayList<ImportedGrade>(); Map<Integer,ImportColumn> mapping = null; try { while ((nextLine = reader.readNext()) != null) { if(lineCount == 0) { //header row, capture it mapping = mapHeaderRow(nextLine); } else { //map the fields into the object list.add(mapLine(nextLine, mapping)); } lineCount++; } } catch (Exception e) { log.error("Error reading imported file: " + e.getClass() + " : " + e.getMessage()); return null; } finally { try { reader.close(); } catch (IOException e) { e.printStackTrace(); } } ImportedGradeWrapper importedGradeWrapper = new ImportedGradeWrapper(); importedGradeWrapper.setColumns(mapping.values()); importedGradeWrapper.setImportedGrades(list); return importedGradeWrapper; } /** * Parse an XLS into a list of ImportedGrade objects * Note that only the first sheet of the Excel file is supported. 
* * @param is InputStream of the data to parse * @return */ public static ImportedGradeWrapper parseXls(InputStream is) { int lineCount = 0; List<ImportedGrade> list = new ArrayList<ImportedGrade>(); Map<Integer,ImportColumn> mapping = null; try { Workbook wb = WorkbookFactory.create(is); Sheet sheet = wb.getSheetAt(0); for (Row row : sheet) { String[] r = convertRow(row); if(lineCount == 0) { //header row, capture it mapping = mapHeaderRow(r); } else { //map the fields into the object list.add(mapLine(r, mapping)); } lineCount++; } } catch (Exception e) { log.error("Error reading imported file: " + e.getClass() + " : " + e.getMessage()); return null; } ImportedGradeWrapper importedGradeWrapper = new ImportedGradeWrapper(); importedGradeWrapper.setColumns(mapping.values()); importedGradeWrapper.setImportedGrades(list); return importedGradeWrapper; } // private static List<ProcessedGradeItem> processAssignmentNames(Map<Integer,String> mapping) { // List<String> assignmentNames = new ArrayList<String>(); // for(Map.Entry<Integer,String> entry: mapping.entrySet()) { // int i = entry.getKey(); // //trim in case some whitespace crept in // String col = trim(entry.getValue()); // // //Find all columns that are not well known // if(!StringUtils.equals(col, IMPORT_USER_ID) && !StringUtils.equals(col, IMPORT_USER_NAME)) { // // String assignmentName = parseHeaderForAssignmentName(col); // if (!assignmentNames.contains(assignmentName)) // assignmentNames.add(assignmentName); // } // } // return assignmentNames; // } private static Object[] parseHeaderForAssignmentName(String headerValue) { MessageFormat mf = new MessageFormat(ImportGradesHelper.ASSIGNMENT_HEADER_PATTERN); Object[] parsedObject; try { parsedObject = mf.parse(headerValue); } catch (ParseException e) { mf = new MessageFormat(ImportGradesHelper.ASSIGNMENT_HEADER_COMMENT_PATTERN); try { parsedObject = mf.parse(headerValue); } catch (ParseException e1) { throw new RuntimeException("Error parsing grade import"); } } return parsedObject; } private static boolean isCommentsColumn(String headerValue) { MessageFormat mf = new MessageFormat(ImportGradesHelper.ASSIGNMENT_HEADER_COMMENT_PATTERN); try { mf.parse(headerValue); } catch (ParseException e) { return false; } return true; } private static boolean isGradeColumn(String headerValue) { MessageFormat mf = new MessageFormat(ImportGradesHelper.ASSIGNMENT_HEADER_PATTERN); try { mf.parse(headerValue); } catch (ParseException e) { return false; } return true; } /** * Takes a row of data and maps it into the appropriate ImportedGrade properties * We have a fixed list of properties, anything else goes into ResourceProperties * @param line * @param mapping * @return */ private static ImportedGrade mapLine(String[] line, Map<Integer,ImportColumn> mapping){ ImportedGrade grade = new ImportedGrade(); ResourceProperties p = new BaseResourcePropertiesEdit(); for(Map.Entry<Integer,ImportColumn> entry: mapping.entrySet()) { int i = entry.getKey(); //trim in case some whitespace crept in ImportColumn importColumn = entry.getValue(); // String col = trim(entry.getValue()); // In case there aren't enough data fields in the line to match up with the number of columns needed String lineVal = null; if (i < line.length) { lineVal = trim(line[i]); } //now check each of the main properties in turn to determine which one to set, otherwise set into props if(StringUtils.equals(importColumn.getColumnTitle(), IMPORT_USER_ID)) { grade.setStudentId(lineVal); } else if(StringUtils.equals(importColumn.getColumnTitle(), 
IMPORT_USER_NAME)) { grade.setStudentName(lineVal); } else if(ImportColumn.TYPE_ITEM_WITH_POINTS==importColumn.getType()) { String assignmentName = importColumn.getColumnTitle(); ImportedGradeItem importedGradeItem = grade.getGradeItemMap().get(assignmentName); if (importedGradeItem == null) { importedGradeItem = new ImportedGradeItem(); grade.getGradeItemMap().put(assignmentName, importedGradeItem); importedGradeItem.setGradeItemName(assignmentName); } importedGradeItem.setGradeItemScore(lineVal); } else if(ImportColumn.TYPE_ITEM_WITH_COMMENTS==importColumn.getType()) { String assignmentName = importColumn.getColumnTitle(); ImportedGradeItem importedGradeItem = grade.getGradeItemMap().get(assignmentName); if (importedGradeItem == null) { importedGradeItem = new ImportedGradeItem(); grade.getGradeItemMap().put(assignmentName, importedGradeItem); importedGradeItem.setGradeItemName(assignmentName); } importedGradeItem.setGradeItemComment(lineVal); } else { //only add if not blank if(StringUtils.isNotBlank(lineVal)) { p.addProperty(importColumn.getColumnTitle(), lineVal); } } } grade.setProperties(p); return grade; } public static List<ProcessedGradeItem> processImportedGrades(ImportedGradeWrapper importedGradeWrapper, List<Assignment> assignments, List<StudentGradeInfo> currentGrades) { List<ProcessedGradeItem> processedGradeItems = new ArrayList<ProcessedGradeItem>(); Map<String, Assignment> assignmentNameMap = new HashMap<String, Assignment>(); Map<Long, AssignmentStudentGradeInfo> transformedGradeMap = transformCurrentGrades(currentGrades); //Map the assignment name back to the Id for (Assignment assignment : assignments) { assignmentNameMap.put(assignment.getName(), assignment); } for (ImportColumn column : importedGradeWrapper.getColumns()) { ProcessedGradeItem processedGradeItem = new ProcessedGradeItem(); String assignmentName = column.getColumnTitle(); if (column.getType() == ImportColumn.TYPE_ITEM_WITH_POINTS) { processedGradeItem.setItemTitle(assignmentName); processedGradeItem.setItemPointValue(column.getPoints()); } else if (column.getType() == ImportColumn.TYPE_ITEM_WITH_COMMENTS) { processedGradeItem.setItemTitle(assignmentName + " Comments"); processedGradeItem.setItemPointValue("N/A"); } else { //Just get out log.warn("Bad column type - " + column.getType() + ". 
Skipping."); continue; } Assignment assignment = assignmentNameMap.get(assignmentName); ProcessedGradeItemStatus status = determineStatus(column, assignment, importedGradeWrapper, transformedGradeMap); processedGradeItem.setStatus(status); if (assignment != null) { processedGradeItem.setItemId(assignment.getId()); } List<ProcessedGradeItemDetail> processedGradeItemDetails = new ArrayList<>(); for (ImportedGrade importedGrade : importedGradeWrapper.getImportedGrades()) { ImportedGradeItem importedGradeItem = importedGrade.getGradeItemMap().get(assignmentName); if (importedGradeItem != null) { ProcessedGradeItemDetail processedGradeItemDetail = new ProcessedGradeItemDetail(); processedGradeItemDetail.setStudentId(importedGrade.getStudentId()); processedGradeItemDetail.setGrade(importedGradeItem.getGradeItemScore()); } } processedGradeItem.setProcessedGradeItemDetails(processedGradeItemDetails); processedGradeItems.add(processedGradeItem); } return processedGradeItems; } private static ProcessedGradeItemStatus determineStatus(ImportColumn column, Assignment assignment, ImportedGradeWrapper importedGradeWrapper, Map<Long, AssignmentStudentGradeInfo> transformedGradeMap) { ProcessedGradeItemStatus status = new ProcessedGradeItemStatus(ProcessedGradeItemStatus.STATUS_UNKNOWN); if (assignment == null) { status = new ProcessedGradeItemStatus(ProcessedGradeItemStatus.STATUS_NEW); } else if (assignment.getExternalId() != null) { status = new ProcessedGradeItemStatus(ProcessedGradeItemStatus.STATUS_EXTERNAL, assignment.getExternalAppName()); } else { for (ImportedGrade importedGrade : importedGradeWrapper.getImportedGrades()) { AssignmentStudentGradeInfo assignmentStudentGradeInfo = transformedGradeMap.get(assignment.getId()); ImportedGradeItem importedGradeItem = importedGrade.getGradeItemMap().get(column.getColumnTitle()); String actualScore = null; String actualComment = null; if (assignmentStudentGradeInfo != null) { GradeInfo actualGradeInfo = assignmentStudentGradeInfo.getStudentGrades().get(importedGrade.getStudentId()); if (actualGradeInfo != null) { actualScore = actualGradeInfo.getGrade(); actualComment = actualGradeInfo.getGradeComment(); } } String importedScore = null; String importedComment = null; if (importedGradeItem != null) { importedScore = importedGradeItem.getGradeItemScore(); importedComment = importedGradeItem.getGradeItemComment(); } if (column.getType() == ImportColumn.TYPE_ITEM_WITH_POINTS) { if (importedScore != null && !importedScore.equals(actualScore)) { status = new ProcessedGradeItemStatus(ProcessedGradeItemStatus.STATUS_UPDATE); break; } } else if (column.getType() == ImportColumn.TYPE_ITEM_WITH_COMMENTS) { if (importedComment != null && !importedComment.equals(actualComment)) { status = new ProcessedGradeItemStatus(ProcessedGradeItemStatus.STATUS_UPDATE); break; } } } // If we get here, must not have been any changes if (status.getStatusCode() == ProcessedGradeItemStatus.STATUS_UNKNOWN) { status = new ProcessedGradeItemStatus(ProcessedGradeItemStatus.STATUS_NA); } //TODO - What about if a user was added to the import file? // That probably means that actualGradeInfo from up above is null...but what do I do? 
} return status; } private static Map<Long, AssignmentStudentGradeInfo> transformCurrentGrades(List<StudentGradeInfo> currentGrades) { Map<Long, AssignmentStudentGradeInfo> assignmentMap = new HashMap<Long, AssignmentStudentGradeInfo>(); for (StudentGradeInfo studentGradeInfo : currentGrades) { for (Map.Entry<Long, GradeInfo> entry : studentGradeInfo.getGrades().entrySet()) { Long assignmentId = entry.getKey(); AssignmentStudentGradeInfo assignmentStudentGradeInfo = assignmentMap.get(assignmentId); if (assignmentStudentGradeInfo == null) { assignmentStudentGradeInfo = new AssignmentStudentGradeInfo(); assignmentStudentGradeInfo.setAssignmemtId(assignmentId); assignmentMap.put(assignmentId, assignmentStudentGradeInfo); } assignmentStudentGradeInfo.addGrade(studentGradeInfo.getStudentEid(), entry.getValue()); } } return assignmentMap; } }
tool/src/java/org/sakaiproject/gradebookng/business/helpers/ImportGradesHelper.java
package org.sakaiproject.gradebookng.business.helpers; import au.com.bytecode.opencsv.CSVReader; import lombok.extern.apachecommons.CommonsLog; import org.apache.commons.lang.StringUtils; import org.apache.poi.ss.usermodel.Row; import org.apache.poi.ss.usermodel.Sheet; import org.apache.poi.ss.usermodel.Workbook; import org.apache.poi.ss.usermodel.WorkbookFactory; import org.sakaiproject.entity.api.ResourceProperties; import org.sakaiproject.gradebookng.business.model.ImportColumn; import org.sakaiproject.gradebookng.business.model.ImportedGrade; import org.sakaiproject.gradebookng.business.model.ImportedGradeItem; import org.sakaiproject.gradebookng.business.model.ImportedGradeWrapper; import org.sakaiproject.gradebookng.business.model.ProcessedGradeItem; import org.sakaiproject.gradebookng.business.model.ProcessedGradeItemDetail; import org.sakaiproject.gradebookng.business.model.ProcessedGradeItemStatus; import org.sakaiproject.gradebookng.tool.model.AssignmentStudentGradeInfo; import org.sakaiproject.gradebookng.tool.model.GradeInfo; import org.sakaiproject.gradebookng.tool.model.StudentGradeInfo; import org.sakaiproject.service.gradebook.shared.Assignment; import org.sakaiproject.util.BaseResourcePropertiesEdit; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.text.MessageFormat; import java.text.ParseException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; /** * Created by chmaurer on 1/21/15. */ @CommonsLog public class ImportGradesHelper extends BaseImportHelper { private static final String IMPORT_USER_ID="Student ID"; private static final String IMPORT_USER_NAME="Student Name"; protected static final String ASSIGNMENT_HEADER_PATTERN = "{0} [{1}]"; protected static final String ASSIGNMENT_HEADER_COMMENT_PATTERN = "*/ {0} Comments */"; protected static final String HEADER_STANDARD_PATTERN = "{0}"; /** * Parse a CSV into a list of ImportedGrade objects. Returns list if ok, or null if error * @param is InputStream of the data to parse * @return */ public static ImportedGradeWrapper parseCsv(InputStream is) { //manually parse method so we can support arbitrary columns CSVReader reader = new CSVReader(new InputStreamReader(is)); String [] nextLine; int lineCount = 0; List<ImportedGrade> list = new ArrayList<ImportedGrade>(); Map<Integer,ImportColumn> mapping = null; try { while ((nextLine = reader.readNext()) != null) { if(lineCount == 0) { //header row, capture it mapping = mapHeaderRow(nextLine); } else { //map the fields into the object list.add(mapLine(nextLine, mapping)); } lineCount++; } } catch (Exception e) { log.error("Error reading imported file: " + e.getClass() + " : " + e.getMessage()); return null; } finally { try { reader.close(); } catch (IOException e) { e.printStackTrace(); } } ImportedGradeWrapper importedGradeWrapper = new ImportedGradeWrapper(); importedGradeWrapper.setColumns(mapping.values()); importedGradeWrapper.setImportedGrades(list); return importedGradeWrapper; } /** * Parse an XLS into a list of ImportedGrade objects * Note that only the first sheet of the Excel file is supported. 
* * @param is InputStream of the data to parse * @return */ public static ImportedGradeWrapper parseXls(InputStream is) { int lineCount = 0; List<ImportedGrade> list = new ArrayList<ImportedGrade>(); Map<Integer,ImportColumn> mapping = null; try { Workbook wb = WorkbookFactory.create(is); Sheet sheet = wb.getSheetAt(0); for (Row row : sheet) { String[] r = convertRow(row); if(lineCount == 0) { //header row, capture it mapping = mapHeaderRow(r); } else { //map the fields into the object list.add(mapLine(r, mapping)); } lineCount++; } } catch (Exception e) { log.error("Error reading imported file: " + e.getClass() + " : " + e.getMessage()); return null; } ImportedGradeWrapper importedGradeWrapper = new ImportedGradeWrapper(); importedGradeWrapper.setColumns(mapping.values()); importedGradeWrapper.setImportedGrades(list); return importedGradeWrapper; } // private static List<ProcessedGradeItem> processAssignmentNames(Map<Integer,String> mapping) { // List<String> assignmentNames = new ArrayList<String>(); // for(Map.Entry<Integer,String> entry: mapping.entrySet()) { // int i = entry.getKey(); // //trim in case some whitespace crept in // String col = trim(entry.getValue()); // // //Find all columns that are not well known // if(!StringUtils.equals(col, IMPORT_USER_ID) && !StringUtils.equals(col, IMPORT_USER_NAME)) { // // String assignmentName = parseHeaderForAssignmentName(col); // if (!assignmentNames.contains(assignmentName)) // assignmentNames.add(assignmentName); // } // } // return assignmentNames; // } private static Object[] parseHeaderForAssignmentName(String headerValue) { MessageFormat mf = new MessageFormat(ImportGradesHelper.ASSIGNMENT_HEADER_PATTERN); Object[] parsedObject; try { parsedObject = mf.parse(headerValue); } catch (ParseException e) { mf = new MessageFormat(ImportGradesHelper.ASSIGNMENT_HEADER_COMMENT_PATTERN); try { parsedObject = mf.parse(headerValue); } catch (ParseException e1) { throw new RuntimeException("Error parsing grade import"); } } return parsedObject; } private static boolean isCommentsColumn(String headerValue) { MessageFormat mf = new MessageFormat(ImportGradesHelper.ASSIGNMENT_HEADER_COMMENT_PATTERN); try { mf.parse(headerValue); } catch (ParseException e) { return false; } return true; } private static boolean isGradeColumn(String headerValue) { MessageFormat mf = new MessageFormat(ImportGradesHelper.ASSIGNMENT_HEADER_PATTERN); try { mf.parse(headerValue); } catch (ParseException e) { return false; } return true; } /** * Takes a row of data and maps it into the appropriate ImportedGrade properties * We have a fixed list of properties, anything else goes into ResourceProperties * @param line * @param mapping * @return */ private static ImportedGrade mapLine(String[] line, Map<Integer,ImportColumn> mapping){ ImportedGrade grade = new ImportedGrade(); ResourceProperties p = new BaseResourcePropertiesEdit(); for(Map.Entry<Integer,ImportColumn> entry: mapping.entrySet()) { int i = entry.getKey(); //trim in case some whitespace crept in ImportColumn importColumn = entry.getValue(); // String col = trim(entry.getValue()); // In case there aren't enough data fields in the line to match up with the number of columns needed String lineVal = null; if (i < line.length) { lineVal = trim(line[i]); } //now check each of the main properties in turn to determine which one to set, otherwise set into props if(StringUtils.equals(importColumn.getColumnTitle(), IMPORT_USER_ID)) { grade.setStudentId(lineVal); } else if(StringUtils.equals(importColumn.getColumnTitle(), 
IMPORT_USER_NAME)) { grade.setStudentName(lineVal); } else if(ImportColumn.TYPE_ITEM_WITH_POINTS==importColumn.getType()) { String assignmentName = importColumn.getColumnTitle(); ImportedGradeItem importedGradeItem = grade.getGradeItemMap().get(assignmentName); if (importedGradeItem == null) { importedGradeItem = new ImportedGradeItem(); grade.getGradeItemMap().put(assignmentName, importedGradeItem); importedGradeItem.setGradeItemName(assignmentName); } importedGradeItem.setGradeItemScore(lineVal); } else if(ImportColumn.TYPE_ITEM_WITH_COMMENTS==importColumn.getType()) { String assignmentName = importColumn.getColumnTitle(); ImportedGradeItem importedGradeItem = grade.getGradeItemMap().get(assignmentName); if (importedGradeItem == null) { importedGradeItem = new ImportedGradeItem(); grade.getGradeItemMap().put(assignmentName, importedGradeItem); importedGradeItem.setGradeItemName(assignmentName); } importedGradeItem.setGradeItemComment(lineVal); } else { //only add if not blank if(StringUtils.isNotBlank(lineVal)) { p.addProperty(importColumn.getColumnTitle(), lineVal); } } } grade.setProperties(p); return grade; } public static List<ProcessedGradeItem> processImportedGrades(ImportedGradeWrapper importedGradeWrapper, List<Assignment> assignments, List<StudentGradeInfo> currentGrades) { List<ProcessedGradeItem> processedGradeItems = new ArrayList<ProcessedGradeItem>(); Map<String, Assignment> assignmentNameMap = new HashMap<String, Assignment>(); Map<Long, AssignmentStudentGradeInfo> transformedGradeMap = transformCurrentGrades(currentGrades); //Map the assignment name back to the Id for (Assignment assignment : assignments) { assignmentNameMap.put(assignment.getName(), assignment); } for (ImportColumn column : importedGradeWrapper.getColumns()) { ProcessedGradeItem processedGradeItem = new ProcessedGradeItem(); String assignmentName = column.getColumnTitle(); if (column.getType() == ImportColumn.TYPE_ITEM_WITH_POINTS) { processedGradeItem.setItemTitle(assignmentName); processedGradeItem.setItemPointValue(column.getPoints()); } else if (column.getType() == ImportColumn.TYPE_ITEM_WITH_COMMENTS) { processedGradeItem.setItemTitle(assignmentName + " Comments"); processedGradeItem.setItemPointValue("N/A"); } else { //Just get out log.warn("Bad column type - " + column.getType() + ". 
Skipping."); continue; } Assignment assignment = assignmentNameMap.get(assignmentName); ProcessedGradeItemStatus status = determineStatus(column, assignment, importedGradeWrapper, transformedGradeMap); processedGradeItem.setStatus(status); if (assignment != null) { processedGradeItem.setItemId(assignment.getId()); } List<ProcessedGradeItemDetail> processedGradeItemDetails = new ArrayList<>(); for (ImportedGrade importedGrade : importedGradeWrapper.getImportedGrades()) { ImportedGradeItem importedGradeItem = importedGrade.getGradeItemMap().get(assignmentName); if (importedGradeItem != null) { ProcessedGradeItemDetail processedGradeItemDetail = new ProcessedGradeItemDetail(); processedGradeItemDetail.setStudentId(importedGrade.getStudentId()); processedGradeItemDetail.setGrade(importedGradeItem.getGradeItemScore()); } } processedGradeItem.setProcessedGradeItemDetails(processedGradeItemDetails); processedGradeItems.add(processedGradeItem); } return processedGradeItems; } private static ProcessedGradeItemStatus determineStatus(ImportColumn column, Assignment assignment, ImportedGradeWrapper importedGradeWrapper, Map<Long, AssignmentStudentGradeInfo> transformedGradeMap) { ProcessedGradeItemStatus status = new ProcessedGradeItemStatus(ProcessedGradeItemStatus.STATUS_UNKNOWN); if (assignment == null) { status = new ProcessedGradeItemStatus(ProcessedGradeItemStatus.STATUS_NEW); } else if (assignment.getExternalId() != null) { status = new ProcessedGradeItemStatus(ProcessedGradeItemStatus.STATUS_EXTERNAL, assignment.getExternalAppName()); } else { for (ImportedGrade importedGrade : importedGradeWrapper.getImportedGrades()) { AssignmentStudentGradeInfo assignmentStudentGradeInfo = transformedGradeMap.get(assignment.getId()); ImportedGradeItem importedGradeItem = importedGrade.getGradeItemMap().get(column.getColumnTitle()); String actualScore = null; String actualComment = null; if (assignmentStudentGradeInfo != null) { GradeInfo actualGradeInfo = assignmentStudentGradeInfo.getStudentGrades().get(importedGrade.getStudentId()); if (actualGradeInfo != null) { actualScore = actualGradeInfo.getGrade(); actualComment = actualGradeInfo.getGradeComment(); } } String importedScore = null; String importedComment = null; if (importedGradeItem != null) { importedScore = importedGradeItem.getGradeItemScore(); importedComment = importedGradeItem.getGradeItemComment(); } if (column.getType() == ImportColumn.TYPE_ITEM_WITH_POINTS) { if (importedScore != null && !importedScore.equals(actualScore)) { status = new ProcessedGradeItemStatus(ProcessedGradeItemStatus.STATUS_UPDATE); break; } } else if (column.getType() == ImportColumn.TYPE_ITEM_WITH_COMMENTS) { if (importedComment != null && !importedComment.equals(actualComment)) { status = new ProcessedGradeItemStatus(ProcessedGradeItemStatus.STATUS_UPDATE); break; } } } // If we get here, must not have been any changes if (status.getStatusCode() == ProcessedGradeItemStatus.STATUS_UNKNOWN) { status = new ProcessedGradeItemStatus(ProcessedGradeItemStatus.STATUS_NA); } //TODO - What about if a user was added to the import file? // That probably means that actualGradeInfo from up above is null...but what do I do? 
} return status; } private static Map<Long, AssignmentStudentGradeInfo> transformCurrentGrades(List<StudentGradeInfo> currentGrades) { Map<Long, AssignmentStudentGradeInfo> assignmentMap = new HashMap<Long, AssignmentStudentGradeInfo>(); for (StudentGradeInfo studentGradeInfo : currentGrades) { for (Map.Entry<Long, GradeInfo> entry : studentGradeInfo.getGrades().entrySet()) { Long assignmentId = entry.getKey(); AssignmentStudentGradeInfo assignmentStudentGradeInfo = assignmentMap.get(assignmentId); if (assignmentStudentGradeInfo == null) { assignmentStudentGradeInfo = new AssignmentStudentGradeInfo(); assignmentStudentGradeInfo.setAssignmemtId(assignmentId); assignmentMap.put(assignmentId, assignmentStudentGradeInfo); } assignmentStudentGradeInfo.addGrade(studentGradeInfo.getStudentUuid(), entry.getValue()); // assignmentStudentGradeInfo.setGradeInfo(entry.getValue()); // assignmentStudentGradeInfo.setStudentId(studentGradeInfo.getStudentUuid()); } } return assignmentMap; } }
Fix for issue 101. Switch to the student eid instead of the uuid.
tool/src/java/org/sakaiproject/gradebookng/business/helpers/ImportGradesHelper.java
Fix for issue 101. Switch to the student eid instead of the uuid.
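The record above boils down to a one-line re-keying in transformCurrentGrades: each student's existing grades are now indexed by the enterprise id (eid) rather than the internal uuid, presumably because the imported spreadsheet's Student ID column carries eids, so uuid keys could never match during the later comparison in determineStatus. A minimal self-contained sketch of the fixed transform, using hypothetical stand-ins for the Sakai model classes:

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

// Hypothetical stand-ins for the Sakai gradebook model; only the lookup key
// (eid vs uuid) differs from the pre-fix revision.
public class EidKeyingSketch {

    static class GradeInfo { String grade; }

    static class StudentGradeInfo {
        String studentEid;   // human-facing login id, what the import file uses
        String studentUuid;  // internal surrogate key
        Map<Long, GradeInfo> grades = new HashMap<>();
    }

    static class AssignmentStudentGradeInfo {
        Long assignmentId;
        Map<String, GradeInfo> studentGrades = new HashMap<>();
        void addGrade(String studentKey, GradeInfo g) { studentGrades.put(studentKey, g); }
    }

    static Map<Long, AssignmentStudentGradeInfo> transformCurrentGrades(Iterable<StudentGradeInfo> currentGrades) {
        Map<Long, AssignmentStudentGradeInfo> assignmentMap = new HashMap<>();
        for (StudentGradeInfo sgi : currentGrades) {
            for (Map.Entry<Long, GradeInfo> entry : sgi.grades.entrySet()) {
                AssignmentStudentGradeInfo asgi = assignmentMap.get(entry.getKey());
                if (asgi == null) {
                    asgi = new AssignmentStudentGradeInfo();
                    asgi.assignmentId = entry.getKey();
                    assignmentMap.put(entry.getKey(), asgi);
                }
                // The fix: key by eid (the id the imported file supplies)
                // instead of the internal uuid.
                asgi.addGrade(sgi.studentEid, entry.getValue());
            }
        }
        return assignmentMap;
    }

    public static void main(String[] args) {
        StudentGradeInfo s = new StudentGradeInfo();
        s.studentEid = "jdoe";
        s.studentUuid = "c0ffee-123";
        GradeInfo g = new GradeInfo();
        g.grade = "95";
        s.grades.put(42L, g);
        Map<Long, AssignmentStudentGradeInfo> byAssignment =
                transformCurrentGrades(Collections.singletonList(s));
        // The imported file's Student ID ("jdoe") now finds the existing grade:
        System.out.println(byAssignment.get(42L).studentGrades.containsKey("jdoe")); // true
    }
}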
Java
apache-2.0
0facc25ea8a18eabea3f1ebb23e8351888e738ea
0
freme-project/Broker,freme-project/Broker,freme-project/Broker
package eu.freme.broker.integration_tests; import com.mashape.unirest.http.HttpResponse; import com.mashape.unirest.http.exceptions.UnirestException; import com.mashape.unirest.request.HttpRequestWithBody; import eu.freme.conversion.rdf.RDFConstants; import org.junit.Ignore; import org.junit.Test; import java.io.IOException; import java.net.URLEncoder; import static org.junit.Assert.assertTrue; /** * @author Jan Nehring - [email protected] */ public class TildeETranslationTest extends IntegrationTest{ String clientId = "u-bd13faca-b816-4085-95d5-05373d695ab7"; String sourceLang = "en"; String targetLang = "de"; String translationSystemId = "smt-76cd2e73-05c6-4d51-b02f-4fc9c4d40813"; public TildeETranslationTest(){super("/e-translation/tilde");} private HttpRequestWithBody baseRequest() { return baseRequestPost("").queryString("client-id", clientId) .queryString("source-lang", sourceLang) .queryString("target-lang", targetLang) .queryString("translation-system-id", translationSystemId); } @Test public void testEtranslate() throws UnirestException, IOException, Exception { HttpResponse<String> response = baseRequest() .queryString("input", "<p>hello world</p>") .queryString("outformat","rdf-xml") .asString(); System.err.println(response); } }
src/test/java/eu/freme/broker/integration_tests/TildeETranslationTest.java
package eu.freme.broker.integration_tests; import com.mashape.unirest.http.HttpResponse; import com.mashape.unirest.http.exceptions.UnirestException; import com.mashape.unirest.request.HttpRequestWithBody; import eu.freme.conversion.rdf.RDFConstants; import org.junit.Ignore; import org.junit.Test; import java.io.IOException; import java.net.URLEncoder; import static org.junit.Assert.assertTrue; /** * @author Jan Nehring - [email protected] */ public class TildeETranslationTest extends IntegrationTest{ String clientId = "u-bd13faca-b816-4085-95d5-05373d695ab7"; String sourceLang = "en"; String targetLang = "de"; String translationSystemId = "smt-76cd2e73-05c6-4d51-b02f-4fc9c4d40813"; public TildeETranslationTest(){super("/e-translation/tilde");} private HttpRequestWithBody baseRequest() { return baseRequestPost("").queryString("client-id", clientId) .queryString("source-lang", sourceLang) .queryString("target-lang", targetLang) .queryString("translation-system-id", translationSystemId); } @Test @Ignore //TODO: wait for Issue: Timeouts on e-Terminology https://github.com/freme-project/Broker/issues/43 public void testEtranslate() throws UnirestException, IOException, Exception { HttpResponse<String> response = baseRequest() .queryString("input", "hello world") .queryString("informat", "text") .queryString("outformat","rdf-xml") .asString(); validateNIFResponse(response, RDFConstants.RDFSerialization.RDF_XML); String data = readFile("src/test/resources/rdftest/e-translate/data.turtle"); response = baseRequest().header("Content-Type", "text/turtle") .body(data).asString(); validateNIFResponse(response, RDFConstants.RDFSerialization.TURTLE); assertTrue(response.getStatus() == 200); assertTrue(response.getBody().length() > 0); data = readFile("src/test/resources/rdftest/e-translate/data.json"); response = baseRequest().header("Content-Type", "application/json+ld") .queryString("outformat","json-ld") .body(data).asString(); assertTrue(response.getStatus() == 200); assertTrue(response.getBody().length() > 0); validateNIFResponse(response, RDFConstants.RDFSerialization.JSON_LD); data = readFile("src/test/resources/rdftest/e-translate/data.txt"); response = baseRequest() .queryString("input", URLEncoder.encode(data, "UTF-8")) .queryString("informat", "text") .queryString("outformat","n3") .asString(); validateNIFResponse(response, RDFConstants.RDFSerialization.N3); response = baseRequest() .queryString("input", URLEncoder.encode(data, "UTF-8")) .queryString("informat", "text") .queryString("outformat","n-triples") .asString(); validateNIFResponse(response, RDFConstants.RDFSerialization.N_TRIPLES); } }
fixes #72
src/test/java/eu/freme/broker/integration_tests/TildeETranslationTest.java
fixes #72
Java
bsd-3-clause
8df60ab852e032f954202ed52383d17c2ecd0d03
0
NCIP/catissue-advanced-query,NCIP/catissue-advanced-query
package edu.wustl.query.action; import java.util.Enumeration; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.ListIterator; import java.util.Map; import java.util.Set; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.struts.action.Action; import org.apache.struts.action.ActionForm; import org.apache.struts.action.ActionForward; import org.apache.struts.action.ActionMapping; import edu.common.dynamicextensions.domain.Entity; import edu.common.dynamicextensions.domaininterface.AttributeInterface; import edu.common.dynamicextensions.domaininterface.EntityInterface; import edu.wustl.cab2b.server.cache.EntityCache; import edu.wustl.common.query.pvmanager.impl.PVManagerException; import edu.wustl.common.vocab.IConcept; import edu.wustl.common.vocab.IVocabulary; import edu.wustl.common.vocab.VocabularyException; import edu.wustl.common.vocab.impl.Vocabulary; import edu.wustl.query.bizlogic.BizLogicFactory; import edu.wustl.query.bizlogic.SearchPermissibleValueBizlogic; import edu.wustl.query.util.global.Constants; import edu.wustl.query.util.global.VIProperties; /** * @author amit_doshi * Action Class to show the UI for Vocabulary Interface and to handle the Ajax request */ public class SearchMappedPVsAction extends Action { /** * This method handles the various Ajax request for VI * @param mapping mapping * @param form form * @param request request * @param response response * @throws Exception Exception * @return ActionForward actionForward */ @SuppressWarnings("unchecked") @Override public ActionForward execute(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { final String targetVocabURN = request.getParameter(Constants.SELECTED_BOX); //get the id of the component on which user click to search for PVs String componentId = request.getParameter(Constants.COMPONENT_ID); String editVocabURN = request.getParameter("editVocabURN"); componentId = getComponentId(request, componentId); String entityName = (String) request.getSession().getAttribute(Constants.ENTITY_NAME); Entity entity = (Entity) EntityCache.getCache().getEntityById(Long.valueOf((entityName))); Map<String, AttributeInterface> enumAttributeMap = (HashMap<String, AttributeInterface>) request.getSession().getAttribute(Constants.ENUMRATED_ATTRIBUTE); AttributeInterface attribute = (AttributeInterface) enumAttributeMap.get(Constants.ATTRIBUTE_INTERFACE + componentId); if (targetVocabURN != null)// Need to retrieve HTML for requested Vocabulary Mapped or sou { // user clicked on radio boxes //AJAX Request handler for Getting Mapping data for source or target vocabularies String htmlResponse = getPVsForRequestedVoab(request, targetVocabURN, componentId, entity, attribute); response.getWriter().write(htmlResponse); return null; } //new request for entity; remove the message from the session removeHTMLFromSesson(request); if(editVocabURN.equals("null") || editVocabURN.equals(VIProperties.sourceVocabUrn) ) { /* load source vocabulary if in edit mode as well as in not edit mode*/ String srcHTML=getPVsForSourceVocab(attribute, entity, componentId, request); //set the data in session because need to show this data on page load request.getSession().setAttribute(Constants.PV_HTML+VIProperties.sourceVocabUrn, srcHTML); } else { //need to load other vocabulary in edit mode setEditVocabHTML(request, editVocabURN, entity, attribute); } setComponentId(request, componentId); 
SearchPermissibleValueBizlogic bizLogic = (SearchPermissibleValueBizlogic) BizLogicFactory .getInstance().getBizLogic(Constants.SEARCH_PV_FROM_VOCAB_BILOGIC_ID); request.getSession().setAttribute(Constants.VOCABULIRES, bizLogic.getVocabularies()); return mapping.findForward(edu.wustl.query.util.global.Constants.SUCCESS); } /** * @param request * @param editVocabURN * @param entity * @param attribute * @throws VocabularyException * @throws PVManagerException */ private void setEditVocabHTML(HttpServletRequest request, String editVocabURN, Entity entity, AttributeInterface attribute) throws VocabularyException, PVManagerException { String trgHTML=getMappingForTargetVocab(editVocabURN, attribute, entity); String[] trgHTMLAll=trgHTML.split(Constants.MSG_DEL); if(trgHTMLAll.length>1) { request.getSession().setAttribute(Constants.PV_HTML+editVocabURN, trgHTMLAll[0]); request.getSession().setAttribute(Constants.SRC_VOCAB_MESSAGE, trgHTMLAll[1]); } else { request.getSession().setAttribute(Constants.PV_HTML+editVocabURN, trgHTML); } } /** * @param request * @param targetVocabURN * @param componentId * @param entity * @param attribute * @return * @throws VocabularyException * @throws PVManagerException */ private String getPVsForRequestedVoab(HttpServletRequest request, final String targetVocabURN, String componentId, Entity entity, AttributeInterface attribute) throws VocabularyException, PVManagerException { String htmlResponse =null; if(targetVocabURN.equals(VIProperties.sourceVocabUrn)) { htmlResponse= getPVsForSourceVocab(attribute, entity, componentId, request); } else { htmlResponse= getMappingForTargetVocab(targetVocabURN, attribute, entity); } return htmlResponse; } /** * @param request * @param componentId */ private void setComponentId(HttpServletRequest request, String componentId) { if (componentId != null) { request.getSession().setAttribute(Constants.COMPONENT_ID, componentId); } } /** * @param request * @param componentId * @return */ private String getComponentId(HttpServletRequest request, String componentId) { if (componentId == null) { //need to save component id into the session for next Ajax requests componentId = (String) request.getSession().getAttribute(Constants.COMPONENT_ID); } return componentId; } /** * @param request */ @SuppressWarnings("unchecked") private void removeHTMLFromSesson(HttpServletRequest request) { request.getSession().removeAttribute(Constants.SRC_VOCAB_MESSAGE); Enumeration attributeNames = request.getSession().getAttributeNames(); while(attributeNames.hasMoreElements()) { String atr=attributeNames.nextElement().toString(); if(atr.indexOf(Constants.PV_HTML)==0) { request.getSession().removeAttribute(atr); } } } /** * This method generate the HTML for the Source vocabulary (MED 1.0) * @param attribute * @param entity * @param componentId * @param request * @throws VocabularyException * @throws PVManagerException */ private String getPVsForSourceVocab(AttributeInterface attribute, EntityInterface entity, String componentId, HttpServletRequest request) throws VocabularyException, PVManagerException { SearchPermissibleValueBizlogic bizLogic = (SearchPermissibleValueBizlogic) BizLogicFactory .getInstance().getBizLogic(Constants.SEARCH_PV_FROM_VOCAB_BILOGIC_ID); StringBuffer html = new StringBuffer(); List<IConcept> pvList = bizLogic.getConfiguredPermissibleValueList(attribute, entity); String srcVocabURN = VIProperties.sourceVocabUrn; String vocabDisName = bizLogic.getDisplayNameForVocab(srcVocabURN); 
html.append(bizLogic.getRootVocabularyNodeHTML(srcVocabURN, vocabDisName)); if(pvList != null && !pvList.isEmpty()) { for(IConcept concept:pvList) { String checkboxId = srcVocabURN + Constants.ID_DEL+ concept.getCode(); checkboxId=checkboxId.replaceAll("'", ""); html.append(bizLogic.getHTMLForConcept(srcVocabURN,concept,checkboxId)); } html.append(bizLogic.getEndHTML()); if( pvList.size()==VIProperties.maxPVsToShow)// Need to show Message Too Many Result on UI { html.append(bizLogic.getInfoMessage()); request.getSession().setAttribute(Constants.SRC_VOCAB_MESSAGE, bizLogic.getInfoMessage() .replace(Constants.MSG_DEL,"")); } } else { html.append(bizLogic.getNoMappingFoundHTML()); html.append(bizLogic.getEndHTML()); } return html.toString(); } /** * This method returns the data mapped vocabularies * @param targetVocabURN * @param attribute * @param entity * @return * @throws VocabularyException * @throws PVManagerException */ private String getMappingForTargetVocab(String targetVocabURN, AttributeInterface attribute, EntityInterface entity) throws VocabularyException, PVManagerException { SearchPermissibleValueBizlogic bizLogic = (SearchPermissibleValueBizlogic) BizLogicFactory .getInstance().getBizLogic(Constants.SEARCH_PV_FROM_VOCAB_BILOGIC_ID); IVocabulary souVocabulary = bizLogic.getVocabulary(VIProperties.sourceVocabUrn); // Get the target vocabulary info from parameter String targetVocabDisName=bizLogic.getDisplayNameForVocab(targetVocabURN); IVocabulary targVocabulary = bizLogic.getVocabulary(targetVocabURN); StringBuffer html = new StringBuffer(); if (!((Vocabulary)souVocabulary).equals(targVocabulary)) { html.append(bizLogic.getRootVocabularyNodeHTML(targetVocabURN,targetVocabDisName)); Map<String, List<IConcept>> vocabMappings = bizLogic.getMappedConcepts(attribute, targVocabulary, entity); html.append(getMappedHTMLForTargetVocab(targetVocabURN,vocabMappings)); } return html.toString(); } /** * This method returns the mapping data as HTML * @param html * @param vocabName * @param vocabversoin * @param vocabMappings * @throws VocabularyException * @throws NumberFormatException */ private StringBuffer getMappedHTMLForTargetVocab(String vocabURN, Map<String, List<IConcept>> vocabMappings) throws NumberFormatException, VocabularyException { SearchPermissibleValueBizlogic bizLogic = (SearchPermissibleValueBizlogic) BizLogicFactory .getInstance().getBizLogic(Constants.SEARCH_PV_FROM_VOCAB_BILOGIC_ID); StringBuffer mappedHTML=new StringBuffer(); int displayPVCount=1; int maxPv=0; boolean isMsgDisplayed=false; if (vocabMappings != null && vocabMappings.size()!=0) { Set<String> keySet = vocabMappings.keySet(); Iterator<String> iterator = keySet.iterator(); while (iterator.hasNext()) { String conceptCode = iterator.next(); List<IConcept> mappingList = vocabMappings.get(conceptCode); maxPv=maxPv+mappingList.size(); ListIterator<IConcept> mappingListItr = mappingList.listIterator(); while (mappingListItr.hasNext()) { IConcept concept = (IConcept) mappingListItr.next(); String checkboxId = vocabURN + Constants.ID_DEL + conceptCode; mappedHTML.append(bizLogic.getHTMLForConcept(vocabURN,concept, checkboxId)); } } } else { mappedHTML.append(bizLogic.getNoMappingFoundHTML()); } mappedHTML.append(bizLogic.getEndHTML()); if(maxPv>=VIProperties.maxPVsToShow) { mappedHTML.append(bizLogic.getInfoMessage()); } return mappedHTML; } }
WEB-INF/src/edu/wustl/query/action/SearchMappedPVsAction.java
package edu.wustl.query.action; import java.util.Enumeration; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.ListIterator; import java.util.Map; import java.util.Set; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.struts.action.Action; import org.apache.struts.action.ActionForm; import org.apache.struts.action.ActionForward; import org.apache.struts.action.ActionMapping; import edu.common.dynamicextensions.domain.Entity; import edu.common.dynamicextensions.domaininterface.AttributeInterface; import edu.common.dynamicextensions.domaininterface.EntityInterface; import edu.wustl.cab2b.server.cache.EntityCache; import edu.wustl.common.query.pvmanager.impl.PVManagerException; import edu.wustl.common.vocab.IConcept; import edu.wustl.common.vocab.IVocabulary; import edu.wustl.common.vocab.VocabularyException; import edu.wustl.common.vocab.impl.Vocabulary; import edu.wustl.query.bizlogic.BizLogicFactory; import edu.wustl.query.bizlogic.SearchPermissibleValueBizlogic; import edu.wustl.query.util.global.Constants; import edu.wustl.query.util.global.VIProperties; /** * @author amit_doshi * Action Class to show the UI for Vocabulary Interface and to handle the Ajax request */ public class SearchMappedPVsAction extends Action { /** * This method handles the various Ajax request for VI * @param mapping mapping * @param form form * @param request request * @param response response * @throws Exception Exception * @return ActionForward actionForward */ @SuppressWarnings("unchecked") @Override public ActionForward execute(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { final String targetVocabURN = request.getParameter(Constants.SELECTED_BOX); //get the id of the component on which user click to search for PVs String componentId = request.getParameter(Constants.COMPONENT_ID); String editVocabURN = request.getParameter("editVocabURN"); componentId = getComponentId(request, componentId); String entityName = (String) request.getSession().getAttribute(Constants.ENTITY_NAME); Entity entity = (Entity) EntityCache.getCache().getEntityById(Long.valueOf((entityName))); Map<String, AttributeInterface> enumAttributeMap = (HashMap<String, AttributeInterface>) request.getSession().getAttribute(Constants.ENUMRATED_ATTRIBUTE); AttributeInterface attribute = (AttributeInterface) enumAttributeMap.get(Constants.ATTRIBUTE_INTERFACE + componentId); if (targetVocabURN != null)// Need to retrieve HTML for requested Vocabulary Mapped or sou { // user clicked on radio boxes //AJAX Request handler for Getting Mapping data for source or target vocabularies String htmlResponse = getPVsForRequestedVoab(request, targetVocabURN, componentId, entity, attribute); response.getWriter().write(htmlResponse); return null; } //new request for entity; remove the message from the session removeHTMLFromSesson(request); if(editVocabURN.equals("null") || editVocabURN.equals(VIProperties.sourceVocabUrn) ) { /* load source vocabulary if in edit mode as well as in not edit mode*/ String srcHTML=getPVsForSourceVocab(attribute, entity, componentId, request); //set the data in session because need to show this data on page load request.getSession().setAttribute(Constants.PV_HTML+VIProperties.sourceVocabUrn, srcHTML); } else { //need to load other vocabulary in edit mode setEditVocabHTML(request, editVocabURN, entity, attribute); } setComponentId(request, componentId); 
SearchPermissibleValueBizlogic bizLogic = (SearchPermissibleValueBizlogic) BizLogicFactory .getInstance().getBizLogic(Constants.SEARCH_PV_FROM_VOCAB_BILOGIC_ID); request.getSession().setAttribute(Constants.VOCABULIRES, bizLogic.getVocabularies()); return mapping.findForward(edu.wustl.query.util.global.Constants.SUCCESS); } /** * @param request * @param editVocabURN * @param entity * @param attribute * @throws VocabularyException * @throws PVManagerException */ private void setEditVocabHTML(HttpServletRequest request, String editVocabURN, Entity entity, AttributeInterface attribute) throws VocabularyException, PVManagerException { String trgHTML=getMappingForTargetVocab(editVocabURN, attribute, entity); String[] trgHTMLAll=trgHTML.split(Constants.MSG_DEL); if(trgHTMLAll.length>1) { request.getSession().setAttribute(Constants.PV_HTML+editVocabURN, trgHTMLAll[0]); request.getSession().setAttribute(Constants.SRC_VOCAB_MESSAGE, trgHTMLAll[1]); } else { request.getSession().setAttribute(Constants.PV_HTML+editVocabURN, trgHTML); } } /** * @param request * @param targetVocabURN * @param componentId * @param entity * @param attribute * @return * @throws VocabularyException * @throws PVManagerException */ private String getPVsForRequestedVoab(HttpServletRequest request, final String targetVocabURN, String componentId, Entity entity, AttributeInterface attribute) throws VocabularyException, PVManagerException { String htmlResponse =null; if(targetVocabURN.equals(VIProperties.sourceVocabUrn)) { htmlResponse= getPVsForSourceVocab(attribute, entity, componentId, request); } else { htmlResponse= getMappingForTargetVocab(targetVocabURN, attribute, entity); } return htmlResponse; } /** * @param request * @param componentId */ private void setComponentId(HttpServletRequest request, String componentId) { if (componentId != null) { request.getSession().setAttribute(Constants.COMPONENT_ID, componentId); } } /** * @param request * @param componentId * @return */ private String getComponentId(HttpServletRequest request, String componentId) { if (componentId == null) { //need to save component id into the session for next Ajax requests componentId = (String) request.getSession().getAttribute(Constants.COMPONENT_ID); } return componentId; } /** * @param request */ @SuppressWarnings("unchecked") private void removeHTMLFromSesson(HttpServletRequest request) { request.getSession().removeAttribute(Constants.SRC_VOCAB_MESSAGE); Enumeration attributeNames = request.getSession().getAttributeNames(); while(attributeNames.hasMoreElements()) { String atr=attributeNames.nextElement().toString(); if(atr.indexOf(Constants.PV_HTML)==0) { request.getSession().removeAttribute(atr); } } } /** * This method generate the HTML for the Source vocabulary (MED 1.0) * @param attribute * @param entity * @param componentId * @param request * @throws VocabularyException * @throws PVManagerException */ private String getPVsForSourceVocab(AttributeInterface attribute, EntityInterface entity, String componentId, HttpServletRequest request) throws VocabularyException, PVManagerException { SearchPermissibleValueBizlogic bizLogic = (SearchPermissibleValueBizlogic) BizLogicFactory .getInstance().getBizLogic(Constants.SEARCH_PV_FROM_VOCAB_BILOGIC_ID); StringBuffer html = new StringBuffer(); List<IConcept> pvList = bizLogic.getConfiguredPermissibleValueList(attribute, entity); String srcVocabURN = VIProperties.sourceVocabUrn; String vocabDisName = bizLogic.getDisplayNameForVocab(srcVocabURN); 
html.append(bizLogic.getRootVocabularyNodeHTML(srcVocabURN, vocabDisName)); if(pvList != null && !pvList.isEmpty()) { for(IConcept concept:pvList) { String checkboxId = srcVocabURN + Constants.ID_DEL + concept.getCode(); html.append(bizLogic.getHTMLForConcept(srcVocabURN,concept,checkboxId)); } html.append(bizLogic.getEndHTML()); if( pvList.size()==VIProperties.maxPVsToShow)// Need to show Message Too Many Result on UI { html.append(bizLogic.getInfoMessage()); request.getSession().setAttribute(Constants.SRC_VOCAB_MESSAGE, bizLogic.getInfoMessage() .replace(Constants.MSG_DEL,"")); } } else { html.append(bizLogic.getNoMappingFoundHTML()); html.append(bizLogic.getEndHTML()); } return html.toString(); } /** * This method returns the data mapped vocabularies * @param targetVocabURN * @param attribute * @param entity * @return * @throws VocabularyException * @throws PVManagerException */ private String getMappingForTargetVocab(String targetVocabURN, AttributeInterface attribute, EntityInterface entity) throws VocabularyException, PVManagerException { SearchPermissibleValueBizlogic bizLogic = (SearchPermissibleValueBizlogic) BizLogicFactory .getInstance().getBizLogic(Constants.SEARCH_PV_FROM_VOCAB_BILOGIC_ID); IVocabulary souVocabulary = bizLogic.getVocabulary(VIProperties.sourceVocabUrn); // Get the target vocabulary info from parameter String targetVocabDisName=bizLogic.getDisplayNameForVocab(targetVocabURN); IVocabulary targVocabulary = bizLogic.getVocabulary(targetVocabURN); StringBuffer html = new StringBuffer(); if (!((Vocabulary)souVocabulary).equals(targVocabulary)) { html.append(bizLogic.getRootVocabularyNodeHTML(targetVocabURN,targetVocabDisName)); Map<String, List<IConcept>> vocabMappings = bizLogic.getMappedConcepts(attribute, targVocabulary, entity); html.append(getMappedHTMLForTargetVocab(targetVocabURN,vocabMappings)); } return html.toString(); } /** * This method returns the mapping data as HTML * @param html * @param vocabName * @param vocabversoin * @param vocabMappings * @throws VocabularyException * @throws NumberFormatException */ private StringBuffer getMappedHTMLForTargetVocab(String vocabURN, Map<String, List<IConcept>> vocabMappings) throws NumberFormatException, VocabularyException { SearchPermissibleValueBizlogic bizLogic = (SearchPermissibleValueBizlogic) BizLogicFactory .getInstance().getBizLogic(Constants.SEARCH_PV_FROM_VOCAB_BILOGIC_ID); StringBuffer mappedHTML=new StringBuffer(); int displayPVCount=1; int maxPv=0; boolean isMsgDisplayed=false; if (vocabMappings != null && vocabMappings.size()!=0) { Set<String> keySet = vocabMappings.keySet(); Iterator<String> iterator = keySet.iterator(); while (iterator.hasNext()) { String conceptCode = iterator.next(); List<IConcept> mappingList = vocabMappings.get(conceptCode); maxPv=maxPv+mappingList.size(); ListIterator<IConcept> mappingListItr = mappingList.listIterator(); while (mappingListItr.hasNext()) { IConcept concept = (IConcept) mappingListItr.next(); String checkboxId = vocabURN + Constants.ID_DEL + conceptCode; mappedHTML.append(bizLogic.getHTMLForConcept(vocabURN,concept, checkboxId)); } } } else { mappedHTML.append(bizLogic.getNoMappingFoundHTML()); } mappedHTML.append(bizLogic.getEndHTML()); if(maxPv>=VIProperties.maxPVsToShow) { mappedHTML.append(bizLogic.getInfoMessage()); } return mappedHTML; } }
Issue resolved: if the concept code contains an apostrophe (') then the user is not able to add PVs. SVN-Revision: 5538
WEB-INF/src/edu/wustl/query/action/SearchMappedPVsAction.java
Issue resolved: if the concept code contains an apostrophe (') then the user is not able to add PVs.
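Concretely, the fix visible in the new getPVsForSourceVocab is checkboxId = checkboxId.replaceAll("'", ""): the apostrophe is stripped before the id is embedded in the permissible-value HTML, where an unescaped quote would otherwise break the generated markup and leave the checkbox unselectable. A small isolated sketch of that sanitisation; the delimiter value below is an assumption standing in for Constants.ID_DEL:

// Sketch of the checkbox-id sanitisation added in this commit. An apostrophe
// in a concept code (e.g. "Crohn's disease") would otherwise escape the quoted
// id attribute in the generated HTML for that permissible value.
public class CheckboxIdSketch {

    private static final String ID_DEL = "~"; // assumed; Constants.ID_DEL in the source

    static String checkboxId(String vocabUrn, String conceptCode) {
        String checkboxId = vocabUrn + ID_DEL + conceptCode;
        return checkboxId.replaceAll("'", ""); // the call the fix introduces
    }

    public static void main(String[] args) {
        System.out.println(checkboxId("urn:med:1.0", "Crohn's disease"));
        // prints: urn:med:1.0~Crohns disease
    }
}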
Java
bsd-3-clause
09be20a4d51226fa09db73bfe9105613b3675f48
0
cesarmarinhorj/gremlin,tinkerpop/gremlin,samanalysis/gremlin,samanalysis/gremlin,tinkerpop/gremlin,ccagnoli/gremlin,ccagnoli/gremlin,cesarmarinhorj/gremlin
package com.tinkerpop.gremlin.compiler.pipes; import com.tinkerpop.gremlin.compiler.Atom; import com.tinkerpop.gremlin.compiler.GremlinEvaluator; import com.tinkerpop.gremlin.compiler.operations.BinaryOperation; import com.tinkerpop.gremlin.compiler.operations.Operation; import com.tinkerpop.gremlin.compiler.operations.logic.*; import com.tinkerpop.gremlin.compiler.types.Range; import com.tinkerpop.pipes.IdentityPipe; import com.tinkerpop.pipes.Pipe; import com.tinkerpop.pipes.SingleIterator; import com.tinkerpop.pipes.filter.AndFilterPipe; import com.tinkerpop.pipes.filter.ComparisonFilterPipe.Filter; import com.tinkerpop.pipes.filter.OrFilterPipe; import com.tinkerpop.pipes.filter.RangeFilterPipe; import com.tinkerpop.pipes.pgm.*; import com.tinkerpop.pipes.util.HasNextPipe; import java.util.ArrayList; import java.util.Iterator; import java.util.List; /** * @author Pavel A. Yaskevich */ public class GremlinPipesHelper { @SuppressWarnings("rawtypes") public static List<Pipe> pipesForStep(Atom token, List<Operation> predicates) throws RuntimeException { List<Pipe> pipes = new ArrayList<Pipe>(); String tokenString = (String) token.getValue(); Pipe tokenPipe = pipeForToken(token); if (tokenPipe != null) { pipes.add(tokenPipe); } else { if (GremlinEvaluator.paths.isPath(tokenString)) { pipes.addAll(GremlinEvaluator.paths.getPath(tokenString)); } else { throw new RuntimeException("No pipe exists for '" + tokenString + "'."); } } for (int i = 0; i < predicates.size(); i++) { pipes.add(pipeForPredicate(predicates.get(i))); } return pipes; } @SuppressWarnings("rawtypes") private static Pipe pipeForToken(final Atom tokenAtom) { Pipe pipe = null; if (tokenAtom.isIdentifier()) { String value = (String) tokenAtom.getValue(); if (value.equals(".")) pipe = new IdentityPipe(); // outgoing edges if (value.equals("outE")) pipe = new VertexEdgePipe(VertexEdgePipe.Step.OUT_EDGES); // outgoing vertices if (value.equals("outV")) pipe = new EdgeVertexPipe(EdgeVertexPipe.Step.OUT_VERTEX); // ingoing edges if (value.equals("inE")) pipe = new VertexEdgePipe(VertexEdgePipe.Step.IN_EDGES); // ingoing vertices if (value.equals("inV")) pipe = new EdgeVertexPipe(EdgeVertexPipe.Step.IN_VERTEX); // both vertices if (value.equals("bothV")) pipe = new EdgeVertexPipe(EdgeVertexPipe.Step.BOTH_VERTICES); // both edges if (value.equals("bothE")) pipe = new VertexEdgePipe(VertexEdgePipe.Step.BOTH_EDGES); // vertex iterator if (value.equals("V")) pipe = new GraphElementPipe(GraphElementPipe.ElementType.VERTEX); // edge iterator if (value.equals("E")) pipe = new GraphElementPipe(GraphElementPipe.ElementType.EDGE); } if (tokenAtom.isProperty()) pipe = new GremlinPropertyPipe(tokenAtom.getValue()); return pipe; } @SuppressWarnings({"rawtypes"}) private static Pipe pipeForPredicate(Operation predicate) throws RuntimeException { if (predicate instanceof BinaryOperation) { Operation[] operands = ((BinaryOperation) predicate).getOperands(); if (predicate instanceof And) return andFilterPipe(operands); if (predicate instanceof Or) return orFilterPipe(operands); Atom operandA = operands[0].compute(); Atom operandB = operands[1].compute(); String key = (String) operandA.getValue(); Object storedObject = null; if (operandB.isNumber()) storedObject = (Number) operandB.getValue(); else storedObject = (String) operandB.getValue(); if (predicate instanceof Equality) return propertyFilterPipe(key, storedObject, Filter.NOT_EQUAL); if (predicate instanceof UnEquality) return propertyFilterPipe(key, storedObject, Filter.EQUAL); if (predicate 
instanceof GreaterThan) return propertyFilterPipe(key, storedObject, Filter.LESS_THAN); if (predicate instanceof GreaterThanOrEqual) return propertyFilterPipe(key, storedObject, Filter.LESS_THAN_EQUAL); if (predicate instanceof LessThan) return propertyFilterPipe(key, storedObject, Filter.GREATER_THAN); if (predicate instanceof LessThanOrEqual) return propertyFilterPipe(key, storedObject, Filter.GREATER_THAN_EQUAL); } else { // unary operation like var def or primitive type Atom unaryAtom = predicate.compute(); if (unaryAtom.isNumber()) { int idx = ((Number) unaryAtom.getValue()).intValue(); return (idx == 0) ? new RangeFilterPipe(0, 1) : new RangeFilterPipe(idx, idx + 1); } if (unaryAtom.getValue() instanceof Range) { Range range = (Range) unaryAtom.getValue(); return new RangeFilterPipe(range.getMinimum(), range.getMaximum()); } } throw new RuntimeException("Can't map - " + predicate.getClass() + " to any of the existing pipes."); } @SuppressWarnings({"rawtypes", "unchecked"}) private static List<Pipe> pipesForAndOrOperations(final Operation... operands) { List<Pipe> pipes = new ArrayList<Pipe>(); for (int i = 0; i < operands.length; i++) { pipes.add(new HasNextPipe(pipeForPredicate(operands[i]))); } return pipes; } @SuppressWarnings({"rawtypes", "unchecked"}) private static Pipe andFilterPipe(final Operation... operands) { return new AndFilterPipe(pipesForAndOrOperations(operands)); } @SuppressWarnings({"rawtypes", "unchecked"}) private static Pipe orFilterPipe(final Operation... operands) { return new OrFilterPipe(pipesForAndOrOperations(operands)); } @SuppressWarnings({"rawtypes", "unchecked"}) private static Pipe propertyFilterPipe(final String key, final Object storedObject, final Filter filter) { if (key.equals("label")) { return new LabelFilterPipe((String) storedObject, filter); } else { return new PropertyFilterPipe(key, storedObject, filter); } } public static Iterator pipelineStartPoint(Object point) { return (point instanceof Iterable) ? ((Iterable) point).iterator() : new SingleIterator(point); } }
src/main/java/com/tinkerpop/gremlin/compiler/pipes/GremlinPipesHelper.java
package com.tinkerpop.gremlin.compiler.pipes; import com.tinkerpop.gremlin.compiler.Atom; import com.tinkerpop.gremlin.compiler.GremlinEvaluator; import com.tinkerpop.gremlin.compiler.operations.BinaryOperation; import com.tinkerpop.gremlin.compiler.operations.Operation; import com.tinkerpop.gremlin.compiler.operations.logic.*; import com.tinkerpop.gremlin.compiler.types.Range; import com.tinkerpop.pipes.IdentityPipe; import com.tinkerpop.pipes.Pipe; import com.tinkerpop.pipes.SingleIterator; import com.tinkerpop.pipes.filter.AndFilterPipe; import com.tinkerpop.pipes.filter.ComparisonFilterPipe.Filter; import com.tinkerpop.pipes.filter.OrFilterPipe; import com.tinkerpop.pipes.filter.RangeFilterPipe; import com.tinkerpop.pipes.pgm.*; import com.tinkerpop.pipes.util.HasNextPipe; import java.util.ArrayList; import java.util.Iterator; import java.util.List; /** * @author Pavel A. Yaskevich */ public class GremlinPipesHelper { @SuppressWarnings("rawtypes") public static List<Pipe> pipesForStep(Atom token, List<Operation> predicates) throws RuntimeException { List<Pipe> pipes = new ArrayList<Pipe>(); String tokenString = (String) token.getValue(); Pipe tokenPipe = pipeForToken(token); if (tokenPipe != null) { pipes.add(tokenPipe); } else { if (GremlinEvaluator.paths.isPath(tokenString)) { pipes.addAll(GremlinEvaluator.paths.getPath(tokenString)); } else { throw new RuntimeException("No pipe exists for '" + tokenString + "'."); } } for (int i = 0; i < predicates.size(); i++) { pipes.add(pipeForPredicate(predicates.get(i))); } return pipes; } @SuppressWarnings("rawtypes") private static Pipe pipeForToken(final Atom tokenAtom) { Pipe pipe = null; if (tokenAtom.isIdentifier()) { String value = (String) tokenAtom.getValue(); if (value.equals(".")) pipe = new IdentityPipe(); // outgoing edges if (value.equals("outE")) pipe = new VertexEdgePipe(VertexEdgePipe.Step.OUT_EDGES); // outgoing vertices if (value.equals("outV")) pipe = new EdgeVertexPipe(EdgeVertexPipe.Step.OUT_VERTEX); // ingoing edges if (value.equals("inE")) pipe = new VertexEdgePipe(VertexEdgePipe.Step.IN_EDGES); // ingoing vertices if (value.equals("inV")) pipe = new EdgeVertexPipe(EdgeVertexPipe.Step.IN_VERTEX); // both vertices if (value.equals("bothV")) pipe = new EdgeVertexPipe(EdgeVertexPipe.Step.BOTH_VERTICES); // both edges if (value.equals("bothE")) pipe = new VertexEdgePipe(VertexEdgePipe.Step.BOTH_EDGES); // vertex iterator if (value.equals("V")) pipe = new GraphElementPipe(GraphElementPipe.ElementType.VERTEX); // edge iterator if (value.equals("E")) pipe = new GraphElementPipe(GraphElementPipe.ElementType.EDGE); } if (tokenAtom.isProperty()) pipe = new GremlinPropertyPipe(tokenAtom.getValue()); return pipe; } @SuppressWarnings({"rawtypes"}) private static Pipe pipeForPredicate(Operation predicate) throws RuntimeException { if (predicate instanceof BinaryOperation) { Operation[] operands = ((BinaryOperation) predicate).getOperands(); if (predicate instanceof And) return andFilterPipe(operands); if (predicate instanceof Or) return orFilterPipe(operands); Atom operandA = operands[0].compute(); Atom operandB = operands[1].compute(); String key = (String) operandA.getValue(); Object storedObject = null; if (operandB.isNumber()) storedObject = (Number) operandB.getValue(); else storedObject = (String) operandB.getValue(); if (predicate instanceof Equality) return propertyFilterPipe(key, storedObject, Filter.NOT_EQUAL); if (predicate instanceof UnEquality) return propertyFilterPipe(key, storedObject, Filter.EQUAL); if (predicate 
instanceof GreaterThan) return propertyFilterPipe(key, storedObject, Filter.LESS_THAN); if (predicate instanceof GreaterThanOrEqual) return propertyFilterPipe(key, storedObject, Filter.LESS_THAN_EQUAL); if (predicate instanceof LessThan) return propertyFilterPipe(key, storedObject, Filter.GREATER_THAN); if (predicate instanceof LessThanOrEqual) return propertyFilterPipe(key, storedObject, Filter.GREATER_THAN_EQUAL); } else { // unary operation like var def or premitive type Atom unaryAtom = predicate.compute(); if (unaryAtom.isNumber()) { int idx = ((Number) unaryAtom.getValue()).intValue(); return (idx == 0) ? new RangeFilterPipe(0, 1) : new RangeFilterPipe(idx, idx + 1); } if (unaryAtom.getValue() instanceof Range) { Range range = (Range) unaryAtom.getValue(); return new RangeFilterPipe(range.getMinimum(), range.getMaximum()); } } throw new RuntimeException("Can't map - " + predicate.getClass() + " to any of existing pipes."); } @SuppressWarnings({"rawtypes", "unchecked"}) private static List<Pipe> pipesForAndOrOperations(final Operation... operands) { List<Pipe> pipes = new ArrayList<Pipe>(); for (int i = 0; i < operands.length; i++) { pipes.add(new HasNextPipe(pipeForPredicate(operands[i]))); } return pipes; } @SuppressWarnings({"rawtypes", "unchecked"}) private static Pipe andFilterPipe(final Operation... operands) { return new AndFilterPipe(pipesForAndOrOperations(operands)); } @SuppressWarnings({"rawtypes", "unchecked"}) private static Pipe orFilterPipe(final Operation... operands) { return new OrFilterPipe(pipesForAndOrOperations(operands)); } @SuppressWarnings({"rawtypes", "unchecked"}) private static Pipe propertyFilterPipe(final String key, final Object storedObject, final Filter filter) { if (key.equals("label")) { return new LabelFilterPipe((String) storedObject, filter); } else { return new PropertyFilterPipe(key, storedObject, filter); } } public static Iterator pipelineStartPoint(Object point) { return (point instanceof Iterator) ? (Iterator) point : new SingleIterator(point); } }
Iterator fixed to Iterable in pipelineStartPoint method of GremlinPipesHelper.
src/main/java/com/tinkerpop/gremlin/compiler/pipes/GremlinPipesHelper.java
Iterator fixed to Iterable in pipelineStartPoint method of GremlinPipesHelper.
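The substance of this fix: a collection handed to the pipeline as its start point implements Iterable, not Iterator, so the old instanceof Iterator test fell through and SingleIterator wrapped the entire collection as a single element. A runnable sketch of the corrected dispatch, with Collections.singletonList(...).iterator() standing in for the Pipes SingleIterator:

import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;

// Sketch of the fixed dispatch in pipelineStartPoint: Lists and other
// collections implement Iterable (not Iterator), so the old check sent them
// down the single-element branch.
public class StartPointSketch {

    @SuppressWarnings({"rawtypes", "unchecked"})
    static Iterator pipelineStartPoint(Object point) {
        return (point instanceof Iterable)
                ? ((Iterable) point).iterator()                  // fixed branch
                : Collections.singletonList(point).iterator();   // stand-in for SingleIterator
    }

    public static void main(String[] args) {
        List<String> vertices = Arrays.asList("a", "b", "c");
        for (Iterator it = pipelineStartPoint(vertices); it.hasNext(); ) {
            System.out.println(it.next()); // a, b, c -- three elements, not one List
        }
    }
}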
Java
mit
944591cbc71202c337c14cc8b5a205eff4c47d06
0
shrayasr/PCache
package main.com.pcache.core; import java.io.IOException; import java.net.ServerSocket; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.GnuParser; import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; public class Server { private static int _PORT_NUMBER=6369; private static int _POOL_SIZE = 10; public static void main (String[] args) { Options options = new Options(); Option pool_size = new Option("pool_size", true, "No. of threads to use to handle connections"); /* Option.isRequired() is only a getter, so the call that sat here was a no-op; the option stays optional and _POOL_SIZE keeps its default of 10 when the flag is absent */ options.addOption(pool_size); CommandLineParser parser = new GnuParser(); CommandLine cmd; try { cmd = parser.parse(options, args); if (cmd.hasOption("pool_size")) { _POOL_SIZE = Integer.parseInt(cmd.getOptionValue("pool_size")); } } catch (ParseException e1) { e1.printStackTrace(); } System.out.println("Starting PCache Server"); System.out.println("PORT: " + _PORT_NUMBER); System.out.println("THREAD POOL SIZE: " + _POOL_SIZE); ExecutorService executorService = Executors.newFixedThreadPool(_POOL_SIZE); boolean listening = true; try (ServerSocket serverSocket = new ServerSocket(_PORT_NUMBER, 400 /* backlog: up to 400 pending connections may queue while the pool is busy */)) { while (listening) { executorService.execute(new RequestHandler(serverSocket.accept())); } } catch (IOException e) { e.printStackTrace(); } } }
src/main/com/pcache/core/Server.java
package main.com.pcache.core; import java.io.IOException; import java.net.ServerSocket; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.GnuParser; import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; public class Server { private static int _PORT_NUMBER=6369; private static int _POOL_SIZE = 10; public static void main (String[] args) throws ParseException { Options options = new Options(); Option pool_size = new Option("pool_size", true, "No. of threads to use to handle connections"); pool_size.isRequired(); options.addOption(pool_size); CommandLineParser parser = new GnuParser(); CommandLine cmd = parser.parse(options, args); if (cmd.hasOption("pool_size")) { _POOL_SIZE = Integer.parseInt(cmd.getOptionValue("pool_size")); } System.out.println("Starting PCache Server"); System.out.println("PORT: " + _PORT_NUMBER); System.out.println("THREAD POOL SIZE: " + _POOL_SIZE); ExecutorService executorService = Executors.newFixedThreadPool(_POOL_SIZE); boolean listening = true; try (ServerSocket serverSocket = new ServerSocket(_PORT_NUMBER)) { while (listening) { executorService.execute(new RequestHandler(serverSocket.accept())); } } catch (IOException e) { e.printStackTrace(); } } }
Better connection handling at the server level - Explicitly handling the exception thrown by the GnuParser for command-line arguments - Setting backlog connections to 400, meaning that 400 connections can be queued at the socket point
src/main/com/pcache/core/Server.java
Better connection handling at the server level
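A dependency-free sketch of the two changes this commit message describes: catching the failure locally instead of declaring it on main, and passing an explicit backlog to the ServerSocket constructor so pending connections queue at the socket instead of being refused. The port and backlog values mirror the record; the echo response is hypothetical.

import java.io.IOException;
import java.net.ServerSocket;
import java.net.Socket;

public class BacklogSketch {
    private static final int PORT = 6369;
    private static final int BACKLOG = 400; // connections the OS may queue before accept()

    public static void main(String[] args) {
        // java.net.ServerSocket(int port, int backlog) caps the pending-connection queue.
        try (ServerSocket server = new ServerSocket(PORT, BACKLOG)) {
            while (true) {
                try (Socket client = server.accept()) {
                    client.getOutputStream().write("ok\n".getBytes("UTF-8"));
                }
            }
        } catch (IOException e) {
            // Handle locally rather than letting main declare the exception.
            e.printStackTrace();
        }
    }
}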
Java
mit
3ac1e2945f3ea8f58f2d26a4e80b6dacb1412bf3
0
Linguastat/datasift-java,datasift/datasift-java,Linguastat/datasift-java,datasift/datasift-java
package com.datasift.client.push; import com.datasift.client.DataSiftClient; import com.datasift.client.exceptions.IllegalDataSiftPullFormat; import com.datasift.client.exceptions.PushSubscriptionNotFound; import com.datasift.client.stream.Interaction; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; import io.higgs.http.client.readers.Reader; import io.netty.buffer.ByteBuf; import io.netty.handler.codec.http.HttpHeaders; import io.netty.handler.codec.http.HttpResponseStatus; import java.io.IOException; /** * @author Courtney Robinson <[email protected]> */ public class PullReader extends Reader<String> { public static final String HEADER_NEXT_CURSOR = "X-DataSift-Cursor-Next", HEADER_CURRENT_CURSOR = "X-DataSift-Cursor-Current", HEADER_FORMAT = "X-DataSift-Format", FORMAT_ARRAY = "json_array", FORMAT_META = "json_meta", FORMAT_NEW_LINE = "json_new_line"; protected String currentCursor, nextCursor, format; // protected JsonFactory factory; // protected JsonParser parser; protected final PulledInteractions queue; private boolean done, headersSet; protected int status = -1; protected int backOff = 1; protected int successiveNoContent; public PullReader(PulledInteractions queue) { this.queue = queue; // factory = new JsonFactory(); // try { // parser = factory.createParser(data); // } catch (IOException e) { // e.printStackTrace(); // } //todo as the amount of data that can be requested with pull increases, memory may become an issue //replace current processing with Jackson's stream processing to improve memory efficiency /** JsonToken current; while ((current = parser.nextToken()) != JsonToken.END_OBJECT) { String fieldName = parser.getCurrentName(); if (fieldName.equals("records")) { if (current == JsonToken.START_ARRAY) { // For each of the records in the array while (parser.nextToken() != JsonToken.END_ARRAY) { // read the record into a tree model, // this moves the parsing position to the end of it JsonNode node = parser.readValueAsTree(); // And now we have random access to everything in the object System.out.println("field1: " + node.get("field1").getValueAsText()); System.out.println("field2: " + node.get("field2").getValueAsText()); } } else { System.out.println("Error: records should be an array: skipping."); parser.skipChildren(); } } else { System.out.println("Unprocessed property: " + fieldName); parser.skipChildren(); } } */ } @Override public void onStatus(HttpResponseStatus status) { super.onStatus(status); this.status = status.code(); checkResponseStatus(); if (this.status == 204) { successiveNoContent++; } } protected boolean checkResponseStatus() { switch (this.status) { case 404: throw new PushSubscriptionNotFound(response); //403 and 429 are rate limit status case 403: case 429: backOff = 30; break; //in all cases below we want to back off case 503: //service or one of its dependencies is unavailable case 500: // all hell broke loose case 204: // no data //exponentially back off up to 60 seconds backOff = backOff * 2; if (backOff > 60) { backOff = 60; } break; case 200: backOff = 0; break; default: return false; } return true; } @Override public void onHeaders(HttpHeaders headers) { super.onHeaders(headers); checkHeaders(); headersSet = true; } protected void checkHeaders() { HttpHeaders headers = response.getHeaders(); if (headers != null && headers.names() != null) { for (String k : headers.names()) { if (HEADER_CURRENT_CURSOR.equalsIgnoreCase(k)) {
currentCursor = headers.get(k); } else if (HEADER_NEXT_CURSOR.equalsIgnoreCase(k)) { nextCursor = headers.get(k); } else if (HEADER_FORMAT.equalsIgnoreCase(k)) { format = headers.get(k); } } } @Override public void data(ByteBuf data) { buffer.writeBytes(data); processData(); } @Override public void done() { done = true; processData(); } protected void processData() { checkHeaders(); if (backOff == 0 && //if we got a 503,500 or 204 back off will not be 0 //if status isn't a 204 and we're not backing off then we're in an invalid state if the rest holds true status != 204 && headersSet && (format == null || format.isEmpty())) { throw new IllegalDataSiftPullFormat("The DataSift API failed to provide the format of the data. " + "Please raise the issue with support", response); } if (format != null) { switch (format) { case FORMAT_NEW_LINE: if (done) { //chunked responses will cause new line to break // early so only do it when the entire response is received readLineByLine(); } break; case FORMAT_ARRAY: readArray(); break; case FORMAT_META: readObject(); break; default: throw new IllegalDataSiftPullFormat("DataSift format '" + format + "' is not supported", response); } } } protected void send(Interaction interaction) { queue.add(interaction); } protected void readObject() { //until we support JSON stream processing only do this when the entire response is read if (done) { String data = getDataAsString(); if (data == null || data.isEmpty()) { return; } try { ObjectNode meta = (ObjectNode) DataSiftClient.MAPPER.readTree(data); ArrayNode interactions = (ArrayNode) meta.get("interactions"); for (JsonNode interaction : interactions) { send(new Interaction(interaction)); } buffer.discardReadBytes(); } catch (IOException e) { log.warn("Failed to decode interactions", e); } } } protected void readArray() { //until we support JSON stream processing only do this when the entire response is read if (done) { String data = getDataAsString(); if (data == null || data.isEmpty()) { return; } try { ArrayNode interactions = (ArrayNode) DataSiftClient.MAPPER.readTree(data); for (JsonNode interaction : interactions) { send(new Interaction(interaction)); } buffer.discardReadBytes(); } catch (IOException e) { log.warn("Failed to decode interactions", e); } } } protected void readLineByLine() { String line; try { while ((line = data.readLine()) != null) { //System.out.println(line); ObjectNode interaction = (ObjectNode) DataSiftClient.MAPPER.readTree(line); send(new Interaction(interaction)); } buffer.discardReadBytes(); } catch (IOException e) { log.info("Failed to decode interaction ", e); } } protected String getDataAsString() { String str = buffer.toString(0, buffer.writerIndex(), utf8); buffer.readerIndex(buffer.writerIndex()); return str; } public void reset() { //only reset back off if the current request was successful and we've reached 1 minute back off if (status == 200 || backOff >= 60) { successiveNoContent = 0; backOff = 1; } currentCursor = nextCursor = format = null; done = false; status = -1; headersSet = false; } }
src/main/java/com/datasift/client/push/PullReader.java
package com.datasift.client.push; import com.datasift.client.DataSiftClient; import com.datasift.client.exceptions.IllegalDataSiftPullFormat; import com.datasift.client.exceptions.PushSubscriptionNotFound; import com.datasift.client.stream.Interaction; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; import io.higgs.http.client.readers.Reader; import io.netty.buffer.ByteBuf; import io.netty.handler.codec.http.HttpHeaders; import io.netty.handler.codec.http.HttpResponseStatus; import java.io.IOException; /** * @author Courtney Robinson <[email protected]> */ public class PullReader extends Reader<String> { public static final String HEADER_NEXT_CURSOR = "X-DataSift-Cursor-Next", HEADER_CURRENT_CURSOR = "X-DataSift-Cursor-Current", HEADER_FORMAT = "X-DataSift-Format", FORMAT_ARRAY = "json_array", FORMAT_META = "json_meta", FORMAT_NEW_LINE = "json_new_line"; protected String currentCursor, nextCursor, format; // protected JsonFactory factory; // protected JsonParser parser; protected final PulledInteractions queue; private boolean done, headersSet; protected int status = -1; protected int backOff = 1; protected int successiveNoContent; public PullReader(PulledInteractions queue) { this.queue = queue; // factory = new JsonFactory(); // try { // parser = factory.createParser(data); // } catch (IOException e) { // e.printStackTrace(); // } //todo as the amount of data that can be requested with pull increases, memory may become an issue //replace current processing with Jackson's stream processing to improve memory efficiency /** JsonToken current; while ((current = parser.nextToken()) != JsonToken.END_OBJECT) { String fieldName = parser.getCurrentName(); if (fieldName.equals("records")) { if (current == JsonToken.START_ARRAY) { // For each of the records in the array while (parser.nextToken() != JsonToken.END_ARRAY) { // read the record into a tree model, // this moves the parsing position to the end of it JsonNode node = parser.readValueAsTree(); // And now we have random access to everything in the object System.out.println("field1: " + node.get("field1").getValueAsText()); System.out.println("field2: " + node.get("field2").getValueAsText()); } } else { System.out.println("Error: records should be an array: skipping."); parser.skipChildren(); } } else { System.out.println("Unprocessed property: " + fieldName); parser.skipChildren(); } } */ } @Override public void onStatus(HttpResponseStatus status) { super.onStatus(status); this.status = status.code(); checkResponseStatus(); if (this.status == 204) { successiveNoContent++; } } protected boolean checkResponseStatus() { switch (this.status) { case 404: throw new PushSubscriptionNotFound(response); //403 and 429 are rate limit status case 403: case 429: backOff = 30; break; //in all cases below we want to back off case 503: //service or one of its dependencies is unavailable case 500: // all hell broke loose case 204: // no data //exponentially back off up to 60 seconds backOff = backOff * 2; if (backOff > 60) { backOff = 60; } break; case 200: backOff = 0; break; default: return false; } return true; } @Override public void onHeaders(HttpHeaders headers) { super.onHeaders(headers); checkHeaders(); headersSet = true; } protected void checkHeaders() { HttpHeaders headers = response.getHeaders(); if (headers != null && headers.names() != null) { for (String k : headers.names()) { if (HEADER_CURRENT_CURSOR.equalsIgnoreCase(k)) {
currentCursor = headers.get(k); } else if (HEADER_NEXT_CURSOR.equalsIgnoreCase(k)) { nextCursor = headers.get(k); } else if (HEADER_FORMAT.equalsIgnoreCase(k)) { format = headers.get(k); } } } @Override public void data(ByteBuf data) { buffer.writeBytes(data); processData(); } @Override public void done() { done = true; processData(); } protected void processData() { checkHeaders(); if (backOff == 0 && //if we got a 503,500 or 204 back off will not be 0 //if status isn't a 204 and we're not backing off then we're in an invalid state if the rest holds true status != 204 && headersSet && (format == null || format.isEmpty())) { throw new IllegalDataSiftPullFormat("The DataSift API failed to provide the format of the data. " + "Please raise the issue with support", response); } if (format != null) { switch (format) { case FORMAT_NEW_LINE: if (done) { //chunked responses will cause new line to break // early so only do it when the entire response is received readLineByLine(); } break; case FORMAT_ARRAY: readArray(); break; case FORMAT_META: readObject(); break; default: throw new IllegalDataSiftPullFormat("DataSift format '" + format + "' is not supported", response); } } } protected void send(Interaction interaction) { queue.add(interaction); } protected void readObject() { //until we support JSON stream processing only do this when the entire response is read if (done) { String data = getDataAsString(); try { ObjectNode meta = (ObjectNode) DataSiftClient.MAPPER.readTree(data); ArrayNode interactions = (ArrayNode) meta.get("interactions"); for (JsonNode interaction : interactions) { send(new Interaction(interaction)); } buffer.discardReadBytes(); } catch (IOException e) { log.warn("Failed to decode interactions", e); } } } protected void readArray() { //until we support JSON stream processing only do this when the entire response is read if (done) { String data = getDataAsString(); try { ArrayNode interactions = (ArrayNode) DataSiftClient.MAPPER.readTree(data); for (JsonNode interaction : interactions) { send(new Interaction(interaction)); } buffer.discardReadBytes(); } catch (IOException e) { log.warn("Failed to decode interactions", e); } } } protected void readLineByLine() { String line; try { while ((line = data.readLine()) != null) { //System.out.println(line); ObjectNode interaction = (ObjectNode) DataSiftClient.MAPPER.readTree(line); send(new Interaction(interaction)); } buffer.discardReadBytes(); } catch (IOException e) { log.info("Failed to decode interaction ", e); } } protected String getDataAsString() { String str = buffer.toString(0, buffer.writerIndex(), utf8); buffer.readerIndex(buffer.writerIndex()); return str; } public void reset() { //only reset back off if the current request was successful and we've reached 1 minute back off if (status == 200 || backOff >= 60) { successiveNoContent = 0; backOff = 1; } currentCursor = nextCursor = format = null; done = false; status = -1; headersSet = false; } }
make sure that the response payload is never null or empty before attempting to decode
src/main/java/com/datasift/client/push/PullReader.java
make sure that the response payload is never null or empty before attempting to decode
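A small standalone sketch of the guard this commit adds, using Jackson's ObjectMapper directly: bail out before readTree when the payload is null or empty, since decoding an empty body would otherwise fail. The mapper and method names here are illustrative, not the client's actual fields.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.io.IOException;

public class GuardedDecodeSketch {
    private static final ObjectMapper MAPPER = new ObjectMapper();

    // Returns null instead of throwing when there is nothing to decode.
    static JsonNode decodeOrNull(String payload) {
        if (payload == null || payload.isEmpty()) {
            return null; // e.g. a 204 No Content response produced no body
        }
        try {
            return MAPPER.readTree(payload);
        } catch (IOException e) {
            return null; // the record logs and continues on decode failure
        }
    }

    public static void main(String[] args) {
        System.out.println(decodeOrNull(""));                     // null, guard hit
        System.out.println(decodeOrNull("{\"id\":1}").get("id")); // 1
    }
}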
Java
mit
7d531176d8b7d82a3cebbe4933e12d333c3776a9
0
ClintonCao/UnifiedASATVisualizer,ClintonCao/Contextproject-TSE,ClintonCao/Contextproject-TSE,ClintonCao/Contextproject-TSE,ClintonCao/UnifiedASATVisualizer,ClintonCao/UnifiedASATVisualizer
package BlueTurtle.TSE; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.List; import BlueTurtle.groupers.WarningGrouper; import BlueTurtle.groupers.WarningGrouper.Criteria; import BlueTurtle.parsers.CheckStyleXMLParser; import BlueTurtle.parsers.FindBugsXMLParser; import BlueTurtle.parsers.PMDXMLParser; import BlueTurtle.parsers.XMLParser; import BlueTurtle.summarizers.Summarizer; import BlueTurtle.warnings.Warning; import BlueTurtle.writers.JSWriter; /** * JSONFormatter reads the output of Checkstyle, Findbugs and PMD and produces a * summarized defect output. * * @author BlueTurtle. * */ public class JSONFormatter { private List<Warning> totalWarnings = new ArrayList<Warning>(); /** * Produces a list of warnings by reading the output of PMD, Checkstyle * and Findbugs. Then converts it to JSON format and writes it to a * JavaScript file. * * @throws IOException * File not found. */ public void format() throws IOException { parseFile(new CheckStyleXMLParser(), JavaController.getCheckStyleOutputFile()); parseFile(new PMDXMLParser(), JavaController.getPmdOutputFile()); parseFile(new FindBugsXMLParser(), JavaController.getFindBugsOutputFile()); writeJSON(); } /** * Parse ASAT output and produce list of warnings. * * @return List of warnings. * @throws IOException * File not found. */ private void parseFile(XMLParser xmlParser, String filePath) throws IOException { if(!new File(filePath).exists()) { return; } totalWarnings.addAll(xmlParser.parseFile(filePath)); } /** * Groups the warnings together by packages and writes it as JSON output to * a JavaScript file. * * @param warnings * List of warnings to work with. * @throws IOException * Output file not found. */ private void writeJSON() throws IOException { WarningGrouper wg = new WarningGrouper(totalWarnings); List<Summarizer> list = wg.groupBy(Criteria.PACKAGES); JSWriter jwriter = JSWriter.getInstance(); jwriter.setSummarizedWarnings(list); jwriter.writeToJSFormat("visualization/JSON/outputWarningsJSON.js"); } }
src/main/java/BlueTurtle/TSE/JSONFormatter.java
package BlueTurtle.TSE; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.List; import BlueTurtle.groupers.WarningGrouper; import BlueTurtle.groupers.WarningGrouper.Criteria; import BlueTurtle.parsers.CheckStyleXMLParser; import BlueTurtle.parsers.FindBugsXMLParser; import BlueTurtle.parsers.PMDXMLParser; import BlueTurtle.parsers.XMLParser; import BlueTurtle.summarizers.Summarizer; import BlueTurtle.warnings.Warning; import BlueTurtle.writers.JSWriter; /** * JSONFormatter reads the output of Checkstyle, Findbugs and PMD and produces a * summarized defect output. * * @author BlueTurtle. * */ public class JSONFormatter { private List<Warning> totalWarnings = new ArrayList<Warning>(); /** * Produces a list of warnings by reading the output of PMD, Checkstyle * and Findbugs. Then converts it to JSON format and writes it to a * JavaScript file. * * @throws IOException * File not found. */ public void format() throws IOException { parseFile(new CheckStyleXMLParser(), JavaController.getCheckStyleOutputFile()); parseFile(new PMDXMLParser(), JavaController.getPmdOutputFile()); parseFile(new FindBugsXMLParser(), JavaController.getFindBugsOutputFile()); writeJSON(); } /** * Parse ASAT output and produce list of warnings. * * @return List of warnings. * @throws IOException * File not found. */ private void parseFile(XMLParser xmlParser, String filePath) throws IOException { File file = new File(filePath); if(!file.exists()) { return; } totalWarnings.addAll(xmlParser.parseFile(filePath)); } /** * Groups the warnings together by packages and writes it as JSON output to * a JavaScript file. * * @param warnings * List of warnings to work with. * @throws IOException * Output file not found. */ private void writeJSON() throws IOException { WarningGrouper wg = new WarningGrouper(totalWarnings); List<Summarizer> list = wg.groupBy(Criteria.PACKAGES); JSWriter jwriter = JSWriter.getInstance(); jwriter.setSummarizedWarnings(list); jwriter.writeToJSFormat("visualization/JSON/outputWarningsJSON.js"); } }
Removed single-use file variable in parseFile in JSONFormatter.
src/main/java/BlueTurtle/TSE/JSONFormatter.java
Removed single-use file variable in parseFile in JSONFormatter.
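For reference, the refactoring in this record in isolation: a local that is read exactly once can be inlined at its only use site without changing behavior. A hypothetical before/after:

import java.io.File;

public class InlineLocalSketch {
    // Before: the local is assigned and read exactly once.
    static boolean existsBefore(String filePath) {
        File file = new File(filePath);
        return file.exists();
    }

    // After: the single-use local is inlined; behavior is identical.
    static boolean existsAfter(String filePath) {
        return new File(filePath).exists();
    }

    public static void main(String[] args) {
        System.out.println(existsBefore("/tmp") == existsAfter("/tmp")); // true
    }
}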
Java
mit
66b5e260e1dc26d05803a0220e48a1a10f20619f
0
hpe-idol/java-powerpoint-report,hpautonomy/find,hpe-idol/find,hpautonomy/find,hpe-idol/find,hpautonomy/find,hpautonomy/find,hpautonomy/find,hpe-idol/find,hpe-idol/find,hpe-idol/find,hpe-idol/java-powerpoint-report
package com.autonomy.abc.selenium.menubar; import com.autonomy.abc.selenium.page.AppBody; import org.openqa.selenium.By; import org.openqa.selenium.Keys; import org.openqa.selenium.WebDriver; import org.openqa.selenium.WebElement; import java.util.List; public class TopNavBar extends TabBar { public TopNavBar(final WebDriver driver) { super(driver.findElement(By.cssSelector(".navbar-static-top")), driver); } @Override public TopNavBarTab getTab(final NavBarTabId id) { return new TopNavBarTab(this, id.toString()); } @Override public void switchPage(final NavBarTabId tabId) { findElement(By.cssSelector(".fa-cog")).click(); super.switchPage(tabId); } @Override public TopNavBarTab getSelectedTab() { final List<WebElement> activeTabs = $el().findElements(By.cssSelector("li.active")); if (activeTabs.size() != 1) { throw new IllegalStateException("Number of active tabs != 1"); } return new TopNavBarTab(activeTabs.get(0), getDriver()); } public void search(final String searchTerm) { findElement(By.cssSelector("[name='top-search']")).clear(); findElement(By.cssSelector("[name='top-search']")).sendKeys(searchTerm); findElement(By.cssSelector("[name='top-search']")).sendKeys(Keys.RETURN); loadOrFadeWait(); // new AppBody(getDriver()).getSearchPage().waitForSearchLoadIndicatorToDisappear(); } public void sideBarToggle() { findElement(By.cssSelector(".navbar-minimize")).click(); } public void notificationsDropdown() { findElement(By.className("count-info")).click(); } public String getSearchBarText() { return findElement(By.cssSelector("[name='top-search']")).getAttribute("value"); } public void showSideBar() { if (getDriver().findElement(By.cssSelector("body")).getAttribute("class").contains("hide-navbar")) { sideBarToggle(); } } }
src/main/java/com/autonomy/abc/selenium/menubar/TopNavBar.java
package com.autonomy.abc.selenium.menubar; import com.autonomy.abc.selenium.page.AppBody; import org.openqa.selenium.By; import org.openqa.selenium.Keys; import org.openqa.selenium.WebDriver; import org.openqa.selenium.WebElement; import java.util.List; public class TopNavBar extends TabBar { public TopNavBar(final WebDriver driver) { super(driver.findElement(By.cssSelector(".navbar-static-top")), driver); } @Override public TopNavBarTab getTab(final NavBarTabId id) { return new TopNavBarTab(this, id.toString()); } @Override public void switchPage(final NavBarTabId tabId) { findElement(By.cssSelector(".fa-cog")).click(); super.switchPage(tabId); } @Override public TopNavBarTab getSelectedTab() { final List<WebElement> activeTabs = $el().findElements(By.cssSelector("li.active")); if (activeTabs.size() != 1) { throw new IllegalStateException("Number of active tabs != 1"); } return new TopNavBarTab(activeTabs.get(0), getDriver()); } public void search(final String searchTerm) { findElement(By.cssSelector("[name='top-search']")).clear(); findElement(By.cssSelector("[name='top-search']")).sendKeys(searchTerm); findElement(By.cssSelector("[name='top-search']")).sendKeys(Keys.RETURN); loadOrFadeWait(); new AppBody(getDriver()).getSearchPage().waitForSearchLoadIndicatorToDisappear(); } public void sideBarToggle() { findElement(By.cssSelector(".navbar-minimize")).click(); } public void notificationsDropdown() { findElement(By.className("count-info")).click(); } public String getSearchBarText() { return findElement(By.cssSelector("[name='top-search']")).getAttribute("value"); } public void showSideBar() { if (getDriver().findElement(By.cssSelector("body")).getAttribute("class").contains("hide-navbar")) { sideBarToggle(); } } }
[abc] mockui remove reference to SearchPage [git-p4: depot-paths = "//depot/products/frontend/abc/master/": change = 1249433]
src/main/java/com/autonomy/abc/selenium/menubar/TopNavBar.java
[abc] mockui remove reference to SearchPage
Java
mit
804516cfeb63560c2a2b4e62abfec91121fb32f1
0
8-Bit-Warframe/Lost-Sector
package com.ezardlabs.lostsector.objects.menus; import com.ezardlabs.dethsquare.GameObject; import com.ezardlabs.dethsquare.GuiRenderer; import com.ezardlabs.dethsquare.GuiText; import com.ezardlabs.dethsquare.Input; import com.ezardlabs.dethsquare.Input.KeyCode; import com.ezardlabs.dethsquare.Script; import com.ezardlabs.dethsquare.TextureAtlas; import com.ezardlabs.dethsquare.Time; import com.ezardlabs.dethsquare.Vector2; public class Menu extends Script { private final String[] options; private final MenuAction[] actions; private final Vector2 offset; private GameObject[] pieces; private GameObject[] texts; private GuiText[] guiTexts; private GameObject[] highlights; private boolean startOpen = false; private boolean open = false; protected Menu() { options = getOptions(); actions = getActions(); offset = getOffset(); startOpen = shouldStartOpen(); } public Menu(String[] options, MenuAction[] actions) { this(options, actions, new Vector2(), false); } public Menu(String[] options, MenuAction[] actions, boolean open) { this(options, actions, new Vector2(), open); } public Menu(String[] options, MenuAction[] actions, Vector2 offset) { this(options, actions, offset, false); } public Menu(String[] options, MenuAction[] actions, Vector2 offset, boolean startOpen) { this.options = options; this.actions = actions; this.offset = offset; this.startOpen = startOpen; if (options.length < 2 || options.length > 5) { throw new IllegalArgumentException("You must supply between 2 and 5 options to the menu"); } } @Override public void start() { runWhenPaused = true; pieces = new GameObject[options.length]; pieces[0] = GameObject.instantiate( new GameObject("Menu Piece 0", new GuiRenderer("images/menus/main/top.png", 816, 156)), new Vector2()); for (int i = 1; i < pieces.length - 1; i++) { pieces[i] = GameObject.instantiate( new GameObject("Menu Piece " + i, new GuiRenderer("images/menus/main/middle.png", 816, 156)), new Vector2()); } pieces[pieces.length - 1] = GameObject.instantiate(new GameObject("Menu Piece " + (pieces.length - 1), new GuiRenderer("images/menus/main/bottom.png", 816, 144)), new Vector2()); texts = new GameObject[options.length]; guiTexts = new GuiText[options.length]; for (int i = 0; i < texts.length; i++) { guiTexts[i] = new GuiText(options[i], TextureAtlas.load("fonts"), 50, 1); texts[i] = GameObject.instantiate(new GameObject("Menu Option", guiTexts[i]), new Vector2()); } highlights = new GameObject[options.length]; for (int i = 0; i < highlights.length - 1; i++) { highlights[i] = GameObject.instantiate( new GameObject("Menu Highlight", new GuiRenderer("images/menus/main/highlight.png", 156, 66)), new Vector2()); } highlights[highlights.length - 1] = GameObject.instantiate( new GameObject("Menu Highlight", new GuiRenderer("images/menus/main/highlight_bottom.png", 414, 66)), new Vector2()); float height = (options.length - 1) * 156 + 144; for (int i = 0; i < options.length; i++) { pieces[i].transform.position.set(960 - 408, 540 - height / 2 + 156 * i); highlights[i].transform.position.set(pieces[i].transform.position.offset(i == texts.length - 1 ? 66 : 636, i == texts.length - 1 ? 54 : 25)); texts[i].transform.position.set( pieces[i].transform.position.offset(150, 40 + (i == texts.length - 1 ? 
28 : 0))); } if (startOpen) { open(); } else { close(); } } @Override public void update() { if (open && guiTexts != null) { for (int i = 0; i < guiTexts.length; i++) { if (guiTexts[i].hitTest(Input.mousePosition)) { highlights[i].setActive(true); if (Input.getKeyDown(KeyCode.MOUSE_LEFT)) { actions[i].onMenuItemSelected(this, i, options[i]); } } else { highlights[i].setActive(false); } } } } public void open() { open = true; for (GameObject piece : pieces) { piece.setActive(true); } for (GameObject highlight : highlights) { highlight.setActive(false); } for (GameObject text : texts) { text.setActive(true); } Time.pause(); } public void close() { open = false; for (GameObject piece : pieces) { piece.setActive(false); } for (GameObject highlight : highlights) { highlight.setActive(false); } for (GameObject text : texts) { text.setActive(false); } Time.resume(); } public void toggle() { if (isOpen()) { close(); } else { open(); } } public boolean isOpen() { return open; } protected String[] getOptions() { return new String[0]; } protected MenuAction[] getActions() { return new MenuAction[0]; } protected Vector2 getOffset() { return new Vector2(); } protected boolean shouldStartOpen() { return false; } public interface MenuAction { void onMenuItemSelected(Menu menu, int index, String text); } }
src/main/java/com/ezardlabs/lostsector/objects/menus/Menu.java
package com.ezardlabs.lostsector.objects.menus; import com.ezardlabs.dethsquare.GameObject; import com.ezardlabs.dethsquare.GuiRenderer; import com.ezardlabs.dethsquare.GuiText; import com.ezardlabs.dethsquare.Input; import com.ezardlabs.dethsquare.Input.KeyCode; import com.ezardlabs.dethsquare.Script; import com.ezardlabs.dethsquare.TextureAtlas; import com.ezardlabs.dethsquare.Time; import com.ezardlabs.dethsquare.Vector2; public class Menu extends Script { private static final TextureAtlas FONT = TextureAtlas.load("fonts"); private final String[] options; private final MenuAction[] actions; private final Vector2 offset; private GameObject[] pieces; private GameObject[] texts; private GuiText[] guiTexts; private GameObject[] highlights; private boolean startOpen = false; private boolean open = false; protected Menu() { options = getOptions(); actions = getActions(); offset = getOffset(); startOpen = shouldStartOpen(); } public Menu(String[] options, MenuAction[] actions) { this(options, actions, new Vector2(), false); } public Menu(String[] options, MenuAction[] actions, boolean open) { this(options, actions, new Vector2(), open); } public Menu(String[] options, MenuAction[] actions, Vector2 offset) { this(options, actions, offset, false); } public Menu(String[] options, MenuAction[] actions, Vector2 offset, boolean startOpen) { this.options = options; this.actions = actions; this.offset = offset; this.startOpen = startOpen; if (options.length < 2 || options.length > 5) { throw new IllegalArgumentException("You must supply between 2 and 5 options to the menu"); } } @Override public void start() { runWhenPaused = true; pieces = new GameObject[options.length]; pieces[0] = GameObject.instantiate( new GameObject("Menu Piece 0", new GuiRenderer("images/menus/main/top.png", 816, 156)), new Vector2()); for (int i = 1; i < pieces.length - 1; i++) { pieces[i] = GameObject.instantiate( new GameObject("Menu Piece " + i, new GuiRenderer("images/menus/main/middle.png", 816, 156)), new Vector2()); } pieces[pieces.length - 1] = GameObject.instantiate(new GameObject("Menu Piece " + (pieces.length - 1), new GuiRenderer("images/menus/main/bottom.png", 816, 144)), new Vector2()); texts = new GameObject[options.length]; guiTexts = new GuiText[options.length]; for (int i = 0; i < texts.length; i++) { guiTexts[i] = new GuiText(options[i], FONT, 50, 1); texts[i] = GameObject.instantiate(new GameObject("Menu Option", guiTexts[i]), new Vector2()); } highlights = new GameObject[options.length]; for (int i = 0; i < highlights.length - 1; i++) { highlights[i] = GameObject.instantiate( new GameObject("Menu Highlight", new GuiRenderer("images/menus/main/highlight.png", 156, 66)), new Vector2()); } highlights[highlights.length - 1] = GameObject.instantiate( new GameObject("Menu Highlight", new GuiRenderer("images/menus/main/highlight_bottom.png", 414, 66)), new Vector2()); float height = (options.length - 1) * 156 + 144; for (int i = 0; i < options.length; i++) { pieces[i].transform.position.set(960 - 408, 540 - height / 2 + 156 * i); highlights[i].transform.position.set(pieces[i].transform.position.offset(i == texts.length - 1 ? 66 : 636, i == texts.length - 1 ? 54 : 25)); texts[i].transform.position.set( pieces[i].transform.position.offset(150, 40 + (i == texts.length - 1 ? 
28 : 0))); } if (startOpen) { open(); } else { close(); } } @Override public void update() { if (open && guiTexts != null) { for (int i = 0; i < guiTexts.length; i++) { if (guiTexts[i].hitTest(Input.mousePosition)) { highlights[i].setActive(true); if (Input.getKeyDown(KeyCode.MOUSE_LEFT)) { actions[i].onMenuItemSelected(this, i, options[i]); } } else { highlights[i].setActive(false); } } } } public void open() { open = true; for (GameObject piece : pieces) { piece.setActive(true); } for (GameObject highlight : highlights) { highlight.setActive(false); } for (GameObject text : texts) { text.setActive(true); } Time.pause(); } public void close() { open = false; for (GameObject piece : pieces) { piece.setActive(false); } for (GameObject highlight : highlights) { highlight.setActive(false); } for (GameObject text : texts) { text.setActive(false); } Time.resume(); } public void toggle() { if (isOpen()) { close(); } else { open(); } } public boolean isOpen() { return open; } protected String[] getOptions() { return new String[0]; } protected MenuAction[] getActions() { return new MenuAction[0]; } protected Vector2 getOffset() { return new Vector2(); } protected boolean shouldStartOpen() { return false; } public interface MenuAction { void onMenuItemSelected(Menu menu, int index, String text); } }
Don't statically cache font, as this causes bugs when the level is changed
src/main/java/com/ezardlabs/lostsector/objects/menus/Menu.java
Don't statically cache font, as this causes bugs when the level is changed
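A dependency-free sketch of the caching pitfall behind this commit: a static final field is initialized once per class load, so a resource handle captured there goes stale when the engine reloads its assets on a level change, whereas loading at use time (as the new Menu code does with TextureAtlas.load) always sees the current asset. AssetStore here is a hypothetical stand-in for the engine's texture loader.

import java.util.HashMap;
import java.util.Map;

public class StaticCacheSketch {
    // Hypothetical stand-in for the engine's asset loader.
    static final class AssetStore {
        private static final Map<String, Object> assets = new HashMap<>();
        static { assets.put("fonts", new Object()); } // initial asset set
        static Object load(String name) { return assets.get(name); }
        static void reloadForNewLevel() {
            assets.put("fonts", new Object()); // fresh handle after a level change
        }
    }

    // Buggy pattern: captured once at class load, never refreshed afterwards.
    private static final Object CACHED_FONT = AssetStore.load("fonts");

    public static void main(String[] args) {
        AssetStore.reloadForNewLevel();
        Object current = AssetStore.load("fonts"); // load at use time instead
        System.out.println(CACHED_FONT == current); // false: the static cache is stale
    }
}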
Java
mit
6b81d1d5b465d030df945dcf57eed8d87eef69e2
0
invisiblecloud/invoice-capture-client
package com.invisiblecollector.model; import java.util.Date; /** * Model for searching debts * * <p>Implements a fluent builder API. */ public class FindDebtsBuilder extends Model { private static final String DATE_ERROR_MSG = "to_date must follow from_date"; /** * Set search start date * * @param fromDate the date. time is ignored. * @return this * @throws IllegalArgumentException if there is a to_date set with an earlier date */ public FindDebtsBuilder withFromDate(Date fromDate) { assertDateOrder(fromDate, getToDate(), DATE_ERROR_MSG); setDate("from_date", fromDate); return this; } /** * Set search end date * * @param toDate the date. time is ignored. * @return this * @throws IllegalArgumentException if there is a from_date with a later date */ public FindDebtsBuilder withToDate(Date toDate) { assertDateOrder(getFromDate(), toDate, DATE_ERROR_MSG); setDate("to_date", toDate); return this; } private static final String DUE_DATE_ERROR_MSG = "to_duedate must follow from_duedate"; /** * Set search start due date * * @param fromDueDate the date. time is ignored. * @return this * @throws IllegalArgumentException if there is a to_duedate with an earlier date */ public FindDebtsBuilder withFromDueDate(Date fromDueDate) { assertDateOrder(fromDueDate, getToDueDate(), DUE_DATE_ERROR_MSG); setDate("from_duedate", fromDueDate); return this; } /** * Set search end due date * * @param toDueDate the date. time is ignored. * @return this * @throws IllegalArgumentException if there is a from_duedate with a later date */ public FindDebtsBuilder withToDueDate(Date toDueDate) { assertDateOrder(getFromDueDate(), toDueDate, DUE_DATE_ERROR_MSG); setDate("to_duedate", toDueDate); return this; } public FindDebtsBuilder withNumber(String number) { fields.put("number", number); return this; } public Date getToDueDate() { return getDate("to_duedate"); } public Date getFromDueDate() { return getDate("from_duedate"); } public Date getFromDate() { return getDate("from_date"); } public Date getToDate() { return getDate("to_date"); } public String getNumber() { return getString("number"); } @Override public int hashCode() { pmdWorkaround(); return super.hashCode(); } @Override public boolean equals(Object obj) { if (!(obj instanceof FindDebtsBuilder)) { return false; } else if (this == obj) { return true; } else { return super.equals(obj); } } }
src/main/java/com/invisiblecollector/model/FindDebtsBuilder.java
package com.invisiblecollector.model; import java.util.Date; /** * Model for searching debts * * <p>Implements a fluent builder API. */ public class FindDebtsBuilder extends Model { private static final String DATE_ERROR_MSG = "to_date must follow from_date"; /** * Set search start date * * @param fromDate the date. time is ignored. * @return this * @throws IllegalArgumentException if there is a to_date set with an earlier date */ public FindDebtsBuilder withFromDate(Date fromDate) { assertDateOrder(fromDate, getToDate(), DATE_ERROR_MSG); setDate("from_date", fromDate); return this; } /** * Set search end date * * @param toDate the date. time is ignored. * @return this * @throws IllegalArgumentException if there is a from_date with a later date */ public FindDebtsBuilder withToDate(Date toDate) { assertDateOrder(getFromDate(), toDate, DATE_ERROR_MSG); setDate("to_date", toDate); return this; } private static final String DUE_DATE_ERROR_MSG = "to_duedate must follow from_duedate"; /** * Set search start due date * * @param fromDueDate the date. time is ignored. * @return this * @throws IllegalArgumentException if there is a to_duedate with an earlier date */ public FindDebtsBuilder withFromDueDate(Date fromDueDate) { assertDateOrder(fromDueDate, getToDueDate(), DUE_DATE_ERROR_MSG); setDate("from_duedate", fromDueDate); return this; } /** * Set search end due date * * @param toDueDate the date. time is ignored. * @return this * @throws IllegalArgumentException if there is a from_duedate with a later date */ public FindDebtsBuilder withToDueDate(Date toDueDate) { assertDateOrder(getFromDueDate(), toDueDate, DUE_DATE_ERROR_MSG); setDate("to_duedate", toDueDate); return this; } public FindDebtsBuilder withNumber(String number) { fields.put("number", number); return this; } public FindDebtsBuilder withShowDebits(Boolean showDebits) { fields.put("show_debits", showDebits); return this; } public Date getToDueDate() { return getDate("to_duedate"); } public Date getFromDueDate() { return getDate("from_duedate"); } public Date getFromDate() { return getDate("from_date"); } public Date getToDate() { return getDate("to_date"); } public String getNumber() { return getString("number"); } public Boolean getShowDebits() { return getBoolean("show_debits"); } @Override public int hashCode() { pmdWorkaround(); return super.hashCode(); } @Override public boolean equals(Object obj) { if (!(obj instanceof FindDebtsBuilder)) { return false; } else if (this == obj) { return true; } else { return super.equals(obj); } } }
remove extra find debts builder model fields
src/main/java/com/invisiblecollector/model/FindDebtsBuilder.java
remove extra find debts builder model fields
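A usage sketch of the fluent API shown in this record, assuming FindDebtsBuilder is on the classpath: searches are assembled by chaining, and the with*Date setters validate ordering, so a to_date earlier than an existing from_date fails fast. The document number is illustrative.

import com.invisiblecollector.model.FindDebtsBuilder;

import java.util.Date;

public class FindDebtsUsageSketch {
    public static void main(String[] args) {
        Date start = new Date(0L);        // epoch
        Date end = new Date(86_400_000L); // one day later

        FindDebtsBuilder search = new FindDebtsBuilder()
                .withFromDate(start)
                .withToDate(end)
                .withNumber("INV-001"); // hypothetical document number
        System.out.println(search.getNumber());

        try {
            new FindDebtsBuilder().withFromDate(end).withToDate(start);
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage()); // order check rejects the reversed range
        }
    }
}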
Java
epl-1.0
41c13e25b7fb17c3b6105081cf9ff97049ff1da5
0
gnodet/wikitext
/******************************************************************************* * Copyright (c) 2004 - 2005 University Of British Columbia and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * University Of British Columbia - initial API and implementation *******************************************************************************/ /* * Created on Jul 22, 2004 */ package org.eclipse.mylar.core.internal; import java.io.Serializable; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import org.eclipse.mylar.core.IMylarContext; import org.eclipse.mylar.core.IMylarElement; import org.eclipse.mylar.core.InteractionEvent; import org.eclipse.mylar.core.internal.dt.MylarInterest; /** * @author Mik Kersten */ public class MylarContext implements IMylarContext, Serializable { private static final long serialVersionUID = 1L; private String id; private List<InteractionEvent> interactionHistory = new ArrayList<InteractionEvent>(); protected transient Map<String, MylarContextElement> nodes = new HashMap<String, MylarContextElement>(); protected transient MylarContextElement activeNode = null; protected transient List tempRaised = new ArrayList(); protected transient Map<String, IMylarElement> landmarks; protected transient ScalingFactors scaling; private transient InteractionEvent lastEdgeEvent = null; private transient MylarContextElement lastEdgeNode = null; private transient int numUserEvents = 0; public MylarContext() { // only needed for serialization } void parseInteractionHistory() { nodes = new HashMap<String, MylarContextElement>(); landmarks = new HashMap<String, IMylarElement>(); for (InteractionEvent event : interactionHistory) parseInteractionEvent(event); updateLandmarks(); activeNode = lastEdgeNode; } public MylarContext(String id, ScalingFactors scaling) { this.id = id; this.scaling = scaling; parseInteractionHistory(); } public IMylarElement parseEvent(InteractionEvent event) { interactionHistory.add(event); return parseInteractionEvent(event); } /** * Propagations and predictions are not added as edges */ @MylarInterest(level = MylarInterest.Level.LANDMARK) private IMylarElement parseInteractionEvent(InteractionEvent event) { if (event.getKind().isUserEvent()) numUserEvents++; MylarContextElement node = nodes.get(event.getStructureHandle()); if (node == null) { node = new MylarContextElement(event.getContentType(), event.getStructureHandle(), this); nodes.put(event.getStructureHandle(), node); } if (event.getNavigation() != null && !event.getNavigation().equals("null") && lastEdgeEvent != null && lastEdgeNode != null && event.getKind() != InteractionEvent.Kind.PROPAGATION && event.getKind() != InteractionEvent.Kind.PREDICTION) { IMylarElement navigationSource = nodes.get(lastEdgeEvent.getStructureHandle()); if (navigationSource != null) { MylarContextRelation edge = lastEdgeNode.getRelation(event.getStructureHandle()); if (edge == null) { edge = new MylarContextRelation(event.getContentType(), event.getNavigation(), lastEdgeNode, node, this); lastEdgeNode.addEdge(edge); } DegreeOfInterest doi = (DegreeOfInterest) edge.getInterest(); doi.addEvent(event); } } DegreeOfInterest doi = (DegreeOfInterest) node.getInterest(); doi.addEvent(event); if (doi.isLandmark()) {
landmarks.put(node.getHandleIdentifier(), node); } else { landmarks.remove(node.getHandleIdentifier()); // TODO: redundant } if (event.getKind().isUserEvent()) { lastEdgeEvent = event; lastEdgeNode = node; activeNode = node; } return node; } private void updateLandmarks() { // landmarks = new HashMap<String, ITaskscapeNode>(); for (MylarContextElement node : nodes.values()) { if (node.getInterest().isLandmark()) landmarks.put(node.getHandleIdentifier(), node); } } public IMylarElement get(String elementHandle) { return nodes.get(elementHandle); } public List<IMylarElement> getInteresting() { List<IMylarElement> elements = new ArrayList<IMylarElement>(); for (String key : new ArrayList<String>(nodes.keySet())) { // in case it changes during update MylarContextElement info = nodes.get(key); if (info.getInterest().isInteresting()) { elements.add(info); } } return elements; } public List<IMylarElement> getLandmarks() { return Collections.unmodifiableList(new ArrayList<IMylarElement>(landmarks.values())); } public void updateElementHandle(IMylarElement element, String newHandle) { MylarContextElement currElement = nodes.remove(element.getHandleIdentifier()); if (currElement != null) { currElement.setHandleIdentifier(newHandle); nodes.put(newHandle, currElement); } } public IMylarElement getActiveNode() { return activeNode; } /** * @param handleIdentifier */ public void delete(IMylarElement node) { landmarks.remove(node.getHandleIdentifier()); nodes.remove(node.getHandleIdentifier()); } public synchronized List<IMylarElement> getAllElements() { return new ArrayList<IMylarElement>(nodes.values()); } public String getId() { return id; } @Override public String toString() { return id; } public void reset() { interactionHistory.clear(); nodes.clear(); interactionHistory.clear(); landmarks.clear(); activeNode = null; } public int getUserEventCount() { return numUserEvents; } /** * TODO: make unmodifiable? */ public List<InteractionEvent> getInteractionHistory() { return interactionHistory; } public void collapse() { List<InteractionEvent> collapsedHistory = new ArrayList<InteractionEvent>(); for (MylarContextElement node : nodes.values()) { if (!node.equals(activeNode)) { collapseNode(collapsedHistory, node); } } collapseNode(collapsedHistory, activeNode); interactionHistory.clear(); interactionHistory.addAll(collapsedHistory); } private void collapseNode(List<InteractionEvent> collapsedHistory, MylarContextElement node) { if (node != null) { collapsedHistory.addAll(((DegreeOfInterest) node.getInterest()).getCollapsedEvents()); } } }
org.eclipse.mylyn.context.core/src/org/eclipse/mylyn/core/internal/MylarContext.java
/******************************************************************************* * Copyright (c) 2004 - 2005 University Of British Columbia and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * University Of British Columbia - initial API and implementation *******************************************************************************/ /* * Created on Jul 22, 2004 */ package org.eclipse.mylar.core.internal; import java.io.Serializable; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import org.eclipse.mylar.core.IMylarContext; import org.eclipse.mylar.core.IMylarElement; import org.eclipse.mylar.core.InteractionEvent; import org.eclipse.mylar.core.internal.dt.MylarInterest; /** * @author Mik Kersten */ public class MylarContext implements IMylarContext, Serializable { private static final long serialVersionUID = 1L; private String id; private List<InteractionEvent> interactionHistory = new ArrayList<InteractionEvent>(); protected transient Map<String, MylarContextElement> nodes = new HashMap<String, MylarContextElement>(); protected transient MylarContextElement activeNode = null; protected transient List tempRaised = new ArrayList(); protected transient Map<String, IMylarElement> landmarks; protected transient ScalingFactors scaling; private transient InteractionEvent lastEdgeEvent = null; private transient MylarContextElement lastEdgeNode = null; private transient int numUserEvents = 0; public MylarContext() { // only needed for serialization } void parseInteractionHistory() { nodes = new HashMap<String, MylarContextElement>(); landmarks = new HashMap<String, IMylarElement>(); for (InteractionEvent event : interactionHistory) parseInteractionEvent(event); updateLandmarks(); activeNode = lastEdgeNode; } public MylarContext(String id, ScalingFactors scaling) { this.id = id; this.scaling = scaling; parseInteractionHistory(); } public IMylarElement parseEvent(InteractionEvent event) { interactionHistory.add(event); return parseInteractionEvent(event); } /** * Propagations and predictions are not added as edges */ @MylarInterest(level = MylarInterest.Level.LANDMARK) private IMylarElement parseInteractionEvent(InteractionEvent event) { if (event.getKind().isUserEvent()) numUserEvents++; MylarContextElement node = nodes.get(event.getStructureHandle()); if (node == null) { node = new MylarContextElement(event.getContentType(), event.getStructureHandle(), this); nodes.put(event.getStructureHandle(), node); } if (event.getNavigation() != null && !event.getNavigation().equals("null") && lastEdgeEvent != null && lastEdgeNode != null && event.getKind() != InteractionEvent.Kind.PROPAGATION && event.getKind() != InteractionEvent.Kind.PREDICTION) { IMylarElement navigationSource = nodes.get(lastEdgeEvent.getStructureHandle()); if (navigationSource != null) { MylarContextRelation edge = lastEdgeNode.getRelation(event.getStructureHandle()); if (edge == null) { edge = new MylarContextRelation(event.getContentType(), event.getNavigation(), lastEdgeNode, node, this); lastEdgeNode.addEdge(edge); } DegreeOfInterest doi = (DegreeOfInterest) edge.getInterest(); doi.addEvent(event); } } DegreeOfInterest doi = (DegreeOfInterest) node.getInterest(); doi.addEvent(event); if (doi.isLandmark()) {
landmarks.put(node.getHandleIdentifier(), node); } else { landmarks.remove(node.getHandleIdentifier()); // TODO: redundant } if (event.getKind().isUserEvent()) { lastEdgeEvent = event; lastEdgeNode = node; activeNode = node; } return node; } private void updateLandmarks() { // landmarks = new HashMap<String, ITaskscapeNode>(); for (MylarContextElement node : nodes.values()) { if (node.getInterest().isLandmark()) landmarks.put(node.getHandleIdentifier(), node); } } public IMylarElement get(String elementHandle) { return nodes.get(elementHandle); } public List<IMylarElement> getInteresting() { List<IMylarElement> elements = new ArrayList<IMylarElement>(); for (String key : new ArrayList<String>(nodes.keySet())) { // in case it changes MylarContextElement info = nodes.get(key); if (info.getInterest().isInteresting()) { elements.add(info); } } return elements; } public List<IMylarElement> getLandmarks() { return Collections.unmodifiableList(new ArrayList<IMylarElement>(landmarks.values())); } public void updateElementHandle(IMylarElement element, String newHandle) { MylarContextElement currElement = nodes.remove(element.getHandleIdentifier()); if (currElement != null) { currElement.setHandleIdentifier(newHandle); nodes.put(newHandle, currElement); } } public IMylarElement getActiveNode() { return activeNode; } /** * @param handleIdentifier */ public void delete(IMylarElement node) { landmarks.remove(node.getHandleIdentifier()); nodes.remove(node.getHandleIdentifier()); } public synchronized List<IMylarElement> getAllElements() { return new ArrayList<IMylarElement>(nodes.values()); } public String getId() { return id; } @Override public String toString() { return id; } public void reset() { interactionHistory.clear(); nodes.clear(); interactionHistory.clear(); landmarks.clear(); activeNode = null; } public int getUserEventCount() { return numUserEvents; } /** * TODO: make unmodifiable? */ public List<InteractionEvent> getInteractionHistory() { return interactionHistory; } public void collapse() { List<InteractionEvent> collapsedHistory = new ArrayList<InteractionEvent>(); for (MylarContextElement node : nodes.values()) { if (!node.equals(activeNode)) { collapseNode(collapsedHistory, node); } } collapseNode(collapsedHistory, activeNode); interactionHistory.clear(); interactionHistory.addAll(collapsedHistory); } private void collapseNode(List<InteractionEvent> collapsedHistory, MylarContextElement node) { if (node != null) { collapsedHistory.addAll(((DegreeOfInterest) node.getInterest()).getCollapsedEvents()); } } }
Progress on: Bug 119380: MyLar Seems to Block and make Eclipse unusable when running external build https://bugs.eclipse.org/bugs/show_bug.cgi?id=119380
org.eclipse.mylyn.context.core/src/org/eclipse/mylyn/core/internal/MylarContext.java
Progress on: Bug 119380: MyLar Seems to Block and make Eclipse unusable when running external build https://bugs.eclipse.org/bugs/show_bug.cgi?id=119380
Java
epl-1.0
51dcf3e583f11f94fac41b51cb4de78453b0abed
0
ELTE-Soft/xUML-RT-Executor
package hu.eltesoft.modelexecution.cli; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.ObjectOutputStream; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; import java.util.AbstractMap; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.tools.DiagnosticCollector; import javax.tools.JavaCompiler; import javax.tools.JavaFileObject; import javax.tools.StandardJavaFileManager; import javax.tools.ToolProvider; import org.eclipse.emf.common.util.TreeIterator; import org.eclipse.emf.common.util.URI; import org.eclipse.emf.ecore.EObject; import org.eclipse.emf.ecore.EPackage; import org.eclipse.emf.ecore.resource.Resource; import org.eclipse.incquery.runtime.exception.IncQueryException; import org.eclipse.papyrus.infra.core.resource.ModelSet; import org.eclipse.uml2.uml.Class; import org.eclipse.uml2.uml.Operation; import org.eclipse.uml2.uml.UMLPackage; import org.eclipse.uml2.uml.resource.UMLResource; import hu.eltesoft.modelexecution.cli.exceptions.CliIncQueryException; import hu.eltesoft.modelexecution.cli.exceptions.CliJavaCompilerException; import hu.eltesoft.modelexecution.cli.exceptions.CliRuntimeException; import hu.eltesoft.modelexecution.cli.exceptions.FileWriteException; import hu.eltesoft.modelexecution.cli.exceptions.JavaFileGenerationError; import hu.eltesoft.modelexecution.cli.exceptions.MissingJavaCompilerException; import hu.eltesoft.modelexecution.cli.exceptions.ModelLoadFailedException; import hu.eltesoft.modelexecution.cli.exceptions.RootDirCreationFailed; import hu.eltesoft.modelexecution.filemanager.FileManager; import hu.eltesoft.modelexecution.m2m.logic.SourceCodeChangeListener; import hu.eltesoft.modelexecution.m2m.logic.SourceCodeTask; import hu.eltesoft.modelexecution.m2m.logic.translators.ResourceTranslator; import hu.eltesoft.modelexecution.m2m.metamodel.base.NamedReference; import hu.eltesoft.modelexecution.m2t.java.DebugSymbols; import hu.eltesoft.modelexecution.m2t.smap.xtend.SourceMappedText; public class StandaloneModelCompiler { private static final String ACTION_LANGUAGE_PATHMAP = "pathmap://PAPYRUS_ACTIONLANGUAGE_PROFILE/"; private static final String RALF_LIBRARY_PATHMAP = "pathmap://RALF/library.uml"; private static final String XUMLRT_PROFILE_PATHMAP = "pathmap://XUMLRT_PROFILE/"; private ConsoleLogger logger; public StandaloneModelCompiler(ConsoleLogger logger) { this.logger = logger; } public void compileModel(String modelPath, String rootDir) { List<String> generatedJavaFiles = generateSources(modelPath, rootDir); compileSources(rootDir, generatedJavaFiles); } private List<String> generateSources(String modelPath, String rootDir) { List<String> generatedFiles = new ArrayList<>(); try { logger.verboseTimeMsg(Messages.COMPILING_MODEL_TO_JAVA); ModelSet modelSet = new ModelSet(); registerUmlResourceType(modelSet); registerPathMaps(modelSet); registerReducedAlfLanguage(); logger.verboseTimeMsg(Messages.LOADING_MODEL, modelPath); URI fileURI = URI.createFileURI(modelPath); Resource resource = modelSet.getResource(fileURI, true); if (resource == null) { throw new ModelLoadFailedException(modelPath); } createRootDirIfNeeded(rootDir); FileManager fileMan = new FileManager(rootDir); boolean[] anyErrorsDuringGeneration = { false }; SourceCodeChangeListener listener = new SourceCodeChangeListener() { @Override public void sourceCodeChanged(String qualifiedName, SourceMappedText smTxt, DebugSymbols symbols) { String fileText = 
smTxt.getText().toString(); try { String path = fileMan.addOrUpdate(qualifiedName, fileText); generatedFiles.add(path); } catch (IOException e) { logger.verboseTimeMsg(Messages.JAVA_FILE_SAVE_FAILED, qualifiedName); anyErrorsDuringGeneration[0] = true; } }; @Override public void sourceCodeDeleted(String qualifiedName) { fileMan.remove(qualifiedName); } }; ResourceTranslator translator = ResourceTranslator.create(modelSet); List<SourceCodeTask> taskQueue = translator.fullTranslation(); logger.verboseTimeMsg(Messages.ANALYSING_MODEL); taskQueue.forEach(t -> t.perform(listener)); saveNameMapping(rootDir, resource); if (anyErrorsDuringGeneration[0]) { throw new JavaFileGenerationError(); } return generatedFiles; } catch (RuntimeException e) { if (e.getCause() instanceof IncQueryException) { throw new CliIncQueryException((IncQueryException) e.getCause()); } throw e; } } /* * @return EClass-EOperation name pairs in the model are mapped onto their * internal representations. */ private Map<AbstractMap.SimpleImmutableEntry<String, String>, AbstractMap.SimpleImmutableEntry<String, String>> getNameMapping( String rootDir, Resource resource) { Map<AbstractMap.SimpleImmutableEntry<String, String>, AbstractMap.SimpleImmutableEntry<String, String>> classAndOpMapping = new HashMap<>(); TreeIterator<EObject> eObjIt = resource.getAllContents(); while (eObjIt.hasNext()) { EObject eObj = eObjIt.next(); if (!(eObj instanceof Class)) { continue; } Class eClass = (Class) eObj; String eClassId = NamedReference.getIdentifier(eClass); for (Operation eOperation : eClass.getAllOperations()) { String eOperationId = NamedReference.getIdentifier(eOperation); classAndOpMapping.put(new AbstractMap.SimpleImmutableEntry<>(eClass.getName(), eOperation.getName()), new AbstractMap.SimpleImmutableEntry<>(eClassId, eOperationId)); } } return classAndOpMapping; } private void saveNameMapping(String rootDir, Resource resource) { Map<AbstractMap.SimpleImmutableEntry<String, String>, AbstractMap.SimpleImmutableEntry<String, String>> nameMapping = getNameMapping( rootDir, resource); File mappingFile = new File(rootDir, StandaloneModelExecutor.MAPPING_FILE_NAME); try (ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(mappingFile));) { oos.writeObject(nameMapping); } catch (IOException e) { e.printStackTrace(); throw new FileWriteException(mappingFile.getAbsolutePath()); } } private void compileSources(String rootDir, List<String> generatedJavaFiles) { logger.verboseTimeMsg(Messages.COMPILING_JAVA_TO_CLASS); JavaCompiler compiler = ToolProvider.getSystemJavaCompiler(); if (compiler == null) { throw new MissingJavaCompilerException(); } DiagnosticCollector<JavaFileObject> diagnostics = new DiagnosticCollector<JavaFileObject>(); try (StandardJavaFileManager fileManager = compiler.getStandardFileManager(diagnostics, null, null);) { Iterable<? 
extends JavaFileObject> compilationUnits = fileManager .getJavaFileObjectsFromStrings(generatedJavaFiles); List<String> compilationOptions = null; JavaCompiler.CompilationTask task = compiler.getTask(null, fileManager, diagnostics, compilationOptions, null, compilationUnits); boolean success = task.call(); if (!success) { throw new CliJavaCompilerException(diagnostics.getDiagnostics()); } } catch (IOException e) { throw new CliJavaCompilerException(); } } private void createRootDirIfNeeded(String rootDirName) { if (rootDirName == null) { return; } File rootDir = new File(rootDirName); if (rootDir.exists()) { logger.verboseTimeMsg(Messages.USING_EXISTING_ROOT_DIR, rootDirName); } else { logger.verboseTimeMsg(Messages.CREATING_ROOT_DIR, rootDirName); boolean success = rootDir.mkdir(); if (!success) { throw new RootDirCreationFailed(rootDirName); } } } private void registerUmlResourceType(ModelSet modelSet) { modelSet.getPackageRegistry().put(UMLPackage.eNS_URI, UMLPackage.eINSTANCE); Resource.Factory.Registry.INSTANCE.getExtensionToFactoryMap().put(UMLResource.FILE_EXTENSION, UMLResource.Factory.INSTANCE); } private void registerPathMaps(ModelSet modelSet) { String jarPath; try { jarPath = URLDecoder.decode( StandaloneModelCompiler.class.getProtectionDomain().getCodeSource().getLocation().getPath(), "UTF-8"); } catch (UnsupportedEncodingException e) { throw new CliRuntimeException(e); } Map<URI, URI> uriMap = modelSet.getURIConverter().getURIMap(); uriMap.clear(); URI uri = URI.createURI("jar:file:" + jarPath + "!/"); uriMap.put(URI.createURI(UMLResource.LIBRARIES_PATHMAP), uri.appendSegment("libraries").appendSegment("")); uriMap.put(URI.createURI(UMLResource.METAMODELS_PATHMAP), uri.appendSegment("metamodels").appendSegment("")); uriMap.put(URI.createURI(UMLResource.PROFILES_PATHMAP), uri.appendSegment("profiles").appendSegment("")); uriMap.put(URI.createURI(ACTION_LANGUAGE_PATHMAP), uri.appendSegment("resources").appendSegment("action-language-profile").appendSegment("")); uriMap.put(URI.createURI(RALF_LIBRARY_PATHMAP), uri.appendSegment("model").appendSegment("collections").appendSegment("collections.uml")); uriMap.put(URI.createURI(XUMLRT_PROFILE_PATHMAP), uri.appendSegment("profile").appendSegment("")); } private void registerReducedAlfLanguage() { if (!Resource.Factory.Registry.INSTANCE.getExtensionToFactoryMap().containsKey("ecore")) { Resource.Factory.Registry.INSTANCE.getExtensionToFactoryMap().put("ecore", new org.eclipse.emf.ecore.xmi.impl.EcoreResourceFactoryImpl()); } if (!Resource.Factory.Registry.INSTANCE.getExtensionToFactoryMap().containsKey("xmi")) { Resource.Factory.Registry.INSTANCE.getExtensionToFactoryMap().put("xmi", new org.eclipse.emf.ecore.xmi.impl.XMIResourceFactoryImpl()); } if (!Resource.Factory.Registry.INSTANCE.getExtensionToFactoryMap().containsKey("xtextbin")) { Resource.Factory.Registry.INSTANCE.getExtensionToFactoryMap().put("xtextbin", new org.eclipse.xtext.resource.impl.BinaryGrammarResourceFactoryImpl()); } if (!EPackage.Registry.INSTANCE.containsKey(org.eclipse.xtext.XtextPackage.eNS_URI)) { EPackage.Registry.INSTANCE.put(org.eclipse.xtext.XtextPackage.eNS_URI, org.eclipse.xtext.XtextPackage.eINSTANCE); } if (!EPackage.Registry.INSTANCE.containsKey("http://www.incquerylabs.com/uml/ralf/ReducedAlfLanguage")) { EPackage.Registry.INSTANCE.put("http://www.incquerylabs.com/uml/ralf/ReducedAlfLanguage", com.incquerylabs.uml.ralf.reducedAlfLanguage.ReducedAlfLanguagePackage.eINSTANCE); } } }
plugins/hu.eltesoft.modelexecution.cli/src/hu/eltesoft/modelexecution/cli/StandaloneModelCompiler.java
package hu.eltesoft.modelexecution.cli;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.tools.DiagnosticCollector;
import javax.tools.JavaCompiler;
import javax.tools.JavaFileObject;
import javax.tools.StandardJavaFileManager;
import javax.tools.ToolProvider;

import org.eclipse.emf.common.util.TreeIterator;
import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.ecore.resource.Resource;
import org.eclipse.emf.ecore.resource.ResourceSet;
import org.eclipse.emf.ecore.resource.impl.ResourceSetImpl;
import org.eclipse.incquery.runtime.exception.IncQueryException;
import org.eclipse.papyrus.infra.core.resource.ModelSet;
import org.eclipse.uml2.uml.Class;
import org.eclipse.uml2.uml.Operation;
import org.eclipse.uml2.uml.UMLPackage;
import org.eclipse.uml2.uml.resource.UMLResource;

import hu.eltesoft.modelexecution.cli.exceptions.CliIncQueryException;
import hu.eltesoft.modelexecution.cli.exceptions.CliJavaCompilerException;
import hu.eltesoft.modelexecution.cli.exceptions.CliRuntimeException;
import hu.eltesoft.modelexecution.cli.exceptions.FileWriteException;
import hu.eltesoft.modelexecution.cli.exceptions.JavaFileGenerationError;
import hu.eltesoft.modelexecution.cli.exceptions.MissingJavaCompilerException;
import hu.eltesoft.modelexecution.cli.exceptions.ModelLoadFailedException;
import hu.eltesoft.modelexecution.cli.exceptions.RootDirCreationFailed;
import hu.eltesoft.modelexecution.filemanager.FileManager;
import hu.eltesoft.modelexecution.m2m.logic.SourceCodeChangeListener;
import hu.eltesoft.modelexecution.m2m.logic.SourceCodeTask;
import hu.eltesoft.modelexecution.m2m.logic.translators.ResourceTranslator;
import hu.eltesoft.modelexecution.m2m.metamodel.base.NamedReference;
import hu.eltesoft.modelexecution.m2t.java.DebugSymbols;
import hu.eltesoft.modelexecution.m2t.smap.xtend.SourceMappedText;

public class StandaloneModelCompiler {

    private static final String ACTION_LANGUAGE_PATHMAP = "pathmap://PAPYRUS_ACTIONLANGUAGE_PROFILE/";
    private static final String RALF_LIBRARY_PATHMAP = "pathmap://RALF/library.uml";
    private static final String XUMLRT_PROFILE_PATHMAP = "pathmap://XUMLRT_PROFILE/";

    private ConsoleLogger logger;

    public StandaloneModelCompiler(ConsoleLogger logger) {
        this.logger = logger;
    }

    public void compileModel(String modelPath, String rootDir) {
        List<String> generatedJavaFiles = generateSources(modelPath, rootDir);
        compileSources(rootDir, generatedJavaFiles);
    }

    private List<String> generateSources(String modelPath, String rootDir) {
        List<String> generatedFiles = new ArrayList<>();
        try {
            logger.verboseTimeMsg(Messages.COMPILING_MODEL_TO_JAVA);
            registerUmlResourceType();
            registerPathMaps();
            registerReducedAlfLanguage();
            logger.verboseTimeMsg(Messages.LOADING_MODEL, modelPath);
            URI fileURI = URI.createFileURI(modelPath);
            ModelSet modelSet = new ModelSet();
            Resource resource = modelSet.getResource(fileURI, true);
            if (resource == null) {
                throw new ModelLoadFailedException(modelPath);
            }
            createRootDirIfNeeded(rootDir);
            FileManager fileMan = new FileManager(rootDir);
            boolean[] anyErrorsDuringGeneration = { false };
            SourceCodeChangeListener listener = new SourceCodeChangeListener() {
                @Override
                public void sourceCodeChanged(String qualifiedName, SourceMappedText smTxt, DebugSymbols symbols) {
                    String fileText = smTxt.getText().toString();
                    try {
                        String path = fileMan.addOrUpdate(qualifiedName, fileText);
                        generatedFiles.add(path);
                    } catch (IOException e) {
                        logger.verboseTimeMsg(Messages.JAVA_FILE_SAVE_FAILED, qualifiedName);
                        anyErrorsDuringGeneration[0] = true;
                    }
                }

                @Override
                public void sourceCodeDeleted(String qualifiedName) {
                    fileMan.remove(qualifiedName);
                }
            };
            ResourceTranslator translator = ResourceTranslator.create(modelSet);
            List<SourceCodeTask> taskQueue = translator.fullTranslation();
            logger.verboseTimeMsg(Messages.ANALYSING_MODEL);
            taskQueue.forEach(t -> t.perform(listener));
            saveNameMapping(rootDir, resource);
            if (anyErrorsDuringGeneration[0]) {
                throw new JavaFileGenerationError();
            }
            return generatedFiles;
        } catch (RuntimeException e) {
            if (e.getCause() instanceof IncQueryException) {
                throw new CliIncQueryException((IncQueryException) e.getCause());
            }
            throw e;
        }
    }

    /*
     * @return EClass-EOperation name pairs in the model are mapped onto their
     * internal representations.
     */
    private Map<AbstractMap.SimpleImmutableEntry<String, String>, AbstractMap.SimpleImmutableEntry<String, String>> getNameMapping(
            String rootDir, Resource resource) {
        Map<AbstractMap.SimpleImmutableEntry<String, String>, AbstractMap.SimpleImmutableEntry<String, String>> classAndOpMapping = new HashMap<>();
        TreeIterator<EObject> eObjIt = resource.getAllContents();
        while (eObjIt.hasNext()) {
            EObject eObj = eObjIt.next();
            if (!(eObj instanceof Class)) {
                continue;
            }
            Class eClass = (Class) eObj;
            String eClassId = NamedReference.getIdentifier(eClass);
            for (Operation eOperation : eClass.getAllOperations()) {
                String eOperationId = NamedReference.getIdentifier(eOperation);
                classAndOpMapping.put(new AbstractMap.SimpleImmutableEntry<>(eClass.getName(), eOperation.getName()),
                        new AbstractMap.SimpleImmutableEntry<>(eClassId, eOperationId));
            }
        }
        return classAndOpMapping;
    }

    private void saveNameMapping(String rootDir, Resource resource) {
        Map<AbstractMap.SimpleImmutableEntry<String, String>, AbstractMap.SimpleImmutableEntry<String, String>> nameMapping = getNameMapping(
                rootDir, resource);
        File mappingFile = new File(rootDir, StandaloneModelExecutor.MAPPING_FILE_NAME);
        try (ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(mappingFile))) {
            oos.writeObject(nameMapping);
        } catch (IOException e) {
            e.printStackTrace();
            throw new FileWriteException(mappingFile.getAbsolutePath());
        }
    }

    private void compileSources(String rootDir, List<String> generatedJavaFiles) {
        logger.verboseTimeMsg(Messages.COMPILING_JAVA_TO_CLASS);
        JavaCompiler compiler = ToolProvider.getSystemJavaCompiler();
        if (compiler == null) {
            throw new MissingJavaCompilerException();
        }
        DiagnosticCollector<JavaFileObject> diagnostics = new DiagnosticCollector<JavaFileObject>();
        try (StandardJavaFileManager fileManager = compiler.getStandardFileManager(diagnostics, null, null)) {
            Iterable<? extends JavaFileObject> compilationUnits = fileManager
                    .getJavaFileObjectsFromStrings(generatedJavaFiles);
            List<String> compilationOptions = null;
            JavaCompiler.CompilationTask task = compiler.getTask(null, fileManager, diagnostics, compilationOptions,
                    null, compilationUnits);
            boolean success = task.call();
            if (!success) {
                throw new CliJavaCompilerException(diagnostics.getDiagnostics());
            }
        } catch (IOException e) {
            throw new CliJavaCompilerException();
        }
    }

    private void createRootDirIfNeeded(String rootDirName) {
        if (rootDirName == null) {
            return;
        }
        File rootDir = new File(rootDirName);
        if (rootDir.exists()) {
            logger.verboseTimeMsg(Messages.USING_EXISTING_ROOT_DIR, rootDirName);
        } else {
            logger.verboseTimeMsg(Messages.CREATING_ROOT_DIR, rootDirName);
            boolean success = rootDir.mkdir();
            if (!success) {
                throw new RootDirCreationFailed(rootDirName);
            }
        }
    }

    private void registerUmlResourceType() {
        new ResourceSetImpl().getPackageRegistry().put(UMLPackage.eNS_URI, UMLPackage.eINSTANCE);
        Resource.Factory.Registry.INSTANCE.getExtensionToFactoryMap().put(UMLResource.FILE_EXTENSION,
                UMLResource.Factory.INSTANCE);
    }

    private void registerPathMaps() {
        String jarPath;
        try {
            jarPath = URLDecoder.decode(
                    StandaloneModelCompiler.class.getProtectionDomain().getCodeSource().getLocation().getPath(),
                    "UTF-8");
        } catch (UnsupportedEncodingException e) {
            throw new CliRuntimeException(e);
        }
        ResourceSet resourceSet = new ResourceSetImpl();
        Map<URI, URI> uriMap = resourceSet.getURIConverter().getURIMap();
        uriMap.clear();
        URI uri = URI.createURI("jar:file:" + jarPath + "!/");
        uriMap.put(URI.createURI(UMLResource.LIBRARIES_PATHMAP), uri.appendSegment("libraries").appendSegment(""));
        uriMap.put(URI.createURI(UMLResource.METAMODELS_PATHMAP), uri.appendSegment("metamodels").appendSegment(""));
        uriMap.put(URI.createURI(UMLResource.PROFILES_PATHMAP), uri.appendSegment("profiles").appendSegment(""));
        uriMap.put(URI.createURI(ACTION_LANGUAGE_PATHMAP),
                uri.appendSegment("resources").appendSegment("action-language-profile").appendSegment(""));
        uriMap.put(URI.createURI(RALF_LIBRARY_PATHMAP),
                uri.appendSegment("model").appendSegment("collections").appendSegment("collections.uml"));
        uriMap.put(URI.createURI(XUMLRT_PROFILE_PATHMAP), uri.appendSegment("profile").appendSegment(""));
    }

    private void registerReducedAlfLanguage() {
        if (!Resource.Factory.Registry.INSTANCE.getExtensionToFactoryMap().containsKey("ecore")) {
            Resource.Factory.Registry.INSTANCE.getExtensionToFactoryMap().put("ecore",
                    new org.eclipse.emf.ecore.xmi.impl.EcoreResourceFactoryImpl());
        }
        if (!Resource.Factory.Registry.INSTANCE.getExtensionToFactoryMap().containsKey("xmi")) {
            Resource.Factory.Registry.INSTANCE.getExtensionToFactoryMap().put("xmi",
                    new org.eclipse.emf.ecore.xmi.impl.XMIResourceFactoryImpl());
        }
        if (!Resource.Factory.Registry.INSTANCE.getExtensionToFactoryMap().containsKey("xtextbin")) {
            Resource.Factory.Registry.INSTANCE.getExtensionToFactoryMap().put("xtextbin",
                    new org.eclipse.xtext.resource.impl.BinaryGrammarResourceFactoryImpl());
        }
        if (!EPackage.Registry.INSTANCE.containsKey(org.eclipse.xtext.XtextPackage.eNS_URI)) {
            EPackage.Registry.INSTANCE.put(org.eclipse.xtext.XtextPackage.eNS_URI,
                    org.eclipse.xtext.XtextPackage.eINSTANCE);
        }
        if (!EPackage.Registry.INSTANCE.containsKey("http://www.incquerylabs.com/uml/ralf/ReducedAlfLanguage")) {
            EPackage.Registry.INSTANCE.put("http://www.incquerylabs.com/uml/ralf/ReducedAlfLanguage",
                    com.incquerylabs.uml.ralf.reducedAlfLanguage.ReducedAlfLanguagePackage.eINSTANCE);
        }
    }
}
cli-refactor: registration of pathmaps on the correct resource set
plugins/hu.eltesoft.modelexecution.cli/src/hu/eltesoft/modelexecution/cli/StandaloneModelCompiler.java
cli-refactor: registration of pathmaps on the correct resource set
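The diff above moves pathmap registration onto the ModelSet that actually loads the model: in EMF, URI maps belong to a resource set's URIConverter, so mappings installed on a throwaway ResourceSetImpl (as in the old registerPathMaps) are never consulted when the real model is loaded. A minimal sketch of that underlying EMF mechanism, not taken from the commit; the pathmap URI and jar path are illustrative only:

import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.resource.ResourceSet;
import org.eclipse.emf.ecore.resource.impl.ResourceSetImpl;

public class PathmapRegistrationSketch {

    // Register a pathmap mapping on the resource set that will load the model.
    public static void registerLibraryPathmap(ResourceSet resourceSet, String jarPath) {
        URI jarRoot = URI.createURI("jar:file:" + jarPath + "!/");
        // URI maps are per-URIConverter, i.e. per resource set.
        resourceSet.getURIConverter().getURIMap().put(
                URI.createURI("pathmap://RALF/library.uml"),
                jarRoot.appendSegment("model").appendSegment("collections").appendSegment("collections.uml"));
    }

    public static void main(String[] args) {
        ResourceSet loadingSet = new ResourceSetImpl();
        registerLibraryPathmap(loadingSet, "/tmp/compiler.jar"); // hypothetical jar location
        // loadingSet.getResource(...) would now resolve the pathmap URI;
        // a freshly created ResourceSetImpl would not see this mapping.
    }
}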
Java
lgpl-2.1
a77bfc278478f8dae92dfbd2c009399ffb134786
0
hungerburg/exist,joewiz/exist,wshager/exist,olvidalo/exist,ljo/exist,MjAbuz/exist,hungerburg/exist,adamretter/exist,ljo/exist,joewiz/exist,lcahlander/exist,wolfgangmm/exist,shabanovd/exist,wshager/exist,jensopetersen/exist,jessealama/exist,MjAbuz/exist,eXist-db/exist,kohsah/exist,MjAbuz/exist,joewiz/exist,dizzzz/exist,opax/exist,jessealama/exist,kohsah/exist,kohsah/exist,wshager/exist,hungerburg/exist,jensopetersen/exist,wshager/exist,windauer/exist,jessealama/exist,ambs/exist,patczar/exist,patczar/exist,opax/exist,dizzzz/exist,shabanovd/exist,dizzzz/exist,zwobit/exist,wolfgangmm/exist,olvidalo/exist,olvidalo/exist,olvidalo/exist,ambs/exist,zwobit/exist,zwobit/exist,opax/exist,zwobit/exist,lcahlander/exist,patczar/exist,wshager/exist,joewiz/exist,jensopetersen/exist,olvidalo/exist,eXist-db/exist,jessealama/exist,MjAbuz/exist,zwobit/exist,patczar/exist,jensopetersen/exist,opax/exist,adamretter/exist,wshager/exist,eXist-db/exist,RemiKoutcherawy/exist,zwobit/exist,shabanovd/exist,windauer/exist,hungerburg/exist,wolfgangmm/exist,eXist-db/exist,patczar/exist,jessealama/exist,wolfgangmm/exist,adamretter/exist,adamretter/exist,ljo/exist,windauer/exist,RemiKoutcherawy/exist,RemiKoutcherawy/exist,opax/exist,wolfgangmm/exist,patczar/exist,joewiz/exist,RemiKoutcherawy/exist,ljo/exist,eXist-db/exist,ambs/exist,shabanovd/exist,adamretter/exist,MjAbuz/exist,ambs/exist,jensopetersen/exist,lcahlander/exist,MjAbuz/exist,RemiKoutcherawy/exist,lcahlander/exist,jensopetersen/exist,dizzzz/exist,adamretter/exist,ambs/exist,ljo/exist,lcahlander/exist,kohsah/exist,shabanovd/exist,hungerburg/exist,windauer/exist,windauer/exist,kohsah/exist,windauer/exist,jessealama/exist,dizzzz/exist,joewiz/exist,ambs/exist,RemiKoutcherawy/exist,dizzzz/exist,eXist-db/exist,ljo/exist,shabanovd/exist,lcahlander/exist,kohsah/exist,wolfgangmm/exist
/*
 * eXist Open Source Native XML Database
 * Copyright (C) 2008-2010 The eXist Project
 * http://exist-db.org
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this program; if not, write to the Free Software Foundation
 * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 *
 * $Id$
 */
package org.exist.xquery.functions.xmldb;

import org.apache.log4j.Logger;

import org.exist.dom.QName;
import org.exist.security.PermissionDeniedException;
import org.exist.xmldb.IndexQueryService;
import org.exist.xmldb.XmldbURI;
import org.exist.xquery.BasicFunction;
import org.exist.xquery.Cardinality;
import org.exist.xquery.FunctionSignature;
import org.exist.xquery.XPathException;
import org.exist.xquery.XQueryContext;
import org.exist.xquery.value.BooleanValue;
import org.exist.xquery.value.FunctionReturnSequenceType;
import org.exist.xquery.value.FunctionParameterSequenceType;
import org.exist.xquery.value.Sequence;
import org.exist.xquery.value.SequenceType;
import org.exist.xquery.value.Type;
import org.xmldb.api.base.Collection;
import org.xmldb.api.base.XMLDBException;

/**
 * Reindex a collection in the database.
 *
 * @author dizzzz
 * @author ljo
 */
public class XMLDBReindex extends XMLDBAbstractCollectionManipulator {
    protected static final Logger logger = Logger.getLogger(XMLDBReindex.class);

    public final static FunctionSignature signature = new FunctionSignature(
            new QName("reindex", XMLDBModule.NAMESPACE_URI, XMLDBModule.PREFIX),
            "Reindex collection $collection-uri. " + XMLDBModule.COLLECTION_URI + " " + XMLDBModule.NEED_PRIV_USER,
            new SequenceType[]{
                new FunctionParameterSequenceType("collection-uri", Type.STRING, Cardinality.EXACTLY_ONE,
                        "The collection URI")
            },
            new FunctionReturnSequenceType(Type.BOOLEAN, Cardinality.EXACTLY_ONE,
                    "true() if successfully reindexed, false() otherwise"));

    /**
     * @param context
     */
    public XMLDBReindex(XQueryContext context) {
        super(context, signature, false);
    }

    public Sequence evalWithCollection(Collection collection, Sequence[] args, Sequence contextSequence)
            throws XPathException {
        // Check for DBA user
        if (!context.getUser().hasDbaRole()) {
            logger.error("Permission denied, user '" + context.getUser().getName()
                    + "' must be a DBA to reindex the database");
            return BooleanValue.FALSE;
        }

        // Check if collection does exist
        if (collection == null) {
            logger.error("Collection " + args[0].getStringValue() + " does not exist.");
            return BooleanValue.FALSE;
        }

        // Reindex
        try {
            IndexQueryService iqs = (IndexQueryService) collection.getService("IndexQueryService", "1.0");
            iqs.reindexCollection();
        } catch (XMLDBException xe) {
            logger.error("Unable to reindex collection", xe);
            return BooleanValue.FALSE;
        }

        return BooleanValue.TRUE;
    }
}
src/org/exist/xquery/functions/xmldb/XMLDBReindex.java
/*
 * eXist Open Source Native XML Database
 * Copyright (C) 2008-2009 The eXist Project
 * http://exist-db.org
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this program; if not, write to the Free Software Foundation
 * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 *
 * $Id$
 */
package org.exist.xquery.functions.xmldb;

import org.apache.log4j.Logger;

import org.exist.collections.Collection;
import org.exist.dom.QName;
import org.exist.security.PermissionDeniedException;
import org.exist.xmldb.XmldbURI;
import org.exist.xquery.BasicFunction;
import org.exist.xquery.Cardinality;
import org.exist.xquery.FunctionSignature;
import org.exist.xquery.XPathException;
import org.exist.xquery.XQueryContext;
import org.exist.xquery.value.BooleanValue;
import org.exist.xquery.value.FunctionReturnSequenceType;
import org.exist.xquery.value.FunctionParameterSequenceType;
import org.exist.xquery.value.Sequence;
import org.exist.xquery.value.SequenceType;
import org.exist.xquery.value.Type;

/**
 * Reindex a collection in the database.
 *
 * @author dizzzz
 * @author ljo
 */
public class XMLDBReindex extends BasicFunction {
    protected static final Logger logger = Logger.getLogger(XMLDBReindex.class);

    public final static FunctionSignature signature = new FunctionSignature(
            new QName("reindex", XMLDBModule.NAMESPACE_URI, XMLDBModule.PREFIX),
            // yes, only a path not an uri /ljo
            "Reindex collection $collection-path. " + XMLDBModule.NEED_PRIV_USER,
            new SequenceType[]{
                new FunctionParameterSequenceType("collection-path", Type.STRING, Cardinality.EXACTLY_ONE,
                        "The collection path")
            },
            new FunctionReturnSequenceType(Type.BOOLEAN, Cardinality.EXACTLY_ONE,
                    "true() if successfully reindexed, false() otherwise"));

    /**
     * @param context
     */
    public XMLDBReindex(XQueryContext context) {
        super(context, signature);
    }

    public Sequence eval(Sequence[] args, Sequence contextSequence) throws XPathException {
        // this is "/db"
        String ROOTCOLLECTION = XmldbURI.ROOT_COLLECTION_URI.toString();

        // Check for DBA user
        if (!context.getUser().hasDbaRole()) {
            logger.error("Permission denied, user '" + context.getUser().getName()
                    + "' must be a DBA to reindex the database");
            return BooleanValue.FALSE;
        }

        // Get collection path
        String collectionArg = args[0].getStringValue();

        // Collection should start with /db
        if (!collectionArg.startsWith(ROOTCOLLECTION)) {
            logger.error("Collection should start with " + ROOTCOLLECTION);
            return BooleanValue.FALSE;
        }

        // Check if collection does exist
        XmldbURI colName = XmldbURI.create(collectionArg);
        Collection coll = context.getBroker().getCollection(colName);
        if (coll == null) {
            logger.error("Collection " + colName.toString() + " does not exist.");
            return BooleanValue.FALSE;
        }

        // Reindex
        try {
            context.getBroker().reindexCollection(colName);
        } catch (PermissionDeniedException ex) {
            logger.error(ex.getMessage());
            return BooleanValue.FALSE;
        }

        return BooleanValue.TRUE;
    }
}
[documentation-fix][bugfix] xdb:reindex() now takes collection URIs like the other xdb functions. svn path=/trunk/eXist/; revision=11058
src/org/exist/xquery/functions/xmldb/XMLDBReindex.java
[documentation-fix][bugfix] xdb:reindex() now takes collection URIs like the other xdb functions.
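The rewrite above delegates to eXist's IndexQueryService instead of calling the broker directly, which is what lets xdb:reindex() accept full collection URIs like the other xmldb functions. A hedged client-side sketch of the same service call over the XML:DB API, assuming a running eXist server; the server URI and credentials below are placeholders, not taken from the commit:

import org.exist.xmldb.IndexQueryService;
import org.xmldb.api.DatabaseManager;
import org.xmldb.api.base.Collection;
import org.xmldb.api.base.Database;

public class ReindexSketch {
    public static void main(String[] args) throws Exception {
        // Register the eXist XML:DB driver.
        Database db = (Database) Class.forName("org.exist.xmldb.DatabaseImpl")
                .getDeclaredConstructor().newInstance();
        DatabaseManager.registerDatabase(db);

        // Full collection URI, matching the new xmldb:reindex() contract.
        Collection col = DatabaseManager.getCollection(
                "xmldb:exist://localhost:8080/exist/xmlrpc/db", "admin", ""); // placeholder credentials

        // Same service the rewritten function uses internally.
        IndexQueryService iqs = (IndexQueryService) col.getService("IndexQueryService", "1.0");
        iqs.reindexCollection();
    }
}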
Java
apache-2.0
3de05cf9c31587ffae2f1e1b55f5b8704272e853
0
AVnetWS/Hentoid,AVnetWS/Hentoid,AVnetWS/Hentoid
package me.devsaki.hentoid.views.ssiv; import android.content.ContentResolver; import android.content.Context; import android.content.res.TypedArray; import android.database.Cursor; import android.graphics.Bitmap; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.Matrix; import android.graphics.Paint; import android.graphics.Paint.Style; import android.graphics.Point; import android.graphics.PointF; import android.graphics.Rect; import android.graphics.RectF; import android.net.Uri; import android.os.AsyncTask; import android.os.Handler; import android.provider.MediaStore; import android.util.AttributeSet; import android.util.DisplayMetrics; import android.util.TypedValue; import android.view.GestureDetector; import android.view.MotionEvent; import android.view.View; import android.view.ViewParent; import androidx.annotation.AnyThread; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.exifinterface.media.ExifInterface; import com.davemorrissey.labs.subscaleview.ImageViewState; import com.davemorrissey.labs.subscaleview.R.styleable; import com.davemorrissey.labs.subscaleview.decoder.CompatDecoderFactory; import com.davemorrissey.labs.subscaleview.decoder.DecoderFactory; import com.davemorrissey.labs.subscaleview.decoder.ImageDecoder; import com.davemorrissey.labs.subscaleview.decoder.ImageRegionDecoder; import com.davemorrissey.labs.subscaleview.decoder.SkiaImageDecoder; import com.davemorrissey.labs.subscaleview.decoder.SkiaImageRegionDecoder; import java.lang.ref.WeakReference; import java.util.ArrayList; import java.util.Arrays; import java.util.LinkedHashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.concurrent.Executor; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import me.devsaki.hentoid.R; import timber.log.Timber; /** * <p> * Displays an image subsampled as necessary to avoid loading too much image data into memory. After zooming in, * a set of image tiles subsampled at higher resolution are loaded and displayed over the base layer. During pan and * zoom, tiles off screen or higher/lower resolution than required are discarded from memory. * </p><p> * Tiles are no larger than the max supported bitmap size, so with large images tiling may be used even when zoomed out. * </p><p> * v prefixes - coordinates, translations and distances measured in screen (view) pixels * <br> * s prefixes - coordinates, translations and distances measured in rotated and cropped source image pixels (scaled) * <br> * f prefixes - coordinates, translations and distances measured in original unrotated, uncropped source file pixels * </p><p> * <a href="https://github.com/davemorrissey/subsampling-scale-image-view">View project on GitHub</a> * </p> */ @SuppressWarnings("unused") public class CustomSubsamplingScaleImageView extends View { private static final String TAG = CustomSubsamplingScaleImageView.class.getSimpleName(); /** * Attempt to use EXIF information on the image to rotate it. Works for external files only. */ public static final int ORIENTATION_USE_EXIF = -1; /** * Display the image file in its native orientation. */ public static final int ORIENTATION_0 = 0; /** * Rotate the image 90 degrees clockwise. */ public static final int ORIENTATION_90 = 90; /** * Rotate the image 180 degrees. */ public static final int ORIENTATION_180 = 180; /** * Rotate the image 270 degrees clockwise. 
*/ public static final int ORIENTATION_270 = 270; private static final List<Integer> VALID_ORIENTATIONS = Arrays.asList(ORIENTATION_0, ORIENTATION_90, ORIENTATION_180, ORIENTATION_270, ORIENTATION_USE_EXIF); /** * During zoom animation, keep the point of the image that was tapped in the same place, and scale the image around it. */ public static final int ZOOM_FOCUS_FIXED = 1; /** * During zoom animation, move the point of the image that was tapped to the center of the screen. */ public static final int ZOOM_FOCUS_CENTER = 2; /** * Zoom in to and center the tapped point immediately without animating. */ public static final int ZOOM_FOCUS_CENTER_IMMEDIATE = 3; private static final List<Integer> VALID_ZOOM_STYLES = Arrays.asList(ZOOM_FOCUS_FIXED, ZOOM_FOCUS_CENTER, ZOOM_FOCUS_CENTER_IMMEDIATE); /** * Quadratic ease out. Not recommended for scale animation, but good for panning. */ public static final int EASE_OUT_QUAD = 1; /** * Quadratic ease in and out. */ public static final int EASE_IN_OUT_QUAD = 2; private static final List<Integer> VALID_EASING_STYLES = Arrays.asList(EASE_IN_OUT_QUAD, EASE_OUT_QUAD); /** * Don't allow the image to be panned off screen. As much of the image as possible is always displayed, centered in the view when it is smaller. This is the best option for galleries. */ public static final int PAN_LIMIT_INSIDE = 1; /** * Allows the image to be panned until it is just off screen, but no further. The edge of the image will stop when it is flush with the screen edge. */ public static final int PAN_LIMIT_OUTSIDE = 2; /** * Allows the image to be panned until a corner reaches the center of the screen but no further. Useful when you want to pan any spot on the image to the exact center of the screen. */ public static final int PAN_LIMIT_CENTER = 3; private static final List<Integer> VALID_PAN_LIMITS = Arrays.asList(PAN_LIMIT_INSIDE, PAN_LIMIT_OUTSIDE, PAN_LIMIT_CENTER); /** * Scale the image so that both dimensions of the image will be equal to or less than the corresponding dimension of the view. The image is then centered in the view. This is the default behaviour and best for galleries. */ public static final int SCALE_TYPE_CENTER_INSIDE = 1; /** * Scale the image uniformly so that both dimensions of the image will be equal to or larger than the corresponding dimension of the view. The image is then centered in the view. */ public static final int SCALE_TYPE_CENTER_CROP = 2; /** * Scale the image so that both dimensions of the image will be equal to or less than the maxScale and equal to or larger than minScale. The image is then centered in the view. */ public static final int SCALE_TYPE_CUSTOM = 3; /** * Scale the image so that both dimensions of the image will be equal to or larger than the corresponding dimension of the view. The top left is shown. */ public static final int SCALE_TYPE_START = 4; private static final List<Integer> VALID_SCALE_TYPES = Arrays.asList(SCALE_TYPE_CENTER_CROP, SCALE_TYPE_CENTER_INSIDE, SCALE_TYPE_CUSTOM, SCALE_TYPE_START); /** * State change originated from animation. */ public static final int ORIGIN_ANIM = 1; /** * State change originated from touch gesture. */ public static final int ORIGIN_TOUCH = 2; /** * State change originated from a fling momentum anim. */ public static final int ORIGIN_FLING = 3; /** * State change originated from a double tap zoom anim. 
*/ public static final int ORIGIN_DOUBLE_TAP_ZOOM = 4; // Bitmap (preview or full image) private Bitmap bitmap; // Whether the bitmap is a preview image private boolean bitmapIsPreview; // Specifies if a cache handler is also referencing the bitmap. Do not recycle if so. private boolean bitmapIsCached; // Uri of full size image private Uri uri; // Sample size used to display the whole image when fully zoomed out private int fullImageSampleSize; // Map of zoom level to tile grid private Map<Integer, List<Tile>> tileMap; // Overlay tile boundaries and other info private boolean debug; // Image orientation setting private int orientation = ORIENTATION_0; // Max scale allowed (prevent infinite zoom) private float maxScale = 2F; // Min scale allowed (prevent infinite zoom) private float minScale = minScale(); // Density to reach before loading higher resolution tiles private int minimumTileDpi = -1; // Pan limiting style private int panLimit = PAN_LIMIT_INSIDE; // Minimum scale type private int minimumScaleType = SCALE_TYPE_CENTER_INSIDE; // overrides for the dimensions of the generated tiles public static final int TILE_SIZE_AUTO = Integer.MAX_VALUE; private int maxTileWidth = TILE_SIZE_AUTO; private int maxTileHeight = TILE_SIZE_AUTO; // An executor service for loading of images private Executor executor = AsyncTask.THREAD_POOL_EXECUTOR; // Whether tiles should be loaded while gestures and animations are still in progress private boolean eagerLoadingEnabled = true; // Gesture detection settings private boolean panEnabled = true; private boolean zoomEnabled = true; private boolean quickScaleEnabled = true; // Double tap zoom behaviour private float doubleTapZoomScale = 1F; private int doubleTapZoomStyle = ZOOM_FOCUS_FIXED; private int doubleTapZoomDuration = 500; // Current scale and scale at start of zoom private float scale; private float scaleStart; // Screen coordinate of top-left corner of source image private PointF vTranslate; private PointF vTranslateStart; private PointF vTranslateBefore; // Source coordinate to center on, used when new position is set externally before view is ready private Float pendingScale; private PointF sPendingCenter; private PointF sRequestedCenter; // Source image dimensions and orientation - dimensions relate to the unrotated image private int sWidth; private int sHeight; private int sOrientation; private Rect sRegion; private Rect pRegion; // Is two-finger zooming in progress private boolean isZooming; // Is one-finger panning in progress private boolean isPanning; // Is quick-scale gesture in progress private boolean isQuickScaling; // Max touches used in current gesture private int maxTouchCount; // Fling detector private GestureDetector detector; private GestureDetector singleDetector; // Tile and image decoding private ImageRegionDecoder decoder; private final ReadWriteLock decoderLock = new ReentrantReadWriteLock(true); private DecoderFactory<? extends ImageDecoder> bitmapDecoderFactory = new CompatDecoderFactory<ImageDecoder>(SkiaImageDecoder.class); private DecoderFactory<? 
extends ImageRegionDecoder> regionDecoderFactory = new CompatDecoderFactory<ImageRegionDecoder>(SkiaImageRegionDecoder.class); // Debug values private PointF vCenterStart; private float vDistStart; // Current quickscale state private final float quickScaleThreshold; private float quickScaleLastDistance; private boolean quickScaleMoved; private PointF quickScaleVLastPoint; private PointF quickScaleSCenter; private PointF quickScaleVStart; // Scale and center animation tracking private Anim anim; // Whether a ready notification has been sent to subclasses private boolean readySent; // Whether a base layer loaded notification has been sent to subclasses private boolean imageLoadedSent; // Event listener private OnImageEventListener onImageEventListener; // Scale and center listener private OnStateChangedListener onStateChangedListener; // Long click listener private OnLongClickListener onLongClickListener; // Long click handler private final Handler handler; private static final int MESSAGE_LONG_CLICK = 1; // Paint objects created once and reused for efficiency private Paint bitmapPaint; private Paint debugTextPaint; private Paint debugLinePaint; private Paint tileBgPaint; // Volatile fields used to reduce object creation private ScaleAndTranslate satTemp; private Matrix matrix; private RectF sRect; private final float[] srcArray = new float[8]; private final float[] dstArray = new float[8]; //The logical density of the display private final float density; // A global preference for bitmap format, available to decoder classes that respect it private static Bitmap.Config preferredBitmapConfig; // Switch to ignore all touch events (used in vertical mode when the container view is the one handling touch events) private boolean ignoreTouchEvents = false; // Dimensions used to preload the image before the view actually appears on screen / gets its display dimensions private Point preloadDimensions = null; public CustomSubsamplingScaleImageView(Context context, AttributeSet attr) { super(context, attr); density = getResources().getDisplayMetrics().density; setMinimumDpi(160); setDoubleTapZoomDpi(160); setMinimumTileDpi(320); setGestureDetector(context); this.handler = new Handler(message -> { if (message.what == MESSAGE_LONG_CLICK && onLongClickListener != null) { maxTouchCount = 0; CustomSubsamplingScaleImageView.super.setOnLongClickListener(onLongClickListener); performLongClick(); CustomSubsamplingScaleImageView.super.setOnLongClickListener(null); } return true; }); // Handle XML attributes if (attr != null) { TypedArray typedAttr = getContext().obtainStyledAttributes(attr, R.styleable.CustomSubsamplingScaleImageView); if (typedAttr.hasValue(styleable.SubsamplingScaleImageView_assetName)) { String assetName = typedAttr.getString(styleable.SubsamplingScaleImageView_assetName); if (assetName != null && assetName.length() > 0) { setImage(ImageSource.asset(assetName).tilingEnabled()); } } if (typedAttr.hasValue(styleable.SubsamplingScaleImageView_src)) { int resId = typedAttr.getResourceId(styleable.SubsamplingScaleImageView_src, 0); if (resId > 0) { setImage(ImageSource.resource(resId).tilingEnabled()); } } if (typedAttr.hasValue(styleable.SubsamplingScaleImageView_panEnabled)) { setPanEnabled(typedAttr.getBoolean(styleable.SubsamplingScaleImageView_panEnabled, true)); } if (typedAttr.hasValue(styleable.SubsamplingScaleImageView_zoomEnabled)) { setZoomEnabled(typedAttr.getBoolean(styleable.SubsamplingScaleImageView_zoomEnabled, true)); } if 
(typedAttr.hasValue(styleable.SubsamplingScaleImageView_quickScaleEnabled)) { setQuickScaleEnabled(typedAttr.getBoolean(styleable.SubsamplingScaleImageView_quickScaleEnabled, true)); } if (typedAttr.hasValue(styleable.SubsamplingScaleImageView_tileBackgroundColor)) { setTileBackgroundColor(typedAttr.getColor(styleable.SubsamplingScaleImageView_tileBackgroundColor, Color.argb(0, 0, 0, 0))); } typedAttr.recycle(); } quickScaleThreshold = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, 20, context.getResources().getDisplayMetrics()); } public CustomSubsamplingScaleImageView(Context context) { this(context, null); } /** * Get the current preferred configuration for decoding bitmaps. {@link ImageDecoder} and {@link ImageRegionDecoder} * instances can read this and use it when decoding images. * * @return the preferred bitmap configuration, or null if none has been set. */ public static Bitmap.Config getPreferredBitmapConfig() { return preferredBitmapConfig; } /** * Set a global preferred bitmap config shared by all view instance and applied to new instances * initialised after the call is made. This is a hint only; the bundled {@link ImageDecoder} and * {@link ImageRegionDecoder} classes all respect this (except when they were constructed with * an instance-specific config) but custom decoder classes will not. * * @param preferredBitmapConfig the bitmap configuration to be used by future instances of the view. Pass null to restore the default. */ public static void setPreferredBitmapConfig(Bitmap.Config preferredBitmapConfig) { CustomSubsamplingScaleImageView.preferredBitmapConfig = preferredBitmapConfig; } /** * Sets the image orientation. It's best to call this before setting the image file or asset, because it may waste * loading of tiles. However, this can be freely called at any time. * * @param orientation orientation to be set. See ORIENTATION_ static fields for valid values. */ public final void setOrientation(int orientation) { if (!VALID_ORIENTATIONS.contains(orientation)) { throw new IllegalArgumentException("Invalid orientation: " + orientation); } this.orientation = orientation; reset(false); invalidate(); requestLayout(); } /** * Set the image source from a bitmap, resource, asset, file or other URI. * * @param imageSource Image source. */ public final void setImage(@NonNull ImageSource imageSource) { setImage(imageSource, null, null); } /** * Set the image source from a bitmap, resource, asset, file or other URI, starting with a given orientation * setting, scale and center. This is the best method to use when you want scale and center to be restored * after screen orientation change; it avoids any redundant loading of tiles in the wrong orientation. * * @param imageSource Image source. * @param state State to be restored. Nullable. */ public final void setImage(@NonNull ImageSource imageSource, ImageViewState state) { setImage(imageSource, null, state); } /** * Set the image source from a bitmap, resource, asset, file or other URI, providing a preview image to be * displayed until the full size image is loaded. * <p> * You must declare the dimensions of the full size image by calling {@link ImageSource#dimensions(int, int)} * on the imageSource object. The preview source will be ignored if you don't provide dimensions, * and if you provide a bitmap for the full size image. * * @param imageSource Image source. Dimensions must be declared. * @param previewSource Optional source for a preview image to be displayed and allow interaction while the full size image loads. 
*/ public final void setImage(@NonNull ImageSource imageSource, ImageSource previewSource) { setImage(imageSource, previewSource, null); } /** * Set the image source from a bitmap, resource, asset, file or other URI, providing a preview image to be * displayed until the full size image is loaded, starting with a given orientation setting, scale and center. * This is the best method to use when you want scale and center to be restored after screen orientation change; * it avoids any redundant loading of tiles in the wrong orientation. * <p> * You must declare the dimensions of the full size image by calling {@link ImageSource#dimensions(int, int)} * on the imageSource object. The preview source will be ignored if you don't provide dimensions, * and if you provide a bitmap for the full size image. * * @param imageSource Image source. Dimensions must be declared. * @param previewSource Optional source for a preview image to be displayed and allow interaction while the full size image loads. * @param state State to be restored. Nullable. */ public final void setImage(@NonNull ImageSource imageSource, ImageSource previewSource, ImageViewState state) { reset(true); if (state != null) { restoreState(state); } if (previewSource != null) { if (imageSource.getBitmap() != null) { throw new IllegalArgumentException("Preview image cannot be used when a bitmap is provided for the main image"); } if (imageSource.getSWidth() <= 0 || imageSource.getSHeight() <= 0) { throw new IllegalArgumentException("Preview image cannot be used unless dimensions are provided for the main image"); } this.sWidth = imageSource.getSWidth(); this.sHeight = imageSource.getSHeight(); this.pRegion = previewSource.getSRegion(); if (previewSource.getBitmap() != null) { this.bitmapIsCached = previewSource.isCached(); onPreviewLoaded(previewSource.getBitmap()); } else { Uri uri = previewSource.getUri(); if (uri == null && previewSource.getResource() != null) { uri = Uri.parse(ContentResolver.SCHEME_ANDROID_RESOURCE + "://" + getContext().getPackageName() + "/" + previewSource.getResource()); } BitmapLoadTask task = new BitmapLoadTask(this, getContext(), bitmapDecoderFactory, uri, true); execute(task); } } if (imageSource.getBitmap() != null && imageSource.getSRegion() != null) { onImageLoaded(Bitmap.createBitmap(imageSource.getBitmap(), imageSource.getSRegion().left, imageSource.getSRegion().top, imageSource.getSRegion().width(), imageSource.getSRegion().height()), ORIENTATION_0, false); } else if (imageSource.getBitmap() != null) { onImageLoaded(imageSource.getBitmap(), ORIENTATION_0, imageSource.isCached()); } else { sRegion = imageSource.getSRegion(); uri = imageSource.getUri(); if (uri == null && imageSource.getResource() != null) { uri = Uri.parse(ContentResolver.SCHEME_ANDROID_RESOURCE + "://" + getContext().getPackageName() + "/" + imageSource.getResource()); } if (imageSource.getTile() || sRegion != null) { // Load the bitmap using tile decoding. TilesInitTask task = new TilesInitTask(this, getContext(), regionDecoderFactory, uri); execute(task); } else { // Load the bitmap as a single image. BitmapLoadTask task = new BitmapLoadTask(this, getContext(), bitmapDecoderFactory, uri, false); execute(task); } } } /** * Reset all state before setting/changing image or setting new rotation. 
*/ private void reset(boolean newImage) { debug("reset newImage=" + newImage); scale = 0f; scaleStart = 0f; vTranslate = null; vTranslateStart = null; vTranslateBefore = null; pendingScale = 0f; sPendingCenter = null; sRequestedCenter = null; isZooming = false; isPanning = false; isQuickScaling = false; maxTouchCount = 0; fullImageSampleSize = 0; vCenterStart = null; vDistStart = 0; quickScaleLastDistance = 0f; quickScaleMoved = false; quickScaleSCenter = null; quickScaleVLastPoint = null; quickScaleVStart = null; anim = null; satTemp = null; matrix = null; sRect = null; if (newImage) { uri = null; decoderLock.writeLock().lock(); try { if (decoder != null) { decoder.recycle(); decoder = null; } } finally { decoderLock.writeLock().unlock(); } if (bitmap != null && !bitmapIsCached) { bitmap.recycle(); } if (bitmap != null && bitmapIsCached && onImageEventListener != null) { onImageEventListener.onPreviewReleased(); } sWidth = 0; sHeight = 0; sOrientation = 0; sRegion = null; pRegion = null; readySent = false; imageLoadedSent = false; bitmap = null; bitmapIsPreview = false; bitmapIsCached = false; } if (tileMap != null) { for (Map.Entry<Integer, List<Tile>> tileMapEntry : tileMap.entrySet()) { for (Tile tile : tileMapEntry.getValue()) { tile.visible = false; if (tile.bitmap != null) { tile.bitmap.recycle(); tile.bitmap = null; } } } tileMap = null; } setGestureDetector(getContext()); } private void setGestureDetector(final Context context) { this.detector = new GestureDetector(context, new GestureDetector.SimpleOnGestureListener() { @Override public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) { if (panEnabled && readySent && vTranslate != null && e1 != null && e2 != null && (Math.abs(e1.getX() - e2.getX()) > 50 || Math.abs(e1.getY() - e2.getY()) > 50) && (Math.abs(velocityX) > 500 || Math.abs(velocityY) > 500) && !isZooming) { PointF vTranslateEnd = new PointF(vTranslate.x + (velocityX * 0.25f), vTranslate.y + (velocityY * 0.25f)); float sCenterXEnd = ((getWidthInternal() / 2f) - vTranslateEnd.x) / scale; float sCenterYEnd = ((getHeightInternal() / 2f) - vTranslateEnd.y) / scale; new AnimationBuilder(new PointF(sCenterXEnd, sCenterYEnd)).withEasing(EASE_OUT_QUAD).withPanLimited(false).withOrigin(ORIGIN_FLING).start(); return true; } return super.onFling(e1, e2, velocityX, velocityY); } @Override public boolean onSingleTapConfirmed(MotionEvent e) { performClick(); return true; } @Override public boolean onDoubleTap(MotionEvent e) { if (zoomEnabled && readySent && vTranslate != null) { // Hacky solution for #15 - after a double tap the GestureDetector gets in a state // where the next fling is ignored, so here we replace it with a new one. setGestureDetector(context); if (quickScaleEnabled) { // Store quick scale params. This will become either a double tap zoom or a // quick scale depending on whether the user swipes. vCenterStart = new PointF(e.getX(), e.getY()); vTranslateStart = new PointF(vTranslate.x, vTranslate.y); scaleStart = scale; isQuickScaling = true; isZooming = true; quickScaleLastDistance = -1F; quickScaleSCenter = viewToSourceCoord(vCenterStart); if (null == quickScaleSCenter) throw new IllegalStateException("vTranslate is null; aborting"); quickScaleVStart = new PointF(e.getX(), e.getY()); quickScaleVLastPoint = new PointF(quickScaleSCenter.x, quickScaleSCenter.y); quickScaleMoved = false; // We need to get events in onTouchEvent after this. return false; } else { // Start double tap zoom animation. 
PointF sCenter = viewToSourceCoord(new PointF(e.getX(), e.getY())); if (null == sCenter) throw new IllegalStateException("vTranslate is null; aborting"); doubleTapZoom(sCenter, new PointF(e.getX(), e.getY())); return true; } } return super.onDoubleTapEvent(e); } }); singleDetector = new GestureDetector(context, new GestureDetector.SimpleOnGestureListener() { @Override public boolean onSingleTapConfirmed(MotionEvent e) { performClick(); return true; } }); } /** * On resize, preserve center and scale. Various behaviours are possible, override this method to use another. */ @Override protected void onSizeChanged(int w, int h, int oldw, int oldh) { debug("onSizeChanged %dx%d -> %dx%d", oldw, oldh, w, h); PointF sCenter = getCenter(); if (readySent && sCenter != null) { this.anim = null; this.pendingScale = scale; this.sPendingCenter = sCenter; } } /** * Measures the width and height of the view, preserving the aspect ratio of the image displayed if wrap_content is * used. The image will scale within this box, not resizing the view as it is zoomed. */ @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { int widthSpecMode = MeasureSpec.getMode(widthMeasureSpec); int heightSpecMode = MeasureSpec.getMode(heightMeasureSpec); int parentWidth = MeasureSpec.getSize(widthMeasureSpec); int parentHeight = MeasureSpec.getSize(heightMeasureSpec); boolean resizeWidth = widthSpecMode != MeasureSpec.EXACTLY; boolean resizeHeight = heightSpecMode != MeasureSpec.EXACTLY; int width = parentWidth; int height = parentHeight; if (sWidth > 0 && sHeight > 0) { if (resizeWidth && resizeHeight) { width = sWidth(); height = sHeight(); } else if (resizeHeight) { height = (int) ((((double) sHeight() / (double) sWidth()) * width)); } else if (resizeWidth) { width = (int) ((((double) sWidth() / (double) sHeight()) * height)); } } width = Math.max(width, getSuggestedMinimumWidth()); height = Math.max(height, getSuggestedMinimumHeight()); setMeasuredDimension(width, height); } public void setIgnoreTouchEvents(boolean ignoreTouchEvents) { this.ignoreTouchEvents = ignoreTouchEvents; } /** * Handle touch events. One finger pans, and two finger pinch and zoom plus panning. 
*/ @Override public boolean onTouchEvent(@NonNull MotionEvent event) { if (ignoreTouchEvents) return false; // During non-interruptible anims, ignore all touch events if (anim != null && !anim.interruptible) { requestDisallowInterceptTouchEvent(true); return true; } else { if (anim != null && anim.listener != null) { try { anim.listener.onInterruptedByUser(); } catch (Exception e) { Timber.tag(TAG).w(e, "Error thrown by animation listener"); } } anim = null; } // Abort if not ready if (vTranslate == null) { if (singleDetector != null) { singleDetector.onTouchEvent(event); } return true; } // Detect flings, taps and double taps if (!isQuickScaling && (detector == null || detector.onTouchEvent(event))) { isZooming = false; isPanning = false; maxTouchCount = 0; return true; } if (vTranslateStart == null) { vTranslateStart = new PointF(0, 0); } if (vTranslateBefore == null) { vTranslateBefore = new PointF(0, 0); } if (vCenterStart == null) { vCenterStart = new PointF(0, 0); } // Store current values so we can send an event if they change float scaleBefore = scale; vTranslateBefore.set(vTranslate); boolean handled = onTouchEventInternal(event); sendStateChanged(scaleBefore, vTranslateBefore, ORIGIN_TOUCH); return handled || super.onTouchEvent(event); } @SuppressWarnings("deprecation") private boolean onTouchEventInternal(@NonNull MotionEvent event) { int touchCount = event.getPointerCount(); switch (event.getAction()) { case MotionEvent.ACTION_DOWN: case MotionEvent.ACTION_POINTER_1_DOWN: case MotionEvent.ACTION_POINTER_2_DOWN: anim = null; requestDisallowInterceptTouchEvent(true); maxTouchCount = Math.max(maxTouchCount, touchCount); if (touchCount >= 2) { if (zoomEnabled) { // Start pinch to zoom. Calculate distance between touch points and center point of the pinch. float distance = distance(event.getX(0), event.getX(1), event.getY(0), event.getY(1)); scaleStart = scale; vDistStart = distance; vTranslateStart.set(vTranslate.x, vTranslate.y); vCenterStart.set((event.getX(0) + event.getX(1)) / 2, (event.getY(0) + event.getY(1)) / 2); } else { // Abort all gestures on second touch maxTouchCount = 0; } // Cancel long click timer handler.removeMessages(MESSAGE_LONG_CLICK); } else if (!isQuickScaling) { // Start one-finger pan vTranslateStart.set(vTranslate.x, vTranslate.y); vCenterStart.set(event.getX(), event.getY()); // Start long click timer handler.sendEmptyMessageDelayed(MESSAGE_LONG_CLICK, 600); } return true; case MotionEvent.ACTION_MOVE: boolean consumed = false; if (maxTouchCount > 0) { if (touchCount >= 2) { // Calculate new distance between touch points, to scale and pan relative to start values. float vDistEnd = distance(event.getX(0), event.getX(1), event.getY(0), event.getY(1)); float vCenterEndX = (event.getX(0) + event.getX(1)) / 2; float vCenterEndY = (event.getY(0) + event.getY(1)) / 2; if (zoomEnabled && (distance(vCenterStart.x, vCenterEndX, vCenterStart.y, vCenterEndY) > 5 || Math.abs(vDistEnd - vDistStart) > 5 || isPanning)) { isZooming = true; isPanning = true; consumed = true; double previousScale = scale; scale = Math.min(maxScale, (vDistEnd / vDistStart) * scaleStart); if (scale <= minScale()) { // Minimum scale reached so don't pan. Adjust start settings so any expand will zoom in. 
vDistStart = vDistEnd; scaleStart = minScale(); vCenterStart.set(vCenterEndX, vCenterEndY); vTranslateStart.set(vTranslate); } else if (panEnabled) { // Translate to place the source image coordinate that was at the center of the pinch at the start // at the center of the pinch now, to give simultaneous pan + zoom. float vLeftStart = vCenterStart.x - vTranslateStart.x; float vTopStart = vCenterStart.y - vTranslateStart.y; float vLeftNow = vLeftStart * (scale / scaleStart); float vTopNow = vTopStart * (scale / scaleStart); vTranslate.x = vCenterEndX - vLeftNow; vTranslate.y = vCenterEndY - vTopNow; if ((previousScale * sHeight() < getHeightInternal() && scale * sHeight() >= getHeightInternal()) || (previousScale * sWidth() < getWidthInternal() && scale * sWidth() >= getWidthInternal())) { fitToBounds(true); vCenterStart.set(vCenterEndX, vCenterEndY); vTranslateStart.set(vTranslate); scaleStart = scale; vDistStart = vDistEnd; } } else if (sRequestedCenter != null) { // With a center specified from code, zoom around that point. vTranslate.x = (getWidthInternal() / 2f) - (scale * sRequestedCenter.x); vTranslate.y = (getHeightInternal() / 2f) - (scale * sRequestedCenter.y); } else { // With no requested center, scale around the image center. vTranslate.x = (getWidthInternal() / 2f) - (scale * (sWidth() / 2f)); vTranslate.y = (getHeightInternal() / 2f) - (scale * (sHeight() / 2f)); } fitToBounds(true); refreshRequiredTiles(eagerLoadingEnabled); } } else if (isQuickScaling) { // One finger zoom // Stole Google's Magical Formula™ to make sure it feels the exact same float dist = Math.abs(quickScaleVStart.y - event.getY()) * 2 + quickScaleThreshold; if (quickScaleLastDistance == -1f) { quickScaleLastDistance = dist; } boolean isUpwards = event.getY() > quickScaleVLastPoint.y; quickScaleVLastPoint.set(0, event.getY()); float spanDiff = Math.abs(1 - (dist / quickScaleLastDistance)) * 0.5f; if (spanDiff > 0.03f || quickScaleMoved) { quickScaleMoved = true; float multiplier = 1; if (quickScaleLastDistance > 0) { multiplier = isUpwards ? (1 + spanDiff) : (1 - spanDiff); } double previousScale = scale; scale = Math.max(minScale(), Math.min(maxScale, scale * multiplier)); if (panEnabled) { float vLeftStart = vCenterStart.x - vTranslateStart.x; float vTopStart = vCenterStart.y - vTranslateStart.y; float vLeftNow = vLeftStart * (scale / scaleStart); float vTopNow = vTopStart * (scale / scaleStart); vTranslate.x = vCenterStart.x - vLeftNow; vTranslate.y = vCenterStart.y - vTopNow; if ((previousScale * sHeight() < getHeightInternal() && scale * sHeight() >= getHeightInternal()) || (previousScale * sWidth() < getWidthInternal() && scale * sWidth() >= getWidthInternal())) { fitToBounds(true); vCenterStart.set(sourceToViewCoord(quickScaleSCenter)); vTranslateStart.set(vTranslate); scaleStart = scale; dist = 0; } } else if (sRequestedCenter != null) { // With a center specified from code, zoom around that point. vTranslate.x = (getWidthInternal() / 2f) - (scale * sRequestedCenter.x); vTranslate.y = (getHeightInternal() / 2f) - (scale * sRequestedCenter.y); } else { // With no requested center, scale around the image center. vTranslate.x = (getWidthInternal() / 2f) - (scale * (sWidth() / 2f)); vTranslate.y = (getHeightInternal() / 2f) - (scale * (sHeight() / 2f)); } } quickScaleLastDistance = dist; fitToBounds(true); refreshRequiredTiles(eagerLoadingEnabled); consumed = true; } else if (!isZooming) { // One finger pan - translate the image. 
We do this calculation even with pan disabled so click // and long click behaviour is preserved. float dx = Math.abs(event.getX() - vCenterStart.x); float dy = Math.abs(event.getY() - vCenterStart.y); //On the Samsung S6 long click event does not work, because the dx > 5 usually true float offset = density * 5; if (dx > offset || dy > offset || isPanning) { consumed = true; vTranslate.x = vTranslateStart.x + (event.getX() - vCenterStart.x); vTranslate.y = vTranslateStart.y + (event.getY() - vCenterStart.y); float lastX = vTranslate.x; float lastY = vTranslate.y; fitToBounds(true); boolean atXEdge = lastX != vTranslate.x; boolean atYEdge = lastY != vTranslate.y; boolean edgeXSwipe = atXEdge && dx > dy && !isPanning; boolean edgeYSwipe = atYEdge && dy > dx && !isPanning; boolean yPan = lastY == vTranslate.y && dy > offset * 3; if (!edgeXSwipe && !edgeYSwipe && (!atXEdge || !atYEdge || yPan || isPanning)) { isPanning = true; } else if (dx > offset || dy > offset) { // Haven't panned the image, and we're at the left or right edge. Switch to page swipe. maxTouchCount = 0; handler.removeMessages(MESSAGE_LONG_CLICK); requestDisallowInterceptTouchEvent(false); } if (!panEnabled) { vTranslate.x = vTranslateStart.x; vTranslate.y = vTranslateStart.y; requestDisallowInterceptTouchEvent(false); } refreshRequiredTiles(eagerLoadingEnabled); } } } if (consumed) { handler.removeMessages(MESSAGE_LONG_CLICK); invalidate(); return true; } break; case MotionEvent.ACTION_UP: case MotionEvent.ACTION_POINTER_UP: case MotionEvent.ACTION_POINTER_2_UP: handler.removeMessages(MESSAGE_LONG_CLICK); if (isQuickScaling) { isQuickScaling = false; if (!quickScaleMoved) { doubleTapZoom(quickScaleSCenter, vCenterStart); } } if (maxTouchCount > 0 && (isZooming || isPanning)) { if (isZooming && touchCount == 2) { // Convert from zoom to pan with remaining touch isPanning = true; vTranslateStart.set(vTranslate.x, vTranslate.y); if (event.getActionIndex() == 1) { vCenterStart.set(event.getX(0), event.getY(0)); } else { vCenterStart.set(event.getX(1), event.getY(1)); } } if (touchCount < 3) { // End zooming when only one touch point isZooming = false; } if (touchCount < 2) { // End panning when no touch points isPanning = false; maxTouchCount = 0; } // Trigger load of tiles now required refreshRequiredTiles(true); return true; } if (touchCount == 1) { isZooming = false; isPanning = false; maxTouchCount = 0; } return true; } return false; } private void requestDisallowInterceptTouchEvent(boolean disallowIntercept) { ViewParent parent = getParent(); if (parent != null) { parent.requestDisallowInterceptTouchEvent(disallowIntercept); } } /** * Double tap zoom handler triggered from gesture detector or on touch, depending on whether * quick scale is enabled. */ private void doubleTapZoom(PointF sCenter, PointF vFocus) { if (!panEnabled) { if (sRequestedCenter != null) { // With a center specified from code, zoom around that point. sCenter.x = sRequestedCenter.x; sCenter.y = sRequestedCenter.y; } else { // With no requested center, scale around the image center. sCenter.x = sWidth() / 2f; sCenter.y = sHeight() / 2f; } } float doubleTapZoomScale = Math.min(maxScale, CustomSubsamplingScaleImageView.this.doubleTapZoomScale); boolean zoomIn = (scale <= doubleTapZoomScale * 0.9) || scale == minScale; float targetScale = zoomIn ? 
doubleTapZoomScale : minScale(); if (doubleTapZoomStyle == ZOOM_FOCUS_CENTER_IMMEDIATE) { setScaleAndCenter(targetScale, sCenter); } else if (doubleTapZoomStyle == ZOOM_FOCUS_CENTER || !zoomIn || !panEnabled) { new AnimationBuilder(targetScale, sCenter).withInterruptible(false).withDuration(doubleTapZoomDuration).withOrigin(ORIGIN_DOUBLE_TAP_ZOOM).start(); } else if (doubleTapZoomStyle == ZOOM_FOCUS_FIXED) { new AnimationBuilder(targetScale, sCenter, vFocus).withInterruptible(false).withDuration(doubleTapZoomDuration).withOrigin(ORIGIN_DOUBLE_TAP_ZOOM).start(); } invalidate(); } /** * Draw method should not be called until the view has dimensions so the first calls are used as triggers to calculate * the scaling and tiling required. Once the view is setup, tiles are displayed as they are loaded. */ @Override protected void onDraw(Canvas canvas) { super.onDraw(canvas); createPaints(); // If image or view dimensions are not known yet, abort. if (sWidth == 0 || sHeight == 0 || getWidthInternal() == 0 || getHeightInternal() == 0) { return; } // When using tiles, on first render with no tile map ready, initialise it and kick off async base image loading. if (tileMap == null && decoder != null) { initialiseBaseLayer(getMaxBitmapDimensions(canvas)); } // If image has been loaded or supplied as a bitmap, onDraw may be the first time the view has // dimensions and therefore the first opportunity to set scale and translate. If this call returns // false there is nothing to be drawn so return immediately. if (!checkReady()) { return; } // Set scale and translate before draw. preDraw(); // If animating scale, calculate current scale and center with easing equations if (anim != null && anim.vFocusStart != null) { // Store current values so we can send an event if they change float scaleBefore = scale; if (vTranslateBefore == null) { vTranslateBefore = new PointF(0, 0); } vTranslateBefore.set(vTranslate); long scaleElapsed = System.currentTimeMillis() - anim.time; boolean finished = scaleElapsed > anim.duration; scaleElapsed = Math.min(scaleElapsed, anim.duration); scale = ease(anim.easing, scaleElapsed, anim.scaleStart, anim.scaleEnd - anim.scaleStart, anim.duration); // Apply required animation to the focal point float vFocusNowX = ease(anim.easing, scaleElapsed, anim.vFocusStart.x, anim.vFocusEnd.x - anim.vFocusStart.x, anim.duration); float vFocusNowY = ease(anim.easing, scaleElapsed, anim.vFocusStart.y, anim.vFocusEnd.y - anim.vFocusStart.y, anim.duration); // Find out where the focal point is at this scale and adjust its position to follow the animation path vTranslate.x -= sourceToViewX(anim.sCenterEnd.x) - vFocusNowX; vTranslate.y -= sourceToViewY(anim.sCenterEnd.y) - vFocusNowY; // For translate anims, showing the image non-centered is never allowed, for scaling anims it is during the animation. 
fitToBounds(finished || (anim.scaleStart == anim.scaleEnd)); sendStateChanged(scaleBefore, vTranslateBefore, anim.origin); refreshRequiredTiles(finished); if (finished) { if (anim.listener != null) { try { anim.listener.onComplete(); } catch (Exception e) { Timber.tag(TAG).w(e, "Error thrown by animation listener"); } } anim = null; } invalidate(); } if (tileMap != null && isBaseLayerReady()) { // Optimum sample size for current scale int sampleSize = Math.min(fullImageSampleSize, calculateInSampleSize(scale)); // First check for missing tiles - if there are any we need the base layer underneath to avoid gaps boolean hasMissingTiles = false; for (Map.Entry<Integer, List<Tile>> tileMapEntry : tileMap.entrySet()) { if (tileMapEntry.getKey() == sampleSize) { for (Tile tile : tileMapEntry.getValue()) { if (tile.visible && (tile.loading || tile.bitmap == null)) { hasMissingTiles = true; } } } } // Render all loaded tiles. LinkedHashMap used for bottom up rendering - lower res tiles underneath. for (Map.Entry<Integer, List<Tile>> tileMapEntry : tileMap.entrySet()) { if (tileMapEntry.getKey() == sampleSize || hasMissingTiles) { for (Tile tile : tileMapEntry.getValue()) { sourceToViewRect(tile.sRect, tile.vRect); if (!tile.loading && tile.bitmap != null) { if (tileBgPaint != null) { canvas.drawRect(tile.vRect, tileBgPaint); } if (matrix == null) { matrix = new Matrix(); } matrix.reset(); setMatrixArray(srcArray, 0, 0, tile.bitmap.getWidth(), 0, tile.bitmap.getWidth(), tile.bitmap.getHeight(), 0, tile.bitmap.getHeight()); if (getRequiredRotation() == ORIENTATION_0) { setMatrixArray(dstArray, tile.vRect.left, tile.vRect.top, tile.vRect.right, tile.vRect.top, tile.vRect.right, tile.vRect.bottom, tile.vRect.left, tile.vRect.bottom); } else if (getRequiredRotation() == ORIENTATION_90) { setMatrixArray(dstArray, tile.vRect.right, tile.vRect.top, tile.vRect.right, tile.vRect.bottom, tile.vRect.left, tile.vRect.bottom, tile.vRect.left, tile.vRect.top); } else if (getRequiredRotation() == ORIENTATION_180) { setMatrixArray(dstArray, tile.vRect.right, tile.vRect.bottom, tile.vRect.left, tile.vRect.bottom, tile.vRect.left, tile.vRect.top, tile.vRect.right, tile.vRect.top); } else if (getRequiredRotation() == ORIENTATION_270) { setMatrixArray(dstArray, tile.vRect.left, tile.vRect.bottom, tile.vRect.left, tile.vRect.top, tile.vRect.right, tile.vRect.top, tile.vRect.right, tile.vRect.bottom); } matrix.setPolyToPoly(srcArray, 0, dstArray, 0, 4); canvas.drawBitmap(tile.bitmap, matrix, bitmapPaint); if (debug) { canvas.drawRect(tile.vRect, debugLinePaint); } } else if (tile.loading && debug) { canvas.drawText("LOADING", (float)tile.vRect.left + px(5), (float)tile.vRect.top + px(35), debugTextPaint); } if (tile.visible && debug) { canvas.drawText("ISS " + tile.sampleSize + " RECT " + tile.sRect.top + "," + tile.sRect.left + "," + tile.sRect.bottom + "," + tile.sRect.right, (float)tile.vRect.left + px(5), (float)tile.vRect.top + px(15), debugTextPaint); } } } } } else if (bitmap != null) { float xScale = scale, yScale = scale; if (bitmapIsPreview) { xScale = scale * ((float) sWidth / bitmap.getWidth()); yScale = scale * ((float) sHeight / bitmap.getHeight()); } if (matrix == null) { matrix = new Matrix(); } matrix.reset(); matrix.postScale(xScale, yScale); matrix.postRotate(getRequiredRotation()); matrix.postTranslate(vTranslate.x, vTranslate.y); if (getRequiredRotation() == ORIENTATION_180) { matrix.postTranslate(scale * sWidth, scale * sHeight); } else if (getRequiredRotation() == ORIENTATION_90) { 
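                // A 90-degree post-rotation maps (x, y) to (-y, x), leaving the bitmap entirely
                // at negative x, so it is shifted right by its rotated width (scale * sHeight).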
matrix.postTranslate(scale * sHeight, 0); } else if (getRequiredRotation() == ORIENTATION_270) { matrix.postTranslate(0, scale * sWidth); } if (tileBgPaint != null) { if (sRect == null) { sRect = new RectF(); } sRect.set(0f, 0f, bitmapIsPreview ? bitmap.getWidth() : sWidth, bitmapIsPreview ? bitmap.getHeight() : sHeight); matrix.mapRect(sRect); canvas.drawRect(sRect, tileBgPaint); } canvas.drawBitmap(bitmap, matrix, bitmapPaint); } if (debug) { canvas.drawText("Scale: " + String.format(Locale.ENGLISH, "%.2f", scale) + " (" + String.format(Locale.ENGLISH, "%.2f", minScale()) + " - " + String.format(Locale.ENGLISH, "%.2f", maxScale) + ")", px(5), px(15), debugTextPaint); canvas.drawText("Translate: " + String.format(Locale.ENGLISH, "%.2f", vTranslate.x) + ":" + String.format(Locale.ENGLISH, "%.2f", vTranslate.y), px(5), px(30), debugTextPaint); PointF center = getCenter(); if (null != center) canvas.drawText("Source center: " + String.format(Locale.ENGLISH, "%.2f", center.x) + ":" + String.format(Locale.ENGLISH, "%.2f", center.y), px(5), px(45), debugTextPaint); if (anim != null) { PointF vCenterStart = sourceToViewCoord(anim.sCenterStart); PointF vCenterEndRequested = sourceToViewCoord(anim.sCenterEndRequested); PointF vCenterEnd = sourceToViewCoord(anim.sCenterEnd); if (vCenterStart != null) { canvas.drawCircle(vCenterStart.x, vCenterStart.y, px(10), debugLinePaint); debugLinePaint.setColor(Color.RED); } if (vCenterEndRequested != null) { canvas.drawCircle(vCenterEndRequested.x, vCenterEndRequested.y, px(20), debugLinePaint); debugLinePaint.setColor(Color.BLUE); } if (vCenterEnd != null) { canvas.drawCircle(vCenterEnd.x, vCenterEnd.y, px(25), debugLinePaint); debugLinePaint.setColor(Color.CYAN); } canvas.drawCircle(getWidthInternal() / 2f, getHeightInternal() / 2f, px(30), debugLinePaint); } if (vCenterStart != null) { debugLinePaint.setColor(Color.RED); canvas.drawCircle(vCenterStart.x, vCenterStart.y, px(20), debugLinePaint); } if (quickScaleSCenter != null) { debugLinePaint.setColor(Color.BLUE); canvas.drawCircle(sourceToViewX(quickScaleSCenter.x), sourceToViewY(quickScaleSCenter.y), px(35), debugLinePaint); } if (quickScaleVStart != null && isQuickScaling) { debugLinePaint.setColor(Color.CYAN); canvas.drawCircle(quickScaleVStart.x, quickScaleVStart.y, px(30), debugLinePaint); } debugLinePaint.setColor(Color.MAGENTA); } } /** * Helper method for setting the values of a tile matrix array. */ private void setMatrixArray(float[] array, float f0, float f1, float f2, float f3, float f4, float f5, float f6, float f7) { array[0] = f0; array[1] = f1; array[2] = f2; array[3] = f3; array[4] = f4; array[5] = f5; array[6] = f6; array[7] = f7; } /** * Checks whether the base layer of tiles or full size bitmap is ready. */ private boolean isBaseLayerReady() { if (bitmap != null && !bitmapIsPreview) { return true; } else if (tileMap != null) { boolean baseLayerReady = true; for (Map.Entry<Integer, List<Tile>> tileMapEntry : tileMap.entrySet()) { if (tileMapEntry.getKey() == fullImageSampleSize) { for (Tile tile : tileMapEntry.getValue()) { if (tile.loading || tile.bitmap == null) { baseLayerReady = false; } } } } return baseLayerReady; } return false; } /** * Check whether view and image dimensions are known and either a preview, full size image or * base layer tiles are loaded. First time, send ready event to listener. The next draw will * display an image. 
*/ private boolean checkReady() { boolean ready = getWidthInternal() > 0 && getHeightInternal() > 0 && sWidth > 0 && sHeight > 0 && (bitmap != null || isBaseLayerReady()); if (!readySent && ready) { preDraw(); readySent = true; onReady(); if (onImageEventListener != null) { onImageEventListener.onReady(); } } return ready; } /** * Check whether either the full size bitmap or base layer tiles are loaded. First time, send image * loaded event to listener. */ private boolean checkImageLoaded() { boolean imageLoaded = isBaseLayerReady(); if (!imageLoadedSent && imageLoaded) { preDraw(); imageLoadedSent = true; onImageLoaded(); if (onImageEventListener != null) { onImageEventListener.onImageLoaded(); } } return imageLoaded; } /** * Creates Paint objects once when first needed. */ private void createPaints() { if (bitmapPaint == null) { bitmapPaint = new Paint(); bitmapPaint.setAntiAlias(true); bitmapPaint.setFilterBitmap(true); bitmapPaint.setDither(true); } if ((debugTextPaint == null || debugLinePaint == null) && debug) { debugTextPaint = new Paint(); debugTextPaint.setTextSize(px(12)); debugTextPaint.setColor(Color.MAGENTA); debugTextPaint.setStyle(Style.FILL); debugLinePaint = new Paint(); debugLinePaint.setColor(Color.MAGENTA); debugLinePaint.setStyle(Style.STROKE); debugLinePaint.setStrokeWidth(px(1)); } } /** * Called on first draw when the view has dimensions. Calculates the initial sample size and starts async loading of * the base layer image - the whole source subsampled as necessary. */ private synchronized void initialiseBaseLayer(@NonNull Point maxTileDimensions) { debug("initialiseBaseLayer maxTileDimensions=%dx%d", maxTileDimensions.x, maxTileDimensions.y); satTemp = new ScaleAndTranslate(0f, new PointF(0, 0)); fitToBounds(true, satTemp); // Load double resolution - next level will be split into four tiles and at the center all four are required, // so don't bother with tiling until the next level 16 tiles are needed. fullImageSampleSize = calculateInSampleSize(satTemp.scale); if (fullImageSampleSize > 1) { fullImageSampleSize /= 2; } if (fullImageSampleSize == 1 && sRegion == null && sWidth() < maxTileDimensions.x && sHeight() < maxTileDimensions.y) { // Whole image is required at native resolution, and is smaller than the canvas max bitmap size. // Use BitmapDecoder for better image support. decoder.recycle(); decoder = null; BitmapLoadTask task = new BitmapLoadTask(this, getContext(), bitmapDecoderFactory, uri, false); execute(task); } else { initialiseTileMap(maxTileDimensions); List<Tile> baseGrid = tileMap.get(fullImageSampleSize); for (Tile baseTile : baseGrid) { TileLoadTask task = new TileLoadTask(this, decoder, baseTile); execute(task); } refreshRequiredTiles(true); } } /** * Loads the optimum tiles for display at the current scale and translate, so the screen can be filled with tiles * that are at least as high resolution as the screen. Frees up bitmaps that are now off the screen. * * @param load Whether to load the new tiles needed. Use false while scrolling/panning for performance. */ private void refreshRequiredTiles(boolean load) { if (decoder == null || tileMap == null) { return; } int sampleSize = Math.min(fullImageSampleSize, calculateInSampleSize(scale)); // Load tiles of the correct sample size that are on screen. Discard tiles off screen, and those that are higher // resolution than required, or lower res than required but not the base layer, so the base layer is always present. 
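        // Illustrative example of the selection above (numbers assumed for illustration):
        // for a 4000x6000 source at scale 0.3, calculateInSampleSize() targets 1200x1800,
        // both dimension ratios round to 3, and flooring to a power of two gives sample size 2.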
for (Map.Entry<Integer, List<Tile>> tileMapEntry : tileMap.entrySet()) { for (Tile tile : tileMapEntry.getValue()) { if (tile.sampleSize < sampleSize || (tile.sampleSize > sampleSize && tile.sampleSize != fullImageSampleSize)) { tile.visible = false; if (tile.bitmap != null) { tile.bitmap.recycle(); tile.bitmap = null; } } if (tile.sampleSize == sampleSize) { if (tileVisible(tile)) { tile.visible = true; if (!tile.loading && tile.bitmap == null && load) { TileLoadTask task = new TileLoadTask(this, decoder, tile); execute(task); } } else if (tile.sampleSize != fullImageSampleSize) { tile.visible = false; if (tile.bitmap != null) { tile.bitmap.recycle(); tile.bitmap = null; } } } else if (tile.sampleSize == fullImageSampleSize) { tile.visible = true; } } } } /** * Determine whether tile is visible. */ private boolean tileVisible(Tile tile) { float sVisLeft = viewToSourceX(0), sVisRight = viewToSourceX(getWidthInternal()), sVisTop = viewToSourceY(0), sVisBottom = viewToSourceY(getHeightInternal()); return !(sVisLeft > tile.sRect.right || tile.sRect.left > sVisRight || sVisTop > tile.sRect.bottom || tile.sRect.top > sVisBottom); } /** * Sets scale and translate ready for the next draw. */ private void preDraw() { if (getWidthInternal() == 0 || getHeightInternal() == 0 || sWidth <= 0 || sHeight <= 0) { return; } // If waiting to translate to new center position, set translate now if (sPendingCenter != null && pendingScale != null) { scale = pendingScale; if (vTranslate == null) { vTranslate = new PointF(); } vTranslate.x = (getWidthInternal() / 2f) - (scale * sPendingCenter.x); vTranslate.y = (getHeightInternal() / 2f) - (scale * sPendingCenter.y); sPendingCenter = null; pendingScale = null; fitToBounds(true); refreshRequiredTiles(true); } // On first display of base image set up position, and in other cases make sure scale is correct. fitToBounds(false); } /** * Calculates sample size to fit the source image in given bounds. */ private int calculateInSampleSize(float scale) { if (minimumTileDpi > 0) { DisplayMetrics metrics = getResources().getDisplayMetrics(); float averageDpi = (metrics.xdpi + metrics.ydpi) / 2; scale = (minimumTileDpi / averageDpi) * scale; } int reqWidth = (int) (sWidth() * scale); int reqHeight = (int) (sHeight() * scale); // Raw height and width of image int inSampleSize = 1; if (reqWidth == 0 || reqHeight == 0) { return 32; } if (sHeight() > reqHeight || sWidth() > reqWidth) { // Calculate ratios of height and width to requested height and width final int heightRatio = Math.round((float) sHeight() / (float) reqHeight); final int widthRatio = Math.round((float) sWidth() / (float) reqWidth); // Choose the smallest ratio as inSampleSize value, this will guarantee // a final image with both dimensions larger than or equal to the // requested height and width. inSampleSize = heightRatio < widthRatio ? heightRatio : widthRatio; } // We want the actual sample size that will be used, so round down to nearest power of 2. int power = 1; while (power * 2 < inSampleSize) { power = power * 2; } return power; } /** * Adjusts hypothetical future scale and translate values to keep scale within the allowed range and the image on screen. Minimum scale * is set so one dimension fills the view and the image is centered on the other dimension. Used to calculate what the target of an * animation should be. * * @param center Whether the image should be centered in the dimension it's too small to fill. While animating this can be false to avoid changes in direction as bounds are reached. 
* @param sat The scale we want and the translation we're aiming for. The values are adjusted to be valid. */ private void fitToBounds(boolean center, ScaleAndTranslate sat) { if (panLimit == PAN_LIMIT_OUTSIDE && isReady()) { center = false; } PointF vTranslate = sat.vTranslate; float scale = limitedScale(sat.scale); float scaleWidth = scale * sWidth(); float scaleHeight = scale * sHeight(); if (panLimit == PAN_LIMIT_CENTER && isReady()) { vTranslate.x = Math.max(vTranslate.x, getWidthInternal() / 2f - scaleWidth); vTranslate.y = Math.max(vTranslate.y, getHeightInternal() / 2f - scaleHeight); } else if (center) { vTranslate.x = Math.max(vTranslate.x, getWidthInternal() - scaleWidth); vTranslate.y = Math.max(vTranslate.y, getHeightInternal() - scaleHeight); } else { vTranslate.x = Math.max(vTranslate.x, -scaleWidth); vTranslate.y = Math.max(vTranslate.y, -scaleHeight); } // Asymmetric padding adjustments float xPaddingRatio = getPaddingLeft() > 0 || getPaddingRight() > 0 ? getPaddingLeft() / (float) (getPaddingLeft() + getPaddingRight()) : 0.5f; float yPaddingRatio = getPaddingTop() > 0 || getPaddingBottom() > 0 ? getPaddingTop() / (float) (getPaddingTop() + getPaddingBottom()) : 0.5f; float maxTx; float maxTy; if (panLimit == PAN_LIMIT_CENTER && isReady()) { maxTx = Math.max(0, getWidthInternal() / 2); maxTy = Math.max(0, getHeightInternal() / 2); } else if (center) { maxTx = Math.max(0, (getWidthInternal() - scaleWidth) * xPaddingRatio); maxTy = Math.max(0, (getHeightInternal() - scaleHeight) * yPaddingRatio); } else { maxTx = Math.max(0, getWidthInternal()); maxTy = Math.max(0, getHeightInternal()); } vTranslate.x = Math.min(vTranslate.x, maxTx); vTranslate.y = Math.min(vTranslate.y, maxTy); sat.scale = scale; } /** * Adjusts current scale and translate values to keep scale within the allowed range and the image on screen. Minimum scale * is set so one dimension fills the view and the image is centered on the other dimension. * * @param center Whether the image should be centered in the dimension it's too small to fill. While animating this can be false to avoid changes in direction as bounds are reached. */ private void fitToBounds(boolean center) { boolean init = false; if (vTranslate == null) { init = true; vTranslate = new PointF(0, 0); } if (satTemp == null) { satTemp = new ScaleAndTranslate(0, new PointF(0, 0)); } satTemp.scale = scale; satTemp.vTranslate.set(vTranslate); fitToBounds(center, satTemp); scale = satTemp.scale; vTranslate.set(satTemp.vTranslate); if (init && minimumScaleType != SCALE_TYPE_START) { vTranslate.set(vTranslateForSCenter(sWidth() / 2f, sHeight() / 2f, scale)); } } /** * Once source image and view dimensions are known, creates a map of sample size to tile grid. 
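     * For example (illustrative), a very large source typically yields entries for sample
     * sizes such as 4, 2 and 1, with each halving of the sample size using a finer tile grid.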
*/ private void initialiseTileMap(Point maxTileDimensions) { debug("initialiseTileMap maxTileDimensions=%dx%d", maxTileDimensions.x, maxTileDimensions.y); this.tileMap = new LinkedHashMap<>(); int sampleSize = fullImageSampleSize; int xTiles = 1; int yTiles = 1; while (true) { int sTileWidth = sWidth() / xTiles; int sTileHeight = sHeight() / yTiles; int subTileWidth = sTileWidth / sampleSize; int subTileHeight = sTileHeight / sampleSize; while (subTileWidth + xTiles + 1 > maxTileDimensions.x || (subTileWidth > getWidthInternal() * 1.25 && sampleSize < fullImageSampleSize)) { xTiles += 1; sTileWidth = sWidth() / xTiles; subTileWidth = sTileWidth / sampleSize; } while (subTileHeight + yTiles + 1 > maxTileDimensions.y || (subTileHeight > getHeightInternal() * 1.25 && sampleSize < fullImageSampleSize)) { yTiles += 1; sTileHeight = sHeight() / yTiles; subTileHeight = sTileHeight / sampleSize; } List<Tile> tileGrid = new ArrayList<>(xTiles * yTiles); for (int x = 0; x < xTiles; x++) { for (int y = 0; y < yTiles; y++) { Tile tile = new Tile(); tile.sampleSize = sampleSize; tile.visible = sampleSize == fullImageSampleSize; tile.sRect = new Rect( x * sTileWidth, y * sTileHeight, x == xTiles - 1 ? sWidth() : (x + 1) * sTileWidth, y == yTiles - 1 ? sHeight() : (y + 1) * sTileHeight ); tile.vRect = new Rect(0, 0, 0, 0); tile.fileSRect = new Rect(tile.sRect); tileGrid.add(tile); } } tileMap.put(sampleSize, tileGrid); if (sampleSize == 1) { break; } else { sampleSize /= 2; } } } /** * Async task used to get image details without blocking the UI thread. */ private static class TilesInitTask extends AsyncTask<Void, Void, int[]> { private final WeakReference<CustomSubsamplingScaleImageView> viewRef; private final WeakReference<Context> contextRef; private final WeakReference<DecoderFactory<? extends ImageRegionDecoder>> decoderFactoryRef; private final Uri source; private ImageRegionDecoder decoder; private Exception exception; TilesInitTask(CustomSubsamplingScaleImageView view, Context context, DecoderFactory<? extends ImageRegionDecoder> decoderFactory, Uri source) { this.viewRef = new WeakReference<>(view); this.contextRef = new WeakReference<>(context); this.decoderFactoryRef = new WeakReference<>(decoderFactory); this.source = source; } @Override protected int[] doInBackground(Void... params) { try { String sourceUri = source.toString(); Context context = contextRef.get(); DecoderFactory<? 
extends ImageRegionDecoder> decoderFactory = decoderFactoryRef.get();
CustomSubsamplingScaleImageView view = viewRef.get();
if (context != null && decoderFactory != null && view != null) {
    view.debug("TilesInitTask.doInBackground");
    decoder = decoderFactory.make();
    Point dimensions = decoder.init(context, source);
    int sWidth = dimensions.x;
    int sHeight = dimensions.y;
    int exifOrientation = view.getExifOrientation(context, sourceUri);
    if (view.sRegion != null) {
        view.sRegion.left = Math.max(0, view.sRegion.left);
        view.sRegion.top = Math.max(0, view.sRegion.top);
        view.sRegion.right = Math.min(sWidth, view.sRegion.right);
        view.sRegion.bottom = Math.min(sHeight, view.sRegion.bottom);
        sWidth = view.sRegion.width();
        sHeight = view.sRegion.height();
    }
    return new int[]{sWidth, sHeight, exifOrientation};
}
} catch (Exception e) {
    Timber.tag(TAG).e(e, "Failed to initialise bitmap decoder");
    this.exception = e;
}
return null;
}

@Override
protected void onPostExecute(int[] xyo) {
    final CustomSubsamplingScaleImageView view = viewRef.get();
    if (view != null) {
        if (decoder != null && xyo != null && xyo.length == 3) {
            view.onTilesInited(decoder, xyo[0], xyo[1], xyo[2]);
        } else if (exception != null && view.onImageEventListener != null) {
            view.onImageEventListener.onImageLoadError(exception);
        }
    }
}
}

/**
 * Called by worker task when decoder is ready and image size and EXIF orientation is known.
 */
private synchronized void onTilesInited(ImageRegionDecoder decoder, int sWidth, int sHeight, int sOrientation) {
    debug("onTilesInited sWidth=%d, sHeight=%d, sOrientation=%d", sWidth, sHeight, sOrientation);
    // If actual dimensions don't match the declared size, reset everything.
    if (this.sWidth > 0 && this.sHeight > 0 && (this.sWidth != sWidth || this.sHeight != sHeight)) {
        reset(false);
        if (bitmap != null) {
            if (!bitmapIsCached) {
                bitmap.recycle();
            }
            bitmap = null;
            if (onImageEventListener != null && bitmapIsCached) {
                onImageEventListener.onPreviewReleased();
            }
            bitmapIsPreview = false;
            bitmapIsCached = false;
        }
    }
    this.decoder = decoder;
    this.sWidth = sWidth;
    this.sHeight = sHeight;
    this.sOrientation = sOrientation;
    checkReady();
    if (!checkImageLoaded() && maxTileWidth > 0 && maxTileWidth != TILE_SIZE_AUTO && maxTileHeight > 0 && maxTileHeight != TILE_SIZE_AUTO && getWidthInternal() > 0 && getHeightInternal() > 0) {
        initialiseBaseLayer(new Point(maxTileWidth, maxTileHeight));
    }
    invalidate();
    requestLayout();
}

/**
 * Async task used to load images without blocking the UI thread.
 */
private static class TileLoadTask extends AsyncTask<Void, Void, Bitmap> {
    private final WeakReference<CustomSubsamplingScaleImageView> viewRef;
    private final WeakReference<ImageRegionDecoder> decoderRef;
    private final WeakReference<Tile> tileRef;
    private Exception exception;

    TileLoadTask(CustomSubsamplingScaleImageView view, ImageRegionDecoder decoder, Tile tile) {
        this.viewRef = new WeakReference<>(view);
        this.decoderRef = new WeakReference<>(decoder);
        this.tileRef = new WeakReference<>(tile);
        tile.loading = true;
    }

    @Override
    protected Bitmap doInBackground(Void...
params) { try { CustomSubsamplingScaleImageView view = viewRef.get(); ImageRegionDecoder decoder = decoderRef.get(); Tile tile = tileRef.get(); if (decoder != null && tile != null && view != null && decoder.isReady() && tile.visible) { view.debug("TileLoadTask.doInBackground, tile.sRect=%s, tile.sampleSize=%d", tile.sRect, tile.sampleSize); view.decoderLock.readLock().lock(); try { if (decoder.isReady()) { // Update tile's file sRect according to rotation view.fileSRect(tile.sRect, tile.fileSRect); if (view.sRegion != null) { tile.fileSRect.offset(view.sRegion.left, view.sRegion.top); } return decoder.decodeRegion(tile.fileSRect, tile.sampleSize); } else { tile.loading = false; } } finally { view.decoderLock.readLock().unlock(); } } else if (tile != null) { tile.loading = false; } } catch (Exception e) { Timber.tag(TAG).e(e, "Failed to decode tile"); this.exception = e; } catch (OutOfMemoryError e) { Timber.tag(TAG).e(e, "Failed to decode tile - OutOfMemoryError"); this.exception = new RuntimeException(e); } return null; } @Override protected void onPostExecute(Bitmap bitmap) { final CustomSubsamplingScaleImageView subsamplingScaleImageView = viewRef.get(); final Tile tile = tileRef.get(); if (subsamplingScaleImageView != null && tile != null) { if (bitmap != null) { tile.bitmap = bitmap; tile.loading = false; subsamplingScaleImageView.onTileLoaded(); } else if (exception != null && subsamplingScaleImageView.onImageEventListener != null) { subsamplingScaleImageView.onImageEventListener.onTileLoadError(exception); } } } } /** * Called by worker task when a tile has loaded. Redraws the view. */ private synchronized void onTileLoaded() { debug("onTileLoaded"); checkReady(); checkImageLoaded(); if (isBaseLayerReady() && bitmap != null) { if (!bitmapIsCached) { bitmap.recycle(); } bitmap = null; if (onImageEventListener != null && bitmapIsCached) { onImageEventListener.onPreviewReleased(); } bitmapIsPreview = false; bitmapIsCached = false; } invalidate(); } /** * Async task used to load bitmap without blocking the UI thread. */ private static class BitmapLoadTask extends AsyncTask<Void, Void, Integer> { private final WeakReference<CustomSubsamplingScaleImageView> viewRef; private final WeakReference<Context> contextRef; private final WeakReference<DecoderFactory<? extends ImageDecoder>> decoderFactoryRef; private final Uri source; private final boolean preview; private Bitmap bitmap; private Exception exception; BitmapLoadTask(CustomSubsamplingScaleImageView view, Context context, DecoderFactory<? extends ImageDecoder> decoderFactory, Uri source, boolean preview) { this.viewRef = new WeakReference<>(view); this.contextRef = new WeakReference<>(context); this.decoderFactoryRef = new WeakReference<>(decoderFactory); this.source = source; this.preview = preview; } @Override protected Integer doInBackground(Void... params) { try { String sourceUri = source.toString(); Context context = contextRef.get(); DecoderFactory<? 
extends ImageDecoder> decoderFactory = decoderFactoryRef.get(); CustomSubsamplingScaleImageView view = viewRef.get(); if (context != null && decoderFactory != null && view != null) { view.debug("BitmapLoadTask.doInBackground"); bitmap = decoderFactory.make().decode(context, source); return view.getExifOrientation(context, sourceUri); } } catch (Exception e) { Timber.tag(TAG).e(e, "Failed to load bitmap"); this.exception = e; } catch (OutOfMemoryError e) { Timber.tag(TAG).e(e, "Failed to load bitmap - OutOfMemoryError"); this.exception = new RuntimeException(e); } return null; } @Override protected void onPostExecute(Integer orientation) { CustomSubsamplingScaleImageView subsamplingScaleImageView = viewRef.get(); if (subsamplingScaleImageView != null) { if (bitmap != null && orientation != null) { if (preview) { subsamplingScaleImageView.onPreviewLoaded(bitmap); } else { subsamplingScaleImageView.onImageLoaded(bitmap, orientation, false); } } else if (exception != null && subsamplingScaleImageView.onImageEventListener != null) { if (preview) { subsamplingScaleImageView.onImageEventListener.onPreviewLoadError(exception); } else { subsamplingScaleImageView.onImageEventListener.onImageLoadError(exception); } } } } } /** * Called by worker task when preview image is loaded. */ private synchronized void onPreviewLoaded(Bitmap previewBitmap) { debug("onPreviewLoaded"); if (bitmap != null || imageLoadedSent) { previewBitmap.recycle(); return; } if (pRegion != null) { bitmap = Bitmap.createBitmap(previewBitmap, pRegion.left, pRegion.top, pRegion.width(), pRegion.height()); } else { bitmap = previewBitmap; } bitmapIsPreview = true; if (checkReady()) { invalidate(); requestLayout(); } } /** * Called by worker task when full size image bitmap is ready (tiling is disabled). */ private synchronized void onImageLoaded(Bitmap bitmap, int sOrientation, boolean bitmapIsCached) { debug("onImageLoaded"); // If actual dimensions don't match the declared size, reset everything. if (this.sWidth > 0 && this.sHeight > 0 && (this.sWidth != bitmap.getWidth() || this.sHeight != bitmap.getHeight())) { reset(false); } if (this.bitmap != null && !this.bitmapIsCached) { this.bitmap.recycle(); } if (this.bitmap != null && this.bitmapIsCached && onImageEventListener != null) { onImageEventListener.onPreviewReleased(); } this.bitmapIsPreview = false; this.bitmapIsCached = bitmapIsCached; this.bitmap = bitmap; this.sWidth = bitmap.getWidth(); this.sHeight = bitmap.getHeight(); this.sOrientation = sOrientation; boolean ready = checkReady(); boolean imageLoaded = checkImageLoaded(); if (ready || imageLoaded) { invalidate(); requestLayout(); } } /** * Helper method for load tasks. Examines the EXIF info on the image file to determine the orientation. * This will only work for external files, not assets, resources or other URIs. 
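     * (In this implementation that means {@code content://} URIs read via the media store and
     * plain {@code file://} paths; {@code file:///android_asset/} URIs are skipped.)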
*/
@AnyThread
private int getExifOrientation(Context context, String sourceUri) {
    int exifOrientation = ORIENTATION_0;
    if (sourceUri.startsWith(ContentResolver.SCHEME_CONTENT)) {
        Cursor cursor = null;
        try {
            String[] columns = {MediaStore.Images.Media.ORIENTATION};
            cursor = context.getContentResolver().query(Uri.parse(sourceUri), columns, null, null, null);
            if (cursor != null) {
                if (cursor.moveToFirst()) {
                    int orientation = cursor.getInt(0);
                    if (VALID_ORIENTATIONS.contains(orientation) && orientation != ORIENTATION_USE_EXIF) {
                        exifOrientation = orientation;
                    } else {
                        Timber.tag(TAG).w("Unsupported orientation: %s", orientation);
                    }
                }
            }
        } catch (Exception e) {
            Timber.tag(TAG).w("Could not get orientation of image from media store");
        } finally {
            if (cursor != null) {
                cursor.close();
            }
        }
    } else if (sourceUri.startsWith(ImageSource.FILE_SCHEME) && !sourceUri.startsWith(ImageSource.ASSET_SCHEME)) {
        try {
            ExifInterface exifInterface = new ExifInterface(sourceUri.substring(ImageSource.FILE_SCHEME.length() - 1));
            int orientationAttr = exifInterface.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
            if (orientationAttr == ExifInterface.ORIENTATION_NORMAL || orientationAttr == ExifInterface.ORIENTATION_UNDEFINED) {
                exifOrientation = ORIENTATION_0;
            } else if (orientationAttr == ExifInterface.ORIENTATION_ROTATE_90) {
                exifOrientation = ORIENTATION_90;
            } else if (orientationAttr == ExifInterface.ORIENTATION_ROTATE_180) {
                exifOrientation = ORIENTATION_180;
            } else if (orientationAttr == ExifInterface.ORIENTATION_ROTATE_270) {
                exifOrientation = ORIENTATION_270;
            } else {
                Timber.tag(TAG).w("Unsupported EXIF orientation: %s", orientationAttr);
            }
        } catch (Exception e) {
            Timber.tag(TAG).w("Could not get EXIF orientation of image");
        }
    }
    return exifOrientation;
}

private void execute(AsyncTask<Void, Void, ?> asyncTask) {
    asyncTask.executeOnExecutor(executor);
}

private static class Tile {
    private Rect sRect;
    private int sampleSize;
    private Bitmap bitmap;
    private boolean loading;
    private boolean visible;

    // Volatile fields instantiated once then updated before use to reduce GC.
    private Rect vRect;
    private Rect fileSRect;
}

private static class Anim {
    private float scaleStart; // Scale at start of anim
    private float scaleEnd; // Scale at end of anim (target)
    private PointF sCenterStart; // Source center point at start
    private PointF sCenterEnd; // Source center point at end, adjusted for pan limits
    private PointF sCenterEndRequested; // Source center point that was requested, without adjustment
    private PointF vFocusStart; // View point that was double tapped
    private PointF vFocusEnd; // Where the view focal point should be moved to during the anim
    private long duration = 500; // How long the anim takes
    private boolean interruptible = true; // Whether the anim can be interrupted by a touch
    private int easing = EASE_IN_OUT_QUAD; // Easing style
    private int origin = ORIGIN_ANIM; // Animation origin (API, double tap or fling)
    private long time = System.currentTimeMillis(); // Start time
    private OnAnimationEventListener listener; // Event listener
}

private static class ScaleAndTranslate {
    private ScaleAndTranslate(float scale, PointF vTranslate) {
        this.scale = scale;
        this.vTranslate = vTranslate;
    }

    private float scale;
    private final PointF vTranslate;
}

/**
 * Set scale, center and orientation from saved state.
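 * <p>
 * Sketch of the save/restore round trip (illustrative; {@code imageView} is assumed):
 * <pre>{@code
 * ImageViewState state = imageView.getState();   // e.g. saved before rotation
 * // ... after recreation, once an image is set ...
 * if (state != null) {
 *     imageView.setScaleAndCenter(state.getScale(), state.getCenter());
 * }
 * }</pre>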
*/
private void restoreState(ImageViewState state) {
    if (state != null && VALID_ORIENTATIONS.contains(state.getOrientation())) {
        this.orientation = state.getOrientation();
        this.pendingScale = state.getScale();
        this.sPendingCenter = state.getCenter();
        invalidate();
    }
}

/**
 * By default the View automatically calculates the optimal tile size. Set this to override the
 * automatic calculation and force an upper limit to the dimensions of the generated tiles.
 * Passing {@link #TILE_SIZE_AUTO} will re-enable the default behaviour.
 *
 * @param maxPixels Maximum tile size X and Y in pixels.
 */
public void setMaxTileSize(int maxPixels) {
    this.maxTileWidth = maxPixels;
    this.maxTileHeight = maxPixels;
}

/**
 * By default the View automatically calculates the optimal tile size. Set this to override the
 * automatic calculation and force an upper limit to the dimensions of the generated tiles.
 * Passing {@link #TILE_SIZE_AUTO} will re-enable the default behaviour.
 *
 * @param maxPixelsX Maximum tile width.
 * @param maxPixelsY Maximum tile height.
 */
public void setMaxTileSize(int maxPixelsX, int maxPixelsY) {
    this.maxTileWidth = maxPixelsX;
    this.maxTileHeight = maxPixelsY;
}

/**
 * Use canvas max bitmap width and height instead of the default 2048, to avoid redundant tiling.
 */
@NonNull
private Point getMaxBitmapDimensions(Canvas canvas) {
    return new Point(Math.min(canvas.getMaximumBitmapWidth(), maxTileWidth), Math.min(canvas.getMaximumBitmapHeight(), maxTileHeight));
}

/**
 * Get source width taking rotation into account.
 */
@SuppressWarnings("SuspiciousNameCombination")
private int sWidth() {
    int rotation = getRequiredRotation();
    if (rotation == 90 || rotation == 270) {
        return sHeight;
    } else {
        return sWidth;
    }
}

/**
 * Get source height taking rotation into account.
 */
@SuppressWarnings("SuspiciousNameCombination")
private int sHeight() {
    int rotation = getRequiredRotation();
    if (rotation == 90 || rotation == 270) {
        return sWidth;
    } else {
        return sHeight;
    }
}

/**
 * Converts source rectangle from tile, which treats the image file as if it were in the correct orientation already,
 * to the rectangle of the image that needs to be loaded.
 */
@SuppressWarnings("SuspiciousNameCombination")
@AnyThread
private void fileSRect(Rect sRect, Rect target) {
    if (getRequiredRotation() == 0) {
        target.set(sRect);
    } else if (getRequiredRotation() == 90) {
        target.set(sRect.top, sHeight - sRect.right, sRect.bottom, sHeight - sRect.left);
    } else if (getRequiredRotation() == 180) {
        target.set(sWidth - sRect.right, sHeight - sRect.bottom, sWidth - sRect.left, sHeight - sRect.top);
    } else {
        target.set(sWidth - sRect.bottom, sRect.left, sWidth - sRect.top, sRect.right);
    }
}

/**
 * Determines the rotation to be applied to tiles, based on EXIF orientation or chosen setting.
 */
@AnyThread
private int getRequiredRotation() {
    if (orientation == ORIENTATION_USE_EXIF) {
        return sOrientation;
    } else {
        return orientation;
    }
}

/**
 * Pythagoras distance between two points.
 */
private float distance(float x0, float x1, float y0, float y1) {
    float x = x0 - x1;
    float y = y0 - y1;
    return (float) Math.sqrt(x * x + y * y);
}

/**
 * Releases all resources the view is using and resets the state, nulling any fields that use significant memory.
 * After you have called this method, the view can be re-used by setting a new image. Settings are remembered
 * but state (scale and center) is forgotten. You can restore these yourself if required.
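 * <p>
 * For example (illustrative), when a gallery page is destroyed:
 * <pre>{@code
 * ImageViewState state = imageView.getState();   // optional: keep position for later
 * imageView.recycle();                           // free bitmaps and tiles
 * }</pre>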
*/ public void recycle() { reset(true); bitmapPaint = null; debugTextPaint = null; debugLinePaint = null; tileBgPaint = null; } /** * Convert screen to source x coordinate. */ private float viewToSourceX(float vx) { if (vTranslate == null) { return Float.NaN; } return (vx - vTranslate.x) / scale; } /** * Convert screen to source y coordinate. */ private float viewToSourceY(float vy) { if (vTranslate == null) { return Float.NaN; } return (vy - vTranslate.y) / scale; } /** * Converts a rectangle within the view to the corresponding rectangle from the source file, taking * into account the current scale, translation, orientation and clipped region. This can be used * to decode a bitmap from the source file. * <p> * This method will only work when the image has fully initialised, after {@link #isReady()} returns * true. It is not guaranteed to work with preloaded bitmaps. * <p> * The result is written to the fRect argument. Re-use a single instance for efficiency. * * @param vRect rectangle representing the view area to interpret. * @param fRect rectangle instance to which the result will be written. Re-use for efficiency. */ public void viewToFileRect(Rect vRect, Rect fRect) { if (vTranslate == null || !readySent) { return; } fRect.set( (int) viewToSourceX(vRect.left), (int) viewToSourceY(vRect.top), (int) viewToSourceX(vRect.right), (int) viewToSourceY(vRect.bottom)); fileSRect(fRect, fRect); fRect.set( Math.max(0, fRect.left), Math.max(0, fRect.top), Math.min(sWidth, fRect.right), Math.min(sHeight, fRect.bottom) ); if (sRegion != null) { fRect.offset(sRegion.left, sRegion.top); } } /** * Find the area of the source file that is currently visible on screen, taking into account the * current scale, translation, orientation and clipped region. This is a convenience method; see * {@link #viewToFileRect(Rect, Rect)}. * * @param fRect rectangle instance to which the result will be written. Re-use for efficiency. */ public void visibleFileRect(Rect fRect) { if (vTranslate == null || !readySent) { return; } fRect.set(0, 0, getWidthInternal(), getHeightInternal()); viewToFileRect(fRect, fRect); } /** * Convert screen coordinate to source coordinate. * * @param vxy view X/Y coordinate. * @return a coordinate representing the corresponding source coordinate. */ @Nullable public final PointF viewToSourceCoord(PointF vxy) { return viewToSourceCoord(vxy.x, vxy.y, new PointF()); } /** * Convert screen coordinate to source coordinate. * * @param vx view X coordinate. * @param vy view Y coordinate. * @return a coordinate representing the corresponding source coordinate. */ @Nullable public final PointF viewToSourceCoord(float vx, float vy) { return viewToSourceCoord(vx, vy, new PointF()); } /** * Convert screen coordinate to source coordinate. * * @param vxy view coordinates to convert. * @param sTarget target object for result. The same instance is also returned. * @return source coordinates. This is the same instance passed to the sTarget param. */ @Nullable public final PointF viewToSourceCoord(PointF vxy, @NonNull PointF sTarget) { return viewToSourceCoord(vxy.x, vxy.y, sTarget); } /** * Convert screen coordinate to source coordinate. * * @param vx view X coordinate. * @param vy view Y coordinate. * @param sTarget target object for result. The same instance is also returned. * @return source coordinates. This is the same instance passed to the sTarget param. 
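 * <p>
 * Allocation-free sketch (illustrative; {@code sPoint} is a field reused across events):
 * <pre>{@code
 * private final PointF sPoint = new PointF();
 * // in a touch handler:
 * PointF s = imageView.viewToSourceCoord(event.getX(), event.getY(), sPoint);
 * if (s != null) {
 *     // s holds the tapped position in source-image pixels
 * }
 * }</pre>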
*/ @Nullable public final PointF viewToSourceCoord(float vx, float vy, @NonNull PointF sTarget) { if (vTranslate == null) { return null; } sTarget.set(viewToSourceX(vx), viewToSourceY(vy)); return sTarget; } /** * Convert source to view x coordinate. */ private float sourceToViewX(float sx) { if (vTranslate == null) { return Float.NaN; } return (sx * scale) + vTranslate.x; } /** * Convert source to view y coordinate. */ private float sourceToViewY(float sy) { if (vTranslate == null) { return Float.NaN; } return (sy * scale) + vTranslate.y; } /** * Convert source coordinate to view coordinate. * * @param sxy source coordinates to convert. * @return view coordinates. */ @Nullable public final PointF sourceToViewCoord(PointF sxy) { return sourceToViewCoord(sxy.x, sxy.y, new PointF()); } /** * Convert source coordinate to view coordinate. * * @param sx source X coordinate. * @param sy source Y coordinate. * @return view coordinates. */ @Nullable public final PointF sourceToViewCoord(float sx, float sy) { return sourceToViewCoord(sx, sy, new PointF()); } /** * Convert source coordinate to view coordinate. * * @param sxy source coordinates to convert. * @param vTarget target object for result. The same instance is also returned. * @return view coordinates. This is the same instance passed to the vTarget param. */ @SuppressWarnings("UnusedReturnValue") @Nullable public final PointF sourceToViewCoord(PointF sxy, @NonNull PointF vTarget) { return sourceToViewCoord(sxy.x, sxy.y, vTarget); } /** * Convert source coordinate to view coordinate. * * @param sx source X coordinate. * @param sy source Y coordinate. * @param vTarget target object for result. The same instance is also returned. * @return view coordinates. This is the same instance passed to the vTarget param. */ @Nullable public final PointF sourceToViewCoord(float sx, float sy, @NonNull PointF vTarget) { if (vTranslate == null) { return null; } vTarget.set(sourceToViewX(sx), sourceToViewY(sy)); return vTarget; } /** * Convert source rect to screen rect, integer values. */ private void sourceToViewRect(@NonNull Rect sRect, @NonNull Rect vTarget) { vTarget.set( (int) sourceToViewX(sRect.left), (int) sourceToViewY(sRect.top), (int) sourceToViewX(sRect.right), (int) sourceToViewY(sRect.bottom) ); } /** * Get the translation required to place a given source coordinate at the center of the screen, with the center * adjusted for asymmetric padding. Accepts the desired scale as an argument, so this is independent of current * translate and scale. The result is fitted to bounds, putting the image point as near to the screen center as permitted. */ @NonNull private PointF vTranslateForSCenter(float sCenterX, float sCenterY, float scale) { int vxCenter = getPaddingLeft() + (getWidthInternal() - getPaddingRight() - getPaddingLeft()) / 2; int vyCenter = getPaddingTop() + (getHeightInternal() - getPaddingBottom() - getPaddingTop()) / 2; if (satTemp == null) { satTemp = new ScaleAndTranslate(0, new PointF(0, 0)); } satTemp.scale = scale; satTemp.vTranslate.set(vxCenter - (sCenterX * scale), vyCenter - (sCenterY * scale)); fitToBounds(true, satTemp); return satTemp.vTranslate; } /** * Given a requested source center and scale, calculate what the actual center will have to be to keep the image in * pan limits, keeping the requested center as near to the middle of the screen as allowed. 
*/
@NonNull
private PointF limitedSCenter(float sCenterX, float sCenterY, float scale, @NonNull PointF sTarget) {
    PointF vTranslate = vTranslateForSCenter(sCenterX, sCenterY, scale);
    int vxCenter = getPaddingLeft() + (getWidthInternal() - getPaddingRight() - getPaddingLeft()) / 2;
    int vyCenter = getPaddingTop() + (getHeightInternal() - getPaddingBottom() - getPaddingTop()) / 2;
    float sx = (vxCenter - vTranslate.x) / scale;
    float sy = (vyCenter - vTranslate.y) / scale;
    sTarget.set(sx, sy);
    return sTarget;
}

/**
 * Returns the minimum allowed scale.
 */
private float minScale() {
    int vPadding = getPaddingBottom() + getPaddingTop();
    int hPadding = getPaddingLeft() + getPaddingRight();
    if (minimumScaleType == SCALE_TYPE_CENTER_CROP || minimumScaleType == SCALE_TYPE_START) {
        return Math.max((getWidthInternal() - hPadding) / (float) sWidth(), (getHeightInternal() - vPadding) / (float) sHeight());
    } else if (minimumScaleType == SCALE_TYPE_CUSTOM && minScale > 0) {
        return minScale;
    } else {
        return Math.min((getWidthInternal() - hPadding) / (float) sWidth(), (getHeightInternal() - vPadding) / (float) sHeight());
    }
}

/**
 * Adjust a requested scale to be within the allowed limits.
 */
private float limitedScale(float targetScale) {
    targetScale = Math.max(minScale(), targetScale);
    targetScale = Math.min(maxScale, targetScale);
    return targetScale;
}

/**
 * Apply a selected type of easing.
 *
 * @param type Easing type, from static fields
 * @param time Elapsed time
 * @param from Start value
 * @param change Change in value
 * @param duration Anim duration
 * @return Current value
 */
private float ease(int type, long time, float from, float change, long duration) {
    switch (type) {
        case EASE_IN_OUT_QUAD:
            return easeInOutQuad(time, from, change, duration);
        case EASE_OUT_QUAD:
            return easeOutQuad(time, from, change, duration);
        default:
            throw new IllegalStateException("Unexpected easing type: " + type);
    }
}

/**
 * Quadratic easing for fling. With thanks to Robert Penner - http://gizma.com/easing/
 *
 * @param time Elapsed time
 * @param from Start value
 * @param change Change in value
 * @param duration Anim duration
 * @return Current value
 */
private float easeOutQuad(long time, float from, float change, long duration) {
    float progress = (float) time / (float) duration;
    return -change * progress * (progress - 2) + from;
}

/**
 * Quadratic easing for scale and center animations. With thanks to Robert Penner - http://gizma.com/easing/
 *
 * @param time Elapsed time
 * @param from Start value
 * @param change Change in value
 * @param duration Anim duration
 * @return Current value
 */
private float easeInOutQuad(long time, float from, float change, long duration) {
    float timeF = time / (duration / 2f);
    if (timeF < 1) {
        return (change / 2f * timeF * timeF) + from;
    } else {
        timeF--;
        return (-change / 2f) * (timeF * (timeF - 2) - 1) + from;
    }
}

/**
 * Debug logger
 */
@AnyThread
private void debug(String message, Object... args) {
    if (debug) {
        Timber.d(message, args);
    }
}

/**
 * For debug overlays. Scale pixel value according to screen density.
 */
private int px(int px) {
    return (int) (density * px);
}

/**
 * Swap the default region decoder implementation for one of your own. You must do this before setting the image file or
 * asset, and you cannot use a custom decoder when using layout XML to set an asset name. Your class must have a
 * public default constructor.
 *
 * @param regionDecoderClass The {@link ImageRegionDecoder} implementation to use.
 */
public final void setRegionDecoderClass(@NonNull Class<?
extends ImageRegionDecoder> regionDecoderClass) { this.regionDecoderFactory = new CompatDecoderFactory<>(regionDecoderClass); } /** * Swap the default region decoder implementation for one of your own. You must do this before setting the image file or * asset, and you cannot use a custom decoder when using layout XML to set an asset name. * * @param regionDecoderFactory The {@link DecoderFactory} implementation that produces {@link ImageRegionDecoder} * instances. */ public final void setRegionDecoderFactory(@NonNull DecoderFactory<? extends ImageRegionDecoder> regionDecoderFactory) { this.regionDecoderFactory = regionDecoderFactory; } /** * Swap the default bitmap decoder implementation for one of your own. You must do this before setting the image file or * asset, and you cannot use a custom decoder when using layout XML to set an asset name. Your class must have a * public default constructor. * * @param bitmapDecoderClass The {@link ImageDecoder} implementation to use. */ public final void setBitmapDecoderClass(@NonNull Class<? extends ImageDecoder> bitmapDecoderClass) { this.bitmapDecoderFactory = new CompatDecoderFactory<>(bitmapDecoderClass); } /** * Swap the default bitmap decoder implementation for one of your own. You must do this before setting the image file or * asset, and you cannot use a custom decoder when using layout XML to set an asset name. * * @param bitmapDecoderFactory The {@link DecoderFactory} implementation that produces {@link ImageDecoder} instances. */ public final void setBitmapDecoderFactory(@NonNull DecoderFactory<? extends ImageDecoder> bitmapDecoderFactory) { this.bitmapDecoderFactory = bitmapDecoderFactory; } /** * Calculate how much further the image can be panned in each direction. The results are set on * the supplied {@link RectF} and expressed as screen pixels. For example, if the image cannot be * panned any further towards the left, the value of {@link RectF#left} will be set to 0. * * @param vTarget target object for results. Re-use for efficiency. */ public final void getPanRemaining(RectF vTarget) { if (!isReady()) { return; } float scaleWidth = scale * sWidth(); float scaleHeight = scale * sHeight(); if (panLimit == PAN_LIMIT_CENTER) { vTarget.top = Math.max(0, -(vTranslate.y - (getHeightInternal() / 2f))); vTarget.left = Math.max(0, -(vTranslate.x - (getWidthInternal() / 2f))); vTarget.bottom = Math.max(0, vTranslate.y - ((getHeightInternal() / 2f) - scaleHeight)); vTarget.right = Math.max(0, vTranslate.x - ((getWidthInternal() / 2f) - scaleWidth)); } else if (panLimit == PAN_LIMIT_OUTSIDE) { vTarget.top = Math.max(0, -(vTranslate.y - getHeightInternal())); vTarget.left = Math.max(0, -(vTranslate.x - getWidthInternal())); vTarget.bottom = Math.max(0, vTranslate.y + scaleHeight); vTarget.right = Math.max(0, vTranslate.x + scaleWidth); } else { vTarget.top = Math.max(0, -vTranslate.y); vTarget.left = Math.max(0, -vTranslate.x); vTarget.bottom = Math.max(0, (scaleHeight + vTranslate.y) - getHeightInternal()); vTarget.right = Math.max(0, (scaleWidth + vTranslate.x) - getWidthInternal()); } } /** * Set the pan limiting style. See static fields. Normally {@link #PAN_LIMIT_INSIDE} is best, for image galleries. * * @param panLimit a pan limit constant. See static fields. */ public final void setPanLimit(int panLimit) { if (!VALID_PAN_LIMITS.contains(panLimit)) { throw new IllegalArgumentException("Invalid pan limit: " + panLimit); } this.panLimit = panLimit; if (isReady()) { fitToBounds(true); invalidate(); } } /** * Set the minimum scale type. 
See static fields. Normally {@link #SCALE_TYPE_CENTER_INSIDE} is best, for image galleries. * * @param scaleType a scale type constant. See static fields. */ public final void setMinimumScaleType(int scaleType) { if (!VALID_SCALE_TYPES.contains(scaleType)) { throw new IllegalArgumentException("Invalid scale type: " + scaleType); } this.minimumScaleType = scaleType; if (isReady()) { fitToBounds(true); invalidate(); } } /** * Set the maximum scale allowed. A value of 1 means 1:1 pixels at maximum scale. You may wish to set this according * to screen density - on a retina screen, 1:1 may still be too small. Consider using {@link #setMinimumDpi(int)}, * which is density aware. * * @param maxScale maximum scale expressed as a source/view pixels ratio. */ public final void setMaxScale(float maxScale) { this.maxScale = maxScale; } /** * Set the minimum scale allowed. A value of 1 means 1:1 pixels at minimum scale. You may wish to set this according * to screen density. Consider using {@link #setMaximumDpi(int)}, which is density aware. * * @param minScale minimum scale expressed as a source/view pixels ratio. */ public final void setMinScale(float minScale) { this.minScale = minScale; } /** * This is a screen density aware alternative to {@link #setMaxScale(float)}; it allows you to express the maximum * allowed scale in terms of the minimum pixel density. This avoids the problem of 1:1 scale still being * too small on a high density screen. A sensible starting point is 160 - the default used by this view. * * @param dpi Source image pixel density at maximum zoom. */ public final void setMinimumDpi(int dpi) { DisplayMetrics metrics = getResources().getDisplayMetrics(); float averageDpi = (metrics.xdpi + metrics.ydpi) / 2; setMaxScale(averageDpi / dpi); } /** * This is a screen density aware alternative to {@link #setMinScale(float)}; it allows you to express the minimum * allowed scale in terms of the maximum pixel density. * * @param dpi Source image pixel density at minimum zoom. */ public final void setMaximumDpi(int dpi) { DisplayMetrics metrics = getResources().getDisplayMetrics(); float averageDpi = (metrics.xdpi + metrics.ydpi) / 2; setMinScale(averageDpi / dpi); } /** * Returns the maximum allowed scale. * * @return the maximum scale as a source/view pixels ratio. */ public float getMaxScale() { return maxScale; } /** * Returns the minimum allowed scale. * * @return the minimum scale as a source/view pixels ratio. */ public final float getMinScale() { return minScale(); } /** * By default, image tiles are at least as high resolution as the screen. For a retina screen this may not be * necessary, and may increase the likelihood of an OutOfMemoryError. This method sets a DPI at which higher * resolution tiles should be loaded. Using a lower number will on average use less memory but result in a lower * quality image. 160-240dpi will usually be enough. This should be called before setting the image source, * because it affects which tiles get loaded. When using an untiled source image this method has no effect. * * @param minimumTileDpi Tile loading threshold. */ public void setMinimumTileDpi(int minimumTileDpi) { DisplayMetrics metrics = getResources().getDisplayMetrics(); float averageDpi = (metrics.xdpi + metrics.ydpi) / 2; this.minimumTileDpi = (int) Math.min(averageDpi, minimumTileDpi); if (isReady()) { reset(false); invalidate(); } } /** * Returns the source point at the center of the view. * * @return the source coordinates current at the center of the view. 
*/ @Nullable public final PointF getCenter() { int mX = getWidthInternal() / 2; int mY = getHeightInternal() / 2; return viewToSourceCoord(mX, mY); } /** * Returns the current scale value. * * @return the current scale as a source/view pixels ratio. */ public final float getScale() { return scale; } /** * Externally change the scale and translation of the source image. This may be used with getCenter() and getScale() * to restore the scale and zoom after a screen rotate. * * @param scale New scale to set. * @param sCenter New source image coordinate to center on the screen, subject to boundaries. */ public final void setScaleAndCenter(float scale, @Nullable PointF sCenter) { this.anim = null; this.pendingScale = scale; this.sPendingCenter = sCenter; this.sRequestedCenter = sCenter; invalidate(); } /** * Fully zoom out and return the image to the middle of the screen. This might be useful if you have a view pager * and want images to be reset when the user has moved to another page. */ public final void resetScaleAndCenter() { this.anim = null; this.pendingScale = limitedScale(0); if (isReady()) { this.sPendingCenter = new PointF(sWidth() / 2f, sHeight() / 2f); } else { this.sPendingCenter = new PointF(0, 0); } invalidate(); } /** * Call to find whether the view is initialised, has dimensions, and will display an image on * the next draw. If a preview has been provided, it may be the preview that will be displayed * and the full size image may still be loading. If no preview was provided, this is called once * the base layer tiles of the full size image are loaded. * * @return true if the view is ready to display an image and accept touch gestures. */ public final boolean isReady() { return readySent; } /** * Called once when the view is initialised, has dimensions, and will display an image on the * next draw. This is triggered at the same time as {@link OnImageEventListener#onReady()} but * allows a subclass to receive this event without using a listener. */ @SuppressWarnings("EmptyMethod") protected void onReady() { } /** * Call to find whether the main image (base layer tiles where relevant) have been loaded. Before * this event the view is blank unless a preview was provided. * * @return true if the main image (not the preview) has been loaded and is ready to display. */ public final boolean isImageLoaded() { return imageLoadedSent; } /** * Called once when the full size image or its base layer tiles have been loaded. */ @SuppressWarnings("EmptyMethod") protected void onImageLoaded() { } /** * Get source width, ignoring orientation. If {@link #getOrientation()} returns 90 or 270, you can use {@link #getSHeight()} * for the apparent width. * * @return the source image width in pixels. */ public final int getSWidth() { return sWidth; } /** * Get source height, ignoring orientation. If {@link #getOrientation()} returns 90 or 270, you can use {@link #getSWidth()} * for the apparent height. * * @return the source image height in pixels. */ public final int getSHeight() { return sHeight; } /** * Returns the orientation setting. This can return {@link #ORIENTATION_USE_EXIF}, in which case it doesn't tell you * the applied orientation of the image. For that, use {@link #getAppliedOrientation()}. * * @return the orientation setting. See static fields. */ public final int getOrientation() { return orientation; } /** * Returns the actual orientation of the image relative to the source file. This will be based on the source file's * EXIF orientation if you're using ORIENTATION_USE_EXIF. 
Values are 0, 90, 180, 270.
 *
 * @return the orientation applied after EXIF information has been extracted. See static fields.
 */
public final int getAppliedOrientation() {
    return getRequiredRotation();
}

/**
 * Get the current state of the view (scale, center, orientation) for restoration after rotate. Will return null if
 * the view is not ready.
 *
 * @return an {@link ImageViewState} instance representing the current position of the image. null if the view isn't ready.
 */
@Nullable
public final ImageViewState getState() {
    PointF center = getCenter();
    if (vTranslate != null && sWidth > 0 && sHeight > 0 && center != null) {
        return new ImageViewState(getScale(), center, getOrientation());
    }
    return null;
}

/**
 * Returns true if zoom gesture detection is enabled.
 *
 * @return true if zoom gesture detection is enabled.
 */
public final boolean isZoomEnabled() {
    return zoomEnabled;
}

/**
 * Enable or disable zoom gesture detection. Disabling zoom locks the current scale.
 *
 * @param zoomEnabled true to enable zoom gestures, false to disable.
 */
public final void setZoomEnabled(boolean zoomEnabled) {
    this.zoomEnabled = zoomEnabled;
}

/**
 * Returns true if double tap &amp; swipe to zoom is enabled.
 *
 * @return true if double tap &amp; swipe to zoom is enabled.
 */
public final boolean isQuickScaleEnabled() {
    return quickScaleEnabled;
}

/**
 * Enable or disable double tap &amp; swipe to zoom.
 *
 * @param quickScaleEnabled true to enable quick scale, false to disable.
 */
public final void setQuickScaleEnabled(boolean quickScaleEnabled) {
    this.quickScaleEnabled = quickScaleEnabled;
}

/**
 * Returns true if pan gesture detection is enabled.
 *
 * @return true if pan gesture detection is enabled.
 */
public final boolean isPanEnabled() {
    return panEnabled;
}

/**
 * Enable or disable pan gesture detection. Disabling pan causes the image to be centered. Pan
 * can still be changed from code.
 *
 * @param panEnabled true to enable panning, false to disable.
 */
public final void setPanEnabled(boolean panEnabled) {
    this.panEnabled = panEnabled;
    if (!panEnabled && vTranslate != null) {
        vTranslate.x = (getWidthInternal() / 2f) - (scale * (sWidth() / 2f));
        vTranslate.y = (getHeightInternal() / 2f) - (scale * (sHeight() / 2f));
        if (isReady()) {
            refreshRequiredTiles(true);
            invalidate();
        }
    }
}

/**
 * Set a solid color to render behind tiles, useful for displaying transparent PNGs.
 *
 * @param tileBgColor Background color for tiles.
 */
public final void setTileBackgroundColor(int tileBgColor) {
    if (Color.alpha(tileBgColor) == 0) {
        tileBgPaint = null;
    } else {
        tileBgPaint = new Paint();
        tileBgPaint.setStyle(Style.FILL);
        tileBgPaint.setColor(tileBgColor);
    }
    invalidate();
}

/**
 * Set the scale the image will zoom in to when double tapped. This is also the scale point where a double tap is interpreted
 * as a zoom out gesture - if the scale is greater than 90% of this value, a double tap zooms out. Avoid using values
 * greater than the max zoom.
 *
 * @param doubleTapZoomScale New value for double tap gesture zoom scale.
 */
public final void setDoubleTapZoomScale(float doubleTapZoomScale) {
    this.doubleTapZoomScale = doubleTapZoomScale;
}

/**
 * A density aware alternative to {@link #setDoubleTapZoomScale(float)}; this allows you to express the scale the
 * image will zoom in to when double tapped in terms of the image pixel density. Values lower than the max scale will
 * be ignored. A sensible starting point is 160 - the default used by this view.
 *
 * @param dpi New value for double tap gesture zoom scale.
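 * <p>
 * For example (illustrative):
 * <pre>{@code
 * imageView.setDoubleTapZoomStyle(CustomSubsamplingScaleImageView.ZOOM_FOCUS_FIXED);
 * imageView.setDoubleTapZoomDuration(300);
 * imageView.setDoubleTapZoomDpi(160);   // roughly 1:1 source pixels on a 160dpi screen
 * }</pre>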
*/ public final void setDoubleTapZoomDpi(int dpi) { DisplayMetrics metrics = getResources().getDisplayMetrics(); float averageDpi = (metrics.xdpi + metrics.ydpi) / 2; setDoubleTapZoomScale(averageDpi / dpi); } /** * Set the type of zoom animation to be used for double taps. See static fields. * * @param doubleTapZoomStyle New value for zoom style. */ public final void setDoubleTapZoomStyle(int doubleTapZoomStyle) { if (!VALID_ZOOM_STYLES.contains(doubleTapZoomStyle)) { throw new IllegalArgumentException("Invalid zoom style: " + doubleTapZoomStyle); } this.doubleTapZoomStyle = doubleTapZoomStyle; } /** * Set the duration of the double tap zoom animation. * * @param durationMs Duration in milliseconds. */ public final void setDoubleTapZoomDuration(int durationMs) { this.doubleTapZoomDuration = Math.max(0, durationMs); } /** * <p> * Provide an {@link Executor} to be used for loading images. By default, {@link AsyncTask#THREAD_POOL_EXECUTOR} * is used to minimise contention with other background work the app is doing. You can also choose * to use {@link AsyncTask#SERIAL_EXECUTOR} if you want to limit concurrent background tasks. * Alternatively you can supply an {@link Executor} of your own to avoid any contention. It is * strongly recommended to use a single executor instance for the life of your application, not * one per view instance. * </p><p> * <b>Warning:</b> If you are using a custom implementation of {@link ImageRegionDecoder}, and you * supply an executor with more than one thread, you must make sure your implementation supports * multi-threaded bitmap decoding or has appropriate internal synchronization. From SDK 21, Android's * {@link android.graphics.BitmapRegionDecoder} uses an internal lock so it is thread safe but * there is no advantage to using multiple threads. * </p> * * @param executor an {@link Executor} for image loading. */ public void setExecutor(@NonNull Executor executor) { this.executor = executor; } /** * Enable or disable eager loading of tiles that appear on screen during gestures or animations, * while the gesture or animation is still in progress. By default this is enabled to improve * responsiveness, but it can result in tiles being loaded and discarded more rapidly than * necessary and reduce the animation frame rate on old/cheap devices. Disable this on older * devices if you see poor performance. Tiles will then be loaded only when gestures and animations * are completed. * * @param eagerLoadingEnabled true to enable loading during gestures, false to delay loading until gestures end */ public void setEagerLoadingEnabled(boolean eagerLoadingEnabled) { this.eagerLoadingEnabled = eagerLoadingEnabled; } /** * Enables visual debugging, showing tile boundaries and sizes. * * @param debug true to enable debugging, false to disable. */ public final void setDebug(boolean debug) { this.debug = debug; } /** * Check if an image has been set. The image may not have been loaded and displayed yet. * * @return If an image is currently set. */ public boolean hasImage() { return uri != null || bitmap != null; } /** * {@inheritDoc} */ @Override public void setOnLongClickListener(OnLongClickListener onLongClickListener) { this.onLongClickListener = onLongClickListener; } /** * Add a listener allowing notification of load and error events. Extend {@link DefaultOnImageEventListener} * to simplify implementation. * * @param onImageEventListener an {@link OnImageEventListener} instance. 
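     * <p>
     * A minimal sketch (the {@code imageView} field and the listener body are placeholder assumptions):
     * </p>
     * <pre>{@code
     * imageView.setOnImageEventListener(new CustomSubsamplingScaleImageView.DefaultOnImageEventListener() {
     *     @Override
     *     public void onReady() {
     *         // scale and translate are now known; safe to hide a loading indicator or draw overlays
     *     }
     * });
     * }</pre>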
*/ public void setOnImageEventListener(OnImageEventListener onImageEventListener) { this.onImageEventListener = onImageEventListener; } /** * Add a listener for pan and zoom events. Extend {@link DefaultOnStateChangedListener} to simplify * implementation. * * @param onStateChangedListener an {@link OnStateChangedListener} instance. */ public void setOnStateChangedListener(OnStateChangedListener onStateChangedListener) { this.onStateChangedListener = onStateChangedListener; } private void sendStateChanged(float oldScale, PointF oldVTranslate, int origin) { if (onStateChangedListener != null && scale != oldScale) { onStateChangedListener.onScaleChanged(scale, origin); } if (onStateChangedListener != null && !vTranslate.equals(oldVTranslate)) { onStateChangedListener.onCenterChanged(getCenter(), origin); } } /** * Creates a panning animation builder, that when started will animate the image to place the given coordinates of * the image in the center of the screen. If doing this would move the image beyond the edges of the screen, the * image is instead animated to move the center point as near to the center of the screen as is allowed - it's * guaranteed to be on screen. * * @param sCenter Target center point * @return {@link AnimationBuilder} instance. Call {@link CustomSubsamplingScaleImageView.AnimationBuilder#start()} to start the anim. */ @Nullable public AnimationBuilder animateCenter(PointF sCenter) { if (!isReady()) { return null; } return new AnimationBuilder(sCenter); } /** * Creates a scale animation builder, that when started will animate a zoom in or out. If this would move the image * beyond the panning limits, the image is automatically panned during the animation. * * @param scale Target scale. * @return {@link AnimationBuilder} instance. Call {@link CustomSubsamplingScaleImageView.AnimationBuilder#start()} to start the anim. */ @Nullable public AnimationBuilder animateScale(float scale) { if (!isReady()) { return null; } return new AnimationBuilder(scale); } /** * Creates a scale animation builder, that when started will animate a zoom in or out. If this would move the image * beyond the panning limits, the image is automatically panned during the animation. * * @param scale Target scale. * @param sCenter Target source center. * @return {@link AnimationBuilder} instance. Call {@link CustomSubsamplingScaleImageView.AnimationBuilder#start()} to start the anim. */ @Nullable public AnimationBuilder animateScaleAndCenter(float scale, PointF sCenter) { if (!isReady()) { return null; } return new AnimationBuilder(scale, sCenter); } public void setPreloadDimensions(int width, int height) { preloadDimensions = new Point(width, height); } private int getWidthInternal() { if (getWidth() > 0 || null == preloadDimensions) return getWidth(); else return preloadDimensions.x; } private int getHeightInternal() { if (getHeight() > 0 || null == preloadDimensions) return getHeight(); else return preloadDimensions.y; } /** * Builder class used to set additional options for a scale animation. Create an instance using {@link #animateScale(float)}, * then set your options and call {@link #start()}. 
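     * <p>
     * For example, a hedged sketch (the target scale and center are arbitrary placeholder values):
     * </p>
     * <pre>{@code
     * AnimationBuilder builder = imageView.animateScaleAndCenter(2f, new PointF(500f, 500f));
     * if (builder != null) { // builders are null until the view is ready
     *     builder.withDuration(750).withEasing(EASE_OUT_QUAD).start();
     * }
     * }</pre>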
*/ public final class AnimationBuilder { private final float targetScale; private final PointF targetSCenter; private final PointF vFocus; private long duration = 500; private int easing = EASE_IN_OUT_QUAD; private int origin = ORIGIN_ANIM; private boolean interruptible = true; private boolean panLimited = true; private OnAnimationEventListener listener; private AnimationBuilder(PointF sCenter) { this.targetScale = scale; this.targetSCenter = sCenter; this.vFocus = null; } private AnimationBuilder(float scale) { this.targetScale = scale; this.targetSCenter = getCenter(); this.vFocus = null; } private AnimationBuilder(float scale, PointF sCenter) { this.targetScale = scale; this.targetSCenter = sCenter; this.vFocus = null; } private AnimationBuilder(float scale, PointF sCenter, PointF vFocus) { this.targetScale = scale; this.targetSCenter = sCenter; this.vFocus = vFocus; } /** * Desired duration of the anim in milliseconds. Default is 500. * * @param duration duration in milliseconds. * @return this builder for method chaining. */ @NonNull public AnimationBuilder withDuration(long duration) { this.duration = duration; return this; } /** * Whether the animation can be interrupted with a touch. Default is true. * * @param interruptible interruptible flag. * @return this builder for method chaining. */ @NonNull public AnimationBuilder withInterruptible(boolean interruptible) { this.interruptible = interruptible; return this; } /** * Set the easing style. See static fields. {@link #EASE_IN_OUT_QUAD} is recommended, and the default. * * @param easing easing style. * @return this builder for method chaining. */ @NonNull public AnimationBuilder withEasing(int easing) { if (!VALID_EASING_STYLES.contains(easing)) { throw new IllegalArgumentException("Unknown easing type: " + easing); } this.easing = easing; return this; } /** * Add an animation event listener. * * @param listener The listener. * @return this builder for method chaining. */ @NonNull public AnimationBuilder withOnAnimationEventListener(OnAnimationEventListener listener) { this.listener = listener; return this; } /** * Only for internal use. When set to true, the animation proceeds towards the actual end point - the nearest * point to the center allowed by pan limits. When false, animation is in the direction of the requested end * point and is stopped when the limit for each axis is reached. The latter behaviour is used for flings but * nothing else. */ @NonNull private AnimationBuilder withPanLimited(boolean panLimited) { this.panLimited = panLimited; return this; } /** * Only for internal use. Indicates what caused the animation. */ @NonNull private AnimationBuilder withOrigin(int origin) { this.origin = origin; return this; } /** * Starts the animation. */ public void start() { if (anim != null && anim.listener != null) { try { anim.listener.onInterruptedByNewAnim(); } catch (Exception e) { Timber.tag(TAG).w(e, "Error thrown by animation listener"); } } int vxCenter = getPaddingLeft() + (getWidthInternal() - getPaddingRight() - getPaddingLeft()) / 2; int vyCenter = getPaddingTop() + (getHeightInternal() - getPaddingBottom() - getPaddingTop()) / 2; float targetScale = limitedScale(this.targetScale); PointF targetSCenter = panLimited ? 
limitedSCenter(this.targetSCenter.x, this.targetSCenter.y, targetScale, new PointF()) : this.targetSCenter; anim = new Anim(); anim.scaleStart = scale; anim.scaleEnd = targetScale; anim.time = System.currentTimeMillis(); anim.sCenterEndRequested = targetSCenter; anim.sCenterStart = getCenter(); anim.sCenterEnd = targetSCenter; anim.vFocusStart = sourceToViewCoord(targetSCenter); anim.vFocusEnd = new PointF( vxCenter, vyCenter ); anim.duration = duration; anim.interruptible = interruptible; anim.easing = easing; anim.origin = origin; anim.time = System.currentTimeMillis(); anim.listener = listener; if (vFocus != null) { // Calculate where translation will be at the end of the anim float vTranslateXEnd = vFocus.x - (targetScale * anim.sCenterStart.x); float vTranslateYEnd = vFocus.y - (targetScale * anim.sCenterStart.y); ScaleAndTranslate satEnd = new ScaleAndTranslate(targetScale, new PointF(vTranslateXEnd, vTranslateYEnd)); // Fit the end translation into bounds fitToBounds(true, satEnd); // Adjust the position of the focus point at end so image will be in bounds anim.vFocusEnd = new PointF( vFocus.x + (satEnd.vTranslate.x - vTranslateXEnd), vFocus.y + (satEnd.vTranslate.y - vTranslateYEnd) ); } invalidate(); } } /** * An event listener for animations, allows events to be triggered when an animation completes, * is aborted by another animation starting, or is aborted by a touch event. Note that none of * these events are triggered if the activity is paused, the image is swapped, or in other cases * where the view's internal state gets wiped or draw events stop. */ @SuppressWarnings("EmptyMethod") public interface OnAnimationEventListener { /** * The animation has completed, having reached its endpoint. */ void onComplete(); /** * The animation has been aborted before reaching its endpoint because the user touched the screen. */ void onInterruptedByUser(); /** * The animation has been aborted before reaching its endpoint because a new animation has been started. */ void onInterruptedByNewAnim(); } /** * Default implementation of {@link OnAnimationEventListener} for extension. This does nothing in any method. */ public static class DefaultOnAnimationEventListener implements OnAnimationEventListener { @Override public void onComplete() { } @Override public void onInterruptedByUser() { } @Override public void onInterruptedByNewAnim() { } } /** * An event listener, allowing subclasses and activities to be notified of significant events. */ @SuppressWarnings("EmptyMethod") public interface OnImageEventListener { /** * Called when the dimensions of the image and view are known, and either a preview image, * the full size image, or base layer tiles are loaded. This indicates the scale and translate * are known and the next draw will display an image. This event can be used to hide a loading * graphic, or inform a subclass that it is safe to draw overlays. */ void onReady(); /** * Called when the full size image is ready. When using tiling, this means the lowest resolution * base layer of tiles are loaded, and when tiling is disabled, the image bitmap is loaded. * This event could be used as a trigger to enable gestures if you wanted interaction disabled * while only a preview is displayed, otherwise for most cases {@link #onReady()} is the best * event to listen to. */ void onImageLoaded(); /** * Called when a preview image could not be loaded. 
This method cannot be relied upon; certain
         * encoding types of supported image formats can result in corrupt or blank images being loaded
         * and displayed with no detectable error. The view will continue to load the full size image.
         *
         * @param e The exception thrown. This error is logged by the view.
         */
        void onPreviewLoadError(Exception e);

        /**
         * Indicates an error initialising the decoder when using tiling, or when loading the full
         * size bitmap when tiling is disabled. This method cannot be relied upon; certain encoding
         * types of supported image formats can result in corrupt or blank images being loaded and
         * displayed with no detectable error.
         *
         * @param e The exception thrown. This error is also logged by the view.
         */
        void onImageLoadError(Exception e);

        /**
         * Called when an image tile could not be loaded. This method cannot be relied upon; certain
         * encoding types of supported image formats can result in corrupt or blank images being loaded
         * and displayed with no detectable error. Most cases where an unsupported file is used will
         * result in an error caught by {@link #onImageLoadError(Exception)}.
         *
         * @param e The exception thrown. This error is logged by the view.
         */
        void onTileLoadError(Exception e);

        /**
         * Called when a bitmap set using ImageSource.cachedBitmap is no longer being used by the View.
         * This is useful if you wish to manage the bitmap after the preview is shown.
         */
        void onPreviewReleased();
    }

    /**
     * Default implementation of {@link OnImageEventListener} for extension. This does nothing in any method.
     */
    public static class DefaultOnImageEventListener implements OnImageEventListener {
        @Override
        public void onReady() {
        }

        @Override
        public void onImageLoaded() {
        }

        @Override
        public void onPreviewLoadError(Exception e) {
        }

        @Override
        public void onImageLoadError(Exception e) {
        }

        @Override
        public void onTileLoadError(Exception e) {
        }

        @Override
        public void onPreviewReleased() {
        }
    }

    /**
     * An event listener, allowing activities to be notified of pan and zoom events. Initialisation
     * and calls made by your code do not trigger events; touch events and animations do. Methods in
     * this listener will be called on the UI thread and may be called very frequently - your
     * implementation should return quickly.
     */
    @SuppressWarnings("EmptyMethod")
    public interface OnStateChangedListener {

        /**
         * The scale has changed. Use with {@link #getMaxScale()} and {@link #getMinScale()} to determine
         * whether the image is fully zoomed in or out.
         *
         * @param newScale The new scale.
         * @param origin   Where the event originated from - one of {@link #ORIGIN_ANIM}, {@link #ORIGIN_TOUCH},
         *                 {@link #ORIGIN_FLING} or {@link #ORIGIN_DOUBLE_TAP_ZOOM}.
         */
        void onScaleChanged(float newScale, int origin);

        /**
         * The source center has been changed. This can be a result of panning or zooming.
         *
         * @param newCenter The new source center point.
         * @param origin    Where the event originated from - one of {@link #ORIGIN_ANIM}, {@link #ORIGIN_TOUCH},
         *                  {@link #ORIGIN_FLING} or {@link #ORIGIN_DOUBLE_TAP_ZOOM}.
         */
        void onCenterChanged(PointF newCenter, int origin);
    }

    /**
     * Default implementation of {@link OnStateChangedListener}. This does nothing in any method.
     */
    public static class DefaultOnStateChangedListener implements OnStateChangedListener {
        @Override
        public void onCenterChanged(PointF newCenter, int origin) {
        }

        @Override
        public void onScaleChanged(float newScale, int origin) {
        }
    }
}
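// A hedged usage sketch, not part of the class above: saving and restoring the view's position
// with ImageViewState across an Activity recreation. The "imageView" field, the bundle key and
// the URIs are placeholder assumptions, and ImageSource.uri(...) is assumed to exist on the
// companion ImageSource class as in the upstream subsampling-scale-image-view library.
//
//     ImageViewState state = imageView.getState(); // null until the view is ready
//     if (state != null) outState.putSerializable("ssiv_state", state);
//
//     // ...later, when rebuilding the view (the upstream ImageViewState is Serializable):
//     ImageViewState restored = (ImageViewState) savedInstanceState.getSerializable("ssiv_state");
//     imageView.setImage(ImageSource.uri(imageUri), restored);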
app/src/main/java/me/devsaki/hentoid/views/ssiv/CustomSubsamplingScaleImageView.java
package me.devsaki.hentoid.views.ssiv; import android.content.ContentResolver; import android.content.Context; import android.content.res.TypedArray; import android.database.Cursor; import android.graphics.Bitmap; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.Matrix; import android.graphics.Paint; import android.graphics.Paint.Style; import android.graphics.Point; import android.graphics.PointF; import android.graphics.Rect; import android.graphics.RectF; import android.net.Uri; import android.os.AsyncTask; import android.os.Handler; import android.provider.MediaStore; import android.util.AttributeSet; import android.util.DisplayMetrics; import android.util.TypedValue; import android.view.GestureDetector; import android.view.MotionEvent; import android.view.View; import android.view.ViewParent; import androidx.annotation.AnyThread; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.exifinterface.media.ExifInterface; import com.davemorrissey.labs.subscaleview.ImageViewState; import com.davemorrissey.labs.subscaleview.R.styleable; import com.davemorrissey.labs.subscaleview.decoder.CompatDecoderFactory; import com.davemorrissey.labs.subscaleview.decoder.DecoderFactory; import com.davemorrissey.labs.subscaleview.decoder.ImageDecoder; import com.davemorrissey.labs.subscaleview.decoder.ImageRegionDecoder; import com.davemorrissey.labs.subscaleview.decoder.SkiaImageDecoder; import com.davemorrissey.labs.subscaleview.decoder.SkiaImageRegionDecoder; import java.lang.ref.WeakReference; import java.util.ArrayList; import java.util.Arrays; import java.util.LinkedHashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.concurrent.Executor; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import me.devsaki.hentoid.R; import timber.log.Timber; /** * <p> * Displays an image subsampled as necessary to avoid loading too much image data into memory. After zooming in, * a set of image tiles subsampled at higher resolution are loaded and displayed over the base layer. During pan and * zoom, tiles off screen or higher/lower resolution than required are discarded from memory. * </p><p> * Tiles are no larger than the max supported bitmap size, so with large images tiling may be used even when zoomed out. * </p><p> * v prefixes - coordinates, translations and distances measured in screen (view) pixels * <br> * s prefixes - coordinates, translations and distances measured in rotated and cropped source image pixels (scaled) * <br> * f prefixes - coordinates, translations and distances measured in original unrotated, uncropped source file pixels * </p><p> * <a href="https://github.com/davemorrissey/subsampling-scale-image-view">View project on GitHub</a> * </p> */ @SuppressWarnings("unused") public class CustomSubsamplingScaleImageView extends View { private static final String TAG = CustomSubsamplingScaleImageView.class.getSimpleName(); /** * Attempt to use EXIF information on the image to rotate it. Works for external files only. */ public static final int ORIENTATION_USE_EXIF = -1; /** * Display the image file in its native orientation. */ public static final int ORIENTATION_0 = 0; /** * Rotate the image 90 degrees clockwise. */ public static final int ORIENTATION_90 = 90; /** * Rotate the image 180 degrees. */ public static final int ORIENTATION_180 = 180; /** * Rotate the image 270 degrees clockwise. 
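     * <p>
     * (For example, a hypothetical {@code imageView.setOrientation(ORIENTATION_270)} call displays
     * the image rotated 270 degrees clockwise.)
     * </p>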
*/ public static final int ORIENTATION_270 = 270; private static final List<Integer> VALID_ORIENTATIONS = Arrays.asList(ORIENTATION_0, ORIENTATION_90, ORIENTATION_180, ORIENTATION_270, ORIENTATION_USE_EXIF); /** * During zoom animation, keep the point of the image that was tapped in the same place, and scale the image around it. */ public static final int ZOOM_FOCUS_FIXED = 1; /** * During zoom animation, move the point of the image that was tapped to the center of the screen. */ public static final int ZOOM_FOCUS_CENTER = 2; /** * Zoom in to and center the tapped point immediately without animating. */ public static final int ZOOM_FOCUS_CENTER_IMMEDIATE = 3; private static final List<Integer> VALID_ZOOM_STYLES = Arrays.asList(ZOOM_FOCUS_FIXED, ZOOM_FOCUS_CENTER, ZOOM_FOCUS_CENTER_IMMEDIATE); /** * Quadratic ease out. Not recommended for scale animation, but good for panning. */ public static final int EASE_OUT_QUAD = 1; /** * Quadratic ease in and out. */ public static final int EASE_IN_OUT_QUAD = 2; private static final List<Integer> VALID_EASING_STYLES = Arrays.asList(EASE_IN_OUT_QUAD, EASE_OUT_QUAD); /** * Don't allow the image to be panned off screen. As much of the image as possible is always displayed, centered in the view when it is smaller. This is the best option for galleries. */ public static final int PAN_LIMIT_INSIDE = 1; /** * Allows the image to be panned until it is just off screen, but no further. The edge of the image will stop when it is flush with the screen edge. */ public static final int PAN_LIMIT_OUTSIDE = 2; /** * Allows the image to be panned until a corner reaches the center of the screen but no further. Useful when you want to pan any spot on the image to the exact center of the screen. */ public static final int PAN_LIMIT_CENTER = 3; private static final List<Integer> VALID_PAN_LIMITS = Arrays.asList(PAN_LIMIT_INSIDE, PAN_LIMIT_OUTSIDE, PAN_LIMIT_CENTER); /** * Scale the image so that both dimensions of the image will be equal to or less than the corresponding dimension of the view. The image is then centered in the view. This is the default behaviour and best for galleries. */ public static final int SCALE_TYPE_CENTER_INSIDE = 1; /** * Scale the image uniformly so that both dimensions of the image will be equal to or larger than the corresponding dimension of the view. The image is then centered in the view. */ public static final int SCALE_TYPE_CENTER_CROP = 2; /** * Scale the image so that both dimensions of the image will be equal to or less than the maxScale and equal to or larger than minScale. The image is then centered in the view. */ public static final int SCALE_TYPE_CUSTOM = 3; /** * Scale the image so that both dimensions of the image will be equal to or larger than the corresponding dimension of the view. The top left is shown. */ public static final int SCALE_TYPE_START = 4; private static final List<Integer> VALID_SCALE_TYPES = Arrays.asList(SCALE_TYPE_CENTER_CROP, SCALE_TYPE_CENTER_INSIDE, SCALE_TYPE_CUSTOM, SCALE_TYPE_START); /** * State change originated from animation. */ public static final int ORIGIN_ANIM = 1; /** * State change originated from touch gesture. */ public static final int ORIGIN_TOUCH = 2; /** * State change originated from a fling momentum anim. */ public static final int ORIGIN_FLING = 3; /** * State change originated from a double tap zoom anim. 
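     * <p>
     * The ORIGIN_ constants are delivered to {@link OnStateChangedListener} callbacks; a hedged
     * sketch (the {@code imageView} reference and the listener body are placeholder assumptions):
     * </p>
     * <pre>{@code
     * imageView.setOnStateChangedListener(new DefaultOnStateChangedListener() {
     *     @Override
     *     public void onScaleChanged(float newScale, int origin) {
     *         if (origin == ORIGIN_DOUBLE_TAP_ZOOM) {
     *             // react only to double tap zooms, ignoring touch pans and flings
     *         }
     *     }
     * });
     * }</pre>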
*/ public static final int ORIGIN_DOUBLE_TAP_ZOOM = 4; // Bitmap (preview or full image) private Bitmap bitmap; // Whether the bitmap is a preview image private boolean bitmapIsPreview; // Specifies if a cache handler is also referencing the bitmap. Do not recycle if so. private boolean bitmapIsCached; // Uri of full size image private Uri uri; // Sample size used to display the whole image when fully zoomed out private int fullImageSampleSize; // Map of zoom level to tile grid private Map<Integer, List<Tile>> tileMap; // Overlay tile boundaries and other info private boolean debug; // Image orientation setting private int orientation = ORIENTATION_0; // Max scale allowed (prevent infinite zoom) private float maxScale = 2F; // Min scale allowed (prevent infinite zoom) private float minScale = minScale(); // Density to reach before loading higher resolution tiles private int minimumTileDpi = -1; // Pan limiting style private int panLimit = PAN_LIMIT_INSIDE; // Minimum scale type private int minimumScaleType = SCALE_TYPE_CENTER_INSIDE; // overrides for the dimensions of the generated tiles public static final int TILE_SIZE_AUTO = Integer.MAX_VALUE; private int maxTileWidth = TILE_SIZE_AUTO; private int maxTileHeight = TILE_SIZE_AUTO; // An executor service for loading of images private Executor executor = AsyncTask.THREAD_POOL_EXECUTOR; // Whether tiles should be loaded while gestures and animations are still in progress private boolean eagerLoadingEnabled = true; // Gesture detection settings private boolean panEnabled = true; private boolean zoomEnabled = true; private boolean quickScaleEnabled = true; // Double tap zoom behaviour private float doubleTapZoomScale = 1F; private int doubleTapZoomStyle = ZOOM_FOCUS_FIXED; private int doubleTapZoomDuration = 500; // Current scale and scale at start of zoom private float scale; private float scaleStart; // Screen coordinate of top-left corner of source image private PointF vTranslate; private PointF vTranslateStart; private PointF vTranslateBefore; // Source coordinate to center on, used when new position is set externally before view is ready private Float pendingScale; private PointF sPendingCenter; private PointF sRequestedCenter; // Source image dimensions and orientation - dimensions relate to the unrotated image private int sWidth; private int sHeight; private int sOrientation; private Rect sRegion; private Rect pRegion; // Is two-finger zooming in progress private boolean isZooming; // Is one-finger panning in progress private boolean isPanning; // Is quick-scale gesture in progress private boolean isQuickScaling; // Max touches used in current gesture private int maxTouchCount; // Fling detector private GestureDetector detector; private GestureDetector singleDetector; // Tile and image decoding private ImageRegionDecoder decoder; private final ReadWriteLock decoderLock = new ReentrantReadWriteLock(true); private DecoderFactory<? extends ImageDecoder> bitmapDecoderFactory = new CompatDecoderFactory<ImageDecoder>(SkiaImageDecoder.class); private DecoderFactory<? 
extends ImageRegionDecoder> regionDecoderFactory = new CompatDecoderFactory<ImageRegionDecoder>(SkiaImageRegionDecoder.class); // Debug values private PointF vCenterStart; private float vDistStart; // Current quickscale state private final float quickScaleThreshold; private float quickScaleLastDistance; private boolean quickScaleMoved; private PointF quickScaleVLastPoint; private PointF quickScaleSCenter; private PointF quickScaleVStart; // Scale and center animation tracking private Anim anim; // Whether a ready notification has been sent to subclasses private boolean readySent; // Whether a base layer loaded notification has been sent to subclasses private boolean imageLoadedSent; // Event listener private OnImageEventListener onImageEventListener; // Scale and center listener private OnStateChangedListener onStateChangedListener; // Long click listener private OnLongClickListener onLongClickListener; // Long click handler private final Handler handler; private static final int MESSAGE_LONG_CLICK = 1; // Paint objects created once and reused for efficiency private Paint bitmapPaint; private Paint debugTextPaint; private Paint debugLinePaint; private Paint tileBgPaint; // Volatile fields used to reduce object creation private ScaleAndTranslate satTemp; private Matrix matrix; private RectF sRect; private final float[] srcArray = new float[8]; private final float[] dstArray = new float[8]; //The logical density of the display private final float density; // A global preference for bitmap format, available to decoder classes that respect it private static Bitmap.Config preferredBitmapConfig; // Switch to ignore all touch events (used in vertical mode when the container view is the one handling touch events) private boolean ignoreTouchEvents = false; // Dimensions used to preload the image before the view actually appears on screen / gets its display dimensions private Point preloadDimensions = null; public CustomSubsamplingScaleImageView(Context context, AttributeSet attr) { super(context, attr); density = getResources().getDisplayMetrics().density; setMinimumDpi(160); setDoubleTapZoomDpi(160); setMinimumTileDpi(320); setGestureDetector(context); this.handler = new Handler(message -> { if (message.what == MESSAGE_LONG_CLICK && onLongClickListener != null) { maxTouchCount = 0; CustomSubsamplingScaleImageView.super.setOnLongClickListener(onLongClickListener); performLongClick(); CustomSubsamplingScaleImageView.super.setOnLongClickListener(null); } return true; }); // Handle XML attributes if (attr != null) { TypedArray typedAttr = getContext().obtainStyledAttributes(attr, R.styleable.CustomSubsamplingScaleImageView); if (typedAttr.hasValue(styleable.SubsamplingScaleImageView_assetName)) { String assetName = typedAttr.getString(styleable.SubsamplingScaleImageView_assetName); if (assetName != null && assetName.length() > 0) { setImage(ImageSource.asset(assetName).tilingEnabled()); } } if (typedAttr.hasValue(styleable.SubsamplingScaleImageView_src)) { int resId = typedAttr.getResourceId(styleable.SubsamplingScaleImageView_src, 0); if (resId > 0) { setImage(ImageSource.resource(resId).tilingEnabled()); } } if (typedAttr.hasValue(styleable.SubsamplingScaleImageView_panEnabled)) { setPanEnabled(typedAttr.getBoolean(styleable.SubsamplingScaleImageView_panEnabled, true)); } if (typedAttr.hasValue(styleable.SubsamplingScaleImageView_zoomEnabled)) { setZoomEnabled(typedAttr.getBoolean(styleable.SubsamplingScaleImageView_zoomEnabled, true)); } if 
(typedAttr.hasValue(styleable.SubsamplingScaleImageView_quickScaleEnabled)) { setQuickScaleEnabled(typedAttr.getBoolean(styleable.SubsamplingScaleImageView_quickScaleEnabled, true)); } if (typedAttr.hasValue(styleable.SubsamplingScaleImageView_tileBackgroundColor)) { setTileBackgroundColor(typedAttr.getColor(styleable.SubsamplingScaleImageView_tileBackgroundColor, Color.argb(0, 0, 0, 0))); } typedAttr.recycle(); } quickScaleThreshold = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, 20, context.getResources().getDisplayMetrics()); } public CustomSubsamplingScaleImageView(Context context) { this(context, null); } /** * Get the current preferred configuration for decoding bitmaps. {@link ImageDecoder} and {@link ImageRegionDecoder} * instances can read this and use it when decoding images. * * @return the preferred bitmap configuration, or null if none has been set. */ public static Bitmap.Config getPreferredBitmapConfig() { return preferredBitmapConfig; } /** * Set a global preferred bitmap config shared by all view instance and applied to new instances * initialised after the call is made. This is a hint only; the bundled {@link ImageDecoder} and * {@link ImageRegionDecoder} classes all respect this (except when they were constructed with * an instance-specific config) but custom decoder classes will not. * * @param preferredBitmapConfig the bitmap configuration to be used by future instances of the view. Pass null to restore the default. */ public static void setPreferredBitmapConfig(Bitmap.Config preferredBitmapConfig) { CustomSubsamplingScaleImageView.preferredBitmapConfig = preferredBitmapConfig; } /** * Sets the image orientation. It's best to call this before setting the image file or asset, because it may waste * loading of tiles. However, this can be freely called at any time. * * @param orientation orientation to be set. See ORIENTATION_ static fields for valid values. */ public final void setOrientation(int orientation) { if (!VALID_ORIENTATIONS.contains(orientation)) { throw new IllegalArgumentException("Invalid orientation: " + orientation); } this.orientation = orientation; reset(false); invalidate(); requestLayout(); } /** * Set the image source from a bitmap, resource, asset, file or other URI. * * @param imageSource Image source. */ public final void setImage(@NonNull ImageSource imageSource) { setImage(imageSource, null, null); } /** * Set the image source from a bitmap, resource, asset, file or other URI, starting with a given orientation * setting, scale and center. This is the best method to use when you want scale and center to be restored * after screen orientation change; it avoids any redundant loading of tiles in the wrong orientation. * * @param imageSource Image source. * @param state State to be restored. Nullable. */ public final void setImage(@NonNull ImageSource imageSource, ImageViewState state) { setImage(imageSource, null, state); } /** * Set the image source from a bitmap, resource, asset, file or other URI, providing a preview image to be * displayed until the full size image is loaded. * <p> * You must declare the dimensions of the full size image by calling {@link ImageSource#dimensions(int, int)} * on the imageSource object. The preview source will be ignored if you don't provide dimensions, * and if you provide a bitmap for the full size image. * * @param imageSource Image source. Dimensions must be declared. * @param previewSource Optional source for a preview image to be displayed and allow interaction while the full size image loads. 
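     * <p>
     * A sketch of a typical call (the URIs and pixel dimensions are placeholder assumptions,
     * and {@code ImageSource.uri(...)} is assumed to exist as in the upstream library):
     * </p>
     * <pre>{@code
     * imageView.setImage(
     *         ImageSource.uri(fullSizeUri).dimensions(3000, 4000), // dimensions of the full size image
     *         ImageSource.uri(previewUri));
     * }</pre>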
*/ public final void setImage(@NonNull ImageSource imageSource, ImageSource previewSource) { setImage(imageSource, previewSource, null); } /** * Set the image source from a bitmap, resource, asset, file or other URI, providing a preview image to be * displayed until the full size image is loaded, starting with a given orientation setting, scale and center. * This is the best method to use when you want scale and center to be restored after screen orientation change; * it avoids any redundant loading of tiles in the wrong orientation. * <p> * You must declare the dimensions of the full size image by calling {@link ImageSource#dimensions(int, int)} * on the imageSource object. The preview source will be ignored if you don't provide dimensions, * and if you provide a bitmap for the full size image. * * @param imageSource Image source. Dimensions must be declared. * @param previewSource Optional source for a preview image to be displayed and allow interaction while the full size image loads. * @param state State to be restored. Nullable. */ public final void setImage(@NonNull ImageSource imageSource, ImageSource previewSource, ImageViewState state) { reset(true); if (state != null) { restoreState(state); } if (previewSource != null) { if (imageSource.getBitmap() != null) { throw new IllegalArgumentException("Preview image cannot be used when a bitmap is provided for the main image"); } if (imageSource.getSWidth() <= 0 || imageSource.getSHeight() <= 0) { throw new IllegalArgumentException("Preview image cannot be used unless dimensions are provided for the main image"); } this.sWidth = imageSource.getSWidth(); this.sHeight = imageSource.getSHeight(); this.pRegion = previewSource.getSRegion(); if (previewSource.getBitmap() != null) { this.bitmapIsCached = previewSource.isCached(); onPreviewLoaded(previewSource.getBitmap()); } else { Uri uri = previewSource.getUri(); if (uri == null && previewSource.getResource() != null) { uri = Uri.parse(ContentResolver.SCHEME_ANDROID_RESOURCE + "://" + getContext().getPackageName() + "/" + previewSource.getResource()); } BitmapLoadTask task = new BitmapLoadTask(this, getContext(), bitmapDecoderFactory, uri, true); execute(task); } } if (imageSource.getBitmap() != null && imageSource.getSRegion() != null) { onImageLoaded(Bitmap.createBitmap(imageSource.getBitmap(), imageSource.getSRegion().left, imageSource.getSRegion().top, imageSource.getSRegion().width(), imageSource.getSRegion().height()), ORIENTATION_0, false); } else if (imageSource.getBitmap() != null) { onImageLoaded(imageSource.getBitmap(), ORIENTATION_0, imageSource.isCached()); } else { sRegion = imageSource.getSRegion(); uri = imageSource.getUri(); if (uri == null && imageSource.getResource() != null) { uri = Uri.parse(ContentResolver.SCHEME_ANDROID_RESOURCE + "://" + getContext().getPackageName() + "/" + imageSource.getResource()); } if (imageSource.getTile() || sRegion != null) { // Load the bitmap using tile decoding. TilesInitTask task = new TilesInitTask(this, getContext(), regionDecoderFactory, uri); execute(task); } else { // Load the bitmap as a single image. BitmapLoadTask task = new BitmapLoadTask(this, getContext(), bitmapDecoderFactory, uri, false); execute(task); } } } /** * Reset all state before setting/changing image or setting new rotation. 
*/ private void reset(boolean newImage) { debug("reset newImage=" + newImage); scale = 0f; scaleStart = 0f; vTranslate = null; vTranslateStart = null; vTranslateBefore = null; pendingScale = 0f; sPendingCenter = null; sRequestedCenter = null; isZooming = false; isPanning = false; isQuickScaling = false; maxTouchCount = 0; fullImageSampleSize = 0; vCenterStart = null; vDistStart = 0; quickScaleLastDistance = 0f; quickScaleMoved = false; quickScaleSCenter = null; quickScaleVLastPoint = null; quickScaleVStart = null; anim = null; satTemp = null; matrix = null; sRect = null; if (newImage) { uri = null; decoderLock.writeLock().lock(); try { if (decoder != null) { decoder.recycle(); decoder = null; } } finally { decoderLock.writeLock().unlock(); } if (bitmap != null && !bitmapIsCached) { bitmap.recycle(); } if (bitmap != null && bitmapIsCached && onImageEventListener != null) { onImageEventListener.onPreviewReleased(); } sWidth = 0; sHeight = 0; sOrientation = 0; sRegion = null; pRegion = null; readySent = false; imageLoadedSent = false; bitmap = null; bitmapIsPreview = false; bitmapIsCached = false; } if (tileMap != null) { for (Map.Entry<Integer, List<Tile>> tileMapEntry : tileMap.entrySet()) { for (Tile tile : tileMapEntry.getValue()) { tile.visible = false; if (tile.bitmap != null) { tile.bitmap.recycle(); tile.bitmap = null; } } } tileMap = null; } setGestureDetector(getContext()); } private void setGestureDetector(final Context context) { this.detector = new GestureDetector(context, new GestureDetector.SimpleOnGestureListener() { @Override public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) { if (panEnabled && readySent && vTranslate != null && e1 != null && e2 != null && (Math.abs(e1.getX() - e2.getX()) > 50 || Math.abs(e1.getY() - e2.getY()) > 50) && (Math.abs(velocityX) > 500 || Math.abs(velocityY) > 500) && !isZooming) { PointF vTranslateEnd = new PointF(vTranslate.x + (velocityX * 0.25f), vTranslate.y + (velocityY * 0.25f)); float sCenterXEnd = ((getWidthInternal() / 2f) - vTranslateEnd.x) / scale; float sCenterYEnd = ((getHeightInternal() / 2f) - vTranslateEnd.y) / scale; new AnimationBuilder(new PointF(sCenterXEnd, sCenterYEnd)).withEasing(EASE_OUT_QUAD).withPanLimited(false).withOrigin(ORIGIN_FLING).start(); return true; } return super.onFling(e1, e2, velocityX, velocityY); } @Override public boolean onSingleTapConfirmed(MotionEvent e) { performClick(); return true; } @Override public boolean onDoubleTap(MotionEvent e) { if (zoomEnabled && readySent && vTranslate != null) { // Hacky solution for #15 - after a double tap the GestureDetector gets in a state // where the next fling is ignored, so here we replace it with a new one. setGestureDetector(context); if (quickScaleEnabled) { // Store quick scale params. This will become either a double tap zoom or a // quick scale depending on whether the user swipes. vCenterStart = new PointF(e.getX(), e.getY()); vTranslateStart = new PointF(vTranslate.x, vTranslate.y); scaleStart = scale; isQuickScaling = true; isZooming = true; quickScaleLastDistance = -1F; quickScaleSCenter = viewToSourceCoord(vCenterStart); if (null == quickScaleSCenter) throw new IllegalStateException("vTranslate is null; aborting"); quickScaleVStart = new PointF(e.getX(), e.getY()); quickScaleVLastPoint = new PointF(quickScaleSCenter.x, quickScaleSCenter.y); quickScaleMoved = false; // We need to get events in onTouchEvent after this. return false; } else { // Start double tap zoom animation. 
PointF sCenter = viewToSourceCoord(new PointF(e.getX(), e.getY())); if (null == sCenter) throw new IllegalStateException("vTranslate is null; aborting"); doubleTapZoom(sCenter, new PointF(e.getX(), e.getY())); return true; } } return super.onDoubleTapEvent(e); } }); singleDetector = new GestureDetector(context, new GestureDetector.SimpleOnGestureListener() { @Override public boolean onSingleTapConfirmed(MotionEvent e) { performClick(); return true; } }); } /** * On resize, preserve center and scale. Various behaviours are possible, override this method to use another. */ @Override protected void onSizeChanged(int w, int h, int oldw, int oldh) { debug("onSizeChanged %dx%d -> %dx%d", oldw, oldh, w, h); PointF sCenter = getCenter(); if (readySent && sCenter != null) { this.anim = null; this.pendingScale = scale; this.sPendingCenter = sCenter; } } /** * Measures the width and height of the view, preserving the aspect ratio of the image displayed if wrap_content is * used. The image will scale within this box, not resizing the view as it is zoomed. */ @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { int widthSpecMode = MeasureSpec.getMode(widthMeasureSpec); int heightSpecMode = MeasureSpec.getMode(heightMeasureSpec); int parentWidth = MeasureSpec.getSize(widthMeasureSpec); int parentHeight = MeasureSpec.getSize(heightMeasureSpec); boolean resizeWidth = widthSpecMode != MeasureSpec.EXACTLY; boolean resizeHeight = heightSpecMode != MeasureSpec.EXACTLY; int width = parentWidth; int height = parentHeight; if (sWidth > 0 && sHeight > 0) { if (resizeWidth && resizeHeight) { width = sWidth(); height = sHeight(); } else if (resizeHeight) { height = (int) ((((double) sHeight() / (double) sWidth()) * width)); } else if (resizeWidth) { width = (int) ((((double) sWidth() / (double) sHeight()) * height)); } } width = Math.max(width, getSuggestedMinimumWidth()); height = Math.max(height, getSuggestedMinimumHeight()); setMeasuredDimension(width, height); } public void setIgnoreTouchEvents(boolean ignoreTouchEvents) { this.ignoreTouchEvents = ignoreTouchEvents; } /** * Handle touch events. One finger pans, and two finger pinch and zoom plus panning. 
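     * <p>
     * When a containing view (e.g. a vertical-mode container) handles gestures itself, touch handling
     * here can be bypassed entirely with {@link #setIgnoreTouchEvents(boolean)}; this override then
     * returns false immediately.
     * </p>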
*/ @Override public boolean onTouchEvent(@NonNull MotionEvent event) { if (ignoreTouchEvents) return false; // During non-interruptible anims, ignore all touch events if (anim != null && !anim.interruptible) { requestDisallowInterceptTouchEvent(true); return true; } else { if (anim != null && anim.listener != null) { try { anim.listener.onInterruptedByUser(); } catch (Exception e) { Timber.tag(TAG).w(e, "Error thrown by animation listener"); } } anim = null; } // Abort if not ready if (vTranslate == null) { if (singleDetector != null) { singleDetector.onTouchEvent(event); } return true; } // Detect flings, taps and double taps if (!isQuickScaling && (detector == null || detector.onTouchEvent(event))) { isZooming = false; isPanning = false; maxTouchCount = 0; return true; } if (vTranslateStart == null) { vTranslateStart = new PointF(0, 0); } if (vTranslateBefore == null) { vTranslateBefore = new PointF(0, 0); } if (vCenterStart == null) { vCenterStart = new PointF(0, 0); } // Store current values so we can send an event if they change float scaleBefore = scale; vTranslateBefore.set(vTranslate); boolean handled = onTouchEventInternal(event); sendStateChanged(scaleBefore, vTranslateBefore, ORIGIN_TOUCH); return handled || super.onTouchEvent(event); } @SuppressWarnings("deprecation") private boolean onTouchEventInternal(@NonNull MotionEvent event) { int touchCount = event.getPointerCount(); switch (event.getAction()) { case MotionEvent.ACTION_DOWN: case MotionEvent.ACTION_POINTER_1_DOWN: case MotionEvent.ACTION_POINTER_2_DOWN: anim = null; requestDisallowInterceptTouchEvent(true); maxTouchCount = Math.max(maxTouchCount, touchCount); if (touchCount >= 2) { if (zoomEnabled) { // Start pinch to zoom. Calculate distance between touch points and center point of the pinch. float distance = distance(event.getX(0), event.getX(1), event.getY(0), event.getY(1)); scaleStart = scale; vDistStart = distance; vTranslateStart.set(vTranslate.x, vTranslate.y); vCenterStart.set((event.getX(0) + event.getX(1)) / 2, (event.getY(0) + event.getY(1)) / 2); } else { // Abort all gestures on second touch maxTouchCount = 0; } // Cancel long click timer handler.removeMessages(MESSAGE_LONG_CLICK); } else if (!isQuickScaling) { // Start one-finger pan vTranslateStart.set(vTranslate.x, vTranslate.y); vCenterStart.set(event.getX(), event.getY()); // Start long click timer handler.sendEmptyMessageDelayed(MESSAGE_LONG_CLICK, 600); } return true; case MotionEvent.ACTION_MOVE: boolean consumed = false; if (maxTouchCount > 0) { if (touchCount >= 2) { // Calculate new distance between touch points, to scale and pan relative to start values. float vDistEnd = distance(event.getX(0), event.getX(1), event.getY(0), event.getY(1)); float vCenterEndX = (event.getX(0) + event.getX(1)) / 2; float vCenterEndY = (event.getY(0) + event.getY(1)) / 2; if (zoomEnabled && (distance(vCenterStart.x, vCenterEndX, vCenterStart.y, vCenterEndY) > 5 || Math.abs(vDistEnd - vDistStart) > 5 || isPanning)) { isZooming = true; isPanning = true; consumed = true; double previousScale = scale; scale = Math.min(maxScale, (vDistEnd / vDistStart) * scaleStart); if (scale <= minScale()) { // Minimum scale reached so don't pan. Adjust start settings so any expand will zoom in. 
vDistStart = vDistEnd; scaleStart = minScale(); vCenterStart.set(vCenterEndX, vCenterEndY); vTranslateStart.set(vTranslate); } else if (panEnabled) { // Translate to place the source image coordinate that was at the center of the pinch at the start // at the center of the pinch now, to give simultaneous pan + zoom. float vLeftStart = vCenterStart.x - vTranslateStart.x; float vTopStart = vCenterStart.y - vTranslateStart.y; float vLeftNow = vLeftStart * (scale / scaleStart); float vTopNow = vTopStart * (scale / scaleStart); vTranslate.x = vCenterEndX - vLeftNow; vTranslate.y = vCenterEndY - vTopNow; if ((previousScale * sHeight() < getHeightInternal() && scale * sHeight() >= getHeightInternal()) || (previousScale * sWidth() < getWidthInternal() && scale * sWidth() >= getWidthInternal())) { fitToBounds(true); vCenterStart.set(vCenterEndX, vCenterEndY); vTranslateStart.set(vTranslate); scaleStart = scale; vDistStart = vDistEnd; } } else if (sRequestedCenter != null) { // With a center specified from code, zoom around that point. vTranslate.x = (getWidthInternal() / 2f) - (scale * sRequestedCenter.x); vTranslate.y = (getHeightInternal() / 2f) - (scale * sRequestedCenter.y); } else { // With no requested center, scale around the image center. vTranslate.x = (getWidthInternal() / 2f) - (scale * (sWidth() / 2f)); vTranslate.y = (getHeightInternal() / 2f) - (scale * (sHeight() / 2f)); } fitToBounds(true); refreshRequiredTiles(eagerLoadingEnabled); } } else if (isQuickScaling) { // One finger zoom // Stole Google's Magical Formula™ to make sure it feels the exact same float dist = Math.abs(quickScaleVStart.y - event.getY()) * 2 + quickScaleThreshold; if (quickScaleLastDistance == -1f) { quickScaleLastDistance = dist; } boolean isUpwards = event.getY() > quickScaleVLastPoint.y; quickScaleVLastPoint.set(0, event.getY()); float spanDiff = Math.abs(1 - (dist / quickScaleLastDistance)) * 0.5f; if (spanDiff > 0.03f || quickScaleMoved) { quickScaleMoved = true; float multiplier = 1; if (quickScaleLastDistance > 0) { multiplier = isUpwards ? (1 + spanDiff) : (1 - spanDiff); } double previousScale = scale; scale = Math.max(minScale(), Math.min(maxScale, scale * multiplier)); if (panEnabled) { float vLeftStart = vCenterStart.x - vTranslateStart.x; float vTopStart = vCenterStart.y - vTranslateStart.y; float vLeftNow = vLeftStart * (scale / scaleStart); float vTopNow = vTopStart * (scale / scaleStart); vTranslate.x = vCenterStart.x - vLeftNow; vTranslate.y = vCenterStart.y - vTopNow; if ((previousScale * sHeight() < getHeightInternal() && scale * sHeight() >= getHeightInternal()) || (previousScale * sWidth() < getWidthInternal() && scale * sWidth() >= getWidthInternal())) { fitToBounds(true); vCenterStart.set(sourceToViewCoord(quickScaleSCenter)); vTranslateStart.set(vTranslate); scaleStart = scale; dist = 0; } } else if (sRequestedCenter != null) { // With a center specified from code, zoom around that point. vTranslate.x = (getWidthInternal() / 2f) - (scale * sRequestedCenter.x); vTranslate.y = (getHeightInternal() / 2f) - (scale * sRequestedCenter.y); } else { // With no requested center, scale around the image center. vTranslate.x = (getWidthInternal() / 2f) - (scale * (sWidth() / 2f)); vTranslate.y = (getHeightInternal() / 2f) - (scale * (sHeight() / 2f)); } } quickScaleLastDistance = dist; fitToBounds(true); refreshRequiredTiles(eagerLoadingEnabled); consumed = true; } else if (!isZooming) { // One finger pan - translate the image. 
We do this calculation even with pan disabled so click // and long click behaviour is preserved. float dx = Math.abs(event.getX() - vCenterStart.x); float dy = Math.abs(event.getY() - vCenterStart.y); // On the Samsung S6 the long click event does not work, because dx > 5 is usually true float offset = density * 5; if (dx > offset || dy > offset || isPanning) { consumed = true; vTranslate.x = vTranslateStart.x + (event.getX() - vCenterStart.x); vTranslate.y = vTranslateStart.y + (event.getY() - vCenterStart.y); float lastX = vTranslate.x; float lastY = vTranslate.y; fitToBounds(true); boolean atXEdge = lastX != vTranslate.x; boolean atYEdge = lastY != vTranslate.y; boolean edgeXSwipe = atXEdge && dx > dy && !isPanning; boolean edgeYSwipe = atYEdge && dy > dx && !isPanning; boolean yPan = lastY == vTranslate.y && dy > offset * 3; if (!edgeXSwipe && !edgeYSwipe && (!atXEdge || !atYEdge || yPan || isPanning)) { isPanning = true; } else if (dx > offset || dy > offset) { // Haven't panned the image, and we're at the left or right edge. Switch to page swipe. maxTouchCount = 0; handler.removeMessages(MESSAGE_LONG_CLICK); requestDisallowInterceptTouchEvent(false); } if (!panEnabled) { vTranslate.x = vTranslateStart.x; vTranslate.y = vTranslateStart.y; requestDisallowInterceptTouchEvent(false); } refreshRequiredTiles(eagerLoadingEnabled); } } } if (consumed) { handler.removeMessages(MESSAGE_LONG_CLICK); invalidate(); return true; } break; case MotionEvent.ACTION_UP: case MotionEvent.ACTION_POINTER_UP: case MotionEvent.ACTION_POINTER_2_UP: handler.removeMessages(MESSAGE_LONG_CLICK); if (isQuickScaling) { isQuickScaling = false; if (!quickScaleMoved) { doubleTapZoom(quickScaleSCenter, vCenterStart); } } if (maxTouchCount > 0 && (isZooming || isPanning)) { if (isZooming && touchCount == 2) { // Convert from zoom to pan with remaining touch isPanning = true; vTranslateStart.set(vTranslate.x, vTranslate.y); if (event.getActionIndex() == 1) { vCenterStart.set(event.getX(0), event.getY(0)); } else { vCenterStart.set(event.getX(1), event.getY(1)); } } if (touchCount < 3) { // End zooming when only one touch point isZooming = false; } if (touchCount < 2) { // End panning when no touch points isPanning = false; maxTouchCount = 0; } // Trigger load of tiles now required refreshRequiredTiles(true); return true; } if (touchCount == 1) { isZooming = false; isPanning = false; maxTouchCount = 0; } return true; } return false; } private void requestDisallowInterceptTouchEvent(boolean disallowIntercept) { ViewParent parent = getParent(); if (parent != null) { parent.requestDisallowInterceptTouchEvent(disallowIntercept); } } /** * Double tap zoom handler triggered from gesture detector or on touch, depending on whether * quick scale is enabled. */ private void doubleTapZoom(PointF sCenter, PointF vFocus) { if (!panEnabled) { if (sRequestedCenter != null) { // With a center specified from code, zoom around that point. sCenter.x = sRequestedCenter.x; sCenter.y = sRequestedCenter.y; } else { // With no requested center, scale around the image center. sCenter.x = sWidth() / 2; sCenter.y = sHeight() / 2; } } float doubleTapZoomScale = Math.min(maxScale, CustomSubsamplingScaleImageView.this.doubleTapZoomScale); boolean zoomIn = (scale <= doubleTapZoomScale * 0.9) || scale == minScale; float targetScale = zoomIn ?
doubleTapZoomScale : minScale(); if (doubleTapZoomStyle == ZOOM_FOCUS_CENTER_IMMEDIATE) { setScaleAndCenter(targetScale, sCenter); } else if (doubleTapZoomStyle == ZOOM_FOCUS_CENTER || !zoomIn || !panEnabled) { new AnimationBuilder(targetScale, sCenter).withInterruptible(false).withDuration(doubleTapZoomDuration).withOrigin(ORIGIN_DOUBLE_TAP_ZOOM).start(); } else if (doubleTapZoomStyle == ZOOM_FOCUS_FIXED) { new AnimationBuilder(targetScale, sCenter, vFocus).withInterruptible(false).withDuration(doubleTapZoomDuration).withOrigin(ORIGIN_DOUBLE_TAP_ZOOM).start(); } invalidate(); } /** * Draw method should not be called until the view has dimensions so the first calls are used as triggers to calculate * the scaling and tiling required. Once the view is setup, tiles are displayed as they are loaded. */ @Override protected void onDraw(Canvas canvas) { super.onDraw(canvas); createPaints(); // If image or view dimensions are not known yet, abort. if (sWidth == 0 || sHeight == 0 || getWidthInternal() == 0 || getHeightInternal() == 0) { return; } // When using tiles, on first render with no tile map ready, initialise it and kick off async base image loading. if (tileMap == null && decoder != null) { initialiseBaseLayer(getMaxBitmapDimensions(canvas)); } // If image has been loaded or supplied as a bitmap, onDraw may be the first time the view has // dimensions and therefore the first opportunity to set scale and translate. If this call returns // false there is nothing to be drawn so return immediately. if (!checkReady()) { return; } // Set scale and translate before draw. preDraw(); // If animating scale, calculate current scale and center with easing equations if (anim != null && anim.vFocusStart != null) { // Store current values so we can send an event if they change float scaleBefore = scale; if (vTranslateBefore == null) { vTranslateBefore = new PointF(0, 0); } vTranslateBefore.set(vTranslate); long scaleElapsed = System.currentTimeMillis() - anim.time; boolean finished = scaleElapsed > anim.duration; scaleElapsed = Math.min(scaleElapsed, anim.duration); scale = ease(anim.easing, scaleElapsed, anim.scaleStart, anim.scaleEnd - anim.scaleStart, anim.duration); // Apply required animation to the focal point float vFocusNowX = ease(anim.easing, scaleElapsed, anim.vFocusStart.x, anim.vFocusEnd.x - anim.vFocusStart.x, anim.duration); float vFocusNowY = ease(anim.easing, scaleElapsed, anim.vFocusStart.y, anim.vFocusEnd.y - anim.vFocusStart.y, anim.duration); // Find out where the focal point is at this scale and adjust its position to follow the animation path vTranslate.x -= sourceToViewX(anim.sCenterEnd.x) - vFocusNowX; vTranslate.y -= sourceToViewY(anim.sCenterEnd.y) - vFocusNowY; // For translate anims, showing the image non-centered is never allowed, for scaling anims it is during the animation. 
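// (Concretely: anim.scaleStart == anim.scaleEnd identifies a pure translate anim, so bounds are enforced on every frame; scale anims are only snapped to bounds once finished.)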
fitToBounds(finished || (anim.scaleStart == anim.scaleEnd)); sendStateChanged(scaleBefore, vTranslateBefore, anim.origin); refreshRequiredTiles(finished); if (finished) { if (anim.listener != null) { try { anim.listener.onComplete(); } catch (Exception e) { Timber.tag(TAG).w(e, "Error thrown by animation listener"); } } anim = null; } invalidate(); } if (tileMap != null && isBaseLayerReady()) { // Optimum sample size for current scale int sampleSize = Math.min(fullImageSampleSize, calculateInSampleSize(scale)); // First check for missing tiles - if there are any we need the base layer underneath to avoid gaps boolean hasMissingTiles = false; for (Map.Entry<Integer, List<Tile>> tileMapEntry : tileMap.entrySet()) { if (tileMapEntry.getKey() == sampleSize) { for (Tile tile : tileMapEntry.getValue()) { if (tile.visible && (tile.loading || tile.bitmap == null)) { hasMissingTiles = true; } } } } // Render all loaded tiles. LinkedHashMap used for bottom up rendering - lower res tiles underneath. for (Map.Entry<Integer, List<Tile>> tileMapEntry : tileMap.entrySet()) { if (tileMapEntry.getKey() == sampleSize || hasMissingTiles) { for (Tile tile : tileMapEntry.getValue()) { sourceToViewRect(tile.sRect, tile.vRect); if (!tile.loading && tile.bitmap != null) { if (tileBgPaint != null) { canvas.drawRect(tile.vRect, tileBgPaint); } if (matrix == null) { matrix = new Matrix(); } matrix.reset(); setMatrixArray(srcArray, 0, 0, tile.bitmap.getWidth(), 0, tile.bitmap.getWidth(), tile.bitmap.getHeight(), 0, tile.bitmap.getHeight()); if (getRequiredRotation() == ORIENTATION_0) { setMatrixArray(dstArray, tile.vRect.left, tile.vRect.top, tile.vRect.right, tile.vRect.top, tile.vRect.right, tile.vRect.bottom, tile.vRect.left, tile.vRect.bottom); } else if (getRequiredRotation() == ORIENTATION_90) { setMatrixArray(dstArray, tile.vRect.right, tile.vRect.top, tile.vRect.right, tile.vRect.bottom, tile.vRect.left, tile.vRect.bottom, tile.vRect.left, tile.vRect.top); } else if (getRequiredRotation() == ORIENTATION_180) { setMatrixArray(dstArray, tile.vRect.right, tile.vRect.bottom, tile.vRect.left, tile.vRect.bottom, tile.vRect.left, tile.vRect.top, tile.vRect.right, tile.vRect.top); } else if (getRequiredRotation() == ORIENTATION_270) { setMatrixArray(dstArray, tile.vRect.left, tile.vRect.bottom, tile.vRect.left, tile.vRect.top, tile.vRect.right, tile.vRect.top, tile.vRect.right, tile.vRect.bottom); } matrix.setPolyToPoly(srcArray, 0, dstArray, 0, 4); canvas.drawBitmap(tile.bitmap, matrix, bitmapPaint); if (debug) { canvas.drawRect(tile.vRect, debugLinePaint); } } else if (tile.loading && debug) { canvas.drawText("LOADING", tile.vRect.left + px(5), tile.vRect.top + px(35), debugTextPaint); } if (tile.visible && debug) { canvas.drawText("ISS " + tile.sampleSize + " RECT " + tile.sRect.top + "," + tile.sRect.left + "," + tile.sRect.bottom + "," + tile.sRect.right, tile.vRect.left + px(5), tile.vRect.top + px(15), debugTextPaint); } } } } } else if (bitmap != null) { float xScale = scale, yScale = scale; if (bitmapIsPreview) { xScale = scale * ((float) sWidth / bitmap.getWidth()); yScale = scale * ((float) sHeight / bitmap.getHeight()); } if (matrix == null) { matrix = new Matrix(); } matrix.reset(); matrix.postScale(xScale, yScale); matrix.postRotate(getRequiredRotation()); matrix.postTranslate(vTranslate.x, vTranslate.y); if (getRequiredRotation() == ORIENTATION_180) { matrix.postTranslate(scale * sWidth, scale * sHeight); } else if (getRequiredRotation() == ORIENTATION_90) { matrix.postTranslate(scale * sHeight, 0); } 
else if (getRequiredRotation() == ORIENTATION_270) { matrix.postTranslate(0, scale * sWidth); } if (tileBgPaint != null) { if (sRect == null) { sRect = new RectF(); } sRect.set(0f, 0f, bitmapIsPreview ? bitmap.getWidth() : sWidth, bitmapIsPreview ? bitmap.getHeight() : sHeight); matrix.mapRect(sRect); canvas.drawRect(sRect, tileBgPaint); } canvas.drawBitmap(bitmap, matrix, bitmapPaint); } if (debug) { canvas.drawText("Scale: " + String.format(Locale.ENGLISH, "%.2f", scale) + " (" + String.format(Locale.ENGLISH, "%.2f", minScale()) + " - " + String.format(Locale.ENGLISH, "%.2f", maxScale) + ")", px(5), px(15), debugTextPaint); canvas.drawText("Translate: " + String.format(Locale.ENGLISH, "%.2f", vTranslate.x) + ":" + String.format(Locale.ENGLISH, "%.2f", vTranslate.y), px(5), px(30), debugTextPaint); PointF center = getCenter(); if (null != center) canvas.drawText("Source center: " + String.format(Locale.ENGLISH, "%.2f", center.x) + ":" + String.format(Locale.ENGLISH, "%.2f", center.y), px(5), px(45), debugTextPaint); if (anim != null) { PointF vCenterStart = sourceToViewCoord(anim.sCenterStart); PointF vCenterEndRequested = sourceToViewCoord(anim.sCenterEndRequested); PointF vCenterEnd = sourceToViewCoord(anim.sCenterEnd); if (vCenterStart != null) { canvas.drawCircle(vCenterStart.x, vCenterStart.y, px(10), debugLinePaint); debugLinePaint.setColor(Color.RED); } if (vCenterEndRequested != null) { canvas.drawCircle(vCenterEndRequested.x, vCenterEndRequested.y, px(20), debugLinePaint); debugLinePaint.setColor(Color.BLUE); } if (vCenterEnd != null) { canvas.drawCircle(vCenterEnd.x, vCenterEnd.y, px(25), debugLinePaint); debugLinePaint.setColor(Color.CYAN); } canvas.drawCircle(getWidthInternal() / 2, getHeightInternal() / 2, px(30), debugLinePaint); } if (vCenterStart != null) { debugLinePaint.setColor(Color.RED); canvas.drawCircle(vCenterStart.x, vCenterStart.y, px(20), debugLinePaint); } if (quickScaleSCenter != null) { debugLinePaint.setColor(Color.BLUE); canvas.drawCircle(sourceToViewX(quickScaleSCenter.x), sourceToViewY(quickScaleSCenter.y), px(35), debugLinePaint); } if (quickScaleVStart != null && isQuickScaling) { debugLinePaint.setColor(Color.CYAN); canvas.drawCircle(quickScaleVStart.x, quickScaleVStart.y, px(30), debugLinePaint); } debugLinePaint.setColor(Color.MAGENTA); } } /** * Helper method for setting the values of a tile matrix array. */ private void setMatrixArray(float[] array, float f0, float f1, float f2, float f3, float f4, float f5, float f6, float f7) { array[0] = f0; array[1] = f1; array[2] = f2; array[3] = f3; array[4] = f4; array[5] = f5; array[6] = f6; array[7] = f7; } /** * Checks whether the base layer of tiles or full size bitmap is ready. */ private boolean isBaseLayerReady() { if (bitmap != null && !bitmapIsPreview) { return true; } else if (tileMap != null) { boolean baseLayerReady = true; for (Map.Entry<Integer, List<Tile>> tileMapEntry : tileMap.entrySet()) { if (tileMapEntry.getKey() == fullImageSampleSize) { for (Tile tile : tileMapEntry.getValue()) { if (tile.loading || tile.bitmap == null) { baseLayerReady = false; } } } } return baseLayerReady; } return false; } /** * Check whether view and image dimensions are known and either a preview, full size image or * base layer tiles are loaded. First time, send ready event to listener. The next draw will * display an image. 
*/ private boolean checkReady() { boolean ready = getWidthInternal() > 0 && getHeightInternal() > 0 && sWidth > 0 && sHeight > 0 && (bitmap != null || isBaseLayerReady()); if (!readySent && ready) { preDraw(); readySent = true; onReady(); if (onImageEventListener != null) { onImageEventListener.onReady(); } } return ready; } /** * Check whether either the full size bitmap or base layer tiles are loaded. First time, send image * loaded event to listener. */ private boolean checkImageLoaded() { boolean imageLoaded = isBaseLayerReady(); if (!imageLoadedSent && imageLoaded) { preDraw(); imageLoadedSent = true; onImageLoaded(); if (onImageEventListener != null) { onImageEventListener.onImageLoaded(); } } return imageLoaded; } /** * Creates Paint objects once when first needed. */ private void createPaints() { if (bitmapPaint == null) { bitmapPaint = new Paint(); bitmapPaint.setAntiAlias(true); bitmapPaint.setFilterBitmap(true); bitmapPaint.setDither(true); } if ((debugTextPaint == null || debugLinePaint == null) && debug) { debugTextPaint = new Paint(); debugTextPaint.setTextSize(px(12)); debugTextPaint.setColor(Color.MAGENTA); debugTextPaint.setStyle(Style.FILL); debugLinePaint = new Paint(); debugLinePaint.setColor(Color.MAGENTA); debugLinePaint.setStyle(Style.STROKE); debugLinePaint.setStrokeWidth(px(1)); } } /** * Called on first draw when the view has dimensions. Calculates the initial sample size and starts async loading of * the base layer image - the whole source subsampled as necessary. */ private synchronized void initialiseBaseLayer(@NonNull Point maxTileDimensions) { debug("initialiseBaseLayer maxTileDimensions=%dx%d", maxTileDimensions.x, maxTileDimensions.y); satTemp = new ScaleAndTranslate(0f, new PointF(0, 0)); fitToBounds(true, satTemp); // Load double resolution - next level will be split into four tiles and at the center all four are required, // so don't bother with tiling until the next level 16 tiles are needed. fullImageSampleSize = calculateInSampleSize(satTemp.scale); if (fullImageSampleSize > 1) { fullImageSampleSize /= 2; } if (fullImageSampleSize == 1 && sRegion == null && sWidth() < maxTileDimensions.x && sHeight() < maxTileDimensions.y) { // Whole image is required at native resolution, and is smaller than the canvas max bitmap size. // Use BitmapDecoder for better image support. decoder.recycle(); decoder = null; BitmapLoadTask task = new BitmapLoadTask(this, getContext(), bitmapDecoderFactory, uri, false); execute(task); } else { initialiseTileMap(maxTileDimensions); List<Tile> baseGrid = tileMap.get(fullImageSampleSize); for (Tile baseTile : baseGrid) { TileLoadTask task = new TileLoadTask(this, decoder, baseTile); execute(task); } refreshRequiredTiles(true); } } /** * Loads the optimum tiles for display at the current scale and translate, so the screen can be filled with tiles * that are at least as high resolution as the screen. Frees up bitmaps that are now off the screen. * * @param load Whether to load the new tiles needed. Use false while scrolling/panning for performance. */ private void refreshRequiredTiles(boolean load) { if (decoder == null || tileMap == null) { return; } int sampleSize = Math.min(fullImageSampleSize, calculateInSampleSize(scale)); // Load tiles of the correct sample size that are on screen. Discard tiles off screen, and those that are higher // resolution than required, or lower res than required but not the base layer, so the base layer is always present. 
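// Illustrative walk-through (hypothetical numbers, not taken from a real image): with
// fullImageSampleSize == 8 and calculateInSampleSize(scale) == 2 at the current zoom, the loop
// below keeps and loads visible sample-size-2 tiles, keeps the sample-size-8 base tiles resident
// as the fallback layer, and recycles the bitmaps of any leftover sample-size-1 or sample-size-4 tiles.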
for (Map.Entry<Integer, List<Tile>> tileMapEntry : tileMap.entrySet()) { for (Tile tile : tileMapEntry.getValue()) { if (tile.sampleSize < sampleSize || (tile.sampleSize > sampleSize && tile.sampleSize != fullImageSampleSize)) { tile.visible = false; if (tile.bitmap != null) { tile.bitmap.recycle(); tile.bitmap = null; } } if (tile.sampleSize == sampleSize) { if (tileVisible(tile)) { tile.visible = true; if (!tile.loading && tile.bitmap == null && load) { TileLoadTask task = new TileLoadTask(this, decoder, tile); execute(task); } } else if (tile.sampleSize != fullImageSampleSize) { tile.visible = false; if (tile.bitmap != null) { tile.bitmap.recycle(); tile.bitmap = null; } } } else if (tile.sampleSize == fullImageSampleSize) { tile.visible = true; } } } } /** * Determine whether tile is visible. */ private boolean tileVisible(Tile tile) { float sVisLeft = viewToSourceX(0), sVisRight = viewToSourceX(getWidthInternal()), sVisTop = viewToSourceY(0), sVisBottom = viewToSourceY(getHeightInternal()); return !(sVisLeft > tile.sRect.right || tile.sRect.left > sVisRight || sVisTop > tile.sRect.bottom || tile.sRect.top > sVisBottom); } /** * Sets scale and translate ready for the next draw. */ private void preDraw() { if (getWidthInternal() == 0 || getHeightInternal() == 0 || sWidth <= 0 || sHeight <= 0) { return; } // If waiting to translate to new center position, set translate now if (sPendingCenter != null && pendingScale != null) { scale = pendingScale; if (vTranslate == null) { vTranslate = new PointF(); } vTranslate.x = (getWidthInternal() / 2f) - (scale * sPendingCenter.x); vTranslate.y = (getHeightInternal() / 2f) - (scale * sPendingCenter.y); sPendingCenter = null; pendingScale = null; fitToBounds(true); refreshRequiredTiles(true); } // On first display of base image set up position, and in other cases make sure scale is correct. fitToBounds(false); } /** * Calculates sample size to fit the source image in given bounds. */ private int calculateInSampleSize(float scale) { if (minimumTileDpi > 0) { DisplayMetrics metrics = getResources().getDisplayMetrics(); float averageDpi = (metrics.xdpi + metrics.ydpi) / 2; scale = (minimumTileDpi / averageDpi) * scale; } int reqWidth = (int) (sWidth() * scale); int reqHeight = (int) (sHeight() * scale); // Raw height and width of image int inSampleSize = 1; if (reqWidth == 0 || reqHeight == 0) { return 32; } if (sHeight() > reqHeight || sWidth() > reqWidth) { // Calculate ratios of height and width to requested height and width final int heightRatio = Math.round((float) sHeight() / (float) reqHeight); final int widthRatio = Math.round((float) sWidth() / (float) reqWidth); // Choose the smallest ratio as inSampleSize value, this will guarantee // a final image with both dimensions larger than or equal to the // requested height and width. inSampleSize = heightRatio < widthRatio ? heightRatio : widthRatio; } // We want the actual sample size that will be used, so round down to nearest power of 2. int power = 1; while (power * 2 < inSampleSize) { power = power * 2; } return power; } /** * Adjusts hypothetical future scale and translate values to keep scale within the allowed range and the image on screen. Minimum scale * is set so one dimension fills the view and the image is centered on the other dimension. Used to calculate what the target of an * animation should be. * * @param center Whether the image should be centered in the dimension it's too small to fill. While animating this can be false to avoid changes in direction as bounds are reached. 
* @param sat The scale we want and the translation we're aiming for. The values are adjusted to be valid. */ private void fitToBounds(boolean center, ScaleAndTranslate sat) { if (panLimit == PAN_LIMIT_OUTSIDE && isReady()) { center = false; } PointF vTranslate = sat.vTranslate; float scale = limitedScale(sat.scale); float scaleWidth = scale * sWidth(); float scaleHeight = scale * sHeight(); if (panLimit == PAN_LIMIT_CENTER && isReady()) { vTranslate.x = Math.max(vTranslate.x, getWidthInternal() / 2f - scaleWidth); vTranslate.y = Math.max(vTranslate.y, getHeightInternal() / 2f - scaleHeight); } else if (center) { vTranslate.x = Math.max(vTranslate.x, getWidthInternal() - scaleWidth); vTranslate.y = Math.max(vTranslate.y, getHeightInternal() - scaleHeight); } else { vTranslate.x = Math.max(vTranslate.x, -scaleWidth); vTranslate.y = Math.max(vTranslate.y, -scaleHeight); } // Asymmetric padding adjustments float xPaddingRatio = getPaddingLeft() > 0 || getPaddingRight() > 0 ? getPaddingLeft() / (float) (getPaddingLeft() + getPaddingRight()) : 0.5f; float yPaddingRatio = getPaddingTop() > 0 || getPaddingBottom() > 0 ? getPaddingTop() / (float) (getPaddingTop() + getPaddingBottom()) : 0.5f; float maxTx; float maxTy; if (panLimit == PAN_LIMIT_CENTER && isReady()) { maxTx = Math.max(0, getWidthInternal() / 2); maxTy = Math.max(0, getHeightInternal() / 2); } else if (center) { maxTx = Math.max(0, (getWidthInternal() - scaleWidth) * xPaddingRatio); maxTy = Math.max(0, (getHeightInternal() - scaleHeight) * yPaddingRatio); } else { maxTx = Math.max(0, getWidthInternal()); maxTy = Math.max(0, getHeightInternal()); } vTranslate.x = Math.min(vTranslate.x, maxTx); vTranslate.y = Math.min(vTranslate.y, maxTy); sat.scale = scale; } /** * Adjusts current scale and translate values to keep scale within the allowed range and the image on screen. Minimum scale * is set so one dimension fills the view and the image is centered on the other dimension. * * @param center Whether the image should be centered in the dimension it's too small to fill. While animating this can be false to avoid changes in direction as bounds are reached. */ private void fitToBounds(boolean center) { boolean init = false; if (vTranslate == null) { init = true; vTranslate = new PointF(0, 0); } if (satTemp == null) { satTemp = new ScaleAndTranslate(0, new PointF(0, 0)); } satTemp.scale = scale; satTemp.vTranslate.set(vTranslate); fitToBounds(center, satTemp); scale = satTemp.scale; vTranslate.set(satTemp.vTranslate); if (init && minimumScaleType != SCALE_TYPE_START) { vTranslate.set(vTranslateForSCenter(sWidth() / 2, sHeight() / 2, scale)); } } /** * Once source image and view dimensions are known, creates a map of sample size to tile grid. 
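* <p>
* A sketch of the resulting pyramid (illustrative numbers only): for fullImageSampleSize == 8 the
* map gains grids for sample sizes 8, 4, 2 and 1, halving each level; within a level the x/y tile
* counts grow until each tile's subsampled dimensions fit inside maxTileDimensions, so
* higher-resolution levels hold more, smaller tiles.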
*/ private void initialiseTileMap(Point maxTileDimensions) { debug("initialiseTileMap maxTileDimensions=%dx%d", maxTileDimensions.x, maxTileDimensions.y); this.tileMap = new LinkedHashMap<>(); int sampleSize = fullImageSampleSize; int xTiles = 1; int yTiles = 1; while (true) { int sTileWidth = sWidth() / xTiles; int sTileHeight = sHeight() / yTiles; int subTileWidth = sTileWidth / sampleSize; int subTileHeight = sTileHeight / sampleSize; while (subTileWidth + xTiles + 1 > maxTileDimensions.x || (subTileWidth > getWidthInternal() * 1.25 && sampleSize < fullImageSampleSize)) { xTiles += 1; sTileWidth = sWidth() / xTiles; subTileWidth = sTileWidth / sampleSize; } while (subTileHeight + yTiles + 1 > maxTileDimensions.y || (subTileHeight > getHeightInternal() * 1.25 && sampleSize < fullImageSampleSize)) { yTiles += 1; sTileHeight = sHeight() / yTiles; subTileHeight = sTileHeight / sampleSize; } List<Tile> tileGrid = new ArrayList<>(xTiles * yTiles); for (int x = 0; x < xTiles; x++) { for (int y = 0; y < yTiles; y++) { Tile tile = new Tile(); tile.sampleSize = sampleSize; tile.visible = sampleSize == fullImageSampleSize; tile.sRect = new Rect( x * sTileWidth, y * sTileHeight, x == xTiles - 1 ? sWidth() : (x + 1) * sTileWidth, y == yTiles - 1 ? sHeight() : (y + 1) * sTileHeight ); tile.vRect = new Rect(0, 0, 0, 0); tile.fileSRect = new Rect(tile.sRect); tileGrid.add(tile); } } tileMap.put(sampleSize, tileGrid); if (sampleSize == 1) { break; } else { sampleSize /= 2; } } } /** * Async task used to get image details without blocking the UI thread. */ private static class TilesInitTask extends AsyncTask<Void, Void, int[]> { private final WeakReference<CustomSubsamplingScaleImageView> viewRef; private final WeakReference<Context> contextRef; private final WeakReference<DecoderFactory<? extends ImageRegionDecoder>> decoderFactoryRef; private final Uri source; private ImageRegionDecoder decoder; private Exception exception; TilesInitTask(CustomSubsamplingScaleImageView view, Context context, DecoderFactory<? extends ImageRegionDecoder> decoderFactory, Uri source) { this.viewRef = new WeakReference<>(view); this.contextRef = new WeakReference<>(context); this.decoderFactoryRef = new WeakReference<>(decoderFactory); this.source = source; } @Override protected int[] doInBackground(Void... params) { try { String sourceUri = source.toString(); Context context = contextRef.get(); DecoderFactory<? 
extends ImageRegionDecoder> decoderFactory = decoderFactoryRef.get(); CustomSubsamplingScaleImageView view = viewRef.get(); if (context != null && decoderFactory != null && view != null) { view.debug("TilesInitTask.doInBackground"); decoder = decoderFactory.make(); Point dimensions = decoder.init(context, source); int sWidth = dimensions.x; int sHeight = dimensions.y; int exifOrientation = view.getExifOrientation(context, sourceUri); if (view.sRegion != null) { view.sRegion.left = Math.max(0, view.sRegion.left); view.sRegion.top = Math.max(0, view.sRegion.top); view.sRegion.right = Math.min(sWidth, view.sRegion.right); view.sRegion.bottom = Math.min(sHeight, view.sRegion.bottom); sWidth = view.sRegion.width(); sHeight = view.sRegion.height(); } return new int[]{sWidth, sHeight, exifOrientation}; } } catch (Exception e) { Timber.tag(TAG).e(e, "Failed to initialise bitmap decoder"); this.exception = e; } return null; } @Override protected void onPostExecute(int[] xyo) { final CustomSubsamplingScaleImageView view = viewRef.get(); if (view != null) { if (decoder != null && xyo != null && xyo.length == 3) { view.onTilesInited(decoder, xyo[0], xyo[1], xyo[2]); } else if (exception != null && view.onImageEventListener != null) { view.onImageEventListener.onImageLoadError(exception); } } } } /** * Called by worker task when decoder is ready and image size and EXIF orientation is known. */ private synchronized void onTilesInited(ImageRegionDecoder decoder, int sWidth, int sHeight, int sOrientation) { debug("onTilesInited sWidth=%d, sHeight=%d, sOrientation=%d", sWidth, sHeight, sOrientation); // If actual dimensions don't match the declared size, reset everything. if (this.sWidth > 0 && this.sHeight > 0 && (this.sWidth != sWidth || this.sHeight != sHeight)) { reset(false); if (bitmap != null) { if (!bitmapIsCached) { bitmap.recycle(); } bitmap = null; if (onImageEventListener != null && bitmapIsCached) { onImageEventListener.onPreviewReleased(); } bitmapIsPreview = false; bitmapIsCached = false; } } this.decoder = decoder; this.sWidth = sWidth; this.sHeight = sHeight; this.sOrientation = sOrientation; checkReady(); if (!checkImageLoaded() && maxTileWidth > 0 && maxTileWidth != TILE_SIZE_AUTO && maxTileHeight > 0 && maxTileHeight != TILE_SIZE_AUTO && getWidthInternal() > 0 && getHeightInternal() > 0) { initialiseBaseLayer(new Point(maxTileWidth, maxTileHeight)); } invalidate(); requestLayout(); } /** * Async task used to load images without blocking the UI thread. */ private static class TileLoadTask extends AsyncTask<Void, Void, Bitmap> { private final WeakReference<CustomSubsamplingScaleImageView> viewRef; private final WeakReference<ImageRegionDecoder> decoderRef; private final WeakReference<Tile> tileRef; private Exception exception; TileLoadTask(CustomSubsamplingScaleImageView view, ImageRegionDecoder decoder, Tile tile) { this.viewRef = new WeakReference<>(view); this.decoderRef = new WeakReference<>(decoder); this.tileRef = new WeakReference<>(tile); tile.loading = true; } @Override protected Bitmap doInBackground(Void...
params) { try { CustomSubsamplingScaleImageView view = viewRef.get(); ImageRegionDecoder decoder = decoderRef.get(); Tile tile = tileRef.get(); if (decoder != null && tile != null && view != null && decoder.isReady() && tile.visible) { view.debug("TileLoadTask.doInBackground, tile.sRect=%s, tile.sampleSize=%d", tile.sRect, tile.sampleSize); view.decoderLock.readLock().lock(); try { if (decoder.isReady()) { // Update tile's file sRect according to rotation view.fileSRect(tile.sRect, tile.fileSRect); if (view.sRegion != null) { tile.fileSRect.offset(view.sRegion.left, view.sRegion.top); } return decoder.decodeRegion(tile.fileSRect, tile.sampleSize); } else { tile.loading = false; } } finally { view.decoderLock.readLock().unlock(); } } else if (tile != null) { tile.loading = false; } } catch (Exception e) { Timber.tag(TAG).e(e, "Failed to decode tile"); this.exception = e; } catch (OutOfMemoryError e) { Timber.tag(TAG).e(e, "Failed to decode tile - OutOfMemoryError"); this.exception = new RuntimeException(e); } return null; } @Override protected void onPostExecute(Bitmap bitmap) { final CustomSubsamplingScaleImageView subsamplingScaleImageView = viewRef.get(); final Tile tile = tileRef.get(); if (subsamplingScaleImageView != null && tile != null) { if (bitmap != null) { tile.bitmap = bitmap; tile.loading = false; subsamplingScaleImageView.onTileLoaded(); } else if (exception != null && subsamplingScaleImageView.onImageEventListener != null) { subsamplingScaleImageView.onImageEventListener.onTileLoadError(exception); } } } } /** * Called by worker task when a tile has loaded. Redraws the view. */ private synchronized void onTileLoaded() { debug("onTileLoaded"); checkReady(); checkImageLoaded(); if (isBaseLayerReady() && bitmap != null) { if (!bitmapIsCached) { bitmap.recycle(); } bitmap = null; if (onImageEventListener != null && bitmapIsCached) { onImageEventListener.onPreviewReleased(); } bitmapIsPreview = false; bitmapIsCached = false; } invalidate(); } /** * Async task used to load bitmap without blocking the UI thread. */ private static class BitmapLoadTask extends AsyncTask<Void, Void, Integer> { private final WeakReference<CustomSubsamplingScaleImageView> viewRef; private final WeakReference<Context> contextRef; private final WeakReference<DecoderFactory<? extends ImageDecoder>> decoderFactoryRef; private final Uri source; private final boolean preview; private Bitmap bitmap; private Exception exception; BitmapLoadTask(CustomSubsamplingScaleImageView view, Context context, DecoderFactory<? extends ImageDecoder> decoderFactory, Uri source, boolean preview) { this.viewRef = new WeakReference<>(view); this.contextRef = new WeakReference<>(context); this.decoderFactoryRef = new WeakReference<>(decoderFactory); this.source = source; this.preview = preview; } @Override protected Integer doInBackground(Void... params) { try { String sourceUri = source.toString(); Context context = contextRef.get(); DecoderFactory<? 
extends ImageDecoder> decoderFactory = decoderFactoryRef.get(); CustomSubsamplingScaleImageView view = viewRef.get(); if (context != null && decoderFactory != null && view != null) { view.debug("BitmapLoadTask.doInBackground"); bitmap = decoderFactory.make().decode(context, source); return view.getExifOrientation(context, sourceUri); } } catch (Exception e) { Timber.tag(TAG).e(e, "Failed to load bitmap"); this.exception = e; } catch (OutOfMemoryError e) { Timber.tag(TAG).e(e, "Failed to load bitmap - OutOfMemoryError"); this.exception = new RuntimeException(e); } return null; } @Override protected void onPostExecute(Integer orientation) { CustomSubsamplingScaleImageView subsamplingScaleImageView = viewRef.get(); if (subsamplingScaleImageView != null) { if (bitmap != null && orientation != null) { if (preview) { subsamplingScaleImageView.onPreviewLoaded(bitmap); } else { subsamplingScaleImageView.onImageLoaded(bitmap, orientation, false); } } else if (exception != null && subsamplingScaleImageView.onImageEventListener != null) { if (preview) { subsamplingScaleImageView.onImageEventListener.onPreviewLoadError(exception); } else { subsamplingScaleImageView.onImageEventListener.onImageLoadError(exception); } } } } } /** * Called by worker task when preview image is loaded. */ private synchronized void onPreviewLoaded(Bitmap previewBitmap) { debug("onPreviewLoaded"); if (bitmap != null || imageLoadedSent) { previewBitmap.recycle(); return; } if (pRegion != null) { bitmap = Bitmap.createBitmap(previewBitmap, pRegion.left, pRegion.top, pRegion.width(), pRegion.height()); } else { bitmap = previewBitmap; } bitmapIsPreview = true; if (checkReady()) { invalidate(); requestLayout(); } } /** * Called by worker task when full size image bitmap is ready (tiling is disabled). */ private synchronized void onImageLoaded(Bitmap bitmap, int sOrientation, boolean bitmapIsCached) { debug("onImageLoaded"); // If actual dimensions don't match the declared size, reset everything. if (this.sWidth > 0 && this.sHeight > 0 && (this.sWidth != bitmap.getWidth() || this.sHeight != bitmap.getHeight())) { reset(false); } if (this.bitmap != null && !this.bitmapIsCached) { this.bitmap.recycle(); } if (this.bitmap != null && this.bitmapIsCached && onImageEventListener != null) { onImageEventListener.onPreviewReleased(); } this.bitmapIsPreview = false; this.bitmapIsCached = bitmapIsCached; this.bitmap = bitmap; this.sWidth = bitmap.getWidth(); this.sHeight = bitmap.getHeight(); this.sOrientation = sOrientation; boolean ready = checkReady(); boolean imageLoaded = checkImageLoaded(); if (ready || imageLoaded) { invalidate(); requestLayout(); } } /** * Helper method for load tasks. Examines the EXIF info on the image file to determine the orientation. * This will only work for external files, not assets, resources or other URIs. 
*/ @AnyThread private int getExifOrientation(Context context, String sourceUri) { int exifOrientation = ORIENTATION_0; if (sourceUri.startsWith(ContentResolver.SCHEME_CONTENT)) { Cursor cursor = null; try { String[] columns = {MediaStore.Images.Media.ORIENTATION}; cursor = context.getContentResolver().query(Uri.parse(sourceUri), columns, null, null, null); if (cursor != null) { if (cursor.moveToFirst()) { int orientation = cursor.getInt(0); if (VALID_ORIENTATIONS.contains(orientation) && orientation != ORIENTATION_USE_EXIF) { exifOrientation = orientation; } else { Timber.tag(TAG).w("Unsupported orientation: %s", orientation); } } } } catch (Exception e) { Timber.tag(TAG).w(e, "Could not get orientation of image from media store"); } finally { if (cursor != null) { cursor.close(); } } } else if (sourceUri.startsWith(ImageSource.FILE_SCHEME) && !sourceUri.startsWith(ImageSource.ASSET_SCHEME)) { try { ExifInterface exifInterface = new ExifInterface(sourceUri.substring(ImageSource.FILE_SCHEME.length() - 1)); int orientationAttr = exifInterface.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL); if (orientationAttr == ExifInterface.ORIENTATION_NORMAL || orientationAttr == ExifInterface.ORIENTATION_UNDEFINED) { exifOrientation = ORIENTATION_0; } else if (orientationAttr == ExifInterface.ORIENTATION_ROTATE_90) { exifOrientation = ORIENTATION_90; } else if (orientationAttr == ExifInterface.ORIENTATION_ROTATE_180) { exifOrientation = ORIENTATION_180; } else if (orientationAttr == ExifInterface.ORIENTATION_ROTATE_270) { exifOrientation = ORIENTATION_270; } else { Timber.tag(TAG).w("Unsupported EXIF orientation: %s", orientationAttr); } } catch (Exception e) { Timber.tag(TAG).w(e, "Could not get EXIF orientation of image"); } } return exifOrientation; } private void execute(AsyncTask<Void, Void, ?> asyncTask) { asyncTask.executeOnExecutor(executor); } private static class Tile { private Rect sRect; private int sampleSize; private Bitmap bitmap; private boolean loading; private boolean visible; // Volatile fields instantiated once then updated before use to reduce GC. private Rect vRect; private Rect fileSRect; } private static class Anim { private float scaleStart; // Scale at start of anim private float scaleEnd; // Scale at end of anim (target) private PointF sCenterStart; // Source center point at start private PointF sCenterEnd; // Source center point at end, adjusted for pan limits private PointF sCenterEndRequested; // Source center point that was requested, without adjustment private PointF vFocusStart; // View point that was double tapped private PointF vFocusEnd; // Where the view focal point should be moved to during the anim private long duration = 500; // How long the anim takes private boolean interruptible = true; // Whether the anim can be interrupted by a touch private int easing = EASE_IN_OUT_QUAD; // Easing style private int origin = ORIGIN_ANIM; // Animation origin (API, double tap or fling) private long time = System.currentTimeMillis(); // Start time private OnAnimationEventListener listener; // Event listener } private static class ScaleAndTranslate { private ScaleAndTranslate(float scale, PointF vTranslate) { this.scale = scale; this.vTranslate = vTranslate; } private float scale; private final PointF vTranslate; } /** * Set scale, center and orientation from saved state.
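* <p>
* A minimal round-trip sketch, assuming the {@code setImage} overload taking an
* {@link ImageViewState} from the upstream library this view is based on ({@code imageUri} is a
* placeholder):
* <pre>{@code
* ImageViewState state = view.getState(); // null while the view is not ready
* // ... later, e.g. after rotation ...
* if (state != null) view.setImage(ImageSource.uri(imageUri), state);
* }</pre>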
*/ private void restoreState(ImageViewState state) { if (state != null && VALID_ORIENTATIONS.contains(state.getOrientation())) { this.orientation = state.getOrientation(); this.pendingScale = state.getScale(); this.sPendingCenter = state.getCenter(); invalidate(); } } /** * By default the View automatically calculates the optimal tile size. Set this to override this, and force an upper limit to the dimensions of the generated tiles. Passing {@link #TILE_SIZE_AUTO} will re-enable the default behaviour. * * @param maxPixels Maximum tile size X and Y in pixels. */ public void setMaxTileSize(int maxPixels) { this.maxTileWidth = maxPixels; this.maxTileHeight = maxPixels; } /** * By default the View automatically calculates the optimal tile size. Set this to override this, and force an upper limit to the dimensions of the generated tiles. Passing {@link #TILE_SIZE_AUTO} will re-enable the default behaviour. * * @param maxPixelsX Maximum tile width. * @param maxPixelsY Maximum tile height. */ public void setMaxTileSize(int maxPixelsX, int maxPixelsY) { this.maxTileWidth = maxPixelsX; this.maxTileHeight = maxPixelsY; } /** * Use canvas max bitmap width and height instead of the default 2048, to avoid redundant tiling. */ @NonNull private Point getMaxBitmapDimensions(Canvas canvas) { return new Point(Math.min(canvas.getMaximumBitmapWidth(), maxTileWidth), Math.min(canvas.getMaximumBitmapHeight(), maxTileHeight)); } /** * Get source width taking rotation into account. */ @SuppressWarnings("SuspiciousNameCombination") private int sWidth() { int rotation = getRequiredRotation(); if (rotation == 90 || rotation == 270) { return sHeight; } else { return sWidth; } } /** * Get source height taking rotation into account. */ @SuppressWarnings("SuspiciousNameCombination") private int sHeight() { int rotation = getRequiredRotation(); if (rotation == 90 || rotation == 270) { return sWidth; } else { return sHeight; } } /** * Converts source rectangle from tile, which treats the image file as if it were in the correct orientation already, * to the rectangle of the image that needs to be loaded. */ @SuppressWarnings("SuspiciousNameCombination") @AnyThread private void fileSRect(Rect sRect, Rect target) { if (getRequiredRotation() == 0) { target.set(sRect); } else if (getRequiredRotation() == 90) { target.set(sRect.top, sHeight - sRect.right, sRect.bottom, sHeight - sRect.left); } else if (getRequiredRotation() == 180) { target.set(sWidth - sRect.right, sHeight - sRect.bottom, sWidth - sRect.left, sHeight - sRect.top); } else { target.set(sWidth - sRect.bottom, sRect.left, sWidth - sRect.top, sRect.right); } } /** * Determines the rotation to be applied to tiles, based on EXIF orientation or chosen setting. */ @AnyThread private int getRequiredRotation() { if (orientation == ORIENTATION_USE_EXIF) { return sOrientation; } else { return orientation; } } /** * Pythagoras distance between two points. */ private float distance(float x0, float x1, float y0, float y1) { float x = x0 - x1; float y = y0 - y1; return (float) Math.sqrt(x * x + y * y); } /** * Releases all resources the view is using and resets the state, nulling any fields that use significant memory. * After you have called this method, the view can be re-used by setting a new image. Settings are remembered * but state (scale and center) is forgotten. You can restore these yourself if required. 
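* <p>
* Typical reuse sketch ({@code imageUri} is a placeholder and {@code setImage} refers to the
* loading API of the upstream library):
* <pre>{@code
* view.recycle();                            // release bitmaps, tiles and decoders
* view.setImage(ImageSource.uri(imageUri));  // the same view instance can then load a new image
* }</pre>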
*/ public void recycle() { reset(true); bitmapPaint = null; debugTextPaint = null; debugLinePaint = null; tileBgPaint = null; } /** * Convert screen to source x coordinate. */ private float viewToSourceX(float vx) { if (vTranslate == null) { return Float.NaN; } return (vx - vTranslate.x) / scale; } /** * Convert screen to source y coordinate. */ private float viewToSourceY(float vy) { if (vTranslate == null) { return Float.NaN; } return (vy - vTranslate.y) / scale; } /** * Converts a rectangle within the view to the corresponding rectangle from the source file, taking * into account the current scale, translation, orientation and clipped region. This can be used * to decode a bitmap from the source file. * <p> * This method will only work when the image has fully initialised, after {@link #isReady()} returns * true. It is not guaranteed to work with preloaded bitmaps. * <p> * The result is written to the fRect argument. Re-use a single instance for efficiency. * * @param vRect rectangle representing the view area to interpret. * @param fRect rectangle instance to which the result will be written. Re-use for efficiency. */ public void viewToFileRect(Rect vRect, Rect fRect) { if (vTranslate == null || !readySent) { return; } fRect.set( (int) viewToSourceX(vRect.left), (int) viewToSourceY(vRect.top), (int) viewToSourceX(vRect.right), (int) viewToSourceY(vRect.bottom)); fileSRect(fRect, fRect); fRect.set( Math.max(0, fRect.left), Math.max(0, fRect.top), Math.min(sWidth, fRect.right), Math.min(sHeight, fRect.bottom) ); if (sRegion != null) { fRect.offset(sRegion.left, sRegion.top); } } /** * Find the area of the source file that is currently visible on screen, taking into account the * current scale, translation, orientation and clipped region. This is a convenience method; see * {@link #viewToFileRect(Rect, Rect)}. * * @param fRect rectangle instance to which the result will be written. Re-use for efficiency. */ public void visibleFileRect(Rect fRect) { if (vTranslate == null || !readySent) { return; } fRect.set(0, 0, getWidthInternal(), getHeightInternal()); viewToFileRect(fRect, fRect); } /** * Convert screen coordinate to source coordinate. * * @param vxy view X/Y coordinate. * @return a coordinate representing the corresponding source coordinate. */ @Nullable public final PointF viewToSourceCoord(PointF vxy) { return viewToSourceCoord(vxy.x, vxy.y, new PointF()); } /** * Convert screen coordinate to source coordinate. * * @param vx view X coordinate. * @param vy view Y coordinate. * @return a coordinate representing the corresponding source coordinate. */ @Nullable public final PointF viewToSourceCoord(float vx, float vy) { return viewToSourceCoord(vx, vy, new PointF()); } /** * Convert screen coordinate to source coordinate. * * @param vxy view coordinates to convert. * @param sTarget target object for result. The same instance is also returned. * @return source coordinates. This is the same instance passed to the sTarget param. */ @Nullable public final PointF viewToSourceCoord(PointF vxy, @NonNull PointF sTarget) { return viewToSourceCoord(vxy.x, vxy.y, sTarget); } /** * Convert screen coordinate to source coordinate. * * @param vx view X coordinate. * @param vy view Y coordinate. * @param sTarget target object for result. The same instance is also returned. * @return source coordinates. This is the same instance passed to the sTarget param. 
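* <p>
* Allocation-free usage sketch for touch handling ({@code handleTapAt} is a hypothetical callback):
* <pre>{@code
* private final PointF sCoord = new PointF(); // reused for every event
*
* boolean onSingleTap(MotionEvent event) {
*     if (view.viewToSourceCoord(event.getX(), event.getY(), sCoord) != null) {
*         handleTapAt(sCoord.x, sCoord.y); // sCoord now holds source-space coordinates
*     }
*     return true;
* }
* }</pre>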
*/ @Nullable public final PointF viewToSourceCoord(float vx, float vy, @NonNull PointF sTarget) { if (vTranslate == null) { return null; } sTarget.set(viewToSourceX(vx), viewToSourceY(vy)); return sTarget; } /** * Convert source to view x coordinate. */ private float sourceToViewX(float sx) { if (vTranslate == null) { return Float.NaN; } return (sx * scale) + vTranslate.x; } /** * Convert source to view y coordinate. */ private float sourceToViewY(float sy) { if (vTranslate == null) { return Float.NaN; } return (sy * scale) + vTranslate.y; } /** * Convert source coordinate to view coordinate. * * @param sxy source coordinates to convert. * @return view coordinates. */ @Nullable public final PointF sourceToViewCoord(PointF sxy) { return sourceToViewCoord(sxy.x, sxy.y, new PointF()); } /** * Convert source coordinate to view coordinate. * * @param sx source X coordinate. * @param sy source Y coordinate. * @return view coordinates. */ @Nullable public final PointF sourceToViewCoord(float sx, float sy) { return sourceToViewCoord(sx, sy, new PointF()); } /** * Convert source coordinate to view coordinate. * * @param sxy source coordinates to convert. * @param vTarget target object for result. The same instance is also returned. * @return view coordinates. This is the same instance passed to the vTarget param. */ @SuppressWarnings("UnusedReturnValue") @Nullable public final PointF sourceToViewCoord(PointF sxy, @NonNull PointF vTarget) { return sourceToViewCoord(sxy.x, sxy.y, vTarget); } /** * Convert source coordinate to view coordinate. * * @param sx source X coordinate. * @param sy source Y coordinate. * @param vTarget target object for result. The same instance is also returned. * @return view coordinates. This is the same instance passed to the vTarget param. */ @Nullable public final PointF sourceToViewCoord(float sx, float sy, @NonNull PointF vTarget) { if (vTranslate == null) { return null; } vTarget.set(sourceToViewX(sx), sourceToViewY(sy)); return vTarget; } /** * Convert source rect to screen rect, integer values. */ private void sourceToViewRect(@NonNull Rect sRect, @NonNull Rect vTarget) { vTarget.set( (int) sourceToViewX(sRect.left), (int) sourceToViewY(sRect.top), (int) sourceToViewX(sRect.right), (int) sourceToViewY(sRect.bottom) ); } /** * Get the translation required to place a given source coordinate at the center of the screen, with the center * adjusted for asymmetric padding. Accepts the desired scale as an argument, so this is independent of current * translate and scale. The result is fitted to bounds, putting the image point as near to the screen center as permitted. */ @NonNull private PointF vTranslateForSCenter(float sCenterX, float sCenterY, float scale) { int vxCenter = getPaddingLeft() + (getWidthInternal() - getPaddingRight() - getPaddingLeft()) / 2; int vyCenter = getPaddingTop() + (getHeightInternal() - getPaddingBottom() - getPaddingTop()) / 2; if (satTemp == null) { satTemp = new ScaleAndTranslate(0, new PointF(0, 0)); } satTemp.scale = scale; satTemp.vTranslate.set(vxCenter - (sCenterX * scale), vyCenter - (sCenterY * scale)); fitToBounds(true, satTemp); return satTemp.vTranslate; } /** * Given a requested source center and scale, calculate what the actual center will have to be to keep the image in * pan limits, keeping the requested center as near to the middle of the screen as allowed. 
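* <p>
* Equivalently, a restatement of the math below: the result is the source point that sits under
* the padded view centre once the translation has been clamped by vTranslateForSCenter, i.e.
* sx = (vxCenter - vTranslate.x) / scale and sy = (vyCenter - vTranslate.y) / scale.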
*/ @NonNull private PointF limitedSCenter(float sCenterX, float sCenterY, float scale, @NonNull PointF sTarget) { PointF vTranslate = vTranslateForSCenter(sCenterX, sCenterY, scale); int vxCenter = getPaddingLeft() + (getWidthInternal() - getPaddingRight() - getPaddingLeft()) / 2; int vyCenter = getPaddingTop() + (getHeightInternal() - getPaddingBottom() - getPaddingTop()) / 2; float sx = (vxCenter - vTranslate.x) / scale; float sy = (vyCenter - vTranslate.y) / scale; sTarget.set(sx, sy); return sTarget; } /** * Returns the minimum allowed scale. */ private float minScale() { int vPadding = getPaddingBottom() + getPaddingTop(); int hPadding = getPaddingLeft() + getPaddingRight(); if (minimumScaleType == SCALE_TYPE_CENTER_CROP || minimumScaleType == SCALE_TYPE_START) { return Math.max((getWidthInternal() - hPadding) / (float) sWidth(), (getHeightInternal() - vPadding) / (float) sHeight()); } else if (minimumScaleType == SCALE_TYPE_CUSTOM && minScale > 0) { return minScale; } else { return Math.min((getWidthInternal() - hPadding) / (float) sWidth(), (getHeightInternal() - vPadding) / (float) sHeight()); } } /** * Adjust a requested scale to be within the allowed limits. */ private float limitedScale(float targetScale) { targetScale = Math.max(minScale(), targetScale); targetScale = Math.min(maxScale, targetScale); return targetScale; } /** * Apply a selected type of easing. * * @param type Easing type, from static fields * @param time Elapsed time * @param from Start value * @param change Change in value (target minus start) * @param duration Anim duration * @return Current value */ private float ease(int type, long time, float from, float change, long duration) { switch (type) { case EASE_IN_OUT_QUAD: return easeInOutQuad(time, from, change, duration); case EASE_OUT_QUAD: return easeOutQuad(time, from, change, duration); default: throw new IllegalStateException("Unexpected easing type: " + type); } } /** * Quadratic easing for fling. With thanks to Robert Penner - http://gizma.com/easing/ * * @param time Elapsed time * @param from Start value * @param change Change in value (target minus start) * @param duration Anim duration * @return Current value */ private float easeOutQuad(long time, float from, float change, long duration) { float progress = (float) time / (float) duration; return -change * progress * (progress - 2) + from; } /** * Quadratic easing for scale and center animations. With thanks to Robert Penner - http://gizma.com/easing/ * * @param time Elapsed time * @param from Start value * @param change Change in value (target minus start) * @param duration Anim duration * @return Current value */ private float easeInOutQuad(long time, float from, float change, long duration) { float timeF = time / (duration / 2f); if (timeF < 1) { return (change / 2f * timeF * timeF) + from; } else { timeF--; return (-change / 2f) * (timeF * (timeF - 2) - 1) + from; } } /** * Debug logger */ @AnyThread private void debug(String message, Object... args) { if (debug) { Timber.d(message, args); } } /** * For debug overlays. Scale pixel value according to screen density. */ private int px(int px) { return (int) (density * px); } /** * Swap the default region decoder implementation for one of your own. You must do this before setting the image file or * asset, and you cannot use a custom decoder when using layout XML to set an asset name. Your class must have a * public default constructor. * * @param regionDecoderClass The {@link ImageRegionDecoder} implementation to use. */ public final void setRegionDecoderClass(@NonNull Class<?
extends ImageRegionDecoder> regionDecoderClass) { this.regionDecoderFactory = new CompatDecoderFactory<>(regionDecoderClass); } /** * Swap the default region decoder implementation for one of your own. You must do this before setting the image file or * asset, and you cannot use a custom decoder when using layout XML to set an asset name. * * @param regionDecoderFactory The {@link DecoderFactory} implementation that produces {@link ImageRegionDecoder} * instances. */ public final void setRegionDecoderFactory(@NonNull DecoderFactory<? extends ImageRegionDecoder> regionDecoderFactory) { this.regionDecoderFactory = regionDecoderFactory; } /** * Swap the default bitmap decoder implementation for one of your own. You must do this before setting the image file or * asset, and you cannot use a custom decoder when using layout XML to set an asset name. Your class must have a * public default constructor. * * @param bitmapDecoderClass The {@link ImageDecoder} implementation to use. */ public final void setBitmapDecoderClass(@NonNull Class<? extends ImageDecoder> bitmapDecoderClass) { this.bitmapDecoderFactory = new CompatDecoderFactory<>(bitmapDecoderClass); } /** * Swap the default bitmap decoder implementation for one of your own. You must do this before setting the image file or * asset, and you cannot use a custom decoder when using layout XML to set an asset name. * * @param bitmapDecoderFactory The {@link DecoderFactory} implementation that produces {@link ImageDecoder} instances. */ public final void setBitmapDecoderFactory(@NonNull DecoderFactory<? extends ImageDecoder> bitmapDecoderFactory) { this.bitmapDecoderFactory = bitmapDecoderFactory; } /** * Calculate how much further the image can be panned in each direction. The results are set on * the supplied {@link RectF} and expressed as screen pixels. For example, if the image cannot be * panned any further towards the left, the value of {@link RectF#left} will be set to 0. * * @param vTarget target object for results. Re-use for efficiency. */ public final void getPanRemaining(RectF vTarget) { if (!isReady()) { return; } float scaleWidth = scale * sWidth(); float scaleHeight = scale * sHeight(); if (panLimit == PAN_LIMIT_CENTER) { vTarget.top = Math.max(0, -(vTranslate.y - (getHeightInternal() / 2f))); vTarget.left = Math.max(0, -(vTranslate.x - (getWidthInternal() / 2f))); vTarget.bottom = Math.max(0, vTranslate.y - ((getHeightInternal() / 2f) - scaleHeight)); vTarget.right = Math.max(0, vTranslate.x - ((getWidthInternal() / 2f) - scaleWidth)); } else if (panLimit == PAN_LIMIT_OUTSIDE) { vTarget.top = Math.max(0, -(vTranslate.y - getHeightInternal())); vTarget.left = Math.max(0, -(vTranslate.x - getWidthInternal())); vTarget.bottom = Math.max(0, vTranslate.y + scaleHeight); vTarget.right = Math.max(0, vTranslate.x + scaleWidth); } else { vTarget.top = Math.max(0, -vTranslate.y); vTarget.left = Math.max(0, -vTranslate.x); vTarget.bottom = Math.max(0, (scaleHeight + vTranslate.y) - getHeightInternal()); vTarget.right = Math.max(0, (scaleWidth + vTranslate.x) - getWidthInternal()); } } /** * Set the pan limiting style. See static fields. Normally {@link #PAN_LIMIT_INSIDE} is best, for image galleries. * * @param panLimit a pan limit constant. See static fields. */ public final void setPanLimit(int panLimit) { if (!VALID_PAN_LIMITS.contains(panLimit)) { throw new IllegalArgumentException("Invalid pan limit: " + panLimit); } this.panLimit = panLimit; if (isReady()) { fitToBounds(true); invalidate(); } } /** * Set the minimum scale type. 
See static fields. Normally {@link #SCALE_TYPE_CENTER_INSIDE} is best, for image galleries. * * @param scaleType a scale type constant. See static fields. */ public final void setMinimumScaleType(int scaleType) { if (!VALID_SCALE_TYPES.contains(scaleType)) { throw new IllegalArgumentException("Invalid scale type: " + scaleType); } this.minimumScaleType = scaleType; if (isReady()) { fitToBounds(true); invalidate(); } } /** * Set the maximum scale allowed. A value of 1 means 1:1 pixels at maximum scale. You may wish to set this according * to screen density - on a retina screen, 1:1 may still be too small. Consider using {@link #setMinimumDpi(int)}, * which is density aware. * * @param maxScale maximum scale expressed as a source/view pixels ratio. */ public final void setMaxScale(float maxScale) { this.maxScale = maxScale; } /** * Set the minimum scale allowed. A value of 1 means 1:1 pixels at minimum scale. You may wish to set this according * to screen density. Consider using {@link #setMaximumDpi(int)}, which is density aware. * * @param minScale minimum scale expressed as a source/view pixels ratio. */ public final void setMinScale(float minScale) { this.minScale = minScale; } /** * This is a screen density aware alternative to {@link #setMaxScale(float)}; it allows you to express the maximum * allowed scale in terms of the minimum pixel density. This avoids the problem of 1:1 scale still being * too small on a high density screen. A sensible starting point is 160 - the default used by this view. * * @param dpi Source image pixel density at maximum zoom. */ public final void setMinimumDpi(int dpi) { DisplayMetrics metrics = getResources().getDisplayMetrics(); float averageDpi = (metrics.xdpi + metrics.ydpi) / 2; setMaxScale(averageDpi / dpi); } /** * This is a screen density aware alternative to {@link #setMinScale(float)}; it allows you to express the minimum * allowed scale in terms of the maximum pixel density. * * @param dpi Source image pixel density at minimum zoom. */ public final void setMaximumDpi(int dpi) { DisplayMetrics metrics = getResources().getDisplayMetrics(); float averageDpi = (metrics.xdpi + metrics.ydpi) / 2; setMinScale(averageDpi / dpi); } /** * Returns the maximum allowed scale. * * @return the maximum scale as a source/view pixels ratio. */ public float getMaxScale() { return maxScale; } /** * Returns the minimum allowed scale. * * @return the minimum scale as a source/view pixels ratio. */ public final float getMinScale() { return minScale(); } /** * By default, image tiles are at least as high resolution as the screen. For a retina screen this may not be * necessary, and may increase the likelihood of an OutOfMemoryError. This method sets a DPI at which higher * resolution tiles should be loaded. Using a lower number will on average use less memory but result in a lower * quality image. 160-240dpi will usually be enough. This should be called before setting the image source, * because it affects which tiles get loaded. When using an untiled source image this method has no effect. * * @param minimumTileDpi Tile loading threshold. */ public void setMinimumTileDpi(int minimumTileDpi) { DisplayMetrics metrics = getResources().getDisplayMetrics(); float averageDpi = (metrics.xdpi + metrics.ydpi) / 2; this.minimumTileDpi = (int) Math.min(averageDpi, minimumTileDpi); if (isReady()) { reset(false); invalidate(); } } /** * Returns the source point at the center of the view. * * @return the source coordinates current at the center of the view. 
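* <p>
* Commonly paired with {@link #getScale()} and {@link #setScaleAndCenter(float, PointF)} to keep
* the viewport across configuration changes (a usage sketch; the local variables are placeholders):
* <pre>{@code
* float savedScale = view.getScale();
* PointF savedCenter = view.getCenter(); // may be null before the view is ready
* // ... after the view is recreated ...
* if (savedCenter != null) view.setScaleAndCenter(savedScale, savedCenter);
* }</pre>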
*/ @Nullable public final PointF getCenter() { int mX = getWidthInternal() / 2; int mY = getHeightInternal() / 2; return viewToSourceCoord(mX, mY); } /** * Returns the current scale value. * * @return the current scale as a source/view pixels ratio. */ public final float getScale() { return scale; } /** * Externally change the scale and translation of the source image. This may be used with getCenter() and getScale() * to restore the scale and zoom after a screen rotate. * * @param scale New scale to set. * @param sCenter New source image coordinate to center on the screen, subject to boundaries. */ public final void setScaleAndCenter(float scale, @Nullable PointF sCenter) { this.anim = null; this.pendingScale = scale; this.sPendingCenter = sCenter; this.sRequestedCenter = sCenter; invalidate(); } /** * Fully zoom out and return the image to the middle of the screen. This might be useful if you have a view pager * and want images to be reset when the user has moved to another page. */ public final void resetScaleAndCenter() { this.anim = null; this.pendingScale = limitedScale(0); if (isReady()) { this.sPendingCenter = new PointF(sWidth() / 2, sHeight() / 2); } else { this.sPendingCenter = new PointF(0, 0); } invalidate(); } /** * Call to find whether the view is initialised, has dimensions, and will display an image on * the next draw. If a preview has been provided, it may be the preview that will be displayed * and the full size image may still be loading. If no preview was provided, this is called once * the base layer tiles of the full size image are loaded. * * @return true if the view is ready to display an image and accept touch gestures. */ public final boolean isReady() { return readySent; } /** * Called once when the view is initialised, has dimensions, and will display an image on the * next draw. This is triggered at the same time as {@link OnImageEventListener#onReady()} but * allows a subclass to receive this event without using a listener. */ @SuppressWarnings("EmptyMethod") protected void onReady() { } /** * Call to find whether the main image (base layer tiles where relevant) have been loaded. Before * this event the view is blank unless a preview was provided. * * @return true if the main image (not the preview) has been loaded and is ready to display. */ public final boolean isImageLoaded() { return imageLoadedSent; } /** * Called once when the full size image or its base layer tiles have been loaded. */ @SuppressWarnings("EmptyMethod") protected void onImageLoaded() { } /** * Get source width, ignoring orientation. If {@link #getOrientation()} returns 90 or 270, you can use {@link #getSHeight()} * for the apparent width. * * @return the source image width in pixels. */ public final int getSWidth() { return sWidth; } /** * Get source height, ignoring orientation. If {@link #getOrientation()} returns 90 or 270, you can use {@link #getSWidth()} * for the apparent height. * * @return the source image height in pixels. */ public final int getSHeight() { return sHeight; } /** * Returns the orientation setting. This can return {@link #ORIENTATION_USE_EXIF}, in which case it doesn't tell you * the applied orientation of the image. For that, use {@link #getAppliedOrientation()}. * * @return the orientation setting. See static fields. */ public final int getOrientation() { return orientation; } /** * Returns the actual orientation of the image relative to the source file. This will be based on the source file's * EXIF orientation if you're using ORIENTATION_USE_EXIF. 
Values are 0, 90, 180, 270. * * @return the orientation applied after EXIF information has been extracted. See static fields. */ public final int getAppliedOrientation() { return getRequiredRotation(); } /** * Get the current state of the view (scale, center, orientation) for restoration after rotate. Will return null if * the view is not ready. * * @return an {@link ImageViewState} instance representing the current position of the image. null if the view isn't ready. */ @Nullable public final ImageViewState getState() { if (vTranslate != null && sWidth > 0 && sHeight > 0 && getCenter() != null) { return new ImageViewState(getScale(), getCenter(), getOrientation()); } return null; } /** * Returns true if zoom gesture detection is enabled. * * @return true if zoom gesture detection is enabled. */ public final boolean isZoomEnabled() { return zoomEnabled; } /** * Enable or disable zoom gesture detection. Disabling zoom locks the current scale. * * @param zoomEnabled true to enable zoom gestures, false to disable. */ public final void setZoomEnabled(boolean zoomEnabled) { this.zoomEnabled = zoomEnabled; } /** * Returns true if double tap &amp; swipe to zoom is enabled. * * @return true if double tap &amp; swipe to zoom is enabled. */ public final boolean isQuickScaleEnabled() { return quickScaleEnabled; } /** * Enable or disable double tap &amp; swipe to zoom. * * @param quickScaleEnabled true to enable quick scale, false to disable. */ public final void setQuickScaleEnabled(boolean quickScaleEnabled) { this.quickScaleEnabled = quickScaleEnabled; } /** * Returns true if pan gesture detection is enabled. * * @return true if pan gesture detection is enabled. */ public final boolean isPanEnabled() { return panEnabled; } /** * Enable or disable pan gesture detection. Disabling pan causes the image to be centered. Pan * can still be changed from code. * * @param panEnabled true to enable panning, false to disable. */ public final void setPanEnabled(boolean panEnabled) { this.panEnabled = panEnabled; if (!panEnabled && vTranslate != null) { vTranslate.x = (getWidthInternal() / 2f) - (scale * (sWidth() / 2f)); vTranslate.y = (getHeightInternal() / 2f) - (scale * (sHeight() / 2f)); if (isReady()) { refreshRequiredTiles(true); invalidate(); } } } /** * Set a solid color to render behind tiles, useful for displaying transparent PNGs. * * @param tileBgColor Background color for tiles. */ public final void setTileBackgroundColor(int tileBgColor) { if (Color.alpha(tileBgColor) == 0) { tileBgPaint = null; } else { tileBgPaint = new Paint(); tileBgPaint.setStyle(Style.FILL); tileBgPaint.setColor(tileBgColor); } invalidate(); } /** * Set the scale the image will zoom in to when double tapped. This is also the scale point where a double tap is interpreted * as a zoom out gesture - if the scale is greater than 90% of this value, a double tap zooms out. Avoid using values * greater than the max zoom. * * @param doubleTapZoomScale New value for double tap gesture zoom scale. */ public final void setDoubleTapZoomScale(float doubleTapZoomScale) { this.doubleTapZoomScale = doubleTapZoomScale; } /** * A density aware alternative to {@link #setDoubleTapZoomScale(float)}; this allows you to express the scale the * image will zoom in to when double tapped in terms of the image pixel density. Values lower than the max scale will * be ignored. A sensible starting point is 160 - the default used by this view. * * @param dpi New value for double tap gesture zoom scale.
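* <p>
* Worked example with hypothetical densities: on a screen with xdpi = ydpi = 420, calling
* {@code setDoubleTapZoomDpi(160)} yields a double tap scale of 420 / 160 = 2.625 view pixels
* per source pixel.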
*/ public final void setDoubleTapZoomDpi(int dpi) { DisplayMetrics metrics = getResources().getDisplayMetrics(); float averageDpi = (metrics.xdpi + metrics.ydpi) / 2; setDoubleTapZoomScale(averageDpi / dpi); } /** * Set the type of zoom animation to be used for double taps. See static fields. * * @param doubleTapZoomStyle New value for zoom style. */ public final void setDoubleTapZoomStyle(int doubleTapZoomStyle) { if (!VALID_ZOOM_STYLES.contains(doubleTapZoomStyle)) { throw new IllegalArgumentException("Invalid zoom style: " + doubleTapZoomStyle); } this.doubleTapZoomStyle = doubleTapZoomStyle; } /** * Set the duration of the double tap zoom animation. * * @param durationMs Duration in milliseconds. */ public final void setDoubleTapZoomDuration(int durationMs) { this.doubleTapZoomDuration = Math.max(0, durationMs); } /** * <p> * Provide an {@link Executor} to be used for loading images. By default, {@link AsyncTask#THREAD_POOL_EXECUTOR} * is used to minimise contention with other background work the app is doing. You can also choose * to use {@link AsyncTask#SERIAL_EXECUTOR} if you want to limit concurrent background tasks. * Alternatively you can supply an {@link Executor} of your own to avoid any contention. It is * strongly recommended to use a single executor instance for the life of your application, not * one per view instance. * </p><p> * <b>Warning:</b> If you are using a custom implementation of {@link ImageRegionDecoder}, and you * supply an executor with more than one thread, you must make sure your implementation supports * multi-threaded bitmap decoding or has appropriate internal synchronization. From SDK 21, Android's * {@link android.graphics.BitmapRegionDecoder} uses an internal lock so it is thread safe but * there is no advantage to using multiple threads. * </p> * * @param executor an {@link Executor} for image loading. */ public void setExecutor(@NonNull Executor executor) { this.executor = executor; } /** * Enable or disable eager loading of tiles that appear on screen during gestures or animations, * while the gesture or animation is still in progress. By default this is enabled to improve * responsiveness, but it can result in tiles being loaded and discarded more rapidly than * necessary and reduce the animation frame rate on old/cheap devices. Disable this on older * devices if you see poor performance. Tiles will then be loaded only when gestures and animations * are completed. * * @param eagerLoadingEnabled true to enable loading during gestures, false to delay loading until gestures end */ public void setEagerLoadingEnabled(boolean eagerLoadingEnabled) { this.eagerLoadingEnabled = eagerLoadingEnabled; } /** * Enables visual debugging, showing tile boundaries and sizes. * * @param debug true to enable debugging, false to disable. */ public final void setDebug(boolean debug) { this.debug = debug; } /** * Check if an image has been set. The image may not have been loaded and displayed yet. * * @return If an image is currently set. */ public boolean hasImage() { return uri != null || bitmap != null; } /** * {@inheritDoc} */ @Override public void setOnLongClickListener(OnLongClickListener onLongClickListener) { this.onLongClickListener = onLongClickListener; } /** * Add a listener allowing notification of load and error events. Extend {@link DefaultOnImageEventListener} * to simplify implementation. * * @param onImageEventListener an {@link OnImageEventListener} instance. 
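* <p>
* A minimal sketch using the default adapter, overriding only the error callback:
* <pre>{@code
* view.setOnImageEventListener(new CustomSubsamplingScaleImageView.DefaultOnImageEventListener() {
*     @Override
*     public void onImageLoadError(Exception e) {
*         // e.g. show a retry affordance; the other callbacks keep their empty defaults
*     }
* });
* }</pre>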
*/ public void setOnImageEventListener(OnImageEventListener onImageEventListener) { this.onImageEventListener = onImageEventListener; } /** * Add a listener for pan and zoom events. Extend {@link DefaultOnStateChangedListener} to simplify * implementation. * * @param onStateChangedListener an {@link OnStateChangedListener} instance. */ public void setOnStateChangedListener(OnStateChangedListener onStateChangedListener) { this.onStateChangedListener = onStateChangedListener; } private void sendStateChanged(float oldScale, PointF oldVTranslate, int origin) { if (onStateChangedListener != null && scale != oldScale) { onStateChangedListener.onScaleChanged(scale, origin); } if (onStateChangedListener != null && !vTranslate.equals(oldVTranslate)) { onStateChangedListener.onCenterChanged(getCenter(), origin); } } /** * Creates a panning animation builder that, when started, will animate the image to place the given coordinates of * the image in the center of the screen. If doing this would move the image beyond the edges of the screen, the * image is instead animated to move the center point as near to the center of the screen as is allowed - it's * guaranteed to be on screen. * * @param sCenter Target center point * @return {@link AnimationBuilder} instance. Call {@link CustomSubsamplingScaleImageView.AnimationBuilder#start()} to start the anim. */ @Nullable public AnimationBuilder animateCenter(PointF sCenter) { if (!isReady()) { return null; } return new AnimationBuilder(sCenter); } /** * Creates a scale animation builder that, when started, will animate a zoom in or out. If this would move the image * beyond the panning limits, the image is automatically panned during the animation. * * @param scale Target scale. * @return {@link AnimationBuilder} instance. Call {@link CustomSubsamplingScaleImageView.AnimationBuilder#start()} to start the anim. */ @Nullable public AnimationBuilder animateScale(float scale) { if (!isReady()) { return null; } return new AnimationBuilder(scale); } /** * Creates a scale animation builder that, when started, will animate a zoom in or out. If this would move the image * beyond the panning limits, the image is automatically panned during the animation. * * @param scale Target scale. * @param sCenter Target source center. * @return {@link AnimationBuilder} instance. Call {@link CustomSubsamplingScaleImageView.AnimationBuilder#start()} to start the anim. */ @Nullable public AnimationBuilder animateScaleAndCenter(float scale, PointF sCenter) { if (!isReady()) { return null; } return new AnimationBuilder(scale, sCenter); } public void setPreloadDimensions(int width, int height) { preloadDimensions = new Point(width, height); } private int getWidthInternal() { if (getWidth() > 0 || null == preloadDimensions) return getWidth(); else return preloadDimensions.x; } private int getHeightInternal() { if (getHeight() > 0 || null == preloadDimensions) return getHeight(); else return preloadDimensions.y; } /** * Builder class used to set additional options for a scale animation. Create an instance using {@link #animateScale(float)}, * then set your options and call {@link #start()}.
*/ public final class AnimationBuilder { private final float targetScale; private final PointF targetSCenter; private final PointF vFocus; private long duration = 500; private int easing = EASE_IN_OUT_QUAD; private int origin = ORIGIN_ANIM; private boolean interruptible = true; private boolean panLimited = true; private OnAnimationEventListener listener; private AnimationBuilder(PointF sCenter) { this.targetScale = scale; this.targetSCenter = sCenter; this.vFocus = null; } private AnimationBuilder(float scale) { this.targetScale = scale; this.targetSCenter = getCenter(); this.vFocus = null; } private AnimationBuilder(float scale, PointF sCenter) { this.targetScale = scale; this.targetSCenter = sCenter; this.vFocus = null; } private AnimationBuilder(float scale, PointF sCenter, PointF vFocus) { this.targetScale = scale; this.targetSCenter = sCenter; this.vFocus = vFocus; } /** * Desired duration of the anim in milliseconds. Default is 500. * * @param duration duration in milliseconds. * @return this builder for method chaining. */ @NonNull public AnimationBuilder withDuration(long duration) { this.duration = duration; return this; } /** * Whether the animation can be interrupted with a touch. Default is true. * * @param interruptible interruptible flag. * @return this builder for method chaining. */ @NonNull public AnimationBuilder withInterruptible(boolean interruptible) { this.interruptible = interruptible; return this; } /** * Set the easing style. See static fields. {@link #EASE_IN_OUT_QUAD} is recommended, and the default. * * @param easing easing style. * @return this builder for method chaining. */ @NonNull public AnimationBuilder withEasing(int easing) { if (!VALID_EASING_STYLES.contains(easing)) { throw new IllegalArgumentException("Unknown easing type: " + easing); } this.easing = easing; return this; } /** * Add an animation event listener. * * @param listener The listener. * @return this builder for method chaining. */ @NonNull public AnimationBuilder withOnAnimationEventListener(OnAnimationEventListener listener) { this.listener = listener; return this; } /** * Only for internal use. When set to true, the animation proceeds towards the actual end point - the nearest * point to the center allowed by pan limits. When false, animation is in the direction of the requested end * point and is stopped when the limit for each axis is reached. The latter behaviour is used for flings but * nothing else. */ @NonNull private AnimationBuilder withPanLimited(boolean panLimited) { this.panLimited = panLimited; return this; } /** * Only for internal use. Indicates what caused the animation. */ @NonNull private AnimationBuilder withOrigin(int origin) { this.origin = origin; return this; } /** * Starts the animation. */ public void start() { if (anim != null && anim.listener != null) { try { anim.listener.onInterruptedByNewAnim(); } catch (Exception e) { Timber.tag(TAG).w(e, "Error thrown by animation listener"); } } int vxCenter = getPaddingLeft() + (getWidthInternal() - getPaddingRight() - getPaddingLeft()) / 2; int vyCenter = getPaddingTop() + (getHeightInternal() - getPaddingBottom() - getPaddingTop()) / 2; float targetScale = limitedScale(this.targetScale); PointF targetSCenter = panLimited ? 
limitedSCenter(this.targetSCenter.x, this.targetSCenter.y, targetScale, new PointF()) : this.targetSCenter; anim = new Anim(); anim.scaleStart = scale; anim.scaleEnd = targetScale; anim.time = System.currentTimeMillis(); anim.sCenterEndRequested = targetSCenter; anim.sCenterStart = getCenter(); anim.sCenterEnd = targetSCenter; anim.vFocusStart = sourceToViewCoord(targetSCenter); anim.vFocusEnd = new PointF( vxCenter, vyCenter ); anim.duration = duration; anim.interruptible = interruptible; anim.easing = easing; anim.origin = origin; anim.listener = listener; if (vFocus != null) { // Calculate where translation will be at the end of the anim float vTranslateXEnd = vFocus.x - (targetScale * anim.sCenterStart.x); float vTranslateYEnd = vFocus.y - (targetScale * anim.sCenterStart.y); ScaleAndTranslate satEnd = new ScaleAndTranslate(targetScale, new PointF(vTranslateXEnd, vTranslateYEnd)); // Fit the end translation into bounds fitToBounds(true, satEnd); // Adjust the position of the focus point at end so image will be in bounds anim.vFocusEnd = new PointF( vFocus.x + (satEnd.vTranslate.x - vTranslateXEnd), vFocus.y + (satEnd.vTranslate.y - vTranslateYEnd) ); } invalidate(); } } /** * An event listener for animations, allows events to be triggered when an animation completes, * is aborted by another animation starting, or is aborted by a touch event. Note that none of * these events are triggered if the activity is paused, the image is swapped, or in other cases * where the view's internal state gets wiped or draw events stop. */ @SuppressWarnings("EmptyMethod") public interface OnAnimationEventListener { /** * The animation has completed, having reached its endpoint. */ void onComplete(); /** * The animation has been aborted before reaching its endpoint because the user touched the screen. */ void onInterruptedByUser(); /** * The animation has been aborted before reaching its endpoint because a new animation has been started. */ void onInterruptedByNewAnim(); } /** * Default implementation of {@link OnAnimationEventListener} for extension. This does nothing in any method. */ public static class DefaultOnAnimationEventListener implements OnAnimationEventListener { @Override public void onComplete() { } @Override public void onInterruptedByUser() { } @Override public void onInterruptedByNewAnim() { } } /** * An event listener, allowing subclasses and activities to be notified of significant events. */ @SuppressWarnings("EmptyMethod") public interface OnImageEventListener { /** * Called when the dimensions of the image and view are known, and either a preview image, * the full size image, or base layer tiles are loaded. This indicates the scale and translate * are known and the next draw will display an image. This event can be used to hide a loading * graphic, or inform a subclass that it is safe to draw overlays. */ void onReady(); /** * Called when the full size image is ready. When using tiling, this means the lowest resolution * base layer of tiles are loaded, and when tiling is disabled, the image bitmap is loaded. * This event could be used as a trigger to enable gestures if you wanted interaction disabled * while only a preview is displayed, otherwise for most cases {@link #onReady()} is the best * event to listen to. */ void onImageLoaded(); /** * Called when a preview image could not be loaded.
This method cannot be relied upon; certain * encoding types of supported image formats can result in corrupt or blank images being loaded * and displayed with no detectable error. The view will continue to load the full size image. * * @param e The exception thrown. This error is logged by the view. */ void onPreviewLoadError(Exception e); /** * Indicates an error initialising the decoder when using tiling, or when loading the full * size bitmap when tiling is disabled. This method cannot be relied upon; certain encoding * types of supported image formats can result in corrupt or blank images being loaded and * displayed with no detectable error. * * @param e The exception thrown. This error is also logged by the view. */ void onImageLoadError(Exception e); /** * Called when an image tile could not be loaded. This method cannot be relied upon; certain * encoding types of supported image formats can result in corrupt or blank images being loaded * and displayed with no detectable error. Most cases where an unsupported file is used will * result in an error caught by {@link #onImageLoadError(Exception)}. * * @param e The exception thrown. This error is logged by the view. */ void onTileLoadError(Exception e); /** * Called when a bitmap set using ImageSource.cachedBitmap is no longer being used by the View. * This is useful if you wish to manage the bitmap after the preview is shown. */ void onPreviewReleased(); } /** * Default implementation of {@link OnImageEventListener} for extension. This does nothing in any method. */ public static class DefaultOnImageEventListener implements OnImageEventListener { @Override public void onReady() { } @Override public void onImageLoaded() { } @Override public void onPreviewLoadError(Exception e) { } @Override public void onImageLoadError(Exception e) { } @Override public void onTileLoadError(Exception e) { } @Override public void onPreviewReleased() { } } /** * An event listener, allowing activities to be notified of pan and zoom events. Initialisation * and calls made by your code do not trigger events; touch events and animations do. Methods in * this listener will be called on the UI thread and may be called very frequently - your * implementation should return quickly. */ @SuppressWarnings("EmptyMethod") public interface OnStateChangedListener { /** * The scale has changed. Use with {@link #getMaxScale()} and {@link #getMinScale()} to determine * whether the image is fully zoomed in or out. * * @param newScale The new scale. * @param origin Where the event originated from - one of {@link #ORIGIN_ANIM}, {@link #ORIGIN_TOUCH}. */ void onScaleChanged(float newScale, int origin); /** * The source center has been changed. This can be a result of panning or zooming. * * @param newCenter The new source center point. * @param origin Where the event originated from - one of {@link #ORIGIN_ANIM}, {@link #ORIGIN_TOUCH}. */ void onCenterChanged(PointF newCenter, int origin); } /** * Default implementation of {@link OnStateChangedListener}. This does nothing in any method. */ public static class DefaultOnStateChangedListener implements OnStateChangedListener { @Override public void onCenterChanged(PointF newCenter, int origin) { } @Override public void onScaleChanged(float newScale, int origin) { } } }
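The listener and animation APIs defined above are easiest to follow from the calling side. Below is a minimal, hypothetical caller sketch, not part of the view's source: the activity class, layout, view id and target coordinates are all invented for illustration, and it assumes an image source has already been set on the view.

import android.app.Activity;
import android.graphics.PointF;
import android.os.Bundle;

// Hypothetical caller of the view above; the layout, id and coordinates are invented.
public class ViewerActivity extends Activity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_viewer); // assumed layout containing the view

        final CustomSubsamplingScaleImageView view = findViewById(R.id.image);

        view.setOnImageEventListener(new CustomSubsamplingScaleImageView.DefaultOnImageEventListener() {
            @Override
            public void onReady() {
                // Scale and translate are now known, so the animation builders stop returning null.
                CustomSubsamplingScaleImageView.AnimationBuilder anim =
                        view.animateScaleAndCenter(2f, new PointF(512f, 384f));
                if (anim != null) {
                    anim.withDuration(300).withInterruptible(true).start();
                }
            }
        });
    }
}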
Fix several issues reported by Sonar
app/src/main/java/me/devsaki/hentoid/views/ssiv/CustomSubsamplingScaleImageView.java
Fix several issues reported by Sonar
Java
apache-2.0
eb9a28281a31eaf38298a2fe1ecaa595dcbd11e3
0
MatthewTamlin/Mixtape
/* * Copyright 2017 Matthew Tamlin * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.matthewtamlin.mixtape.library.databinders; import android.os.AsyncTask; import android.widget.TextView; import com.matthewtamlin.java_utilities.checkers.NullChecker; import com.matthewtamlin.java_utilities.testing.Tested; import com.matthewtamlin.mixtape.library.caching.LibraryItemCache; import com.matthewtamlin.mixtape.library.data.DisplayableDefaults; import com.matthewtamlin.mixtape.library.data.LibraryItem; import java.util.HashMap; import java.util.Iterator; /** * Binds title data from LibraryItems to TextViews. Data is cached as it is loaded to improve future * performance, and asynchronous processing is only used if data is not already cached. */ @Tested(testMethod = "unit") public final class TitleBinder implements DataBinder<LibraryItem, TextView> { /** * Identifies this class during logging. */ private static final String TAG = "[TitleBinder]"; /** * Caches titles to increase efficiency and performance. */ private final LibraryItemCache cache; /** * Supplies the default title. */ private final DisplayableDefaults defaults; /** * A record of all bind tasks currently in progress, where each task is mapped to the TextView * it is updating. */ private final HashMap<TextView, BinderTask> tasks = new HashMap<>(); /** * Constructs a new TitleBinder. 
* * @param cache * a cache for storing titles, may already contain data, not null * @param defaults * supplies the default title, not null * @throws IllegalArgumentException * if {@code cache} is null * @throws IllegalArgumentException * if {@code defaults} is null */ public TitleBinder(final LibraryItemCache cache, final DisplayableDefaults defaults) { this.cache = NullChecker.checkNotNull(cache, "cache cannot be null"); this.defaults = NullChecker.checkNotNull(defaults, "defaults cannot be null"); } @Override public final void bind(final TextView view, final LibraryItem data) { NullChecker.checkNotNull(view, "textView cannot be null"); // There should never be more than one task operating on the same TextView concurrently cancel(view); // Create the task but don't execute it immediately final BinderTask task = new BinderTask(view, data); tasks.put(view, task); // Asynchronous processing is unnecessary overhead if the title is already cached if (cache.containsTitle(data)) { task.onPreExecute(); task.onPostExecute(cache.getTitle(data)); } else { task.execute(); } } @Override public final void cancel(final TextView view) { final AsyncTask task = tasks.get(view); if (task != null) { task.cancel(false); tasks.remove(view); } } @Override public final void cancelAll() { final Iterator<TextView> textViewIterator = tasks.keySet().iterator(); while (textViewIterator.hasNext()) { final AsyncTask existingTask = tasks.get(textViewIterator.next()); if (existingTask != null) { existingTask.cancel(false); textViewIterator.remove(); } } } /** * @return the cache used to store titles, not null */ public final LibraryItemCache getCache() { return cache; } /** * @return the source of the default titles, not null */ public final DisplayableDefaults getDefaults() { return defaults; } /** * Loads LibraryItem titles in the background and binds the data to the UI when available. If * data cannot be loaded for any reason, then the default title is used instead. Caching is used * to increase performance. */ private final class BinderTask extends AsyncTask<Void, Void, CharSequence> { /** * The TextView to bind data to. */ private final TextView textView; /** * The LibraryItem to source the title from. */ private final LibraryItem data; /** * Constructs a new BinderTask. * * @param textView * the TextView to bind data to, not null * @param data * the LibraryItem to source the title from, not null * @throws IllegalArgumentException * if {@code textView} is null */ public BinderTask(final TextView textView, final LibraryItem data) { this.textView = NullChecker.checkNotNull(textView, "textView cannot be null"); this.data = data; } @Override public final void onPreExecute() { // If the task has been cancelled, it must not modify the UI if (!isCancelled()) { textView.setText(null); } } @Override public final CharSequence doInBackground(final Void... params) { if (isCancelled() || data == null) { return null; } cache.cacheTitle(data, true); return cache.getTitle(data) == null ? defaults.getTitle() : cache.getTitle(data); } @Override protected final void onPostExecute(final CharSequence title) { // If the task has been cancelled, it must not modify the UI if (!isCancelled()) { textView.setText(null); // Resets the view to ensure the text changes textView.setText(title); } else { textView.setText(null); } } } }
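A brief sketch of how this binder might be wired up and driven; no concrete LibraryItemCache or DisplayableDefaults implementations are shown in this file, so they are taken as constructor parameters here, and the host class itself is invented for illustration.

import android.widget.TextView;

import com.matthewtamlin.mixtape.library.caching.LibraryItemCache;
import com.matthewtamlin.mixtape.library.data.DisplayableDefaults;
import com.matthewtamlin.mixtape.library.data.LibraryItem;
import com.matthewtamlin.mixtape.library.databinders.TitleBinder;

// Hypothetical host class; any cache/defaults implementations can be supplied.
public class TitleBindingExample {
    private final TitleBinder titleBinder;

    public TitleBindingExample(final LibraryItemCache cache, final DisplayableDefaults defaults) {
        this.titleBinder = new TitleBinder(cache, defaults);
    }

    // Called per row, e.g. from an adapter: bind(...) first cancels any task already
    // updating this TextView, then binds synchronously from the cache or loads in the background.
    public void showTitle(final TextView titleView, final LibraryItem item) {
        titleBinder.bind(titleView, item);
    }

    // Called when the hosting screen is torn down, to stop all in-flight loads.
    public void tearDown() {
        titleBinder.cancelAll();
    }
}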
library/src/main/java/com/matthewtamlin/mixtape/library/databinders/TitleBinder.java
/* * Copyright 2017 Matthew Tamlin * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.matthewtamlin.mixtape.library.databinders; import android.os.AsyncTask; import android.widget.TextView; import com.matthewtamlin.java_utilities.checkers.NullChecker; import com.matthewtamlin.java_utilities.testing.Tested; import com.matthewtamlin.mixtape.library.caching.LibraryItemCache; import com.matthewtamlin.mixtape.library.data.DisplayableDefaults; import com.matthewtamlin.mixtape.library.data.LibraryItem; import java.util.HashMap; import java.util.Iterator; /** * Binds title data from LibraryItems to TextViews. Data is cached as it is loaded to improve future * performance, and asynchronous processing is only used if data is not already cached. Defaults * are used if all attempts to load data fail. */ @Tested(testMethod = "unit") public final class TitleBinder implements DataBinder<LibraryItem, TextView> { /** * Identifies this class during logging. */ private static final String TAG = "[TitleBinder]"; /** * Caches titles to increase efficiency and performance. */ private final LibraryItemCache cache; /** * Supplies the default title. */ private final DisplayableDefaults defaults; /** * A record of all bind tasks currently in progress, where each task is mapped to the TextView * it is updating. */ private final HashMap<TextView, BinderTask> tasks = new HashMap<>(); /** * Constructs a new TitleBinder. 
* * @param cache * a cache for storing titles, may already contain data, not null * @param defaults * supplies the default title, not null * @throws IllegalArgumentException * if {@code cache} is null * @throws IllegalArgumentException * if {@code defaults} is null */ public TitleBinder(final LibraryItemCache cache, final DisplayableDefaults defaults) { this.cache = NullChecker.checkNotNull(cache, "cache cannot be null"); this.defaults = NullChecker.checkNotNull(defaults, "defaults cannot be null"); } @Override public final void bind(final TextView view, final LibraryItem data) { NullChecker.checkNotNull(view, "textView cannot be null"); // There should never be more than one task operating on the same TextView concurrently cancel(view); // Create the task but don't execute it immediately final BinderTask task = new BinderTask(view, data); tasks.put(view, task); // Asynchronous processing is unnecessary overhead if the title is already cached if (cache.containsTitle(data)) { task.onPreExecute(); task.onPostExecute(cache.getTitle(data)); } else { task.execute(); } } @Override public final void cancel(final TextView view) { final AsyncTask task = tasks.get(view); if (task != null) { task.cancel(false); tasks.remove(view); } } @Override public final void cancelAll() { final Iterator<TextView> textViewIterator = tasks.keySet().iterator(); while (textViewIterator.hasNext()) { final AsyncTask existingTask = tasks.get(textViewIterator.next()); if (existingTask != null) { existingTask.cancel(false); textViewIterator.remove(); } } } /** * @return the cache used to store titles, not null */ public final LibraryItemCache getCache() { return cache; } /** * @return the source of the default titles, not null */ public final DisplayableDefaults getDefaults() { return defaults; } /** * Loads LibraryItem titles in the background and binds the data to the UI when available. If * data cannot be loaded for any reason, then the default title is used instead. Caching is used * to increase performance. */ private final class BinderTask extends AsyncTask<Void, Void, CharSequence> { /** * The TextView to bind data to. */ private final TextView textView; /** * The LibraryItem to source the title from. */ private final LibraryItem data; /** * Constructs a new BinderTask. * * @param textView * the TextView to bind data to, not null * @param data * the LibraryItem to source the title from, not null * @throws IllegalArgumentException * if {@code textView} is null */ public BinderTask(final TextView textView, final LibraryItem data) { this.textView = NullChecker.checkNotNull(textView, "textView cannot be null"); this.data = data; } @Override public final void onPreExecute() { // If the task has been cancelled, it must not modify the UI if (!isCancelled()) { textView.setText(null); } } @Override public final CharSequence doInBackground(final Void... params) { if (isCancelled() || data == null) { return null; } cache.cacheTitle(data, true); return cache.getTitle(data) == null ? defaults.getTitle() : cache.getTitle(data); } @Override protected final void onPostExecute(final CharSequence title) { // If the task has been cancelled, it must not modify the UI if (!isCancelled()) { textView.setText(null); // Resets the view to ensure the text changes textView.setText(title); } else { textView.setText(null); } } } }
Updated Javadoc
library/src/main/java/com/matthewtamlin/mixtape/library/databinders/TitleBinder.java
Updated Javadoc
Java
apache-2.0
7df5baf0742aa3f55ed9bea6609cddb86c68b2db
0
mike-tr-adamson/incubator-tinkerpop,rmagen/incubator-tinkerpop,mike-tr-adamson/incubator-tinkerpop,dalaro/incubator-tinkerpop,rmagen/incubator-tinkerpop,krlohnes/tinkerpop,robertdale/tinkerpop,PommeVerte/incubator-tinkerpop,gdelafosse/incubator-tinkerpop,apache/incubator-tinkerpop,BrynCooke/incubator-tinkerpop,edgarRd/incubator-tinkerpop,n-tran/incubator-tinkerpop,jorgebay/tinkerpop,gdelafosse/incubator-tinkerpop,artem-aliev/tinkerpop,apache/incubator-tinkerpop,robertdale/tinkerpop,apache/incubator-tinkerpop,samiunn/incubator-tinkerpop,newkek/incubator-tinkerpop,RedSeal-co/incubator-tinkerpop,artem-aliev/tinkerpop,edgarRd/incubator-tinkerpop,krlohnes/tinkerpop,vtslab/incubator-tinkerpop,dalaro/incubator-tinkerpop,apache/tinkerpop,artem-aliev/tinkerpop,apache/tinkerpop,artem-aliev/tinkerpop,vtslab/incubator-tinkerpop,robertdale/tinkerpop,RedSeal-co/incubator-tinkerpop,velo/incubator-tinkerpop,robertdale/tinkerpop,apache/tinkerpop,vtslab/incubator-tinkerpop,dalaro/incubator-tinkerpop,velo/incubator-tinkerpop,pluradj/incubator-tinkerpop,krlohnes/tinkerpop,RussellSpitzer/incubator-tinkerpop,n-tran/incubator-tinkerpop,artem-aliev/tinkerpop,jorgebay/tinkerpop,samiunn/incubator-tinkerpop,pluradj/incubator-tinkerpop,samiunn/incubator-tinkerpop,RussellSpitzer/incubator-tinkerpop,velo/incubator-tinkerpop,robertdale/tinkerpop,krlohnes/tinkerpop,PommeVerte/incubator-tinkerpop,newkek/incubator-tinkerpop,edgarRd/incubator-tinkerpop,n-tran/incubator-tinkerpop,apache/tinkerpop,apache/tinkerpop,newkek/incubator-tinkerpop,gdelafosse/incubator-tinkerpop,krlohnes/tinkerpop,jorgebay/tinkerpop,rmagen/incubator-tinkerpop,RedSeal-co/incubator-tinkerpop,pluradj/incubator-tinkerpop,apache/tinkerpop,BrynCooke/incubator-tinkerpop,mike-tr-adamson/incubator-tinkerpop,RussellSpitzer/incubator-tinkerpop,PommeVerte/incubator-tinkerpop,apache/tinkerpop,jorgebay/tinkerpop,BrynCooke/incubator-tinkerpop
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.tinkerpop.gremlin.driver.util; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.stream.Stream; /** * @author Stephen Mallette (http://stephen.genoprime.com) */ public class ConfigurationEvaluator { private final List<Integer> minConnectionPoolSizeRange = Arrays.asList(4,8,12,16,32,64,96,128,192,256,384,512); private final List<Integer> maxConnectionPoolSizeRange = Arrays.asList(16,32,64,96,128,192,256,384,512); private final List<Integer> minSimultaneousUsagePerConnectionRange = Arrays.asList(4,8,16,24,32,64,96,128); private final List<Integer> maxSimultaneousUsagePerConnectionRange = Arrays.asList(4,8,16,24,32,64,96,128); private final List<Integer> minInProcessPerConnectionRange = Arrays.asList(2,4,8,16,32,64,96,128); private final List<Integer> maxInProcessPerConnectionRange = Arrays.asList(16,32,64,96,128); private final List<Integer> workerPoolSizeRange = Arrays.asList(1,2,3,4,8,16,32); private final List<Integer> nioPoolSizeRange = Arrays.asList(1,2,4); private final List<Integer> parallelismSizeRange = Arrays.asList(1,2,4,8,16); public Stream<String[]> generate(final String [] args) throws Exception { final Set<String> configsTried = new HashSet<>(); // get ready for some serious brute-force action here for (int ir = 0; ir < nioPoolSizeRange.size(); ir++) { for (int is = 0; is < parallelismSizeRange.size(); is++) { for (int it = 0; it < workerPoolSizeRange.size(); it++) { for (int iu = 0; iu < minInProcessPerConnectionRange.size(); iu++) { for (int iv = 0; iv < maxInProcessPerConnectionRange.size(); iv++) { for (int iw = 0; iw < minConnectionPoolSizeRange.size(); iw++) { for (int ix = 0; ix < maxConnectionPoolSizeRange.size(); ix++) { for (int iy = 0; iy < minSimultaneousUsagePerConnectionRange.size(); iy++) { for (int iz = 0; iz < maxSimultaneousUsagePerConnectionRange.size(); iz++) { if (minConnectionPoolSizeRange.get(iw) <= maxConnectionPoolSizeRange.get(ix) && minInProcessPerConnectionRange.get(iu) <= maxInProcessPerConnectionRange.get(iv) && minSimultaneousUsagePerConnectionRange.get(iy) <= maxSimultaneousUsagePerConnectionRange.get(iz) && maxSimultaneousUsagePerConnectionRange.get(iz) <= maxInProcessPerConnectionRange.get(iv)) { final String s = String.join(",", String.valueOf(ir), String.valueOf(is), String.valueOf(it), String.valueOf(iu), String.valueOf(iv), String.valueOf(iw), String.valueOf(ix), String.valueOf(iy), String.valueOf(iz)); if (!configsTried.contains(s)) { final Object[] argsToProfiler = Stream.of("nioPoolSize", nioPoolSizeRange.get(ir).toString(), "parallelism", parallelismSizeRange.get(is).toString(), "workerPoolSize", workerPoolSizeRange.get(it).toString(),
"minInProcessPerConnection", minInProcessPerConnectionRange.get(iu).toString(), "maxInProcessPerConnection", maxInProcessPerConnectionRange.get(iv).toString(), "minConnectionPoolSize", minConnectionPoolSizeRange.get(iw).toString(), "maxConnectionPoolSize", maxConnectionPoolSizeRange.get(ix).toString(), "minSimultaneousUsagePerConnection", minSimultaneousUsagePerConnectionRange.get(iy).toString(), "maxSimultaneousUsagePerConnection", maxSimultaneousUsagePerConnectionRange.get(iz).toString(), "noExit", Boolean.TRUE.toString()).toArray(); final Object[] withExtraArgs = args.length > 0 ? Stream.concat(Stream.of(args), Stream.of(argsToProfiler)).toArray() : argsToProfiler; final String[] stringProfilerArgs = Arrays.copyOf(withExtraArgs, withExtraArgs.length, String[].class); System.out.println("Testing with: " + Arrays.toString(stringProfilerArgs)); ProfilingApplication.main(stringProfilerArgs); TimeUnit.SECONDS.sleep(5); configsTried.add(s); } } } } } } } } } } } System.out.println(configsTried.size()); return null; } public static void main(final String [] args) { try { new ConfigurationEvaluator().generate(args); System.exit(0); } catch (Exception ex) { ex.printStackTrace(); System.exit(1); } } }
gremlin-driver/src/main/java/org/apache/tinkerpop/gremlin/driver/util/ConfigurationEvaluator.java
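The substantive change in this revision is the de-duplication key, visible against the previous version below: the old code keyed tried configurations on a HashSet of loop indices, which ignores order and collapses repeated values (and omitted the ir/is indices entirely), so genuinely distinct configurations could be skipped as already tried. The new ordered, comma-joined String covers all nine indices and keeps them distinct. A standalone demonstration of the collision, not part of the class:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class KeyCollisionDemo {
    public static void main(final String[] args) {
        // Two different index tuples collapse to the same set key...
        final Set<Integer> a = new HashSet<>(Arrays.asList(1, 2, 2, 3));
        final Set<Integer> b = new HashSet<>(Arrays.asList(3, 2, 1, 1));
        System.out.println(a.equals(b)); // true - sets drop order and duplicates

        // ...while ordered string keys keep them distinct.
        final String ka = String.join(",", "1", "2", "2", "3");
        final String kb = String.join(",", "3", "2", "1", "1");
        System.out.println(ka.equals(kb)); // false
    }
}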
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.tinkerpop.gremlin.driver.util; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.stream.Stream; /** * @author Stephen Mallette (http://stephen.genoprime.com) */ public class ConfigurationEvaluator { private final List<Integer> minConnectionPoolSizeRange = Arrays.asList(4,8,12,16,32,64,96,128,192,256,384,512); private final List<Integer> maxConnectionPoolSizeRange = Arrays.asList(16,32,64,96,128,192,256,384,512); private final List<Integer> minSimultaneousUsagePerConnectionRange = Arrays.asList(4,8,16,24,32,64,96,128); private final List<Integer> maxSimultaneousUsagePerConnectionRange = Arrays.asList(4,8,16,24,32,64,96,128); private final List<Integer> minInProcessPerConnectionRange = Arrays.asList(2,4,8,16,32,64,96,128); private final List<Integer> maxInProcessPerConnectionRange = Arrays.asList(16,32,64,96,128); private final List<Integer> workerPoolSizeRange = Arrays.asList(1,2,3,4,8,16,32); private final List<Integer> nioPoolSizeRange = Arrays.asList(1,2,4); private final List<Integer> parallelismSizeRange = Arrays.asList(1,2,4,8,16); public Stream<String[]> generate(final String [] args) throws Exception { final Set<Set<Integer>> configsTried = new HashSet<>(); // get ready for some serious brute-force action here for (int ir = 0; ir < nioPoolSizeRange.size(); ir++) { for (int is = 0; is < parallelismSizeRange.size(); is++) { for (int it = 0; it < workerPoolSizeRange.size(); it++) { for (int iu = 0; iu < minInProcessPerConnectionRange.size(); iu++) { for (int iv = 0; iv < maxInProcessPerConnectionRange.size(); iv++) { for (int iw = 0; iw < minConnectionPoolSizeRange.size(); iw++) { for (int ix = 0; ix < maxConnectionPoolSizeRange.size(); ix++) { for (int iy = 0; iy < minSimultaneousUsagePerConnectionRange.size(); iy++) { for (int iz = 0; iz < maxSimultaneousUsagePerConnectionRange.size(); iz++) { if (minConnectionPoolSizeRange.get(iw) <= maxConnectionPoolSizeRange.get(ix) && minInProcessPerConnectionRange.get(iu) <= maxInProcessPerConnectionRange.get(iv) && minSimultaneousUsagePerConnectionRange.get(iy) <= maxSimultaneousUsagePerConnectionRange.get(iz) && maxSimultaneousUsagePerConnectionRange.get(iz) <= maxInProcessPerConnectionRange.get(iv)) { final Set s = new HashSet(Arrays.asList(it, iu, iv, iw, ix, iy, iz)); if (!configsTried.contains(s)) { final Object[] argsToProfiler = Stream.of("nioPoolSize", nioPoolSizeRange.get(ir).toString(), "parallelism", parallelismSizeRange.get(is).toString(), "workerPoolSize", workerPoolSizeRange.get(it).toString(), "minInProcessPerConnection", minInProcessPerConnectionRange.get(iu).toString(), "maxInProcessPerConnection",
maxInProcessPerConnectionRange.get(iv).toString(), "minConnectionPoolSize", minConnectionPoolSizeRange.get(iw).toString(), "maxConnectionPoolSize", maxConnectionPoolSizeRange.get(ix).toString(), "minSimultaneousUsagePerConnection", minSimultaneousUsagePerConnectionRange.get(iy).toString(), "maxSimultaneousUsagePerConnection", maxSimultaneousUsagePerConnectionRange.get(iz).toString(), "noExit", Boolean.TRUE.toString()).toArray(); final Object[] withExtraArgs = args.length > 0 ? Stream.concat(Stream.of(args), Stream.of(argsToProfiler)).toArray() : argsToProfiler; final String[] stringProfilerArgs = Arrays.copyOf(withExtraArgs, withExtraArgs.length, String[].class); System.out.println("Testing with: " + Arrays.toString(stringProfilerArgs)); ProfilingApplication.main(stringProfilerArgs); TimeUnit.SECONDS.sleep(5); configsTried.add(s); } } } } } } } } } } } System.out.println(configsTried.size()); return null; } public static void main(final String [] args) { try { new ConfigurationEvaluator().generate(args); System.exit(0); } catch (Exception ex) { ex.printStackTrace(); System.exit(1); } } }
Adjust profiler config runner.
gremlin-driver/src/main/java/org/apache/tinkerpop/gremlin/driver/util/ConfigurationEvaluator.java
Adjust profiler config runner.
Java
apache-2.0
941c97534ab49103cc365920b595f0099ca5a534
0
samaitra/ignite,agura/incubator-ignite,apache/ignite,zzcclp/ignite,svladykin/ignite,vadopolski/ignite,amirakhmedov/ignite,nivanov/ignite,vladisav/ignite,abhishek-ch/incubator-ignite,thuTom/ignite,vladisav/ignite,alexzaitzev/ignite,SomeFire/ignite,voipp/ignite,VladimirErshov/ignite,sk0x50/ignite,voipp/ignite,apache/ignite,sylentprayer/ignite,ryanzz/ignite,shurun19851206/ignite,a1vanov/ignite,leveyj/ignite,afinka77/ignite,irudyak/ignite,kromulan/ignite,ashutakGG/incubator-ignite,agoncharuk/ignite,WilliamDo/ignite,zzcclp/ignite,dlnufox/ignite,rfqu/ignite,andrey-kuznetsov/ignite,WilliamDo/ignite,SomeFire/ignite,samaitra/ignite,ascherbakoff/ignite,vldpyatkov/ignite,ptupitsyn/ignite,avinogradovgg/ignite,thuTom/ignite,endian675/ignite,shroman/ignite,xtern/ignite,endian675/ignite,gargvish/ignite,agura/incubator-ignite,endian675/ignite,a1vanov/ignite,leveyj/ignite,DoudTechData/ignite,BiryukovVA/ignite,NSAmelchev/ignite,nizhikov/ignite,irudyak/ignite,svladykin/ignite,alexzaitzev/ignite,kromulan/ignite,kidaa/incubator-ignite,ascherbakoff/ignite,andrey-kuznetsov/ignite,samaitra/ignite,ascherbakoff/ignite,kidaa/incubator-ignite,chandresh-pancholi/ignite,kromulan/ignite,shurun19851206/ignite,voipp/ignite,shurun19851206/ignite,wmz7year/ignite,ascherbakoff/ignite,leveyj/ignite,ascherbakoff/ignite,sylentprayer/ignite,daradurvs/ignite,adeelmahmood/ignite,irudyak/ignite,xtern/ignite,agura/incubator-ignite,nizhikov/ignite,daradurvs/ignite,vldpyatkov/ignite,sk0x50/ignite,apacheignite/ignite,afinka77/ignite,StalkXT/ignite,xtern/ignite,apache/ignite,akuznetsov-gridgain/ignite,leveyj/ignite,leveyj/ignite,abhishek-ch/incubator-ignite,abhishek-ch/incubator-ignite,vldpyatkov/ignite,thuTom/ignite,shroman/ignite,apacheignite/ignite,DoudTechData/ignite,dlnufox/ignite,agoncharuk/ignite,alexzaitzev/ignite,akuznetsov-gridgain/ignite,xtern/ignite,dmagda/incubator-ignite,agoncharuk/ignite,wmz7year/ignite,sk0x50/ignite,vsisko/incubator-ignite,ntikhonov/ignite,SharplEr/ignite,vsisko/incubator-ignite,amirakhmedov/ignite,WilliamDo/ignite,arijitt/incubator-ignite,ntikhonov/ignite,apache/ignite,ptupitsyn/ignite,murador/ignite,kidaa/incubator-ignite,SharplEr/ignite,vadopolski/ignite,tkpanther/ignite,ptupitsyn/ignite,shroman/ignite,dream-x/ignite,svladykin/ignite,ryanzz/ignite,ilantukh/ignite,SomeFire/ignite,mcherkasov/ignite,andrey-kuznetsov/ignite,apacheignite/ignite,ntikhonov/ignite,vsisko/incubator-ignite,adeelmahmood/ignite,wmz7year/ignite,afinka77/ignite,a1vanov/ignite,NSAmelchev/ignite,psadusumilli/ignite,BiryukovVA/ignite,irudyak/ignite,amirakhmedov/ignite,ryanzz/ignite,NSAmelchev/ignite,voipp/ignite,amirakhmedov/ignite,sylentprayer/ignite,kromulan/ignite,pperalta/ignite,vsuslov/incubator-ignite,afinka77/ignite,avinogradovgg/ignite,SharplEr/ignite,NSAmelchev/ignite,VladimirErshov/ignite,akuznetsov-gridgain/ignite,mcherkasov/ignite,VladimirErshov/ignite,ryanzz/ignite,agoncharuk/ignite,dmagda/incubator-ignite,murador/ignite,sylentprayer/ignite,vldpyatkov/ignite,thuTom/ignite,nivanov/ignite,daradurvs/ignite,f7753/ignite,ntikhonov/ignite,vladisav/ignite,ascherbakoff/ignite,thuTom/ignite,f7753/ignite,iveselovskiy/ignite,andrey-kuznetsov/ignite,louishust/incubator-ignite,alexzaitzev/ignite,leveyj/ignite,adeelmahmood/ignite,NSAmelchev/ignite,ryanzz/ignite,BiryukovVA/ignite,apache/ignite,f7753/ignite,DoudTechData/ignite,abhishek-ch/incubator-ignite,apacheignite/ignite,BiryukovVA/ignite,NSAmelchev/ignite,rfqu/ignite,BiryukovVA/ignite,alexzaitzev/ignite,alexzaitzev/ignite,ntikhonov/ignite,pperalta/ignite,dmagda/incubator-ignite,vladis
av/ignite,sk0x50/ignite,vsuslov/incubator-ignite,pperalta/ignite,DoudTechData/ignite,WilliamDo/ignite,zzcclp/ignite,dlnufox/ignite,gridgain/apache-ignite,SomeFire/ignite,amirakhmedov/ignite,dlnufox/ignite,f7753/ignite,daradurvs/ignite,thuTom/ignite,DoudTechData/ignite,rfqu/ignite,pperalta/ignite,BiryukovVA/ignite,murador/ignite,alexzaitzev/ignite,nizhikov/ignite,louishust/incubator-ignite,kidaa/incubator-ignite,dlnufox/ignite,SharplEr/ignite,kidaa/incubator-ignite,abhishek-ch/incubator-ignite,agura/incubator-ignite,rfqu/ignite,ptupitsyn/ignite,nizhikov/ignite,agoncharuk/ignite,adeelmahmood/ignite,agoncharuk/ignite,iveselovskiy/ignite,apacheignite/ignite,vladisav/ignite,f7753/ignite,afinka77/ignite,irudyak/ignite,chandresh-pancholi/ignite,zzcclp/ignite,tkpanther/ignite,shurun19851206/ignite,WilliamDo/ignite,StalkXT/ignite,dream-x/ignite,samaitra/ignite,nivanov/ignite,sk0x50/ignite,endian675/ignite,nivanov/ignite,ilantukh/ignite,WilliamDo/ignite,nizhikov/ignite,samaitra/ignite,dream-x/ignite,iveselovskiy/ignite,mcherkasov/ignite,BiryukovVA/ignite,akuznetsov-gridgain/ignite,shurun19851206/ignite,adeelmahmood/ignite,vadopolski/ignite,apache/ignite,voipp/ignite,agoncharuk/ignite,mcherkasov/ignite,rfqu/ignite,gridgain/apache-ignite,dmagda/incubator-ignite,kromulan/ignite,ptupitsyn/ignite,irudyak/ignite,VladimirErshov/ignite,leveyj/ignite,tkpanther/ignite,ptupitsyn/ignite,shurun19851206/ignite,mcherkasov/ignite,nizhikov/ignite,SharplEr/ignite,vsuslov/incubator-ignite,StalkXT/ignite,nizhikov/ignite,gargvish/ignite,daradurvs/ignite,avinogradovgg/ignite,amirakhmedov/ignite,gargvish/ignite,murador/ignite,NSAmelchev/ignite,SomeFire/ignite,irudyak/ignite,kromulan/ignite,voipp/ignite,DoudTechData/ignite,psadusumilli/ignite,zzcclp/ignite,xtern/ignite,voipp/ignite,SharplEr/ignite,WilliamDo/ignite,ptupitsyn/ignite,tkpanther/ignite,sylentprayer/ignite,gridgain/apache-ignite,StalkXT/ignite,a1vanov/ignite,daradurvs/ignite,psadusumilli/ignite,amirakhmedov/ignite,vsisko/incubator-ignite,adeelmahmood/ignite,alexzaitzev/ignite,murador/ignite,wmz7year/ignite,psadusumilli/ignite,chandresh-pancholi/ignite,afinka77/ignite,ntikhonov/ignite,louishust/incubator-ignite,dmagda/incubator-ignite,murador/ignite,StalkXT/ignite,andrey-kuznetsov/ignite,daradurvs/ignite,andrey-kuznetsov/ignite,murador/ignite,chandresh-pancholi/ignite,vadopolski/ignite,a1vanov/ignite,shroman/ignite,sk0x50/ignite,arijitt/incubator-ignite,sk0x50/ignite,dlnufox/ignite,voipp/ignite,andrey-kuznetsov/ignite,adeelmahmood/ignite,irudyak/ignite,ntikhonov/ignite,tkpanther/ignite,NSAmelchev/ignite,gridgain/apache-ignite,amirakhmedov/ignite,samaitra/ignite,agura/incubator-ignite,dmagda/incubator-ignite,amirakhmedov/ignite,adeelmahmood/ignite,DoudTechData/ignite,shroman/ignite,vladisav/ignite,nivanov/ignite,wmz7year/ignite,wmz7year/ignite,dream-x/ignite,apacheignite/ignite,dmagda/incubator-ignite,chandresh-pancholi/ignite,shroman/ignite,ashutakGG/incubator-ignite,rfqu/ignite,sylentprayer/ignite,mcherkasov/ignite,tkpanther/ignite,f7753/ignite,samaitra/ignite,StalkXT/ignite,vsuslov/incubator-ignite,iveselovskiy/ignite,daradurvs/ignite,abhishek-ch/incubator-ignite,leveyj/ignite,agura/incubator-ignite,wmz7year/ignite,pperalta/ignite,gridgain/apache-ignite,vsisko/incubator-ignite,samaitra/ignite,vsuslov/incubator-ignite,gargvish/ignite,shroman/ignite,agura/incubator-ignite,psadusumilli/ignite,nizhikov/ignite,ilantukh/ignite,StalkXT/ignite,dlnufox/ignite,VladimirErshov/ignite,irudyak/ignite,vadopolski/ignite,vadopolski/ignite,ascherbakoff/ignite,avinogradovgg/ignite
,StalkXT/ignite,mcherkasov/ignite,vldpyatkov/ignite,endian675/ignite,arijitt/incubator-ignite,SharplEr/ignite,ryanzz/ignite,endian675/ignite,vladisav/ignite,ptupitsyn/ignite,chandresh-pancholi/ignite,kromulan/ignite,murador/ignite,a1vanov/ignite,nivanov/ignite,svladykin/ignite,ilantukh/ignite,vldpyatkov/ignite,apacheignite/ignite,kromulan/ignite,shurun19851206/ignite,gridgain/apache-ignite,xtern/ignite,afinka77/ignite,louishust/incubator-ignite,thuTom/ignite,ascherbakoff/ignite,tkpanther/ignite,iveselovskiy/ignite,afinka77/ignite,nizhikov/ignite,ilantukh/ignite,apache/ignite,wmz7year/ignite,f7753/ignite,andrey-kuznetsov/ignite,svladykin/ignite,vsisko/incubator-ignite,akuznetsov-gridgain/ignite,nivanov/ignite,DoudTechData/ignite,psadusumilli/ignite,avinogradovgg/ignite,svladykin/ignite,samaitra/ignite,dream-x/ignite,mcherkasov/ignite,ilantukh/ignite,shurun19851206/ignite,shroman/ignite,andrey-kuznetsov/ignite,VladimirErshov/ignite,ashutakGG/incubator-ignite,ashutakGG/incubator-ignite,NSAmelchev/ignite,ascherbakoff/ignite,apache/ignite,sylentprayer/ignite,vadopolski/ignite,SharplEr/ignite,chandresh-pancholi/ignite,ashutakGG/incubator-ignite,thuTom/ignite,louishust/incubator-ignite,svladykin/ignite,arijitt/incubator-ignite,apacheignite/ignite,gridgain/apache-ignite,dlnufox/ignite,psadusumilli/ignite,sk0x50/ignite,nivanov/ignite,endian675/ignite,StalkXT/ignite,xtern/ignite,agura/incubator-ignite,pperalta/ignite,akuznetsov-gridgain/ignite,vsuslov/incubator-ignite,alexzaitzev/ignite,ilantukh/ignite,zzcclp/ignite,sk0x50/ignite,chandresh-pancholi/ignite,shroman/ignite,rfqu/ignite,xtern/ignite,chandresh-pancholi/ignite,pperalta/ignite,avinogradovgg/ignite,tkpanther/ignite,WilliamDo/ignite,samaitra/ignite,VladimirErshov/ignite,vldpyatkov/ignite,BiryukovVA/ignite,endian675/ignite,dream-x/ignite,SomeFire/ignite,daradurvs/ignite,iveselovskiy/ignite,ilantukh/ignite,f7753/ignite,vldpyatkov/ignite,BiryukovVA/ignite,pperalta/ignite,dmagda/incubator-ignite,voipp/ignite,ryanzz/ignite,rfqu/ignite,psadusumilli/ignite,gargvish/ignite,vadopolski/ignite,zzcclp/ignite,vladisav/ignite,VladimirErshov/ignite,dream-x/ignite,SomeFire/ignite,andrey-kuznetsov/ignite,apache/ignite,SomeFire/ignite,louishust/incubator-ignite,vsisko/incubator-ignite,ptupitsyn/ignite,ilantukh/ignite,gargvish/ignite,arijitt/incubator-ignite,dream-x/ignite,agoncharuk/ignite,ntikhonov/ignite,ptupitsyn/ignite,SharplEr/ignite,xtern/ignite,SomeFire/ignite,BiryukovVA/ignite,a1vanov/ignite,gargvish/ignite,ilantukh/ignite,vsisko/incubator-ignite,gargvish/ignite,zzcclp/ignite,arijitt/incubator-ignite,daradurvs/ignite,ryanzz/ignite,ashutakGG/incubator-ignite,a1vanov/ignite,sylentprayer/ignite,shroman/ignite,kidaa/incubator-ignite,SomeFire/ignite,avinogradovgg/ignite
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.managers.discovery; import org.apache.ignite.*; import org.apache.ignite.cluster.*; import org.apache.ignite.events.*; import org.apache.ignite.internal.*; import org.apache.ignite.internal.events.*; import org.apache.ignite.internal.managers.*; import org.apache.ignite.internal.managers.communication.*; import org.apache.ignite.internal.managers.eventstorage.*; import org.apache.ignite.internal.processors.cache.*; import org.apache.ignite.internal.processors.jobmetrics.*; import org.apache.ignite.internal.processors.security.*; import org.apache.ignite.internal.util.*; import org.apache.ignite.internal.util.future.*; import org.apache.ignite.internal.util.lang.*; import org.apache.ignite.internal.util.typedef.*; import org.apache.ignite.internal.util.typedef.internal.*; import org.apache.ignite.internal.util.worker.*; import org.apache.ignite.lang.*; import org.apache.ignite.plugin.security.*; import org.apache.ignite.plugin.segmentation.*; import org.apache.ignite.spi.*; import org.apache.ignite.spi.discovery.*; import org.apache.ignite.thread.*; import org.jdk8.backport.*; import org.jetbrains.annotations.*; import java.io.*; import java.lang.management.*; import java.util.*; import java.util.concurrent.*; import java.util.concurrent.atomic.*; import java.util.zip.*; import static java.util.concurrent.TimeUnit.*; import static org.apache.ignite.events.EventType.*; import static org.apache.ignite.internal.IgniteNodeAttributes.*; import static org.apache.ignite.internal.IgniteVersionUtils.*; import static org.apache.ignite.plugin.segmentation.GridSegmentationPolicy.*; /** * Discovery SPI manager. */ public class GridDiscoveryManager extends GridManagerAdapter<DiscoverySpi> { /** Fake key for {@code null}-named caches. Used inside {@link DiscoCache}. */ private static final String NULL_CACHE_NAME = UUID.randomUUID().toString(); /** Metrics update frequency. */ private static final long METRICS_UPDATE_FREQ = 3000; /** */ private static final MemoryMXBean mem = ManagementFactory.getMemoryMXBean(); /** */ private static final OperatingSystemMXBean os = ManagementFactory.getOperatingSystemMXBean(); /** */ private static final RuntimeMXBean rt = ManagementFactory.getRuntimeMXBean(); /** */ private static final ThreadMXBean threads = ManagementFactory.getThreadMXBean(); /** */ private static final Collection<GarbageCollectorMXBean> gc = ManagementFactory.getGarbageCollectorMXBeans(); /** */ private static final String PREFIX = "Topology snapshot"; /** Discovery cached history size. */ protected static final int DISCOVERY_HISTORY_SIZE = 100; /** Predicate filtering out daemon nodes. 
*/ private static final IgnitePredicate<ClusterNode> daemonFilter = new P1<ClusterNode>() { @Override public boolean apply(ClusterNode n) { return !n.isDaemon(); } }; /** Disco history entries comparator. */ private static final Comparator<Map.Entry<Long, DiscoCache>> histCmp = new Comparator<Map.Entry<Long, DiscoCache>>() { @Override public int compare(Map.Entry<Long, DiscoCache> o1, Map.Entry<Long, DiscoCache> o2) { return o1.getKey().compareTo(o2.getKey()); } }; /** Discovery event worker. */ private final DiscoveryWorker discoWrk = new DiscoveryWorker(); /** Network segment check worker. */ private SegmentCheckWorker segChkWrk; /** Network segment check thread. */ private IgniteThread segChkThread; /** Last logged topology. */ private final AtomicLong lastLoggedTop = new AtomicLong(); /** Local node. */ private ClusterNode locNode; /** Local node daemon flag. */ private boolean isLocDaemon; /** {@code True} if resolvers were configured and network segment check is enabled. */ private boolean hasRslvrs; /** Last segment check result. */ private final AtomicBoolean lastSegChkRes = new AtomicBoolean(true); /** Discovery cache. */ private final AtomicReference<DiscoCache> discoCache = new AtomicReference<>(); /** Topology cache history. */ private final GridBoundedConcurrentLinkedHashMap<Long, DiscoCache> discoCacheHist = new GridBoundedConcurrentLinkedHashMap<>(DISCOVERY_HISTORY_SIZE, DISCOVERY_HISTORY_SIZE, 0.7f, 1); /** Topology snapshots history. */ private volatile Map<Long, Collection<ClusterNode>> topHist = new HashMap<>(); /** Topology version. */ private final GridAtomicLong topVer = new GridAtomicLong(); /** Order supported flag. */ private boolean discoOrdered; /** Topology snapshots history supported flag. */ private boolean histSupported; /** Configured network segment check frequency. */ private long segChkFreq; /** Local node join to topology event. */ private GridFutureAdapterEx<DiscoveryEvent> locJoinEvt = new GridFutureAdapterEx<>(); /** GC CPU load. */ private volatile double gcCpuLoad; /** CPU load. */ private volatile double cpuLoad; /** Metrics. */ private final GridLocalMetrics metrics = createMetrics(); /** Metrics update worker. */ private final MetricsUpdater metricsUpdater = new MetricsUpdater(); /** @param ctx Context. */ public GridDiscoveryManager(GridKernalContext ctx) { super(ctx, ctx.config().getDiscoverySpi()); } /** * @return Memory usage of non-heap memory. */ private MemoryUsage nonHeapMemoryUsage() { // Workaround for an exception in WebSphere. // We received the following exception: // java.lang.IllegalArgumentException: used value cannot be larger than the committed value // at java.lang.management.MemoryUsage.<init>(MemoryUsage.java:105) // at com.ibm.lang.management.MemoryMXBeanImpl.getNonHeapMemoryUsageImpl(Native Method) // at com.ibm.lang.management.MemoryMXBeanImpl.getNonHeapMemoryUsage(MemoryMXBeanImpl.java:143) // at org.apache.ignite.spi.metrics.jdk.GridJdkLocalMetricsSpi.getMetrics(GridJdkLocalMetricsSpi.java:242) // // So we had to work around this with exception handling, because we cannot control classes from WebSphere. try { return mem.getNonHeapMemoryUsage(); } catch (IllegalArgumentException ignored) { return new MemoryUsage(0, 0, 0, 0); } } /** {@inheritDoc} */ @Override public void start() throws IgniteCheckedException { super.start(); long totSysMemory = -1; try { totSysMemory = U.<Long>property(os, "totalPhysicalMemorySize"); } catch (RuntimeException ignored) { // No-op.
} ctx.addNodeAttribute(IgniteNodeAttributes.ATTR_PHY_RAM, totSysMemory); DiscoverySpi spi = getSpi(); spi.setNodeAttributes(ctx.nodeAttributes(), VER); discoOrdered = discoOrdered(); histSupported = historySupported(); isLocDaemon = ctx.isDaemon(); hasRslvrs = !F.isEmpty(ctx.config().getSegmentationResolvers()); segChkFreq = ctx.config().getSegmentCheckFrequency(); if (hasRslvrs) { if (segChkFreq < 0) throw new IgniteCheckedException("Segment check frequency cannot be negative: " + segChkFreq); if (segChkFreq > 0 && segChkFreq < 2000) U.warn(log, "Configuration parameter 'segmentCheckFrequency' is too low " + "(at least 2000 ms recommended): " + segChkFreq); checkSegmentOnStart(); } new IgniteThread(metricsUpdater).start(); spi.setMetricsProvider(createMetricsProvider()); if (ctx.security().enabled()) { spi.setAuthenticator(new DiscoverySpiNodeAuthenticator() { @Override public SecurityContext authenticateNode(ClusterNode node, GridSecurityCredentials cred) { try { return ctx.security().authenticateNode(node, cred); } catch (IgniteCheckedException e) { throw U.convertException(e); } } @Override public boolean isGlobalNodeAuthentication() { return ctx.security().isGlobalNodeAuthentication(); } }); } spi.setListener(new DiscoverySpiListener() { @Override public void onDiscovery( int type, long topVer, ClusterNode node, Collection<ClusterNode> topSnapshot, Map<Long, Collection<ClusterNode>> snapshots, @Nullable Serializable data ) { final ClusterNode locNode = localNode(); if (snapshots != null) topHist = snapshots; if (type == EVT_NODE_FAILED || type == EVT_NODE_LEFT) { for (DiscoCache c : discoCacheHist.values()) c.updateAlives(node); } // Put topology snapshot into discovery history. // There is no race possible between history maintenance and concurrent discovery // event notifications, since SPI notifies manager about all events from this listener. if (type != EVT_NODE_METRICS_UPDATED && type != DiscoveryCustomEvent.EVT_DISCOVERY_CUSTOM_EVT) { DiscoCache cache = new DiscoCache(locNode, F.view(topSnapshot, F.remoteNodes(locNode.id()))); discoCacheHist.put(topVer, cache); discoCache.set(cache); } // If this is a local join event, just save it and do not notify listeners. 
if (type == EVT_NODE_JOINED && node.id().equals(locNode.id())) { DiscoveryEvent discoEvt = new DiscoveryEvent(); discoEvt.node(ctx.discovery().localNode()); discoEvt.eventNode(node); discoEvt.type(EVT_NODE_JOINED); discoEvt.topologySnapshot(topVer, new ArrayList<>( F.viewReadOnly(topSnapshot, new C1<ClusterNode, ClusterNode>() { @Override public ClusterNode apply(ClusterNode e) { return e; } }, daemonFilter))); locJoinEvt.onDone(discoEvt); return; } if (topVer > 0 && (type == EVT_NODE_JOINED || type == EVT_NODE_FAILED || type == EVT_NODE_LEFT)) { boolean set = GridDiscoveryManager.this.topVer.setIfGreater(topVer); assert set : "Topology version has not been updated [this.topVer=" + GridDiscoveryManager.this.topVer + ", topVer=" + topVer + ", node=" + node + ", evt=" + U.gridEventName(type) + ']'; } discoWrk.addEvent(type, topVer, node, topSnapshot, data); } }); spi.setDataExchange(new DiscoverySpiDataExchange() { @Override public Map<Integer, Object> collect(UUID nodeId) { assert nodeId != null; Map<Integer, Object> data = new HashMap<>(); for (GridComponent comp : ctx.components()) { Object compData = comp.collectDiscoveryData(nodeId); if (compData != null) { assert comp.discoveryDataType() != null; data.put(comp.discoveryDataType().ordinal(), compData); } } return data; } @Override public void onExchange(UUID nodeId, Map<Integer, Object> data) { for (Map.Entry<Integer, Object> e : data.entrySet()) { GridComponent comp = null; for (GridComponent c : ctx.components()) { if (c.discoveryDataType() != null && c.discoveryDataType().ordinal() == e.getKey()) { comp = c; break; } } if (comp != null) comp.onDiscoveryDataReceived(nodeId, e.getValue()); else U.warn(log, "Received discovery data for unknown component: " + e.getKey()); } } }); startSpi(); // Start segment check worker only if frequency is greater than 0. if (hasRslvrs && segChkFreq > 0) { segChkWrk = new SegmentCheckWorker(); segChkThread = new IgniteThread(segChkWrk); segChkThread.start(); } checkAttributes(discoCache().remoteNodes()); locNode = spi.getLocalNode(); topVer.setIfGreater(locNode.order()); // Start discovery worker. new IgniteThread(discoWrk).start(); if (log.isDebugEnabled()) log.debug(startInfo()); } /** * @return Metrics. 
     */
    private GridLocalMetrics createMetrics() {
        return new GridLocalMetrics() {
            @Override public int getAvailableProcessors() {
                return os.getAvailableProcessors();
            }

            @Override public double getCurrentCpuLoad() {
                return cpuLoad;
            }

            @Override public double getCurrentGcCpuLoad() {
                return gcCpuLoad;
            }

            @Override public long getHeapMemoryInitialized() {
                return mem.getHeapMemoryUsage().getInit();
            }

            @Override public long getHeapMemoryUsed() {
                return mem.getHeapMemoryUsage().getUsed();
            }

            @Override public long getHeapMemoryCommitted() {
                return mem.getHeapMemoryUsage().getCommitted();
            }

            @Override public long getHeapMemoryMaximum() {
                return mem.getHeapMemoryUsage().getMax();
            }

            @Override public long getNonHeapMemoryInitialized() {
                return nonHeapMemoryUsage().getInit();
            }

            @Override public long getNonHeapMemoryUsed() {
                return nonHeapMemoryUsage().getUsed();
            }

            @Override public long getNonHeapMemoryCommitted() {
                return nonHeapMemoryUsage().getCommitted();
            }

            @Override public long getNonHeapMemoryMaximum() {
                return nonHeapMemoryUsage().getMax();
            }

            @Override public long getUptime() {
                return rt.getUptime();
            }

            @Override public long getStartTime() {
                return rt.getStartTime();
            }

            @Override public int getThreadCount() {
                return threads.getThreadCount();
            }

            @Override public int getPeakThreadCount() {
                return threads.getPeakThreadCount();
            }

            @Override public long getTotalStartedThreadCount() {
                return threads.getTotalStartedThreadCount();
            }

            @Override public int getDaemonThreadCount() {
                return threads.getDaemonThreadCount();
            }
        };
    }

    /**
     * @return Metrics provider.
     */
    private DiscoveryMetricsProvider createMetricsProvider() {
        return new DiscoveryMetricsProvider() {
            /** */
            private final long startTime = U.currentTimeMillis();

            /** {@inheritDoc} */
            @Override public ClusterMetrics metrics() {
                GridJobMetrics jm = ctx.jobMetric().getJobMetrics();

                ClusterMetricsSnapshot nm = new ClusterMetricsSnapshot();

                nm.setLastUpdateTime(U.currentTimeMillis());

                // Job metrics.
                nm.setMaximumActiveJobs(jm.getMaximumActiveJobs());
                nm.setCurrentActiveJobs(jm.getCurrentActiveJobs());
                nm.setAverageActiveJobs(jm.getAverageActiveJobs());
                nm.setMaximumWaitingJobs(jm.getMaximumWaitingJobs());
                nm.setCurrentWaitingJobs(jm.getCurrentWaitingJobs());
                nm.setAverageWaitingJobs(jm.getAverageWaitingJobs());
                nm.setMaximumRejectedJobs(jm.getMaximumRejectedJobs());
                nm.setCurrentRejectedJobs(jm.getCurrentRejectedJobs());
                nm.setAverageRejectedJobs(jm.getAverageRejectedJobs());
                nm.setMaximumCancelledJobs(jm.getMaximumCancelledJobs());
                nm.setCurrentCancelledJobs(jm.getCurrentCancelledJobs());
                nm.setAverageCancelledJobs(jm.getAverageCancelledJobs());
                nm.setTotalRejectedJobs(jm.getTotalRejectedJobs());
                nm.setTotalCancelledJobs(jm.getTotalCancelledJobs());
                nm.setTotalExecutedJobs(jm.getTotalExecutedJobs());
                nm.setMaximumJobWaitTime(jm.getMaximumJobWaitTime());
                nm.setCurrentJobWaitTime(jm.getCurrentJobWaitTime());
                nm.setAverageJobWaitTime(jm.getAverageJobWaitTime());
                nm.setMaximumJobExecuteTime(jm.getMaximumJobExecuteTime());
                nm.setCurrentJobExecuteTime(jm.getCurrentJobExecuteTime());
                nm.setAverageJobExecuteTime(jm.getAverageJobExecuteTime());
                nm.setCurrentIdleTime(jm.getCurrentIdleTime());
                nm.setTotalIdleTime(jm.getTotalIdleTime());
                nm.setAverageCpuLoad(jm.getAverageCpuLoad());

                // Task metrics.
                nm.setTotalExecutedTasks(ctx.task().getTotalExecutedTasks());

                // VM metrics.
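                // The values below are snapshots of the local JVM state, read through the
                // MXBean-backed GridLocalMetrics instance built in createMetrics().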
nm.setAvailableProcessors(metrics.getAvailableProcessors()); nm.setCurrentCpuLoad(metrics.getCurrentCpuLoad()); nm.setCurrentGcCpuLoad(metrics.getCurrentGcCpuLoad()); nm.setHeapMemoryInitialized(metrics.getHeapMemoryInitialized()); nm.setHeapMemoryUsed(metrics.getHeapMemoryUsed()); nm.setHeapMemoryCommitted(metrics.getHeapMemoryCommitted()); nm.setHeapMemoryMaximum(metrics.getHeapMemoryMaximum()); nm.setHeapMemoryTotal(metrics.getHeapMemoryMaximum()); nm.setNonHeapMemoryInitialized(metrics.getNonHeapMemoryInitialized()); nm.setNonHeapMemoryUsed(metrics.getNonHeapMemoryUsed()); nm.setNonHeapMemoryCommitted(metrics.getNonHeapMemoryCommitted()); nm.setNonHeapMemoryMaximum(metrics.getNonHeapMemoryMaximum()); nm.setNonHeapMemoryTotal(metrics.getNonHeapMemoryMaximum()); nm.setUpTime(metrics.getUptime()); nm.setStartTime(metrics.getStartTime()); nm.setNodeStartTime(startTime); nm.setCurrentThreadCount(metrics.getThreadCount()); nm.setMaximumThreadCount(metrics.getPeakThreadCount()); nm.setTotalStartedThreadCount(metrics.getTotalStartedThreadCount()); nm.setCurrentDaemonThreadCount(metrics.getDaemonThreadCount()); // Data metrics. nm.setLastDataVersion(ctx.cache().lastDataVersion()); GridIoManager io = ctx.io(); // IO metrics. nm.setSentMessagesCount(io.getSentMessagesCount()); nm.setSentBytesCount(io.getSentBytesCount()); nm.setReceivedMessagesCount(io.getReceivedMessagesCount()); nm.setReceivedBytesCount(io.getReceivedBytesCount()); nm.setOutboundMessagesQueueSize(io.getOutboundMessagesQueueSize()); return nm; } }; } /** * @return Local metrics. */ public GridLocalMetrics metrics() { return metrics; } /** @return {@code True} if ordering is supported. */ private boolean discoOrdered() { DiscoverySpiOrderSupport ann = U.getAnnotation(ctx.config().getDiscoverySpi().getClass(), DiscoverySpiOrderSupport.class); return ann != null && ann.value(); } /** @return {@code True} if topology snapshots history is supported. */ private boolean historySupported() { DiscoverySpiHistorySupport ann = U.getAnnotation(ctx.config().getDiscoverySpi().getClass(), DiscoverySpiHistorySupport.class); return ann != null && ann.value(); } /** * Checks segment on start waiting for correct segment if necessary. * * @throws IgniteCheckedException If check failed. */ private void checkSegmentOnStart() throws IgniteCheckedException { assert hasRslvrs; if (log.isDebugEnabled()) log.debug("Starting network segment check."); while (true) { if (ctx.segmentation().isValidSegment()) break; if (ctx.config().isWaitForSegmentOnStart()) { LT.warn(log, null, "Failed to check network segment (retrying every 2000 ms)."); // Wait and check again. U.sleep(2000); } else throw new IgniteCheckedException("Failed to check network segment."); } if (log.isDebugEnabled()) log.debug("Finished network segment check successfully."); } /** * Checks whether attributes of the local node are consistent with remote nodes. * * @param nodes List of remote nodes to check attributes on. * @throws IgniteCheckedException In case of error. */ private void checkAttributes(Iterable<ClusterNode> nodes) throws IgniteCheckedException { ClusterNode locNode = getSpi().getLocalNode(); assert locNode != null; // Fetch local node attributes once. 
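        // Caching these values avoids repeated attribute lookups in the loop over remote nodes below.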
        String locPreferIpV4 = locNode.attribute("java.net.preferIPv4Stack");

        Object locMode = locNode.attribute(ATTR_DEPLOYMENT_MODE);

        boolean locP2pEnabled = locNode.attribute(ATTR_PEER_CLASSLOADING);

        boolean warned = false;

        for (ClusterNode n : nodes) {
            String rmtPreferIpV4 = n.attribute("java.net.preferIPv4Stack");

            if (!F.eq(rmtPreferIpV4, locPreferIpV4)) {
                if (!warned)
                    U.warn(log, "Local node's value of 'java.net.preferIPv4Stack' " +
                        "system property differs from remote node's " +
                        "(all nodes in topology should have identical value) " +
                        "[locPreferIpV4=" + locPreferIpV4 + ", rmtPreferIpV4=" + rmtPreferIpV4 +
                        ", locId8=" + U.id8(locNode.id()) + ", rmtId8=" + U.id8(n.id()) +
                        ", rmtAddrs=" + U.addressesAsString(n) + ']',
                        "Local and remote 'java.net.preferIPv4Stack' system properties do not match.");

                warned = true;
            }

            // Daemon nodes are allowed to have any deployment they need.
            // Skip deployment mode and peer class loading checks for daemon nodes.
            if (!isLocDaemon && !n.isDaemon()) {
                Object rmtMode = n.attribute(ATTR_DEPLOYMENT_MODE);

                if (!locMode.equals(rmtMode))
                    throw new IgniteCheckedException("Remote node has deployment mode different from local " +
                        "[locId8=" + U.id8(locNode.id()) + ", locMode=" + locMode +
                        ", rmtId8=" + U.id8(n.id()) + ", rmtMode=" + rmtMode +
                        ", rmtAddrs=" + U.addressesAsString(n) + ']');

                boolean rmtP2pEnabled = n.attribute(ATTR_PEER_CLASSLOADING);

                if (locP2pEnabled != rmtP2pEnabled)
                    throw new IgniteCheckedException("Remote node has peer class loading enabled flag different from local " +
                        "[locId8=" + U.id8(locNode.id()) + ", locPeerClassLoading=" + locP2pEnabled +
                        ", rmtId8=" + U.id8(n.id()) + ", rmtPeerClassLoading=" + rmtP2pEnabled +
                        ", rmtAddrs=" + U.addressesAsString(n) + ']');
            }
        }

        if (log.isDebugEnabled())
            log.debug("Finished node attributes consistency check.");
    }

    /**
     * @param nodes Nodes.
     * @return Total CPUs.
     */
    private static int cpus(Collection<ClusterNode> nodes) {
        Collection<String> macSet = new HashSet<>(nodes.size(), 1.0f);

        int cpus = 0;

        for (ClusterNode n : nodes) {
            String macs = n.attribute(ATTR_MACS);

            if (macSet.add(macs))
                cpus += n.metrics().getTotalCpus();
        }

        return cpus;
    }

    /**
     * Prints the latest topology info into log taking into account logging/verbosity settings.
     */
    public void ackTopology() {
        ackTopology(topVer.get(), false);
    }

    /**
     * Logs grid size for license compliance.
     *
     * @param topVer Topology version.
     * @param throttle Suppress printing if this topology was already printed.
     */
    private void ackTopology(long topVer, boolean throttle) {
        assert !isLocDaemon;

        DiscoCache discoCache = discoCache();

        Collection<ClusterNode> rmtNodes = discoCache.remoteNodes();

        ClusterNode locNode = discoCache.localNode();

        Collection<ClusterNode> allNodes = discoCache.allNodes();

        long hash = topologyHash(allNodes);

        // Prevent ack-ing topology for the same topology.
        // Can happen only during node startup.
        if (throttle && lastLoggedTop.getAndSet(hash) == hash)
            return;

        int totalCpus = cpus(allNodes);

        double heap = U.heapSize(allNodes, 2);

        if (log.isQuiet())
            U.quiet(false, topologySnapshotMessage(rmtNodes.size(), totalCpus, heap));

        if (log.isDebugEnabled()) {
            String dbg = "";

            dbg += U.nl() + U.nl() +
                ">>> +----------------+" + U.nl() +
                ">>> " + PREFIX + "." + U.nl() +
                ">>> +----------------+" + U.nl() +
                ">>> Grid name: " + (ctx.gridName() == null ? "default" : ctx.gridName()) + U.nl() +
                ">>> Number of nodes: " + (rmtNodes.size() + 1) + U.nl() +
                (discoOrdered ? ">>> Topology version: " + topVer + U.nl() : "") +
                ">>> Topology hash: 0x" + Long.toHexString(hash).toUpperCase() + U.nl();

            dbg += ">>> Local: " +
                locNode.id().toString().toUpperCase() + ", " +
                U.addressesAsString(locNode) + ", " +
                locNode.order() + ", " +
                locNode.attribute("os.name") + ' ' +
                locNode.attribute("os.arch") + ' ' +
                locNode.attribute("os.version") + ", " +
                System.getProperty("user.name") + ", " +
                locNode.attribute("java.runtime.name") + ' ' +
                locNode.attribute("java.runtime.version") + U.nl();

            for (ClusterNode node : rmtNodes)
                dbg += ">>> Remote: " +
                    node.id().toString().toUpperCase() + ", " +
                    U.addressesAsString(node) + ", " +
                    node.order() + ", " +
                    node.attribute("os.name") + ' ' +
                    node.attribute("os.arch") + ' ' +
                    node.attribute("os.version") + ", " +
                    node.attribute(ATTR_USER_NAME) + ", " +
                    node.attribute("java.runtime.name") + ' ' +
                    node.attribute("java.runtime.version") + U.nl();

            dbg += ">>> Total number of CPUs: " + totalCpus + U.nl();
            dbg += ">>> Total heap size: " + heap + "GB" + U.nl();

            log.debug(dbg);
        }
        else if (log.isInfoEnabled())
            log.info(topologySnapshotMessage(rmtNodes.size(), totalCpus, heap));
    }

    /**
     * @param rmtNodesNum Remote nodes number.
     * @param totalCpus Total cpu number.
     * @param heap Heap size.
     * @return Topology snapshot message.
     */
    private String topologySnapshotMessage(int rmtNodesNum, int totalCpus, double heap) {
        return PREFIX + " [" +
            (discoOrdered ? "ver=" + topVer + ", " : "") +
            "nodes=" + (rmtNodesNum + 1) +
            ", CPUs=" + totalCpus +
            ", heap=" + heap + "GB" + ']';
    }

    /** {@inheritDoc} */
    @Override public void onKernalStop0(boolean cancel) {
        // Stop segment check worker.
        if (segChkWrk != null) {
            segChkWrk.cancel();

            U.join(segChkThread, log);
        }

        if (!locJoinEvt.isDone())
            locJoinEvt.onDone(new IgniteCheckedException("Failed to wait for local node joined event (grid is stopping)."));
    }

    /** {@inheritDoc} */
    @Override public void stop(boolean cancel) throws IgniteCheckedException {
        // Stop receiving notifications.
        getSpi().setListener(null);

        // Stop discovery worker and metrics updater.
        U.cancel(discoWrk);
        U.cancel(metricsUpdater);

        U.join(discoWrk, log);
        U.join(metricsUpdater, log);

        // Stop SPI itself.
        stopSpi();

        if (log.isDebugEnabled())
            log.debug(stopInfo());
    }

    /**
     * @param nodeIds Node IDs to check.
     * @return {@code True} if all IDs belong to alive nodes.
     */
    public boolean aliveAll(@Nullable Collection<UUID> nodeIds) {
        if (nodeIds == null || nodeIds.isEmpty())
            return false;

        for (UUID id : nodeIds)
            if (!alive(id))
                return false;

        return true;
    }

    /**
     * @param nodeId Node ID.
     * @return {@code True} if node for given ID is alive.
     */
    public boolean alive(UUID nodeId) {
        assert nodeId != null;

        boolean alive = getSpi().getNode(nodeId) != null; // Go directly to SPI without checking disco cache.

        // Refresh disco cache if some node died.
        if (!alive) {
            while (true) {
                DiscoCache c = discoCache();

                if (c.node(nodeId) != null) {
                    if (discoCache.compareAndSet(c, null))
                        break;
                }
                else
                    break;
            }
        }

        return alive;
    }

    /**
     * @param node Node.
     * @return {@code True} if node is alive.
     */
    public boolean alive(ClusterNode node) {
        assert node != null;

        return alive(node.id());
    }

    /**
     * @param nodeId ID of the node.
     * @return {@code True} if ping succeeded.
     */
    public boolean pingNode(UUID nodeId) {
        assert nodeId != null;

        return getSpi().pingNode(nodeId);
    }

    /**
     * @param nodeId ID of the node.
     * @return Node for ID.
     */
    @Nullable public ClusterNode node(UUID nodeId) {
        assert nodeId != null;

        return discoCache().node(nodeId);
    }

    /**
     * Gets collection of nodes for given node IDs and predicates.
     *
     * @param ids Ids to include.
     * @param p Filter for IDs.
     * @return Collection with all alive nodes for given IDs.
     */
    public Collection<ClusterNode> nodes(@Nullable Collection<UUID> ids, IgnitePredicate<UUID>... p) {
        return F.isEmpty(ids) ? Collections.<ClusterNode>emptyList() :
            F.view(
                F.viewReadOnly(ids, U.id2Node(ctx), p),
                F.notNull());
    }

    /**
     * Gets topology hash for given set of nodes.
     *
     * @param nodes Subset of grid nodes for hashing.
     * @return Hash for given topology.
     */
    public long topologyHash(Iterable<? extends ClusterNode> nodes) {
        assert nodes != null;

        Iterator<? extends ClusterNode> iter = nodes.iterator();

        if (!iter.hasNext())
            return 0; // Special case.

        List<String> uids = new ArrayList<>();

        for (ClusterNode node : nodes)
            uids.add(node.id().toString());

        Collections.sort(uids);

        CRC32 hash = new CRC32();

        for (String uuid : uids)
            hash.update(uuid.getBytes());

        return hash.getValue();
    }

    /**
     * Gets future that will be completed when current topology version becomes greater or equal to argument passed.
     *
     * @param awaitVer Topology version to await.
     * @return Future.
     */
    public IgniteInternalFuture<Long> topologyFuture(final long awaitVer) {
        long topVer = topologyVersion();

        if (topVer >= awaitVer)
            return new GridFinishedFuture<>(ctx, topVer);

        DiscoTopologyFuture fut = new DiscoTopologyFuture(ctx, awaitVer);

        fut.init();

        return fut;
    }

    /**
     * Gets discovery collection cache from SPI safely guarding against "floating" collections.
     *
     * @return Discovery collection cache.
     */
    public DiscoCache discoCache() {
        DiscoCache cur;

        while ((cur = discoCache.get()) == null)
            // Wrap the SPI collection to avoid possible floating collection.
            if (discoCache.compareAndSet(null, cur = new DiscoCache(localNode(), getSpi().getRemoteNodes())))
                return cur;

        return cur;
    }

    /** @return All non-daemon remote nodes in topology. */
    public Collection<ClusterNode> remoteNodes() {
        return discoCache().remoteNodes();
    }

    /** @return All non-daemon nodes in topology. */
    public Collection<ClusterNode> allNodes() {
        return discoCache().allNodes();
    }

    /**
     * Gets topology grouped by node versions.
     *
     * @return Version to collection of nodes map.
     */
    public NavigableMap<IgniteProductVersion, Collection<ClusterNode>> topologyVersionMap() {
        return discoCache().versionsMap();
    }

    /** @return Full topology size. */
    public int size() {
        return discoCache().allNodes().size();
    }

    /**
     * Gets all nodes for given topology version.
     *
     * @param topVer Topology version.
     * @return Collection of cache nodes.
     */
    public Collection<ClusterNode> nodes(long topVer) {
        return resolveDiscoCache(null, topVer).allNodes();
    }

    /**
     * Gets cache nodes for cache with given name.
     *
     * @param cacheName Cache name.
     * @param topVer Topology version.
     * @return Collection of cache nodes.
     */
    public Collection<ClusterNode> cacheNodes(@Nullable String cacheName, long topVer) {
        return resolveDiscoCache(cacheName, topVer).cacheNodes(cacheName, topVer);
    }

    /**
     * Gets all nodes with at least one cache configured.
     *
     * @param topVer Topology version.
     * @return Collection of cache nodes.
     */
    public Collection<ClusterNode> cacheNodes(long topVer) {
        return resolveDiscoCache(null, topVer).allNodesWithCaches(topVer);
    }

    /**
     * Gets cache remote nodes for cache with given name.
     *
     * @param cacheName Cache name.
     * @param topVer Topology version.
     * @return Collection of cache nodes.
     */
    public Collection<ClusterNode> remoteCacheNodes(@Nullable String cacheName, long topVer) {
        return resolveDiscoCache(cacheName, topVer).remoteCacheNodes(cacheName, topVer);
    }

    /**
     * Gets remote nodes with at least one cache configured.
     *
     * @param topVer Topology version.
     * @return Collection of cache nodes.
     */
    public Collection<ClusterNode> remoteCacheNodes(long topVer) {
        return resolveDiscoCache(null, topVer).remoteCacheNodes(topVer);
    }

    /**
     * Gets alive cache nodes for cache with given name.
     *
     * @param cacheName Cache name.
     * @param topVer Topology version.
     * @return Collection of cache nodes.
     */
    public Collection<ClusterNode> aliveCacheNodes(@Nullable String cacheName, long topVer) {
        return resolveDiscoCache(cacheName, topVer).aliveCacheNodes(cacheName, topVer);
    }

    /**
     * Gets alive remote cache nodes for cache with given name.
     *
     * @param cacheName Cache name.
     * @param topVer Topology version.
     * @return Collection of cache nodes.
     */
    public Collection<ClusterNode> aliveRemoteCacheNodes(@Nullable String cacheName, long topVer) {
        return resolveDiscoCache(cacheName, topVer).aliveRemoteCacheNodes(cacheName, topVer);
    }

    /**
     * Gets alive remote nodes with at least one cache configured.
     *
     * @param topVer Topology version (maximum allowed node order).
     * @return Collection of alive cache nodes.
     */
    public Collection<ClusterNode> aliveRemoteNodesWithCaches(long topVer) {
        return resolveDiscoCache(null, topVer).aliveRemoteNodesWithCaches(topVer);
    }

    /**
     * Gets alive nodes with at least one cache configured.
     *
     * @param topVer Topology version (maximum allowed node order).
     * @return Collection of alive cache nodes.
     */
    public Collection<ClusterNode> aliveNodesWithCaches(long topVer) {
        return resolveDiscoCache(null, topVer).aliveNodesWithCaches(topVer);
    }

    /**
     * Gets cache nodes for cache with given name that participate in affinity calculation.
     *
     * @param cacheName Cache name.
     * @param topVer Topology version.
     * @return Collection of cache affinity nodes.
     */
    public Collection<ClusterNode> cacheAffinityNodes(@Nullable String cacheName, long topVer) {
        return resolveDiscoCache(cacheName, topVer).cacheAffinityNodes(cacheName, topVer);
    }

    /**
     * Checks if cache with given name has at least one node with near cache enabled.
     *
     * @param cacheName Cache name.
     * @param topVer Topology version.
     * @return {@code True} if cache with given name has at least one node with near cache enabled.
     */
    public boolean hasNearCache(@Nullable String cacheName, long topVer) {
        return resolveDiscoCache(cacheName, topVer).hasNearCache(cacheName);
    }

    /**
     * Gets discovery cache for given topology version.
     *
     * @param cacheName Cache name (participates in exception message).
     * @param topVer Topology version.
     * @return Discovery cache.
     */
    private DiscoCache resolveDiscoCache(@Nullable String cacheName, long topVer) {
        DiscoCache cache = topVer == -1 || topVer == topologyVersion() ? discoCache() : discoCacheHist.get(topVer);

        if (cache == null) {
            // Find the eldest acceptable discovery cache.
            Map.Entry<Long, DiscoCache> eldest = Collections.min(discoCacheHist.entrySet(), histCmp);

            if (topVer < eldest.getKey())
                cache = eldest.getValue();
        }

        if (cache == null) {
            throw new IgniteException("Failed to resolve nodes topology [cacheName=" + cacheName +
                ", topVer=" + topVer + ", history=" + discoCacheHist.keySet() +
                ", locNode=" + ctx.discovery().localNode() + ']');
        }

        return cache;
    }

    /**
     * Gets topology by specified version from history storage.
     *
     * @param topVer Topology version.
     * @return Topology nodes or {@code null} if there are no nodes for passed in version.
*/ @Nullable public Collection<ClusterNode> topology(long topVer) { if (!histSupported) throw new UnsupportedOperationException("Current discovery SPI does not support " + "topology snapshots history (consider using TCP discovery SPI)."); Map<Long, Collection<ClusterNode>> snapshots = topHist; return snapshots.get(topVer); } /** @return All daemon nodes in topology. */ public Collection<ClusterNode> daemonNodes() { return discoCache().daemonNodes(); } /** @return Local node. */ public ClusterNode localNode() { return locNode == null ? getSpi().getLocalNode() : locNode; } /** @return Topology version. */ public long topologyVersion() { return topVer.get(); } /** @return Event that represents a local node joined to topology. */ public DiscoveryEvent localJoinEvent() { try { return locJoinEvt.get(); } catch (IgniteCheckedException e) { throw new IgniteException(e); } } /** * Gets first grid node start time, see {@link org.apache.ignite.spi.discovery.DiscoverySpi#getGridStartTime()}. * * @return Start time of the first grid node. */ public long gridStartTime() { return getSpi().getGridStartTime(); } /** Stops local node. */ private void stopNode() { new Thread( new Runnable() { @Override public void run() { ctx.markSegmented(); G.stop(ctx.gridName(), true); } } ).start(); } /** Restarts JVM. */ private void restartJvm() { new Thread( new Runnable() { @Override public void run() { ctx.markSegmented(); G.restart(true); } } ).start(); } /** * @param evt Event. */ public void sendCustomEvent(Serializable evt) { getSpi().sendCustomEvent(evt); } /** Worker for network segment checks. */ private class SegmentCheckWorker extends GridWorker { /** */ private final BlockingQueue<Object> queue = new LinkedBlockingQueue<>(); /** * */ private SegmentCheckWorker() { super(ctx.gridName(), "disco-net-seg-chk-worker", log); assert hasRslvrs; assert segChkFreq > 0; } /** * */ public void scheduleSegmentCheck() { queue.add(new Object()); } /** {@inheritDoc} */ @SuppressWarnings("StatementWithEmptyBody") @Override protected void body() throws InterruptedException { long lastChk = 0; while (!isCancelled()) { Object req = queue.poll(2000, MILLISECONDS); long now = U.currentTimeMillis(); // Check frequency if segment check has not been requested. if (req == null && (segChkFreq == 0 || lastChk + segChkFreq >= now)) { if (log.isDebugEnabled()) log.debug("Skipping segment check as it has not been requested and it is not time to check."); continue; } // We should always check segment if it has been explicitly // requested (on any node failure or leave). assert req != null || lastChk + segChkFreq < now; // Drain queue. while (queue.poll() != null) { // No-op. } if (lastSegChkRes.get()) { boolean segValid = ctx.segmentation().isValidSegment(); lastChk = now; if (!segValid) { discoWrk.addEvent(EVT_NODE_SEGMENTED, 0, getSpi().getLocalNode(), Collections.<ClusterNode>emptyList(), null); lastSegChkRes.set(false); } if (log.isDebugEnabled()) log.debug("Segment has been checked [requested=" + (req != null) + ", valid=" + segValid + ']'); } } } /** {@inheritDoc} */ @Override public String toString() { return S.toString(SegmentCheckWorker.class, this); } } /** Worker for discovery events. */ private class DiscoveryWorker extends GridWorker { /** Event queue. */ private final BlockingQueue<GridTuple5<Integer, Long, ClusterNode, Collection<ClusterNode>, Serializable>> evts = new LinkedBlockingQueue<>(); /** Node segmented event fired flag. 
         */
        private boolean nodeSegFired;

        /**
         *
         */
        private DiscoveryWorker() {
            super(ctx.gridName(), "disco-event-worker", log);
        }

        /**
         * Method is called when any discovery event occurs.
         *
         * @param type Discovery event type. See {@link org.apache.ignite.events.DiscoveryEvent} for more details.
         * @param topVer Topology version.
         * @param node Remote node this event is connected with.
         * @param topSnapshot Topology snapshot.
         */
        private void recordEvent(int type, long topVer, ClusterNode node, Collection<ClusterNode> topSnapshot) {
            assert node != null;

            if (ctx.event().isRecordable(type)) {
                DiscoveryEvent evt = new DiscoveryEvent();

                evt.node(ctx.discovery().localNode());
                evt.eventNode(node);
                evt.type(type);

                evt.topologySnapshot(topVer, U.<ClusterNode, ClusterNode>arrayList(topSnapshot, daemonFilter));

                if (type == EVT_NODE_METRICS_UPDATED)
                    evt.message("Metrics were updated: " + node);
                else if (type == EVT_NODE_JOINED)
                    evt.message("Node joined: " + node);
                else if (type == EVT_NODE_LEFT)
                    evt.message("Node left: " + node);
                else if (type == EVT_NODE_FAILED)
                    evt.message("Node failed: " + node);
                else if (type == EVT_NODE_SEGMENTED)
                    evt.message("Node segmented: " + node);
                else
                    assert false;

                ctx.event().record(evt);
            }
        }

        /**
         * @param type Event type.
         * @param topVer Topology version.
         * @param node Node.
         * @param topSnapshot Topology snapshot.
         * @param data Custom event data.
         */
        void addEvent(
            int type,
            long topVer,
            ClusterNode node,
            Collection<ClusterNode> topSnapshot,
            @Nullable Serializable data
        ) {
            assert node != null;

            evts.add(F.t(type, topVer, node, topSnapshot, data));
        }

        /**
         * @param node Node to get a short description for.
         * @return Short description for the node to be used in 'quiet' mode.
         */
        private String quietNode(ClusterNode node) {
            assert node != null;

            return "nodeId8=" + node.id().toString().substring(0, 8) + ", " +
                "addrs=" + U.addressesAsString(node) + ", " +
                "order=" + node.order() + ", " +
                "CPUs=" + node.metrics().getTotalCpus();
        }

        /** {@inheritDoc} */
        @Override protected void body() throws InterruptedException {
            while (!isCancelled()) {
                try {
                    body0();
                }
                catch (InterruptedException e) {
                    throw e;
                }
                catch (Throwable t) {
                    U.error(log, "Unexpected exception in discovery worker thread (ignored).", t);
                }
            }
        }

        /** @throws InterruptedException If interrupted. */
        @SuppressWarnings("DuplicateCondition")
        private void body0() throws InterruptedException {
            GridTuple5<Integer, Long, ClusterNode, Collection<ClusterNode>, Serializable> evt = evts.take();

            int type = evt.get1();

            long topVer = evt.get2();

            ClusterNode node = evt.get3();

            boolean isDaemon = node.isDaemon();

            boolean segmented = false;

            switch (type) {
                case EVT_NODE_JOINED: {
                    assert !discoOrdered || topVer == node.order() : "Invalid topology version [topVer=" + topVer +
                        ", node=" + node + ']';

                    try {
                        checkAttributes(F.asList(node));
                    }
                    catch (IgniteCheckedException e) {
                        U.warn(log, e.getMessage()); // We have a well-formed attribute warning here.
                    }

                    if (!isDaemon) {
                        if (!isLocDaemon) {
                            if (log.isInfoEnabled())
                                log.info("Added new node to topology: " + node);

                            ackTopology(topVer, true);
                        }
                        else if (log.isDebugEnabled())
                            log.debug("Added new node to topology: " + node);
                    }
                    else if (log.isDebugEnabled())
                        log.debug("Added new daemon node to topology: " + node);

                    break;
                }

                case EVT_NODE_LEFT: {
                    // Check only if resolvers were configured.
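                    // A node that left the topology may mean the local node got segmented,
                    // so ask the segment check worker for an out-of-band re-check.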
if (hasRslvrs) segChkWrk.scheduleSegmentCheck(); if (!isDaemon) { if (!isLocDaemon) { if (log.isInfoEnabled()) log.info("Node left topology: " + node); ackTopology(topVer, true); } else if (log.isDebugEnabled()) log.debug("Node left topology: " + node); } else if (log.isDebugEnabled()) log.debug("Daemon node left topology: " + node); break; } case EVT_NODE_FAILED: { // Check only if resolvers were configured. if (hasRslvrs) segChkWrk.scheduleSegmentCheck(); if (!isDaemon) { if (!isLocDaemon) { U.warn(log, "Node FAILED: " + node); ackTopology(topVer, true); } else if (log.isDebugEnabled()) log.debug("Node FAILED: " + node); } else if (log.isDebugEnabled()) log.debug("Daemon node FAILED: " + node); break; } case EVT_NODE_SEGMENTED: { assert F.eqNodes(localNode(), node); if (nodeSegFired) { if (log.isDebugEnabled()) { log.debug("Ignored node segmented event [type=EVT_NODE_SEGMENTED, " + "node=" + node + ']'); } return; } // Ignore all further EVT_NODE_SEGMENTED events // until EVT_NODE_RECONNECTED is fired. nodeSegFired = true; lastLoggedTop.set(0); segmented = true; if (!isLocDaemon) U.warn(log, "Local node SEGMENTED: " + node); else if (log.isDebugEnabled()) log.debug("Local node SEGMENTED: " + node); break; } case DiscoveryCustomEvent.EVT_DISCOVERY_CUSTOM_EVT: { DiscoveryCustomEvent customEvt = new DiscoveryCustomEvent(); customEvt.node(ctx.discovery().localNode()); customEvt.eventNode(node); customEvt.type(type); customEvt.topologySnapshot(topVer, null); customEvt.data(evt.get5()); assert ctx.event().isRecordable(DiscoveryCustomEvent.EVT_DISCOVERY_CUSTOM_EVT); ctx.event().record(customEvt); return; } // Don't log metric update to avoid flooding the log. case EVT_NODE_METRICS_UPDATED: break; default: assert false : "Invalid discovery event: " + type; } recordEvent(type, topVer, node, evt.get4()); if (segmented) onSegmentation(); } /** * */ private void onSegmentation() { GridSegmentationPolicy segPlc = ctx.config().getSegmentationPolicy(); // Always disconnect first. try { getSpi().disconnect(); } catch (IgniteSpiException e) { U.error(log, "Failed to disconnect discovery SPI.", e); } switch (segPlc) { case RESTART_JVM: U.warn(log, "Restarting JVM according to configured segmentation policy."); restartJvm(); break; case STOP: U.warn(log, "Stopping local node according to configured segmentation policy."); stopNode(); break; default: assert segPlc == NOOP : "Unsupported segmentation policy value: " + segPlc; } } /** {@inheritDoc} */ @Override public String toString() { return S.toString(DiscoveryWorker.class, this); } } /** * */ private class MetricsUpdater extends GridWorker { /** */ private long prevGcTime = -1; /** */ private long prevCpuTime = -1; /** * */ private MetricsUpdater() { super(ctx.gridName(), "metrics-updater", log); } /** {@inheritDoc} */ @Override protected void body() throws IgniteInterruptedCheckedException { while (!isCancelled()) { U.sleep(METRICS_UPDATE_FREQ); gcCpuLoad = getGcCpuLoad(); cpuLoad = getCpuLoad(); } } /** * @return GC CPU load. */ private double getGcCpuLoad() { long gcTime = 0; for (GarbageCollectorMXBean bean : gc) { long colTime = bean.getCollectionTime(); if (colTime > 0) gcTime += colTime; } gcTime /= metrics.getAvailableProcessors(); double gc = 0; if (prevGcTime > 0) { long gcTimeDiff = gcTime - prevGcTime; gc = (double)gcTimeDiff / METRICS_UPDATE_FREQ; } prevGcTime = gcTime; return gc; } /** * @return CPU load. 
         */
        private double getCpuLoad() {
            long cpuTime;

            try {
                cpuTime = U.<Long>property(os, "processCpuTime");
            }
            catch (IgniteException ignored) {
                return -1;
            }

            // Method reports time in nanoseconds across all processors.
            cpuTime /= 1000000 * metrics.getAvailableProcessors();

            double cpu = 0;

            if (prevCpuTime > 0) {
                long cpuTimeDiff = cpuTime - prevCpuTime;

                // CPU load could go higher than 100% because calculating cpuTimeDiff also takes some time.
                cpu = Math.min(1.0, (double)cpuTimeDiff / METRICS_UPDATE_FREQ);
            }

            prevCpuTime = cpuTime;

            return cpu;
        }

        /** {@inheritDoc} */
        @Override public String toString() {
            return S.toString(MetricsUpdater.class, this, super.toString());
        }
    }

    /** Discovery topology future. */
    private static class DiscoTopologyFuture extends GridFutureAdapter<Long> implements GridLocalEventListener {
        /** */
        private static final long serialVersionUID = 0L;

        /** Topology await version. */
        private long awaitVer;

        /** Empty constructor required by {@link Externalizable}. */
        public DiscoTopologyFuture() {
            // No-op.
        }

        /**
         * @param ctx Context.
         * @param awaitVer Await version.
         */
        private DiscoTopologyFuture(GridKernalContext ctx, long awaitVer) {
            super(ctx);

            this.awaitVer = awaitVer;
        }

        /** Initializes future. */
        private void init() {
            ctx.event().addLocalEventListener(this, EVT_NODE_JOINED, EVT_NODE_LEFT, EVT_NODE_FAILED);

            // Close potential window.
            long topVer = ctx.discovery().topologyVersion();

            if (topVer >= awaitVer)
                onDone(topVer);
        }

        /** {@inheritDoc} */
        @Override public boolean onDone(@Nullable Long res, @Nullable Throwable err) {
            if (super.onDone(res, err)) {
                ctx.event().removeLocalEventListener(this, EVT_NODE_JOINED, EVT_NODE_LEFT, EVT_NODE_FAILED);

                return true;
            }

            return false;
        }

        /** {@inheritDoc} */
        @Override public void onEvent(Event evt) {
            assert evt.type() == EVT_NODE_JOINED || evt.type() == EVT_NODE_LEFT || evt.type() == EVT_NODE_FAILED;

            DiscoveryEvent discoEvt = (DiscoveryEvent)evt;

            if (discoEvt.topologyVersion() >= awaitVer)
                onDone(discoEvt.topologyVersion());
        }
    }

    /** Cache for discovery collections. */
    private class DiscoCache {
        /** Remote nodes. */
        private final List<ClusterNode> rmtNodes;

        /** All nodes. */
        private final List<ClusterNode> allNodes;

        /** All nodes with at least one cache configured. */
        private final Collection<ClusterNode> allNodesWithCaches;

        /** All remote nodes with at least one cache configured. */
        private final Collection<ClusterNode> rmtNodesWithCaches;

        /** Cache nodes by cache name. */
        private final Map<String, Collection<ClusterNode>> allCacheNodes;

        /** Remote cache nodes by cache name. */
        private final Map<String, Collection<ClusterNode>> rmtCacheNodes;

        /** Affinity cache nodes by cache name. */
        private final Map<String, Collection<ClusterNode>> affCacheNodes;

        /** Caches where at least one node has near cache enabled. */
        private final Set<String> nearEnabledCaches;

        /** Nodes grouped by version. */
        private final NavigableMap<IgniteProductVersion, Collection<ClusterNode>> nodesByVer;

        /** Daemon nodes. */
        private final List<ClusterNode> daemonNodes;

        /** Node map. */
        private final Map<UUID, ClusterNode> nodeMap;

        /** Local node. */
        private final ClusterNode loc;

        /** Highest node order. */
        private final long maxOrder;

        /**
         * Cached alive nodes list. As long as this collection doesn't accept {@code null}s use {@link
         * #maskNull(String)} before passing raw cache names to it.
         */
        private final ConcurrentMap<String, Collection<ClusterNode>> aliveCacheNodes;

        /**
         * Cached alive remote nodes list. As long as this collection doesn't accept {@code null}s use {@link
         * #maskNull(String)} before passing raw cache names to it.
         */
        private final ConcurrentMap<String, Collection<ClusterNode>> aliveRmtCacheNodes;

        /**
         * Cached alive nodes with caches.
         */
        private final Collection<ClusterNode> aliveNodesWithCaches;

        /**
         * Cached alive remote nodes with caches.
         */
        private final Collection<ClusterNode> aliveRmtNodesWithCaches;

        /**
         * @param loc Local node.
         * @param rmts Remote nodes.
         */
        private DiscoCache(ClusterNode loc, Collection<ClusterNode> rmts) {
            this.loc = loc;

            rmtNodes = Collections.unmodifiableList(new ArrayList<>(F.view(rmts, daemonFilter)));

            assert !rmtNodes.contains(loc) : "Remote nodes collection shouldn't contain local node" +
                " [rmtNodes=" + rmtNodes + ", loc=" + loc + ']';

            List<ClusterNode> all = new ArrayList<>(rmtNodes.size() + 1);

            if (!loc.isDaemon())
                all.add(loc);

            all.addAll(rmtNodes);

            allNodes = Collections.unmodifiableList(all);

            Map<String, Collection<ClusterNode>> cacheMap = new HashMap<>(allNodes.size(), 1.0f);

            Map<String, Collection<ClusterNode>> rmtCacheMap = new HashMap<>(allNodes.size(), 1.0f);

            Map<String, Collection<ClusterNode>> dhtNodesMap = new HashMap<>(allNodes.size(), 1.0f);

            Collection<ClusterNode> nodesWithCaches = new ArrayList<>(allNodes.size());

            Collection<ClusterNode> rmtNodesWithCaches = new ArrayList<>(allNodes.size());

            aliveCacheNodes = new ConcurrentHashMap8<>(allNodes.size(), 1.0f);

            aliveRmtCacheNodes = new ConcurrentHashMap8<>(allNodes.size(), 1.0f);

            aliveNodesWithCaches = new ConcurrentSkipListSet<>();

            aliveRmtNodesWithCaches = new ConcurrentSkipListSet<>();

            nodesByVer = new TreeMap<>();

            long maxOrder0 = 0;

            Set<String> nearEnabledSet = new HashSet<>();

            for (ClusterNode node : allNodes) {
                assert node.order() != 0 : "Invalid node order [locNode=" + loc + ", node=" + node + ']';

                if (node.order() > maxOrder0)
                    maxOrder0 = node.order();

                GridCacheAttributes[] caches = node.attribute(ATTR_CACHE);

                if (caches != null) {
                    nodesWithCaches.add(node);

                    if (!loc.id().equals(node.id()))
                        rmtNodesWithCaches.add(node);

                    for (GridCacheAttributes attrs : caches) {
                        addToMap(cacheMap, attrs.cacheName(), node);

                        if (alive(node.id()))
                            addToMap(aliveCacheNodes, maskNull(attrs.cacheName()), node);

                        if (attrs.isAffinityNode())
                            addToMap(dhtNodesMap, attrs.cacheName(), node);

                        if (attrs.nearCacheEnabled())
                            nearEnabledSet.add(attrs.cacheName());

                        if (!loc.id().equals(node.id())) {
                            addToMap(rmtCacheMap, attrs.cacheName(), node);

                            if (alive(node.id()))
                                addToMap(aliveRmtCacheNodes, maskNull(attrs.cacheName()), node);
                        }
                    }

                    if (alive(node.id())) {
                        aliveNodesWithCaches.add(node);

                        if (!loc.id().equals(node.id()))
                            aliveRmtNodesWithCaches.add(node);
                    }
                }

                IgniteProductVersion nodeVer = U.productVersion(node);

                // Create collection for this version if it does not exist.
                Collection<ClusterNode> nodes = nodesByVer.get(nodeVer);

                if (nodes == null) {
                    nodes = new ArrayList<>(allNodes.size());

                    nodesByVer.put(nodeVer, nodes);
                }

                nodes.add(node);
            }

            // Need second iteration to add this node to all previous node versions.
            for (ClusterNode node : allNodes) {
                IgniteProductVersion nodeVer = U.productVersion(node);

                // Get all versions strictly lower than node's version
                // (the node was already added to its own version's collection above).
NavigableMap<IgniteProductVersion, Collection<ClusterNode>> updateView = nodesByVer.headMap(nodeVer, false); for (Collection<ClusterNode> prevVersions : updateView.values()) prevVersions.add(node); } maxOrder = maxOrder0; allCacheNodes = Collections.unmodifiableMap(cacheMap); rmtCacheNodes = Collections.unmodifiableMap(rmtCacheMap); affCacheNodes = Collections.unmodifiableMap(dhtNodesMap); allNodesWithCaches = Collections.unmodifiableCollection(nodesWithCaches); this.rmtNodesWithCaches = Collections.unmodifiableCollection(rmtNodesWithCaches); nearEnabledCaches = Collections.unmodifiableSet(nearEnabledSet); daemonNodes = Collections.unmodifiableList(new ArrayList<>( F.view(F.concat(false, loc, rmts), F0.not(daemonFilter)))); Map<UUID, ClusterNode> nodeMap = new HashMap<>(allNodes().size() + daemonNodes.size(), 1.0f); for (ClusterNode n : F.concat(false, allNodes(), daemonNodes())) nodeMap.put(n.id(), n); this.nodeMap = nodeMap; } /** * Adds node to map. * * @param cacheMap Map to add to. * @param cacheName Cache name. * @param rich Node to add */ private void addToMap(Map<String, Collection<ClusterNode>> cacheMap, String cacheName, ClusterNode rich) { Collection<ClusterNode> cacheNodes = cacheMap.get(cacheName); if (cacheNodes == null) { cacheNodes = new ArrayList<>(allNodes.size()); cacheMap.put(cacheName, cacheNodes); } cacheNodes.add(rich); } /** @return Local node. */ ClusterNode localNode() { return loc; } /** @return Remote nodes. */ Collection<ClusterNode> remoteNodes() { return rmtNodes; } /** @return All nodes. */ Collection<ClusterNode> allNodes() { return allNodes; } /** * @return All nodes with at least one cache configured. */ Collection<ClusterNode> allNodesWithCaches() { return allNodesWithCaches; } /** * Gets collection of nodes which have version equal or greater than {@code ver}. * * @param ver Version to check. * @return Collection of nodes with version equal or greater than {@code ver}. */ Collection<ClusterNode> elderNodes(IgniteProductVersion ver) { Map.Entry<IgniteProductVersion, Collection<ClusterNode>> entry = nodesByVer.ceilingEntry(ver); if (entry == null) return Collections.emptyList(); return entry.getValue(); } /** * @return Versions map. */ NavigableMap<IgniteProductVersion, Collection<ClusterNode>> versionsMap() { return nodesByVer; } /** * Gets collection of nodes with at least one cache configured. * * @param topVer Topology version (maximum allowed node order). * @return Collection of nodes. */ Collection<ClusterNode> allNodesWithCaches(final long topVer) { return filter(topVer, allNodesWithCaches); } /** * Gets all nodes that have cache with given name. * * @param cacheName Cache name. * @param topVer Topology version. * @return Collection of nodes. */ Collection<ClusterNode> cacheNodes(@Nullable String cacheName, final long topVer) { return filter(topVer, allCacheNodes.get(cacheName)); } /** * Gets all remote nodes that have cache with given name. * * @param cacheName Cache name. * @param topVer Topology version. * @return Collection of nodes. */ Collection<ClusterNode> remoteCacheNodes(@Nullable String cacheName, final long topVer) { return filter(topVer, rmtCacheNodes.get(cacheName)); } /** * Gets all remote nodes that have at least one cache configured. * * @param topVer Topology version. * @return Collection of nodes. */ Collection<ClusterNode> remoteCacheNodes(final long topVer) { return filter(topVer, rmtNodesWithCaches); } /** * Gets all nodes that have cache with given name and should participate in affinity calculation. 
With
         * partitioned cache, nodes with near-only cache do not participate in affinity node calculation.
         *
         * @param cacheName Cache name.
         * @param topVer Topology version.
         * @return Collection of nodes.
         */
        Collection<ClusterNode> cacheAffinityNodes(@Nullable String cacheName, final long topVer) {
            return filter(topVer, affCacheNodes.get(cacheName));
        }

        /**
         * Gets all alive nodes that have cache with given name.
         *
         * @param cacheName Cache name.
         * @param topVer Topology version.
         * @return Collection of nodes.
         */
        Collection<ClusterNode> aliveCacheNodes(@Nullable String cacheName, final long topVer) {
            return filter(topVer, aliveCacheNodes.get(maskNull(cacheName)));
        }

        /**
         * Gets all alive remote nodes that have cache with given name.
         *
         * @param cacheName Cache name.
         * @param topVer Topology version.
         * @return Collection of nodes.
         */
        Collection<ClusterNode> aliveRemoteCacheNodes(@Nullable String cacheName, final long topVer) {
            return filter(topVer, aliveRmtCacheNodes.get(maskNull(cacheName)));
        }

        /**
         * Gets all alive remote nodes with at least one cache configured.
         *
         * @param topVer Topology version.
         * @return Collection of nodes.
         */
        Collection<ClusterNode> aliveRemoteNodesWithCaches(final long topVer) {
            return filter(topVer, aliveRmtNodesWithCaches);
        }

        /**
         * Gets all alive nodes with at least one cache configured.
         *
         * @param topVer Topology version.
         * @return Collection of nodes.
         */
        Collection<ClusterNode> aliveNodesWithCaches(final long topVer) {
            return filter(topVer, aliveNodesWithCaches);
        }

        /**
         * Checks if cache with given name has at least one node with near cache enabled.
         *
         * @param cacheName Cache name.
         * @return {@code True} if cache with given name has at least one node with near cache enabled.
         */
        boolean hasNearCache(@Nullable String cacheName) {
            return nearEnabledCaches.contains(cacheName);
        }

        /**
         * Removes left node from cached alives lists.
         *
         * @param leftNode Left node.
         */
        void updateAlives(ClusterNode leftNode) {
            if (leftNode.order() > maxOrder)
                return;

            filterNodeMap(aliveCacheNodes, leftNode);

            filterNodeMap(aliveRmtCacheNodes, leftNode);

            aliveNodesWithCaches.remove(leftNode);
            aliveRmtNodesWithCaches.remove(leftNode);
        }

        /**
         * Updates the given map by replacing node collections with copies that exclude the given node.
         *
         * @param map Map to update.
         * @param exclNode Node to exclude.
         */
        private void filterNodeMap(ConcurrentMap<String, Collection<ClusterNode>> map, final ClusterNode exclNode) {
            for (String cacheName : U.cacheNames(exclNode)) {
                String maskedName = maskNull(cacheName);

                while (true) {
                    Collection<ClusterNode> oldNodes = map.get(maskedName);

                    if (oldNodes == null || oldNodes.isEmpty())
                        break;

                    Collection<ClusterNode> newNodes = new ArrayList<>(oldNodes);

                    if (!newNodes.remove(exclNode))
                        break;

                    if (map.replace(maskedName, oldNodes, newNodes))
                        break;
                }
            }
        }

        /**
         * Replaces {@code null} with {@code NULL_CACHE_NAME}.
         *
         * @param cacheName Cache name.
         * @return Masked name.
         */
        private String maskNull(@Nullable String cacheName) {
            return cacheName == null ? NULL_CACHE_NAME : cacheName;
        }

        /**
         * @param topVer Topology version.
         * @param nodes Nodes.
         * @return Filtered collection (potentially empty, but never {@code null}).
         */
        private Collection<ClusterNode> filter(final long topVer, @Nullable Collection<ClusterNode> nodes) {
            if (nodes == null)
                return Collections.emptyList();

            // If no filtering needed, return original collection.
            return nodes.isEmpty() || topVer < 0 || topVer >= maxOrder ?
nodes : F.view(nodes, new P1<ClusterNode>() { @Override public boolean apply(ClusterNode node) { return node.order() <= topVer; } }); } /** @return Daemon nodes. */ Collection<ClusterNode> daemonNodes() { return daemonNodes; } /** * @param id Node ID. * @return Node. */ @Nullable ClusterNode node(UUID id) { return nodeMap.get(id); } /** {@inheritDoc} */ @Override public String toString() { return S.toString(DiscoCache.class, this, "allNodesWithDaemons", U.toShortString(allNodes)); } } }
modules/core/src/main/java/org/apache/ignite/internal/managers/discovery/GridDiscoveryManager.java
">>> Topology version: " + topVer + U.nl() : "") + ">>> Topology hash: 0x" + Long.toHexString(hash).toUpperCase() + U.nl(); dbg += ">>> Local: " + locNode.id().toString().toUpperCase() + ", " + U.addressesAsString(locNode) + ", " + locNode.order() + ", " + locNode.attribute("os.name") + ' ' + locNode.attribute("os.arch") + ' ' + locNode.attribute("os.version") + ", " + System.getProperty("user.name") + ", " + locNode.attribute("java.runtime.name") + ' ' + locNode.attribute("java.runtime.version") + U.nl(); for (ClusterNode node : rmtNodes) dbg += ">>> Remote: " + node.id().toString().toUpperCase() + ", " + U.addressesAsString(node) + ", " + node.order() + ", " + node.attribute("os.name") + ' ' + node.attribute("os.arch") + ' ' + node.attribute("os.version") + ", " + node.attribute(ATTR_USER_NAME) + ", " + node.attribute("java.runtime.name") + ' ' + node.attribute("java.runtime.version") + U.nl(); dbg += ">>> Total number of CPUs: " + totalCpus + U.nl(); dbg += ">>> Total heap size: " + heap + "GB" + U.nl(); log.debug(dbg); } else if (log.isInfoEnabled()) log.info(topologySnapshotMessage(rmtNodes.size(), totalCpus, heap)); } /** * @param rmtNodesNum Remote nodes number. * @param totalCpus Total cpu number. * @param heap Heap size. * @return Topology snapshot message. */ private String topologySnapshotMessage(int rmtNodesNum, int totalCpus, double heap) { return PREFIX + " [" + (discoOrdered ? "ver=" + topVer + ", " : "") + "nodes=" + (rmtNodesNum + 1) + ", CPUs=" + totalCpus + ", heap=" + heap + "GB" + ']'; } /** {@inheritDoc} */ @Override public void onKernalStop0(boolean cancel) { // Stop segment check worker. if (segChkWrk != null) { segChkWrk.cancel(); U.join(segChkThread, log); } if (!locJoinEvt.isDone()) locJoinEvt.onDone(new IgniteCheckedException("Failed to wait for local node joined event (grid is stopping).")); } /** {@inheritDoc} */ @Override public void stop(boolean cancel) throws IgniteCheckedException { // Stop receiving notifications. getSpi().setListener(null); // Stop discovery worker and metrics updater. U.cancel(discoWrk); U.cancel(metricsUpdater); U.join(discoWrk, log); U.join(metricsUpdater, log); // Stop SPI itself. stopSpi(); if (log.isDebugEnabled()) log.debug(stopInfo()); } /** * @param nodeIds Node IDs to check. * @return {@code True} if at least one ID belongs to an alive node. */ public boolean aliveAll(@Nullable Collection<UUID> nodeIds) { if (nodeIds == null || nodeIds.isEmpty()) return false; for (UUID id : nodeIds) if (!alive(id)) return false; return true; } /** * @param nodeId Node ID. * @return {@code True} if node for given ID is alive. */ public boolean alive(UUID nodeId) { assert nodeId != null; boolean alive = getSpi().getNode(nodeId) != null; // Go directly to SPI without checking disco cache. // Refresh disco cache if some node died. if (!alive) { while (true) { DiscoCache c = discoCache(); if (c.node(nodeId) != null) { if (discoCache.compareAndSet(c, null)) break; } else break; } } return alive; } /** * @param node Node. * @return {@code True} if node is alive. */ public boolean alive(ClusterNode node) { assert node != null; return alive(node.id()); } /** * @param nodeId ID of the node. * @return {@code True} if ping succeeded. */ public boolean pingNode(UUID nodeId) { assert nodeId != null; return getSpi().pingNode(nodeId); } /** * @param nodeId ID of the node. * @return Node for ID. 
*/ @Nullable public ClusterNode node(UUID nodeId) { assert nodeId != null; return discoCache().node(nodeId); } /** * Gets collection of node for given node IDs and predicates. * * @param ids Ids to include. * @param p Filter for IDs. * @return Collection with all alive nodes for given IDs. */ public Collection<ClusterNode> nodes(@Nullable Collection<UUID> ids, IgnitePredicate<UUID>... p) { return F.isEmpty(ids) ? Collections.<ClusterNode>emptyList() : F.view( F.viewReadOnly(ids, U.id2Node(ctx), p), F.notNull()); } /** * Gets topology hash for given set of nodes. * * @param nodes Subset of grid nodes for hashing. * @return Hash for given topology. */ public long topologyHash(Iterable<? extends ClusterNode> nodes) { assert nodes != null; Iterator<? extends ClusterNode> iter = nodes.iterator(); if (!iter.hasNext()) return 0; // Special case. List<String> uids = new ArrayList<>(); for (ClusterNode node : nodes) uids.add(node.id().toString()); Collections.sort(uids); CRC32 hash = new CRC32(); for (String uuid : uids) hash.update(uuid.getBytes()); return hash.getValue(); } /** * Gets future that will be completed when current topology version becomes greater or equal to argument passed. * * @param awaitVer Topology version to await. * @return Future. */ public IgniteInternalFuture<Long> topologyFuture(final long awaitVer) { long topVer = topologyVersion(); if (topVer >= awaitVer) return new GridFinishedFuture<>(ctx, topVer); DiscoTopologyFuture fut = new DiscoTopologyFuture(ctx, awaitVer); fut.init(); return fut; } /** * Gets discovery collection cache from SPI safely guarding against "floating" collections. * * @return Discovery collection cache. */ public DiscoCache discoCache() { DiscoCache cur; while ((cur = discoCache.get()) == null) // Wrap the SPI collection to avoid possible floating collection. if (discoCache.compareAndSet(null, cur = new DiscoCache(localNode(), getSpi().getRemoteNodes()))) return cur; return cur; } /** @return All non-daemon remote nodes in topology. */ public Collection<ClusterNode> remoteNodes() { return discoCache().remoteNodes(); } /** @return All non-daemon nodes in topology. */ public Collection<ClusterNode> allNodes() { return discoCache().allNodes(); } /** * Gets topology grouped by node versions. * * @return Version to collection of nodes map. */ public NavigableMap<IgniteProductVersion, Collection<ClusterNode>> topologyVersionMap() { return discoCache().versionsMap(); } /** @return Full topology size. */ public int size() { return discoCache().allNodes().size(); } /** * Gets all nodes for given topology version. * * @param topVer Topology version. * @return Collection of cache nodes. */ public Collection<ClusterNode> nodes(long topVer) { return resolveDiscoCache(null, topVer).allNodes(); } /** * Gets cache nodes for cache with given name. * * @param cacheName Cache name. * @param topVer Topology version. * @return Collection of cache nodes. */ public Collection<ClusterNode> cacheNodes(@Nullable String cacheName, long topVer) { return resolveDiscoCache(cacheName, topVer).cacheNodes(cacheName, topVer); } /** * Gets all nodes with at least one cache configured. * * @param topVer Topology version. * @return Collection of cache nodes. */ public Collection<ClusterNode> cacheNodes(long topVer) { return resolveDiscoCache(null, topVer).allNodesWithCaches(topVer); } /** * Gets cache remote nodes for cache with given name. * * @param cacheName Cache name. * @param topVer Topology version. * @return Collection of cache nodes. 
*/ public Collection<ClusterNode> remoteCacheNodes(@Nullable String cacheName, long topVer) { return resolveDiscoCache(cacheName, topVer).remoteCacheNodes(cacheName, topVer); } /** * Gets remote nodes with at least one cache configured. * * @param topVer Topology version. * @return Collection of cache nodes. */ public Collection<ClusterNode> remoteCacheNodes(long topVer) { return resolveDiscoCache(null, topVer).remoteCacheNodes(topVer); } /** * Gets alive cache nodes for cache with given name. * * @param cacheName Cache name. * @param topVer Topology version. * @return Collection of cache nodes. */ public Collection<ClusterNode> aliveCacheNodes(@Nullable String cacheName, long topVer) { return resolveDiscoCache(cacheName, topVer).aliveCacheNodes(cacheName, topVer); } /** * Gets alive remote cache nodes for cache with given name. * * @param cacheName Cache name. * @param topVer Topology version. * @return Collection of cache nodes. */ public Collection<ClusterNode> aliveRemoteCacheNodes(@Nullable String cacheName, long topVer) { return resolveDiscoCache(cacheName, topVer).aliveRemoteCacheNodes(cacheName, topVer); } /** * Gets alive remote nodes with at least one cache configured. * * @param topVer Topology version (maximum allowed node order). * @return Collection of alive cache nodes. */ public Collection<ClusterNode> aliveRemoteNodesWithCaches(long topVer) { return resolveDiscoCache(null, topVer).aliveRemoteNodesWithCaches(topVer); } /** * Gets alive nodes with at least one cache configured. * * @param topVer Topology version (maximum allowed node order). * @return Collection of alive cache nodes. */ public Collection<ClusterNode> aliveNodesWithCaches(long topVer) { return resolveDiscoCache(null, topVer).aliveNodesWithCaches(topVer); } /** * Gets cache nodes for cache with given name that participate in affinity calculation. * * @param cacheName Cache name. * @param topVer Topology version. * @return Collection of cache affinity nodes. */ public Collection<ClusterNode> cacheAffinityNodes(@Nullable String cacheName, long topVer) { return resolveDiscoCache(cacheName, topVer).cacheAffinityNodes(cacheName, topVer); } /** * Checks if cache with given name has at least one node with near cache enabled. * * @param cacheName Cache name. * @param topVer Topology version. * @return {@code True} if cache with given name has at least one node with near cache enabled. */ public boolean hasNearCache(@Nullable String cacheName, long topVer) { return resolveDiscoCache(cacheName, topVer).hasNearCache(cacheName); } /** * Gets discovery cache for given topology version. * * @param cacheName Cache name (participates in exception message). * @param topVer Topology version. * @return Discovery cache. */ private DiscoCache resolveDiscoCache(@Nullable String cacheName, long topVer) { DiscoCache cache = topVer == -1 || topVer == topologyVersion() ? discoCache() : discoCacheHist.get(topVer); if (cache == null) { // Find the eldest acceptable discovery cache. Map.Entry<Long, DiscoCache> eldest = Collections.min(discoCacheHist.entrySet(), histCmp); if (topVer < eldest.getKey()) cache = eldest.getValue(); } if (cache == null) { throw new IgniteException("Failed to resolve nodes topology [cacheName=" + cacheName + ", topVer=" + topVer + ", history=" + discoCacheHist.keySet() + ", locNode=" + ctx.discovery().localNode() + ']'); } return cache; } /** * Gets topology by specified version from history storage. * * @param topVer Topology version. * @return Topology nodes or {@code null} if there are no nodes for the passed-in version.
*/ @Nullable public Collection<ClusterNode> topology(long topVer) { if (!histSupported) throw new UnsupportedOperationException("Current discovery SPI does not support " + "topology snapshots history (consider using TCP discovery SPI)."); Map<Long, Collection<ClusterNode>> snapshots = topHist; return snapshots.get(topVer); } /** @return All daemon nodes in topology. */ public Collection<ClusterNode> daemonNodes() { return discoCache().daemonNodes(); } /** @return Local node. */ public ClusterNode localNode() { return locNode == null ? getSpi().getLocalNode() : locNode; } /** @return Topology version. */ public long topologyVersion() { return topVer.get(); } /** @return Event that represents a local node joined to topology. */ public DiscoveryEvent localJoinEvent() { try { return locJoinEvt.get(); } catch (IgniteCheckedException e) { throw new IgniteException(e); } } /** * Gets first grid node start time, see {@link org.apache.ignite.spi.discovery.DiscoverySpi#getGridStartTime()}. * * @return Start time of the first grid node. */ public long gridStartTime() { return getSpi().getGridStartTime(); } /** Stops local node. */ private void stopNode() { new Thread( new Runnable() { @Override public void run() { ctx.markSegmented(); G.stop(ctx.gridName(), true); } } ).start(); } /** Restarts JVM. */ private void restartJvm() { new Thread( new Runnable() { @Override public void run() { ctx.markSegmented(); G.restart(true); } } ).start(); } /** * @param evt Event. */ public void sendCustomEvent(Serializable evt) { getSpi().sendCustomEvent(evt); } /** Worker for network segment checks. */ private class SegmentCheckWorker extends GridWorker { /** */ private final BlockingQueue<Object> queue = new LinkedBlockingQueue<>(); /** * */ private SegmentCheckWorker() { super(ctx.gridName(), "disco-net-seg-chk-worker", log); assert hasRslvrs; assert segChkFreq > 0; } /** * */ public void scheduleSegmentCheck() { queue.add(new Object()); } /** {@inheritDoc} */ @SuppressWarnings("StatementWithEmptyBody") @Override protected void body() throws InterruptedException { long lastChk = 0; while (!isCancelled()) { Object req = queue.poll(2000, MILLISECONDS); long now = U.currentTimeMillis(); // Check frequency if segment check has not been requested. if (req == null && (segChkFreq == 0 || lastChk + segChkFreq >= now)) { if (log.isDebugEnabled()) log.debug("Skipping segment check as it has not been requested and it is not time to check."); continue; } // We should always check segment if it has been explicitly // requested (on any node failure or leave). assert req != null || lastChk + segChkFreq < now; // Drain queue. while (queue.poll() != null) { // No-op. } if (lastSegChkRes.get()) { boolean segValid = ctx.segmentation().isValidSegment(); lastChk = now; if (!segValid) { discoWrk.addEvent(EVT_NODE_SEGMENTED, 0, getSpi().getLocalNode(), Collections.<ClusterNode>emptyList(), null); lastSegChkRes.set(false); } if (log.isDebugEnabled()) log.debug("Segment has been checked [requested=" + (req != null) + ", valid=" + segValid + ']'); } } } /** {@inheritDoc} */ @Override public String toString() { return S.toString(SegmentCheckWorker.class, this); } } /** Worker for discovery events. */ private class DiscoveryWorker extends GridWorker { /** Event queue. */ private final BlockingQueue<GridTuple5<Integer, Long, ClusterNode, Collection<ClusterNode>, Serializable>> evts = new LinkedBlockingQueue<>(); /** Node segmented event fired flag. 
*/ private boolean nodeSegFired; /** * */ private DiscoveryWorker() { super(ctx.gridName(), "disco-event-worker", log); } /** * Method is called when any discovery event occurs. * * @param type Discovery event type. See {@link org.apache.ignite.events.DiscoveryEvent} for more details. * @param topVer Topology version. * @param node Remote node this event is connected with. * @param topSnapshot Topology snapshot. */ private void recordEvent(int type, long topVer, ClusterNode node, Collection<ClusterNode> topSnapshot) { assert node != null; if (ctx.event().isRecordable(type)) { DiscoveryEvent evt = new DiscoveryEvent(); evt.node(ctx.discovery().localNode()); evt.eventNode(node); evt.type(type); evt.topologySnapshot(topVer, U.<ClusterNode, ClusterNode>arrayList(topSnapshot, daemonFilter)); if (type == EVT_NODE_METRICS_UPDATED) evt.message("Metrics were updated: " + node); else if (type == EVT_NODE_JOINED) evt.message("Node joined: " + node); else if (type == EVT_NODE_LEFT) evt.message("Node left: " + node); else if (type == EVT_NODE_FAILED) evt.message("Node failed: " + node); else if (type == EVT_NODE_SEGMENTED) evt.message("Node segmented: " + node); else assert false; ctx.event().record(evt); } } /** * @param type Event type. * @param topVer Topology version. * @param node Node. * @param topSnapshot Topology snapshot. * @param data Custom event data. */ void addEvent( int type, long topVer, ClusterNode node, Collection<ClusterNode> topSnapshot, @Nullable Serializable data ) { assert node != null; evts.add(F.t(type, topVer, node, topSnapshot, data)); } /** * @param node Node to get a short description for. * @return Short description for the node to be used in 'quiet' mode. */ private String quietNode(ClusterNode node) { assert node != null; return "nodeId8=" + node.id().toString().substring(0, 8) + ", " + "addrs=" + U.addressesAsString(node) + ", " + "order=" + node.order() + ", " + "CPUs=" + node.metrics().getTotalCpus(); } /** {@inheritDoc} */ @Override protected void body() throws InterruptedException { while (!isCancelled()) { try { body0(); } catch (InterruptedException e) { throw e; } catch (Throwable t) { U.error(log, "Unexpected exception in discovery worker thread (ignored).", t); } } } /** @throws InterruptedException If interrupted. */ @SuppressWarnings("DuplicateCondition") private void body0() throws InterruptedException { GridTuple5<Integer, Long, ClusterNode, Collection<ClusterNode>, Serializable> evt = evts.take(); int type = evt.get1(); long topVer = evt.get2(); ClusterNode node = evt.get3(); boolean isDaemon = node.isDaemon(); boolean segmented = false; switch (type) { case EVT_NODE_JOINED: { assert !discoOrdered || topVer == node.order() : "Invalid topology version [topVer=" + topVer + ", node=" + node + ']'; try { checkAttributes(F.asList(node)); } catch (IgniteCheckedException e) { U.warn(log, e.getMessage()); // We have a well-formed attribute warning here. } if (!isDaemon) { if (!isLocDaemon) { if (log.isInfoEnabled()) log.info("Added new node to topology: " + node); ackTopology(topVer, true); } else if (log.isDebugEnabled()) log.debug("Added new node to topology: " + node); } else if (log.isDebugEnabled()) log.debug("Added new daemon node to topology: " + node); break; } case EVT_NODE_LEFT: { // Check only if resolvers were configured.
if (hasRslvrs) segChkWrk.scheduleSegmentCheck(); if (!isDaemon) { if (!isLocDaemon) { if (log.isInfoEnabled()) log.info("Node left topology: " + node); ackTopology(topVer, true); } else if (log.isDebugEnabled()) log.debug("Node left topology: " + node); } else if (log.isDebugEnabled()) log.debug("Daemon node left topology: " + node); break; } case EVT_NODE_FAILED: { // Check only if resolvers were configured. if (hasRslvrs) segChkWrk.scheduleSegmentCheck(); if (!isDaemon) { if (!isLocDaemon) { U.warn(log, "Node FAILED: " + node); ackTopology(topVer, true); } else if (log.isDebugEnabled()) log.debug("Node FAILED: " + node); } else if (log.isDebugEnabled()) log.debug("Daemon node FAILED: " + node); break; } case EVT_NODE_SEGMENTED: { assert F.eqNodes(localNode(), node); if (nodeSegFired) { if (log.isDebugEnabled()) { log.debug("Ignored node segmented event [type=EVT_NODE_SEGMENTED, " + "node=" + node + ']'); } return; } // Ignore all further EVT_NODE_SEGMENTED events // until EVT_NODE_RECONNECTED is fired. nodeSegFired = true; lastLoggedTop.set(0); segmented = true; if (!isLocDaemon) U.warn(log, "Local node SEGMENTED: " + node); else if (log.isDebugEnabled()) log.debug("Local node SEGMENTED: " + node); break; } case DiscoveryCustomEvent.EVT_DISCOVERY_CUSTOM_EVT: { DiscoveryCustomEvent customEvt = new DiscoveryCustomEvent(); customEvt.node(ctx.discovery().localNode()); customEvt.eventNode(node); customEvt.type(type); customEvt.topologySnapshot(topVer, null); customEvt.data(evt.get5()); assert ctx.event().isRecordable(DiscoveryCustomEvent.EVT_DISCOVERY_CUSTOM_EVT); ctx.event().record(customEvt); return; } // Don't log metric update to avoid flooding the log. case EVT_NODE_METRICS_UPDATED: break; default: assert false : "Invalid discovery event: " + type; } recordEvent(type, topVer, node, evt.get4()); if (segmented) onSegmentation(); } /** * */ private void onSegmentation() { GridSegmentationPolicy segPlc = ctx.config().getSegmentationPolicy(); // Always disconnect first. try { getSpi().disconnect(); } catch (IgniteSpiException e) { U.error(log, "Failed to disconnect discovery SPI.", e); } switch (segPlc) { case RESTART_JVM: U.warn(log, "Restarting JVM according to configured segmentation policy."); restartJvm(); break; case STOP: U.warn(log, "Stopping local node according to configured segmentation policy."); stopNode(); break; default: assert segPlc == NOOP : "Unsupported segmentation policy value: " + segPlc; } } /** {@inheritDoc} */ @Override public String toString() { return S.toString(DiscoveryWorker.class, this); } } /** * */ private class MetricsUpdater extends GridWorker { /** */ private long prevGcTime = -1; /** */ private long prevCpuTime = -1; /** * */ private MetricsUpdater() { super(ctx.gridName(), "metrics-updater", log); } /** {@inheritDoc} */ @Override protected void body() throws IgniteInterruptedCheckedException { while (!isCancelled()) { U.sleep(METRICS_UPDATE_FREQ); gcCpuLoad = getGcCpuLoad(); cpuLoad = getCpuLoad(); } } /** * @return GC CPU load. */ private double getGcCpuLoad() { long gcTime = 0; for (GarbageCollectorMXBean bean : gc) { long colTime = bean.getCollectionTime(); if (colTime > 0) gcTime += colTime; } gcTime /= metrics.getAvailableProcessors(); double gc = 0; if (prevGcTime > 0) { long gcTimeDiff = gcTime - prevGcTime; gc = (double)gcTimeDiff / METRICS_UPDATE_FREQ; } prevGcTime = gcTime; return gc; } /** * @return CPU load. 
*/ private double getCpuLoad() { long cpuTime; try { cpuTime = U.<Long>property(os, "processCpuTime"); } catch (IgniteException ignored) { return -1; } // Method reports time in nanoseconds across all processors. cpuTime /= 1000000 * metrics.getAvailableProcessors(); double cpu = 0; if (prevCpuTime > 0) { long cpuTimeDiff = cpuTime - prevCpuTime; // CPU load could go higher than 100% because calculating of cpuTimeDiff also takes some time. cpu = Math.min(1.0, (double)cpuTimeDiff / METRICS_UPDATE_FREQ); } prevCpuTime = cpuTime; return cpu; } /** {@inheritDoc} */ @Override public String toString() { return S.toString(MetricsUpdater.class, this, super.toString()); } } /** Discovery topology future. */ private static class DiscoTopologyFuture extends GridFutureAdapter<Long> implements GridLocalEventListener { /** */ private static final long serialVersionUID = 0L; /** Topology await version. */ private long awaitVer; /** Empty constructor required by {@link Externalizable}. */ public DiscoTopologyFuture() { // No-op. } /** * @param ctx Context. * @param awaitVer Await version. */ private DiscoTopologyFuture(GridKernalContext ctx, long awaitVer) { super(ctx); this.awaitVer = awaitVer; } /** Initializes future. */ private void init() { ctx.event().addLocalEventListener(this, EVT_NODE_JOINED, EVT_NODE_LEFT, EVT_NODE_FAILED); // Close potential window. long topVer = ctx.discovery().topologyVersion(); if (topVer >= awaitVer) onDone(topVer); } /** {@inheritDoc} */ @Override public boolean onDone(@Nullable Long res, @Nullable Throwable err) { if (super.onDone(res, err)) { ctx.event().removeLocalEventListener(this, EVT_NODE_JOINED, EVT_NODE_LEFT, EVT_NODE_FAILED); return true; } return false; } /** {@inheritDoc} */ @Override public void onEvent(Event evt) { assert evt.type() == EVT_NODE_JOINED || evt.type() == EVT_NODE_LEFT || evt.type() == EVT_NODE_FAILED; DiscoveryEvent discoEvt = (DiscoveryEvent)evt; if (discoEvt.topologyVersion() >= awaitVer) onDone(discoEvt.topologyVersion()); } } /** Cache for discovery collections. */ private class DiscoCache { /** Remote nodes. */ private final List<ClusterNode> rmtNodes; /** All nodes. */ private final List<ClusterNode> allNodes; /** All nodes with at least one cache configured. */ private final Collection<ClusterNode> allNodesWithCaches; /** All nodes with at least one cache configured. */ private final Collection<ClusterNode> rmtNodesWithCaches; /** Cache nodes by cache name. */ private final Map<String, Collection<ClusterNode>> allCacheNodes; /** Remote cache nodes by cache name. */ private final Map<String, Collection<ClusterNode>> rmtCacheNodes; /** Cache nodes by cache name. */ private final Map<String, Collection<ClusterNode>> affCacheNodes; /** Caches where at least one node has near cache enabled. */ private final Set<String> nearEnabledCaches; /** Nodes grouped by version. */ private final NavigableMap<IgniteProductVersion, Collection<ClusterNode>> nodesByVer; /** Daemon nodes. */ private final List<ClusterNode> daemonNodes; /** Node map. */ private final Map<UUID, ClusterNode> nodeMap; /** Local node. */ private final ClusterNode loc; /** Highest node order. */ private final long maxOrder; /** * Cached alive nodes list. As long as this collection doesn't accept {@code null}s use {@link * #maskNull(String)} before passing raw cache names to it. */ private final ConcurrentMap<String, Collection<ClusterNode>> aliveCacheNodes; /** * Cached alive remote nodes list. 
As long as this collection doesn't accept {@code null}s use {@link * #maskNull(String)} before passing raw cache names to it. */ private final ConcurrentMap<String, Collection<ClusterNode>> aliveRmtCacheNodes; /** * Cached alive nodes with caches. */ private final Collection<ClusterNode> aliveNodesWithCaches; /** * Cached alive remote nodes with caches. */ private final Collection<ClusterNode> aliveRmtNodesWithCaches; /** * @param loc Local node. * @param rmts Remote nodes. */ private DiscoCache(ClusterNode loc, Collection<ClusterNode> rmts) { this.loc = loc; rmtNodes = Collections.unmodifiableList(new ArrayList<>(F.view(rmts, daemonFilter))); assert !rmtNodes.contains(loc) : "Remote nodes collection shouldn't contain local node" + " [rmtNodes=" + rmtNodes + ", loc=" + loc + ']'; List<ClusterNode> all = new ArrayList<>(rmtNodes.size() + 1); if (!loc.isDaemon()) all.add(loc); all.addAll(rmtNodes); allNodes = Collections.unmodifiableList(all); Map<String, Collection<ClusterNode>> cacheMap = new HashMap<>(allNodes.size(), 1.0f); Map<String, Collection<ClusterNode>> rmtCacheMap = new HashMap<>(allNodes.size(), 1.0f); Map<String, Collection<ClusterNode>> dhtNodesMap = new HashMap<>(allNodes.size(), 1.0f); Collection<ClusterNode> nodesWithCaches = new ArrayList<>(allNodes.size()); Collection<ClusterNode> rmtNodesWithCaches = new ArrayList<>(allNodes.size()); aliveCacheNodes = new ConcurrentHashMap8<>(allNodes.size(), 1.0f); aliveRmtCacheNodes = new ConcurrentHashMap8<>(allNodes.size(), 1.0f); aliveNodesWithCaches = new ConcurrentSkipListSet<>(); aliveRmtNodesWithCaches = new ConcurrentSkipListSet<>(); nodesByVer = new TreeMap<>(); long maxOrder0 = 0; Set<String> nearEnabledSet = new HashSet<>(); for (ClusterNode node : allNodes) { assert node.order() != 0 : "Invalid node order [locNode=" + loc + ", node=" + node + ']'; if (node.order() > maxOrder0) maxOrder0 = node.order(); GridCacheAttributes[] caches = node.attribute(ATTR_CACHE); if (caches != null) { nodesWithCaches.add(node); if (!loc.id().equals(node.id())) rmtNodesWithCaches.add(node); for (GridCacheAttributes attrs : caches) { addToMap(cacheMap, attrs.cacheName(), node); if (alive(node.id())) addToMap(aliveCacheNodes, maskNull(attrs.cacheName()), node); if (attrs.isAffinityNode()) addToMap(dhtNodesMap, attrs.cacheName(), node); if (attrs.nearCacheEnabled()) nearEnabledSet.add(attrs.cacheName()); if (!loc.id().equals(node.id())) { addToMap(rmtCacheMap, attrs.cacheName(), node); if (alive(node.id())) addToMap(aliveRmtCacheNodes, maskNull(attrs.cacheName()), node); } } if (alive(node.id())) { aliveNodesWithCaches.add(node); if (!loc.id().equals(node.id())) aliveRmtNodesWithCaches.add(node); } } IgniteProductVersion nodeVer = U.productVersion(node); // Create collection for this version if it does not exist. Collection<ClusterNode> nodes = nodesByVer.get(nodeVer); if (nodes == null) { nodes = new ArrayList<>(allNodes.size()); nodesByVer.put(nodeVer, nodes); } nodes.add(node); } // Need second iteration to add this node to all previous node versions. for (ClusterNode node : allNodes) { IgniteProductVersion nodeVer = U.productVersion(node); // Get all versions lower than node's version.
NavigableMap<IgniteProductVersion, Collection<ClusterNode>> updateView = nodesByVer.headMap(nodeVer, false); for (Collection<ClusterNode> prevVersions : updateView.values()) prevVersions.add(node); } maxOrder = maxOrder0; allCacheNodes = Collections.unmodifiableMap(cacheMap); rmtCacheNodes = Collections.unmodifiableMap(rmtCacheMap); affCacheNodes = Collections.unmodifiableMap(dhtNodesMap); allNodesWithCaches = Collections.unmodifiableCollection(nodesWithCaches); this.rmtNodesWithCaches = Collections.unmodifiableCollection(rmtNodesWithCaches); nearEnabledCaches = Collections.unmodifiableSet(nearEnabledSet); daemonNodes = Collections.unmodifiableList(new ArrayList<>( F.view(F.concat(false, loc, rmts), F0.not(daemonFilter)))); Map<UUID, ClusterNode> nodeMap = new HashMap<>(allNodes().size() + daemonNodes.size(), 1.0f); for (ClusterNode n : F.concat(false, allNodes(), daemonNodes())) nodeMap.put(n.id(), n); this.nodeMap = nodeMap; } /** * Adds node to map. * * @param cacheMap Map to add to. * @param cacheName Cache name. * @param rich Node to add */ private void addToMap(Map<String, Collection<ClusterNode>> cacheMap, String cacheName, ClusterNode rich) { Collection<ClusterNode> cacheNodes = cacheMap.get(cacheName); if (cacheNodes == null) { cacheNodes = new ArrayList<>(allNodes.size()); cacheMap.put(cacheName, cacheNodes); } cacheNodes.add(rich); } /** @return Local node. */ ClusterNode localNode() { return loc; } /** @return Remote nodes. */ Collection<ClusterNode> remoteNodes() { return rmtNodes; } /** @return All nodes. */ Collection<ClusterNode> allNodes() { return allNodes; } /** * @return All nodes with at least one cache configured. */ Collection<ClusterNode> allNodesWithCaches() { return allNodesWithCaches; } /** * Gets collection of nodes which have version equal or greater than {@code ver}. * * @param ver Version to check. * @return Collection of nodes with version equal or greater than {@code ver}. */ Collection<ClusterNode> elderNodes(IgniteProductVersion ver) { Map.Entry<IgniteProductVersion, Collection<ClusterNode>> entry = nodesByVer.ceilingEntry(ver); if (entry == null) return Collections.emptyList(); return entry.getValue(); } /** * @return Versions map. */ NavigableMap<IgniteProductVersion, Collection<ClusterNode>> versionsMap() { return nodesByVer; } /** * Gets collection of nodes with at least one cache configured. * * @param topVer Topology version (maximum allowed node order). * @return Collection of nodes. */ Collection<ClusterNode> allNodesWithCaches(final long topVer) { return filter(topVer, allNodesWithCaches); } /** * Gets all nodes that have cache with given name. * * @param cacheName Cache name. * @param topVer Topology version. * @return Collection of nodes. */ Collection<ClusterNode> cacheNodes(@Nullable String cacheName, final long topVer) { return filter(topVer, allCacheNodes.get(cacheName)); } /** * Gets all remote nodes that have cache with given name. * * @param cacheName Cache name. * @param topVer Topology version. * @return Collection of nodes. */ Collection<ClusterNode> remoteCacheNodes(@Nullable String cacheName, final long topVer) { return filter(topVer, rmtCacheNodes.get(cacheName)); } /** * Gets all remote nodes that have at least one cache configured. * * @param topVer Topology version. * @return Collection of nodes. */ Collection<ClusterNode> remoteCacheNodes(final long topVer) { return filter(topVer, rmtNodesWithCaches); } /** * Gets all nodes that have cache with given name and should participate in affinity calculation. 
For a partitioned cache, nodes with a near-only cache do not participate in affinity node calculation. * * @param cacheName Cache name. * @param topVer Topology version. * @return Collection of nodes. */ Collection<ClusterNode> cacheAffinityNodes(@Nullable String cacheName, final long topVer) { return filter(topVer, affCacheNodes.get(cacheName)); } /** * Gets all alive nodes that have cache with given name. * * @param cacheName Cache name. * @param topVer Topology version. * @return Collection of nodes. */ Collection<ClusterNode> aliveCacheNodes(@Nullable String cacheName, final long topVer) { return filter(topVer, aliveCacheNodes.get(maskNull(cacheName))); } /** * Gets all alive remote nodes that have cache with given name. * * @param cacheName Cache name. * @param topVer Topology version. * @return Collection of nodes. */ Collection<ClusterNode> aliveRemoteCacheNodes(@Nullable String cacheName, final long topVer) { return filter(topVer, aliveRmtCacheNodes.get(maskNull(cacheName))); } /** * Gets all alive remote nodes with at least one cache configured. * * @param topVer Topology version. * @return Collection of nodes. */ Collection<ClusterNode> aliveRemoteNodesWithCaches(final long topVer) { return filter(topVer, aliveRmtNodesWithCaches); } /** * Gets all alive nodes with at least one cache configured. * * @param topVer Topology version. * @return Collection of nodes. */ Collection<ClusterNode> aliveNodesWithCaches(final long topVer) { return filter(topVer, aliveNodesWithCaches); } /** * Checks if cache with given name has at least one node with near cache enabled. * * @param cacheName Cache name. * @return {@code True} if cache with given name has at least one node with near cache enabled. */ boolean hasNearCache(@Nullable String cacheName) { return nearEnabledCaches.contains(cacheName); } /** * Removes left node from cached alives lists. * * @param leftNode Left node. */ void updateAlives(ClusterNode leftNode) { if (leftNode.order() > maxOrder) return; filterNodeMap(aliveCacheNodes, leftNode); filterNodeMap(aliveRmtCacheNodes, leftNode); aliveNodesWithCaches.remove(leftNode); aliveRmtNodesWithCaches.remove(leftNode); } /** * Creates a copy of nodes map without the given node. * * @param map Map to copy. * @param exclNode Node to exclude. */ private void filterNodeMap(ConcurrentMap<String, Collection<ClusterNode>> map, final ClusterNode exclNode) { for (String cacheName : U.cacheNames(exclNode)) { String maskedName = maskNull(cacheName); while (true) { Collection<ClusterNode> oldNodes = map.get(maskedName); if (oldNodes == null || oldNodes.isEmpty()) break; Collection<ClusterNode> newNodes = new ArrayList<>(oldNodes); if (!newNodes.remove(exclNode)) break; if (map.replace(maskedName, oldNodes, newNodes)) break; } } } /** * Replaces {@code null} with {@code NULL_CACHE_NAME}. * * @param cacheName Cache name. * @return Masked name. */ private String maskNull(@Nullable String cacheName) { return cacheName == null ? NULL_CACHE_NAME : cacheName; } /** * @param topVer Topology version. * @param nodes Nodes. * @return Filtered collection (potentially empty, but never {@code null}). */ private Collection<ClusterNode> filter(final long topVer, @Nullable Collection<ClusterNode> nodes) { if (nodes == null) return Collections.emptyList(); // If no filtering needed, return original collection. return nodes.isEmpty() || topVer < 0 || topVer >= maxOrder ?
nodes : F.view(nodes, new P1<ClusterNode>() { @Override public boolean apply(ClusterNode node) { return node.order() <= topVer; } }); } /** @return Daemon nodes. */ Collection<ClusterNode> daemonNodes() { return daemonNodes; } /** * @param id Node ID. * @return Node. */ @Nullable ClusterNode node(UUID id) { return nodeMap.get(id); } /** {@inheritDoc} */ @Override public String toString() { return S.toString(DiscoCache.class, this, "allNodesWithDaemons", U.toShortString(allNodes)); } } }
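// Illustrative aside, not part of the committed file above: a minimal, self-contained
// sketch of the order-independent topology hash computed by topologyHash(...) -- a CRC32
// over the sorted string forms of the node IDs, so the result does not depend on the
// iteration order of the node collection. Class and variable names are made up for the
// example; node IDs are modeled as plain java.util.UUID values.
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
import java.util.zip.CRC32;

public class TopologyHashSketch {
    /** Computes an order-independent hash for a set of node IDs, mirroring the logic above. */
    static long topologyHash(Iterable<UUID> nodeIds) {
        List<String> uids = new ArrayList<>();

        for (UUID id : nodeIds)
            uids.add(id.toString());

        // Sorting makes the hash independent of the order nodes are supplied in.
        Collections.sort(uids);

        CRC32 hash = new CRC32();

        for (String uid : uids)
            hash.update(uid.getBytes());

        return hash.getValue();
    }

    public static void main(String[] args) {
        UUID a = UUID.randomUUID();
        UUID b = UUID.randomUUID();

        // The same node set in a different order yields the same hash.
        System.out.println(topologyHash(Arrays.asList(a, b)) == topologyHash(Arrays.asList(b, a))); // true
    }
}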
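// Illustrative aside, not part of the committed file above: the CPU load produced by
// MetricsUpdater is the per-processor process CPU time consumed during one update
// interval, expressed as a fraction of that interval and clamped to 1.0. A minimal
// sketch of that arithmetic with hypothetical sample values (the update frequency and
// processor count below are assumptions for the example):
public class CpuLoadSketch {
    public static void main(String[] args) {
        long updateFreqMs = 2000;              // assumed METRICS_UPDATE_FREQ, in milliseconds
        int procs = 4;                         // assumed number of available processors

        long prevCpuTimeNs = 10_000_000_000L;  // hypothetical process CPU time at previous tick
        long curCpuTimeNs = 14_000_000_000L;   // hypothetical process CPU time at current tick

        // The OS bean reports nanoseconds summed across all processors; convert to
        // per-processor milliseconds, as the worker above does via "cpuTime /= 1000000 * procs".
        long prevMs = prevCpuTimeNs / (1_000_000L * procs);
        long curMs = curCpuTimeNs / (1_000_000L * procs);

        // Load is per-processor CPU milliseconds spent per wall-clock millisecond of the
        // interval, clamped because measuring the diff itself takes a little time.
        double cpu = Math.min(1.0, (double)(curMs - prevMs) / updateFreqMs);

        System.out.println(cpu); // 0.5 for these sample values
    }
}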
# IGNITE-187 Minor TODO cleanup.
modules/core/src/main/java/org/apache/ignite/internal/managers/discovery/GridDiscoveryManager.java
# IGNITE-187 Minor TODO cleanup.
Java
apache-2.0
57b216da68eb9752847e6dc416477e2f7e640c12
0
shreejay/uadetector,zpzgone/uadetector,before/uadetector
/******************************************************************************* * Copyright 2012 André Rouél * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package net.sf.uadetector.service; import java.net.URL; import net.sf.uadetector.UserAgentStringParser; import net.sf.uadetector.datareader.DataReader; import net.sf.uadetector.datareader.XmlDataReader; import net.sf.uadetector.datastore.AbstractDataStore; import net.sf.uadetector.datastore.OnlineXmlDataStore; import net.sf.uadetector.parser.UpdatingUserAgentStringParserImpl; import net.sf.uadetector.parser.UserAgentStringParserImpl; /** * Service factory to get preconfigured instances of {@code UserAgentStringParser} implementations. * * @author André Rouél */ public final class UADetectorServiceFactory { /** * Holder to load the parser only when it's needed. */ private static final class OnlineUpdatingParserHolder { private static UserAgentStringParser INSTANCE = new UpdatingUserAgentStringParserImpl(new OnlineXmlDataStore( RESOURCE_MODULE.getData())); } /** * A simple implementation to store <em>UAS data</em> delivered in this module (called <em>uadetector-resource</em>) * only in the heap space. * * @author André Rouél */ public static final class ResourceModuleXmlDataStore extends AbstractDataStore { /** * The default data reader to read in <em>UAS data</em> in XML format */ private static final DataReader DEFAULT_DATA_READER = new XmlDataReader(); /** * Path where the UAS data file is stored for the {@code ClassLoader} */ private static final String PATH = "net/sf/uadetector/resources"; /** * {@link URL} to the UAS data delivered in this module */ public static final URL UAS_DATA = ResourceModuleXmlDataStore.class.getClassLoader().getResource(PATH + "/uas.xml"); /** * {@link URL} to the version information of the delivered UAS data in this module */ public static final URL UAS_VERSION = ResourceModuleXmlDataStore.class.getClassLoader().getResource(PATH + "/uas.version"); /** * Constructs a {@code ResourceModuleXmlDataStore} by reading <em>UAS data</em> (in XML format) from the URL * {@link #UAS_DATA}. */ public ResourceModuleXmlDataStore() { super(DEFAULT_DATA_READER, UAS_DATA, UAS_VERSION, DEFAULT_CHARSET); } } /** * Data store filled with the <em>UAS data</em> that are shipped with this module (JAR) */ private static final ResourceModuleXmlDataStore RESOURCE_MODULE = new ResourceModuleXmlDataStore(); /** * {@link UserAgentStringParser} filled with the <em>UAS data</em> that are shipped with this module (JAR) */ private static final UserAgentStringParser RESOURCE_MODULE_PARSER = new UserAgentStringParserImpl<ResourceModuleXmlDataStore>( RESOURCE_MODULE); /** * Returns an implementation of {@link UserAgentStringParser} which checks at regular intervals for new versions of * <em>UAS data</em> (also known as database). When newer data is available, the parser automatically loads and applies it. * * <p> * At initialization time the returned parser will be loaded with the <em>UAS data</em> of this module (the shipped * one within the <em>uadetector-resources</em> JAR) and tries to update it. The initialization is started only when * this method is called the first time. * * <p> * The static class definition {@link OnlineUpdatingParserHolder} within this factory class is <em>not</em> * initialized until the JVM determines that {@code OnlineUpdatingParserHolder} must be executed. The static class * {@code OnlineUpdatingParserHolder} is only executed when the static method {@code getOnlineUpdatingParser} * is invoked on the class {@code UADetectorServiceFactory}, and the first time this happens the JVM will load and * initialize the {@code OnlineUpdatingParserHolder} class. * * <p> * If during the operation the Internet connection gets lost, then this instance continues to work properly (and * with suitable log level settings you will get corresponding log messages). * * @return a user agent string parser with updating service */ public static UserAgentStringParser getOnlineUpdatingParser() { return OnlineUpdatingParserHolder.INSTANCE; } /** * Returns an implementation of {@link UserAgentStringParser} with no updating functions. It will be loaded by using * the shipped <em>UAS data</em> (also known as database) of this module. The database is loaded once during * initialization. The initialization is started at class loading of this class ({@code UADetectorServiceFactory}). * * @return a user agent string parser without updating service */ public static UserAgentStringParser getResourceModuleParser() { return RESOURCE_MODULE_PARSER; } private UADetectorServiceFactory() { // This class is not intended to be instantiated. } }
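// Illustrative aside, not part of the committed file above: a minimal caller-side sketch
// of the factory. It assumes UserAgentStringParser exposes parse(String) returning a
// ReadableUserAgent, as in contemporary releases of the library; the class name and
// user-agent string are made up for the example.
import net.sf.uadetector.ReadableUserAgent;
import net.sf.uadetector.UserAgentStringParser;
import net.sf.uadetector.service.UADetectorServiceFactory;

public class UADetectorUsageSketch {
    public static void main(String[] args) {
        // Offline parser backed by the UAS data shipped in the resources JAR; swap in
        // getOnlineUpdatingParser() for the self-updating variant.
        UserAgentStringParser parser = UADetectorServiceFactory.getResourceModuleParser();

        ReadableUserAgent agent = parser.parse(
            "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.17 Safari/537.36");

        System.out.println(agent.getName());            // detected browser family
        System.out.println(agent.getOperatingSystem()); // detected operating system
    }
}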
modules/uadetector-resources/src/main/java/net/sf/uadetector/service/UADetectorServiceFactory.java
/******************************************************************************* * Copyright 2012 André Rouél * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package net.sf.uadetector.service; import java.net.URL; import net.sf.uadetector.UserAgentStringParser; import net.sf.uadetector.datareader.DataReader; import net.sf.uadetector.datareader.XmlDataReader; import net.sf.uadetector.datastore.AbstractDataStore; import net.sf.uadetector.datastore.OnlineXmlDataStore; import net.sf.uadetector.parser.UpdatingUserAgentStringParserImpl; import net.sf.uadetector.parser.UserAgentStringParserImpl; /** * Service factory to get preconfigured instances of {@code UserAgentStringParser} implementations. * * @author André Rouél */ public final class UADetectorServiceFactory { /** * Holder to load the parser only when it's needed. */ private static final class OnlineUpdatingParserHolder { private static UserAgentStringParser INSTANCE = new UpdatingUserAgentStringParserImpl(new OnlineXmlDataStore( RESOURCE_MODULE.getData())); } /** * Holder to load the parser only when it's needed. */ private static final class ResourceModuleParserHolder { private static UserAgentStringParser INSTANCE = new UserAgentStringParserImpl<ResourceModuleXmlDataStore>(RESOURCE_MODULE); } /** * A simple implementation to store <em>UAS data</em> delivered in this module (called <em>uadetector-resource</em>) * only in the heap space. * * @author André Rouél */ public static final class ResourceModuleXmlDataStore extends AbstractDataStore { /** * The default data reader to read in <em>UAS data</em> in XML format */ private static final DataReader DEFAULT_DATA_READER = new XmlDataReader(); /** * Path where the UAS data file is stored for the {@code ClassLoader} */ private static final String PATH = "net/sf/uadetector/resources"; /** * {@link URL} to the UAS data delivered in this module */ public static final URL UAS_DATA = ResourceModuleXmlDataStore.class.getClassLoader().getResource(PATH + "/uas.xml"); /** * {@link URL} to the version information of the delivered UAS data in this module */ public static final URL UAS_VERSION = ResourceModuleXmlDataStore.class.getClassLoader().getResource(PATH + "/uas.version"); /** * Constructs an {@code ResourceModuleXmlDataStore} by reading <em>UAS data</em> by the specified URL * {@link UADetectorServiceFactory#UAS_DATA} (in XML format). */ public ResourceModuleXmlDataStore() { super(DEFAULT_DATA_READER, UAS_DATA, UAS_VERSION, DEFAULT_CHARSET); } } /** * Data store filled with the <em>UAS data</em> that are shipped with this module (JAR) */ private static final ResourceModuleXmlDataStore RESOURCE_MODULE = new ResourceModuleXmlDataStore(); /** * Returns an implementation of {@link UserAgentStringParser} which checks at regular intervals for new versions of * <em>UAS data</em> (also known as database). When newer data available, it automatically loads and updates it. 
* * <p> * At initialization time the returned parser will be loaded with the <em>UAS data</em> of this module (the shipped * one within the <em>uadetector-resources</em> JAR) and tries to update it. The initialization is started only when * this method is called the first time. * * <p> * The static class definition {@link UpdatingUserAgentStringParserHolder} within this factory class is <em>not</em> * initialized until the JVM determines that {@code UpdatingUserAgentStringParserHolder} must be executed. The * static class {@code UpdatingUserAgentStringParserHolder} is only executed when the static method * {@code getOnlineUserAgentStringParser} is invoked on the class {@code UADetectorServiceFactory}, and the first * time this happens the JVM will load and initialize the {@code UpdatingUserAgentStringParserHolder} class. * * <p> * If during the operation the Internet connection gets lost, then this instance continues to work properly (and * under correct log level settings you will get an corresponding log messages). * * @return an user agent string parser with updating service */ public static UserAgentStringParser getOnlineUpdatingParser() { return OnlineUpdatingParserHolder.INSTANCE; } /** * Gets always the same implementation instance of the interface {@code UserAgentStringParser}. This instance has an * update function so that it checks at regular intervals for new versions of the <em>UAS data</em> (also known as * database). When a newer database is available, it is automatically loaded and updated.<br> * <br> * At initialization time the instance will be loaded with the <em>UAS data</em> of this module (the shipped one * within the <em>uadetector-resources</em> JAR). The initialization is started only when this method is called the * first time.<br> * <br> * The static class definition {@link UpdatingUserAgentStringParserHolder} within this factory class is <em>not</em> * initialized until the JVM determines that {@link UpdatingUserAgentStringParserHolder} must be executed. The * static class {@code UpdatingUserAgentStringParserHolder} is only executed when the static method * {@code getOnlineUserAgentStringParser} is invoked on the class {@code UADetectorServiceFactory}, and the first * time this happens the JVM will load and initialize the {@code UpdatingUserAgentStringParserHolder} class.<br> * <br> * If during the operation the Internet connection gets lost, then this instance continues to work properly (and * under correct log level settings you will get an corresponding log messages). * * @return always the same implementation instance of the interface {@code UserAgentStringParser} and never * {@code null} */ @Deprecated public static UserAgentStringParser getOnlineUserAgentStringParser() { return OnlineUpdatingParserHolder.INSTANCE; } /** * Returns an implementation of {@link UserAgentStringParser} with no updating functions. It will be loaded by using * the shipped <em>UAS data</em> (also known as database) of this module. The database is loaded once during * initialization. The initialization is started only when this method is called the first time. * * <p> * The static class definition {@link ResourceModuleParserHolder} within this factory class is <em>not</em> * initialized until the JVM determines that {@code ResourceModuleParserHolder} must be executed. 
The static class * {@code UpdatingUserAgentStringParserHolder} is only executed when the static method * {@code getOnlineUserAgentStringParser} is invoked on the class {@code UADetectorServiceFactory}, and the first * time this happens the JVM will load and initialize the {@code ResourceModuleParserHolder} class. * * <p> * If during the operation the Internet connection gets lost, then this instance continues to work properly (and * under correct log level settings you will get an corresponding log messages). * * @return an user agent string parser without updating service */ public static UserAgentStringParser getResourceModuleParser() { return ResourceModuleParserHolder.INSTANCE; } /** * Gets always the same implementation instance of the interface {@code UserAgentStringParser}. This instance works * offline by using the <em>UAS data</em> (also known as database) of this module. The database is loaded once * during initialization. The initialization is started only when this method is called the first time.<br> * <br> * The static class definition {@link ResourceModuleParserHolder} within this factory class is <em>not</em> * initialized until the JVM determines that {@link ResourceModuleParserHolder} must be executed. The static class * {@code OfflineUserAgentStringParserHolder} is only executed when the static method * {@code getUserAgentStringParser} is invoked on the class {@code UADetectorServiceFactory}, and the first time * this happens the JVM will load and initialize the {@code OfflineUserAgentStringParserHolder} class. * * @return always the same implementation instance of the interface {@code UserAgentStringParser} and never * {@code null} */ @Deprecated public static UserAgentStringParser getUserAgentStringParser() { return ResourceModuleParserHolder.INSTANCE; } private UADetectorServiceFactory() { // This class is not intended to create objects from it. } }
Removed deprecated methods and lazy-loading of 'ResourceModuleParser'
modules/uadetector-resources/src/main/java/net/sf/uadetector/service/UADetectorServiceFactory.java
Removed deprecated methods and lazy-loading of 'ResourceModuleParser'
Java
apache-2.0
5a6ac30cbcbfdf54f04341aab28d8500b3537e00
0
blackducksoftware/hub-jira,blackducksoftware/hub-jira,blackducksoftware/hub-jira
/**
 * Black Duck JIRA Plugin
 *
 * Copyright (C) 2019 Black Duck Software, Inc.
 * http://www.blackducksoftware.com/
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package com.blackducksoftware.integration.jira.common;

import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;

import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;

import com.blackducksoftware.integration.jira.common.blackduck.BlackDuckConnectionHelper;
import com.blackducksoftware.integration.jira.common.model.BlackDuckProjectMapping;
import com.blackducksoftware.integration.jira.common.settings.GlobalConfigurationAccessor;
import com.blackducksoftware.integration.jira.common.settings.PluginErrorAccessor;
import com.blackducksoftware.integration.jira.common.settings.model.PluginBlackDuckServerConfigModel;
import com.blackducksoftware.integration.jira.config.model.BlackDuckJiraConfigSerializable;
import com.synopsys.integration.blackduck.api.generated.component.AssignedUserRequest;
import com.synopsys.integration.blackduck.api.generated.discovery.ApiDiscovery;
import com.synopsys.integration.blackduck.api.generated.response.AssignedProjectView;
import com.synopsys.integration.blackduck.api.generated.view.ProjectView;
import com.synopsys.integration.blackduck.api.generated.view.UserView;
import com.synopsys.integration.blackduck.service.BlackDuckService;
import com.synopsys.integration.blackduck.service.BlackDuckServicesFactory;
import com.synopsys.integration.exception.IntegrationException;

//TODO remove this when Super users in Black Duck no longer need to be assigned to a project in order to receive notifications for that project. Remove when all customers update to the fixed version of BD
public class BlackDuckAssignUtil {
    private final BlackDuckJiraLogger logger = new BlackDuckJiraLogger(Logger.getLogger(this.getClass().getName()));

    public void assignUserToBlackDuckProject(final PluginErrorAccessor pluginErrorAccessor, final GlobalConfigurationAccessor globalConfigurationAccessor) {
        try {
            final Set<BlackDuckProjectMapping> blackDuckProjectMappings = getBlackDuckProjectMappings(globalConfigurationAccessor);
            if (blackDuckProjectMappings.isEmpty()) {
                return;
            }
            final BlackDuckService blackDuckService = getBlackDuckService(globalConfigurationAccessor);
            final List<ProjectView> allProjects = getAllBDProjects(blackDuckService);
            final Set<ProjectView> matchingProjects = getMatchingBDProjects(blackDuckProjectMappings, allProjects);
            if (matchingProjects.isEmpty()) {
                return;
            }
            final UserView currentUser = getCurrentUser(blackDuckService);
            final Set<ProjectView> nonAssignedProjects = getProjectsThatNeedAssigning(blackDuckService, currentUser, matchingProjects);
            if (nonAssignedProjects.isEmpty()) {
                return;
            }
            assignUserToProjects(pluginErrorAccessor, blackDuckService, currentUser, nonAssignedProjects);
        } catch (final IntegrationException e) {
            logger.error("Could not assign the Black Duck user to the configured Black Duck projects. " + e.getMessage(), e);
            pluginErrorAccessor.addBlackDuckError(e, "assignUserToBlackDuckProject");
        }
    }

    public Set<BlackDuckProjectMapping> getBlackDuckProjectMappings(final GlobalConfigurationAccessor globalConfigurationAccessor) {
        if (null == globalConfigurationAccessor.getIssueCreationConfig() && null == globalConfigurationAccessor.getIssueCreationConfig().getProjectMapping()) {
            logger.debug("There is no issue creation configuration or project mappings. Skipping assigning the user to the BD Project.");
            return new HashSet<>();
        }
        final String projectMappingJson = globalConfigurationAccessor.getIssueCreationConfig().getProjectMapping().getMappingsJson();
        if (StringUtils.isBlank(projectMappingJson)) {
            logger.debug("There are no project mappings. Skipping assigning the user to the BD Project.");
            return new HashSet<>();
        }
        final BlackDuckJiraConfigSerializable config = new BlackDuckJiraConfigSerializable();
        config.setHubProjectMappingsJson(projectMappingJson);
        if (config.getHubProjectMappings().isEmpty()) {
            logger.debug("There are no project mappings in the mapping json. Skipping assigning the user to the BD Project.");
        }
        return config.getHubProjectMappings();
    }

    public BlackDuckService getBlackDuckService(final GlobalConfigurationAccessor globalConfigurationAccessor) throws IntegrationException {
        final BlackDuckConnectionHelper blackDuckConnectionHelper = new BlackDuckConnectionHelper();
        final PluginBlackDuckServerConfigModel blackDuckServerConfig = globalConfigurationAccessor.getBlackDuckServerConfig();
        final BlackDuckServicesFactory blackDuckServicesFactory = blackDuckConnectionHelper.createBlackDuckServicesFactory(logger, blackDuckServerConfig.createBlackDuckServerConfigBuilder());
        return blackDuckServicesFactory.createBlackDuckService();
    }

    public List<ProjectView> getAllBDProjects(final BlackDuckService blackDuckService) throws IntegrationException {
        return blackDuckService.getAllResponses(ApiDiscovery.PROJECTS_LINK_RESPONSE);
    }

    public Set<ProjectView> getMatchingBDProjects(final Set<BlackDuckProjectMapping> projectMappings, final List<ProjectView> allProjects) throws IntegrationException {
        final Map<String, ProjectView> projectMap = allProjects.stream().collect(Collectors.toMap(ProjectView::getName, Function.identity()));

        final Set<ProjectView> matchingProjects = new HashSet<>();
        for (final BlackDuckProjectMapping blackDuckProjectMapping : projectMappings) {
            final String blackDuckProjectName = blackDuckProjectMapping.getBlackDuckProjectName();
            if (blackDuckProjectMapping.isProjectPattern()) {
                matchingProjects.addAll(projectMap.entrySet().stream()
                                            .filter(entry -> entry.getKey().matches(blackDuckProjectName))
                                            .map(Map.Entry::getValue)
                                            .collect(Collectors.toSet()));
            } else {
                final ProjectView projectView = projectMap.get(blackDuckProjectName);
                matchingProjects.add(projectView);
            }
        }
        if (matchingProjects.isEmpty()) {
            logger.debug("There are no BD projects that map the projects configured in the project mappings. Skipping assigning the user to the BD Project.");
        }
        return matchingProjects;
    }

    public UserView getCurrentUser(final BlackDuckService blackDuckService) throws IntegrationException {
        return blackDuckService.getResponse(ApiDiscovery.CURRENT_USER_LINK_RESPONSE);
    }

    public Set<ProjectView> getProjectsThatNeedAssigning(final BlackDuckService blackDuckService, final UserView currentUser, final Set<ProjectView> matchingProjects) throws IntegrationException {
        final Set<String> assignedProjects = blackDuckService.getAllResponses(currentUser, UserView.PROJECTS_LINK_RESPONSE)
                                                 .stream()
                                                 .map(AssignedProjectView::getName)
                                                 .collect(Collectors.toSet());
        final Set<ProjectView> nonAssignedProjects = matchingProjects.stream()
                                                         .filter(project -> !assignedProjects.contains(project.getName()))
                                                         .collect(Collectors.toSet());
        if (nonAssignedProjects.isEmpty()) {
            logger.debug("There are no BD projects that need to have this User assigned to them. Skipping assigning the user to the BD Project.");
        }
        return nonAssignedProjects;
    }

    public void assignUserToProjects(final PluginErrorAccessor pluginErrorAccessor, final BlackDuckService blackDuckService, final UserView currentUser, final Set<ProjectView> projectsToAssign) throws IntegrationException {
        final AssignedUserRequest assignedUserRequest = new AssignedUserRequest();
        assignedUserRequest.setUser(currentUser.getHref().orElseThrow(() -> new IntegrationException(String.format("The current user, %s, does not have an href.", currentUser.getUserName()))));
        for (final ProjectView projectView : projectsToAssign) {
            final Optional<String> projectUsersLinkOptional = projectView.getFirstLink(ProjectView.USERS_LINK);
            if (projectUsersLinkOptional.isPresent()) {
                blackDuckService.post(projectUsersLinkOptional.get(), assignedUserRequest);
            } else {
                final String errorMessage = String.format("Could not assign the user, %s, to the project %s because there is no users link.", currentUser.getUserName(), projectView.getName());
                logger.error(errorMessage);
                pluginErrorAccessor.addBlackDuckError(errorMessage, "assignUserToBlackDuckProject");
            }
        }
    }

}
src/main/java/com/blackducksoftware/integration/jira/common/BlackDuckAssignUtil.java
/**
 * Black Duck JIRA Plugin
 *
 * Copyright (C) 2019 Black Duck Software, Inc.
 * http://www.blackducksoftware.com/
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package com.blackducksoftware.integration.jira.common;

import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;

import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;

import com.blackducksoftware.integration.jira.common.blackduck.BlackDuckConnectionHelper;
import com.blackducksoftware.integration.jira.common.model.BlackDuckProjectMapping;
import com.blackducksoftware.integration.jira.common.settings.GlobalConfigurationAccessor;
import com.blackducksoftware.integration.jira.common.settings.PluginErrorAccessor;
import com.blackducksoftware.integration.jira.common.settings.model.PluginBlackDuckServerConfigModel;
import com.blackducksoftware.integration.jira.config.model.BlackDuckJiraConfigSerializable;
import com.synopsys.integration.blackduck.api.generated.component.AssignedUserRequest;
import com.synopsys.integration.blackduck.api.generated.discovery.ApiDiscovery;
import com.synopsys.integration.blackduck.api.generated.view.ProjectView;
import com.synopsys.integration.blackduck.api.generated.view.UserView;
import com.synopsys.integration.blackduck.service.BlackDuckService;
import com.synopsys.integration.blackduck.service.BlackDuckServicesFactory;
import com.synopsys.integration.exception.IntegrationException;

//TODO remove this when Super users in Black Duck no longer need to be assigned to a project in order to receive notifications for that project. Remove when all customers update to the fixed version of BD
public class BlackDuckAssignUtil {
    private final BlackDuckJiraLogger logger = new BlackDuckJiraLogger(Logger.getLogger(this.getClass().getName()));

    public void assignUserToBlackDuckProject(final PluginErrorAccessor pluginErrorAccessor, final GlobalConfigurationAccessor globalConfigurationAccessor) {
        try {
            final Set<BlackDuckProjectMapping> blackDuckProjectMappings = getBlackDuckProjectMappings(globalConfigurationAccessor);
            if (blackDuckProjectMappings.isEmpty()) {
                return;
            }
            final BlackDuckService blackDuckService = getBlackDuckService(globalConfigurationAccessor);
            final List<ProjectView> allProjects = getAllBDProjects(blackDuckService);
            final Set<ProjectView> matchingProjects = getMatchingBDProjects(blackDuckProjectMappings, allProjects);
            if (matchingProjects.isEmpty()) {
                return;
            }
            final UserView currentUser = getCurrentUser(blackDuckService);
            final Set<ProjectView> nonAssignedProjects = getProjectsThatNeedAssigning(blackDuckService, currentUser, matchingProjects);
            if (nonAssignedProjects.isEmpty()) {
                return;
            }
            assignUserToProjects(pluginErrorAccessor, blackDuckService, currentUser, nonAssignedProjects);
        } catch (final IntegrationException e) {
            logger.error("Could not assign the Black Duck user to the configured Black Duck projects. " + e.getMessage(), e);
            pluginErrorAccessor.addBlackDuckError(e, "assignUserToBlackDuckProject");
        }
    }

    public Set<BlackDuckProjectMapping> getBlackDuckProjectMappings(final GlobalConfigurationAccessor globalConfigurationAccessor) {
        if (null == globalConfigurationAccessor.getIssueCreationConfig() && null == globalConfigurationAccessor.getIssueCreationConfig().getProjectMapping()) {
            logger.debug("There is no issue creation configuration or project mappings. Skipping assigning the user to the BD Project.");
            return new HashSet<>();
        }
        final String projectMappingJson = globalConfigurationAccessor.getIssueCreationConfig().getProjectMapping().getMappingsJson();
        if (StringUtils.isBlank(projectMappingJson)) {
            logger.debug("There are no project mappings. Skipping assigning the user to the BD Project.");
            return new HashSet<>();
        }
        final BlackDuckJiraConfigSerializable config = new BlackDuckJiraConfigSerializable();
        config.setHubProjectMappingsJson(projectMappingJson);
        if (config.getHubProjectMappings().isEmpty()) {
            logger.debug("There are no project mappings in the mapping json. Skipping assigning the user to the BD Project.");
            return new HashSet<>();
        }
        return config.getHubProjectMappings();
    }

    public BlackDuckService getBlackDuckService(final GlobalConfigurationAccessor globalConfigurationAccessor) throws IntegrationException {
        final BlackDuckConnectionHelper blackDuckConnectionHelper = new BlackDuckConnectionHelper();
        final PluginBlackDuckServerConfigModel blackDuckServerConfig = globalConfigurationAccessor.getBlackDuckServerConfig();
        final BlackDuckServicesFactory blackDuckServicesFactory = blackDuckConnectionHelper.createBlackDuckServicesFactory(logger, blackDuckServerConfig.createBlackDuckServerConfigBuilder());
        return blackDuckServicesFactory.createBlackDuckService();
    }

    public List<ProjectView> getAllBDProjects(final BlackDuckService blackDuckService) throws IntegrationException {
        return blackDuckService.getAllResponses(ApiDiscovery.PROJECTS_LINK_RESPONSE);
    }

    public Set<ProjectView> getMatchingBDProjects(final Set<BlackDuckProjectMapping> projectMappings, final List<ProjectView> allProjects) throws IntegrationException {
        final Map<String, ProjectView> projectMap = allProjects.stream().collect(Collectors.toMap(project -> project.getName(), Function.identity()));

        final Set<ProjectView> matchingProjects = new HashSet<>();
        for (final BlackDuckProjectMapping blackDuckProjectMapping : projectMappings) {
            final String blackDuckProjectName = blackDuckProjectMapping.getBlackDuckProjectName();
            if (blackDuckProjectMapping.isProjectPattern()) {
                matchingProjects.addAll(projectMap.entrySet().stream()
                                            .filter(entry -> entry.getKey().matches(blackDuckProjectName))
                                            .map(Map.Entry::getValue)
                                            .collect(Collectors.toSet()));
            } else {
                final ProjectView projectView = projectMap.get(blackDuckProjectName);
                matchingProjects.add(projectView);
            }
        }
        if (matchingProjects.isEmpty()) {
            logger.debug("There are no BD projects that map the projects configured in the project mappings. Skipping assigning the user to the BD Project.");
            return new HashSet<>();
        }
        return matchingProjects;
    }

    public UserView getCurrentUser(final BlackDuckService blackDuckService) throws IntegrationException {
        return blackDuckService.getResponse(ApiDiscovery.CURRENT_USER_LINK_RESPONSE);
    }

    public Set<ProjectView> getProjectsThatNeedAssigning(final BlackDuckService blackDuckService, final UserView currentUser, final Set<ProjectView> matchingProjects) throws IntegrationException {
        final Set<String> assignedProjects = blackDuckService.getAllResponses(currentUser, UserView.PROJECTS_LINK_RESPONSE)
                                                 .stream()
                                                 .map(assignedProject -> assignedProject.getName())
                                                 .collect(Collectors.toSet());
        final Set<ProjectView> nonAssignedProjects = matchingProjects.stream()
                                                         .filter(project -> !assignedProjects.contains(project.getName()))
                                                         .collect(Collectors.toSet());
        if (nonAssignedProjects.isEmpty()) {
            logger.debug("There are no BD projects that need to have this User assigned to them. Skipping assigning the user to the BD Project.");
            return new HashSet<>();
        }
        return nonAssignedProjects;
    }

    public void assignUserToProjects(final PluginErrorAccessor pluginErrorAccessor, final BlackDuckService blackDuckService, final UserView currentUser, final Set<ProjectView> projectsToAssign) throws IntegrationException {
        final AssignedUserRequest assignedUserRequest = new AssignedUserRequest();
        assignedUserRequest.setUser(currentUser.getHref().orElseThrow(() -> new IntegrationException(String.format("The current user, %s, does not have an href.", currentUser.getUserName()))));
        for (final ProjectView projectView : projectsToAssign) {
            final Optional<String> projectUsersLinkOptional = projectView.getFirstLink(ProjectView.USERS_LINK);
            if (projectUsersLinkOptional.isPresent()) {
                blackDuckService.post(projectUsersLinkOptional.get(), assignedUserRequest);
            } else {
                final String errorMessage = String.format("Could not assign the user, %s, to the project %s because there is no users link.", currentUser.getUserName(), projectView.getName());
                logger.error(errorMessage);
                pluginErrorAccessor.addBlackDuckError(errorMessage, "assignUserToBlackDuckProject");
            }
        }
    }

}
JAVA: removing extra returns. changing to method references in streams
src/main/java/com/blackducksoftware/integration/jira/common/BlackDuckAssignUtil.java
JAVA: removing extra returns. changing to method references in streams
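Aside: the message above records two mechanical cleanups visible in the diff — dropping early returns that duplicated the fall-through value, and replacing getter-forwarding lambdas with method references (e.g. `project -> project.getName()` becoming `ProjectView::getName`). A self-contained sketch of the second change, with `Item` invented as a stand-in for `ProjectView`:

import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

class Item {
    private final String name;

    Item(String name) {
        this.name = name;
    }

    public String getName() {
        return name;
    }

    public static void main(String[] args) {
        List<Item> items = Arrays.asList(new Item("a"), new Item("b"));

        // Before: a lambda whose whole body is a single getter call.
        Set<String> viaLambda = items.stream()
                .map(item -> item.getName())
                .collect(Collectors.toSet());

        // After: the equivalent method reference, as used in the commit.
        Set<String> viaReference = items.stream()
                .map(Item::getName)
                .collect(Collectors.toSet());

        System.out.println(viaLambda.equals(viaReference)); // true
    }
}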
Java
apache-2.0
d3e231e4a9802c3e308155c9ca7ec3a05206ced2
0
acgmohu/android-toolkit
package parser.dex;

import com.googlecode.dex2jar.Method;
import com.googlecode.dex2jar.util.DumpDexCodeAdapter;

import java.io.PrintWriter;
import java.io.StringWriter;

/**
 * Created by SlowMan on 14-6-4.
 * 1. Parses the concrete content of a method.
 * 2. Parses the string corresponding to OP_CONST_STRING.
 */
public class CodeAdapter extends DumpDexCodeAdapter {

    DexClass dexClass;
    Method method;
    StringWriter writer;

    public CodeAdapter(boolean isStatic, Method method, StringWriter writer, DexClass dexClass) {
        super(isStatic, method, new PrintWriter(writer));
        this.dexClass = dexClass;
        this.method = method;
        this.writer = writer;
    }

//    @Override
//    public void visitConstStmt(int opcode, int toReg, Object value, int xt) {
//        switch (opcode) {
//            case OP_CONST_STRING:
//                dexClass.stringData.add(XMLString.escape(value));
//                break;
//        }
//    }

    @Override
    public void visitEnd() {
        dexClass.methodMap.put(method.toString(), writer.toString());
    }
}
core/src/main/java/parser/dex/CodeAdapter.java
package parser.dex;

import com.googlecode.dex2jar.DexOpcodes;
import com.googlecode.dex2jar.Method;
import com.googlecode.dex2jar.util.DumpDexCodeAdapter;
import com.googlecode.dex2jar.visitors.DexCodeVisitor;
import org.objectweb.asm.Opcodes;
import parser.utils.XMLString;

import java.io.PrintWriter;
import java.io.StringWriter;

/**
 * Created by SlowMan on 14-6-4.
 * 1. Parses the concrete content of a method.
 * 2. Parses the string corresponding to OP_CONST_STRING.
 */
public class CodeAdapter extends DumpDexCodeAdapter implements DexCodeVisitor, Opcodes, DexOpcodes {

    DexClass dexClass;
    Method method;
    StringWriter writer;

    public CodeAdapter(boolean isStatic, Method method, StringWriter writer, DexClass dexClass) {
        super(isStatic, method, new PrintWriter(writer));
        this.dexClass = dexClass;
        this.method = method;
        this.writer = writer;
    }

    @Override
    public void visitConstStmt(int opcode, int toReg, Object value, int xt) {
        switch (opcode) {
            case OP_CONST_STRING:
                dexClass.stringData.add(XMLString.escape(value));
                break;
        }
    }

    @Override
    public void visitEnd() {
        dexClass.methodMap.put(method.toString(), writer.toString());
    }
}
This part of the code affects the collection of strings inside methods
core/src/main/java/parser/dex/CodeAdapter.java
This part of the code affects the collection of strings inside methods
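Aside: the message refers to the `visitConstStmt` override that the new version comments out — without it, string literals used inside method bodies are no longer gathered into `dexClass.stringData`. A self-contained sketch of the collection pattern, assuming nothing beyond what the record shows (ConstVisitor and the demo main are invented; 0x1a is the Dalvik const-string opcode, stated here as an assumption):

import java.util.ArrayList;
import java.util.List;

class ConstVisitor {
    static final int OP_CONST_STRING = 0x1a; // assumed dex const-string opcode

    final List<String> collected = new ArrayList<>();

    // Shape mirrors the record's visitConstStmt: each constant instruction is
    // reported to the visitor, and string operands are remembered.
    void visitConstStmt(int opcode, int toReg, Object value) {
        if (opcode == OP_CONST_STRING) {
            collected.add(String.valueOf(value));
        }
    }

    public static void main(String[] args) {
        ConstVisitor v = new ConstVisitor();
        v.visitConstStmt(OP_CONST_STRING, 0, "hello");
        v.visitConstStmt(0x12, 1, 42); // a non-string const is ignored
        System.out.println(v.collected); // [hello]
    }
}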
Java
apache-2.0
215be426a3230669ffe3790dd70b9ad03dacbd57
0
volkodava/disruptor-benchmark-tests
package com.disruptor.benchmark;

import com.lmax.disruptor.*;
import com.lmax.disruptor.dsl.Disruptor;
import com.lmax.disruptor.dsl.ProducerType;
import com.lmax.disruptor.util.DaemonThreadFactory;
import org.openjdk.jmh.annotations.*;
import org.openjdk.jmh.results.format.ResultFormatType;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;

import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

@State(Scope.Thread)
public class DisruptorBenchmarks {

    private EventHandler<LongEvent> handler;
    private Disruptor<LongEvent> disruptor;
    private RingBuffer<LongEvent> ringBuffer;
    private AtomicInteger eventCount;

    @Param({"512", "1024", "2048"})
    String ringBufferSize;

    @Param({"SINGLE", "MULTI"})
    String producerType;

    @Param({"com.lmax.disruptor.LiteBlockingWaitStrategy",
            "com.lmax.disruptor.BlockingWaitStrategy",
            "com.lmax.disruptor.SleepingWaitStrategy",
            "com.lmax.disruptor.BusySpinWaitStrategy",
            "com.lmax.disruptor.YieldingWaitStrategy",})
    String waitStrategy;

    @Setup
    public void setup() throws Exception {
        disruptor = new Disruptor<LongEvent>(LongEvent.EVENT_FACTORY, Integer.valueOf(ringBufferSize),
                DaemonThreadFactory.INSTANCE, ProducerType.valueOf(producerType),
                createWaitStrategyInstance(waitStrategy));
        eventCount = new AtomicInteger();
        handler = new EventHandler<LongEvent>() {
            public void onEvent(LongEvent event, long sequence, boolean endOfBatch) {
                if (Configuration.VALUE == event.getValue()) {
                    eventCount.incrementAndGet();
                } else {
                    throw new IllegalStateException("Expected: " + Configuration.VALUE
                            + ". Actual: " + event.getValue());
                }
            }
        };
        disruptor.handleEventsWith(handler);
        ringBuffer = disruptor.start();
    }

    @TearDown
    public void tearDown() {
        disruptor.shutdown();
    }

    private WaitStrategy createWaitStrategyInstance(String clsName) throws Exception {
        Class<WaitStrategy> clazz = (Class<WaitStrategy>) Class.forName(clsName);
        return clazz.newInstance();
    }

    @Benchmark
    public void processOneMlnEvents() {
        final int ONE_MLN = 1000000;
        for (int i = 0; i < ONE_MLN; i++) {
            ringBuffer.publishEvent(LongEvent.EVENT_TRANSLATOR, Configuration.VALUE);
        }
        while (eventCount.get() < ONE_MLN) {
            Thread.yield();
        }
    }

    private static final class LongEvent {
        private long value = -1L;

        public long getValue() {
            return value;
        }

        public void setValue(long value) {
            this.value = value;
        }

        public final static EventFactory<LongEvent> EVENT_FACTORY = new EventFactory<DisruptorBenchmarks.LongEvent>() {
            @Override
            public DisruptorBenchmarks.LongEvent newInstance() {
                return new DisruptorBenchmarks.LongEvent();
            }
        };

        public static final EventTranslatorOneArg<LongEvent, Long> EVENT_TRANSLATOR = new EventTranslatorOneArg<DisruptorBenchmarks.LongEvent, Long>() {
            @Override
            public void translateTo(DisruptorBenchmarks.LongEvent event, long sequence, Long value) {
                event.setValue(value);
            }
        };
    }

    public static void main(String[] args) throws Exception {
        int[] threadConfigurations = {1, 4};
        for (int numOfThreads : threadConfigurations) {
            // run benchmarks with specific number of threads
            runBenchmarks(numOfThreads);
        }
    }

    private static void runBenchmarks(int numOfThreads) throws Exception {
        final String resultFileName = "threads_x" + numOfThreads + ".csv";
        Options opts = new OptionsBuilder()
                .include(".*" + DisruptorBenchmarks.class.getSimpleName() + ".*")
                .forks(Configuration.FORKS)
                .threads(numOfThreads)
                .jvmArgs("-server")
                .mode(Mode.Throughput)
                .timeUnit(TimeUnit.SECONDS)
                .warmupIterations(Configuration.WARMUP_ITERATIONS)
                .measurementIterations(Configuration.MEASUREMENT_ITERATIONS)
                // Use this to selectively constrain/override parameters
                // .param("ringBufferSize", "256", "512", "1024", "2048", "4096")
                .resultFormat(ResultFormatType.CSV)
                .result(resultFileName)
                .build();

        new Runner(opts).run();
    }
}
src/main/java/com/disruptor/benchmark/DisruptorBenchmarks.java
package com.disruptor.benchmark;

import com.lmax.disruptor.*;
import com.lmax.disruptor.dsl.Disruptor;
import com.lmax.disruptor.dsl.ProducerType;
import com.lmax.disruptor.util.DaemonThreadFactory;
import org.openjdk.jmh.annotations.*;
import org.openjdk.jmh.results.format.ResultFormatType;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;

import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

@State(Scope.Thread)
public class DisruptorBenchmarks {

    private EventHandler<LongEvent> handler;
    private Disruptor<LongEvent> disruptor;
    private RingBuffer<LongEvent> ringBuffer;
    private AtomicInteger eventCount;

    @Param({"512", "1024", "2048"})
    String ringBufferSize;

    @Param({"SINGLE", "MULTI"})
    String producerType;

    @Param({"com.lmax.disruptor.LiteBlockingWaitStrategy",
            "com.lmax.disruptor.BlockingWaitStrategy",
            "com.lmax.disruptor.SleepingWaitStrategy",
            "com.lmax.disruptor.BusySpinWaitStrategy",
            "com.lmax.disruptor.YieldingWaitStrategy",})
    String waitStrategy;

    @Setup
    public void setup() throws Exception {
        disruptor = new Disruptor<LongEvent>(LongEvent.EVENT_FACTORY, Integer.valueOf(ringBufferSize),
                DaemonThreadFactory.INSTANCE, ProducerType.valueOf(producerType),
                createWaitStrategyInstance(waitStrategy));
        eventCount = new AtomicInteger();
        handler = new EventHandler<LongEvent>() {
            public void onEvent(LongEvent event, long sequence, boolean endOfBatch) {
                if (Configuration.VALUE == event.getValue()) {
                    eventCount.incrementAndGet();
                } else {
                    throw new IllegalStateException("Expected: " + Configuration.VALUE
                            + ". Actual: " + event.getValue());
                }
            }
        };
        disruptor.handleEventsWith(handler);
        ringBuffer = disruptor.start();
    }

    @TearDown
    public void tearDown() {
        disruptor.shutdown();
    }

    private WaitStrategy createWaitStrategyInstance(String clsName) throws Exception {
        Class<WaitStrategy> clazz = (Class<WaitStrategy>) Class.forName(clsName);
        return clazz.newInstance();
    }

    @Benchmark
    public void processOneMlnEvents() {
        final int ONE_MLN = 1000000;
        for (int i = 0; i < ONE_MLN; i++) {
            ringBuffer.publishEvent(LongEvent.EVENT_TRANSLATOR, Configuration.VALUE);
        }
        while (eventCount.get() < ONE_MLN) {
            Thread.yield();
        }
    }

    private static final class LongEvent {
        private long value = -1L;

        public long getValue() {
            return value;
        }

        public void setValue(long value) {
            this.value = value;
        }

        public final static EventFactory<LongEvent> EVENT_FACTORY = new EventFactory<DisruptorBenchmarks.LongEvent>() {
            @Override
            public DisruptorBenchmarks.LongEvent newInstance() {
                return new DisruptorBenchmarks.LongEvent();
            }
        };

        public static final EventTranslatorOneArg<LongEvent, Long> EVENT_TRANSLATOR = new EventTranslatorOneArg<DisruptorBenchmarks.LongEvent, Long>() {
            @Override
            public void translateTo(DisruptorBenchmarks.LongEvent event, long sequence, Long value) {
                event.setValue(value);
            }
        };
    }

    public static void main(String[] args) throws Exception {
        int[] threadConfigurations = {1, 4};
        for (int numOfThreads : threadConfigurations) {
            // run benchmarks with specific number of threads
            runBenchmarks(numOfThreads);
        }
    }

    private static void runBenchmarks(int numOfThreads) throws Exception {
        final String resultFileName = "threads_x" + numOfThreads + ".csv";
        Options opts = new OptionsBuilder()
                .include(".*" + DisruptorBenchmarks.class.getSimpleName() + ".*")
                .forks(Configuration.FORKS)
                .threads(numOfThreads)
                .jvmArgs("-server")
                .mode(Mode.Throughput)
                .timeUnit(TimeUnit.SECONDS)
                .warmupIterations(Configuration.WARMUP_ITERATIONS)
                .measurementIterations(Configuration.MEASUREMENT_ITERATIONS)
                // Use this to selectively constrain/override parameters
                // .param("ringBufferSize", "256", "512", "1024", "2048", "4096")
                .resultFormat(ResultFormatType.CSV)
                .result(resultFileName)
                .build();

        new Runner(opts).run();
    }
}
small refactor
src/main/java/com/disruptor/benchmark/DisruptorBenchmarks.java
small refactor
Java
apache-2.0
245144f6ec621db96235dce3a7aec45332038eeb
0
kubernetes-client/java,kubernetes-client/java
/*
Copyright 2021 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package io.kubernetes.client.spring.extended.manifests;

import static junit.framework.TestCase.assertEquals;
import static junit.framework.TestCase.assertNotNull;
import static org.awaitility.Awaitility.await;

import io.kubernetes.client.openapi.models.V1ConfigMap;
import io.kubernetes.client.spring.extended.manifests.annotation.FromConfigMap;
import io.kubernetes.client.spring.extended.manifests.config.KubernetesManifestsProperties;
import io.kubernetes.client.spring.extended.manifests.configmaps.ConfigMapGetter;
import java.time.Duration;
import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestRule;
import org.junit.runner.Description;
import org.junit.runner.RunWith;
import org.junit.runners.model.Statement;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringBootConfiguration;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.context.annotation.Bean;
import org.springframework.test.context.junit4.SpringRunner;

@RunWith(SpringRunner.class)
@SpringBootTest(
    classes = KubernetesFromConfigMapTest.App.class,
    properties = {
      "kubernetes.manifests.refreshInterval=1s",
    })
public class KubernetesFromConfigMapTest {

  @Rule public ConfigMapResetter configMapResetter = new ConfigMapResetter();

  @SpringBootConfiguration
  @EnableAutoConfiguration
  static class App {
    @Bean
    public KubernetesFromConfigMapProcessor kubernetesFromConfigMapProcessor() {
      return new KubernetesFromConfigMapProcessor();
    }

    @Bean
    public MockAtomicConfigMapGetter mockAtomicConfigMapGetter() {
      MockAtomicConfigMapGetter atomicConfigMapGetter = new MockAtomicConfigMapGetter();
      return atomicConfigMapGetter;
    }

    @Bean
    public KubernetesFromConfigMapTest.MyBean myBean() {
      return new KubernetesFromConfigMapTest.MyBean();
    }
  }

  static class MyBean {
    @FromConfigMap(namespace = "default", name = "foo", configMapGetter = MockConfigMapGetter.class)
    private Map<String, String> staticData;

    @FromConfigMap(
        namespace = "default",
        name = "foo",
        configMapGetter = MockAtomicConfigMapGetter.class)
    private Map<String, String> dynamicData;
  }

  @Autowired private KubernetesFromConfigMapTest.MyBean myBean;

  @Autowired private MockAtomicConfigMapGetter mockAtomicConfigMapGetter;

  @Autowired private KubernetesManifestsProperties manifestsProperties;

  @Test
  public void testReadOnce() {
    assertNotNull(myBean.staticData);
    assertEquals("bar", myBean.staticData.get("foo"));
  }

  @Test
  public void testValueUpdate() throws InterruptedException {
    assertEquals(Duration.ofSeconds(1), manifestsProperties.getRefreshInterval());
    assertNotNull(myBean.dynamicData);
    assertEquals("bar1", myBean.dynamicData.get("foo"));

    mockAtomicConfigMapGetter.configMapAtomicReference.set(
        new V1ConfigMap().putDataItem("foo", "bar2"));
    await()
        .timeout(manifestsProperties.getRefreshInterval().multipliedBy(2))
        .until(() -> "bar2".equals(myBean.dynamicData.get("foo")));
  }

  @Test
  public void testKeyUpdate() throws InterruptedException {
    assertEquals(Duration.ofSeconds(1), manifestsProperties.getRefreshInterval());
    assertNotNull(myBean.dynamicData);
    assertEquals("bar1", myBean.dynamicData.get("foo"));

    mockAtomicConfigMapGetter.configMapAtomicReference.set(
        new V1ConfigMap().putDataItem("foo1", "bar"));
    await()
        .timeout(manifestsProperties.getRefreshInterval().multipliedBy(2))
        .until(
            () -> {
              return myBean.dynamicData.get("foo") == null
                  && "bar".equals(myBean.dynamicData.get("foo1"));
            });
  }

  static class MockConfigMapGetter implements ConfigMapGetter {
    @Override
    public V1ConfigMap get(String namespace, String name) {
      return new V1ConfigMap().putDataItem("foo", "bar");
    }
  }

  static class MockAtomicConfigMapGetter implements ConfigMapGetter {
    private final AtomicReference<V1ConfigMap> configMapAtomicReference = new AtomicReference<>();

    @Override
    public V1ConfigMap get(String namespace, String name) {
      return configMapAtomicReference.get();
    }
  }

  class ConfigMapResetter implements TestRule {
    @Override
    public Statement apply(Statement statement, Description description) {
      return new Statement() {
        @Override
        public void evaluate() throws Throwable {
          mockAtomicConfigMapGetter.configMapAtomicReference.set(
              new V1ConfigMap().putDataItem("foo", "bar1"));
          await().until(() -> "bar1".equals(myBean.dynamicData.get("foo")));
          statement.evaluate();
        }
      };
    }
  }
}
spring/src/test/java/io/kubernetes/client/spring/extended/manifests/KubernetesFromConfigMapTest.java
/*
Copyright 2021 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package io.kubernetes.client.spring.extended.manifests;

import static junit.framework.Assert.assertNull;
import static junit.framework.TestCase.assertEquals;
import static junit.framework.TestCase.assertNotNull;

import io.kubernetes.client.openapi.models.V1ConfigMap;
import io.kubernetes.client.spring.extended.manifests.annotation.FromConfigMap;
import io.kubernetes.client.spring.extended.manifests.config.KubernetesManifestsProperties;
import io.kubernetes.client.spring.extended.manifests.configmaps.ConfigMapGetter;
import java.time.Duration;
import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;
import org.awaitility.Awaitility;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestRule;
import org.junit.runner.Description;
import org.junit.runner.RunWith;
import org.junit.runners.model.Statement;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringBootConfiguration;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.context.annotation.Bean;
import org.springframework.test.context.junit4.SpringRunner;

@RunWith(SpringRunner.class)
@SpringBootTest(
    classes = KubernetesFromConfigMapTest.App.class,
    properties = {
      "kubernetes.manifests.refreshInterval=1s",
    })
public class KubernetesFromConfigMapTest {

  @Rule public ConfigMapResetter configMapResetter = new ConfigMapResetter();

  @SpringBootConfiguration
  @EnableAutoConfiguration
  static class App {
    @Bean
    public KubernetesFromConfigMapProcessor kubernetesFromConfigMapProcessor() {
      return new KubernetesFromConfigMapProcessor();
    }

    @Bean
    public MockAtomicConfigMapGetter mockAtomicConfigMapGetter() {
      MockAtomicConfigMapGetter atomicConfigMapGetter = new MockAtomicConfigMapGetter();
      return atomicConfigMapGetter;
    }

    @Bean
    public KubernetesFromConfigMapTest.MyBean myBean() {
      return new KubernetesFromConfigMapTest.MyBean();
    }
  }

  static class MyBean {
    @FromConfigMap(namespace = "default", name = "foo", configMapGetter = MockConfigMapGetter.class)
    private Map<String, String> staticData;

    @FromConfigMap(
        namespace = "default",
        name = "foo",
        configMapGetter = MockAtomicConfigMapGetter.class)
    private Map<String, String> dynamicData;
  }

  @Autowired private KubernetesFromConfigMapTest.MyBean myBean;

  @Autowired private MockAtomicConfigMapGetter mockAtomicConfigMapGetter;

  @Autowired private KubernetesManifestsProperties manifestsProperties;

  @Test
  public void testReadOnce() {
    assertNotNull(myBean.staticData);
    assertEquals("bar", myBean.staticData.get("foo"));
  }

  @Test
  public void testValueUpdate() throws InterruptedException {
    assertEquals(Duration.ofSeconds(1), manifestsProperties.getRefreshInterval());
    assertNotNull(myBean.dynamicData);
    assertEquals("bar1", myBean.dynamicData.get("foo"));

    mockAtomicConfigMapGetter.configMapAtomicReference.set(
        new V1ConfigMap().putDataItem("foo", "bar2"));
    Thread.sleep(manifestsProperties.getRefreshInterval().toMillis());
    assertEquals("bar2", myBean.dynamicData.get("foo"));
  }

  @Test
  public void testKeyUpdate() throws InterruptedException {
    assertEquals(Duration.ofSeconds(1), manifestsProperties.getRefreshInterval());
    assertNotNull(myBean.dynamicData);
    assertEquals("bar1", myBean.dynamicData.get("foo"));

    mockAtomicConfigMapGetter.configMapAtomicReference.set(
        new V1ConfigMap().putDataItem("foo1", "bar"));
    Thread.sleep(manifestsProperties.getRefreshInterval().toMillis());
    assertNull(myBean.dynamicData.get("foo")); // old key should be removed
    assertEquals("bar", myBean.dynamicData.get("foo1")); // new key should be added
  }

  private void reset() {
    mockAtomicConfigMapGetter.configMapAtomicReference.set(
        new V1ConfigMap().putDataItem("foo", "bar1"));
  }

  static class MockConfigMapGetter implements ConfigMapGetter {
    @Override
    public V1ConfigMap get(String namespace, String name) {
      return new V1ConfigMap().putDataItem("foo", "bar");
    }
  }

  static class MockAtomicConfigMapGetter implements ConfigMapGetter {
    private final AtomicReference<V1ConfigMap> configMapAtomicReference = new AtomicReference<>();

    @Override
    public V1ConfigMap get(String namespace, String name) {
      return configMapAtomicReference.get();
    }
  }

  class ConfigMapResetter implements TestRule {
    @Override
    public Statement apply(Statement statement, Description description) {
      return new Statement() {
        @Override
        public void evaluate() throws Throwable {
          mockAtomicConfigMapGetter.configMapAtomicReference.set(
              new V1ConfigMap().putDataItem("foo", "bar1"));
          Awaitility.await().until(() -> "bar1".equals(myBean.dynamicData.get("foo")));
          statement.evaluate();
        }
      };
    }
  }
}
flake: relaxing from-configmap timeout to 2s
spring/src/test/java/io/kubernetes/client/spring/extended/manifests/KubernetesFromConfigMapTest.java
flake: relaxing from-configmap timeout to 2s
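Aside: the de-flaking pattern in this record — replacing a fixed `Thread.sleep` plus a one-shot assertion with a bounded poll — generalizes well. A minimal, self-contained sketch using Awaitility's `await().atMost(...).until(...)` (the `AtomicBoolean` stands in for the refreshed config-map value; only the Awaitility calls mirror the record):

import static org.awaitility.Awaitility.await;

import java.time.Duration;
import java.util.concurrent.atomic.AtomicBoolean;

public class AwaitInsteadOfSleep {
    public static void main(String[] args) {
        AtomicBoolean refreshed = new AtomicBoolean(false);

        // Simulate the background refresh the test waits for.
        new Thread(() -> {
            try {
                Thread.sleep(500);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
            refreshed.set(true);
        }).start();

        // Passes as soon as the flag flips; fails only after the full bound,
        // so slow machines get extra time without slowing the fast case.
        await().atMost(Duration.ofSeconds(2)).until(refreshed::get);
        System.out.println("condition met without a fixed sleep");
    }
}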
Java
apache-2.0
ced8ae0acbbe987f3d93e21a2d265c7b3a27af73
0
chengkaizone/Android-SlideExpandableListView,OrangeGangsters/Android-SlideExpandableListView,heshen/Android-SlideExpandableListView,simple88/Android-SlideExpandableListView,GeekHades/Android-SlideExpandableListView,caobaibing/Android-SlideExpandableListView,sowrabh/Android-SlideExpandableListView,HomHomLin/Android-SlideExpandableListView,tjerkw/Android-SlideExpandableListView,Volcanoscar/Android-SlideExpandableListView,yummy222/Android-SlideExpandableListView,chwnFlyPig/Android-SlideExpandableListView,lorenzos/Android-SlideExpandableListView,hgl888/Android-SlideExpandableListView,pedrocolon93/slideexpandable,lstNull/Android-SlideExpandableListView,confile/Android-SlideExpandableListView,ErNaveen/SlideExpandableListView
package com.tjerkw.slideexpandable.library;

import android.os.Parcel;
import android.os.Parcelable;
import android.util.SparseIntArray;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.Animation;
import android.widget.LinearLayout;
import android.widget.ListAdapter;

import java.util.BitSet;

/**
 * Wraps a ListAdapter to give it expandable list view functionality.
 * The main thing it does is add a listener to the getToggleButton
 * which expands the getExpandableView for each list item.
 *
 * @author tjerk
 * @date 6/9/12 4:41 PM
 */
public abstract class AbstractSlideExpandableListAdapter extends WrapperListAdapterImpl {
	/**
	 * Reference to the last expanded list item.
	 * Since lists are recycled this might be null if
	 * though there is an expanded list item
	 */
	private View lastOpen = null;

	/**
	 * The position of the last expanded list item.
	 * If -1 there is no list item expanded.
	 * Otherwise it points to the position of the last expanded list item
	 */
	private int lastOpenPosition = -1;

	/**
	 * Default Animation duration
	 * Set animation duration with @see setAnimationDuration
	 */
	private int animationDuration = 330;

	/**
	 * A list of positions of all list items that are expanded.
	 * Normally only one is expanded. But a mode to expand
	 * multiple will be added soon.
	 *
	 * If an item onj position x is open, its bit is set
	 */
	private BitSet openItems = new BitSet();

	/**
	 * We remember, for each collapsable view its height.
	 * So we dont need to recalculate.
	 * The height is calculated just before the view is drawn.
	 */
	private final SparseIntArray viewHeights = new SparseIntArray(10);

	public AbstractSlideExpandableListAdapter(ListAdapter wrapped) {
		super(wrapped);
	}

	@Override
	public View getView(int position, View view, ViewGroup viewGroup) {
		view = wrapped.getView(position, view, viewGroup);
		enableFor(view, position);
		return view;
	}

	/**
	 * This method is used to get the Button view that should
	 * expand or collapse the Expandable View.
	 * <br/>
	 * Normally it will be implemented as:
	 * <pre>
	 * return parent.findViewById(R.id.expand_toggle_button)
	 * </pre>
	 *
	 * A listener will be attached to the button which will
	 * either expand or collapse the expandable view
	 *
	 * @see #getExpandableView(View)
	 * @param parent the list view item
	 * @ensure return!=null
	 * @return a child of parent which is a button
	 */
	public abstract View getExpandToggleButton(View parent);

	/**
	 * This method is used to get the view that will be hidden
	 * initially and expands or collapse when the ExpandToggleButton
	 * is pressed @see getExpandToggleButton
	 * <br/>
	 * Normally it will be implemented as:
	 * <pre>
	 * return parent.findViewById(R.id.expandable)
	 * </pre>
	 *
	 * @see #getExpandToggleButton(View)
	 * @param parent the list view item
	 * @ensure return!=null
	 * @return a child of parent which is a view (or often ViewGroup)
	 *  that can be collapsed and expanded
	 */
	public abstract View getExpandableView(View parent);

	/**
	 * Gets the duration of the collapse animation in ms.
	 * Default is 330ms. Override this method to change the default.
	 *
	 * @return the duration of the anim in ms
	 */
	public int getAnimationDuration() {
		return animationDuration;
	}

	/**
	 * Set's the Animation duration for the Expandable animation
	 *
	 * @param duration The duration as an integer in MS (duration > 0)
	 * @exception IllegalArgumentException if parameter is less than zero
	 */
	public void setAnimationDuration(int duration) {
		if(duration < 0) {
			throw new IllegalArgumentException("Duration is less than zero");
		}
		animationDuration = duration;
	}

	/**
	 * Check's if any position is currently Expanded
	 * To collapse the open item @see collapseLastOpen
	 *
	 * @return boolean True if there is currently an item expanded, otherwise false
	 */
	public boolean isAnyItemExpanded() {
		return (lastOpenPosition != -1) ? true : false;
	}

	public void enableFor(View parent, int position) {
		View more = getExpandToggleButton(parent);
		View itemToolbar = getExpandableView(parent);
		itemToolbar.measure(parent.getWidth(), parent.getHeight());

		enableFor(more, itemToolbar, position);
		itemToolbar.requestLayout();
	}

	private void enableFor(final View button, final View target, final int position) {
		if(target == lastOpen && position!=lastOpenPosition) {
			// lastOpen is recycled, so its reference is false
			lastOpen = null;
		}
		if(position == lastOpenPosition) {
			// re reference to the last view
			// so when can animate it when collapsed
			lastOpen = target;
		}
		int height = viewHeights.get(position, -1);
		if(height == -1) {
			viewHeights.put(position, target.getMeasuredHeight());
			updateExpandable(target,position);
		} else {
			updateExpandable(target, position);
		}

		button.setOnClickListener(new View.OnClickListener() {
			@Override
			public void onClick(final View view) {
				Animation a = target.getAnimation();

				if (a != null && a.hasStarted() && !a.hasEnded()) {
					a.setAnimationListener(new Animation.AnimationListener() {
						@Override
						public void onAnimationStart(Animation animation) {
						}

						@Override
						public void onAnimationEnd(Animation animation) {
							view.performClick();
						}

						@Override
						public void onAnimationRepeat(Animation animation) {
						}
					});
				} else {
					target.setAnimation(null);

					int type = target.getVisibility() == View.VISIBLE
							? ExpandCollapseAnimation.COLLAPSE
							: ExpandCollapseAnimation.EXPAND;

					// remember the state
					if (type == ExpandCollapseAnimation.EXPAND) {
						openItems.set(position, true);
					} else {
						openItems.set(position, false);
					}
					// check if we need to collapse a different view
					if (type == ExpandCollapseAnimation.EXPAND) {
						if (lastOpenPosition != -1 && lastOpenPosition != position) {
							if (lastOpen != null) {
								animateView(lastOpen, ExpandCollapseAnimation.COLLAPSE);
							}
							openItems.set(lastOpenPosition, false);
						}
						lastOpen = target;
						lastOpenPosition = position;
					} else if (lastOpenPosition == position) {
						lastOpenPosition = -1;
					}
					animateView(target, type);
				}
			}
		});
	}

	private void updateExpandable(View target, int position) {
		final LinearLayout.LayoutParams params = (LinearLayout.LayoutParams)target.getLayoutParams();
		if(openItems.get(position)) {
			target.setVisibility(View.VISIBLE);
			params.bottomMargin = 0;
		} else {
			target.setVisibility(View.GONE);
			params.bottomMargin = 0-viewHeights.get(position);
		}
	}

	/**
	 * Performs either COLLAPSE or EXPAND animation on the target view
	 * @param target the view to animate
	 * @param type the animation type, either ExpandCollapseAnimation.COLLAPSE
	 *             or ExpandCollapseAnimation.EXPAND
	 */
	private void animateView(final View target, final int type) {
		Animation anim = new ExpandCollapseAnimation(
				target,
				type
		);
		anim.setDuration(getAnimationDuration());
		target.startAnimation(anim);
	}

	/**
	 * Closes the current open item.
	 * If it is current visible it will be closed with an animation.
	 *
	 * @return true if an item was closed, false otherwise
	 */
	public boolean collapseLastOpen() {
		if(isAnyItemExpanded()) {
			// if visible animate it out
			if(lastOpen != null) {
				animateView(lastOpen, ExpandCollapseAnimation.COLLAPSE);
			}
			openItems.set(lastOpenPosition, false);
			lastOpenPosition = -1;
			return true;
		}
		return false;
	}

	public Parcelable onSaveInstanceState(Parcelable parcelable) {
		SavedState ss = new SavedState(parcelable);
		ss.lastOpenPosition = this.lastOpenPosition;
		ss.openItems = this.openItems;
		return ss;
	}

	public void onRestoreInstanceState(SavedState state) {
		this.lastOpenPosition = state.lastOpenPosition;
		this.openItems = state.openItems;
	}

	/**
	 * Utility methods to read and write a bitset from and to a Parcel
	 */
	private static BitSet readBitSet(Parcel src) {
		int cardinality = src.readInt();

		BitSet set = new BitSet();
		for (int i = 0; i < cardinality; i++) {
			set.set(src.readInt());
		}

		return set;
	}

	private static void writeBitSet(Parcel dest, BitSet set) {
		int nextSetBit = -1;

		dest.writeInt(set.cardinality());

		while ((nextSetBit = set.nextSetBit(nextSetBit + 1)) != -1) {
			dest.writeInt(nextSetBit);
		}
	}

	/**
	 * The actual state class
	 */
	static class SavedState extends View.BaseSavedState {

		public BitSet openItems = null;
		public int lastOpenPosition = -1;

		SavedState(Parcelable superState) {
			super(superState);
		}

		private SavedState(Parcel in) {
			super(in);
			in.writeInt(lastOpenPosition);
			writeBitSet(in, openItems);
		}

		@Override
		public void writeToParcel(Parcel out, int flags) {
			super.writeToParcel(out, flags);
			lastOpenPosition = out.readInt();
			openItems = readBitSet(out);
		}

		//required field that makes Parcelables from a Parcel
		public static final Parcelable.Creator<SavedState> CREATOR =
				new Parcelable.Creator<SavedState>() {
			public SavedState createFromParcel(Parcel in) {
				return new SavedState(in);
			}

			public SavedState[] newArray(int size) {
				return new SavedState[size];
			}
		};
	}
}
library/src/com/tjerkw/slideexpandable/library/AbstractSlideExpandableListAdapter.java
package com.tjerkw.slideexpandable.library;

import android.os.Parcel;
import android.os.Parcelable;
import android.util.SparseIntArray;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.Animation;
import android.widget.LinearLayout;
import android.widget.ListAdapter;

import java.util.BitSet;

/**
 * Wraps a ListAdapter to give it expandable list view functionality.
 * The main thing it does is add a listener to the getToggleButton
 * which expands the getExpandableView for each list item.
 *
 * @author tjerk
 * @date 6/9/12 4:41 PM
 */
public abstract class AbstractSlideExpandableListAdapter extends WrapperListAdapterImpl {
	/**
	 * Reference to the last expanded list item.
	 * Since lists are recycled this might be null if
	 * though there is an expanded list item
	 */
	private View lastOpen = null;

	/**
	 * The position of the last expanded list item.
	 * If -1 there is no list item expanded.
	 * Otherwise it points to the position of the last expanded list item
	 */
	private int lastOpenPosition = -1;

	/**
	 * Default Animation duration
	 * Set animation duration with @see setAnimationDuration
	 */
	private int animationDuration = 330;

	/**
	 * A list of positions of all list items that are expanded.
	 * Normally only one is expanded. But a mode to expand
	 * multiple will be added soon.
	 *
	 * If an item onj position x is open, its bit is set
	 */
	private BitSet openItems = new BitSet();

	/**
	 * We remember, for each collapsable view its height.
	 * So we dont need to recalculate.
	 * The height is calculated just before the view is drawn.
	 */
	private final SparseIntArray viewHeights = new SparseIntArray(10);

	public AbstractSlideExpandableListAdapter(ListAdapter wrapped) {
		super(wrapped);
	}

	@Override
	public View getView(int position, View view, ViewGroup viewGroup) {
		view = wrapped.getView(position, view, viewGroup);
		enableFor(view, position);
		return view;
	}

	/**
	 * This method is used to get the Button view that should
	 * expand or collapse the Expandable View.
	 * <br/>
	 * Normally it will be implemented as:
	 * <pre>
	 * return parent.findViewById(R.id.expand_toggle_button)
	 * </pre>
	 *
	 * A listener will be attached to the button which will
	 * either expand or collapse the expandable view
	 *
	 * @see #getExpandableView(View)
	 * @param parent the list view item
	 * @ensure return!=null
	 * @return a child of parent which is a button
	 */
	public abstract View getExpandToggleButton(View parent);

	/**
	 * This method is used to get the view that will be hidden
	 * initially and expands or collapse when the ExpandToggleButton
	 * is pressed @see getExpandToggleButton
	 * <br/>
	 * Normally it will be implemented as:
	 * <pre>
	 * return parent.findViewById(R.id.expandable)
	 * </pre>
	 *
	 * @see #getExpandToggleButton(View)
	 * @param parent the list view item
	 * @ensure return!=null
	 * @return a child of parent which is a view (or often ViewGroup)
	 *  that can be collapsed and expanded
	 */
	public abstract View getExpandableView(View parent);

	/**
	 * Gets the duration of the collapse animation in ms.
	 * Default is 330ms. Override this method to change the default.
	 *
	 * @return the duration of the anim in ms
	 */
	public int getAnimationDuration() {
		return animationDuration;
	}

	/**
	 * Set's the Animation duration for the Expandable animation
	 *
	 * @param duration The duration as an integer in MS (duration > 0)
	 * @exception IllegalArgumentException if parameter is less than zero
	 */
	public void setAnimationDuration(int duration) {
		if(duration < 0) {
			throw new IllegalArgumentException("Duration is less than zero");
		}
		animationDuration = duration;
	}

	/**
	 * Check's if any position is currently Expanded
	 * To collapse the open item @see collapseLastOpen
	 *
	 * @return boolean True if there is currently an item expanded, otherwise false
	 */
	public boolean isAnyItemExpanded() {
		return (lastOpenPosition != -1) ? true : false;
	}

	public void enableFor(View parent, int position) {
		View more = getExpandToggleButton(parent);
		View itemToolbar = getExpandableView(parent);
		itemToolbar.measure(parent.getWidth(), parent.getHeight());

		enableFor(more, itemToolbar, position);
	}

	private void enableFor(final View button, final View target, final int position) {
		if(target == lastOpen && position!=lastOpenPosition) {
			// lastOpen is recycled, so its reference is false
			lastOpen = null;
		}
		if(position == lastOpenPosition) {
			// re reference to the last view
			// so when can animate it when collapsed
			lastOpen = target;
		}
		int height = viewHeights.get(position, -1);
		if(height == -1) {
			viewHeights.put(position, target.getMeasuredHeight());
			updateExpandable(target,position);
		} else {
			updateExpandable(target, position);
		}

		button.setOnClickListener(new View.OnClickListener() {
			@Override
			public void onClick(final View view) {
				Animation a = target.getAnimation();

				if (a != null && a.hasStarted() && !a.hasEnded()) {
					a.setAnimationListener(new Animation.AnimationListener() {
						@Override
						public void onAnimationStart(Animation animation) {
						}

						@Override
						public void onAnimationEnd(Animation animation) {
							view.performClick();
						}

						@Override
						public void onAnimationRepeat(Animation animation) {
						}
					});
				} else {
					target.setAnimation(null);

					int type = target.getVisibility() == View.VISIBLE
							? ExpandCollapseAnimation.COLLAPSE
							: ExpandCollapseAnimation.EXPAND;

					// remember the state
					if (type == ExpandCollapseAnimation.EXPAND) {
						openItems.set(position, true);
					} else {
						openItems.set(position, false);
					}
					// check if we need to collapse a different view
					if (type == ExpandCollapseAnimation.EXPAND) {
						if (lastOpenPosition != -1 && lastOpenPosition != position) {
							if (lastOpen != null) {
								animateView(lastOpen, ExpandCollapseAnimation.COLLAPSE);
							}
							openItems.set(lastOpenPosition, false);
						}
						lastOpen = target;
						lastOpenPosition = position;
					} else if (lastOpenPosition == position) {
						lastOpenPosition = -1;
					}
					animateView(target, type);
				}
			}
		});
	}

	private void updateExpandable(View target, int position) {
		final LinearLayout.LayoutParams params = (LinearLayout.LayoutParams)target.getLayoutParams();
		if(openItems.get(position)) {
			target.setVisibility(View.VISIBLE);
			params.bottomMargin = 0;
		} else {
			target.setVisibility(View.GONE);
			params.bottomMargin = 0-viewHeights.get(position);
		}
	}

	/**
	 * Performs either COLLAPSE or EXPAND animation on the target view
	 * @param target the view to animate
	 * @param type the animation type, either ExpandCollapseAnimation.COLLAPSE
	 *             or ExpandCollapseAnimation.EXPAND
	 */
	private void animateView(final View target, final int type) {
		Animation anim = new ExpandCollapseAnimation(
				target,
				type
		);
		anim.setDuration(getAnimationDuration());
		target.startAnimation(anim);
	}

	/**
	 * Closes the current open item.
	 * If it is current visible it will be closed with an animation.
	 *
	 * @return true if an item was closed, false otherwise
	 */
	public boolean collapseLastOpen() {
		if(isAnyItemExpanded()) {
			// if visible animate it out
			if(lastOpen != null) {
				animateView(lastOpen, ExpandCollapseAnimation.COLLAPSE);
			}
			openItems.set(lastOpenPosition, false);
			lastOpenPosition = -1;
			return true;
		}
		return false;
	}

	public Parcelable onSaveInstanceState(Parcelable parcelable) {
		SavedState ss = new SavedState(parcelable);
		ss.lastOpenPosition = this.lastOpenPosition;
		ss.openItems = this.openItems;
		return ss;
	}

	public void onRestoreInstanceState(SavedState state) {
		this.lastOpenPosition = state.lastOpenPosition;
		this.openItems = state.openItems;
	}

	/**
	 * Utility methods to read and write a bitset from and to a Parcel
	 */
	private static BitSet readBitSet(Parcel src) {
		int cardinality = src.readInt();

		BitSet set = new BitSet();
		for (int i = 0; i < cardinality; i++) {
			set.set(src.readInt());
		}

		return set;
	}

	private static void writeBitSet(Parcel dest, BitSet set) {
		int nextSetBit = -1;

		dest.writeInt(set.cardinality());

		while ((nextSetBit = set.nextSetBit(nextSetBit + 1)) != -1) {
			dest.writeInt(nextSetBit);
		}
	}

	/**
	 * The actual state class
	 */
	static class SavedState extends View.BaseSavedState {

		public BitSet openItems = null;
		public int lastOpenPosition = -1;

		SavedState(Parcelable superState) {
			super(superState);
		}

		private SavedState(Parcel in) {
			super(in);
			in.writeInt(lastOpenPosition);
			writeBitSet(in, openItems);
		}

		@Override
		public void writeToParcel(Parcel out, int flags) {
			super.writeToParcel(out, flags);
			lastOpenPosition = out.readInt();
			openItems = readBitSet(out);
		}

		//required field that makes Parcelables from a Parcel
		public static final Parcelable.Creator<SavedState> CREATOR =
				new Parcelable.Creator<SavedState>() {
			public SavedState createFromParcel(Parcel in) {
				return new SavedState(in);
			}

			public SavedState[] newArray(int size) {
				return new SavedState[size];
			}
		};
	}
}
ReLayout toolbar after measure()

The toolbar layout needs to be re-laid out after the call to measure(); otherwise, for example, centered content in the toolbar will appear left-aligned after stopping and restarting the activity (i.e. switching off the screen).
library/src/com/tjerkw/slideexpandable/library/AbstractSlideExpandableListAdapter.java
ReLayout toolbar after measure()
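Aside: the commit body explains the underlying rule — calling measure() by hand computes a view's measured size but does not schedule a layout pass, so a follow-up requestLayout() is needed before children are positioned again. A minimal sketch of the pattern using the real android.view.View methods (the helper name and MeasureSpec choices are illustrative; the record itself passes raw parent sizes to measure()):

import android.view.View;

final class MeasureThenLayout {
    static void premeasure(View expandable, int availableWidth, int availableHeight) {
        int widthSpec = View.MeasureSpec.makeMeasureSpec(availableWidth, View.MeasureSpec.EXACTLY);
        int heightSpec = View.MeasureSpec.makeMeasureSpec(availableHeight, View.MeasureSpec.AT_MOST);

        // measure() only fills in measuredWidth/measuredHeight ...
        expandable.measure(widthSpec, heightSpec);
        // ... so schedule a layout pass, otherwise children keep stale
        // positions (e.g. centered content rendering left-aligned after a
        // stop/restart, as the commit message describes).
        expandable.requestLayout();
    }

    private MeasureThenLayout() {
    }
}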
Java
apache-2.0
bd7930fec37e12e7a7fca648d842a20334d48437
0
CloudSlang/cloud-slang,CloudSlang/cloud-slang,CloudSlang/cloud-slang,CloudSlang/cloud-slang
/*******************************************************************************
 * (c) Copyright 2016 Hewlett-Packard Development Company, L.P.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Apache License v2.0 which accompany this distribution.
 *
 * The Apache License is available at
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 *******************************************************************************/
package io.cloudslang.lang.tools.build;

import com.beust.jcommander.JCommander;
import com.beust.jcommander.ParameterException;
import io.cloudslang.lang.api.Slang;
import io.cloudslang.lang.commons.services.api.UserConfigurationService;
import io.cloudslang.lang.commons.services.impl.UserConfigurationServiceImpl;
import io.cloudslang.lang.logging.LoggingService;
import io.cloudslang.lang.logging.LoggingServiceImpl;
import io.cloudslang.lang.tools.build.commands.ApplicationArgs;
import io.cloudslang.lang.tools.build.tester.IRunTestResults;
import io.cloudslang.lang.tools.build.tester.TestRun;
import io.cloudslang.lang.tools.build.tester.parallel.report.SlangTestCaseRunReportGeneratorService;
import io.cloudslang.lang.tools.build.tester.parse.SlangTestCase;
import io.cloudslang.lang.tools.build.tester.runconfiguration.TestRunInfoService;
import io.cloudslang.score.events.ScoreEvent;
import io.cloudslang.score.events.ScoreEventListener;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;

import org.apache.commons.collections4.ListUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.lang.Validate;
import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;
import org.springframework.context.support.ClassPathXmlApplicationContext;

import static io.cloudslang.lang.tools.build.ArgumentProcessorUtils.getBooleanFromPropertiesWithDefault;
import static io.cloudslang.lang.tools.build.ArgumentProcessorUtils.getEnumInstanceFromPropertiesWithDefault;
import static io.cloudslang.lang.tools.build.ArgumentProcessorUtils.getIntFromPropertiesWithDefaultAndRange;
import static io.cloudslang.lang.tools.build.ArgumentProcessorUtils.getListForPrint;
import static io.cloudslang.lang.tools.build.SlangBuildMain.BulkRunMode.ALL_PARALLEL;
import static io.cloudslang.lang.tools.build.SlangBuildMain.BulkRunMode.ALL_SEQUENTIAL;
import static io.cloudslang.lang.tools.build.SlangBuildMain.BulkRunMode.POSSIBLY_MIXED;
import static io.cloudslang.lang.tools.build.SlangBuildMain.RunConfigurationProperties.TEST_COVERAGE;
import static io.cloudslang.lang.tools.build.SlangBuildMain.RunConfigurationProperties.TEST_PARALLEL_THREAD_COUNT;
import static io.cloudslang.lang.tools.build.SlangBuildMain.RunConfigurationProperties.TEST_SUITES_PARALLEL;
import static io.cloudslang.lang.tools.build.SlangBuildMain.RunConfigurationProperties.TEST_SUITES_RUN_UNSPECIFIED;
import static io.cloudslang.lang.tools.build.SlangBuildMain.RunConfigurationProperties.TEST_SUITES_SEQUENTIAL;
import static io.cloudslang.lang.tools.build.SlangBuildMain.RunConfigurationProperties.TEST_SUITES_TO_RUN;
import static io.cloudslang.lang.tools.build.tester.SlangTestRunner.MAX_TIME_PER_TESTCASE_IN_MINUTES;
import static io.cloudslang.lang.tools.build.tester.SlangTestRunner.TEST_CASE_TIMEOUT_IN_MINUTES_KEY;
import static io.cloudslang.lang.tools.build.tester.parallel.services.ParallelTestCaseExecutorService.SLANG_TEST_RUNNER_THREAD_COUNT;
import static java.lang.Integer.parseInt;
import static java.lang.String.format;
import static java.lang.String.valueOf;
import static java.lang.System.getProperty;
import static java.lang.System.setProperty;
import static java.nio.file.Files.createDirectories;
import static java.nio.file.Files.exists;
import static java.nio.file.Files.isRegularFile;
import static java.nio.file.LinkOption.NOFOLLOW_LINKS;
import static java.nio.file.Paths.get;
import static java.util.Locale.ENGLISH;
import static org.apache.commons.collections4.ListUtils.removeAll;
import static org.apache.commons.collections4.ListUtils.union;
import static org.apache.commons.collections4.MapUtils.isNotEmpty;
import static org.apache.commons.lang3.StringUtils.defaultIfEmpty;
import static org.apache.commons.lang3.StringUtils.equalsIgnoreCase;
import static org.apache.commons.lang3.StringUtils.isEmpty;

public class SlangBuildMain {

    public static final String DEFAULT_TESTS = "default";

    private static final String TEST_CASE_REPORT_LOCATION = "cloudslang.test.case.report.location";
    private static final String CONTENT_DIR = File.separator + "content";
    private static final String TEST_DIR = File.separator + "test";

    private static final Logger log = Logger.getLogger(SlangBuildMain.class);
    private static final int MAX_THREADS_TEST_RUNNER = 32;

    private static final String MESSAGE_NOT_SCHEDULED_FOR_RUN_RULES = "Rules '%s' defined in '%s' key "
            + "are not scheduled for run.";
    private static final String MESSAGE_TEST_SUITES_WITH_UNSPECIFIED_MAPPING = "Test suites '%s' have "
            + "unspecified mapping. They will run in '%s' mode.";
    private static final String PROPERTIES_FILE_EXTENSION = "properties";
    private static final String DID_NOT_DETECT_RUN_CONFIGURATION_PROPERTIES_FILE = "Did not detect run "
            + "configuration properties file at path '%s'. "
            + "Check that the path you are using is an absolute path. "
            + "Check that the path separator is '\\\\' for Windows, or '/' for Linux.";
    private static final String NEW_LINE = System.lineSeparator();
    private static final String MESSAGE_BOTH_PARALLEL_AND_SEQUENTIAL_EXECUTION = "The '%s' suites are configured for "
            + "both parallel and sequential execution."
            + " Each test suite must have only one execution mode (parallel or sequential).";
    private static final String MESSAGE_ERROR_LOADING_SMART_MODE_CONFIG_FILE = "Error loading smart "
            + "mode configuration file:";

    private static final String LOG4J_CONFIGURATION_KEY = "log4j.configuration";
    private static final String LOG4J_ERROR_PREFIX = "log4j: error loading log4j properties file.";
    private static final String LOG4J_ERROR_SUFFIX = "Using default configuration.";
    private static final String APP_HOME_KEY = "app.home";

    // This class is a used in the interaction with the run configuration property file
    static class RunConfigurationProperties {
        static final String TEST_COVERAGE = "test.coverage";
        static final String TEST_PARALLEL_THREAD_COUNT = "test.parallel.thread.count";
        static final String TEST_SUITES_TO_RUN = "test.suites.active";
        static final String TEST_SUITES_PARALLEL = "test.suites.parallel";
        static final String TEST_SUITES_SEQUENTIAL = "test.suites.sequential";
        static final String TEST_SUITES_RUN_UNSPECIFIED = "test.suites.run.mode.unspecified";
    }

    // The possible ways to execute a test case
    public enum TestCaseRunMode {
        PARALLEL,
        SEQUENTIAL
    }

    // The typical configuration on how to configure the run of all tests as a bulk
    public enum BulkRunMode {
        ALL_PARALLEL,
        ALL_SEQUENTIAL,
        POSSIBLY_MIXED
    }

    // The possible ways to run tests: everything or the tests affected by current changelist
    public enum BuildMode {
        BASIC,
        CHANGED
    }

    public static void main(String[] args) {
        loadUserProperties();
        configureLog4j();

        ApplicationArgs appArgs = new ApplicationArgs();
        parseArgs(args, appArgs);
        String projectPath = parseProjectPathArg(appArgs);
        final String contentPath = defaultIfEmpty(appArgs.getContentRoot(), projectPath + CONTENT_DIR);
        final String testsPath = defaultIfEmpty(appArgs.getTestRoot(), projectPath + TEST_DIR);
        List<String> testSuites = parseTestSuites(appArgs);
        boolean shouldPrintCoverageData = appArgs.shouldOutputCoverage();
        boolean runTestsInParallel = appArgs.isParallel();
        int threadCount = parseThreadCountArg(appArgs, runTestsInParallel);
        String testCaseTimeout = parseTestTimeout(appArgs);
        setProperty(TEST_CASE_TIMEOUT_IN_MINUTES_KEY, valueOf(testCaseTimeout));
        final boolean shouldValidateDescription = appArgs.shouldValidateDescription();
        String runConfigPath = FilenameUtils.normalize(appArgs.getRunConfigPath());

        BuildMode buildMode = null;
        Set<String> changedFiles = null;
        try {
            String smartModePath = appArgs.getChangesOnlyConfigPath();
            if (StringUtils.isEmpty(smartModePath)) {
                buildMode = BuildMode.BASIC;
                changedFiles = new HashSet<>();
                printBuildModeInfo(buildMode);
            } else {
                buildMode = BuildMode.CHANGED;
                changedFiles = readChangedFilesFromSource(smartModePath);
                printBuildModeInfo(buildMode);
            }
        } catch (Exception ex) {
            log.error("Exception: " + ex.getMessage());
            System.exit(1);
        }

        // Override with the values from the file if configured
        List<String> testSuitesParallel = new ArrayList<>();
        List<String> testSuitesSequential = new ArrayList<>();
        BulkRunMode bulkRunMode = runTestsInParallel ? ALL_PARALLEL : ALL_SEQUENTIAL;
        TestCaseRunMode unspecifiedTestSuiteRunMode = runTestsInParallel ?
                TestCaseRunMode.PARALLEL : TestCaseRunMode.SEQUENTIAL;
        if (get(runConfigPath).isAbsolute() && isRegularFile(get(runConfigPath), NOFOLLOW_LINKS) &&
                equalsIgnoreCase(PROPERTIES_FILE_EXTENSION, FilenameUtils.getExtension(runConfigPath))) {
            Properties runConfigurationProperties = ArgumentProcessorUtils.getPropertiesFromFile(runConfigPath);
            shouldPrintCoverageData = getBooleanFromPropertiesWithDefault(TEST_COVERAGE, shouldPrintCoverageData,
                    runConfigurationProperties);
            threadCount = getIntFromPropertiesWithDefaultAndRange(TEST_PARALLEL_THREAD_COUNT,
                    Runtime.getRuntime().availableProcessors(), runConfigurationProperties, 1,
                    MAX_THREADS_TEST_RUNNER + 1);
            testSuites = getTestSuitesForKey(runConfigurationProperties, TEST_SUITES_TO_RUN);
            testSuitesParallel = getTestSuitesForKey(runConfigurationProperties, TEST_SUITES_PARALLEL);
            testSuitesSequential = getTestSuitesForKey(runConfigurationProperties, TEST_SUITES_SEQUENTIAL);
            addErrorIfSameTestSuiteIsInBothParallelOrSequential(testSuitesParallel, testSuitesSequential);
            unspecifiedTestSuiteRunMode = getEnumInstanceFromPropertiesWithDefault(TEST_SUITES_RUN_UNSPECIFIED,
                    unspecifiedTestSuiteRunMode, runConfigurationProperties);
            addWarningsForMisconfiguredTestSuites(unspecifiedTestSuiteRunMode, testSuites,
                    testSuitesSequential, testSuitesParallel);
            bulkRunMode = POSSIBLY_MIXED;
        } else {
            // Warn when file is misconfigured, relative path, file does not exist or is not a properties file
            log.info(format(DID_NOT_DETECT_RUN_CONFIGURATION_PROPERTIES_FILE, runConfigPath));
        }

        String testCaseReportLocation = getProperty(TEST_CASE_REPORT_LOCATION);
        if (StringUtils.isBlank(testCaseReportLocation)) {
            log.info("Test case report location property [" + TEST_CASE_REPORT_LOCATION +
                    "] is not defined. Report will be skipped.");
        }

        // Setting thread count for visibility in ParallelTestCaseExecutorService
        setProperty(SLANG_TEST_RUNNER_THREAD_COUNT, valueOf(threadCount));

        log.info(NEW_LINE + "------------------------------------------------------------");
        log.info("Building project: " + projectPath);
        log.info("Content root is at: " + contentPath);
        log.info("Test root is at: " + testsPath);
        log.info("Active test suites are: " + getListForPrint(testSuites));
        log.info("Parallel run mode is configured for test suites: " + getListForPrint(testSuitesParallel));
        log.info("Sequential run mode is configured for test suites: " + getListForPrint(testSuitesSequential));
        log.info("Default run mode '" + unspecifiedTestSuiteRunMode.name().toLowerCase() +
                "' is configured for test suites: " + getListForPrint(
                getDefaultRunModeTestSuites(testSuites, testSuitesParallel, testSuitesSequential)));

        log.info("Bulk run mode for tests: " + getBulkModeForPrint(bulkRunMode));

        log.info("Print coverage data: " + valueOf(shouldPrintCoverageData));
        log.info("Validate description: " + valueOf(shouldValidateDescription));
        log.info("Thread count: " + threadCount);
        log.info("Test case timeout in minutes: " + (isEmpty(testCaseTimeout) ?
                valueOf(MAX_TIME_PER_TESTCASE_IN_MINUTES) : testCaseTimeout));

        log.info(NEW_LINE + "Loading...");
        ClassPathXmlApplicationContext context = new ClassPathXmlApplicationContext("spring/testRunnerContext.xml");
        context.registerShutdownHook();
        SlangBuilder slangBuilder = context.getBean(SlangBuilder.class);
        LoggingService loggingService = context.getBean(LoggingServiceImpl.class);
        Slang slang = context.getBean(Slang.class);

        try {
            updateTestSuiteMappings(context.getBean(TestRunInfoService.class), testSuitesParallel,
                    testSuitesSequential, testSuites, unspecifiedTestSuiteRunMode);

            registerEventHandlers(slang);

            List<RuntimeException> exceptions = new ArrayList<>();
            SlangBuildResults buildResults = slangBuilder.buildSlangContent(projectPath, contentPath, testsPath,
                    testSuites, shouldValidateDescription, bulkRunMode, buildMode, changedFiles);
            exceptions.addAll(buildResults.getCompilationExceptions());
            if (exceptions.size() > 0) {
                logErrors(exceptions, projectPath, loggingService);
            }
            IRunTestResults runTestsResults = buildResults.getRunTestsResults();
            Map<String, TestRun> skippedTests = runTestsResults.getSkippedTests();

            if (isNotEmpty(skippedTests)) {
                printSkippedTestsSummary(skippedTests, loggingService);
            }
            printPassedTests(runTestsResults, loggingService);
            if (shouldPrintCoverageData) {
                printTestCoverageData(runTestsResults, loggingService);
            }

            if (isNotEmpty(runTestsResults.getFailedTests())) {
                printBuildFailureSummary(projectPath, runTestsResults, loggingService);
            } else {
                printBuildSuccessSummary(contentPath, buildResults, runTestsResults, loggingService);
            }
            loggingService.waitForAllLogTasksToFinish();

            generateTestCaseReport(
                    context.getBean(SlangTestCaseRunReportGeneratorService.class),
                    runTestsResults,
                    testCaseReportLocation
            );
            System.exit(isNotEmpty(runTestsResults.getFailedTests()) ? 1 : 0);
        } catch (Throwable e) {
            logErrorsPrefix(loggingService);
            loggingService.logEvent(Level.ERROR, "Exception: " + e.getMessage());
            logErrorsSuffix(projectPath, loggingService);
            System.exit(1);
        }
    }

    private static void configureLog4j() {
        String configFilename = System.getProperty(LOG4J_CONFIGURATION_KEY);
        String errorMessage = null;
        try {
            if (StringUtils.isEmpty(configFilename)) {
                errorMessage = "Config file name is empty.";
            } else {
                String normalizedPath = FilenameUtils.normalize(configFilename);
                if (normalizedPath == null) {
                    errorMessage = "Normalized config file path is null.";
                } else if (!isUnderAppHome(normalizedPath, getNormalizedApplicationHome())) {
                    errorMessage = "Normalized config file path[" + normalizedPath + "] " +
                            "is not under application home directory";
                } else {
                    if (!isRegularFile(get(normalizedPath), NOFOLLOW_LINKS)) {
                        errorMessage = "Normalized config file path[" + normalizedPath + "]" +
                                " does not lead to a regular file.";
                    } else {
                        Properties log4jProperties = new Properties();
                        try (InputStream log4jInputStream = SlangBuildMain.class.getResourceAsStream(normalizedPath)) {
                            log4jProperties.load(log4jInputStream);
                            PropertyConfigurator.configure(log4jProperties);
                        }
                    }
                }
            }
        } catch (IOException | RuntimeException ex) {
            errorMessage = ex.getMessage();
        }

        if (StringUtils.isNotEmpty(errorMessage)) {
            System.out.printf("%s%n\t%s%n\t%s%n", LOG4J_ERROR_PREFIX, errorMessage, LOG4J_ERROR_SUFFIX);
        }
    }

    private static boolean isUnderAppHome(String normalizedFilePath, String normalizedAppHome) {
        return normalizedFilePath.startsWith(normalizedAppHome);
    }

    private static String getNormalizedApplicationHome() {
        String appHome = System.getProperty(APP_HOME_KEY);
        if (StringUtils.isEmpty(appHome)) {
            throw new RuntimeException(APP_HOME_KEY + " system property is empty");
        }
        String normalizedAppHome = FilenameUtils.normalize(appHome);
        if (normalizedAppHome == null) {
            throw new RuntimeException("Normalized app home path is null.");
        }
        return normalizedAppHome;
    }

    private static void printBuildModeInfo(BuildMode buildMode) {
        log.info("Build mode set to: " + buildMode);
    }

    private static Set<String> readChangedFilesFromSource(String filePath) throws IOException {
        String normalizedPath = FilenameUtils.normalize(filePath);
        if (!get(normalizedPath).isAbsolute()) {
            throw new RuntimeException(MESSAGE_ERROR_LOADING_SMART_MODE_CONFIG_FILE +
                    " Path[" + normalizedPath + "] is not an absolute path.");
        }
        if (!isRegularFile(get(normalizedPath), NOFOLLOW_LINKS)) {
            throw new RuntimeException(MESSAGE_ERROR_LOADING_SMART_MODE_CONFIG_FILE +
                    " Path[" + normalizedPath + "] does not lead to a regular file.");
        }
        return ArgumentProcessorUtils.loadChangedItems(normalizedPath);
    }

    private static void addErrorIfSameTestSuiteIsInBothParallelOrSequential(List<String> testSuitesParallel,
                                                                            List<String> testSuitesSequential) {
        final List<String> intersection = ListUtils.intersection(testSuitesParallel, testSuitesSequential);
        if (!intersection.isEmpty()) {
            final String message = String.format(MESSAGE_BOTH_PARALLEL_AND_SEQUENTIAL_EXECUTION,
                    getListForPrint(intersection));
            log.error(message);
            throw new IllegalStateException();
        }
    }

    /**
     * @param bulkRunMode the mode to configure the run of all tests
     * @return String friendly version for print to the log or console
     */
    private static String getBulkModeForPrint(final BulkRunMode bulkRunMode) {
        return bulkRunMode.toString().replace("_", " ").toLowerCase(ENGLISH);
    }

    /**
     * @param testRunInfoService          the service responsible for managing run information
     * @param parallelSuites              the suite names to be executed in parallel
     * @param sequentialSuites            the suite names to be executed in sequential manner
     * @param activeSuites                the suite names that are active
     * @param unspecifiedTestSuiteRunMode the default run mode for suites that don't explicitly mention a run mode.
     */
    private static void updateTestSuiteMappings(final TestRunInfoService testRunInfoService,
                                                final List<String> parallelSuites,
                                                final List<String> sequentialSuites,
                                                final List<String> activeSuites,
                                                final TestCaseRunMode unspecifiedTestSuiteRunMode) {
        testRunInfoService.setRunModeForTestSuites(parallelSuites, TestCaseRunMode.PARALLEL);
        testRunInfoService.setRunModeForTestSuites(sequentialSuites, TestCaseRunMode.SEQUENTIAL);
        testRunInfoService.setRunModeForTestSuites(
                getDefaultRunModeTestSuites(activeSuites, parallelSuites, sequentialSuites),
                unspecifiedTestSuiteRunMode);
    }

    /**
     * @param activeSuites     the suite names that are active
     * @param parallelSuites   the suite names to be executed in parallel
     * @param sequentialSuites the suite names to be executed in sequential manner
     * @return
     */
    private static List<String> getDefaultRunModeTestSuites(final List<String> activeSuites,
                                                            final List<String> parallelSuites,
                                                            final List<String> sequentialSuites) {
        return removeAll(new ArrayList<>(activeSuites), union(parallelSuites, sequentialSuites));
    }

    /**
     * @param unspecifiedTestSuiteRunMode the default run mode for suites that don't explicitly mention a run mode.
     * @param activeSuites                the suite names that are active
     * @param sequentialSuites            the suite names to be executed in sequential manner
     * @param parallelSuites              the suite names to be executed in parallel
     */
    private static void addWarningsForMisconfiguredTestSuites(final TestCaseRunMode unspecifiedTestSuiteRunMode,
                                                              final List<String> activeSuites,
                                                              final List<String> sequentialSuites,
                                                              final List<String> parallelSuites) {
        addWarningForSubsetOfRules(activeSuites, sequentialSuites, TEST_SUITES_SEQUENTIAL);
        addWarningForSubsetOfRules(activeSuites, parallelSuites, TEST_SUITES_PARALLEL);
        addInformativeNoteForUnspecifiedRules(unspecifiedTestSuiteRunMode, activeSuites, sequentialSuites,
                parallelSuites);
    }

    /**
     * Displays an informative message in case there is at least one test suite left for default run mode.
     *
     * @param unspecifiedTestSuiteRunMode the default run mode for suites that don't explicitly mention a run mode.
     * @param activeSuites                the suite names that are active
     * @param sequentialSuites            the suite names to be executed in sequential manner
     * @param parallelSuites              the suite names to be executed in parallel
     */
    private static void addInformativeNoteForUnspecifiedRules(final TestCaseRunMode unspecifiedTestSuiteRunMode,
                                                              final List<String> activeSuites,
                                                              final List<String> sequentialSuites,
                                                              final List<String> parallelSuites) {
        List<String> union = union(sequentialSuites, parallelSuites);
        if (!union.containsAll(activeSuites)) {
            List<String> copy = new ArrayList<>(activeSuites);
            copy.removeAll(union);
            log.info(format(MESSAGE_TEST_SUITES_WITH_UNSPECIFIED_MAPPING, getListForPrint(copy),
                    unspecifiedTestSuiteRunMode.name()));
        }
    }

    /**
     * Displays a warning message for test suites that have rules defined for sequential or parallel execution
     * but are not in active test suites.
     *
     * @param testSuites          suite names contained in 'container' suites
     * @param testSuitesContained suite names contained in 'contained' suites
     * @param key                 run configuration property key
     */
    private static void addWarningForSubsetOfRules(List<String> testSuites, List<String> testSuitesContained,
                                                   String key) {
        List<String> intersectWithContained = ListUtils.intersection(testSuites, testSuitesContained);
        if (intersectWithContained.size() != testSuitesContained.size()) {
            List<String> notScheduledForRun = new ArrayList<>(testSuitesContained);
            notScheduledForRun.removeAll(intersectWithContained);
            log.warn(format(MESSAGE_NOT_SCHEDULED_FOR_RUN_RULES, getListForPrint(notScheduledForRun), key));
        }
    }

    /**
     * Returns the names of the suites from the run configuration java.util.Properties object at a certain key.
     *
     * @param runConfigurationProperties
     * @param key
     * @return
     */
    private static List<String> getTestSuitesForKey(Properties runConfigurationProperties, String key) {
        final String valueList = runConfigurationProperties.getProperty(key);
        return ArgumentProcessorUtils.parseTestSuitesToList(valueList);
    }

    private static void logErrors(List<RuntimeException> exceptions, String projectPath,
                                  final LoggingService loggingService) {
        logErrorsPrefix(loggingService);
        for (RuntimeException runtimeException : exceptions) {
            loggingService.logEvent(Level.ERROR, "Exception: " + runtimeException.getMessage());
        }
        logErrorsSuffix(projectPath, loggingService);
        System.exit(1);
    }

    private static void logErrorsSuffix(String projectPath, final LoggingService loggingService) {
        loggingService.logEvent(Level.ERROR, "FAILURE: Validation of slang files for project: \"" +
                projectPath + "\" failed.");
        loggingService.logEvent(Level.ERROR, "------------------------------------------------------------");
        loggingService.logEvent(Level.ERROR, "");
    }

    private static void logErrorsPrefix(final LoggingService loggingService) {
        loggingService.logEvent(Level.ERROR, "");
        loggingService.logEvent(Level.ERROR, "------------------------------------------------------------");
    }

    private static void generateTestCaseReport(
            SlangTestCaseRunReportGeneratorService reportGeneratorService,
            IRunTestResults runTestsResults,
            String testCaseReportLocation) throws IOException {
        if (StringUtils.isNotBlank(testCaseReportLocation)) {
            Path reportDirectoryPath = get(testCaseReportLocation);
            if (!exists(reportDirectoryPath)) {
                createDirectories(reportDirectoryPath);
            }
            reportGeneratorService.generateReport(runTestsResults, reportDirectoryPath.toString());
        }
    }

    @SuppressWarnings("Duplicates")
    private static void loadUserProperties() {
        try {
            UserConfigurationService userConfigurationService = new UserConfigurationServiceImpl();
            userConfigurationService.loadUserProperties();
        } catch (Exception ex) {
            System.out.println("Error occurred while loading user configuration: " + ex.getMessage());
            ex.printStackTrace();
        }
    }

    private static void parseArgs(String[] args, ApplicationArgs appArgs) {
        try {
            JCommander commander = new JCommander(appArgs, args);
            if (appArgs.isHelp()) {
                commander.usage();
                System.exit(0);
            }
        } catch (ParameterException e) {
            System.out.println(e.getMessage());
            System.out.println("You can use '--help' for usage");
            System.exit(1);
        }
    }

    private static List<String> parseTestSuites(ApplicationArgs appArgs) {
        final List<String> testSuitesArg = ListUtils.defaultIfNull(appArgs.getTestSuites(), new ArrayList<String>());
        return ArgumentProcessorUtils.parseTestSuitesToList(testSuitesArg);
    }

    private static String parseTestTimeout(ApplicationArgs appArgs) {
        Map<String, String> dynamicArgs = appArgs.getDynamicParams();
        return dynamicArgs.get(TEST_CASE_TIMEOUT_IN_MINUTES_KEY);
    }

    private static int parseThreadCountArg(ApplicationArgs appArgs, boolean isParallel) {
        if (!isParallel) {
            return 1;
        } else {
            int defaultThreadCount = Runtime.getRuntime().availableProcessors();
            String threadCountErrorMessage = format("Thread count is misconfigured. The thread count value must be a " +
                    "positive integer less than or equal to %d. Using %d threads.", MAX_THREADS_TEST_RUNNER,
                    defaultThreadCount);
            try {
                String stringThreadCount = appArgs.getThreadCount();
                if (stringThreadCount != null) {
                    int threadCount = parseInt(stringThreadCount);
                    if ((threadCount > 0) && (threadCount <= MAX_THREADS_TEST_RUNNER)) {
                        return threadCount;
                    } else {
                        log.warn(threadCountErrorMessage);
                    }
                }
            } catch (NumberFormatException nfEx) {
                log.warn(threadCountErrorMessage);
            }
            return defaultThreadCount;
        }
    }

    private static void printBuildSuccessSummary(String contentPath, SlangBuildResults buildResults,
                                                 IRunTestResults runTestsResults,
                                                 final LoggingService loggingService) {
        loggingService.logEvent(Level.INFO, "");
        loggingService.logEvent(Level.INFO, "------------------------------------------------------------");
        loggingService.logEvent(Level.INFO, "BUILD SUCCESS");
        loggingService.logEvent(Level.INFO, "------------------------------------------------------------");
        loggingService.logEvent(Level.INFO, "Found " + buildResults.getNumberOfCompiledSources() +
                " slang files under directory: \"" + contentPath + "\" and all are valid.");
        printNumberOfPassedAndSkippedTests(runTestsResults, loggingService);
        loggingService.logEvent(Level.INFO, "");
    }

    private static void printNumberOfPassedAndSkippedTests(IRunTestResults runTestsResults,
                                                           final LoggingService loggingService) {
        loggingService.logEvent(Level.INFO, runTestsResults.getPassedTests().size() + " test cases passed");
        Map<String, TestRun> skippedTests = runTestsResults.getSkippedTests();
        if (skippedTests.size() > 0) {
            loggingService.logEvent(Level.INFO, skippedTests.size() + " test cases skipped");
        }
    }

    private static void printPassedTests(IRunTestResults runTestsResults, final LoggingService loggingService) {
        if (runTestsResults.getPassedTests().size() > 0) {
            loggingService.logEvent(Level.INFO, "------------------------------------------------------------");
            loggingService.logEvent(Level.INFO, "Following " + runTestsResults.getPassedTests().size() +
                    " test cases passed:");
            for (Map.Entry<String, TestRun> passedTest : runTestsResults.getPassedTests().entrySet()) {
                String testCaseReference = SlangTestCase.generateTestCaseReference(passedTest.getValue().getTestCase());
                loggingService.logEvent(Level.INFO, "- " + testCaseReference.replaceAll("\n", "\n\t"));
            }
        }
    }

    private static void printBuildFailureSummary(String projectPath, IRunTestResults runTestsResults,
                                                 final LoggingService loggingService) {
        printNumberOfPassedAndSkippedTests(runTestsResults, loggingService);
        final Map<String, TestRun> failedTests = runTestsResults.getFailedTests();
        logErrorsPrefix(loggingService);
        loggingService.logEvent(Level.ERROR, "BUILD FAILURE");
        loggingService.logEvent(Level.ERROR, "------------------------------------------------------------");
        loggingService.logEvent(Level.ERROR, "CloudSlang build for repository: \"" + projectPath +
                "\" failed due to failed tests.");
        loggingService.logEvent(Level.ERROR, "Following " + failedTests.size() + " tests failed:");
        for (Map.Entry<String, TestRun> failedTest : failedTests.entrySet()) {
            String failureMessage = failedTest.getValue().getMessage();
            loggingService.logEvent(Level.ERROR, "- " + failureMessage.replaceAll("\n", "\n\t"));
        }
        loggingService.logEvent(Level.ERROR, "");
    }

    private static void printSkippedTestsSummary(Map<String, TestRun> skippedTests,
                                                 final LoggingService loggingService) {
        loggingService.logEvent(Level.INFO, "");
        loggingService.logEvent(Level.INFO, "------------------------------------------------------------");
        loggingService.logEvent(Level.INFO, "Following " + skippedTests.size() + " tests were skipped:");
        for (Map.Entry<String, TestRun> skippedTest : skippedTests.entrySet()) {
            String message = skippedTest.getValue().getMessage();
            loggingService.logEvent(Level.INFO, "- " + message.replaceAll("\n", "\n\t"));
        }
    }

    private static void printTestCoverageData(IRunTestResults runTestsResults, final LoggingService loggingService) {
        printCoveredExecutables(runTestsResults.getCoveredExecutables(), loggingService);
        printUncoveredExecutables(runTestsResults.getUncoveredExecutables(), loggingService);

        int coveredExecutablesSize = runTestsResults.getCoveredExecutables().size();
        int uncoveredExecutablesSize = runTestsResults.getUncoveredExecutables().size();
        int totalNumberOfExecutables = coveredExecutablesSize + uncoveredExecutablesSize;
        double coveragePercentage = (double) coveredExecutablesSize / (double) totalNumberOfExecutables * 100;
        loggingService.logEvent(Level.INFO, "");
        loggingService.logEvent(Level.INFO, "------------------------------------------------------------");
        loggingService.logEvent(Level.INFO, ((int) coveragePercentage) + "% of the content has tests");
        loggingService.logEvent(Level.INFO, "Out of " + totalNumberOfExecutables + " executables, " +
                coveredExecutablesSize + " executables have tests");
    }

    private static void printCoveredExecutables(Set<String> coveredExecutables,
                                                final LoggingService loggingService) {
        loggingService.logEvent(Level.INFO, "");
        loggingService.logEvent(Level.INFO, "------------------------------------------------------------");
        loggingService.logEvent(Level.INFO, "Following " + coveredExecutables.size() + " executables have tests:");
        for (String executable : coveredExecutables) {
            loggingService.logEvent(Level.INFO, "- " + executable);
        }
    }

    private static void printUncoveredExecutables(Set<String> uncoveredExecutables,
                                                  final LoggingService loggingService) {
        loggingService.logEvent(Level.INFO, "");
        loggingService.logEvent(Level.INFO, "------------------------------------------------------------");
        loggingService.logEvent(Level.INFO, "Following " + uncoveredExecutables.size() +
                " executables do not have tests:");
        for (String executable : uncoveredExecutables) {
            loggingService.logEvent(Level.INFO, "- " + executable);
        }
    }

    private static String parseProjectPathArg(ApplicationArgs args) {
        String repositoryPath;

        if (args.getProjectRoot() != null) {
            repositoryPath = args.getProjectRoot();
            // if only one parameter was passed, we treat it as the project root
            // i.e. './cslang-builder some/path/to/project'
        } else if (args.getParameters().size() == 1) {
            repositoryPath = args.getParameters().get(0);
        } else {
            repositoryPath = System.getProperty("user.dir");
        }

        repositoryPath = FilenameUtils.separatorsToSystem(repositoryPath);

        Validate.isTrue(new File(repositoryPath).isDirectory(),
                "Directory path argument \'" + repositoryPath + "\' does not lead to a directory");

        return repositoryPath;
    }

    private static void registerEventHandlers(Slang slang) {
        slang.subscribeOnAllEvents(new ScoreEventListener() {
            @Override
            public synchronized void onEvent(ScoreEvent event) {
                logEvent(event);
            }
        });
    }

    private static void logEvent(ScoreEvent event) {
        log.debug(("Event received: " + event.getEventType() + " Data is: " + event.getData()));
    }
}
cloudslang-content-verifier/src/main/java/io/cloudslang/lang/tools/build/SlangBuildMain.java
/*******************************************************************************
 * (c) Copyright 2016 Hewlett-Packard Development Company, L.P.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Apache License v2.0 which accompany this distribution.
 *
 * The Apache License is available at
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 *******************************************************************************/
package io.cloudslang.lang.tools.build;

import com.beust.jcommander.JCommander;
import com.beust.jcommander.ParameterException;
import io.cloudslang.lang.api.Slang;
import io.cloudslang.lang.commons.services.api.UserConfigurationService;
import io.cloudslang.lang.commons.services.impl.UserConfigurationServiceImpl;
import io.cloudslang.lang.logging.LoggingService;
import io.cloudslang.lang.logging.LoggingServiceImpl;
import io.cloudslang.lang.tools.build.commands.ApplicationArgs;
import io.cloudslang.lang.tools.build.tester.IRunTestResults;
import io.cloudslang.lang.tools.build.tester.TestRun;
import io.cloudslang.lang.tools.build.tester.parallel.report.SlangTestCaseRunReportGeneratorService;
import io.cloudslang.lang.tools.build.tester.parse.SlangTestCase;
import io.cloudslang.lang.tools.build.tester.runconfiguration.TestRunInfoService;
import io.cloudslang.score.events.ScoreEvent;
import io.cloudslang.score.events.ScoreEventListener;

import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;

import org.apache.commons.collections4.ListUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.lang.Validate;
import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;
import org.springframework.context.support.ClassPathXmlApplicationContext;

import static io.cloudslang.lang.tools.build.ArgumentProcessorUtils.getBooleanFromPropertiesWithDefault;
import static io.cloudslang.lang.tools.build.ArgumentProcessorUtils.getEnumInstanceFromPropertiesWithDefault;
import static io.cloudslang.lang.tools.build.ArgumentProcessorUtils.getIntFromPropertiesWithDefaultAndRange;
import static io.cloudslang.lang.tools.build.ArgumentProcessorUtils.getListForPrint;
import static io.cloudslang.lang.tools.build.SlangBuildMain.BulkRunMode.ALL_PARALLEL;
import static io.cloudslang.lang.tools.build.SlangBuildMain.BulkRunMode.ALL_SEQUENTIAL;
import static io.cloudslang.lang.tools.build.SlangBuildMain.BulkRunMode.POSSIBLY_MIXED;
import static io.cloudslang.lang.tools.build.SlangBuildMain.RunConfigurationProperties.TEST_COVERAGE;
import static io.cloudslang.lang.tools.build.SlangBuildMain.RunConfigurationProperties.TEST_PARALLEL_THREAD_COUNT;
import static io.cloudslang.lang.tools.build.SlangBuildMain.RunConfigurationProperties.TEST_SUITES_PARALLEL;
import static io.cloudslang.lang.tools.build.SlangBuildMain.RunConfigurationProperties.TEST_SUITES_RUN_UNSPECIFIED;
import static io.cloudslang.lang.tools.build.SlangBuildMain.RunConfigurationProperties.TEST_SUITES_SEQUENTIAL;
import static io.cloudslang.lang.tools.build.SlangBuildMain.RunConfigurationProperties.TEST_SUITES_TO_RUN;
import static io.cloudslang.lang.tools.build.tester.SlangTestRunner.MAX_TIME_PER_TESTCASE_IN_MINUTES;
import static io.cloudslang.lang.tools.build.tester.SlangTestRunner.TEST_CASE_TIMEOUT_IN_MINUTES_KEY;
import static io.cloudslang.lang.tools.build.tester.parallel.services.ParallelTestCaseExecutorService.SLANG_TEST_RUNNER_THREAD_COUNT;
import static java.lang.Integer.parseInt;
import static java.lang.String.format;
import static java.lang.String.valueOf;
import static java.lang.System.getProperty;
import static java.lang.System.setProperty;
import static java.nio.file.Files.createDirectories;
import static java.nio.file.Files.exists;
import static java.nio.file.Files.isRegularFile;
import static java.nio.file.LinkOption.NOFOLLOW_LINKS;
import static java.nio.file.Paths.get;
import static java.util.Locale.ENGLISH;
import static org.apache.commons.collections4.ListUtils.removeAll;
import static org.apache.commons.collections4.ListUtils.union;
import static org.apache.commons.collections4.MapUtils.isNotEmpty;
import static org.apache.commons.lang3.StringUtils.defaultIfEmpty;
import static org.apache.commons.lang3.StringUtils.equalsIgnoreCase;
import static org.apache.commons.lang3.StringUtils.isEmpty;

public class SlangBuildMain {

    public static final String DEFAULT_TESTS = "default";

    private static final String TEST_CASE_REPORT_LOCATION = "cloudslang.test.case.report.location";
    private static final String CONTENT_DIR = File.separator + "content";
    private static final String TEST_DIR = File.separator + "test";

    private static final Logger log = Logger.getLogger(SlangBuildMain.class);
    private static final int MAX_THREADS_TEST_RUNNER = 32;

    private static final String MESSAGE_NOT_SCHEDULED_FOR_RUN_RULES = "Rules '%s' defined in '%s' key "
            + "are not scheduled for run.";
    private static final String MESSAGE_TEST_SUITES_WITH_UNSPECIFIED_MAPPING = "Test suites '%s' have "
            + "unspecified mapping. They will run in '%s' mode.";
    private static final String PROPERTIES_FILE_EXTENSION = "properties";
    private static final String DID_NOT_DETECT_RUN_CONFIGURATION_PROPERTIES_FILE = "Did not detect run "
            + "configuration properties file at path '%s'. "
            + "Check that the path you are using is an absolute path. "
            + "Check that the path separator is '\\\\' for Windows, or '/' for Linux.";
    private static final String NEW_LINE = System.lineSeparator();
    private static final String MESSAGE_BOTH_PARALLEL_AND_SEQUENTIAL_EXECUTION = "The '%s' suites are configured for "
            + "both parallel and sequential execution."
            + " Each test suite must have only one execution mode (parallel or sequential).";
    private static final String MESSAGE_ERROR_LOADING_SMART_MODE_CONFIG_FILE = "Error loading smart "
            + "mode configuration file:";

    private static final String LOG4J_CONFIGURATION_KEY = "log4j.configuration";
    private static final String LOG4J_ERROR_PREFIX = "log4j: error loading log4j properties file.";
    private static final String LOG4J_ERROR_SUFFIX = "Using default configuration.";

    // This class is a used in the interaction with the run configuration property file
    static class RunConfigurationProperties {
        static final String TEST_COVERAGE = "test.coverage";
        static final String TEST_PARALLEL_THREAD_COUNT = "test.parallel.thread.count";
        static final String TEST_SUITES_TO_RUN = "test.suites.active";
        static final String TEST_SUITES_PARALLEL = "test.suites.parallel";
        static final String TEST_SUITES_SEQUENTIAL = "test.suites.sequential";
        static final String TEST_SUITES_RUN_UNSPECIFIED = "test.suites.run.mode.unspecified";
    }

    // The possible ways to execute a test case
    public enum TestCaseRunMode {
        PARALLEL,
        SEQUENTIAL
    }

    // The typical configuration on how to configure the run of all tests as a bulk
    public enum BulkRunMode {
        ALL_PARALLEL,
        ALL_SEQUENTIAL,
        POSSIBLY_MIXED
    }

    // The possible ways to run tests: everything or the tests affected by current changelist
    public enum BuildMode {
        BASIC,
        CHANGED
    }

    public static void main(String[] args) {
        loadUserProperties();
        configureLog4j();

        ApplicationArgs appArgs = new ApplicationArgs();
        parseArgs(args, appArgs);
        String projectPath = parseProjectPathArg(appArgs);
        final String contentPath = defaultIfEmpty(appArgs.getContentRoot(), projectPath + CONTENT_DIR);
        final String testsPath = defaultIfEmpty(appArgs.getTestRoot(), projectPath + TEST_DIR);
        List<String> testSuites = parseTestSuites(appArgs);
        boolean shouldPrintCoverageData = appArgs.shouldOutputCoverage();
        boolean runTestsInParallel = appArgs.isParallel();
        int threadCount = parseThreadCountArg(appArgs, runTestsInParallel);
        String testCaseTimeout = parseTestTimeout(appArgs);
        setProperty(TEST_CASE_TIMEOUT_IN_MINUTES_KEY, valueOf(testCaseTimeout));
        final boolean shouldValidateDescription = appArgs.shouldValidateDescription();
        String runConfigPath = FilenameUtils.normalize(appArgs.getRunConfigPath());

        BuildMode buildMode = null;
        Set<String> changedFiles = null;
        try {
            String smartModePath = appArgs.getChangesOnlyConfigPath();
            if (StringUtils.isEmpty(smartModePath)) {
                buildMode = BuildMode.BASIC;
                changedFiles = new HashSet<>();
                printBuildModeInfo(buildMode);
            } else {
                buildMode = BuildMode.CHANGED;
                changedFiles = readChangedFilesFromSource(smartModePath);
                printBuildModeInfo(buildMode);
            }
        } catch (Exception ex) {
            log.error("Exception: " + ex.getMessage());
            System.exit(1);
        }

        // Override with the values from the file if configured
        List<String> testSuitesParallel = new ArrayList<>();
        List<String> testSuitesSequential = new ArrayList<>();
        BulkRunMode bulkRunMode = runTestsInParallel ? ALL_PARALLEL : ALL_SEQUENTIAL;
        TestCaseRunMode unspecifiedTestSuiteRunMode = runTestsInParallel ?
                TestCaseRunMode.PARALLEL : TestCaseRunMode.SEQUENTIAL;
        if (get(runConfigPath).isAbsolute() && isRegularFile(get(runConfigPath), NOFOLLOW_LINKS) &&
                equalsIgnoreCase(PROPERTIES_FILE_EXTENSION, FilenameUtils.getExtension(runConfigPath))) {
            Properties runConfigurationProperties = ArgumentProcessorUtils.getPropertiesFromFile(runConfigPath);
            shouldPrintCoverageData = getBooleanFromPropertiesWithDefault(TEST_COVERAGE, shouldPrintCoverageData,
                    runConfigurationProperties);
            threadCount = getIntFromPropertiesWithDefaultAndRange(TEST_PARALLEL_THREAD_COUNT,
                    Runtime.getRuntime().availableProcessors(), runConfigurationProperties, 1,
                    MAX_THREADS_TEST_RUNNER + 1);
            testSuites = getTestSuitesForKey(runConfigurationProperties, TEST_SUITES_TO_RUN);
            testSuitesParallel = getTestSuitesForKey(runConfigurationProperties, TEST_SUITES_PARALLEL);
            testSuitesSequential = getTestSuitesForKey(runConfigurationProperties, TEST_SUITES_SEQUENTIAL);
            addErrorIfSameTestSuiteIsInBothParallelOrSequential(testSuitesParallel, testSuitesSequential);
            unspecifiedTestSuiteRunMode = getEnumInstanceFromPropertiesWithDefault(TEST_SUITES_RUN_UNSPECIFIED,
                    unspecifiedTestSuiteRunMode, runConfigurationProperties);
            addWarningsForMisconfiguredTestSuites(unspecifiedTestSuiteRunMode, testSuites,
                    testSuitesSequential, testSuitesParallel);
            bulkRunMode = POSSIBLY_MIXED;
        } else {
            // Warn when file is misconfigured, relative path, file does not exist or is not a properties file
            log.info(format(DID_NOT_DETECT_RUN_CONFIGURATION_PROPERTIES_FILE, runConfigPath));
        }

        String testCaseReportLocation = getProperty(TEST_CASE_REPORT_LOCATION);
        if (StringUtils.isBlank(testCaseReportLocation)) {
            log.info("Test case report location property [" + TEST_CASE_REPORT_LOCATION +
                    "] is not defined. Report will be skipped.");
        }

        // Setting thread count for visibility in ParallelTestCaseExecutorService
        setProperty(SLANG_TEST_RUNNER_THREAD_COUNT, valueOf(threadCount));

        log.info(NEW_LINE + "------------------------------------------------------------");
        log.info("Building project: " + projectPath);
        log.info("Content root is at: " + contentPath);
        log.info("Test root is at: " + testsPath);
        log.info("Active test suites are: " + getListForPrint(testSuites));
        log.info("Parallel run mode is configured for test suites: " + getListForPrint(testSuitesParallel));
        log.info("Sequential run mode is configured for test suites: " + getListForPrint(testSuitesSequential));
        log.info("Default run mode '" + unspecifiedTestSuiteRunMode.name().toLowerCase() +
                "' is configured for test suites: " + getListForPrint(
                getDefaultRunModeTestSuites(testSuites, testSuitesParallel, testSuitesSequential)));

        log.info("Bulk run mode for tests: " + getBulkModeForPrint(bulkRunMode));

        log.info("Print coverage data: " + valueOf(shouldPrintCoverageData));
        log.info("Validate description: " + valueOf(shouldValidateDescription));
        log.info("Thread count: " + threadCount);
        log.info("Test case timeout in minutes: " + (isEmpty(testCaseTimeout) ?
                valueOf(MAX_TIME_PER_TESTCASE_IN_MINUTES) : testCaseTimeout));

        log.info(NEW_LINE + "Loading...");
        ClassPathXmlApplicationContext context = new ClassPathXmlApplicationContext("spring/testRunnerContext.xml");
        context.registerShutdownHook();
        SlangBuilder slangBuilder = context.getBean(SlangBuilder.class);
        LoggingService loggingService = context.getBean(LoggingServiceImpl.class);
        Slang slang = context.getBean(Slang.class);

        try {
            updateTestSuiteMappings(context.getBean(TestRunInfoService.class), testSuitesParallel,
                    testSuitesSequential, testSuites, unspecifiedTestSuiteRunMode);

            registerEventHandlers(slang);

            List<RuntimeException> exceptions = new ArrayList<>();
            SlangBuildResults buildResults = slangBuilder.buildSlangContent(projectPath, contentPath, testsPath,
                    testSuites, shouldValidateDescription, bulkRunMode, buildMode, changedFiles);
            exceptions.addAll(buildResults.getCompilationExceptions());
            if (exceptions.size() > 0) {
                logErrors(exceptions, projectPath, loggingService);
            }
            IRunTestResults runTestsResults = buildResults.getRunTestsResults();
            Map<String, TestRun> skippedTests = runTestsResults.getSkippedTests();

            if (isNotEmpty(skippedTests)) {
                printSkippedTestsSummary(skippedTests, loggingService);
            }
            printPassedTests(runTestsResults, loggingService);
            if (shouldPrintCoverageData) {
                printTestCoverageData(runTestsResults, loggingService);
            }

            if (isNotEmpty(runTestsResults.getFailedTests())) {
                printBuildFailureSummary(projectPath, runTestsResults, loggingService);
            } else {
                printBuildSuccessSummary(contentPath, buildResults, runTestsResults, loggingService);
            }
            loggingService.waitForAllLogTasksToFinish();

            generateTestCaseReport(
                    context.getBean(SlangTestCaseRunReportGeneratorService.class),
                    runTestsResults,
                    testCaseReportLocation
            );
            System.exit(isNotEmpty(runTestsResults.getFailedTests()) ? 1 : 0);
        } catch (Throwable e) {
            logErrorsPrefix(loggingService);
            loggingService.logEvent(Level.ERROR, "Exception: " + e.getMessage());
            logErrorsSuffix(projectPath, loggingService);
            System.exit(1);
        }
    }

    private static void configureLog4j() {
        String configFilename = System.getProperty(LOG4J_CONFIGURATION_KEY);
        String errorMessage = null;
        if (StringUtils.isEmpty(configFilename)) {
            errorMessage = "Config file name is empty.";
        } else {
            String normalizedPath = FilenameUtils.normalize(configFilename);
            if (!get(normalizedPath).isAbsolute()) {
                errorMessage = "Path[" + normalizedPath + "] is not an absolute path.";
            } else {
                if (!isRegularFile(get(normalizedPath), NOFOLLOW_LINKS)) {
                    errorMessage = "Path[" + normalizedPath + "] does not lead to a regular file.";
                } else {
                    try {
                        PropertyConfigurator.configure(configFilename);
                    } catch (RuntimeException rex) {
                        errorMessage = rex.getMessage();
                    }
                }
            }
        }

        if (StringUtils.isNotEmpty(errorMessage)) {
            System.out.printf("%s%n\t%s%n\t%s%n", LOG4J_ERROR_PREFIX, errorMessage, LOG4J_ERROR_SUFFIX);
        }
    }

    private static void printBuildModeInfo(BuildMode buildMode) {
        log.info("Build mode set to: " + buildMode);
    }

    private static Set<String> readChangedFilesFromSource(String filePath) throws IOException {
        String normalizedPath = FilenameUtils.normalize(filePath);
        if (!get(normalizedPath).isAbsolute()) {
            throw new RuntimeException(MESSAGE_ERROR_LOADING_SMART_MODE_CONFIG_FILE +
                    " Path[" + normalizedPath + "] is not an absolute path.");
        }
        if (!isRegularFile(get(normalizedPath), NOFOLLOW_LINKS)) {
            throw new RuntimeException(MESSAGE_ERROR_LOADING_SMART_MODE_CONFIG_FILE +
                    " Path[" + normalizedPath + "] does not lead to a regular file.");
        }
        return ArgumentProcessorUtils.loadChangedItems(normalizedPath);
    }

    private static void addErrorIfSameTestSuiteIsInBothParallelOrSequential(List<String> testSuitesParallel,
                                                                            List<String> testSuitesSequential) {
        final List<String> intersection = ListUtils.intersection(testSuitesParallel, testSuitesSequential);
        if (!intersection.isEmpty()) {
            final String message = String.format(MESSAGE_BOTH_PARALLEL_AND_SEQUENTIAL_EXECUTION,
                    getListForPrint(intersection));
            log.error(message);
            throw new IllegalStateException();
        }
    }

    /**
     * @param bulkRunMode the mode to configure the run of all tests
     * @return String friendly version for print to the log or console
     */
    private static String getBulkModeForPrint(final BulkRunMode bulkRunMode) {
        return bulkRunMode.toString().replace("_", " ").toLowerCase(ENGLISH);
    }

    /**
     * @param testRunInfoService          the service responsible for managing run information
     * @param parallelSuites              the suite names to be executed in parallel
     * @param sequentialSuites            the suite names to be executed in sequential manner
     * @param activeSuites                the suite names that are active
     * @param unspecifiedTestSuiteRunMode the default run mode for suites that don't explicitly mention a run mode.
     */
    private static void updateTestSuiteMappings(final TestRunInfoService testRunInfoService,
                                                final List<String> parallelSuites,
                                                final List<String> sequentialSuites,
                                                final List<String> activeSuites,
                                                final TestCaseRunMode unspecifiedTestSuiteRunMode) {
        testRunInfoService.setRunModeForTestSuites(parallelSuites, TestCaseRunMode.PARALLEL);
        testRunInfoService.setRunModeForTestSuites(sequentialSuites, TestCaseRunMode.SEQUENTIAL);
        testRunInfoService.setRunModeForTestSuites(
                getDefaultRunModeTestSuites(activeSuites, parallelSuites, sequentialSuites),
                unspecifiedTestSuiteRunMode);
    }

    /**
     * @param activeSuites     the suite names that are active
     * @param parallelSuites   the suite names to be executed in parallel
     * @param sequentialSuites the suite names to be executed in sequential manner
     * @return
     */
    private static List<String> getDefaultRunModeTestSuites(final List<String> activeSuites,
                                                            final List<String> parallelSuites,
                                                            final List<String> sequentialSuites) {
        return removeAll(new ArrayList<>(activeSuites), union(parallelSuites, sequentialSuites));
    }

    /**
     * @param unspecifiedTestSuiteRunMode the default run mode for suites that don't explicitly mention a run mode.
     * @param activeSuites                the suite names that are active
     * @param sequentialSuites            the suite names to be executed in sequential manner
     * @param parallelSuites              the suite names to be executed in parallel
     */
    private static void addWarningsForMisconfiguredTestSuites(final TestCaseRunMode unspecifiedTestSuiteRunMode,
                                                              final List<String> activeSuites,
                                                              final List<String> sequentialSuites,
                                                              final List<String> parallelSuites) {
        addWarningForSubsetOfRules(activeSuites, sequentialSuites, TEST_SUITES_SEQUENTIAL);
        addWarningForSubsetOfRules(activeSuites, parallelSuites, TEST_SUITES_PARALLEL);
        addInformativeNoteForUnspecifiedRules(unspecifiedTestSuiteRunMode, activeSuites, sequentialSuites,
                parallelSuites);
    }

    /**
     * Displays an informative message in case there is at least one test suite left for default run mode.
     *
     * @param unspecifiedTestSuiteRunMode the default run mode for suites that don't explicitly mention a run mode.
     * @param activeSuites                the suite names that are active
     * @param sequentialSuites            the suite names to be executed in sequential manner
     * @param parallelSuites              the suite names to be executed in parallel
     */
    private static void addInformativeNoteForUnspecifiedRules(final TestCaseRunMode unspecifiedTestSuiteRunMode,
                                                              final List<String> activeSuites,
                                                              final List<String> sequentialSuites,
                                                              final List<String> parallelSuites) {
        List<String> union = union(sequentialSuites, parallelSuites);
        if (!union.containsAll(activeSuites)) {
            List<String> copy = new ArrayList<>(activeSuites);
            copy.removeAll(union);
            log.info(format(MESSAGE_TEST_SUITES_WITH_UNSPECIFIED_MAPPING, getListForPrint(copy),
                    unspecifiedTestSuiteRunMode.name()));
        }
    }

    /**
     * Displays a warning message for test suites that have rules defined for sequential or parallel execution
     * but are not in active test suites.
     *
     * @param testSuites          suite names contained in 'container' suites
     * @param testSuitesContained suite names contained in 'contained' suites
     * @param key                 run configuration property key
     */
    private static void addWarningForSubsetOfRules(List<String> testSuites, List<String> testSuitesContained,
                                                   String key) {
        List<String> intersectWithContained = ListUtils.intersection(testSuites, testSuitesContained);
        if (intersectWithContained.size() != testSuitesContained.size()) {
            List<String> notScheduledForRun = new ArrayList<>(testSuitesContained);
            notScheduledForRun.removeAll(intersectWithContained);
            log.warn(format(MESSAGE_NOT_SCHEDULED_FOR_RUN_RULES, getListForPrint(notScheduledForRun), key));
        }
    }

    /**
     * Returns the names of the suites from the run configuration java.util.Properties object at a certain key.
     *
     * @param runConfigurationProperties
     * @param key
     * @return
     */
    private static List<String> getTestSuitesForKey(Properties runConfigurationProperties, String key) {
        final String valueList = runConfigurationProperties.getProperty(key);
        return ArgumentProcessorUtils.parseTestSuitesToList(valueList);
    }

    private static void logErrors(List<RuntimeException> exceptions, String projectPath,
                                  final LoggingService loggingService) {
        logErrorsPrefix(loggingService);
        for (RuntimeException runtimeException : exceptions) {
            loggingService.logEvent(Level.ERROR, "Exception: " + runtimeException.getMessage());
        }
        logErrorsSuffix(projectPath, loggingService);
        System.exit(1);
    }

    private static void logErrorsSuffix(String projectPath, final LoggingService loggingService) {
        loggingService.logEvent(Level.ERROR, "FAILURE: Validation of slang files for project: \"" +
                projectPath + "\" failed.");
        loggingService.logEvent(Level.ERROR, "------------------------------------------------------------");
        loggingService.logEvent(Level.ERROR, "");
    }

    private static void logErrorsPrefix(final LoggingService loggingService) {
        loggingService.logEvent(Level.ERROR, "");
        loggingService.logEvent(Level.ERROR, "------------------------------------------------------------");
    }

    private static void generateTestCaseReport(
            SlangTestCaseRunReportGeneratorService reportGeneratorService,
            IRunTestResults runTestsResults,
            String testCaseReportLocation) throws IOException {
        if (StringUtils.isNotBlank(testCaseReportLocation)) {
            Path reportDirectoryPath = get(testCaseReportLocation);
            if (!exists(reportDirectoryPath)) {
                createDirectories(reportDirectoryPath);
            }
            reportGeneratorService.generateReport(runTestsResults, reportDirectoryPath.toString());
        }
    }

    @SuppressWarnings("Duplicates")
    private static void loadUserProperties() {
        try {
            UserConfigurationService userConfigurationService = new UserConfigurationServiceImpl();
            userConfigurationService.loadUserProperties();
        } catch (Exception ex) {
            System.out.println("Error occurred while loading user configuration: " + ex.getMessage());
            ex.printStackTrace();
        }
    }

    private static void parseArgs(String[] args, ApplicationArgs appArgs) {
        try {
            JCommander commander = new JCommander(appArgs, args);
            if (appArgs.isHelp()) {
                commander.usage();
                System.exit(0);
            }
        } catch (ParameterException e) {
            System.out.println(e.getMessage());
            System.out.println("You can use '--help' for usage");
            System.exit(1);
        }
    }

    private static List<String> parseTestSuites(ApplicationArgs appArgs) {
        final List<String> testSuitesArg = ListUtils.defaultIfNull(appArgs.getTestSuites(), new ArrayList<String>());
        return ArgumentProcessorUtils.parseTestSuitesToList(testSuitesArg);
    }

    private static String parseTestTimeout(ApplicationArgs appArgs) {
        Map<String, String> dynamicArgs = appArgs.getDynamicParams();
        return dynamicArgs.get(TEST_CASE_TIMEOUT_IN_MINUTES_KEY);
    }

    private static int parseThreadCountArg(ApplicationArgs appArgs, boolean isParallel) {
        if (!isParallel) {
            return 1;
        } else {
            int defaultThreadCount = Runtime.getRuntime().availableProcessors();
            String threadCountErrorMessage = format("Thread count is misconfigured. The thread count value must be a " +
                    "positive integer less than or equal to %d. Using %d threads.", MAX_THREADS_TEST_RUNNER,
                    defaultThreadCount);
            try {
                String stringThreadCount = appArgs.getThreadCount();
                if (stringThreadCount != null) {
                    int threadCount = parseInt(stringThreadCount);
                    if ((threadCount > 0) && (threadCount <= MAX_THREADS_TEST_RUNNER)) {
                        return threadCount;
                    } else {
                        log.warn(threadCountErrorMessage);
                    }
                }
            } catch (NumberFormatException nfEx) {
                log.warn(threadCountErrorMessage);
            }
            return defaultThreadCount;
        }
    }

    private static void printBuildSuccessSummary(String contentPath, SlangBuildResults buildResults,
                                                 IRunTestResults runTestsResults,
                                                 final LoggingService loggingService) {
        loggingService.logEvent(Level.INFO, "");
        loggingService.logEvent(Level.INFO, "------------------------------------------------------------");
        loggingService.logEvent(Level.INFO, "BUILD SUCCESS");
        loggingService.logEvent(Level.INFO, "------------------------------------------------------------");
        loggingService.logEvent(Level.INFO, "Found " + buildResults.getNumberOfCompiledSources() +
                " slang files under directory: \"" + contentPath + "\" and all are valid.");
        printNumberOfPassedAndSkippedTests(runTestsResults, loggingService);
        loggingService.logEvent(Level.INFO, "");
    }

    private static void printNumberOfPassedAndSkippedTests(IRunTestResults runTestsResults,
                                                           final LoggingService loggingService) {
        loggingService.logEvent(Level.INFO, runTestsResults.getPassedTests().size() + " test cases passed");
        Map<String, TestRun> skippedTests = runTestsResults.getSkippedTests();
        if (skippedTests.size() > 0) {
            loggingService.logEvent(Level.INFO, skippedTests.size() + " test cases skipped");
        }
    }

    private static void printPassedTests(IRunTestResults runTestsResults, final LoggingService loggingService) {
        if (runTestsResults.getPassedTests().size() > 0) {
            loggingService.logEvent(Level.INFO, "------------------------------------------------------------");
            loggingService.logEvent(Level.INFO, "Following " + runTestsResults.getPassedTests().size() +
                    " test cases passed:");
            for (Map.Entry<String, TestRun> passedTest : runTestsResults.getPassedTests().entrySet()) {
                String testCaseReference = SlangTestCase.generateTestCaseReference(passedTest.getValue().getTestCase());
                loggingService.logEvent(Level.INFO, "- " + testCaseReference.replaceAll("\n", "\n\t"));
            }
        }
    }

    private static void printBuildFailureSummary(String projectPath, IRunTestResults runTestsResults,
                                                 final LoggingService loggingService) {
        printNumberOfPassedAndSkippedTests(runTestsResults, loggingService);
        final Map<String, TestRun> failedTests = runTestsResults.getFailedTests();
        logErrorsPrefix(loggingService);
        loggingService.logEvent(Level.ERROR, "BUILD FAILURE");
        loggingService.logEvent(Level.ERROR, "------------------------------------------------------------");
        loggingService.logEvent(Level.ERROR, "CloudSlang build for repository: \"" + projectPath +
                "\" failed due to failed tests.");
        loggingService.logEvent(Level.ERROR, "Following " + failedTests.size() + " tests failed:");
        for (Map.Entry<String, TestRun> failedTest : failedTests.entrySet()) {
            String failureMessage = failedTest.getValue().getMessage();
            loggingService.logEvent(Level.ERROR, "- " + failureMessage.replaceAll("\n", "\n\t"));
        }
        loggingService.logEvent(Level.ERROR, "");
    }

    private static void printSkippedTestsSummary(Map<String, TestRun> skippedTests,
                                                 final LoggingService loggingService) {
        loggingService.logEvent(Level.INFO, "");
        loggingService.logEvent(Level.INFO, "------------------------------------------------------------");
        loggingService.logEvent(Level.INFO, "Following " + skippedTests.size() + " tests were skipped:");
        for (Map.Entry<String, TestRun> skippedTest : skippedTests.entrySet()) {
            String message = skippedTest.getValue().getMessage();
            loggingService.logEvent(Level.INFO, "- " + message.replaceAll("\n", "\n\t"));
        }
    }

    private static void printTestCoverageData(IRunTestResults runTestsResults, final LoggingService loggingService) {
        printCoveredExecutables(runTestsResults.getCoveredExecutables(), loggingService);
        printUncoveredExecutables(runTestsResults.getUncoveredExecutables(), loggingService);

        int coveredExecutablesSize = runTestsResults.getCoveredExecutables().size();
        int uncoveredExecutablesSize = runTestsResults.getUncoveredExecutables().size();
        int totalNumberOfExecutables = coveredExecutablesSize + uncoveredExecutablesSize;
        double coveragePercentage = (double) coveredExecutablesSize / (double) totalNumberOfExecutables * 100;
        loggingService.logEvent(Level.INFO, "");
        loggingService.logEvent(Level.INFO, "------------------------------------------------------------");
        loggingService.logEvent(Level.INFO, ((int) coveragePercentage) + "% of the content has tests");
        loggingService.logEvent(Level.INFO, "Out of " + totalNumberOfExecutables + " executables, " +
                coveredExecutablesSize + " executables have tests");
    }

    private static void printCoveredExecutables(Set<String> coveredExecutables,
                                                final LoggingService loggingService) {
        loggingService.logEvent(Level.INFO, "");
        loggingService.logEvent(Level.INFO, "------------------------------------------------------------");
        loggingService.logEvent(Level.INFO, "Following " + coveredExecutables.size() + " executables have tests:");
        for (String executable : coveredExecutables) {
            loggingService.logEvent(Level.INFO, "- " + executable);
        }
    }

    private static void printUncoveredExecutables(Set<String> uncoveredExecutables,
                                                  final LoggingService loggingService) {
        loggingService.logEvent(Level.INFO, "");
        loggingService.logEvent(Level.INFO, "------------------------------------------------------------");
        loggingService.logEvent(Level.INFO, "Following " + uncoveredExecutables.size() +
                " executables do not have tests:");
        for (String executable : uncoveredExecutables) {
            loggingService.logEvent(Level.INFO, "- " + executable);
        }
    }

    private static String parseProjectPathArg(ApplicationArgs args) {
        String repositoryPath;

        if (args.getProjectRoot() != null) {
            repositoryPath = args.getProjectRoot();
            // if only one parameter was passed, we treat it as the project root
            // i.e. './cslang-builder some/path/to/project'
        } else if (args.getParameters().size() == 1) {
            repositoryPath = args.getParameters().get(0);
        } else {
            repositoryPath = System.getProperty("user.dir");
        }

        repositoryPath = FilenameUtils.separatorsToSystem(repositoryPath);

        Validate.isTrue(new File(repositoryPath).isDirectory(),
                "Directory path argument \'" + repositoryPath + "\' does not lead to a directory");

        return repositoryPath;
    }

    private static void registerEventHandlers(Slang slang) {
        slang.subscribeOnAllEvents(new ScoreEventListener() {
            @Override
            public synchronized void onEvent(ScoreEvent event) {
                logEvent(event);
            }
        });
    }

    private static void logEvent(ScoreEvent event) {
        log.debug(("Event received: " + event.getEventType() + " Data is: " + event.getData()));
    }
}
refactor log4j file check Signed-off-by: Bonczidai Levente <[email protected]>
cloudslang-content-verifier/src/main/java/io/cloudslang/lang/tools/build/SlangBuildMain.java
refactor log4j file check
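The record above turns on parseThreadCountArg: a user-supplied thread count is accepted only when it parses as a positive integer no greater than MAX_THREADS_TEST_RUNNER, and anything else falls back to the processor count with a warning. A minimal, self-contained sketch of that clamp-or-fallback pattern follows; the class name and the MAX_THREADS ceiling are invented for illustration and are not part of the CloudSlang API.

import static java.lang.Integer.parseInt;

public class ThreadCountParser {

    private static final int MAX_THREADS = 32; // assumed ceiling for the sketch

    // Returns a validated thread count, falling back to the CPU count when the
    // argument is missing, non-numeric, zero/negative, or above the ceiling.
    public static int parse(String raw) {
        int fallback = Runtime.getRuntime().availableProcessors();
        if (raw == null) {
            return fallback;
        }
        try {
            int value = parseInt(raw);
            if (value > 0 && value <= MAX_THREADS) {
                return value;
            }
        } catch (NumberFormatException ignored) {
            // fall through to the fallback below
        }
        return fallback;
    }

    public static void main(String[] args) {
        System.out.println(parse("8"));   // 8
        System.out.println(parse("abc")); // CPU count
        System.out.println(parse("-1"));  // CPU count
    }
}

Returning the fallback from a single exit point keeps the three failure modes (null, unparsable, out of range) behaviorally identical, which is also how the original method treats them.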
Java
apache-2.0
f1fa039dfcbbed957d4030c6890537f44770a001
0
bozimmerman/CoffeeMud,bozimmerman/CoffeeMud,bozimmerman/CoffeeMud,bozimmerman/CoffeeMud
package com.planet_ink.coffee_mud.Abilities.Spells; import com.planet_ink.coffee_mud.core.interfaces.*; import com.planet_ink.coffee_mud.core.*; import com.planet_ink.coffee_mud.core.collections.*; import com.planet_ink.coffee_mud.Abilities.interfaces.*; import com.planet_ink.coffee_mud.Areas.interfaces.*; import com.planet_ink.coffee_mud.Behaviors.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.*; import com.planet_ink.coffee_mud.Commands.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.*; import com.planet_ink.coffee_mud.Exits.interfaces.*; import com.planet_ink.coffee_mud.Items.interfaces.*; import com.planet_ink.coffee_mud.Libraries.interfaces.*; import com.planet_ink.coffee_mud.Locales.interfaces.*; import com.planet_ink.coffee_mud.MOBS.interfaces.*; import com.planet_ink.coffee_mud.Races.interfaces.*; import java.util.*; /* Copyright 2002-2018 Bo Zimmerman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ public class Spell_FakeWeapon extends Spell { @Override public String ID() { return "Spell_FakeWeapon"; } private final static String localizedName = CMLib.lang().L("Fake Weapon"); @Override public String name() { return localizedName; } @Override protected int canAffectCode() { return CAN_ITEMS; } @Override protected int canTargetCode() { return 0; } @Override public int classificationCode() { return Ability.ACODE_SPELL|Ability.DOMAIN_ILLUSION; } @Override public int abstractQuality() { return Ability.QUALITY_INDIFFERENT; } @Override public void unInvoke() { Item item=null; if(affected instanceof Item) item=(Item)affected; super.unInvoke(); if((item != null)&&(super.canBeUninvoked())) item.destroy(); } @Override public boolean okMessage(final Environmental myHost, final CMMsg msg) { if((affected!=null)&&(affected instanceof Item)) { if((msg.tool()==affected) &&(msg.targetMinor()==CMMsg.TYP_DAMAGE)) { int damageType=Weapon.TYPE_BURSTING; if(affected instanceof Weapon) damageType=((Weapon)affected).weaponDamageType(); if(msg.sourceMessage()!=null) msg.setSourceMessage(CMLib.combat().replaceDamageTag(msg.sourceMessage(), msg.value(), damageType, CMMsg.View.SOURCE)); if(msg.targetMessage()!=null) msg.setTargetMessage(CMLib.combat().replaceDamageTag(msg.targetMessage(), msg.value(), damageType, CMMsg.View.TARGET)); if(msg.othersMessage()!=null) msg.setOthersMessage(CMLib.combat().replaceDamageTag(msg.othersMessage(), msg.value(), damageType, CMMsg.View.OTHERS)); msg.setValue(0); } else if((msg.target()!=null) &&(!(msg.target() instanceof DeadBody)) &&((msg.target()==affected) ||(msg.target()==((Item)affected).container()) ||(msg.target()==((Item)affected).ultimateContainer(null)))) { // what is this trying to prevent, because it's damn broad right now... 
if(((CMath.bset(msg.sourceMajor(),CMMsg.MASK_MAGIC)) ||(CMath.bset(msg.targetMajor(),CMMsg.MASK_MAGIC)) ||(CMath.bset(msg.othersMajor(),CMMsg.MASK_MAGIC)))) { Room room=null; if(msg.source().location()!=null) room=msg.source().location(); if(room==null) room=CMLib.map().roomLocation(affected); if(room!=null) room.showHappens(CMMsg.MSG_OK_VISUAL,L("Magic energy fizzles around @x1 and is absorbed into the air.",affected.Name())); return false; } else if(msg.tool() instanceof Ability) { msg.source().tell(L("That doesn't appear to work on @x1",affected.name())); return false; } } } return super.okMessage(myHost,msg); } @Override public boolean invoke(MOB mob, List<String> commands, Physical givenTarget, boolean auto, int asLevel) { final String weaponName=CMParms.combine(commands,0); final String[] choices={"sword","dagger","mace","staff","axe","hammer", "flail"}; int choice=-1; for(int i=0;i<choices.length;i++) { if(choices[i].equalsIgnoreCase(weaponName)) choice=i; } if(choice<0) { mob.tell(L("You must specify what kind of weapon to create: sword, dagger, mace, flail, staff, axe, or hammer.")); return false; } if(!super.invoke(mob,commands,givenTarget,auto,asLevel)) return false; final boolean success=proficiencyCheck(mob,0,auto); if(success) { final CMMsg msg=CMClass.getMsg(mob,null,this,somanticCastCode(mob,null,auto),auto?"":L("^S<S-NAME> wave(s) <S-HIS-HER> arms around dramatically.^?")); if(mob.location().okMessage(mob,msg)) { mob.location().send(mob,msg); final Weapon weapon=(Weapon)CMClass.getItem("GenWeapon"); weapon.basePhyStats().setAttackAdjustment(100 +(10 * super.getXLEVELLevel(mob))); weapon.basePhyStats().setDamage(75+(3 * super.getXLEVELLevel(mob))); weapon.basePhyStats().setDisposition(weapon.basePhyStats().disposition()|PhyStats.IS_BONUS); weapon.setMaterial(RawMaterial.RESOURCE_COTTON); switch(choice) { case 0: weapon.setName(L("a fancy sword")); weapon.setDisplayText(L("a fancy sword sits here")); weapon.setDescription(L("looks fit to cut something up!")); weapon.setWeaponClassification(Weapon.CLASS_SWORD); weapon.setWeaponDamageType(Weapon.TYPE_SLASHING); break; case 1: weapon.setName(L("a sharp dagger")); weapon.setDisplayText(L("a sharp dagger sits here")); weapon.setDescription(L("looks fit to cut something up!")); weapon.setWeaponClassification(Weapon.CLASS_DAGGER); weapon.setWeaponDamageType(Weapon.TYPE_PIERCING); break; case 2: weapon.setName(L("a large mace")); weapon.setDisplayText(L("a large mace sits here")); weapon.setDescription(L("looks fit to whomp on something with!")); weapon.setWeaponClassification(Weapon.CLASS_BLUNT); weapon.setWeaponDamageType(Weapon.TYPE_BASHING); break; case 3: weapon.setName(L("a quarterstaff")); weapon.setDisplayText(L("a quarterstaff sits here")); weapon.setDescription(L("looks like a reliable weapon")); weapon.setWeaponClassification(Weapon.CLASS_STAFF); weapon.setWeaponDamageType(Weapon.TYPE_BASHING); break; case 4: weapon.setName(L("a deadly axe")); weapon.setDisplayText(L("a deadly axe sits here")); weapon.setDescription(L("looks fit to chop something up!")); weapon.setWeaponClassification(Weapon.CLASS_AXE); weapon.setWeaponDamageType(Weapon.TYPE_SLASHING); break; case 5: weapon.setName(L("a large hammer")); weapon.setDisplayText(L("a large hammer sits here")); weapon.setDescription(L("looks fit to pound something into a pulp!")); weapon.setWeaponClassification(Weapon.CLASS_HAMMER); weapon.setWeaponDamageType(Weapon.TYPE_BASHING); break; case 6: weapon.setName(L("a large flail")); weapon.setDisplayText(L("a large flail sits 
here")); weapon.setDescription(L("looks fit to pound something into a pulp!")); weapon.setWeaponClassification(Weapon.CLASS_FLAILED); weapon.setWeaponDamageType(Weapon.TYPE_BASHING); break; } weapon.basePhyStats().setWeight(0); weapon.setBaseValue(0); weapon.recoverPhyStats(); mob.addItem(weapon); mob.location().show(mob,null,weapon,CMMsg.MSG_OK_ACTION,L("Suddenly, <S-NAME> own(s) <O-NAME>!")); beneficialAffect(mob,weapon,asLevel,0); } } else beneficialVisualFizzle(mob,null,L("<S-NAME> dramatically wave(s) <S-HIS-HER> arms around, but fizzle(s) the spell.")); // return whether it worked return success; } }
com/planet_ink/coffee_mud/Abilities/Spells/Spell_FakeWeapon.java
package com.planet_ink.coffee_mud.Abilities.Spells; import com.planet_ink.coffee_mud.core.interfaces.*; import com.planet_ink.coffee_mud.core.*; import com.planet_ink.coffee_mud.core.collections.*; import com.planet_ink.coffee_mud.Abilities.interfaces.*; import com.planet_ink.coffee_mud.Areas.interfaces.*; import com.planet_ink.coffee_mud.Behaviors.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.*; import com.planet_ink.coffee_mud.Commands.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.*; import com.planet_ink.coffee_mud.Exits.interfaces.*; import com.planet_ink.coffee_mud.Items.interfaces.*; import com.planet_ink.coffee_mud.Libraries.interfaces.*; import com.planet_ink.coffee_mud.Locales.interfaces.*; import com.planet_ink.coffee_mud.MOBS.interfaces.*; import com.planet_ink.coffee_mud.Races.interfaces.*; import java.util.*; /* Copyright 2002-2018 Bo Zimmerman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ public class Spell_FakeWeapon extends Spell { @Override public String ID() { return "Spell_FakeWeapon"; } private final static String localizedName = CMLib.lang().L("Fake Weapon"); @Override public String name() { return localizedName; } @Override protected int canAffectCode() { return CAN_ITEMS; } @Override protected int canTargetCode() { return 0; } @Override public int classificationCode() { return Ability.ACODE_SPELL|Ability.DOMAIN_ILLUSION; } @Override public int abstractQuality() { return Ability.QUALITY_INDIFFERENT; } @Override public void unInvoke() { Item item=null; if(affected instanceof Item) item=(Item)affected; super.unInvoke(); if((item != null)&&(super.canBeUninvoked())) item.destroy(); } @Override public boolean okMessage(final Environmental myHost, final CMMsg msg) { if((affected!=null)&&(affected instanceof Item)) { if((msg.tool()==affected) &&(msg.targetMinor()==CMMsg.TYP_DAMAGE)) { int damageType=Weapon.TYPE_BURSTING; if(affected instanceof Weapon) damageType=((Weapon)affected).weaponDamageType(); if(msg.sourceMessage()!=null) msg.setSourceMessage(CMLib.combat().replaceDamageTag(msg.sourceMessage(), msg.value(), damageType, CMMsg.View.SOURCE)); if(msg.targetMessage()!=null) msg.setTargetMessage(CMLib.combat().replaceDamageTag(msg.targetMessage(), msg.value(), damageType, CMMsg.View.TARGET)); if(msg.othersMessage()!=null) msg.setOthersMessage(CMLib.combat().replaceDamageTag(msg.othersMessage(), msg.value(), damageType, CMMsg.View.OTHERS)); msg.setValue(0); } else if((msg.target()!=null) &&((msg.target()==affected) ||(msg.target()==((Item)affected).container()) ||(msg.target()==((Item)affected).ultimateContainer(null)))) { if(((CMath.bset(msg.sourceMajor(),CMMsg.MASK_MAGIC)) ||(CMath.bset(msg.targetMajor(),CMMsg.MASK_MAGIC)) ||(CMath.bset(msg.othersMajor(),CMMsg.MASK_MAGIC)))) { Room room=null; if(msg.source().location()!=null) room=msg.source().location(); if(room==null) room=CMLib.map().roomLocation(affected); if(room!=null) room.showHappens(CMMsg.MSG_OK_VISUAL,L("Magic energy fizzles around @x1 and is absorbed into the air.",affected.Name())); return false; } 
else if(msg.tool() instanceof Ability) { msg.source().tell(L("That doesn't appear to work on @x1",affected.name())); return false; } } } return super.okMessage(myHost,msg); } @Override public boolean invoke(MOB mob, List<String> commands, Physical givenTarget, boolean auto, int asLevel) { final String weaponName=CMParms.combine(commands,0); final String[] choices={"sword","dagger","mace","staff","axe","hammer", "flail"}; int choice=-1; for(int i=0;i<choices.length;i++) { if(choices[i].equalsIgnoreCase(weaponName)) choice=i; } if(choice<0) { mob.tell(L("You must specify what kind of weapon to create: sword, dagger, mace, flail, staff, axe, or hammer.")); return false; } if(!super.invoke(mob,commands,givenTarget,auto,asLevel)) return false; final boolean success=proficiencyCheck(mob,0,auto); if(success) { final CMMsg msg=CMClass.getMsg(mob,null,this,somanticCastCode(mob,null,auto),auto?"":L("^S<S-NAME> wave(s) <S-HIS-HER> arms around dramatically.^?")); if(mob.location().okMessage(mob,msg)) { mob.location().send(mob,msg); final Weapon weapon=(Weapon)CMClass.getItem("GenWeapon"); weapon.basePhyStats().setAttackAdjustment(100 +(10 * super.getXLEVELLevel(mob))); weapon.basePhyStats().setDamage(75+(3 * super.getXLEVELLevel(mob))); weapon.basePhyStats().setDisposition(weapon.basePhyStats().disposition()|PhyStats.IS_BONUS); weapon.setMaterial(RawMaterial.RESOURCE_COTTON); switch(choice) { case 0: weapon.setName(L("a fancy sword")); weapon.setDisplayText(L("a fancy sword sits here")); weapon.setDescription(L("looks fit to cut something up!")); weapon.setWeaponClassification(Weapon.CLASS_SWORD); weapon.setWeaponDamageType(Weapon.TYPE_SLASHING); break; case 1: weapon.setName(L("a sharp dagger")); weapon.setDisplayText(L("a sharp dagger sits here")); weapon.setDescription(L("looks fit to cut something up!")); weapon.setWeaponClassification(Weapon.CLASS_DAGGER); weapon.setWeaponDamageType(Weapon.TYPE_PIERCING); break; case 2: weapon.setName(L("a large mace")); weapon.setDisplayText(L("a large mace sits here")); weapon.setDescription(L("looks fit to whomp on something with!")); weapon.setWeaponClassification(Weapon.CLASS_BLUNT); weapon.setWeaponDamageType(Weapon.TYPE_BASHING); break; case 3: weapon.setName(L("a quarterstaff")); weapon.setDisplayText(L("a quarterstaff sits here")); weapon.setDescription(L("looks like a reliable weapon")); weapon.setWeaponClassification(Weapon.CLASS_STAFF); weapon.setWeaponDamageType(Weapon.TYPE_BASHING); break; case 4: weapon.setName(L("a deadly axe")); weapon.setDisplayText(L("a deadly axe sits here")); weapon.setDescription(L("looks fit to chop something up!")); weapon.setWeaponClassification(Weapon.CLASS_AXE); weapon.setWeaponDamageType(Weapon.TYPE_SLASHING); break; case 5: weapon.setName(L("a large hammer")); weapon.setDisplayText(L("a large hammer sits here")); weapon.setDescription(L("looks fit to pound something into a pulp!")); weapon.setWeaponClassification(Weapon.CLASS_HAMMER); weapon.setWeaponDamageType(Weapon.TYPE_BASHING); break; case 6: weapon.setName(L("a large flail")); weapon.setDisplayText(L("a large flail sits here")); weapon.setDescription(L("looks fit to pound something into a pulp!")); weapon.setWeaponClassification(Weapon.CLASS_FLAILED); weapon.setWeaponDamageType(Weapon.TYPE_BASHING); break; } weapon.basePhyStats().setWeight(0); weapon.setBaseValue(0); weapon.recoverPhyStats(); mob.addItem(weapon); mob.location().show(mob,null,weapon,CMMsg.MSG_OK_ACTION,L("Suddenly, <S-NAME> own(s) <O-NAME>!")); beneficialAffect(mob,weapon,asLevel,0); } } else 
beneficialVisualFizzle(mob,null,L("<S-NAME> dramatically wave(s) <S-HIS-HER> arms around, but fizzle(s) the spell.")); // return whether it worked return success; } }
fake weapon fix. git-svn-id: 0cdf8356e41b2d8ccbb41bb76c82068fe80b2514@16236 0d6f1817-ed0e-0410-87c9-987e46238f29
com/planet_ink/coffee_mud/Abilities/Spells/Spell_FakeWeapon.java
fake weapon fix.
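The only behavioral change between the two content fields above is the added guard !(msg.target() instanceof DeadBody) in okMessage, so the illusionary weapon no longer vetoes actions aimed at corpses (the inline comment questions how broad the veto still is). Below is a toy, self-contained illustration of narrowing an instanceof-based veto; the types are invented stand-ins, not CoffeeMud classes.

public class VetoGuardDemo {

    interface Target {}
    static class Item implements Target {}
    static class DeadBody extends Item {}

    // Mirrors the post-fix check: magic aimed at the fake item is vetoed
    // unless the target is a corpse, which is now exempt.
    static boolean vetoesMagic(Target target, boolean isMagic) {
        return isMagic && target != null && !(target instanceof DeadBody);
    }

    public static void main(String[] args) {
        System.out.println(vetoesMagic(new Item(), true));     // true: still vetoed
        System.out.println(vetoesMagic(new DeadBody(), true)); // false: corpse exempt
    }
}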
Java
apache-2.0
eb73e36a158008c0048411f98dd69ac3fe58306f
0
youknowone/zxing,youknowone/zxing,youknowone/zxing,youknowone/zxing
/* * Copyright (C) 2008 ZXing authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.zxing.client.android; import com.google.zxing.ResultPoint; import com.google.zxing.client.android.camera.CameraManager; import android.content.Context; import android.content.res.Resources; import android.graphics.Bitmap; import android.graphics.Canvas; import android.graphics.Paint; import android.graphics.Rect; import android.util.AttributeSet; import android.view.View; import java.util.ArrayList; import java.util.List; /** * This view is overlaid on top of the camera preview. It adds the viewfinder rectangle and partial * transparency outside it, as well as the laser scanner animation and result points. * * @author [email protected] (Daniel Switkin) */ public final class ViewfinderView extends View { private static final int[] SCANNER_ALPHA = {0, 64, 128, 192, 255, 192, 128, 64}; private static final long ANIMATION_DELAY = 80L; private static final int CURRENT_POINT_OPACITY = 0xA0; private static final int MAX_RESULT_POINTS = 20; private static final int POINT_SIZE = 6; private CameraManager cameraManager; private final Paint paint; private Bitmap resultBitmap; private final int maskColor; private final int resultColor; private final int laserColor; private final int resultPointColor; private int scannerAlpha; private List<ResultPoint> possibleResultPoints; private List<ResultPoint> lastPossibleResultPoints; // This constructor is used when the class is built from an XML resource. public ViewfinderView(Context context, AttributeSet attrs) { super(context, attrs); // Initialize these once for performance rather than calling them every time in onDraw(). paint = new Paint(Paint.ANTI_ALIAS_FLAG); Resources resources = getResources(); maskColor = resources.getColor(R.color.viewfinder_mask); resultColor = resources.getColor(R.color.result_view); laserColor = resources.getColor(R.color.viewfinder_laser); resultPointColor = resources.getColor(R.color.possible_result_points); scannerAlpha = 0; possibleResultPoints = new ArrayList<ResultPoint>(5); lastPossibleResultPoints = null; } public void setCameraManager(CameraManager cameraManager) { this.cameraManager = cameraManager; } @Override public void onDraw(Canvas canvas) { if (cameraManager == null) { return; // not ready yet, early draw before done configuring } Rect frame = cameraManager.getFramingRect(); Rect previewFrame = cameraManager.getFramingRectInPreview(); if (frame == null || previewFrame == null) { return; } int width = canvas.getWidth(); int height = canvas.getHeight(); // Draw the exterior (i.e. outside the framing rect) darkened paint.setColor(resultBitmap != null ? 
resultColor : maskColor); canvas.drawRect(0, 0, width, frame.top, paint); canvas.drawRect(0, frame.top, frame.left, frame.bottom + 1, paint); canvas.drawRect(frame.right + 1, frame.top, width, frame.bottom + 1, paint); canvas.drawRect(0, frame.bottom + 1, width, height, paint); if (resultBitmap != null) { // Draw the opaque result bitmap over the scanning rectangle paint.setAlpha(CURRENT_POINT_OPACITY); canvas.drawBitmap(resultBitmap, null, frame, paint); } else { // Draw a red "laser scanner" line through the middle to show decoding is active paint.setColor(laserColor); paint.setAlpha(SCANNER_ALPHA[scannerAlpha]); scannerAlpha = (scannerAlpha + 1) % SCANNER_ALPHA.length; int middle = frame.height() / 2 + frame.top; canvas.drawRect(frame.left + 2, middle - 1, frame.right - 1, middle + 2, paint); float scaleX = frame.width() / (float) previewFrame.width(); float scaleY = frame.height() / (float) previewFrame.height(); List<ResultPoint> currentPossible = possibleResultPoints; List<ResultPoint> currentLast = lastPossibleResultPoints; int frameLeft = frame.left; int frameTop = frame.top; if (currentPossible.isEmpty()) { lastPossibleResultPoints = null; } else { possibleResultPoints = new ArrayList<ResultPoint>(5); lastPossibleResultPoints = currentPossible; paint.setAlpha(CURRENT_POINT_OPACITY); paint.setColor(resultPointColor); synchronized (currentPossible) { for (ResultPoint point : currentPossible) { canvas.drawCircle(frameLeft + (int) (point.getX() * scaleX), frameTop + (int) (point.getY() * scaleY), POINT_SIZE, paint); } } } if (currentLast != null) { paint.setAlpha(CURRENT_POINT_OPACITY / 2); paint.setColor(resultPointColor); synchronized (currentLast) { float radius = POINT_SIZE / 2.0f; for (ResultPoint point : currentLast) { canvas.drawCircle(frameLeft + (int) (point.getX() * scaleX), frameTop + (int) (point.getY() * scaleY), radius, paint); } } } // Request another update at the animation interval, but only repaint the laser line, // not the entire viewfinder mask. postInvalidateDelayed(ANIMATION_DELAY, frame.left - POINT_SIZE, frame.top - POINT_SIZE, frame.right + POINT_SIZE, frame.bottom + POINT_SIZE); } } public void drawViewfinder() { Bitmap resultBitmap = this.resultBitmap; this.resultBitmap = null; if (resultBitmap != null) { resultBitmap.recycle(); } invalidate(); } /** * Draw a bitmap with the result points highlighted instead of the live scanning display. * * @param barcode An image of the decoded barcode. */ public void drawResultBitmap(Bitmap barcode) { resultBitmap = barcode; invalidate(); } public void addPossibleResultPoint(ResultPoint point) { List<ResultPoint> points = possibleResultPoints; synchronized (points) { points.add(point); int size = points.size(); if (size > MAX_RESULT_POINTS) { // trim it points.subList(0, size - MAX_RESULT_POINTS / 2).clear(); } } } }
android/src/com/google/zxing/client/android/ViewfinderView.java
/* * Copyright (C) 2008 ZXing authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.zxing.client.android; import com.google.zxing.ResultPoint; import com.google.zxing.client.android.camera.CameraManager; import android.content.Context; import android.content.res.Resources; import android.graphics.Bitmap; import android.graphics.Canvas; import android.graphics.Paint; import android.graphics.Rect; import android.util.AttributeSet; import android.view.View; import java.util.ArrayList; import java.util.List; /** * This view is overlaid on top of the camera preview. It adds the viewfinder rectangle and partial * transparency outside it, as well as the laser scanner animation and result points. * * @author [email protected] (Daniel Switkin) */ public final class ViewfinderView extends View { private static final int[] SCANNER_ALPHA = {0, 64, 128, 192, 255, 192, 128, 64}; private static final long ANIMATION_DELAY = 80L; private static final int CURRENT_POINT_OPACITY = 0xA0; private static final int MAX_RESULT_POINTS = 20; private static final int POINT_SIZE = 6; private CameraManager cameraManager; private final Paint paint; private Bitmap resultBitmap; private final int maskColor; private final int resultColor; private final int laserColor; private final int resultPointColor; private int scannerAlpha; private List<ResultPoint> possibleResultPoints; private List<ResultPoint> lastPossibleResultPoints; // This constructor is used when the class is built from an XML resource. public ViewfinderView(Context context, AttributeSet attrs) { super(context, attrs); // Initialize these once for performance rather than calling them every time in onDraw(). paint = new Paint(Paint.ANTI_ALIAS_FLAG); Resources resources = getResources(); maskColor = resources.getColor(R.color.viewfinder_mask); resultColor = resources.getColor(R.color.result_view); laserColor = resources.getColor(R.color.viewfinder_laser); resultPointColor = resources.getColor(R.color.possible_result_points); scannerAlpha = 0; possibleResultPoints = new ArrayList<ResultPoint>(5); lastPossibleResultPoints = null; } public void setCameraManager(CameraManager cameraManager) { this.cameraManager = cameraManager; } @Override public void onDraw(Canvas canvas) { if (cameraManager == null) { return; // not ready yet, early draw before done configuring } Rect frame = cameraManager.getFramingRect(); if (frame == null) { return; } int width = canvas.getWidth(); int height = canvas.getHeight(); // Draw the exterior (i.e. outside the framing rect) darkened paint.setColor(resultBitmap != null ? 
resultColor : maskColor); canvas.drawRect(0, 0, width, frame.top, paint); canvas.drawRect(0, frame.top, frame.left, frame.bottom + 1, paint); canvas.drawRect(frame.right + 1, frame.top, width, frame.bottom + 1, paint); canvas.drawRect(0, frame.bottom + 1, width, height, paint); if (resultBitmap != null) { // Draw the opaque result bitmap over the scanning rectangle paint.setAlpha(CURRENT_POINT_OPACITY); canvas.drawBitmap(resultBitmap, null, frame, paint); } else { // Draw a red "laser scanner" line through the middle to show decoding is active paint.setColor(laserColor); paint.setAlpha(SCANNER_ALPHA[scannerAlpha]); scannerAlpha = (scannerAlpha + 1) % SCANNER_ALPHA.length; int middle = frame.height() / 2 + frame.top; canvas.drawRect(frame.left + 2, middle - 1, frame.right - 1, middle + 2, paint); Rect previewFrame = cameraManager.getFramingRectInPreview(); float scaleX = frame.width() / (float) previewFrame.width(); float scaleY = frame.height() / (float) previewFrame.height(); List<ResultPoint> currentPossible = possibleResultPoints; List<ResultPoint> currentLast = lastPossibleResultPoints; int frameLeft = frame.left; int frameTop = frame.top; if (currentPossible.isEmpty()) { lastPossibleResultPoints = null; } else { possibleResultPoints = new ArrayList<ResultPoint>(5); lastPossibleResultPoints = currentPossible; paint.setAlpha(CURRENT_POINT_OPACITY); paint.setColor(resultPointColor); synchronized (currentPossible) { for (ResultPoint point : currentPossible) { canvas.drawCircle(frameLeft + (int) (point.getX() * scaleX), frameTop + (int) (point.getY() * scaleY), POINT_SIZE, paint); } } } if (currentLast != null) { paint.setAlpha(CURRENT_POINT_OPACITY / 2); paint.setColor(resultPointColor); synchronized (currentLast) { float radius = POINT_SIZE / 2.0f; for (ResultPoint point : currentLast) { canvas.drawCircle(frameLeft + (int) (point.getX() * scaleX), frameTop + (int) (point.getY() * scaleY), radius, paint); } } } // Request another update at the animation interval, but only repaint the laser line, // not the entire viewfinder mask. postInvalidateDelayed(ANIMATION_DELAY, frame.left - POINT_SIZE, frame.top - POINT_SIZE, frame.right + POINT_SIZE, frame.bottom + POINT_SIZE); } } public void drawViewfinder() { Bitmap resultBitmap = this.resultBitmap; this.resultBitmap = null; if (resultBitmap != null) { resultBitmap.recycle(); } invalidate(); } /** * Draw a bitmap with the result points highlighted instead of the live scanning display. * * @param barcode An image of the decoded barcode. */ public void drawResultBitmap(Bitmap barcode) { resultBitmap = barcode; invalidate(); } public void addPossibleResultPoint(ResultPoint point) { List<ResultPoint> points = possibleResultPoints; synchronized (points) { points.add(point); int size = points.size(); if (size > MAX_RESULT_POINTS) { // trim it points.subList(0, size - MAX_RESULT_POINTS / 2).clear(); } } } }
Avoid weird NPE observed in 4.3.2 git-svn-id: d565a5fbffec933846bf643895bf9c245569575b@2792 59b500cc-1b3d-0410-9834-0bbf25fbcc57
android/src/com/google/zxing/client/android/ViewfinderView.java
Avoid weird NPE observed in 4.3.2
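The NPE fix above has a simple shape: getFramingRectInPreview() is now fetched alongside getFramingRect() at the top of onDraw(), and the method returns early if either rect is null, rather than dereferencing the preview rect later inside the drawing branch. A stand-alone sketch of that hoist-and-check pattern; java.awt.Rectangle stands in for android.graphics.Rect so the example runs on a plain JVM.

import java.awt.Rectangle; // stand-in for android.graphics.Rect in this sketch

public class EarlyNullCheckDemo {

    static Rectangle framingRect;          // may be null before the camera is configured
    static Rectangle framingRectInPreview; // may be null before the camera is configured

    static void onDraw() {
        Rectangle frame = framingRect;
        Rectangle preview = framingRectInPreview;
        if (frame == null || preview == null) {
            return; // skip this frame instead of crashing mid-draw
        }
        float scaleX = frame.width / (float) preview.width; // safe: both checked above
        System.out.println("scaleX=" + scaleX);
    }

    public static void main(String[] args) {
        onDraw(); // silently skipped: rects not set yet
        framingRect = new Rectangle(0, 0, 200, 100);
        framingRectInPreview = new Rectangle(0, 0, 400, 200);
        onDraw(); // prints scaleX=0.5
    }
}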
Java
apache-2.0
988ed8c87e02a9496c6de058317258e2c67837fc
0
coolcrowd/object-service,coolcrowd/ObjectService,coolcrowd/ObjectService,coolcrowd/object-service,coolcrowd/object-service,coolcrowd/ObjectService,coolcrowd/object-service,coolcrowd/ObjectService
package edu.kit.ipd.crowdcontrol.objectservice.moneytransfer; import edu.kit.ipd.crowdcontrol.objectservice.database.model.tables.records.GiftCodeRecord; import org.junit.Before; import org.junit.Test; import javax.mail.*; import javax.mail.internet.MimeBodyPart; import javax.mail.internet.MimeMessage; import javax.mail.internet.MimeMultipart; import java.io.*; import java.util.Properties; import static org.junit.Assert.assertTrue; /** * Tests the parsing of mail with amazon giftcodes. * @author Felix Rittler */ public class MailParserTest { private Properties props; private Authenticator auth; @Test public void test() throws Exception { Session session = Session.getInstance(props,auth); MimeMessage mail = new MimeMessage(session); MimeMultipart part = new MimeMultipart(); mail.setContent(part); MimeBodyPart mimeBodyPart = new MimeBodyPart(); part.addBodyPart(mimeBodyPart); MimeMultipart innerPart = new MimeMultipart(); mimeBodyPart.setContent(innerPart); MimeBodyPart innerBody = new MimeBodyPart(); innerPart.addBodyPart(innerBody); FileReader file = new FileReader("src/test/resources/parserTestMessage.txt"); BufferedReader reader = new BufferedReader(file); StringBuilder content = new StringBuilder(); String messageLine; while ((messageLine = reader.readLine()) != null) { content.append(messageLine); content.append(System.getProperty("line.separator")); } innerBody.setContent(content, "text/plain"); GiftCodeRecord rec = MailParser.parseAmazonGiftCode(mail); assertTrue(rec.getAmount()==15); assertTrue(rec.getCode().equals("5X4F-H8359N-Q2JM")); } @Before public void setUp() throws Exception { props = new Properties(); props.put("mail.store.protocol", "imap"); props.put("mail.imap.host", "imap.gmail.com"); props.put("mail.imap.port", "993"); props.put("mail.imap.ssl", "true"); props.put("mail.imap.ssl.enable", "true"); java.security.Security.addProvider(new com.sun.net.ssl.internal.ssl.Provider()); props.put("mail.imap.socketFactory.class", "javax.net.ssl.SSLSocketFactory"); auth = new Authenticator() { @Override protected PasswordAuthentication getPasswordAuthentication() { return new PasswordAuthentication(null, null); } }; } }
src/test/java/edu/kit/ipd/crowdcontrol/objectservice/moneytransfer/MailParserTest.java
package edu.kit.ipd.crowdcontrol.objectservice.moneytransfer; import edu.kit.ipd.crowdcontrol.objectservice.database.model.tables.records.GiftCodeRecord; import org.junit.Before; import org.junit.Test; import javax.mail.*; import javax.mail.internet.MimeBodyPart; import javax.mail.internet.MimeMessage; import javax.mail.internet.MimeMultipart; import java.io.*; import java.util.Properties; import static org.junit.Assert.assertTrue; /** * Tests the parsing of mail with amazon giftcodes. * @author Felix Rittler */ public class MailParserTest { private Properties props; private Authenticator auth; @Test public void test() throws Exception { Session session = Session.getInstance(props,auth); MimeMessage mail = new MimeMessage(session); MimeMultipart part = new MimeMultipart(); mail.setContent(part); MimeBodyPart mimeBodyPart = new MimeBodyPart(); part.addBodyPart(mimeBodyPart); MimeMultipart innerPart = new MimeMultipart(); mimeBodyPart.setContent(innerPart); MimeBodyPart innerBody = new MimeBodyPart(); innerPart.addBodyPart(innerBody); FileReader file = new FileReader("src/test/resources/parserTestMessage.txt"); BufferedReader reader = new BufferedReader(file); StringBuilder content = new StringBuilder(); String messageLine; while ((messageLine = reader.readLine()) != null) { content.append(messageLine); content.append(System.getProperty("line.separator")); } innerBody.setContent(content, "text/plain"); GiftCodeRecord rec = MailParser.parseAmazonGiftCode(mail); assertTrue(rec.getAmount()==15); assertTrue(rec.getCode().equals("5X4F-H8359N-Q2JM")); } @Before public void setUp() throws Exception { props = new Properties(); props.put("mail.store.protocol", "imap"); props.put("mail.imap.host", "imap.gmail.com"); props.put("mail.imap.port", "993"); props.put("mail.imap.ssl", "true"); props.put("mail.imap.ssl.enable", "true"); java.security.Security.addProvider(new com.sun.net.ssl.internal.ssl.Provider()); props.put("mail.imap.socketFactory.class", "javax.net.ssl.SSLSocketFactory"); Properties properties = new Properties(); BufferedInputStream stream = new BufferedInputStream(new FileInputStream("src/test/resources/gmailLogin.properties")); properties.load(stream); stream.close(); auth = new Authenticator() { @Override protected PasswordAuthentication getPasswordAuthentication() { return new PasswordAuthentication(properties.getProperty("username"), properties.getProperty("password")); } }; } }
deletes gmailLogin.props, not needed
src/test/java/edu/kit/ipd/crowdcontrol/objectservice/moneytransfer/MailParserTest.java
deletes gmailLogin.props, not needed
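After this commit the test no longer reads gmailLogin.properties; the Authenticator returns PasswordAuthentication(null, null) because the test only parses a message it builds in memory and never connects to a server. A sketch of that credential-free setup, assuming the javax.mail (JavaMail) dependency the test already uses; the class name is illustrative.

import java.util.Properties;
import javax.mail.Authenticator;
import javax.mail.PasswordAuthentication;
import javax.mail.Session;
import javax.mail.internet.MimeMessage;

public class OfflineMailSession {

    // A Session that needs no secrets on disk: fine for tests that only
    // construct and parse messages locally, never for a live IMAP login.
    public static Session offlineSession() {
        Properties props = new Properties();
        props.put("mail.store.protocol", "imap");
        Authenticator noAuth = new Authenticator() {
            @Override
            protected PasswordAuthentication getPasswordAuthentication() {
                return new PasswordAuthentication(null, null); // never consulted offline
            }
        };
        return Session.getInstance(props, noAuth);
    }

    public static void main(String[] args) throws Exception {
        MimeMessage message = new MimeMessage(offlineSession());
        message.setText("placeholder body");
        message.saveChanges();
        System.out.println(message.getContent());
    }
}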
Java
apache-2.0
6c3f7dd675d64edd51fc350b026353854285be4c
0
NationalSecurityAgency/ghidra,NationalSecurityAgency/ghidra,NationalSecurityAgency/ghidra,NationalSecurityAgency/ghidra,NationalSecurityAgency/ghidra,NationalSecurityAgency/ghidra,NationalSecurityAgency/ghidra
/* ### * IP: GHIDRA * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ghidra.app.util.bin.format.omf; import java.io.IOException; import java.nio.charset.StandardCharsets; import ghidra.app.util.bin.BinaryReader; public class OmfCommentRecord extends OmfRecord { // Language translator comment public static final byte COMMENT_CLASS_TRANSLATOR = 0; // Record specifying name of object public static final byte COMMENT_CLASS_LIBMOD = (byte) 0xA3; // Default library cmd public static final byte COMMENT_CLASS_DEFAULT_LIBRARY = (byte) 0x9F; private byte commentType; private byte commentClass; private String value; public OmfCommentRecord(BinaryReader reader) throws IOException { readRecordHeader(reader); commentType = reader.readNextByte(); commentClass = reader.readNextByte(); byte[] bytes = reader.readNextByteArray( getRecordLength() - 3 /* 3 = sizeof(commentType+commentClass+trailing_crcbyte*/); if (commentClass == COMMENT_CLASS_TRANSLATOR || commentClass == COMMENT_CLASS_LIBMOD || commentClass == COMMENT_CLASS_DEFAULT_LIBRARY) { value = new String(bytes, StandardCharsets.US_ASCII); // assuming ASCII } readCheckSumByte(reader); } public byte getCommentClass() { return commentClass; } public String getValue() { return value; } }
Ghidra/Features/Base/src/main/java/ghidra/app/util/bin/format/omf/OmfCommentRecord.java
/* ### * IP: GHIDRA * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ghidra.app.util.bin.format.omf; import java.io.IOException; import java.nio.charset.StandardCharsets; import ghidra.app.util.bin.BinaryReader; public class OmfCommentRecord extends OmfRecord { // Language translator comment public static final byte COMMENT_CLASS_TRANSLATOR = 0; // Record specifying name of object public static final byte COMMENT_CLASS_LIBMOD = (byte) 0xA3; // Default library cmd public static final byte COMMENT_CLASS_DEFAULT_LIBRARY = (byte) 0x9F; private byte commentType; private byte commentClass; private String value; public OmfCommentRecord(BinaryReader reader) throws IOException { readRecordHeader(reader); commentType = reader.readNextByte(); commentClass = reader.readNextByte(); byte[] bytes = reader.readNextByteArray( getRecordLength() - 3 /* 3 = sizeof(commentType+commentClass+trailing_crcbyte*/); if (commentClass == COMMENT_CLASS_TRANSLATOR || commentClass == COMMENT_CLASS_LIBMOD || commentClass == COMMENT_CLASS_DEFAULT_LIBRARY) { value = new String(bytes, StandardCharsets.US_ASCII); // assuming ASCII } readCheckSumByte(reader); } public byte getCommentClass() { return commentClass; } public String getValue() { return value; } }
GP-2686 fixed PR source formatting
Ghidra/Features/Base/src/main/java/ghidra/app/util/bin/format/omf/OmfCommentRecord.java
GP-2686 fixed PR source formatting
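The two content fields in this record differ only in whitespace, matching the formatting-only commit message. For context, the class reads an OMF comment record as a type byte, a class byte, a payload of recordLength - 3 bytes, and a trailing checksum byte. Below is a simplified, self-contained sketch of that framing using a plain ByteBuffer instead of Ghidra's BinaryReader; the real OMF header also carries a record-type byte and checksum validation that this sketch omits.

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.charset.StandardCharsets;

public class CommentRecordSketch {

    // Parses: [recordLength u16 LE] [commentType] [commentClass] [payload...] [checksum],
    // where the payload length is recordLength - 3 (type + class + trailing checksum byte).
    static String parse(ByteBuffer buf) {
        int recordLength = Short.toUnsignedInt(buf.getShort());
        byte commentType = buf.get();
        byte commentClass = buf.get();
        byte[] payload = new byte[recordLength - 3];
        buf.get(payload);
        byte checksum = buf.get(); // read but not validated in this sketch
        return new String(payload, StandardCharsets.US_ASCII);
    }

    public static void main(String[] args) {
        byte[] text = "LINK 1.0".getBytes(StandardCharsets.US_ASCII);
        ByteBuffer buf = ByteBuffer.allocate(2 + 2 + text.length + 1)
                .order(ByteOrder.LITTLE_ENDIAN);
        buf.putShort((short) (text.length + 3));
        buf.put((byte) 0).put((byte) 0); // commentType, commentClass
        buf.put(text).put((byte) 0);     // payload, checksum
        buf.flip();
        System.out.println(parse(buf));  // LINK 1.0
    }
}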
Java
mit
605a484cf473b7abfd5539b111fa9d989ccb8c55
0
graphql-java/graphql-java,graphql-java/graphql-java
package graphql.relay; import graphql.PublicApi; import graphql.schema.DataFetcher; import graphql.schema.GraphQLArgument; import graphql.schema.GraphQLFieldDefinition; import graphql.schema.GraphQLInputObjectField; import graphql.schema.GraphQLInputObjectType; import graphql.schema.GraphQLInterfaceType; import graphql.schema.GraphQLList; import graphql.schema.GraphQLNonNull; import graphql.schema.GraphQLObjectType; import graphql.schema.GraphQLOutputType; import graphql.schema.TypeResolver; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; import static graphql.Scalars.GraphQLBoolean; import static graphql.Scalars.GraphQLID; import static graphql.Scalars.GraphQLInt; import static graphql.Scalars.GraphQLString; import static graphql.schema.GraphQLArgument.newArgument; import static graphql.schema.GraphQLFieldDefinition.newFieldDefinition; import static graphql.schema.GraphQLInputObjectField.newInputObjectField; import static graphql.schema.GraphQLInputObjectType.newInputObject; import static graphql.schema.GraphQLInterfaceType.newInterface; import static graphql.schema.GraphQLObjectType.newObject; /** * This can be used to compose graphql runtime types that implement * that Relay specification. * * See <a href="https://facebook.github.io/relay/graphql/connections.htm">https://facebook.github.io/relay/graphql/connections.htm</a> */ @PublicApi public class Relay { public static final String NODE = "Node"; private final GraphQLObjectType pageInfoType = newObject() .name("PageInfo") .description("Information about pagination in a connection.") .field(newFieldDefinition() .name("hasNextPage") .type(new GraphQLNonNull(GraphQLBoolean)) .description("When paginating forwards, are there more items?")) .field(newFieldDefinition() .name("hasPreviousPage") .type(new GraphQLNonNull(GraphQLBoolean)) .description("When paginating backwards, are there more items?")) .field(newFieldDefinition() .name("startCursor") .type(GraphQLString) .description("When paginating backwards, the cursor to continue.")) .field(newFieldDefinition() .name("endCursor") .type(GraphQLString) .description("When paginating forwards, the cursor to continue.")) .build(); public GraphQLInterfaceType nodeInterface(TypeResolver typeResolver) { return newInterface() .name(NODE) .description("An object with an ID") .typeResolver(typeResolver) .field(newFieldDefinition() .name("id") .description("The ID of an object") .type(new GraphQLNonNull(GraphQLID))) .build(); } public GraphQLFieldDefinition nodeField(GraphQLInterfaceType nodeInterface, DataFetcher nodeDataFetcher) { return newFieldDefinition() .name("node") .description("Fetches an object given its ID") .type(nodeInterface) .dataFetcher(nodeDataFetcher) .argument(newArgument() .name("id") .description("The ID of an object") .type(new GraphQLNonNull(GraphQLID))) .build(); } public List<GraphQLArgument> getConnectionFieldArguments() { List<GraphQLArgument> args = new ArrayList<>(); args.add(newArgument() .name("before") .description("fetching only nodes before this node (exclusive)") .type(GraphQLString) .build()); args.add(newArgument() .name("after") .description("fetching only nodes after this node (exclusive)") .type(GraphQLString) .build()); args.add(newArgument() .name("first") .description("fetching only the first certain number of nodes") .type(GraphQLInt) .build()); args.add(newArgument() .name("last") .description("fetching only the last certain number of nodes") .type(GraphQLInt) .build()); return args; } public 
List<GraphQLArgument> getBackwardPaginationConnectionFieldArguments() { List<GraphQLArgument> args = new ArrayList<>(); args.add(newArgument() .name("before") .description("fetching only nodes before this node (exclusive)") .type(GraphQLString) .build()); args.add(newArgument() .name("last") .description("fetching only the last certain number of nodes") .type(GraphQLInt) .build()); return args; } public List<GraphQLArgument> getForwardPaginationConnectionFieldArguments() { List<GraphQLArgument> args = new ArrayList<>(); args.add(newArgument() .name("after") .description("fetching only nodes after this node (exclusive)") .type(GraphQLString) .build()); args.add(newArgument() .name("first") .description("fetching only the first certain number of nodes") .type(GraphQLInt) .build()); return args; } public GraphQLObjectType edgeType(String name, GraphQLOutputType nodeType, GraphQLInterfaceType nodeInterface, List<GraphQLFieldDefinition> edgeFields) { return newObject() .name(name + "Edge") .description("An edge in a connection") .field(newFieldDefinition() .name("node") .type(nodeType) .description("The item at the end of the edge")) .field(newFieldDefinition() .name("cursor") .type(new GraphQLNonNull(GraphQLString)) .description("cursor marks a unique position or index into the connection")) .fields(edgeFields) .build(); } public GraphQLObjectType connectionType(String name, GraphQLObjectType edgeType, List<GraphQLFieldDefinition> connectionFields) { return newObject() .name(name + "Connection") .description("A connection to a list of items.") .field(newFieldDefinition() .name("edges") .description("a list of edges") .type(new GraphQLList(edgeType))) .field(newFieldDefinition() .name("pageInfo") .description("details about this specific page") .type(new GraphQLNonNull(pageInfoType))) .fields(connectionFields) .build(); } public GraphQLFieldDefinition mutationWithClientMutationId(String name, String fieldName, List<GraphQLInputObjectField> inputFields, List<GraphQLFieldDefinition> outputFields, DataFetcher dataFetcher) { GraphQLInputObjectField clientMutationIdInputField = newInputObjectField() .name("clientMutationId") .type(GraphQLString) .build(); GraphQLFieldDefinition clientMutationIdPayloadField = newFieldDefinition() .name("clientMutationId") .type(GraphQLString) .build(); return mutation(name, fieldName, addElementToList(inputFields, clientMutationIdInputField), addElementToList(outputFields, clientMutationIdPayloadField), dataFetcher); } private static <T> List<T> addElementToList(List<T> list, T element) { ArrayList<T> result = new ArrayList<>(list); result.add(element); return result; } public GraphQLFieldDefinition mutation(String name, String fieldName, List<GraphQLInputObjectField> inputFields, List<GraphQLFieldDefinition> outputFields, DataFetcher dataFetcher) { GraphQLInputObjectType inputObjectType = newInputObject() .name(name + "Input") .fields(inputFields) .build(); GraphQLObjectType outputType = newObject() .name(name + "Payload") .fields(outputFields) .build(); return newFieldDefinition() .name(fieldName) .type(outputType) .argument(newArgument() .name("input") .type(new GraphQLNonNull(inputObjectType))) .dataFetcher(dataFetcher) .build(); } public static class ResolvedGlobalId { public ResolvedGlobalId(String type, String id) { this.type = type; this.id = id; } private final String type; private final String id; public String getType() { return type; } public String getId() { return id; } } private static final java.util.Base64.Encoder encoder = 
java.util.Base64.getEncoder(); private static final java.util.Base64.Decoder decoder = java.util.Base64.getDecoder(); public String toGlobalId(String type, String id) { return encoder.encodeToString((type + ":" + id).getBytes(StandardCharsets.UTF_8)); } public ResolvedGlobalId fromGlobalId(String globalId) { String[] split = new String(decoder.decode(globalId), StandardCharsets.UTF_8).split(":", 2); if (split.length != 2) { throw new IllegalArgumentException(String.format("expecting a valid global id, got %s", globalId)); } return new ResolvedGlobalId(split[0], split[1]); } }
src/main/java/graphql/relay/Relay.java
package graphql.relay; import graphql.PublicApi; import graphql.schema.DataFetcher; import graphql.schema.GraphQLArgument; import graphql.schema.GraphQLFieldDefinition; import graphql.schema.GraphQLInputObjectField; import graphql.schema.GraphQLInputObjectType; import graphql.schema.GraphQLInterfaceType; import graphql.schema.GraphQLList; import graphql.schema.GraphQLNonNull; import graphql.schema.GraphQLObjectType; import graphql.schema.GraphQLOutputType; import graphql.schema.TypeResolver; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; import static graphql.Scalars.GraphQLBoolean; import static graphql.Scalars.GraphQLID; import static graphql.Scalars.GraphQLInt; import static graphql.Scalars.GraphQLString; import static graphql.schema.GraphQLArgument.newArgument; import static graphql.schema.GraphQLFieldDefinition.newFieldDefinition; import static graphql.schema.GraphQLInputObjectField.newInputObjectField; import static graphql.schema.GraphQLInputObjectType.newInputObject; import static graphql.schema.GraphQLInterfaceType.newInterface; import static graphql.schema.GraphQLObjectType.newObject; /** * This can be used to compose graphql runtime types that implement * that Relay specification. * * See <a href="https://facebook.github.io/relay/graphql/connections.htm">https://facebook.github.io/relay/graphql/connections.htm</a> */ @PublicApi public class Relay { public static final String NODE = "Node"; private final GraphQLObjectType pageInfoType = newObject() .name("PageInfo") .description("Information about pagination in a connection.") .field(newFieldDefinition() .name("hasNextPage") .type(new GraphQLNonNull(GraphQLBoolean)) .description("When paginating forwards, are there more items?")) .field(newFieldDefinition() .name("hasPreviousPage") .type(new GraphQLNonNull(GraphQLBoolean)) .description("When paginating backwards, are there more items?")) .field(newFieldDefinition() .name("startCursor") .type(GraphQLString) .description("When paginating backwards, the cursor to continue.")) .field(newFieldDefinition() .name("endCursor") .type(GraphQLString) .description("When paginating forwards, the cursor to continue.")) .build(); public GraphQLInterfaceType nodeInterface(TypeResolver typeResolver) { return newInterface() .name(NODE) .description("An object with an ID") .typeResolver(typeResolver) .field(newFieldDefinition() .name("id") .description("The ID of an object") .type(new GraphQLNonNull(GraphQLID))) .build(); } public GraphQLFieldDefinition nodeField(GraphQLInterfaceType nodeInterface, DataFetcher nodeDataFetcher) { return newFieldDefinition() .name("node") .description("Fetches an object given its ID") .type(nodeInterface) .dataFetcher(nodeDataFetcher) .argument(newArgument() .name("id") .description("The ID of an object") .type(new GraphQLNonNull(GraphQLID))) .build(); } public List<GraphQLArgument> getConnectionFieldArguments() { List<GraphQLArgument> args = new ArrayList<>(); args.add(newArgument() .name("before") .description("fetching only nodes before this node (exclusive)") .type(GraphQLString) .build()); args.add(newArgument() .name("after") .description("fetching only nodes after this node (exclusive)") .type(GraphQLString) .build()); args.add(newArgument() .name("first") .description("fetching only the first certain number of nodes") .type(GraphQLInt) .build()); args.add(newArgument() .name("last") .description("fetching only the last certain number of nodes") .type(GraphQLInt) .build()); return args; } public 
List<GraphQLArgument> getBackwardPaginationConnectionFieldArguments() { List<GraphQLArgument> args = new ArrayList<>(); args.add(newArgument() .name("before") .description("fetching only nodes before this node (exclusive)") .type(GraphQLString) .build()); args.add(newArgument() .name("last") .description("fetching only the last certain number of nodes") .type(GraphQLInt) .build()); return args; } public List<GraphQLArgument> getForwardPaginationConnectionFieldArguments() { List<GraphQLArgument> args = new ArrayList<>(); args.add(newArgument() .name("after") .description("fetching only nodes after this node (exclusive)") .type(GraphQLString) .build()); args.add(newArgument() .name("first") .description("fetching only the first certain number of nodes") .type(GraphQLInt) .build()); return args; } public GraphQLObjectType edgeType(String name, GraphQLOutputType nodeType, GraphQLInterfaceType nodeInterface, List<GraphQLFieldDefinition> edgeFields) { return newObject() .name(name + "Edge") .description("An edge in a connection") .field(newFieldDefinition() .name("node") .type(nodeType) .description("The item at the end of the edge")) .field(newFieldDefinition() .name("cursor") .type(new GraphQLNonNull(GraphQLString)) .description("cursor marks a unique position or index into the connection")) .fields(edgeFields) .build(); } public GraphQLObjectType connectionType(String name, GraphQLObjectType edgeType, List<GraphQLFieldDefinition> connectionFields) { return newObject() .name(name + "Connection") .description("A connection to a list of items.") .field(newFieldDefinition() .name("edges") .description("a list of edges") .type(new GraphQLList(edgeType))) .field(newFieldDefinition() .name("pageInfo") .description("details about this specific page") .type(new GraphQLNonNull(pageInfoType))) .fields(connectionFields) .build(); } public GraphQLFieldDefinition mutationWithClientMutationId(String name, String fieldName, List<GraphQLInputObjectField> inputFields, List<GraphQLFieldDefinition> outputFields, DataFetcher dataFetcher) { GraphQLInputObjectType inputObjectType = newInputObject() .name(name + "Input") .field(newInputObjectField() .name("clientMutationId") .type(GraphQLString)) .fields(inputFields) .build(); GraphQLObjectType outputType = newObject() .name(name + "Payload") .field(newFieldDefinition() .name("clientMutationId") .type(GraphQLString)) .fields(outputFields) .build(); return newFieldDefinition() .name(fieldName) .type(outputType) .argument(newArgument() .name("input") .type(new GraphQLNonNull(inputObjectType))) .dataFetcher(dataFetcher) .build(); } public static class ResolvedGlobalId { public ResolvedGlobalId(String type, String id) { this.type = type; this.id = id; } private final String type; private final String id; public String getType() { return type; } public String getId() { return id; } } private static final java.util.Base64.Encoder encoder = java.util.Base64.getEncoder(); private static final java.util.Base64.Decoder decoder = java.util.Base64.getDecoder(); public String toGlobalId(String type, String id) { return encoder.encodeToString((type + ":" + id).getBytes(StandardCharsets.UTF_8)); } public ResolvedGlobalId fromGlobalId(String globalId) { String[] split = new String(decoder.decode(globalId), StandardCharsets.UTF_8).split(":", 2); if (split.length != 2) { throw new IllegalArgumentException(String.format("expecting a valid global id, got %s", globalId)); } return new ResolvedGlobalId(split[0], split[1]); } }
Make a Relay mutation without a clientMutationId
src/main/java/graphql/relay/Relay.java
Make a Relay mutation without a clientMutationId
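The refactor above splits mutationWithClientMutationId into a plain mutation(...) builder plus a copy-and-append helper, so Relay-style mutations can now be built without the clientMutationId boilerplate. The helper's contract is worth seeing in isolation: it copies the caller's list before appending, leaving the original untouched. A self-contained sketch with the same shape as the addElementToList introduced in the record.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class AppendCopyDemo {

    // Copy the input list and append one element; the caller's list is never
    // mutated, which matters when it is unmodifiable (e.g. Arrays.asList).
    static <T> List<T> addElementToList(List<T> list, T element) {
        List<T> result = new ArrayList<>(list);
        result.add(element);
        return result;
    }

    public static void main(String[] args) {
        List<String> inputFields = Arrays.asList("title", "body");
        List<String> withId = addElementToList(inputFields, "clientMutationId");
        System.out.println(inputFields); // [title, body]: unchanged
        System.out.println(withId);      // [title, body, clientMutationId]
    }
}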
Java
mit
4db5f4edfebc750aa7b36a970d97fa0b90a3e90f
0
armandgray/taapProject,armandgray/taapProject,armandgray/taapProject
package com.armandgray.taap.settings; import android.content.Intent; import android.net.Uri; import com.armandgray.taap.settings.detail.SettingsDetailActivity; public class SettingsActivityController implements SettingsActivityViews.SettingsViewsListener { static final String ARMANDGRAY_COM = "http://armandgray.com"; // TODO change to Taap URL static final String GOOGLE_PLAY_STORE_TAAP = "https://play.google.com/store/apps/details?id=com.armandgray.taap&hl=en"; public static final String COPYRIGHT = "Copyright"; public static final String SELECTED_ITEM = "SELECTED_ITEM"; public static final String TERMS_AND_CONDITIONS = "Terms & Conditions"; public static final String SOFTWARE_LICENSES = "Software Licenses"; SettingsActivity activity; SettingsActivityViews views; SettingsActivityController(SettingsActivity activity) { this.activity = activity; this.views = new SettingsActivityViews(activity, this); views.setupActivityInitialState(); } @Override public void onTvRateThisAppClick() { activity.startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse(GOOGLE_PLAY_STORE_TAAP))); } @Override public void onTvSeeMoreClick() { activity.startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse(ARMANDGRAY_COM))); } @Override public void onTvCopyrightClick() { Intent intent = new Intent(activity, SettingsDetailActivity.class); intent.putExtra(SELECTED_ITEM, COPYRIGHT); activity.startActivity(intent); } @Override public void onTvTermsConditionsClick() { Intent intent = new Intent(activity, SettingsDetailActivity.class); intent.putExtra(SELECTED_ITEM, TERMS_AND_CONDITIONS); activity.startActivity(intent); } @Override public void onTvSoftwareLicensesClick() { Intent intent = new Intent(activity, SettingsDetailActivity.class); intent.putExtra(SELECTED_ITEM, SOFTWARE_LICENSES); activity.startActivity(intent); } }
TAAP/app/src/main/java/com/armandgray/taap/settings/SettingsActivityController.java
package com.armandgray.taap.settings; import android.content.Intent; import android.net.Uri; import com.armandgray.taap.settings.detail.SettingsDetailActivity; public class SettingsActivityController implements SettingsActivityViews.SettingsViewsListener { static final String ARMANDGRAY_COM = "http://armandgray.com"; // TODO change to Taap URL static final String GOOGLE_PLAY_STORE_TAAP = "https://play.google.com/store/apps/details?id=com.armandgray.seeme&hl=en"; public static final String COPYRIGHT = "Copyright"; public static final String SELECTED_ITEM = "SELECTED_ITEM"; public static final String TERMS_AND_CONDITIONS = "Terms & Conditions"; public static final String SOFTWARE_LICENSES = "Software Licenses"; SettingsActivity activity; SettingsActivityViews views; SettingsActivityController(SettingsActivity activity) { this.activity = activity; this.views = new SettingsActivityViews(activity, this); views.setupActivityInitialState(); } @Override public void onTvRateThisAppClick() { activity.startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse(GOOGLE_PLAY_STORE_TAAP))); } @Override public void onTvSeeMoreClick() { activity.startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse(ARMANDGRAY_COM))); } @Override public void onTvCopyrightClick() { Intent intent = new Intent(activity, SettingsDetailActivity.class); intent.putExtra(SELECTED_ITEM, COPYRIGHT); activity.startActivity(intent); } @Override public void onTvTermsConditionsClick() { Intent intent = new Intent(activity, SettingsDetailActivity.class); intent.putExtra(SELECTED_ITEM, TERMS_AND_CONDITIONS); activity.startActivity(intent); } @Override public void onTvSoftwareLicensesClick() { Intent intent = new Intent(activity, SettingsDetailActivity.class); intent.putExtra(SELECTED_ITEM, SOFTWARE_LICENSES); activity.startActivity(intent); } }
set google play store url to pkg name
TAAP/app/src/main/java/com/armandgray/taap/settings/SettingsActivityController.java
set google play store url to pkg name
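The bug in this record was a copy-pasted store URL carrying another app's package id (com.armandgray.seeme). One way to keep the id from drifting is to derive the URL from the package name instead of hard-coding it; a plain-Java sketch (in an Android context the argument would come from context.getPackageName()).

public class StoreUrlDemo {

    // Deriving the listing URL from the package id keeps the two in sync,
    // so a stale id from another project cannot be pasted in by accident.
    static String playStoreUrl(String packageName) {
        return "https://play.google.com/store/apps/details?id=" + packageName + "&hl=en";
    }

    public static void main(String[] args) {
        System.out.println(playStoreUrl("com.armandgray.taap"));
    }
}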
Java
mit
798472bec582517c89767bdaab9566f940b3bcf7
0
LinDA-tools/RDF2Any,LinDA-tools/RDF2Any,LinDA-tools/RDF2Any
package de.unibonn.iai.eis.linda.converters.impl.results; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import de.unibonn.iai.eis.linda.querybuilder.classes.RDFClass; import de.unibonn.iai.eis.linda.querybuilder.classes.RDFClassProperty; import de.unibonn.iai.eis.linda.querybuilder.objects.RDFObject; /** * @author gsingharoy * * * This class gives the JSON output when a class convert for JSON is called * **/ public class JSONObjectsOutput { public String dataset; public List<Object> classes; public Map<String,Object> properties; public List<Object> objects; private Map<String, String> propertyDictionary; private RDFClass forClass; public JSONObjectsOutput(RDFClass forClass) { // TODO Auto-generated constructor stub this.dataset = forClass.dataset; this.classes = new ArrayList<Object>(); this.properties = new HashMap<String,Object>(); this.objects = new ArrayList<Object>(); this.forClass = forClass; generatePropertiesForOutput(); Map<String,Object> classDef = new HashMap<String,Object>(); classDef.put("uri", forClass.uri); classDef.put("label",forClass.label); List<String> classProperties = new ArrayList<String>(); @SuppressWarnings("rawtypes") Iterator it = this.propertyDictionary.entrySet().iterator(); while (it.hasNext()) { @SuppressWarnings("rawtypes") Map.Entry pairs = (Map.Entry)it.next(); classProperties.add((String) pairs.getValue()); it.remove(); // avoids a ConcurrentModificationException } classDef.put("properties", classProperties); this.classes.add(classDef); } private void generatePropertiesForOutput(){ this.propertyDictionary = new HashMap<String, String>(); for(RDFClassProperty prop: this.forClass.properties){ String propVar = prop.getPropertyUnderscoreVariableName(); this.propertyDictionary.put(prop.uri, propVar); Map<String,Object> propertyMap = new HashMap<String,Object>(); propertyMap.put("uri",prop.uri); propertyMap.put("label",prop.label); propertyMap.put("type",prop.type); propertyMap.put("range",prop.range); this.properties.put(propVar,propertyMap); } } }
linda/src/main/java/de/unibonn/iai/eis/linda/converters/impl/results/JSONObjectsOutput.java
package de.unibonn.iai.eis.linda.converters.impl.results;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import de.unibonn.iai.eis.linda.querybuilder.classes.RDFClass;
import de.unibonn.iai.eis.linda.querybuilder.classes.RDFClassProperty;
import de.unibonn.iai.eis.linda.querybuilder.objects.RDFObject;

/**
 * @author gsingharoy
 *
 * This class gives the JSON output when a class convert for JSON is called
 **/
public class JSONObjectsOutput {

    public String dataset;
    public List<Object> classes;
    public Map<String, Object> properties;
    public List<Object> objects;
    private Map<String, String> propertyDictionary;
    private RDFClass forClass;

    public JSONObjectsOutput(RDFClass forClass) {
        // TODO Auto-generated constructor stub
        this.dataset = forClass.dataset;
        this.classes = new ArrayList<Object>();
        this.properties = new HashMap<String, Object>();
        this.objects = new ArrayList<Object>();
        this.forClass = forClass;
        generatePropertiesForOutput();
        Map<String, Object> classDef = new HashMap<String, Object>();
        classDef.put("uri", forClass.uri);
        classDef.put("label", forClass.label);
        List<String> classProperties = new ArrayList<String>();
        @SuppressWarnings("rawtypes")
        Iterator it = this.propertyDictionary.entrySet().iterator();
        while (it.hasNext()) {
            @SuppressWarnings("rawtypes")
            Map.Entry pairs = (Map.Entry) it.next();
            classProperties.add((String) pairs.getValue());
            it.remove(); // avoids a ConcurrentModificationException
        }
        classDef.put("properties", classProperties);
        this.classes.add(classDef);
    }

    private void generatePropertiesForOutput() {
        this.propertyDictionary = new HashMap<String, String>();
        for (RDFClassProperty prop : this.forClass.properties) {
            String propVar = prop.getPropertyUnderscoreVariableName();
            this.propertyDictionary.put(prop.uri, propVar);
            this.properties.put(propVar, prop);
        }
    }
}
shortened the values sent in properties in JSON convert
linda/src/main/java/de/unibonn/iai/eis/linda/converters/impl/results/JSONObjectsOutput.java
shortened the values sent in properties in JSON convert
Java
mit
250ddf23b97515ddcd7941e0fe63338d9194a6f9
0
frc3946/Stronghold,frc3946/Stronghold
package org.usfirst.frc.team3946.robot;

import org.usfirst.frc.team3946.robot.commands.AutoTravel;
import org.usfirst.frc.team3946.robot.subsystems.BallPickup;
import org.usfirst.frc.team3946.robot.subsystems.CatapultPositioner;
import org.usfirst.frc.team3946.robot.subsystems.DriveTrainEncoder;
import org.usfirst.frc.team3946.robot.subsystems.Drivetrain;
import org.usfirst.frc.team3946.robot.subsystems.IntakePositioner;
import org.usfirst.frc.team3946.robot.subsystems.LaunchLatch;

import edu.wpi.first.wpilibj.AnalogGyro;
import edu.wpi.first.wpilibj.AnalogInput;
import edu.wpi.first.wpilibj.BuiltInAccelerometer;
import edu.wpi.first.wpilibj.Compressor;
import edu.wpi.first.wpilibj.IterativeRobot;
import edu.wpi.first.wpilibj.command.Command;
import edu.wpi.first.wpilibj.command.Scheduler;
import edu.wpi.first.wpilibj.interfaces.Accelerometer;
import edu.wpi.first.wpilibj.interfaces.Gyro;
import edu.wpi.first.wpilibj.livewindow.LiveWindow;
import edu.wpi.first.wpilibj.smartdashboard.SendableChooser;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;

/**
 * The VM is configured to automatically run this class, and to call the
 * functions corresponding to each mode, as described in the IterativeRobot
 * documentation. If you change the name of this class or the package after
 * creating this project, you must also update the manifest file in the
 * resource directory.
 */
public class Robot extends IterativeRobot {

    public static OI oi;
    public static Drivetrain drivetrain = new Drivetrain();
    public static DriveTrainEncoder driveTrainEncoder = new DriveTrainEncoder();
    public static AnalogGyro gyro = new AnalogGyro(1);
    public static AnalogInput ballFinder = new AnalogInput(3);
    public static BallPickup ballPickup = new BallPickup();
    public static IntakePositioner intakePositioner = new IntakePositioner();
    public static LaunchLatch launchLatch = new LaunchLatch();
    public static CatapultPositioner catapultPositioner = new CatapultPositioner();
    public static Compressor compressor = new Compressor(0);
    public static Accelerometer accel = new BuiltInAccelerometer();

    Command autonomousCommand;
    SendableChooser chooser;

    /**
     * This function is run when the robot is first started up and should be
     * used for any initialization code.
     */
    public void robotInit() {
        oi = new OI();
        driveTrainEncoder.initEncoders();
        chooser = new SendableChooser();
        chooser.addDefault("Position One", "Position One");
        chooser.addObject("Position Two", "Position Two");
        chooser.addObject("Position Three", "Position Three");
        chooser.addObject("Position Four", "Position Four");
        chooser.addObject("Position Five", "Position Five");
        SmartDashboard.putData("Auto mode", chooser);
    }

    /**
     * This function is called once each time the robot enters Disabled mode.
     * You can use it to reset any subsystem information you want to clear when
     * the robot is disabled.
     */
    public void disabledInit() {
    }

    public void disabledPeriodic() {
        Scheduler.getInstance().run();
    }

    /**
     * This autonomous (along with the chooser code above) shows how to select
     * between different autonomous modes using the dashboard. The sendable
     * chooser code works with the Java SmartDashboard. If you prefer the
     * LabVIEW Dashboard, remove all of the chooser code and uncomment the
     * getString code to get the auto name from the text box below the Gyro
     *
     * You can add additional auto modes by adding additional commands to the
     * chooser code above (like the commented example) or additional comparisons
     * to the switch structure below with additional strings & commands.
     */
    public void autonomousInit() {
        // autonomousCommand = (Command) chooser.getSelected();
        String autoSelected = SmartDashboard.getString("Auto Selector", "Default");
        switch (autoSelected) {
            case "Position One":
            default:
                autonomousCommand = new AutoTravel(5, 60);
                break;
            case "Position Two":
                autonomousCommand = new AutoTravel(5, 30);
                break;
            case "Position Three":
                autonomousCommand = new AutoTravel(5, 15);
                break;
            case "Position Four":
                autonomousCommand = new AutoTravel(5, 5);
                break;
            case "Position Five":
                autonomousCommand = new AutoTravel(5, -30);
                break;
        }

        // schedule the autonomous command (example)
        if (autonomousCommand != null)
            autonomousCommand.start();
    }

    /**
     * This function is called periodically during autonomous
     */
    public void autonomousPeriodic() {
        Scheduler.getInstance().run();
    }

    public void teleopInit() {
        // This makes sure that the autonomous stops running when
        // teleop starts running. If you want the autonomous to
        // continue until interrupted by another command, remove
        // this line or comment it out.
        if (autonomousCommand != null)
            autonomousCommand.cancel();
    }

    /**
     * This function is called periodically during operator control
     */
    public void teleopPeriodic() {
        Scheduler.getInstance().run();
        SmartDashboard.putNumber("Actual Right Speed", Robot.driveTrainEncoder.getRightRate());
        SmartDashboard.putNumber("Actual Right Distance", Robot.driveTrainEncoder.getRightDistance());
        SmartDashboard.putNumber("Range Finder", Robot.ballFinder.getVoltage());
        SmartDashboard.putNumber("Gyro", Robot.gyro.getAngle());
        SmartDashboard.putNumber("Actual Left Speed", Robot.driveTrainEncoder.getLeftRate());
        SmartDashboard.putNumber("Actual Left Distance", Robot.driveTrainEncoder.getLeftDistance());
        SmartDashboard.putNumber("Accel X Value", Robot.accel.getX());
        SmartDashboard.putNumber("Accel Y Value", Robot.accel.getY());
        SmartDashboard.putNumber("Accel Z Value", Robot.accel.getZ());
        SmartDashboard.putNumber("Angle", (Math.atan2(Robot.accel.getY(), Robot.accel.getZ())) * (180 / Math.PI));
    }

    /**
     * This function is called periodically during test mode
     */
    public void testPeriodic() {
        LiveWindow.run();
    }
}
src/org/usfirst/frc/team3946/robot/Robot.java
package org.usfirst.frc.team3946.robot;

import org.usfirst.frc.team3946.robot.commands.AutoDriveTest;
import org.usfirst.frc.team3946.robot.commands.AutoTravel;
import org.usfirst.frc.team3946.robot.subsystems.BallPickup;
import org.usfirst.frc.team3946.robot.subsystems.CatapultPositioner;
import org.usfirst.frc.team3946.robot.subsystems.DriveTrainEncoder;
import org.usfirst.frc.team3946.robot.subsystems.Drivetrain;
import org.usfirst.frc.team3946.robot.subsystems.IntakePositioner;
import org.usfirst.frc.team3946.robot.subsystems.LaunchLatch;

import edu.wpi.first.wpilibj.AnalogGyro;
import edu.wpi.first.wpilibj.AnalogInput;
import edu.wpi.first.wpilibj.BuiltInAccelerometer;
import edu.wpi.first.wpilibj.Compressor;
import edu.wpi.first.wpilibj.IterativeRobot;
import edu.wpi.first.wpilibj.command.Command;
import edu.wpi.first.wpilibj.command.Scheduler;
import edu.wpi.first.wpilibj.interfaces.Accelerometer;
import edu.wpi.first.wpilibj.interfaces.Gyro;
import edu.wpi.first.wpilibj.livewindow.LiveWindow;
import edu.wpi.first.wpilibj.smartdashboard.SendableChooser;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;

/**
 * The VM is configured to automatically run this class, and to call the
 * functions corresponding to each mode, as described in the IterativeRobot
 * documentation. If you change the name of this class or the package after
 * creating this project, you must also update the manifest file in the
 * resource directory.
 */
public class Robot extends IterativeRobot {

    public static OI oi;
    public static Drivetrain drivetrain = new Drivetrain();
    public static DriveTrainEncoder driveTrainEncoder = new DriveTrainEncoder();
    public static AnalogGyro gyro = new AnalogGyro(1);
    public static AnalogInput ballFinder = new AnalogInput(3);
    public static BallPickup ballPickup = new BallPickup();
    public static IntakePositioner intakePositioner = new IntakePositioner();
    public static LaunchLatch launchLatch = new LaunchLatch();
    public static CatapultPositioner catapultPositioner = new CatapultPositioner();
    public static Compressor compressor = new Compressor(0);
    public static Accelerometer accel = new BuiltInAccelerometer();

    Command autonomousCommand;
    SendableChooser chooser;

    /**
     * This function is run when the robot is first started up and should be
     * used for any initialization code.
     */
    public void robotInit() {
        oi = new OI();
        driveTrainEncoder.initEncoders();
        chooser = new SendableChooser();
        chooser.addDefault("Position One", "Position One");
        chooser.addObject("Position Two", "Position Two");
        chooser.addObject("Position Three", "Position Three");
        chooser.addObject("Position Four", "Position Four");
        chooser.addObject("Position Five", "Position Five");
        SmartDashboard.putData("Auto mode", chooser);
    }

    /**
     * This function is called once each time the robot enters Disabled mode.
     * You can use it to reset any subsystem information you want to clear when
     * the robot is disabled.
     */
    public void disabledInit() {
    }

    public void disabledPeriodic() {
        Scheduler.getInstance().run();
    }

    /**
     * This autonomous (along with the chooser code above) shows how to select
     * between different autonomous modes using the dashboard. The sendable
     * chooser code works with the Java SmartDashboard. If you prefer the
     * LabVIEW Dashboard, remove all of the chooser code and uncomment the
     * getString code to get the auto name from the text box below the Gyro
     *
     * You can add additional auto modes by adding additional commands to the
     * chooser code above (like the commented example) or additional comparisons
     * to the switch structure below with additional strings & commands.
     */
    public void autonomousInit() {
        // autonomousCommand = (Command) chooser.getSelected();
        String autoSelected = SmartDashboard.getString("Auto Selector", "Default");
        switch (autoSelected) {
            case "Position One":
            default:
                autonomousCommand = new AutoTravel(5, 60);
                break;
            case "Position Two":
                autonomousCommand = new AutoTravel(5, 30);
                break;
            case "Position Three":
                autonomousCommand = new AutoTravel(5, 15);
                break;
            case "Position Four":
                autonomousCommand = new AutoTravel(5, 5);
                break;
            case "Position Five":
                autonomousCommand = new AutoTravel(5, -30);
                break;
        }

        // schedule the autonomous command (example)
        if (autonomousCommand != null)
            autonomousCommand.start();
    }

    /**
     * This function is called periodically during autonomous
     */
    public void autonomousPeriodic() {
        Scheduler.getInstance().run();
    }

    public void teleopInit() {
        // This makes sure that the autonomous stops running when
        // teleop starts running. If you want the autonomous to
        // continue until interrupted by another command, remove
        // this line or comment it out.
        if (autonomousCommand != null)
            autonomousCommand.cancel();
    }

    /**
     * This function is called periodically during operator control
     */
    public void teleopPeriodic() {
        Scheduler.getInstance().run();
        SmartDashboard.putNumber("Actual Right Speed", Robot.driveTrainEncoder.getRightRate());
        SmartDashboard.putNumber("Actual Right Distance", Robot.driveTrainEncoder.getRightDistance());
        SmartDashboard.putNumber("Range Finder", Robot.ballFinder.getVoltage());
        SmartDashboard.putNumber("Gyro", Robot.gyro.getAngle());
        SmartDashboard.putNumber("Actual Left Speed", Robot.driveTrainEncoder.getLeftRate());
        SmartDashboard.putNumber("Actual Left Distance", Robot.driveTrainEncoder.getLeftDistance());
        SmartDashboard.putNumber("Accel X Value", Robot.accel.getX());
        SmartDashboard.putNumber("Accel Y Value", Robot.accel.getY());
        SmartDashboard.putNumber("Accel Z Value", Robot.accel.getZ());
        SmartDashboard.putNumber("Angle", (Math.atan2(Robot.accel.getY(), Robot.accel.getZ())) * (180 / Math.PI));
    }

    /**
     * This function is called periodically during test mode
     */
    public void testPeriodic() {
        LiveWindow.run();
    }
}
2-13 afternoon updates
src/org/usfirst/frc/team3946/robot/Robot.java
2-13 afternoon updates
Java
mit
9db610662a61e6443189204731cc898dcb35c775
0
MarkEWaite/git-plugin,martinda/git-plugin,jenkinsci/git-plugin,martinda/git-plugin,jenkinsci/git-plugin,MarkEWaite/git-plugin,martinda/git-plugin,MarkEWaite/git-plugin,MarkEWaite/git-plugin,jenkinsci/git-plugin,jenkinsci/git-plugin
package hudson.plugins.git; import com.cloudbees.plugins.credentials.CredentialsMatcher; import com.cloudbees.plugins.credentials.CredentialsMatchers; import com.cloudbees.plugins.credentials.CredentialsProvider; import com.cloudbees.plugins.credentials.common.StandardUsernameCredentials; import com.cloudbees.plugins.credentials.common.StandardUsernamePasswordCredentials; import com.cloudbees.plugins.credentials.domains.URIRequirementBuilder; import com.google.common.collect.Iterables; import edu.umd.cs.findbugs.annotations.CheckForNull; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import hudson.AbortException; import hudson.EnvVars; import hudson.Extension; import hudson.FilePath; import hudson.Launcher; import hudson.init.Initializer; import hudson.model.*; import hudson.model.Descriptor.FormException; import hudson.plugins.git.browser.GitRepositoryBrowser; import hudson.plugins.git.extensions.GitSCMExtension; import hudson.plugins.git.extensions.GitSCMExtensionDescriptor; import hudson.plugins.git.extensions.impl.AuthorInChangelog; import hudson.plugins.git.extensions.impl.BuildChooserSetting; import hudson.plugins.git.extensions.impl.BuildSingleRevisionOnly; import hudson.plugins.git.extensions.impl.ChangelogToBranch; import hudson.plugins.git.extensions.impl.CloneOption; import hudson.plugins.git.extensions.impl.PathRestriction; import hudson.plugins.git.extensions.impl.LocalBranch; import hudson.plugins.git.extensions.impl.RelativeTargetDirectory; import hudson.plugins.git.extensions.impl.PreBuildMerge; import hudson.plugins.git.opt.PreBuildMergeOptions; import hudson.plugins.git.util.Build; import hudson.plugins.git.util.*; import hudson.remoting.Channel; import hudson.scm.AbstractScmTagAction; import hudson.scm.ChangeLogParser; import hudson.scm.PollingResult; import hudson.scm.RepositoryBrowser; import hudson.scm.SCMDescriptor; import hudson.scm.SCMRevisionState; import hudson.security.ACL; import hudson.security.Permission; import hudson.tasks.Builder; import hudson.tasks.Publisher; import hudson.triggers.SCMTrigger; import hudson.util.DescribableList; import hudson.util.FormValidation; import hudson.util.ListBoxModel; import jenkins.model.Jenkins; import jenkins.plugins.git.GitSCMMatrixUtil; import jenkins.plugins.git.GitToolChooser; import net.sf.json.JSONObject; import org.eclipse.jgit.errors.MissingObjectException; import org.eclipse.jgit.lib.Config; import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.revwalk.RevCommit; import org.eclipse.jgit.transport.RefSpec; import org.eclipse.jgit.transport.RemoteConfig; import org.eclipse.jgit.transport.URIish; import org.jenkinsci.plugins.gitclient.*; import org.jenkinsci.plugins.scriptsecurity.sandbox.whitelists.Whitelisted; import org.kohsuke.stapler.DataBoundConstructor; import org.kohsuke.stapler.DataBoundSetter; import org.kohsuke.stapler.StaplerRequest; import org.kohsuke.stapler.export.Exported; import javax.servlet.ServletException; import java.io.File; import java.io.IOException; import java.io.OutputStreamWriter; import java.io.PrintStream; import java.io.Serializable; import java.io.Writer; import java.text.MessageFormat; import java.util.AbstractList; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; import 
static com.google.common.collect.Lists.newArrayList; import static hudson.init.InitMilestone.JOB_LOADED; import static hudson.init.InitMilestone.PLUGINS_STARTED; import hudson.plugins.git.browser.BitbucketWeb; import hudson.plugins.git.browser.GitLab; import hudson.plugins.git.browser.GithubWeb; import static hudson.scm.PollingResult.*; import hudson.Util; import hudson.plugins.git.extensions.impl.ScmName; import hudson.util.LogTaskListener; import java.util.Map.Entry; import java.util.regex.Matcher; import java.util.regex.Pattern; import static org.apache.commons.collections.CollectionUtils.isEmpty; import static org.apache.commons.lang.StringUtils.isBlank; /** * Git SCM. * * @author Nigel Magnay * @author Andrew Bayer * @author Nicolas Deloof * @author Kohsuke Kawaguchi * ... and many others */ public class GitSCM extends GitSCMBackwardCompatibility { /** * Store a config version so we're able to migrate config on various * functionality upgrades. */ private Long configVersion; /** * All the remote repositories that we know about. */ private List<UserRemoteConfig> userRemoteConfigs; private transient List<RemoteConfig> remoteRepositories; /** * All the branches that we wish to care about building. */ private List<BranchSpec> branches; private boolean doGenerateSubmoduleConfigurations = false; @CheckForNull public String gitTool; @CheckForNull private GitRepositoryBrowser browser; private Collection<SubmoduleConfig> submoduleCfg = Collections.<SubmoduleConfig>emptyList(); public static final String GIT_BRANCH = "GIT_BRANCH"; public static final String GIT_LOCAL_BRANCH = "GIT_LOCAL_BRANCH"; public static final String GIT_CHECKOUT_DIR = "GIT_CHECKOUT_DIR"; public static final String GIT_COMMIT = "GIT_COMMIT"; public static final String GIT_PREVIOUS_COMMIT = "GIT_PREVIOUS_COMMIT"; public static final String GIT_PREVIOUS_SUCCESSFUL_COMMIT = "GIT_PREVIOUS_SUCCESSFUL_COMMIT"; public static final String GIT_URL = "GIT_URL"; /** * All the configured extensions attached to this. */ @SuppressFBWarnings(value="SE_BAD_FIELD", justification="Known non-serializable field") private DescribableList<GitSCMExtension,GitSCMExtensionDescriptor> extensions; @Whitelisted @Deprecated @SuppressFBWarnings(value="EI_EXPOSE_REP", justification="Unread deprecated collection") public Collection<SubmoduleConfig> getSubmoduleCfg() { return submoduleCfg; } @DataBoundSetter public void setSubmoduleCfg(Collection<SubmoduleConfig> submoduleCfg) { } public static List<UserRemoteConfig> createRepoList(String url, String credentialsId) { List<UserRemoteConfig> repoList = new ArrayList<>(); repoList.add(new UserRemoteConfig(url, null, null, credentialsId)); return repoList; } /** * A convenience constructor that sets everything to default. * * @param repositoryUrl git repository URL * Repository URL to clone from. 
*/ public GitSCM(String repositoryUrl) { this( createRepoList(repositoryUrl, null), Collections.singletonList(new BranchSpec("")), null, null, Collections.<GitSCMExtension>emptyList()); } @Deprecated public GitSCM( List<UserRemoteConfig> userRemoteConfigs, List<BranchSpec> branches, Boolean doGenerateSubmoduleConfigurations, Collection<SubmoduleConfig> submoduleCfg, @CheckForNull GitRepositoryBrowser browser, @CheckForNull String gitTool, List<GitSCMExtension> extensions) { this(userRemoteConfigs, branches, browser, gitTool, extensions); } @DataBoundConstructor @SuppressFBWarnings(value="EI_EXPOSE_REP2", justification="Modify access is assumed for userRemoteConfigs") public GitSCM( List<UserRemoteConfig> userRemoteConfigs, List<BranchSpec> branches, @CheckForNull GitRepositoryBrowser browser, @CheckForNull String gitTool, List<GitSCMExtension> extensions) { // moved from createBranches this.branches = isEmpty(branches) ? newArrayList(new BranchSpec("*/master")) : branches; this.userRemoteConfigs = userRemoteConfigs; updateFromUserData(); this.browser = browser; this.configVersion = 2L; this.gitTool = gitTool; this.extensions = new DescribableList<>(Saveable.NOOP,Util.fixNull(extensions)); getBuildChooser(); // set the gitSCM field. } /** * All the configured extensions attached to this {@link GitSCM}. * * Going forward this is primarily how we'll support esoteric use cases. * * @since 2.0 */ @Whitelisted @SuppressFBWarnings(value="EI_EXPOSE_REP", justification="Low risk") public DescribableList<GitSCMExtension, GitSCMExtensionDescriptor> getExtensions() { return extensions; } private void updateFromUserData() throws GitException { // do what newInstance used to do directly from the request data if (userRemoteConfigs == null) { return; /* Prevent NPE when no remote config defined */ } try { String[] pUrls = new String[userRemoteConfigs.size()]; String[] repoNames = new String[userRemoteConfigs.size()]; String[] refSpecs = new String[userRemoteConfigs.size()]; for (int i = 0; i < userRemoteConfigs.size(); ++i) { pUrls[i] = userRemoteConfigs.get(i).getUrl(); repoNames[i] = userRemoteConfigs.get(i).getName(); refSpecs[i] = userRemoteConfigs.get(i).getRefspec(); } this.remoteRepositories = DescriptorImpl.createRepositoryConfigurations(pUrls, repoNames, refSpecs); // TODO: replace with new repositories } catch (IOException e1) { throw new GitException("Error creating repositories", e1); } } @SuppressWarnings("deprecation") // `source` field is deprecated but required public Object readResolve() throws IOException { // Migrate data // Default unspecified to v0 if (configVersion == null) { configVersion = 0L; } // Deprecated field needed to retain compatibility if (source != null) { remoteRepositories = new ArrayList<>(); branches = new ArrayList<>(); List<RefSpec> rs = new ArrayList<>(); rs.add(new RefSpec("+refs/heads/*:refs/remotes/origin/*")); remoteRepositories.add(newRemoteConfig("origin", source, rs.toArray(new RefSpec[0]))); if (branch != null) { branches.add(new BranchSpec(branch)); } else { branches.add(new BranchSpec("*/master")); } } if (configVersion < 1 && branches != null) { // Migrate the branch specs from // single * wildcard, to ** wildcard. 
for (BranchSpec branchSpec : branches) { String name = branchSpec.getName(); name = name.replace("*", "**"); branchSpec.setName(name); } } if (remoteRepositories != null && userRemoteConfigs == null) { userRemoteConfigs = new ArrayList<>(); for(RemoteConfig cfg : remoteRepositories) { // converted as in config.jelly String url = ""; if (cfg.getURIs().size() > 0 && cfg.getURIs().get(0) != null) url = cfg.getURIs().get(0).toPrivateString(); String refspec = ""; if (cfg.getFetchRefSpecs().size() > 0 && cfg.getFetchRefSpecs().get(0) != null) refspec = cfg.getFetchRefSpecs().get(0).toString(); userRemoteConfigs.add(new UserRemoteConfig(url, cfg.getName(), refspec, null)); } } // patch internal objects from user data // if (configVersion == 2) { if (remoteRepositories == null) { // if we don't catch GitException here, the whole job fails to load try { updateFromUserData(); } catch (GitException e) { LOGGER.log(Level.WARNING, "Failed to load SCM data", e); } } if (extensions==null) extensions = new DescribableList<>(Saveable.NOOP); readBackExtensionsFromLegacy(); if (choosingStrategy != null && getBuildChooser().getClass()==DefaultBuildChooser.class) { for (BuildChooserDescriptor d : BuildChooser.all()) { if (choosingStrategy.equals(d.getLegacyId())) { try { setBuildChooser(d.clazz.newInstance()); } catch (InstantiationException | IllegalAccessException e) { LOGGER.log(Level.WARNING, "Failed to instantiate the build chooser", e); } } } } getBuildChooser(); // set the gitSCM field. return this; } @Override @Whitelisted public GitRepositoryBrowser getBrowser() { return browser; } public void setBrowser(GitRepositoryBrowser browser) { this.browser = browser; } private static final String HOSTNAME_MATCH = "([\\w\\d[-.]]+)" // hostname ; private static final String REPOSITORY_PATH_MATCH = "/*" // Zero or more slashes as start of repository path + "(.+?)" // repository path without leading slashes + "(?:[.]git)?" // optional '.git' suffix + "/*" // optional trailing '/' ; private static final Pattern[] URL_PATTERNS = { /* URL style - like https://github.com/jenkinsci/git-plugin */ Pattern.compile( "(?:\\w+://)" // protocol (scheme) + "(?:.+@)?" // optional username/password + HOSTNAME_MATCH + "(?:[:][\\d]+)?" 
// optional port number (only honored by git for ssh:// scheme) + "/" // separator between hostname and repository path - '/' + REPOSITORY_PATH_MATCH ), /* Alternate ssh style - like [email protected]:jenkinsci/git-plugin */ Pattern.compile( "(?:git@)" // required username (only optional if local username is 'git') + HOSTNAME_MATCH + ":" // separator between hostname and repository path - ':' + REPOSITORY_PATH_MATCH ) }; @Override public RepositoryBrowser<?> guessBrowser() { Set<String> webUrls = new HashSet<>(); if (remoteRepositories != null) { for (RemoteConfig config : remoteRepositories) { for (URIish uriIsh : config.getURIs()) { String uri = uriIsh.toString(); for (Pattern p : URL_PATTERNS) { Matcher m = p.matcher(uri); if (m.matches()) { webUrls.add("https://" + m.group(1) + "/" + m.group(2) + "/"); } } } } } if (webUrls.isEmpty()) { return null; } if (webUrls.size() == 1) { String url = webUrls.iterator().next(); if (url.startsWith("https://bitbucket.org/")) { return new BitbucketWeb(url); } if (url.startsWith("https://gitlab.com/")) { return new GitLab(url); } if (url.startsWith("https://github.com/")) { return new GithubWeb(url); } return null; } LOGGER.log(Level.INFO, "Multiple browser guess matches for {0}", remoteRepositories); return null; } public boolean isCreateAccountBasedOnEmail() { DescriptorImpl gitDescriptor = getDescriptor(); return (gitDescriptor != null && gitDescriptor.isCreateAccountBasedOnEmail()); } public boolean isUseExistingAccountWithSameEmail() { DescriptorImpl gitDescriptor = getDescriptor(); return (gitDescriptor != null && gitDescriptor.isUseExistingAccountWithSameEmail()); } public boolean isHideCredentials() { DescriptorImpl gitDescriptor = getDescriptor(); return gitDescriptor != null && gitDescriptor.isHideCredentials(); } public boolean isAllowSecondFetch() { DescriptorImpl gitDescriptor = getDescriptor(); return (gitDescriptor != null && gitDescriptor.isAllowSecondFetch()); } public boolean isDisableGitToolChooser() { DescriptorImpl gitDescriptor = getDescriptor(); return (gitDescriptor != null && gitDescriptor.isDisableGitToolChooser()); } public boolean isAddGitTagAction() { DescriptorImpl gitDescriptor = getDescriptor(); return (gitDescriptor != null && gitDescriptor.isAddGitTagAction()); } @Whitelisted public BuildChooser getBuildChooser() { BuildChooser bc; BuildChooserSetting bcs = getExtensions().get(BuildChooserSetting.class); if (bcs!=null) bc = bcs.getBuildChooser(); else bc = new DefaultBuildChooser(); bc.gitSCM = this; return bc; } public void setBuildChooser(BuildChooser buildChooser) throws IOException { if (buildChooser.getClass()==DefaultBuildChooser.class) { getExtensions().remove(BuildChooserSetting.class); } else { getExtensions().replace(new BuildChooserSetting(buildChooser)); } } @Deprecated public String getParamLocalBranch(Run<?, ?> build) throws IOException, InterruptedException { return getParamLocalBranch(build, new LogTaskListener(LOGGER, Level.INFO)); } /** * Gets the parameter-expanded effective value in the context of the current build. * @param build run whose local branch name is returned * @param listener build log * @throws IOException on input or output error * @throws InterruptedException when interrupted * @return parameter-expanded local branch name in build. 
*/ public String getParamLocalBranch(Run<?, ?> build, TaskListener listener) throws IOException, InterruptedException { LocalBranch localBranch = getExtensions().get(LocalBranch.class); // substitute build parameters if available return getParameterString(localBranch == null ? null : localBranch.getLocalBranch(), build.getEnvironment(listener)); } @Deprecated public List<RemoteConfig> getParamExpandedRepos(Run<?, ?> build) throws IOException, InterruptedException { return getParamExpandedRepos(build, new LogTaskListener(LOGGER, Level.INFO)); } /** * Expand parameters in {@link #remoteRepositories} with the parameter values provided in the given build * and return them. * * @param build run whose local branch name is returned * @param listener build log * @throws IOException on input or output error * @throws InterruptedException when interrupted * @return can be empty but never null. */ public List<RemoteConfig> getParamExpandedRepos(Run<?, ?> build, TaskListener listener) throws IOException, InterruptedException { List<RemoteConfig> expandedRepos = new ArrayList<>(); EnvVars env = build.getEnvironment(listener); for (RemoteConfig oldRepo : Util.fixNull(remoteRepositories)) { expandedRepos.add(getParamExpandedRepo(env, oldRepo)); } return expandedRepos; } /** * Expand Parameters in the supplied remote repository with the parameter values provided in the given environment variables * @param env Environment variables with parameter values * @param remoteRepository Remote repository with parameters * @return remote repository with expanded parameters */ public RemoteConfig getParamExpandedRepo(EnvVars env, RemoteConfig remoteRepository) { List<RefSpec> refSpecs = getRefSpecs(remoteRepository, env); return newRemoteConfig( getParameterString(remoteRepository.getName(), env), getParameterString(remoteRepository.getURIs().get(0).toPrivateString(), env), refSpecs.toArray(new RefSpec[0])); } public RemoteConfig getRepositoryByName(String repoName) { for (RemoteConfig r : getRepositories()) { if (r.getName().equals(repoName)) { return r; } } return null; } @Exported @Whitelisted public List<UserRemoteConfig> getUserRemoteConfigs() { if (userRemoteConfigs == null) { /* Prevent NPE when no remote config defined */ userRemoteConfigs = new ArrayList<>(); } return Collections.unmodifiableList(userRemoteConfigs); } @Whitelisted @SuppressFBWarnings(value="EI_EXPOSE_REP", justification="Low risk") public List<RemoteConfig> getRepositories() { // Handle null-value to ensure backwards-compatibility, ie project configuration missing the <repositories/> XML element if (remoteRepositories == null) { return new ArrayList<>(); } return remoteRepositories; } /** * Derives a local branch name from the remote branch name by removing the * name of the remote from the remote branch name. * <p> * Ex. origin/master becomes master * <p> * Cycles through the list of user remotes looking for a match allowing user * to configure an alternate (not origin) name for the remote. * * @param remoteBranchName branch name whose remote repository name will be removed * @return a local branch name derived by stripping the remote repository * name from the {@code remoteBranchName} parameter. If a matching * remote is not found, the original {@code remoteBranchName} will * be returned. */ public String deriveLocalBranchName(String remoteBranchName) { // default remoteName is 'origin' used if list of user remote configs is empty. 
String remoteName = "origin"; for (final UserRemoteConfig remote : getUserRemoteConfigs()) { remoteName = remote.getName(); if (remoteName == null || remoteName.isEmpty()) { remoteName = "origin"; } if (remoteBranchName.startsWith(remoteName + "/")) { // found the remote config associated with remoteBranchName break; } } // now strip the remote name and return the resulting local branch name. String localBranchName = remoteBranchName.replaceFirst("^" + remoteName + "/", ""); return localBranchName; } @CheckForNull @Whitelisted public String getGitTool() { return gitTool; } @NonNull public static String getParameterString(@CheckForNull String original, @NonNull EnvVars env) { return env.expand(original); } private List<RefSpec> getRefSpecs(RemoteConfig repo, EnvVars env) { List<RefSpec> refSpecs = new ArrayList<>(); for (RefSpec refSpec : repo.getFetchRefSpecs()) { refSpecs.add(new RefSpec(getParameterString(refSpec.toString(), env))); } return refSpecs; } /** * If the configuration is such that we are tracking just one branch of one repository * return that branch specifier (in the form of something like "origin/master" or a SHA1-hash * * Otherwise return [@code null}. */ @CheckForNull private String getSingleBranch(EnvVars env) { // if we have multiple branches skip to advanced usecase if (getBranches().size() != 1) { return null; } String branch = getBranches().get(0).getName(); String repository = null; if (getRepositories().size() != 1) { for (RemoteConfig repo : getRepositories()) { if (branch.startsWith(repo.getName() + "/")) { repository = repo.getName(); break; } } } else { repository = getRepositories().get(0).getName(); } // replace repository wildcard with repository name if (branch.startsWith("*/") && repository != null) { branch = repository + branch.substring(1); } // if the branch name contains more wildcards then the simple usecase // does not apply and we need to skip to the advanced usecase if (branch.contains("*")) { return null; } // substitute build parameters if available branch = getParameterString(branch, env); // Check for empty string - replace with "**" when seen. if (branch.equals("")) { branch = "**"; } return branch; } @Override public SCMRevisionState calcRevisionsFromBuild(Run<?, ?> abstractBuild, FilePath workspace, Launcher launcher, TaskListener taskListener) throws IOException, InterruptedException { return SCMRevisionState.NONE; } @Override public boolean requiresWorkspaceForPolling() { // TODO would need to use hudson.plugins.git.util.GitUtils.getPollEnvironment return requiresWorkspaceForPolling(new EnvVars()); } /* Package protected for test access */ boolean requiresWorkspaceForPolling(EnvVars environment) { for (GitSCMExtension ext : getExtensions()) { if (ext.requiresWorkspaceForPolling()) return true; } return getSingleBranch(environment) == null; } @Override public PollingResult compareRemoteRevisionWith(Job<?, ?> project, Launcher launcher, FilePath workspace, final TaskListener listener, SCMRevisionState baseline) throws IOException, InterruptedException { try { return compareRemoteRevisionWithImpl( project, launcher, workspace, listener); } catch (GitException e){ throw new IOException(e); } } public static final Pattern GIT_REF = Pattern.compile("^(refs/[^/]+)/(.+)"); private PollingResult compareRemoteRevisionWithImpl(Job<?, ?> project, Launcher launcher, FilePath workspace, final @NonNull TaskListener listener) throws IOException, InterruptedException { // Poll for changes. Are there any unbuilt revisions that Hudson ought to build ? 
listener.getLogger().println("Using strategy: " + getBuildChooser().getDisplayName()); final Run lastBuild = project.getLastBuild(); if (lastBuild == null) { // If we've never been built before, well, gotta build! listener.getLogger().println("[poll] No previous build, so forcing an initial build."); return BUILD_NOW; } final BuildData buildData = fixNull(getBuildData(lastBuild)); if (buildData.lastBuild != null) { listener.getLogger().println("[poll] Last Built Revision: " + buildData.lastBuild.revision); } final EnvVars pollEnv = project instanceof AbstractProject ? GitUtils.getPollEnvironment((AbstractProject) project, workspace, launcher, listener, false) : lastBuild.getEnvironment(listener); final String singleBranch = getSingleBranch(pollEnv); if (!requiresWorkspaceForPolling(pollEnv)) { final EnvVars environment = project instanceof AbstractProject ? GitUtils.getPollEnvironment((AbstractProject) project, workspace, launcher, listener, false) : new EnvVars(); GitClient git = createClient(listener, environment, project, project.getLastBuild(), Jenkins.get(), null); for (RemoteConfig remoteConfig : getParamExpandedRepos(lastBuild, listener)) { String remote = remoteConfig.getName(); List<RefSpec> refSpecs = getRefSpecs(remoteConfig, environment); for (URIish urIish : remoteConfig.getURIs()) { String gitRepo = urIish.toString(); Map<String, ObjectId> heads = git.getHeadRev(gitRepo); if (heads==null || heads.isEmpty()) { listener.getLogger().println("[poll] Couldn't get remote head revision"); return BUILD_NOW; } listener.getLogger().println("Found "+ heads.size() +" remote heads on " + urIish); Iterator<Entry<String, ObjectId>> it = heads.entrySet().iterator(); while (it.hasNext()) { String head = it.next().getKey(); boolean match = false; for (RefSpec spec : refSpecs) { if (spec.matchSource(head)) { match = true; break; } } if (!match) { listener.getLogger().println("Ignoring " + head + " as it doesn't match any of the configured refspecs"); it.remove(); } } for (BranchSpec branchSpec : getBranches()) { for (Entry<String, ObjectId> entry : heads.entrySet()) { final String head = entry.getKey(); // head is "refs/(heads|tags|whatever)/branchName // first, check the a canonical git reference is configured if (!branchSpec.matches(head, environment)) { // convert head `refs/(heads|tags|whatever)/branch` into shortcut notation `remote/branch` String name; Matcher matcher = GIT_REF.matcher(head); if (matcher.matches()) name = remote + head.substring(matcher.group(1).length()); else name = remote + "/" + head; if (!branchSpec.matches(name, environment)) continue; } final ObjectId sha1 = entry.getValue(); Build built = buildData.getLastBuild(sha1); if (built != null) { listener.getLogger().println("[poll] Latest remote head revision on " + head + " is: " + sha1.getName() + " - already built by " + built.getBuildNumber()); continue; } listener.getLogger().println("[poll] Latest remote head revision on " + head + " is: " + sha1.getName()); return BUILD_NOW; } } } } return NO_CHANGES; } final Node node = GitUtils.workspaceToNode(workspace); final EnvVars environment = project instanceof AbstractProject ? 
GitUtils.getPollEnvironment((AbstractProject) project, workspace, launcher, listener) : project.getEnvironment(node, listener); FilePath workingDirectory = workingDirectory(project,workspace,environment,listener); // (Re)build if the working directory doesn't exist if (workingDirectory == null || !workingDirectory.exists()) { listener.getLogger().println("[poll] Working Directory does not exist"); return BUILD_NOW; } GitClient git = createClient(listener, environment, project, project.getLastBuild(), node, workingDirectory); if (git.hasGitRepo(false)) { // Repo is there - do a fetch listener.getLogger().println("Fetching changes from the remote Git repositories"); // Fetch updates for (RemoteConfig remoteRepository : getParamExpandedRepos(lastBuild, listener)) { fetchFrom(git, null, listener, remoteRepository); } listener.getLogger().println("Polling for changes in"); Collection<Revision> candidates = getBuildChooser().getCandidateRevisions( true, singleBranch, git, listener, buildData, new BuildChooserContextImpl(project, null, environment)); for (Revision c : candidates) { if (!isRevExcluded(git, c, listener, buildData)) { return PollingResult.SIGNIFICANT; } } return NO_CHANGES; } else { listener.getLogger().println("No Git repository yet, an initial checkout is required"); return PollingResult.SIGNIFICANT; } } /** * Allows {@link Builder}s and {@link Publisher}s to access a configured {@link GitClient} object to * perform additional git operations. * @param listener build log * @param environment environment variables to be used * @param build run context for the returned GitClient * @param workspace client workspace * @return git client for additional git operations * @throws IOException on input or output error * @throws InterruptedException when interrupted */ @NonNull public GitClient createClient(TaskListener listener, EnvVars environment, Run<?,?> build, FilePath workspace) throws IOException, InterruptedException { FilePath ws = workingDirectory(build.getParent(), workspace, environment, listener); /* ws will be null if the node which ran the build is offline */ if (ws != null) { ws.mkdirs(); // ensure it exists } return createClient(listener,environment, build.getParent(), build, GitUtils.workspaceToNode(workspace), ws, null); } /** * Allows {@link Publisher} and other post build actions to access a configured {@link GitClient}. * The post build action can use the {@code postBuildUnsupportedCommand} argument to control the * selection of a git tool by {@link GitToolChooser}. 
* @param listener build log * @param environment environment variables to be used * @param build run context for the returned GitClient * @param workspace client workspace * @param postBuildUnsupportedCommand passed by caller to control choice of git tool by GitTooChooser * @return git client for additional git operations * @throws IOException on input or output error * @throws InterruptedException when interrupted */ @NonNull public GitClient createClient(TaskListener listener, EnvVars environment, Run<?,?> build, FilePath workspace, UnsupportedCommand postBuildUnsupportedCommand) throws IOException, InterruptedException { FilePath ws = workingDirectory(build.getParent(), workspace, environment, listener); /* ws will be null if the node which ran the build is offline */ if (ws != null) { ws.mkdirs(); // ensure it exists } return createClient(listener,environment, build.getParent(), build, GitUtils.workspaceToNode(workspace), ws, postBuildUnsupportedCommand); } @NonNull /*package*/ GitClient createClient(TaskListener listener, EnvVars environment, Job project, Run<?, ?> build, Node n, FilePath ws) throws IOException, InterruptedException { return createClient(listener, environment, project, build, n, ws, null); } @NonNull /*package*/ GitClient createClient(TaskListener listener, EnvVars environment, Job project, Run<?, ?> build, Node n, FilePath ws, UnsupportedCommand postBuildUnsupportedCommand) throws IOException, InterruptedException { if (postBuildUnsupportedCommand == null) { /* UnsupportedCommand supports JGit by default */ postBuildUnsupportedCommand = new UnsupportedCommand(); } String gitExe = getGitExe(n, listener); GitTool gitTool = getGitTool(n, null, listener); if (!isDisableGitToolChooser()) { UnsupportedCommand unsupportedCommand = new UnsupportedCommand(); for (GitSCMExtension ext : extensions) { ext.determineSupportForJGit(this, unsupportedCommand); } GitToolChooser chooser = null; for (UserRemoteConfig uc : getUserRemoteConfigs()) { String ucCredentialsId = uc.getCredentialsId(); String url = getParameterString(uc.getUrl(), environment); /* If any of the extensions do not support JGit, it should not be suggested */ /* If the post build action does not support JGit, it should not be suggested */ chooser = new GitToolChooser(url, project, ucCredentialsId, gitTool, n, listener, unsupportedCommand.determineSupportForJGit() && postBuildUnsupportedCommand.determineSupportForJGit()); } if (chooser != null) { listener.getLogger().println("The recommended git tool is: " + chooser.getGitTool()); String updatedGitExe = chooser.getGitTool(); if (!updatedGitExe.equals("NONE")) { gitExe = updatedGitExe; } } } Git git = Git.with(listener, environment).in(ws).using(gitExe); GitClient c = git.getClient(); for (GitSCMExtension ext : extensions) { c = ext.decorate(this,c); } for (UserRemoteConfig uc : getUserRemoteConfigs()) { String ucCredentialsId = uc.getCredentialsId(); if (ucCredentialsId == null) { listener.getLogger().println("No credentials specified"); } else { String url = getParameterString(uc.getUrl(), environment); StandardUsernameCredentials credentials = lookupScanCredentials(project, build, url, ucCredentialsId); if (credentials != null) { c.addCredentials(url, credentials); if(!isHideCredentials()) { listener.getLogger().printf("using credential %s%n", credentials.getId()); } if (project != null && project.getLastBuild() != null) { CredentialsProvider.track(project.getLastBuild(), credentials); } } else { if(!isHideCredentials()) { listener.getLogger().printf("Warning: 
CredentialId \"%s\" could not be found.%n", ucCredentialsId); } } } } // TODO add default credentials return c; } private static StandardUsernameCredentials lookupScanCredentials(@CheckForNull Item project, @CheckForNull Run<?, ?> build, @CheckForNull String url, @CheckForNull String ucCredentialsId) { if (Util.fixEmpty(ucCredentialsId) == null) { return null; } else if (build != null) { // preferred mode as it can call Credentials.forRun return CredentialsProvider.findCredentialById( ucCredentialsId, StandardUsernameCredentials.class, build, URIRequirementBuilder.fromUri(url).build()); } else { return CredentialsMatchers.firstOrNull( CredentialsProvider.lookupCredentials( StandardUsernameCredentials.class, project, project instanceof Queue.Task ? ((Queue.Task) project).getDefaultAuthentication() : ACL.SYSTEM, URIRequirementBuilder.fromUri(url).build() ), CredentialsMatchers.allOf(CredentialsMatchers.withId(ucCredentialsId), GitClient.CREDENTIALS_MATCHER) ); } } private static CredentialsMatcher gitScanCredentialsMatcher() { return CredentialsMatchers.anyOf(CredentialsMatchers.instanceOf(StandardUsernamePasswordCredentials.class)); } @NonNull private BuildData fixNull(BuildData bd) { ScmName sn = getExtensions().get(ScmName.class); String scmName = sn == null ? null : sn.getName(); return bd != null ? bd : new BuildData(scmName, getUserRemoteConfigs()); } /** * Fetch information from a particular remote repository. * * @param git git client * @param run run context if it's running for build * @param listener build log * @param remoteRepository remote git repository * @throws InterruptedException when interrupted * @throws IOException on input or output error */ private void fetchFrom(GitClient git, @CheckForNull Run<?, ?> run, TaskListener listener, RemoteConfig remoteRepository) throws InterruptedException, IOException { boolean first = true; for (URIish url : remoteRepository.getURIs()) { try { if (first) { git.setRemoteUrl(remoteRepository.getName(), url.toPrivateASCIIString()); first = false; } else { git.addRemoteUrl(remoteRepository.getName(), url.toPrivateASCIIString()); } FetchCommand fetch = git.fetch_().from(url, remoteRepository.getFetchRefSpecs()); for (GitSCMExtension extension : extensions) { extension.decorateFetchCommand(this, run, git, listener, fetch); } fetch.execute(); } catch (GitException ex) { throw new GitException("Failed to fetch from "+url.toString(), ex); } } } private RemoteConfig newRemoteConfig(String name, String refUrl, RefSpec... refSpec) { try { Config repoConfig = new Config(); // Make up a repo config from the request parameters repoConfig.setString("remote", name, "url", refUrl); List<String> str = new ArrayList<>(); if(refSpec != null && refSpec.length > 0) for (RefSpec rs: refSpec) str.add(rs.toString()); repoConfig.setStringList("remote", name, "fetch", str); return RemoteConfig.getAllRemoteConfigs(repoConfig).get(0); } catch (Exception ex) { throw new GitException("Error trying to create JGit configuration", ex); } } @CheckForNull public GitTool resolveGitTool(TaskListener listener) { return GitUtils.resolveGitTool(gitTool, listener); } public String getGitExe(Node builtOn, TaskListener listener) { return getGitExe(builtOn, null, listener); } /** * Exposing so that we can get this from GitPublisher. 
* @param builtOn node where build was performed * @param env environment variables used in the build * @param listener build log * @return git exe for builtOn node, often "Default" or "jgit" */ public String getGitExe(Node builtOn, EnvVars env, TaskListener listener) { GitTool tool = GitUtils.resolveGitTool(gitTool, builtOn, env, listener); if(tool == null) { return null; } return tool.getGitExe(); } public GitTool getGitTool(Node builtOn, EnvVars env, TaskListener listener) { GitTool tool = GitUtils.resolveGitTool(gitTool, builtOn, env, listener); return tool; } /*package*/ static class BuildChooserContextImpl implements BuildChooserContext, Serializable { @SuppressFBWarnings(value="SE_BAD_FIELD", justification="known non-serializable field") final Job project; @SuppressFBWarnings(value="SE_BAD_FIELD", justification="known non-serializable field") final Run build; final EnvVars environment; BuildChooserContextImpl(Job project, Run build, EnvVars environment) { this.project = project; this.build = build; this.environment = environment; } public <T> T actOnBuild(@NonNull ContextCallable<Run<?,?>, T> callable) throws IOException, InterruptedException { return callable.invoke(build, FilePath.localChannel); } public <T> T actOnProject(@NonNull ContextCallable<Job<?,?>, T> callable) throws IOException, InterruptedException { return callable.invoke(project, FilePath.localChannel); } public Run<?, ?> getBuild() { return build; } public EnvVars getEnvironment() { return environment; } private Object writeReplace() { Channel currentChannel = Channel.current(); if (currentChannel == null) { return null; } return currentChannel.export(BuildChooserContext.class,new BuildChooserContext() { public <T> T actOnBuild(@NonNull ContextCallable<Run<?,?>, T> callable) throws IOException, InterruptedException { return callable.invoke(build,Channel.current()); } public <T> T actOnProject(@NonNull ContextCallable<Job<?,?>, T> callable) throws IOException, InterruptedException { return callable.invoke(project,Channel.current()); } public Run<?, ?> getBuild() { return build; } public EnvVars getEnvironment() { return environment; } }); } } /** * Determines the commit to be built in this round, updating the working tree accordingly, * and return the information about the selected commit. * * <p> * For robustness, this method shouldn't assume too much about the state of the working tree when this method * is called. In a general case, a working tree is a left-over from the previous build, so it can be quite * messed up (such as HEAD pointing to a random branch.) It is expected that this method brings it back * to the predictable clean state by the time this method returns. 
*/ private @NonNull Build determineRevisionToBuild(final Run build, final @NonNull BuildData buildData, final EnvVars environment, final @NonNull GitClient git, final @NonNull TaskListener listener) throws IOException, InterruptedException { PrintStream log = listener.getLogger(); Collection<Revision> candidates = Collections.emptyList(); final BuildChooserContext context = new BuildChooserContextImpl(build.getParent(), build, environment); getBuildChooser().prepareWorkingTree(git, listener, context); if (build.getClass().getName().equals("hudson.matrix.MatrixRun")) { candidates = GitSCMMatrixUtil.populateCandidatesFromRootBuild((AbstractBuild) build, this); } // parameter forcing the commit ID to build if (candidates.isEmpty() ) { final RevisionParameterAction rpa = build.getAction(RevisionParameterAction.class); if (rpa != null) { // in case the checkout is due to a commit notification on a // multiple scm configuration, it should be verified if the triggering repo remote // matches current repo remote to avoid JENKINS-26587 if (rpa.canOriginateFrom(this.getRepositories())) { candidates = Collections.singleton(rpa.toRevision(git)); } else { log.println("skipping resolution of commit " + rpa.commit + ", since it originates from another repository"); } } } if (candidates.isEmpty() ) { final String singleBranch = environment.expand( getSingleBranch(environment) ); candidates = getBuildChooser().getCandidateRevisions( false, singleBranch, git, listener, buildData, context); } if (candidates.isEmpty()) { // getBuildCandidates should make the last item the last build, so a re-build // will build the last built thing. throw new AbortException("Couldn't find any revision to build. Verify the repository and branch configuration for this job."); } Revision marked = candidates.iterator().next(); Revision rev = marked; // Modify the revision based on extensions for (GitSCMExtension ext : extensions) { rev = ext.decorateRevisionToBuild(this,build,git,listener,marked,rev); } Build revToBuild = new Build(marked, rev, build.getNumber(), null); buildData.saveBuild(revToBuild); if (buildData.getBuildsByBranchName().size() >= 100) { log.println("JENKINS-19022: warning: possible memory leak due to Git plugin usage; see: https://plugins.jenkins.io/git/#remove-git-plugin-buildsbybranch-builddata-script"); } boolean checkForMultipleRevisions = true; BuildSingleRevisionOnly ext = extensions.get(BuildSingleRevisionOnly.class); if (ext != null) { checkForMultipleRevisions = ext.enableMultipleRevisionDetection(); } if (candidates.size() > 1) { log.println("Multiple candidate revisions"); if (checkForMultipleRevisions) { Job<?, ?> job = build.getParent(); if (job instanceof AbstractProject) { AbstractProject project = (AbstractProject) job; if (!project.isDisabled()) { log.println("Scheduling another build to catch up with " + project.getFullDisplayName()); if (!project.scheduleBuild(0, new SCMTrigger.SCMTriggerCause("This build was triggered by build " + build.getNumber() + " because more than one build candidate was found."))) { log.println("WARNING: multiple candidate revisions, but unable to schedule build of " + project.getFullDisplayName()); } } } } } return revToBuild; } /** * Retrieve Git objects from the specified remotes by doing the likes of clone/fetch/pull/etc. * * By the end of this method, remote refs are updated to include all the commits found in the remote servers. 
*/ private void retrieveChanges(Run build, GitClient git, TaskListener listener) throws IOException, InterruptedException { final PrintStream log = listener.getLogger(); boolean removeSecondFetch = false; List<RemoteConfig> repos = getParamExpandedRepos(build, listener); if (repos.isEmpty()) return; // defensive check even though this is an invalid configuration if (git.hasGitRepo(false)) { // It's an update if (repos.size() == 1) log.println("Fetching changes from the remote Git repository"); else log.println(MessageFormat.format("Fetching changes from {0} remote Git repositories", repos.size())); } else { log.println("Cloning the remote Git repository"); RemoteConfig rc = repos.get(0); try { CloneCommand cmd = git.clone_().url(rc.getURIs().get(0).toPrivateString()).repositoryName(rc.getName()); for (GitSCMExtension ext : extensions) { ext.decorateCloneCommand(this, build, git, listener, cmd); } cmd.execute(); // determine if second fetch is required CloneOption option = extensions.get(CloneOption.class); if (!isAllowSecondFetch()) { removeSecondFetch = determineSecondFetch(option, rc); } } catch (GitException ex) { ex.printStackTrace(listener.error("Error cloning remote repo '" + rc.getName() + "'")); throw new AbortException("Error cloning remote repo '" + rc.getName() + "'"); } } for (RemoteConfig remoteRepository : repos) { if (remoteRepository.equals(repos.get(0)) && removeSecondFetch){ log.println("Avoid second fetch"); continue; } try { fetchFrom(git, build, listener, remoteRepository); } catch (GitException ex) { /* Allow retry by throwing AbortException instead of * GitException. See JENKINS-20531. */ ex.printStackTrace(listener.error("Error fetching remote repo '" + remoteRepository.getName() + "'")); throw new AbortException("Error fetching remote repo '" + remoteRepository.getName() + "'"); } } } private boolean determineSecondFetch(CloneOption option, @NonNull RemoteConfig rc) { List<RefSpec> initialFetchRefSpecs = rc.getFetchRefSpecs(); boolean isDefaultRefspec = true; // default refspec is any refspec with "refs/heads/" mapping boolean removeSecondFetch = true; if (initialFetchRefSpecs != null) { for (RefSpec ref : initialFetchRefSpecs) { if (!ref.toString().contains("refs/heads")) { isDefaultRefspec = false; // if refspec is not of default type, preserve second fetch } } if (option == null) { removeSecondFetch = isDefaultRefspec; } else { if (option.isHonorRefspec()) { removeSecondFetch = true; // avoid second fetch call if honor refspec is enabled } else { removeSecondFetch = isDefaultRefspec; } } } // if initial fetch refspec contains "refs/heads/*" (default refspec), ignore the second fetch call return removeSecondFetch; } @Override public void checkout(Run<?, ?> build, Launcher launcher, FilePath workspace, TaskListener listener, File changelogFile, SCMRevisionState baseline) throws IOException, InterruptedException { if (VERBOSE) listener.getLogger().println("Using checkout strategy: " + getBuildChooser().getDisplayName()); BuildData previousBuildData = getBuildData(build.getPreviousBuild()); // read only BuildData buildData = copyBuildData(build.getPreviousBuild()); if (VERBOSE && buildData.lastBuild != null) { listener.getLogger().println("Last Built Revision: " + buildData.lastBuild.revision); } EnvVars environment = build.getEnvironment(listener); GitClient git = createClient(listener, environment, build, workspace); if (launcher instanceof Launcher.DecoratedLauncher) { // We cannot check for git instanceof CliGitAPIImpl vs. 
JGitAPIImpl here since (when running on an agent) we will actually have a RemoteGitImpl which is opaque. listener.getLogger().println("Warning: JENKINS-30600: special launcher " + launcher + " will be ignored (a typical symptom is the Git executable not being run inside a designated container)"); } for (GitSCMExtension ext : extensions) { ext.beforeCheckout(this, build, git, listener); } retrieveChanges(build, git, listener); Build revToBuild = determineRevisionToBuild(build, buildData, environment, git, listener); // Track whether we're trying to add a duplicate BuildData, now that it's been updated with // revision info for this build etc. The default assumption is that it's a duplicate. boolean buildDataAlreadyPresent = false; List<BuildData> actions = build.getActions(BuildData.class); for (BuildData d: actions) { if (d.similarTo(buildData)) { buildDataAlreadyPresent = true; break; } } if (!actions.isEmpty()) { buildData.setIndex(actions.size()+1); } // If the BuildData is not already attached to this build, add it to the build and mark that // it wasn't already present, so that we add the GitTagAction and changelog after the checkout // finishes. if (!buildDataAlreadyPresent) { build.addAction(buildData); } environment.put(GIT_COMMIT, revToBuild.revision.getSha1String()); Branch localBranch = Iterables.getFirst(revToBuild.revision.getBranches(),null); String localBranchName = getParamLocalBranch(build, listener); if (localBranch != null && localBranch.getName() != null) { // null for a detached HEAD String remoteBranchName = getBranchName(localBranch); environment.put(GIT_BRANCH, remoteBranchName); LocalBranch lb = getExtensions().get(LocalBranch.class); if (lb != null) { String lbn = lb.getLocalBranch(); if (lbn == null || lbn.equals("**")) { // local branch is configured with empty value or "**" so use remote branch name for checkout localBranchName = deriveLocalBranchName(remoteBranchName); } environment.put(GIT_LOCAL_BRANCH, localBranchName); } } listener.getLogger().println("Checking out " + revToBuild.revision); CheckoutCommand checkoutCommand = git.checkout().branch(localBranchName).ref(revToBuild.revision.getSha1String()).deleteBranchIfExist(true); for (GitSCMExtension ext : this.getExtensions()) { ext.decorateCheckoutCommand(this, build, git, listener, checkoutCommand); } try { checkoutCommand.execute(); } catch (GitLockFailedException e) { // Rethrow IOException so the retry will be able to catch it throw new IOException("Could not checkout " + revToBuild.revision.getSha1String(), e); } // Needs to be after the checkout so that revToBuild is in the workspace try { printCommitMessageToLog(listener, git, revToBuild); } catch (IOException | ArithmeticException | GitException ge) { // JENKINS-45729 reports a git exception when revToBuild cannot be found in the workspace. // JENKINS-46628 reports a git exception when revToBuild cannot be found in the workspace. // JENKINS-62710 reports a JGit arithmetic exception on an older Java 8 system. // Don't let those exceptions block the build, this is an informational message only listener.getLogger().println("Exception logging commit message for " + revToBuild + ": " + ge.getMessage()); } // Don't add the tag and changelog if we've already processed this BuildData before. 
        if (!buildDataAlreadyPresent) {
            if (build.getActions(AbstractScmTagAction.class).isEmpty() && isAddGitTagAction()) {
                // only add the tag action if we can be unique as AbstractScmTagAction has a fixed UrlName
                // so only one of the actions is addressable by users
                LOGGER.log(Level.FINE, "Adding GitTagAction to build " + build.number);
                build.addAction(new GitTagAction(build, workspace, revToBuild.revision));
            } else {
                LOGGER.log(Level.FINE, "Not adding GitTagAction to build " + build.number);
            }

            if (changelogFile != null) {
                computeChangeLog(git, revToBuild.revision, listener, previousBuildData, new FilePath(changelogFile),
                        new BuildChooserContextImpl(build.getParent(), build, environment));
            }
        }

        for (GitSCMExtension ext : extensions) {
            ext.onCheckoutCompleted(this, build, git, listener);
        }
    }

    private void printCommitMessageToLog(TaskListener listener, GitClient git, final Build revToBuild)
            throws IOException {
        try {
            RevCommit commit = git.withRepository(new RevCommitRepositoryCallback(revToBuild));
            listener.getLogger().println("Commit message: \"" + commit.getShortMessage() + "\"");
        } catch (InterruptedException | MissingObjectException e) {
            e.printStackTrace(listener.error("Unable to retrieve commit message"));
        }
    }

    /**
     * Build up change log from all the branches that we've merged into {@code revToBuild}.
     *
     * <p>
     * Intuitively, a changelog is the list of commits that were added since the "previous build" to the current build.
     * However, because of the multiple branch support in Git, this notion is ambiguous. For example, consider the
     * following commit graph where M1...M4 belongs to branch M, B1..B2 belongs to branch B, and so on:
     *
     * <pre>
     *    M1 -> M2 -> M3 -> M4
     *  /   \     \     \
     * S ->  B1 -> B2    \
     *  \                 \
     *   C1 ---------------> C2
     * </pre>
     *
     * <p>
     * If Jenkins built B1, C1, B2, C2 in that order, then one would prefer that the changelog of B2 shows
     * just B1..B2, not C1..B2. To do this, we attribute every build to specific branches, and when we say
     * "since the previous build", what we really mean is "since the last build that built the same branch".
     *
     * <p>
     * TODO: if a branch merge is configured, then the first build will end up listing all the changes
     * in the upstream branch, which may be too many. To deal with this nicely, BuildData needs to remember
     * when we started merging this branch so that we can properly detect if the current build is the
     * first build that's merging a new branch.
     *
     * Another possibly sensible option is to always exclude all the commits that are happening in the remote branch.
     * Picture yourself developing a feature branch that closely tracks a busy mainline; then you might
     * not really care about the changes going on in the mainline. In this way, the changelog only lists your changes,
     * so "notify those who break the build" will not spam upstream developers either.
     *
     * @param git
     *      Used for invoking Git
     * @param revToBuild
     *      Points to the revision we'll be building. This includes all the branches we've merged.
     * @param listener
     *      Used for writing to build console
     * @param previousBuildData
     *      Information that captures what we did during the last build. We need this for changelog,
     *      or else we won't know where to stop.
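     *      (Worked example: for the graph above, the second build of branch B excludes B1, its last
     *      built revision, so the resulting changelog is exactly B1..B2 and the commits on branch C
     *      are never listed.)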
*/ private void computeChangeLog(GitClient git, Revision revToBuild, TaskListener listener, BuildData previousBuildData, FilePath changelogFile, BuildChooserContext context) throws IOException, InterruptedException { boolean executed = false; ChangelogCommand changelog = git.changelog(); changelog.includes(revToBuild.getSha1()); try (Writer out = new OutputStreamWriter(changelogFile.write(),"UTF-8")) { boolean exclusion = false; ChangelogToBranch changelogToBranch = getExtensions().get(ChangelogToBranch.class); if (changelogToBranch != null) { listener.getLogger().println("Using 'Changelog to branch' strategy."); changelog.excludes(changelogToBranch.getOptions().getRef()); exclusion = true; } else { for (Branch b : revToBuild.getBranches()) { Build lastRevWas = getBuildChooser().prevBuildForChangelog(b.getName(), previousBuildData, git, context); if (lastRevWas != null && lastRevWas.revision != null && git.isCommitInRepo(lastRevWas.getSHA1())) { changelog.excludes(lastRevWas.getSHA1()); exclusion = true; } } } if (!exclusion) { // this is the first time we are building this branch, so there's no base line to compare against. // if we force the changelog, it'll contain all the changes in the repo, which is not what we want. listener.getLogger().println("First time build. Skipping changelog."); } else { changelog.to(out).max(MAX_CHANGELOG).execute(); executed = true; } } catch (GitException ge) { ge.printStackTrace(listener.error("Unable to retrieve changeset")); } finally { if (!executed) changelog.abort(); } } @Override @Deprecated // Overrides a deprecated implementation, must also be deprecated public void buildEnvVars(AbstractBuild<?, ?> build, Map<String, String> env) { buildEnvironment(build, env); } @Override public void buildEnvironment(Run<?, ?> build, java.util.Map<String, String> env) { Revision rev = fixNull(getBuildData(build)).getLastBuiltRevision(); if (rev!=null) { Branch branch = Iterables.getFirst(rev.getBranches(), null); if (branch!=null && branch.getName()!=null) { String remoteBranchName = getBranchName(branch); env.put(GIT_BRANCH, remoteBranchName); // TODO this is unmodular; should rather override LocalBranch.populateEnvironmentVariables LocalBranch lb = getExtensions().get(LocalBranch.class); if (lb != null) { // Set GIT_LOCAL_BRANCH variable from the LocalBranch extension String localBranchName = lb.getLocalBranch(); if (localBranchName == null || localBranchName.equals("**")) { // local branch is configured with empty value or "**" so use remote branch name for checkout localBranchName = deriveLocalBranchName(remoteBranchName); } env.put(GIT_LOCAL_BRANCH, localBranchName); } RelativeTargetDirectory rtd = getExtensions().get(RelativeTargetDirectory.class); if (rtd != null) { String localRelativeTargetDir = rtd.getRelativeTargetDir(); if ( localRelativeTargetDir == null ){ localRelativeTargetDir = ""; } env.put(GIT_CHECKOUT_DIR, localRelativeTargetDir); } String prevCommit = getLastBuiltCommitOfBranch(build, branch); if (prevCommit != null) { env.put(GIT_PREVIOUS_COMMIT, prevCommit); } String prevSuccessfulCommit = getLastSuccessfulBuiltCommitOfBranch(build, branch); if (prevSuccessfulCommit != null) { env.put(GIT_PREVIOUS_SUCCESSFUL_COMMIT, prevSuccessfulCommit); } } String sha1 = Util.fixEmpty(rev.getSha1String()); if (sha1 != null && !sha1.isEmpty()) { env.put(GIT_COMMIT, sha1); } } /* Check all repository URLs are not empty */ /* JENKINS-38608 reports an unhelpful error message when a repository URL is empty */ /* Throws an IllegalArgumentException because that 
exception is thrown by env.put() on a null argument */ int repoCount = 1; for (UserRemoteConfig config:userRemoteConfigs) { if (config.getUrl() == null) { throw new IllegalArgumentException("Git repository URL " + repoCount + " is an empty string in job definition. Checkout requires a valid repository URL"); } repoCount++; } if (userRemoteConfigs.size()>0) { env.put(GIT_URL, userRemoteConfigs.get(0).getUrl()); } if (userRemoteConfigs.size()>1) { int count=1; for (UserRemoteConfig config:userRemoteConfigs) { env.put(GIT_URL+"_"+count, config.getUrl()); count++; } } getDescriptor().populateEnvironmentVariables(env); for (GitSCMExtension ext : extensions) { ext.populateEnvironmentVariables(this, env); } } private String getBranchName(Branch branch) { String name = branch.getName(); if(name.startsWith("refs/remotes/")) { //Restore expected previous behaviour name = name.substring("refs/remotes/".length()); } return name; } private String getLastBuiltCommitOfBranch(Run<?, ?> build, Branch branch) { String prevCommit = null; if (build.getPreviousBuiltBuild() != null) { final Build lastBuildOfBranch = fixNull(getBuildData(build.getPreviousBuiltBuild())).getLastBuildOfBranch(branch.getName()); if (lastBuildOfBranch != null) { Revision previousRev = lastBuildOfBranch.getRevision(); if (previousRev != null) { prevCommit = previousRev.getSha1String(); } } } return prevCommit; } private String getLastSuccessfulBuiltCommitOfBranch(Run<?, ?> build, Branch branch) { String prevCommit = null; if (build.getPreviousSuccessfulBuild() != null) { final Build lastSuccessfulBuildOfBranch = fixNull(getBuildData(build.getPreviousSuccessfulBuild())).getLastBuildOfBranch(branch.getName()); if (lastSuccessfulBuildOfBranch != null) { Revision previousRev = lastSuccessfulBuildOfBranch.getRevision(); if (previousRev != null) { prevCommit = previousRev.getSha1String(); } } } return prevCommit; } @Override public ChangeLogParser createChangeLogParser() { try { GitClient gitClient = Git.with(TaskListener.NULL, new EnvVars()).in(new File(".")).using(gitTool).getClient(); return new GitChangeLogParser(gitClient, getExtensions().get(AuthorInChangelog.class) != null); } catch (IOException | InterruptedException e) { LOGGER.log(Level.WARNING, "Git client using '" + gitTool + "' changelog parser failed, using deprecated changelog parser", e); } return new GitChangeLogParser(null, getExtensions().get(AuthorInChangelog.class) != null); } @Extension public static final class DescriptorImpl extends SCMDescriptor<GitSCM> { private String gitExe; private String globalConfigName; private String globalConfigEmail; private boolean createAccountBasedOnEmail; private boolean useExistingAccountWithSameEmail; // private GitClientType defaultClientType = GitClientType.GITCLI; private boolean showEntireCommitSummaryInChanges; private boolean hideCredentials; private boolean allowSecondFetch; private boolean disableGitToolChooser; private boolean addGitTagAction; public DescriptorImpl() { super(GitSCM.class, GitRepositoryBrowser.class); load(); } @NonNull @Override public Permission getRequiredGlobalConfigPagePermission() { return Jenkins.MANAGE; } /** * Package protected method that was added for temporary use * with the Manage permission until the plugin required a * Jenkins core version that has Manage permission available. * Unfortunately, because it is package protected, it is part * of the class signature and needs to be retained for * compatibility. 
Method was removed in git plugin 4.8.0 and * the removal seems to have exposed a bug elsewhere that is * reported as https://issues.jenkins.io/browse/JENKINS-66296 , * Restoring this method seems to resolve that issue. */ Permission getJenkinsManageOrAdmin() { return Jenkins.MANAGE; } public boolean isShowEntireCommitSummaryInChanges() { return showEntireCommitSummaryInChanges; } public boolean isHideCredentials() { return hideCredentials; } public void setHideCredentials(boolean hideCredentials) { this.hideCredentials = hideCredentials; } public void setShowEntireCommitSummaryInChanges(boolean showEntireCommitSummaryInChanges) { this.showEntireCommitSummaryInChanges = showEntireCommitSummaryInChanges; } public String getDisplayName() { return "Git"; } @Override public boolean isApplicable(Job project) { return true; } public List<GitSCMExtensionDescriptor> getExtensionDescriptors() { return GitSCMExtensionDescriptor.all(); } public boolean showGitToolOptions() { return Jenkins.get().getDescriptorByType(GitTool.DescriptorImpl.class).getInstallations().length>1; } /** * Lists available toolinstallations. * @return list of available git tools */ public List<GitTool> getGitTools() { GitTool[] gitToolInstallations = Jenkins.get().getDescriptorByType(GitTool.DescriptorImpl.class).getInstallations(); return Arrays.asList(gitToolInstallations); } public ListBoxModel doFillGitToolItems() { ListBoxModel r = new ListBoxModel(); for (GitTool git : getGitTools()) { r.add(git.getName()); } return r; } /** * Path to git executable. * @deprecated * @see GitTool * @return git executable */ @Deprecated public String getGitExe() { return gitExe; } /** * Global setting to be used to set GIT_COMMITTER_NAME and GIT_AUTHOR_NAME. * @return user.name value */ public String getGlobalConfigName() { return Util.fixEmptyAndTrim(globalConfigName); } /** * Global setting to be used to set GIT_COMMITTER_NAME and GIT_AUTHOR_NAME. * @param globalConfigName user.name value to be assigned */ public void setGlobalConfigName(String globalConfigName) { this.globalConfigName = globalConfigName; } /** * Global setting to be used to set GIT_COMMITTER_EMAIL and GIT_AUTHOR_EMAIL. * @return user.email value */ public String getGlobalConfigEmail() { return Util.fixEmptyAndTrim(globalConfigEmail); } /** * Global setting to be used to set GIT_COMMITTER_EMAIL and GIT_AUTHOR_EMAIL. 
* @param globalConfigEmail user.email value to be assigned */ public void setGlobalConfigEmail(String globalConfigEmail) { this.globalConfigEmail = globalConfigEmail; } public boolean isCreateAccountBasedOnEmail() { return createAccountBasedOnEmail; } public void setCreateAccountBasedOnEmail(boolean createAccountBasedOnEmail) { this.createAccountBasedOnEmail = createAccountBasedOnEmail; } public boolean isUseExistingAccountWithSameEmail() { return useExistingAccountWithSameEmail; } public void setUseExistingAccountWithSameEmail(boolean useExistingAccountWithSameEmail) { this.useExistingAccountWithSameEmail = useExistingAccountWithSameEmail; } public boolean isAllowSecondFetch() { return allowSecondFetch; } public void setAllowSecondFetch(boolean allowSecondFetch) { this.allowSecondFetch = allowSecondFetch; } public boolean isDisableGitToolChooser() { return disableGitToolChooser; } public void setDisableGitToolChooser(boolean disableGitToolChooser) { this.disableGitToolChooser = disableGitToolChooser; } public boolean isAddGitTagAction() { return addGitTagAction; } public void setAddGitTagAction(boolean addGitTagAction) { this.addGitTagAction = addGitTagAction; } /** * Old configuration of git executable - exposed so that we can * migrate this setting to GitTool without deprecation warnings. * @return git executable */ public String getOldGitExe() { return gitExe; } public static List<RemoteConfig> createRepositoryConfigurations(String[] urls, String[] repoNames, String[] refs) throws IOException { List<RemoteConfig> remoteRepositories; Config repoConfig = new Config(); // Make up a repo config from the request parameters String[] names = repoNames; names = GitUtils.fixupNames(names, urls); for (int i = 0; i < names.length; i++) { String url = urls[i]; if (url == null) { continue; } String name = names[i]; name = name.replace(' ', '_'); if (isBlank(refs[i])) { refs[i] = "+refs/heads/*:refs/remotes/" + name + "/*"; } repoConfig.setString("remote", name, "url", url); repoConfig.setStringList("remote", name, "fetch", new ArrayList<>(Arrays.asList(refs[i].split("\\s+")))); } try { remoteRepositories = RemoteConfig.getAllRemoteConfigs(repoConfig); } catch (Exception e) { throw new GitException("Error creating repositories", e); } return remoteRepositories; } public static PreBuildMergeOptions createMergeOptions(UserMergeOptions mergeOptionsBean, List<RemoteConfig> remoteRepositories) throws FormException { PreBuildMergeOptions mergeOptions = new PreBuildMergeOptions(); if (mergeOptionsBean != null) { RemoteConfig mergeRemote = null; String mergeRemoteName = mergeOptionsBean.getMergeRemote().trim(); if (mergeRemoteName.length() == 0) { mergeRemote = remoteRepositories.get(0); } else { for (RemoteConfig remote : remoteRepositories) { if (remote.getName().equals(mergeRemoteName)) { mergeRemote = remote; break; } } } if (mergeRemote == null) { throw new FormException("No remote repository configured with name '" + mergeRemoteName + "'", "git.mergeRemote"); } mergeOptions.setMergeRemote(mergeRemote); mergeOptions.setMergeTarget(mergeOptionsBean.getMergeTarget()); mergeOptions.setMergeStrategy(mergeOptionsBean.getMergeStrategy()); mergeOptions.setFastForwardMode(mergeOptionsBean.getFastForwardMode()); } return mergeOptions; } public FormValidation doGitRemoteNameCheck(StaplerRequest req) throws IOException, ServletException { String mergeRemoteName = req.getParameter("value"); boolean isMerge = req.getParameter("isMerge") != null; // Added isMerge because we don't want to allow empty remote names for 
tag/branch pushes. if (mergeRemoteName.length() == 0 && isMerge) { return FormValidation.ok(); } String[] urls = req.getParameterValues("repo.url"); String[] names = req.getParameterValues("repo.name"); if (urls != null && names != null) for (String name : GitUtils.fixupNames(names, urls)) if (name.equals(mergeRemoteName)) return FormValidation.ok(); return FormValidation.error("No remote repository configured with name '" + mergeRemoteName + "'"); } @Override public boolean configure(StaplerRequest req, JSONObject formData) throws FormException { req.bindJSON(this, formData); save(); return true; } /** * Fill in the environment variables for launching git * @param env base environment variables */ public void populateEnvironmentVariables(Map<String,String> env) { String name = getGlobalConfigName(); if (name!=null) { env.put("GIT_COMMITTER_NAME", name); env.put("GIT_AUTHOR_NAME", name); } String email = getGlobalConfigEmail(); if (email!=null) { env.put("GIT_COMMITTER_EMAIL", email); env.put("GIT_AUTHOR_EMAIL", email); } } // public GitClientType getDefaultClientType() { // return defaultClientType; // } // // public void setDefaultClientType(String defaultClientType) { // this.defaultClientType = GitClientType.valueOf(defaultClientType); // } } private static final long serialVersionUID = 1L; @Whitelisted @Deprecated public boolean isDoGenerateSubmoduleConfigurations() { return false; } @Exported @Whitelisted @SuppressFBWarnings(value="EI_EXPOSE_REP", justification="Low risk") public List<BranchSpec> getBranches() { return branches; } @Override public String getKey() { ScmName scmName = getExtensions().get(ScmName.class); if (scmName != null) { return scmName.getName(); } StringBuilder b = new StringBuilder("git"); for (RemoteConfig cfg : getRepositories()) { for (URIish uri : cfg.getURIs()) { b.append(' ').append(uri.toString()); } } return b.toString(); } /** * @deprecated Use {@link PreBuildMerge}. * @return pre-build merge options * @throws FormException on form error */ @Exported @Deprecated public PreBuildMergeOptions getMergeOptions() throws FormException { return DescriptorImpl.createMergeOptions(getUserMergeOptions(), remoteRepositories); } private boolean isRelevantBuildData(BuildData bd) { for(UserRemoteConfig c : getUserRemoteConfigs()) { if(bd.hasBeenReferenced(c.getUrl())) { return true; } } return false; } /** * @deprecated * @param build run whose build data is returned * @param clone true if returned build data should be copied rather than referenced * @return build data for build run */ public BuildData getBuildData(Run build, boolean clone) { return clone ? copyBuildData(build) : getBuildData(build); } /** * Like {@link #getBuildData(Run)}, but copy the data into a new object, * which is used as the first step for updating the data for the next build. * @param build run whose BuildData is returned * @return copy of build data for build */ public BuildData copyBuildData(Run build) { BuildData base = getBuildData(build); ScmName sn = getExtensions().get(ScmName.class); String scmName = sn == null ? null : sn.getName(); if (base==null) return new BuildData(scmName, getUserRemoteConfigs()); else { BuildData buildData = base.clone(); buildData.setScmName(scmName); return buildData; } } /** * Find the build log (BuildData) recorded with the last build that completed. BuildData * may not be recorded if an exception occurs in the plugin logic. 
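 * <p>
 * Walks backward through the run history; within each run, the most recently attached
 * {@link BuildData} that references one of the configured remotes wins, so jobs that check
 * out the same repository several times resolve to the latest relevant checkout.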
     *
     * @param build run whose build data is returned
     * @return the last recorded build data
     */
    public @CheckForNull BuildData getBuildData(Run build) {
        BuildData buildData = null;
        while (build != null) {
            List<BuildData> buildDataList = build.getActions(BuildData.class);
            // We need to get the latest recorded build data. It may happen
            // that the build has more than one checkout of the same repo.
            List<BuildData> buildDataListReverted = reversedView(buildDataList);
            for (BuildData bd : buildDataListReverted) {
                if (bd != null && isRelevantBuildData(bd)) {
                    buildData = bd;
                    break;
                }
            }
            if (buildData != null) {
                break;
            }
            build = build.getPreviousBuild();
        }
        return buildData;
    }

    /**
     * Gets a reversed view of an unmodifiable list without using additional space or time.
     * @param list The list to reverse.
     * @param <T> The type of the elements of the list.
     * @return The list, <i>reversed</i>.
     */
    private <T> List<T> reversedView(final List<T> list) {
        return new AbstractList<T>() {
            @Override
            public T get(int index) {
                return list.get(list.size() - 1 - index);
            }

            @Override
            public int size() {
                return list.size();
            }
        };
    }

    /**
     * Given the workspace, gets the working directory, which will be the workspace
     * if no relative target dir is specified. Otherwise, it'll be "workspace/relativeTargetDir".
     *
     * @param context job context for working directory
     * @param workspace initial FilePath of job workspace
     * @param environment environment variables used in job context
     * @param listener build log
     * @return working directory or null if workspace is null
     * @throws IOException on input or output error
     * @throws InterruptedException when interrupted
     */
    protected FilePath workingDirectory(Job<?,?> context, FilePath workspace, EnvVars environment, TaskListener listener) throws IOException, InterruptedException {
        // JENKINS-10880: workspace can be null
        if (workspace == null) {
            return null;
        }
        for (GitSCMExtension ext : extensions) {
            FilePath r = ext.getWorkingDirectory(this, context, workspace, environment, listener);
            if (r != null) return r;
        }
        return workspace;
    }

    /**
     * Given a Revision "r", check whether the list of revisions "COMMITS_WE_HAVE_BUILT..r" is to be entirely excluded given the exclusion rules.
     *
     * @param git GitClient object
     * @param r Revision object
     * @param listener build log
     * @return true if every commit in the range is excluded by the exclusion rules, false otherwise.
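     *         For example, with {@link PathRestriction} excluding {@code docs/.*} (an illustrative
     *         pattern), a candidate range whose commits only touch {@code docs/} is skipped by
     *         polling, while a single commit touching source files makes the whole range buildable.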
     */
    private boolean isRevExcluded(GitClient git, Revision r, TaskListener listener, BuildData buildData) throws IOException, InterruptedException {
        try {
            List<String> revShow;
            if (buildData != null && buildData.lastBuild != null) {
                if (getExtensions().get(PathRestriction.class) != null) {
                    revShow = git.showRevision(buildData.lastBuild.revision.getSha1(), r.getSha1());
                } else {
                    revShow = git.showRevision(buildData.lastBuild.revision.getSha1(), r.getSha1(), false);
                }
            } else {
                revShow = git.showRevision(r.getSha1());
            }

            revShow.add("commit "); // sentinel value

            int start = 0, idx = 0;
            for (String line : revShow) {
                if (line.startsWith("commit ") && idx != 0) {
                    boolean showEntireCommitSummary = GitChangeSet.isShowEntireCommitSummaryInChanges() || !(git instanceof CliGitAPIImpl);
                    GitChangeSet change = new GitChangeSet(revShow.subList(start, idx), getExtensions().get(AuthorInChangelog.class) != null, showEntireCommitSummary);

                    Boolean excludeThisCommit = null;
                    for (GitSCMExtension ext : extensions) {
                        excludeThisCommit = ext.isRevExcluded(this, git, change, listener, buildData);
                        if (excludeThisCommit != null)
                            break;
                    }
                    if (excludeThisCommit == null || !excludeThisCommit)
                        return false; // this sequence of commits has one commit that we want to build
                    start = idx;
                }

                idx++;
            }

            assert start == revShow.size() - 1; // every commit got excluded
            return true;
        } catch (GitException e) {
            e.printStackTrace(listener.error("Failed to determine if we want to exclude " + r.getSha1String()));
            return false; // for historical reasons this is not considered a fatal error.
        }
    }

    /**
     * Data bound setter for doGenerateSubmoduleConfigurations that
     * intentionally ignores the value passed by the caller.
     * Submodule configuration generation was untested and unlikely to
     * work prior to git plugin 4.6.0. It was removed from git plugin
     * 4.6.0 to improve the experience for Pipeline Syntax users.
     *
     * @param ignoredValue ignored because submodule configuration
     * generation is no longer supported
     */
    @DataBoundSetter
    public void setDoGenerateSubmoduleConfigurations(boolean ignoredValue) {
    }

    /**
     * Returns false, the constant value of doGenerateSubmoduleConfigurations.
     * @return false, the constant value of doGenerateSubmoduleConfigurations.
     */
    @Deprecated
    public boolean getDoGenerateSubmoduleConfigurations() {
        return doGenerateSubmoduleConfigurations;
    }

    @Initializer(after=PLUGINS_STARTED)
    public static void onLoaded() {
        Jenkins jenkins = Jenkins.get();
        DescriptorImpl desc = jenkins.getDescriptorByType(DescriptorImpl.class);

        if (desc.getOldGitExe() != null) {
            String exe = desc.getOldGitExe();
            String defaultGit = GitTool.getDefaultInstallation().getGitExe();
            if (exe.equals(defaultGit)) {
                return;
            }
            System.err.println("[WARNING] you're using deprecated gitexe attribute to configure git plugin. Use Git installations");
        }
    }

    @Initializer(before=JOB_LOADED)
    public static void configureXtream() {
        Run.XSTREAM.registerConverter(new ObjectIdConverter());
        Items.XSTREAM.registerConverter(new RemoteConfigConverter(Items.XSTREAM));
        Items.XSTREAM.alias("org.spearce.jgit.transport.RemoteConfig", RemoteConfig.class);
    }

    private static final Logger LOGGER = Logger.getLogger(GitSCM.class.getName());

    /**
     * Set to true to enable more logging to build's {@link TaskListener}.
     * Used by various classes in this package.
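     * <p>
     * Enabled by setting a JVM system property, e.g.
     * {@code -Dhudson.plugins.git.GitSCM.verbose=true}.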
*/ @SuppressFBWarnings(value="MS_SHOULD_BE_FINAL", justification="Not final so users can adjust log verbosity") public static boolean VERBOSE = Boolean.getBoolean(GitSCM.class.getName() + ".verbose"); /** * To avoid pointlessly large changelog, we'll limit the number of changes up to this. */ public static final int MAX_CHANGELOG = Integer.getInteger(GitSCM.class.getName()+".maxChangelog",1024); }
src/main/java/hudson/plugins/git/GitSCM.java
package hudson.plugins.git; import com.cloudbees.plugins.credentials.CredentialsMatcher; import com.cloudbees.plugins.credentials.CredentialsMatchers; import com.cloudbees.plugins.credentials.CredentialsProvider; import com.cloudbees.plugins.credentials.common.StandardUsernameCredentials; import com.cloudbees.plugins.credentials.common.StandardUsernamePasswordCredentials; import com.cloudbees.plugins.credentials.domains.URIRequirementBuilder; import com.google.common.collect.Iterables; import edu.umd.cs.findbugs.annotations.CheckForNull; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import hudson.AbortException; import hudson.EnvVars; import hudson.Extension; import hudson.FilePath; import hudson.Launcher; import hudson.init.Initializer; import hudson.model.*; import hudson.model.Descriptor.FormException; import hudson.plugins.git.browser.GitRepositoryBrowser; import hudson.plugins.git.extensions.GitSCMExtension; import hudson.plugins.git.extensions.GitSCMExtensionDescriptor; import hudson.plugins.git.extensions.impl.AuthorInChangelog; import hudson.plugins.git.extensions.impl.BuildChooserSetting; import hudson.plugins.git.extensions.impl.BuildSingleRevisionOnly; import hudson.plugins.git.extensions.impl.ChangelogToBranch; import hudson.plugins.git.extensions.impl.CloneOption; import hudson.plugins.git.extensions.impl.PathRestriction; import hudson.plugins.git.extensions.impl.LocalBranch; import hudson.plugins.git.extensions.impl.RelativeTargetDirectory; import hudson.plugins.git.extensions.impl.PreBuildMerge; import hudson.plugins.git.opt.PreBuildMergeOptions; import hudson.plugins.git.util.Build; import hudson.plugins.git.util.*; import hudson.remoting.Channel; import hudson.scm.AbstractScmTagAction; import hudson.scm.ChangeLogParser; import hudson.scm.PollingResult; import hudson.scm.RepositoryBrowser; import hudson.scm.SCMDescriptor; import hudson.scm.SCMRevisionState; import hudson.security.ACL; import hudson.security.Permission; import hudson.tasks.Builder; import hudson.tasks.Publisher; import hudson.triggers.SCMTrigger; import hudson.util.DescribableList; import hudson.util.FormValidation; import hudson.util.ListBoxModel; import jenkins.model.Jenkins; import jenkins.plugins.git.GitSCMMatrixUtil; import jenkins.plugins.git.GitToolChooser; import net.sf.json.JSONObject; import org.eclipse.jgit.errors.MissingObjectException; import org.eclipse.jgit.lib.Config; import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.revwalk.RevCommit; import org.eclipse.jgit.transport.RefSpec; import org.eclipse.jgit.transport.RemoteConfig; import org.eclipse.jgit.transport.URIish; import org.jenkinsci.plugins.gitclient.*; import org.jenkinsci.plugins.scriptsecurity.sandbox.whitelists.Whitelisted; import org.kohsuke.stapler.DataBoundConstructor; import org.kohsuke.stapler.DataBoundSetter; import org.kohsuke.stapler.StaplerRequest; import org.kohsuke.stapler.export.Exported; import javax.servlet.ServletException; import java.io.File; import java.io.IOException; import java.io.OutputStreamWriter; import java.io.PrintStream; import java.io.Serializable; import java.io.Writer; import java.text.MessageFormat; import java.util.AbstractList; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; import 
static com.google.common.collect.Lists.newArrayList; import static hudson.init.InitMilestone.JOB_LOADED; import static hudson.init.InitMilestone.PLUGINS_STARTED; import hudson.plugins.git.browser.BitbucketWeb; import hudson.plugins.git.browser.GitLab; import hudson.plugins.git.browser.GithubWeb; import static hudson.scm.PollingResult.*; import hudson.Util; import hudson.plugins.git.extensions.impl.ScmName; import hudson.util.LogTaskListener; import java.util.Map.Entry; import java.util.regex.Matcher; import java.util.regex.Pattern; import static org.apache.commons.collections.CollectionUtils.isEmpty; import static org.apache.commons.lang.StringUtils.isBlank; /** * Git SCM. * * @author Nigel Magnay * @author Andrew Bayer * @author Nicolas Deloof * @author Kohsuke Kawaguchi * ... and many others */ public class GitSCM extends GitSCMBackwardCompatibility { /** * Store a config version so we're able to migrate config on various * functionality upgrades. */ private Long configVersion; /** * All the remote repositories that we know about. */ private List<UserRemoteConfig> userRemoteConfigs; private transient List<RemoteConfig> remoteRepositories; /** * All the branches that we wish to care about building. */ private List<BranchSpec> branches; private boolean doGenerateSubmoduleConfigurations = false; @CheckForNull public String gitTool; @CheckForNull private GitRepositoryBrowser browser; private Collection<SubmoduleConfig> submoduleCfg = Collections.<SubmoduleConfig>emptyList(); public static final String GIT_BRANCH = "GIT_BRANCH"; public static final String GIT_LOCAL_BRANCH = "GIT_LOCAL_BRANCH"; public static final String GIT_CHECKOUT_DIR = "GIT_CHECKOUT_DIR"; public static final String GIT_COMMIT = "GIT_COMMIT"; public static final String GIT_PREVIOUS_COMMIT = "GIT_PREVIOUS_COMMIT"; public static final String GIT_PREVIOUS_SUCCESSFUL_COMMIT = "GIT_PREVIOUS_SUCCESSFUL_COMMIT"; public static final String GIT_URL = "GIT_URL"; /** * All the configured extensions attached to this. */ @SuppressFBWarnings(value="SE_BAD_FIELD", justification="Known non-serializable field") private DescribableList<GitSCMExtension,GitSCMExtensionDescriptor> extensions; @Whitelisted @Deprecated @SuppressFBWarnings(value="EI_EXPOSE_REP", justification="Unread deprecated collection") public Collection<SubmoduleConfig> getSubmoduleCfg() { return submoduleCfg; } @DataBoundSetter public void setSubmoduleCfg(Collection<SubmoduleConfig> submoduleCfg) { } public static List<UserRemoteConfig> createRepoList(String url, String credentialsId) { List<UserRemoteConfig> repoList = new ArrayList<>(); repoList.add(new UserRemoteConfig(url, null, null, credentialsId)); return repoList; } /** * A convenience constructor that sets everything to default. * * @param repositoryUrl git repository URL * Repository URL to clone from. 
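 * <p>
 * Usage sketch (the repository URL is illustrative):
 * <pre>{@code
 * GitSCM scm = new GitSCM("https://example.com/project.git");
 * // equivalent long form via the DataBoundConstructor:
 * GitSCM scm2 = new GitSCM(
 *         GitSCM.createRepoList("https://example.com/project.git", null),
 *         Collections.singletonList(new BranchSpec("")),
 *         null, null, Collections.<GitSCMExtension>emptyList());
 * }</pre>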
*/ public GitSCM(String repositoryUrl) { this( createRepoList(repositoryUrl, null), Collections.singletonList(new BranchSpec("")), null, null, Collections.<GitSCMExtension>emptyList()); } @Deprecated public GitSCM( List<UserRemoteConfig> userRemoteConfigs, List<BranchSpec> branches, Boolean doGenerateSubmoduleConfigurations, Collection<SubmoduleConfig> submoduleCfg, @CheckForNull GitRepositoryBrowser browser, @CheckForNull String gitTool, List<GitSCMExtension> extensions) { this(userRemoteConfigs, branches, browser, gitTool, extensions); } @DataBoundConstructor @SuppressFBWarnings(value="EI_EXPOSE_REP2", justification="Modify access is assumed for userRemoteConfigs") public GitSCM( List<UserRemoteConfig> userRemoteConfigs, List<BranchSpec> branches, @CheckForNull GitRepositoryBrowser browser, @CheckForNull String gitTool, List<GitSCMExtension> extensions) { // moved from createBranches this.branches = isEmpty(branches) ? newArrayList(new BranchSpec("*/master")) : branches; this.userRemoteConfigs = userRemoteConfigs; updateFromUserData(); this.browser = browser; this.configVersion = 2L; this.gitTool = gitTool; this.extensions = new DescribableList<>(Saveable.NOOP,Util.fixNull(extensions)); getBuildChooser(); // set the gitSCM field. } /** * All the configured extensions attached to this {@link GitSCM}. * * Going forward this is primarily how we'll support esoteric use cases. * * @since 2.0 */ @Whitelisted @SuppressFBWarnings(value="EI_EXPOSE_REP", justification="Low risk") public DescribableList<GitSCMExtension, GitSCMExtensionDescriptor> getExtensions() { return extensions; } private void updateFromUserData() throws GitException { // do what newInstance used to do directly from the request data if (userRemoteConfigs == null) { return; /* Prevent NPE when no remote config defined */ } try { String[] pUrls = new String[userRemoteConfigs.size()]; String[] repoNames = new String[userRemoteConfigs.size()]; String[] refSpecs = new String[userRemoteConfigs.size()]; for (int i = 0; i < userRemoteConfigs.size(); ++i) { pUrls[i] = userRemoteConfigs.get(i).getUrl(); repoNames[i] = userRemoteConfigs.get(i).getName(); refSpecs[i] = userRemoteConfigs.get(i).getRefspec(); } this.remoteRepositories = DescriptorImpl.createRepositoryConfigurations(pUrls, repoNames, refSpecs); // TODO: replace with new repositories } catch (IOException e1) { throw new GitException("Error creating repositories", e1); } } @SuppressWarnings("deprecation") // `source` field is deprecated but required public Object readResolve() throws IOException { // Migrate data // Default unspecified to v0 if (configVersion == null) { configVersion = 0L; } // Deprecated field needed to retain compatibility if (source != null) { remoteRepositories = new ArrayList<>(); branches = new ArrayList<>(); List<RefSpec> rs = new ArrayList<>(); rs.add(new RefSpec("+refs/heads/*:refs/remotes/origin/*")); remoteRepositories.add(newRemoteConfig("origin", source, rs.toArray(new RefSpec[0]))); if (branch != null) { branches.add(new BranchSpec(branch)); } else { branches.add(new BranchSpec("*/master")); } } if (configVersion < 1 && branches != null) { // Migrate the branch specs from // single * wildcard, to ** wildcard. 
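            // e.g. a v0 spec "*/master" becomes "**/master" after the rewrite below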
for (BranchSpec branchSpec : branches) { String name = branchSpec.getName(); name = name.replace("*", "**"); branchSpec.setName(name); } } if (remoteRepositories != null && userRemoteConfigs == null) { userRemoteConfigs = new ArrayList<>(); for(RemoteConfig cfg : remoteRepositories) { // converted as in config.jelly String url = ""; if (cfg.getURIs().size() > 0 && cfg.getURIs().get(0) != null) url = cfg.getURIs().get(0).toPrivateString(); String refspec = ""; if (cfg.getFetchRefSpecs().size() > 0 && cfg.getFetchRefSpecs().get(0) != null) refspec = cfg.getFetchRefSpecs().get(0).toString(); userRemoteConfigs.add(new UserRemoteConfig(url, cfg.getName(), refspec, null)); } } // patch internal objects from user data // if (configVersion == 2) { if (remoteRepositories == null) { // if we don't catch GitException here, the whole job fails to load try { updateFromUserData(); } catch (GitException e) { LOGGER.log(Level.WARNING, "Failed to load SCM data", e); } } if (extensions==null) extensions = new DescribableList<>(Saveable.NOOP); readBackExtensionsFromLegacy(); if (choosingStrategy != null && getBuildChooser().getClass()==DefaultBuildChooser.class) { for (BuildChooserDescriptor d : BuildChooser.all()) { if (choosingStrategy.equals(d.getLegacyId())) { try { setBuildChooser(d.clazz.newInstance()); } catch (InstantiationException | IllegalAccessException e) { LOGGER.log(Level.WARNING, "Failed to instantiate the build chooser", e); } } } } getBuildChooser(); // set the gitSCM field. return this; } @Override @Whitelisted public GitRepositoryBrowser getBrowser() { return browser; } public void setBrowser(GitRepositoryBrowser browser) { this.browser = browser; } private static final String HOSTNAME_MATCH = "([\\w\\d[-.]]+)" // hostname ; private static final String REPOSITORY_PATH_MATCH = "/*" // Zero or more slashes as start of repository path + "(.+?)" // repository path without leading slashes + "(?:[.]git)?" // optional '.git' suffix + "/*" // optional trailing '/' ; private static final Pattern[] URL_PATTERNS = { /* URL style - like https://github.com/jenkinsci/git-plugin */ Pattern.compile( "(?:\\w+://)" // protocol (scheme) + "(?:.+@)?" // optional username/password + HOSTNAME_MATCH + "(?:[:][\\d]+)?" 
// optional port number (only honored by git for ssh:// scheme) + "/" // separator between hostname and repository path - '/' + REPOSITORY_PATH_MATCH ), /* Alternate ssh style - like [email protected]:jenkinsci/git-plugin */ Pattern.compile( "(?:git@)" // required username (only optional if local username is 'git') + HOSTNAME_MATCH + ":" // separator between hostname and repository path - ':' + REPOSITORY_PATH_MATCH ) }; @Override public RepositoryBrowser<?> guessBrowser() { Set<String> webUrls = new HashSet<>(); if (remoteRepositories != null) { for (RemoteConfig config : remoteRepositories) { for (URIish uriIsh : config.getURIs()) { String uri = uriIsh.toString(); for (Pattern p : URL_PATTERNS) { Matcher m = p.matcher(uri); if (m.matches()) { webUrls.add("https://" + m.group(1) + "/" + m.group(2) + "/"); } } } } } if (webUrls.isEmpty()) { return null; } if (webUrls.size() == 1) { String url = webUrls.iterator().next(); if (url.startsWith("https://bitbucket.org/")) { return new BitbucketWeb(url); } if (url.startsWith("https://gitlab.com/")) { return new GitLab(url); } if (url.startsWith("https://github.com/")) { return new GithubWeb(url); } return null; } LOGGER.log(Level.INFO, "Multiple browser guess matches for {0}", remoteRepositories); return null; } public boolean isCreateAccountBasedOnEmail() { DescriptorImpl gitDescriptor = getDescriptor(); return (gitDescriptor != null && gitDescriptor.isCreateAccountBasedOnEmail()); } public boolean isUseExistingAccountWithSameEmail() { DescriptorImpl gitDescriptor = getDescriptor(); return (gitDescriptor != null && gitDescriptor.isUseExistingAccountWithSameEmail()); } public boolean isHideCredentials() { DescriptorImpl gitDescriptor = getDescriptor(); return gitDescriptor != null && gitDescriptor.isHideCredentials(); } public boolean isAllowSecondFetch() { DescriptorImpl gitDescriptor = getDescriptor(); return (gitDescriptor != null && gitDescriptor.isAllowSecondFetch()); } public boolean isDisableGitToolChooser() { DescriptorImpl gitDescriptor = getDescriptor(); return (gitDescriptor != null && gitDescriptor.isDisableGitToolChooser()); } public boolean isAddGitTagAction() { DescriptorImpl gitDescriptor = getDescriptor(); return (gitDescriptor != null && gitDescriptor.isAddGitTagAction()); } @Whitelisted public BuildChooser getBuildChooser() { BuildChooser bc; BuildChooserSetting bcs = getExtensions().get(BuildChooserSetting.class); if (bcs!=null) bc = bcs.getBuildChooser(); else bc = new DefaultBuildChooser(); bc.gitSCM = this; return bc; } public void setBuildChooser(BuildChooser buildChooser) throws IOException { if (buildChooser.getClass()==DefaultBuildChooser.class) { getExtensions().remove(BuildChooserSetting.class); } else { getExtensions().replace(new BuildChooserSetting(buildChooser)); } } @Deprecated public String getParamLocalBranch(Run<?, ?> build) throws IOException, InterruptedException { return getParamLocalBranch(build, new LogTaskListener(LOGGER, Level.INFO)); } /** * Gets the parameter-expanded effective value in the context of the current build. * @param build run whose local branch name is returned * @param listener build log * @throws IOException on input or output error * @throws InterruptedException when interrupted * @return parameter-expanded local branch name in build. 
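     *         For example, a local branch configured as {@code ${TARGET_BRANCH}} (hypothetical
     *         parameter) is expanded against the build environment, so with
     *         {@code TARGET_BRANCH=main} this returns {@code main}.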
*/ public String getParamLocalBranch(Run<?, ?> build, TaskListener listener) throws IOException, InterruptedException { LocalBranch localBranch = getExtensions().get(LocalBranch.class); // substitute build parameters if available return getParameterString(localBranch == null ? null : localBranch.getLocalBranch(), build.getEnvironment(listener)); } @Deprecated public List<RemoteConfig> getParamExpandedRepos(Run<?, ?> build) throws IOException, InterruptedException { return getParamExpandedRepos(build, new LogTaskListener(LOGGER, Level.INFO)); } /** * Expand parameters in {@link #remoteRepositories} with the parameter values provided in the given build * and return them. * * @param build run whose local branch name is returned * @param listener build log * @throws IOException on input or output error * @throws InterruptedException when interrupted * @return can be empty but never null. */ public List<RemoteConfig> getParamExpandedRepos(Run<?, ?> build, TaskListener listener) throws IOException, InterruptedException { List<RemoteConfig> expandedRepos = new ArrayList<>(); EnvVars env = build.getEnvironment(listener); for (RemoteConfig oldRepo : Util.fixNull(remoteRepositories)) { expandedRepos.add(getParamExpandedRepo(env, oldRepo)); } return expandedRepos; } /** * Expand Parameters in the supplied remote repository with the parameter values provided in the given environment variables * @param env Environment variables with parameter values * @param remoteRepository Remote repository with parameters * @return remote repository with expanded parameters */ public RemoteConfig getParamExpandedRepo(EnvVars env, RemoteConfig remoteRepository) { List<RefSpec> refSpecs = getRefSpecs(remoteRepository, env); return newRemoteConfig( getParameterString(remoteRepository.getName(), env), getParameterString(remoteRepository.getURIs().get(0).toPrivateString(), env), refSpecs.toArray(new RefSpec[0])); } public RemoteConfig getRepositoryByName(String repoName) { for (RemoteConfig r : getRepositories()) { if (r.getName().equals(repoName)) { return r; } } return null; } @Exported @Whitelisted public List<UserRemoteConfig> getUserRemoteConfigs() { if (userRemoteConfigs == null) { /* Prevent NPE when no remote config defined */ userRemoteConfigs = new ArrayList<>(); } return Collections.unmodifiableList(userRemoteConfigs); } @Whitelisted @SuppressFBWarnings(value="EI_EXPOSE_REP", justification="Low risk") public List<RemoteConfig> getRepositories() { // Handle null-value to ensure backwards-compatibility, ie project configuration missing the <repositories/> XML element if (remoteRepositories == null) { return new ArrayList<>(); } return remoteRepositories; } /** * Derives a local branch name from the remote branch name by removing the * name of the remote from the remote branch name. * <p> * Ex. origin/master becomes master * <p> * Cycles through the list of user remotes looking for a match allowing user * to configure an alternate (not origin) name for the remote. * * @param remoteBranchName branch name whose remote repository name will be removed * @return a local branch name derived by stripping the remote repository * name from the {@code remoteBranchName} parameter. If a matching * remote is not found, the original {@code remoteBranchName} will * be returned. */ public String deriveLocalBranchName(String remoteBranchName) { // default remoteName is 'origin' used if list of user remote configs is empty. 
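        // e.g. "origin/master" -> "master"; with a remote named "upstream" (illustrative), "upstream/feature" -> "feature"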
        String remoteName = "origin";
        for (final UserRemoteConfig remote : getUserRemoteConfigs()) {
            remoteName = remote.getName();
            if (remoteName == null || remoteName.isEmpty()) {
                remoteName = "origin";
            }
            if (remoteBranchName.startsWith(remoteName + "/")) {
                // found the remote config associated with remoteBranchName
                break;
            }
        }
        // now strip the remote name and return the resulting local branch name.
        String localBranchName = remoteBranchName.replaceFirst("^" + remoteName + "/", "");
        return localBranchName;
    }

    @CheckForNull
    @Whitelisted
    public String getGitTool() {
        return gitTool;
    }

    @NonNull
    public static String getParameterString(@CheckForNull String original, @NonNull EnvVars env) {
        return env.expand(original);
    }

    private List<RefSpec> getRefSpecs(RemoteConfig repo, EnvVars env) {
        List<RefSpec> refSpecs = new ArrayList<>();
        for (RefSpec refSpec : repo.getFetchRefSpecs()) {
            refSpecs.add(new RefSpec(getParameterString(refSpec.toString(), env)));
        }
        return refSpecs;
    }

    /**
     * If the configuration is such that we are tracking just one branch of one repository,
     * return that branch specifier (in the form of something like "origin/master" or a SHA1 hash).
     *
     * Otherwise return {@code null}.
     */
    @CheckForNull
    private String getSingleBranch(EnvVars env) {
        // if we have multiple branches skip to advanced use case
        if (getBranches().size() != 1) {
            return null;
        }

        String branch = getBranches().get(0).getName();
        String repository = null;

        if (getRepositories().size() != 1) {
            for (RemoteConfig repo : getRepositories()) {
                if (branch.startsWith(repo.getName() + "/")) {
                    repository = repo.getName();
                    break;
                }
            }
        } else {
            repository = getRepositories().get(0).getName();
        }

        // replace repository wildcard with repository name
        if (branch.startsWith("*/") && repository != null) {
            branch = repository + branch.substring(1);
        }

        // if the branch name contains more wildcards, then the simple use case
        // does not apply and we need to skip to the advanced use case
        if (branch.contains("*")) {
            return null;
        }

        // substitute build parameters if available
        branch = getParameterString(branch, env);

        // Check for empty string - replace with "**" when seen.
        if (branch.equals("")) {
            branch = "**";
        }

        return branch;
    }

    @Override
    public SCMRevisionState calcRevisionsFromBuild(Run<?, ?> abstractBuild, FilePath workspace, Launcher launcher, TaskListener taskListener) throws IOException, InterruptedException {
        return SCMRevisionState.NONE;
    }

    @Override
    public boolean requiresWorkspaceForPolling() {
        // TODO would need to use hudson.plugins.git.util.GitUtils.getPollEnvironment
        return requiresWorkspaceForPolling(new EnvVars());
    }

    /* Package protected for test access */
    boolean requiresWorkspaceForPolling(EnvVars environment) {
        for (GitSCMExtension ext : getExtensions()) {
            if (ext.requiresWorkspaceForPolling()) return true;
        }
        return getSingleBranch(environment) == null;
    }

    @Override
    public PollingResult compareRemoteRevisionWith(Job<?, ?> project, Launcher launcher, FilePath workspace, final TaskListener listener, SCMRevisionState baseline) throws IOException, InterruptedException {
        try {
            return compareRemoteRevisionWithImpl(project, launcher, workspace, listener);
        } catch (GitException e) {
            throw new IOException(e);
        }
    }

    public static final Pattern GIT_REF = Pattern.compile("^(refs/[^/]+)/(.+)");

    private PollingResult compareRemoteRevisionWithImpl(Job<?, ?> project, Launcher launcher, FilePath workspace, final @NonNull TaskListener listener) throws IOException, InterruptedException {
        // Poll for changes. Are there any unbuilt revisions that Hudson ought to build?
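        // Two paths below: when polling needs no workspace, remote head SHAs are compared against the
        // recorded BuildData; otherwise the existing workspace is fetched and the build chooser decides.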
listener.getLogger().println("Using strategy: " + getBuildChooser().getDisplayName()); final Run lastBuild = project.getLastBuild(); if (lastBuild == null) { // If we've never been built before, well, gotta build! listener.getLogger().println("[poll] No previous build, so forcing an initial build."); return BUILD_NOW; } final BuildData buildData = fixNull(getBuildData(lastBuild)); if (buildData.lastBuild != null) { listener.getLogger().println("[poll] Last Built Revision: " + buildData.lastBuild.revision); } final EnvVars pollEnv = project instanceof AbstractProject ? GitUtils.getPollEnvironment((AbstractProject) project, workspace, launcher, listener, false) : lastBuild.getEnvironment(listener); final String singleBranch = getSingleBranch(pollEnv); if (!requiresWorkspaceForPolling(pollEnv)) { final EnvVars environment = project instanceof AbstractProject ? GitUtils.getPollEnvironment((AbstractProject) project, workspace, launcher, listener, false) : new EnvVars(); GitClient git = createClient(listener, environment, project, Jenkins.get(), null); for (RemoteConfig remoteConfig : getParamExpandedRepos(lastBuild, listener)) { String remote = remoteConfig.getName(); List<RefSpec> refSpecs = getRefSpecs(remoteConfig, environment); for (URIish urIish : remoteConfig.getURIs()) { String gitRepo = urIish.toString(); Map<String, ObjectId> heads = git.getHeadRev(gitRepo); if (heads==null || heads.isEmpty()) { listener.getLogger().println("[poll] Couldn't get remote head revision"); return BUILD_NOW; } listener.getLogger().println("Found "+ heads.size() +" remote heads on " + urIish); Iterator<Entry<String, ObjectId>> it = heads.entrySet().iterator(); while (it.hasNext()) { String head = it.next().getKey(); boolean match = false; for (RefSpec spec : refSpecs) { if (spec.matchSource(head)) { match = true; break; } } if (!match) { listener.getLogger().println("Ignoring " + head + " as it doesn't match any of the configured refspecs"); it.remove(); } } for (BranchSpec branchSpec : getBranches()) { for (Entry<String, ObjectId> entry : heads.entrySet()) { final String head = entry.getKey(); // head is "refs/(heads|tags|whatever)/branchName // first, check the a canonical git reference is configured if (!branchSpec.matches(head, environment)) { // convert head `refs/(heads|tags|whatever)/branch` into shortcut notation `remote/branch` String name; Matcher matcher = GIT_REF.matcher(head); if (matcher.matches()) name = remote + head.substring(matcher.group(1).length()); else name = remote + "/" + head; if (!branchSpec.matches(name, environment)) continue; } final ObjectId sha1 = entry.getValue(); Build built = buildData.getLastBuild(sha1); if (built != null) { listener.getLogger().println("[poll] Latest remote head revision on " + head + " is: " + sha1.getName() + " - already built by " + built.getBuildNumber()); continue; } listener.getLogger().println("[poll] Latest remote head revision on " + head + " is: " + sha1.getName()); return BUILD_NOW; } } } } return NO_CHANGES; } final Node node = GitUtils.workspaceToNode(workspace); final EnvVars environment = project instanceof AbstractProject ? 
GitUtils.getPollEnvironment((AbstractProject) project, workspace, launcher, listener) : project.getEnvironment(node, listener); FilePath workingDirectory = workingDirectory(project,workspace,environment,listener); // (Re)build if the working directory doesn't exist if (workingDirectory == null || !workingDirectory.exists()) { listener.getLogger().println("[poll] Working Directory does not exist"); return BUILD_NOW; } GitClient git = createClient(listener, environment, project, node, workingDirectory); if (git.hasGitRepo(false)) { // Repo is there - do a fetch listener.getLogger().println("Fetching changes from the remote Git repositories"); // Fetch updates for (RemoteConfig remoteRepository : getParamExpandedRepos(lastBuild, listener)) { fetchFrom(git, null, listener, remoteRepository); } listener.getLogger().println("Polling for changes in"); Collection<Revision> candidates = getBuildChooser().getCandidateRevisions( true, singleBranch, git, listener, buildData, new BuildChooserContextImpl(project, null, environment)); for (Revision c : candidates) { if (!isRevExcluded(git, c, listener, buildData)) { return PollingResult.SIGNIFICANT; } } return NO_CHANGES; } else { listener.getLogger().println("No Git repository yet, an initial checkout is required"); return PollingResult.SIGNIFICANT; } } /** * Allows {@link Builder}s and {@link Publisher}s to access a configured {@link GitClient} object to * perform additional git operations. * @param listener build log * @param environment environment variables to be used * @param build run context for the returned GitClient * @param workspace client workspace * @return git client for additional git operations * @throws IOException on input or output error * @throws InterruptedException when interrupted */ @NonNull public GitClient createClient(TaskListener listener, EnvVars environment, Run<?,?> build, FilePath workspace) throws IOException, InterruptedException { FilePath ws = workingDirectory(build.getParent(), workspace, environment, listener); /* ws will be null if the node which ran the build is offline */ if (ws != null) { ws.mkdirs(); // ensure it exists } return createClient(listener,environment, build.getParent(), GitUtils.workspaceToNode(workspace), ws, null); } /** * Allows {@link Publisher} and other post build actions to access a configured {@link GitClient}. * The post build action can use the {@code postBuildUnsupportedCommand} argument to control the * selection of a git tool by {@link GitToolChooser}. 
 * @param listener build log
 * @param environment environment variables to be used
 * @param build run context for the returned GitClient
 * @param workspace client workspace
 * @param postBuildUnsupportedCommand passed by caller to control choice of git tool by GitToolChooser
 * @return git client for additional git operations
 * @throws IOException on input or output error
 * @throws InterruptedException when interrupted
 */
    @NonNull
    public GitClient createClient(TaskListener listener, EnvVars environment, Run<?,?> build, FilePath workspace, UnsupportedCommand postBuildUnsupportedCommand) throws IOException, InterruptedException {
        FilePath ws = workingDirectory(build.getParent(), workspace, environment, listener);
        /* ws will be null if the node which ran the build is offline */
        if (ws != null) {
            ws.mkdirs(); // ensure it exists
        }
        return createClient(listener, environment, build.getParent(), GitUtils.workspaceToNode(workspace), ws, postBuildUnsupportedCommand);
    }

    @NonNull
    /*package*/ GitClient createClient(TaskListener listener, EnvVars environment, Job project, Node n, FilePath ws) throws IOException, InterruptedException {
        return createClient(listener, environment, project, n, ws, null);
    }

    @NonNull
    /*package*/ GitClient createClient(TaskListener listener, EnvVars environment, Job project, Node n, FilePath ws, UnsupportedCommand postBuildUnsupportedCommand) throws IOException, InterruptedException {
        if (postBuildUnsupportedCommand == null) {
            /* UnsupportedCommand supports JGit by default */
            postBuildUnsupportedCommand = new UnsupportedCommand();
        }

        String gitExe = getGitExe(n, listener);
        GitTool gitTool = getGitTool(n, null, listener);

        if (!isDisableGitToolChooser()) {
            UnsupportedCommand unsupportedCommand = new UnsupportedCommand();
            for (GitSCMExtension ext : extensions) {
                ext.determineSupportForJGit(this, unsupportedCommand);
            }
            GitToolChooser chooser = null;
            for (UserRemoteConfig uc : getUserRemoteConfigs()) {
                String ucCredentialsId = uc.getCredentialsId();
                String url = getParameterString(uc.getUrl(), environment);
                /* If any of the extensions do not support JGit, it should not be suggested */
                /* If the post build action does not support JGit, it should not be suggested */
                chooser = new GitToolChooser(url, project, ucCredentialsId, gitTool, n, listener,
                        unsupportedCommand.determineSupportForJGit() && postBuildUnsupportedCommand.determineSupportForJGit());
            }
            if (chooser != null) {
                listener.getLogger().println("The recommended git tool is: " + chooser.getGitTool());
                String updatedGitExe = chooser.getGitTool();
                if (!updatedGitExe.equals("NONE")) {
                    gitExe = updatedGitExe;
                }
            }
        }

        Git git = Git.with(listener, environment).in(ws).using(gitExe);

        GitClient c = git.getClient();
        for (GitSCMExtension ext : extensions) {
            c = ext.decorate(this, c);
        }

        for (UserRemoteConfig uc : getUserRemoteConfigs()) {
            String ucCredentialsId = uc.getCredentialsId();
            if (ucCredentialsId == null) {
                listener.getLogger().println("No credentials specified");
            } else {
                String url = getParameterString(uc.getUrl(), environment);
                StandardUsernameCredentials credentials = lookupScanCredentials(project, url, ucCredentialsId);
                if (credentials != null) {
                    c.addCredentials(url, credentials);
                    if (!isHideCredentials()) {
                        listener.getLogger().printf("using credential %s%n", credentials.getId());
                    }
                    if (project != null && project.getLastBuild() != null) {
                        CredentialsProvider.track(project.getLastBuild(), credentials);
                    }
                } else {
                    if (!isHideCredentials()) {
                        listener.getLogger().printf("Warning: CredentialId \"%s\" could not be found.%n",
ucCredentialsId); } } } } // TODO add default credentials return c; } private static StandardUsernameCredentials lookupScanCredentials(@CheckForNull Item project, @CheckForNull String url, @CheckForNull String ucCredentialsId) { if (Util.fixEmpty(ucCredentialsId) == null) { return null; } else { return CredentialsMatchers.firstOrNull( CredentialsProvider.lookupCredentials( StandardUsernameCredentials.class, project, project instanceof Queue.Task ? ((Queue.Task) project).getDefaultAuthentication() : ACL.SYSTEM, URIRequirementBuilder.fromUri(url).build() ), CredentialsMatchers.allOf(CredentialsMatchers.withId(ucCredentialsId), GitClient.CREDENTIALS_MATCHER) ); } } private static CredentialsMatcher gitScanCredentialsMatcher() { return CredentialsMatchers.anyOf(CredentialsMatchers.instanceOf(StandardUsernamePasswordCredentials.class)); } @NonNull private BuildData fixNull(BuildData bd) { ScmName sn = getExtensions().get(ScmName.class); String scmName = sn == null ? null : sn.getName(); return bd != null ? bd : new BuildData(scmName, getUserRemoteConfigs()); } /** * Fetch information from a particular remote repository. * * @param git git client * @param run run context if it's running for build * @param listener build log * @param remoteRepository remote git repository * @throws InterruptedException when interrupted * @throws IOException on input or output error */ private void fetchFrom(GitClient git, @CheckForNull Run<?, ?> run, TaskListener listener, RemoteConfig remoteRepository) throws InterruptedException, IOException { boolean first = true; for (URIish url : remoteRepository.getURIs()) { try { if (first) { git.setRemoteUrl(remoteRepository.getName(), url.toPrivateASCIIString()); first = false; } else { git.addRemoteUrl(remoteRepository.getName(), url.toPrivateASCIIString()); } FetchCommand fetch = git.fetch_().from(url, remoteRepository.getFetchRefSpecs()); for (GitSCMExtension extension : extensions) { extension.decorateFetchCommand(this, run, git, listener, fetch); } fetch.execute(); } catch (GitException ex) { throw new GitException("Failed to fetch from "+url.toString(), ex); } } } private RemoteConfig newRemoteConfig(String name, String refUrl, RefSpec... refSpec) { try { Config repoConfig = new Config(); // Make up a repo config from the request parameters repoConfig.setString("remote", name, "url", refUrl); List<String> str = new ArrayList<>(); if(refSpec != null && refSpec.length > 0) for (RefSpec rs: refSpec) str.add(rs.toString()); repoConfig.setStringList("remote", name, "fetch", str); return RemoteConfig.getAllRemoteConfigs(repoConfig).get(0); } catch (Exception ex) { throw new GitException("Error trying to create JGit configuration", ex); } } @CheckForNull public GitTool resolveGitTool(TaskListener listener) { return GitUtils.resolveGitTool(gitTool, listener); } public String getGitExe(Node builtOn, TaskListener listener) { return getGitExe(builtOn, null, listener); } /** * Exposing so that we can get this from GitPublisher. 
* @param builtOn node where build was performed * @param env environment variables used in the build * @param listener build log * @return git exe for builtOn node, often "Default" or "jgit" */ public String getGitExe(Node builtOn, EnvVars env, TaskListener listener) { GitTool tool = GitUtils.resolveGitTool(gitTool, builtOn, env, listener); if(tool == null) { return null; } return tool.getGitExe(); } public GitTool getGitTool(Node builtOn, EnvVars env, TaskListener listener) { GitTool tool = GitUtils.resolveGitTool(gitTool, builtOn, env, listener); return tool; } /*package*/ static class BuildChooserContextImpl implements BuildChooserContext, Serializable { @SuppressFBWarnings(value="SE_BAD_FIELD", justification="known non-serializable field") final Job project; @SuppressFBWarnings(value="SE_BAD_FIELD", justification="known non-serializable field") final Run build; final EnvVars environment; BuildChooserContextImpl(Job project, Run build, EnvVars environment) { this.project = project; this.build = build; this.environment = environment; } public <T> T actOnBuild(@NonNull ContextCallable<Run<?,?>, T> callable) throws IOException, InterruptedException { return callable.invoke(build, FilePath.localChannel); } public <T> T actOnProject(@NonNull ContextCallable<Job<?,?>, T> callable) throws IOException, InterruptedException { return callable.invoke(project, FilePath.localChannel); } public Run<?, ?> getBuild() { return build; } public EnvVars getEnvironment() { return environment; } private Object writeReplace() { Channel currentChannel = Channel.current(); if (currentChannel == null) { return null; } return currentChannel.export(BuildChooserContext.class,new BuildChooserContext() { public <T> T actOnBuild(@NonNull ContextCallable<Run<?,?>, T> callable) throws IOException, InterruptedException { return callable.invoke(build,Channel.current()); } public <T> T actOnProject(@NonNull ContextCallable<Job<?,?>, T> callable) throws IOException, InterruptedException { return callable.invoke(project,Channel.current()); } public Run<?, ?> getBuild() { return build; } public EnvVars getEnvironment() { return environment; } }); } } /** * Determines the commit to be built in this round, updating the working tree accordingly, * and return the information about the selected commit. * * <p> * For robustness, this method shouldn't assume too much about the state of the working tree when this method * is called. In a general case, a working tree is a left-over from the previous build, so it can be quite * messed up (such as HEAD pointing to a random branch.) It is expected that this method brings it back * to the predictable clean state by the time this method returns. 
*/ private @NonNull Build determineRevisionToBuild(final Run build, final @NonNull BuildData buildData, final EnvVars environment, final @NonNull GitClient git, final @NonNull TaskListener listener) throws IOException, InterruptedException { PrintStream log = listener.getLogger(); Collection<Revision> candidates = Collections.emptyList(); final BuildChooserContext context = new BuildChooserContextImpl(build.getParent(), build, environment); getBuildChooser().prepareWorkingTree(git, listener, context); if (build.getClass().getName().equals("hudson.matrix.MatrixRun")) { candidates = GitSCMMatrixUtil.populateCandidatesFromRootBuild((AbstractBuild) build, this); } // parameter forcing the commit ID to build if (candidates.isEmpty() ) { final RevisionParameterAction rpa = build.getAction(RevisionParameterAction.class); if (rpa != null) { // in case the checkout is due to a commit notification on a // multiple scm configuration, it should be verified if the triggering repo remote // matches current repo remote to avoid JENKINS-26587 if (rpa.canOriginateFrom(this.getRepositories())) { candidates = Collections.singleton(rpa.toRevision(git)); } else { log.println("skipping resolution of commit " + rpa.commit + ", since it originates from another repository"); } } } if (candidates.isEmpty() ) { final String singleBranch = environment.expand( getSingleBranch(environment) ); candidates = getBuildChooser().getCandidateRevisions( false, singleBranch, git, listener, buildData, context); } if (candidates.isEmpty()) { // getBuildCandidates should make the last item the last build, so a re-build // will build the last built thing. throw new AbortException("Couldn't find any revision to build. Verify the repository and branch configuration for this job."); } Revision marked = candidates.iterator().next(); Revision rev = marked; // Modify the revision based on extensions for (GitSCMExtension ext : extensions) { rev = ext.decorateRevisionToBuild(this,build,git,listener,marked,rev); } Build revToBuild = new Build(marked, rev, build.getNumber(), null); buildData.saveBuild(revToBuild); if (buildData.getBuildsByBranchName().size() >= 100) { log.println("JENKINS-19022: warning: possible memory leak due to Git plugin usage; see: https://plugins.jenkins.io/git/#remove-git-plugin-buildsbybranch-builddata-script"); } boolean checkForMultipleRevisions = true; BuildSingleRevisionOnly ext = extensions.get(BuildSingleRevisionOnly.class); if (ext != null) { checkForMultipleRevisions = ext.enableMultipleRevisionDetection(); } if (candidates.size() > 1) { log.println("Multiple candidate revisions"); if (checkForMultipleRevisions) { Job<?, ?> job = build.getParent(); if (job instanceof AbstractProject) { AbstractProject project = (AbstractProject) job; if (!project.isDisabled()) { log.println("Scheduling another build to catch up with " + project.getFullDisplayName()); if (!project.scheduleBuild(0, new SCMTrigger.SCMTriggerCause("This build was triggered by build " + build.getNumber() + " because more than one build candidate was found."))) { log.println("WARNING: multiple candidate revisions, but unable to schedule build of " + project.getFullDisplayName()); } } } } } return revToBuild; } /** * Retrieve Git objects from the specified remotes by doing the likes of clone/fetch/pull/etc. * * By the end of this method, remote refs are updated to include all the commits found in the remote servers. 
*/ private void retrieveChanges(Run build, GitClient git, TaskListener listener) throws IOException, InterruptedException { final PrintStream log = listener.getLogger(); boolean removeSecondFetch = false; List<RemoteConfig> repos = getParamExpandedRepos(build, listener); if (repos.isEmpty()) return; // defensive check even though this is an invalid configuration if (git.hasGitRepo(false)) { // It's an update if (repos.size() == 1) log.println("Fetching changes from the remote Git repository"); else log.println(MessageFormat.format("Fetching changes from {0} remote Git repositories", repos.size())); } else { log.println("Cloning the remote Git repository"); RemoteConfig rc = repos.get(0); try { CloneCommand cmd = git.clone_().url(rc.getURIs().get(0).toPrivateString()).repositoryName(rc.getName()); for (GitSCMExtension ext : extensions) { ext.decorateCloneCommand(this, build, git, listener, cmd); } cmd.execute(); // determine if second fetch is required CloneOption option = extensions.get(CloneOption.class); if (!isAllowSecondFetch()) { removeSecondFetch = determineSecondFetch(option, rc); } } catch (GitException ex) { ex.printStackTrace(listener.error("Error cloning remote repo '" + rc.getName() + "'")); throw new AbortException("Error cloning remote repo '" + rc.getName() + "'"); } } for (RemoteConfig remoteRepository : repos) { if (remoteRepository.equals(repos.get(0)) && removeSecondFetch){ log.println("Avoid second fetch"); continue; } try { fetchFrom(git, build, listener, remoteRepository); } catch (GitException ex) { /* Allow retry by throwing AbortException instead of * GitException. See JENKINS-20531. */ ex.printStackTrace(listener.error("Error fetching remote repo '" + remoteRepository.getName() + "'")); throw new AbortException("Error fetching remote repo '" + remoteRepository.getName() + "'"); } } } private boolean determineSecondFetch(CloneOption option, @NonNull RemoteConfig rc) { List<RefSpec> initialFetchRefSpecs = rc.getFetchRefSpecs(); boolean isDefaultRefspec = true; // default refspec is any refspec with "refs/heads/" mapping boolean removeSecondFetch = true; if (initialFetchRefSpecs != null) { for (RefSpec ref : initialFetchRefSpecs) { if (!ref.toString().contains("refs/heads")) { isDefaultRefspec = false; // if refspec is not of default type, preserve second fetch } } if (option == null) { removeSecondFetch = isDefaultRefspec; } else { if (option.isHonorRefspec()) { removeSecondFetch = true; // avoid second fetch call if honor refspec is enabled } else { removeSecondFetch = isDefaultRefspec; } } } // if initial fetch refspec contains "refs/heads/*" (default refspec), ignore the second fetch call return removeSecondFetch; } @Override public void checkout(Run<?, ?> build, Launcher launcher, FilePath workspace, TaskListener listener, File changelogFile, SCMRevisionState baseline) throws IOException, InterruptedException { if (VERBOSE) listener.getLogger().println("Using checkout strategy: " + getBuildChooser().getDisplayName()); BuildData previousBuildData = getBuildData(build.getPreviousBuild()); // read only BuildData buildData = copyBuildData(build.getPreviousBuild()); if (VERBOSE && buildData.lastBuild != null) { listener.getLogger().println("Last Built Revision: " + buildData.lastBuild.revision); } EnvVars environment = build.getEnvironment(listener); GitClient git = createClient(listener, environment, build, workspace); if (launcher instanceof Launcher.DecoratedLauncher) { // We cannot check for git instanceof CliGitAPIImpl vs. 
JGitAPIImpl here since (when running on an agent) we will actually have a RemoteGitImpl which is opaque. listener.getLogger().println("Warning: JENKINS-30600: special launcher " + launcher + " will be ignored (a typical symptom is the Git executable not being run inside a designated container)"); } for (GitSCMExtension ext : extensions) { ext.beforeCheckout(this, build, git, listener); } retrieveChanges(build, git, listener); Build revToBuild = determineRevisionToBuild(build, buildData, environment, git, listener); // Track whether we're trying to add a duplicate BuildData, now that it's been updated with // revision info for this build etc. The default assumption is that it's a duplicate. boolean buildDataAlreadyPresent = false; List<BuildData> actions = build.getActions(BuildData.class); for (BuildData d: actions) { if (d.similarTo(buildData)) { buildDataAlreadyPresent = true; break; } } if (!actions.isEmpty()) { buildData.setIndex(actions.size()+1); } // If the BuildData is not already attached to this build, add it to the build and mark that // it wasn't already present, so that we add the GitTagAction and changelog after the checkout // finishes. if (!buildDataAlreadyPresent) { build.addAction(buildData); } environment.put(GIT_COMMIT, revToBuild.revision.getSha1String()); Branch localBranch = Iterables.getFirst(revToBuild.revision.getBranches(),null); String localBranchName = getParamLocalBranch(build, listener); if (localBranch != null && localBranch.getName() != null) { // null for a detached HEAD String remoteBranchName = getBranchName(localBranch); environment.put(GIT_BRANCH, remoteBranchName); LocalBranch lb = getExtensions().get(LocalBranch.class); if (lb != null) { String lbn = lb.getLocalBranch(); if (lbn == null || lbn.equals("**")) { // local branch is configured with empty value or "**" so use remote branch name for checkout localBranchName = deriveLocalBranchName(remoteBranchName); } environment.put(GIT_LOCAL_BRANCH, localBranchName); } } listener.getLogger().println("Checking out " + revToBuild.revision); CheckoutCommand checkoutCommand = git.checkout().branch(localBranchName).ref(revToBuild.revision.getSha1String()).deleteBranchIfExist(true); for (GitSCMExtension ext : this.getExtensions()) { ext.decorateCheckoutCommand(this, build, git, listener, checkoutCommand); } try { checkoutCommand.execute(); } catch (GitLockFailedException e) { // Rethrow IOException so the retry will be able to catch it throw new IOException("Could not checkout " + revToBuild.revision.getSha1String(), e); } // Needs to be after the checkout so that revToBuild is in the workspace try { printCommitMessageToLog(listener, git, revToBuild); } catch (IOException | ArithmeticException | GitException ge) { // JENKINS-45729 reports a git exception when revToBuild cannot be found in the workspace. // JENKINS-46628 reports a git exception when revToBuild cannot be found in the workspace. // JENKINS-62710 reports a JGit arithmetic exception on an older Java 8 system. // Don't let those exceptions block the build, this is an informational message only listener.getLogger().println("Exception logging commit message for " + revToBuild + ": " + ge.getMessage()); } // Don't add the tag and changelog if we've already processed this BuildData before. 
if (!buildDataAlreadyPresent) { if (build.getActions(AbstractScmTagAction.class).isEmpty() && isAddGitTagAction()) { // only add the tag action if we can be unique as AbstractScmTagAction has a fixed UrlName // so only one of the actions is addressable by users LOGGER.log(Level.FINE, "Adding GitTagAction to build " + build.number); build.addAction(new GitTagAction(build, workspace, revToBuild.revision)); } else { LOGGER.log(Level.FINE, "Not adding GitTagAction to build " + build.number); } if (changelogFile != null) { computeChangeLog(git, revToBuild.revision, listener, previousBuildData, new FilePath(changelogFile), new BuildChooserContextImpl(build.getParent(), build, environment)); } } for (GitSCMExtension ext : extensions) { ext.onCheckoutCompleted(this, build, git,listener); } } private void printCommitMessageToLog(TaskListener listener, GitClient git, final Build revToBuild) throws IOException { try { RevCommit commit = git.withRepository(new RevCommitRepositoryCallback(revToBuild)); listener.getLogger().println("Commit message: \"" + commit.getShortMessage() + "\""); } catch (InterruptedException | MissingObjectException e) { e.printStackTrace(listener.error("Unable to retrieve commit message")); } } /** * Build up change log from all the branches that we've merged into {@code revToBuild}. * * <p> * Intuitively, a changelog is a list of commits that's added since the "previous build" to the current build. * However, because of the multiple branch support in Git, this notion is ambiguous. For example, consider the * following commit graph where M1...M4 belongs to branch M, B1..B2 belongs to branch B, and so on: * * <pre> * M1 -> M2 -> M3 -> M4 * / \ \ \ * S -> B1 -> B2 \ * \ \ * C1 ---------------> C2 * </pre> * * <p> * If Jenkins built B1, C1, B2, C3 in that order, then one'd prefer that the changelog of B2 only shows * just B1..B2, not C1..B2. To do this, we attribute every build to specific branches, and when we say * "since the previous build", what we really mean is "since the last build that built the same branch". * * <p> * TODO: if a branch merge is configured, then the first build will end up listing all the changes * in the upstream branch, which may be too many. To deal with this nicely, BuildData needs to remember * when we started merging this branch so that we can properly detect if the current build is the * first build that's merging a new branch. * * Another possibly sensible option is to always exclude all the commits that are happening in the remote branch. * Picture yourself developing a feature branch that closely tracks a busy mainline, then you might * not really care the changes going on in the main line. In this way, the changelog only lists your changes, * so "notify those who break the build" will not spam upstream developers, too. * * @param git * Used for invoking Git * @param revToBuild * Points to the revision we'll be building. This includes all the branches we've merged. * @param listener * Used for writing to build console * @param previousBuildData * Information that captures what we did during the last build. We need this for changelog, * or else we won't know where to stop. 
*/ private void computeChangeLog(GitClient git, Revision revToBuild, TaskListener listener, BuildData previousBuildData, FilePath changelogFile, BuildChooserContext context) throws IOException, InterruptedException { boolean executed = false; ChangelogCommand changelog = git.changelog(); changelog.includes(revToBuild.getSha1()); try (Writer out = new OutputStreamWriter(changelogFile.write(),"UTF-8")) { boolean exclusion = false; ChangelogToBranch changelogToBranch = getExtensions().get(ChangelogToBranch.class); if (changelogToBranch != null) { listener.getLogger().println("Using 'Changelog to branch' strategy."); changelog.excludes(changelogToBranch.getOptions().getRef()); exclusion = true; } else { for (Branch b : revToBuild.getBranches()) { Build lastRevWas = getBuildChooser().prevBuildForChangelog(b.getName(), previousBuildData, git, context); if (lastRevWas != null && lastRevWas.revision != null && git.isCommitInRepo(lastRevWas.getSHA1())) { changelog.excludes(lastRevWas.getSHA1()); exclusion = true; } } } if (!exclusion) { // this is the first time we are building this branch, so there's no base line to compare against. // if we force the changelog, it'll contain all the changes in the repo, which is not what we want. listener.getLogger().println("First time build. Skipping changelog."); } else { changelog.to(out).max(MAX_CHANGELOG).execute(); executed = true; } } catch (GitException ge) { ge.printStackTrace(listener.error("Unable to retrieve changeset")); } finally { if (!executed) changelog.abort(); } } @Override @Deprecated // Overrides a deprecated implementation, must also be deprecated public void buildEnvVars(AbstractBuild<?, ?> build, Map<String, String> env) { buildEnvironment(build, env); } @Override public void buildEnvironment(Run<?, ?> build, java.util.Map<String, String> env) { Revision rev = fixNull(getBuildData(build)).getLastBuiltRevision(); if (rev!=null) { Branch branch = Iterables.getFirst(rev.getBranches(), null); if (branch!=null && branch.getName()!=null) { String remoteBranchName = getBranchName(branch); env.put(GIT_BRANCH, remoteBranchName); // TODO this is unmodular; should rather override LocalBranch.populateEnvironmentVariables LocalBranch lb = getExtensions().get(LocalBranch.class); if (lb != null) { // Set GIT_LOCAL_BRANCH variable from the LocalBranch extension String localBranchName = lb.getLocalBranch(); if (localBranchName == null || localBranchName.equals("**")) { // local branch is configured with empty value or "**" so use remote branch name for checkout localBranchName = deriveLocalBranchName(remoteBranchName); } env.put(GIT_LOCAL_BRANCH, localBranchName); } RelativeTargetDirectory rtd = getExtensions().get(RelativeTargetDirectory.class); if (rtd != null) { String localRelativeTargetDir = rtd.getRelativeTargetDir(); if ( localRelativeTargetDir == null ){ localRelativeTargetDir = ""; } env.put(GIT_CHECKOUT_DIR, localRelativeTargetDir); } String prevCommit = getLastBuiltCommitOfBranch(build, branch); if (prevCommit != null) { env.put(GIT_PREVIOUS_COMMIT, prevCommit); } String prevSuccessfulCommit = getLastSuccessfulBuiltCommitOfBranch(build, branch); if (prevSuccessfulCommit != null) { env.put(GIT_PREVIOUS_SUCCESSFUL_COMMIT, prevSuccessfulCommit); } } String sha1 = Util.fixEmpty(rev.getSha1String()); if (sha1 != null && !sha1.isEmpty()) { env.put(GIT_COMMIT, sha1); } } /* Check all repository URLs are not empty */ /* JENKINS-38608 reports an unhelpful error message when a repository URL is empty */ /* Throws an IllegalArgumentException because that 
exception is thrown by env.put() on a null argument */ int repoCount = 1; for (UserRemoteConfig config:userRemoteConfigs) { if (config.getUrl() == null) { throw new IllegalArgumentException("Git repository URL " + repoCount + " is an empty string in job definition. Checkout requires a valid repository URL"); } repoCount++; } if (userRemoteConfigs.size()>0) { env.put(GIT_URL, userRemoteConfigs.get(0).getUrl()); } if (userRemoteConfigs.size()>1) { int count=1; for (UserRemoteConfig config:userRemoteConfigs) { env.put(GIT_URL+"_"+count, config.getUrl()); count++; } } getDescriptor().populateEnvironmentVariables(env); for (GitSCMExtension ext : extensions) { ext.populateEnvironmentVariables(this, env); } } private String getBranchName(Branch branch) { String name = branch.getName(); if(name.startsWith("refs/remotes/")) { //Restore expected previous behaviour name = name.substring("refs/remotes/".length()); } return name; } private String getLastBuiltCommitOfBranch(Run<?, ?> build, Branch branch) { String prevCommit = null; if (build.getPreviousBuiltBuild() != null) { final Build lastBuildOfBranch = fixNull(getBuildData(build.getPreviousBuiltBuild())).getLastBuildOfBranch(branch.getName()); if (lastBuildOfBranch != null) { Revision previousRev = lastBuildOfBranch.getRevision(); if (previousRev != null) { prevCommit = previousRev.getSha1String(); } } } return prevCommit; } private String getLastSuccessfulBuiltCommitOfBranch(Run<?, ?> build, Branch branch) { String prevCommit = null; if (build.getPreviousSuccessfulBuild() != null) { final Build lastSuccessfulBuildOfBranch = fixNull(getBuildData(build.getPreviousSuccessfulBuild())).getLastBuildOfBranch(branch.getName()); if (lastSuccessfulBuildOfBranch != null) { Revision previousRev = lastSuccessfulBuildOfBranch.getRevision(); if (previousRev != null) { prevCommit = previousRev.getSha1String(); } } } return prevCommit; } @Override public ChangeLogParser createChangeLogParser() { try { GitClient gitClient = Git.with(TaskListener.NULL, new EnvVars()).in(new File(".")).using(gitTool).getClient(); return new GitChangeLogParser(gitClient, getExtensions().get(AuthorInChangelog.class) != null); } catch (IOException | InterruptedException e) { LOGGER.log(Level.WARNING, "Git client using '" + gitTool + "' changelog parser failed, using deprecated changelog parser", e); } return new GitChangeLogParser(null, getExtensions().get(AuthorInChangelog.class) != null); } @Extension public static final class DescriptorImpl extends SCMDescriptor<GitSCM> { private String gitExe; private String globalConfigName; private String globalConfigEmail; private boolean createAccountBasedOnEmail; private boolean useExistingAccountWithSameEmail; // private GitClientType defaultClientType = GitClientType.GITCLI; private boolean showEntireCommitSummaryInChanges; private boolean hideCredentials; private boolean allowSecondFetch; private boolean disableGitToolChooser; private boolean addGitTagAction; public DescriptorImpl() { super(GitSCM.class, GitRepositoryBrowser.class); load(); } @NonNull @Override public Permission getRequiredGlobalConfigPagePermission() { return Jenkins.MANAGE; } /** * Package protected method that was added for temporary use * with the Manage permission until the plugin required a * Jenkins core version that has Manage permission available. * Unfortunately, because it is package protected, it is part * of the class signature and needs to be retained for * compatibility. 
Method was removed in git plugin 4.8.0 and * the removal seems to have exposed a bug elsewhere that is * reported as https://issues.jenkins.io/browse/JENKINS-66296 , * Restoring this method seems to resolve that issue. */ Permission getJenkinsManageOrAdmin() { return Jenkins.MANAGE; } public boolean isShowEntireCommitSummaryInChanges() { return showEntireCommitSummaryInChanges; } public boolean isHideCredentials() { return hideCredentials; } public void setHideCredentials(boolean hideCredentials) { this.hideCredentials = hideCredentials; } public void setShowEntireCommitSummaryInChanges(boolean showEntireCommitSummaryInChanges) { this.showEntireCommitSummaryInChanges = showEntireCommitSummaryInChanges; } public String getDisplayName() { return "Git"; } @Override public boolean isApplicable(Job project) { return true; } public List<GitSCMExtensionDescriptor> getExtensionDescriptors() { return GitSCMExtensionDescriptor.all(); } public boolean showGitToolOptions() { return Jenkins.get().getDescriptorByType(GitTool.DescriptorImpl.class).getInstallations().length>1; } /** * Lists available toolinstallations. * @return list of available git tools */ public List<GitTool> getGitTools() { GitTool[] gitToolInstallations = Jenkins.get().getDescriptorByType(GitTool.DescriptorImpl.class).getInstallations(); return Arrays.asList(gitToolInstallations); } public ListBoxModel doFillGitToolItems() { ListBoxModel r = new ListBoxModel(); for (GitTool git : getGitTools()) { r.add(git.getName()); } return r; } /** * Path to git executable. * @deprecated * @see GitTool * @return git executable */ @Deprecated public String getGitExe() { return gitExe; } /** * Global setting to be used to set GIT_COMMITTER_NAME and GIT_AUTHOR_NAME. * @return user.name value */ public String getGlobalConfigName() { return Util.fixEmptyAndTrim(globalConfigName); } /** * Global setting to be used to set GIT_COMMITTER_NAME and GIT_AUTHOR_NAME. * @param globalConfigName user.name value to be assigned */ public void setGlobalConfigName(String globalConfigName) { this.globalConfigName = globalConfigName; } /** * Global setting to be used to set GIT_COMMITTER_EMAIL and GIT_AUTHOR_EMAIL. * @return user.email value */ public String getGlobalConfigEmail() { return Util.fixEmptyAndTrim(globalConfigEmail); } /** * Global setting to be used to set GIT_COMMITTER_EMAIL and GIT_AUTHOR_EMAIL. 
* @param globalConfigEmail user.email value to be assigned */ public void setGlobalConfigEmail(String globalConfigEmail) { this.globalConfigEmail = globalConfigEmail; } public boolean isCreateAccountBasedOnEmail() { return createAccountBasedOnEmail; } public void setCreateAccountBasedOnEmail(boolean createAccountBasedOnEmail) { this.createAccountBasedOnEmail = createAccountBasedOnEmail; } public boolean isUseExistingAccountWithSameEmail() { return useExistingAccountWithSameEmail; } public void setUseExistingAccountWithSameEmail(boolean useExistingAccountWithSameEmail) { this.useExistingAccountWithSameEmail = useExistingAccountWithSameEmail; } public boolean isAllowSecondFetch() { return allowSecondFetch; } public void setAllowSecondFetch(boolean allowSecondFetch) { this.allowSecondFetch = allowSecondFetch; } public boolean isDisableGitToolChooser() { return disableGitToolChooser; } public void setDisableGitToolChooser(boolean disableGitToolChooser) { this.disableGitToolChooser = disableGitToolChooser; } public boolean isAddGitTagAction() { return addGitTagAction; } public void setAddGitTagAction(boolean addGitTagAction) { this.addGitTagAction = addGitTagAction; } /** * Old configuration of git executable - exposed so that we can * migrate this setting to GitTool without deprecation warnings. * @return git executable */ public String getOldGitExe() { return gitExe; } public static List<RemoteConfig> createRepositoryConfigurations(String[] urls, String[] repoNames, String[] refs) throws IOException { List<RemoteConfig> remoteRepositories; Config repoConfig = new Config(); // Make up a repo config from the request parameters String[] names = repoNames; names = GitUtils.fixupNames(names, urls); for (int i = 0; i < names.length; i++) { String url = urls[i]; if (url == null) { continue; } String name = names[i]; name = name.replace(' ', '_'); if (isBlank(refs[i])) { refs[i] = "+refs/heads/*:refs/remotes/" + name + "/*"; } repoConfig.setString("remote", name, "url", url); repoConfig.setStringList("remote", name, "fetch", new ArrayList<>(Arrays.asList(refs[i].split("\\s+")))); } try { remoteRepositories = RemoteConfig.getAllRemoteConfigs(repoConfig); } catch (Exception e) { throw new GitException("Error creating repositories", e); } return remoteRepositories; } public static PreBuildMergeOptions createMergeOptions(UserMergeOptions mergeOptionsBean, List<RemoteConfig> remoteRepositories) throws FormException { PreBuildMergeOptions mergeOptions = new PreBuildMergeOptions(); if (mergeOptionsBean != null) { RemoteConfig mergeRemote = null; String mergeRemoteName = mergeOptionsBean.getMergeRemote().trim(); if (mergeRemoteName.length() == 0) { mergeRemote = remoteRepositories.get(0); } else { for (RemoteConfig remote : remoteRepositories) { if (remote.getName().equals(mergeRemoteName)) { mergeRemote = remote; break; } } } if (mergeRemote == null) { throw new FormException("No remote repository configured with name '" + mergeRemoteName + "'", "git.mergeRemote"); } mergeOptions.setMergeRemote(mergeRemote); mergeOptions.setMergeTarget(mergeOptionsBean.getMergeTarget()); mergeOptions.setMergeStrategy(mergeOptionsBean.getMergeStrategy()); mergeOptions.setFastForwardMode(mergeOptionsBean.getFastForwardMode()); } return mergeOptions; } public FormValidation doGitRemoteNameCheck(StaplerRequest req) throws IOException, ServletException { String mergeRemoteName = req.getParameter("value"); boolean isMerge = req.getParameter("isMerge") != null; // Added isMerge because we don't want to allow empty remote names for 
tag/branch pushes. if (mergeRemoteName.length() == 0 && isMerge) { return FormValidation.ok(); } String[] urls = req.getParameterValues("repo.url"); String[] names = req.getParameterValues("repo.name"); if (urls != null && names != null) for (String name : GitUtils.fixupNames(names, urls)) if (name.equals(mergeRemoteName)) return FormValidation.ok(); return FormValidation.error("No remote repository configured with name '" + mergeRemoteName + "'"); } @Override public boolean configure(StaplerRequest req, JSONObject formData) throws FormException { req.bindJSON(this, formData); save(); return true; } /** * Fill in the environment variables for launching git * @param env base environment variables */ public void populateEnvironmentVariables(Map<String,String> env) { String name = getGlobalConfigName(); if (name!=null) { env.put("GIT_COMMITTER_NAME", name); env.put("GIT_AUTHOR_NAME", name); } String email = getGlobalConfigEmail(); if (email!=null) { env.put("GIT_COMMITTER_EMAIL", email); env.put("GIT_AUTHOR_EMAIL", email); } } // public GitClientType getDefaultClientType() { // return defaultClientType; // } // // public void setDefaultClientType(String defaultClientType) { // this.defaultClientType = GitClientType.valueOf(defaultClientType); // } } private static final long serialVersionUID = 1L; @Whitelisted @Deprecated public boolean isDoGenerateSubmoduleConfigurations() { return false; } @Exported @Whitelisted @SuppressFBWarnings(value="EI_EXPOSE_REP", justification="Low risk") public List<BranchSpec> getBranches() { return branches; } @Override public String getKey() { ScmName scmName = getExtensions().get(ScmName.class); if (scmName != null) { return scmName.getName(); } StringBuilder b = new StringBuilder("git"); for (RemoteConfig cfg : getRepositories()) { for (URIish uri : cfg.getURIs()) { b.append(' ').append(uri.toString()); } } return b.toString(); } /** * @deprecated Use {@link PreBuildMerge}. * @return pre-build merge options * @throws FormException on form error */ @Exported @Deprecated public PreBuildMergeOptions getMergeOptions() throws FormException { return DescriptorImpl.createMergeOptions(getUserMergeOptions(), remoteRepositories); } private boolean isRelevantBuildData(BuildData bd) { for(UserRemoteConfig c : getUserRemoteConfigs()) { if(bd.hasBeenReferenced(c.getUrl())) { return true; } } return false; } /** * @deprecated * @param build run whose build data is returned * @param clone true if returned build data should be copied rather than referenced * @return build data for build run */ public BuildData getBuildData(Run build, boolean clone) { return clone ? copyBuildData(build) : getBuildData(build); } /** * Like {@link #getBuildData(Run)}, but copy the data into a new object, * which is used as the first step for updating the data for the next build. * @param build run whose BuildData is returned * @return copy of build data for build */ public BuildData copyBuildData(Run build) { BuildData base = getBuildData(build); ScmName sn = getExtensions().get(ScmName.class); String scmName = sn == null ? null : sn.getName(); if (base==null) return new BuildData(scmName, getUserRemoteConfigs()); else { BuildData buildData = base.clone(); buildData.setScmName(scmName); return buildData; } } /** * Find the build log (BuildData) recorded with the last build that completed. BuildData * may not be recorded if an exception occurs in the plugin logic. 
* * @param build run whose build data is returned * @return the last recorded build data */ public @CheckForNull BuildData getBuildData(Run build) { BuildData buildData = null; while (build != null) { List<BuildData> buildDataList = build.getActions(BuildData.class); // We need to get the latest recorded build data. It may happen // that the build has more than one checkout of the same repo. List<BuildData> buildDataListReverted = reversedView(buildDataList); for (BuildData bd : buildDataListReverted) { if (bd != null && isRelevantBuildData(bd)) { buildData = bd; break; } } if (buildData != null) { break; } build = build.getPreviousBuild(); } return buildData; } /** * Gets a reversed view of an unmodifiable list without using increasing space or time. * @param list The list to revert. * @param <T> The type of the elements of the list. * @return The list <i>reverted</i>. */ private <T> List<T> reversedView(final List<T> list) { return new AbstractList<T>() { @Override public T get(int index) { return list.get(list.size() - 1 - index); } @Override public int size() { return list.size(); } }; } /** * Given the workspace, gets the working directory, which will be the workspace * if no relative target dir is specified. Otherwise, it'll be "workspace/relativeTargetDir". * * @param context job context for working directory * @param workspace initial FilePath of job workspace * @param environment environment variables used in job context * @param listener build log * @return working directory or null if workspace is null * @throws IOException on input or output error * @throws InterruptedException when interrupted */ protected FilePath workingDirectory(Job<?,?> context, FilePath workspace, EnvVars environment, TaskListener listener) throws IOException, InterruptedException { // JENKINS-10880: workspace can be null if (workspace == null) { return null; } for (GitSCMExtension ext : extensions) { FilePath r = ext.getWorkingDirectory(this, context, workspace, environment, listener); if (r!=null) return r; } return workspace; } /** * Given a Revision "r", check whether the list of revisions "COMMITS_WE_HAVE_BUILT..r" are to be entirely excluded given the exclusion rules * * @param git GitClient object * @param r Revision object * @param listener build log * @return true if any exclusion files are matched, false otherwise. 
*/ private boolean isRevExcluded(GitClient git, Revision r, TaskListener listener, BuildData buildData) throws IOException, InterruptedException { try { List<String> revShow; if (buildData != null && buildData.lastBuild != null) { if (getExtensions().get(PathRestriction.class) != null) { revShow = git.showRevision(buildData.lastBuild.revision.getSha1(), r.getSha1()); } else { revShow = git.showRevision(buildData.lastBuild.revision.getSha1(), r.getSha1(), false); } } else { revShow = git.showRevision(r.getSha1()); } revShow.add("commit "); // sentinel value int start=0, idx=0; for (String line : revShow) { if (line.startsWith("commit ") && idx!=0) { boolean showEntireCommitSummary = GitChangeSet.isShowEntireCommitSummaryInChanges() || !(git instanceof CliGitAPIImpl); GitChangeSet change = new GitChangeSet(revShow.subList(start,idx), getExtensions().get(AuthorInChangelog.class)!=null, showEntireCommitSummary); Boolean excludeThisCommit=null; for (GitSCMExtension ext : extensions) { excludeThisCommit = ext.isRevExcluded(this, git, change, listener, buildData); if (excludeThisCommit!=null) break; } if (excludeThisCommit==null || !excludeThisCommit) return false; // this sequence of commits have one commit that we want to build start = idx; } idx++; } assert start==revShow.size()-1; // every commit got excluded return true; } catch (GitException e) { e.printStackTrace(listener.error("Failed to determine if we want to exclude " + r.getSha1String())); return false; // for historical reason this is not considered a fatal error. } } /** * Data bound setter for doGenerateSubmoduleConfigurations that * intentionally ignores the value passed by the caller. * Submodule configuration generation was untested and unlikely to * work prior to git plugin 4.6.0. It was removed from git plugin * 4.6.0 to improve the experience for Pipeline Syntax users. * * @param ignoredValue ignored because submodule configuration * generation is no longer supported */ @DataBoundSetter public void setDoGenerateSubmoduleConfigurations(boolean ignoredValue) { } /** * Returns false, the constant value of doGenerateSubmoduleConfigurations. * @return false, the constant value of doGenerateSubmoduleConfigurations. */ @Deprecated public boolean getDoGenerateSubmoduleConfigurations() { return doGenerateSubmoduleConfigurations; } @Initializer(after=PLUGINS_STARTED) public static void onLoaded() { Jenkins jenkins = Jenkins.get(); DescriptorImpl desc = jenkins.getDescriptorByType(DescriptorImpl.class); if (desc.getOldGitExe() != null) { String exe = desc.getOldGitExe(); String defaultGit = GitTool.getDefaultInstallation().getGitExe(); if (exe.equals(defaultGit)) { return; } System.err.println("[WARNING] you're using deprecated gitexe attribute to configure git plugin. Use Git installations"); } } @Initializer(before=JOB_LOADED) public static void configureXtream() { Run.XSTREAM.registerConverter(new ObjectIdConverter()); Items.XSTREAM.registerConverter(new RemoteConfigConverter(Items.XSTREAM)); Items.XSTREAM.alias("org.spearce.jgit.transport.RemoteConfig", RemoteConfig.class); } private static final Logger LOGGER = Logger.getLogger(GitSCM.class.getName()); /** * Set to true to enable more logging to build's {@link TaskListener}. * Used by various classes in this package. 
*/ @SuppressFBWarnings(value="MS_SHOULD_BE_FINAL", justification="Not final so users can adjust log verbosity") public static boolean VERBOSE = Boolean.getBoolean(GitSCM.class.getName() + ".verbose"); /** * To avoid pointlessly large changelog, we'll limit the number of changes up to this. */ public static final int MAX_CHANGELOG = Integer.getInteger(GitSCM.class.getName()+".maxChangelog",1024); }
Use `CredentialsProvider.findCredentialById` whenever possible
src/main/java/hudson/plugins/git/GitSCM.java
Use `CredentialsProvider.findCredentialById` whenever possible
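
The GitSCM record above still resolves credentials through CredentialsProvider.lookupCredentials() plus CredentialsMatchers (see its lookupScanCredentials helper). Below is a minimal sketch of the pattern the commit subject recommends instead, assuming the Jenkins credentials-plugin API; the Run handle and credentials id are hypothetical placeholders, not values taken from the record:

import com.cloudbees.plugins.credentials.CredentialsProvider;
import com.cloudbees.plugins.credentials.common.StandardUsernameCredentials;
import hudson.model.Run;

class CredentialLookupSketch {
    // Resolves a credential in the context of a specific build rather than a
    // Job, which also lets Jenkins account for credentials supplied as build
    // parameters. Sketch only; error handling is omitted.
    static StandardUsernameCredentials findForBuild(Run<?, ?> build, String credentialsId) {
        return CredentialsProvider.findCredentialById(
                credentialsId, StandardUsernameCredentials.class, build);
    }
}
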
Java
mit
b460569b9841dc958b2af62fac917e3554406cc9
0
alvinlyj/main,CS2103JAN2017-T11-B1/main,CS2103JAN2017-T11-B1/main,alvinlyj/main
package guitests;

import static org.junit.Assert.assertTrue;

import org.junit.Test;

public class ClearCommandTest extends TaskManagerGuiTest {

    // @@author A0141102H
    @Test
    public void clear() {
        // verify a non-empty list can be cleared
        assertTrue(eventTaskListPanel.isListMatching(td.getTypicalTasks()));
        assertTrue(deadlineTaskListPanel.isListMatching(td.getTypicalTasks()));
        assertTrue(floatingTaskListPanel.isListMatching(td.getTypicalTasks()));
        assertClearCommandSuccess();

        // verify other commands can work after a clear command
        commandBox.runCommand(td.sampleEvent.getAddCommand());
        assertTrue(eventTaskListPanel.isListMatching(td.sampleEvent));
        commandBox.runCommand("DELETE 1");
        assertListSize(0);

        // verify clear command works when the list is empty
        assertClearCommandSuccess();
    }

    private void assertClearCommandSuccess() {
        commandBox.runCommand("CLEAR");
        assertListSize(0);
        assertResultMessage("Task Manager has been cleared!");
    }
}
src/test/java/guitests/ClearCommandTest.java
package guitests;

import static org.junit.Assert.assertTrue;

import org.junit.Test;

public class ClearCommandTest extends TaskManagerGuiTest {

    // @@author A0141102H
    @Test
    public void clear() {
        // verify a non-empty list can be cleared
        assertTrue(taskListPanel.isListMatching(td.getTypicalTasks()));
        assertClearCommandSuccess();

        // verify other commands can work after a clear command
        commandBox.runCommand(td.sampleEvent.getAddCommand());
        assertTrue(eventTaskListPanel.isListMatching(td.sampleEvent));
        commandBox.runCommand("DELETE 1");
        assertListSize(0);

        // verify clear command works when the list is empty
        assertClearCommandSuccess();
    }

    private void assertClearCommandSuccess() {
        commandBox.runCommand("CLEAR");
        assertListSize(0);
        assertResultMessage("Task Manager has been cleared!");
    }
}
Update Clear command test
src/test/java/guitests/ClearCommandTest.java
Update Clear command test
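
The updated test repeats the same assertion across the event, deadline, and floating panels. A possible refactoring sketch, not part of the commit, assuming each panel handle exposes the isListMatching(TestTask...) signature used above and that td.getTypicalTasks() returns a TestTask[]:

    // Collapses the three per-panel checks into one helper; an array argument
    // is accepted by the varargs parameter unchanged.
    private void assertAllPanelsMatch(TestTask... tasks) {
        assertTrue(eventTaskListPanel.isListMatching(tasks));
        assertTrue(deadlineTaskListPanel.isListMatching(tasks));
        assertTrue(floatingTaskListPanel.isListMatching(tasks));
    }
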
Java
mit
fe3fa14901798492cad10ed88e6a8c72883571ff
0
richard-roberts/SOMns,richard-roberts/SOMns,smarr/SOMns,VAISHALI-DHANOA/SOMns,richard-roberts/SOMns,richard-roberts/SOMns,richard-roberts/SOMns,smarr/SOMns,smarr/SOMns,MetaConc/SOMns,richard-roberts/SOMns,VAISHALI-DHANOA/SOMns,smarr/SOMns,smarr/SOMns,VAISHALI-DHANOA/SOMns,MetaConc/SOMns,MetaConc/SOMns,smarr/SOMns,richard-roberts/SOMns,smarr/SOMns
package som.interpreter.actors;

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.NoSuchElementException;
import java.util.concurrent.ForkJoinPool;

import som.interpreter.actors.SPromise.SResolver;
import som.primitives.ObjectPrims.IsValue;
import som.vmobjects.SSymbol;

import com.oracle.truffle.api.CompilerAsserts;

// design goals:
// - avoid 1-thread per actor
// - have a low-overhead and safe scheduling system
// - use an executor or fork/join pool for execution
// - each actor should only have at max. one active task

// algorithmic sketch
// - enqueue message in actor queue
// - check whether we need to submit it to the pool
// - could perhaps be a simple boolean flag?
// - at the end of a turn, we take the next message, and
// - submit a new task to the pool

// TODO: figure out whether there is a simple lock-free design commonly used
public class Actor {

    private final ArrayDeque<EventualMessage> mailbox = new ArrayDeque<>();
    private boolean isExecuting;
    private final boolean isMain;
    private final int id;

    private static final ArrayList<Actor> actors = new ArrayList<Actor>();

    public Actor() {
        isExecuting = false;
        isMain = false;
        synchronized (actors) {
            actors.add(this);
            id = actors.size() - 1;
        }
    }

    /**
     * This constructor should only be used for the main actor!
     */
    public Actor(final boolean isMainActor) {
        assert isMainActor;
        isExecuting = true;
        isMain = true;
        synchronized (actors) {
            actors.add(this);
            id = actors.size() - 1;
        }
    }

    public SPromise eventualSend(final Actor currentActor, final SSymbol selector,
            final Object[] args) {
        SPromise result = new SPromise(currentActor);
        SResolver resolver = new SResolver(result);

        CompilerAsserts.neverPartOfCompilation("This needs to be optimized");

        EventualMessage msg;
        if (currentActor == this) {
            // self send, no arg handling needed, they come straight from the same actor
            msg = new EventualMessage(this, selector, args, resolver, currentActor);
        } else {
            for (int i = 0; i < args.length; i++) {
                args[i] = wrapForUse(args[i], currentActor);
            }
            msg = new EventualMessage(this, selector, args, resolver, currentActor);
        }
        enqueueMessage(msg);

        return result;
    }

    public Object wrapForUse(final Object o, final Actor owner) {
        CompilerAsserts.neverPartOfCompilation("This should probably be optimized");
        if (o instanceof SFarReference) {
            if (((SFarReference) o).getActor() == this) {
                return ((SFarReference) o).getValue();
            }
        } else if (o instanceof SPromise) {
            // promises cannot just be wrapped in far references, instead, other actors
            // should get a new promise that is going to be resolved once the original
            // promise gets resolved
            SPromise orgProm = (SPromise) o;
            // assert orgProm.getOwner() == owner; this can be another actor, which initialized a scheduled eventual send by resolving a promise, that's the promise pipelining...
            if (orgProm.getOwner() == this) {
                return orgProm;
            }

            SPromise remote = new SPromise(this);
            synchronized (orgProm) {
                if (orgProm.isSomehowResolved()) {
                    orgProm.copyValueToRemotePromise(remote);
                } else {
                    ((SPromise) o).addChainedPromise(remote);
                }
                return remote;
            }
        } else if (!IsValue.isObjectValue(o)) {
            if (this != owner) {
                return new SFarReference(owner, o);
            }
        }
        return o;
    }

    public synchronized void enqueueMessage(final EventualMessage msg) {
        assert msg.isReceiverSet();

        if (isExecuting) {
            mailbox.add(msg);
        } else {
            ForkJoinPool.commonPool().execute(msg);
            isExecuting = true;
        }
    }

    /**
     * This method is only to be called from the EventualMessage task, and the
     * main Actor in Bootstrap.executeApplication().
     */
    public synchronized void enqueueNextMessageForProcessing() {
        try {
            EventualMessage nextTask = mailbox.remove();
            assert isExecuting;
            ForkJoinPool.commonPool().execute(nextTask);
            return;
        } catch (NoSuchElementException e) {
            isExecuting = false;
        }
    }

    @Override
    public String toString() {
        return "Actor[" + (isMain ? "main" : id) + "]";
    }
}
src/som/interpreter/actors/Actor.java
package som.interpreter.actors;

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.NoSuchElementException;
import java.util.concurrent.ForkJoinPool;

import som.interpreter.actors.SPromise.SResolver;
import som.primitives.ObjectPrims.IsValue;
import som.vmobjects.SSymbol;

import com.oracle.truffle.api.CompilerAsserts;

// design goals:
// - avoid 1-thread per actor
// - have a low-overhead and safe scheduling system
// - use an executor or fork/join pool for execution
// - each actor should only have at max. one active task

// algorithmic sketch
// - enqueue message in actor queue
// - check whether we need to submit it to the pool
// - could perhaps be a simple boolean flag?
// - at the end of a turn, we take the next message, and
// - submit a new task to the pool

// TODO: figure out whether there is a simple lock-free design commonly used
public class Actor {

    private final ArrayDeque<EventualMessage> mailbox = new ArrayDeque<>();
    private boolean isExecuting;
    private final boolean isMain;
    private final int id;

    private static final ArrayList<Actor> actors = new ArrayList<Actor>();

    public Actor() {
        isExecuting = false;
        isMain = false;
        synchronized (actors) {
            actors.add(this);
            id = actors.size() - 1;
        }
    }

    /**
     * This constructor should only be used for the main actor!
     */
    public Actor(final boolean isMainActor) {
        assert isMainActor;
        isExecuting = true;
        isMain = true;
        synchronized (actors) {
            actors.add(this);
            id = actors.size() - 1;
        }
    }

    public SPromise eventualSend(final Actor currentActor, final SSymbol selector,
            final Object[] args) {
        SPromise result = new SPromise(currentActor);
        SResolver resolver = new SResolver(result);

        CompilerAsserts.neverPartOfCompilation("This needs to be optimized");

        EventualMessage msg;
        if (currentActor == this) {
            // self send, no arg handling needed, they come straight from the same actor
            msg = new EventualMessage(this, selector, args, resolver, currentActor);
        } else {
            for (int i = 0; i < args.length; i++) {
                args[i] = wrapForUse(args[i], currentActor);
            }
            msg = new EventualMessage(this, selector, args, resolver, currentActor);
        }
        enqueueMessage(msg);

        return result;
    }

    public Object wrapForUse(final Object o, final Actor owner) {
        CompilerAsserts.neverPartOfCompilation("This should probably be optimized");
        if (o instanceof SFarReference) {
            if (((SFarReference) o).getActor() == this) {
                return ((SFarReference) o).getValue();
            }
        } else if (o instanceof SPromise) {
            // promises cannot just be wrapped in far references, instead, other actors
            // should get a new promise that is going to be resolved once the original
            // promise gets resolved
            SPromise orgProm = (SPromise) o;
            // assert orgProm.getOwner() == owner; this can be another actor, which initialized a scheduled eventual send by resolving a promise, that's the promise pipelining...
            if (orgProm.getOwner() == this) {
                return orgProm;
            }

            SPromise remote = new SPromise(this);
            synchronized (orgProm) {
                if (orgProm.isSomehowResolved()) {
                    remote.copyValueToRemotePromise(orgProm);
                } else {
                    ((SPromise) o).addChainedPromise(remote);
                }
                return remote;
            }
        } else if (!IsValue.isObjectValue(o)) {
            if (this != owner) {
                return new SFarReference(owner, o);
            }
        }
        return o;
    }

    public synchronized void enqueueMessage(final EventualMessage msg) {
        assert msg.isReceiverSet();

        if (isExecuting) {
            mailbox.add(msg);
        } else {
            ForkJoinPool.commonPool().execute(msg);
            isExecuting = true;
        }
    }

    /**
     * This method is only to be called from the EventualMessage task, and the
     * main Actor in Bootstrap.executeApplication().
     */
    public synchronized void enqueueNextMessageForProcessing() {
        try {
            EventualMessage nextTask = mailbox.remove();
            assert isExecuting;
            ForkJoinPool.commonPool().execute(nextTask);
            return;
        } catch (NoSuchElementException e) {
            isExecuting = false;
        }
    }

    @Override
    public String toString() {
        return "Actor[" + (isMain ? "main" : id) + "]";
    }
}
Fixed reversed value propagation for resolved promise

Signed-off-by: Stefan Marr <[email protected]>
src/som/interpreter/actors/Actor.java
Fixed reversed value propagation for resolved promise
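
The one-line diff in this record swaps the receiver and argument of copyValueToRemotePromise: the old code copied the fresh remote promise's empty state into the already-resolved original. A self-contained toy model of why the direction matters; the class below only mirrors the assumed semantics of SPromise and is not the SOMns implementation:

// Toy stand-in for SPromise; illustrative only.
class ToyPromise {
    Object value;
    boolean resolved;

    // Copies THIS promise's resolved state into the given remote promise.
    void copyValueToRemotePromise(ToyPromise remote) {
        remote.value = this.value;
        remote.resolved = this.resolved;
    }
}

class PromiseDirectionDemo {
    public static void main(String[] args) {
        ToyPromise original = new ToyPromise();
        original.value = "result";
        original.resolved = true;

        ToyPromise remote = new ToyPromise();

        // Old, buggy direction: remote.copyValueToRemotePromise(original)
        // would overwrite the original with the remote's unresolved state
        // and leave the remote promise forever unresolved.

        // Fixed direction: the resolved value flows out to the remote side.
        original.copyValueToRemotePromise(remote);
        System.out.println(remote.resolved + " " + remote.value); // true result
    }
}
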
Java
mit
3b1e8f223ae82a1d0fc552a067d33f28842d04c6
0
fclairamb/tc65lib,fclairamb/tc65lib
package org.javacint.console;

import java.io.InputStream;
import java.io.PrintStream;
import java.util.Date;

import org.javacint.task.Timers;
import org.javacint.time.TimeClient;
import org.javacint.time.TimeRetriever;
import org.javacint.time.ntp.SntpClient;

/**
 * NTP client testing command. Usage:<br /><q>ntp &lt;server&gt;</q>
 */
public class NTPTestCommand implements ConsoleCommand {

    private static final String COMMAND = "ntp ";

    public boolean consoleCommand(String command, InputStream is, final PrintStream out) {
        if (command.startsWith(COMMAND)) {
            final String server = command.substring(COMMAND.length()).trim();
            TimeRetriever timeRetriever = new TimeRetriever(new TimeClient() {
                final TimeClient src = new SntpClient(server);

                public long getTime() throws Exception {
                    long time = src.getTime();
                    if (time != 0) {
                        out.println("[ NTP ] " + server + " - OK - " + new Date(time * 1000).toString());
                    } else {
                        out.println("[ NTP ] " + server + " - ERROR");
                    }
                    return time;
                }
            });
            Timers.getSlow().schedule(timeRetriever, 0);
            return true;
        } else if (command.equals("help")) {
            out.println("[HELP] ntp <server> - Get time from a server");
        }
        return false;
    }
}
tc65lib/src/org/javacint/console/NTPTestCommand.java
package org.javacint.console;

import java.io.InputStream;
import java.io.PrintStream;
import java.util.Date;

import org.javacint.task.Timers;
import org.javacint.time.TimeClient;
import org.javacint.time.TimeRetriever;
import org.javacint.time.ntp.SntpClient;

/**
 * NTP client testing command. Usage:<br /><q>ntp &lt;server&gt;</q>
 */
public class NTPTestCommand implements ConsoleCommand {

    private static final String COMMAND = "ntp ";

    public boolean consoleCommand(String command, InputStream is, final PrintStream out) {
        if (command.startsWith(COMMAND)) {
            final String server = command.substring(COMMAND.length()).trim();
            TimeRetriever timeRetriever = new TimeRetriever(new TimeClient() {
                final TimeClient src = new SntpClient();

                public long getTime() throws Exception {
                    long time = src.getTime();
                    if (time != 0) {
                        out.println("[ NTP ] " + server + " - OK - " + new Date(time*1000).toString());
                    } else {
                        out.println("[ NTP ] " + server + " - ERROR");
                    }
                    return time;
                }
            });
            Timers.getSlow().schedule(timeRetriever, 0);
            return true;
        } else if (command.equals("help")) {
            out.println("[HELP] ntp <server> - Get time from a server");
        }
        return false;
    }
}
NTP: Small glitch (it was always using "ntp.pool.org")
tc65lib/src/org/javacint/console/NTPTestCommand.java
NTP: Small glitch (it was always using "ntp.pool.org")
Java
mit
8219af82dc9ada1d692190d7eefb5c04d492445c
0
AppLovin/SDK-Network-Adaptors,AppLovin/SDK-Network-Adaptors
package com.applovin.mediation; import android.app.Activity; import android.content.Context; import android.os.Bundle; import android.os.Handler; import android.os.Looper; import android.util.Log; import com.applovin.adview.AppLovinIncentivizedInterstitial; import com.applovin.sdk.AppLovinAd; import com.applovin.sdk.AppLovinAdClickListener; import com.applovin.sdk.AppLovinAdDisplayListener; import com.applovin.sdk.AppLovinAdLoadListener; import com.applovin.sdk.AppLovinAdRewardListener; import com.applovin.sdk.AppLovinAdVideoPlaybackListener; import com.applovin.sdk.AppLovinErrorCodes; import com.applovin.sdk.AppLovinSdk; import com.google.android.gms.ads.AdRequest; import com.google.android.gms.ads.mediation.MediationAdRequest; import com.google.android.gms.ads.mediation.OnContextChangedListener; import com.google.android.gms.ads.reward.RewardItem; import com.google.android.gms.ads.reward.mediation.MediationRewardedVideoAdAdapter; import com.google.android.gms.ads.reward.mediation.MediationRewardedVideoAdListener; import java.lang.reflect.Method; import java.util.HashMap; import java.util.Map; import static android.util.Log.DEBUG; import static android.util.Log.ERROR; /** * AppLovin SDK rewarded video adapter for AdMob. * <p> * Created by Thomas So on 5/29/17. */ public class ApplovinAdapter implements MediationRewardedVideoAdAdapter, OnContextChangedListener, AppLovinAdLoadListener, AppLovinAdDisplayListener, AppLovinAdClickListener, AppLovinAdVideoPlaybackListener, AppLovinAdRewardListener { private static final boolean LOGGING_ENABLED = true; private static final Handler UI_HANDLER = new Handler( Looper.getMainLooper() ); private static final String DEFAULT_ZONE = ""; // A map of Zone -> `AppLovinIncentivizedInterstitial` to be shared by instances of the custom event. // This prevents skipping of ads as this adapter will be re-created and preloaded (along with underlying `AppLovinIncentivizedInterstitial`) // on every ad load regardless if ad was actually displayed or not. private static final Map<String, AppLovinIncentivizedInterstitial> GLOBAL_INCENTIVIZED_INTERSTITIAL_ADS = new HashMap<String, AppLovinIncentivizedInterstitial>(); private boolean initialized; private AppLovinIncentivizedInterstitial incentivizedInterstitial; private Context context; private MediationRewardedVideoAdListener listener; private boolean fullyWatched; private RewardItem reward; // // AdMob Custom Event Methods // @Override public void initialize(final Context context, final MediationAdRequest adRequest, final String userId, final MediationRewardedVideoAdListener listener, final Bundle serverParameters, final Bundle networkExtras) { // SDK versions BELOW 7.2.0 require a instance of an Activity to be passed in as the context if ( AppLovinSdk.VERSION_CODE < 720 && !( context instanceof Activity ) ) { log( ERROR, "Unable to request AppLovin rewarded video. Invalid context provided." ); listener.onInitializationFailed( this, AdRequest.ERROR_CODE_INVALID_REQUEST ); return; } log( DEBUG, "Initializing AppLovin rewarded video..." 
); this.context = context; this.listener = listener; if ( !initialized ) { AppLovinSdk.initializeSdk( context ); AppLovinSdk.getInstance( context ).setPluginVersion( "AdMob-2.0" ); initialized = true; } listener.onInitializationSucceeded( this ); } @Override public boolean isInitialized() { return initialized; } @Override public void loadAd(final MediationAdRequest adRequest, final Bundle serverParameters, final Bundle networkExtras) { log( DEBUG, "Requesting AppLovin rewarded video with networkExtras: " + networkExtras ); // Zones support is available on AppLovin SDK 7.5.0 and higher final String zoneId; if ( AppLovinSdk.VERSION_CODE >= 750 && networkExtras != null && networkExtras.containsKey( "zone_id" ) ) { zoneId = networkExtras.getString( "zone_id" ); } else { zoneId = DEFAULT_ZONE; } // Check if incentivized ad for zone already exists if ( GLOBAL_INCENTIVIZED_INTERSTITIAL_ADS.containsKey( zoneId ) ) { incentivizedInterstitial = GLOBAL_INCENTIVIZED_INTERSTITIAL_ADS.get( zoneId ); } else { // If this is a default Zone, create the incentivized ad normally if ( DEFAULT_ZONE.equals( zoneId ) ) { incentivizedInterstitial = AppLovinIncentivizedInterstitial.create( this.context ); } // Otherwise, use the Zones API else { incentivizedInterstitial = createIncentivizedInterstitialForZoneId( zoneId, AppLovinSdk.getInstance( this.context ) ); } GLOBAL_INCENTIVIZED_INTERSTITIAL_ADS.put( zoneId, incentivizedInterstitial ); } incentivizedInterstitial.preload( this ); } @Override public void showVideo() { if ( incentivizedInterstitial.isAdReadyToDisplay() ) { fullyWatched = false; reward = null; try { // AppLovin SDK < 7.2.0 uses an Activity, as opposed to Context in >= 7.2.0 final Class<?> contextClass = ( AppLovinSdk.VERSION_CODE < 720 ) ? Activity.class : Context.class; final Method showMethod = AppLovinIncentivizedInterstitial.class.getMethod( "show", contextClass, String.class, AppLovinAdRewardListener.class, AppLovinAdVideoPlaybackListener.class, AppLovinAdDisplayListener.class, AppLovinAdClickListener.class ); try { showMethod.invoke( incentivizedInterstitial, context, null, this, this, this, this ); } catch ( Throwable th ) { log( ERROR, "Unable to invoke show() method from AppLovinIncentivizedInterstitial." ); listener.onAdFailedToLoad( this, AdRequest.ERROR_CODE_INTERNAL_ERROR ); } } catch ( Throwable th ) { log( ERROR, "Unable to get show() method from AppLovinIncentivizedInterstitial." 
); listener.onAdFailedToLoad( this, AdRequest.ERROR_CODE_INTERNAL_ERROR ); } } else { log( ERROR, "Failed to show an AppLovin rewarded video before one was loaded" ); listener.onAdFailedToLoad( this, AdRequest.ERROR_CODE_INTERNAL_ERROR ); } } @Override public void onPause() {} @Override public void onResume() {} @Override public void onDestroy() {} @Override public void onContextChanged(final Context context) { if ( context != null ) { log( DEBUG, "Context changed: " + context ); this.context = context; } } // // Ad Load Listener // @Override public void adReceived(final AppLovinAd ad) { log( DEBUG, "Rewarded video did load ad: " + ad.getAdIdNumber() ); runOnUiThread( new Runnable() { @Override public void run() { listener.onAdLoaded( ApplovinAdapter.this ); } } ); } @Override public void failedToReceiveAd(final int errorCode) { log( DEBUG, "Rewarded video failed to load with error: " + errorCode ); runOnUiThread( new Runnable() { @Override public void run() { listener.onAdFailedToLoad( ApplovinAdapter.this, toAdMobErrorCode( errorCode ) ); } } ); // TODO: Add support for backfilling on regular ad request if invalid zone entered } // // Ad Display Listener // @Override public void adDisplayed(final AppLovinAd ad) { log( DEBUG, "Rewarded video displayed" ); listener.onAdOpened( this ); } @Override public void adHidden(final AppLovinAd ad) { log( DEBUG, "Rewarded video dismissed" ); if ( fullyWatched && reward != null ) { log( DEBUG, "Rewarded " + reward.getAmount() + " " + reward.getType() ); listener.onRewarded( this, reward ); } listener.onAdClosed( this ); } // // Ad Click Listener // @Override public void adClicked(final AppLovinAd ad) { log( DEBUG, "Rewarded video clicked" ); listener.onAdClicked( this ); listener.onAdLeftApplication( this ); } // // Video Playback Listener // @Override public void videoPlaybackBegan(AppLovinAd ad) { log( DEBUG, "Rewarded video playback began" ); listener.onVideoStarted( this ); } @Override public void videoPlaybackEnded(AppLovinAd ad, double percentViewed, boolean fullyWatched) { log( DEBUG, "Rewarded video playback ended at playback percent: " + percentViewed ); this.fullyWatched = fullyWatched; } // // Reward Listener // @Override public void userOverQuota(final AppLovinAd appLovinAd, final Map map) { log( ERROR, "Rewarded video validation request for ad did exceed quota with response: " + map ); } @Override public void validationRequestFailed(final AppLovinAd appLovinAd, final int errorCode) { log( ERROR, "Rewarded video validation request for ad failed with error code: " + errorCode ); } @Override public void userRewardRejected(final AppLovinAd appLovinAd, final Map map) { log( ERROR, "Rewarded video validation request was rejected with response: " + map ); } @Override public void userDeclinedToViewAd(final AppLovinAd appLovinAd) { log( DEBUG, "User declined to view rewarded video" ); } @Override public void userRewardVerified(final AppLovinAd ad, final Map map) { final String currency = (String) map.get( "currency" ); final String amountStr = (String) map.get( "amount" ); final int amount = (int) Double.parseDouble( amountStr ); // AppLovin returns amount as double log( DEBUG, "Verified " + amount + " " + currency ); reward = new AppLovinRewardItem( amount, currency ); } // // Dynamically create an instance of AppLovinIncentivizedInterstitial with a given zone without breaking backwards compatibility for publishers on older SDKs. 
// private AppLovinIncentivizedInterstitial createIncentivizedInterstitialForZoneId(final String zoneId, final AppLovinSdk sdk) { AppLovinIncentivizedInterstitial incent = null; try { final Method method = AppLovinIncentivizedInterstitial.class.getMethod( "create", String.class, AppLovinSdk.class ); incent = (AppLovinIncentivizedInterstitial) method.invoke( null, zoneId, sdk ); } catch ( Throwable th ) { log( ERROR, "Unable to load ad for zone: " + zoneId + "..." ); listener.onAdFailedToLoad( this, AdRequest.ERROR_CODE_INVALID_REQUEST ); } return incent; } // // Utility Methods // private static void log(final int priority, final String message) { if ( LOGGING_ENABLED ) { Log.println( priority, "AppLovinRewardedVideo", message ); } } private static int toAdMobErrorCode(final int applovinErrorCode) { if ( applovinErrorCode == AppLovinErrorCodes.NO_FILL ) { return AdRequest.ERROR_CODE_NO_FILL; } else if ( applovinErrorCode == AppLovinErrorCodes.NO_NETWORK || applovinErrorCode == AppLovinErrorCodes.FETCH_AD_TIMEOUT ) { return AdRequest.ERROR_CODE_NETWORK_ERROR; } else { return AdRequest.ERROR_CODE_INTERNAL_ERROR; } } /** * Reward item wrapper class. */ private static final class AppLovinRewardItem implements RewardItem { private final int amount; private final String type; private AppLovinRewardItem(final int amount, final String type) { this.amount = amount; this.type = type; } @Override public String getType() { return type; } @Override public int getAmount() { return amount; } } /** * Performs the given runnable on the main thread. */ public static void runOnUiThread(final Runnable runnable) { if ( Looper.myLooper() == Looper.getMainLooper() ) { runnable.run(); } else { UI_HANDLER.post( runnable ); } } }
AdMob/Android/ApplovinAdapter.java
package com.applovin.mediation; import android.app.Activity; import android.content.Context; import android.os.Bundle; import android.os.Handler; import android.os.Looper; import android.util.Log; import com.applovin.adview.AppLovinIncentivizedInterstitial; import com.applovin.sdk.AppLovinAd; import com.applovin.sdk.AppLovinAdClickListener; import com.applovin.sdk.AppLovinAdDisplayListener; import com.applovin.sdk.AppLovinAdLoadListener; import com.applovin.sdk.AppLovinAdRewardListener; import com.applovin.sdk.AppLovinAdVideoPlaybackListener; import com.applovin.sdk.AppLovinErrorCodes; import com.applovin.sdk.AppLovinSdk; import com.google.android.gms.ads.AdRequest; import com.google.android.gms.ads.mediation.MediationAdRequest; import com.google.android.gms.ads.mediation.OnContextChangedListener; import com.google.android.gms.ads.reward.RewardItem; import com.google.android.gms.ads.reward.mediation.MediationRewardedVideoAdAdapter; import com.google.android.gms.ads.reward.mediation.MediationRewardedVideoAdListener; import java.lang.reflect.Method; import java.util.HashMap; import java.util.Map; import static android.util.Log.DEBUG; import static android.util.Log.ERROR; /** * AppLovin SDK rewarded video adapter for AdMob. * <p> * Created by Thomas So on 5/29/17. */ public class ApplovinAdapter implements MediationRewardedVideoAdAdapter, OnContextChangedListener, AppLovinAdLoadListener, AppLovinAdDisplayListener, AppLovinAdClickListener, AppLovinAdVideoPlaybackListener, AppLovinAdRewardListener { private static final boolean LOGGING_ENABLED = true; private static final Handler UI_HANDLER = new Handler( Looper.getMainLooper() ); private static final String DEFAULT_ZONE = ""; // A map of Zone -> `AppLovinIncentivizedInterstitial` to be shared by instances of the custom event. // This prevents skipping of ads as this adapter will be re-created and preloaded (along with underlying `AppLovinIncentivizedInterstitial`) // on every ad load regardless if ad was actually displayed or not. private static final Map<String, AppLovinIncentivizedInterstitial> GLOBAL_INCENTIVIZED_INTERSTITIAL_ADS = new HashMap<String, AppLovinIncentivizedInterstitial>(); private boolean initialized; private AppLovinIncentivizedInterstitial incentivizedInterstitial; private Context context; private MediationRewardedVideoAdListener listener; private boolean fullyWatched; private RewardItem reward; // // AdMob Custom Event Methods // @Override public void initialize(final Context context, final MediationAdRequest adRequest, final String userId, final MediationRewardedVideoAdListener listener, final Bundle serverParameters, final Bundle networkExtras) { // SDK versions BELOW 7.2.0 require a instance of an Activity to be passed in as the context if ( AppLovinSdk.VERSION_CODE < 720 && !( context instanceof Activity ) ) { log( ERROR, "Unable to request AppLovin rewarded video. Invalid context provided." ); listener.onInitializationFailed( this, AdRequest.ERROR_CODE_INVALID_REQUEST ); return; } log( DEBUG, "Initializing AppLovin rewarded video..." 
); this.context = context; this.listener = listener; if ( !initialized ) { AppLovinSdk.initializeSdk( context ); AppLovinSdk.getInstance( context ).setPluginVersion( "AdMob-2.0" ); initialized = true; } listener.onInitializationSucceeded( this ); } @Override public boolean isInitialized() { return initialized; } @Override public void loadAd(final MediationAdRequest adRequest, final Bundle serverParameters, final Bundle networkExtras) { log( DEBUG, "Requesting AppLovin rewarded video with networkExtras: " + networkExtras ); // Zones support is available on AppLovin SDK 7.5.0 and higher final String zoneId; if ( networkExtras != null && networkExtras.containsKey( "zone_id" ) && AppLovinSdk.VERSION_CODE >= 750 ) { zoneId = networkExtras.getString( "zone_id" ); } else { zoneId = DEFAULT_ZONE; } // Check if incentivized ad for zone already exists if ( GLOBAL_INCENTIVIZED_INTERSTITIAL_ADS.containsKey( zoneId ) ) { incentivizedInterstitial = GLOBAL_INCENTIVIZED_INTERSTITIAL_ADS.get( zoneId ); } else { // If this is a default Zone, create the incentivized ad normally if ( DEFAULT_ZONE.equals( zoneId ) ) { incentivizedInterstitial = AppLovinIncentivizedInterstitial.create( this.context ); } // Otherwise, use the Zones API else { incentivizedInterstitial = createIncentivizedInterstitialForZoneId( zoneId, AppLovinSdk.getInstance( this.context ) ); } GLOBAL_INCENTIVIZED_INTERSTITIAL_ADS.put( zoneId, incentivizedInterstitial ); } incentivizedInterstitial.preload( this ); } @Override public void showVideo() { if ( incentivizedInterstitial.isAdReadyToDisplay() ) { fullyWatched = false; reward = null; try { // AppLovin SDK < 7.2.0 uses an Activity, as opposed to Context in >= 7.2.0 final Class<?> contextClass = ( AppLovinSdk.VERSION_CODE < 720 ) ? Activity.class : Context.class; final Method showMethod = AppLovinIncentivizedInterstitial.class.getMethod( "show", contextClass, String.class, AppLovinAdRewardListener.class, AppLovinAdVideoPlaybackListener.class, AppLovinAdDisplayListener.class, AppLovinAdClickListener.class ); try { showMethod.invoke( incentivizedInterstitial, context, null, this, this, this, this ); } catch ( Throwable th ) { log( ERROR, "Unable to invoke show() method from AppLovinIncentivizedInterstitial." ); listener.onAdFailedToLoad( this, AdRequest.ERROR_CODE_INTERNAL_ERROR ); } } catch ( Throwable th ) { log( ERROR, "Unable to get show() method from AppLovinIncentivizedInterstitial." 
); listener.onAdFailedToLoad( this, AdRequest.ERROR_CODE_INTERNAL_ERROR ); } } else { log( ERROR, "Failed to show an AppLovin rewarded video before one was loaded" ); listener.onAdFailedToLoad( this, AdRequest.ERROR_CODE_INTERNAL_ERROR ); } } @Override public void onPause() {} @Override public void onResume() {} @Override public void onDestroy() {} @Override public void onContextChanged(final Context context) { if ( context != null ) { log( DEBUG, "Context changed: " + context ); this.context = context; } } // // Ad Load Listener // @Override public void adReceived(final AppLovinAd ad) { log( DEBUG, "Rewarded video did load ad: " + ad.getAdIdNumber() ); runOnUiThread( new Runnable() { @Override public void run() { listener.onAdLoaded( ApplovinAdapter.this ); } } ); } @Override public void failedToReceiveAd(final int errorCode) { log( DEBUG, "Rewarded video failed to load with error: " + errorCode ); runOnUiThread( new Runnable() { @Override public void run() { listener.onAdFailedToLoad( ApplovinAdapter.this, toAdMobErrorCode( errorCode ) ); } } ); // TODO: Add support for backfilling on regular ad request if invalid zone entered } // // Ad Display Listener // @Override public void adDisplayed(final AppLovinAd ad) { log( DEBUG, "Rewarded video displayed" ); listener.onAdOpened( this ); } @Override public void adHidden(final AppLovinAd ad) { log( DEBUG, "Rewarded video dismissed" ); if ( fullyWatched && reward != null ) { log( DEBUG, "Rewarded " + reward.getAmount() + " " + reward.getType() ); listener.onRewarded( this, reward ); } listener.onAdClosed( this ); } // // Ad Click Listener // @Override public void adClicked(final AppLovinAd ad) { log( DEBUG, "Rewarded video clicked" ); listener.onAdClicked( this ); listener.onAdLeftApplication( this ); } // // Video Playback Listener // @Override public void videoPlaybackBegan(AppLovinAd ad) { log( DEBUG, "Rewarded video playback began" ); listener.onVideoStarted( this ); } @Override public void videoPlaybackEnded(AppLovinAd ad, double percentViewed, boolean fullyWatched) { log( DEBUG, "Rewarded video playback ended at playback percent: " + percentViewed ); this.fullyWatched = fullyWatched; } // // Reward Listener // @Override public void userOverQuota(final AppLovinAd appLovinAd, final Map map) { log( ERROR, "Rewarded video validation request for ad did exceed quota with response: " + map ); } @Override public void validationRequestFailed(final AppLovinAd appLovinAd, final int errorCode) { log( ERROR, "Rewarded video validation request for ad failed with error code: " + errorCode ); } @Override public void userRewardRejected(final AppLovinAd appLovinAd, final Map map) { log( ERROR, "Rewarded video validation request was rejected with response: " + map ); } @Override public void userDeclinedToViewAd(final AppLovinAd appLovinAd) { log( DEBUG, "User declined to view rewarded video" ); } @Override public void userRewardVerified(final AppLovinAd ad, final Map map) { final String currency = (String) map.get( "currency" ); final String amountStr = (String) map.get( "amount" ); final int amount = (int) Double.parseDouble( amountStr ); // AppLovin returns amount as double log( DEBUG, "Verified " + amount + " " + currency ); reward = new AppLovinRewardItem( amount, currency ); } // // Dynamically create an instance of AppLovinIncentivizedInterstitial with a given zone without breaking backwards compatibility for publishers on older SDKs. 
// private AppLovinIncentivizedInterstitial createIncentivizedInterstitialForZoneId(final String zoneId, final AppLovinSdk sdk) { AppLovinIncentivizedInterstitial incent = null; try { final Method method = AppLovinIncentivizedInterstitial.class.getMethod( "create", String.class, AppLovinSdk.class ); incent = (AppLovinIncentivizedInterstitial) method.invoke( null, zoneId, sdk ); } catch ( Throwable th ) { log( ERROR, "Unable to load ad for zone: " + zoneId + "..." ); listener.onAdFailedToLoad( this, AdRequest.ERROR_CODE_INVALID_REQUEST ); } return incent; } // // Utility Methods // private static void log(final int priority, final String message) { if ( LOGGING_ENABLED ) { Log.println( priority, "AppLovinRewardedVideo", message ); } } private static int toAdMobErrorCode(final int applovinErrorCode) { if ( applovinErrorCode == AppLovinErrorCodes.NO_FILL ) { return AdRequest.ERROR_CODE_NO_FILL; } else if ( applovinErrorCode == AppLovinErrorCodes.NO_NETWORK || applovinErrorCode == AppLovinErrorCodes.FETCH_AD_TIMEOUT ) { return AdRequest.ERROR_CODE_NETWORK_ERROR; } else { return AdRequest.ERROR_CODE_INTERNAL_ERROR; } } /** * Reward item wrapper class. */ private static final class AppLovinRewardItem implements RewardItem { private final int amount; private final String type; private AppLovinRewardItem(final int amount, final String type) { this.amount = amount; this.type = type; } @Override public String getType() { return type; } @Override public int getAmount() { return amount; } } /** * Performs the given runnable on the main thread. */ public static void runOnUiThread(final Runnable runnable) { if ( Looper.myLooper() == Looper.getMainLooper() ) { runnable.run(); } else { UI_HANDLER.post( runnable ); } } }
Check SDK version first for consistency's sake
AdMob/Android/ApplovinAdapter.java
Check SDK version first for consistency's sake
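Annotation: the functional diff behind this record is a one-line reorder in loadAd(). The new_contents moves the AppLovinSdk.VERSION_CODE check to the front of the short-circuiting && chain, consistent with the version-first guards already used in initialize() and showVideo(). Side by side, taken from the contents above:

    // old_contents: the Bundle is inspected before the SDK version is known
    if ( networkExtras != null && networkExtras.containsKey( "zone_id" ) && AppLovinSdk.VERSION_CODE >= 750 )

    // new_contents: the cheap SDK version check short-circuits first
    if ( AppLovinSdk.VERSION_CODE >= 750 && networkExtras != null && networkExtras.containsKey( "zone_id" ) )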
Java
mit
d55a09b34d02973f835759d808bf1fe356aa98df
0
kartoFlane/hiervis,kartoFlane/hiervis,kartoFlane/hiervis
package pl.pwr.hiervis.visualisation; import java.awt.Color; import java.awt.geom.Rectangle2D; import java.text.NumberFormat; import java.util.AbstractMap; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedList; import java.util.Map; import java.util.Map.Entry; import java.util.Queue; import org.apache.commons.lang3.tuple.Pair; import basic_hierarchy.interfaces.Hierarchy; import basic_hierarchy.interfaces.Instance; import basic_hierarchy.interfaces.Node; import pl.pwr.hiervis.core.ElementRole; import pl.pwr.hiervis.core.HVConfig; import pl.pwr.hiervis.core.HVConstants; import pl.pwr.hiervis.core.HVContext; import pl.pwr.hiervis.util.Utils; import prefuse.Constants; import prefuse.Visualization; import prefuse.action.ActionList; import prefuse.action.RepaintAction; import prefuse.action.assignment.ColorAction; import prefuse.action.layout.AxisLabelLayout; import prefuse.action.layout.AxisLayout; import prefuse.action.layout.graph.NodeLinkTreeLayout; import prefuse.data.Schema; import prefuse.data.Table; import prefuse.data.Tree; import prefuse.data.Tuple; import prefuse.data.expression.AbstractExpression; import prefuse.data.expression.ComparisonPredicate; import prefuse.data.expression.Literal; import prefuse.data.query.NumberRangeModel; import prefuse.render.AxisRenderer; import prefuse.render.DefaultRendererFactory; import prefuse.render.EdgeRenderer; import prefuse.render.Renderer; import prefuse.render.RendererFactory; import prefuse.util.ColorLib; import prefuse.util.ui.ValuedRangeModel; import prefuse.visual.VisualItem; public class HierarchyProcessor { /** * Processes the currently loaded {@link Hierarchy} and creates a {@link Tree} structure * used to visualize {@link Node}s in that hierarchy. * * @param config * the application config * @param sourceRoot * the root node of the hierarchy * @param availableWidth * the width the layout has to work with * @param availableHeight * the height the layout has to work with * @return a tuple of the Tree structure representing the hierarchy, and TreeLayoutData * associated with it, containing information as to how visualize the tree. */ public static Pair<Tree, TreeLayoutData> buildHierarchyTree( HVConfig config, Node sourceRoot, int availableWidth, int availableHeight ) { Tree tree = new Tree(); tree.addColumn( HVConstants.PREFUSE_NODE_ID_COLUMN_NAME, String.class ); tree.addColumn( HVConstants.PREFUSE_NODE_ROLE_COLUMN_NAME, int.class ); prefuse.data.Node treeRoot = tree.addRoot(); treeRoot.setString( HVConstants.PREFUSE_NODE_ID_COLUMN_NAME, sourceRoot.getId() ); treeRoot.setInt( HVConstants.PREFUSE_NODE_ROLE_COLUMN_NAME, ElementRole.OTHER.getNumber() ); // path from node to root int maxTreeDepth = 0; int maxTreeWidth = 0; // TODO: In order to improve performance, it might be better to change this to a LinkedList, because // HashMap is only quick once it is already built, but the building process itself could be slow. 
HashMap<Integer, Integer> treeLevelToWidth = new HashMap<>(); treeLevelToWidth.put( 0, 1 ); Queue<Map.Entry<prefuse.data.Node, Node>> treeParentToSourceChild = new LinkedList<>(); for ( Node sourceChild : sourceRoot.getChildren() ) { treeParentToSourceChild.add( new AbstractMap.SimpleEntry<prefuse.data.Node, Node>( treeRoot, sourceChild ) ); } while ( !treeParentToSourceChild.isEmpty() ) { Entry<prefuse.data.Node, Node> treeParentAndSourceChild = treeParentToSourceChild.remove(); Node sourceGroup = treeParentAndSourceChild.getValue(); // Create a new tree node based on the source group prefuse.data.Node newNode = tree.addChild( treeParentAndSourceChild.getKey() ); newNode.setString( HVConstants.PREFUSE_NODE_ID_COLUMN_NAME, sourceGroup.getId() ); newNode.setInt( HVConstants.PREFUSE_NODE_ROLE_COLUMN_NAME, ElementRole.OTHER.getNumber() ); // Compute new max tree depth int currentNodeDepth = newNode.getDepth(); maxTreeDepth = Math.max( maxTreeDepth, currentNodeDepth ); // Update the number of nodes on this tree level, for later processing Integer treeLevelWidth = treeLevelToWidth.get( currentNodeDepth ); if ( treeLevelWidth == null ) { treeLevelToWidth.put( currentNodeDepth, 1 ); } else { treeLevelToWidth.put( currentNodeDepth, treeLevelWidth + 1 ); } // Enqueue this group's children for processing for ( Node child : sourceGroup.getChildren() ) { treeParentToSourceChild.add( new AbstractMap.SimpleEntry<prefuse.data.Node, Node>( newNode, child ) ); } } // Tree is complete, now find the max tree width maxTreeWidth = Collections.max( treeLevelToWidth.values() ); TreeLayoutData layoutData = new TreeLayoutData( config, tree, maxTreeDepth, maxTreeWidth, availableWidth, availableHeight ); return Pair.of( tree, layoutData ); } @SuppressWarnings("unchecked") public static void updateNodeRoles( HVContext context, int row ) { Tree hierarchyTree = context.getTree(); HVConfig config = context.getConfig(); // Reset all nodes back to 'other' for ( int i = 0; i < hierarchyTree.getNodeCount(); ++i ) { prefuse.data.Node n = hierarchyTree.getNode( i ); n.setInt( HVConstants.PREFUSE_NODE_ROLE_COLUMN_NAME, ElementRole.OTHER.getNumber() ); } if ( row < 0 ) return; prefuse.data.Node n = hierarchyTree.getNode( row ); LinkedList<prefuse.data.Node> stack = new LinkedList<>(); stack.add( n ); while ( !stack.isEmpty() ) { prefuse.data.Node current = stack.removeFirst(); current.setInt( HVConstants.PREFUSE_NODE_ROLE_COLUMN_NAME, ElementRole.CHILD.getNumber() ); for ( Iterator<prefuse.data.Node> children = current.children(); children.hasNext(); ) { prefuse.data.Node child = children.next(); stack.add( child ); } } if ( config.isDisplayAllPoints() && n.getParent() != null ) { stack = new LinkedList<>(); // when the parent is empty, then we need to search up in the hierarchy because empty // parents are skipped, but displayed on output images prefuse.data.Node directParent = n.getParent(); stack.add( directParent ); while ( !stack.isEmpty() ) { prefuse.data.Node current = stack.removeFirst(); current.setInt( HVConstants.PREFUSE_NODE_ROLE_COLUMN_NAME, ElementRole.INDIRECT_PARENT.getNumber() ); if ( current.getParent() != null ) { stack.add( current.getParent() ); } } directParent.setInt( HVConstants.PREFUSE_NODE_ROLE_COLUMN_NAME, ElementRole.DIRECT_PARENT.getNumber() ); } n.setInt( HVConstants.PREFUSE_NODE_ROLE_COLUMN_NAME, ElementRole.CURRENT.getNumber() ); } @SuppressWarnings("unchecked") public static void updateTreeNodeRoles( HVContext context, String currentGroupId ) { Tree hierarchyTree = context.getTree(); HVConfig 
config = context.getConfig(); if ( context.isHierarchyDataLoaded() ) { boolean found = false; for ( int i = 0; i < hierarchyTree.getNodeCount(); ++i ) { prefuse.data.Node n = hierarchyTree.getNode( i ); // Reset node role to 'other' n.setInt( HVConstants.PREFUSE_NODE_ROLE_COLUMN_NAME, ElementRole.OTHER.getNumber() ); if ( !found && n.getString( HVConstants.PREFUSE_NODE_ID_COLUMN_NAME ).equals( currentGroupId ) ) { found = true; n.setInt( HVConstants.PREFUSE_NODE_ROLE_COLUMN_NAME, ElementRole.CURRENT.getNumber() ); // Color child groups LinkedList<prefuse.data.Node> stack = new LinkedList<>(); stack.add( n ); while ( !stack.isEmpty() ) { prefuse.data.Node current = stack.removeFirst(); current.setInt( HVConstants.PREFUSE_NODE_ROLE_COLUMN_NAME, ElementRole.CHILD.getNumber() ); for ( Iterator<prefuse.data.Node> children = current.children(); children.hasNext(); ) { prefuse.data.Node child = children.next(); stack.add( child ); } } if ( config.isDisplayAllPoints() && n.getParent() != null ) { stack.clear(); // IF the parent is empty, then we need to search up in the hierarchy because empty // parents are skipped, but displayed on output images prefuse.data.Node directParent = n.getParent(); stack.add( directParent ); while ( !stack.isEmpty() ) { prefuse.data.Node current = stack.removeFirst(); current.setInt( HVConstants.PREFUSE_NODE_ROLE_COLUMN_NAME, ElementRole.INDIRECT_PARENT.getNumber() ); if ( current.getParent() != null ) { stack.add( current.getParent() ); } } directParent.setInt( HVConstants.PREFUSE_NODE_ROLE_COLUMN_NAME, ElementRole.DIRECT_PARENT.getNumber() ); } } } } } public static Visualization createTreeVisualization( HVContext context ) { return createTreeVisualization( context, null ); } public static Visualization createTreeVisualization( HVContext context, String currentGroupId ) { updateTreeNodeRoles( context, currentGroupId ); Tree hierarchyTree = context.getTree(); TreeLayoutData layoutData = context.getTreeLayoutData(); HVConfig config = context.getConfig(); Visualization vis = new Visualization(); if ( context.isHierarchyDataLoaded() ) { vis.add( HVConstants.HIERARCHY_DATA_NAME, hierarchyTree ); NodeRenderer r = new NodeRenderer( layoutData.getNodeSize(), config ); DefaultRendererFactory drf = new DefaultRendererFactory( r ); EdgeRenderer edgeRenderer = new EdgeRenderer( prefuse.Constants.EDGE_TYPE_LINE ); drf.setDefaultEdgeRenderer( edgeRenderer ); vis.setRendererFactory( drf ); ColorAction edgesColor = new ColorAction( HVConstants.HIERARCHY_DATA_NAME + ".edges", VisualItem.STROKECOLOR, ColorLib.color( Color.lightGray ) ); NodeLinkTreeLayout treeLayout = new NodeLinkTreeLayout( HVConstants.HIERARCHY_DATA_NAME, layoutData.getTreeOrientation(), layoutData.getDepthSpace(), layoutData.getSiblingSpace(), layoutData.getSubtreeSpace() ); treeLayout.setRootNodeOffset( 0 );// 0.5*finalSizeOfNodes);//offset is set in order to show all nodes on images treeLayout.setLayoutBounds( new Rectangle2D.Double( 0, 0, layoutData.getLayoutWidth(), layoutData.getLayoutHeight() ) ); ActionList layout = new ActionList(); layout.add( treeLayout ); layout.add( new RepaintAction() ); vis.putAction( HVConstants.HIERARCHY_DATA_NAME + ".edges", edgesColor ); vis.putAction( HVConstants.HIERARCHY_DATA_NAME + ".layout", layout ); // TODO we can here implement a heuristic that will check if after enlarging // the border lines (rows and columns) of pixels do not contain other values // than background colour. 
If so, then we are expanding one again, otherwise // we have appropriate size of image } return vis; } public static void layoutVisualization( Visualization vis ) { Utils.waitUntilActivitiesAreFinished(); vis.run( HVConstants.HIERARCHY_DATA_NAME + ".edges" ); vis.run( HVConstants.HIERARCHY_DATA_NAME + ".layout" ); Utils.waitUntilActivitiesAreFinished(); } public static Table createInstanceTable( HVConfig config, Hierarchy hierarchy, Tree hierarchyTree ) { String[] dataNames = getFeatureNames( hierarchy ); Table table = createEmptyInstanceTable( config, dataNames ); processInstanceData( config, hierarchy, hierarchyTree, table ); return table; } /** * If the input file had a first row with column names, then this method returns those names. * If the first row did not contain column names, it creates artificial names ("dimension #") * * @param hierarchy * the hierarchy to get the names for * @return array of names for instance features */ public static String[] getFeatureNames( Hierarchy hierarchy ) { String[] dataNames = hierarchy.getDataNames(); if ( dataNames == null ) { // Input file had no column names -- got to make them up ourselves. try { Instance instance = hierarchy.getRoot().getSubtreeInstances().get( 0 ); int dimCount = instance.getData().length; dataNames = new String[dimCount]; for ( int i = 0; i < dimCount; ++i ) { dataNames[i] = "dimension " + ( i + 1 ); } } catch ( IndexOutOfBoundsException e ) { throw new RuntimeException( "Could not get an instance from the hierarchy. Is the hierarchy empty?" ); } } return dataNames; } /** * Creates a new, empty table used to hold processed instance data. * * @param config * the application config * @param dataNames * array of names for instance features * @return the created table */ private static Table createEmptyInstanceTable( HVConfig config, String[] dataNames ) { Table table = new Table(); for ( int i = 0; i < dataNames.length; ++i ) { table.addColumn( dataNames[i], double.class ); } table.addColumn( HVConstants.PREFUSE_INSTANCE_NODE_COLUMN_NAME, prefuse.data.Node.class ); // table.addColumn( HVConstants.PREFUSE_INSTANCE_VISIBLE_COLUMN_NAME, boolean.class ); // table.addColumn( HVConstants.PREFUSE_NODE_ROLE_COLUMN_NAME, int.class ); if ( config.hasInstanceNameAttribute() ) { table.addColumn( HVConstants.PREFUSE_INSTANCE_LABEL_COLUMN_NAME, String.class ); } return table; } /** * Processes raw hierarchy data and saves it in the specified table. * * @param config * the application config * @param hierarchy * the hierarchy to process * @param hierarchyTree * the processed hierarchy tree * @param table * the table the processed data will be saved in. */ private static void processInstanceData( HVConfig config, Hierarchy hierarchy, Tree hierarchyTree, Table table ) { // TODO: Implement some sort of culling so that we remove overlapping instances? // Could use k-d trees maybe? for ( Instance instance : hierarchy.getRoot().getSubtreeInstances() ) { int row = table.addRow(); double[] data = instance.getData(); for ( int i = 0; i < data.length; ++i ) { table.set( row, i, data[i] ); } prefuse.data.Node node = findGroup( hierarchyTree, config.isUseTrueClass() ? 
instance.getTrueClass() : instance.getNodeId() ); table.set( row, HVConstants.PREFUSE_INSTANCE_NODE_COLUMN_NAME, node ); // table.set( row, HVConstants.PREFUSE_INSTANCE_VISIBLE_COLUMN_NAME, true ); // table.set( row, HVConstants.PREFUSE_NODE_ROLE_COLUMN_NAME, 0 ); if ( config.hasInstanceNameAttribute() ) { table.set( row, HVConstants.PREFUSE_INSTANCE_LABEL_COLUMN_NAME, instance.getInstanceName() ); } } } public static prefuse.data.Node findGroup( Tree hierarchyTree, String name ) { // TODO: // Can potentially speed this up by using a lookup cache in the form of a hash map. // Not sure if worth it, though. int nodeCount = hierarchyTree.getNodeCount(); for ( int i = 0; i < nodeCount; ++i ) { prefuse.data.Node n = hierarchyTree.getNode( i ); if ( n.getString( HVConstants.PREFUSE_NODE_ID_COLUMN_NAME ).equals( name ) ) { return n; } } return null; } public static Visualization createInstanceVisualization( HVContext context, Node group, int pointSize, int dimX, int dimY, boolean withLabels ) { HVConfig config = context.getConfig(); Visualization vis = new Visualization(); String nameLabelsX = "labelsX"; String nameLabelsY = "labelsY"; if ( withLabels ) { vis.setRendererFactory( new RendererFactory() { Renderer rendererAxisX = new AxisRenderer( Constants.CENTER, Constants.FAR_BOTTOM ); Renderer rendererAxisY = new AxisRenderer( Constants.FAR_LEFT, Constants.CENTER ); Renderer rendererPoint = new PointRenderer( new Rectangle2D.Double( 0, 0, pointSize, pointSize ) ); public Renderer getRenderer( VisualItem item ) { if ( item.isInGroup( nameLabelsX ) ) return rendererAxisX; if ( item.isInGroup( nameLabelsY ) ) return rendererAxisY; return rendererPoint; } } ); } else { vis.setRendererFactory( new DefaultRendererFactory( new PointRenderer( new Rectangle2D.Double( 0, 0, pointSize, pointSize ) ) ) ); } Table table = context.getInstanceTable(); vis.addTable( HVConstants.INSTANCE_DATA_NAME, table ); Node root = context.getHierarchy().getRoot(); Rectangle2D bounds = Utils.calculateBoundingRectForCluster( root, dimX, dimY ); AxisLayout axisX = new AxisLayout( HVConstants.INSTANCE_DATA_NAME, table.getColumnName( dimX ), Constants.X_AXIS ); ValuedRangeModel rangeModelX = new NumberRangeModel( bounds.getMinX(), bounds.getMaxX(), bounds.getMinX(), bounds.getMaxX() ); axisX.setRangeModel( rangeModelX ); AxisLayout axisY = new AxisLayout( HVConstants.INSTANCE_DATA_NAME, table.getColumnName( dimY ), Constants.Y_AXIS ); ValuedRangeModel rangeModelY = new NumberRangeModel( bounds.getMinY(), bounds.getMaxY(), bounds.getMinY(), bounds.getMaxY() ); axisY.setRangeModel( rangeModelY ); ColorAction colorize = new ColorAction( HVConstants.INSTANCE_DATA_NAME, VisualItem.FILLCOLOR ); colorize.setDefaultColor( Utils.rgba( Color.MAGENTA ) ); colorize.add( getPredicateFor( ElementRole.CURRENT ), Utils.rgba( config.getCurrentGroupColor() ) ); colorize.add( getPredicateFor( ElementRole.DIRECT_PARENT ), Utils.rgba( config.getParentGroupColor() ) ); colorize.add( getPredicateFor( ElementRole.INDIRECT_PARENT ), Utils.rgba( config.getAncestorGroupColor() ) ); colorize.add( getPredicateFor( ElementRole.CHILD ), Utils.rgba( config.getChildGroupColor() ) ); colorize.add( getPredicateFor( ElementRole.OTHER ), Utils.rgba( config.getOtherGroupColor() ) ); ActionList axisActions = new ActionList(); axisActions.add( axisX ); axisActions.add( axisY ); if ( withLabels ) { AxisLabelLayout labelX = new AxisLabelLayout( nameLabelsX, axisX ); labelX.setNumberFormat( NumberFormat.getNumberInstance() ); labelX.setRangeModel( rangeModelX ); 
labelX.setScale( Constants.LINEAR_SCALE ); AxisLabelLayout labelY = new AxisLabelLayout( nameLabelsY, axisY ); labelY.setNumberFormat( NumberFormat.getNumberInstance() ); labelY.setRangeModel( rangeModelY ); labelY.setScale( Constants.LINEAR_SCALE ); axisActions.add( labelX ); axisActions.add( labelY ); } ActionList drawActions = new ActionList(); drawActions.add( axisActions ); drawActions.add( colorize ); drawActions.add( new RepaintAction() ); vis.putAction( "draw", drawActions ); vis.putAction( "axis", axisActions ); vis.putAction( "repaint", new RepaintAction() ); return vis; } /** * @param elementRole * the {@link ElementRole} to test for * @return creates and returns a predicate which returns true for instances whose node's * {@link ElementRole} is the same as the one passed in argument. */ private static ComparisonPredicate getPredicateFor( ElementRole elementRole ) { return new ComparisonPredicate( ComparisonPredicate.EQ, new InstanceNodeExpression(), Literal.getLiteral( elementRole.getNumber() ) ); } /** * Given a row from the instance data table, extracts the node to which that instance belongs and returns * its {@link ElementRole}. */ @SuppressWarnings("rawtypes") private static class InstanceNodeExpression extends AbstractExpression { public Class getType( Schema s ) { return int.class; } public Object get( Tuple t ) { return getInt( t ); } public int getInt( Tuple t ) { prefuse.data.Node node = (prefuse.data.Node)t.get( HVConstants.PREFUSE_INSTANCE_NODE_COLUMN_NAME ); return node.getInt( HVConstants.PREFUSE_NODE_ROLE_COLUMN_NAME ); } } }
src/pl/pwr/hiervis/visualisation/HierarchyProcessor.java
package pl.pwr.hiervis.visualisation; import java.awt.Color; import java.awt.geom.Rectangle2D; import java.text.NumberFormat; import java.util.AbstractMap; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedList; import java.util.Map; import java.util.Map.Entry; import java.util.Queue; import org.apache.commons.lang3.tuple.Pair; import basic_hierarchy.interfaces.Hierarchy; import basic_hierarchy.interfaces.Instance; import basic_hierarchy.interfaces.Node; import pl.pwr.hiervis.core.ElementRole; import pl.pwr.hiervis.core.HVConfig; import pl.pwr.hiervis.core.HVConstants; import pl.pwr.hiervis.core.HVContext; import pl.pwr.hiervis.util.Utils; import prefuse.Constants; import prefuse.Visualization; import prefuse.action.ActionList; import prefuse.action.RepaintAction; import prefuse.action.assignment.ColorAction; import prefuse.action.layout.AxisLabelLayout; import prefuse.action.layout.AxisLayout; import prefuse.action.layout.graph.NodeLinkTreeLayout; import prefuse.data.Schema; import prefuse.data.Table; import prefuse.data.Tree; import prefuse.data.Tuple; import prefuse.data.expression.AbstractExpression; import prefuse.data.expression.ComparisonPredicate; import prefuse.data.expression.Literal; import prefuse.data.query.NumberRangeModel; import prefuse.render.AxisRenderer; import prefuse.render.DefaultRendererFactory; import prefuse.render.EdgeRenderer; import prefuse.render.Renderer; import prefuse.render.RendererFactory; import prefuse.util.ColorLib; import prefuse.util.ui.ValuedRangeModel; import prefuse.visual.VisualItem; public class HierarchyProcessor { /** * Processes the currently loaded {@link Hierarchy} and creates a {@link Tree} structure * used to visualize {@link Node}s in that hierarchy. * * @param config * the application config * @param sourceRoot * the root node of the hierarchy * @param availableWidth * the width the layout has to work with * @param availableHeight * the height the layout has to work with * @return a tuple of the Tree structure representing the hierarchy, and TreeLayoutData * associated with it, containing information as to how visualize the tree. */ public static Pair<Tree, TreeLayoutData> buildHierarchyTree( HVConfig config, Node sourceRoot, int availableWidth, int availableHeight ) { Tree tree = new Tree(); tree.addColumn( HVConstants.PREFUSE_NODE_ID_COLUMN_NAME, String.class ); tree.addColumn( HVConstants.PREFUSE_NODE_ROLE_COLUMN_NAME, int.class ); prefuse.data.Node treeRoot = tree.addRoot(); treeRoot.setString( HVConstants.PREFUSE_NODE_ID_COLUMN_NAME, sourceRoot.getId() ); treeRoot.setInt( HVConstants.PREFUSE_NODE_ROLE_COLUMN_NAME, ElementRole.OTHER.getNumber() ); // path from node to root int maxTreeDepth = 0; int maxTreeWidth = 0; // TODO: In order to improve performance, it might be better to change this to a LinkedList, because // HashMap is only quick once it is already built, but the building process itself could be slow. 
HashMap<Integer, Integer> treeLevelToWidth = new HashMap<>(); treeLevelToWidth.put( 0, 1 ); Queue<Map.Entry<prefuse.data.Node, Node>> treeParentToSourceChild = new LinkedList<>(); for ( Node sourceChild : sourceRoot.getChildren() ) { treeParentToSourceChild.add( new AbstractMap.SimpleEntry<prefuse.data.Node, Node>( treeRoot, sourceChild ) ); } while ( !treeParentToSourceChild.isEmpty() ) { Entry<prefuse.data.Node, Node> treeParentAndSourceChild = treeParentToSourceChild.remove(); Node sourceGroup = treeParentAndSourceChild.getValue(); // Create a new tree node based on the source group prefuse.data.Node newNode = tree.addChild( treeParentAndSourceChild.getKey() ); newNode.setString( HVConstants.PREFUSE_NODE_ID_COLUMN_NAME, sourceGroup.getId() ); newNode.setInt( HVConstants.PREFUSE_NODE_ROLE_COLUMN_NAME, ElementRole.OTHER.getNumber() ); // Compute new max tree depth int currentNodeDepth = newNode.getDepth(); maxTreeDepth = Math.max( maxTreeDepth, currentNodeDepth ); // Update the number of nodes on this tree level, for later processing Integer treeLevelWidth = treeLevelToWidth.get( currentNodeDepth ); if ( treeLevelWidth == null ) { treeLevelToWidth.put( currentNodeDepth, 1 ); } else { treeLevelToWidth.put( currentNodeDepth, treeLevelWidth + 1 ); } // Enqueue this group's children for processing for ( Node child : sourceGroup.getChildren() ) { treeParentToSourceChild.add( new AbstractMap.SimpleEntry<prefuse.data.Node, Node>( newNode, child ) ); } } // Tree is complete, now find the max tree width maxTreeWidth = Collections.max( treeLevelToWidth.values() ); TreeLayoutData layoutData = new TreeLayoutData( config, tree, maxTreeDepth, maxTreeWidth, availableWidth, availableHeight ); return Pair.of( tree, layoutData ); } @SuppressWarnings("unchecked") public static void updateNodeRoles( HVContext context, int row ) { Tree hierarchyTree = context.getTree(); HVConfig config = context.getConfig(); // Reset all nodes back to 'other' for ( int i = 0; i < hierarchyTree.getNodeCount(); ++i ) { prefuse.data.Node n = hierarchyTree.getNode( i ); n.setInt( HVConstants.PREFUSE_NODE_ROLE_COLUMN_NAME, ElementRole.OTHER.getNumber() ); } if ( row < 0 ) return; prefuse.data.Node n = hierarchyTree.getNode( row ); LinkedList<prefuse.data.Node> stack = new LinkedList<>(); stack.add( n ); while ( !stack.isEmpty() ) { prefuse.data.Node current = stack.removeFirst(); current.setInt( HVConstants.PREFUSE_NODE_ROLE_COLUMN_NAME, ElementRole.CHILD.getNumber() ); for ( Iterator<prefuse.data.Node> children = current.children(); children.hasNext(); ) { prefuse.data.Node child = children.next(); stack.add( child ); } } if ( config.isDisplayAllPoints() && n.getParent() != null ) { stack = new LinkedList<>(); // when the parent is empty, then we need to search up in the hierarchy because empty // parents are skipped, but displayed on output images prefuse.data.Node directParent = n.getParent(); stack.add( directParent ); while ( !stack.isEmpty() ) { prefuse.data.Node current = stack.removeFirst(); current.setInt( HVConstants.PREFUSE_NODE_ROLE_COLUMN_NAME, ElementRole.INDIRECT_PARENT.getNumber() ); if ( current.getParent() != null ) { stack.add( current.getParent() ); } } directParent.setInt( HVConstants.PREFUSE_NODE_ROLE_COLUMN_NAME, ElementRole.DIRECT_PARENT.getNumber() ); } n.setInt( HVConstants.PREFUSE_NODE_ROLE_COLUMN_NAME, ElementRole.CURRENT.getNumber() ); } @SuppressWarnings("unchecked") public static void updateTreeNodeRoles( HVContext context, String currentGroupId ) { Tree hierarchyTree = context.getTree(); HVConfig 
config = context.getConfig(); if ( context.isHierarchyDataLoaded() ) { boolean found = false; for ( int i = 0; i < hierarchyTree.getNodeCount(); ++i ) { prefuse.data.Node n = hierarchyTree.getNode( i ); // Reset node role to 'other' n.setInt( HVConstants.PREFUSE_NODE_ROLE_COLUMN_NAME, ElementRole.OTHER.getNumber() ); if ( !found && n.getString( HVConstants.PREFUSE_NODE_ID_COLUMN_NAME ).equals( currentGroupId ) ) { found = true; n.setInt( HVConstants.PREFUSE_NODE_ROLE_COLUMN_NAME, ElementRole.CURRENT.getNumber() ); // Color child groups LinkedList<prefuse.data.Node> stack = new LinkedList<>(); stack.add( n ); while ( !stack.isEmpty() ) { prefuse.data.Node current = stack.removeFirst(); current.setInt( HVConstants.PREFUSE_NODE_ROLE_COLUMN_NAME, ElementRole.CHILD.getNumber() ); for ( Iterator<prefuse.data.Node> children = current.children(); children.hasNext(); ) { prefuse.data.Node child = children.next(); stack.add( child ); } } if ( config.isDisplayAllPoints() && n.getParent() != null ) { stack.clear(); // IF the parent is empty, then we need to search up in the hierarchy because empty // parents are skipped, but displayed on output images prefuse.data.Node directParent = n.getParent(); stack.add( directParent ); while ( !stack.isEmpty() ) { prefuse.data.Node current = stack.removeFirst(); current.setInt( HVConstants.PREFUSE_NODE_ROLE_COLUMN_NAME, ElementRole.INDIRECT_PARENT.getNumber() ); if ( current.getParent() != null ) { stack.add( current.getParent() ); } } directParent.setInt( HVConstants.PREFUSE_NODE_ROLE_COLUMN_NAME, ElementRole.DIRECT_PARENT.getNumber() ); } } } } } public static Visualization createTreeVisualization( HVContext context ) { return createTreeVisualization( context, null ); } public static Visualization createTreeVisualization( HVContext context, String currentGroupId ) { updateTreeNodeRoles( context, currentGroupId ); Tree hierarchyTree = context.getTree(); TreeLayoutData layoutData = context.getTreeLayoutData(); HVConfig config = context.getConfig(); Visualization vis = new Visualization(); if ( context.isHierarchyDataLoaded() ) { vis.add( HVConstants.HIERARCHY_DATA_NAME, hierarchyTree ); NodeRenderer r = new NodeRenderer( layoutData.getNodeSize(), config ); DefaultRendererFactory drf = new DefaultRendererFactory( r ); EdgeRenderer edgeRenderer = new EdgeRenderer( prefuse.Constants.EDGE_TYPE_LINE ); drf.setDefaultEdgeRenderer( edgeRenderer ); vis.setRendererFactory( drf ); ColorAction edgesColor = new ColorAction( HVConstants.HIERARCHY_DATA_NAME + ".edges", VisualItem.STROKECOLOR, ColorLib.color( Color.lightGray ) ); NodeLinkTreeLayout treeLayout = new NodeLinkTreeLayout( HVConstants.HIERARCHY_DATA_NAME, layoutData.getTreeOrientation(), layoutData.getDepthSpace(), layoutData.getSiblingSpace(), layoutData.getSubtreeSpace() ); treeLayout.setRootNodeOffset( 0 );// 0.5*finalSizeOfNodes);//offset is set in order to show all nodes on images treeLayout.setLayoutBounds( new Rectangle2D.Double( 0, 0, layoutData.getLayoutWidth(), layoutData.getLayoutHeight() ) ); ActionList layout = new ActionList(); layout.add( treeLayout ); layout.add( new RepaintAction() ); vis.putAction( HVConstants.HIERARCHY_DATA_NAME + ".edges", edgesColor ); vis.putAction( HVConstants.HIERARCHY_DATA_NAME + ".layout", layout ); // TODO we can here implement a heuristic that will check if after enlarging // the border lines (rows and columns) of pixels do not contain other values // than background colour. 
If so, then we are expanding one again, otherwise // we have appropriate size of image } return vis; } public static void layoutVisualization( Visualization vis ) { Utils.waitUntilActivitiesAreFinished(); vis.run( HVConstants.HIERARCHY_DATA_NAME + ".edges" ); vis.run( HVConstants.HIERARCHY_DATA_NAME + ".layout" ); Utils.waitUntilActivitiesAreFinished(); } public static Table createInstanceTable( HVConfig config, Hierarchy hierarchy, Tree hierarchyTree ) { String[] dataNames = getFeatureNames( hierarchy ); Table table = createEmptyInstanceTable( config, dataNames ); processInstanceData( config, hierarchy, hierarchyTree, table ); return table; } /** * If the input file had a first row with column names, then this method returns those names. * If the first row did not contain column names, it creates artificial names ("dimension #") * * @param hierarchy * the hierarchy to get the names for * @return array of names for instance features */ public static String[] getFeatureNames( Hierarchy hierarchy ) { String[] dataNames = hierarchy.getDataNames(); if ( dataNames == null ) { // Input file had no column names -- got to make them up ourselves. try { Instance instance = hierarchy.getRoot().getSubtreeInstances().get( 0 ); int dimCount = instance.getData().length; dataNames = new String[dimCount]; for ( int i = 0; i < dimCount; ++i ) { dataNames[i] = "dimension " + ( i + 1 ); } } catch ( IndexOutOfBoundsException e ) { throw new RuntimeException( "Could not get an instance from the hierarchy. Is the hierarchy empty?" ); } } return dataNames; } /** * Creates a new, empty table used to hold processed instance data. * * @param config * the application config * @param dataNames * array of names for instance features * @return the created table */ private static Table createEmptyInstanceTable( HVConfig config, String[] dataNames ) { Table table = new Table(); for ( int i = 0; i < dataNames.length; ++i ) { table.addColumn( dataNames[i], double.class ); } table.addColumn( HVConstants.PREFUSE_INSTANCE_NODE_COLUMN_NAME, prefuse.data.Node.class ); // table.addColumn( HVConstants.PREFUSE_INSTANCE_VISIBLE_COLUMN_NAME, boolean.class ); // table.addColumn( HVConstants.PREFUSE_NODE_ROLE_COLUMN_NAME, int.class ); if ( config.hasInstanceNameAttribute() ) { table.addColumn( HVConstants.PREFUSE_INSTANCE_LABEL_COLUMN_NAME, String.class ); } return table; } /** * Processes raw hierarchy data and saves it in the specified table. * * @param config * the application config * @param hierarchy * the hierarchy to process * @param hierarchyTree * the processed hierarchy tree * @param table * the table the processed data will be saved in. */ private static void processInstanceData( HVConfig config, Hierarchy hierarchy, Tree hierarchyTree, Table table ) { // TODO: Implement some sort of culling so that we remove overlapping instances? // Could use k-d trees maybe? for ( Instance instance : hierarchy.getRoot().getSubtreeInstances() ) { int row = table.addRow(); double[] data = instance.getData(); for ( int i = 0; i < data.length; ++i ) { table.set( row, i, data[i] ); } prefuse.data.Node node = findGroup( hierarchyTree, config.isUseTrueClass() ? 
instance.getTrueClass() : instance.getNodeId() ); table.set( row, HVConstants.PREFUSE_INSTANCE_NODE_COLUMN_NAME, node ); // table.set( row, HVConstants.PREFUSE_INSTANCE_VISIBLE_COLUMN_NAME, true ); // table.set( row, HVConstants.PREFUSE_NODE_ROLE_COLUMN_NAME, 0 ); if ( config.hasInstanceNameAttribute() ) { table.set( row, HVConstants.PREFUSE_INSTANCE_LABEL_COLUMN_NAME, instance.getInstanceName() ); } } } public static prefuse.data.Node findGroup( Tree hierarchyTree, String name ) { // TODO: // Can potentially speed this up by using a lookup cache in the form of a hash map. // Not sure if worth it, though. int nodeCount = hierarchyTree.getNodeCount(); for ( int i = 0; i < nodeCount; ++i ) { prefuse.data.Node n = hierarchyTree.getNode( i ); if ( n.getString( HVConstants.PREFUSE_NODE_ID_COLUMN_NAME ).equals( name ) ) { return n; } } return null; } public static Visualization createInstanceVisualization( HVContext context, Node group, int pointSize, int dimX, int dimY, boolean withLabels ) { HVConfig config = context.getConfig(); Visualization vis = new Visualization(); String nameLabelsX = "labelsX"; String nameLabelsY = "labelsY"; if ( withLabels ) { vis.setRendererFactory( new RendererFactory() { Renderer rendererAxisX = new AxisRenderer( Constants.CENTER, Constants.FAR_BOTTOM ); Renderer rendererAxisY = new AxisRenderer( Constants.FAR_LEFT, Constants.CENTER ); Renderer rendererPoint = new PointRenderer( new Rectangle2D.Double( 0, 0, pointSize, pointSize ) ); public Renderer getRenderer( VisualItem item ) { if ( item.isInGroup( nameLabelsX ) ) return rendererAxisX; if ( item.isInGroup( nameLabelsY ) ) return rendererAxisY; return rendererPoint; } } ); } else { vis.setRendererFactory( new DefaultRendererFactory( new PointRenderer( new Rectangle2D.Double( 0, 0, pointSize, pointSize ) ) ) ); } Table table = context.getInstanceTable(); vis.addTable( HVConstants.INSTANCE_DATA_NAME, table ); Node root = context.getHierarchy().getRoot(); Rectangle2D bounds = Utils.calculateBoundingRectForCluster( root, dimX, dimY ); AxisLayout axisX = new AxisLayout( HVConstants.INSTANCE_DATA_NAME, table.getColumnName( dimX ), Constants.X_AXIS ); ValuedRangeModel rangeModelX = new NumberRangeModel( 0, bounds.getMaxX(), 0, bounds.getMaxX() ); axisX.setRangeModel( rangeModelX ); AxisLayout axisY = new AxisLayout( HVConstants.INSTANCE_DATA_NAME, table.getColumnName( dimY ), Constants.Y_AXIS ); ValuedRangeModel rangeModelY = new NumberRangeModel( 0, bounds.getMaxY(), 0, bounds.getMaxY() ); axisY.setRangeModel( rangeModelY ); ColorAction colorize = new ColorAction( HVConstants.INSTANCE_DATA_NAME, VisualItem.FILLCOLOR ); colorize.setDefaultColor( Utils.rgba( Color.MAGENTA ) ); colorize.add( getPredicateFor( ElementRole.CURRENT ), Utils.rgba( config.getCurrentGroupColor() ) ); colorize.add( getPredicateFor( ElementRole.DIRECT_PARENT ), Utils.rgba( config.getParentGroupColor() ) ); colorize.add( getPredicateFor( ElementRole.INDIRECT_PARENT ), Utils.rgba( config.getAncestorGroupColor() ) ); colorize.add( getPredicateFor( ElementRole.CHILD ), Utils.rgba( config.getChildGroupColor() ) ); colorize.add( getPredicateFor( ElementRole.OTHER ), Utils.rgba( config.getOtherGroupColor() ) ); ActionList axisActions = new ActionList(); axisActions.add( axisX ); axisActions.add( axisY ); if ( withLabels ) { AxisLabelLayout labelX = new AxisLabelLayout( nameLabelsX, axisX ); labelX.setNumberFormat( NumberFormat.getNumberInstance() ); labelX.setRangeModel( rangeModelX ); labelX.setScale( Constants.LINEAR_SCALE ); AxisLabelLayout labelY = 
new AxisLabelLayout( nameLabelsY, axisY ); labelY.setNumberFormat( NumberFormat.getNumberInstance() ); labelY.setRangeModel( rangeModelY ); labelY.setScale( Constants.LINEAR_SCALE ); axisActions.add( labelX ); axisActions.add( labelY ); } ActionList drawActions = new ActionList(); drawActions.add( axisActions ); drawActions.add( colorize ); drawActions.add( new RepaintAction() ); vis.putAction( "draw", drawActions ); vis.putAction( "axis", axisActions ); vis.putAction( "repaint", new RepaintAction() ); return vis; } /** * @param elementRole * the {@link ElementRole} to test for * @return creates and returns a predicate which returns true for instances whose node's * {@link ElementRole} is the same as the one passed in argument. */ private static ComparisonPredicate getPredicateFor( ElementRole elementRole ) { return new ComparisonPredicate( ComparisonPredicate.EQ, new InstanceNodeExpression(), Literal.getLiteral( elementRole.getNumber() ) ); } /** * Given a row from the instance data table, extracts the node to which that instance belongs and returns * its {@link ElementRole}. */ @SuppressWarnings("rawtypes") private static class InstanceNodeExpression extends AbstractExpression { public Class getType( Schema s ) { return int.class; } public Object get( Tuple t ) { return getInt( t ); } public int getInt( Tuple t ) { prefuse.data.Node node = (prefuse.data.Node)t.get( HVConstants.PREFUSE_INSTANCE_NODE_COLUMN_NAME ); return node.getInt( HVConstants.PREFUSE_NODE_ROLE_COLUMN_NAME ); } } }
Fixed axes only displaying positive values.
src/pl/pwr/hiervis/visualisation/HierarchyProcessor.java
Fixed axes only displaying positive values.
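Annotation: the diff behind "Fixed axes only displaying positive values." sits in createInstanceVisualization(). The old_contents pinned the lower bound of both axis range models to 0, so instances with negative coordinates fell outside the displayed range; the new_contents derives the lower bound from the cluster's bounding rectangle instead. The X axis, taken from the contents above (the Y axis changes the same way):

    // old_contents: lower bound hard-coded to 0, negative values are clipped
    ValuedRangeModel rangeModelX = new NumberRangeModel( 0, bounds.getMaxX(), 0, bounds.getMaxX() );

    // new_contents: bounds come from the data, so negative axes render too
    ValuedRangeModel rangeModelX = new NumberRangeModel( bounds.getMinX(), bounds.getMaxX(), bounds.getMinX(), bounds.getMaxX() );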
Java
agpl-3.0
c7f937bb7ac5be54c24c045e1cb0ffaade47a09e
0
mukadder/kc,jwillia/kc-old1,geothomasp/kcmit,iu-uits-es/kc,jwillia/kc-old1,UniversityOfHawaiiORS/kc,kuali/kc,ColostateResearchServices/kc,geothomasp/kcmit,iu-uits-es/kc,ColostateResearchServices/kc,UniversityOfHawaiiORS/kc,jwillia/kc-old1,kuali/kc,geothomasp/kcmit,UniversityOfHawaiiORS/kc,ColostateResearchServices/kc,jwillia/kc-old1,mukadder/kc,kuali/kc,mukadder/kc,geothomasp/kcmit,geothomasp/kcmit,iu-uits-es/kc
/* * Copyright 2006-2008 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl1.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.kra.timeandmoney.web.struts.action; import java.sql.Date; import java.sql.SQLException; import java.text.DateFormat; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.commons.lang.StringUtils; import org.apache.ojb.broker.accesslayer.LookupException; import org.apache.struts.action.ActionForm; import org.apache.struts.action.ActionForward; import org.apache.struts.action.ActionMapping; import org.kuali.kra.award.awardhierarchy.AwardHierarchy; import org.kuali.kra.award.awardhierarchy.AwardHierarchyService; import org.kuali.kra.award.home.Award; import org.kuali.kra.award.home.AwardAmountInfo; import org.kuali.kra.award.timeandmoney.AwardDirectFandADistributionBean; import org.kuali.kra.bo.versioning.VersionHistory; import org.kuali.kra.infrastructure.Constants; import org.kuali.kra.infrastructure.KraServiceLocator; import org.kuali.kra.service.AwardDirectFandADistributionService; import org.kuali.kra.service.VersionHistoryService; import org.kuali.kra.timeandmoney.AwardHierarchyNode; import org.kuali.kra.timeandmoney.TimeAndMoneyForm; import org.kuali.kra.timeandmoney.document.TimeAndMoneyDocument; import org.kuali.kra.timeandmoney.service.ActivePendingTransactionsService; import org.kuali.kra.timeandmoney.service.TimeAndMoneyActionSummaryService; import org.kuali.kra.timeandmoney.service.TimeAndMoneyHistoryService; import org.kuali.kra.timeandmoney.transactions.AwardAmountTransaction; import org.kuali.kra.web.struts.action.KraTransactionalDocumentActionBase; import org.kuali.rice.kew.util.KEWConstants; import org.kuali.rice.kns.document.Document; import org.kuali.rice.kns.service.BusinessObjectService; import org.kuali.rice.kns.util.GlobalVariables; import org.kuali.rice.kns.util.KNSConstants; public class TimeAndMoneyAction extends KraTransactionalDocumentActionBase { BusinessObjectService businessObjectService; private AwardDirectFandADistributionBean awardDirectFandADistributionBean; public TimeAndMoneyAction(){ awardDirectFandADistributionBean = new AwardDirectFandADistributionBean(); } /** * @see org.kuali.kra.web.struts.action.KraTransactionalDocumentActionBase#save(org.apache.struts.action.ActionMapping, org.apache.struts.action.ActionForm, javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse) */ @Override public ActionForward save(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { ActionForward forward = mapping.findForward(Constants.MAPPING_BASIC); TimeAndMoneyForm timeAndMoneyForm = (TimeAndMoneyForm) form; TimeAndMoneyDocument timeAndMoneyDocument = timeAndMoneyForm.getTimeAndMoneyDocument(); forward = super.save(mapping, form, request, response); ActivePendingTransactionsService aptService 
= getActivePendingTransactionsService(); List<AwardAmountInfo> awardAmountInfoObjects = new ArrayList<AwardAmountInfo>(); for(Entry<String, AwardHierarchyNode> awardHierarchyNode : timeAndMoneyDocument.getAwardHierarchyNodes().entrySet()){ Award award = aptService.getActiveAwardVersion(awardHierarchyNode.getValue().getAwardNumber()); AwardAmountInfo aai = aptService.fetchAwardAmountInfoWithHighestTransactionId(award.getAwardAmountInfos()); String reverseAwardNumber = StringUtils.reverse(awardHierarchyNode.getValue().getAwardNumber()); String i= StringUtils.substring(reverseAwardNumber, 0, StringUtils.indexOf(reverseAwardNumber, "0")); int index = Integer.parseInt(StringUtils.reverse(i)); if(timeAndMoneyForm.getAwardHierarchyNodeItems().get(index).getFinalExpirationDate()!=null){ aai.setFinalExpirationDate(timeAndMoneyForm.getAwardHierarchyNodeItems().get(index).getFinalExpirationDate()); } if(timeAndMoneyForm.getAwardHierarchyNodeItems().get(index).getCurrentFundEffectiveDate()!=null){ aai.setCurrentFundEffectiveDate(timeAndMoneyForm.getAwardHierarchyNodeItems().get(index).getCurrentFundEffectiveDate()); } if(timeAndMoneyForm.getAwardHierarchyNodeItems().get(index).getObligationExpirationDate()!=null){ aai.setObligationExpirationDate(timeAndMoneyForm.getAwardHierarchyNodeItems().get(index).getObligationExpirationDate()); } awardAmountInfoObjects.add(aai); } getBusinessObjectService().save(awardAmountInfoObjects); getBusinessObjectService().save(timeAndMoneyDocument.getAward()); return forward; } public ActionForward execute(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { return super.execute(mapping, form, request, response); } public ActionForward refresh(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { return mapping.findForward(Constants.MAPPING_AWARD_BASIC); } @Override public ActionForward docHandler(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { TimeAndMoneyForm timeAndMoneyForm = (TimeAndMoneyForm) form; ActionForward forward = handleDocument(mapping, form, request, response, timeAndMoneyForm); timeAndMoneyForm.initializeFormOrDocumentBasedOnCommand(); String rootAwardNumber = timeAndMoneyForm.getTimeAndMoneyDocument().getRootAwardNumber(); List<String> order = new ArrayList<String>(); timeAndMoneyForm.getTimeAndMoneyDocument().setAwardHierarchyItems(getAwardHierarchyService().getAwardHierarchy(rootAwardNumber, order)); timeAndMoneyForm.getTimeAndMoneyDocument().setAwardNumber(rootAwardNumber); timeAndMoneyForm.setOrder(order); setupHierachyNodes(timeAndMoneyForm.getTimeAndMoneyDocument()); populateOtherPanels(timeAndMoneyForm.getTransactionBean().getNewAwardAmountTransaction(), timeAndMoneyForm, rootAwardNumber); return forward; } private String convertToString(Date date){ if(date!=null){ return date.toString(); }else{ return null; } } public AwardHierarchyService getAwardHierarchyService(){ return (AwardHierarchyService) KraServiceLocator.getService(AwardHierarchyService.class); } protected void setupHierachyNodes(TimeAndMoneyDocument timeAndMoneyDocument){ AwardHierarchyNode awardHierarchyNode; ActivePendingTransactionsService aptService = getActivePendingTransactionsService(); for(Entry<String, AwardHierarchy> awardHierarchy:timeAndMoneyDocument.getAwardHierarchyItems().entrySet()){ awardHierarchyNode = new AwardHierarchyNode(); 
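// Copy the identifiers from the hierarchy entry, then fill in names, dates and amounts from the active award version and its newest AwardAmountInfo.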
awardHierarchyNode.setAwardNumber(awardHierarchy.getValue().getAwardNumber()); awardHierarchyNode.setParentAwardNumber(awardHierarchy.getValue().getParentAwardNumber()); awardHierarchyNode.setRootAwardNumber(awardHierarchy.getValue().getRootAwardNumber()); Award award = aptService.getActiveAwardVersion(awardHierarchy.getValue().getAwardNumber()); AwardAmountInfo awardAmountInfo = aptService.fetchAwardAmountInfoWithHighestTransactionId(award.getAwardAmountInfos()); awardHierarchyNode.setFinalExpirationDate(award.getProjectEndDate()); awardHierarchyNode.setLeadUnitName(award.getUnitName()); awardHierarchyNode.setPrincipalInvestigatorName(award.getPrincipalInvestigatorName()); awardHierarchyNode.setObliDistributableAmount(awardAmountInfo.getObliDistributableAmount()); awardHierarchyNode.setAmountObligatedToDate(awardAmountInfo.getAmountObligatedToDate()); awardHierarchyNode.setAnticipatedTotalAmount(awardAmountInfo.getAnticipatedTotalAmount()); awardHierarchyNode.setAntDistributableAmount(awardAmountInfo.getAntDistributableAmount()); awardHierarchyNode.setCurrentFundEffectiveDate(awardAmountInfo.getCurrentFundEffectiveDate()); awardHierarchyNode.setObligationExpirationDate(awardAmountInfo.getObligationExpirationDate()); timeAndMoneyDocument.getAwardHierarchyNodes().put(awardHierarchyNode.getAwardNumber(), awardHierarchyNode); } } /** * @param mapping * @param form * @param request * @param response * @param awardForm * @return * @throws Exception */ public ActionForward handleDocument(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response, TimeAndMoneyForm timeAndMoneyForm) throws Exception { String command = timeAndMoneyForm.getCommand(); ActionForward forward; if (KEWConstants.ACTIONLIST_INLINE_COMMAND.equals(command)) { String docIdRequestParameter = request.getParameter(KNSConstants.PARAMETER_DOC_ID); Document retrievedDocument = getDocumentService().getByDocumentHeaderId(docIdRequestParameter); timeAndMoneyForm.setDocument(retrievedDocument); request.setAttribute(KNSConstants.PARAMETER_DOC_ID, docIdRequestParameter); ActionForward baseForward = mapping.findForward(Constants.MAPPING_COPY_PROPOSAL_PAGE); forward = new ActionForward(buildForwardStringForActionListCommand( baseForward.getPath(),docIdRequestParameter)); } else { forward = super.docHandler(mapping, form, request, response); } return forward; } public ActionForward addTransaction(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { ((TimeAndMoneyForm) form).getTransactionBean().addPendingTransactionItem(); return mapping.findForward("basic"); } public ActionForward deleteTransaction(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { ((TimeAndMoneyForm) form).getTransactionBean().deletePendingTransactionItem(getLineToDelete(request)); return mapping.findForward("basic"); } public ActionForward approveTransactions(ActionMapping mapping, ActionForm form , HttpServletRequest request, HttpServletResponse response) throws Exception { TimeAndMoneyForm timeAndMoneyForm = (TimeAndMoneyForm)GlobalVariables.getKualiForm(); getActivePendingTransactionsService().approveTransactions(timeAndMoneyForm.getTimeAndMoneyDocument(), timeAndMoneyForm.getTransactionBean().getNewAwardAmountTransaction()); return mapping.findForward("basic"); } public ActionForward switchAward(ActionMapping mapping, ActionForm form , HttpServletRequest request, HttpServletResponse response) throws 
Exception { TimeAndMoneyForm timeAndMoneyForm = (TimeAndMoneyForm)form; TimeAndMoneyDocument doc = timeAndMoneyForm.getTimeAndMoneyDocument(); String goToAwardNumber = timeAndMoneyForm.getGoToAwardNumber(); populateOtherPanels(timeAndMoneyForm.getTransactionBean().getNewAwardAmountTransaction(), timeAndMoneyForm, goToAwardNumber); return mapping.findForward("basic"); }
/** * This method populates the Time and Money document and its history, action summary and transaction panels for the award identified by goToAwardNumber. * @param newAwardAmountTransaction * @param timeAndMoneyForm * @param goToAwardNumber * @throws LookupException * @throws SQLException */ private void populateOtherPanels(AwardAmountTransaction newAwardAmountTransaction, TimeAndMoneyForm timeAndMoneyForm, String goToAwardNumber) throws LookupException, SQLException { Award award = getActiveAwardVersion(goToAwardNumber); TimeAndMoneyDocument timeAndMoneyDocument = timeAndMoneyForm.getTimeAndMoneyDocument(); timeAndMoneyDocument.setAwardNumber(award.getAwardNumber()); timeAndMoneyDocument.setAward(award); if(isNewAward(timeAndMoneyForm) && !(timeAndMoneyDocument.getAward().getBeginDate() == null)){ AwardDirectFandADistributionService awardDirectFandADistributionService = getAwardDirectFandADistributionService(); timeAndMoneyForm.getTimeAndMoneyDocument().getAward().setAwardDirectFandADistributions (awardDirectFandADistributionService. generateDefaultAwardDirectFandADistributionPeriods(timeAndMoneyForm.getTimeAndMoneyDocument().getAward())); } TimeAndMoneyHistoryService tamhs = KraServiceLocator.getService(TimeAndMoneyHistoryService.class); tamhs.getTimeAndMoneyHistory(timeAndMoneyDocument.getAwardNumber(), timeAndMoneyDocument.getTimeAndMoneyHistory(), timeAndMoneyForm.getColumnSpan()); TimeAndMoneyActionSummaryService tamass = KraServiceLocator.getService(TimeAndMoneyActionSummaryService.class); tamass.populateActionSummary(timeAndMoneyDocument.getTimeAndMoneyActionSummaryItems(), timeAndMoneyDocument.getAwardNumber()); timeAndMoneyDocument.setNewAwardAmountTransaction(newAwardAmountTransaction); }
/** * This method tests if the award is new by checking the size of AwardDirectFandADistributions on the Award. * @param timeAndMoneyForm * @return */ public boolean isNewAward(TimeAndMoneyForm timeAndMoneyForm) { return timeAndMoneyForm.getTimeAndMoneyDocument().getAward().getAwardDirectFandADistributions().size() == 0; }
/** * * This method is a helper method to retrieve AwardDirectFandADistributionService. * @return */ protected AwardDirectFandADistributionService getAwardDirectFandADistributionService() { return KraServiceLocator.getService(AwardDirectFandADistributionService.class); }
/** * This method retrieves the active version of the award with the given award number, falling back to a direct lookup when no version history exists. * @param goToAwardNumber */ private Award getActiveAwardVersion(String goToAwardNumber) { VersionHistoryService vhs = KraServiceLocator.getService(VersionHistoryService.class); VersionHistory vh = vhs.findActiveVersion(Award.class, goToAwardNumber); Award award = null; if(vh!=null){ award = (Award) vh.getSequenceOwner(); }else{ BusinessObjectService businessObjectService = KraServiceLocator.getService(BusinessObjectService.class); award = ((List<Award>)businessObjectService.findMatching(Award.class, getHashMap(goToAwardNumber))).get(0); } return award; }
private Map<String, String> getHashMap(String goToAwardNumber) { Map<String, String> map = new HashMap<String,String>(); map.put("awardNumber", goToAwardNumber); return map; } protected ActivePendingTransactionsService getActivePendingTransactionsService(){ return (ActivePendingTransactionsService) KraServiceLocator.getService(ActivePendingTransactionsService.class); }
/** * * This method builds the string for the ActionForward * @param forwardPath * @param docIdRequestParameter * @return */ public String buildForwardStringForActionListCommand(String forwardPath, String docIdRequestParameter){ StringBuilder sb = new StringBuilder(); sb.append(forwardPath); sb.append("?"); sb.append(KNSConstants.PARAMETER_DOC_ID); sb.append("="); sb.append(docIdRequestParameter); return sb.toString(); }
public ActionForward addTransaction(){ return null; } public ActionForward deleteTransaction(){ return null; } public ActionForward submit(){ return null; }
/** * * This method adds a new AwardDirectFandADistribution to the list. * @param mapping * @param form * @param request * @param response * @return * @throws Exception */ public ActionForward addAwardDirectFandADistribution(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { awardDirectFandADistributionBean.addAwardDirectFandADistribution(((TimeAndMoneyForm) form).getAwardDirectFandADistributionBean()); return mapping.findForward(Constants.MAPPING_BASIC); }
/** * * This method removes an AwardDirectFandADistribution from the list. * @param mapping * @param form * @param request * @param response * @return * @throws Exception */ public ActionForward deleteAwardDirectFandADistribution(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { TimeAndMoneyForm timeAndMoneyForm = (TimeAndMoneyForm) form; timeAndMoneyForm.getTimeAndMoneyDocument().getAward().getAwardDirectFandADistributions().remove(getLineToDelete(request)); awardDirectFandADistributionBean.updateBudgetPeriodsAfterDelete(timeAndMoneyForm.getTimeAndMoneyDocument().getAward().getAwardDirectFandADistributions()); return mapping.findForward(Constants.MAPPING_BASIC); }
/** * This method is used to recalculate the Total amounts in the Direct F and A Distribution panel. * * @param mapping * @param form * @param request * @param response * @return mapping forward * @throws Exception */ public ActionForward recalculateDirectFandADistributionTotals(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { return mapping.findForward(Constants.MAPPING_BASIC); }
/** * Gets the businessObjectService attribute. * @return Returns the businessObjectService. */ public BusinessObjectService getBusinessObjectService() { businessObjectService = KraServiceLocator.getService(BusinessObjectService.class); return businessObjectService; } }
src/main/java/org/kuali/kra/timeandmoney/web/struts/action/TimeAndMoneyAction.java
/* * Copyright 2006-2008 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl1.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.kra.timeandmoney.web.struts.action; import java.sql.Date; import java.sql.SQLException; import java.text.DateFormat; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.commons.lang.StringUtils; import org.apache.ojb.broker.accesslayer.LookupException; import org.apache.struts.action.ActionForm; import org.apache.struts.action.ActionForward; import org.apache.struts.action.ActionMapping; import org.kuali.kra.award.awardhierarchy.AwardHierarchy; import org.kuali.kra.award.awardhierarchy.AwardHierarchyService; import org.kuali.kra.award.home.Award; import org.kuali.kra.award.home.AwardAmountInfo; import org.kuali.kra.award.timeandmoney.AwardDirectFandADistributionBean; import org.kuali.kra.bo.versioning.VersionHistory; import org.kuali.kra.infrastructure.Constants; import org.kuali.kra.infrastructure.KraServiceLocator; import org.kuali.kra.service.AwardDirectFandADistributionService; import org.kuali.kra.service.VersionHistoryService; import org.kuali.kra.timeandmoney.AwardHierarchyNode; import org.kuali.kra.timeandmoney.TimeAndMoneyForm; import org.kuali.kra.timeandmoney.document.TimeAndMoneyDocument; import org.kuali.kra.timeandmoney.service.ActivePendingTransactionsService; import org.kuali.kra.timeandmoney.service.TimeAndMoneyActionSummaryService; import org.kuali.kra.timeandmoney.service.TimeAndMoneyHistoryService; import org.kuali.kra.timeandmoney.transactions.AwardAmountTransaction; import org.kuali.kra.web.struts.action.KraTransactionalDocumentActionBase; import org.kuali.rice.kew.util.KEWConstants; import org.kuali.rice.kns.document.Document; import org.kuali.rice.kns.service.BusinessObjectService; import org.kuali.rice.kns.util.GlobalVariables; import org.kuali.rice.kns.util.KNSConstants; public class TimeAndMoneyAction extends KraTransactionalDocumentActionBase { BusinessObjectService businessObjectService; private AwardDirectFandADistributionBean awardDirectFandADistributionBean; public TimeAndMoneyAction(){ awardDirectFandADistributionBean = new AwardDirectFandADistributionBean(); } /** * @see org.kuali.kra.web.struts.action.KraTransactionalDocumentActionBase#save(org.apache.struts.action.ActionMapping, org.apache.struts.action.ActionForm, javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse) */ @Override public ActionForward save(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { ActionForward forward = mapping.findForward(Constants.MAPPING_BASIC); TimeAndMoneyForm timeAndMoneyForm = (TimeAndMoneyForm) form; TimeAndMoneyDocument timeAndMoneyDocument = timeAndMoneyForm.getTimeAndMoneyDocument(); forward = super.save(mapping, form, request, response); ActivePendingTransactionsService aptService 
= getActivePendingTransactionsService(); List<AwardAmountInfo> awardAmountInfoObjects = new ArrayList<AwardAmountInfo>(); for(Entry<String, AwardHierarchyNode> awardHierarchyNode : timeAndMoneyDocument.getAwardHierarchyNodes().entrySet()){ Award award = aptService.getActiveAwardVersion(awardHierarchyNode.getValue().getAwardNumber()); AwardAmountInfo aai = aptService.fetchAwardAmountInfoWithHighestTransactionId(award.getAwardAmountInfos()); String reverseAwardNumber = StringUtils.reverse(awardHierarchyNode.getValue().getAwardNumber()); String i= StringUtils.substring(reverseAwardNumber, 0, StringUtils.indexOf(reverseAwardNumber, "0")); int index = Integer.parseInt(StringUtils.reverse(i)); aai.setFinalExpirationDate(timeAndMoneyForm.getAwardHierarchyNodeItems().get(index).getFinalExpirationDate()); aai.setCurrentFundEffectiveDate(timeAndMoneyForm.getAwardHierarchyNodeItems().get(index).getCurrentFundEffectiveDate()); aai.setObligationExpirationDate(timeAndMoneyForm.getAwardHierarchyNodeItems().get(index).getObligationExpirationDate()); awardAmountInfoObjects.add(aai); } getBusinessObjectService().save(awardAmountInfoObjects); //getBusinessObjectService().save(timeAndMoneyDocument.getAward()); return forward; } public ActionForward execute(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { return super.execute(mapping, form, request, response); } public ActionForward refresh(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { return mapping.findForward(Constants.MAPPING_AWARD_BASIC); } @Override public ActionForward docHandler(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { TimeAndMoneyForm timeAndMoneyForm = (TimeAndMoneyForm) form; ActionForward forward = handleDocument(mapping, form, request, response, timeAndMoneyForm); timeAndMoneyForm.initializeFormOrDocumentBasedOnCommand(); String rootAwardNumber = timeAndMoneyForm.getTimeAndMoneyDocument().getRootAwardNumber(); List<String> order = new ArrayList<String>(); timeAndMoneyForm.getTimeAndMoneyDocument().setAwardHierarchyItems(getAwardHierarchyService().getAwardHierarchy(rootAwardNumber, order)); timeAndMoneyForm.getTimeAndMoneyDocument().setAwardNumber(rootAwardNumber); timeAndMoneyForm.setOrder(order); setupHierachyNodes(timeAndMoneyForm.getTimeAndMoneyDocument()); populateOtherPanels(timeAndMoneyForm.getTransactionBean().getNewAwardAmountTransaction(), timeAndMoneyForm, rootAwardNumber); return forward; } private String convertToString(Date date){ if(date!=null){ return date.toString(); }else{ return null; } } public AwardHierarchyService getAwardHierarchyService(){ return (AwardHierarchyService) KraServiceLocator.getService(AwardHierarchyService.class); } protected void setupHierachyNodes(TimeAndMoneyDocument timeAndMoneyDocument){ AwardHierarchyNode awardHierarchyNode; ActivePendingTransactionsService aptService = getActivePendingTransactionsService(); for(Entry<String, AwardHierarchy> awardHierarchy:timeAndMoneyDocument.getAwardHierarchyItems().entrySet()){ awardHierarchyNode = new AwardHierarchyNode(); awardHierarchyNode.setAwardNumber(awardHierarchy.getValue().getAwardNumber()); awardHierarchyNode.setParentAwardNumber(awardHierarchy.getValue().getParentAwardNumber()); awardHierarchyNode.setRootAwardNumber(awardHierarchy.getValue().getRootAwardNumber()); Award award = 
aptService.getActiveAwardVersion(awardHierarchy.getValue().getAwardNumber()); AwardAmountInfo awardAmountInfo = aptService.fetchAwardAmountInfoWithHighestTransactionId(award.getAwardAmountInfos()); awardHierarchyNode.setFinalExpirationDate(award.getProjectEndDate()); awardHierarchyNode.setLeadUnitName(award.getUnitName()); awardHierarchyNode.setPrincipalInvestigatorName(award.getPrincipalInvestigatorName()); awardHierarchyNode.setObliDistributableAmount(awardAmountInfo.getObliDistributableAmount()); awardHierarchyNode.setAmountObligatedToDate(awardAmountInfo.getAmountObligatedToDate()); awardHierarchyNode.setAnticipatedTotalAmount(awardAmountInfo.getAnticipatedTotalAmount()); awardHierarchyNode.setAntDistributableAmount(awardAmountInfo.getAntDistributableAmount()); awardHierarchyNode.setCurrentFundEffectiveDate(awardAmountInfo.getCurrentFundEffectiveDate()); awardHierarchyNode.setObligationExpirationDate(awardAmountInfo.getObligationExpirationDate()); timeAndMoneyDocument.getAwardHierarchyNodes().put(awardHierarchyNode.getAwardNumber(), awardHierarchyNode); } } /** * @param mapping * @param form * @param request * @param response * @param awardForm * @return * @throws Exception */ public ActionForward handleDocument(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response, TimeAndMoneyForm timeAndMoneyForm) throws Exception { String command = timeAndMoneyForm.getCommand(); ActionForward forward; if (KEWConstants.ACTIONLIST_INLINE_COMMAND.equals(command)) { String docIdRequestParameter = request.getParameter(KNSConstants.PARAMETER_DOC_ID); Document retrievedDocument = getDocumentService().getByDocumentHeaderId(docIdRequestParameter); timeAndMoneyForm.setDocument(retrievedDocument); request.setAttribute(KNSConstants.PARAMETER_DOC_ID, docIdRequestParameter); ActionForward baseForward = mapping.findForward(Constants.MAPPING_COPY_PROPOSAL_PAGE); forward = new ActionForward(buildForwardStringForActionListCommand( baseForward.getPath(),docIdRequestParameter)); } else { forward = super.docHandler(mapping, form, request, response); } return forward; } public ActionForward addTransaction(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { ((TimeAndMoneyForm) form).getTransactionBean().addPendingTransactionItem(); return mapping.findForward("basic"); } public ActionForward deleteTransaction(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { ((TimeAndMoneyForm) form).getTransactionBean().deletePendingTransactionItem(getLineToDelete(request)); return mapping.findForward("basic"); } public ActionForward approveTransactions(ActionMapping mapping, ActionForm form , HttpServletRequest request, HttpServletResponse response) throws Exception { TimeAndMoneyForm timeAndMoneyForm = (TimeAndMoneyForm)GlobalVariables.getKualiForm(); getActivePendingTransactionsService().approveTransactions(timeAndMoneyForm.getTimeAndMoneyDocument(), timeAndMoneyForm.getTransactionBean().getNewAwardAmountTransaction()); return mapping.findForward("basic"); } public ActionForward switchAward(ActionMapping mapping, ActionForm form , HttpServletRequest request, HttpServletResponse response) throws Exception { TimeAndMoneyForm timeAndMoneyForm = (TimeAndMoneyForm)form; TimeAndMoneyDocument doc = timeAndMoneyForm.getTimeAndMoneyDocument(); String goToAwardNumber = timeAndMoneyForm.getGoToAwardNumber(); 
populateOtherPanels(timeAndMoneyForm.getTransactionBean().getNewAwardAmountTransaction(), timeAndMoneyForm, goToAwardNumber); return mapping.findForward("basic"); } /** * This method... * @param timeAndMoneyForm * @param timeAndMoneyForm * @param goToAwardNumber * @throws LookupException * @throws SQLException */ private void populateOtherPanels(AwardAmountTransaction newAwardAmountTransaction, TimeAndMoneyForm timeAndMoneyForm, String goToAwardNumber) throws LookupException, SQLException { Award award = getActiveAwardVersion(goToAwardNumber); TimeAndMoneyDocument timeAndMoneyDocument = timeAndMoneyForm.getTimeAndMoneyDocument(); timeAndMoneyDocument.setAwardNumber(award.getAwardNumber()); timeAndMoneyDocument.setAward(award); if(isNewAward(timeAndMoneyForm) && !(timeAndMoneyDocument.getAward().getBeginDate() == null)){ AwardDirectFandADistributionService awardDirectFandADistributionService = getAwardDirectFandADistributionService(); timeAndMoneyForm.getTimeAndMoneyDocument().getAward().setAwardDirectFandADistributions (awardDirectFandADistributionService. generateDefaultAwardDirectFandADistributionPeriods(timeAndMoneyForm.getTimeAndMoneyDocument().getAward())); } TimeAndMoneyHistoryService tamhs = KraServiceLocator.getService(TimeAndMoneyHistoryService.class); tamhs.getTimeAndMoneyHistory(timeAndMoneyDocument.getAwardNumber(), timeAndMoneyDocument.getTimeAndMoneyHistory(), timeAndMoneyForm.getColumnSpan()); TimeAndMoneyActionSummaryService tamass = KraServiceLocator.getService(TimeAndMoneyActionSummaryService.class); tamass.populateActionSummary(timeAndMoneyDocument.getTimeAndMoneyActionSummaryItems(), timeAndMoneyDocument.getAwardNumber()); timeAndMoneyDocument.setNewAwardAmountTransaction(newAwardAmountTransaction); } /** * This method tests if the award is new by checking the size of AwardDirectFandADistributions on the Award. * @param awardForm * @return */ public boolean isNewAward(TimeAndMoneyForm timeAndMoneyForm) { return timeAndMoneyForm.getTimeAndMoneyDocument().getAward().getAwardDirectFandADistributions().size() == 0; } /** * * This method is a helper method to retrieve AwardSponsorTermService. * @return */ protected AwardDirectFandADistributionService getAwardDirectFandADistributionService() { return KraServiceLocator.getService(AwardDirectFandADistributionService.class); } /** * This method... 
* @param doc * @param goToAwardNumber */ private Award getActiveAwardVersion(String goToAwardNumber) { VersionHistoryService vhs = KraServiceLocator.getService(VersionHistoryService.class); VersionHistory vh = vhs.findActiveVersion(Award.class, goToAwardNumber); Award award = null; if(vh!=null){ award = (Award) vh.getSequenceOwner(); }else{ BusinessObjectService businessObjectService = KraServiceLocator.getService(BusinessObjectService.class); award = ((List<Award>)businessObjectService.findMatching(Award.class, getHashMap(goToAwardNumber))).get(0); } return award; } private Map<String, String> getHashMap(String goToAwardNumber) { Map<String, String> map = new HashMap<String,String>(); map.put("awardNumber", goToAwardNumber); return map; } protected ActivePendingTransactionsService getActivePendingTransactionsService(){ return (ActivePendingTransactionsService) KraServiceLocator.getService(ActivePendingTransactionsService.class); } /** * * This method builds the string for the ActionForward * @param forwardPath * @param docIdRequestParameter * @return */ public String buildForwardStringForActionListCommand(String forwardPath, String docIdRequestParameter){ StringBuilder sb = new StringBuilder(); sb.append(forwardPath); sb.append("?"); sb.append(KNSConstants.PARAMETER_DOC_ID); sb.append("="); sb.append(docIdRequestParameter); return sb.toString(); } public ActionForward addTransaction(){ return null; } public ActionForward deleteTransaction(){ return null; } public ActionForward submit(){ return null; } /** * * This method adds a new AwardDirectFandADistribution to the list. * @param mapping * @param form * @param request * @param response * @return * @throws Exception */ public ActionForward addAwardDirectFandADistribution(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { awardDirectFandADistributionBean.addAwardDirectFandADistribution(((TimeAndMoneyForm) form).getAwardDirectFandADistributionBean()); return mapping.findForward(Constants.MAPPING_BASIC); } /** * * This method removes an AwardDirectFandADistribution from the list. * @param mapping * @param form * @param request * @param response * @return * @throws Exception */ public ActionForward deleteAwardDirectFandADistribution(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { TimeAndMoneyForm timeAndMoneyForm = (TimeAndMoneyForm) form; timeAndMoneyForm.getTimeAndMoneyDocument().getAward().getAwardDirectFandADistributions().remove(getLineToDelete(request)); awardDirectFandADistributionBean.updateBudgetPeriodsAfterDelete(timeAndMoneyForm.getTimeAndMoneyDocument().getAward().getAwardDirectFandADistributions()); return mapping.findForward(Constants.MAPPING_BASIC); } /** * This method is used to recalculate the Total amounts in the Direct F and A Distribution panel. * * @param mapping * @param form * @param request * @param response * @return mapping forward * @throws Exception */ public ActionForward recalculateDirectFandADistributionTotals(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { return mapping.findForward(Constants.MAPPING_BASIC); } /** * Gets the businessObjectService attribute. * @return Returns the businessObjectService. */ public BusinessObjectService getBusinessObjectService() { businessObjectService = KraServiceLocator.getService(BusinessObjectService.class); return businessObjectService; } }
KCAWD-263 - T&M Award Hierarchy View
src/main/java/org/kuali/kra/timeandmoney/web/struts/action/TimeAndMoneyAction.java
KCAWD-263 - T&M Award Hierarchy View
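The index extraction in save() above (reverse the award number, cut at the first '0', reverse back) is easy to misread. Below is a minimal standalone sketch of the same trick; the class name and the sample award number are hypothetical, and StringUtils is the same org.apache.commons.lang.StringUtils that the action imports.

import org.apache.commons.lang.StringUtils;

public class AwardIndexSketch {
    // Reverse the award number, keep the digits before the first '0'
    // (the non-zero tail of the zero-padded child suffix), then reverse back.
    // Like the original code, this assumes the suffix does not end in '0';
    // a number such as "141000-00010" would break it.
    static int parseChildIndex(String awardNumber) {
        String reversed = StringUtils.reverse(awardNumber);
        String tail = StringUtils.substring(reversed, 0, StringUtils.indexOf(reversed, "0"));
        return Integer.parseInt(StringUtils.reverse(tail));
    }

    public static void main(String[] args) {
        // "141000-00012" is a made-up award number in the zero-padded style the code assumes.
        System.out.println(parseChildIndex("141000-00012")); // prints 12
    }
}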
Java
agpl-3.0
8a8d68a72fc07562cfce84721d2f89c82e1f33e2
0
exomiser/Exomiser
/* * The Exomiser - A tool to annotate and prioritize variants * * Copyright (C) 2012 - 2015 Charite Universitätsmedizin Berlin and Genome Research Ltd. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package de.charite.compbio.exomiser.core.analysis; import de.charite.compbio.exomiser.core.analysis.util.*; import de.charite.compbio.exomiser.core.factories.SampleDataFactory; import de.charite.compbio.exomiser.core.factories.VariantDataService; import de.charite.compbio.exomiser.core.factories.VariantFactory; import de.charite.compbio.exomiser.core.filters.*; import de.charite.compbio.exomiser.core.model.Gene; import de.charite.compbio.exomiser.core.model.SampleData; import de.charite.compbio.exomiser.core.model.VariantEvaluation; import de.charite.compbio.exomiser.core.prioritisers.Prioritiser; import de.charite.compbio.exomiser.core.prioritisers.PriorityType; import de.charite.compbio.exomiser.core.prioritisers.ScoringMode; import de.charite.compbio.jannovar.pedigree.ModeOfInheritance; import de.charite.compbio.jannovar.pedigree.Pedigree; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.nio.file.Path; import java.util.*; import java.util.function.Function; import java.util.function.Predicate; import java.util.stream.Stream; import static java.util.stream.Collectors.toConcurrentMap; import static java.util.stream.Collectors.toList; /** * @author Jules Jacobsen <[email protected]> */ public abstract class AbstractAnalysisRunner implements AnalysisRunner { private static final Logger logger = LoggerFactory.getLogger(AbstractAnalysisRunner.class); private final SampleDataFactory sampleDataFactory; private final VariantDataService variantDataService; protected final VariantFilterRunner variantFilterRunner; private final GeneFilterRunner geneFilterRunner; public AbstractAnalysisRunner(SampleDataFactory sampleDataFactory, VariantDataService variantDataService, VariantFilterRunner variantFilterRunner, GeneFilterRunner geneFilterRunner) { this.sampleDataFactory = sampleDataFactory; this.variantDataService = variantDataService; this.variantFilterRunner = variantFilterRunner; this.geneFilterRunner = geneFilterRunner; } @Override public void runAnalysis(Analysis analysis) { final SampleData sampleData = makeSampleDataWithoutGenesOrVariants(analysis); logger.info("Running analysis on sample: {}", sampleData.getSampleNames()); long startAnalysisTimeMillis = System.currentTimeMillis(); final Pedigree pedigree = sampleData.getPedigree(); final Path vcfPath = analysis.getVcfPath(); final List<AnalysisStep> analysisSteps = analysis.getAnalysisSteps(); //should this be optional for people really wanting to screw about with the steps at the risk of catastrophic failure? //it's really an optimiser step of a compiler, so perhaps it should be in the AnalysisParser? new AnalysisStepChecker().check(analysisSteps); //soo many comments - this is a bad sign that this is too complicated. 
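// allGenes starts out as the complete known-gene map keyed by gene symbol; variants get attached to its entries once they survive the filters.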
Map<String, Gene> allGenes = makeKnownGenes(); List<VariantEvaluation> variantEvaluations = new ArrayList<>(); // some kind of multi-map with ordered duplicate keys would allow for easy grouping of steps for running the groups together. List<List<AnalysisStep>> analysisStepGroups = analysis.getAnalysisStepsGroupedByFunction(); boolean variantsLoaded = false; for (List<AnalysisStep> analysisGroup : analysisStepGroups) { //this is admittedly pretty confusing code and I'm sorry. It's easiest to follow if you turn on debugging. //The analysis steps are run in groups of VARIANT_FILTER, GENE_ONLY_DEPENDENT or INHERITANCE_MODE_DEPENDENT AnalysisStep firstStep = analysisGroup.get(0); logger.debug("Running {} group: {}", firstStep.getType(), analysisGroup); if (firstStep.isVariantFilter() & !variantsLoaded) { //variants take up 99% of all the memory in an analysis - this scales approximately linearly with the sample size //so for whole genomes this is best run as a stream to filter out the unwanted variants with as many filters as possible in one go variantEvaluations = loadAndFilterVariants(vcfPath, allGenes, analysisGroup, analysis); //this is done here as there are GeneFilter steps which may require Variants in the genes, or the InheritanceModeDependent steps which definitely need them... assignVariantsToGenes(variantEvaluations, allGenes); variantsLoaded = true; } else { runSteps(analysisGroup, new ArrayList<>(allGenes.values()), pedigree, analysis.getModeOfInheritance()); } } //maybe only the non-variant dependent steps have been run in which case we need to load the variants although //the results might be a bit meaningless. if (!variantsLoaded) { try(Stream<VariantEvaluation> variantStream = loadVariants(vcfPath)) { variantEvaluations = variantStream.collect(toList()); } assignVariantsToGenes(variantEvaluations, allGenes); } final List<Gene> genes = getFinalGeneList(allGenes); sampleData.setGenes(genes); final List<VariantEvaluation> variants = getFinalVariantList(variantEvaluations); sampleData.setVariantEvaluations(variants); scoreGenes(genes, analysis.getScoringMode(), analysis.getModeOfInheritance()); logger.info("Analysed {} genes containing {} filtered variants", genes.size(), variants.size()); logTopNumScoringGenes(5, genes, analysis); long endAnalysisTimeMillis = System.currentTimeMillis(); double analysisTimeSecs = (double) (endAnalysisTimeMillis - startAnalysisTimeMillis) / 1000; logger.info("Finished analysis in {} secs", analysisTimeSecs); } private List<VariantEvaluation> loadAndFilterVariants(Path vcfPath, Map<String, Gene> allGenes, List<AnalysisStep> analysisGroup, Analysis analysis) { // GeneReassigner geneReassigner = createNonCodingVariantGeneReassigner(analysis); List<VariantFilter> variantFilters = getVariantFilterSteps(analysisGroup); List<VariantEvaluation> filteredVariants; final int[] streamed = {0}; final int[] passed = {0}; try (Stream<VariantEvaluation> variantStream = loadVariants(vcfPath)) { filteredVariants = variantStream .map(logLoadedAndPassedVariants(streamed, passed)) //TODO: put back in once this is sorted properly // .map(reassignNonCodingVariantToBestGene(allGenes, geneReassigner)) .filter(isInKnownGene(allGenes)) .filter(runVariantFilters(variantFilters)) .map(logPassedVariants(passed)) .collect(toList()); } logger.info("Loaded {} variants - {} passed variant filters", streamed[0], passed[0]); return filteredVariants; } private GeneReassigner createNonCodingVariantGeneReassigner(Analysis analysis) { TadIndex tadIndex = new 
TadIndex(variantDataService.getTopologicallyAssociatedDomains()); PriorityType mainPriorityType = analysis.getMainPrioritiserType(); return new GeneReassigner(tadIndex, mainPriorityType); } private List<VariantFilter> getVariantFilterSteps(List<AnalysisStep> analysisSteps) { logger.info("Filtering variants with:"); return analysisSteps.stream() .filter(AnalysisStep::isVariantFilter) .map(analysisStep -> { logger.info("{}", analysisStep); return (VariantFilter) analysisStep; }) .collect(toList()); } //yep, logging logic private Function<VariantEvaluation, VariantEvaluation> logLoadedAndPassedVariants(int[] streamed, int[] passed) { return variantEvaluation -> { streamed[0]++; if (streamed[0] % 100000 == 0) { logger.info("Loaded {} variants - {} passed variant filters", streamed[0], passed[0]); } return variantEvaluation; }; } private Function<VariantEvaluation, VariantEvaluation> reassignNonCodingVariantToBestGene(Map<String, Gene> genes, GeneReassigner geneReassigner) { return variantEvaluation -> { geneReassigner.reassignVariantToMostPhenotypicallySimilarGeneInTad(variantEvaluation, genes); return variantEvaluation; }; } /** * Defines the filtering behaviour of the runner when performing the initial load and filter of variants. Allows the * concrete runner to define whether a variant should pass or fail depending on the gene or status of the gene it is * assigned to. * * @param genes * @return */ abstract Predicate<VariantEvaluation> isInKnownGene(Map<String, Gene> genes); /** * Defines the filtering behaviour of the runner when performing the initial load and filter of variants. Allows the * concrete runner to define whether a variant should pass or fail when running the variant through the variant * filters defined in the variant filter group, or the initial group if there are more than one. * * @param variantFilters * @return */ abstract Predicate<VariantEvaluation> runVariantFilters(List<VariantFilter> variantFilters); //more logging logic private Function<VariantEvaluation, VariantEvaluation> logPassedVariants(int[] passed) { return variantEvaluation -> { if (variantEvaluation.passedFilters()) { passed[0]++; } return variantEvaluation; }; } private Stream<VariantEvaluation> loadVariants(Path vcfPath) { VariantFactory variantFactory = sampleDataFactory.getVariantFactory(); //WARNING!!! THIS IS NOT THREADSAFE DO NOT USE PARALLEL STREAMS return variantFactory.streamVariantEvaluations(vcfPath); } private SampleData makeSampleDataWithoutGenesOrVariants(Analysis analysis) { final SampleData sampleData = sampleDataFactory.createSampleDataWithoutVariantsOrGenes(analysis.getVcfPath(), analysis.getPedPath()); analysis.setSampleData(sampleData); return sampleData; } private void assignVariantsToGenes(List<VariantEvaluation> variantEvaluations, Map<String, Gene> allGenes) { for (VariantEvaluation variantEvaluation : variantEvaluations) { Gene gene = allGenes.get(variantEvaluation.getGeneSymbol()); gene.addVariant(variantEvaluation); } } /** * @param allGenes * @return */ protected List<Gene> getFinalGeneList(Map<String, Gene> allGenes) { return allGenes.values() .stream() .filter(gene -> !gene.getVariantEvaluations().isEmpty()) .collect(toList()); } //TODO: make this abstract? we need the individual runners to define the behaviour - also check other protected methods. protected List<VariantEvaluation> getFinalVariantList(List<VariantEvaluation> variants) { return variants; } /** * @return a map of genes indexed by gene symbol. 
*/ private Map<String, Gene> makeKnownGenes() { return sampleDataFactory.createKnownGenes() .parallelStream() .collect(toConcurrentMap(Gene::getGeneSymbol, gene -> gene)); } //might this be a nascent class waiting to get out here? private void runSteps(List<AnalysisStep> analysisSteps, List<Gene> genes, Pedigree pedigree, ModeOfInheritance modeOfInheritance) { boolean inheritanceModesCalculated = false; for (AnalysisStep analysisStep : analysisSteps) { if (!inheritanceModesCalculated && analysisStep.isInheritanceModeDependent()) { analyseGeneCompatibilityWithInheritanceMode(genes, pedigree, modeOfInheritance); inheritanceModesCalculated = true; } runStep(analysisStep, genes); } } private void runStep(AnalysisStep analysisStep, List<Gene> genes) { if (analysisStep.isVariantFilter()) { VariantFilter filter = (VariantFilter) analysisStep; logger.info("Running VariantFilter: {}", filter); for (Gene gene : genes) { variantFilterRunner.run(filter, gene.getVariantEvaluations()); } return; } if (GeneFilter.class.isInstance(analysisStep)) { GeneFilter filter = (GeneFilter) analysisStep; logger.info("Running GeneFilter: {}", filter); geneFilterRunner.run(filter, genes); return; } if (Prioritiser.class.isInstance(analysisStep)) { Prioritiser prioritiser = (Prioritiser) analysisStep; logger.info("Running Prioritiser: {}", prioritiser); prioritiser.prioritizeGenes(genes); } } private void analyseGeneCompatibilityWithInheritanceMode(List<Gene> genes, Pedigree pedigree, ModeOfInheritance modeOfInheritance) { InheritanceModeAnalyser inheritanceModeAnalyser = new InheritanceModeAnalyser(pedigree, modeOfInheritance); logger.info("Checking compatibility with {} inheritance mode for genes which passed filters", modeOfInheritance); inheritanceModeAnalyser.analyseInheritanceModes(genes); } private void scoreGenes(List<Gene> genes, ScoringMode scoreMode, ModeOfInheritance modeOfInheritance) { logger.info("Scoring genes"); GeneScorer geneScorer = getGeneScorer(scoreMode); geneScorer.scoreGenes(genes, modeOfInheritance); } private GeneScorer getGeneScorer(ScoringMode scoreMode) { if (scoreMode == ScoringMode.RANK_BASED) { return new RankBasedGeneScorer(); } return new RawScoreGeneScorer(); } private void logTopNumScoringGenes(int numToLog, List<Gene> genes, Analysis analysis) { if (!genes.isEmpty()) { List<Gene> topScoringGenes = genes.stream().filter(Gene::passedFilters).limit(numToLog).collect(toList()); if (topScoringGenes.isEmpty()) { logger.info("No genes passed analysis :("); return; } logger.info("Top {} scoring genes compatible with phenotypes {} were:", numToLog, analysis.getHpoIds()); topScoringGenes.forEach(topScoringGene -> { logger.info("{}", topScoringGene); topScoringGene.getPassedVariantEvaluations().forEach(variant -> logger.info("{} {}", variant.getGeneSymbol(), variant) ); }); } } }
exomiser-core/src/main/java/de/charite/compbio/exomiser/core/analysis/AbstractAnalysisRunner.java
/* * The Exomiser - A tool to annotate and prioritize variants * * Copyright (C) 2012 - 2015 Charite Universitätsmedizin Berlin and Genome Research Ltd. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package de.charite.compbio.exomiser.core.analysis; import de.charite.compbio.exomiser.core.analysis.util.*; import de.charite.compbio.exomiser.core.factories.SampleDataFactory; import de.charite.compbio.exomiser.core.factories.VariantDataService; import de.charite.compbio.exomiser.core.factories.VariantFactory; import de.charite.compbio.exomiser.core.filters.*; import de.charite.compbio.exomiser.core.model.Gene; import de.charite.compbio.exomiser.core.model.SampleData; import de.charite.compbio.exomiser.core.model.VariantEvaluation; import de.charite.compbio.exomiser.core.prioritisers.Prioritiser; import de.charite.compbio.exomiser.core.prioritisers.PriorityType; import de.charite.compbio.exomiser.core.prioritisers.ScoringMode; import de.charite.compbio.jannovar.pedigree.ModeOfInheritance; import de.charite.compbio.jannovar.pedigree.Pedigree; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.nio.file.Path; import java.util.*; import java.util.function.Function; import java.util.function.Predicate; import java.util.stream.Stream; import static java.util.stream.Collectors.toConcurrentMap; import static java.util.stream.Collectors.toList; /** * @author Jules Jacobsen <[email protected]> */ public abstract class AbstractAnalysisRunner implements AnalysisRunner { private static final Logger logger = LoggerFactory.getLogger(AbstractAnalysisRunner.class); private final SampleDataFactory sampleDataFactory; private final VariantDataService variantDataService; protected final VariantFilterRunner variantFilterRunner; private final GeneFilterRunner geneFilterRunner; public AbstractAnalysisRunner(SampleDataFactory sampleDataFactory, VariantDataService variantDataService, VariantFilterRunner variantFilterRunner, GeneFilterRunner geneFilterRunner) { this.sampleDataFactory = sampleDataFactory; this.variantDataService = variantDataService; this.variantFilterRunner = variantFilterRunner; this.geneFilterRunner = geneFilterRunner; } @Override public void runAnalysis(Analysis analysis) { final SampleData sampleData = makeSampleDataWithoutGenesOrVariants(analysis); logger.info("Running analysis on sample: {}", sampleData.getSampleNames()); long startAnalysisTimeMillis = System.currentTimeMillis(); final Pedigree pedigree = sampleData.getPedigree(); final Path vcfPath = analysis.getVcfPath(); final List<AnalysisStep> analysisSteps = analysis.getAnalysisSteps(); //should this be optional for people really wanting to screw about with the steps at the risk of catastrophic failure? //it's really an optimiser step of a compiler, so perhaps it should be in the AnalysisParser? new AnalysisStepChecker().check(analysisSteps); //soo many comments - this is a bad sign that this is too complicated. 
Map<String, Gene> allGenes = makeKnownGenes(); List<VariantEvaluation> variantEvaluations = new ArrayList<>(); // some kind of multi-map with ordered duplicate keys would allow for easy grouping of steps for running the groups together. List<List<AnalysisStep>> analysisStepGroups = analysis.getAnalysisStepsGroupedByFunction(); boolean variantsLoaded = false; for (List<AnalysisStep> analysisGroup : analysisStepGroups) { //this is admittedly pretty confusing code and I'm sorry. It's easiest to follow if you turn on debugging. //The analysis steps are run in groups of VARIANT_FILTER, GENE_ONLY_DEPENDENT or INHERITANCE_MODE_DEPENDENT AnalysisStep firstStep = analysisGroup.get(0); logger.debug("Running {} group: {}", firstStep.getType(), analysisGroup); if (firstStep.isVariantFilter() & !variantsLoaded) { //variants take up 99% of all the memory in an analysis - this scales approximately linearly with the sample size //so for whole genomes this is best run as a stream to filter out the unwanted variants with as many filters as possible in one go variantEvaluations = loadAndFilterVariants(vcfPath, allGenes, analysisGroup, analysis); //this is done here as there are GeneFilter steps which may require Variants in the genes, or the InheritanceModeDependent steps which definitely need them... assignVariantsToGenes(variantEvaluations, allGenes); variantsLoaded = true; } else { runSteps(analysisGroup, new ArrayList<>(allGenes.values()), pedigree, analysis.getModeOfInheritance()); } } //maybe only the non-variant dependent steps have been run in which case we need to load the variants although //the results might be a bit meaningless. if (!variantsLoaded) { try(Stream<VariantEvaluation> variantStream = loadVariants(vcfPath)) { variantEvaluations = variantStream.collect(toList()); } assignVariantsToGenes(variantEvaluations, allGenes); } final List<Gene> genes = getFinalGeneList(allGenes); sampleData.setGenes(genes); final List<VariantEvaluation> variants = getFinalVariantList(variantEvaluations); sampleData.setVariantEvaluations(variants); scoreGenes(genes, analysis.getScoringMode(), analysis.getModeOfInheritance()); logger.info("Analysed {} genes containing {} filtered variants", genes.size(), variants.size()); logTopNumScoringGenes(5, genes, analysis); long endAnalysisTimeMillis = System.currentTimeMillis(); double analysisTimeSecs = (double) (endAnalysisTimeMillis - startAnalysisTimeMillis) / 1000; logger.info("Finished analysis in {} secs", analysisTimeSecs); } private List<VariantEvaluation> loadAndFilterVariants(Path vcfPath, Map<String, Gene> allGenes, List<AnalysisStep> analysisGroup, Analysis analysis) { GeneReassigner geneReassigner = createNonCodingVariantGeneReassigner(analysis); List<VariantFilter> variantFilters = getVariantFilterSteps(analysisGroup); List<VariantEvaluation> filteredVariants; final int[] streamed = {0}; final int[] passed = {0}; try (Stream<VariantEvaluation> variantStream = loadVariants(vcfPath)) { filteredVariants = variantStream .map(logLoadedAndPassedVariants(streamed, passed)) .map(reassignNonCodingVariantToBestGene(allGenes, geneReassigner)) .filter(isInKnownGene(allGenes)) .filter(runVariantFilters(variantFilters)) .map(logPassedVariants(passed)) .collect(toList()); } logger.info("Loaded {} variants - {} passed variant filters", streamed[0], passed[0]); return filteredVariants; } private GeneReassigner createNonCodingVariantGeneReassigner(Analysis analysis) { TadIndex tadIndex = new TadIndex(variantDataService.getTopologicallyAssociatedDomains()); PriorityType 
mainPriorityType = analysis.getMainPrioritiserType(); return new GeneReassigner(tadIndex, mainPriorityType); } private List<VariantFilter> getVariantFilterSteps(List<AnalysisStep> analysisSteps) { logger.info("Filtering variants with:"); return analysisSteps.stream() .filter(AnalysisStep::isVariantFilter) .map(analysisStep -> { logger.info("{}", analysisStep); return (VariantFilter) analysisStep; }) .collect(toList()); } //yep, logging logic private Function<VariantEvaluation, VariantEvaluation> logLoadedAndPassedVariants(int[] streamed, int[] passed) { return variantEvaluation -> { streamed[0]++; if (streamed[0] % 100000 == 0) { logger.info("Loaded {} variants - {} passed variant filters", streamed[0], passed[0]); } return variantEvaluation; }; } private Function<VariantEvaluation, VariantEvaluation> reassignNonCodingVariantToBestGene(Map<String, Gene> genes, GeneReassigner geneReassigner) { return variantEvaluation -> { geneReassigner.reassignVariantToMostPhenotypicallySimilarGeneInTad(variantEvaluation, genes); return variantEvaluation; }; } /** * Defines the filtering behaviour of the runner when performing the initial load and filter of variants. Allows the * concrete runner to define whether a variant should pass or fail depending on the gene or status of the gene it is * assigned to. * * @param genes * @return */ abstract Predicate<VariantEvaluation> isInKnownGene(Map<String, Gene> genes); /** * Defines the filtering behaviour of the runner when performing the initial load and filter of variants. Allows the * concrete runner to define whether a variant should pass or fail when running the variant through the variant * filters defined in the variant filter group, or the initial group if there are more than one. * * @param variantFilters * @return */ abstract Predicate<VariantEvaluation> runVariantFilters(List<VariantFilter> variantFilters); //more logging logic private Function<VariantEvaluation, VariantEvaluation> logPassedVariants(int[] passed) { return variantEvaluation -> { if (variantEvaluation.passedFilters()) { passed[0]++; } return variantEvaluation; }; } private Stream<VariantEvaluation> loadVariants(Path vcfPath) { VariantFactory variantFactory = sampleDataFactory.getVariantFactory(); //WARNING!!! THIS IS NOT THREADSAFE DO NOT USE PARALLEL STREAMS return variantFactory.streamVariantEvaluations(vcfPath); } private SampleData makeSampleDataWithoutGenesOrVariants(Analysis analysis) { final SampleData sampleData = sampleDataFactory.createSampleDataWithoutVariantsOrGenes(analysis.getVcfPath(), analysis.getPedPath()); analysis.setSampleData(sampleData); return sampleData; } private void assignVariantsToGenes(List<VariantEvaluation> variantEvaluations, Map<String, Gene> allGenes) { for (VariantEvaluation variantEvaluation : variantEvaluations) { Gene gene = allGenes.get(variantEvaluation.getGeneSymbol()); gene.addVariant(variantEvaluation); } } /** * @param allGenes * @return */ protected List<Gene> getFinalGeneList(Map<String, Gene> allGenes) { return allGenes.values() .stream() .filter(gene -> !gene.getVariantEvaluations().isEmpty()) .collect(toList()); } //TODO: make this abstract? we need the individual runners to define the behaviour - also check other protected methods. protected List<VariantEvaluation> getFinalVariantList(List<VariantEvaluation> variants) { return variants; } /** * @return a map of genes indexed by gene symbol. 
*/ private Map<String, Gene> makeKnownGenes() { return sampleDataFactory.createKnownGenes() .parallelStream() .collect(toConcurrentMap(Gene::getGeneSymbol, gene -> gene)); } //might this be a nascent class waiting to get out here? private void runSteps(List<AnalysisStep> analysisSteps, List<Gene> genes, Pedigree pedigree, ModeOfInheritance modeOfInheritance) { boolean inheritanceModesCalculated = false; for (AnalysisStep analysisStep : analysisSteps) { if (!inheritanceModesCalculated && analysisStep.isInheritanceModeDependent()) { analyseGeneCompatibilityWithInheritanceMode(genes, pedigree, modeOfInheritance); inheritanceModesCalculated = true; } runStep(analysisStep, genes); } } private void runStep(AnalysisStep analysisStep, List<Gene> genes) { if (analysisStep.isVariantFilter()) { VariantFilter filter = (VariantFilter) analysisStep; logger.info("Running VariantFilter: {}", filter); for (Gene gene : genes) { variantFilterRunner.run(filter, gene.getVariantEvaluations()); } return; } if (GeneFilter.class.isInstance(analysisStep)) { GeneFilter filter = (GeneFilter) analysisStep; logger.info("Running GeneFilter: {}", filter); geneFilterRunner.run(filter, genes); return; } if (Prioritiser.class.isInstance(analysisStep)) { Prioritiser prioritiser = (Prioritiser) analysisStep; logger.info("Running Prioritiser: {}", prioritiser); prioritiser.prioritizeGenes(genes); } } private void analyseGeneCompatibilityWithInheritanceMode(List<Gene> genes, Pedigree pedigree, ModeOfInheritance modeOfInheritance) { InheritanceModeAnalyser inheritanceModeAnalyser = new InheritanceModeAnalyser(pedigree, modeOfInheritance); logger.info("Checking compatibility with {} inheritance mode for genes which passed filters", modeOfInheritance); inheritanceModeAnalyser.analyseInheritanceModes(genes); } private void scoreGenes(List<Gene> genes, ScoringMode scoreMode, ModeOfInheritance modeOfInheritance) { logger.info("Scoring genes"); GeneScorer geneScorer = getGeneScorer(scoreMode); geneScorer.scoreGenes(genes, modeOfInheritance); } private GeneScorer getGeneScorer(ScoringMode scoreMode) { if (scoreMode == ScoringMode.RANK_BASED) { return new RankBasedGeneScorer(); } return new RawScoreGeneScorer(); } private void logTopNumScoringGenes(int numToLog, List<Gene> genes, Analysis analysis) { if (!genes.isEmpty()) { List<Gene> topScoringGenes = genes.stream().filter(Gene::passedFilters).limit(numToLog).collect(toList()); if (topScoringGenes.isEmpty()) { logger.info("No genes passed analysis :("); return; } logger.info("Top {} scoring genes compatible with phenotypes {} were:", numToLog, analysis.getHpoIds()); topScoringGenes.forEach(topScoringGene -> { logger.info("{}", topScoringGene); topScoringGene.getPassedVariantEvaluations().forEach(variant -> logger.info("{} {}", variant.getGeneSymbol(), variant) ); }); } } }
Removed TAD non-coding variant gene reassignment from AbstractAnalysisRunner until this is working properly.
exomiser-core/src/main/java/de/charite/compbio/exomiser/core/analysis/AbstractAnalysisRunner.java
Removed TAD non-coding variant gene reassignment from AbstractAnalysisRunner until this is working properly.
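The runner above threads progress counters through a sequential Stream by inserting identity-mapping log steps, using single-element int arrays as mutable counters because lambdas may only capture effectively final locals. Below is a minimal standalone sketch of that counting pattern; the class name and sample data are illustrative, not Exomiser API, and like the runner it is not thread-safe, so the stream must stay sequential.

import java.util.List;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class CountingPipelineSketch {

    // Mirrors the identity-mapping log step in the runner: bump the counter,
    // log every so often, and pass the element through unchanged.
    static Function<String, String> countLoaded(int[] loaded) {
        return value -> {
            loaded[0]++;
            if (loaded[0] % 100_000 == 0) {
                System.out.println("Loaded " + loaded[0] + " items");
            }
            return value;
        };
    }

    static Function<String, String> countPassed(int[] passed) {
        return value -> {
            passed[0]++;
            return value;
        };
    }

    public static void main(String[] args) {
        // Single-element arrays stand in for mutable counters, since lambdas
        // can only capture effectively final locals. Not safe for parallel streams.
        int[] loaded = {0};
        int[] passed = {0};

        Predicate<String> variantFilter = s -> s.length() > 2; // stand-in for the real filters

        List<String> result = Stream.of("AC", "ACG", "ACGT", "A")
                .map(countLoaded(loaded))
                .filter(variantFilter)
                .map(countPassed(passed))
                .collect(Collectors.toList());

        System.out.println(loaded[0] + " loaded, " + passed[0] + " passed: " + result);
    }
}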
Java
agpl-3.0
b560e495fa2d72a0894b1c5b928f0e6b9257f080
0
dgray16/libreplan,poum/libreplan,PaulLuchyn/libreplan,LibrePlan/libreplan,Marine-22/libre,Marine-22/libre,skylow95/libreplan,PaulLuchyn/libreplan,skylow95/libreplan,skylow95/libreplan,LibrePlan/libreplan,poum/libreplan,poum/libreplan,LibrePlan/libreplan,dgray16/libreplan,LibrePlan/libreplan,dgray16/libreplan,LibrePlan/libreplan,PaulLuchyn/libreplan,Marine-22/libre,poum/libreplan,PaulLuchyn/libreplan,Marine-22/libre,Marine-22/libre,PaulLuchyn/libreplan,PaulLuchyn/libreplan,LibrePlan/libreplan,dgray16/libreplan,dgray16/libreplan,poum/libreplan,Marine-22/libre,LibrePlan/libreplan,dgray16/libreplan,skylow95/libreplan,skylow95/libreplan,PaulLuchyn/libreplan,skylow95/libreplan,poum/libreplan,dgray16/libreplan
/*
 * This file is part of LibrePlan
 *
 * Copyright (C) 2010-2011 Wireless Galicia, S.L.
 * Copyright (C) 2011-2012 Igalia, S.L.
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

package org.libreplan.web.orders;

import static org.libreplan.web.I18nHelper._;

import java.util.Date;
import java.util.HashMap;
import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.joda.time.LocalDate;
import org.libreplan.business.calendars.entities.BaseCalendar;
import org.libreplan.business.externalcompanies.entities.ExternalCompany;
import org.libreplan.business.orders.daos.IOrderDAO;
import org.libreplan.business.orders.entities.Order;
import org.libreplan.business.templates.entities.OrderTemplate;
import org.libreplan.web.common.ConstraintChecker;
import org.libreplan.web.common.Util;
import org.libreplan.web.common.components.bandboxsearch.BandboxSearch;
import org.libreplan.web.planner.consolidations.AdvanceConsolidationController;
import org.libreplan.web.planner.tabs.MultipleTabsPlannerController;
import org.springframework.beans.factory.annotation.Autowired;
import org.zkoss.zk.ui.Component;
import org.zkoss.zk.ui.Executions;
import org.zkoss.zk.ui.SuspendNotAllowedException;
import org.zkoss.zk.ui.WrongValueException;
import org.zkoss.zk.ui.util.GenericForwardComposer;
import org.zkoss.zul.Checkbox;
import org.zkoss.zul.ComboitemRenderer;
import org.zkoss.zul.Constraint;
import org.zkoss.zul.Datebox;
import org.zkoss.zul.Grid;
import org.zkoss.zul.Textbox;
import org.zkoss.zul.Window;

/**
 * Controller for the creation of an {@link Order} with its principal
 * properties.
* * @author Susana Montes Pedreira <[email protected]> * @author Lorenzo Tilve Álvaro <[email protected]> */ public class ProjectDetailsController extends GenericForwardComposer { private static final Log LOG = LogFactory .getLog(AdvanceConsolidationController.class); private OrderCRUDController orderController; private Grid gridProjectDetails; private BaseCalendar defaultCalendar; private boolean isCodeAutogeneratedInit; private MultipleTabsPlannerController tabs; private Window window; private Datebox initDate; private BandboxSearch bdProjectTemplate; private Textbox txtName; private Datebox deadline; private Checkbox generateCode; @Autowired private IOrderDAO orderDAO; private OrderTemplate template; public ProjectDetailsController() { Window window = (Window) Executions.createComponents( "/orders/_projectDetails.zul", null, new HashMap<String, String>()); try { doAfterCompose(window); } catch (Exception e) { throw new RuntimeException(e); } } @Override public void doAfterCompose(Component comp) throws Exception { super.doAfterCompose(comp); window = (Window) comp; window.setVariable("projectController", this, true); } public void showWindow(OrderCRUDController orderController, MultipleTabsPlannerController tabs) { this.tabs = tabs; this.orderController = orderController; this.defaultCalendar = orderController.getOrder().getCalendar(); this.isCodeAutogeneratedInit = orderController.getOrder() .isCodeAutogenerated(); try { Util.reloadBindings(window); Util.createBindingsFor(gridProjectDetails); Util.reloadBindings(gridProjectDetails); window.doModal(); } catch (SuspendNotAllowedException e) { throw new RuntimeException(e); } catch (InterruptedException e) { throw new RuntimeException(e); } } public void cancel() { clearProperties(); close(); } public void accept() { if (validate()) { if (tabs != null) { tabs.goToOrderDetails(orderController.getOrder()); } if (bdProjectTemplate.getSelectedElement() != null) { OrderTemplate template = (OrderTemplate) bdProjectTemplate .getSelectedElement(); orderController.createFromTemplate(template); } orderController.editNewCreatedOrder(window); } } private boolean validate() { ConstraintChecker.isValid(window); if (initDate.getValue() == null) { showWrongValue(); return false; } if (orderDAO.existsByNameAnotherTransaction(txtName.getValue())) { showWrongName(); return false; } return true; } private void showWrongValue() { throw new WrongValueException(initDate, _("cannot be empty")); } private void showWrongName() { throw new WrongValueException(txtName, _("project name already being used")); } private void close() { window.setVisible(false); } public Order getOrder() { return orderController.getOrder(); } public boolean isCodeAutogenerated() { return orderController.isCodeAutogenerated(); } public void setCodeAutogenerated(boolean codeAutogenerated) { orderController.setCodeAutogeneratedInModel(codeAutogenerated); Util.reloadBindings(gridProjectDetails); } public List<ExternalCompany> getExternalCompaniesAreClient() { return orderController.getExternalCompaniesAreClient(); } public List<BaseCalendar> getBaseCalendars() { return orderController.getBaseCalendars(); } public ComboitemRenderer getBaseCalendarsComboitemRenderer() { return orderController.getBaseCalendarsComboitemRenderer(); } public void setBaseCalendar(BaseCalendar calendar) { orderController.setBaseCalendar(calendar); } private void clearProperties() { Order order = orderController.getOrder(); order.setName(null); // reset the code autogenerated property if (isCodeAutogeneratedInit) { 
order.setCodeAutogenerated(true); } else { order.setCodeAutogenerated(false); order.setCode(""); } order.setCustomer(null); order.setDeadline(null); order.setInitDate(new Date()); order.setCalendar(defaultCalendar); } public Constraint checkConstraintFinishDate() { return new Constraint() { @Override public void validate(Component comp, Object value) throws WrongValueException { Date finishDate = (Date) value; if ((finishDate != null) && (initDate.getValue() != null) && (finishDate.compareTo(initDate.getValue()) < 0)) { deadline.setValue(null); getOrder().setDeadline(null); throw new WrongValueException(comp, _("must be after start date")); } } }; } public Constraint checkConstraintStartDate() { return new Constraint() { @Override public void validate(Component comp, Object value) throws WrongValueException { Date startDate = (Date) value; if ((startDate != null) && (deadline.getValue() != null) && (startDate.compareTo(deadline.getValue()) > 0)) { initDate.setValue(null); getOrder().setInitDate(null); throw new WrongValueException(comp, _("must be lower than end date")); } } }; } public void calculateProjectDates(OrderTemplate template) { LocalDate initLocalDate = new LocalDate() .plusDays(template.getStartAsDaysFromBeginning()); Date initDate = initLocalDate.toDateTimeAtStartOfDay().toDate(); getOrder().setInitDate(initDate); this.initDate.setValue(initDate); if (template.getDeadlineAsDaysFromBeginning() != null ) { LocalDate deadlineLocalDate = initLocalDate.plusDays(template .getDeadlineAsDaysFromBeginning()); Date deadline = deadlineLocalDate.toDateTimeAtStartOfDay().toDate(); getOrder().setDeadline(deadline); this.deadline.setValue(deadline); } else { getOrder().setDeadline(null); this.deadline.setValue(null); } } public OrderTemplate getTemplate() { return template; } public void setTemplate(OrderTemplate template) { this.template = template; if (template == null) { generateCode.setDisabled(false); generateCode.setTooltiptext(""); } else { if (!isCodeAutogenerated()) { setCodeAutogenerated(true); } generateCode.setDisabled(true); generateCode .setTooltiptext(_("Set Code as autogenerated to create a new project from templates")); generateCode.setChecked(true); calculateProjectDates(template); setCalendarFromTemplate(template); } } private void setCalendarFromTemplate(OrderTemplate template) { BaseCalendar calendar = template.getCalendar(); for (BaseCalendar each : getBaseCalendars()) { if (calendar.getId().equals(each.getId())) { setBaseCalendar(each); return; } } } }
libreplan-webapp/src/main/java/org/libreplan/web/orders/ProjectDetailsController.java
/* * This file is part of LibrePlan * * Copyright (C) 2010-2011 Wireless Galicia, S.L. * Copyright (C) 2011-2012 Igalia, S.L. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.libreplan.web.orders; import static org.libreplan.web.I18nHelper._; import java.util.Date; import java.util.HashMap; import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.joda.time.LocalDate; import org.libreplan.business.calendars.entities.BaseCalendar; import org.libreplan.business.externalcompanies.entities.ExternalCompany; import org.libreplan.business.orders.daos.IOrderDAO; import org.libreplan.business.orders.entities.Order; import org.libreplan.business.templates.entities.OrderTemplate; import org.libreplan.web.common.ConstraintChecker; import org.libreplan.web.common.Util; import org.libreplan.web.common.components.bandboxsearch.BandboxSearch; import org.libreplan.web.planner.consolidations.AdvanceConsolidationController; import org.libreplan.web.planner.tabs.MultipleTabsPlannerController; import org.springframework.beans.factory.annotation.Autowired; import org.zkoss.zk.ui.Component; import org.zkoss.zk.ui.Executions; import org.zkoss.zk.ui.SuspendNotAllowedException; import org.zkoss.zk.ui.WrongValueException; import org.zkoss.zk.ui.util.GenericForwardComposer; import org.zkoss.zul.Checkbox; import org.zkoss.zul.ComboitemRenderer; import org.zkoss.zul.Constraint; import org.zkoss.zul.Datebox; import org.zkoss.zul.Grid; import org.zkoss.zul.Textbox; import org.zkoss.zul.Window; /** * Controller for the creation of an {@link order} with its principal * properties. 
* * @author Susana Montes Pedreira <[email protected]> * @author Lorenzo Tilve Álvaro <[email protected]> */ public class ProjectDetailsController extends GenericForwardComposer { private static final Log LOG = LogFactory .getLog(AdvanceConsolidationController.class); private OrderCRUDController orderController; private Grid gridProjectDetails; private BaseCalendar defaultCalendar; private boolean isCodeAutogeneratedInit; private MultipleTabsPlannerController tabs; private Window window; private Datebox initDate; private BandboxSearch bdProjectTemplate; private Textbox txtName; private Datebox deadline; private Checkbox generateCode; @Autowired private IOrderDAO orderDAO; private OrderTemplate template; public ProjectDetailsController() { Window window = (Window) Executions.createComponents( "/orders/_projectDetails.zul", null, new HashMap<String, String>()); try { doAfterCompose(window); } catch (Exception e) { throw new RuntimeException(e); } } @Override public void doAfterCompose(Component comp) throws Exception { super.doAfterCompose(comp); window = (Window) comp; window.setVariable("projectController", this, true); } public void showWindow(OrderCRUDController orderController, MultipleTabsPlannerController tabs) { this.tabs = tabs; this.orderController = orderController; this.defaultCalendar = orderController.getOrder().getCalendar(); this.isCodeAutogeneratedInit = orderController.getOrder() .isCodeAutogenerated(); try { Util.reloadBindings(window); Util.createBindingsFor(gridProjectDetails); Util.reloadBindings(gridProjectDetails); window.doModal(); } catch (SuspendNotAllowedException e) { throw new RuntimeException(e); } catch (InterruptedException e) { throw new RuntimeException(e); } } public void cancel() { clearProperties(); close(); } public void accept() { if (validate()) { if (tabs != null) { tabs.goToOrdersList(); } if (bdProjectTemplate.getSelectedElement() != null) { OrderTemplate template = (OrderTemplate) bdProjectTemplate .getSelectedElement(); orderController.createFromTemplate(template); } orderController.editNewCreatedOrder(window); } } private boolean validate() { ConstraintChecker.isValid(window); if (initDate.getValue() == null) { showWrongValue(); return false; } if (orderDAO.existsByNameAnotherTransaction(txtName.getValue())) { showWrongName(); return false; } return true; } private void showWrongValue() { throw new WrongValueException(initDate, _("cannot be empty")); } private void showWrongName() { throw new WrongValueException(txtName, _("project name already being used")); } private void close() { window.setVisible(false); } public Order getOrder() { return orderController.getOrder(); } public boolean isCodeAutogenerated() { return orderController.isCodeAutogenerated(); } public void setCodeAutogenerated(boolean codeAutogenerated) { orderController.setCodeAutogeneratedInModel(codeAutogenerated); Util.reloadBindings(gridProjectDetails); } public List<ExternalCompany> getExternalCompaniesAreClient() { return orderController.getExternalCompaniesAreClient(); } public List<BaseCalendar> getBaseCalendars() { return orderController.getBaseCalendars(); } public ComboitemRenderer getBaseCalendarsComboitemRenderer() { return orderController.getBaseCalendarsComboitemRenderer(); } public void setBaseCalendar(BaseCalendar calendar) { orderController.setBaseCalendar(calendar); } private void clearProperties() { Order order = orderController.getOrder(); order.setName(null); // reset the code autogenerated property if (isCodeAutogeneratedInit) { 
order.setCodeAutogenerated(true); } else { order.setCodeAutogenerated(false); order.setCode(""); } order.setCustomer(null); order.setDeadline(null); order.setInitDate(new Date()); order.setCalendar(defaultCalendar); } public Constraint checkConstraintFinishDate() { return new Constraint() { @Override public void validate(Component comp, Object value) throws WrongValueException { Date finishDate = (Date) value; if ((finishDate != null) && (initDate.getValue() != null) && (finishDate.compareTo(initDate.getValue()) < 0)) { deadline.setValue(null); getOrder().setDeadline(null); throw new WrongValueException(comp, _("must be after start date")); } } }; } public Constraint checkConstraintStartDate() { return new Constraint() { @Override public void validate(Component comp, Object value) throws WrongValueException { Date startDate = (Date) value; if ((startDate != null) && (deadline.getValue() != null) && (startDate.compareTo(deadline.getValue()) > 0)) { initDate.setValue(null); getOrder().setInitDate(null); throw new WrongValueException(comp, _("must be lower than end date")); } } }; } public void calculateProjectDates(OrderTemplate template) { LocalDate initLocalDate = new LocalDate() .plusDays(template.getStartAsDaysFromBeginning()); Date initDate = initLocalDate.toDateTimeAtStartOfDay().toDate(); getOrder().setInitDate(initDate); this.initDate.setValue(initDate); if (template.getDeadlineAsDaysFromBeginning() != null ) { LocalDate deadlineLocalDate = initLocalDate.plusDays(template .getDeadlineAsDaysFromBeginning()); Date deadline = deadlineLocalDate.toDateTimeAtStartOfDay().toDate(); getOrder().setDeadline(deadline); this.deadline.setValue(deadline); } else { getOrder().setDeadline(null); this.deadline.setValue(null); } } public OrderTemplate getTemplate() { return template; } public void setTemplate(OrderTemplate template) { this.template = template; if (template == null) { generateCode.setDisabled(false); generateCode.setTooltiptext(""); } else { if (!isCodeAutogenerated()) { setCodeAutogenerated(true); } generateCode.setDisabled(true); generateCode .setTooltiptext(_("Set Code as autogenerated to create a new project from templates")); generateCode.setChecked(true); calculateProjectDates(template); setCalendarFromTemplate(template); } } private void setCalendarFromTemplate(OrderTemplate template) { BaseCalendar calendar = template.getCalendar(); for (BaseCalendar each : getBaseCalendars()) { if (calendar.getId().equals(each.getId())) { setBaseCalendar(each); return; } } } }
Bug #1590: Avoid going to the projects list when creating a project FEA: ItEr77S04BugFixing
libreplan-webapp/src/main/java/org/libreplan/web/orders/ProjectDetailsController.java
Bug #1590: Avoid going to the projects list when creating a project
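The fix above keeps the user on the newly created project by calling tabs.goToOrderDetails(order) instead of tabs.goToOrdersList() when the dialog is accepted. The controller also illustrates ZK's cross-field validation idiom, where a Constraint attached to one Datebox reads a sibling Datebox. A minimal sketch of that idiom follows, assuming only the Constraint contract visible in the code above; the class name and constructor are hypothetical, not LibrePlan API.

import java.util.Date;

import org.zkoss.zk.ui.Component;
import org.zkoss.zk.ui.WrongValueException;
import org.zkoss.zul.Constraint;
import org.zkoss.zul.Datebox;

/**
 * Cross-field check: the deadline Datebox is validated against a start-date
 * Datebox, and a WrongValueException attached to the component makes ZK
 * render the message next to the offending field.
 */
public class DeadlineAfterStartConstraint implements Constraint {

    private final Datebox startBox; // stands in for the controller's initDate field

    public DeadlineAfterStartConstraint(Datebox startBox) {
        this.startBox = startBox;
    }

    @Override
    public void validate(Component comp, Object value) throws WrongValueException {
        Date deadline = (Date) value;
        Date start = startBox.getValue();
        if (deadline != null && start != null && deadline.compareTo(start) < 0) {
            // Reject the value; ZK keeps focus on the field until it is fixed.
            throw new WrongValueException(comp, "must be after start date");
        }
    }
}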
Java
agpl-3.0
8e42ee8404abd6deb4a021dfca333188fc945688
0
VoltDB/voltdb,VoltDB/voltdb,VoltDB/voltdb,VoltDB/voltdb,VoltDB/voltdb,VoltDB/voltdb,VoltDB/voltdb
/* This file is part of VoltDB.
 * Copyright (C) 2008-2019 VoltDB Inc.
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as
 * published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with VoltDB. If not, see <http://www.gnu.org/licenses/>.
 */

package org.voltdb.export;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayDeque;
import java.util.Iterator;

import org.voltcore.logging.VoltLogger;
import org.voltcore.utils.DBBPool.BBContainer;
import org.voltdb.CatalogContext;
import org.voltdb.VoltDB;
import org.voltdb.catalog.Table;
import org.voltdb.exportclient.ExportRowSchema;
import org.voltdb.exportclient.ExportRowSchemaSerializer;
import org.voltdb.utils.BinaryDeque;
import org.voltdb.utils.BinaryDeque.BinaryDequeScanner;
import org.voltdb.utils.BinaryDeque.BinaryDequeTruncator;
import org.voltdb.utils.BinaryDeque.BinaryDequeValidator;
import org.voltdb.utils.BinaryDeque.TruncatorResponse;
import org.voltdb.utils.BinaryDequeReader;
import org.voltdb.utils.PersistentBinaryDeque;
import org.voltdb.utils.PersistentBinaryDeque.ByteBufferTruncatorResponse;
import org.voltdb.utils.VoltFile;

/**
 * A customized queue for StreamBlocks that contain export data. The queue is able to
 * overflow to disk when more than two stream blocks are stored
 * as well as persist to disk when sync is invoked. Right now sync doesn't actually do an fsync on
 * the file unless it is specifically requested. It just pushes the two in-memory blocks to the persistent
 * portion of the queue.
 *
 * Export PBD buffer layout:
 * -- Segment Header ---
 * (defined in PBDSegment.java, see comments for segment header layout)
 *
 * -- Export Extra Segment Header ---
 * exportVersion(1) + generationId(8) + schemaLen(4) + tupleSchema(var length) +
 * tableNameLength(4) + tableName(var length) + colNameLength(4) + colName(var length) +
 * colType(1) + colLength(4) + ...
 *
 * --- Common Entry Header ---
 * (defined in PBDSegment.java, see comments for entry header layout)
 *
 * --- Export Entry Header ---
 * seqNo(8) + committedSeqNo(8) + tupleCount(4) + uniqueId(8)
 *
 * --- Row Header ---
 * rowLength(4) + partitionColumnIndex(4) + columnCount(4, includes metadata columns) +
 * nullArrayLength(4) + nullArray(var length)
 *
 * --- Metadata ---
 * TxnId(8) + timestamp(8) + seqNo(8) + partitionId(8) + siteId(8) + exportOperation(1)
 *
 * --- Row Data ---
 * rowData(var length)
 *
 * repeat row header, meta data and row data...
 */
public class StreamBlockQueue {

    private static final VoltLogger exportLog = new VoltLogger("EXPORT");

    public static final String EXPORT_DISABLE_COMPRESSION_OPTION = "EXPORT_DISABLE_COMPRESSION";
    private static final boolean DISABLE_COMPRESSION = Boolean.getBoolean(EXPORT_DISABLE_COMPRESSION_OPTION);

    /**
     * Deque containing references to stream blocks that are in memory. Some of these
     * stream blocks may still be persisted to disk; others are stored completely in memory
     */
    private final ArrayDeque<StreamBlock> m_memoryDeque = new ArrayDeque<StreamBlock>();

    /**
     * A deque for persisting data to disk both for persistence and as a means of overflowing storage
     */
    private BinaryDeque<ExportRowSchema> m_persistentDeque;

    private final String m_nonce;
    private final String m_path;
    private final int m_partitionId;
    private final String m_streamName;
    // The initial generation id of the stream that SBQ currently represents.
    private long m_initialGenerationId;

    private BinaryDequeReader<ExportRowSchema> m_reader;

    public StreamBlockQueue(String path, String nonce, String streamName, int partitionId, long genId)
            throws java.io.IOException {
        // Not a creation by default
        this(path, nonce, streamName, partitionId, genId, false);
    }

    public StreamBlockQueue(String path, String nonce, String streamName, int partitionId, long genId, boolean create)
            throws java.io.IOException {
        m_path = path;
        m_nonce = nonce;
        m_streamName = streamName;
        m_partitionId = partitionId;
        m_initialGenerationId = genId;
        // When creating, delete any existing PBD files
        constructPBD(genId, create);
        if (exportLog.isDebugEnabled()) {
            exportLog.debug(m_nonce + " At SBQ creation, PBD size is "
                    + (m_reader.sizeInBytes() - (8 * m_reader.getNumObjects()))
                    + " initial generation ID is " + m_initialGenerationId);
        }
    }

    public boolean isEmpty() throws IOException {
        if (m_memoryDeque.isEmpty() && m_reader.isEmpty()) {
            return true;
        }
        return false;
    }

    /**
     * Wrapper around the common operation of pulling an element out of the persistent deque.
     * The behavior is complicated (and might change) since the persistent deque can throw an IOException.
     * The poll always removes the element from the persistent queue (although not necessarily removing the
     * file backing, that happens at deleteContents) and will add a reference to the block to the in memory
     * deque unless actuallyPoll is true, in which case the polled block ownership is transferred to the caller.
     *
     * @param actuallyPoll true if this is an actual poll transferring the block to the caller
     * @return the polled block
     */
    private StreamBlock pollPersistentDeque(boolean actuallyPoll) {
        BinaryDequeReader.Entry<ExportRowSchema> entry = null;
        StreamBlock block = null;
        try {
            entry = m_reader.pollEntry(PersistentBinaryDeque.UNSAFE_CONTAINER_FACTORY);
            if (entry != null) {
                ByteBuffer b = entry.getData();
                b.order(ByteOrder.LITTLE_ENDIAN);
                long seqNo = b.getLong(StreamBlock.SEQUENCE_NUMBER_OFFSET);
                long committedSeqNo = b.getLong(StreamBlock.COMMIT_SEQUENCE_NUMBER_OFFSET);
                int tupleCount = b.getInt(StreamBlock.ROW_NUMBER_OFFSET);
                long uniqueId = b.getLong(StreamBlock.UNIQUE_ID_OFFSET);
                block = new StreamBlock(entry, seqNo, committedSeqNo, tupleCount, uniqueId, true);

                // Optionally store a reference to the block in the in-memory deque
                // Note that any in-memory block must have a schema
                if (!actuallyPoll) {
                    assert(entry.getExtraHeader() != null);
                    m_memoryDeque.offer(block);
                }
            }
        } catch (Exception e) {
            exportLog.error("Failed to poll from persistent binary deque", e);
        }
        return block;
    }

    /*
     * Present an iterator that is backed by the blocks
     * that are already loaded as well as blocks that
     * haven't been polled from the persistent deque.
     *
     * The iterator wraps an iterator from the memoryDeque,
     * and regenerates it every time an element is added to the memoryDeque from
     * the persistent deque.
     */
    public Iterator<StreamBlock> iterator() {
        return new Iterator<StreamBlock>() {
            private Iterator<StreamBlock> m_memoryIterator = m_memoryDeque.iterator();

            @Override
            public boolean hasNext() {
                if (m_memoryIterator.hasNext()) {
                    return true;
                } else {
                    if (pollPersistentDeque(false) != null) {
                        m_memoryIterator = m_memoryDeque.iterator();
                        for (int ii = 0; ii < m_memoryDeque.size() - 1; ii++) {
                            m_memoryIterator.next();
                        }
                        return true;
                    }
                }
                return false;
            }

            @Override
            public StreamBlock next() {
                if (m_memoryIterator.hasNext()) {
                    return m_memoryIterator.next();
                }

                StreamBlock block = pollPersistentDeque(false);
                if (block == null) {
                    throw new java.util.NoSuchElementException();
                } else {
                    m_memoryIterator = m_memoryDeque.iterator();
                    for (int ii = 0; ii < m_memoryDeque.size(); ii++) {
                        m_memoryIterator.next();
                    }
                    return block;
                }
            }

            @Override
            public void remove() {
                m_memoryIterator.remove();
            }
        };
    }

    public StreamBlock peek() {
        if (m_memoryDeque.peek() != null) {
            return m_memoryDeque.peek();
        }
        return pollPersistentDeque(false);
    }

    // For test
    public StreamBlock poll() {
        StreamBlock sb = null;
        if (m_memoryDeque.peek() != null) {
            sb = m_memoryDeque.poll();
        } else {
            sb = pollPersistentDeque(true);
        }
        return sb;
    }

    public StreamBlock pop() {
        if (m_memoryDeque.isEmpty()) {
            StreamBlock sb = pollPersistentDeque(true);
            if (sb == null) {
                throw new java.util.NoSuchElementException();
            }
            return sb;
        } else {
            return m_memoryDeque.pop();
        }
    }

    public void updateSchema(ExportRowSchema schema) throws IOException {
        m_persistentDeque.updateExtraHeader(schema);
    }

    /*
     * Only allow two blocks in memory, put the rest in the persistent deque
     */
    public void offer(StreamBlock streamBlock) throws IOException {
        m_persistentDeque.offer(streamBlock.asBBContainer());
        long unreleasedSeqNo = streamBlock.unreleasedSequenceNumber();
        if (m_memoryDeque.size() < 2) {
            StreamBlock fromPBD = pollPersistentDeque(false);
            if ((streamBlock.startSequenceNumber() == fromPBD.startSequenceNumber()) &&
                    (unreleasedSeqNo > streamBlock.startSequenceNumber())) {
                fromPBD.releaseTo(unreleasedSeqNo - 1);
            }
        }
    }

    public void sync() throws IOException {
        m_persistentDeque.sync();
    }

    // Only used in tests, should be removed.
    public long sizeInBytes() throws IOException {
        long memoryBlockUsage = 0;
        for (StreamBlock b : m_memoryDeque) {
            //Use only total size, but throw in the USO
            //to make bookkeeping consistent when flushed to disk
            //Also don't count persisted blocks.
            memoryBlockUsage += b.totalSize();
        }
        //Subtract USO from on disk size
        return memoryBlockUsage + m_reader.sizeInBytes() - (StreamBlock.HEADER_SIZE * m_reader.getNumObjects());
    }

    public void close() throws IOException {
        sync();
        m_persistentDeque.close();
        for (StreamBlock sb : m_memoryDeque) {
            sb.discard();
        }
        m_memoryDeque.clear();
    }

    public void closeAndDelete() throws IOException {
        m_persistentDeque.closeAndDelete();
        for (StreamBlock sb : m_memoryDeque) {
            sb.discard();
        }
    }

    // See PBD segment layout at beginning of this file.
    public void truncateToSequenceNumber(final long truncationSeqNo) throws IOException {
        assert(m_memoryDeque.isEmpty());
        m_persistentDeque.parseAndTruncate(new BinaryDequeTruncator() {
            @Override
            public TruncatorResponse parse(BBContainer bbc) {
                ByteBuffer b = bbc.b();
                ByteOrder endianness = b.order();
                b.order(ByteOrder.LITTLE_ENDIAN);
                try {
                    final long startSequenceNumber = b.getLong();
                    // If the truncation is before the first row in the block, the entire block is to be discarded
                    if (startSequenceNumber > truncationSeqNo) {
                        return PersistentBinaryDeque.fullTruncateResponse();
                    }
                    b.getLong(); // committedSequenceNumber
                    final int tupleCountPos = b.position();
                    final int tupleCount = b.getInt();
                    // There is nothing to do with this buffer
                    final long lastSequenceNumber = startSequenceNumber + tupleCount - 1;
                    if (lastSequenceNumber <= truncationSeqNo) {
                        return null;
                    }
                    b.getLong(); // uniqueId

                    // Partial truncation
                    int offset = 0;
                    while (b.hasRemaining()) {
                        if (startSequenceNumber + offset > truncationSeqNo) {
                            // The sequence number of this row is greater than the truncation sequence number.
                            // Don't want this row, but want to preserve all rows before it.
                            // Move back before the row length prefix, txnId and header
                            // Return everything in the block before the truncation point.
                            // Indicate this is the end of the interesting data.
                            b.limit(b.position());
                            // update tuple count in the header
                            b.putInt(tupleCountPos, offset);
                            b.position(0);
                            return new ByteBufferTruncatorResponse(b);
                        }
                        offset++;
                        // Not the row we are looking to truncate at. Skip past it (row length + row length field).
                        final int rowLength = b.getInt();
                        b.position(b.position() + rowLength);
                    }
                    return null;
                } finally {
                    b.order(endianness);
                }
            }
        });
        // close and reopen the reader
        m_persistentDeque.close();
        CatalogContext catalogContext = VoltDB.instance().getCatalogContext();
        constructPBD(catalogContext.m_genId, false);
        // temporary debug stmt
        exportLog.info("After truncate, PBD size is " + (m_reader.sizeInBytes() - (8 * m_reader.getNumObjects())));
    }

    public ExportSequenceNumberTracker scanForGap() throws IOException {
        ExportSequenceNumberTracker tracker = new ExportSequenceNumberTracker();
        m_persistentDeque.scanEntries(new BinaryDequeScanner() {
            @Override
            public long scan(BBContainer bbc) {
                ByteBuffer b = bbc.b();
                ByteOrder endianness = b.order();
                b.order(ByteOrder.LITTLE_ENDIAN);
                final long startSequenceNumber = b.getLong();
                b.getLong(); // committed sequence number
                final int tupleCount = b.getInt();
                final long endSequenceNumber = startSequenceNumber + tupleCount - 1;
                b.order(endianness);
                tracker.addRange(startSequenceNumber, endSequenceNumber);
                return endSequenceNumber;
            }
        });
        return tracker;
    }

    public boolean deleteStaleBlocks(long generationId) throws IOException {
        boolean didCleanup = m_persistentDeque.deletePBDSegment(new BinaryDequeValidator<ExportRowSchema>() {
            @Override
            public boolean isStale(ExportRowSchema extraHeader) {
                assert (extraHeader != null);
                boolean fromOlderGeneration = extraHeader.initialGenerationId < generationId;
                if (exportLog.isDebugEnabled() && fromOlderGeneration) {
                    exportLog.debug("Delete PBD segments of "
                            + (extraHeader.tableName + "_" + extraHeader.partitionId)
                            + " from older generation " + extraHeader.initialGenerationId);
                }
                return fromOlderGeneration;
            }
        });

        if (generationId != m_initialGenerationId) {
            m_initialGenerationId = generationId;
            if (exportLog.isDebugEnabled()) {
                exportLog.debug("Update created generation id of " + m_nonce + " to " + generationId);
            }
        }

        if (didCleanup) {
            // Close and reopen
            close();
CatalogContext catalogContext = VoltDB.instance().getCatalogContext(); constructPBD(catalogContext.m_genId, false); } return didCleanup; } public long getGenerationIdCreated() { return m_initialGenerationId; } private void constructPBD(long genId, boolean deleteExisting) throws IOException { Table streamTable = VoltDB.instance().getCatalogContext().database.getTables().get(m_streamName); ExportRowSchema schema = ExportRowSchema.create(streamTable, m_partitionId, m_initialGenerationId, genId); ExportRowSchemaSerializer serializer = new ExportRowSchemaSerializer(); m_persistentDeque = PersistentBinaryDeque.builder(m_nonce, new VoltFile(m_path), exportLog) .initialExtraHeader(schema, serializer) .compression(!DISABLE_COMPRESSION) .deleteExisting(deleteExisting) .build(); m_reader = m_persistentDeque.openForRead(m_nonce); } @Override public void finalize() { try { int nonEmptyCnt = 0; nonEmptyCnt = m_memoryDeque.stream().filter((block) -> (!block.isPersisted())).map((_item) -> 1).reduce(nonEmptyCnt, Integer::sum); if (nonEmptyCnt > 0) { exportLog.error("Finalized StreamBlockQueue with " + nonEmptyCnt + " items in the memory deque that are not persisted. Path: " + m_path + " Nonce: " + m_nonce); } } finally { try { super.finalize(); } catch (Throwable ex) { ; } } } }
src/frontend/org/voltdb/export/StreamBlockQueue.java
/* This file is part of VoltDB. * Copyright (C) 2008-2019 VoltDB Inc. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with VoltDB. If not, see <http://www.gnu.org/licenses/>. */ package org.voltdb.export; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.ArrayDeque; import java.util.Iterator; import org.voltcore.logging.VoltLogger; import org.voltcore.utils.DBBPool.BBContainer; import org.voltdb.CatalogContext; import org.voltdb.VoltDB; import org.voltdb.catalog.Table; import org.voltdb.exportclient.ExportRowSchema; import org.voltdb.exportclient.ExportRowSchemaSerializer; import org.voltdb.utils.BinaryDeque; import org.voltdb.utils.BinaryDeque.BinaryDequeScanner; import org.voltdb.utils.BinaryDeque.BinaryDequeTruncator; import org.voltdb.utils.BinaryDeque.BinaryDequeValidator; import org.voltdb.utils.BinaryDeque.TruncatorResponse; import org.voltdb.utils.BinaryDequeReader; import org.voltdb.utils.PersistentBinaryDeque; import org.voltdb.utils.PersistentBinaryDeque.ByteBufferTruncatorResponse; import org.voltdb.utils.VoltFile; /** * A customized queue for StreamBlocks that contain export data. The queue is able to * overflow to disk when more then two stream blocks are stored * as well as persist to disk when sync is invoked. Right now sync doesn't actually do an fsync on * the file unless it is specifically requested. It just pushed the two in memory blocks to the persistent * portion of the queue. * * Export PBD buffer layout: * -- Segment Header --- * (defined in PBDSegment.java, see comments for segment header layout) * * -- Export Extra Segment Header --- * exportVersion(1) + generationId(8) + schemaLen(4) + tupleSchema(var length) + * tableNameLength(4) + tableName(var length) + colNameLength(4) + colName(var length) + * colType(1) + colLength(4) + ... * * --- Common Entry Header --- * (defined in PBDSegment.java, see comments for entry header layout) * * --- Export Entry Header --- * seqNo(8) + committedSeqNo(8) + tupleCount(4) + uniqueId(8) * * --- Row Header --- * rowLength(4) + partitionColumnIndex(4) + columnCount(4, includes metadata columns) + * nullArrayLength(4) + nullArray(var length) * * --- Metadata --- * TxnId(8) + timestamp(8) + seqNo(8) + partitionId(8) + siteId(8) + exportOperation(1) * * --- Row Data --- * rowData(var length) * * repeat row header, meta data and row data... */ public class StreamBlockQueue { private static final VoltLogger exportLog = new VoltLogger("EXPORT"); public static final String EXPORT_DISABLE_COMPRESSION_OPTION = "EXPORT_DISABLE_COMPRESSION"; private static final boolean DISABLE_COMPRESSION = Boolean.getBoolean(EXPORT_DISABLE_COMPRESSION_OPTION); /** * Deque containing reference to stream blocks that are in memory. 
Some of these * stream blocks may still be persisted to disk others are stored completely in memory */ private final ArrayDeque<StreamBlock> m_memoryDeque = new ArrayDeque<StreamBlock>(); /** * A deque for persisting data to disk both for persistence and as a means of overflowing storage */ private BinaryDeque<ExportRowSchema> m_persistentDeque; private final String m_nonce; private final String m_path; private final int m_partitionId; private final String m_streamName; // The initial generation id of the stream that SBQ currently represents. private long m_initialGenerationId; private BinaryDequeReader<ExportRowSchema> m_reader; public StreamBlockQueue(String path, String nonce, String streamName, int partitionId, long genId) throws java.io.IOException { // Not a creation by default this(path, nonce, streamName, partitionId, genId, false); } public StreamBlockQueue(String path, String nonce, String streamName, int partitionId, long genId, boolean create) throws java.io.IOException { m_path = path; m_nonce = nonce; m_streamName = streamName; m_partitionId = partitionId; m_initialGenerationId = genId; // When creating, delete any existing PBD files constructPBD(genId, create); if (exportLog.isDebugEnabled()) { exportLog.debug(m_nonce + " At SBQ creation, PBD size is " + (m_reader.sizeInBytes() - (8 * m_reader.getNumObjects())) + " initial generation ID is " + m_initialGenerationId); } } public boolean isEmpty() throws IOException { if (m_memoryDeque.isEmpty() && m_reader.isEmpty()) { return true; } return false; } /** * Wrapper around the common operation of pulling an element out of the persistent deque. * The behavior is complicated (and might change) since the persistent deque can throw an IOException. * The poll always removes the element from the persistent queue (although not necessarily removing the * file backing, that happens at deleteContents) and will add a reference to the block to the in memory * deque unless actuallyPoll is true, in which case the polled block ownership is transferred to the caller. * * @param actuallyPoll true if this is an actual poll transferring the block to the caller * @return the polled block */ private StreamBlock pollPersistentDeque(boolean actuallyPoll) { BinaryDequeReader.Entry<ExportRowSchema> entry = null; StreamBlock block = null; try { entry = m_reader.pollEntry(PersistentBinaryDeque.UNSAFE_CONTAINER_FACTORY); if (entry != null) { ByteBuffer b = entry.getData(); b.order(ByteOrder.LITTLE_ENDIAN); long seqNo = b.getLong(StreamBlock.SEQUENCE_NUMBER_OFFSET); long committedSeqNo = b.getLong(StreamBlock.COMMIT_SEQUENCE_NUMBER_OFFSET); int tupleCount = b.getInt(StreamBlock.ROW_NUMBER_OFFSET); long uniqueId = b.getLong(StreamBlock.UNIQUE_ID_OFFSET); block = new StreamBlock(entry, seqNo, committedSeqNo, tupleCount, uniqueId, true); // Optionally store a reference to the block in the in memory deque // Note that any in-memory block must have a schema if (!actuallyPoll) { assert(entry.getExtraHeader() != null); m_memoryDeque.offer(block); } } } catch (Exception e) { exportLog.error("Failed to poll from persistent binary deque", e); } return block; } /* * Present an iterator that is backed by the blocks * that are already loaded as well as blocks that * haven't been polled from the persistent deque. * * The iterator wraps an iterator from the memoryDeque, * and regenerates it every time an element is added to the memoryDeque from * the persistent deque. 
*/ public Iterator<StreamBlock> iterator() { return new Iterator<StreamBlock>() { private Iterator<StreamBlock> m_memoryIterator = m_memoryDeque.iterator(); @Override public boolean hasNext() { if (m_memoryIterator.hasNext()) { return true; } else { if (pollPersistentDeque(false) != null) { m_memoryIterator = m_memoryDeque.iterator(); for (int ii = 0; ii < m_memoryDeque.size() - 1; ii++) { m_memoryIterator.next(); } return true; } } return false; } @Override public StreamBlock next() { if (m_memoryIterator.hasNext()) { return m_memoryIterator.next(); } StreamBlock block = pollPersistentDeque(false); if (block == null) { throw new java.util.NoSuchElementException(); } else { m_memoryIterator = m_memoryDeque.iterator(); for (int ii = 0; ii < m_memoryDeque.size(); ii++) { m_memoryIterator.next(); } return block; } } @Override public void remove() { m_memoryIterator.remove(); } }; } public StreamBlock peek() { if (m_memoryDeque.peek() != null) { return m_memoryDeque.peek(); } return pollPersistentDeque(false); } // For test public StreamBlock poll() { StreamBlock sb = null; if (m_memoryDeque.peek() != null) { sb = m_memoryDeque.poll(); } else { sb = pollPersistentDeque(true); } return sb; } public StreamBlock pop() { if (m_memoryDeque.isEmpty()) { StreamBlock sb = pollPersistentDeque(true); if (sb == null) { throw new java.util.NoSuchElementException(); } return sb; } else { return m_memoryDeque.pop(); } } public void updateSchema(ExportRowSchema schema) throws IOException { m_persistentDeque.updateExtraHeader(schema); } /* * Only allow two blocks in memory, put the rest in the persistent deque */ public void offer(StreamBlock streamBlock) throws IOException { m_persistentDeque.offer(streamBlock.asBBContainer()); long unreleasedSeqNo = streamBlock.unreleasedSequenceNumber(); if (m_memoryDeque.size() < 2) { StreamBlock fromPBD = pollPersistentDeque(false); if ((streamBlock.startSequenceNumber() == fromPBD.startSequenceNumber()) && (unreleasedSeqNo > streamBlock.startSequenceNumber())) { fromPBD.releaseTo(unreleasedSeqNo - 1); } } } public void sync() throws IOException { m_persistentDeque.sync(); } // Only used in tests, should be removed. public long sizeInBytes() throws IOException { long memoryBlockUsage = 0; for (StreamBlock b : m_memoryDeque) { //Use only total size, but throw in the USO //to make book keeping consistent when flushed to disk //Also dont count persisted blocks. memoryBlockUsage += b.totalSize(); } //Subtract USO from on disk size return memoryBlockUsage + m_reader.sizeInBytes() - (StreamBlock.HEADER_SIZE * m_reader.getNumObjects()); } public void close() throws IOException { sync(); m_persistentDeque.close(); for (StreamBlock sb : m_memoryDeque) { sb.discard(); } m_memoryDeque.clear(); } public void closeAndDelete() throws IOException { m_persistentDeque.closeAndDelete(); for (StreamBlock sb : m_memoryDeque) { sb.discard(); } } // See PDB segment layout at beginning of this file. 
public void truncateToSequenceNumber(final long truncationSeqNo) throws IOException { assert(m_memoryDeque.isEmpty()); m_persistentDeque.parseAndTruncate(new BinaryDequeTruncator() { @Override public TruncatorResponse parse(BBContainer bbc) { ByteBuffer b = bbc.b(); ByteOrder endianness = b.order(); b.order(ByteOrder.LITTLE_ENDIAN); try { final long startSequenceNumber = b.getLong(); // If after the truncation point is the first row in the block, the entire block is to be discarded if (startSequenceNumber >= truncationSeqNo) { return PersistentBinaryDeque.fullTruncateResponse(); } b.getLong(); // committedSequenceNumber final int tupleCountPos = b.position(); final int tupleCount = b.getInt(); // There is nothing to do with this buffer final long lastSequenceNumber = startSequenceNumber + tupleCount - 1; if (lastSequenceNumber <= truncationSeqNo) { return null; } b.getLong(); // uniqueId // Partial truncation int offset = 0; while (b.hasRemaining()) { if (startSequenceNumber + offset > truncationSeqNo) { // The sequence number of this row is the greater than the truncation sequence number. // Don't want this row, but want to preserve all rows before it. // Move back before the row length prefix, txnId and header // Return everything in the block before the truncation point. // Indicate this is the end of the interesting data. b.limit(b.position()); // update tuple count in the header b.putInt(tupleCountPos, offset); b.position(0); return new ByteBufferTruncatorResponse(b); } offset++; // Not the row we are looking to truncate at. Skip past it (row length + row length field). final int rowLength = b.getInt(); b.position(b.position() + rowLength); } return null; } finally { b.order(endianness); } } }); // close reopen reader m_persistentDeque.close(); CatalogContext catalogContext = VoltDB.instance().getCatalogContext(); constructPBD(catalogContext.m_genId, false); // temporary debug stmt exportLog.info("After truncate, PBD size is " + (m_reader.sizeInBytes() - (8 * m_reader.getNumObjects()))); } public ExportSequenceNumberTracker scanForGap() throws IOException { ExportSequenceNumberTracker tracker = new ExportSequenceNumberTracker(); m_persistentDeque.scanEntries(new BinaryDequeScanner() { @Override public long scan(BBContainer bbc) { ByteBuffer b = bbc.b(); ByteOrder endianness = b.order(); b.order(ByteOrder.LITTLE_ENDIAN); final long startSequenceNumber = b.getLong(); b.getLong(); // committed sequence number final int tupleCount = b.getInt(); final long endSequenceNumber = startSequenceNumber + tupleCount - 1; b.order(endianness); tracker.addRange(startSequenceNumber, endSequenceNumber); return endSequenceNumber; } }); return tracker; } public boolean deleteStaleBlocks(long generationId) throws IOException { boolean didCleanup = m_persistentDeque.deletePBDSegment(new BinaryDequeValidator<ExportRowSchema>() { @Override public boolean isStale(ExportRowSchema extraHeader) { assert (extraHeader != null); boolean fromOlderGeneration = extraHeader.initialGenerationId < generationId; if (exportLog.isDebugEnabled() && fromOlderGeneration) { exportLog.debug("Delete PBD segments of " + (extraHeader.tableName + "_" + extraHeader.partitionId) + " from older generation " + extraHeader.initialGenerationId); } return fromOlderGeneration; } }); if (generationId != m_initialGenerationId) { m_initialGenerationId = generationId; if (exportLog.isDebugEnabled()) { exportLog.debug("Update created generation id of " + m_nonce + " to " + generationId); } } if (didCleanup) { // Close and reopen close(); 
CatalogContext catalogContext = VoltDB.instance().getCatalogContext(); constructPBD(catalogContext.m_genId, false); } return didCleanup; } public long getGenerationIdCreated() { return m_initialGenerationId; } private void constructPBD(long genId, boolean deleteExisting) throws IOException { Table streamTable = VoltDB.instance().getCatalogContext().database.getTables().get(m_streamName); ExportRowSchema schema = ExportRowSchema.create(streamTable, m_partitionId, m_initialGenerationId, genId); ExportRowSchemaSerializer serializer = new ExportRowSchemaSerializer(); m_persistentDeque = PersistentBinaryDeque.builder(m_nonce, new VoltFile(m_path), exportLog) .initialExtraHeader(schema, serializer) .compression(!DISABLE_COMPRESSION) .deleteExisting(deleteExisting) .build(); m_reader = m_persistentDeque.openForRead(m_nonce); } @Override public void finalize() { try { int nonEmptyCnt = 0; nonEmptyCnt = m_memoryDeque.stream().filter((block) -> (!block.isPersisted())).map((_item) -> 1).reduce(nonEmptyCnt, Integer::sum); if (nonEmptyCnt > 0) { exportLog.error("Finalized StreamBlockQueue with " + nonEmptyCnt + " items in the memory deque that are not persisted. Path: " + m_path + " Nonce: " + m_nonce); } } finally { try { super.finalize(); } catch (Throwable ex) { ; } } } }
ENG-19074: fix truncation of last segment with 1-row buffer (#6876)
src/frontend/org/voltdb/export/StreamBlockQueue.java
ENG-19074: fix truncation of last segment with 1-row buffer (#6876)
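The essence of the fix above is a single comparison: a block is fully discarded only when its first sequence number is strictly greater than the truncation point, whereas the old '>=' also swallowed a block whose only row sat exactly at the truncation point. The toy decision table below isolates that boundary case; it is illustrative Java, not VoltDB code.

/**
 * decide() uses the corrected strict comparison from the fix; decideOld()
 * shows the pre-fix '>=' variant. With a one-row block whose single sequence
 * number equals the truncation point, the old predicate dropped the block
 * even though that row is at, not past, the target.
 */
public class TruncationBoundarySketch {

    enum Action { FULL_TRUNCATE, KEEP_ALL, PARTIAL }

    static Action decide(long start, int tupleCount, long truncSeq) {
        if (start > truncSeq) {                 // block begins after the cut point
            return Action.FULL_TRUNCATE;
        }
        long last = start + tupleCount - 1;
        if (last <= truncSeq) {                 // whole block at or before the cut point
            return Action.KEEP_ALL;
        }
        return Action.PARTIAL;
    }

    static Action decideOld(long start, int tupleCount, long truncSeq) {
        if (start >= truncSeq) {                // pre-fix: also swallows start == truncSeq
            return Action.FULL_TRUNCATE;
        }
        long last = start + tupleCount - 1;
        return last <= truncSeq ? Action.KEEP_ALL : Action.PARTIAL;
    }

    public static void main(String[] args) {
        // One-row block containing sequence number 42, truncating to 42:
        System.out.println("fixed: " + decide(42, 1, 42));     // KEEP_ALL
        System.out.println("old:   " + decideOld(42, 1, 42));  // FULL_TRUNCATE (the bug)
    }
}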
Java
lgpl-2.1
bf4bf7d142bee39bcc112e435dafbf89179e0bb4
0
gytis/narayana,tomjenkinson/narayana,mmusgrov/narayana,jbosstm/narayana,mmusgrov/narayana,jbosstm/narayana,mmusgrov/narayana,gytis/narayana,jbosstm/narayana,tomjenkinson/narayana,tomjenkinson/narayana,tomjenkinson/narayana,gytis/narayana,gytis/narayana,gytis/narayana,jbosstm/narayana,gytis/narayana,mmusgrov/narayana
/* * JBoss, Home of Professional Open Source * Copyright 2006, Red Hat Middleware LLC, and individual contributors * as indicated by the @author tags. * See the copyright.txt in the distribution for a * full listing of individual contributors. * This copyrighted material is made available to anyone wishing to use, * modify, copy, or redistribute it subject to the terms and conditions * of the GNU Lesser General Public License, v. 2.1. * This program is distributed in the hope that it will be useful, but WITHOUT A * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A * PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. * You should have received a copy of the GNU Lesser General Public License, * v.2.1 along with this distribution; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, * MA 02110-1301, USA. * * (C) 2005-2006, * @author JBoss Inc. */ /* * Copyright (C) 2004, * * Arjuna Technologies Ltd, * Newcastle upon Tyne, * Tyne and Wear, * UK. * * $Id: xidcheck.java 2342 2006-03-30 13:06:17Z $ */ package com.hp.mwtests.ts.jts.local.transactions; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import org.junit.Test; import org.omg.CosTransactions.Control; import org.omg.CosTransactions.Coordinator; import org.omg.CosTransactions.Terminator; import org.omg.CosTransactions.Inactive; import org.omg.CosTransactions.NoTransaction; import org.omg.CosTransactions.otid_t; import org.omg.CosTransactions.PropagationContext; import org.omg.CosTransactions.Status; import com.arjuna.ArjunaOTS.TransactionType; import com.arjuna.ArjunaOTS.GlobalTransactionInfo; import com.arjuna.ArjunaOTS.TransactionInfo; import com.arjuna.ats.arjuna.common.Uid; import com.arjuna.ats.arjuna.common.arjPropertyManager; import com.arjuna.ats.internal.jts.orbspecific.ControlImple; import com.arjuna.ats.internal.jts.orbspecific.TransactionFactoryImple; import com.arjuna.ats.jts.utils.Utility; import com.hp.mwtests.ts.jts.resources.TestBase; public class TransactionFactoryUnitTest extends TestBase { @Test public void testBasic () throws Exception { TransactionFactoryImple factory = new TransactionFactoryImple("test"); arjPropertyManager.getCoordinatorEnvironmentBean().setEnableStatistics(true); try { factory.numberOfTransactions(TransactionType.TransactionTypeActive); // fail(); } catch (final Inactive ex) { } catch (final NoTransaction ex) { } ControlImple tx = factory.createLocal(1000); assertTrue(tx != null); org.omg.CosTransactions.otid_t[] txId = null; try { txId = factory.numberOfTransactions(TransactionType.TransactionTypeActive); } catch (final Throwable ex) { fail(); } try { if (factory.getChildTransactions(txId[0]) != null) fail(); } catch (final Throwable ex) { fail(); } org.omg.CosTransactions.Status status = factory.getCurrentStatus(txId[0]); assertTrue(status == org.omg.CosTransactions.Status.StatusActive); assertTrue(factory.getStatus(txId[0]) == org.omg.CosTransactions.Status.StatusActive); Control proxy = factory.createProxy(tx.get_coordinator(), tx.get_terminator()); assertTrue(proxy != null); Control propagated = factory.createPropagatedControl(tx.get_coordinator()); assertTrue(propagated != null); assertTrue(Utility.getUid(proxy).equals(Utility.getUid(propagated))); GlobalTransactionInfo info = factory.getGlobalInfo(); assertTrue(info != null); assertEquals(info.totalNumberOfTransactions, 1); assertEquals(info.numberOfHeuristics, 0); 
factory.numberOfTransactions(TransactionType.TransactionTypeUnresolved); try { tx.getImplHandle().rollback(); } catch (final Throwable ex) { } } @Test public void testContext () throws Exception { TransactionFactoryImple factory = new TransactionFactoryImple("test"); ControlImple tx = factory.createLocal(1000); org.omg.CosTransactions.otid_t txId = Utility.uidToOtid(tx.get_uid()); Uid theUid = Utility.otidToUid(txId); assertEquals(theUid, tx.get_uid()); assertEquals(factory.getOSStatus(tx.get_uid()), org.omg.CosTransactions.Status.StatusNoTransaction); // no state in OS yet! PropagationContext ctx = tx.get_coordinator().get_txcontext(); Control cont = factory.recreate(ctx); assertTrue(Utility.getUid(cont).equals(tx.get_uid())); try { tx.getImplHandle().rollback(); } catch (final Throwable ex) { } } @Test public void testCompare () throws Exception { TransactionFactoryImple factory = new TransactionFactoryImple("test"); ControlImple tx = factory.createLocal(1000); Control proxy = factory.getTransaction(Utility.uidToOtid(tx.get_uid())); assertTrue(Utility.getUid(proxy).equals(tx.get_uid())); try { tx.getImplHandle().rollback(); } catch (final Throwable ex) { } } @Test public void testInfo () throws Exception { TransactionFactoryImple factory = new TransactionFactoryImple("test"); ControlImple tx = factory.createLocal(1000); TransactionInfo info = factory.getTransactionInfo(Utility.uidToOtid(tx.get_uid())); assertEquals(info.currentDepth, 1); assertEquals(info.timeout, 0); assertEquals(info.numberOfThreads, 0); try { tx.getImplHandle().rollback(); } catch (final Throwable ex) { } } }
ArjunaJTS/jts/tests/classes/com/hp/mwtests/ts/jts/local/transactions/TransactionFactoryUnitTest.java
/* * JBoss, Home of Professional Open Source * Copyright 2006, Red Hat Middleware LLC, and individual contributors * as indicated by the @author tags. * See the copyright.txt in the distribution for a * full listing of individual contributors. * This copyrighted material is made available to anyone wishing to use, * modify, copy, or redistribute it subject to the terms and conditions * of the GNU Lesser General Public License, v. 2.1. * This program is distributed in the hope that it will be useful, but WITHOUT A * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A * PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. * You should have received a copy of the GNU Lesser General Public License, * v.2.1 along with this distribution; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, * MA 02110-1301, USA. * * (C) 2005-2006, * @author JBoss Inc. */ /* * Copyright (C) 2004, * * Arjuna Technologies Ltd, * Newcastle upon Tyne, * Tyne and Wear, * UK. * * $Id: xidcheck.java 2342 2006-03-30 13:06:17Z $ */ package com.hp.mwtests.ts.jts.local.transactions; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import org.junit.Test; import org.omg.CosTransactions.Control; import org.omg.CosTransactions.Coordinator; import org.omg.CosTransactions.Terminator; import org.omg.CosTransactions.Inactive; import org.omg.CosTransactions.NoTransaction; import org.omg.CosTransactions.otid_t; import org.omg.CosTransactions.Status; import com.arjuna.ArjunaOTS.TransactionType; import com.arjuna.ArjunaOTS.GlobalTransactionInfo; import com.arjuna.ats.arjuna.common.arjPropertyManager; import com.arjuna.ats.internal.jts.orbspecific.ControlImple; import com.arjuna.ats.internal.jts.orbspecific.TransactionFactoryImple; import com.arjuna.ats.jts.utils.Utility; import com.hp.mwtests.ts.jts.resources.TestBase; public class TransactionFactoryUnitTest extends TestBase { @Test public void test () throws Exception { TransactionFactoryImple factory = new TransactionFactoryImple("test"); arjPropertyManager.getCoordinatorEnvironmentBean().setEnableStatistics(true); try { factory.numberOfTransactions(TransactionType.TransactionTypeActive); fail(); } catch (final Inactive ex) { } catch (final NoTransaction ex) { } ControlImple tx = factory.createLocal(1000); assertTrue(tx != null); org.omg.CosTransactions.otid_t[] txId = null; try { txId = factory.numberOfTransactions(TransactionType.TransactionTypeActive); } catch (final Throwable ex) { fail(); } try { if (factory.getChildTransactions(txId[0]) != null) fail(); } catch (final Throwable ex) { fail(); } org.omg.CosTransactions.Status status = factory.getCurrentStatus(txId[0]); assertTrue(status == org.omg.CosTransactions.Status.StatusActive); Control proxy = factory.createProxy(tx.get_coordinator(), tx.get_terminator()); assertTrue(proxy != null); Control propagated = factory.createPropagatedControl(tx.get_coordinator()); assertTrue(propagated != null); assertTrue(Utility.getUid(proxy).equals(Utility.getUid(propagated))); GlobalTransactionInfo info = factory.getGlobalInfo(); assertTrue(info != null); assertEquals(info.totalNumberOfTransactions, 1); assertEquals(info.numberOfHeuristics, 0); // assertTrue(factory.getStatus(txId[0]) == org.omg.CosTransactions.Status.StatusActive); factory.numberOfTransactions(TransactionType.TransactionTypeUnresolved); try { tx.getImplHandle().rollback(); } catch (final Throwable ex) { } 
} }
https://issues.jboss.org/browse/JBTM-715
ArjunaJTS/jts/tests/classes/com/hp/mwtests/ts/jts/local/transactions/TransactionFactoryUnitTest.java
https://issues.jboss.org/browse/JBTM-715
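The new testContext() above round-trips a transaction Uid through the CORBA otid_t representation via Utility.uidToOtid and Utility.otidToUid. A minimal sketch of that round trip follows, using only the Utility calls that appear in the test; the class name is hypothetical, and it assumes a freshly constructed Uid is unique and comparable with equals().

import static org.junit.Assert.assertEquals;

import org.junit.Test;
import org.omg.CosTransactions.otid_t;

import com.arjuna.ats.arjuna.common.Uid;
import com.arjuna.ats.jts.utils.Utility;

public class UidOtidRoundTripSketch {

    @Test
    public void roundTrip() {
        Uid original = new Uid();                    // assumed: new Uid() yields a fresh id
        otid_t wire = Utility.uidToOtid(original);   // encode for the OTS propagation context
        Uid decoded = Utility.otidToUid(wire);       // decode on the receiving side
        assertEquals(original, decoded);
    }
}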
Java
apache-2.0
05481a924e8b586bc2cedc6326e4010eb9769497
0
MalcolmK/fpai-core,MalcolmK/fpai-core,flexiblepower/fpai-core,MalcolmK/fpai-core,MalcolmK/fpai-core,flexiblepower/fpai-core,flexiblepower/fpai-core,flexiblepower/fpai-core
package org.flexiblepower.runtime.messaging; import java.io.Closeable; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import org.flexiblepower.messaging.Endpoint; import org.flexiblepower.messaging.Port; import org.flexiblepower.messaging.Ports; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class EndpointWrapper implements Runnable, Iterable<EndpointPortImpl>, Closeable { private static final Logger log = LoggerFactory.getLogger(EndpointWrapper.class); private final Endpoint endpoint; private final ConnectionManagerImpl connectionManager; private final Thread thread; private final AtomicBoolean running; private final Set<EndpointPortImpl> ports; public EndpointWrapper(Endpoint endpoint, ConnectionManagerImpl connectionManager) { this.endpoint = endpoint; this.connectionManager = connectionManager; ports = parsePorts(); thread = new Thread(this, "Message handler thread for " + endpoint.getClass().getSimpleName()); running = new AtomicBoolean(true); thread.start(); } private Set<EndpointPortImpl> parsePorts() { Port[] ports = null; Ports portsAnnotation = endpoint.getClass().getAnnotation(Ports.class); if (portsAnnotation != null) { ports = portsAnnotation.value(); } else { Port portAnnotation = endpoint.getClass().getAnnotation(Port.class); if (portAnnotation != null) { ports = new Port[] { portAnnotation }; } else { log.warn("Found an Endpoint with no Port definition: {}", endpoint.getClass().getSimpleName()); return Collections.emptySet(); } } Set<EndpointPortImpl> result = new HashSet<EndpointPortImpl>(); for (Port port : ports) { EndpointPortImpl endpointPort = new EndpointPortImpl(this, port); connectionManager.detectPossibleConnections(endpointPort); result.add(endpointPort); } return result; } public Endpoint getEndpoint() { return endpoint; } @Override public Iterator<EndpointPortImpl> iterator() { return ports.iterator(); } @Override public void run() { while (running.get()) { for (EndpointPortImpl port : ports) { for (MatchingPortsImpl matchingPort : port.getMatchingPorts()) { if (matchingPort.isConnected()) { try { matchingPort.handleMessages(port); } catch (Exception ex) { log.error("Uncaught exception while handling message on port " + port + ": " + ex.getMessage(), ex); log.warn("Closing the port because of the previous exception"); matchingPort.disconnect(); } } } synchronized (this) { try { wait(10000); } catch (InterruptedException e) { } } } } } @Override public void close() { synchronized (this) { /* run() waits on this object's monitor and notifyAll() targets it, so we must hold this; holding thread's monitor instead would make notifyAll() throw IllegalMonitorStateException */ running.set(false); notifyAll(); } try { thread.join(); } catch (InterruptedException e) { } for (EndpointPortImpl port : ports) { for (MatchingPortsImpl matchingPort : port.getMatchingPorts()) { if (matchingPort.isConnected()) { matchingPort.disconnect(); } port.removeMatch(matchingPort); matchingPort.getOtherEnd(port).removeMatch(matchingPort); } } } }
flexiblepower.runtime/src/org/flexiblepower/runtime/messaging/EndpointWrapper.java
package org.flexiblepower.runtime.messaging; import java.io.Closeable; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import org.flexiblepower.messaging.Endpoint; import org.flexiblepower.messaging.Port; import org.flexiblepower.messaging.Ports; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class EndpointWrapper implements Runnable, Iterable<EndpointPortImpl>, Closeable { private static final Logger log = LoggerFactory.getLogger(EndpointWrapper.class); private final Endpoint endpoint; private final ConnectionManagerImpl connectionManager; private final Thread thread; private final AtomicBoolean running; private final Set<EndpointPortImpl> ports; public EndpointWrapper(Endpoint endpoint, ConnectionManagerImpl connectionManager) { this.endpoint = endpoint; this.connectionManager = connectionManager; ports = parsePorts(); thread = new Thread(this, "Message handler thread for " + endpoint.getClass().getSimpleName()); running = new AtomicBoolean(true); thread.start(); } private Set<EndpointPortImpl> parsePorts() { Port[] ports = null; Ports portsAnnotation = endpoint.getClass().getAnnotation(Ports.class); if (portsAnnotation != null) { ports = portsAnnotation.value(); } else { Port portAnnotation = endpoint.getClass().getAnnotation(Port.class); if (portAnnotation != null) { ports = new Port[] { portAnnotation }; } else { log.warn("Found an Endpoint with no Port definition: {}", endpoint.getClass().getSimpleName()); return Collections.emptySet(); } } Set<EndpointPortImpl> result = new HashSet<EndpointPortImpl>(); for (Port port : ports) { EndpointPortImpl endpointPort = new EndpointPortImpl(this, port); connectionManager.detectPossibleConnections(endpointPort); result.add(endpointPort); } return result; } public Endpoint getEndpoint() { return endpoint; } @Override public Iterator<EndpointPortImpl> iterator() { return ports.iterator(); } @Override public void run() { while (running.get()) { synchronized (this) { for (EndpointPortImpl port : ports) { for (MatchingPortsImpl matchingPort : port.getMatchingPorts()) { if (matchingPort.isConnected()) { try { matchingPort.handleMessages(port); } catch (Exception ex) { log.error("Uncaught exception while handling message on port " + port + ": " + ex.getMessage(), ex); log.warn("Closing the port because of the previous exception"); matchingPort.disconnect(); } } } try { wait(10000); } catch (InterruptedException e) { } } } } } @Override public void close() { synchronized (thread) { running.set(false); notifyAll(); } try { thread.join(); } catch (InterruptedException e) { } for (EndpointPortImpl port : ports) { for (MatchingPortsImpl matchingPort : port.getMatchingPorts()) { if (matchingPort.isConnected()) { matchingPort.disconnect(); } port.removeMatch(matchingPort); matchingPort.getOtherEnd(port).removeMatch(matchingPort); } } } }
Fixed a possible deadlock situation. The wrapper calls isConnected on the MatchingPort, which takes the port's lock. If another thread tries to send a message at the same time, it needs to lock the wrapper to deliver the notification, so the two threads can end up in a deadlock (seen once so far in the testcase). This should fix that.
flexiblepower.runtime/src/org/flexiblepower/runtime/messaging/EndpointWrapper.java
Fixed a possible deadlock situation.
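A minimal, self-contained sketch of the lock discipline the commit above moves to (class and member names are illustrative, not from the fpai-core sources): the worker takes the port's lock and its own monitor one at a time, never nested, so a sending thread that holds the port lock and needs the worker's monitor for notifyAll() can no longer form a lock cycle.

import java.util.concurrent.atomic.AtomicBoolean;

public class NarrowedLockWorker implements Runnable {
    private final AtomicBoolean running = new AtomicBoolean(true);
    private final Object portLock = new Object(); // stands in for the MatchingPort's internal lock

    @Override
    public void run() {
        while (running.get()) {
            synchronized (portLock) {
                // handle pending messages while holding only the port lock
            }
            synchronized (this) {
                // park while holding only our own monitor; the two locks are never nested
                try {
                    wait(10000);
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                }
            }
        }
    }

    /** Called by a sending thread; it only ever needs this object's monitor, never portLock. */
    public void messageArrived() {
        synchronized (this) {
            notifyAll();
        }
    }

    public void shutdown() {
        running.set(false);
        messageArrived(); // wake the worker so it observes the cleared flag
    }
}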
Java
apache-2.0
8028f2f6240ca6cd7595fb1851be6c7bc411a4af
0
chaostrigger/rl-library,yimingpeng/rl-library,chaostrigger/rl-library,litlpoet/rl-library,yimingpeng/rl-library,litlpoet/rl-library,yimingpeng/rl-library,litlpoet/rl-library,litlpoet/rl-library,yimingpeng/rl-library,chaostrigger/rl-library,yimingpeng/rl-library,chaostrigger/rl-library,litlpoet/rl-library,chaostrigger/rl-library,yimingpeng/rl-library,yimingpeng/rl-library,chaostrigger/rl-library,litlpoet/rl-library,litlpoet/rl-library,chaostrigger/rl-library
import rlVizLib.general.ParameterHolder; import rlVizLib.general.TinyGlue; import rlVizLib.messaging.environmentShell.EnvShellListRequest; import rlVizLib.messaging.environmentShell.EnvShellListResponse; import rlVizLib.messaging.environmentShell.EnvShellLoadRequest; import rlVizLib.visualization.AbstractVisualizer; import rlglue.RLGlue; import visualizers.mountainCar.MountainCarVisualizer; public class JavaTrainer { /** * @param args * @throws InterruptedException */ public static void main(String[] args) throws InterruptedException { /* change theEnv String to the name of the environment you wish to test*/ // String theEnv="Tetrlais"; String theEnv="MountainCar"; EnvShellListResponse ListResponse = EnvShellListRequest.Execute(); int thisEnvIndex=ListResponse.getTheEnvList().indexOf(theEnv); ParameterHolder p = ListResponse.getTheParamList().get(thisEnvIndex); System.out.println("Running with Parameter Settings: "+p); //Optionally you can set some parameters if(theEnv.equalsIgnoreCase("MountainCar")){ p.setBooleanParam("randomStartStates",true); p.setDoubleParam("acceleration", .002); } if(theEnv.equalsIgnoreCase("Tetrlais")){ p.setBooleanParam("TriBlock",false); p.setIntegerParam("Width", 8); } EnvShellLoadRequest.Execute(theEnv,p); RLGlue.RL_init(); //This portion of the code is the same as a regular RL-Glue experiment program // we have not opened the visualizer yet, we are simply running the Agent through // a number of episodes. Change the number of iterations in the loop to increase the number //of episodes run. You have 100 000 steps per episode to terminate in, otherwise // the glue terminates the episode int sum=0; for(int i=0;i<10000;i++){ RLGlue.RL_episode(100000); sum+=RLGlue.RL_num_steps(); if((i+1)%50==0){ System.out.println("Running episode: "+(i+1)+" total steps in last bunch is: "+sum); sum=0; } } //This program will pop up the visualizer to take a look at what's happening //We need to use something called TinyGlue to manage the RL-Glue calls for us when the visualizer is running TinyGlue theTinyGlue= new TinyGlue(); //Set this otherwise the first step of theTinyGlue will call RL_init and undo all our learning theTinyGlue.setInited(true); //Opens a visualizer frame and starts visualizing RLVizWatchFrame theViz=new RLVizWatchFrame(theEnv,""); theViz.startVisualizing(); //Run a few steps here so the visualizer can be seen by the user. Change this number // if you want to watch it for longer for(int i=0;i<500;i++){ theTinyGlue.step(); Thread.sleep(10); } //finishes off the episode you were in when you ran out of steps. This is important so we can go back to regular RL-Glue calls //without the Tiny Glue while(theTinyGlue.step()); //Stop visualizing the agent's actions System.out.println("out of the display loop"); theViz.stopVisualizing(); sum=0; for(int i=0;i<100;i++){ RLGlue.RL_episode(100000); sum+=RLGlue.RL_num_steps(); if((i+1)%50==0){ System.out.println("Running episode: "+(i+1)+" total steps in last bunch is: "+sum); sum=0; } } //Run a few more steps with the visualizer on again System.out.println("running 1000 steps showing again"); theViz.startVisualizing(); for(int i=0;i<1000;i++){ theTinyGlue.step(); Thread.sleep(10); } //clean up the environment and end the program RLGlue.RL_cleanup(); System.out.println("Program over"); } }
JavaTrainer/src/JavaTrainer.java
import rlVizLib.general.ParameterHolder; import rlVizLib.general.TinyGlue; import rlVizLib.messaging.environmentShell.EnvShellListRequest; import rlVizLib.messaging.environmentShell.EnvShellListResponse; import rlVizLib.messaging.environmentShell.EnvShellLoadRequest; import rlVizLib.visualization.AbstractVisualizer; import rlglue.RLGlue; import visualizers.mountainCar.MountainCarVisualizer; public class JavaTrainer { /** * @param args * @throws InterruptedException */ public static void main(String[] args) throws InterruptedException { /* change theEnv String to the name of the environment you wish to test*/ // String theEnv="Tetrlais"; String theEnv="MountainCar"; EnvShellListResponse ListResponse = EnvShellListRequest.Execute(); int thisEnvIndex=ListResponse.getTheEnvList().indexOf(theEnv); ParameterHolder p = ListResponse.getTheParamList().get(thisEnvIndex); System.out.println("Running with Parameter Settings: "+p); //Optionally you can set some parameters if(theEnv.equalsIgnoreCase("MountainCar")){ p.setBooleanParam("randomStartStates",true); p.setDoubleParam("acceleration", .002); } if(theEnv.equalsIgnoreCase("Tetrlais")){ p.setBooleanParam("TriBlock",false); p.setIntegerParam("Width", 8); } EnvShellLoadRequest.Execute(theEnv,p); RLGlue.RL_init(); //This portion of the code is the same as a regular RL-Glue experiment program // we have not opened the visualizer yet, we are simply running the Agent through // a number of episodes. Change the number of iterations in the loop to increase the number //of episodes run. You have 100 000 steps per episode to terminate in, other wise // the glue terminates the episode int sum=0; for(int i=0;i<1000000;i++){ RLGlue.RL_episode(100000); sum+=RLGlue.RL_num_steps(); if(i%50==0){ System.out.println("Running episode: "+i+" total steps in last bunch is: "+sum); sum=0; } } TinyGlue theTinyGlue= new TinyGlue(); //Set this otherwise the first step of theTinyGlue will call RL_init and undo all our learning theTinyGlue.setInited(true); //Opens a visualizer frame and starts visualizing RLVizWatchFrame theViz=new RLVizWatchFrame(theEnv,""); theViz.startVisualizing(); //Run a few steps here so the visualizer can be seen by the user. Change this number // if you want to watch it for longer for(int i=0;i<500;i++){ theTinyGlue.step(); Thread.sleep(10); } //finishes off the episode you were in when you ran out of steps while(theTinyGlue.step()); //Stop visualizing the agent's actions System.out.println("out of the display loop"); theViz.stopVisualizing(); //Run a few more steps without the visualizer System.out.println("running 1000 steps quietly"); for(int i=0;i<1000;i++){ theTinyGlue.step(); } //Run a few more steps with the visualizer on again System.out.println("running 1000 steps showing again"); theViz.startVisualizing(); for(int i=0;i<1000;i++){ theTinyGlue.step(); Thread.sleep(10); } //clean up the environment and end the program RLGlue.RL_cleanup(); System.out.println("Program over"); } }
Few more changes to JavaTrainer
JavaTrainer/src/JavaTrainer.java
Few more changes to JavaTrainer
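One of the changes above is a progress-reporting off-by-one in the training loop. A tiny standalone sketch (the loop body stands in for the RLGlue.RL_episode/RL_num_steps calls) of why the condition moved from i % 50 to (i + 1) % 50:

public class BatchReportSketch {
    public static void main(String[] args) {
        int sum = 0;
        for (int i = 0; i < 200; i++) {
            sum += 1; // stand-in for RLGlue.RL_num_steps() after one episode
            if ((i + 1) % 50 == 0) {
                // fires after episodes 50, 100, 150, 200, so every printed batch
                // covers exactly 50 episodes; the old i % 50 == 0 fired at i == 0
                // and made the first "batch" a single episode.
                System.out.println("episodes: " + (i + 1) + ", steps in batch: " + sum);
                sum = 0;
            }
        }
    }
}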
Java
apache-2.0
d539c13f625c8d5436aebea03a6a3bf4cba06f76
0
lukas-krecan/ShedLock,lukas-krecan/ShedLock
package net.javacrumbs.shedlock.provider.jdbctemplate; import net.javacrumbs.shedlock.core.LockConfiguration; import net.javacrumbs.shedlock.test.support.jdbc.JdbcTestUtils; import net.javacrumbs.shedlock.test.support.jdbc.PostgresConfig; import net.javacrumbs.shedlock.support.annotation.NonNull; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import java.sql.Timestamp; import java.time.Duration; import java.time.Instant; import static java.lang.Thread.sleep; import static org.assertj.core.api.Assertions.assertThat; class PostgresJdbcTemplateStorageAccessorTest { private static final PostgresConfig dbConfig = new PostgresConfig(); public static final String MY_LOCK = "my-lock"; private final JdbcTestUtils testUtils = new JdbcTestUtils(dbConfig); private final Instant startTime = Instant.parse("2020-04-11T05:30:00Z"); @BeforeAll public static void startDb() { dbConfig.startDb(); } @AfterAll public static void shutdownDb() { dbConfig.shutdownDb(); } @AfterEach public void cleanup() { testUtils.clean(); } @Test void shouldUpdateOnInsertAfterValidityOfPreviousEnded() throws InterruptedException { JdbcTemplateStorageAccessor accessor = getAccessor(); accessor.insertRecord(new LockConfiguration("other", Duration.ofSeconds(5), Duration.ZERO)); Timestamp otherLockValidity = testUtils.getLockedUntil("other"); assertThat( accessor.insertRecord(new LockConfiguration(MY_LOCK, Duration.ofMillis(10), Duration.ZERO)) ).isEqualTo(true); sleep(10); assertThat( accessor.insertRecord(new LockConfiguration(MY_LOCK, Duration.ofMillis(10), Duration.ZERO)) ).isEqualTo(true); // check that the other lock has not been affected by "my-lock" update assertThat(testUtils.getLockedUntil("other")).isEqualTo(otherLockValidity); } @Test void shouldNotUpdateOnInsertIfPreviousDidNotEnd() { JdbcTemplateStorageAccessor accessor = getAccessor(); assertThat( accessor.insertRecord(new LockConfiguration(MY_LOCK, Duration.ofSeconds(10), Duration.ZERO)) ).isEqualTo(true); Timestamp originalLockValidity = testUtils.getLockedUntil(MY_LOCK); assertThat( accessor.insertRecord(new LockConfiguration(MY_LOCK, Duration.ofSeconds(10), Duration.ZERO)) ).isEqualTo(false); assertThat(testUtils.getLockedUntil(MY_LOCK)).isEqualTo(originalLockValidity); } @NonNull private JdbcTemplateStorageAccessor getAccessor() { return new JdbcTemplateStorageAccessor(JdbcTemplateLockProvider .Configuration.builder() .withJdbcTemplate(testUtils.getJdbcTemplate()) .build() ); } }
providers/jdbc/shedlock-provider-jdbc-template/src/test/java/net/javacrumbs/shedlock/provider/jdbctemplate/PostgresJdbcTemplateStorageAccessorTest.java
package net.javacrumbs.shedlock.provider.jdbctemplate; import net.javacrumbs.shedlock.core.LockConfiguration; import net.javacrumbs.shedlock.test.support.jdbc.JdbcTestUtils; import net.javacrumbs.shedlock.test.support.jdbc.PostgresConfig; import net.javacrumbs.shedlock.support.annotation.NonNull; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import java.time.Duration; import java.time.Instant; import static java.lang.Thread.sleep; import static org.assertj.core.api.Assertions.assertThat; class PostgresJdbcTemplateStorageAccessorTest { private static final PostgresConfig dbConfig = new PostgresConfig(); public static final String MY_LOCK = "my-lock"; private final JdbcTestUtils testUtils = new JdbcTestUtils(dbConfig); private final Instant startTime = Instant.parse("2020-04-11T05:30:00Z"); @BeforeAll public static void startDb() { dbConfig.startDb(); } @AfterAll public static void shutdownDb() { dbConfig.shutdownDb(); } @AfterEach public void cleanup() { testUtils.clean(); } @Test void shouldUpdateOnInsertAfterValidityOfPreviousEnded() throws InterruptedException { JdbcTemplateStorageAccessor accessor = getAccessor(); accessor.insertRecord(new LockConfiguration("other", Duration.ofSeconds(5), Duration.ZERO)); Instant otherLockValidity = testUtils.getLockedUntil("other").toInstant(); assertThat( accessor.insertRecord(new LockConfiguration(MY_LOCK, Duration.ofMillis(10), Duration.ZERO)) ).isEqualTo(true); sleep(10); assertThat( accessor.insertRecord(new LockConfiguration(MY_LOCK, Duration.ofMillis(10), Duration.ZERO)) ).isEqualTo(true); // check that the other lock has not been affected by "my-lock" update assertThat(testUtils.getLockedUntil("other")).isEqualTo(otherLockValidity); } @Test void shouldNotUpdateOnInsertIfPreviousDidNotEnd() { JdbcTemplateStorageAccessor accessor = getAccessor(); assertThat( accessor.insertRecord(new LockConfiguration(MY_LOCK, Duration.ofSeconds(10), Duration.ZERO)) ).isEqualTo(true); Instant originalLockValidity = testUtils.getLockedUntil(MY_LOCK).toInstant(); assertThat( accessor.insertRecord(new LockConfiguration(MY_LOCK, Duration.ofSeconds(10), Duration.ZERO)) ).isEqualTo(false); assertThat(testUtils.getLockedUntil(MY_LOCK)).isEqualTo(originalLockValidity); } @NonNull private JdbcTemplateStorageAccessor getAccessor() { return new JdbcTemplateStorageAccessor(JdbcTemplateLockProvider .Configuration.builder() .withJdbcTemplate(testUtils.getJdbcTemplate()) .build() ); } }
Fix Postgres test
providers/jdbc/shedlock-provider-jdbc-template/src/test/java/net/javacrumbs/shedlock/provider/jdbctemplate/PostgresJdbcTemplateStorageAccessorTest.java
Fix Postgres test
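The fix above keeps both sides of each assertion as java.sql.Timestamp instead of converting one side to Instant. A small illustrative sketch, separate from the ShedLock sources, of the equality pitfall behind it:

import java.sql.Timestamp;
import java.time.Instant;

public class TimestampEqualitySketch {
    public static void main(String[] args) {
        Instant instant = Instant.parse("2020-04-11T05:30:00Z");
        Timestamp timestamp = Timestamp.from(instant);

        // Same point in time, but equals() across the two types is never true,
        // so asserting a re-read Timestamp against a stored Instant always fails.
        System.out.println(timestamp.equals(instant));              // false
        System.out.println(timestamp.toInstant().equals(instant));  // true: compare like types
    }
}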
Java
apache-2.0
df8a02470b7ea3271bfcd8dbd82481eeeb73cfdd
0
apache/pdfbox,kalaspuffar/pdfbox,kalaspuffar/pdfbox,apache/pdfbox
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.pdfbox.pdmodel.interactive.form; import java.io.File; import java.io.IOException; import org.apache.pdfbox.pdmodel.PDDocument; import org.apache.pdfbox.rendering.TestPDFToImage; import org.junit.After; import org.junit.Before; import org.junit.Test; public class AlignmentTest { private static final File OUT_DIR = new File("target/test-output"); private static final File IN_DIR = new File("src/test/resources/org/apache/pdfbox/pdmodel/interactive/form"); private static final String NAME_OF_PDF = "AlignmentTests.pdf"; private static final String TEST_VALUE = "sdfASDF1234äöü"; private PDDocument document; private PDAcroForm acroForm; @Before public void setUp() throws IOException { document = PDDocument.load(new File(IN_DIR, NAME_OF_PDF)); acroForm = document.getDocumentCatalog().getAcroForm(); OUT_DIR.mkdirs(); } @Test public void fillFields() throws IOException { PDTextField field = (PDTextField) acroForm.getField("AlignLeft"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignLeft-Border_Small"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignLeft-Border_Medium"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignLeft-Border_Wide"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignLeft-Border_Wide_Clipped"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignLeft-Border_Small_Outside"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignMiddle"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignMiddle-Border_Small"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignMiddle-Border_Medium"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignMiddle-Border_Wide"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignMiddle-Border_Wide_Clipped"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignMiddle-Border_Medium_Outside"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignRight"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignRight-Border_Small"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignRight-Border_Medium"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignRight-Border_Wide"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignRight-Border_Wide_Clipped"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignRight-Border_Wide_Outside"); field.setValue(TEST_VALUE); // compare rendering File file = new File(OUT_DIR, NAME_OF_PDF); document.save(file); TestPDFToImage testPDFToImage = new 
TestPDFToImage(TestPDFToImage.class.getName()); if (!testPDFToImage.doTestFile(file, IN_DIR.getAbsolutePath(), OUT_DIR.getAbsolutePath())) { // don't fail, rendering is different on different systems, result must be viewed manually System.err.println ("Rendering of " + file + " failed or is not identical to expected rendering in " + IN_DIR + " directory"); } } @After public void tearDown() throws IOException { document.close(); } }
pdfbox/src/test/java/org/apache/pdfbox/pdmodel/interactive/form/AlignmentTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.pdfbox.pdmodel.interactive.form; import java.io.File; import java.io.IOException; import org.apache.pdfbox.pdmodel.PDDocument; import org.apache.pdfbox.rendering.TestPDFToImage; import org.junit.After; import org.junit.Before; import org.junit.Test; public class AlignmentTest { private static final File OUT_DIR = new File("target/test-output"); private static final File IN_DIR = new File("src/test/resources/org/apache/pdfbox/pdmodel/interactive/form"); private static final String NAME_OF_PDF = "AlignmentTests.pdf"; private static final String TEST_VALUE = "asdfASDF1234äöü"; private PDDocument document; private PDAcroForm acroForm; @Before public void setUp() throws IOException { document = PDDocument.load(new File(IN_DIR, NAME_OF_PDF)); acroForm = document.getDocumentCatalog().getAcroForm(); OUT_DIR.mkdirs(); } @Test public void fillFields() throws IOException { PDTextField field = (PDTextField) acroForm.getField("AlignLeft"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignLeft-Filled"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignLeft-Border_Small"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignLeft-Border_Small-Filled"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignLeft-Border_Medium"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignLeft-Border_Wide"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignLeft-Border_Wide_Clipped"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignLeft-Border_Small_Outside"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignMiddle"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignMiddle-Filled"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignMiddle-Border_Small"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignMiddle-Border_Medium"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignMiddle-Border_Wide"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignMiddle-Border_Wide_Clipped"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignMiddle-Border_Medium_Outside"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignRight"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignRight-Border_Small"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignRight-Border_Medium"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignRight-Border_Wide"); field.setValue(TEST_VALUE); field = (PDTextField) 
acroForm.getField("AlignRight-Border_Wide_Clipped"); field.setValue(TEST_VALUE); field = (PDTextField) acroForm.getField("AlignRight-Border_Wide_Outside"); field.setValue(TEST_VALUE); // compare rendering File file = new File(OUT_DIR, NAME_OF_PDF); document.save(file); TestPDFToImage testPDFToImage = new TestPDFToImage(TestPDFToImage.class.getName()); if (!testPDFToImage.doTestFile(file, IN_DIR.getAbsolutePath(), OUT_DIR.getAbsolutePath())) { // don't fail, rendering is different on different systems, result must be viewed manually System.err.println ("Rendering of " + file + " failed or is not identical to expected rendering in " + IN_DIR + " directory"); } } @After public void tearDown() throws IOException { document.close(); } }
PDFBOX-2333: don't overwrite Acrobat filled fields git-svn-id: c3ad59981690829a43dc34c293c4e2cd04bcd994@1680557 13f79535-47bb-0310-9956-ffa450edef68
pdfbox/src/test/java/org/apache/pdfbox/pdmodel/interactive/form/AlignmentTest.java
PDFBOX-2333: don't overwrite Acrobat filled fields
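A minimal sketch of the commit's intent rather than PDFBox's own implementation: only call setValue on fields that are still blank, so values Acrobat pre-filled (the *-Filled fields dropped from the test above) are left untouched. It assumes a String-returning getValue() accessor on the text field, as PDTextField exposes.

import java.io.IOException;

import org.apache.pdfbox.pdmodel.interactive.form.PDAcroForm;
import org.apache.pdfbox.pdmodel.interactive.form.PDTextField;

public final class FillIfEmptySketch {
    private FillIfEmptySketch() {
    }

    /** Hypothetical helper: writes the value only when the field is still empty. */
    static void fillIfEmpty(PDAcroForm acroForm, String fieldName, String value) throws IOException {
        PDTextField field = (PDTextField) acroForm.getField(fieldName);
        String current = field.getValue();
        if (current == null || current.isEmpty()) {
            field.setValue(value);
        }
        // otherwise keep whatever was already filled in
    }
}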
Java
apache-2.0
5c907379bb5832b1028554648f4d3d98bdae9934
0
PasinduTennage/carbon-identity-framework,dharshanaw/carbon-identity-framework,omindu/carbon-identity-framework,omindu/carbon-identity-framework,nuwandi-is/identity-framework,PasinduTennage/carbon-identity-framework,omindu/carbon-identity-framework,nuwandi-is/identity-framework,omindu/carbon-identity-framework,PasinduTennage/carbon-identity-framework,wso2/carbon-identity-framework,wso2/carbon-identity-framework,wso2/carbon-identity-framework,dharshanaw/carbon-identity-framework,wso2/carbon-identity-framework,dharshanaw/carbon-identity-framework,nuwandi-is/identity-framework
/* * Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.identity.application.authentication.framework.inbound; import org.wso2.carbon.identity.core.bean.context.MessageContext; import java.io.Serializable; import java.util.HashMap; import java.util.Map; public class IdentityMessageContext<T1 extends Serializable, T2 extends Serializable> extends MessageContext<T1,T2> implements Serializable { private static final long serialVersionUID = 104614801932285909L; protected IdentityRequest request; public IdentityMessageContext(IdentityRequest request, Map<T1,T2> parameters) { super(parameters); this.request = request; } public IdentityMessageContext(IdentityRequest request) { super(new HashMap<T1,T2>()); this.request = request; } public IdentityRequest getRequest() { return request; } }
components/authentication-framework/org.wso2.carbon.identity.application.authentication.framework/src/main/java/org/wso2/carbon/identity/application/authentication/framework/inbound/IdentityMessageContext.java
/* * Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.identity.application.authentication.framework.inbound; import org.wso2.carbon.identity.core.bean.context.MessageContext; import java.io.Serializable; import java.util.HashMap; import java.util.Map; public class IdentityMessageContext<T1 extends Serializable, T2 extends Serializable> extends MessageContext implements Serializable { private static final long serialVersionUID = 104614801932285909L; protected IdentityRequest request; protected Map<T1,T2> parameters = new HashMap<>(); public IdentityMessageContext(IdentityRequest request, Map<T1, T2> parameters) { super(parameters); this.request = request; } public IdentityMessageContext(IdentityRequest request) { this.request = request; } public IdentityRequest getRequest() { return request; } }
Fixing compilation failure in 5.2.0-refactoring
components/authentication-framework/org.wso2.carbon.identity.application.authentication.framework/src/main/java/org/wso2/carbon/identity/application/authentication/framework/inbound/IdentityMessageContext.java
Fixing compilation failure in 5.2.0-refactoring
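The compilation failure comes from extending the generic MessageContext as a raw type while re-declaring its parameter map in the subclass. A standalone sketch of the before/after shape, with simplified names rather than the WSO2 classes:

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

// Stand-in for the generic superclass.
class BaseContext<K extends Serializable, V extends Serializable> {
    private final Map<K, V> parameters;

    BaseContext(Map<K, V> parameters) {
        this.parameters = parameters;
    }

    V getParameter(K key) {
        return parameters.get(key);
    }
}

// After the fix: the type parameters are passed up and the superclass's map is
// reused instead of a shadowing duplicate field. The no-argument constructor
// must call super(...) explicitly because the superclass has no no-argument
// constructor; relying on the implicit super() is one plausible source of the
// compilation failure the commit message mentions.
class ChildContext<K extends Serializable, V extends Serializable> extends BaseContext<K, V> {
    ChildContext(Map<K, V> parameters) {
        super(parameters);
    }

    ChildContext() {
        super(new HashMap<K, V>());
    }
}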
Java
apache-2.0
eb3bc67ad66b97f9f8f77bd7deb93b2fe7b630b9
0
ProPra16/programmierpraktikum-abschlussprojekt-team-1,ProPra16/programmierpraktikum-abschlussprojekt-team-1
package gui; import java.io.File; import java.util.ArrayList; import java.util.List; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import data.AllTests; import data.Class; import data.Project; import data.Test; import javafx.geometry.Insets; import javafx.scene.Scene; import javafx.scene.control.Alert; import javafx.scene.control.Alert.AlertType; import javafx.scene.control.Button; import javafx.scene.control.Label; import javafx.scene.control.TextArea; import javafx.scene.layout.BorderPane; import javafx.scene.layout.GridPane; import javafx.scene.layout.HBox; import javafx.scene.text.Font; import javafx.scene.text.FontWeight; import javafx.scene.text.Text; import javafx.stage.Stage; public class Catalog extends Stage{ private Scene scene; private BorderPane root = new BorderPane(); private Label exerciseName, description, babysteps, timetracking; private TextArea classes; private TextArea tests; private Button laden; private String source = "./res/exercise.xml"; private Document doc = null; private int currentExercise = 0; private int numberOfExercises = 0; public Catalog(String source){ super(); this.source = source; setScene(create_scene()); loadExcercises(); showExcercise(currentExercise); } public Catalog(){ super(); setScene(create_scene()); loadExcercises(); showExcercise(currentExercise); } private void clickOnLaden(){ close(); } private void loadExcercises(){ File input = null; try { input = new File(source); DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance(); DocumentBuilder dBuilder = dbFactory.newDocumentBuilder(); doc = dBuilder.parse(input); doc.getDocumentElement().normalize(); NodeList nList = doc.getElementsByTagName("exercise"); numberOfExercises = nList.getLength(); } catch (Exception e) { Alert alert = new Alert(AlertType.ERROR); alert.setTitle("Error"); alert.setHeaderText("Übungen konnten nicht geladen werden"); alert.setContentText("Konnte daten vom Dokument nicht laden:\n" + input.getAbsolutePath()); alert.showAndWait(); } } private Scene create_scene(){ root = create_root(); scene = new Scene(root,600,600); return scene; } private BorderPane create_root(){ root.setTop(create_top()); root.setCenter(create_center()); return root; } private GridPane create_center() { GridPane grid = new GridPane(); grid.setHgap(10); grid.setVgap(10); grid.setPadding(new Insets(15, 50, 0, 50)); grid.setStyle("-fx-background-color: #f5f5f5;"); // Text in column 1, row 1 Text exerciseNameText = new Text("Excercise:"); exerciseNameText.setFont(Font.font("Arial", FontWeight.NORMAL, 15)); grid.add(exerciseNameText, 0, 0); // Label in column 2, row 1 exerciseName = new Label(); exerciseName.setFont(Font.font("Arial", FontWeight.NORMAL, 15)); grid.add(exerciseName, 1, 0); // Text in column 1, row 2 Text descriptionText = new Text("Description:"); descriptionText.setFont(Font.font("Arial", FontWeight.NORMAL, 13)); grid.add(descriptionText, 0, 1); // Label in column 2, row 2 description = new Label(); description.setFont(Font.font("Arial", FontWeight.NORMAL, 13)); grid.add(description, 1, 1); // Text in column 1, row 3 Text classesText = new Text("Classes:"); classesText.setFont(Font.font("Arial", FontWeight.NORMAL, 13)); grid.add(classesText, 0, 2); // TextArea in column 1, row 4 classes = new TextArea(); classes.setFont(Font.font("Arial", FontWeight.NORMAL, 12)); grid.add(classes, 0, 3, 2, 1); // Text in column 1, row 
5 Text testsText = new Text("Tests:"); testsText.setFont(Font.font("Arial", FontWeight.NORMAL, 13)); grid.add(testsText, 0, 4); // TextArea in column 1, row 6 tests = new TextArea(); tests.setFont(Font.font("Arial", FontWeight.NORMAL, 12)); grid.add(tests, 0, 5, 2, 1); // Text in column 1, row 7 Text babystepsText = new Text("Babysteps:"); babystepsText.setFont(Font.font("Arial", FontWeight.NORMAL, 13)); grid.add(babystepsText, 0, 6); // Label in column 2, row 7 babysteps = new Label(); babysteps.setFont(Font.font("Arial", FontWeight.NORMAL, 13)); grid.add(babysteps, 1, 6); // Text in column 1, row 8 Text trackingText = new Text("Tracking:"); trackingText.setFont(Font.font("Arial", FontWeight.NORMAL, 13)); grid.add(trackingText, 0, 7); // Label in column 2, row 8 timetracking = new Label(); timetracking.setFont(Font.font("Arial", FontWeight.NORMAL, 13)); grid.add(timetracking, 1, 7); return grid; } private HBox create_top(){ HBox hbox = new HBox(); hbox.setPadding(new Insets(10, 50, 10, 50)); hbox.setSpacing(15); hbox.setStyle("-fx-background-color: #dcdcdc;"); Button zurueck = new Button("Previous"); Button weiter = new Button("Next"); laden = new Button("Load this!"); laden.setOnAction(e->{ clickOnLaden(); }); zurueck.setOnAction(e->{ currentExercise = ((currentExercise-1)+numberOfExercises)%numberOfExercises; showExcercise(currentExercise); }); weiter.setOnAction(e->{ currentExercise = (currentExercise+1)%numberOfExercises; showExcercise(currentExercise); }); hbox.getChildren().addAll(zurueck, weiter, laden); return hbox; } private void showExcercise(int index) { NodeList nList = doc.getElementsByTagName("exercise"); Node nNode = nList.item(index); Element eElement = (Element) nNode; if (nNode.getNodeType() == Node.ELEMENT_NODE) { exerciseName.setText(eElement.getAttribute("name")+""); description.setText(eElement.getElementsByTagName("description").item(0).getTextContent()); } NodeList classList = eElement.getElementsByTagName("class"); String hilfe = ""; for(int i = 0; i < classList.getLength(); i++){ hilfe = hilfe + ((Element)classList.item(i)).getAttribute("name") + "\n"; for(int j = 0; j < classList.item(i).getChildNodes().getLength(); j++){ hilfe = hilfe + classList.item(i).getChildNodes().item(j).getNodeValue(); } hilfe = hilfe + "\n\n"; } classes.setText(hilfe); classes.setEditable(false); hilfe = ""; NodeList testList = eElement.getElementsByTagName("test"); for(int i = 0; i < testList.getLength(); i++){ hilfe = hilfe + ((Element)testList.item(i)).getAttribute("name") + "\n"; for(int j = 0; j < testList.item(i).getChildNodes().getLength(); j++){ hilfe = hilfe + testList.item(i).getChildNodes().item(j).getNodeValue(); } hilfe = hilfe + "\n\n"; } tests.setText(hilfe); tests.setEditable(false); hilfe = ""; NodeList babyList = eElement.getElementsByTagName("babysteps"); if(((Element)babyList.item(0)).getAttribute("value").equals("True")){ hilfe = ((Element)babyList.item(0)).getAttribute("value")+ " time: "+ ((Element)babyList.item(0)).getAttribute("time") ; babysteps.setText(hilfe); } else { babysteps.setText(((Element)babyList.item(0)).getAttribute("value")+ "" ); } NodeList trackingList = eElement.getElementsByTagName("timetracking"); timetracking.setText(((Element)trackingList.item(0)).getAttribute("value")+ "" ); } public Project getProject(){ NodeList nList = doc.getElementsByTagName("exercise"); Node nNode = nList.item(currentExercise); Element eElement = (Element) nNode; NodeList classList = eElement.getElementsByTagName("class"); List<Class> klassenListe = new 
ArrayList<Class>(); for(int i = 0; i < classList.getLength(); i++){ data.Class klasse = new data.Class(classList.item(i).getChildNodes().item(0).getNodeValue(), (String)((Element)classList.item(i)).getAttribute("name")); klassenListe.add(klasse); } NodeList testList = eElement.getElementsByTagName("test"); List<Test> tests = new ArrayList<Test>(); for(int i = 0; i < testList.getLength(); i++){ Test test = new Test((String)(((Element)testList.item(i)).getAttribute("name")),testList.item(i).getChildNodes().item(0).getNodeValue()); tests.add(test); } NodeList babyList = eElement.getElementsByTagName("babysteps"); boolean babysteps = false; int duration = 0; if(((Element)babyList.item(0)).getAttribute("value").equals("True")){ babysteps = true; duration = 0; //((Element)babyList.item(0)).getAttribute("time") ; //TODO make duration a String in the xml file, or store the time in Project as a String } NodeList trackingList = eElement.getElementsByTagName("timetracking"); boolean tracking = false; if(((Element)trackingList.item(0)).getAttribute("value").equals("True")){ tracking = true; } Project project = new Project(tests, klassenListe, eElement.getElementsByTagName("description").item(0).getTextContent(),eElement.getAttribute("name"),babysteps,duration, tracking); return project; } }
TDD-Trainer/src/gui/Catalog.java
package gui; import java.io.File; import java.util.ArrayList; import java.util.List; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import data.AllTests; import data.Class; import data.Project; import data.Test; import javafx.geometry.Insets; import javafx.scene.Scene; import javafx.scene.control.Alert; import javafx.scene.control.Alert.AlertType; import javafx.scene.control.Button; import javafx.scene.control.Label; import javafx.scene.control.TextArea; import javafx.scene.layout.BorderPane; import javafx.scene.layout.GridPane; import javafx.scene.layout.HBox; import javafx.scene.text.Font; import javafx.scene.text.FontWeight; import javafx.scene.text.Text; import javafx.stage.Stage; public class Catalog extends Stage{ private Scene scene; private BorderPane root = new BorderPane(); private Label exerciseName, description, babysteps, timetracking; private TextArea classes; private TextArea tests; private Button laden; private Document doc = null; private int currentExercise = 0; private int numberOfExercises = 0; public Catalog(){ super(); setScene(create_scene()); loadExcercises(); showExcercise(currentExercise); } private void clickOnLaden(){ close(); } private void loadExcercises(){ File input = null; try { input = new File("./res/exercise.xml"); DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance(); DocumentBuilder dBuilder = dbFactory.newDocumentBuilder(); doc = dBuilder.parse(input); doc.getDocumentElement().normalize(); NodeList nList = doc.getElementsByTagName("exercise"); numberOfExercises = nList.getLength(); } catch (Exception e) { Alert alert = new Alert(AlertType.ERROR); alert.setTitle("Error"); alert.setHeaderText("Übungen konnten nicht geladen werden"); alert.setContentText("Konnte daten vom Dokument nicht laden:\n" + input.getAbsolutePath()); alert.showAndWait(); } } private Scene create_scene(){ root = create_root(); scene = new Scene(root,600,600); return scene; } private BorderPane create_root(){ root.setTop(create_top()); root.setCenter(create_center()); return root; } private GridPane create_center() { GridPane grid = new GridPane(); grid.setHgap(10); grid.setVgap(10); grid.setPadding(new Insets(15, 50, 0, 50)); grid.setStyle("-fx-background-color: #f5f5f5;"); // Text in column 1, row 1 Text exerciseNameText = new Text("Excercise:"); exerciseNameText.setFont(Font.font("Arial", FontWeight.NORMAL, 15)); grid.add(exerciseNameText, 0, 0); // Label in column 2, row 1 exerciseName = new Label(); exerciseName.setFont(Font.font("Arial", FontWeight.NORMAL, 15)); grid.add(exerciseName, 1, 0); // Text in column 1, row 2 Text descriptionText = new Text("Description:"); descriptionText.setFont(Font.font("Arial", FontWeight.NORMAL, 13)); grid.add(descriptionText, 0, 1); // Label in column 2, row 2 description = new Label(); description.setFont(Font.font("Arial", FontWeight.NORMAL, 13)); grid.add(description, 1, 1); // Text in column 1, row 3 Text classesText = new Text("Classes:"); classesText.setFont(Font.font("Arial", FontWeight.NORMAL, 13)); grid.add(classesText, 0, 2); // TextArea in column 1, row 4 classes = new TextArea(); classes.setFont(Font.font("Arial", FontWeight.NORMAL, 12)); grid.add(classes, 0, 3, 2, 1); // Text in column 1, row 5 Text testsText = new Text("Tests:"); testsText.setFont(Font.font("Arial", FontWeight.NORMAL, 13)); grid.add(testsText, 0, 4); // TextArea in column 1, row 6 tests = new 
TextArea(); tests.setFont(Font.font("Arial", FontWeight.NORMAL, 12)); grid.add(tests, 0, 5, 2, 1); // Text in column 1, row 7 Text babystepsText = new Text("Babysteps:"); babystepsText.setFont(Font.font("Arial", FontWeight.NORMAL, 13)); grid.add(babystepsText, 0, 6); // Label in column 2, row 7 babysteps = new Label(); babysteps.setFont(Font.font("Arial", FontWeight.NORMAL, 13)); grid.add(babysteps, 1, 6); // Text in column 1, row 8 Text trackingText = new Text("Tracking:"); trackingText.setFont(Font.font("Arial", FontWeight.NORMAL, 13)); grid.add(trackingText, 0, 7); // Label in column 2, row 8 timetracking = new Label(); timetracking.setFont(Font.font("Arial", FontWeight.NORMAL, 13)); grid.add(timetracking, 1, 7); return grid; } private HBox create_top(){ HBox hbox = new HBox(); hbox.setPadding(new Insets(10, 50, 10, 50)); hbox.setSpacing(15); hbox.setStyle("-fx-background-color: #dcdcdc;"); Button zurueck = new Button("Previous"); Button weiter = new Button("Next"); laden = new Button("Load this!"); laden.setOnAction(e->{ clickOnLaden(); }); zurueck.setOnAction(e->{ currentExercise = ((currentExercise-1)+numberOfExercises)%numberOfExercises; showExcercise(currentExercise); }); weiter.setOnAction(e->{ currentExercise = (currentExercise+1)%numberOfExercises; showExcercise(currentExercise); }); hbox.getChildren().addAll(zurueck, weiter, laden); return hbox; } private void showExcercise(int index) { NodeList nList = doc.getElementsByTagName("exercise"); Node nNode = nList.item(index); Element eElement = (Element) nNode; if (nNode.getNodeType() == Node.ELEMENT_NODE) { exerciseName.setText(eElement.getAttribute("name")+""); description.setText(eElement.getElementsByTagName("description").item(0).getTextContent()); } NodeList classList = eElement.getElementsByTagName("class"); String hilfe = ""; for(int i = 0; i < classList.getLength(); i++){ hilfe = hilfe + ((Element)classList.item(i)).getAttribute("name") + "\n"; for(int j = 0; j < classList.item(i).getChildNodes().getLength(); j++){ hilfe = hilfe + classList.item(i).getChildNodes().item(j).getNodeValue(); } hilfe = hilfe + "\n\n"; } classes.setText(hilfe); classes.setEditable(false); hilfe = ""; NodeList testList = eElement.getElementsByTagName("test"); for(int i = 0; i < testList.getLength(); i++){ hilfe = hilfe + ((Element)testList.item(i)).getAttribute("name") + "\n"; for(int j = 0; j < testList.item(i).getChildNodes().getLength(); j++){ hilfe = hilfe + testList.item(i).getChildNodes().item(j).getNodeValue(); } hilfe = hilfe + "\n\n"; } tests.setText(hilfe); tests.setEditable(false); hilfe = ""; NodeList babyList = eElement.getElementsByTagName("babysteps"); if(((Element)babyList.item(0)).getAttribute("value").equals("True")){ hilfe = ((Element)babyList.item(0)).getAttribute("value")+ " time: "+ ((Element)babyList.item(0)).getAttribute("time") ; babysteps.setText(hilfe); } else { babysteps.setText(((Element)babyList.item(0)).getAttribute("value")+ "" ); } NodeList trackingList = eElement.getElementsByTagName("timetracking"); timetracking.setText(((Element)trackingList.item(0)).getAttribute("value")+ "" ); } public Project getProject(){ NodeList nList = doc.getElementsByTagName("exercise"); Node nNode = nList.item(currentExercise); Element eElement = (Element) nNode; NodeList classList = eElement.getElementsByTagName("class"); List<Class> klassenListe = new ArrayList<Class>(); for(int i = 0; i < classList.getLength(); i++){ data.Class klasse = new data.Class(classList.item(i).getChildNodes().item(0).getNodeValue(), 
(String)((Element)classList.item(i)).getAttribute("name")); klassenListe.add(klasse); } NodeList testList = eElement.getElementsByTagName("test"); List<Test> tests = new ArrayList<Test>(); for(int i = 0; i < testList.getLength(); i++){ Test test = new Test((String)(((Element)testList.item(i)).getAttribute("name")),testList.item(i).getChildNodes().item(0).getNodeValue()); tests.add(test); } NodeList babyList = eElement.getElementsByTagName("babysteps"); boolean babysteps = false; int duration = 0; if(((Element)babyList.item(0)).getAttribute("value").equals("True")){ babysteps = true; duration = 0; //((Element)babyList.item(0)).getAttribute("time") ; //TODO duration in der xml datei zu einem String machen oder in Project die Zeit als String speichern } NodeList trackingList = eElement.getElementsByTagName("timetracking"); boolean tracking = false; if(((Element)trackingList.item(0)).getAttribute("value").equals("True")){ tracking = true; } Project project = new Project(tests, klassenListe, eElement.getElementsByTagName("description").item(0).getTextContent(),eElement.getAttribute("name"),babysteps,duration, tracking); return project; } }
Catalog can now be opened with a different .xml file
TDD-Trainer/src/gui/Catalog.java
Catalog can now be opened with a different .xml file
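A standalone sketch of the constructor overloading the commit above introduces (names are illustrative): the no-argument form can delegate to the new one with the old hard-coded default, instead of repeating the whole initialization sequence in both constructors as the committed version does.

public class ExerciseCatalogSketch {
    private final String source;

    public ExerciseCatalogSketch() {
        this("./res/exercise.xml"); // the previous hard-coded default
    }

    public ExerciseCatalogSketch(String source) {
        this.source = source; // new: the caller chooses the exercise file
        // load and display the exercises from 'source' here
    }

    public String getSource() {
        return source;
    }
}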
Java
apache-2.0
1f074ff4004eb896bf16b2ec60874fbcd2fbe625
0
jeremylong/DependencyCheck,hansjoachim/DependencyCheck,stevespringett/DependencyCheck,colezlaw/DependencyCheck,stefanneuhaus/DependencyCheck,wmaintw/DependencyCheck,awhitford/DependencyCheck,jeremylong/DependencyCheck,recena/DependencyCheck,colezlaw/DependencyCheck,stefanneuhaus/DependencyCheck,hansjoachim/DependencyCheck,awhitford/DependencyCheck,hansjoachim/DependencyCheck,stevespringett/DependencyCheck,wmaintw/DependencyCheck,stefanneuhaus/DependencyCheck,wmaintw/DependencyCheck,awhitford/DependencyCheck,stefanneuhaus/DependencyCheck,stefanneuhaus/DependencyCheck,awhitford/DependencyCheck,stevespringett/DependencyCheck,stevespringett/DependencyCheck,jeremylong/DependencyCheck,jeremylong/DependencyCheck,recena/DependencyCheck,adilakhter/DependencyCheck,awhitford/DependencyCheck,colezlaw/DependencyCheck,colezlaw/DependencyCheck,adilakhter/DependencyCheck,stevespringett/DependencyCheck,wmaintw/DependencyCheck,awhitford/DependencyCheck,colezlaw/DependencyCheck,stefanneuhaus/DependencyCheck,stefanneuhaus/DependencyCheck,hansjoachim/DependencyCheck,wmaintw/DependencyCheck,adilakhter/DependencyCheck,wmaintw/DependencyCheck,recena/DependencyCheck,adilakhter/DependencyCheck,hansjoachim/DependencyCheck,jeremylong/DependencyCheck,stefanneuhaus/DependencyCheck,awhitford/DependencyCheck,stevespringett/DependencyCheck,stevespringett/DependencyCheck,colezlaw/DependencyCheck,jeremylong/DependencyCheck,recena/DependencyCheck,adilakhter/DependencyCheck,recena/DependencyCheck,adilakhter/DependencyCheck,jeremylong/DependencyCheck,colezlaw/DependencyCheck,jeremylong/DependencyCheck,hansjoachim/DependencyCheck,recena/DependencyCheck,hansjoachim/DependencyCheck,awhitford/DependencyCheck
/* * This file is part of dependency-check-core. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * Copyright (c) 2013 Jeremy Long. All Rights Reserved. */ package org.owasp.dependencycheck.analyzer; import org.apache.commons.compress.archivers.ArchiveEntry; import org.apache.commons.compress.archivers.ArchiveInputStream; import org.apache.commons.compress.archivers.tar.TarArchiveInputStream; import org.apache.commons.compress.archivers.zip.ZipArchiveEntry; import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream; import org.apache.commons.compress.archivers.zip.ZipFile; import org.apache.commons.compress.compressors.CompressorInputStream; import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream; import org.apache.commons.compress.compressors.bzip2.BZip2Utils; import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream; import org.apache.commons.compress.compressors.gzip.GzipUtils; import org.owasp.dependencycheck.Engine; import org.owasp.dependencycheck.analyzer.exception.AnalysisException; import org.owasp.dependencycheck.analyzer.exception.ArchiveExtractionException; import org.owasp.dependencycheck.dependency.Dependency; import org.owasp.dependencycheck.utils.FileFilterBuilder; import org.owasp.dependencycheck.utils.FileUtils; import org.owasp.dependencycheck.utils.Settings; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.*; import java.util.*; /** * <p> * An analyzer that extracts files from archives and ensures any supported files contained within the archive are added to the * dependency list.</p> * * @author Jeremy Long */ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer { /** * The logger. */ private static final Logger LOGGER = LoggerFactory.getLogger(ArchiveAnalyzer.class); /** * The buffer size to use when extracting files from the archive. */ private static final int BUFFER_SIZE = 4096; /** * The count of directories created during analysis. This is used for creating temporary directories. */ private static int dirCount = 0; /** * The parent directory for the individual directories per archive. */ private File tempFileLocation = null; /** * The max scan depth that the analyzer will recursively extract nested archives. */ private static final int MAX_SCAN_DEPTH = Settings.getInt("archive.scan.depth", 3); /** * Tracks the current scan/extraction depth for nested archives. */ private int scanDepth = 0; //<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer"> /** * The name of the analyzer. */ private static final String ANALYZER_NAME = "Archive Analyzer"; /** * The phase that this analyzer is intended to run in. */ private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.INITIAL; /** * The set of things we can handle with Zip methods */ private static final Set<String> ZIPPABLES = newHashSet("zip", "ear", "war", "jar", "sar", "apk", "nupkg"); /** * The set of file extensions supported by this analyzer. 
Note for developers, any additions to this list will need * to be explicitly handled in {@link #extractFiles(File, File, Engine)}. */ private static final Set<String> EXTENSIONS = newHashSet("tar", "gz", "tgz", "bz2", "tbz2"); /** * Detects files with extensions to remove from the engine's collection of dependencies. */ private static final FileFilter REMOVE_FROM_ANALYSIS = FileFilterBuilder.newInstance().addExtensions("zip", "tar", "gz", "tgz", "bz2", "tbz2").build(); static { final String additionalZipExt = Settings.getString(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS); if (additionalZipExt != null) { final Set<String> ext = new HashSet<String>(Collections.singletonList(additionalZipExt)); ZIPPABLES.addAll(ext); } EXTENSIONS.addAll(ZIPPABLES); } /** * The file filter used to filter supported files. */ private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(EXTENSIONS).build(); @Override protected FileFilter getFileFilter() { return FILTER; } /** * Detects files with .zip extension. */ private static final FileFilter ZIP_FILTER = FileFilterBuilder.newInstance().addExtensions("zip").build(); /** * Returns the name of the analyzer. * * @return the name of the analyzer. */ @Override public String getName() { return ANALYZER_NAME; } /** * Returns the phase that the analyzer is intended to run in. * * @return the phase that the analyzer is intended to run in. */ @Override public AnalysisPhase getAnalysisPhase() { return ANALYSIS_PHASE; } //</editor-fold> /** * Returns the key used in the properties file to reference the analyzer's enabled property. * * @return the analyzer's enabled property setting key */ @Override protected String getAnalyzerEnabledSettingKey() { return Settings.KEYS.ANALYZER_ARCHIVE_ENABLED; } /** * The initialize method creates the parent temporary directory that extracted archives will be placed in. * * @throws Exception is thrown if there is an exception deleting or creating temporary files */ @Override public void initializeFileTypeAnalyzer() throws Exception { final File baseDir = Settings.getTempDirectory(); tempFileLocation = File.createTempFile("check", "tmp", baseDir); if (!tempFileLocation.delete()) { final String msg = String.format("Unable to delete temporary file '%s'.", tempFileLocation.getAbsolutePath()); throw new AnalysisException(msg); } if (!tempFileLocation.mkdirs()) { final String msg = String.format("Unable to create directory '%s'.", tempFileLocation.getAbsolutePath()); throw new AnalysisException(msg); } } /** * The close method deletes any temporary files and directories created during analysis. * * @throws Exception thrown if there is an exception deleting temporary files */ @Override public void close() throws Exception { if (tempFileLocation != null && tempFileLocation.exists()) { LOGGER.debug("Attempting to delete temporary files"); final boolean success = FileUtils.delete(tempFileLocation); if (!success && tempFileLocation != null && tempFileLocation.exists() && tempFileLocation.list().length > 0) { LOGGER.warn("Failed to delete some temporary files, see the log for more details"); } } } /** * Analyzes a given dependency. If the dependency is an archive, such as a WAR or EAR, the contents are extracted, scanned, * and added to the list of dependencies within the engine.
* * @param dependency the dependency to analyze * @param engine the engine scanning * @throws AnalysisException thrown if there is an analysis exception */ @Override public void analyzeFileType(Dependency dependency, Engine engine) throws AnalysisException { final File f = new File(dependency.getActualFilePath()); final File tmpDir = getNextTempDirectory(); extractFiles(f, tmpDir, engine); //make a copy final Set<Dependency> dependencySet = findMoreDependencies(engine, tmpDir); if (!dependencySet.isEmpty()) { for (Dependency d : dependencySet) { //fix the dependency's display name and path final String displayPath = String.format("%s%s", dependency.getFilePath(), d.getActualFilePath().substring(tmpDir.getAbsolutePath().length())); final String displayName = String.format("%s: %s", dependency.getFileName(), d.getFileName()); d.setFilePath(displayPath); d.setFileName(displayName); //TODO - can we get more evidence from the parent? EAR contains module name, etc. //analyze the dependency (i.e. extract files) if it is a supported type. if (this.accept(d.getActualFile()) && scanDepth < MAX_SCAN_DEPTH) { scanDepth += 1; analyze(d, engine); scanDepth -= 1; } } } if (REMOVE_FROM_ANALYSIS.accept(dependency.getActualFile())) { addDisguisedJarsToDependencies(dependency, engine); engine.getDependencies().remove(dependency); } Collections.sort(engine.getDependencies()); } private void addDisguisedJarsToDependencies(Dependency dependency, Engine engine) throws AnalysisException { if (ZIP_FILTER.accept(dependency.getActualFile()) && isZipFileActuallyJarFile(dependency)) { final File tdir = getNextTempDirectory(); final String fileName = dependency.getFileName(); LOGGER.info(String.format("The zip file '%s' appears to be a JAR file, making a copy and analyzing it as a JAR.", fileName)); final File tmpLoc = new File(tdir, fileName.substring(0, fileName.length() - 3) + "jar"); try { //copy the dependency's file, not the (empty) temp directory, to the .jar location org.apache.commons.io.FileUtils.copyFile(dependency.getActualFile(), tmpLoc); final Set<Dependency> dependencySet = findMoreDependencies(engine, tmpLoc); if (!dependencySet.isEmpty()) { if (dependencySet.size() != 1) { LOGGER.info("Deep copy of ZIP to JAR file resulted in more than one dependency?"); } for (Dependency d : dependencySet) { //fix the dependency's display name and path d.setFilePath(dependency.getFilePath()); d.setDisplayFileName(dependency.getFileName()); } } } catch (IOException ex) { LOGGER.debug("Unable to perform deep copy on '{}'", dependency.getActualFile().getPath(), ex); } } } private static final Set<Dependency> EMPTY_DEPENDENCY_SET = Collections.emptySet(); /** * Scan the given file/folder, and return any new dependencies found. * * @param engine used to scan * @param file target of scanning * @return any dependencies that weren't known to the engine before */ private static Set<Dependency> findMoreDependencies(Engine engine, File file) { List<Dependency> before = new ArrayList<Dependency>(engine.getDependencies()); engine.scan(file); List<Dependency> after = engine.getDependencies(); final boolean sizeChanged = before.size() != after.size(); final Set<Dependency> newDependencies; if (sizeChanged) { //get the new dependencies newDependencies = new HashSet<Dependency>(); newDependencies.addAll(after); newDependencies.removeAll(before); } else { newDependencies = EMPTY_DEPENDENCY_SET; } return newDependencies; } /** * Retrieves the next temporary directory to extract an archive to.
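* Directory names come from a monotonically increasing counter under the shared temp root, so each archive gets its own extraction directory.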
* * @return a directory * @throws AnalysisException thrown if unable to create temporary directory */ private File getNextTempDirectory() throws AnalysisException { dirCount += 1; final File directory = new File(tempFileLocation, String.valueOf(dirCount)); //getting an exception for some directories not being able to be created; might be because the directory already exists? if (directory.exists()) { return getNextTempDirectory(); } if (!directory.mkdirs()) { final String msg = String.format("Unable to create temp directory '%s'.", directory.getAbsolutePath()); throw new AnalysisException(msg); } return directory; } /** * Extracts the contents of an archive into the specified directory. * * @param archive an archive file such as a WAR or EAR * @param destination a directory to extract the contents to * @param engine the scanning engine * @throws AnalysisException thrown if the archive is not found */ private void extractFiles(File archive, File destination, Engine engine) throws AnalysisException { if (archive != null && destination != null) { FileInputStream fis; try { fis = new FileInputStream(archive); } catch (FileNotFoundException ex) { LOGGER.debug("", ex); throw new AnalysisException("Archive file was not found.", ex); } final String archiveExt = FileUtils.getFileExtension(archive.getName()).toLowerCase(); try { if (ZIPPABLES.contains(archiveExt)) { extractArchive(new ZipArchiveInputStream(new BufferedInputStream(fis)), destination, engine); } else if ("tar".equals(archiveExt)) { extractArchive(new TarArchiveInputStream(new BufferedInputStream(fis)), destination, engine); } else if ("gz".equals(archiveExt) || "tgz".equals(archiveExt)) { final String uncompressedName = GzipUtils.getUncompressedFilename(archive.getName()); final File f = new File(destination, uncompressedName); if (engine.accept(f)) { decompressFile(new GzipCompressorInputStream(new BufferedInputStream(fis)), f); } } else if ("bz2".equals(archiveExt) || "tbz2".equals(archiveExt)) { final String uncompressedName = BZip2Utils.getUncompressedFilename(archive.getName()); final File f = new File(destination, uncompressedName); if (engine.accept(f)) { decompressFile(new BZip2CompressorInputStream(new BufferedInputStream(fis)), f); } } } catch (ArchiveExtractionException ex) { LOGGER.warn("Exception extracting archive '{}'.", archive.getName()); LOGGER.debug("", ex); } catch (IOException ex) { LOGGER.warn("Exception reading archive '{}'.", archive.getName()); LOGGER.debug("", ex); } finally { close(fis); } } } /** * Extracts files from an archive. 
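* Only entries accepted by the engine's file type filters are written to disk; all other entries in the archive are skipped.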
* * @param input the archive to extract files from * @param destination the location to write the files to * @param engine the dependency-check engine * @throws ArchiveExtractionException thrown if there is an exception extracting files from the archive */ private void extractArchive(ArchiveInputStream input, File destination, Engine engine) throws ArchiveExtractionException { ArchiveEntry entry; try { while ((entry = input.getNextEntry()) != null) { final File file = new File(destination, entry.getName()); if (entry.isDirectory()) { if (!file.exists() && !file.mkdirs()) { final String msg = String.format("Unable to create directory '%s'.", file.getAbsolutePath()); throw new AnalysisException(msg); } } else if (engine.accept(file)) { extractAcceptedFile(input, file); } } } catch (Throwable ex) { throw new ArchiveExtractionException(ex); } finally { close(input); } } private static void extractAcceptedFile(ArchiveInputStream input, File file) throws AnalysisException { LOGGER.debug("Extracting '{}'", file.getPath()); BufferedOutputStream bos = null; FileOutputStream fos = null; try { final File parent = file.getParentFile(); if (!parent.isDirectory()) { if (!parent.mkdirs()) { final String msg = String.format("Unable to build directory '%s'.", parent.getAbsolutePath()); throw new AnalysisException(msg); } } fos = new FileOutputStream(file); bos = new BufferedOutputStream(fos, BUFFER_SIZE); int count; final byte[] data = new byte[BUFFER_SIZE]; while ((count = input.read(data, 0, BUFFER_SIZE)) != -1) { bos.write(data, 0, count); } bos.flush(); } catch (FileNotFoundException ex) { LOGGER.debug("", ex); final String msg = String.format("Unable to find file '%s'.", file.getName()); throw new AnalysisException(msg, ex); } catch (IOException ex) { LOGGER.debug("", ex); final String msg = String.format("IO Exception while parsing file '%s'.", file.getName()); throw new AnalysisException(msg, ex); } finally { close(bos); close(fos); } } /** * Decompresses a file. * * @param inputStream the compressed file * @param outputFile the location to write the decompressed file * @throws ArchiveExtractionException thrown if there is an exception decompressing the file */ private void decompressFile(CompressorInputStream inputStream, File outputFile) throws ArchiveExtractionException { LOGGER.debug("Decompressing '{}'", outputFile.getPath()); FileOutputStream out = null; try { out = new FileOutputStream(outputFile); final byte[] buffer = new byte[BUFFER_SIZE]; int n; while (-1 != (n = inputStream.read(buffer))) { out.write(buffer, 0, n); } } catch (FileNotFoundException ex) { LOGGER.debug("", ex); throw new ArchiveExtractionException(ex); } catch (IOException ex) { LOGGER.debug("", ex); throw new ArchiveExtractionException(ex); } finally { close(out); } } /** * Close the given {@link Closeable} instance, ignoring nulls, and logging any thrown {@link IOException}. * * @param closeable to be closed */ private static void close(Closeable closeable) { if (null != closeable) { try { closeable.close(); } catch (IOException ex) { LOGGER.trace("", ex); } } } /** * Attempts to determine if a zip file is actually a JAR file.
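* A zip is treated as a JAR when it contains META-INF/MANIFEST.MF or META-INF/maven and at least one .class entry.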
* * @param dependency the dependency to check * @return true if the dependency appears to be a JAR file; otherwise false */ private boolean isZipFileActuallyJarFile(Dependency dependency) { boolean isJar = false; ZipFile zip = null; try { zip = new ZipFile(dependency.getActualFilePath()); if (zip.getEntry("META-INF/MANIFEST.MF") != null || zip.getEntry("META-INF/maven") != null) { final Enumeration<ZipArchiveEntry> entries = zip.getEntries(); while (entries.hasMoreElements()) { final ZipArchiveEntry entry = entries.nextElement(); if (!entry.isDirectory()) { final String name = entry.getName().toLowerCase(); if (name.endsWith(".class")) { isJar = true; break; } } } } } catch (IOException ex) { LOGGER.debug("Unable to unzip zip file '{}'", dependency.getFilePath(), ex); } finally { ZipFile.closeQuietly(zip); } return isJar; } }
dependency-check-core/src/main/java/org/owasp/dependencycheck/analyzer/ArchiveAnalyzer.java
/* * This file is part of dependency-check-core. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * Copyright (c) 2013 Jeremy Long. All Rights Reserved. */ package org.owasp.dependencycheck.analyzer; import org.apache.commons.compress.archivers.ArchiveEntry; import org.apache.commons.compress.archivers.ArchiveInputStream; import org.apache.commons.compress.archivers.tar.TarArchiveInputStream; import org.apache.commons.compress.archivers.zip.ZipArchiveEntry; import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream; import org.apache.commons.compress.archivers.zip.ZipFile; import org.apache.commons.compress.compressors.CompressorInputStream; import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream; import org.apache.commons.compress.compressors.bzip2.BZip2Utils; import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream; import org.apache.commons.compress.compressors.gzip.GzipUtils; import org.owasp.dependencycheck.Engine; import org.owasp.dependencycheck.analyzer.exception.AnalysisException; import org.owasp.dependencycheck.analyzer.exception.ArchiveExtractionException; import org.owasp.dependencycheck.dependency.Dependency; import org.owasp.dependencycheck.utils.FileFilterBuilder; import org.owasp.dependencycheck.utils.FileUtils; import org.owasp.dependencycheck.utils.Settings; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.*; import java.util.*; /** * <p> * An analyzer that extracts files from archives and ensures any supported files contained within the archive are added to the * dependency list.</p> * * @author Jeremy Long */ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer { /** * The logger. */ private static final Logger LOGGER = LoggerFactory.getLogger(ArchiveAnalyzer.class); /** * The buffer size to use when extracting files from the archive. */ private static final int BUFFER_SIZE = 4096; /** * The count of directories created during analysis. This is used for creating temporary directories. */ private static int dirCount = 0; /** * The parent directory for the individual directories per archive. */ private File tempFileLocation = null; /** * The max scan depth that the analyzer will recursively extract nested archives. */ private static final int MAX_SCAN_DEPTH = Settings.getInt("archive.scan.depth", 3); /** * Tracks the current scan/extraction depth for nested archives. */ private int scanDepth = 0; //<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer"> /** * The name of the analyzer. */ private static final String ANALYZER_NAME = "Archive Analyzer"; /** * The phase that this analyzer is intended to run in. */ private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.INITIAL; /** * The set of things we can handle with Zip methods */ private static final Set<String> ZIPPABLES = newHashSet("zip", "ear", "war", "jar", "sar", "apk", "nupkg"); /** * The set of file extensions supported by this analyzer. 
Note for developers, any additions to this list will need * to be explicitly handled in {@link #extractFiles(File, File, Engine)}. */ private static final Set<String> EXTENSIONS = newHashSet("tar", "gz", "tgz", "bz2", "tbz2"); /** * Detects files with extensions to remove from the engine's collection of dependencies. */ private static final FileFilter REMOVE_FROM_ANALYSIS = FileFilterBuilder.newInstance().addExtensions("zip", "tar", "gz", "tgz", "bz2", "tbz2").build(); static { final String additionalZipExt = Settings.getString(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS); if (additionalZipExt != null) { final Set<String> ext = new HashSet<String>(Collections.singletonList(additionalZipExt)); ZIPPABLES.addAll(ext); } EXTENSIONS.addAll(ZIPPABLES); } /** * The file filter used to filter supported files. */ private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(EXTENSIONS).build(); @Override protected FileFilter getFileFilter() { return FILTER; } /** * Detects files with .zip extension. */ private static final FileFilter ZIP_FILTER = FileFilterBuilder.newInstance().addExtensions("zip").build(); /** * Returns the name of the analyzer. * * @return the name of the analyzer. */ @Override public String getName() { return ANALYZER_NAME; } /** * Returns the phase that the analyzer is intended to run in. * * @return the phase that the analyzer is intended to run in. */ @Override public AnalysisPhase getAnalysisPhase() { return ANALYSIS_PHASE; } //</editor-fold> /** * Returns the key used in the properties file to reference the analyzer's enabled property. * * @return the analyzer's enabled property setting key */ @Override protected String getAnalyzerEnabledSettingKey() { return Settings.KEYS.ANALYZER_ARCHIVE_ENABLED; } /** * The initialize method does nothing for this Analyzer. * * @throws Exception is thrown if there is an exception deleting or creating temporary files */ @Override public void initializeFileTypeAnalyzer() throws Exception { final File baseDir = Settings.getTempDirectory(); tempFileLocation = File.createTempFile("check", "tmp", baseDir); if (!tempFileLocation.delete()) { final String msg = String.format("Unable to delete temporary file '%s'.", tempFileLocation.getAbsolutePath()); throw new AnalysisException(msg); } if (!tempFileLocation.mkdirs()) { final String msg = String.format("Unable to create directory '%s'.", tempFileLocation.getAbsolutePath()); throw new AnalysisException(msg); } } /** * The close method deletes any temporary files and directories created during analysis. * * @throws Exception thrown if there is an exception deleting temporary files */ @Override public void close() throws Exception { if (tempFileLocation != null && tempFileLocation.exists()) { LOGGER.debug("Attempting to delete temporary files"); final boolean success = FileUtils.delete(tempFileLocation); if (!success && tempFileLocation != null && tempFileLocation.exists() && tempFileLocation.list().length > 0) { LOGGER.warn("Failed to delete some temporary files, see the log for more details"); } } } /** * Analyzes a given dependency. If the dependency is an archive, such as a WAR or EAR, the contents are extracted, scanned, * and added to the list of dependencies within the engine. 
* * @param dependency the dependency to analyze * @param engine the engine scanning * @throws AnalysisException thrown if there is an analysis exception */ @Override public void analyzeFileType(Dependency dependency, Engine engine) throws AnalysisException { final File f = new File(dependency.getActualFilePath()); final File tmpDir = getNextTempDirectory(); extractFiles(f, tmpDir, engine); //make a copy final Set<Dependency> dependencySet = findMoreDependencies(engine, tmpDir); if (!dependencySet.isEmpty()) { for (Dependency d : dependencySet) { //fix the dependency's display name and path final String displayPath = String.format("%s%s", dependency.getFilePath(), d.getActualFilePath().substring(tmpDir.getAbsolutePath().length())); final String displayName = String.format("%s: %s", dependency.getFileName(), d.getFileName()); d.setFilePath(displayPath); d.setFileName(displayName); //TODO - can we get more evidence from the parent? EAR contains module name, etc. //analyze the dependency (i.e. extract files) if it is a supported type. if (this.accept(d.getActualFile()) && scanDepth < MAX_SCAN_DEPTH) { scanDepth += 1; analyze(d, engine); scanDepth -= 1; } } } if (REMOVE_FROM_ANALYSIS.accept(dependency.getActualFile())) { addDisguisedJarsToDependencies(dependency, engine); engine.getDependencies().remove(dependency); } Collections.sort(engine.getDependencies()); } private void addDisguisedJarsToDependencies(Dependency dependency, Engine engine) throws AnalysisException { if (ZIP_FILTER.accept(dependency.getActualFile()) && isZipFileActuallyJarFile(dependency)) { final File tdir = getNextTempDirectory(); final String fileName = dependency.getFileName(); LOGGER.info(String.format("The zip file '%s' appears to be a JAR file, making a copy and analyzing it as a JAR.", fileName)); final File tmpLoc = new File(tdir, fileName.substring(0, fileName.length() - 3) + "jar"); try { org.apache.commons.io.FileUtils.copyFile(tdir, tmpLoc); final Set<Dependency> dependencySet = findMoreDependencies(engine, tmpLoc); if (!dependencySet.isEmpty()) { if (dependencySet.size() != 1) { LOGGER.info("Deep copy of ZIP to JAR file resulted in more than one dependency?"); } for (Dependency d : dependencySet) { //fix the dependency's display name and path d.setFilePath(dependency.getFilePath()); d.setDisplayFileName(dependency.getFileName()); } } } catch (IOException ex) { LOGGER.debug("Unable to perform deep copy on '{}'", dependency.getActualFile().getPath(), ex); } } } private static final Set<Dependency> EMPTY_DEPENDENCY_SET = Collections.emptySet(); private Set<Dependency> findMoreDependencies(Engine engine, File tmpDir) { //make a copy List<Dependency> dependencies = new ArrayList<Dependency>(engine.getDependencies()); engine.scan(tmpDir); List<Dependency> newDependencies = engine.getDependencies(); final boolean sizeChanged = dependencies.size() != newDependencies.size(); final Set<Dependency> dependencySet; if (sizeChanged) { //get the new dependencies dependencySet = new HashSet<Dependency>(); dependencySet.addAll(newDependencies); dependencySet.removeAll(dependencies); } else { dependencySet = EMPTY_DEPENDENCY_SET; } return dependencySet; } /** * Retrieves the next temporary directory to extract an archive too. 
* * @return a directory * @throws AnalysisException thrown if unable to create temporary directory */ private File getNextTempDirectory() throws AnalysisException { dirCount += 1; final File directory = new File(tempFileLocation, String.valueOf(dirCount)); //getting an exception for some directories not being able to be created; might be because the directory already exists? if (directory.exists()) { return getNextTempDirectory(); } if (!directory.mkdirs()) { final String msg = String.format("Unable to create temp directory '%s'.", directory.getAbsolutePath()); throw new AnalysisException(msg); } return directory; } /** * Extracts the contents of an archive into the specified directory. * * @param archive an archive file such as a WAR or EAR * @param destination a directory to extract the contents to * @param engine the scanning engine * @throws AnalysisException thrown if the archive is not found */ private void extractFiles(File archive, File destination, Engine engine) throws AnalysisException { if (archive == null || destination == null) { return; } FileInputStream fis; try { fis = new FileInputStream(archive); } catch (FileNotFoundException ex) { LOGGER.debug("", ex); throw new AnalysisException("Archive file was not found.", ex); } final String archiveExt = FileUtils.getFileExtension(archive.getName()).toLowerCase(); try { if (ZIPPABLES.contains(archiveExt)) { extractArchive(new ZipArchiveInputStream(new BufferedInputStream(fis)), destination, engine); } else if ("tar".equals(archiveExt)) { extractArchive(new TarArchiveInputStream(new BufferedInputStream(fis)), destination, engine); } else if ("gz".equals(archiveExt) || "tgz".equals(archiveExt)) { final String uncompressedName = GzipUtils.getUncompressedFilename(archive.getName()); final File f = new File(destination, uncompressedName); if (engine.accept(f)) { decompressFile(new GzipCompressorInputStream(new BufferedInputStream(fis)), f); } } else if ("bz2".equals(archiveExt) || "tbz2".equals(archiveExt)) { final String uncompressedName = BZip2Utils.getUncompressedFilename(archive.getName()); final File f = new File(destination, uncompressedName); if (engine.accept(f)) { decompressFile(new BZip2CompressorInputStream(new BufferedInputStream(fis)), f); } } } catch (ArchiveExtractionException ex) { LOGGER.warn("Exception extracting archive '{}'.", archive.getName()); LOGGER.debug("", ex); } catch (IOException ex) { LOGGER.warn("Exception reading archive '{}'.", archive.getName()); LOGGER.debug("", ex); } finally { try { fis.close(); } catch (IOException ex) { LOGGER.debug("", ex); } } } /** * Extracts files from an archive. 
* * @param input the archive to extract files from * @param destination the location to write the files too * @param engine the dependency-check engine * @throws ArchiveExtractionException thrown if there is an exception extracting files from the archive */ private void extractArchive(ArchiveInputStream input, File destination, Engine engine) throws ArchiveExtractionException { ArchiveEntry entry; try { while ((entry = input.getNextEntry()) != null) { if (entry.isDirectory()) { final File d = new File(destination, entry.getName()); if (!d.exists()) { if (!d.mkdirs()) { final String msg = String.format("Unable to create directory '%s'.", d.getAbsolutePath()); throw new AnalysisException(msg); } } } else { final File file = new File(destination, entry.getName()); if (engine.accept(file)) { LOGGER.debug("Extracting '{}'", file.getPath()); BufferedOutputStream bos = null; FileOutputStream fos = null; try { final File parent = file.getParentFile(); if (!parent.isDirectory()) { if (!parent.mkdirs()) { final String msg = String.format("Unable to build directory '%s'.", parent.getAbsolutePath()); throw new AnalysisException(msg); } } fos = new FileOutputStream(file); bos = new BufferedOutputStream(fos, BUFFER_SIZE); int count; final byte[] data = new byte[BUFFER_SIZE]; while ((count = input.read(data, 0, BUFFER_SIZE)) != -1) { bos.write(data, 0, count); } bos.flush(); } catch (FileNotFoundException ex) { LOGGER.debug("", ex); final String msg = String.format("Unable to find file '%s'.", file.getName()); throw new AnalysisException(msg, ex); } catch (IOException ex) { LOGGER.debug("", ex); final String msg = String.format("IO Exception while parsing file '%s'.", file.getName()); throw new AnalysisException(msg, ex); } finally { if (bos != null) { try { bos.close(); } catch (IOException ex) { LOGGER.trace("", ex); } } if (fos != null) { try { fos.close(); } catch (IOException ex) { LOGGER.trace("", ex); } } } } } } } catch (IOException ex) { throw new ArchiveExtractionException(ex); } catch (Throwable ex) { throw new ArchiveExtractionException(ex); } finally { if (input != null) { try { input.close(); } catch (IOException ex) { LOGGER.trace("", ex); } } } } /** * Decompresses a file. * * @param inputStream the compressed file * @param outputFile the location to write the decompressed file * @throws ArchiveExtractionException thrown if there is an exception decompressing the file */ private void decompressFile(CompressorInputStream inputStream, File outputFile) throws ArchiveExtractionException { LOGGER.debug("Decompressing '{}'", outputFile.getPath()); FileOutputStream out = null; try { out = new FileOutputStream(outputFile); final byte[] buffer = new byte[BUFFER_SIZE]; int n; // = 0 while (-1 != (n = inputStream.read(buffer))) { out.write(buffer, 0, n); } } catch (FileNotFoundException ex) { LOGGER.debug("", ex); throw new ArchiveExtractionException(ex); } catch (IOException ex) { LOGGER.debug("", ex); throw new ArchiveExtractionException(ex); } finally { if (out != null) { try { out.close(); } catch (IOException ex) { LOGGER.trace("", ex); } } } } /** * Attempts to determine if a zip file is actually a JAR file. 
* * @param dependency the dependency to check * @return true if the dependency appears to be a JAR file; otherwise false */ private boolean isZipFileActuallyJarFile(Dependency dependency) { boolean isJar = false; ZipFile zip = null; try { zip = new ZipFile(dependency.getActualFilePath()); if (zip.getEntry("META-INF/MANIFEST.MF") != null || zip.getEntry("META-INF/maven") != null) { final Enumeration<ZipArchiveEntry> entries = zip.getEntries(); while (entries.hasMoreElements()) { final ZipArchiveEntry entry = entries.nextElement(); if (!entry.isDirectory()) { final String name = entry.getName().toLowerCase(); if (name.endsWith(".class")) { isJar = true; break; } } } } } catch (IOException ex) { LOGGER.debug("Unable to unzip zip file '{}'", dependency.getFilePath(), ex); } finally { ZipFile.closeQuietly(zip); } return isJar; } }
290: Further refactoring for readability.
dependency-check-core/src/main/java/org/owasp/dependencycheck/analyzer/ArchiveAnalyzer.java
290: Further refactoring for readability.
Java
apache-2.0
c07e96dc732f7eed19827bf5427a71532ebb5ab5
0
ChangeVision/astah-uml2c-plugin
package com.change_vision.astah.extension.plugin.uml2c.actions; import java.io.File; import java.io.IOException; import javax.swing.JOptionPane; import org.apache.velocity.exception.ResourceNotFoundException; import com.change_vision.astah.extension.plugin.uml2c.Messages; import com.change_vision.astah.extension.plugin.uml2c.cmodule.AbstractCModule; import com.change_vision.astah.extension.plugin.uml2c.cmodule.CModuleFactory; import com.change_vision.astah.extension.plugin.uml2c.codegenerator.CodeGenerator; import com.change_vision.jude.api.inf.AstahAPI; import com.change_vision.jude.api.inf.exception.InvalidUsingException; import com.change_vision.jude.api.inf.exception.ProjectNotFoundException; import com.change_vision.jude.api.inf.model.IClass; import com.change_vision.jude.api.inf.model.IElement; import com.change_vision.jude.api.inf.model.IModel; import com.change_vision.jude.api.inf.project.ProjectAccessor; import com.change_vision.jude.api.inf.ui.IPluginActionDelegate; import com.change_vision.jude.api.inf.ui.IWindow; import com.change_vision.jude.api.inf.view.IDiagramViewManager; import com.change_vision.jude.api.inf.view.IViewManager; public abstract class GenerateCodeAction implements IPluginActionDelegate { public Object run(IWindow window) throws UnExpectedException { try { AstahAPI api = AstahAPI.getAstahAPI(); ProjectAccessor projectAccessor = api.getProjectAccessor(); @SuppressWarnings("unused") IModel iCurrentProject = projectAccessor.getProject(); IElement[] iElements = getSelectedElements(api); // Check the selected elements if ((iElements.length != 1) || !(iElements[0] instanceof IClass)) { JOptionPane.showMessageDialog(window.getParent(), Messages.getMessage("message.select_class"), Messages.getMessage("title.select_class"), JOptionPane.WARNING_MESSAGE); return null; } IClass iClass = (IClass) iElements[0]; AbstractCModule cModule = CModuleFactory.getCModule(iClass); System.out.printf("Module is %s.\n", cModule.getClass().getSimpleName()); System.out.printf("path is %s.\n", projectAccessor.getProjectPath()); String outputDirPath = new File(projectAccessor.getProjectPath()).getParent(); generateCode(cModule, outputDirPath); } catch (ProjectNotFoundException e) { JOptionPane.showMessageDialog(window.getParent(), Messages.getMessage("message.project_not_found"), Messages.getMessage("title.project_not_found"), JOptionPane.WARNING_MESSAGE); } catch (ResourceNotFoundException e) { JOptionPane.showMessageDialog(window.getParent(), Messages.getMessage("message.not_found_template", CodeGenerator.getAstahConfigPath(), e.getLocalizedMessage()), Messages.getMessage("title.not_found_template"), JOptionPane.WARNING_MESSAGE); } catch (Throwable e) { JOptionPane.showMessageDialog(window.getParent(), Messages.getMessage("message.unexpected_exception", e.getLocalizedMessage(), e.getStackTrace()), Messages.getMessage("title.unexpected_exception"), JOptionPane.ERROR_MESSAGE); } return null; } private IElement[] getSelectedElements(AstahAPI api) throws InvalidUsingException { IViewManager iViewManager = api.getViewManager(); IDiagramViewManager iDiagramViewManager = iViewManager.getDiagramViewManager(); return iDiagramViewManager.getSelectedElements(); } protected abstract void generateCode(AbstractCModule cModule, String outputDirPath) throws IOException; }
src/main/java/com/change_vision/astah/extension/plugin/uml2c/actions/GenerateCodeAction.java
package com.change_vision.astah.extension.plugin.uml2c.actions; import java.io.File; import java.io.IOException; import javax.swing.JOptionPane; import org.apache.velocity.exception.ResourceNotFoundException; import com.change_vision.astah.extension.plugin.uml2c.Messages; import com.change_vision.astah.extension.plugin.uml2c.cmodule.AbstractCModule; import com.change_vision.astah.extension.plugin.uml2c.cmodule.CModuleFactory; import com.change_vision.astah.extension.plugin.uml2c.codegenerator.CodeGenerator; import com.change_vision.jude.api.inf.AstahAPI; import com.change_vision.jude.api.inf.exception.ProjectNotFoundException; import com.change_vision.jude.api.inf.model.IClass; import com.change_vision.jude.api.inf.model.IElement; import com.change_vision.jude.api.inf.model.IModel; import com.change_vision.jude.api.inf.project.ProjectAccessor; import com.change_vision.jude.api.inf.ui.IPluginActionDelegate; import com.change_vision.jude.api.inf.ui.IWindow; import com.change_vision.jude.api.inf.view.IDiagramViewManager; import com.change_vision.jude.api.inf.view.IViewManager; public abstract class GenerateCodeAction implements IPluginActionDelegate { public Object run(IWindow window) throws UnExpectedException { try { AstahAPI api = AstahAPI.getAstahAPI(); ProjectAccessor projectAccessor = api.getProjectAccessor(); @SuppressWarnings("unused") IModel iCurrentProject = projectAccessor.getProject(); // Get the selected model elements IViewManager iViewManager = api.getViewManager(); IDiagramViewManager iDiagramViewManager = iViewManager.getDiagramViewManager(); IElement iElements[] = iDiagramViewManager.getSelectedElements(); // Check that exactly one model element is selected and that it is a class if ((iElements.length != 1) || !(iElements[0] instanceof IClass)) { JOptionPane.showMessageDialog(window.getParent(), Messages.getMessage("message.select_class"), Messages.getMessage("title.select_class"), JOptionPane.WARNING_MESSAGE); return null; } IClass iClass = (IClass) iElements[0]; AbstractCModule cModule = CModuleFactory.getCModule(iClass); System.out.printf("Module is %s.\n", cModule.getClass().getSimpleName()); String outputDirPath = new File(projectAccessor.getProjectPath()).getParent(); generateCode(cModule, outputDirPath); } catch (ProjectNotFoundException e) { JOptionPane.showMessageDialog(window.getParent(), Messages.getMessage("message.project_not_found"), Messages.getMessage("title.project_not_found"), JOptionPane.WARNING_MESSAGE); } catch (ResourceNotFoundException e) { JOptionPane.showMessageDialog(window.getParent(), Messages.getMessage("message.not_found_template", CodeGenerator.getAstahConfigPath(), e.getLocalizedMessage()), Messages.getMessage("title.not_found_template"), JOptionPane.WARNING_MESSAGE); } catch (Exception e) { JOptionPane.showMessageDialog(window.getParent(), Messages.getMessage("message.unexpected_exception", e.getLocalizedMessage(), e.getStackTrace()), Messages.getMessage("title.unexpected_exception"), JOptionPane.ERROR_MESSAGE); } return null; } protected abstract void generateCode(AbstractCModule cModule, String outputDirPath) throws IOException; }
Refactoring GenerateCodeAction
src/main/java/com/change_vision/astah/extension/plugin/uml2c/actions/GenerateCodeAction.java
Refactoring GenerateCodeAction
Java
apache-2.0
70d69c8534bc9bdfb1c6069f6c24f347e9d62dda
0
kantega/Flyt-cms,kantega/Flyt-cms,kantega/Flyt-cms
/* * Copyright 2009 Kantega AS * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package no.kantega.publishing.admin.ajax; import no.kantega.commons.client.util.RequestParameters; import no.kantega.publishing.common.service.TopicMapService; import no.kantega.publishing.topicmaps.data.Topic; import no.kantega.publishing.topicmaps.data.TopicMap; import org.springframework.web.servlet.ModelAndView; import org.springframework.web.servlet.mvc.Controller; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.util.HashMap; import java.util.List; import java.util.Map; public class AutocompleteTopicsAction implements Controller { public ModelAndView handleRequest(HttpServletRequest request, HttpServletResponse response) throws Exception { Map<String, Object> model = new HashMap<String, Object>(); RequestParameters param = new RequestParameters(request); int topicMapId = param.getInt("topicMapId"); String term = param.getString("term"); if (term != null && term.trim().length() > 0) { TopicMapService tms = new TopicMapService(request); List<Topic> topics = tms.getTopicsByNameAndTopicMapId(term, topicMapId); List<TopicMap> topicMaps = tms.getTopicMaps(); if (topicMaps.size() > 1) { for (Topic topic : topics) { for (TopicMap topicMap : topicMaps) { if (topicMap.getId() == topic.getTopicMapId()) { topic.setBaseName(topic.getBaseName() + " (" + tms.getTopicMap(topic.getTopicMapId()).getName() + ")"); break; } } } } model.put("topics", topics); } return new ModelAndView("/WEB-INF/jsp/ajax/searchresult-topics.jsp", model); } }
modules/core/src/java/no/kantega/publishing/admin/ajax/AutocompleteTopicsAction.java
/* * Copyright 2009 Kantega AS * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package no.kantega.publishing.admin.ajax; import no.kantega.commons.client.util.RequestParameters; import no.kantega.publishing.common.service.TopicMapService; import no.kantega.publishing.topicmaps.data.Topic; import no.kantega.publishing.topicmaps.data.TopicMap; import org.springframework.web.servlet.ModelAndView; import org.springframework.web.servlet.mvc.Controller; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.util.HashMap; import java.util.List; import java.util.Map; public class AutocompleteTopicsAction implements Controller { public ModelAndView handleRequest(HttpServletRequest request, HttpServletResponse response) throws Exception { Map<String, Object> model = new HashMap<String, Object>(); RequestParameters param = new RequestParameters(request); int topicMapId = param.getInt("topicMapId"); String term = param.getString("term"); if (term != null && term.trim().length() > 0) { TopicMapService tms = new TopicMapService(request); List<Topic> topics = tms.getTopicsByNameAndTopicMapId(term, topicMapId); List<TopicMap> topicMaps = tms.getTopicMaps(); if (topicMaps.size() > 1) { for (Topic topic : topics) { for (TopicMap topicMap : topicMaps) { if (topicMap.getId() == topic.getTopicMapId()) { topic.setBaseName(topic.getBaseName() + " ( " + tms.getTopicMap(topic.getTopicMapId()).getName() + " )"); break; } } } } model.put("topics", topics); } return new ModelAndView("/WEB-INF/jsp/ajax/searchresult-topics.jsp", model); } }
AP-1261: In Edit mode, the select topic autocomplete list should display the topicmap name git-svn-id: 8def386c603904b39326d3fc08add479b8279298@3070 fd808399-8219-4f14-9d4c-37719d9ec93d
modules/core/src/java/no/kantega/publishing/admin/ajax/AutocompleteTopicsAction.java
AP-1261: In Edit mode, the select topic autocomplete list should display the topicmap name
Java
apache-2.0
d575a3cc598a3ec0690e210c44817d23ab8890ef
0
opennetworkinglab/onos,opennetworkinglab/onos,oplinkoms/onos,oplinkoms/onos,gkatsikas/onos,gkatsikas/onos,opennetworkinglab/onos,opennetworkinglab/onos,oplinkoms/onos,oplinkoms/onos,oplinkoms/onos,opennetworkinglab/onos,gkatsikas/onos,gkatsikas/onos,oplinkoms/onos,opennetworkinglab/onos,oplinkoms/onos,gkatsikas/onos,gkatsikas/onos
/* * Copyright 2015-present Open Networking Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.ovsdb.controller.driver; import com.fasterxml.jackson.databind.JsonNode; import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.SettableFuture; import io.netty.channel.Channel; import org.onlab.packet.IpAddress; import org.onosproject.net.DeviceId; import org.onosproject.net.PortNumber; import org.onosproject.net.behaviour.ControlProtocolVersion; import org.onosproject.net.behaviour.ControllerInfo; import org.onosproject.net.behaviour.DeviceCpuStats; import org.onosproject.net.behaviour.DeviceMemoryStats; import org.onosproject.net.behaviour.MirroringName; import org.onosproject.net.behaviour.MirroringStatistics; import org.onosproject.net.behaviour.QosId; import org.onosproject.net.behaviour.QueueDescription; import org.onosproject.net.behaviour.QueueId; import org.onosproject.ovsdb.controller.OvsdbBridge; import org.onosproject.ovsdb.controller.OvsdbClientService; import org.onosproject.ovsdb.controller.OvsdbInterface; import org.onosproject.ovsdb.controller.OvsdbMirror; import org.onosproject.ovsdb.controller.OvsdbNodeId; import org.onosproject.ovsdb.controller.OvsdbPort; import org.onosproject.ovsdb.controller.OvsdbPortName; import org.onosproject.ovsdb.controller.OvsdbPortNumber; import org.onosproject.ovsdb.controller.OvsdbQos; import org.onosproject.ovsdb.controller.OvsdbQueue; import org.onosproject.ovsdb.controller.OvsdbRowStore; import org.onosproject.ovsdb.controller.OvsdbStore; import org.onosproject.ovsdb.controller.OvsdbTableStore; import org.onosproject.ovsdb.rfc.exception.ColumnSchemaNotFoundException; import org.onosproject.ovsdb.rfc.exception.VersionMismatchException; import org.onosproject.ovsdb.rfc.jsonrpc.Callback; import org.onosproject.ovsdb.rfc.message.OperationResult; import org.onosproject.ovsdb.rfc.message.TableUpdates; import org.onosproject.ovsdb.rfc.notation.Column; import org.onosproject.ovsdb.rfc.notation.Condition; import org.onosproject.ovsdb.rfc.notation.Mutation; import org.onosproject.ovsdb.rfc.notation.OvsdbMap; import org.onosproject.ovsdb.rfc.notation.OvsdbSet; import org.onosproject.ovsdb.rfc.notation.Row; import org.onosproject.ovsdb.rfc.notation.Uuid; import org.onosproject.ovsdb.rfc.operations.Delete; import org.onosproject.ovsdb.rfc.operations.Insert; import org.onosproject.ovsdb.rfc.operations.Mutate; import org.onosproject.ovsdb.rfc.operations.Operation; import org.onosproject.ovsdb.rfc.operations.Update; import org.onosproject.ovsdb.rfc.schema.ColumnSchema; import org.onosproject.ovsdb.rfc.schema.DatabaseSchema; import 
org.onosproject.ovsdb.rfc.schema.TableSchema; import org.onosproject.ovsdb.rfc.table.Bridge; import org.onosproject.ovsdb.rfc.table.Controller; import org.onosproject.ovsdb.rfc.table.Interface; import org.onosproject.ovsdb.rfc.table.Mirror; import org.onosproject.ovsdb.rfc.table.OvsdbTable; import org.onosproject.ovsdb.rfc.table.Port; import org.onosproject.ovsdb.rfc.table.Qos; import org.onosproject.ovsdb.rfc.table.Queue; import org.onosproject.ovsdb.rfc.table.TableGenerator; import org.onosproject.ovsdb.rfc.utils.ConditionUtil; import org.onosproject.ovsdb.rfc.utils.FromJsonUtil; import org.onosproject.ovsdb.rfc.utils.JsonRpcWriterUtil; import org.onosproject.ovsdb.rfc.utils.MutationUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.net.InetSocketAddress; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; import java.util.stream.Collectors; import static org.onosproject.ovsdb.controller.OvsdbConstant.BRIDGE; import static org.onosproject.ovsdb.controller.OvsdbConstant.BRIDGES; import static org.onosproject.ovsdb.controller.OvsdbConstant.BRIDGE_CONTROLLER; import static org.onosproject.ovsdb.controller.OvsdbConstant.CONTROLLER; import static org.onosproject.ovsdb.controller.OvsdbConstant.DATABASENAME; import static org.onosproject.ovsdb.controller.OvsdbConstant.EXTERNAL_ID; import static org.onosproject.ovsdb.controller.OvsdbConstant.EXTERNAL_ID_INTERFACE_ID; import static org.onosproject.ovsdb.controller.OvsdbConstant.INTERFACE; import static org.onosproject.ovsdb.controller.OvsdbConstant.INTERFACES; import static org.onosproject.ovsdb.controller.OvsdbConstant.MIRROR; import static org.onosproject.ovsdb.controller.OvsdbConstant.MIRRORS; import static org.onosproject.ovsdb.controller.OvsdbConstant.OFPORT; import static org.onosproject.ovsdb.controller.OvsdbConstant.OFPORT_ERROR; import static org.onosproject.ovsdb.controller.OvsdbConstant.PORT; import static org.onosproject.ovsdb.controller.OvsdbConstant.PORTS; import static org.onosproject.ovsdb.controller.OvsdbConstant.PORT_QOS; import static org.onosproject.ovsdb.controller.OvsdbConstant.QOS; import static org.onosproject.ovsdb.controller.OvsdbConstant.QOS_EXTERNAL_ID_KEY; import static org.onosproject.ovsdb.controller.OvsdbConstant.QUEUE; import static org.onosproject.ovsdb.controller.OvsdbConstant.QUEUES; import static org.onosproject.ovsdb.controller.OvsdbConstant.QUEUE_EXTERNAL_ID_KEY; import static org.onosproject.ovsdb.controller.OvsdbConstant.TYPEVXLAN; import static org.onosproject.ovsdb.controller.OvsdbConstant.UUID; /** * A representation of an ovsdb client.
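* Rows received through OVSDB monitor updates are cached in a local OvsdbStore, and configuration changes are issued as transact operations built from the cached database schema.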
*/ public class DefaultOvsdbClient implements OvsdbProviderService, OvsdbClientService { private static final int TRANSACTCONFIG_TIMEOUT = 3; //sec private static final int OFPORT_ERROR_COMPARISON = 0; private final Logger log = LoggerFactory.getLogger(DefaultOvsdbClient.class); private Channel channel; private OvsdbAgent agent; private boolean connected; private OvsdbNodeId nodeId; private Callback monitorCallBack; private OvsdbStore ovsdbStore = new OvsdbStore(); private final Map<String, String> requestMethod = Maps.newHashMap(); private final Map<String, SettableFuture<? extends Object>> requestResult = Maps.newHashMap(); private final Map<String, DatabaseSchema> schema = Maps.newHashMap(); /** * Creates an OvsdbClient. * * @param nodeId ovsdb node id */ public DefaultOvsdbClient(OvsdbNodeId nodeId) { this.nodeId = nodeId; } @Override public OvsdbNodeId nodeId() { return nodeId; } @Override public void setAgent(OvsdbAgent agent) { if (this.agent == null) { this.agent = agent; } } @Override public void setChannel(Channel channel) { this.channel = channel; } @Override public void setConnection(boolean connected) { this.connected = connected; } @Override public boolean isConnected() { return this.connected; } @Override public void nodeAdded() { this.agent.addConnectedNode(nodeId, this); } @Override public void nodeRemoved() { this.agent.removeConnectedNode(nodeId); channel.disconnect(); } /** * Gets the ovsdb table store. * * @param dbName the ovsdb database name * @return the ovsdb table store, or null if it is not found */ private OvsdbTableStore getTableStore(String dbName) { if (ovsdbStore == null) { return null; } return ovsdbStore.getOvsdbTableStore(dbName); } /** * Gets the ovsdb row store. * * @param dbName the ovsdb database name * @param tableName the ovsdb table name * @return the ovsdb row store; an empty store if the table has no rows, or null if the table store is not found */ private OvsdbRowStore getRowStore(String dbName, String tableName) { OvsdbTableStore tableStore = getTableStore(dbName); if (tableStore == null) { return null; } OvsdbRowStore rowStore = tableStore.getRows(tableName); if (rowStore == null) { rowStore = new OvsdbRowStore(); } return rowStore; } /** * Gets the ovsdb row. * * @param dbName the ovsdb database name * @param tableName the ovsdb table name * @param uuid the key of the row * @return the row, or null if it is not found */ @Override public Row getRow(String dbName, String tableName, String uuid) { OvsdbTableStore tableStore = getTableStore(dbName); if (tableStore == null) { return null; } OvsdbRowStore rowStore = tableStore.getRows(tableName); if (rowStore == null) { return null; } return rowStore.getRow(uuid); } @Override public void removeRow(String dbName, String tableName, String uuid) { OvsdbTableStore tableStore = getTableStore(dbName); if (tableStore == null) { return; } OvsdbRowStore rowStore = tableStore.getRows(tableName); if (rowStore == null) { return; } rowStore.deleteRow(uuid); } @Override public void updateOvsdbStore(String dbName, String tableName, String uuid, Row row) { OvsdbTableStore tableStore = ovsdbStore.getOvsdbTableStore(dbName); if (tableStore == null) { tableStore = new OvsdbTableStore(); } OvsdbRowStore rowStore = tableStore.getRows(tableName); if (rowStore == null) { rowStore = new OvsdbRowStore(); } rowStore.insertRow(uuid, row); tableStore.createOrUpdateTable(tableName, rowStore); ovsdbStore.createOrUpdateOvsdbStore(dbName, tableStore); } /** * Gets the Mirror uuid.
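* The lookup is a substring match of the given name against the cached Mirror table rows.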
* * @param mirrorName mirror name * @return the mirror uuid, or null if no matching mirror is found */ @Override public String getMirrorUuid(String mirrorName) { DatabaseSchema dbSchema = schema.get(DATABASENAME); OvsdbRowStore rowStore = getRowStore(DATABASENAME, MIRROR); if (rowStore == null) { log.warn("The mirror uuid is null"); return null; } ConcurrentMap<String, Row> mirrorTableRows = rowStore.getRowStore(); if (mirrorTableRows == null) { log.warn("The mirror uuid is null"); return null; } for (String uuid : mirrorTableRows.keySet()) { Mirror mirror = (Mirror) TableGenerator .getTable(dbSchema, mirrorTableRows.get(uuid), OvsdbTable.MIRROR); String name = mirror.getName(); if (name.contains(mirrorName)) { return uuid; } } log.warn("Mirroring not found"); return null; } /** * Gets mirrors of the device. * * @param deviceId target device id * @return the set of mirroring statistics, or null if the bridge or its mirrors cannot be found */ @Override public Set<MirroringStatistics> getMirroringStatistics(DeviceId deviceId) { Uuid bridgeUuid = getBridgeUuid(deviceId); if (bridgeUuid == null) { log.warn("Couldn't find bridge {} in {}", deviceId, nodeId.getIpAddress()); return null; } List<MirroringStatistics> mirrorings = getMirrorings(bridgeUuid); if (mirrorings == null) { log.warn("Couldn't find mirrors in {}", nodeId.getIpAddress()); return null; } return ImmutableSet.copyOf(mirrorings); } /** * Helper method which retrieves mirroring statistics using the bridge uuid. * * @param bridgeUuid the uuid of the bridge * @return the list of mirroring statistics. */ private List<MirroringStatistics> getMirrorings(Uuid bridgeUuid) { DatabaseSchema dbSchema = schema.get(DATABASENAME); if (dbSchema == null) { log.warn("Unable to retrieve dbSchema {}", DATABASENAME); return null; } OvsdbRowStore rowStore = getRowStore(DATABASENAME, BRIDGE); if (rowStore == null) { log.warn("Unable to retrieve rowStore {} of {}", BRIDGE, DATABASENAME); return null; } Row bridgeRow = rowStore.getRow(bridgeUuid.value()); Bridge bridge = (Bridge) TableGenerator.
getTable(dbSchema, bridgeRow, OvsdbTable.BRIDGE); Set<Uuid> mirroringsUuids = (Set<Uuid>) ((OvsdbSet) bridge .getMirrorsColumn().data()).set(); OvsdbRowStore mirrorRowStore = getRowStore(DATABASENAME, MIRROR); if (mirrorRowStore == null) { log.warn("Unable to retrieve rowStore {} of {}", MIRROR, DATABASENAME); return null; } List<MirroringStatistics> mirroringStatistics = new ArrayList<>(); ConcurrentMap<String, Row> mirrorTableRows = mirrorRowStore.getRowStore(); mirrorTableRows.forEach((key, row) -> { if (!mirroringsUuids.contains(Uuid.uuid(key))) { return; } Mirror mirror = (Mirror) TableGenerator .getTable(dbSchema, row, OvsdbTable.MIRROR); mirroringStatistics.add(MirroringStatistics.mirroringStatistics(mirror.getName(), (Map<String, Integer>) ((OvsdbMap) mirror .getStatisticsColumn().data()).map())); }); return ImmutableList.copyOf(mirroringStatistics); } @Override public String getPortUuid(String portName, String bridgeUuid) { DatabaseSchema dbSchema = schema.get(DATABASENAME); Row bridgeRow = getRow(DATABASENAME, BRIDGE, bridgeUuid); Bridge bridge = (Bridge) TableGenerator.getTable(dbSchema, bridgeRow, OvsdbTable.BRIDGE); if (bridge != null) { OvsdbSet setPorts = (OvsdbSet) bridge.getPortsColumn().data(); @SuppressWarnings("unchecked") Set<Uuid> ports = setPorts.set(); if (ports == null || ports.isEmpty()) { log.warn("The port uuid is null"); return null; } for (Uuid uuid : ports) { Row portRow = getRow(DATABASENAME, PORT, uuid.value()); Port port = (Port) TableGenerator.getTable(dbSchema, portRow, OvsdbTable.PORT); if (port != null && portName.equalsIgnoreCase(port.getName())) { return uuid.value(); } } } return null; } @Override public String getBridgeUuid(String bridgeName) { DatabaseSchema dbSchema = schema.get(DATABASENAME); OvsdbRowStore rowStore = getRowStore(DATABASENAME, BRIDGE); if (rowStore == null) { log.debug("The bridge uuid is null"); return null; } ConcurrentMap<String, Row> bridgeTableRows = rowStore.getRowStore(); if (bridgeTableRows == null) { log.debug("The bridge uuid is null"); return null; } for (String uuid : bridgeTableRows.keySet()) { Bridge bridge = (Bridge) TableGenerator .getTable(dbSchema, bridgeTableRows.get(uuid), OvsdbTable.BRIDGE); if (bridge.getName().equals(bridgeName)) { return uuid; } } return null; } private String getOvsUuid(String dbName) { OvsdbRowStore rowStore = getRowStore(DATABASENAME, DATABASENAME); if (rowStore == null) { log.debug("The bridge uuid is null"); return null; } ConcurrentMap<String, Row> ovsTableRows = rowStore.getRowStore(); if (ovsTableRows != null) { for (String uuid : ovsTableRows.keySet()) { Row row = ovsTableRows.get(uuid); String tableName = row.tableName(); if (tableName.equals(dbName)) { return uuid; } } } return null; } @Override public void createPort(String bridgeName, String portName) { String bridgeUuid = getBridgeUuid(bridgeName); if (bridgeUuid == null) { log.error("Can't find bridge {} in {}", bridgeName, nodeId.getIpAddress()); return; } DatabaseSchema dbSchema = schema.get(DATABASENAME); String portUuid = getPortUuid(portName, bridgeUuid); Port port = (Port) TableGenerator.createTable(dbSchema, OvsdbTable.PORT); port.setName(portName); if (portUuid == null) { insertConfig(PORT, UUID, BRIDGE, PORTS, bridgeUuid, port.getRow()); } } @Override public void dropPort(String bridgeName, String portName) { String bridgeUuid = getBridgeUuid(bridgeName); if (bridgeUuid == null) { log.error("Could not find Bridge {} in {}", bridgeName, nodeId); return; } String portUuid = getPortUuid(portName, bridgeUuid); if 
(portUuid != null) { log.info("Port {} deleted", portName); deleteConfig(PORT, UUID, portUuid, BRIDGE, PORTS, Uuid.uuid(portUuid)); } } @Override public boolean createBridge(OvsdbBridge ovsdbBridge) { DatabaseSchema dbSchema = schema.get(DATABASENAME); String ovsUuid = getOvsUuid(DATABASENAME); if (dbSchema == null || ovsUuid == null) { log.error("Can't find database Open_vSwitch"); return false; } Bridge bridge = (Bridge) TableGenerator.createTable(dbSchema, OvsdbTable.BRIDGE); bridge.setOtherConfig(ovsdbBridge.otherConfigs()); if (ovsdbBridge.failMode().isPresent()) { String failMode = ovsdbBridge.failMode().get().name().toLowerCase(); bridge.setFailMode(Sets.newHashSet(failMode)); } if (ovsdbBridge.datapathType().isPresent()) { String datapathType = ovsdbBridge.datapathType().get(); bridge.setDatapathType(datapathType); } if (ovsdbBridge.controlProtocols().isPresent()) { bridge.setProtocols(ovsdbBridge.controlProtocols().get().stream() .map(ControlProtocolVersion::toString) .collect(Collectors.toCollection(HashSet::new))); } String bridgeUuid = getBridgeUuid(ovsdbBridge.name()); if (bridgeUuid == null) { bridge.setName(ovsdbBridge.name()); bridgeUuid = insertConfig( BRIDGE, UUID, DATABASENAME, BRIDGES, ovsUuid, bridge.getRow()); } else { // update the bridge if it's already existing updateConfig(BRIDGE, UUID, bridgeUuid, bridge.getRow()); } if (bridgeUuid == null) { log.warn("Failed to create bridge {} on {}", ovsdbBridge.name(), nodeId); return false; } createPort(ovsdbBridge.name(), ovsdbBridge.name()); setControllersWithUuid(Uuid.uuid(bridgeUuid), ovsdbBridge.controllers()); log.info("Created bridge {}", ovsdbBridge.name()); return true; } @Override public ControllerInfo localController() { IpAddress ipAddress = IpAddress.valueOf(((InetSocketAddress) channel.localAddress()).getAddress()); return new ControllerInfo(ipAddress, OFPORT, "tcp"); } private void setControllersWithUuid(Uuid bridgeUuid, List<ControllerInfo> controllers) { DatabaseSchema dbSchema = schema.get(DATABASENAME); if (dbSchema == null) { log.debug("There is no schema"); return; } List<Controller> oldControllers = getControllers(bridgeUuid); if (oldControllers == null) { log.warn("There are no controllers"); return; } Set<ControllerInfo> newControllers = new HashSet<>(controllers); List<Controller> removeControllers = new ArrayList<>(); oldControllers.forEach(controller -> { ControllerInfo controllerInfo = new ControllerInfo((String) controller.getTargetColumn().data()); if (newControllers.contains(controllerInfo)) { newControllers.remove(controllerInfo); } else { removeControllers.add(controller); } }); OvsdbRowStore controllerRowStore = getRowStore(DATABASENAME, CONTROLLER); if (controllerRowStore == null) { log.debug("There is no controller table"); return; } newControllers.stream().map(c -> { Controller controller = (Controller) TableGenerator .createTable(dbSchema, OvsdbTable.CONTROLLER); controller.setTarget(c.target()); return controller; }).forEach(c -> insertConfig(CONTROLLER, UUID, BRIDGE, BRIDGE_CONTROLLER, bridgeUuid.value(), c.getRow())); removeControllers.forEach(c -> deleteConfig(CONTROLLER, UUID, c.getRow().uuid().value(), BRIDGE, BRIDGE_CONTROLLER, c.getRow().uuid())); } @Override public void setControllersWithDeviceId(DeviceId deviceId, List<ControllerInfo> controllers) { setControllersWithUuid(getBridgeUuid(deviceId), controllers); } @Override public void dropBridge(String bridgeName) { String bridgeUuid = getBridgeUuid(bridgeName); if (bridgeUuid == null) { log.warn("Could not find bridge in node {}",
nodeId.getIpAddress());
            return;
        }
        deleteConfig(BRIDGE, UUID, bridgeUuid, DATABASENAME, BRIDGES, Uuid.uuid(bridgeUuid));
    }

    @Override
    public void applyQos(PortNumber portNumber, String qosName) {
        DatabaseSchema dbSchema = schema.get(DATABASENAME);
        OvsdbRowStore portRowStore = getRowStore(DATABASENAME, PORT);
        if (portRowStore == null) {
            log.debug("The port row store is null");
            return;
        }
        OvsdbRowStore qosRowStore = getRowStore(DATABASENAME, QOS);
        if (qosRowStore == null) {
            log.debug("The qos row store is null");
            return;
        }

        // Unlike the Bridge or Port table, the Qos table has no unique
        // identifier other than its uuid. To keep the API general, a qos
        // created by ONOS therefore stores its name in the external_ids
        // column of the Qos table, and lookups are done against that key.
        ConcurrentMap<String, Row> qosTableRows = qosRowStore.getRowStore();
        ConcurrentMap<String, Row> portTableRows = portRowStore.getRowStore();

        Row qosRow = qosTableRows.values().stream().filter(r -> {
            OvsdbMap ovsdbMap = (OvsdbMap) (r.getColumn(EXTERNAL_ID).data());
            return qosName.equals(ovsdbMap.map().get(QOS_EXTERNAL_ID_KEY));
        }).findFirst().orElse(null);

        Row portRow = portTableRows.values().stream()
                .filter(r -> r.getColumn("name").data().equals(portNumber.name()))
                .findFirst().orElse(null);
        if (portRow != null && qosRow != null) {
            String qosId = qosRow.uuid().value();
            Uuid portUuid = portRow.uuid();
            Map<String, Column> columns = new HashMap<>();
            Row newPortRow = new Row(PORT, portUuid, columns);
            Port newport = new Port(dbSchema, newPortRow);
            columns.put(Port.PortColumn.QOS.columnName(), newport.getQosColumn());
            newport.setQos(Uuid.uuid(qosId));
            updateConfig(PORT, UUID, portUuid.value(), newport.getRow());
        }
    }

    @Override
    public void removeQos(PortNumber portNumber) {
        DatabaseSchema dbSchema = schema.get(DATABASENAME);
        OvsdbRowStore rowStore = getRowStore(DATABASENAME, PORT);
        if (rowStore == null) {
            log.debug("The port row store is null");
            return;
        }

        ConcurrentMap<String, Row> ovsTableRows = rowStore.getRowStore();
        Row portRow = ovsTableRows.values().stream()
                .filter(r -> r.getColumn("name").data().equals(portNumber.name()))
                .findFirst().orElse(null);
        if (portRow == null) {
            log.warn("Couldn't find port {} in ovsdb port table.", portNumber.name());
            return;
        }

        OvsdbSet ovsdbSet = ((OvsdbSet) portRow.getColumn(PORT_QOS).data());
        @SuppressWarnings("unchecked")
        Set<Uuid> qosIdSet = ovsdbSet.set();
        if (qosIdSet == null || qosIdSet.isEmpty()) {
            return;
        }
        Uuid qosUuid = (Uuid) qosIdSet.toArray()[0];

        Condition condition = ConditionUtil.isEqual(UUID, portRow.uuid());
        List<Condition> conditions = Lists.newArrayList(condition);
        Mutation mutation = MutationUtil.delete(PORT_QOS, qosUuid);
        List<Mutation> mutations = Lists.newArrayList(mutation);
        ArrayList<Operation> operations = Lists.newArrayList();
        Mutate mutate = new Mutate(dbSchema.getTableSchema(PORT), conditions, mutations);
        operations.add(mutate);
        transactConfig(DATABASENAME, operations);
    }

    @Override
    public boolean createQos(OvsdbQos ovsdbQos) {
        DatabaseSchema dbSchema = schema.get(DATABASENAME);
        Qos qos = (Qos) TableGenerator.createTable(dbSchema, OvsdbTable.QOS);
        OvsdbRowStore rowStore = getRowStore(DATABASENAME, QOS);
        if (rowStore == null) {
            log.debug("The qos row store is null");
            return false;
        }

        ArrayList<Operation> operations = Lists.newArrayList();
        Set<String> types = Sets.newHashSet();
        Map<Long, Uuid> queues = Maps.newHashMap();
        types.add(ovsdbQos.qosType());
        qos.setOtherConfig(ovsdbQos.otherConfigs());
        qos.setExternalIds(ovsdbQos.externalIds());
        qos.setType(types);
        if (ovsdbQos.qosQueues().isPresent()) {
            for
 (Map.Entry<Long, String> entry : ovsdbQos.qosQueues().get().entrySet()) {
                OvsdbRowStore queueRowStore = getRowStore(DATABASENAME, QUEUE);
                if (queueRowStore != null) {
                    ConcurrentMap<String, Row> queueTableRows = queueRowStore.getRowStore();
                    Row queueRow = queueTableRows.values().stream().filter(r -> {
                        OvsdbMap ovsdbMap = (OvsdbMap) (r.getColumn(EXTERNAL_ID).data());
                        return entry.getValue().equals(ovsdbMap.map().get(QUEUE_EXTERNAL_ID_KEY));
                    }).findFirst().orElse(null);
                    if (queueRow != null) {
                        queues.put(entry.getKey(), queueRow.uuid());
                    }
                }
            }
            qos.setQueues(queues);
        }

        Insert qosInsert = new Insert(dbSchema.getTableSchema(QOS), QOS, qos.getRow());
        operations.add(qosInsert);
        try {
            transactConfig(DATABASENAME, operations).get();
        } catch (InterruptedException e) {
            log.warn("Interrupted while creating qos");
            Thread.currentThread().interrupt();
            return false;
        } catch (ExecutionException e) {
            log.error("Exception thrown while creating qos", e);
            return false;
        }
        return true;
    }

    @Override
    public void dropQos(QosId qosId) {
        OvsdbRowStore rowStore = getRowStore(DATABASENAME, QOS);
        if (rowStore != null) {
            ConcurrentMap<String, Row> qosTableRows = rowStore.getRowStore();
            Row qosRow = qosTableRows.values().stream().filter(r -> {
                OvsdbMap ovsdbMap = (OvsdbMap) (r.getColumn(EXTERNAL_ID).data());
                return qosId.name().equals(ovsdbMap.map().get(QOS_EXTERNAL_ID_KEY));
            }).findFirst().orElse(null);
            if (qosRow != null) {
                deleteConfig(QOS, UUID, qosRow.uuid().value(), PORT, PORT_QOS, qosRow.uuid());
            }
        }
    }

    @Override
    public OvsdbQos getQos(QosId qosId) {
        Set<OvsdbQos> ovsdbQoses = getQoses();
        return ovsdbQoses.stream()
                .filter(r -> qosId.name().equals(r.externalIds().get(QOS_EXTERNAL_ID_KEY)))
                .findFirst().orElse(null);
    }

    @Override
    public Set<OvsdbQos> getQoses() {
        Set<OvsdbQos> ovsdbQoses = new HashSet<>();
        OvsdbRowStore rowStore = getRowStore(DATABASENAME, QOS);
        if (rowStore == null) {
            log.debug("The qos row store is null");
            return ovsdbQoses;
        }
        ConcurrentMap<String, Row> rows = rowStore.getRowStore();
        ovsdbQoses = rows.keySet().stream()
                .map(uuid -> getRow(DATABASENAME, QOS, uuid))
                .map(this::getOvsdbQos)
                .filter(Objects::nonNull)
                .collect(Collectors.toSet());
        return ovsdbQoses;
    }

    @Override
    public void bindQueues(QosId qosId, Map<Long, QueueDescription> queues) {
        DatabaseSchema dbSchema = schema.get(DATABASENAME);
        OvsdbRowStore qosRowStore = getRowStore(DATABASENAME, QOS);
        if (qosRowStore == null) {
            log.debug("The qos row store is null");
            return;
        }
        OvsdbRowStore queueRowStore = getRowStore(DATABASENAME, QUEUE);
        if (queueRowStore == null) {
            log.debug("The queue row store is null");
            return;
        }

        ConcurrentMap<String, Row> qosTableRows = qosRowStore.getRowStore();
        ConcurrentMap<String, Row> queueTableRows = queueRowStore.getRowStore();

        Row qosRow = qosTableRows.values().stream().filter(r -> {
            OvsdbMap ovsdbMap = (OvsdbMap) (r.getColumn(EXTERNAL_ID).data());
            return qosId.name().equals(ovsdbMap.map().get(QOS_EXTERNAL_ID_KEY));
        }).findFirst().orElse(null);
        if (qosRow == null) {
            log.warn("Can't find QoS {}", qosId);
            return;
        }

        Uuid qosUuid = qosRow.uuid();
        Map<Long, Uuid> newQueues = new HashMap<>();
        for (Map.Entry<Long, QueueDescription> entry : queues.entrySet()) {
            Row queueRow = queueTableRows.values().stream().filter(r -> {
                OvsdbMap ovsdbMap = (OvsdbMap) (r.getColumn(EXTERNAL_ID).data());
                return entry.getValue().queueId().name().equals(ovsdbMap.map().get(QUEUE_EXTERNAL_ID_KEY));
            }).findFirst().orElse(null);
            if (queueRow != null) {
                newQueues.put(entry.getKey(), queueRow.uuid());
            }
        }

        // update the qos table
        ArrayList<Operation> operations = Lists.newArrayList();
        Condition condition = ConditionUtil.isEqual(UUID, qosUuid);
        Mutation mutation = MutationUtil.insert(QUEUES, newQueues);
        List<Condition>
 conditions = Collections.singletonList(condition);
        List<Mutation> mutations = Collections.singletonList(mutation);
        operations.add(new Mutate(dbSchema.getTableSchema(QOS), conditions, mutations));
        transactConfig(DATABASENAME, operations);
    }

    @SuppressWarnings("unchecked")
    @Override
    public void unbindQueues(QosId qosId, List<Long> queueKeys) {
        DatabaseSchema dbSchema = schema.get(DATABASENAME);
        OvsdbRowStore qosRowStore = getRowStore(DATABASENAME, QOS);
        if (qosRowStore == null) {
            return;
        }

        ConcurrentMap<String, Row> qosTableRows = qosRowStore.getRowStore();
        Row qosRow = qosTableRows.values().stream().filter(r -> {
            OvsdbMap ovsdbMap = (OvsdbMap) (r.getColumn(EXTERNAL_ID).data());
            return qosId.name().equals(ovsdbMap.map().get(QOS_EXTERNAL_ID_KEY));
        }).findFirst().orElse(null);
        if (qosRow == null) {
            log.warn("Can't find QoS {}", qosId);
            return;
        }

        Map<Long, Uuid> deleteQueuesMap;
        Map<Integer, Uuid> queuesMap = ((OvsdbMap) qosRow.getColumn(QUEUES).data()).map();
        deleteQueuesMap = queueKeys.stream()
                .filter(key -> queuesMap.containsKey(key.intValue()))
                .collect(Collectors.toMap(key -> key, key -> queuesMap.get(key.intValue()), (a, b) -> b));
        if (!deleteQueuesMap.isEmpty()) {
            TableSchema parentTableSchema = dbSchema.getTableSchema(QOS);
            ColumnSchema parentColumnSchema = parentTableSchema.getColumnSchema(QUEUES);
            Mutation mutation = MutationUtil.delete(parentColumnSchema.name(),
                    OvsdbMap.ovsdbMap(deleteQueuesMap));
            List<Mutation> mutations = Collections.singletonList(mutation);
            Condition condition = ConditionUtil.isEqual(UUID, qosRow.uuid());
            List<Condition> conditionList = Collections.singletonList(condition);
            List<Operation> operations = Collections.singletonList(
                    new Mutate(parentTableSchema, conditionList, mutations));
            transactConfig(DATABASENAME, operations);
        }
    }

    @Override
    public boolean createQueue(OvsdbQueue ovsdbQueue) {
        DatabaseSchema dbSchema = schema.get(DATABASENAME);
        Queue queue = (Queue) TableGenerator.createTable(dbSchema, OvsdbTable.QUEUE);
        ArrayList<Operation> operations = Lists.newArrayList();
        OvsdbRowStore rowStore = getRowStore(DATABASENAME, QUEUE);
        if (rowStore == null) {
            log.debug("The queue row store is null");
            return false;
        }

        if (ovsdbQueue.dscp().isPresent()) {
            queue.setDscp(ImmutableSet.of(ovsdbQueue.dscp().get()));
        }
        queue.setOtherConfig(ovsdbQueue.otherConfigs());
        queue.setExternalIds(ovsdbQueue.externalIds());
        Insert queueInsert = new Insert(dbSchema.getTableSchema(QUEUE), QUEUE, queue.getRow());
        operations.add(queueInsert);
        try {
            transactConfig(DATABASENAME, operations).get();
        } catch (InterruptedException e) {
            log.warn("Interrupted while creating queue");
            Thread.currentThread().interrupt();
        } catch (ExecutionException e) {
            log.error("Exception thrown while creating queue", e);
        }
        return true;
    }

    @Override
    public void dropQueue(QueueId queueId) {
        OvsdbRowStore queueRowStore = getRowStore(DATABASENAME, QUEUE);
        if (queueRowStore == null) {
            return;
        }

        ConcurrentMap<String, Row> queueTableRows = queueRowStore.getRowStore();
        Row queueRow = queueTableRows.values().stream().filter(r -> {
            OvsdbMap ovsdbMap = (OvsdbMap) (r.getColumn(EXTERNAL_ID).data());
            return queueId.name().equals(ovsdbMap.map().get(QUEUE_EXTERNAL_ID_KEY));
        }).findFirst().orElse(null);
        if (queueRow == null) {
            return;
        }

        String queueUuid = queueRow.uuid().value();
        OvsdbRowStore qosRowStore = getRowStore(DATABASENAME, QOS);
        if (qosRowStore != null) {
            Map<Long, Uuid> queueMap = new HashMap<>();
            ConcurrentMap<String, Row> qosTableRows = qosRowStore.getRowStore();
            qosTableRows.values().stream().filter(r -> {
                Map<Integer, Uuid> ovsdbMap = ((OvsdbMap) r.getColumn(QUEUES).data()).map();
                Set<Integer> keySet =
 ovsdbMap.keySet();
                for (Integer keyId : keySet) {
                    if (ovsdbMap.get(keyId).equals(Uuid.uuid(queueUuid))) {
                        queueMap.put(keyId.longValue(), Uuid.uuid(queueUuid));
                        return true;
                    }
                }
                return false;
            }).findFirst().orElse(null);
            deleteConfig(QUEUE, UUID, queueUuid, QOS, QUEUES, OvsdbMap.ovsdbMap(queueMap));
        } else {
            deleteConfig(QUEUE, UUID, queueUuid, null, null, null);
        }
    }

    @Override
    public OvsdbQueue getQueue(QueueId queueId) {
        Set<OvsdbQueue> ovsdbQueues = getQueues();
        return ovsdbQueues.stream()
                .filter(r -> queueId.name().equals(r.externalIds().get(QUEUE_EXTERNAL_ID_KEY)))
                .findFirst().orElse(null);
    }

    @Override
    public Set<OvsdbQueue> getQueues() {
        Set<OvsdbQueue> ovsdbQueues = new HashSet<>();
        OvsdbRowStore rowStore = getRowStore(DATABASENAME, QUEUE);
        if (rowStore == null) {
            log.debug("The queue row store is null");
            return ovsdbQueues;
        }
        ConcurrentMap<String, Row> rows = rowStore.getRowStore();
        ovsdbQueues = rows.keySet().stream()
                .map(uuid -> getRow(DATABASENAME, QUEUE, uuid))
                .map(this::getOvsdbQueue)
                .filter(Objects::nonNull)
                .collect(Collectors.toSet());
        return ovsdbQueues;
    }

    /**
     * Creates a mirror port. Mirrors the traffic
     * that goes to selectDstPort or comes from
     * selectSrcPort or packets containing selectVlan
     * to mirrorPort or to all ports that trunk mirrorVlan.
     *
     * @param bridgeName the name of the bridge on which the mirror is created
     * @param mirror     the OVSDB mirror description
     * @return true if mirror creation is successful, false otherwise
     */
    @Override
    public boolean createMirror(String bridgeName, OvsdbMirror mirror) {
        /**
         * Retrieves the bridge's uuid; it is needed to update the
         * Bridge table.
         */
        String bridgeUuid = getBridgeUuid(bridgeName);
        if (bridgeUuid == null) {
            log.warn("Couldn't find bridge {} in {}", bridgeName, nodeId.getIpAddress());
            return false;
        }

        OvsdbMirror.Builder mirrorBuilder = OvsdbMirror.builder();
        mirrorBuilder.mirroringName(mirror.mirroringName());
        mirrorBuilder.selectAll(mirror.selectAll());

        /**
         * Retrieves the uuid of the monitored dst ports.
         */
        mirrorBuilder.monitorDstPorts(mirror.monitorDstPorts().parallelStream()
                .map(dstPort -> {
                    String dstPortUuid = getPortUuid(dstPort.value(), bridgeUuid);
                    if (dstPortUuid != null) {
                        return Uuid.uuid(dstPortUuid);
                    }
                    log.warn("Couldn't find port {} in {}", dstPort.value(), nodeId.getIpAddress());
                    return null;
                })
                .filter(Objects::nonNull)
                .collect(Collectors.toSet())
        );

        /**
         * Retrieves the uuid of the monitored src ports.
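         * The lookups below run in parallel; any port that cannot be
         * resolved on the bridge is logged and skipped rather than
         * failing the whole mirror.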
*/ mirrorBuilder.monitorSrcPorts(mirror.monitorSrcPorts().parallelStream() .map(srcPort -> { String srcPortUuid = getPortUuid(srcPort.value(), bridgeUuid); if (srcPortUuid != null) { return Uuid.uuid(srcPortUuid); } log.warn("Couldn't find port {} in {}", srcPort.value(), nodeId.getIpAddress()); return null; }).filter(Objects::nonNull) .collect(Collectors.toSet()) ); mirrorBuilder.monitorVlans(mirror.monitorVlans()); mirrorBuilder.mirrorPort(mirror.mirrorPort()); mirrorBuilder.mirrorVlan(mirror.mirrorVlan()); mirrorBuilder.externalIds(mirror.externalIds()); mirror = mirrorBuilder.build(); if (mirror.monitorDstPorts().isEmpty() && mirror.monitorSrcPorts().isEmpty() && mirror.monitorVlans().isEmpty()) { log.warn("Invalid monitoring data"); return false; } DatabaseSchema dbSchema = schema.get(DATABASENAME); Mirror mirrorEntry = (Mirror) TableGenerator.createTable(dbSchema, OvsdbTable.MIRROR); mirrorEntry.setName(mirror.mirroringName()); mirrorEntry.setSelectDstPort(mirror.monitorDstPorts()); mirrorEntry.setSelectSrcPort(mirror.monitorSrcPorts()); mirrorEntry.setSelectVlan(mirror.monitorVlans()); mirrorEntry.setExternalIds(mirror.externalIds()); /** * If mirror port, retrieves the uuid of the mirror port. */ if (mirror.mirrorPort() != null) { String outputPortUuid = getPortUuid(mirror.mirrorPort().value(), bridgeUuid); if (outputPortUuid == null) { log.warn("Couldn't find port {} in {}", mirror.mirrorPort().value(), nodeId.getIpAddress()); return false; } mirrorEntry.setOutputPort(Uuid.uuid(outputPortUuid)); } else if (mirror.mirrorVlan() != null) { mirrorEntry.setOutputVlan(mirror.mirrorVlan()); } else { log.warn("Invalid mirror, no mirror port and no mirror vlan"); return false; } ArrayList<Operation> operations = Lists.newArrayList(); Insert mirrorInsert = new Insert(dbSchema.getTableSchema("Mirror"), "Mirror", mirrorEntry.getRow()); operations.add(mirrorInsert); // update the bridge table Condition condition = ConditionUtil.isEqual(UUID, Uuid.uuid(bridgeUuid)); Mutation mutation = MutationUtil.insert(MIRRORS, Uuid.uuid("Mirror")); List<Condition> conditions = Lists.newArrayList(condition); List<Mutation> mutations = Lists.newArrayList(mutation); operations.add(new Mutate(dbSchema.getTableSchema("Bridge"), conditions, mutations)); transactConfig(DATABASENAME, operations); log.info("Created mirror {}", mirror.mirroringName()); return true; } /** * Drops the configuration for mirror. 
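     * The mirror row is looked up by name; when found, its uuid is removed
     * from the owning bridge's mirrors column and the row itself is deleted
     * in a single transaction (see deleteConfig).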
     *
     * @param mirroringName name of mirror to drop
     */
    @Override
    public void dropMirror(MirroringName mirroringName) {
        String mirrorUuid = getMirrorUuid(mirroringName.name());
        if (mirrorUuid != null) {
            log.info("Deleted mirror {}", mirroringName.name());
            deleteConfig(MIRROR, UUID, mirrorUuid, BRIDGE, MIRRORS, Uuid.uuid(mirrorUuid));
        } else {
            log.warn("Unable to delete mirror {}: not found", mirroringName.name());
        }
    }

    @Override
    public boolean createInterface(String bridgeName, OvsdbInterface ovsdbIface) {
        String bridgeUuid = getBridgeUuid(bridgeName);
        if (bridgeUuid == null) {
            log.warn("Couldn't find bridge {} in {}", bridgeName, nodeId.getIpAddress());
            return false;
        }

        if (getPortUuid(ovsdbIface.name(), bridgeUuid) != null) {
            log.warn("Interface {} already exists", ovsdbIface.name());
            return false;
        }

        ArrayList<Operation> operations = Lists.newArrayList();
        DatabaseSchema dbSchema = schema.get(DATABASENAME);

        // insert a new port with the interface name
        Port port = (Port) TableGenerator.createTable(dbSchema, OvsdbTable.PORT);
        port.setName(ovsdbIface.name());
        Insert portInsert = new Insert(dbSchema.getTableSchema(PORT), PORT, port.getRow());
        portInsert.getRow().put(INTERFACES, Uuid.uuid(INTERFACE));
        operations.add(portInsert);

        // update the bridge table with the new port
        Condition condition = ConditionUtil.isEqual(UUID, Uuid.uuid(bridgeUuid));
        Mutation mutation = MutationUtil.insert(PORTS, Uuid.uuid(PORT));
        List<Condition> conditions = Lists.newArrayList(condition);
        List<Mutation> mutations = Lists.newArrayList(mutation);
        operations.add(new Mutate(dbSchema.getTableSchema(BRIDGE), conditions, mutations));

        Interface intf = (Interface) TableGenerator.createTable(dbSchema, OvsdbTable.INTERFACE);
        intf.setName(ovsdbIface.name());
        if (ovsdbIface.type() != null) {
            intf.setType(ovsdbIface.typeToString());
        }
        if (ovsdbIface.mtu().isPresent()) {
            Set<Long> mtuSet = Sets.newConcurrentHashSet();
            mtuSet.add(ovsdbIface.mtu().get());
            intf.setMtu(mtuSet);
            intf.setMtuRequest(mtuSet);
        }
        intf.setOptions(ovsdbIface.options());
        ovsdbIface.data().forEach((k, v) -> {
            if (k == Interface.InterfaceColumn.EXTERNALIDS) {
                intf.setExternalIds(v);
            }
        });
        Insert intfInsert = new Insert(dbSchema.getTableSchema(INTERFACE), INTERFACE, intf.getRow());
        operations.add(intfInsert);

        transactConfig(DATABASENAME, operations);
        log.info("Created interface {}", ovsdbIface);
        return true;
    }

    @Override
    public boolean dropInterface(String ifaceName) {
        OvsdbRowStore rowStore = getRowStore(DATABASENAME, BRIDGE);
        if (rowStore == null) {
            log.warn("Failed to get BRIDGE table");
            return false;
        }

        ConcurrentMap<String, Row> bridgeTableRows = rowStore.getRowStore();
        if (bridgeTableRows == null) {
            log.warn("Failed to get BRIDGE table rows");
            return false;
        }

        // an interface name is unique across bridges, so find the bridge
        // that owns the port with this name
        Optional<String> bridgeId = bridgeTableRows.keySet().stream()
                .filter(uuid -> getPortUuid(ifaceName, uuid) != null)
                .findFirst();

        if (bridgeId.isPresent()) {
            String portId = getPortUuid(ifaceName, bridgeId.get());
            deleteConfig(PORT, UUID, portId, BRIDGE, PORTS, Uuid.uuid(portId));
            return true;
        } else {
            log.warn("Unable to find the interface with name {}", ifaceName);
            return false;
        }
    }

    /**
     * Delete transact config.
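     * <p>
     * A child row is removed in two steps inside a single transaction: a
     * mutate operation first drops the reference to the child from the
     * parent table's column, then a delete operation removes the child row
     * itself. When no parent information is given, only the delete is issued.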
* * @param childTableName child table name * @param childColumnName child column name * @param childUuid child row uuid * @param parentTableName parent table name * @param parentColumnName parent column * @param referencedValue referenced value */ private void deleteConfig(String childTableName, String childColumnName, String childUuid, String parentTableName, String parentColumnName, Object referencedValue) { DatabaseSchema dbSchema = schema.get(DATABASENAME); TableSchema childTableSchema = dbSchema.getTableSchema(childTableName); ArrayList<Operation> operations = Lists.newArrayList(); if (parentTableName != null && parentColumnName != null && referencedValue != null) { TableSchema parentTableSchema = dbSchema .getTableSchema(parentTableName); ColumnSchema parentColumnSchema = parentTableSchema .getColumnSchema(parentColumnName); List<Mutation> mutations = Lists.newArrayList(); Mutation mutation = MutationUtil.delete(parentColumnSchema.name(), referencedValue); mutations.add(mutation); List<Condition> conditions = Lists.newArrayList(); Condition condition = ConditionUtil.includes(parentColumnName, referencedValue); conditions.add(condition); Mutate op = new Mutate(parentTableSchema, conditions, mutations); operations.add(op); } List<Condition> conditions = Lists.newArrayList(); Condition condition = ConditionUtil.isEqual(childColumnName, Uuid.uuid(childUuid)); conditions.add(condition); Delete del = new Delete(childTableSchema, conditions); operations.add(del); transactConfig(DATABASENAME, operations); } /** * Update transact config. * * @param tableName table name * @param columnName column name * @param uuid uuid * @param row the config data */ private void updateConfig(String tableName, String columnName, String uuid, Row row) { DatabaseSchema dbSchema = schema.get(DATABASENAME); TableSchema tableSchema = dbSchema.getTableSchema(tableName); List<Condition> conditions = Lists.newArrayList(); Condition condition = ConditionUtil.isEqual(columnName, Uuid.uuid(uuid)); conditions.add(condition); Update update = new Update(tableSchema, row, conditions); ArrayList<Operation> operations = Lists.newArrayList(); operations.add(update); transactConfig(DATABASENAME, operations); } /** * Insert transact config. 
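     * <p>
     * The insert and the parent-column mutation are sent in one transaction:
     * the insert uses the child table name as its named-uuid, and the
     * mutation references that same name, which the OVSDB server resolves
     * to the actual uuid of the newly inserted row.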
     *
     * @param childTableName   child table name
     * @param childColumnName  child column name
     * @param parentTableName  parent table name
     * @param parentColumnName parent column
     * @param parentUuid       parent uuid
     * @param row              the config data
     * @return the uuid of the newly inserted row; null if the insert failed
     */
    private String insertConfig(String childTableName, String childColumnName,
                                String parentTableName, String parentColumnName,
                                String parentUuid, Row row) {
        DatabaseSchema dbSchema = schema.get(DATABASENAME);
        TableSchema tableSchema = dbSchema.getTableSchema(childTableName);

        Insert insert = new Insert(tableSchema, childTableName, row);

        ArrayList<Operation> operations = Lists.newArrayList();
        operations.add(insert);

        if (parentTableName != null && parentColumnName != null) {
            TableSchema parentTableSchema = dbSchema
                    .getTableSchema(parentTableName);
            ColumnSchema parentColumnSchema = parentTableSchema
                    .getColumnSchema(parentColumnName);

            List<Mutation> mutations = Lists.newArrayList();
            Mutation mutation = MutationUtil.insert(parentColumnSchema.name(),
                    Uuid.uuid(childTableName));
            mutations.add(mutation);

            List<Condition> conditions = Lists.newArrayList();
            Condition condition = ConditionUtil.isEqual(UUID, Uuid.uuid(parentUuid));
            conditions.add(condition);

            Mutate op = new Mutate(parentTableSchema, conditions, mutations);
            operations.add(op);
        }

        if (childTableName.equalsIgnoreCase(PORT)) {
            log.debug("Handle port insert");
            Insert intfInsert = handlePortInsertTable(row);
            if (intfInsert != null) {
                operations.add(intfInsert);
            }

            Insert ins = (Insert) operations.get(0);
            ins.getRow().put("interfaces", Uuid.uuid(INTERFACE));
        }

        List<OperationResult> results;
        try {
            results = transactConfig(DATABASENAME, operations)
                    .get(TRANSACTCONFIG_TIMEOUT, TimeUnit.SECONDS);
            return results.get(0).getUuid().value();
        } catch (TimeoutException e) {
            log.warn("TimeoutException thrown while waiting for the transaction result");
        } catch (InterruptedException e) {
            log.warn("Interrupted while waiting for the transaction result");
            Thread.currentThread().interrupt();
        } catch (ExecutionException e) {
            log.error("Exception thrown while waiting for the transaction result", e);
        }
        return null;
    }

    /**
     * Handles port insert.
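     * <p>
     * OVSDB expects every Port row to reference at least one Interface row,
     * so a port insert is paired with an Interface row of the same name,
     * referenced through the port's interfaces column.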
* * @param portRow row of port * @return insert, empty if null */ private Insert handlePortInsertTable(Row portRow) { DatabaseSchema dbSchema = schema.get(DATABASENAME); TableSchema portTableSchema = dbSchema.getTableSchema(PORT); ColumnSchema portColumnSchema = portTableSchema.getColumnSchema("name"); String portName = (String) portRow.getColumn(portColumnSchema.name()).data(); Interface inf = (Interface) TableGenerator.createTable(dbSchema, OvsdbTable.INTERFACE); inf.setName(portName); TableSchema intfTableSchema = dbSchema.getTableSchema(INTERFACE); return new Insert(intfTableSchema, INTERFACE, inf.getRow()); } @Override public ListenableFuture<DatabaseSchema> getOvsdbSchema(String dbName) { if (dbName == null) { return null; } DatabaseSchema databaseSchema = schema.get(dbName); if (databaseSchema == null) { List<String> dbNames = new ArrayList<>(); dbNames.add(dbName); Function<JsonNode, DatabaseSchema> rowFunction = input -> { log.debug("Get ovsdb database schema {}", dbName); DatabaseSchema dbSchema = FromJsonUtil.jsonNodeToDbSchema(dbName, input); if (dbSchema == null) { log.debug("Get ovsdb database schema error"); return null; } schema.put(dbName, dbSchema); return dbSchema; }; ListenableFuture<JsonNode> input = getSchema(dbNames); if (input != null) { return futureTransform(input, rowFunction); } return null; } else { return Futures.immediateFuture(databaseSchema); } } @Override public ListenableFuture<TableUpdates> monitorTables(String dbName, String id) { if (dbName == null) { return null; } DatabaseSchema dbSchema = schema.get(dbName); if (dbSchema != null) { Function<JsonNode, TableUpdates> rowFunction = input -> { log.debug("Get table updates"); TableUpdates updates = FromJsonUtil.jsonNodeToTableUpdates(input, dbSchema); if (updates == null) { log.debug("Get table updates error"); return null; } return updates; }; return futureTransform(monitor(dbSchema, id), rowFunction); } return null; } private ListenableFuture<List<OperationResult>> transactConfig(String dbName, List<Operation> operations) { if (dbName == null) { return null; } DatabaseSchema dbSchema = schema.get(dbName); if (dbSchema != null) { Function<List<JsonNode>, List<OperationResult>> rowFunction = (input -> { try { log.debug("Get ovsdb operation result"); List<OperationResult> result = FromJsonUtil.jsonNodeToOperationResult(input, operations); if (result == null) { log.debug("The operation result is null"); return null; } return result; } catch (Exception e) { log.error("Exception while parsing result", e); } return null; }); return futureTransform(transact(dbSchema, operations), rowFunction); } return null; } @Override public ListenableFuture<JsonNode> getSchema(List<String> dbnames) { String id = java.util.UUID.randomUUID().toString(); String getSchemaString = JsonRpcWriterUtil.getSchemaStr(id, dbnames); SettableFuture<JsonNode> sf = SettableFuture.create(); requestResult.put(id, sf); requestMethod.put(id, "getSchema"); channel.writeAndFlush(getSchemaString); return sf; } @Override public ListenableFuture<List<String>> echo() { String id = java.util.UUID.randomUUID().toString(); String echoString = JsonRpcWriterUtil.echoStr(id); SettableFuture<List<String>> sf = SettableFuture.create(); requestResult.put(id, sf); requestMethod.put(id, "echo"); channel.writeAndFlush(echoString); return sf; } @Override public ListenableFuture<JsonNode> monitor(DatabaseSchema dbSchema, String monitorId) { String id = java.util.UUID.randomUUID().toString(); String monitorString = JsonRpcWriterUtil.monitorStr(id, monitorId, 
dbSchema); SettableFuture<JsonNode> sf = SettableFuture.create(); requestResult.put(id, sf); requestMethod.put(id, "monitor"); channel.writeAndFlush(monitorString); return sf; } @Override public ListenableFuture<List<String>> listDbs() { String id = java.util.UUID.randomUUID().toString(); String listDbsString = JsonRpcWriterUtil.listDbsStr(id); SettableFuture<List<String>> sf = SettableFuture.create(); requestResult.put(id, sf); requestMethod.put(id, "listDbs"); channel.writeAndFlush(listDbsString); return sf; } @Override public ListenableFuture<List<JsonNode>> transact(DatabaseSchema dbSchema, List<Operation> operations) { String id = java.util.UUID.randomUUID().toString(); String transactString = JsonRpcWriterUtil.transactStr(id, dbSchema, operations); SettableFuture<List<JsonNode>> sf = SettableFuture.create(); requestResult.put(id, sf); requestMethod.put(id, "transact"); channel.writeAndFlush(transactString); return sf; } @SuppressWarnings({"rawtypes", "unchecked"}) @Override public void processResult(JsonNode response) { log.debug("Handle result"); String requestId = response.get("id").asText(); SettableFuture sf = requestResult.get(requestId); if (sf == null) { log.debug("No such future to process"); return; } String methodName = requestMethod.get(requestId); sf.set(FromJsonUtil.jsonResultParser(response, methodName)); requestResult.remove(requestId); requestMethod.remove(requestId); } @Override public void processRequest(JsonNode requestJson) { log.debug("Handle request"); if (requestJson.get("method").asText().equalsIgnoreCase("echo")) { log.debug("handle echo request"); String replyString = FromJsonUtil.getEchoRequestStr(requestJson); channel.writeAndFlush(replyString); } else { FromJsonUtil.jsonCallbackRequestParser(requestJson, monitorCallBack); } } @Override public void setCallback(Callback monitorCallback) { this.monitorCallBack = monitorCallback; } @Override public Set<OvsdbBridge> getBridges() { Set<OvsdbBridge> ovsdbBridges = new HashSet<>(); OvsdbTableStore tableStore = getTableStore(DATABASENAME); if (tableStore == null) { return ovsdbBridges; } OvsdbRowStore rowStore = tableStore.getRows(BRIDGE); if (rowStore == null) { return ovsdbBridges; } ConcurrentMap<String, Row> rows = rowStore.getRowStore(); for (String uuid : rows.keySet()) { Row bridgeRow = getRow(DATABASENAME, BRIDGE, uuid); OvsdbBridge ovsdbBridge = getOvsdbBridge(bridgeRow, Uuid.uuid(uuid)); if (ovsdbBridge != null) { ovsdbBridges.add(ovsdbBridge); } } return ovsdbBridges; } @Override public Set<ControllerInfo> getControllers(DeviceId openflowDeviceId) { Uuid bridgeUuid = getBridgeUuid(openflowDeviceId); if (bridgeUuid == null) { log.warn("bad bridge Uuid"); return null; } List<Controller> controllers = getControllers(bridgeUuid); if (controllers == null) { log.warn("bad list of controllers"); return null; } return controllers.stream().map(controller -> new ControllerInfo( (String) controller.getTargetColumn() .data())).collect(Collectors.toSet()); } private List<Controller> getControllers(Uuid bridgeUuid) { DatabaseSchema dbSchema = schema.get(DATABASENAME); if (dbSchema == null) { return null; } OvsdbRowStore rowStore = getRowStore(DATABASENAME, BRIDGE); if (rowStore == null) { log.debug("There is no bridge table"); return null; } Row bridgeRow = rowStore.getRow(bridgeUuid.value()); Bridge bridge = (Bridge) TableGenerator. 
getTable(dbSchema, bridgeRow, OvsdbTable.BRIDGE);

        Set<Uuid> controllerUuids = (Set<Uuid>) ((OvsdbSet) bridge
                .getControllerColumn().data()).set();

        OvsdbRowStore controllerRowStore = getRowStore(DATABASENAME, CONTROLLER);
        if (controllerRowStore == null) {
            log.debug("There is no controller table");
            return null;
        }

        List<Controller> ovsdbControllers = new ArrayList<>();
        ConcurrentMap<String, Row> controllerTableRows = controllerRowStore.getRowStore();
        controllerTableRows.forEach((key, row) -> {
            if (!controllerUuids.contains(Uuid.uuid(key))) {
                return;
            }
            Controller controller = (Controller) TableGenerator
                    .getTable(dbSchema, row, OvsdbTable.CONTROLLER);
            ovsdbControllers.add(controller);
        });
        return ovsdbControllers;
    }

    private Uuid getBridgeUuid(DeviceId openflowDeviceId) {
        DatabaseSchema dbSchema = schema.get(DATABASENAME);
        if (dbSchema == null) {
            return null;
        }
        OvsdbRowStore rowStore = getRowStore(DATABASENAME, BRIDGE);
        if (rowStore == null) {
            log.debug("There is no bridge table");
            return null;
        }

        ConcurrentMap<String, Row> bridgeTableRows = rowStore.getRowStore();
        final AtomicReference<Uuid> uuid = new AtomicReference<>();
        for (Map.Entry<String, Row> entry : bridgeTableRows.entrySet()) {
            Bridge bridge = (Bridge) TableGenerator.getTable(
                    dbSchema, entry.getValue(), OvsdbTable.BRIDGE);
            if (matchesDpid(bridge, openflowDeviceId)) {
                uuid.set(Uuid.uuid(entry.getKey()));
                break;
            }
        }
        if (uuid.get() == null) {
            log.debug("There is no bridge for {}", openflowDeviceId);
        }
        return uuid.get();
    }

    private static boolean matchesDpid(Bridge b, DeviceId deviceId) {
        String ofDpid = deviceId.toString().replace("of:", "");
        Set ofDeviceIds = ((OvsdbSet) b.getDatapathIdColumn().data()).set(); //TODO Set<String>
        return ofDeviceIds.contains(ofDpid);
    }

    @Override
    public Set<OvsdbPort> getPorts() {
        return (Set<OvsdbPort>) getElements(this::getOvsdbPort);
    }

    @Override
    public Set<Interface> getInterfaces() {
        return (Set<Interface>) getElements(this::getInterface);
    }

    private Set<?> getElements(Function<Row, ?> method) {
        OvsdbTableStore tableStore = getTableStore(DATABASENAME);
        if (tableStore == null) {
            return null;
        }
        OvsdbRowStore rowStore = tableStore.getRows(INTERFACE);
        if (rowStore == null) {
            return null;
        }

        ConcurrentMap<String, Row> rows = rowStore.getRowStore();
        return rows.keySet()
                .stream()
                .map(uuid -> getRow(DATABASENAME, INTERFACE, uuid))
                .map(method)
                .filter(Objects::nonNull)
                .collect(Collectors.toSet());
    }

    @Override
    public Interface getInterface(String intf) {
        return getInterfaces().stream()
                .filter(ovsdbIntf -> ovsdbIntf.getName().equals(intf))
                .findAny().orElse(null);
    }

    private Interface getInterface(Row row) {
        DatabaseSchema dbSchema = getDatabaseSchema(DATABASENAME);
        return (Interface) TableGenerator.getTable(dbSchema, row, OvsdbTable.INTERFACE);
    }

    @Override
    public DatabaseSchema getDatabaseSchema(String dbName) {
        return schema.get(dbName);
    }

    private OvsdbPort getOvsdbPort(Row row) {
        DatabaseSchema dbSchema = getDatabaseSchema(DATABASENAME);
        Interface intf = (Interface) TableGenerator
                .getTable(dbSchema, row, OvsdbTable.INTERFACE);
        if (intf == null) {
            return null;
        }

        long ofPort = getOfPort(intf);
        String portName = intf.getName();
        if ((ofPort < 0) || (portName == null)) {
            return null;
        }
        return new OvsdbPort(new OvsdbPortNumber(ofPort), new OvsdbPortName(portName));
    }

    private OvsdbBridge getOvsdbBridge(Row row, Uuid bridgeUuid) {
        DatabaseSchema dbSchema
= getDatabaseSchema(DATABASENAME); Bridge bridge = (Bridge) TableGenerator.getTable(dbSchema, row, OvsdbTable.BRIDGE); if (bridge == null) { return null; } OvsdbSet datapathIdSet = (OvsdbSet) bridge.getDatapathIdColumn().data(); @SuppressWarnings("unchecked") Set<String> datapathIds = datapathIdSet.set(); if (datapathIds == null || datapathIds.isEmpty()) { return null; } String datapathId = (String) datapathIds.toArray()[0]; String bridgeName = bridge.getName(); if ((datapathId == null) || (bridgeName == null)) { return null; } List<Controller> controllers = getControllers(bridgeUuid); if (controllers != null) { List<ControllerInfo> controllerInfos = controllers.stream().map( controller -> new ControllerInfo( (String) controller.getTargetColumn() .data())).collect(Collectors.toList()); return OvsdbBridge.builder() .name(bridgeName) .datapathId(datapathId) .controllers(controllerInfos) .build(); } else { return OvsdbBridge.builder() .name(bridgeName) .datapathId(datapathId) .build(); } } private OvsdbQos getOvsdbQos(Row row) { DatabaseSchema dbSchema = getDatabaseSchema(DATABASENAME); Qos qos = (Qos) TableGenerator.getTable(dbSchema, row, OvsdbTable.QOS); if (qos == null) { return null; } String type = (String) qos.getTypeColumn().data(); Map<String, String> otherConfigs; Map<String, String> externalIds; Map<Long, String> queues; otherConfigs = ((OvsdbMap) qos.getOtherConfigColumn().data()).map(); externalIds = ((OvsdbMap) qos.getExternalIdsColumn().data()).map(); queues = ((OvsdbMap) qos.getQueuesColumn().data()).map(); return OvsdbQos.builder().qosType(type). queues(queues).otherConfigs(otherConfigs). externalIds(externalIds).build(); } private OvsdbQueue getOvsdbQueue(Row row) { DatabaseSchema dbSchema = getDatabaseSchema(DATABASENAME); Queue queue = (Queue) TableGenerator.getTable(dbSchema, row, OvsdbTable.QUEUE); if (queue == null) { return null; } OvsdbSet dscpOvsdbSet = ((OvsdbSet) queue.getDscpColumn().data()); Set dscpSet = dscpOvsdbSet.set(); Long dscp = null; if (dscpSet != null && !dscpSet.isEmpty()) { dscp = Long.valueOf(dscpSet.toArray()[0].toString()); } Map<String, String> otherConfigs; Map<String, String> externalIds; otherConfigs = ((OvsdbMap) queue.getOtherConfigColumn().data()).map(); externalIds = ((OvsdbMap) queue.getExternalIdsColumn().data()).map(); return OvsdbQueue.builder().dscp(dscp). 
otherConfigs(otherConfigs).externalIds(externalIds).build(); } private long getOfPort(Interface intf) { OvsdbSet ofPortSet = (OvsdbSet) intf.getOpenFlowPortColumn().data(); @SuppressWarnings("unchecked") Set<Integer> ofPorts = ofPortSet.set(); if (ofPorts == null || ofPorts.isEmpty()) { log.debug("The ofport is null in {}", intf.getName()); return -1; } // return (long) ofPorts.toArray()[0]; Iterator<Integer> it = ofPorts.iterator(); return Long.parseLong(it.next().toString()); } @Override public Set<OvsdbPort> getLocalPorts(Iterable<String> ifaceids) { Set<OvsdbPort> ovsdbPorts = new HashSet<>(); OvsdbTableStore tableStore = getTableStore(DATABASENAME); if (tableStore == null) { return null; } OvsdbRowStore rowStore = tableStore.getRows(INTERFACE); if (rowStore == null) { return null; } ConcurrentMap<String, Row> rows = rowStore.getRowStore(); for (String uuid : rows.keySet()) { Row row = getRow(DATABASENAME, INTERFACE, uuid); DatabaseSchema dbSchema = getDatabaseSchema(DATABASENAME); Interface intf = (Interface) TableGenerator .getTable(dbSchema, row, OvsdbTable.INTERFACE); if (intf == null || getIfaceid(intf) == null) { continue; } String portName = intf.getName(); if (portName == null) { continue; } Set<String> ifaceidSet = Sets.newHashSet(ifaceids); if (portName.startsWith(TYPEVXLAN) || !ifaceidSet.contains(getIfaceid(intf))) { continue; } long ofPort = getOfPort(intf); if (ofPort < 0) { continue; } ovsdbPorts.add(new OvsdbPort(new OvsdbPortNumber(ofPort), new OvsdbPortName(portName))); } return ovsdbPorts; } private String getIfaceid(Interface intf) { OvsdbMap ovsdbMap = (OvsdbMap) intf.getExternalIdsColumn().data(); @SuppressWarnings("unchecked") Map<String, String> externalIds = ovsdbMap.map(); if (externalIds.isEmpty()) { log.warn("The external_ids is null"); return null; } String ifaceid = externalIds.get(EXTERNAL_ID_INTERFACE_ID); if (ifaceid == null) { log.warn("The ifaceid is null"); return null; } return ifaceid; } @Override public void disconnect() { channel.disconnect(); } @Override public List<OvsdbPortName> getPorts(List<String> portNames, DeviceId deviceId) { Uuid bridgeUuid = getBridgeUuid(deviceId); if (bridgeUuid == null) { log.error("Can't find the bridge for the deviceId {}", deviceId); return Collections.emptyList(); } DatabaseSchema dbSchema = schema.get(DATABASENAME); Row bridgeRow = getRow(DATABASENAME, BRIDGE, bridgeUuid.value()); Bridge bridge = (Bridge) TableGenerator.getTable(dbSchema, bridgeRow, OvsdbTable.BRIDGE); if (bridge == null) { return Collections.emptyList(); } OvsdbSet setPorts = (OvsdbSet) bridge.getPortsColumn().data(); Set<Uuid> portSet = setPorts.set(); if (portSet.isEmpty()) { return Collections.emptyList(); } Map<Uuid, Port> portMap = portSet.stream().collect(Collectors.toMap( java.util.function.Function.identity(), port -> (Port) TableGenerator .getTable(dbSchema, getRow(DATABASENAME, PORT, port.value()), OvsdbTable.PORT))); List<OvsdbPortName> portList = portMap.entrySet().stream().filter(port -> Objects.nonNull(port.getValue()) && portNames.contains(port.getValue().getName()) && Objects.nonNull(getInterfacebyPort(port.getKey().value(), port.getValue().getName()))) .map(port -> new OvsdbPortName(port.getValue().getName())).collect(Collectors.toList()); return Collections.unmodifiableList(portList); } @Override public boolean getPortError(List<OvsdbPortName> portNames, DeviceId bridgeId) { Uuid bridgeUuid = getBridgeUuid(bridgeId); List<Interface> interfaceList = portNames.stream().collect(Collectors 
.toMap(java.util.function.Function.identity(), port -> (Interface) getInterfacebyPort(getPortUuid(port.value(), bridgeUuid.value()), port.value()))) .entrySet().stream().filter(intf -> Objects.nonNull(intf.getValue()) && ((OvsdbSet) intf.getValue().getOpenFlowPortColumn().data()).set() .stream().findAny().orElse(OFPORT_ERROR_COMPARISON).equals(OFPORT_ERROR)) .map(Map.Entry::getValue).collect(Collectors.toList()); interfaceList.forEach(intf -> ((Consumer<Interface>) intf1 -> { try { Set<String> setErrors = ((OvsdbSet) intf1.getErrorColumn().data()).set(); log.info("Port has errors. ofport value - {}, Interface - {} has error - {} ", intf1.getOpenFlowPortColumn().data(), intf1.getName(), setErrors.stream() .findFirst().get()); } catch (ColumnSchemaNotFoundException | VersionMismatchException e) { log.debug("Port has errors. ofport value - {}, Interface - {} has error - {} ", intf1.getOpenFlowPortColumn().data(), intf1.getName(), e); } }).accept(intf)); return !interfaceList.isEmpty(); } private Interface getInterfacebyPort(String portUuid, String portName) { DatabaseSchema dbSchema = schema.get(DATABASENAME); Row portRow = getRow(DATABASENAME, PORT, portUuid); Port port = (Port) TableGenerator.getTable(dbSchema, portRow, OvsdbTable.PORT); if (port == null) { return null; } OvsdbSet setInterfaces = (OvsdbSet) port.getInterfacesColumn().data(); Set<Uuid> interfaces = setInterfaces.set(); return interfaces.stream().map(intf -> (Interface) TableGenerator .getTable(dbSchema, getRow(DATABASENAME, INTERFACE, intf.value()), OvsdbTable.INTERFACE)) .filter(intf -> Objects.nonNull(intf) && portName.equalsIgnoreCase(intf.getName())) .findFirst().orElse(null); } /** * Get first row of given table from given db. * * @param dbName db name * @param tblName table name * @return firstRow, first row of the given table from given db if present */ @Override public Optional<Object> getFirstRow(String dbName, OvsdbTable tblName) { DatabaseSchema dbSchema = getDatabaseSchema(dbName); if (Objects.isNull(dbSchema)) { return Optional.empty(); } OvsdbTableStore tableStore = ovsdbStore.getOvsdbTableStore(dbName); if (tableStore == null) { return Optional.empty(); } OvsdbRowStore rowStore = tableStore.getRows(tblName.tableName()); if (rowStore == null) { return Optional.empty(); } ConcurrentMap<String, Row> rows = rowStore.getRowStore(); if (rows == null) { log.debug("The {} Table Rows is null", tblName); return Optional.empty(); } // There should be only 1 row in this table Optional<String> uuid = rows.keySet().stream().findFirst(); if (uuid.isPresent() && rows.containsKey(uuid.get())) { return Optional.of(TableGenerator.getTable(dbSchema, rows.get(uuid.get()), tblName)); } else { return Optional.empty(); } } /** * Get memory usage of device. * * @return memoryStats, empty data as there is no generic way to fetch such stats */ @Override public Optional<DeviceMemoryStats> getDeviceMemoryUsage() { return Optional.empty(); } /** * Get cpu usage of device. * * @return cpuStats, empty data as there is no generic way to fetch such stats */ @Override public Optional<DeviceCpuStats> getDeviceCpuUsage() { return Optional.empty(); } private <I, O> ListenableFuture<O> futureTransform( ListenableFuture<I> input, Function<? super I, ? extends O> function) { // Wrapper around deprecated Futures.transform() method. As per Guava // recommendation, passing MoreExecutors.directExecutor() for identical // behavior. return Futures.transform(input, function, MoreExecutors.directExecutor()); } }
protocols/ovsdb/api/src/main/java/org/onosproject/ovsdb/controller/driver/DefaultOvsdbClient.java
/* * Copyright 2015-present Open Networking Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.ovsdb.controller.driver; import com.fasterxml.jackson.databind.JsonNode; import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.SettableFuture; import io.netty.channel.Channel; import org.onlab.packet.IpAddress; import org.onosproject.net.DeviceId; import org.onosproject.net.PortNumber; import org.onosproject.net.behaviour.ControlProtocolVersion; import org.onosproject.net.behaviour.ControllerInfo; import org.onosproject.net.behaviour.DeviceCpuStats; import org.onosproject.net.behaviour.DeviceMemoryStats; import org.onosproject.net.behaviour.MirroringName; import org.onosproject.net.behaviour.MirroringStatistics; import org.onosproject.net.behaviour.QosId; import org.onosproject.net.behaviour.QueueDescription; import org.onosproject.net.behaviour.QueueId; import org.onosproject.ovsdb.controller.OvsdbBridge; import org.onosproject.ovsdb.controller.OvsdbClientService; import org.onosproject.ovsdb.controller.OvsdbInterface; import org.onosproject.ovsdb.controller.OvsdbMirror; import org.onosproject.ovsdb.controller.OvsdbNodeId; import org.onosproject.ovsdb.controller.OvsdbPort; import org.onosproject.ovsdb.controller.OvsdbPortName; import org.onosproject.ovsdb.controller.OvsdbPortNumber; import org.onosproject.ovsdb.controller.OvsdbQos; import org.onosproject.ovsdb.controller.OvsdbQueue; import org.onosproject.ovsdb.controller.OvsdbRowStore; import org.onosproject.ovsdb.controller.OvsdbStore; import org.onosproject.ovsdb.controller.OvsdbTableStore; import org.onosproject.ovsdb.rfc.exception.ColumnSchemaNotFoundException; import org.onosproject.ovsdb.rfc.exception.VersionMismatchException; import org.onosproject.ovsdb.rfc.jsonrpc.Callback; import org.onosproject.ovsdb.rfc.message.OperationResult; import org.onosproject.ovsdb.rfc.message.TableUpdates; import org.onosproject.ovsdb.rfc.notation.Column; import org.onosproject.ovsdb.rfc.notation.Condition; import org.onosproject.ovsdb.rfc.notation.Mutation; import org.onosproject.ovsdb.rfc.notation.OvsdbMap; import org.onosproject.ovsdb.rfc.notation.OvsdbSet; import org.onosproject.ovsdb.rfc.notation.Row; import org.onosproject.ovsdb.rfc.notation.Uuid; import org.onosproject.ovsdb.rfc.operations.Delete; import org.onosproject.ovsdb.rfc.operations.Insert; import org.onosproject.ovsdb.rfc.operations.Mutate; import org.onosproject.ovsdb.rfc.operations.Operation; import org.onosproject.ovsdb.rfc.operations.Update; import org.onosproject.ovsdb.rfc.schema.ColumnSchema; import org.onosproject.ovsdb.rfc.schema.DatabaseSchema; import 
org.onosproject.ovsdb.rfc.schema.TableSchema; import org.onosproject.ovsdb.rfc.table.Bridge; import org.onosproject.ovsdb.rfc.table.Controller; import org.onosproject.ovsdb.rfc.table.Interface; import org.onosproject.ovsdb.rfc.table.Mirror; import org.onosproject.ovsdb.rfc.table.OvsdbTable; import org.onosproject.ovsdb.rfc.table.Port; import org.onosproject.ovsdb.rfc.table.Qos; import org.onosproject.ovsdb.rfc.table.Queue; import org.onosproject.ovsdb.rfc.table.TableGenerator; import org.onosproject.ovsdb.rfc.utils.ConditionUtil; import org.onosproject.ovsdb.rfc.utils.FromJsonUtil; import org.onosproject.ovsdb.rfc.utils.JsonRpcWriterUtil; import org.onosproject.ovsdb.rfc.utils.MutationUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.net.InetSocketAddress; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; import java.util.stream.Collectors; import static org.onosproject.ovsdb.controller.OvsdbConstant.BRIDGE; import static org.onosproject.ovsdb.controller.OvsdbConstant.BRIDGES; import static org.onosproject.ovsdb.controller.OvsdbConstant.BRIDGE_CONTROLLER; import static org.onosproject.ovsdb.controller.OvsdbConstant.CONTROLLER; import static org.onosproject.ovsdb.controller.OvsdbConstant.DATABASENAME; import static org.onosproject.ovsdb.controller.OvsdbConstant.EXTERNAL_ID; import static org.onosproject.ovsdb.controller.OvsdbConstant.EXTERNAL_ID_INTERFACE_ID; import static org.onosproject.ovsdb.controller.OvsdbConstant.INTERFACE; import static org.onosproject.ovsdb.controller.OvsdbConstant.INTERFACES; import static org.onosproject.ovsdb.controller.OvsdbConstant.MIRROR; import static org.onosproject.ovsdb.controller.OvsdbConstant.MIRRORS; import static org.onosproject.ovsdb.controller.OvsdbConstant.OFPORT; import static org.onosproject.ovsdb.controller.OvsdbConstant.OFPORT_ERROR; import static org.onosproject.ovsdb.controller.OvsdbConstant.PORT; import static org.onosproject.ovsdb.controller.OvsdbConstant.PORTS; import static org.onosproject.ovsdb.controller.OvsdbConstant.PORT_QOS; import static org.onosproject.ovsdb.controller.OvsdbConstant.QOS; import static org.onosproject.ovsdb.controller.OvsdbConstant.QOS_EXTERNAL_ID_KEY; import static org.onosproject.ovsdb.controller.OvsdbConstant.QUEUE; import static org.onosproject.ovsdb.controller.OvsdbConstant.QUEUES; import static org.onosproject.ovsdb.controller.OvsdbConstant.QUEUE_EXTERNAL_ID_KEY; import static org.onosproject.ovsdb.controller.OvsdbConstant.TYPEVXLAN; import static org.onosproject.ovsdb.controller.OvsdbConstant.UUID; /** * An representation of an ovsdb client. 
*/ public class DefaultOvsdbClient implements OvsdbProviderService, OvsdbClientService { private static final int TRANSACTCONFIG_TIMEOUT = 3; //sec private static final int OFPORT_ERROR_COMPARISON = 0; private final Logger log = LoggerFactory.getLogger(DefaultOvsdbClient.class); private Channel channel; private OvsdbAgent agent; private boolean connected; private OvsdbNodeId nodeId; private Callback monitorCallBack; private OvsdbStore ovsdbStore = new OvsdbStore(); private final Map<String, String> requestMethod = Maps.newHashMap(); private final Map<String, SettableFuture<? extends Object>> requestResult = Maps.newHashMap(); private final Map<String, DatabaseSchema> schema = Maps.newHashMap(); /** * Creates an OvsdbClient. * * @param nodeId ovsdb node id */ public DefaultOvsdbClient(OvsdbNodeId nodeId) { this.nodeId = nodeId; } @Override public OvsdbNodeId nodeId() { return nodeId; } @Override public void setAgent(OvsdbAgent agent) { if (this.agent == null) { this.agent = agent; } } @Override public void setChannel(Channel channel) { this.channel = channel; } @Override public void setConnection(boolean connected) { this.connected = connected; } @Override public boolean isConnected() { return this.connected; } @Override public void nodeAdded() { this.agent.addConnectedNode(nodeId, this); } @Override public void nodeRemoved() { this.agent.removeConnectedNode(nodeId); channel.disconnect(); } /** * Gets the ovsdb table store. * * @param dbName the ovsdb database name * @return ovsTableStore, empty if table store is find */ private OvsdbTableStore getTableStore(String dbName) { if (ovsdbStore == null) { return null; } return ovsdbStore.getOvsdbTableStore(dbName); } /** * Gets the ovsdb row store. * * @param dbName the ovsdb database name * @param tableName the ovsdb table name * @return ovsRowStore, empty store if no rows exist in the table */ private OvsdbRowStore getRowStore(String dbName, String tableName) { OvsdbTableStore tableStore = getTableStore(dbName); if (tableStore == null) { return null; } OvsdbRowStore rowStore = tableStore.getRows(tableName); if (rowStore == null) { rowStore = new OvsdbRowStore(); } return rowStore; } /** * Gets the ovsdb row. * * @param dbName the ovsdb database name * @param tableName the ovsdb table name * @param uuid the key of the row * @return row, empty if row is find */ @Override public Row getRow(String dbName, String tableName, String uuid) { OvsdbTableStore tableStore = getTableStore(dbName); if (tableStore == null) { return null; } OvsdbRowStore rowStore = tableStore.getRows(tableName); if (rowStore == null) { return null; } return rowStore.getRow(uuid); } @Override public void removeRow(String dbName, String tableName, String uuid) { OvsdbTableStore tableStore = getTableStore(dbName); if (tableStore == null) { return; } OvsdbRowStore rowStore = tableStore.getRows(tableName); if (rowStore == null) { return; } rowStore.deleteRow(uuid); } @Override public void updateOvsdbStore(String dbName, String tableName, String uuid, Row row) { OvsdbTableStore tableStore = ovsdbStore.getOvsdbTableStore(dbName); if (tableStore == null) { tableStore = new OvsdbTableStore(); } OvsdbRowStore rowStore = tableStore.getRows(tableName); if (rowStore == null) { rowStore = new OvsdbRowStore(); } rowStore.insertRow(uuid, row); tableStore.createOrUpdateTable(tableName, rowStore); ovsdbStore.createOrUpdateOvsdbStore(dbName, tableStore); } /** * Gets the Mirror uuid. 
* * @param mirrorName mirror name * @return mirror uuid, empty if no uuid is found */ @Override public String getMirrorUuid(String mirrorName) { DatabaseSchema dbSchema = schema.get(DATABASENAME); OvsdbRowStore rowStore = getRowStore(DATABASENAME, MIRROR); if (rowStore == null) { log.warn("The mirror uuid is null"); return null; } ConcurrentMap<String, Row> mirrorTableRows = rowStore.getRowStore(); if (mirrorTableRows == null) { log.warn("The mirror uuid is null"); return null; } for (String uuid : mirrorTableRows.keySet()) { Mirror mirror = (Mirror) TableGenerator .getTable(dbSchema, mirrorTableRows.get(uuid), OvsdbTable.MIRROR); String name = mirror.getName(); if (name.contains(mirrorName)) { return uuid; } } log.warn("Mirroring not found"); return null; } /** * Gets mirrors of the device. * * @param deviceId target device id * @return set of mirroring; empty if no mirror is found */ @Override public Set<MirroringStatistics> getMirroringStatistics(DeviceId deviceId) { Uuid bridgeUuid = getBridgeUuid(deviceId); if (bridgeUuid == null) { log.warn("Couldn't find bridge {} in {}", deviceId, nodeId.getIpAddress()); return null; } List<MirroringStatistics> mirrorings = getMirrorings(bridgeUuid); if (mirrorings == null) { log.warn("Couldn't find mirrors in {}", nodeId.getIpAddress()); return null; } return ImmutableSet.copyOf(mirrorings); } /** * Helper method which retrieves mirrorings statistics using bridge uuid. * * @param bridgeUuid the uuid of the bridge * @return the list of the mirrorings statistics. */ private List<MirroringStatistics> getMirrorings(Uuid bridgeUuid) { DatabaseSchema dbSchema = schema.get(DATABASENAME); if (dbSchema == null) { log.warn("Unable to retrieve dbSchema {}", DATABASENAME); return null; } OvsdbRowStore rowStore = getRowStore(DATABASENAME, BRIDGE); if (rowStore == null) { log.warn("Unable to retrieve rowStore {} of {}", BRIDGE, DATABASENAME); return null; } Row bridgeRow = rowStore.getRow(bridgeUuid.value()); Bridge bridge = (Bridge) TableGenerator. 
getTable(dbSchema, bridgeRow, OvsdbTable.BRIDGE); Set<Uuid> mirroringsUuids = (Set<Uuid>) ((OvsdbSet) bridge .getMirrorsColumn().data()).set(); OvsdbRowStore mirrorRowStore = getRowStore(DATABASENAME, MIRROR); if (mirrorRowStore == null) { log.warn("Unable to retrieve rowStore {} of {}", MIRROR, DATABASENAME); return null; } List<MirroringStatistics> mirroringStatistics = new ArrayList<>(); ConcurrentMap<String, Row> mirrorTableRows = mirrorRowStore.getRowStore(); mirrorTableRows.forEach((key, row) -> { if (!mirroringsUuids.contains(Uuid.uuid(key))) { return; } Mirror mirror = (Mirror) TableGenerator .getTable(dbSchema, row, OvsdbTable.MIRROR); mirroringStatistics.add(MirroringStatistics.mirroringStatistics(mirror.getName(), (Map<String, Integer>) ((OvsdbMap) mirror .getStatisticsColumn().data()).map())); }); return ImmutableList.copyOf(mirroringStatistics); } @Override public String getPortUuid(String portName, String bridgeUuid) { DatabaseSchema dbSchema = schema.get(DATABASENAME); Row bridgeRow = getRow(DATABASENAME, BRIDGE, bridgeUuid); Bridge bridge = (Bridge) TableGenerator.getTable(dbSchema, bridgeRow, OvsdbTable.BRIDGE); if (bridge != null) { OvsdbSet setPorts = (OvsdbSet) bridge.getPortsColumn().data(); @SuppressWarnings("unchecked") Set<Uuid> ports = setPorts.set(); if (ports == null || ports.isEmpty()) { log.warn("The port uuid is null"); return null; } for (Uuid uuid : ports) { Row portRow = getRow(DATABASENAME, PORT, uuid.value()); Port port = (Port) TableGenerator.getTable(dbSchema, portRow, OvsdbTable.PORT); if (port != null && portName.equalsIgnoreCase(port.getName())) { return uuid.value(); } } } return null; } @Override public String getBridgeUuid(String bridgeName) { DatabaseSchema dbSchema = schema.get(DATABASENAME); OvsdbRowStore rowStore = getRowStore(DATABASENAME, BRIDGE); if (rowStore == null) { log.debug("The bridge uuid is null"); return null; } ConcurrentMap<String, Row> bridgeTableRows = rowStore.getRowStore(); if (bridgeTableRows == null) { log.debug("The bridge uuid is null"); return null; } for (String uuid : bridgeTableRows.keySet()) { Bridge bridge = (Bridge) TableGenerator .getTable(dbSchema, bridgeTableRows.get(uuid), OvsdbTable.BRIDGE); if (bridge.getName().equals(bridgeName)) { return uuid; } } return null; } private String getOvsUuid(String dbName) { OvsdbRowStore rowStore = getRowStore(DATABASENAME, DATABASENAME); if (rowStore == null) { log.debug("The bridge uuid is null"); return null; } ConcurrentMap<String, Row> ovsTableRows = rowStore.getRowStore(); if (ovsTableRows != null) { for (String uuid : ovsTableRows.keySet()) { Row row = ovsTableRows.get(uuid); String tableName = row.tableName(); if (tableName.equals(dbName)) { return uuid; } } } return null; } @Override public void createPort(String bridgeName, String portName) { String bridgeUuid = getBridgeUuid(bridgeName); if (bridgeUuid == null) { log.error("Can't find bridge {} in {}", bridgeName, nodeId.getIpAddress()); return; } DatabaseSchema dbSchema = schema.get(DATABASENAME); String portUuid = getPortUuid(portName, bridgeUuid); Port port = (Port) TableGenerator.createTable(dbSchema, OvsdbTable.PORT); port.setName(portName); if (portUuid == null) { insertConfig(PORT, UUID, BRIDGE, PORTS, bridgeUuid, port.getRow()); } } @Override public void dropPort(String bridgeName, String portName) { String bridgeUuid = getBridgeUuid(bridgeName); if (bridgeUuid == null) { log.error("Could not find Bridge {} in {}", bridgeName, nodeId); return; } String portUuid = getPortUuid(portName, bridgeUuid); if 
(portUuid != null) { log.info("Deleting port {}", portName); deleteConfig(PORT, UUID, portUuid, BRIDGE, PORTS, Uuid.uuid(portUuid)); } } @Override public boolean createBridge(OvsdbBridge ovsdbBridge) { DatabaseSchema dbSchema = schema.get(DATABASENAME); String ovsUuid = getOvsUuid(DATABASENAME); if (dbSchema == null || ovsUuid == null) { log.error("Can't find database Open_vSwitch"); return false; } Bridge bridge = (Bridge) TableGenerator.createTable(dbSchema, OvsdbTable.BRIDGE); bridge.setOtherConfig(ovsdbBridge.otherConfigs()); if (ovsdbBridge.failMode().isPresent()) { String failMode = ovsdbBridge.failMode().get().name().toLowerCase(); bridge.setFailMode(Sets.newHashSet(failMode)); } if (ovsdbBridge.datapathType().isPresent()) { String datapathType = ovsdbBridge.datapathType().get(); bridge.setDatapathType(datapathType); } if (ovsdbBridge.controlProtocols().isPresent()) { bridge.setProtocols(ovsdbBridge.controlProtocols().get().stream() .map(ControlProtocolVersion::toString) .collect(Collectors.toCollection(HashSet::new))); } String bridgeUuid = getBridgeUuid(ovsdbBridge.name()); if (bridgeUuid == null) { bridge.setName(ovsdbBridge.name()); bridgeUuid = insertConfig( BRIDGE, UUID, DATABASENAME, BRIDGES, ovsUuid, bridge.getRow()); } else { // update the bridge if it's already existing updateConfig(BRIDGE, UUID, bridgeUuid, bridge.getRow()); } if (bridgeUuid == null) { log.warn("Failed to create bridge {} on {}", ovsdbBridge.name(), nodeId); return false; } createPort(ovsdbBridge.name(), ovsdbBridge.name()); setControllersWithUuid(Uuid.uuid(bridgeUuid), ovsdbBridge.controllers()); log.info("Created bridge {}", ovsdbBridge.name()); return true; } @Override public ControllerInfo localController() { IpAddress ipAddress = IpAddress.valueOf(((InetSocketAddress) channel.localAddress()).getAddress()); return new ControllerInfo(ipAddress, OFPORT, "tcp"); } private void setControllersWithUuid(Uuid bridgeUuid, List<ControllerInfo> controllers) { DatabaseSchema dbSchema = schema.get(DATABASENAME); if (dbSchema == null) { log.debug("There is no schema"); return; } List<Controller> oldControllers = getControllers(bridgeUuid); if (oldControllers == null) { log.warn("There are no controllers"); return; } Set<ControllerInfo> newControllers = new HashSet<>(controllers); List<Controller> removeControllers = new ArrayList<>(); oldControllers.forEach(controller -> { ControllerInfo controllerInfo = new ControllerInfo((String) controller.getTargetColumn().data()); if (newControllers.contains(controllerInfo)) { newControllers.remove(controllerInfo); } else { removeControllers.add(controller); } }); OvsdbRowStore controllerRowStore = getRowStore(DATABASENAME, CONTROLLER); if (controllerRowStore == null) { log.debug("There is no controller table"); return; } removeControllers.forEach(c -> deleteConfig(CONTROLLER, UUID, c.getRow().uuid().value(), BRIDGE, BRIDGE_CONTROLLER, c.getRow().uuid())); newControllers.stream().map(c -> { Controller controller = (Controller) TableGenerator .createTable(dbSchema, OvsdbTable.CONTROLLER); controller.setTarget(c.target()); return controller; }).forEach(c -> insertConfig(CONTROLLER, UUID, BRIDGE, BRIDGE_CONTROLLER, bridgeUuid.value(), c.getRow())); } @Override public void setControllersWithDeviceId(DeviceId deviceId, List<ControllerInfo> controllers) { setControllersWithUuid(getBridgeUuid(deviceId), controllers); } @Override public void dropBridge(String bridgeName) { String bridgeUuid = getBridgeUuid(bridgeName); if (bridgeUuid == null) { log.warn("Could not find bridge in node {}",
nodeId.getIpAddress()); return; } deleteConfig(BRIDGE, UUID, bridgeUuid, DATABASENAME, BRIDGES, Uuid.uuid(bridgeUuid)); } @Override public void applyQos(PortNumber portNumber, String qosName) { DatabaseSchema dbSchema = schema.get(DATABASENAME); OvsdbRowStore portRowStore = getRowStore(DATABASENAME, PORT); if (portRowStore == null) { log.debug("The port uuid is null"); return; } OvsdbRowStore qosRowStore = getRowStore(DATABASENAME, QOS); if (qosRowStore == null) { log.debug("The qos uuid is null"); return; } // Due to Qos Table doesn't have a unique identifier except uuid, unlike // Bridge or Port Table has a name column,in order to make the api more // general, put qos name in external_ids column of Qos Table if this qos // created by onos. ConcurrentMap<String, Row> qosTableRows = qosRowStore.getRowStore(); ConcurrentMap<String, Row> portTableRows = portRowStore.getRowStore(); Row qosRow = qosTableRows.values().stream().filter(r -> { OvsdbMap ovsdbMap = (OvsdbMap) (r.getColumn(EXTERNAL_ID).data()); return qosName.equals(ovsdbMap.map().get(QOS_EXTERNAL_ID_KEY)); }).findFirst().orElse(null); Row portRow = portTableRows.values().stream() .filter(r -> r.getColumn("name").data().equals(portNumber.name())) .findFirst().orElse(null); if (portRow != null && qosRow != null) { String qosId = qosRow.uuid().value(); Uuid portUuid = portRow.uuid(); Map<String, Column> columns = new HashMap<>(); Row newPortRow = new Row(PORT, portUuid, columns); Port newport = new Port(dbSchema, newPortRow); columns.put(Port.PortColumn.QOS.columnName(), newport.getQosColumn()); newport.setQos(Uuid.uuid(qosId)); updateConfig(PORT, UUID, portUuid.value(), newport.getRow()); } } @Override public void removeQos(PortNumber portNumber) { DatabaseSchema dbSchema = schema.get(DATABASENAME); OvsdbRowStore rowStore = getRowStore(DATABASENAME, PORT); if (rowStore == null) { log.debug("The qos uuid is null"); return; } ConcurrentMap<String, Row> ovsTableRows = rowStore.getRowStore(); Row portRow = ovsTableRows.values().stream() .filter(r -> r.getColumn("name").data().equals(portNumber.name())) .findFirst().orElse(null); if (portRow == null) { log.warn("Couldn't find port {} in ovsdb port table.", portNumber.name()); return; } OvsdbSet ovsdbSet = ((OvsdbSet) portRow.getColumn(PORT_QOS).data()); @SuppressWarnings("unchecked") Set<Uuid> qosIdSet = ovsdbSet.set(); if (qosIdSet == null || qosIdSet.isEmpty()) { return; } Uuid qosUuid = (Uuid) qosIdSet.toArray()[0]; Condition condition = ConditionUtil.isEqual(UUID, portRow.uuid()); List<Condition> conditions = Lists.newArrayList(condition); Mutation mutation = MutationUtil.delete(PORT_QOS, qosUuid); List<Mutation> mutations = Lists.newArrayList(mutation); ArrayList<Operation> operations = Lists.newArrayList(); Mutate mutate = new Mutate(dbSchema.getTableSchema(PORT), conditions, mutations); operations.add(mutate); transactConfig(DATABASENAME, operations); } @Override public boolean createQos(OvsdbQos ovsdbQos) { DatabaseSchema dbSchema = schema.get(DATABASENAME); Qos qos = (Qos) TableGenerator.createTable(dbSchema, OvsdbTable.QOS); OvsdbRowStore rowStore = getRowStore(DATABASENAME, QOS); if (rowStore == null) { log.debug("The qos uuid is null"); return false; } ArrayList<Operation> operations = Lists.newArrayList(); Set<String> types = Sets.newHashSet(); Map<Long, Uuid> queues = Maps.newHashMap(); types.add(ovsdbQos.qosType()); qos.setOtherConfig(ovsdbQos.otherConfigs()); qos.setExternalIds(ovsdbQos.externalIds()); qos.setType(types); if (ovsdbQos.qosQueues().isPresent()) { for 
(Map.Entry<Long, String> entry : ovsdbQos.qosQueues().get().entrySet()) { OvsdbRowStore queueRowStore = getRowStore(DATABASENAME, QUEUE); if (queueRowStore != null) { ConcurrentMap<String, Row> queueTableRows = queueRowStore.getRowStore(); Row queueRow = queueTableRows.values().stream().filter(r -> { OvsdbMap ovsdbMap = (OvsdbMap) (r.getColumn(EXTERNAL_ID).data()); return entry.getValue().equals(ovsdbMap.map().get(QUEUE_EXTERNAL_ID_KEY)); }).findFirst().orElse(null); if (queueRow != null) { queues.put(entry.getKey(), queueRow.uuid()); } } } qos.setQueues(queues); } Insert qosInsert = new Insert(dbSchema.getTableSchema(QOS), QOS, qos.getRow()); operations.add(qosInsert); try { transactConfig(DATABASENAME, operations).get(); } catch (InterruptedException | ExecutionException e) { return false; } return true; } @Override public void dropQos(QosId qosId) { OvsdbRowStore rowStore = getRowStore(DATABASENAME, QOS); if (rowStore != null) { ConcurrentMap<String, Row> qosTableRows = rowStore.getRowStore(); Row qosRow = qosTableRows.values().stream().filter(r -> { OvsdbMap ovsdbMap = (OvsdbMap) (r.getColumn(EXTERNAL_ID).data()); return qosId.name().equals(ovsdbMap.map().get(QOS_EXTERNAL_ID_KEY)); }).findFirst().orElse(null); if (qosRow != null) { deleteConfig(QOS, UUID, qosRow.uuid().value(), PORT, PORT_QOS, qosRow.uuid()); } } } @Override public OvsdbQos getQos(QosId qosId) { Set<OvsdbQos> ovsdbQoses = getQoses(); return ovsdbQoses.stream().filter(r -> qosId.name().equals(r.externalIds().get(QOS_EXTERNAL_ID_KEY))). findFirst().orElse(null); } @Override public Set<OvsdbQos> getQoses() { Set<OvsdbQos> ovsdbQoses = new HashSet<>(); OvsdbRowStore rowStore = getRowStore(DATABASENAME, QOS); if (rowStore == null) { log.debug("The qos uuid is null"); return ovsdbQoses; } ConcurrentMap<String, Row> rows = rowStore.getRowStore(); ovsdbQoses = rows.keySet().stream() .map(uuid -> getRow(DATABASENAME, QOS, uuid)) .map(this::getOvsdbQos) .filter(Objects::nonNull) .collect(Collectors.toSet()); return ovsdbQoses; } @Override public void bindQueues(QosId qosId, Map<Long, QueueDescription> queues) { DatabaseSchema dbSchema = schema.get(DATABASENAME); OvsdbRowStore qosRowStore = getRowStore(DATABASENAME, QOS); if (qosRowStore == null) { log.debug("The qos uuid is null"); return; } OvsdbRowStore queueRowStore = getRowStore(DATABASENAME, QUEUE); if (queueRowStore == null) { log.debug("The queue uuid is null"); return; } ConcurrentMap<String, Row> qosTableRows = qosRowStore.getRowStore(); ConcurrentMap<String, Row> queueTableRows = queueRowStore.getRowStore(); Row qosRow = qosTableRows.values().stream().filter(r -> { OvsdbMap ovsdbMap = (OvsdbMap) (r.getColumn(EXTERNAL_ID).data()); return qosId.name().equals(ovsdbMap.map().get(QOS_EXTERNAL_ID_KEY)); }).findFirst().orElse(null); if (qosRow == null) { log.warn("Can't find QoS {}", qosId); return; } Uuid qosUuid = qosRow.uuid(); Map<Long, Uuid> newQueues = new HashMap<>(); for (Map.Entry<Long, QueueDescription> entry : queues.entrySet()) { Row queueRow = queueTableRows.values().stream().filter(r -> { OvsdbMap ovsdbMap = (OvsdbMap) (r.getColumn(EXTERNAL_ID).data()); return entry.getValue().queueId().name().equals(ovsdbMap.map().get(QUEUE_EXTERNAL_ID_KEY)); }).findFirst().orElse(null); if (queueRow != null) { newQueues.put(entry.getKey(), queueRow.uuid()); } } // update the qos table ArrayList<Operation> operations = Lists.newArrayList(); Condition condition = ConditionUtil.isEqual(UUID, qosUuid); Mutation mutation = MutationUtil.insert(QUEUES, newQueues); List<Condition> 
conditions = Collections.singletonList(condition); List<Mutation> mutations = Collections.singletonList(mutation); operations.add(new Mutate(dbSchema.getTableSchema(QOS), conditions, mutations)); transactConfig(DATABASENAME, operations); } @SuppressWarnings("unchecked") @Override public void unbindQueues(QosId qosId, List<Long> queueKeys) { DatabaseSchema dbSchema = schema.get(DATABASENAME); OvsdbRowStore qosRowStore = getRowStore(DATABASENAME, QOS); if (qosRowStore == null) { return; } ConcurrentMap<String, Row> qosTableRows = qosRowStore.getRowStore(); Row qosRow = qosTableRows.values().stream().filter(r -> { OvsdbMap ovsdbMap = (OvsdbMap) (r.getColumn(EXTERNAL_ID).data()); return qosId.name().equals(ovsdbMap.map().get(QOS_EXTERNAL_ID_KEY)); }).findFirst().orElse(null); if (qosRow == null) { log.warn("Can't find QoS {}", qosId); return; } Map<Long, Uuid> deleteQueuesMap; Map<Integer, Uuid> queuesMap = ((OvsdbMap) qosRow.getColumn(QUEUES).data()).map(); deleteQueuesMap = queueKeys.stream() .filter(key -> queuesMap.containsKey(key.intValue())) .collect(Collectors.toMap(key -> key, key -> queuesMap.get(key.intValue()), (a, b) -> b)); if (deleteQueuesMap.size() != 0) { TableSchema parentTableSchema = dbSchema .getTableSchema(QOS); ColumnSchema parentColumnSchema = parentTableSchema .getColumnSchema(QUEUES); Mutation mutation = MutationUtil.delete(parentColumnSchema.name(), OvsdbMap.ovsdbMap(deleteQueuesMap)); List<Mutation> mutations = Collections.singletonList(mutation); Condition condition = ConditionUtil.isEqual(UUID, qosRow.uuid()); List<Condition> conditionList = Collections.singletonList(condition); List<Operation> operations = Collections.singletonList( new Mutate(parentTableSchema, conditionList, mutations)); transactConfig(DATABASENAME, operations); } } @Override public boolean createQueue(OvsdbQueue ovsdbQueue) { DatabaseSchema dbSchema = schema.get(DATABASENAME); Queue queue = (Queue) TableGenerator.createTable(dbSchema, OvsdbTable.QUEUE); ArrayList<Operation> operations = Lists.newArrayList(); OvsdbRowStore rowStore = getRowStore(DATABASENAME, QUEUE); if (rowStore == null) { log.debug("The queue uuid is null"); return false; } if (ovsdbQueue.dscp().isPresent()) { queue.setDscp(ImmutableSet.of(ovsdbQueue.dscp().get())); } queue.setOtherConfig(ovsdbQueue.otherConfigs()); queue.setExternalIds(ovsdbQueue.externalIds()); Insert queueInsert = new Insert(dbSchema.getTableSchema(QUEUE), QUEUE, queue.getRow()); operations.add(queueInsert); try { transactConfig(DATABASENAME, operations).get(); } catch (InterruptedException | ExecutionException e) { log.error("createQueue transactConfig get exception !"); } return true; } @Override public void dropQueue(QueueId queueId) { OvsdbRowStore queueRowStore = getRowStore(DATABASENAME, QUEUE); if (queueRowStore == null) { return; } ConcurrentMap<String, Row> queueTableRows = queueRowStore.getRowStore(); Row queueRow = queueTableRows.values().stream().filter(r -> { OvsdbMap ovsdbMap = (OvsdbMap) (r.getColumn(EXTERNAL_ID).data()); return queueId.name().equals(ovsdbMap.map().get(QUEUE_EXTERNAL_ID_KEY)); }).findFirst().orElse(null); if (queueRow == null) { return; } String queueUuid = queueRow.uuid().value(); OvsdbRowStore qosRowStore = getRowStore(DATABASENAME, QOS); if (qosRowStore != null) { Map<Long, Uuid> queueMap = new HashMap<>(); ConcurrentMap<String, Row> qosTableRows = qosRowStore.getRowStore(); qosTableRows.values().stream().filter(r -> { Map<Integer, Uuid> ovsdbMap = ((OvsdbMap) r.getColumn(QUEUES).data()).map(); Set<Integer> keySet = 
ovsdbMap.keySet(); for (Integer keyId : keySet) { if (ovsdbMap.get(keyId).equals(Uuid.uuid(queueUuid))) { queueMap.put(keyId.longValue(), Uuid.uuid(queueUuid)); return true; } } return false; }).findFirst().orElse(null); deleteConfig(QUEUE, UUID, queueUuid, QOS, QUEUES, OvsdbMap.ovsdbMap(queueMap)); } else { deleteConfig(QUEUE, UUID, queueUuid, null, null, null); } } @Override public OvsdbQueue getQueue(QueueId queueId) { Set<OvsdbQueue> ovsdbQueues = getQueues(); return ovsdbQueues.stream().filter(r -> queueId.name().equals(r.externalIds().get(QUEUE_EXTERNAL_ID_KEY))). findFirst().orElse(null); } @Override public Set<OvsdbQueue> getQueues() { Set<OvsdbQueue> ovsdbqueues = new HashSet<>(); OvsdbRowStore rowStore = getRowStore(DATABASENAME, QUEUE); if (rowStore == null) { log.debug("The queue uuid is null"); return ovsdbqueues; } ConcurrentMap<String, Row> rows = rowStore.getRowStore(); ovsdbqueues = rows.keySet() .stream() .map(uuid -> getRow(DATABASENAME, QUEUE, uuid)) .map(this::getOvsdbQueue) .filter(Objects::nonNull) .collect(Collectors.toSet()); return ovsdbqueues; } /** * Creates a mirror port. Mirrors the traffic * that goes to selectDstPort or comes from * selectSrcPort or packets containing selectVlan * to mirrorPort or to all ports that trunk mirrorVlan. * * @param mirror the OVSDB mirror description * @return true if mirror creation is successful, false otherwise */ @Override public boolean createMirror(String bridgeName, OvsdbMirror mirror) { /** * Retrieves bridge's uuid. It is necessary to update * Bridge table. */ String bridgeUuid = getBridgeUuid(bridgeName); if (bridgeUuid == null) { log.warn("Couldn't find bridge {} in {}", bridgeName, nodeId.getIpAddress()); return false; } OvsdbMirror.Builder mirrorBuilder = OvsdbMirror.builder(); mirrorBuilder.mirroringName(mirror.mirroringName()); mirrorBuilder.selectAll(mirror.selectAll()); /** * Retrieves the uuid of the monitored dst ports. */ mirrorBuilder.monitorDstPorts(mirror.monitorDstPorts().parallelStream() .map(dstPort -> { String dstPortUuid = getPortUuid(dstPort.value(), bridgeUuid); if (dstPortUuid != null) { return Uuid.uuid(dstPortUuid); } log.warn("Couldn't find port {} in {}", dstPort.value(), nodeId.getIpAddress()); return null; }) .filter(Objects::nonNull) .collect(Collectors.toSet()) ); /** * Retrieves the uuid of the monitored src ports. 
*/ mirrorBuilder.monitorSrcPorts(mirror.monitorSrcPorts().parallelStream() .map(srcPort -> { String srcPortUuid = getPortUuid(srcPort.value(), bridgeUuid); if (srcPortUuid != null) { return Uuid.uuid(srcPortUuid); } log.warn("Couldn't find port {} in {}", srcPort.value(), nodeId.getIpAddress()); return null; }).filter(Objects::nonNull) .collect(Collectors.toSet()) ); mirrorBuilder.monitorVlans(mirror.monitorVlans()); mirrorBuilder.mirrorPort(mirror.mirrorPort()); mirrorBuilder.mirrorVlan(mirror.mirrorVlan()); mirrorBuilder.externalIds(mirror.externalIds()); mirror = mirrorBuilder.build(); if (mirror.monitorDstPorts().isEmpty() && mirror.monitorSrcPorts().isEmpty() && mirror.monitorVlans().isEmpty()) { log.warn("Invalid monitoring data"); return false; } DatabaseSchema dbSchema = schema.get(DATABASENAME); Mirror mirrorEntry = (Mirror) TableGenerator.createTable(dbSchema, OvsdbTable.MIRROR); mirrorEntry.setName(mirror.mirroringName()); mirrorEntry.setSelectDstPort(mirror.monitorDstPorts()); mirrorEntry.setSelectSrcPort(mirror.monitorSrcPorts()); mirrorEntry.setSelectVlan(mirror.monitorVlans()); mirrorEntry.setExternalIds(mirror.externalIds()); /** * If mirror port, retrieves the uuid of the mirror port. */ if (mirror.mirrorPort() != null) { String outputPortUuid = getPortUuid(mirror.mirrorPort().value(), bridgeUuid); if (outputPortUuid == null) { log.warn("Couldn't find port {} in {}", mirror.mirrorPort().value(), nodeId.getIpAddress()); return false; } mirrorEntry.setOutputPort(Uuid.uuid(outputPortUuid)); } else if (mirror.mirrorVlan() != null) { mirrorEntry.setOutputVlan(mirror.mirrorVlan()); } else { log.warn("Invalid mirror, no mirror port and no mirror vlan"); return false; } ArrayList<Operation> operations = Lists.newArrayList(); Insert mirrorInsert = new Insert(dbSchema.getTableSchema("Mirror"), "Mirror", mirrorEntry.getRow()); operations.add(mirrorInsert); // update the bridge table Condition condition = ConditionUtil.isEqual(UUID, Uuid.uuid(bridgeUuid)); Mutation mutation = MutationUtil.insert(MIRRORS, Uuid.uuid("Mirror")); List<Condition> conditions = Lists.newArrayList(condition); List<Mutation> mutations = Lists.newArrayList(mutation); operations.add(new Mutate(dbSchema.getTableSchema("Bridge"), conditions, mutations)); transactConfig(DATABASENAME, operations); log.info("Created mirror {}", mirror.mirroringName()); return true; } /** * Drops the configuration for mirror. 
* * @param mirroringName name of mirror to drop */ @Override public void dropMirror(MirroringName mirroringName) { String mirrorUuid = getMirrorUuid(mirroringName.name()); if (mirrorUuid == null) { log.warn("Unable to delete {}: mirror not found", mirroringName.name()); return; } deleteConfig(MIRROR, UUID, mirrorUuid, BRIDGE, MIRRORS, Uuid.uuid(mirrorUuid)); log.info("Deleted mirror {}", mirroringName.name()); } @Override public boolean createInterface(String bridgeName, OvsdbInterface ovsdbIface) { String bridgeUuid = getBridgeUuid(bridgeName); if (bridgeUuid == null) { log.warn("Couldn't find bridge {} in {}", bridgeName, nodeId.getIpAddress()); return false; } if (getPortUuid(ovsdbIface.name(), bridgeUuid) != null) { log.warn("Interface {} already exists", ovsdbIface.name()); return false; } ArrayList<Operation> operations = Lists.newArrayList(); DatabaseSchema dbSchema = schema.get(DATABASENAME); // insert a new port with the interface name Port port = (Port) TableGenerator.createTable(dbSchema, OvsdbTable.PORT); port.setName(ovsdbIface.name()); Insert portInsert = new Insert(dbSchema.getTableSchema(PORT), PORT, port.getRow()); portInsert.getRow().put(INTERFACES, Uuid.uuid(INTERFACE)); operations.add(portInsert); // update the bridge table with the new port Condition condition = ConditionUtil.isEqual(UUID, Uuid.uuid(bridgeUuid)); Mutation mutation = MutationUtil.insert(PORTS, Uuid.uuid(PORT)); List<Condition> conditions = Lists.newArrayList(condition); List<Mutation> mutations = Lists.newArrayList(mutation); operations.add(new Mutate(dbSchema.getTableSchema(BRIDGE), conditions, mutations)); Interface intf = (Interface) TableGenerator.createTable(dbSchema, OvsdbTable.INTERFACE); intf.setName(ovsdbIface.name()); if (ovsdbIface.type() != null) { intf.setType(ovsdbIface.typeToString()); } if (ovsdbIface.mtu().isPresent()) { Set<Long> mtuSet = Sets.newConcurrentHashSet(); mtuSet.add(ovsdbIface.mtu().get()); intf.setMtu(mtuSet); intf.setMtuRequest(mtuSet); } intf.setOptions(ovsdbIface.options()); ovsdbIface.data().forEach((k, v) -> { if (k == Interface.InterfaceColumn.EXTERNALIDS) { intf.setExternalIds(v); } }); Insert intfInsert = new Insert(dbSchema.getTableSchema(INTERFACE), INTERFACE, intf.getRow()); operations.add(intfInsert); transactConfig(DATABASENAME, operations); log.info("Created interface {}", ovsdbIface); return true; } @Override public boolean dropInterface(String ifaceName) { OvsdbRowStore rowStore = getRowStore(DATABASENAME, BRIDGE); if (rowStore == null) { log.warn("Failed to get BRIDGE table"); return false; } ConcurrentMap<String, Row> bridgeTableRows = rowStore.getRowStore(); if (bridgeTableRows == null) { log.warn("Failed to get BRIDGE table rows"); return false; } // interface name is unique Optional<String> bridgeId = bridgeTableRows.keySet().stream() .filter(uuid -> getPortUuid(ifaceName, uuid) != null) .findFirst(); if (bridgeId.isPresent()) { String portId = getPortUuid(ifaceName, bridgeId.get()); deleteConfig(PORT, UUID, portId, BRIDGE, PORTS, Uuid.uuid(portId)); return true; } else { log.warn("Unable to find the interface with name {}", ifaceName); return false; } } /** * Delete transact config.
* * @param childTableName child table name * @param childColumnName child column name * @param childUuid child row uuid * @param parentTableName parent table name * @param parentColumnName parent column * @param referencedValue referenced value */ private void deleteConfig(String childTableName, String childColumnName, String childUuid, String parentTableName, String parentColumnName, Object referencedValue) { DatabaseSchema dbSchema = schema.get(DATABASENAME); TableSchema childTableSchema = dbSchema.getTableSchema(childTableName); ArrayList<Operation> operations = Lists.newArrayList(); if (parentTableName != null && parentColumnName != null && referencedValue != null) { TableSchema parentTableSchema = dbSchema .getTableSchema(parentTableName); ColumnSchema parentColumnSchema = parentTableSchema .getColumnSchema(parentColumnName); List<Mutation> mutations = Lists.newArrayList(); Mutation mutation = MutationUtil.delete(parentColumnSchema.name(), referencedValue); mutations.add(mutation); List<Condition> conditions = Lists.newArrayList(); Condition condition = ConditionUtil.includes(parentColumnName, referencedValue); conditions.add(condition); Mutate op = new Mutate(parentTableSchema, conditions, mutations); operations.add(op); } List<Condition> conditions = Lists.newArrayList(); Condition condition = ConditionUtil.isEqual(childColumnName, Uuid.uuid(childUuid)); conditions.add(condition); Delete del = new Delete(childTableSchema, conditions); operations.add(del); transactConfig(DATABASENAME, operations); } /** * Update transact config. * * @param tableName table name * @param columnName column name * @param uuid uuid * @param row the config data */ private void updateConfig(String tableName, String columnName, String uuid, Row row) { DatabaseSchema dbSchema = schema.get(DATABASENAME); TableSchema tableSchema = dbSchema.getTableSchema(tableName); List<Condition> conditions = Lists.newArrayList(); Condition condition = ConditionUtil.isEqual(columnName, Uuid.uuid(uuid)); conditions.add(condition); Update update = new Update(tableSchema, row, conditions); ArrayList<Operation> operations = Lists.newArrayList(); operations.add(update); transactConfig(DATABASENAME, operations); } /** * Insert transact config. 
* * @param childTableName child table name * @param childColumnName child column name * @param parentTableName parent table name * @param parentColumnName parent column * @param parentUuid parent uuid * @param row the config data * @return uuid, null if no uuid is found */ private String insertConfig(String childTableName, String childColumnName, String parentTableName, String parentColumnName, String parentUuid, Row row) { DatabaseSchema dbSchema = schema.get(DATABASENAME); TableSchema tableSchema = dbSchema.getTableSchema(childTableName); Insert insert = new Insert(tableSchema, childTableName, row); ArrayList<Operation> operations = Lists.newArrayList(); operations.add(insert); if (parentTableName != null && parentColumnName != null) { TableSchema parentTableSchema = dbSchema .getTableSchema(parentTableName); ColumnSchema parentColumnSchema = parentTableSchema .getColumnSchema(parentColumnName); List<Mutation> mutations = Lists.newArrayList(); Mutation mutation = MutationUtil.insert(parentColumnSchema.name(), Uuid.uuid(childTableName)); mutations.add(mutation); List<Condition> conditions = Lists.newArrayList(); Condition condition = ConditionUtil.isEqual(UUID, Uuid.uuid(parentUuid)); conditions.add(condition); Mutate op = new Mutate(parentTableSchema, conditions, mutations); operations.add(op); } if (childTableName.equalsIgnoreCase(PORT)) { log.debug("Handle port insert"); Insert intfInsert = handlePortInsertTable(row); if (intfInsert != null) { operations.add(intfInsert); } Insert ins = (Insert) operations.get(0); ins.getRow().put("interfaces", Uuid.uuid(INTERFACE)); } List<OperationResult> results; try { results = transactConfig(DATABASENAME, operations) .get(TRANSACTCONFIG_TIMEOUT, TimeUnit.SECONDS); return results.get(0).getUuid().value(); } catch (TimeoutException e) { log.warn("TimeoutException thrown while waiting to get result"); } catch (InterruptedException e) { log.warn("Interrupted while waiting to get result"); Thread.currentThread().interrupt(); } catch (ExecutionException e) { log.error("Exception thrown while waiting to get result"); } return null; } /** * Handles port insert.
* * @param portRow row of port * @return insert, empty if null */ private Insert handlePortInsertTable(Row portRow) { DatabaseSchema dbSchema = schema.get(DATABASENAME); TableSchema portTableSchema = dbSchema.getTableSchema(PORT); ColumnSchema portColumnSchema = portTableSchema.getColumnSchema("name"); String portName = (String) portRow.getColumn(portColumnSchema.name()).data(); Interface inf = (Interface) TableGenerator.createTable(dbSchema, OvsdbTable.INTERFACE); inf.setName(portName); TableSchema intfTableSchema = dbSchema.getTableSchema(INTERFACE); return new Insert(intfTableSchema, INTERFACE, inf.getRow()); } @Override public ListenableFuture<DatabaseSchema> getOvsdbSchema(String dbName) { if (dbName == null) { return null; } DatabaseSchema databaseSchema = schema.get(dbName); if (databaseSchema == null) { List<String> dbNames = new ArrayList<>(); dbNames.add(dbName); Function<JsonNode, DatabaseSchema> rowFunction = input -> { log.debug("Get ovsdb database schema {}", dbName); DatabaseSchema dbSchema = FromJsonUtil.jsonNodeToDbSchema(dbName, input); if (dbSchema == null) { log.debug("Get ovsdb database schema error"); return null; } schema.put(dbName, dbSchema); return dbSchema; }; ListenableFuture<JsonNode> input = getSchema(dbNames); if (input != null) { return futureTransform(input, rowFunction); } return null; } else { return Futures.immediateFuture(databaseSchema); } } @Override public ListenableFuture<TableUpdates> monitorTables(String dbName, String id) { if (dbName == null) { return null; } DatabaseSchema dbSchema = schema.get(dbName); if (dbSchema != null) { Function<JsonNode, TableUpdates> rowFunction = input -> { log.debug("Get table updates"); TableUpdates updates = FromJsonUtil.jsonNodeToTableUpdates(input, dbSchema); if (updates == null) { log.debug("Get table updates error"); return null; } return updates; }; return futureTransform(monitor(dbSchema, id), rowFunction); } return null; } private ListenableFuture<List<OperationResult>> transactConfig(String dbName, List<Operation> operations) { if (dbName == null) { return null; } DatabaseSchema dbSchema = schema.get(dbName); if (dbSchema != null) { Function<List<JsonNode>, List<OperationResult>> rowFunction = (input -> { try { log.debug("Get ovsdb operation result"); List<OperationResult> result = FromJsonUtil.jsonNodeToOperationResult(input, operations); if (result == null) { log.debug("The operation result is null"); return null; } return result; } catch (Exception e) { log.error("Exception while parsing result", e); } return null; }); return futureTransform(transact(dbSchema, operations), rowFunction); } return null; } @Override public ListenableFuture<JsonNode> getSchema(List<String> dbnames) { String id = java.util.UUID.randomUUID().toString(); String getSchemaString = JsonRpcWriterUtil.getSchemaStr(id, dbnames); SettableFuture<JsonNode> sf = SettableFuture.create(); requestResult.put(id, sf); requestMethod.put(id, "getSchema"); channel.writeAndFlush(getSchemaString); return sf; } @Override public ListenableFuture<List<String>> echo() { String id = java.util.UUID.randomUUID().toString(); String echoString = JsonRpcWriterUtil.echoStr(id); SettableFuture<List<String>> sf = SettableFuture.create(); requestResult.put(id, sf); requestMethod.put(id, "echo"); channel.writeAndFlush(echoString); return sf; } @Override public ListenableFuture<JsonNode> monitor(DatabaseSchema dbSchema, String monitorId) { String id = java.util.UUID.randomUUID().toString(); String monitorString = JsonRpcWriterUtil.monitorStr(id, monitorId, 
dbSchema); SettableFuture<JsonNode> sf = SettableFuture.create(); requestResult.put(id, sf); requestMethod.put(id, "monitor"); channel.writeAndFlush(monitorString); return sf; } @Override public ListenableFuture<List<String>> listDbs() { String id = java.util.UUID.randomUUID().toString(); String listDbsString = JsonRpcWriterUtil.listDbsStr(id); SettableFuture<List<String>> sf = SettableFuture.create(); requestResult.put(id, sf); requestMethod.put(id, "listDbs"); channel.writeAndFlush(listDbsString); return sf; } @Override public ListenableFuture<List<JsonNode>> transact(DatabaseSchema dbSchema, List<Operation> operations) { String id = java.util.UUID.randomUUID().toString(); String transactString = JsonRpcWriterUtil.transactStr(id, dbSchema, operations); SettableFuture<List<JsonNode>> sf = SettableFuture.create(); requestResult.put(id, sf); requestMethod.put(id, "transact"); channel.writeAndFlush(transactString); return sf; } @SuppressWarnings({"rawtypes", "unchecked"}) @Override public void processResult(JsonNode response) { log.debug("Handle result"); String requestId = response.get("id").asText(); SettableFuture sf = requestResult.get(requestId); if (sf == null) { log.debug("No such future to process"); return; } String methodName = requestMethod.get(requestId); sf.set(FromJsonUtil.jsonResultParser(response, methodName)); requestResult.remove(requestId); requestMethod.remove(requestId); } @Override public void processRequest(JsonNode requestJson) { log.debug("Handle request"); if (requestJson.get("method").asText().equalsIgnoreCase("echo")) { log.debug("handle echo request"); String replyString = FromJsonUtil.getEchoRequestStr(requestJson); channel.writeAndFlush(replyString); } else { FromJsonUtil.jsonCallbackRequestParser(requestJson, monitorCallBack); } } @Override public void setCallback(Callback monitorCallback) { this.monitorCallBack = monitorCallback; } @Override public Set<OvsdbBridge> getBridges() { Set<OvsdbBridge> ovsdbBridges = new HashSet<>(); OvsdbTableStore tableStore = getTableStore(DATABASENAME); if (tableStore == null) { return ovsdbBridges; } OvsdbRowStore rowStore = tableStore.getRows(BRIDGE); if (rowStore == null) { return ovsdbBridges; } ConcurrentMap<String, Row> rows = rowStore.getRowStore(); for (String uuid : rows.keySet()) { Row bridgeRow = getRow(DATABASENAME, BRIDGE, uuid); OvsdbBridge ovsdbBridge = getOvsdbBridge(bridgeRow, Uuid.uuid(uuid)); if (ovsdbBridge != null) { ovsdbBridges.add(ovsdbBridge); } } return ovsdbBridges; } @Override public Set<ControllerInfo> getControllers(DeviceId openflowDeviceId) { Uuid bridgeUuid = getBridgeUuid(openflowDeviceId); if (bridgeUuid == null) { log.warn("bad bridge Uuid"); return null; } List<Controller> controllers = getControllers(bridgeUuid); if (controllers == null) { log.warn("bad list of controllers"); return null; } return controllers.stream().map(controller -> new ControllerInfo( (String) controller.getTargetColumn() .data())).collect(Collectors.toSet()); } private List<Controller> getControllers(Uuid bridgeUuid) { DatabaseSchema dbSchema = schema.get(DATABASENAME); if (dbSchema == null) { return null; } OvsdbRowStore rowStore = getRowStore(DATABASENAME, BRIDGE); if (rowStore == null) { log.debug("There is no bridge table"); return null; } Row bridgeRow = rowStore.getRow(bridgeUuid.value()); Bridge bridge = (Bridge) TableGenerator. 
getTable(dbSchema, bridgeRow, OvsdbTable.BRIDGE); Set<Uuid> controllerUuids = (Set<Uuid>) ((OvsdbSet) bridge .getControllerColumn().data()).set(); OvsdbRowStore controllerRowStore = getRowStore(DATABASENAME, CONTROLLER); if (controllerRowStore == null) { log.debug("There is no controller table"); return null; } List<Controller> ovsdbControllers = new ArrayList<>(); ConcurrentMap<String, Row> controllerTableRows = controllerRowStore.getRowStore(); controllerTableRows.forEach((key, row) -> { if (!controllerUuids.contains(Uuid.uuid(key))) { return; } Controller controller = (Controller) TableGenerator .getTable(dbSchema, row, OvsdbTable.CONTROLLER); ovsdbControllers.add(controller); }); return ovsdbControllers; } private Uuid getBridgeUuid(DeviceId openflowDeviceId) { DatabaseSchema dbSchema = schema.get(DATABASENAME); if (dbSchema == null) { return null; } OvsdbRowStore rowStore = getRowStore(DATABASENAME, BRIDGE); if (rowStore == null) { log.debug("There is no bridge table"); return null; } ConcurrentMap<String, Row> bridgeTableRows = rowStore.getRowStore(); final AtomicReference<Uuid> uuid = new AtomicReference<>(); for (Map.Entry<String, Row> entry : bridgeTableRows.entrySet()) { Bridge bridge = (Bridge) TableGenerator.getTable( dbSchema, entry.getValue(), OvsdbTable.BRIDGE); if (matchesDpid(bridge, openflowDeviceId)) { uuid.set(Uuid.uuid(entry.getKey())); break; } } if (uuid.get() == null) { log.debug("There is no bridge for {}", openflowDeviceId); } return uuid.get(); } private static boolean matchesDpid(Bridge b, DeviceId deviceId) { String ofDpid = deviceId.toString().replace("of:", ""); Set ofDeviceIds = ((OvsdbSet) b.getDatapathIdColumn().data()).set(); //TODO Set<String> return ofDeviceIds.contains(ofDpid); } @Override public Set<OvsdbPort> getPorts() { return (Set<OvsdbPort>) getElements(this::getOvsdbPort); } @Override public Set<Interface> getInterfaces() { return (Set<Interface>) getElements(this::getInterface); } private Set<?> getElements(Function<Row, ?> method) { OvsdbTableStore tableStore = getTableStore(DATABASENAME); if (tableStore == null) { return null; } OvsdbRowStore rowStore = tableStore.getRows(INTERFACE); if (rowStore == null) { return null; } ConcurrentMap<String, Row> rows = rowStore.getRowStore(); return rows.keySet() .stream() .map(uuid -> getRow(DATABASENAME, INTERFACE, uuid)) .map(method) .filter(Objects::nonNull) .collect(Collectors.toSet()); } @Override public Interface getInterface(String intf) { return getInterfaces().stream() .filter(ovsdbIntf -> ovsdbIntf.getName().equals(intf)) .findAny().orElse(null); } private Interface getInterface(Row row) { DatabaseSchema dbSchema = getDatabaseSchema(DATABASENAME); Interface intf = (Interface) TableGenerator .getTable(dbSchema, row, OvsdbTable.INTERFACE); if (intf == null) { return null; } return intf; } @Override public DatabaseSchema getDatabaseSchema(String dbName) { return schema.get(dbName); } private OvsdbPort getOvsdbPort(Row row) { DatabaseSchema dbSchema = getDatabaseSchema(DATABASENAME); Interface intf = (Interface) TableGenerator .getTable(dbSchema, row, OvsdbTable.INTERFACE); if (intf == null) { return null; } long ofPort = getOfPort(intf); String portName = intf.getName(); if ((ofPort < 0) || (portName == null)) { return null; } return new OvsdbPort(new OvsdbPortNumber(ofPort), new OvsdbPortName(portName)); } private OvsdbBridge getOvsdbBridge(Row row, Uuid bridgeUuid) { DatabaseSchema dbSchema
= getDatabaseSchema(DATABASENAME); Bridge bridge = (Bridge) TableGenerator.getTable(dbSchema, row, OvsdbTable.BRIDGE); if (bridge == null) { return null; } OvsdbSet datapathIdSet = (OvsdbSet) bridge.getDatapathIdColumn().data(); @SuppressWarnings("unchecked") Set<String> datapathIds = datapathIdSet.set(); if (datapathIds == null || datapathIds.isEmpty()) { return null; } String datapathId = (String) datapathIds.toArray()[0]; String bridgeName = bridge.getName(); if ((datapathId == null) || (bridgeName == null)) { return null; } List<Controller> controllers = getControllers(bridgeUuid); if (controllers != null) { List<ControllerInfo> controllerInfos = controllers.stream().map( controller -> new ControllerInfo( (String) controller.getTargetColumn() .data())).collect(Collectors.toList()); return OvsdbBridge.builder() .name(bridgeName) .datapathId(datapathId) .controllers(controllerInfos) .build(); } else { return OvsdbBridge.builder() .name(bridgeName) .datapathId(datapathId) .build(); } } private OvsdbQos getOvsdbQos(Row row) { DatabaseSchema dbSchema = getDatabaseSchema(DATABASENAME); Qos qos = (Qos) TableGenerator.getTable(dbSchema, row, OvsdbTable.QOS); if (qos == null) { return null; } String type = (String) qos.getTypeColumn().data(); Map<String, String> otherConfigs; Map<String, String> externalIds; Map<Long, String> queues; otherConfigs = ((OvsdbMap) qos.getOtherConfigColumn().data()).map(); externalIds = ((OvsdbMap) qos.getExternalIdsColumn().data()).map(); queues = ((OvsdbMap) qos.getQueuesColumn().data()).map(); return OvsdbQos.builder().qosType(type). queues(queues).otherConfigs(otherConfigs). externalIds(externalIds).build(); } private OvsdbQueue getOvsdbQueue(Row row) { DatabaseSchema dbSchema = getDatabaseSchema(DATABASENAME); Queue queue = (Queue) TableGenerator.getTable(dbSchema, row, OvsdbTable.QUEUE); if (queue == null) { return null; } OvsdbSet dscpOvsdbSet = ((OvsdbSet) queue.getDscpColumn().data()); Set dscpSet = dscpOvsdbSet.set(); Long dscp = null; if (dscpSet != null && !dscpSet.isEmpty()) { dscp = Long.valueOf(dscpSet.toArray()[0].toString()); } Map<String, String> otherConfigs; Map<String, String> externalIds; otherConfigs = ((OvsdbMap) queue.getOtherConfigColumn().data()).map(); externalIds = ((OvsdbMap) queue.getExternalIdsColumn().data()).map(); return OvsdbQueue.builder().dscp(dscp). 
otherConfigs(otherConfigs).externalIds(externalIds).build(); } private long getOfPort(Interface intf) { OvsdbSet ofPortSet = (OvsdbSet) intf.getOpenFlowPortColumn().data(); @SuppressWarnings("unchecked") Set<Integer> ofPorts = ofPortSet.set(); if (ofPorts == null || ofPorts.isEmpty()) { log.debug("The ofport is null in {}", intf.getName()); return -1; } // return (long) ofPorts.toArray()[0]; Iterator<Integer> it = ofPorts.iterator(); return Long.parseLong(it.next().toString()); } @Override public Set<OvsdbPort> getLocalPorts(Iterable<String> ifaceids) { Set<OvsdbPort> ovsdbPorts = new HashSet<>(); OvsdbTableStore tableStore = getTableStore(DATABASENAME); if (tableStore == null) { return null; } OvsdbRowStore rowStore = tableStore.getRows(INTERFACE); if (rowStore == null) { return null; } ConcurrentMap<String, Row> rows = rowStore.getRowStore(); for (String uuid : rows.keySet()) { Row row = getRow(DATABASENAME, INTERFACE, uuid); DatabaseSchema dbSchema = getDatabaseSchema(DATABASENAME); Interface intf = (Interface) TableGenerator .getTable(dbSchema, row, OvsdbTable.INTERFACE); if (intf == null || getIfaceid(intf) == null) { continue; } String portName = intf.getName(); if (portName == null) { continue; } Set<String> ifaceidSet = Sets.newHashSet(ifaceids); if (portName.startsWith(TYPEVXLAN) || !ifaceidSet.contains(getIfaceid(intf))) { continue; } long ofPort = getOfPort(intf); if (ofPort < 0) { continue; } ovsdbPorts.add(new OvsdbPort(new OvsdbPortNumber(ofPort), new OvsdbPortName(portName))); } return ovsdbPorts; } private String getIfaceid(Interface intf) { OvsdbMap ovsdbMap = (OvsdbMap) intf.getExternalIdsColumn().data(); @SuppressWarnings("unchecked") Map<String, String> externalIds = ovsdbMap.map(); if (externalIds.isEmpty()) { log.warn("The external_ids is null"); return null; } String ifaceid = externalIds.get(EXTERNAL_ID_INTERFACE_ID); if (ifaceid == null) { log.warn("The ifaceid is null"); return null; } return ifaceid; } @Override public void disconnect() { channel.disconnect(); } @Override public List<OvsdbPortName> getPorts(List<String> portNames, DeviceId deviceId) { Uuid bridgeUuid = getBridgeUuid(deviceId); if (bridgeUuid == null) { log.error("Can't find the bridge for the deviceId {}", deviceId); return Collections.emptyList(); } DatabaseSchema dbSchema = schema.get(DATABASENAME); Row bridgeRow = getRow(DATABASENAME, BRIDGE, bridgeUuid.value()); Bridge bridge = (Bridge) TableGenerator.getTable(dbSchema, bridgeRow, OvsdbTable.BRIDGE); if (bridge == null) { return Collections.emptyList(); } OvsdbSet setPorts = (OvsdbSet) bridge.getPortsColumn().data(); Set<Uuid> portSet = setPorts.set(); if (portSet.isEmpty()) { return Collections.emptyList(); } Map<Uuid, Port> portMap = portSet.stream().collect(Collectors.toMap( java.util.function.Function.identity(), port -> (Port) TableGenerator .getTable(dbSchema, getRow(DATABASENAME, PORT, port.value()), OvsdbTable.PORT))); List<OvsdbPortName> portList = portMap.entrySet().stream().filter(port -> Objects.nonNull(port.getValue()) && portNames.contains(port.getValue().getName()) && Objects.nonNull(getInterfacebyPort(port.getKey().value(), port.getValue().getName()))) .map(port -> new OvsdbPortName(port.getValue().getName())).collect(Collectors.toList()); return Collections.unmodifiableList(portList); } @Override public boolean getPortError(List<OvsdbPortName> portNames, DeviceId bridgeId) { Uuid bridgeUuid = getBridgeUuid(bridgeId); List<Interface> interfaceList = portNames.stream().collect(Collectors 
.toMap(java.util.function.Function.identity(), port -> (Interface) getInterfacebyPort(getPortUuid(port.value(), bridgeUuid.value()), port.value()))) .entrySet().stream().filter(intf -> Objects.nonNull(intf.getValue()) && ((OvsdbSet) intf.getValue().getOpenFlowPortColumn().data()).set() .stream().findAny().orElse(OFPORT_ERROR_COMPARISON).equals(OFPORT_ERROR)) .map(Map.Entry::getValue).collect(Collectors.toList()); interfaceList.forEach(intf -> ((Consumer<Interface>) intf1 -> { try { Set<String> setErrors = ((OvsdbSet) intf1.getErrorColumn().data()).set(); log.info("Port has errors. ofport value - {}, Interface - {} has error - {} ", intf1.getOpenFlowPortColumn().data(), intf1.getName(), setErrors.stream() .findFirst().get()); } catch (ColumnSchemaNotFoundException | VersionMismatchException e) { log.debug("Port has errors. ofport value - {}, Interface - {} has error - {} ", intf1.getOpenFlowPortColumn().data(), intf1.getName(), e); } }).accept(intf)); return !interfaceList.isEmpty(); } private Interface getInterfacebyPort(String portUuid, String portName) { DatabaseSchema dbSchema = schema.get(DATABASENAME); Row portRow = getRow(DATABASENAME, PORT, portUuid); Port port = (Port) TableGenerator.getTable(dbSchema, portRow, OvsdbTable.PORT); if (port == null) { return null; } OvsdbSet setInterfaces = (OvsdbSet) port.getInterfacesColumn().data(); Set<Uuid> interfaces = setInterfaces.set(); return interfaces.stream().map(intf -> (Interface) TableGenerator .getTable(dbSchema, getRow(DATABASENAME, INTERFACE, intf.value()), OvsdbTable.INTERFACE)) .filter(intf -> Objects.nonNull(intf) && portName.equalsIgnoreCase(intf.getName())) .findFirst().orElse(null); } /** * Get first row of given table from given db. * * @param dbName db name * @param tblName table name * @return firstRow, first row of the given table from given db if present */ @Override public Optional<Object> getFirstRow(String dbName, OvsdbTable tblName) { DatabaseSchema dbSchema = getDatabaseSchema(dbName); if (Objects.isNull(dbSchema)) { return Optional.empty(); } OvsdbTableStore tableStore = ovsdbStore.getOvsdbTableStore(dbName); if (tableStore == null) { return Optional.empty(); } OvsdbRowStore rowStore = tableStore.getRows(tblName.tableName()); if (rowStore == null) { return Optional.empty(); } ConcurrentMap<String, Row> rows = rowStore.getRowStore(); if (rows == null) { log.debug("The {} Table Rows is null", tblName); return Optional.empty(); } // There should be only 1 row in this table Optional<String> uuid = rows.keySet().stream().findFirst(); if (uuid.isPresent() && rows.containsKey(uuid.get())) { return Optional.of(TableGenerator.getTable(dbSchema, rows.get(uuid.get()), tblName)); } else { return Optional.empty(); } } /** * Get memory usage of device. * * @return memoryStats, empty data as there is no generic way to fetch such stats */ @Override public Optional<DeviceMemoryStats> getDeviceMemoryUsage() { return Optional.empty(); } /** * Get cpu usage of device. * * @return cpuStats, empty data as there is no generic way to fetch such stats */ @Override public Optional<DeviceCpuStats> getDeviceCpuUsage() { return Optional.empty(); } private <I, O> ListenableFuture<O> futureTransform( ListenableFuture<I> input, Function<? super I, ? extends O> function) { // Wrapper around deprecated Futures.transform() method. As per Guava // recommendation, passing MoreExecutors.directExecutor() for identical // behavior. return Futures.transform(input, function, MoreExecutors.directExecutor()); } }
Fix: add new ovs controller first, remove ovs controller afterward Change-Id: I7ed7f9b01d512ece3781891d69aaf76ba255fd1b
protocols/ovsdb/api/src/main/java/org/onosproject/ovsdb/controller/driver/DefaultOvsdbClient.java
Fix: add new ovs controller first, remove ovs controller afterward
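The fix named in this record's subject reorders the controller update in setControllersWithUuid: the patched version inserts the new controllers first and deletes the stale ones afterward, so the bridge never passes through a state with no controller attached. A minimal self-contained sketch of that ordering, with plain target strings standing in for ControllerInfo and with addToBridge/removeFromBridge as hypothetical stand-ins for the insertConfig/deleteConfig calls above (an illustration, not the actual ONOS patch):

import java.util.HashSet;
import java.util.List;
import java.util.Set;

class ControllerSync {
    void sync(Set<String> desired, List<String> existing) {
        // 1) Insert the controllers that are not yet on the bridge ...
        Set<String> toAdd = new HashSet<>(desired);
        existing.forEach(toAdd::remove);
        toAdd.forEach(this::addToBridge);

        // 2) ... then delete the stale ones, so at least one live
        // controller stays attached throughout the transition.
        existing.stream()
                .filter(target -> !desired.contains(target))
                .forEach(this::removeFromBridge);
    }

    void addToBridge(String target) { /* insertConfig(...) in the real code */ }

    void removeFromBridge(String target) { /* deleteConfig(...) in the real code */ }
}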
Java
apache-2.0
30f69eb8d543eee618c75053427253cc5419fa7f
0
gkearney/brailleblaster.old,DynamicalSystem/brailleblaster.old,gkearney/brailleblaster.old,normanbrobinson/brailleblaster.old,mazhen2009/brailleblaster.old,mazhen2009/brailleblaster.old,larsvoigt/brailleblaster.old,gkearney/brailleblaster.old,DynamicalSystem/brailleblaster.old,larsvoigt/brailleblaster.old,teambraison/brailleblaster.old,larsvoigt/brailleblaster.old,normanbrobinson/brailleblaster.old,teambraison/brailleblaster.old,DynamicalSystem/brailleblaster.old,mazhen2009/brailleblaster.old,teambraison/brailleblaster.old,normanbrobinson/brailleblaster.old
/* BrailleBlaster Braille Transcription Application * * Copyright (C) 2010, 2012 * ViewPlus Technologies, Inc. www.viewplus.com * and * Abilitiessoft, Inc. www.abilitiessoft.com * All rights reserved * * This file may contain code borrowed from files produced by various * Java development teams. These are gratefully acknowledged. * * This file is free software; you can redistribute it and/or modify it * under the terms of the Apache 2.0 License, as given at * http://www.apache.org/licenses/ * * This file is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE * See the Apache 2.0 License for more details. * * You should have received a copy of the Apache 2.0 License along with * this program; see the file LICENSE. * If not, see * http://www.apache.org/licenses/ * * Maintained by John J. Boyer [email protected] */ package org.brailleblaster.wordprocessor; import java.awt.Desktop; import org.brailleblaster.util.Notify; import org.brailleblaster.BBIni; import java.net.URI; import java.net.URISyntaxException; import java.io.IOException; /** * This class handles the items on the help menu. */ class UserHelp { private String helpPath; Desktop desktop; UserHelp (String helpName) { helpPath = BBIni.getHelpDocsPath() + BBIni.getFileSep(); desktop = Desktop.getDesktop(); if (helpName.equals ("about")) { new Notify (BBIni.getVersion() + ", released on " + BBIni.getReleaseDate() + ". For questions and bug reports contact [email protected]"); } else if (helpName.equals ("manuals")) { showHelp ("manuals.html"); } else if (helpName.equals ("helpinfo")) { showHelp ("helpinfo.html"); } else { new Notify (helpName + " is being written."); } } /** * Display help documents in the local browser. */ void showHelp (String fileName) { try { desktop.browse (new URI (helpPath + fileName)); } catch (URISyntaxException e) { } catch (IOException e) { } } }
src/main/org/brailleblaster/wordprocessor/UserHelp.java
/* BrailleBlaster Braille Transcription Application * * Copyright (C) 2010, 2012 * ViewPlus Technologies, Inc. www.viewplus.com * and * Abilitiessoft, Inc. www.abilitiessoft.com * All rights reserved * * This file may contain code borrowed from files produced by various * Java development teams. These are gratefully acknowledged. * * This file is free software; you can redistribute it and/or modify it * under the terms of the Apache 2.0 License, as given at * http://www.apache.org/licenses/ * * This file is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE * See the Apache 2.0 License for more details. * * You should have received a copy of the Apache 2.0 License along with * this program; see the file LICENSE. * If not, see * http://www.apache.org/licenses/ * * Maintained by John J. Boyer [email protected] */ package org.brailleblaster.wordprocessor; import java.awt.Desktop; import org.brailleblaster.util.Notify; import org.brailleblaster.BBIni; import java.net.URI; import java.net.URISyntaxException; import java.io.IOException; /** * This class handles the items on the help menu. */ class UserHelp { private String helpPath; Desktop desktop; UserHelp (String helpName) { helpPath = BBIni.getHelpDocsPath() + BBIni.getFileSep(); desktop = Desktop.getDesktop(); if (helpName.equals ("about")) { new Notify (BBIni.getVersion() + ", released on " + BBIni.getReleaseDate() + ". For questions and bug reports contact [email protected]"); } else if (helpName.equals ("manuals")) { showHelp ("manuals.html"); } else if (helpName.equals ("helpinfo.html")) { showHelp ("helpinfo.html"); } else { new Notify (helpName + " is being written."); } } /** * Display help documents in the local browser. */ void showHelp (String fileName) { try { desktop.browse (new URI (helpPath + fileName)); } catch (URISyntaxException e) { } catch (IOException e) { } } }
fixing a typo
src/main/org/brailleblaster/wordprocessor/UserHelp.java
fixing a typo
Java
apache-2.0
de40d91c57aa39dad421b03a518430779bccbcb7
0
astefanutti/camel-cdi
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.cdi; import org.apache.camel.CamelContext; import org.apache.camel.ProducerTemplate; import org.apache.camel.main.MainSupport; import javax.enterprise.inject.UnsatisfiedResolutionException; import javax.enterprise.inject.Vetoed; import javax.enterprise.inject.spi.Bean; import javax.enterprise.inject.spi.BeanManager; import java.util.HashMap; import java.util.Map; /** * Camel CDI boot integration. Allows Camel and CDI to be booted up on the command line as a JVM process. * See http://camel.apache.org/camel-boot.html. */ @Vetoed public class Main extends MainSupport { private static Main instance; private Object cdiContainer; // we don't want to use cdictrl API in OSGi public static void main(String... args) throws Exception { Main main = new Main(); instance = main; main.enableHangupSupport(); main.run(args); } /** * Returns the currently executing instance. * * @return the current running instance */ public static Main getInstance() { return instance; } @Override protected ProducerTemplate findOrCreateCamelTemplate() { BeanManager manager = ((org.apache.deltaspike.cdise.api.CdiContainer) cdiContainer).getBeanManager(); Bean<?> bean = manager.resolve(manager.getBeans(CamelContext.class)); if (bean == null) throw new UnsatisfiedResolutionException("No default Camel context is deployed, cannot create default ProducerTemplate!"); CamelContext context = (CamelContext) manager.getReference(bean, CamelContext.class, manager.createCreationalContext(bean)); return context.createProducerTemplate(); } @Override protected Map<String, CamelContext> getCamelContextMap() { BeanManager manager = ((org.apache.deltaspike.cdise.api.CdiContainer) cdiContainer).getBeanManager(); Map<String, CamelContext> answer = new HashMap<>(); for (Bean<?> bean : manager.getBeans(CamelContext.class, AnyLiteral.INSTANCE)) { CamelContext context = (CamelContext) manager.getReference(bean, CamelContext.class, manager.createCreationalContext(bean)); answer.put(context.getName(), context); } return answer; } @Override protected void doStart() throws Exception { // TODO: Use standard CDI Java SE support when CDI 2.0 becomes a prerequisite org.apache.deltaspike.cdise.api.CdiContainer container = org.apache.deltaspike.cdise.api.CdiContainerLoader.getCdiContainer(); container.boot(); container.getContextControl().startContexts(); cdiContainer = container; super.doStart(); postProcessContext(); for (CamelContext context : getCamelContexts()) context.start(); } @Override protected void doStop() throws Exception { // FIXME: since version 2.3.0.Final and WELD-1915, Weld always register a shutdown hook that conflicts with Camel main support. See WELD-2051. 
for (CamelContext context : getCamelContexts()) context.stop(); super.doStop(); if (cdiContainer != null) ((org.apache.deltaspike.cdise.api.CdiContainer) cdiContainer).shutdown(); } }
impl/src/main/java/org/apache/camel/cdi/Main.java
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.cdi; import org.apache.camel.CamelContext; import org.apache.camel.ProducerTemplate; import org.apache.camel.main.MainSupport; import javax.enterprise.inject.Vetoed; import javax.enterprise.inject.spi.Bean; import javax.enterprise.inject.spi.BeanManager; import java.util.HashMap; import java.util.Map; /** * Camel CDI boot integration. Allows Camel and CDI to be booted up on the command line as a JVM process. * See http://camel.apache.org/camel-boot.html. */ @Vetoed public class Main extends MainSupport { private static Main instance; private Object cdiContainer; // we don't want to use cdictrl API in OSGi public static void main(String... args) throws Exception { Main main = new Main(); instance = main; main.enableHangupSupport(); main.run(args); } /** * Returns the currently executing instance. * * @return the current running instance */ public static Main getInstance() { return instance; } @Override protected ProducerTemplate findOrCreateCamelTemplate() { BeanManager manager = ((org.apache.deltaspike.cdise.api.CdiContainer) cdiContainer).getBeanManager(); Bean<?> bean = manager.resolve(manager.getBeans(CamelContext.class)); if (bean == null) throw new IllegalStateException("No default Camel context is deployed so cannot create a ProducerTemplate!"); CamelContext context = (CamelContext) manager.getReference(bean, CamelContext.class, manager.createCreationalContext(bean)); return context.createProducerTemplate(); } @Override protected Map<String, CamelContext> getCamelContextMap() { BeanManager manager = ((org.apache.deltaspike.cdise.api.CdiContainer) cdiContainer).getBeanManager(); Map<String, CamelContext> answer = new HashMap<>(); for (Bean<?> bean : manager.getBeans(CamelContext.class, AnyLiteral.INSTANCE)) { CamelContext context = (CamelContext) manager.getReference(bean, CamelContext.class, manager.createCreationalContext(bean)); answer.put(context.getName(), context); } return answer; } @Override protected void doStart() throws Exception { // TODO: Use standard CDI Java SE support when CDI 2.0 becomes a prerequisite org.apache.deltaspike.cdise.api.CdiContainer container = org.apache.deltaspike.cdise.api.CdiContainerLoader.getCdiContainer(); container.boot(); container.getContextControl().startContexts(); cdiContainer = container; super.doStart(); postProcessContext(); for (CamelContext context : getCamelContexts()) context.start(); } @Override protected void doStop() throws Exception { // FIXME: since version 2.3.0.Final and WELD-1915, Weld always register a shutdown hook that conflicts with Camel main support. See WELD-2051. 
        for (CamelContext context : getCamelContexts())
            context.stop();
        super.doStop();
        if (cdiContainer != null)
            ((org.apache.deltaspike.cdise.api.CdiContainer) cdiContainer).shutdown();
    }
}
Use standard CDI exception
impl/src/main/java/org/apache/camel/cdi/Main.java
Use standard CDI exception
Java
apache-2.0
b63b7ab10c0ef4648da5c87d6851a5072293babd
0
FlowCI/flow-platform,FlowCI/flow-platform
/* * Copyright 2017 flow.ci * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.flow.platform.cc.test.consumer; import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; import static com.github.tomakehurst.wiremock.client.WireMock.post; import static com.github.tomakehurst.wiremock.client.WireMock.postRequestedFor; import static com.github.tomakehurst.wiremock.client.WireMock.stubFor; import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo; import static com.github.tomakehurst.wiremock.client.WireMock.verify; import com.flow.platform.cc.service.AgentService; import com.flow.platform.cc.service.CmdService; import com.flow.platform.cc.service.ZoneService; import com.flow.platform.cc.test.TestBase; import com.flow.platform.domain.Agent; import com.flow.platform.domain.AgentPath; import com.flow.platform.domain.AgentStatus; import com.flow.platform.domain.Cmd; import com.flow.platform.domain.CmdInfo; import com.flow.platform.domain.CmdStatus; import com.flow.platform.domain.CmdType; import com.flow.platform.domain.Zone; import com.github.tomakehurst.wiremock.client.CountMatchingStrategy; import com.github.tomakehurst.wiremock.junit.WireMockRule; import org.junit.Assert; import org.junit.Before; import org.junit.FixMethodOrder; import org.junit.Rule; import org.junit.Test; import org.junit.runners.MethodSorters; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.dao.CannotAcquireLockException; /** * @author [email protected] */ @FixMethodOrder(value = MethodSorters.JVM) public class CmdQueueConsumerTest extends TestBase { private final static String ZONE = "ut-test-zone-for-queue"; @Rule public WireMockRule wireMockRule = new WireMockRule(8088); @Autowired private AgentService agentService; @Autowired private ZoneService zoneService; @Autowired private CmdService cmdService; @Before public void before() throws Throwable { cleanZookeeperChilderenNode(zkHelper.buildZkPath(ZONE, null).path()); zoneService.createZone(new Zone(ZONE, "mock-cloud-provider")); } @Test public void should_retry_cmd_in_queue() throws Throwable { // given: String url = "/node/test-for-retry/callback"; CmdInfo mockCmd = new CmdInfo(ZONE, null, CmdType.RUN_SHELL, "echo hello"); mockCmd.setWebhook("http://localhost:8088" + url); stubFor(post(urlEqualTo(url)).willReturn(aResponse().withStatus(200))); // when: send to queue and waiting for retry 3 times Cmd cmd = cmdService.queue(mockCmd, 1, 3); Assert.assertNotNull(cmdService.find(cmd.getId())); Thread.sleep(6000); // then: check num of request verify(3, postRequestedFor(urlEqualTo(url))); } @Test public void should_receive_cmd_from_queue() throws Throwable { // given: String agentName = "agent-for-queue-test"; AgentPath agentPath = createMockAgent(ZONE, agentName); Thread.sleep(2000); Agent agent = agentService.find(agentPath); Assert.assertNotNull(agent); Assert.assertEquals(AgentStatus.IDLE, agent.getStatus()); // mock callback url 
stubFor(post(urlEqualTo("/node/callback")).willReturn(aResponse().withStatus(200))); // when: send cmd by rabbit mq with cmd exchange name CmdInfo mockCmd = new CmdInfo(ZONE, agentName, CmdType.RUN_SHELL, "echo hello"); mockCmd.setWebhook("http://localhost:8088/node/callback"); Cmd mockCmdInstance = cmdService.queue(mockCmd, 1, 0); Assert.assertNotNull(mockCmdInstance.getId()); Thread.sleep(1000); // then: webhook been invoked verify(1, postRequestedFor(urlEqualTo("/node/callback"))); // then: cmd should received in zookeeper agent node byte[] raw = zkClient.getData(zkHelper.getZkPath(agentPath), false, null); Cmd received = Cmd.parse(raw, Cmd.class); Assert.assertNotNull(received); Assert.assertNotNull(received.getId()); Assert.assertEquals(mockCmd.getAgentPath(), received.getAgentPath()); } @Test public void should_re_enqueue_if_no_agent() throws Throwable { // given: String testUrl = "/node/path-of-node/callback"; stubFor(post(urlEqualTo(testUrl)).willReturn(aResponse().withStatus(200))); // when: send cmd without available agent CmdInfo mockCmd = new CmdInfo(ZONE, null, CmdType.RUN_SHELL, "echo hello"); mockCmd.setWebhook("http://localhost:8088" + testUrl); Cmd mockCmdInstance = cmdService.queue(mockCmd, 1, 5); Assert.assertNotNull(mockCmdInstance.getId()); // wait for send webhook Thread.sleep(500); // then: should invoke cmd webhook for status REJECT CountMatchingStrategy countStrategy = new CountMatchingStrategy(CountMatchingStrategy.GREATER_THAN_OR_EQUAL, 1); verify(countStrategy, postRequestedFor(urlEqualTo(testUrl))); // when: createMockAgent(ZONE, "agent-for-retry-queue-test"); Thread.sleep(5000); // wait for enqueue again // then: countStrategy = new CountMatchingStrategy(CountMatchingStrategy.GREATER_THAN_OR_EQUAL, 2); verify(countStrategy, postRequestedFor(urlEqualTo(testUrl))); } @Test public void should_stop_queued_cmd() throws Throwable { // given: String testUrl = "/node/path-of-node-for-stop/callback"; stubFor(post(urlEqualTo(testUrl)).willReturn(aResponse().withStatus(200))); // when: send cmd without available agent CmdInfo mockCmd = new CmdInfo(ZONE, null, CmdType.RUN_SHELL, "echo hello"); mockCmd.setWebhook("http://localhost:8088" + testUrl); Cmd mockCmdInstance = cmdService.queue(mockCmd, 1, 5); Assert.assertNotNull(mockCmdInstance.getId()); Assert.assertNotNull(cmdDao.get(mockCmdInstance.getId())); // wait for send webhook Thread.sleep(1000); // then: verify has webhook callback if no available agent found verify(1, postRequestedFor(urlEqualTo(testUrl))); // when: set cmd to stop status try { cmdService.updateStatus(mockCmdInstance.getId(), CmdStatus.STOPPED, null, false, true); // wait for send webhook Thread.sleep(1000); // then: CountMatchingStrategy countStrategy = new CountMatchingStrategy(CountMatchingStrategy.GREATER_THAN_OR_EQUAL, 2); verify(countStrategy, postRequestedFor(urlEqualTo(testUrl))); } catch (CannotAcquireLockException acquireLockException) { // may raise the exception when this cmd is processing, in api level should return stop cmd failure } } }
platform-control-center/src/test/java/com/flow/platform/cc/test/consumer/CmdQueueConsumerTest.java
/* * Copyright 2017 flow.ci * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.flow.platform.cc.test.consumer; import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; import static com.github.tomakehurst.wiremock.client.WireMock.post; import static com.github.tomakehurst.wiremock.client.WireMock.postRequestedFor; import static com.github.tomakehurst.wiremock.client.WireMock.stubFor; import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo; import static com.github.tomakehurst.wiremock.client.WireMock.verify; import com.flow.platform.cc.service.AgentService; import com.flow.platform.cc.service.CmdService; import com.flow.platform.cc.service.ZoneService; import com.flow.platform.cc.test.TestBase; import com.flow.platform.domain.Agent; import com.flow.platform.domain.AgentPath; import com.flow.platform.domain.AgentStatus; import com.flow.platform.domain.Cmd; import com.flow.platform.domain.CmdInfo; import com.flow.platform.domain.CmdStatus; import com.flow.platform.domain.CmdType; import com.flow.platform.domain.Zone; import com.github.tomakehurst.wiremock.client.CountMatchingStrategy; import com.github.tomakehurst.wiremock.junit.WireMockRule; import org.junit.Assert; import org.junit.Before; import org.junit.FixMethodOrder; import org.junit.Rule; import org.junit.Test; import org.junit.runners.MethodSorters; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.dao.CannotAcquireLockException; /** * @author [email protected] */ @FixMethodOrder(value = MethodSorters.JVM) public class CmdQueueConsumerTest extends TestBase { private final static String ZONE = "ut-test-zone-for-queue"; @Rule public WireMockRule wireMockRule = new WireMockRule(8088); @Autowired private AgentService agentService; @Autowired private ZoneService zoneService; @Autowired private CmdService cmdService; @Before public void before() throws Throwable { cleanZookeeperChilderenNode(zkHelper.buildZkPath(ZONE, null).path()); zoneService.createZone(new Zone(ZONE, "mock-cloud-provider")); } @Test public void should_retry_cmd_in_queue() throws Throwable { // given: String url = "/node/test-for-retry/callback"; CmdInfo mockCmd = new CmdInfo(ZONE, null, CmdType.RUN_SHELL, "echo hello"); mockCmd.setWebhook("http://localhost:8088" + url); stubFor(post(urlEqualTo(url)).willReturn(aResponse().withStatus(200))); // when: send to queue and waiting for retry 3 times Cmd cmd = cmdService.queue(mockCmd, 1, 3); Assert.assertNotNull(cmdService.find(cmd.getId())); Thread.sleep(6000); // then: check num of request verify(3, postRequestedFor(urlEqualTo(url))); } @Test public void should_receive_cmd_from_queue() throws Throwable { // given: String agentName = "agent-for-queue-test"; AgentPath agentPath = createMockAgent(ZONE, agentName); Thread.sleep(2000); Agent agent = agentService.find(agentPath); Assert.assertNotNull(agent); Assert.assertEquals(AgentStatus.IDLE, agent.getStatus()); // mock callback url 
stubFor(post(urlEqualTo("/node/callback")).willReturn(aResponse().withStatus(200))); // when: send cmd by rabbit mq with cmd exchange name CmdInfo mockCmd = new CmdInfo(ZONE, agentName, CmdType.RUN_SHELL, "echo hello"); mockCmd.setWebhook("http://localhost:8088/node/callback"); Cmd mockCmdInstance = cmdService.queue(mockCmd, 1, 0); Assert.assertNotNull(mockCmdInstance.getId()); Thread.sleep(1000); // then: webhook been invoked verify(1, postRequestedFor(urlEqualTo("/node/callback"))); // then: cmd should received in zookeeper agent node byte[] raw = zkClient.getData(zkHelper.getZkPath(agentPath), false, null); Cmd received = Cmd.parse(raw, Cmd.class); Assert.assertNotNull(received); Assert.assertNotNull(received.getId()); Assert.assertEquals(mockCmd.getAgentPath(), received.getAgentPath()); } @Test public void should_re_enqueue_if_no_agent() throws Throwable { // given: String testUrl = "/node/path-of-node/callback"; stubFor(post(urlEqualTo(testUrl)).willReturn(aResponse().withStatus(200))); // when: send cmd without available agent CmdInfo mockCmd = new CmdInfo(ZONE, null, CmdType.RUN_SHELL, "echo hello"); mockCmd.setWebhook("http://localhost:8088" + testUrl); Cmd mockCmdInstance = cmdService.queue(mockCmd, 1, 5); Assert.assertNotNull(mockCmdInstance.getId()); // wait for send webhook Thread.sleep(1000); // then: should invoke cmd webhook for status REJECT verify(1, postRequestedFor(urlEqualTo(testUrl))); // when: createMockAgent(ZONE, "agent-for-retry-queue-test"); Thread.sleep(5000); // wait for enqueue again // then: CountMatchingStrategy countStrategy = new CountMatchingStrategy(CountMatchingStrategy.GREATER_THAN_OR_EQUAL, 2); verify(countStrategy, postRequestedFor(urlEqualTo(testUrl))); } @Test public void should_stop_queued_cmd() throws Throwable { // given: String testUrl = "/node/path-of-node-for-stop/callback"; stubFor(post(urlEqualTo(testUrl)).willReturn(aResponse().withStatus(200))); // when: send cmd without available agent CmdInfo mockCmd = new CmdInfo(ZONE, null, CmdType.RUN_SHELL, "echo hello"); mockCmd.setWebhook("http://localhost:8088" + testUrl); Cmd mockCmdInstance = cmdService.queue(mockCmd, 1, 5); Assert.assertNotNull(mockCmdInstance.getId()); Assert.assertNotNull(cmdDao.get(mockCmdInstance.getId())); // wait for send webhook Thread.sleep(1000); // then: verify has webhook callback if no available agent found verify(1, postRequestedFor(urlEqualTo(testUrl))); // when: set cmd to stop status try { cmdService.updateStatus(mockCmdInstance.getId(), CmdStatus.STOPPED, null, false, true); // wait for send webhook Thread.sleep(1000); // then: CountMatchingStrategy countStrategy = new CountMatchingStrategy(CountMatchingStrategy.GREATER_THAN_OR_EQUAL, 2); verify(countStrategy, postRequestedFor(urlEqualTo(testUrl))); } catch (CannotAcquireLockException acquireLockException) { // may raise the exception when this cmd is processing, in api level should return stop cmd failure } } }
fix unit test logic for queue
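The fix above swaps an exact-count verify for a lower-bound verify, so the test tolerates extra webhook deliveries caused by queue retries. A minimal sketch of that pattern with the same WireMock API the record already imports (the helper name and callback URL parameter are illustrative, not taken from the record):

import static com.github.tomakehurst.wiremock.client.WireMock.postRequestedFor;
import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo;
import static com.github.tomakehurst.wiremock.client.WireMock.verify;

import com.github.tomakehurst.wiremock.client.CountMatchingStrategy;

class QueueRetryAssertionSketch {
    // Asserting an exact callback count is flaky when the queue may re-enqueue
    // and retry; a lower-bound assertion tolerates extra webhook deliveries.
    static void verifyCallbackAtLeastOnce(String callbackUrl) {
        CountMatchingStrategy atLeastOnce =
            new CountMatchingStrategy(CountMatchingStrategy.GREATER_THAN_OR_EQUAL, 1);
        verify(atLeastOnce, postRequestedFor(urlEqualTo(callbackUrl)));
    }
}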
platform-control-center/src/test/java/com/flow/platform/cc/test/consumer/CmdQueueConsumerTest.java
fix unit test logic for queue
Java
apache-2.0
5621a3de11a0e65b0de1fe1a800a39cf2f433356
0
confluentinc/camus,confluentinc/camus
package com.linkedin.camus.etl.kafka.mapred; import java.io.IOException; import java.lang.reflect.Constructor; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Map.Entry; import java.util.Properties; import java.util.Set; import kafka.message.Message; import org.apache.avro.generic.GenericData.Record; import org.apache.avro.mapred.AvroWrapper; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.ChecksumException; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Writable; import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.JobContext; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.Mapper.Context; import org.apache.hadoop.mapreduce.RecordReader; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.joda.time.DateTime; import com.linkedin.camus.coders.CamusWrapper; import com.linkedin.camus.coders.MessageDecoder; import com.linkedin.camus.etl.kafka.CamusJob; import com.linkedin.camus.etl.kafka.coders.KafkaAvroMessageDecoder; import com.linkedin.camus.etl.kafka.coders.MessageDecoderFactory; import com.linkedin.camus.etl.kafka.common.EtlKey; import com.linkedin.camus.etl.kafka.common.EtlRequest; import com.linkedin.camus.etl.kafka.common.ExceptionWritable; import com.linkedin.camus.etl.kafka.common.KafkaReader; public class EtlRecordReader extends RecordReader<EtlKey, AvroWrapper<Object>> { private static final String PRINT_MAX_DECODER_EXCEPTIONS = "max.decoder.exceptions.to.print"; private TaskAttemptContext context; private Mapper<EtlKey, Writable, EtlKey, Writable>.Context mapperContext; private KafkaReader reader; private long totalBytes; private long readBytes = 0; private boolean skipSchemaErrors = false; private MessageDecoder<byte[], Record> decoder; private final BytesWritable msgValue = new BytesWritable(); private final BytesWritable msgKey = new BytesWritable(); private final EtlKey key = new EtlKey(); private AvroWrapper<Object> value = new AvroWrapper<Object>(new Object()); private int maxPullHours = 0; private int exceptionCount = 0; private long maxPullTime = 0; private long beginTimeStamp = 0; private long endTimeStamp = 0; private String statusMsg = ""; EtlSplit split; /** * Record reader to fetch directly from Kafka * * @param split * @param job * @param reporter * @throws IOException * @throws InterruptedException */ public EtlRecordReader(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException { initialize(split, context); } @SuppressWarnings({ "rawtypes", "unchecked" }) @Override public void initialize(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException { this.split = (EtlSplit) split; this.context = context; if (context instanceof Mapper.Context) { mapperContext = (Context) context; } this.skipSchemaErrors = EtlInputFormat.getEtlIgnoreSchemaErrors(context); if (EtlInputFormat.getKafkaMaxPullHrs(context) != -1) { this.maxPullHours = EtlInputFormat.getKafkaMaxPullHrs(context); } else { this.endTimeStamp = Long.MAX_VALUE; } if (EtlInputFormat.getKafkaMaxPullMinutesPerTask(context) != -1) { DateTime now = new DateTime(); this.maxPullTime = now.plusMinutes( EtlInputFormat.getKafkaMaxPullMinutesPerTask(context)).getMillis(); } else { this.maxPullTime = Long.MAX_VALUE; } if (EtlInputFormat.getKafkaMaxHistoricalDays(context) != -1) { int maxDays = EtlInputFormat.getKafkaMaxHistoricalDays(context); beginTimeStamp = (new 
DateTime()).minusDays(maxDays).getMillis(); } else { beginTimeStamp = 0; } this.totalBytes = this.split.getLength(); } @Override public synchronized void close() throws IOException { if (reader != null) { reader.close(); } } private CamusWrapper getWrappedRecord(String topicName, byte[] payload) throws IOException { CamusWrapper r = null; try { r = decoder.decode(payload); } catch (Exception e) { if (!skipSchemaErrors) { throw new IOException(e); } } return r; } private static byte[] getBytes(BytesWritable val) { byte[] buffer = val.getBytes(); /* * FIXME: remove the following part once the below jira is fixed * https://issues.apache.org/jira/browse/HADOOP-6298 */ long len = val.getLength(); byte[] bytes = buffer; if (len < buffer.length) { bytes = new byte[(int) len]; System.arraycopy(buffer, 0, bytes, 0, (int) len); } return bytes; } @Override public float getProgress() throws IOException { if (getPos() == 0) { return 0f; } if (getPos() >= totalBytes) { return 1f; } return (float) ((double) getPos() / totalBytes); } private long getPos() throws IOException { if (reader != null) { return readBytes + reader.getReadBytes(); } else { return readBytes; } } @Override public EtlKey getCurrentKey() throws IOException, InterruptedException { return key; } @Override public AvroWrapper<Object> getCurrentValue() throws IOException, InterruptedException { return value; } @Override public boolean nextKeyValue() throws IOException, InterruptedException { Message message = null ; // we only pull for a specified time. unfinished work will be // rescheduled in the next // run. if (System.currentTimeMillis() > maxPullTime) { System.out.println("Max pull time reached"); if (reader != null) { closeReader(); } return false; } while (true) { try { if (reader == null || reader.hasNext() == false) { EtlRequest request = split.popRequest(); if (request == null) { return false; } if (maxPullHours > 0) { endTimeStamp = 0; } key.set(request.getTopic(), request.getLeaderId(), request.getPartition(), request.getOffset(), request.getOffset(), 0); value = new AvroWrapper<Object>(new Object()); System.out.println("\n\ntopic:" + request.getTopic() + " partition:" + request.getPartition() + " beginOffset:" + request.getOffset() + " estimatedLastOffset:" + request.getLastOffset()); statusMsg += statusMsg.length() > 0 ? "; " : ""; statusMsg += request.getTopic() + ":" + request.getLeaderId() + ":" + request.getPartition(); context.setStatus(statusMsg); if (reader != null) { closeReader(); } reader = new KafkaReader(context, request, CamusJob.getKafkaTimeoutValue(mapperContext), CamusJob.getKafkaBufferSize(mapperContext)); decoder = (MessageDecoder<byte[], Record>) MessageDecoderFactory.createMessageDecoder(context, request.getTopic()); } int count = 0; while (reader.getNext(key, msgValue , msgKey)) { count++; context.progress(); mapperContext.getCounter("total", "data-read").increment(msgValue.getLength()); mapperContext.getCounter("total", "event-count").increment(1); byte[] bytes = getBytes(msgValue); byte[] keyBytes = getBytes(msgKey); // check the checksum of message. // If message has partiion key, need to construct it with Key for checkSum to match if(keyBytes.length == 0){ message = new Message(bytes); }else{ message = new Message(bytes,keyBytes); } long checksum = key.getChecksum(); if (checksum != message.checksum()) { throw new ChecksumException("Invalid message checksum " + message.checksum() + ". 
Expected " + key.getChecksum(), key.getOffset()); } long tempTime = System.currentTimeMillis(); CamusWrapper wrapper; try { wrapper = getWrappedRecord(key.getTopic(), bytes); } catch (Exception e) { if(exceptionCount < getMaximumDecoderExceptionsToPrint(context)) { mapperContext.write(key, new ExceptionWritable(e)); exceptionCount++; } else if(exceptionCount == getMaximumDecoderExceptionsToPrint(context)) { exceptionCount = Integer.MAX_VALUE; //Any random value System.out.println("The same exception has occured for more than " + getMaximumDecoderExceptionsToPrint(context) + " records. All further exceptions will not be printed"); } continue; } if (wrapper == null) { mapperContext.write(key, new ExceptionWritable(new RuntimeException( "null record"))); continue; } long timeStamp = wrapper.getTimestamp(); try { key.setTime(timeStamp); key.setPartition(wrapper.getPartitionMap()); } catch (Exception e) { mapperContext.write(key, new ExceptionWritable(e)); continue; } if (timeStamp < beginTimeStamp) { mapperContext.getCounter("total", "skip-old").increment(1); } else if (endTimeStamp == 0) { DateTime time = new DateTime(timeStamp); statusMsg += " begin read at " + time.toString(); context.setStatus(statusMsg); System.out.println(key.getTopic() + " begin read at " + time.toString()); endTimeStamp = (time.plusHours(this.maxPullHours)).getMillis(); } else if (timeStamp > endTimeStamp || System.currentTimeMillis() > maxPullTime) { if(timeStamp > endTimeStamp) System.out.println("Kafka Max history hours reached"); if(System.currentTimeMillis() > maxPullTime) System.out.println("Kafka pull time limit reached"); statusMsg += " max read at " + new DateTime(timeStamp).toString(); context.setStatus(statusMsg); System.out.println(key.getTopic() + " max read at " + new DateTime(timeStamp).toString()); mapperContext.getCounter("total", "request-time(ms)").increment( reader.getFetchTime()); closeReader(); } long secondTime = System.currentTimeMillis(); value.datum(wrapper.getRecord()); long decodeTime = ((secondTime - tempTime)); mapperContext.getCounter("total", "decode-time(ms)").increment(decodeTime); if (reader != null) { mapperContext.getCounter("total", "request-time(ms)").increment( reader.getFetchTime()); } return true; } System.out.println("Records read : " + count); count = 0; reader = null; } catch (Throwable t) { Exception e = new Exception(t.getLocalizedMessage(), t); e.setStackTrace(t.getStackTrace()); mapperContext.write(key, new ExceptionWritable(e)); reader = null; continue; } } } private void closeReader() throws IOException { if (reader != null) { try { readBytes += reader.getReadBytes(); reader.close(); } catch (Exception e) { // not much to do here but skip the task } finally { reader = null; } } } public static int getMaximumDecoderExceptionsToPrint(JobContext job) { return job.getConfiguration().getInt(PRINT_MAX_DECODER_EXCEPTIONS, 10); } }
camus-etl-kafka/src/main/java/com/linkedin/camus/etl/kafka/mapred/EtlRecordReader.java
package com.linkedin.camus.etl.kafka.mapred; import java.io.IOException; import java.lang.reflect.Constructor; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Map.Entry; import java.util.Properties; import java.util.Set; import kafka.message.Message; import org.apache.avro.generic.GenericData.Record; import org.apache.avro.mapred.AvroWrapper; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.ChecksumException; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Writable; import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.JobContext; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.Mapper.Context; import org.apache.hadoop.mapreduce.RecordReader; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.joda.time.DateTime; import com.linkedin.camus.coders.CamusWrapper; import com.linkedin.camus.coders.MessageDecoder; import com.linkedin.camus.etl.kafka.CamusJob; import com.linkedin.camus.etl.kafka.coders.KafkaAvroMessageDecoder; import com.linkedin.camus.etl.kafka.coders.MessageDecoderFactory; import com.linkedin.camus.etl.kafka.common.EtlKey; import com.linkedin.camus.etl.kafka.common.EtlRequest; import com.linkedin.camus.etl.kafka.common.ExceptionWritable; import com.linkedin.camus.etl.kafka.common.KafkaReader; public class EtlRecordReader extends RecordReader<EtlKey, AvroWrapper<Object>> { private static final String PRINT_MAX_DECODER_EXCEPTIONS = "max.decoder.exceptions.to.print"; private TaskAttemptContext context; private Mapper<EtlKey, Writable, EtlKey, Writable>.Context mapperContext; private KafkaReader reader; private long totalBytes; private long readBytes = 0; private boolean skipSchemaErrors = false; private MessageDecoder<byte[], Record> decoder; private final BytesWritable msgValue = new BytesWritable(); private final EtlKey key = new EtlKey(); private AvroWrapper<Object> value = new AvroWrapper<Object>(new Object()); private int maxPullHours = 0; private int exceptionCount = 0; private long maxPullTime = 0; private long beginTimeStamp = 0; private long endTimeStamp = 0; private String statusMsg = ""; EtlSplit split; /** * Record reader to fetch directly from Kafka * * @param split * @param job * @param reporter * @throws IOException * @throws InterruptedException */ public EtlRecordReader(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException { initialize(split, context); } @SuppressWarnings({ "rawtypes", "unchecked" }) @Override public void initialize(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException { this.split = (EtlSplit) split; this.context = context; if (context instanceof Mapper.Context) { mapperContext = (Context) context; } this.skipSchemaErrors = EtlInputFormat.getEtlIgnoreSchemaErrors(context); if (EtlInputFormat.getKafkaMaxPullHrs(context) != -1) { this.maxPullHours = EtlInputFormat.getKafkaMaxPullHrs(context); } else { this.endTimeStamp = Long.MAX_VALUE; } if (EtlInputFormat.getKafkaMaxPullMinutesPerTask(context) != -1) { DateTime now = new DateTime(); this.maxPullTime = now.plusMinutes( EtlInputFormat.getKafkaMaxPullMinutesPerTask(context)).getMillis(); } else { this.maxPullTime = Long.MAX_VALUE; } if (EtlInputFormat.getKafkaMaxHistoricalDays(context) != -1) { int maxDays = EtlInputFormat.getKafkaMaxHistoricalDays(context); beginTimeStamp = (new DateTime()).minusDays(maxDays).getMillis(); } else { 
beginTimeStamp = 0; } this.totalBytes = this.split.getLength(); } @Override public synchronized void close() throws IOException { if (reader != null) { reader.close(); } } private CamusWrapper getWrappedRecord(String topicName, byte[] payload) throws IOException { CamusWrapper r = null; try { r = decoder.decode(payload); } catch (Exception e) { if (!skipSchemaErrors) { throw new IOException(e); } } return r; } private static byte[] getBytes(BytesWritable val) { byte[] buffer = val.getBytes(); /* * FIXME: remove the following part once the below jira is fixed * https://issues.apache.org/jira/browse/HADOOP-6298 */ long len = val.getLength(); byte[] bytes = buffer; if (len < buffer.length) { bytes = new byte[(int) len]; System.arraycopy(buffer, 0, bytes, 0, (int) len); } return bytes; } @Override public float getProgress() throws IOException { if (getPos() == 0) { return 0f; } if (getPos() >= totalBytes) { return 1f; } return (float) ((double) getPos() / totalBytes); } private long getPos() throws IOException { if (reader != null) { return readBytes + reader.getReadBytes(); } else { return readBytes; } } @Override public EtlKey getCurrentKey() throws IOException, InterruptedException { return key; } @Override public AvroWrapper<Object> getCurrentValue() throws IOException, InterruptedException { return value; } @Override public boolean nextKeyValue() throws IOException, InterruptedException { // we only pull for a specified time. unfinished work will be // rescheduled in the next // run. if (System.currentTimeMillis() > maxPullTime) { System.out.println("Max pull time reached"); if (reader != null) { closeReader(); } return false; } while (true) { try { if (reader == null || reader.hasNext() == false) { EtlRequest request = split.popRequest(); if (request == null) { return false; } if (maxPullHours > 0) { endTimeStamp = 0; } key.set(request.getTopic(), request.getLeaderId(), request.getPartition(), request.getOffset(), request.getOffset(), 0); value = new AvroWrapper<Object>(new Object()); System.out.println("\n\ntopic:" + request.getTopic() + " partition:" + request.getPartition() + " beginOffset:" + request.getOffset() + " estimatedLastOffset:" + request.getLastOffset()); statusMsg += statusMsg.length() > 0 ? "; " : ""; statusMsg += request.getTopic() + ":" + request.getLeaderId() + ":" + request.getPartition(); context.setStatus(statusMsg); if (reader != null) { closeReader(); } reader = new KafkaReader(context, request, CamusJob.getKafkaTimeoutValue(mapperContext), CamusJob.getKafkaBufferSize(mapperContext)); decoder = (MessageDecoder<byte[], Record>) MessageDecoderFactory.createMessageDecoder(context, request.getTopic()); } int count = 0; while (reader.getNext(key, msgValue)) { count++; context.progress(); mapperContext.getCounter("total", "data-read").increment(msgValue.getLength()); mapperContext.getCounter("total", "event-count").increment(1); byte[] bytes = getBytes(msgValue); // check the checksum of message Message message = new Message(bytes); long checksum = key.getChecksum(); if (checksum != message.checksum()) { throw new ChecksumException("Invalid message checksum " + message.checksum() + ". 
Expected " + key.getChecksum(), key.getOffset()); } long tempTime = System.currentTimeMillis(); CamusWrapper wrapper; try { wrapper = getWrappedRecord(key.getTopic(), bytes); } catch (Exception e) { if(exceptionCount < getMaximumDecoderExceptionsToPrint(context)) { mapperContext.write(key, new ExceptionWritable(e)); exceptionCount++; } else if(exceptionCount == getMaximumDecoderExceptionsToPrint(context)) { exceptionCount = Integer.MAX_VALUE; //Any random value System.out.println("The same exception has occured for more than " + getMaximumDecoderExceptionsToPrint(context) + " records. All further exceptions will not be printed"); } continue; } if (wrapper == null) { mapperContext.write(key, new ExceptionWritable(new RuntimeException( "null record"))); continue; } long timeStamp = wrapper.getTimestamp(); try { key.setTime(timeStamp); key.setPartition(wrapper.getPartitionMap()); } catch (Exception e) { mapperContext.write(key, new ExceptionWritable(e)); continue; } if (timeStamp < beginTimeStamp) { mapperContext.getCounter("total", "skip-old").increment(1); } else if (endTimeStamp == 0) { DateTime time = new DateTime(timeStamp); statusMsg += " begin read at " + time.toString(); context.setStatus(statusMsg); System.out.println(key.getTopic() + " begin read at " + time.toString()); endTimeStamp = (time.plusHours(this.maxPullHours)).getMillis(); } else if (timeStamp > endTimeStamp || System.currentTimeMillis() > maxPullTime) { if(timeStamp > endTimeStamp) System.out.println("Kafka Max history hours reached"); if(System.currentTimeMillis() > maxPullTime) System.out.println("Kafka pull time limit reached"); statusMsg += " max read at " + new DateTime(timeStamp).toString(); context.setStatus(statusMsg); System.out.println(key.getTopic() + " max read at " + new DateTime(timeStamp).toString()); mapperContext.getCounter("total", "request-time(ms)").increment( reader.getFetchTime()); closeReader(); } long secondTime = System.currentTimeMillis(); value.datum(wrapper.getRecord()); long decodeTime = ((secondTime - tempTime)); mapperContext.getCounter("total", "decode-time(ms)").increment(decodeTime); if (reader != null) { mapperContext.getCounter("total", "request-time(ms)").increment( reader.getFetchTime()); } return true; } System.out.println("Records read : " + count); count = 0; reader = null; } catch (Throwable t) { Exception e = new Exception(t.getLocalizedMessage(), t); e.setStackTrace(t.getStackTrace()); mapperContext.write(key, new ExceptionWritable(e)); reader = null; continue; } } } private void closeReader() throws IOException { if (reader != null) { try { readBytes += reader.getReadBytes(); reader.close(); } catch (Exception e) { // not much to do here but skip the task } finally { reader = null; } } } public static int getMaximumDecoderExceptionsToPrint(JobContext job) { return job.getConfiguration().getInt(PRINT_MAX_DECODER_EXCEPTIONS, 10); } }
Update EtlRecordReader.java Camus has an issue and fails while comparing the checksum of a message once a partition ID is used to persist the message. The issue is with the way Camus reads messages from Kafka via the EtlRecordReader.java and KafkaReader.java classes. If a message is persisted with a partition ID, Kafka persists the partition key along with the message. But when Camus computes the checksum, it only takes the payload, not the complete message (i.e. the message with the partition key). Thus the checksum comparison fails.
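In other words, the broker-side checksum covers key plus payload when a partition key is present, so the reader must rebuild the Message the same way before comparing. A minimal sketch of that check, using only the kafka.message.Message and ChecksumException calls already present in this record (payloadBytes, keyBytes, expectedChecksum and offset stand in for values EtlRecordReader already has in scope):

import kafka.message.Message;

import org.apache.hadoop.fs.ChecksumException;

class MessageChecksumSketch {
    // The checksum was computed over key + payload when a partition key was
    // used, so reconstruct the Message with both parts before comparing.
    static void verifyChecksum(byte[] payloadBytes, byte[] keyBytes,
                               long expectedChecksum, long offset) throws ChecksumException {
        Message message = (keyBytes.length == 0)
            ? new Message(payloadBytes)
            : new Message(payloadBytes, keyBytes);
        if (expectedChecksum != message.checksum()) {
            throw new ChecksumException("Invalid message checksum " + message.checksum()
                + ". Expected " + expectedChecksum, offset);
        }
    }
}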
camus-etl-kafka/src/main/java/com/linkedin/camus/etl/kafka/mapred/EtlRecordReader.java
Update EtlRecordReader.java
Java
apache-2.0
24ed852bb6e0d2e5887f9ada083bd943fdcffa91
0
metaborg/jsglr,metaborg/jsglr,metaborg/jsglr,metaborg/jsglr
package org.spoofax.jsglr2.incremental; import static com.google.common.collect.Iterables.size; import static org.metaborg.util.iterators.Iterables2.stream; import static org.spoofax.jsglr2.parser.observing.IParserObserver.BreakdownReason.*; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.stream.Collectors; import org.metaborg.parsetable.IParseTable; import org.metaborg.parsetable.actions.*; import org.spoofax.jsglr2.JSGLR2Request; import org.spoofax.jsglr2.incremental.actions.GotoShift; import org.spoofax.jsglr2.incremental.diff.IStringDiff; import org.spoofax.jsglr2.incremental.diff.JGitHistogramDiff; import org.spoofax.jsglr2.incremental.diff.ProcessUpdates; import org.spoofax.jsglr2.incremental.parseforest.IncrementalDerivation; import org.spoofax.jsglr2.incremental.parseforest.IncrementalParseForest; import org.spoofax.jsglr2.incremental.parseforest.IncrementalParseForestManager; import org.spoofax.jsglr2.incremental.parseforest.IncrementalParseNode; import org.spoofax.jsglr2.inputstack.incremental.IIncrementalInputStack; import org.spoofax.jsglr2.inputstack.incremental.IncrementalInputStackFactory; import org.spoofax.jsglr2.parseforest.Disambiguator; import org.spoofax.jsglr2.parseforest.IParseNode; import org.spoofax.jsglr2.parseforest.ParseForestManagerFactory; import org.spoofax.jsglr2.parser.*; import org.spoofax.jsglr2.parser.failure.ParseFailureHandlerFactory; import org.spoofax.jsglr2.reducing.ReduceManagerFactory; import org.spoofax.jsglr2.stack.AbstractStackManager; import org.spoofax.jsglr2.stack.IStackNode; import org.spoofax.jsglr2.stack.StackManagerFactory; public class IncrementalParser // @formatter:off <StackNode extends IStackNode, ParseState extends AbstractParseState<IIncrementalInputStack, StackNode> & IIncrementalParseState, StackManager extends AbstractStackManager<IncrementalParseForest, IncrementalDerivation, IncrementalParseNode, StackNode, ParseState>, ReduceManager extends org.spoofax.jsglr2.reducing.ReduceManager<IncrementalParseForest, IncrementalDerivation, IncrementalParseNode, StackNode, IIncrementalInputStack, ParseState>> // @formatter:on extends Parser<IncrementalParseForest, IncrementalDerivation, IncrementalParseNode, StackNode, IIncrementalInputStack, ParseState, StackManager, ReduceManager> { private final IncrementalInputStackFactory<IIncrementalInputStack> incrementalInputStackFactory; private final IStringDiff diff; private final ProcessUpdates<StackNode, ParseState> processUpdates; public IncrementalParser(IncrementalInputStackFactory<IIncrementalInputStack> incrementalInputStackFactory, ParseStateFactory<IncrementalParseForest, IncrementalDerivation, IncrementalParseNode, IIncrementalInputStack, StackNode, ParseState> parseStateFactory, IParseTable parseTable, StackManagerFactory<IncrementalParseForest, IncrementalDerivation, IncrementalParseNode, StackNode, ParseState, StackManager> stackManagerFactory, ParseForestManagerFactory<IncrementalParseForest, IncrementalDerivation, IncrementalParseNode, StackNode, ParseState> parseForestManagerFactory, Disambiguator<IncrementalParseForest, IncrementalDerivation, IncrementalParseNode, StackNode, ParseState> disambiguator, ReduceManagerFactory<IncrementalParseForest, IncrementalDerivation, IncrementalParseNode, StackNode, IIncrementalInputStack, ParseState, StackManager, ReduceManager> reduceManagerFactory, ParseFailureHandlerFactory<IncrementalParseForest, IncrementalDerivation, IncrementalParseNode, StackNode, ParseState> 
failureHandlerFactory, ParseReporterFactory<IncrementalParseForest, IncrementalDerivation, IncrementalParseNode, StackNode, IIncrementalInputStack, ParseState> reporterFactory) { super(null, parseStateFactory, parseTable, stackManagerFactory, parseForestManagerFactory, disambiguator, reduceManagerFactory, failureHandlerFactory, reporterFactory); this.incrementalInputStackFactory = incrementalInputStackFactory; // TODO parametrize parser on diff algorithm for benchmarking this.diff = new JGitHistogramDiff(); this.processUpdates = new ProcessUpdates<>((IncrementalParseForestManager<StackNode, ParseState>) parseForestManager); } @Override protected ParseState getParseState(JSGLR2Request request, String previousInput, IncrementalParseForest previousResult) { IncrementalParseForest updatedTree = previousInput != null && previousResult != null ? processUpdates.processUpdates(previousInput, previousResult, diff.diff(previousInput, request.input)) : processUpdates.getParseNodeFromString(request.input); return parseStateFactory.get(request, incrementalInputStackFactory.get(updatedTree, request.input), observing); } @Override protected void parseLoop(ParseState parseState) throws ParseException { if(!attemptToFullyReuse(parseState)) super.parseLoop(parseState); } // Optimization: if the first node on the lookahead stack has no changes and can be completely reused, do so private boolean attemptToFullyReuse(ParseState parseState) { // We cannot do this optimization if... // ...the parse is not at the start anymore (parseLoop may be called multiple times due to recovery) if(parseState.inputStack.offset() != 0 || parseState.activeStacks.getSingle().state().id() != parseTable.getStartState().id()) return false; IncrementalParseNode rootNode = (IncrementalParseNode) parseState.inputStack.getNode(); // ...the root node is a temporary node if(rootNode.production() == null) return false; // ...the root node does not span the entire input if(rootNode.width() != parseState.inputStack.length()) return false; StackNode stack = parseState.activeStacks.getSingle(); // Shift the entire tree addForShifter(parseState, stack, parseTable.getState(stack.state().getGotoId(rootNode.production().id()))); shifter(parseState); parseState.inputStack.next(); // Accept actor(parseState.activeStacks.getSingle(), parseState, Accept.SINGLETON); return true; } @Override protected void actor(StackNode stack, ParseState parseState) { IncrementalParseForest originalLookahead = parseState.inputStack.getNode(); Iterable<IAction> actions = breakDownUntilValidActions(stack, parseState); // If we already had something to shift and the lookahead has been broken down, // update the goto states in forShifter based on the new lookahead. // If we wouldn't do this, it would cause different shifts to be desynchronised. 
if(!parseState.forShifter.isEmpty() && parseState.inputStack.getNode() != originalLookahead) updateForShifterStates(parseState); if(size(actions) > 1) parseState.setMultipleStates(true); observing.notify(observer -> observer.actor(stack, parseState, actions)); for(IAction action : actions) actor(stack, parseState, action); } private Iterable<IAction> breakDownUntilValidActions(StackNode stack, ParseState parseState) { // Get actions based on the lookahead terminal from `inputStack.actionQueryCharacter` Iterable<IAction> originalActions = stack.state().getApplicableActions(parseState.inputStack, parseState.mode); IncrementalParseForest lookahead = parseState.inputStack.getNode(); if(lookahead.isTerminal()) { return originalActions; } boolean hasShiftActions = stream(originalActions).anyMatch(a -> a.actionType() == ActionType.SHIFT); do { IncrementalParseNode lookaheadNode = (IncrementalParseNode) lookahead; // Only allow shifting the subtree if the saved state matches the current state if(lookaheadNode.isReusable(stack.state())) { // Remove shift actions from the original actions list List<IAction> filteredActions = stream(originalActions) .filter(a -> a.actionType() == ActionType.REDUCE || a.actionType() == ActionType.REDUCE_LOOKAHEAD) .collect(Collectors.toList()); // Optimization: if the (only) reduce action already appears in the to-be-reused lookahead, // the reduce action can be removed. // This is to avoid multipleStates = true, // and should only happen in case multipleStates == false to avoid messing up other parse branches. if(parseState.newParseNodesAreReusable() && filteredActions.size() == 1 && nullReduceMatchesLookahead(stack, (IReduce) filteredActions.get(0), lookaheadNode)) { filteredActions.clear(); } // Reusable nodes have only one derivation, by definition, so the production of the node is correct filteredActions.add(new GotoShift(stack.state().getGotoId(lookaheadNode.production().id()))); return filteredActions; } // Break down the lookahead in either of the following scenarios: // - the lookahead is not reusable, or // - the lookahead has applicable shift actions // If neither scenario is the case, directly return the current list of actions. if(lookaheadNode.isReusable() && !hasShiftActions) { return originalActions; } observing.notify(observer -> observer.breakDown(parseState.inputStack, lookaheadNode.production() == null ? TEMPORARY : lookaheadNode.isReusable() ? lookaheadNode.isReusable(stack.state()) ? NO_ACTIONS : WRONG_STATE : IRREUSABLE)); parseState.inputStack.breakDown(); observing.notify(observer -> observer.parseRound(parseState, parseState.activeStacks)); // If the broken-down node has no children, it has been removed from the input stack. // Therefore, any GotoShift actions that were in the forShifter list become invalid. // They can be discarded, because they will replaced by 0-arity reductions. 
if(!parseState.forShifter.isEmpty() && lookaheadNode.getFirstDerivation().parseForests.length == 0) parseState.forShifter.clear(); lookahead = parseState.inputStack.getNode(); if(lookahead.isTerminal()) { return originalActions; } } while(true); } private void updateForShifterStates(ParseState parseState) { List<ForShifterElement<StackNode>> oldForShifter = new ArrayList<>(parseState.forShifter); parseState.forShifter.clear(); IncrementalParseForest newLookaheadNode = parseState.inputStack.getNode(); if(newLookaheadNode instanceof IParseNode) { // If the new lookahead node is a parse node, replace the forShifter states // with new goto states based on the production of the new lookahead node. int productionId = ((IParseNode<?, ?>) newLookaheadNode).production().id(); for(ForShifterElement<StackNode> forShifterElement : oldForShifter) { StackNode forShifterStack = forShifterElement.stack; addForShifter(parseState, forShifterStack, parseTable.getState(forShifterStack.state().getGotoId(productionId))); } } else { // If the new lookahead node is a character node, replace the forShifter states // with the shift states from the parse table. Set<StackNode> seen = new HashSet<>(); for(ForShifterElement<StackNode> forShifterElement : oldForShifter) { StackNode forShifterStack = forShifterElement.stack; if(seen.contains(forShifterStack)) continue; seen.add(forShifterStack); // Note that there can be multiple shift states per stack, // due to shift/shift conflicts in the parse table. for(IAction action : forShifterStack.state().getApplicableActions(parseState.inputStack, parseState.mode)) { if(action.actionType() != ActionType.SHIFT) continue; addForShifter(parseState, forShifterStack, parseTable.getState(((IShift) action).shiftStateId())); } } } } // If there are two actions, with one reduce of arity 0 and one GotoShift that contains this subtree already, // then the reduce of arity 0 is not necessary. // This method returns whether this is the case. private boolean nullReduceMatchesLookahead(StackNode stack, IReduce reduceAction, IncrementalParseNode lookaheadNode) { if(reduceAction.arity() != 0) return false; int reduceGoto = stack.state().getGotoId(reduceAction.production().id()); while(true) { if(reduceGoto == stack.state().getGotoId(lookaheadNode.production().id())) return true; IncrementalParseForest[] children = lookaheadNode.getFirstDerivation().parseForests; if(children.length == 0) return false; IncrementalParseForest child = children[0]; if(child.isTerminal()) return false; lookaheadNode = ((IncrementalParseNode) child); } } @Override protected IncrementalParseForest getNodeToShift(ParseState parseState) { return parseState.inputStack.getNode(); } }
org.spoofax.jsglr2/src/main/java/org/spoofax/jsglr2/incremental/IncrementalParser.java
package org.spoofax.jsglr2.incremental; import static com.google.common.collect.Iterables.size; import static org.metaborg.util.iterators.Iterables2.stream; import static org.spoofax.jsglr2.parser.observing.IParserObserver.BreakdownReason.*; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.stream.Collectors; import org.metaborg.parsetable.IParseTable; import org.metaborg.parsetable.actions.*; import org.spoofax.jsglr2.JSGLR2Request; import org.spoofax.jsglr2.incremental.actions.GotoShift; import org.spoofax.jsglr2.incremental.diff.IStringDiff; import org.spoofax.jsglr2.incremental.diff.JGitHistogramDiff; import org.spoofax.jsglr2.incremental.diff.ProcessUpdates; import org.spoofax.jsglr2.incremental.parseforest.IncrementalDerivation; import org.spoofax.jsglr2.incremental.parseforest.IncrementalParseForest; import org.spoofax.jsglr2.incremental.parseforest.IncrementalParseForestManager; import org.spoofax.jsglr2.incremental.parseforest.IncrementalParseNode; import org.spoofax.jsglr2.inputstack.incremental.IIncrementalInputStack; import org.spoofax.jsglr2.inputstack.incremental.IncrementalInputStackFactory; import org.spoofax.jsglr2.parseforest.Disambiguator; import org.spoofax.jsglr2.parseforest.IParseNode; import org.spoofax.jsglr2.parseforest.ParseForestManagerFactory; import org.spoofax.jsglr2.parser.*; import org.spoofax.jsglr2.parser.failure.ParseFailureHandlerFactory; import org.spoofax.jsglr2.reducing.ReduceManagerFactory; import org.spoofax.jsglr2.stack.AbstractStackManager; import org.spoofax.jsglr2.stack.IStackNode; import org.spoofax.jsglr2.stack.StackManagerFactory; public class IncrementalParser // @formatter:off <StackNode extends IStackNode, ParseState extends AbstractParseState<IIncrementalInputStack, StackNode> & IIncrementalParseState, StackManager extends AbstractStackManager<IncrementalParseForest, IncrementalDerivation, IncrementalParseNode, StackNode, ParseState>, ReduceManager extends org.spoofax.jsglr2.reducing.ReduceManager<IncrementalParseForest, IncrementalDerivation, IncrementalParseNode, StackNode, IIncrementalInputStack, ParseState>> // @formatter:on extends Parser<IncrementalParseForest, IncrementalDerivation, IncrementalParseNode, StackNode, IIncrementalInputStack, ParseState, StackManager, ReduceManager> { private final IncrementalInputStackFactory<IIncrementalInputStack> incrementalInputStackFactory; private final IStringDiff diff; private final ProcessUpdates<StackNode, ParseState> processUpdates; public IncrementalParser(IncrementalInputStackFactory<IIncrementalInputStack> incrementalInputStackFactory, ParseStateFactory<IncrementalParseForest, IncrementalDerivation, IncrementalParseNode, IIncrementalInputStack, StackNode, ParseState> parseStateFactory, IParseTable parseTable, StackManagerFactory<IncrementalParseForest, IncrementalDerivation, IncrementalParseNode, StackNode, ParseState, StackManager> stackManagerFactory, ParseForestManagerFactory<IncrementalParseForest, IncrementalDerivation, IncrementalParseNode, StackNode, ParseState> parseForestManagerFactory, Disambiguator<IncrementalParseForest, IncrementalDerivation, IncrementalParseNode, StackNode, ParseState> disambiguator, ReduceManagerFactory<IncrementalParseForest, IncrementalDerivation, IncrementalParseNode, StackNode, IIncrementalInputStack, ParseState, StackManager, ReduceManager> reduceManagerFactory, ParseFailureHandlerFactory<IncrementalParseForest, IncrementalDerivation, IncrementalParseNode, StackNode, ParseState> 
failureHandlerFactory, ParseReporterFactory<IncrementalParseForest, IncrementalDerivation, IncrementalParseNode, StackNode, IIncrementalInputStack, ParseState> reporterFactory) { super(null, parseStateFactory, parseTable, stackManagerFactory, parseForestManagerFactory, disambiguator, reduceManagerFactory, failureHandlerFactory, reporterFactory); this.incrementalInputStackFactory = incrementalInputStackFactory; // TODO parametrize parser on diff algorithm for benchmarking this.diff = new JGitHistogramDiff(); this.processUpdates = new ProcessUpdates<>((IncrementalParseForestManager<StackNode, ParseState>) parseForestManager); } @Override protected ParseState getParseState(JSGLR2Request request, String previousInput, IncrementalParseForest previousResult) { IncrementalParseForest updatedTree = previousInput != null && previousResult != null ? processUpdates.processUpdates(previousInput, previousResult, diff.diff(previousInput, request.input)) : processUpdates.getParseNodeFromString(request.input); return parseStateFactory.get(request, incrementalInputStackFactory.get(updatedTree, request.input), observing); } @Override protected void parseLoop(ParseState parseState) throws ParseException { if(!attemptToFullyReuse(parseState)) super.parseLoop(parseState); } // Optimization: if the first node on the lookahead stack has no changes and can be completely reused, do so private boolean attemptToFullyReuse(ParseState parseState) { // We cannot do this optimization if... // ...the parse is not at the start anymore (parseLoop may be called multiple times due to recovery) if(parseState.inputStack.offset() != 0 || parseState.activeStacks.getSingle().state().id() != parseTable.getStartState().id()) return false; IncrementalParseNode rootNode = (IncrementalParseNode) parseState.inputStack.getNode(); // ...the root node is a temporary node if(rootNode.production() == null) return false; // ...the root node does not span the entire input if(rootNode.width() != parseState.inputStack.length()) return false; StackNode stack = parseState.activeStacks.getSingle(); // Shift the entire tree addForShifter(parseState, stack, parseTable.getState(stack.state().getGotoId(rootNode.production().id()))); shifter(parseState); parseState.inputStack.next(); // Accept actor(parseState.activeStacks.getSingle(), parseState, Accept.SINGLETON); return true; } @Override protected void actor(StackNode stack, ParseState parseState) { Iterable<IAction> actions = breakDownUntilValidActions(stack, parseState); if(size(actions) > 1) parseState.setMultipleStates(true); observing.notify(observer -> observer.actor(stack, parseState, actions)); for(IAction action : actions) actor(stack, parseState, action); } private Iterable<IAction> breakDownUntilValidActions(StackNode stack, ParseState parseState) { // Get actions based on the lookahead terminal from `inputStack.actionQueryCharacter` Iterable<IAction> originalActions = stack.state().getApplicableActions(parseState.inputStack, parseState.mode); IncrementalParseForest lookahead = parseState.inputStack.getNode(); if(lookahead.isTerminal()) { return originalActions; } boolean hasShiftActions = stream(originalActions).anyMatch(a -> a.actionType() == ActionType.SHIFT); do { IncrementalParseNode lookaheadNode = (IncrementalParseNode) lookahead; // Only allow shifting the subtree if the saved state matches the current state if(lookaheadNode.isReusable(stack.state())) { // Remove shift actions from the original actions list List<IAction> filteredActions = stream(originalActions) .filter(a -> 
a.actionType() == ActionType.REDUCE || a.actionType() == ActionType.REDUCE_LOOKAHEAD) .collect(Collectors.toList()); // Optimization: if the (only) reduce action already appears in the to-be-reused lookahead, // the reduce action can be removed. // This is to avoid multipleStates = true, // and should only happen in case multipleStates == false to avoid messing up other parse branches. if(parseState.newParseNodesAreReusable() && filteredActions.size() == 1 && nullReduceMatchesLookahead(stack, (IReduce) filteredActions.get(0), lookaheadNode)) { filteredActions.clear(); } // Reusable nodes have only one derivation, by definition, so the production of the node is correct filteredActions.add(new GotoShift(stack.state().getGotoId(lookaheadNode.production().id()))); return filteredActions; } // Break down the lookahead in either of the following scenarios: // - the lookahead is not reusable, or // - the lookahead has applicable shift actions // If neither scenario is the case, directly return the current list of actions. if(lookaheadNode.isReusable() && !hasShiftActions) { return originalActions; } observing.notify(observer -> observer.breakDown(parseState.inputStack, lookaheadNode.production() == null ? TEMPORARY : lookaheadNode.isReusable() ? lookaheadNode.isReusable(stack.state()) ? NO_ACTIONS : WRONG_STATE : IRREUSABLE)); parseState.inputStack.breakDown(); observing.notify(observer -> observer.parseRound(parseState, parseState.activeStacks)); // If we already had something to shift, update the goto states in forShifter based on the new lookahead. // If we wouldn't do this, it would cause different shifts to be desynchronised. if(!parseState.forShifter.isEmpty()) { // If the broken-down node has no children, it has been removed from the input stack. // Therefore, any GotoShift actions that were in the forShifter list become invalid. // They can be discarded, because they will replaced by 0-arity reductions. if(lookaheadNode.getFirstDerivation().parseForests.length == 0) parseState.forShifter.clear(); else updateForShifterStates(parseState); } lookahead = parseState.inputStack.getNode(); if(lookahead.isTerminal()) { return originalActions; } } while(true); } private void updateForShifterStates(ParseState parseState) { List<ForShifterElement<StackNode>> oldForShifter = new ArrayList<>(parseState.forShifter); parseState.forShifter.clear(); IncrementalParseForest newLookaheadNode = parseState.inputStack.getNode(); if(newLookaheadNode instanceof IParseNode) { // If the new lookahead node is a parse node, replace the forShifter states // with new goto states based on the production of the new lookahead node. int productionId = ((IParseNode<?, ?>) newLookaheadNode).production().id(); for(ForShifterElement<StackNode> forShifterElement : oldForShifter) { StackNode forShifterStack = forShifterElement.stack; addForShifter(parseState, forShifterStack, parseTable.getState(forShifterStack.state().getGotoId(productionId))); } } else { // If the new lookahead node is a character node, replace the forShifter states // with the shift states from the parse table. Set<StackNode> seen = new HashSet<>(); for(ForShifterElement<StackNode> forShifterElement : oldForShifter) { StackNode forShifterStack = forShifterElement.stack; if(seen.contains(forShifterStack)) continue; seen.add(forShifterStack); // Note that there can be multiple shift states per stack, // due to shift/shift conflicts in the parse table. 
for(IAction action : forShifterStack.state().getApplicableActions(parseState.inputStack, parseState.mode)) { if(action.actionType() != ActionType.SHIFT) continue; addForShifter(parseState, forShifterStack, parseTable.getState(((IShift) action).shiftStateId())); } } } } // If there are two actions, with one reduce of arity 0 and one GotoShift that contains this subtree already, // then the reduce of arity 0 is not necessary. // This method returns whether this is the case. private boolean nullReduceMatchesLookahead(StackNode stack, IReduce reduceAction, IncrementalParseNode lookaheadNode) { if(reduceAction.arity() != 0) return false; int reduceGoto = stack.state().getGotoId(reduceAction.production().id()); while(true) { if(reduceGoto == stack.state().getGotoId(lookaheadNode.production().id())) return true; IncrementalParseForest[] children = lookaheadNode.getFirstDerivation().parseForests; if(children.length == 0) return false; IncrementalParseForest child = children[0]; if(child.isTerminal()) return false; lookaheadNode = ((IncrementalParseNode) child); } } @Override protected IncrementalParseForest getNodeToShift(ParseState parseState) { return parseState.inputStack.getNode(); } }
Delay updateForShifterStates until after breakdown loop
org.spoofax.jsglr2/src/main/java/org/spoofax/jsglr2/incremental/IncrementalParser.java
Delay updateForShifterStates until after breakdown loop
Java
apache-2.0
fc35c7a5f97049497163f80f53de1a3f80b265a3
0
LogisticsImpactModel/LIMO,LogisticsImpactModel/LIMO,LogisticsImpactModel/LIMO
package nl.fontys.sofa.limo.externaltrader;

import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.sql.query.OSQLSynchQuery;
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.util.List;
import java.util.Map;
import java.util.Set;
import nl.fontys.sofa.limo.domain.BaseEntity;
import nl.fontys.sofa.limo.orientdb.OrientDBConnector;
import org.json.JSONObject;

/**
 * JSON exporter that converts a map of BaseEntities to a JSON object, which is
 * then written to a file at the specified filepath.
 *
 * @author Matthias Brück
 */
public final class JSONExporter {

    private JSONExporter() {
    }

    /**
     * Exports the given entities as JSON.
     *
     * @param allEntities - map of all BaseEntities that should be exported
     * @param filepath - the location where the file should be saved
     */
    public static void exportToJson(Map<String, List<BaseEntity>> allEntities, String filepath) {
        JSONObject json = new JSONObject();
        Set<Map.Entry<String, List<BaseEntity>>> entrySet = allEntities.entrySet();
        for (Map.Entry<String, List<BaseEntity>> set : entrySet) {
            for (BaseEntity entity : set.getValue()) {
                OSQLSynchQuery<ODocument> query = new OSQLSynchQuery<>("select from index:uuid where key = '" + entity.getUniqueIdentifier() + "'");
                List<ODocument> result = OrientDBConnector.connection().query(query);
                if (!result.isEmpty()) {
                    ODocument d = result.get(0).field("rid");
                    json.append(set.getKey(), new JSONObject(d.toJSON("class,attribSameRow")));
                }
            }
        }
        writeJSON(json, filepath);
    }

    /**
     * Writes the JSON object to the specified filepath.
     *
     * @param json - the JSON object that is to be saved
     * @param filepath - the location where the file should be saved
     */
    private static void writeJSON(JSONObject json, String filepath) {
        try (PrintWriter out = new PrintWriter(filepath)) {
            out.print(json.toString());
        } catch (FileNotFoundException ex) {
            ex.printStackTrace();
        }
    }
}
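A minimal sketch of the append-and-write pattern JSONExporter relies on, using only org.json and java.io (no OrientDB). The category name "hubs", the field values, and the output path are made-up examples, not values from the LIMO project.

import java.io.FileNotFoundException;
import java.io.PrintWriter;
import org.json.JSONObject;

public class AppendWriteSketch {
    public static void main(String[] args) {
        JSONObject json = new JSONObject();
        // append() accumulates values under one key into a JSON array,
        // which is how JSONExporter groups entities per category.
        json.append("hubs", new JSONObject().put("name", "Rotterdam"));
        json.append("hubs", new JSONObject().put("name", "Venlo"));
        try (PrintWriter out = new PrintWriter("export.json")) { // hypothetical path
            out.print(json.toString());
        } catch (FileNotFoundException ex) {
            ex.printStackTrace();
        }
    }
}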
LIMO/externalTraider/src/main/java/nl/fontys/sofa/limo/externaltrader/JSONExporter.java
package nl.fontys.sofa.limo.externaltrader; import com.orientechnologies.orient.core.record.impl.ODocument; import com.orientechnologies.orient.core.sql.query.OSQLSynchQuery; import java.io.FileNotFoundException; import java.io.PrintWriter; import java.util.List; import java.util.Map; import java.util.Set; import nl.fontys.sofa.limo.domain.BaseEntity; import nl.fontys.sofa.limo.orientdb.OrientDBConnector; import org.json.JSONObject; /** * @author Matthias Brück */ public final class JSONExporter { private JSONExporter() { } public static void exportToJson(Map<String, List<BaseEntity>> allEntities, String filepath) { JSONObject json = new JSONObject(); Set<Map.Entry<String, List<BaseEntity>>> entrySet = allEntities.entrySet(); for (Map.Entry<String, List<BaseEntity>> set : entrySet) { for (BaseEntity entity : set.getValue()) { OSQLSynchQuery<ODocument> query = new OSQLSynchQuery<>("select from index:uuid where key = '" + entity.getUniqueIdentifier() + "'"); List<ODocument> result = OrientDBConnector.connection().query(query); if (!result.isEmpty()) { ODocument d = result.get(0).field("rid"); json.append(set.getKey(), new JSONObject(d.toJSON("class,attribSameRow"))); } } } writeJSON(json, filepath); } private static void writeJSON(JSONObject json, String filepath) { try (PrintWriter out = new PrintWriter(filepath)) { out.print(json.toString()); } catch (FileNotFoundException ex) { ex.printStackTrace(); } } }
improved javadoc
LIMO/externalTraider/src/main/java/nl/fontys/sofa/limo/externaltrader/JSONExporter.java
improved javadoc
Java
apache-2.0
27f44caea1897dbb6a49702964774b7470953823
0
Malamut54/dbobrov,Malamut54/dbobrov,Malamut54/dbobrov
package ru.job4j.array;

/**
*Sorts an array using Bubble sort. Task 5.1.
*@author Dmitriy Bobrov (mailto:[email protected])
*@since 0.1
*/
public class BubbleSort {
	/**
	*Class BubbleSort sorts an array.
	*/
	/**
	*Method sort implements Bubble sort: on each pass the largest remaining
	*element bubbles up to the end of the unsorted part of the array.
	*@param array - input array.
	*@return array - sorted array.
	*/
	public int[] sort(int[] array) {
		for (int i = array.length - 1; i >= 0; i--) {
			for (int j = 0; j < i; j++) {
				if (array[j] > array[j + 1]) {
					int tmp = array[j];
					array[j] = array[j + 1];
					array[j + 1] = tmp;
				}
			}
		}
		return array;
	}
}
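A quick usage check for the sort above; the sample values are arbitrary, and the demo class is assumed to live alongside ru.job4j.array.BubbleSort.

import java.util.Arrays;

public class BubbleSortDemo {
    public static void main(String[] args) {
        // sort() works in place and also returns the same array.
        int[] data = {5, 1, 4, 2, 8};
        int[] sorted = new BubbleSort().sort(data);
        System.out.println(Arrays.toString(sorted)); // [1, 2, 4, 5, 8]
    }
}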
chapter_001/src/main/java/ru/job4j/array/BubbleSort.java
package ru.job4j.array; /** *Sorted an array through Bubble sort. Task 5.1. *@author Dmitriy Bobrov (mailto:[email protected]) *@since 0.1 */ public class BubbleSort { /** *Class BuubleSort sorted array. */ /** *Method sort implements Bubble sort. *@param array - input array. *@return array - sorted array. */ public int[] sort(int[] array) { for (int i = array.length - 1; i >= 0; i--) { for (int j = 0; j < i; j++) { if (array[j] > array[j + 1]) { int t = array[j]; array[j] = array[j + 1]; array[j + 1] = t; } } } return array; } }
Task 5.1 Bubble sort
chapter_001/src/main/java/ru/job4j/array/BubbleSort.java
Task 5.1 Bubble sort
Java
apache-2.0
d8613c2ee1d7232ddc83fe6da101b06e7fca484e
0
pwoodworth/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,ol-loginov/intellij-community,retomerz/intellij-community,orekyuu/intellij-community,kool79/intellij-community,petteyg/intellij-community,asedunov/intellij-community,nicolargo/intellij-community,amith01994/intellij-community,akosyakov/intellij-community,alphafoobar/intellij-community,MichaelNedzelsky/intellij-community,vladmm/intellij-community,holmes/intellij-community,mglukhikh/intellij-community,Distrotech/intellij-community,vvv1559/intellij-community,nicolargo/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,blademainer/intellij-community,samthor/intellij-community,caot/intellij-community,ahb0327/intellij-community,retomerz/intellij-community,retomerz/intellij-community,orekyuu/intellij-community,alphafoobar/intellij-community,Distrotech/intellij-community,ThiagoGarciaAlves/intellij-community,SerCeMan/intellij-community,caot/intellij-community,kool79/intellij-community,gnuhub/intellij-community,kdwink/intellij-community,kool79/intellij-community,diorcety/intellij-community,ivan-fedorov/intellij-community,izonder/intellij-community,da1z/intellij-community,supersven/intellij-community,blademainer/intellij-community,ibinti/intellij-community,retomerz/intellij-community,blademainer/intellij-community,wreckJ/intellij-community,muntasirsyed/intellij-community,jagguli/intellij-community,slisson/intellij-community,nicolargo/intellij-community,fengbaicanhe/intellij-community,fitermay/intellij-community,youdonghai/intellij-community,ahb0327/intellij-community,apixandru/intellij-community,jagguli/intellij-community,suncycheng/intellij-community,fnouama/intellij-community,ol-loginov/intellij-community,FHannes/intellij-community,ibinti/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,ivan-fedorov/intellij-community,MichaelNedzelsky/intellij-community,akosyakov/intellij-community,blademainer/intellij-community,lucafavatella/intellij-community,consulo/consulo,ftomassetti/intellij-community,robovm/robovm-studio,clumsy/intellij-community,fitermay/intellij-community,wreckJ/intellij-community,clumsy/intellij-community,fitermay/intellij-community,caot/intellij-community,youdonghai/intellij-community,akosyakov/intellij-community,retomerz/intellij-community,semonte/intellij-community,Lekanich/intellij-community,salguarnieri/intellij-community,pwoodworth/intellij-community,SerCeMan/intellij-community,diorcety/intellij-community,MER-GROUP/intellij-community,TangHao1987/intellij-community,Distrotech/intellij-community,alphafoobar/intellij-community,petteyg/intellij-community,pwoodworth/intellij-community,MER-GROUP/intellij-community,akosyakov/intellij-community,muntasirsyed/intellij-community,supersven/intellij-community,mglukhikh/intellij-community,kdwink/intellij-community,fnouama/intellij-community,supersven/intellij-community,dslomov/intellij-community,MER-GROUP/intellij-community,slisson/intellij-community,ryano144/intellij-community,pwoodworth/intellij-community,signed/intellij-community,ahb0327/intellij-community,apixandru/intellij-community,FHannes/intellij-community,salguarnieri/intellij-community,suncycheng/intellij-community,ryano144/intellij-community,idea4bsd/idea4bsd,suncycheng/intellij-community,asedunov/intellij-community,samthor/intellij-community,ThiagoGarciaAlves/intellij-community,fnouama/intellij-community,holmes/intellij-community,holmes/intellij-community,izonder/intellij-community,ThiagoGarciaAlves/intellij-community,salguarnieri/i
ntellij-community,supersven/intellij-community,consulo/consulo,ftomassetti/intellij-community,da1z/intellij-community,michaelgallacher/intellij-community,semonte/intellij-community,diorcety/intellij-community,salguarnieri/intellij-community,michaelgallacher/intellij-community,izonder/intellij-community,tmpgit/intellij-community,alphafoobar/intellij-community,xfournet/intellij-community,holmes/intellij-community,samthor/intellij-community,fnouama/intellij-community,allotria/intellij-community,vvv1559/intellij-community,orekyuu/intellij-community,tmpgit/intellij-community,kool79/intellij-community,adedayo/intellij-community,caot/intellij-community,ahb0327/intellij-community,lucafavatella/intellij-community,suncycheng/intellij-community,ivan-fedorov/intellij-community,slisson/intellij-community,semonte/intellij-community,idea4bsd/idea4bsd,kdwink/intellij-community,slisson/intellij-community,retomerz/intellij-community,SerCeMan/intellij-community,dslomov/intellij-community,FHannes/intellij-community,MichaelNedzelsky/intellij-community,ernestp/consulo,idea4bsd/idea4bsd,MER-GROUP/intellij-community,apixandru/intellij-community,fengbaicanhe/intellij-community,ftomassetti/intellij-community,jagguli/intellij-community,youdonghai/intellij-community,vladmm/intellij-community,kdwink/intellij-community,akosyakov/intellij-community,hurricup/intellij-community,retomerz/intellij-community,kdwink/intellij-community,xfournet/intellij-community,signed/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,supersven/intellij-community,tmpgit/intellij-community,salguarnieri/intellij-community,ahb0327/intellij-community,da1z/intellij-community,SerCeMan/intellij-community,vvv1559/intellij-community,TangHao1987/intellij-community,michaelgallacher/intellij-community,fnouama/intellij-community,michaelgallacher/intellij-community,izonder/intellij-community,petteyg/intellij-community,mglukhikh/intellij-community,amith01994/intellij-community,fitermay/intellij-community,xfournet/intellij-community,gnuhub/intellij-community,petteyg/intellij-community,youdonghai/intellij-community,jagguli/intellij-community,alphafoobar/intellij-community,idea4bsd/idea4bsd,gnuhub/intellij-community,alphafoobar/intellij-community,michaelgallacher/intellij-community,lucafavatella/intellij-community,ol-loginov/intellij-community,tmpgit/intellij-community,clumsy/intellij-community,caot/intellij-community,robovm/robovm-studio,apixandru/intellij-community,da1z/intellij-community,ernestp/consulo,idea4bsd/idea4bsd,hurricup/intellij-community,petteyg/intellij-community,kool79/intellij-community,Lekanich/intellij-community,ibinti/intellij-community,fitermay/intellij-community,diorcety/intellij-community,diorcety/intellij-community,orekyuu/intellij-community,lucafavatella/intellij-community,petteyg/intellij-community,signed/intellij-community,petteyg/intellij-community,dslomov/intellij-community,xfournet/intellij-community,idea4bsd/idea4bsd,ol-loginov/intellij-community,ftomassetti/intellij-community,izonder/intellij-community,retomerz/intellij-community,FHannes/intellij-community,semonte/intellij-community,amith01994/intellij-community,xfournet/intellij-community,fengbaicanhe/intellij-community,ThiagoGarciaAlves/intellij-community,orekyuu/intellij-community,clumsy/intellij-community,dslomov/intellij-community,fengbaicanhe/intellij-community,MichaelNedzelsky/intellij-community,amith01994/intellij-community,blademainer/intellij-community,suncycheng/intellij-community,kool79/intellij-community,kool79/intellij-community,apixandru/in
tellij-community,lucafavatella/intellij-community,apixandru/intellij-community,TangHao1987/intellij-community,lucafavatella/intellij-community,tmpgit/intellij-community,ahb0327/intellij-community,tmpgit/intellij-community,robovm/robovm-studio,izonder/intellij-community,adedayo/intellij-community,ahb0327/intellij-community,lucafavatella/intellij-community,ftomassetti/intellij-community,michaelgallacher/intellij-community,diorcety/intellij-community,amith01994/intellij-community,izonder/intellij-community,FHannes/intellij-community,xfournet/intellij-community,slisson/intellij-community,youdonghai/intellij-community,izonder/intellij-community,orekyuu/intellij-community,kool79/intellij-community,ivan-fedorov/intellij-community,da1z/intellij-community,izonder/intellij-community,wreckJ/intellij-community,blademainer/intellij-community,ThiagoGarciaAlves/intellij-community,gnuhub/intellij-community,SerCeMan/intellij-community,fitermay/intellij-community,mglukhikh/intellij-community,MichaelNedzelsky/intellij-community,retomerz/intellij-community,FHannes/intellij-community,consulo/consulo,salguarnieri/intellij-community,hurricup/intellij-community,TangHao1987/intellij-community,clumsy/intellij-community,orekyuu/intellij-community,lucafavatella/intellij-community,MER-GROUP/intellij-community,ivan-fedorov/intellij-community,nicolargo/intellij-community,holmes/intellij-community,robovm/robovm-studio,vvv1559/intellij-community,apixandru/intellij-community,dslomov/intellij-community,semonte/intellij-community,MichaelNedzelsky/intellij-community,idea4bsd/idea4bsd,petteyg/intellij-community,ftomassetti/intellij-community,youdonghai/intellij-community,dslomov/intellij-community,MER-GROUP/intellij-community,diorcety/intellij-community,idea4bsd/idea4bsd,blademainer/intellij-community,Lekanich/intellij-community,lucafavatella/intellij-community,adedayo/intellij-community,supersven/intellij-community,apixandru/intellij-community,amith01994/intellij-community,SerCeMan/intellij-community,clumsy/intellij-community,slisson/intellij-community,akosyakov/intellij-community,youdonghai/intellij-community,asedunov/intellij-community,hurricup/intellij-community,robovm/robovm-studio,consulo/consulo,wreckJ/intellij-community,clumsy/intellij-community,da1z/intellij-community,ryano144/intellij-community,youdonghai/intellij-community,dslomov/intellij-community,michaelgallacher/intellij-community,robovm/robovm-studio,jagguli/intellij-community,blademainer/intellij-community,SerCeMan/intellij-community,TangHao1987/intellij-community,ahb0327/intellij-community,ibinti/intellij-community,Distrotech/intellij-community,SerCeMan/intellij-community,ahb0327/intellij-community,petteyg/intellij-community,mglukhikh/intellij-community,idea4bsd/idea4bsd,akosyakov/intellij-community,izonder/intellij-community,samthor/intellij-community,clumsy/intellij-community,ryano144/intellij-community,kool79/intellij-community,ol-loginov/intellij-community,tmpgit/intellij-community,muntasirsyed/intellij-community,youdonghai/intellij-community,nicolargo/intellij-community,da1z/intellij-community,akosyakov/intellij-community,fnouama/intellij-community,salguarnieri/intellij-community,dslomov/intellij-community,ryano144/intellij-community,ryano144/intellij-community,orekyuu/intellij-community,fengbaicanhe/intellij-community,MichaelNedzelsky/intellij-community,nicolargo/intellij-community,fnouama/intellij-community,gnuhub/intellij-community,lucafavatella/intellij-community,ol-loginov/intellij-community,holmes/intellij-community,ftomassetti/intellij-community,al
lotria/intellij-community,adedayo/intellij-community,supersven/intellij-community,lucafavatella/intellij-community,TangHao1987/intellij-community,ol-loginov/intellij-community,adedayo/intellij-community,lucafavatella/intellij-community,blademainer/intellij-community,Lekanich/intellij-community,ernestp/consulo,diorcety/intellij-community,samthor/intellij-community,semonte/intellij-community,ivan-fedorov/intellij-community,robovm/robovm-studio,kdwink/intellij-community,supersven/intellij-community,MER-GROUP/intellij-community,caot/intellij-community,signed/intellij-community,kdwink/intellij-community,da1z/intellij-community,alphafoobar/intellij-community,dslomov/intellij-community,apixandru/intellij-community,fengbaicanhe/intellij-community,ThiagoGarciaAlves/intellij-community,diorcety/intellij-community,Distrotech/intellij-community,asedunov/intellij-community,tmpgit/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,suncycheng/intellij-community,ivan-fedorov/intellij-community,pwoodworth/intellij-community,ivan-fedorov/intellij-community,gnuhub/intellij-community,jagguli/intellij-community,jagguli/intellij-community,MER-GROUP/intellij-community,Lekanich/intellij-community,robovm/robovm-studio,MER-GROUP/intellij-community,slisson/intellij-community,slisson/intellij-community,allotria/intellij-community,blademainer/intellij-community,xfournet/intellij-community,samthor/intellij-community,ThiagoGarciaAlves/intellij-community,alphafoobar/intellij-community,clumsy/intellij-community,pwoodworth/intellij-community,mglukhikh/intellij-community,dslomov/intellij-community,amith01994/intellij-community,robovm/robovm-studio,ol-loginov/intellij-community,supersven/intellij-community,allotria/intellij-community,michaelgallacher/intellij-community,caot/intellij-community,Lekanich/intellij-community,ibinti/intellij-community,vladmm/intellij-community,Distrotech/intellij-community,tmpgit/intellij-community,MichaelNedzelsky/intellij-community,xfournet/intellij-community,caot/intellij-community,diorcety/intellij-community,salguarnieri/intellij-community,fnouama/intellij-community,youdonghai/intellij-community,MichaelNedzelsky/intellij-community,kool79/intellij-community,signed/intellij-community,Distrotech/intellij-community,allotria/intellij-community,xfournet/intellij-community,SerCeMan/intellij-community,tmpgit/intellij-community,da1z/intellij-community,holmes/intellij-community,ryano144/intellij-community,MER-GROUP/intellij-community,holmes/intellij-community,holmes/intellij-community,vladmm/intellij-community,ThiagoGarciaAlves/intellij-community,MER-GROUP/intellij-community,asedunov/intellij-community,retomerz/intellij-community,hurricup/intellij-community,samthor/intellij-community,hurricup/intellij-community,Lekanich/intellij-community,salguarnieri/intellij-community,asedunov/intellij-community,FHannes/intellij-community,samthor/intellij-community,amith01994/intellij-community,signed/intellij-community,fengbaicanhe/intellij-community,vvv1559/intellij-community,kdwink/intellij-community,fnouama/intellij-community,ahb0327/intellij-community,jagguli/intellij-community,semonte/intellij-community,fengbaicanhe/intellij-community,allotria/intellij-community,robovm/robovm-studio,wreckJ/intellij-community,ahb0327/intellij-community,slisson/intellij-community,supersven/intellij-community,fengbaicanhe/intellij-community,izonder/intellij-community,TangHao1987/intellij-community,tmpgit/intellij-community,nicolargo/intellij-community,dslomov/intellij-community,orekyuu/intellij-community,salgu
arnieri/intellij-community,robovm/robovm-studio,mglukhikh/intellij-community,izonder/intellij-community,adedayo/intellij-community,FHannes/intellij-community,ftomassetti/intellij-community,idea4bsd/idea4bsd,semonte/intellij-community,caot/intellij-community,ibinti/intellij-community,slisson/intellij-community,vvv1559/intellij-community,semonte/intellij-community,signed/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,signed/intellij-community,FHannes/intellij-community,xfournet/intellij-community,petteyg/intellij-community,muntasirsyed/intellij-community,vvv1559/intellij-community,ol-loginov/intellij-community,fitermay/intellij-community,samthor/intellij-community,pwoodworth/intellij-community,vladmm/intellij-community,michaelgallacher/intellij-community,nicolargo/intellij-community,amith01994/intellij-community,retomerz/intellij-community,fitermay/intellij-community,allotria/intellij-community,semonte/intellij-community,ibinti/intellij-community,michaelgallacher/intellij-community,ibinti/intellij-community,holmes/intellij-community,jagguli/intellij-community,ThiagoGarciaAlves/intellij-community,vladmm/intellij-community,youdonghai/intellij-community,ftomassetti/intellij-community,amith01994/intellij-community,da1z/intellij-community,michaelgallacher/intellij-community,xfournet/intellij-community,akosyakov/intellij-community,kool79/intellij-community,MichaelNedzelsky/intellij-community,ahb0327/intellij-community,ivan-fedorov/intellij-community,adedayo/intellij-community,petteyg/intellij-community,ryano144/intellij-community,ernestp/consulo,asedunov/intellij-community,pwoodworth/intellij-community,ryano144/intellij-community,fitermay/intellij-community,signed/intellij-community,wreckJ/intellij-community,ivan-fedorov/intellij-community,consulo/consulo,fengbaicanhe/intellij-community,ftomassetti/intellij-community,wreckJ/intellij-community,retomerz/intellij-community,ryano144/intellij-community,fnouama/intellij-community,vladmm/intellij-community,Lekanich/intellij-community,clumsy/intellij-community,FHannes/intellij-community,orekyuu/intellij-community,xfournet/intellij-community,ibinti/intellij-community,pwoodworth/intellij-community,muntasirsyed/intellij-community,allotria/intellij-community,Distrotech/intellij-community,ThiagoGarciaAlves/intellij-community,SerCeMan/intellij-community,vvv1559/intellij-community,ryano144/intellij-community,tmpgit/intellij-community,vladmm/intellij-community,Lekanich/intellij-community,allotria/intellij-community,MichaelNedzelsky/intellij-community,hurricup/intellij-community,Lekanich/intellij-community,jagguli/intellij-community,caot/intellij-community,asedunov/intellij-community,hurricup/intellij-community,fitermay/intellij-community,asedunov/intellij-community,pwoodworth/intellij-community,vladmm/intellij-community,kdwink/intellij-community,gnuhub/intellij-community,samthor/intellij-community,orekyuu/intellij-community,michaelgallacher/intellij-community,adedayo/intellij-community,mglukhikh/intellij-community,clumsy/intellij-community,da1z/intellij-community,gnuhub/intellij-community,Lekanich/intellij-community,asedunov/intellij-community,vladmm/intellij-community,muntasirsyed/intellij-community,TangHao1987/intellij-community,clumsy/intellij-community,fitermay/intellij-community,orekyuu/intellij-community,ivan-fedorov/intellij-community,MichaelNedzelsky/intellij-community,consulo/consulo,caot/intellij-community,idea4bsd/idea4bsd,amith01994/intellij-community,TangHao1987/intellij-community,fnou
ama/intellij-community,salguarnieri/intellij-community,adedayo/intellij-community,muntasirsyed/intellij-community,Distrotech/intellij-community,idea4bsd/idea4bsd,pwoodworth/intellij-community,signed/intellij-community,gnuhub/intellij-community,diorcety/intellij-community,fengbaicanhe/intellij-community,salguarnieri/intellij-community,wreckJ/intellij-community,Distrotech/intellij-community,suncycheng/intellij-community,blademainer/intellij-community,hurricup/intellij-community,mglukhikh/intellij-community,gnuhub/intellij-community,wreckJ/intellij-community,muntasirsyed/intellij-community,ibinti/intellij-community,wreckJ/intellij-community,FHannes/intellij-community,youdonghai/intellij-community,wreckJ/intellij-community,nicolargo/intellij-community,allotria/intellij-community,muntasirsyed/intellij-community,TangHao1987/intellij-community,petteyg/intellij-community,fitermay/intellij-community,youdonghai/intellij-community,vvv1559/intellij-community,ryano144/intellij-community,supersven/intellij-community,ernestp/consulo,allotria/intellij-community,signed/intellij-community,ol-loginov/intellij-community,fitermay/intellij-community,suncycheng/intellij-community,Lekanich/intellij-community,gnuhub/intellij-community,apixandru/intellij-community,SerCeMan/intellij-community,hurricup/intellij-community,mglukhikh/intellij-community,hurricup/intellij-community,signed/intellij-community,retomerz/intellij-community,ftomassetti/intellij-community,TangHao1987/intellij-community,ivan-fedorov/intellij-community,fengbaicanhe/intellij-community,apixandru/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,robovm/robovm-studio,jagguli/intellij-community,xfournet/intellij-community,Distrotech/intellij-community,MER-GROUP/intellij-community,idea4bsd/idea4bsd,samthor/intellij-community,mglukhikh/intellij-community,ThiagoGarciaAlves/intellij-community,ftomassetti/intellij-community,slisson/intellij-community,dslomov/intellij-community,ol-loginov/intellij-community,Distrotech/intellij-community,diorcety/intellij-community,fnouama/intellij-community,kdwink/intellij-community,adedayo/intellij-community,signed/intellij-community,wreckJ/intellij-community,alphafoobar/intellij-community,SerCeMan/intellij-community,vladmm/intellij-community,suncycheng/intellij-community,kdwink/intellij-community,alphafoobar/intellij-community,semonte/intellij-community,samthor/intellij-community,allotria/intellij-community,apixandru/intellij-community,kdwink/intellij-community,TangHao1987/intellij-community,asedunov/intellij-community,amith01994/intellij-community,ernestp/consulo,caot/intellij-community,da1z/intellij-community,nicolargo/intellij-community,mglukhikh/intellij-community,ol-loginov/intellij-community,akosyakov/intellij-community,akosyakov/intellij-community,alphafoobar/intellij-community,jagguli/intellij-community,vladmm/intellij-community,asedunov/intellij-community,muntasirsyed/intellij-community,gnuhub/intellij-community,semonte/intellij-community,blademainer/intellij-community,muntasirsyed/intellij-community,holmes/intellij-community,hurricup/intellij-community,hurricup/intellij-community,akosyakov/intellij-community,slisson/intellij-community,holmes/intellij-community,adedayo/intellij-community,nicolargo/intellij-community,semonte/intellij-community,nicolargo/intellij-community,alphafoobar/intellij-community,kool79/intellij-community,pwoodworth/intellij-community,vvv1559/intellij-community,supersven/intellij-community,apixandru/intellij-community,lucafavatella/intellij-community,FHannes/intellij-co
mmunity,muntasirsyed/intellij-community,adedayo/intellij-community
import java.util.ArrayList; import java.util.List; class Test { List<String> queue = new ArrayList<>(); ArrayList l = new ArrayList<>(8); } class HMCopy<K, V> { private Entry[] table; class Entry<K, V> { Entry(int h, K k, V v, Entry<K, V> n) { } } void addEntry(int hash, K key, V value, int bucketIndex) { Entry<K, V> e = table[bucketIndex]; table[bucketIndex] = new Entry<>(hash, key, value, e); } } class DD { P1<P<String>> l = new L<String>() { @Override void f() { } }; P1<P<String>> l1 = new L<>(); P1<P<String>> foo() { return new L<>(); } String s = ""; } class L<K> extends P1<P<K>> { void f() { } } class P1<P1T> extends P<P1T> { } class P<PT> { } class Test1 { void bar() { foo<error descr="'foo(F<F<java.lang.String>>)' in 'Test1' cannot be applied to '(FF<java.lang.Object>)'">(new FF<>())</error>; } void foo(F<F<String>> p) {} } class FF<X> extends F<X>{} class F<T> {} class MyTest { static class Foo<X> { Foo(X x) {} } static interface Base<Y> {} static class A extends Exception implements Base<String> {} static class B extends Exception implements Base<Integer> {} void m() throws B { try { if (true) { throw new A(); } else { throw new B(); } } catch (A ex) { Foo<? extends Base<String>> foo1 = new Foo<>(ex); // ok <error descr="Incompatible types. Found: 'MyTest.Foo<MyTest.A>', required: 'MyTest.Foo<MyTest.Base<java.lang.String>>'">Foo<Base<String>> foo2 = new Foo<>(ex);</error> // should be error } } } class NonParameterized { void foo() { new NonParameterized<<error descr="Diamond operator is not applicable for non-parameterized types"></error>>(); } } interface I<T> { T m(); } class FI1 { I<? extends String> i1 = new I<<error descr="Cannot use ''<>'' with anonymous inner classes"></error>>() { @Override public String m() { return null; } }; I<?> i2 = new I<<error descr="Cannot use ''<>'' with anonymous inner classes"></error>>() { @Override public Object m() { return null; } }; } class Super<X,Y> { private Super(Integer i, Y y, X x) {} public Super(Number n, X x, Y y) {} } class TestMySuper { Super<String,Integer> ssi1 = new Super<>(1, "", 2); } class TestLocal<X> { class Member { } static class Nested {} void test() { class Local {} Member m = new Member<<error descr="Diamond operator is not applicable for non-parameterized types"></error>>(); Nested n = new Nested<<error descr="Diamond operator is not applicable for non-parameterized types"></error>>(); Local l = new Local<<error descr="Diamond operator is not applicable for non-parameterized types"></error>>(); } } class QualifiedTest { java.util.Map<String, String> s = new java.util.HashMap<>(); } class TZ { } class ParenthTest<T extends TZ> { public ParenthTest(T x) { } public T z = null; public int a() { ParenthTest<T> x = (new ParenthTest<>(null)); //red code is here return 1; } }
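The "Cannot use '<>' with anonymous inner classes" errors asserted above encode the Java 7/8 rule this highlighting targets; since Java 9 (JEP 213), diamond with an anonymous class does compile when the inferred type argument is denotable. A minimal sketch of the now-legal form, using a stand-in interface (Producer) rather than the fixture's I:

// Compiles under Java 9+ (JEP 213); under Java 7/8 this is exactly the error the test asserts.
interface Producer<T> { T get(); }

class DiamondAnonymousJava9 {
    Producer<String> p = new Producer<>() { // type argument String is inferred
        @Override
        public String get() { return "ok"; }
    };
}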
java/java-tests/testData/codeInsight/daemonCodeAnalyzer/advHighlighting7/DiamondMisc.java
import java.util.ArrayList; import java.util.List; class Test { List<String> queue = new ArrayList<>(); ArrayList l = new ArrayList<>(8); } class HMCopy<K, V> { private Entry[] table; class Entry<K, V> { Entry(int h, K k, V v, Entry<K, V> n) { } } void addEntry(int hash, K key, V value, int bucketIndex) { Entry<K, V> e = table[bucketIndex]; table[bucketIndex] = new Entry<>(hash, key, value, e); } } class DD { P1<P<String>> l = new L<String>() { @Override void f() { } }; P1<P<String>> l1 = new L<>(); P1<P<String>> foo() { return new L<>(); } String s = ""; } class L<K> extends P1<P<K>> { void f() { } } class P1<P1T> extends P<P1T> { } class P<PT> { } class Test1 { void bar() { foo<error descr="'foo(F<F<java.lang.String>>)' in 'Test1' cannot be applied to '(FF<java.lang.Object>)'">(new FF<>())</error>; } void foo(F<F<String>> p) {} } class FF<X> extends F<X>{} class F<T> {} class MyTest { static class Foo<X> { Foo(X x) {} } static interface Base<Y> {} static class A extends Exception implements Base<String> {} static class B extends Exception implements Base<Integer> {} void m() throws B { try { if (true) { throw new A(); } else { throw new B(); } } catch (A ex) { Foo<? extends Base<String>> foo1 = new Foo<>(ex); // ok <error descr="Incompatible types. Found: 'MyTest.Foo<MyTest.A>', required: 'MyTest.Foo<MyTest.Base<java.lang.String>>'">Foo<Base<String>> foo2 = new Foo<>(ex);</error> // should be error } } } class NonParameterized { void foo() { new NonParameterized<<error descr="Diamond operator is not applicable for non-parameterized types"></error>>(); } } interface I<T> { T m(); } class FI1 { I<? extends String> i1 = new I<>() { @Override public String m() { return null; } }; I<?> i2 = new I<>() { @Override public Object m() { return null; } }; } class Super<X,Y> { private Super(Integer i, Y y, X x) {} public Super(Number n, X x, Y y) {} } class TestMySuper { Super<String,Integer> ssi1 = new Super<>(1, "", 2); } class TestLocal<X> { class Member { } static class Nested {} void test() { class Local {} Member m = new Member<<error descr="Diamond operator is not applicable for non-parameterized types"></error>>(); Nested n = new Nested<<error descr="Diamond operator is not applicable for non-parameterized types"></error>>(); Local l = new Local<<error descr="Diamond operator is not applicable for non-parameterized types"></error>>(); } } class QualifiedTest { java.util.Map<String, String> s = new java.util.HashMap<>(); } class TZ { } class ParenthTest<T extends TZ> { public ParenthTest(T x) { } public T z = null; public int a() { ParenthTest<T> x = (new ParenthTest<>(null)); //red code is here return 1; } }
diamonds: no diamonds with anonymous classes (IDEA-72391)
java/java-tests/testData/codeInsight/daemonCodeAnalyzer/advHighlighting7/DiamondMisc.java
diamonds: no diamonds with anonymous classes (IDEA-72391)
Java
apache-2.0
dc79013c7530464867192a4ecb04a3062b95266b
0
muntasirsyed/intellij-community,lucafavatella/intellij-community,FHannes/intellij-community,nicolargo/intellij-community,jagguli/intellij-community,fengbaicanhe/intellij-community,apixandru/intellij-community,TangHao1987/intellij-community,asedunov/intellij-community,retomerz/intellij-community,consulo/consulo,slisson/intellij-community,retomerz/intellij-community,petteyg/intellij-community,salguarnieri/intellij-community,ol-loginov/intellij-community,MER-GROUP/intellij-community,youdonghai/intellij-community,dslomov/intellij-community,allotria/intellij-community,MER-GROUP/intellij-community,asedunov/intellij-community,michaelgallacher/intellij-community,fnouama/intellij-community,semonte/intellij-community,tmpgit/intellij-community,petteyg/intellij-community,fengbaicanhe/intellij-community,ryano144/intellij-community,vladmm/intellij-community,signed/intellij-community,blademainer/intellij-community,signed/intellij-community,SerCeMan/intellij-community,MER-GROUP/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,consulo/consulo,Lekanich/intellij-community,ryano144/intellij-community,joewalnes/idea-community,orekyuu/intellij-community,MichaelNedzelsky/intellij-community,ahb0327/intellij-community,nicolargo/intellij-community,supersven/intellij-community,adedayo/intellij-community,caot/intellij-community,salguarnieri/intellij-community,tmpgit/intellij-community,alphafoobar/intellij-community,xfournet/intellij-community,lucafavatella/intellij-community,blademainer/intellij-community,hurricup/intellij-community,blademainer/intellij-community,samthor/intellij-community,akosyakov/intellij-community,wreckJ/intellij-community,fnouama/intellij-community,xfournet/intellij-community,ftomassetti/intellij-community,TangHao1987/intellij-community,asedunov/intellij-community,gnuhub/intellij-community,caot/intellij-community,pwoodworth/intellij-community,ivan-fedorov/intellij-community,SerCeMan/intellij-community,kdwink/intellij-community,retomerz/intellij-community,kool79/intellij-community,caot/intellij-community,akosyakov/intellij-community,fengbaicanhe/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,retomerz/intellij-community,kdwink/intellij-community,ibinti/intellij-community,fitermay/intellij-community,orekyuu/intellij-community,nicolargo/intellij-community,petteyg/intellij-community,MER-GROUP/intellij-community,izonder/intellij-community,dslomov/intellij-community,youdonghai/intellij-community,ahb0327/intellij-community,slisson/intellij-community,signed/intellij-community,slisson/intellij-community,SerCeMan/intellij-community,orekyuu/intellij-community,supersven/intellij-community,akosyakov/intellij-community,lucafavatella/intellij-community,gnuhub/intellij-community,youdonghai/intellij-community,lucafavatella/intellij-community,ol-loginov/intellij-community,Distrotech/intellij-community,ibinti/intellij-community,fitermay/intellij-community,petteyg/intellij-community,hurricup/intellij-community,alphafoobar/intellij-community,Distrotech/intellij-community,consulo/consulo,kool79/intellij-community,clumsy/intellij-community,kool79/intellij-community,holmes/intellij-community,clumsy/intellij-community,diorcety/intellij-community,joewalnes/idea-community,ftomassetti/intellij-community,jagguli/intellij-community,dslomov/intellij-community,FHannes/intellij-community,ernestp/consulo,ernestp/consulo,SerCeMan/intellij-community,izonder/intellij-community,jagguli/intellij-community,da1z/intellij-community,orekyuu/intellij-community,ftomassetti/intellij-community,Lekanich
/intellij-community,gnuhub/intellij-community,samthor/intellij-community,wreckJ/intellij-community,ahb0327/intellij-community,gnuhub/intellij-community,MichaelNedzelsky/intellij-community,blademainer/intellij-community,retomerz/intellij-community,ryano144/intellij-community,youdonghai/intellij-community,vladmm/intellij-community,fnouama/intellij-community,caot/intellij-community,Distrotech/intellij-community,kdwink/intellij-community,izonder/intellij-community,consulo/consulo,clumsy/intellij-community,nicolargo/intellij-community,izonder/intellij-community,MER-GROUP/intellij-community,dslomov/intellij-community,Lekanich/intellij-community,fitermay/intellij-community,salguarnieri/intellij-community,kdwink/intellij-community,allotria/intellij-community,izonder/intellij-community,ahb0327/intellij-community,mglukhikh/intellij-community,supersven/intellij-community,supersven/intellij-community,dslomov/intellij-community,Lekanich/intellij-community,xfournet/intellij-community,izonder/intellij-community,caot/intellij-community,orekyuu/intellij-community,pwoodworth/intellij-community,salguarnieri/intellij-community,hurricup/intellij-community,tmpgit/intellij-community,FHannes/intellij-community,MichaelNedzelsky/intellij-community,muntasirsyed/intellij-community,supersven/intellij-community,semonte/intellij-community,ibinti/intellij-community,michaelgallacher/intellij-community,asedunov/intellij-community,idea4bsd/idea4bsd,vladmm/intellij-community,signed/intellij-community,allotria/intellij-community,allotria/intellij-community,pwoodworth/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,wreckJ/intellij-community,adedayo/intellij-community,ibinti/intellij-community,robovm/robovm-studio,ThiagoGarciaAlves/intellij-community,orekyuu/intellij-community,vvv1559/intellij-community,tmpgit/intellij-community,akosyakov/intellij-community,dslomov/intellij-community,blademainer/intellij-community,nicolargo/intellij-community,fitermay/intellij-community,vladmm/intellij-community,ryano144/intellij-community,izonder/intellij-community,ivan-fedorov/intellij-community,ivan-fedorov/intellij-community,adedayo/intellij-community,Lekanich/intellij-community,diorcety/intellij-community,petteyg/intellij-community,signed/intellij-community,apixandru/intellij-community,kool79/intellij-community,kool79/intellij-community,fnouama/intellij-community,supersven/intellij-community,ivan-fedorov/intellij-community,retomerz/intellij-community,kdwink/intellij-community,tmpgit/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,signed/intellij-community,MER-GROUP/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,asedunov/intellij-community,akosyakov/intellij-community,gnuhub/intellij-community,FHannes/intellij-community,FHannes/intellij-community,ol-loginov/intellij-community,fitermay/intellij-community,orekyuu/intellij-community,xfournet/intellij-community,amith01994/intellij-community,ahb0327/intellij-community,pwoodworth/intellij-community,tmpgit/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,TangHao1987/intellij-community,suncycheng/intellij-community,clumsy/intellij-community,da1z/intellij-community,apixandru/intellij-community,amith01994/intellij-community,akosyakov/intellij-community,holmes/intellij-community,robovm/robovm-studio,FHannes/intellij-community,apixandru/intellij-community,vladmm/intellij-community,pwoodworth/intellij-community,michaelgallacher/intellij-community,alphafoobar/intellij-community,pwoodworth/intellij-community,semon
te/intellij-community,izonder/intellij-community,mglukhikh/intellij-community,blademainer/intellij-community,apixandru/intellij-community,retomerz/intellij-community,adedayo/intellij-community,ivan-fedorov/intellij-community,orekyuu/intellij-community,petteyg/intellij-community,salguarnieri/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,supersven/intellij-community,retomerz/intellij-community,izonder/intellij-community,idea4bsd/idea4bsd,holmes/intellij-community,ftomassetti/intellij-community,supersven/intellij-community,TangHao1987/intellij-community,Distrotech/intellij-community,muntasirsyed/intellij-community,MichaelNedzelsky/intellij-community,FHannes/intellij-community,dslomov/intellij-community,gnuhub/intellij-community,allotria/intellij-community,salguarnieri/intellij-community,youdonghai/intellij-community,wreckJ/intellij-community,fitermay/intellij-community,MichaelNedzelsky/intellij-community,alphafoobar/intellij-community,diorcety/intellij-community,Distrotech/intellij-community,idea4bsd/idea4bsd,ftomassetti/intellij-community,Distrotech/intellij-community,ryano144/intellij-community,mglukhikh/intellij-community,MichaelNedzelsky/intellij-community,vladmm/intellij-community,pwoodworth/intellij-community,Lekanich/intellij-community,lucafavatella/intellij-community,adedayo/intellij-community,youdonghai/intellij-community,pwoodworth/intellij-community,ivan-fedorov/intellij-community,adedayo/intellij-community,jexp/idea2,jexp/idea2,signed/intellij-community,ivan-fedorov/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,pwoodworth/intellij-community,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,petteyg/intellij-community,amith01994/intellij-community,allotria/intellij-community,ibinti/intellij-community,diorcety/intellij-community,TangHao1987/intellij-community,FHannes/intellij-community,holmes/intellij-community,caot/intellij-community,alphafoobar/intellij-community,slisson/intellij-community,alphafoobar/intellij-community,suncycheng/intellij-community,joewalnes/idea-community,wreckJ/intellij-community,youdonghai/intellij-community,semonte/intellij-community,lucafavatella/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,kool79/intellij-community,pwoodworth/intellij-community,allotria/intellij-community,allotria/intellij-community,fnouama/intellij-community,mglukhikh/intellij-community,vladmm/intellij-community,robovm/robovm-studio,suncycheng/intellij-community,slisson/intellij-community,ernestp/consulo,da1z/intellij-community,hurricup/intellij-community,hurricup/intellij-community,youdonghai/intellij-community,adedayo/intellij-community,caot/intellij-community,Distrotech/intellij-community,salguarnieri/intellij-community,clumsy/intellij-community,wreckJ/intellij-community,semonte/intellij-community,jagguli/intellij-community,holmes/intellij-community,suncycheng/intellij-community,fengbaicanhe/intellij-community,jagguli/intellij-community,pwoodworth/intellij-community,youdonghai/intellij-community,mglukhikh/intellij-community,MER-GROUP/intellij-community,tmpgit/intellij-community,semonte/intellij-community,signed/intellij-community,caot/intellij-community,asedunov/intellij-community,da1z/intellij-community,da1z/intellij-community,hurricup/intellij-community,fitermay/intellij-community,SerCeMan/intellij-community,izonder/intellij-community,fengbaicanhe/intellij-community,Lekanich/intellij-community,joewalnes/idea-community,vvv1559/intellij-community,apixandru/intellij-community,ol-loginov/intellij-community
,ftomassetti/intellij-community,ftomassetti/intellij-community,slisson/intellij-community,ernestp/consulo,Lekanich/intellij-community,semonte/intellij-community,slisson/intellij-community,samthor/intellij-community,holmes/intellij-community,ol-loginov/intellij-community,ibinti/intellij-community,jexp/idea2,muntasirsyed/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,ftomassetti/intellij-community,ftomassetti/intellij-community,akosyakov/intellij-community,vvv1559/intellij-community,muntasirsyed/intellij-community,MichaelNedzelsky/intellij-community,nicolargo/intellij-community,ahb0327/intellij-community,samthor/intellij-community,amith01994/intellij-community,orekyuu/intellij-community,blademainer/intellij-community,petteyg/intellij-community,alphafoobar/intellij-community,ahb0327/intellij-community,ahb0327/intellij-community,FHannes/intellij-community,signed/intellij-community,lucafavatella/intellij-community,youdonghai/intellij-community,xfournet/intellij-community,ryano144/intellij-community,ahb0327/intellij-community,jagguli/intellij-community,fengbaicanhe/intellij-community,fitermay/intellij-community,ernestp/consulo,wreckJ/intellij-community,holmes/intellij-community,suncycheng/intellij-community,kdwink/intellij-community,caot/intellij-community,joewalnes/idea-community,ryano144/intellij-community,suncycheng/intellij-community,tmpgit/intellij-community,clumsy/intellij-community,fnouama/intellij-community,lucafavatella/intellij-community,salguarnieri/intellij-community,muntasirsyed/intellij-community,amith01994/intellij-community,ahb0327/intellij-community,asedunov/intellij-community,clumsy/intellij-community,blademainer/intellij-community,jagguli/intellij-community,idea4bsd/idea4bsd,suncycheng/intellij-community,ibinti/intellij-community,robovm/robovm-studio,xfournet/intellij-community,michaelgallacher/intellij-community,vvv1559/intellij-community,salguarnieri/intellij-community,asedunov/intellij-community,michaelgallacher/intellij-community,blademainer/intellij-community,jagguli/intellij-community,vladmm/intellij-community,izonder/intellij-community,petteyg/intellij-community,blademainer/intellij-community,ibinti/intellij-community,diorcety/intellij-community,fitermay/intellij-community,dslomov/intellij-community,fnouama/intellij-community,kool79/intellij-community,holmes/intellij-community,akosyakov/intellij-community,samthor/intellij-community,samthor/intellij-community,akosyakov/intellij-community,Distrotech/intellij-community,asedunov/intellij-community,supersven/intellij-community,robovm/robovm-studio,xfournet/intellij-community,michaelgallacher/intellij-community,joewalnes/idea-community,pwoodworth/intellij-community,fengbaicanhe/intellij-community,vladmm/intellij-community,allotria/intellij-community,diorcety/intellij-community,gnuhub/intellij-community,dslomov/intellij-community,youdonghai/intellij-community,ivan-fedorov/intellij-community,da1z/intellij-community,amith01994/intellij-community,michaelgallacher/intellij-community,nicolargo/intellij-community,retomerz/intellij-community,petteyg/intellij-community,gnuhub/intellij-community,robovm/robovm-studio,suncycheng/intellij-community,ol-loginov/intellij-community,mglukhikh/intellij-community,orekyuu/intellij-community,clumsy/intellij-community,SerCeMan/intellij-community,fitermay/intellij-community,muntasirsyed/intellij-community,wreckJ/intellij-community,michaelgallacher/intellij-community,ahb0327/intellij-community,clumsy/intellij-community,allotria/intellij-community,kool79/intellij-community,Distrotech
/intellij-community,fnouama/intellij-community,diorcety/intellij-community,TangHao1987/intellij-community,amith01994/intellij-community,vvv1559/intellij-community,SerCeMan/intellij-community,da1z/intellij-community,hurricup/intellij-community,diorcety/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,nicolargo/intellij-community,nicolargo/intellij-community,consulo/consulo,idea4bsd/idea4bsd,SerCeMan/intellij-community,retomerz/intellij-community,MichaelNedzelsky/intellij-community,holmes/intellij-community,kdwink/intellij-community,idea4bsd/idea4bsd,michaelgallacher/intellij-community,lucafavatella/intellij-community,fitermay/intellij-community,vvv1559/intellij-community,amith01994/intellij-community,supersven/intellij-community,samthor/intellij-community,robovm/robovm-studio,signed/intellij-community,TangHao1987/intellij-community,michaelgallacher/intellij-community,amith01994/intellij-community,ftomassetti/intellij-community,Distrotech/intellij-community,muntasirsyed/intellij-community,supersven/intellij-community,ThiagoGarciaAlves/intellij-community,diorcety/intellij-community,robovm/robovm-studio,adedayo/intellij-community,MER-GROUP/intellij-community,xfournet/intellij-community,samthor/intellij-community,vladmm/intellij-community,muntasirsyed/intellij-community,kool79/intellij-community,hurricup/intellij-community,TangHao1987/intellij-community,orekyuu/intellij-community,FHannes/intellij-community,hurricup/intellij-community,alphafoobar/intellij-community,tmpgit/intellij-community,signed/intellij-community,adedayo/intellij-community,ThiagoGarciaAlves/intellij-community,akosyakov/intellij-community,FHannes/intellij-community,ryano144/intellij-community,MER-GROUP/intellij-community,slisson/intellij-community,ahb0327/intellij-community,lucafavatella/intellij-community,caot/intellij-community,fengbaicanhe/intellij-community,orekyuu/intellij-community,retomerz/intellij-community,lucafavatella/intellij-community,lucafavatella/intellij-community,dslomov/intellij-community,youdonghai/intellij-community,fitermay/intellij-community,xfournet/intellij-community,jexp/idea2,kool79/intellij-community,slisson/intellij-community,samthor/intellij-community,clumsy/intellij-community,semonte/intellij-community,allotria/intellij-community,FHannes/intellij-community,robovm/robovm-studio,ol-loginov/intellij-community,muntasirsyed/intellij-community,ernestp/consulo,gnuhub/intellij-community,asedunov/intellij-community,lucafavatella/intellij-community,kdwink/intellij-community,jagguli/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,nicolargo/intellij-community,tmpgit/intellij-community,da1z/intellij-community,hurricup/intellij-community,ivan-fedorov/intellij-community,alphafoobar/intellij-community,petteyg/intellij-community,semonte/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,kdwink/intellij-community,fengbaicanhe/intellij-community,fnouama/intellij-community,wreckJ/intellij-community,fnouama/intellij-community,salguarnieri/intellij-community,ThiagoGarciaAlves/intellij-community,amith01994/intellij-community,xfournet/intellij-community,ftomassetti/intellij-community,caot/intellij-community,akosyakov/intellij-community,mglukhikh/intellij-community,fitermay/intellij-community,jexp/idea2,ThiagoGarciaAlves/intellij-community,tmpgit/intellij-community,fengbaicanhe/intellij-community,slisson/intellij-community,apixandru/intellij-community,fnouama/intell
ij-community,youdonghai/intellij-community,TangHao1987/intellij-community,hurricup/intellij-community,MER-GROUP/intellij-community,MichaelNedzelsky/intellij-community,hurricup/intellij-community,MichaelNedzelsky/intellij-community,MichaelNedzelsky/intellij-community,diorcety/intellij-community,robovm/robovm-studio,michaelgallacher/intellij-community,semonte/intellij-community,izonder/intellij-community,robovm/robovm-studio,alphafoobar/intellij-community,da1z/intellij-community,semonte/intellij-community,gnuhub/intellij-community,Distrotech/intellij-community,jagguli/intellij-community,holmes/intellij-community,tmpgit/intellij-community,ftomassetti/intellij-community,adedayo/intellij-community,asedunov/intellij-community,petteyg/intellij-community,diorcety/intellij-community,joewalnes/idea-community,hurricup/intellij-community,kdwink/intellij-community,fnouama/intellij-community,Lekanich/intellij-community,signed/intellij-community,ol-loginov/intellij-community,supersven/intellij-community,samthor/intellij-community,xfournet/intellij-community,SerCeMan/intellij-community,ibinti/intellij-community,da1z/intellij-community,joewalnes/idea-community,amith01994/intellij-community,Distrotech/intellij-community,kdwink/intellij-community,joewalnes/idea-community,ol-loginov/intellij-community,ThiagoGarciaAlves/intellij-community,kool79/intellij-community,jexp/idea2,holmes/intellij-community,ol-loginov/intellij-community,amith01994/intellij-community,SerCeMan/intellij-community,kool79/intellij-community,samthor/intellij-community,MER-GROUP/intellij-community,ThiagoGarciaAlves/intellij-community,jagguli/intellij-community,retomerz/intellij-community,ryano144/intellij-community,MER-GROUP/intellij-community,apixandru/intellij-community,muntasirsyed/intellij-community,fengbaicanhe/intellij-community,michaelgallacher/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,MichaelNedzelsky/intellij-community,SerCeMan/intellij-community,da1z/intellij-community,ryano144/intellij-community,blademainer/intellij-community,kdwink/intellij-community,jexp/idea2,dslomov/intellij-community,idea4bsd/idea4bsd,muntasirsyed/intellij-community,caot/intellij-community,SerCeMan/intellij-community,akosyakov/intellij-community,allotria/intellij-community,fengbaicanhe/intellij-community,jexp/idea2,vladmm/intellij-community,wreckJ/intellij-community,dslomov/intellij-community,salguarnieri/intellij-community,consulo/consulo,ivan-fedorov/intellij-community,gnuhub/intellij-community,samthor/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,nicolargo/intellij-community,apixandru/intellij-community,signed/intellij-community,slisson/intellij-community,ryano144/intellij-community,adedayo/intellij-community,clumsy/intellij-community,TangHao1987/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,semonte/intellij-community,robovm/robovm-studio,wreckJ/intellij-community,adedayo/intellij-community,mglukhikh/intellij-community,ryano144/intellij-community,nicolargo/intellij-community,holmes/intellij-community,alphafoobar/intellij-community,Lekanich/intellij-community,slisson/intellij-community,alphafoobar/intellij-community,xfournet/intellij-community,semonte/intellij-community,suncycheng/intellij-community,blademainer/intellij-community,gnuhub/intellij-community,diorcety/intellij-community,ivan-fedorov/intellij-community,retomerz/intellij-community,TangHao1987/intellij-community,ol-loginov/intellij-community,vladmm/intellij-community,clumsy/intellij-community,jagguli/intellij-
community,salguarnieri/intellij-community,apixandru/intellij-community,ivan-fedorov/intellij-community,ol-loginov/intellij-community,TangHao1987/intellij-community,idea4bsd/idea4bsd,allotria/intellij-community,suncycheng/intellij-community,Lekanich/intellij-community,apixandru/intellij-community,wreckJ/intellij-community,Lekanich/intellij-community,mglukhikh/intellij-community
/* * Copyright (c) 2000-2007 JetBrains s.r.o. All Rights Reserved. */ package com.intellij.compiler.impl.packagingCompiler; import com.intellij.openapi.compiler.CompileContext; import com.intellij.openapi.compiler.CompilerBundle; import com.intellij.openapi.compiler.make.BuildConfiguration; import com.intellij.openapi.compiler.make.BuildParticipant; import com.intellij.openapi.compiler.make.BuildParticipantProvider; import com.intellij.openapi.deployment.DeploymentUtilImpl; import com.intellij.openapi.module.Module; import com.intellij.openapi.module.ModuleManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.io.FileUtil; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import java.io.File; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Set; /** * @author nik */ public class IncrementalPackagingCompiler extends PackagingCompilerBase { public static final Key<Set<BuildParticipant>> AFFECTED_PARTICIPANTS_KEY = Key.create("AFFECTED_PARTICIPANTS"); private static final Key<List<String>> FILES_TO_DELETE_KEY = Key.create("files_to_delete"); private static final Key<OldProcessingItemsBuilderContext> BUILDER_CONTEXT_KEY = Key.create("processing_items_builder"); @NonNls private static final String INCREMENTAL_PACKAGING_CACHE_ID = "incremental_packaging"; public IncrementalPackagingCompiler() { super(FILES_TO_DELETE_KEY, BUILDER_CONTEXT_KEY); } @Override protected PackagingProcessingItem[] collectItems(OldProcessingItemsBuilderContext builderContext, final Project project) { Module[] allModules = ModuleManager.getInstance(project).getSortedModules(); final BuildParticipantProvider<?>[] providers = DeploymentUtilImpl.getBuildParticipantProviders(); for (BuildParticipantProvider<?> provider : providers) { addItemsForProvider(provider, allModules, builderContext); } return builderContext.getProcessingItems(); } private static <P extends BuildParticipant> void addItemsForProvider(final BuildParticipantProvider<P> provider, final Module[] modulesToCompile, OldProcessingItemsBuilderContext builderContext) { for (Module module : modulesToCompile) { final Collection<P> participants = provider.getParticipants(module); for (P participant : participants) { addItemsForParticipant(participant, builderContext); } } } private static void addItemsForParticipant(final BuildParticipant participant, OldProcessingItemsBuilderContext builderContext) { participant.buildStarted(builderContext.getCompileContext()); new ProcessingItemsBuilder(participant, builderContext).build(); } @NotNull public String getDescription() { return CompilerBundle.message("incremental.packaging.compiler.description"); } protected String getOutputCacheId() { return INCREMENTAL_PACKAGING_CACHE_ID; } protected void onBuildFinished(OldProcessingItemsBuilderContext builderContext, JarsBuilder builder, final Project project) throws Exception { final Set<BuildParticipant> affectedParticipants = getAffectedParticipants(builderContext.getCompileContext()); for (ExplodedDestinationInfo destination : builder.getJarsDestinations()) { affectedParticipants.add(builderContext.getDestinationOwner(destination)); } CompileContext context = builderContext.getCompileContext(); for (BuildParticipant participant : affectedParticipants) { BuildConfiguration buildConfiguration = participant.getBuildConfiguration(); if (participant.willBuildExploded()) { participant.afterExplodedCreated(new 
File(FileUtil.toSystemDependentName(DeploymentUtilImpl.getOrCreateExplodedDir(participant))), context); } String jarPath = buildConfiguration.getJarPath(); if (buildConfiguration.isJarEnabled() && jarPath != null) { participant.afterJarCreated(new File(FileUtil.toSystemDependentName(jarPath)), context); } participant.buildFinished(context); } } public static Set<BuildParticipant> getAffectedParticipants(CompileContext context) { return context.getUserData(AFFECTED_PARTICIPANTS_KEY); } @Override protected void beforeBuildStarted(OldProcessingItemsBuilderContext context) { context.getCompileContext().putUserData(AFFECTED_PARTICIPANTS_KEY, new HashSet<BuildParticipant>()); } protected OldProcessingItemsBuilderContext createContext(CompileContext context) { return new OldProcessingItemsBuilderContext(context); } @Override protected void onFileCopied(OldProcessingItemsBuilderContext builderContext, ExplodedDestinationInfo explodedDestination) { getAffectedParticipants(builderContext.getCompileContext()).add(builderContext.getDestinationOwner(explodedDestination)); } protected boolean doNotStartBuild(CompileContext context) { Module[] affectedModules = context.getCompileScope().getAffectedModules(); return affectedModules.length == 0; } }
java/compiler/impl/src/com/intellij/compiler/impl/packagingCompiler/IncrementalPackagingCompiler.java
/* * Copyright (c) 2000-2007 JetBrains s.r.o. All Rights Reserved. */ package com.intellij.compiler.impl.packagingCompiler; import com.intellij.openapi.compiler.CompileContext; import com.intellij.openapi.compiler.CompilerBundle; import com.intellij.openapi.compiler.make.BuildConfiguration; import com.intellij.openapi.compiler.make.BuildParticipant; import com.intellij.openapi.compiler.make.BuildParticipantProvider; import com.intellij.openapi.deployment.DeploymentUtilImpl; import com.intellij.openapi.module.Module; import com.intellij.openapi.module.ModuleManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.io.FileUtil; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import java.io.File; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Set; /** * @author nik */ public class IncrementalPackagingCompiler extends PackagingCompilerBase { public static final Key<Set<BuildParticipant>> AFFECTED_PARTICIPANTS_KEY = Key.create("AFFECTED_PARTICIPANTS"); private static final Key<List<String>> FILES_TO_DELETE_KEY = Key.create("files_to_delete"); private static final Key<OldProcessingItemsBuilderContext> BUILDER_CONTEXT_KEY = Key.create("processing_items_builder"); @NonNls private static final String INCREMENTAL_PACKAGING_CACHE_ID = "incremental_packaging"; public IncrementalPackagingCompiler() { super(FILES_TO_DELETE_KEY, BUILDER_CONTEXT_KEY); } @Override protected PackagingProcessingItem[] collectItems(OldProcessingItemsBuilderContext builderContext, final Project project) { Module[] allModules = ModuleManager.getInstance(project).getSortedModules(); final BuildParticipantProvider<?>[] providers = DeploymentUtilImpl.getBuildParticipantProviders(); for (BuildParticipantProvider<?> provider : providers) { addItemsForProvider(provider, allModules, builderContext); } return builderContext.getProcessingItems(builderContext.getCompileContext().getCompileScope().getAffectedModules()); } private static <P extends BuildParticipant> void addItemsForProvider(final BuildParticipantProvider<P> provider, final Module[] modulesToCompile, OldProcessingItemsBuilderContext builderContext) { for (Module module : modulesToCompile) { final Collection<P> participants = provider.getParticipants(module); for (P participant : participants) { addItemsForParticipant(participant, builderContext); } } } private static void addItemsForParticipant(final BuildParticipant participant, OldProcessingItemsBuilderContext builderContext) { participant.buildStarted(builderContext.getCompileContext()); new ProcessingItemsBuilder(participant, builderContext).build(); } @NotNull public String getDescription() { return CompilerBundle.message("incremental.packaging.compiler.description"); } protected String getOutputCacheId() { return INCREMENTAL_PACKAGING_CACHE_ID; } protected void onBuildFinished(OldProcessingItemsBuilderContext builderContext, JarsBuilder builder, final Project project) throws Exception { final Set<BuildParticipant> affectedParticipants = getAffectedParticipants(builderContext.getCompileContext()); for (ExplodedDestinationInfo destination : builder.getJarsDestinations()) { affectedParticipants.add(builderContext.getDestinationOwner(destination)); } CompileContext context = builderContext.getCompileContext(); for (BuildParticipant participant : affectedParticipants) { BuildConfiguration buildConfiguration = participant.getBuildConfiguration(); if 
(participant.willBuildExploded()) { participant.afterExplodedCreated(new File(FileUtil.toSystemDependentName(DeploymentUtilImpl.getOrCreateExplodedDir(participant))), context); } String jarPath = buildConfiguration.getJarPath(); if (buildConfiguration.isJarEnabled() && jarPath != null) { participant.afterJarCreated(new File(FileUtil.toSystemDependentName(jarPath)), context); } participant.buildFinished(context); } } public static Set<BuildParticipant> getAffectedParticipants(CompileContext context) { return context.getUserData(AFFECTED_PARTICIPANTS_KEY); } @Override protected void beforeBuildStarted(OldProcessingItemsBuilderContext context) { context.getCompileContext().putUserData(AFFECTED_PARTICIPANTS_KEY, new HashSet<BuildParticipant>()); } protected OldProcessingItemsBuilderContext createContext(CompileContext context) { return new OldProcessingItemsBuilderContext(context); } @Override protected void onFileCopied(OldProcessingItemsBuilderContext builderContext, ExplodedDestinationInfo explodedDestination) { getAffectedParticipants(builderContext.getCompileContext()).add(builderContext.getDestinationOwner(explodedDestination)); } protected boolean doNotStartBuild(CompileContext context) { Module[] affectedModules = context.getCompileScope().getAffectedModules(); return affectedModules.length == 0; } }
IDEADEV-40394: All Class Files Recopied On Every Make
java/compiler/impl/src/com/intellij/compiler/impl/packagingCompiler/IncrementalPackagingCompiler.java
IDEADEV-40394: All Class Files Recopied On Every Make
Java
apache-2.0
1fee752aa24827598b694079a3812a5888f0cd40
0
ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma
/* * The Gemma project * * Copyright (c) 2007 University of British Columbia * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package ubic.gemma.model.analysis.expression.coexpression; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.Map; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import ubic.gemma.model.expression.experiment.ExpressionExperimentValueObject; import ubic.gemma.model.genome.Gene; import ubic.gemma.ontology.OntologyTerm; /** * The results for one gene that is coexpressed with a query gene, across multiple expression experiments; possibly with * multiple probes per expression experiment. * <p> * Keeps track of specificity, pValues, Scores, goTerms, GO overlap with the query, stringency value. Information about * positive and negative correlations is stored separately. * * @author klc * @version $Id$ */ public class CoexpressionValueObject implements Comparable<CoexpressionValueObject> { private static Log log = LogFactory.getLog( CoexpressionValueObject.class.getName() ); /** * Genes that were predicted to cross-hybridize with the target gene */ private Collection<Long> crossHybridizingGenes = new HashSet<Long>(); private Collection<Long> datasetsTestedIn = new HashSet<Long>(); // the expression experiments that this coexpression was involved in. The number of these will total the 'support' // (pos+neg correlations, minus # of experiments that support both + and -) private Map<Long, ExpressionExperimentValueObject> expressionExperimentValueObjects; /** * ID of the coexpressed gene. */ private Long geneId; /** * Name of the coexpressed gene */ private String geneName; /** * Official symbol of the coexpressed gene */ private String geneOfficialName; /** * Gene type of the coexpressed gene */ private String geneType = null; /** * Number of GO terms this gene shares with the query gene. */ private Collection<OntologyTerm> goOverlap; private Map<Long, Collection<ProbePair>> links = new HashMap<Long, Collection<ProbePair>>(); private Map<Long, Map<Long, Double>> negativeScores; private Map<Long, Map<Long, Double>> negPvalues; /** * Expression Experiments which have evidence for coexpression of this gene with the query, but the probes are not * specific for the target gene. */ private Collection<Long> nonspecificEEs; /** * Number of GO terms the query gene has. This is the highest possible overlap */ private int numQueryGeneGOTerms; /** * Maps of Expression Experiment IDs to maps of Probe IDs to scores/pvalues that are in support of this * coexpression. */ private Map<Long, Map<Long, Double>> positiveScores; private Map<Long, Map<Long, Double>> posPvalues; private Gene queryGene; /** * Map of eeId -> probe IDs for the _query_. 
*/ private Map<Long, Collection<Long>> queryProbeInfo; private Long taxonId; public CoexpressionValueObject() { geneName = ""; geneId = null; geneOfficialName = null; expressionExperimentValueObjects = new HashMap<Long, ExpressionExperimentValueObject>(); positiveScores = new HashMap<Long, Map<Long, Double>>(); negativeScores = new HashMap<Long, Map<Long, Double>>(); posPvalues = new HashMap<Long, Map<Long, Double>>(); negPvalues = new HashMap<Long, Map<Long, Double>>(); queryProbeInfo = new HashMap<Long, Collection<Long>>(); nonspecificEEs = new HashSet<Long>(); numQueryGeneGOTerms = 0; } /** * @param geneid of gene that is predicted to cross-hybridize with this gene */ public void addCrossHybridizingGene( Long geneid ) { if ( geneid.equals( this.geneId ) ) return; this.crossHybridizingGenes.add( geneid ); } /** * @param eeID * @param score * @param pvalue * @param queryProbe * @param coexpressedProbe */ public void addScore( Long eeID, Double score, Double pvalue, Long queryProbe, Long coexpressedProbe ) { assert !queryProbe.equals( coexpressedProbe ); if ( !queryProbeInfo.containsKey( eeID ) ) { queryProbeInfo.put( eeID, new HashSet<Long>() ); } queryProbeInfo.get( eeID ).add( queryProbe ); if ( !this.links.containsKey( eeID ) ) { this.links.put( eeID, new HashSet<ProbePair>() ); } this.links.get( eeID ).add( new ProbePair( queryProbe, coexpressedProbe, score, pvalue ) ); if ( score < 0 ) { if ( !negativeScores.containsKey( eeID ) ) negativeScores.put( eeID, new HashMap<Long, Double>() ); if ( !negPvalues.containsKey( eeID ) ) negPvalues.put( eeID, new HashMap<Long, Double>() ); negPvalues.get( eeID ).put( coexpressedProbe, pvalue ); negativeScores.get( eeID ).put( coexpressedProbe, score ); } else { if ( !positiveScores.containsKey( eeID ) ) positiveScores.put( eeID, new HashMap<Long, Double>() ); if ( !posPvalues.containsKey( eeID ) ) posPvalues.put( eeID, new HashMap<Long, Double>() ); posPvalues.get( eeID ).put( coexpressedProbe, pvalue ); positiveScores.get( eeID ).put( coexpressedProbe, score ); } } /** * Add another experiment that supports this coexpression. * * @param eeVo */ public void addSupportingExperiment( ExpressionExperimentValueObject eeVo ) { if ( expressionExperimentValueObjects.containsKey( eeVo.getId() ) ) { // I guess this happens if there are two probes for the same gene. if ( log.isDebugEnabled() ) log.debug( "Already have seen this experiment" ); } this.expressionExperimentValueObjects.put( eeVo.getId(), eeVo ); } /* * (non-Javadoc) * @see java.lang.Comparable#compareTo(java.lang.Object) */ public int compareTo( CoexpressionValueObject o ) { int o1Size = this.getMaxLinkCount(); int o2Size = o.getMaxLinkCount(); if ( o1Size > o2Size ) { return -1; } else if ( o1Size < o2Size ) { return 1; } else { return this.getGeneName().compareTo( o.getGeneName() ); } } @Override public boolean equals( Object obj ) { if ( this == obj ) return true; if ( obj == null ) return false; if ( getClass() != obj.getClass() ) return false; CoexpressionValueObject other = ( CoexpressionValueObject ) obj; if ( geneId == null ) { if ( other.geneId != null ) return false; } else if ( !geneId.equals( other.geneId ) ) return false; return true; } /** * @return IDs of genes that may be cross-hybridizing with the target gene for this. */ public Collection<Long> getCrossHybridizingGenes() { return crossHybridizingGenes; } /** * Collection of EE IDs in which the link was tested. 
* * @return */ public Collection<Long> getDatasetsTestedIn() { return this.datasetsTestedIn; } /** * @return a collection of EE ids that contributed to this gene's negative expression */ public Collection<Long> getEEContributing2NegativeLinks() { return negativeScores.keySet(); } /** * @return a collection of EE ids that contributed to this gene's positive expression */ public Collection<Long> getEEContributing2PositiveLinks() { return positiveScores.keySet(); } /** * @return experiments that are supporting coexpression. */ public Collection<Long> getExpressionExperiments() { /* * We don't use the ExpressionExperimentValueObject keyset because there may be 'cruft' after pruning the * results. */ Collection<Long> eeIDs = new HashSet<Long>(); eeIDs.addAll( this.getNegativeScores().keySet() ); eeIDs.addAll( this.getPositiveScores().keySet() ); return eeIDs; } /** * @return the geneId of the coexpressed gene */ public Long getGeneId() { return geneId; } /** * @return the geneName of the coexpressed gene */ public String getGeneName() { return geneName; } /** * @return the geneOfficialName of the coexpressed gene */ public String getGeneOfficialName() { return geneOfficialName; } /** * @return the geneType (known gene, predicted, or probe-aligned region) of the coexpressed gene */ public String getGeneType() { return geneType; } /** * @return Gene Ontology similarity of the coexpressed gene with the query gene. */ public Collection<OntologyTerm> getGoOverlap() { return goOverlap; } /** * Function to return the max of the negative and positive link support. This is used for sorting. * * @return */ public int getMaxLinkCount() { int positiveLinks = this.getPositiveLinkSupport(); int negativeLinks = this.getNegativeLinkSupport(); return Math.max( positiveLinks, negativeLinks ); } // /** // * @return // */ // public String getImageMapName() { // StringBuffer buf = new StringBuffer(); // buf.append( "map." ); // buf.append( geneType ); // buf.append( ".gene" ); // buf.append( geneId ); // buf.append( ".taxon" ); // buf.append( taxonId ); // return buf.toString(); // } /** * @param eeId * @return */ public Collection<Long> getNegativeCorrelationProbes( Long eeId ) { if ( !negativeScores.containsKey( eeId ) ) return new HashSet<Long>(); return negativeScores.get( eeId ).keySet(); } /** * @return the negative link counts */ public int getNegativeLinkSupport() { if ( negativeScores.size() == 0 ) return 0; return this.negativeScores.size(); } /** * @return the mean of the supporting negative correlation scores. */ public double getNegativeScore() { if ( getNegativeLinkSupport() == 0 ) return 0.0; double mean = 0; int size = 0; for ( Map<Long, Double> scores : negativeScores.values() ) { for ( Double score : scores.values() ) { mean += score; size++; } } assert size > 0; return mean / size; } /** * @return the negativeScores, a map of EEID->ProbeID->Correlation score. */ public Map<Long, Map<Long, Double>> getNegativeScores() { return negativeScores; } /** * @return geometric mean of the pvalues for the supporting links. 
*/ public double getNegPValue() { if ( negPvalues.size() == 0 ) return 0.0; Collection<Map<Long, Double>> values = negPvalues.values(); return computePvalue( values ); } /** * @return the nonspecificEE */ public Collection<Long> getNonspecificEE() { return nonspecificEEs; } public int getNumDatasetsTestedIn() { if ( datasetsTestedIn == null ) return 0; return this.datasetsTestedIn.size(); } /** * @param eeId * @return */ public Collection<Long> getPositiveCorrelationProbes( Long eeId ) { if ( !positiveScores.containsKey( eeId ) ) return new HashSet<Long>(); return positiveScores.get( eeId ).keySet(); } /** * @return the positive link counts */ public int getPositiveLinkSupport() { if ( positiveScores == null ) return 0; return this.positiveScores.size(); } /** * @return */ public double getPositiveScore() { if ( positiveScores.size() == 0 ) return 0.0; double mean = 0.0; int size = 0; for ( Map<Long, Double> scores : positiveScores.values() ) { for ( Double score : scores.values() ) { mean += score; size++; } } assert size > 0; return mean / size; } /** * @return the positiveScores, a map of EEID->ProbeID->Correlation score. */ public Map<Long, Map<Long, Double>> getPositiveScores() { return positiveScores; } /** * @return geometric mean of the pvalues for the supporting links. */ public double getPosPValue() { if ( posPvalues.size() == 0 ) return 0.0; return computePvalue( this.posPvalues.values() ); } /** * @return */ public int getPossibleOverlap() { return numQueryGeneGOTerms; } /** * @param eeId * @return */ public Collection<Long> getProbes( Long eeId ) { Collection<Long> result = new HashSet<Long>(); result.addAll( getPositiveCorrelationProbes( eeId ) ); result.addAll( getNegativeCorrelationProbes( eeId ) ); return result; } /** * @return the query gene */ public Gene getQueryGene() { return queryGene; } /** * @return Map of eeId -> probe IDs for the _query_. */ public Map<Long, Collection<Long>> getQueryProbeInfo() { return queryProbeInfo; } /** * @return */ public Long getTaxonId() { return taxonId; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ( ( geneId == null ) ? 0 : geneId.hashCode() ); return result; } /** * Delete the data for a specific EE-probe combination. This is done during filtering to remove, for example, probes * that hybridize with the query gene. * * @param probeId * @param eeId * @return true if there is still evidence of coexpression left in this object, false if not. */ public boolean removeProbeEvidence( Long probeId, Long eeId ) { Collection<ProbePair> pairs = this.getLinks().get( eeId ); for ( Iterator<ProbePair> it = pairs.iterator(); it.hasNext(); ) { ProbePair probePair = it.next(); if ( probePair.getQueryProbeId().equals( probeId ) || probePair.getTargetProbeId().equals( probeId ) ) { it.remove(); } } if ( this.positiveScores.containsKey( eeId ) ) { Map<Long, Double> map = this.positiveScores.get( eeId ); if ( map.containsKey( probeId ) ) { map.remove( probeId ); } /* * At this point, we may have removed all evidence for the EE supporting the coexpression. In that case, * remove the ee. */ if ( map.size() == 0 ) { this.positiveScores.remove( eeId ); } Map<Long, Double> map2 = this.posPvalues.get( eeId ); if ( map2.containsKey( probeId ) ) { map2.remove( probeId ); } if ( map2.size() == 0 ) { this.posPvalues.remove( eeId ); } } /* * Do the same thing for negative correlations. 
*/ if ( this.negativeScores.containsKey( eeId ) ) { Map<Long, Double> map = this.negativeScores.get( eeId ); if ( map.containsKey( probeId ) ) { map.remove( probeId ); } if ( map.size() == 0 ) { this.negativeScores.remove( eeId ); } Map<Long, Double> map2 = this.negPvalues.get( eeId ); if ( map2.containsKey( probeId ) ) { map2.remove( probeId ); } if ( map2.size() == 0 ) { this.negPvalues.remove( eeId ); } } if ( this.positiveScores.size() == 0 && this.negativeScores.size() == 0 ) { return false; } return true; } public void setDatasetsTestedIn( Collection<Long> datasetsTestedIn ) { this.datasetsTestedIn = datasetsTestedIn; } /** * @param geneId the geneId to set */ public void setGeneId( Long geneId ) { this.geneId = geneId; } /** * @param geneName the geneName to set */ public void setGeneName( String geneName ) { this.geneName = geneName; } /** * @param geneOfficialName the geneOfficialName to set */ public void setGeneOfficialName( String geneOfficialName ) { this.geneOfficialName = geneOfficialName; } /** * @param geneType the geneType to set */ public void setGeneType( String geneType ) { this.geneType = geneType; } /** * @param goOverlap of this gene with the query gene */ public void setGoOverlap( Collection<OntologyTerm> goOverlap ) { this.goOverlap = goOverlap; } /** * A 'non-specific ee' is an expression experiment that lacks specific probes for BOTH the query and target genes. * * @param nonspecificEEs the nonspecificEE to set */ public void setNonspecificEEs( Collection<Long> nonspecificEEs ) { this.nonspecificEEs = nonspecificEEs; } /** * @param numQueryGeneGOTerms */ public void setNumQueryGeneGOTerms( int numQueryGeneGOTerms ) { this.numQueryGeneGOTerms = numQueryGeneGOTerms; } public void setQueryGene( Gene queryGene ) { this.queryGene = queryGene; } /** * @param taxonId */ public void setTaxonId( Long taxonId ) { this.taxonId = taxonId; } @Override public String toString() { // return StringUtils.isBlank( geneName ) ? "Gene " + geneId : geneName; StringBuilder buf = new StringBuilder(); buf.append( "Coexpression value object: query=" + queryGene + " target=" + geneId + " " + geneName + "\n" ); buf.append( "Tested in " + datasetsTestedIn.size() + ": " + StringUtils.join( datasetsTestedIn, ',' ) + "\n" ); if ( positiveScores.size() > 0 ) { buf.append( "Positive correlation support=" + positiveScores.size() + "\n" ); for ( Long eeid : positiveScores.keySet() ) { Collection<Long> qprobes = queryProbeInfo.get( eeid ); for ( Long probe : positiveScores.get( eeid ).keySet() ) { for ( Long qprobe : qprobes ) { buf.append( "EE=" + eeid + " tprobe=" + probe + " qprobe=" + qprobe + " specific=" + ( this.nonspecificEEs.contains( eeid ) ? "n" : "y" ) + "\n" ); } } } } if ( negativeScores.size() > 0 ) { buf.append( "Negative correlation support=" + negativeScores.size() + "\n" ); for ( Long eeid : negativeScores.keySet() ) { for ( Long probe : negativeScores.get( eeid ).keySet() ) { buf.append( "EE=" + eeid + " probe=" + probe + " specific=" + ( this.nonspecificEEs.contains( eeid ) ? "n" : "y" ) + "\n" ); } } } return buf.toString(); } /** * @return the links */ protected Map<Long, Collection<ProbePair>> getLinks() { return links; } /** * FIXME just returning zero for now. * <p> * Compute a combined pvalue for the scores. 
* * @param values * @return */ private double computePvalue( Collection<Map<Long, Double>> values ) { return 0.0; // double mean = 0.0; // int size = 0; // for ( Map<Long, Double> scores : values ) { // for ( Double score : scores.values() ) { // if ( score.doubleValue() == 0 ) { // score = Constants.SMALL; // } // mean += Math.log( score ); // size++; // } // } // assert size > 0; // // return Math.exp( mean / size ); } }
gemma-mda/src/main/java/ubic/gemma/model/analysis/expression/coexpression/CoexpressionValueObject.java
/* * The Gemma project * * Copyright (c) 2007 University of British Columbia * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package ubic.gemma.model.analysis.expression.coexpression; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.Map; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import ubic.gemma.model.expression.experiment.ExpressionExperimentValueObject; import ubic.gemma.model.genome.Gene; import ubic.gemma.ontology.OntologyTerm; /** * The results for one gene that is coexpressed with a query gene, across multiple expression experiments; possibly with * multiple probes per expression experiment. * <p> * Keeps track of specificity, pValues, Scores, goTerms, GO overlap with the query, stringency value. Information about * positive and negative correlations is stored separately. * * @author klc * @version $Id$ */ public class CoexpressionValueObject implements Comparable<CoexpressionValueObject> { private static Log log = LogFactory.getLog( CoexpressionValueObject.class.getName() ); /** * Genes that were predicted to cross-hybridize with the target gene */ private Collection<Long> crossHybridizingGenes = new HashSet<Long>(); private Collection<Long> datasetsTestedIn = new HashSet<Long>(); // the expression experiments that this coexpression was involved in. The number of these will total the 'support' // (pos+neg correlations, minus # of experiments that support both + and -) private Map<Long, ExpressionExperimentValueObject> expressionExperimentValueObjects; /** * ID of the coexpressed gene. */ private Long geneId; /** * Name of the coexpressed gene */ private String geneName; /** * Official symbol of the coexpressed gene */ private String geneOfficialName; /** * Gene type of the coexpressed gene */ private String geneType = null; /** * Number of GO terms this gene shares with the query gene. */ private Collection<OntologyTerm> goOverlap; private Map<Long, Collection<ProbePair>> links = new HashMap<Long, Collection<ProbePair>>(); private Map<Long, Map<Long, Double>> negativeScores; private Map<Long, Map<Long, Double>> negPvalues; /** * Expression Experiments which have evidence for coexpression of this gene with the query, but the probes are not * specific for the target gene. */ private Collection<Long> nonspecificEEs; /** * Number of GO terms the query gene has. This is the highest possible overlap */ private int numQueryGeneGOTerms; /** * Maps of Expression Experiment IDs to maps of Probe IDs to scores/pvalues that are in support of this * coexpression. */ private Map<Long, Map<Long, Double>> positiveScores; private Map<Long, Map<Long, Double>> posPvalues; private Gene queryGene; /** * Map of eeId -> probe IDs for the _query_. 
*/ private Map<Long, Collection<Long>> queryProbeInfo; private Long taxonId; public CoexpressionValueObject() { geneName = ""; geneId = null; geneOfficialName = null; expressionExperimentValueObjects = new HashMap<Long, ExpressionExperimentValueObject>(); positiveScores = new HashMap<Long, Map<Long, Double>>(); negativeScores = new HashMap<Long, Map<Long, Double>>(); posPvalues = new HashMap<Long, Map<Long, Double>>(); negPvalues = new HashMap<Long, Map<Long, Double>>(); queryProbeInfo = new HashMap<Long, Collection<Long>>(); nonspecificEEs = new HashSet<Long>(); numQueryGeneGOTerms = 0; } /** * @param geneid of gene that is predicted to cross-hybridize with this gene */ public void addCrossHybridizingGene( Long geneid ) { if ( geneid.equals( this.geneId ) ) return; this.crossHybridizingGenes.add( geneid ); } /** * @param eeID * @param score * @param pvalue * @param queryProbe * @param coexpressedProbe */ public void addScore( Long eeID, Double score, Double pvalue, Long queryProbe, Long coexpressedProbe ) { assert !queryProbe.equals( coexpressedProbe ); if ( !queryProbeInfo.containsKey( eeID ) ) { queryProbeInfo.put( eeID, new HashSet<Long>() ); } queryProbeInfo.get( eeID ).add( queryProbe ); if ( !this.links.containsKey( eeID ) ) { this.links.put( eeID, new HashSet<ProbePair>() ); } this.links.get( eeID ).add( new ProbePair( queryProbe, coexpressedProbe, score, pvalue ) ); if ( score < 0 ) { if ( !negativeScores.containsKey( eeID ) ) negativeScores.put( eeID, new HashMap<Long, Double>() ); if ( !negPvalues.containsKey( eeID ) ) negPvalues.put( eeID, new HashMap<Long, Double>() ); negPvalues.get( eeID ).put( coexpressedProbe, pvalue ); negativeScores.get( eeID ).put( coexpressedProbe, score ); } else { if ( !positiveScores.containsKey( eeID ) ) positiveScores.put( eeID, new HashMap<Long, Double>() ); if ( !posPvalues.containsKey( eeID ) ) posPvalues.put( eeID, new HashMap<Long, Double>() ); posPvalues.get( eeID ).put( coexpressedProbe, pvalue ); positiveScores.get( eeID ).put( coexpressedProbe, score ); } } /** * Add another experiment that supports this coexpression. * * @param eeVo */ public void addSupportingExperiment( ExpressionExperimentValueObject eeVo ) { if ( expressionExperimentValueObjects.containsKey( eeVo.getId() ) ) { // I guess this happens if there are two probes for the same gene. if ( log.isDebugEnabled() ) log.debug( "Already have seen this experiment" ); } this.expressionExperimentValueObjects.put( eeVo.getId(), eeVo ); } /* * (non-Javadoc) * @see java.lang.Comparable#compareTo(java.lang.Object) */ public int compareTo( CoexpressionValueObject o ) { int o1Size = this.getMaxLinkCount(); int o2Size = o.getMaxLinkCount(); if ( o1Size > o2Size ) { return -1; } else if ( o1Size < o2Size ) { return 1; } else { return this.getGeneName().compareTo( o.getGeneName() ); } } @Override public boolean equals( Object obj ) { if ( this == obj ) return true; if ( obj == null ) return false; if ( getClass() != obj.getClass() ) return false; CoexpressionValueObject other = ( CoexpressionValueObject ) obj; if ( geneId == null ) { if ( other.geneId != null ) return false; } else if ( !geneId.equals( other.geneId ) ) return false; return true; } /** * @return IDs of genes that may be cross-hybridizing with the target gene for this. */ public Collection<Long> getCrossHybridizingGenes() { return crossHybridizingGenes; } /** * Collection of EE IDs in which the link was tested. 
* * @return */ public Collection<Long> getDatasetsTestedIn() { return this.datasetsTestedIn; } /** * @return a collection of EE ids that contributed to this gene's negative expression */ public Collection<Long> getEEContributing2NegativeLinks() { return negativeScores.keySet(); } /** * @return a collection of EE ids that contributed to this gene's positive expression */ public Collection<Long> getEEContributing2PositiveLinks() { return positiveScores.keySet(); } /** * @return experiments that are supporting coexpression. */ public Collection<Long> getExpressionExperiments() { /* * We don't use the ExpressionExperimentValueObject keyset because there may be 'cruft' after pruning the * results. */ Collection<Long> eeIDs = new HashSet<Long>(); eeIDs.addAll( this.getNegativeScores().keySet() ); eeIDs.addAll( this.getPositiveScores().keySet() ); return eeIDs; } /** * @return the geneId of the coexpressed gene */ public Long getGeneId() { return geneId; } /** * @return the geneName of the coexpressed gene */ public String getGeneName() { return geneName; } /** * @return the geneOfficialName of the coexpressed gene */ public String getGeneOfficialName() { return geneOfficialName; } /** * @return the geneType (known gene, predicted, or probe-aligned region) of the coexpressed gene */ public String getGeneType() { return geneType; } /** * @return Gene Ontology similarity of the coexpressed gene with the query gene. */ public Collection<OntologyTerm> getGoOverlap() { return goOverlap; } /** * Function to return the max of the negative and positive link support. This is used for sorting. * * @return */ public int getMaxLinkCount() { int positiveLinks = this.getPositiveLinkSupport(); int negativeLinks = this.getNegativeLinkSupport(); return Math.max( positiveLinks, negativeLinks ); } // /** // * @return // */ // public String getImageMapName() { // StringBuffer buf = new StringBuffer(); // buf.append( "map." ); // buf.append( geneType ); // buf.append( ".gene" ); // buf.append( geneId ); // buf.append( ".taxon" ); // buf.append( taxonId ); // return buf.toString(); // } /** * @param eeId * @return */ public Collection<Long> getNegativeCorrelationProbes( Long eeId ) { if ( !negativeScores.containsKey( eeId ) ) return new HashSet<Long>(); return negativeScores.get( eeId ).keySet(); } /** * @return the negative link counts */ public int getNegativeLinkSupport() { if ( negativeScores.size() == 0 ) return 0; return this.negativeScores.size(); } /** * @return */ public double getNegativeScore() { if ( getNegativeLinkSupport() == 0 ) return 0.0; double mean = 0; int size = 0; for ( Map<Long, Double> scores : negativeScores.values() ) { for ( Double score : scores.values() ) { mean += score; size++; } } assert size > 0; return mean / size; } /** * @return the negativePValues */ public Map<Long, Map<Long, Double>> getNegativeScores() { return negativeScores; } /** * @return geometric mean of the pvalues for the supporting links. 
*/ public double getNegPValue() { if ( negPvalues.size() == 0 ) return 0.0; Collection<Map<Long, Double>> values = negPvalues.values(); return computePvalue( values ); } /** * @return the nonspecificEE */ public Collection<Long> getNonspecificEE() { return nonspecificEEs; } public int getNumDatasetsTestedIn() { if ( datasetsTestedIn == null ) return 0; return this.datasetsTestedIn.size(); } /** * @param eeId * @return */ public Collection<Long> getPositiveCorrelationProbes( Long eeId ) { if ( !positiveScores.containsKey( eeId ) ) return new HashSet<Long>(); return positiveScores.get( eeId ).keySet(); } /** * @return the positive link counts */ public int getPositiveLinkSupport() { if ( positiveScores == null ) return 0; return this.positiveScores.size(); } /** * @return */ public double getPositiveScore() { if ( positiveScores.size() == 0 ) return 0.0; double mean = 0.0; int size = 0; for ( Map<Long, Double> scores : positiveScores.values() ) { for ( Double score : scores.values() ) { mean += score; size++; } } assert size > 0; return mean / size; } /** * @return the positiveScores */ public Map<Long, Map<Long, Double>> getPositiveScores() { return positiveScores; } /** * @return geometric mean of the pvalues for the supporting links. */ public double getPosPValue() { if ( posPvalues.size() == 0 ) return 0.0; return computePvalue( this.posPvalues.values() ); } /** * @return */ public int getPossibleOverlap() { return numQueryGeneGOTerms; } /** * @param eeId * @return */ public Collection<Long> getProbes( Long eeId ) { Collection<Long> result = new HashSet<Long>(); result.addAll( getPositiveCorrelationProbes( eeId ) ); result.addAll( getNegativeCorrelationProbes( eeId ) ); return result; } /** * @return the query gene */ public Gene getQueryGene() { return queryGene; } /** * @return Map of eeId -> probe IDs for the _query_. */ public Map<Long, Collection<Long>> getQueryProbeInfo() { return queryProbeInfo; } /** * @return */ public Long getTaxonId() { return taxonId; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ( ( geneId == null ) ? 0 : geneId.hashCode() ); return result; } /** * Delete the data for a specific EE-probe combination. This is done during filtering to remove, for example, probes * that hybridize with the query gene. * * @param probeId * @param eeId * @return true if there is still evidence of coexpression left in this object, false if not. */ public boolean removeProbeEvidence( Long probeId, Long eeId ) { Collection<ProbePair> pairs = this.getLinks().get( eeId ); for ( Iterator<ProbePair> it = pairs.iterator(); it.hasNext(); ) { ProbePair probePair = it.next(); if ( probePair.getQueryProbeId().equals( probeId ) || probePair.getTargetProbeId().equals( probeId ) ) { it.remove(); } } if ( this.positiveScores.containsKey( eeId ) ) { Map<Long, Double> map = this.positiveScores.get( eeId ); if ( map.containsKey( probeId ) ) { map.remove( probeId ); } /* * At this point, we may have removed all evidence for the EE supporting the coexpression. In that case, * remove the ee. */ if ( map.size() == 0 ) { this.positiveScores.remove( eeId ); } Map<Long, Double> map2 = this.posPvalues.get( eeId ); if ( map2.containsKey( probeId ) ) { map2.remove( probeId ); } if ( map2.size() == 0 ) { this.posPvalues.remove( eeId ); } } /* * Do the same thing for negative correlations. 
*/ if ( this.negativeScores.containsKey( eeId ) ) { Map<Long, Double> map = this.negativeScores.get( eeId ); if ( map.containsKey( probeId ) ) { map.remove( probeId ); } if ( map.size() == 0 ) { this.negativeScores.remove( eeId ); } Map<Long, Double> map2 = this.negPvalues.get( eeId ); if ( map2.containsKey( probeId ) ) { map2.remove( probeId ); } if ( map2.size() == 0 ) { this.negPvalues.remove( eeId ); } } if ( this.positiveScores.size() == 0 && this.negativeScores.size() == 0 ) { return false; } return true; } public void setDatasetsTestedIn( Collection<Long> datasetsTestedIn ) { this.datasetsTestedIn = datasetsTestedIn; } /** * @param geneId the geneId to set */ public void setGeneId( Long geneId ) { this.geneId = geneId; } /** * @param geneName the geneName to set */ public void setGeneName( String geneName ) { this.geneName = geneName; } /** * @param geneOfficialName the geneOfficialName to set */ public void setGeneOfficialName( String geneOfficialName ) { this.geneOfficialName = geneOfficialName; } /** * @param geneType the geneType to set */ public void setGeneType( String geneType ) { this.geneType = geneType; } /** * @param goOverlap of this gene with the query gene */ public void setGoOverlap( Collection<OntologyTerm> goOverlap ) { this.goOverlap = goOverlap; } /** * A 'non-specific ee' is an expression experiment that lacks specific probes for BOTH the query and target genes. * * @param nonspecificEEs the nonspecificEE to set */ public void setNonspecificEEs( Collection<Long> nonspecificEEs ) { this.nonspecificEEs = nonspecificEEs; } /** * @param numQueryGeneGOTerms */ public void setNumQueryGeneGOTerms( int numQueryGeneGOTerms ) { this.numQueryGeneGOTerms = numQueryGeneGOTerms; } public void setQueryGene( Gene queryGene ) { this.queryGene = queryGene; } /** * @param taxonId */ public void setTaxonId( Long taxonId ) { this.taxonId = taxonId; } @Override public String toString() { // return StringUtils.isBlank( geneName ) ? "Gene " + geneId : geneName; StringBuilder buf = new StringBuilder(); buf.append( "Coexpression value object: query=" + queryGene + " target=" + geneId + " " + geneName + "\n" ); buf.append( "Tested in " + datasetsTestedIn.size() + ": " + StringUtils.join( datasetsTestedIn, ',' ) + "\n" ); if ( positiveScores.size() > 0 ) { buf.append( "Positive correlation support=" + positiveScores.size() + "\n" ); for ( Long eeid : positiveScores.keySet() ) { Collection<Long> qprobes = queryProbeInfo.get( eeid ); for ( Long probe : positiveScores.get( eeid ).keySet() ) { for ( Long qprobe : qprobes ) { buf.append( "EE=" + eeid + " tprobe=" + probe + " qprobe=" + qprobe + " specific=" + ( this.nonspecificEEs.contains( eeid ) ? "n" : "y" ) + "\n" ); } } } } if ( negativeScores.size() > 0 ) { buf.append( "Negative correlation support=" + negativeScores.size() + "\n" ); for ( Long eeid : negativeScores.keySet() ) { for ( Long probe : negativeScores.get( eeid ).keySet() ) { buf.append( "EE=" + eeid + " probe=" + probe + " specific=" + ( this.nonspecificEEs.contains( eeid ) ? "n" : "y" ) + "\n" ); } } } return buf.toString(); } /** * @return the links */ protected Map<Long, Collection<ProbePair>> getLinks() { return links; } /** * FIXME just returning zero for now. * <p> * Compute a combined pvalue for the scores. 
* * @param values * @return */ private double computePvalue( Collection<Map<Long, Double>> values ) { return 0.0; // double mean = 0.0; // int size = 0; // for ( Map<Long, Double> scores : values ) { // for ( Double score : scores.values() ) { // if ( score.doubleValue() == 0 ) { // score = Constants.SMALL; // } // mean += Math.log( score ); // size++; // } // } // assert size > 0; // // return Math.exp( mean / size ); } }
document negative/positive score data structures
gemma-mda/src/main/java/ubic/gemma/model/analysis/expression/coexpression/CoexpressionValueObject.java
document negative/positive score data structures
Java
apache-2.0
db2ff40e4b9f1fc0b04da9e08061f7ffa4eb8f15
0
spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework
/* * Copyright 2002-2009 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.scheduling.support; import static org.junit.Assert.assertEquals; import java.util.Calendar; import java.util.Date; import java.util.GregorianCalendar; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; import org.springframework.scheduling.TriggerContext; /** * @author Dave Syer * @author Mark Fisher */ public class CronTriggerTests { private Calendar calendar = new GregorianCalendar(); private Date date = new Date(); /** * @param calendar */ private void roundup(Calendar calendar) { calendar.add(Calendar.SECOND, 1); calendar.set(Calendar.MILLISECOND, 0); } @Before public void setUp() { calendar.setTime(date); roundup(calendar); } @Test public void testMatchAll() throws Exception { CronTrigger trigger = new CronTrigger("* * * * * *"); TriggerContext context = getTriggerContext(date); assertEquals(calendar.getTime(), trigger.nextExecutionTime(context)); } @Test public void testMatchLastSecond() throws Exception { CronTrigger trigger = new CronTrigger("* * * * * *"); GregorianCalendar calendar = new GregorianCalendar(); calendar.set(Calendar.SECOND, 58); assertMatchesNextSecond(trigger, calendar); } @Test public void testMatchSpecificSecond() throws Exception { CronTrigger trigger = new CronTrigger("10 * * * * *"); GregorianCalendar calendar = new GregorianCalendar(); calendar.set(Calendar.SECOND, 9); assertMatchesNextSecond(trigger, calendar); } @Test public void testIncrementSecondByOne() throws Exception { CronTrigger trigger = new CronTrigger("11 * * * * *"); calendar.set(Calendar.SECOND, 10); Date date = calendar.getTime(); calendar.add(Calendar.SECOND, 1); TriggerContext context = getTriggerContext(date); assertEquals(calendar.getTime(), trigger.nextExecutionTime(context)); } @Test public void testIncrementSecondAndRollover() throws Exception { CronTrigger trigger = new CronTrigger("10 * * * * *"); calendar.set(Calendar.SECOND, 11); Date date = calendar.getTime(); calendar.add(Calendar.SECOND, 59); TriggerContext context = getTriggerContext(date); assertEquals(calendar.getTime(), trigger.nextExecutionTime(context)); } @Test public void testSecondRange() throws Exception { CronTrigger trigger = new CronTrigger("10-15 * * * * *"); calendar.set(Calendar.SECOND, 9); assertMatchesNextSecond(trigger, calendar); calendar.set(Calendar.SECOND, 14); assertMatchesNextSecond(trigger, calendar); } @Test public void testIncrementMinuteByOne() throws Exception { CronTrigger trigger = new CronTrigger("0 11 * * * *"); calendar.set(Calendar.MINUTE, 10); Date date = calendar.getTime(); calendar.add(Calendar.MINUTE, 1); calendar.set(Calendar.SECOND, 0); TriggerContext context = getTriggerContext(date); assertEquals(calendar.getTime(), trigger.nextExecutionTime(context)); } @Test public void testIncrementMinute() throws Exception { CronTrigger trigger = new CronTrigger("0 * * * * *"); calendar.set(Calendar.MINUTE, 10); Date date = calendar.getTime(); 
calendar.add(Calendar.MINUTE, 1); calendar.set(Calendar.SECOND, 0); TriggerContext context1 = getTriggerContext(date); assertEquals(calendar.getTime(), date = trigger.nextExecutionTime(context1)); calendar.add(Calendar.MINUTE, 1); TriggerContext context2 = getTriggerContext(date); assertEquals(calendar.getTime(), date = trigger.nextExecutionTime(context2)); } @Test public void testIncrementMinuteAndRollover() throws Exception { CronTrigger trigger = new CronTrigger("0 10 * * * *"); calendar.set(Calendar.MINUTE, 11); calendar.set(Calendar.SECOND, 0); Date date = calendar.getTime(); calendar.add(Calendar.MINUTE, 59); TriggerContext context = getTriggerContext(date); assertEquals(calendar.getTime(), trigger.nextExecutionTime(context)); } @Test public void testIncrementHour() throws Exception { CronTrigger trigger = new CronTrigger("0 0 * * * *"); calendar.set(Calendar.MONTH, 9); calendar.set(Calendar.DAY_OF_MONTH, 30); calendar.set(Calendar.HOUR_OF_DAY, 11); calendar.set(Calendar.MINUTE, 1); calendar.set(Calendar.SECOND, 0); Date date = calendar.getTime(); calendar.set(Calendar.MINUTE, 0); calendar.set(Calendar.HOUR_OF_DAY, 12); TriggerContext context1 = getTriggerContext(date); assertEquals(calendar.getTime(), date = trigger.nextExecutionTime(context1)); calendar.set(Calendar.HOUR_OF_DAY, 13); TriggerContext context2 = getTriggerContext(date); assertEquals(calendar.getTime(), trigger.nextExecutionTime(context2)); } @Test public void testIncrementDayOfMonth() throws Exception { CronTrigger trigger = new CronTrigger("0 0 0 * * *"); calendar.set(Calendar.DAY_OF_MONTH, 1); Date date = calendar.getTime(); calendar.add(Calendar.DAY_OF_MONTH, 1); calendar.set(Calendar.HOUR_OF_DAY, 0); calendar.set(Calendar.MINUTE, 0); calendar.set(Calendar.SECOND, 0); TriggerContext context1 = getTriggerContext(date); assertEquals(calendar.getTime(), date = trigger.nextExecutionTime(context1)); assertEquals(2, calendar.get(Calendar.DAY_OF_MONTH)); calendar.add(Calendar.DAY_OF_MONTH, 1); TriggerContext context2 = getTriggerContext(date); assertEquals(calendar.getTime(), date = trigger.nextExecutionTime(context2)); assertEquals(3, calendar.get(Calendar.DAY_OF_MONTH)); } @Test public void testIncrementDayOfMonthByOne() throws Exception { CronTrigger trigger = new CronTrigger("* * * 10 * *"); calendar.set(Calendar.DAY_OF_MONTH, 9); Date date = calendar.getTime(); calendar.add(Calendar.DAY_OF_MONTH, 1); calendar.set(Calendar.HOUR_OF_DAY, 0); calendar.set(Calendar.MINUTE, 0); calendar.set(Calendar.SECOND, 0); TriggerContext context = getTriggerContext(date); assertEquals(calendar.getTime(), trigger.nextExecutionTime(context)); } @Test public void testIncrementDayOfMonthAndRollover() throws Exception { CronTrigger trigger = new CronTrigger("* * * 10 * *"); calendar.set(Calendar.DAY_OF_MONTH, 11); Date date = calendar.getTime(); calendar.add(Calendar.MONTH, 1); calendar.set(Calendar.DAY_OF_MONTH, 10); calendar.set(Calendar.HOUR_OF_DAY, 0); calendar.set(Calendar.MINUTE, 0); calendar.set(Calendar.SECOND, 0); TriggerContext context = getTriggerContext(date); assertEquals(calendar.getTime(), trigger.nextExecutionTime(context)); } @Test public void testDailyTriggerInShortMonth() throws Exception { CronTrigger trigger = new CronTrigger("0 0 0 * * *"); calendar.set(Calendar.MONTH, 8); // September: 30 days calendar.set(Calendar.DAY_OF_MONTH, 30); Date date = calendar.getTime(); calendar.set(Calendar.MONTH, 9); // October calendar.set(Calendar.HOUR_OF_DAY, 0); calendar.set(Calendar.MINUTE, 0); calendar.set(Calendar.SECOND, 0); 
calendar.set(Calendar.DAY_OF_MONTH, 1); TriggerContext context1 = getTriggerContext(date); assertEquals(calendar.getTime(), date = trigger.nextExecutionTime(context1)); calendar.set(Calendar.DAY_OF_MONTH, 2); TriggerContext context2 = getTriggerContext(date); assertEquals(calendar.getTime(), trigger.nextExecutionTime(context2)); } @Test public void testDailyTriggerInLongMonth() throws Exception { CronTrigger trigger = new CronTrigger("0 0 0 * * *"); calendar.set(Calendar.MONTH, 9); // October: 31 days calendar.set(Calendar.DAY_OF_MONTH, 30); Date date = calendar.getTime(); calendar.set(Calendar.HOUR_OF_DAY, 0); calendar.set(Calendar.MINUTE, 0); calendar.set(Calendar.SECOND, 0); calendar.set(Calendar.DAY_OF_MONTH, 31); TriggerContext context1 = getTriggerContext(date); assertEquals(calendar.getTime(), date = trigger.nextExecutionTime(context1)); calendar.set(Calendar.MONTH, 10); // November calendar.set(Calendar.DAY_OF_MONTH, 1); TriggerContext context2 = getTriggerContext(date); assertEquals(calendar.getTime(), trigger.nextExecutionTime(context2)); } @Test public void testIncrementMonth() throws Exception { CronTrigger trigger = new CronTrigger("0 0 0 1 * *"); calendar.set(Calendar.MONTH, 9); calendar.set(Calendar.DAY_OF_MONTH, 30); Date date = calendar.getTime(); calendar.set(Calendar.DAY_OF_MONTH, 1); calendar.set(Calendar.HOUR_OF_DAY, 0); calendar.set(Calendar.MINUTE, 0); calendar.set(Calendar.SECOND, 0); calendar.set(Calendar.MONTH, 10); TriggerContext context1 = getTriggerContext(date); assertEquals(calendar.getTime(), date = trigger.nextExecutionTime(context1)); calendar.set(Calendar.MONTH, 11); TriggerContext context2 = getTriggerContext(date); assertEquals(calendar.getTime(), trigger.nextExecutionTime(context2)); } @Test public void testMonthlyTriggerInLongMonth() throws Exception { CronTrigger trigger = new CronTrigger("0 0 0 31 * *"); calendar.set(Calendar.MONTH, 9); calendar.set(Calendar.DAY_OF_MONTH, 30); Date date = calendar.getTime(); calendar.set(Calendar.DAY_OF_MONTH, 31); calendar.set(Calendar.HOUR_OF_DAY, 0); calendar.set(Calendar.MINUTE, 0); calendar.set(Calendar.SECOND, 0); TriggerContext context = getTriggerContext(date); assertEquals(calendar.getTime(), trigger.nextExecutionTime(context)); } @Test public void testMonthlyTriggerInShortMonth() throws Exception { CronTrigger trigger = new CronTrigger("0 0 0 1 * *"); calendar.set(Calendar.MONTH, 9); calendar.set(Calendar.DAY_OF_MONTH, 30); Date date = calendar.getTime(); calendar.set(Calendar.MONTH, 10); calendar.set(Calendar.DAY_OF_MONTH, 1); calendar.set(Calendar.HOUR_OF_DAY, 0); calendar.set(Calendar.MINUTE, 0); calendar.set(Calendar.SECOND, 0); TriggerContext context = getTriggerContext(date); assertEquals(calendar.getTime(), trigger.nextExecutionTime(context)); } @Test public void testIncrementDayOfWeekByOne() throws Exception { CronTrigger trigger = new CronTrigger("* * * * * 2"); calendar.set(Calendar.DAY_OF_WEEK, 2); Date date = calendar.getTime(); calendar.add(Calendar.DAY_OF_WEEK, 1); calendar.set(Calendar.HOUR_OF_DAY, 0); calendar.set(Calendar.MINUTE, 0); calendar.set(Calendar.SECOND, 0); TriggerContext context = getTriggerContext(date); assertEquals(calendar.getTime(), trigger.nextExecutionTime(context)); assertEquals(Calendar.TUESDAY, calendar.get(Calendar.DAY_OF_WEEK)); } @Test public void testIncrementDayOfWeekAndRollover() throws Exception { CronTrigger trigger = new CronTrigger("* * * * * 2"); calendar.set(Calendar.DAY_OF_WEEK, 4); Date date = calendar.getTime(); calendar.add(Calendar.DAY_OF_MONTH, 6); 
calendar.set(Calendar.HOUR_OF_DAY, 0); calendar.set(Calendar.MINUTE, 0); calendar.set(Calendar.SECOND, 0); TriggerContext context = getTriggerContext(date); assertEquals(calendar.getTime(), trigger.nextExecutionTime(context)); assertEquals(Calendar.TUESDAY, calendar.get(Calendar.DAY_OF_WEEK)); } @Test @Ignore public void testSpecificMinuteSecond() throws Exception { CronTrigger trigger = new CronTrigger("2 5 * * * *"); calendar.set(Calendar.MINUTE, 4); Date date = calendar.getTime(); calendar.add(Calendar.MINUTE, 1); calendar.set(Calendar.SECOND, 2); TriggerContext context1 = getTriggerContext(date); assertEquals(calendar.getTime(), date = trigger.nextExecutionTime(context1)); calendar.add(Calendar.HOUR, 1); TriggerContext context2 = getTriggerContext(date); assertEquals(calendar.getTime(), date = trigger.nextExecutionTime(context2)); } @Test public void testSpecificMinuteHour() throws Exception { CronTrigger trigger = new CronTrigger("* 5 10 * * *"); calendar.set(Calendar.MINUTE, 4); calendar.set(Calendar.HOUR_OF_DAY, 9); Date date = calendar.getTime(); calendar.add(Calendar.MINUTE, 1); calendar.add(Calendar.HOUR_OF_DAY, 1); calendar.set(Calendar.SECOND, 0); TriggerContext context1 = getTriggerContext(date); assertEquals(calendar.getTime(), date = trigger.nextExecutionTime(context1)); // next trigger is in one second because second is wildcard calendar.add(Calendar.SECOND, 1); TriggerContext context2 = getTriggerContext(date); assertEquals(calendar.getTime(), date = trigger.nextExecutionTime(context2)); } @Test public void testWeekDaySequence() throws Exception { CronTrigger trigger = new CronTrigger("0 0 7 ? * MON-FRI"); // This is a Saturday calendar.set(2009, 8, 26); date = calendar.getTime(); // 7 am is the trigger time calendar.set(Calendar.HOUR_OF_DAY, 7); calendar.set(Calendar.MINUTE, 0); calendar.set(Calendar.SECOND, 0); // Add two days because we start on Saturday calendar.add(Calendar.DAY_OF_MONTH, 2); TriggerContext context1 = getTriggerContext(date); assertEquals(calendar.getTime(), date = trigger.nextExecutionTime(context1)); // Next day is a week day so add one calendar.add(Calendar.DAY_OF_MONTH, 1); TriggerContext context2 = getTriggerContext(date); assertEquals(calendar.getTime(), date = trigger.nextExecutionTime(context2)); calendar.add(Calendar.DAY_OF_MONTH, 1); TriggerContext context3 = getTriggerContext(date); assertEquals(calendar.getTime(), date = trigger.nextExecutionTime(context3)); } @Test public void testDayOfWeekIndifferent() throws Exception { CronTrigger trigger1 = new CronTrigger("* * * 2 * *"); CronTrigger trigger2 = new CronTrigger("* * * 2 * ?"); assertEquals(trigger1, trigger2); } @Test public void testSecondIncrementer() throws Exception { CronTrigger trigger1 = new CronTrigger("57,59 * * * * *"); CronTrigger trigger2 = new CronTrigger("57/2 * * * * *"); assertEquals(trigger1, trigger2); } @Test public void testSecondIncrementerWithRange() throws Exception { CronTrigger trigger1 = new CronTrigger("1,3,5 * * * * *"); CronTrigger trigger2 = new CronTrigger("1-6/2 * * * * *"); assertEquals(trigger1, trigger2); } @Test public void testHourIncrementer() throws Exception { CronTrigger trigger1 = new CronTrigger("* * 4,8,12,16,20 * * *"); CronTrigger trigger2 = new CronTrigger("* * 4/4 * * *"); assertEquals(trigger1, trigger2); } @Test public void testDayNames() throws Exception { CronTrigger trigger1 = new CronTrigger("* * * * * 0-6"); CronTrigger trigger2 = new CronTrigger("* * * * * TUE,WED,THU,FRI,SAT,SUN,MON"); assertEquals(trigger1, trigger2); } @Test 
public void testSundayIsZero() throws Exception { CronTrigger trigger1 = new CronTrigger("* * * * * 0"); CronTrigger trigger2 = new CronTrigger("* * * * * SUN"); assertEquals(trigger1, trigger2); } @Test public void testSundaySynonym() throws Exception { CronTrigger trigger1 = new CronTrigger("* * * * * 0"); CronTrigger trigger2 = new CronTrigger("* * * * * 7"); assertEquals(trigger1, trigger2); } @Test public void testMonthNames() throws Exception { CronTrigger trigger1 = new CronTrigger("* * * * 0-11 *"); CronTrigger trigger2 = new CronTrigger("* * * * FEB,JAN,MAR,APR,MAY,JUN,JUL,AUG,SEP,OCT,NOV,DEC *"); assertEquals(trigger1, trigger2); } @Test public void testMonthNamesMixedCase() throws Exception { CronTrigger trigger1 = new CronTrigger("* * * * 1 *"); CronTrigger trigger2 = new CronTrigger("* * * * Feb *"); assertEquals(trigger1, trigger2); } @Test public void testWhitespace() throws Exception { CronTrigger trigger1 = new CronTrigger("* * * * 1 *"); CronTrigger trigger2 = new CronTrigger("* * * * 1 *"); assertEquals(trigger1, trigger2); } /** * @param trigger * @param calendar */ private void assertMatchesNextSecond(CronTrigger trigger, Calendar calendar) { Date date = calendar.getTime(); roundup(calendar); TriggerContext context = getTriggerContext(date); assertEquals(calendar.getTime(), trigger.nextExecutionTime(context)); } private static TriggerContext getTriggerContext(Date lastCompletionTime) { SimpleTriggerContext context = new SimpleTriggerContext(); context.update(null, null, lastCompletionTime); return context; } }
org.springframework.context/src/test/java/org/springframework/scheduling/support/CronTriggerTests.java
/*
 * Copyright 2002-2009 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.scheduling.support;

import static org.junit.Assert.assertEquals;

import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;

import org.junit.Before;
import org.junit.Test;
import org.springframework.scheduling.TriggerContext;

/**
 * @author Dave Syer
 * @author Mark Fisher
 */
public class CronTriggerTests {

    private Calendar calendar = new GregorianCalendar();

    private Date date = new Date();

    /**
     * @param calendar
     */
    private void roundup(Calendar calendar) {
        calendar.add(Calendar.SECOND, 1);
        calendar.set(Calendar.MILLISECOND, 0);
    }

    @Before
    public void setUp() {
        calendar.setTime(date);
        roundup(calendar);
    }

    @Test
    public void testMatchAll() throws Exception {
        CronTrigger trigger = new CronTrigger("* * * * * *");
        TriggerContext context = getTriggerContext(date);
        assertEquals(calendar.getTime(), trigger.nextExecutionTime(context));
    }

    @Test
    public void testMatchLastSecond() throws Exception {
        CronTrigger trigger = new CronTrigger("* * * * * *");
        GregorianCalendar calendar = new GregorianCalendar();
        calendar.set(Calendar.SECOND, 58);
        assertMatchesNextSecond(trigger, calendar);
    }

    @Test
    public void testMatchSpecificSecond() throws Exception {
        CronTrigger trigger = new CronTrigger("10 * * * * *");
        GregorianCalendar calendar = new GregorianCalendar();
        calendar.set(Calendar.SECOND, 9);
        assertMatchesNextSecond(trigger, calendar);
    }

    @Test
    public void testIncrementSecondByOne() throws Exception {
        CronTrigger trigger = new CronTrigger("11 * * * * *");
        calendar.set(Calendar.SECOND, 10);
        Date date = calendar.getTime();
        calendar.add(Calendar.SECOND, 1);
        TriggerContext context = getTriggerContext(date);
        assertEquals(calendar.getTime(), trigger.nextExecutionTime(context));
    }

    @Test
    public void testIncrementSecondAndRollover() throws Exception {
        CronTrigger trigger = new CronTrigger("10 * * * * *");
        calendar.set(Calendar.SECOND, 11);
        Date date = calendar.getTime();
        calendar.add(Calendar.SECOND, 59);
        TriggerContext context = getTriggerContext(date);
        assertEquals(calendar.getTime(), trigger.nextExecutionTime(context));
    }

    @Test
    public void testSecondRange() throws Exception {
        CronTrigger trigger = new CronTrigger("10-15 * * * * *");
        calendar.set(Calendar.SECOND, 9);
        assertMatchesNextSecond(trigger, calendar);
        calendar.set(Calendar.SECOND, 14);
        assertMatchesNextSecond(trigger, calendar);
    }

    @Test
    public void testIncrementMinuteByOne() throws Exception {
        CronTrigger trigger = new CronTrigger("0 11 * * * *");
        calendar.set(Calendar.MINUTE, 10);
        Date date = calendar.getTime();
        calendar.add(Calendar.MINUTE, 1);
        calendar.set(Calendar.SECOND, 0);
        TriggerContext context = getTriggerContext(date);
        assertEquals(calendar.getTime(), trigger.nextExecutionTime(context));
    }

    @Test
    public void testIncrementMinute() throws Exception {
        CronTrigger trigger = new CronTrigger("0 * * * * *");
        calendar.set(Calendar.MINUTE, 10);
        Date date = calendar.getTime();
        calendar.add(Calendar.MINUTE, 1);
        calendar.set(Calendar.SECOND, 0);
        TriggerContext context1 = getTriggerContext(date);
        assertEquals(calendar.getTime(), date = trigger.nextExecutionTime(context1));
        calendar.add(Calendar.MINUTE, 1);
        TriggerContext context2 = getTriggerContext(date);
        assertEquals(calendar.getTime(), date = trigger.nextExecutionTime(context2));
    }

    @Test
    public void testIncrementMinuteAndRollover() throws Exception {
        CronTrigger trigger = new CronTrigger("0 10 * * * *");
        calendar.set(Calendar.MINUTE, 11);
        calendar.set(Calendar.SECOND, 0);
        Date date = calendar.getTime();
        calendar.add(Calendar.MINUTE, 59);
        TriggerContext context = getTriggerContext(date);
        assertEquals(calendar.getTime(), trigger.nextExecutionTime(context));
    }

    @Test
    public void testIncrementHour() throws Exception {
        CronTrigger trigger = new CronTrigger("0 0 * * * *");
        calendar.set(Calendar.MONTH, 9);
        calendar.set(Calendar.DAY_OF_MONTH, 30);
        calendar.set(Calendar.HOUR_OF_DAY, 11);
        calendar.set(Calendar.MINUTE, 1);
        calendar.set(Calendar.SECOND, 0);
        Date date = calendar.getTime();
        calendar.set(Calendar.MINUTE, 0);
        calendar.set(Calendar.HOUR_OF_DAY, 12);
        TriggerContext context1 = getTriggerContext(date);
        assertEquals(calendar.getTime(), date = trigger.nextExecutionTime(context1));
        calendar.set(Calendar.HOUR_OF_DAY, 13);
        TriggerContext context2 = getTriggerContext(date);
        assertEquals(calendar.getTime(), trigger.nextExecutionTime(context2));
    }

    @Test
    public void testIncrementDayOfMonth() throws Exception {
        CronTrigger trigger = new CronTrigger("0 0 0 * * *");
        calendar.set(Calendar.DAY_OF_MONTH, 1);
        Date date = calendar.getTime();
        calendar.add(Calendar.DAY_OF_MONTH, 1);
        calendar.set(Calendar.HOUR_OF_DAY, 0);
        calendar.set(Calendar.MINUTE, 0);
        calendar.set(Calendar.SECOND, 0);
        TriggerContext context1 = getTriggerContext(date);
        assertEquals(calendar.getTime(), date = trigger.nextExecutionTime(context1));
        assertEquals(2, calendar.get(Calendar.DAY_OF_MONTH));
        calendar.add(Calendar.DAY_OF_MONTH, 1);
        TriggerContext context2 = getTriggerContext(date);
        assertEquals(calendar.getTime(), date = trigger.nextExecutionTime(context2));
        assertEquals(3, calendar.get(Calendar.DAY_OF_MONTH));
    }

    @Test
    public void testIncrementDayOfMonthByOne() throws Exception {
        CronTrigger trigger = new CronTrigger("* * * 10 * *");
        calendar.set(Calendar.DAY_OF_MONTH, 9);
        Date date = calendar.getTime();
        calendar.add(Calendar.DAY_OF_MONTH, 1);
        calendar.set(Calendar.HOUR_OF_DAY, 0);
        calendar.set(Calendar.MINUTE, 0);
        calendar.set(Calendar.SECOND, 0);
        TriggerContext context = getTriggerContext(date);
        assertEquals(calendar.getTime(), trigger.nextExecutionTime(context));
    }

    @Test
    public void testIncrementDayOfMonthAndRollover() throws Exception {
        CronTrigger trigger = new CronTrigger("* * * 10 * *");
        calendar.set(Calendar.DAY_OF_MONTH, 11);
        Date date = calendar.getTime();
        calendar.add(Calendar.MONTH, 1);
        calendar.set(Calendar.DAY_OF_MONTH, 10);
        calendar.set(Calendar.HOUR_OF_DAY, 0);
        calendar.set(Calendar.MINUTE, 0);
        calendar.set(Calendar.SECOND, 0);
        TriggerContext context = getTriggerContext(date);
        assertEquals(calendar.getTime(), trigger.nextExecutionTime(context));
    }

    @Test
    public void testDailyTriggerInShortMonth() throws Exception {
        CronTrigger trigger = new CronTrigger("0 0 0 * * *");
        calendar.set(Calendar.MONTH, 8); // September: 30 days
        calendar.set(Calendar.DAY_OF_MONTH, 30);
        Date date = calendar.getTime();
        calendar.set(Calendar.MONTH, 9); // October
        calendar.set(Calendar.HOUR_OF_DAY, 0);
        calendar.set(Calendar.MINUTE, 0);
        calendar.set(Calendar.SECOND, 0);
        calendar.set(Calendar.DAY_OF_MONTH, 1);
        TriggerContext context1 = getTriggerContext(date);
        assertEquals(calendar.getTime(), date = trigger.nextExecutionTime(context1));
        calendar.set(Calendar.DAY_OF_MONTH, 2);
        TriggerContext context2 = getTriggerContext(date);
        assertEquals(calendar.getTime(), trigger.nextExecutionTime(context2));
    }

    @Test
    public void testDailyTriggerInLongMonth() throws Exception {
        CronTrigger trigger = new CronTrigger("0 0 0 * * *");
        calendar.set(Calendar.MONTH, 9); // October: 31 days
        calendar.set(Calendar.DAY_OF_MONTH, 30);
        Date date = calendar.getTime();
        calendar.set(Calendar.HOUR_OF_DAY, 0);
        calendar.set(Calendar.MINUTE, 0);
        calendar.set(Calendar.SECOND, 0);
        calendar.set(Calendar.DAY_OF_MONTH, 31);
        TriggerContext context1 = getTriggerContext(date);
        assertEquals(calendar.getTime(), date = trigger.nextExecutionTime(context1));
        calendar.set(Calendar.MONTH, 10); // November
        calendar.set(Calendar.DAY_OF_MONTH, 1);
        TriggerContext context2 = getTriggerContext(date);
        assertEquals(calendar.getTime(), trigger.nextExecutionTime(context2));
    }

    @Test
    public void testIncrementMonth() throws Exception {
        CronTrigger trigger = new CronTrigger("0 0 0 1 * *");
        calendar.set(Calendar.MONTH, 9);
        calendar.set(Calendar.DAY_OF_MONTH, 30);
        Date date = calendar.getTime();
        calendar.set(Calendar.DAY_OF_MONTH, 1);
        calendar.set(Calendar.HOUR_OF_DAY, 0);
        calendar.set(Calendar.MINUTE, 0);
        calendar.set(Calendar.SECOND, 0);
        calendar.set(Calendar.MONTH, 10);
        TriggerContext context1 = getTriggerContext(date);
        assertEquals(calendar.getTime(), date = trigger.nextExecutionTime(context1));
        calendar.set(Calendar.MONTH, 11);
        TriggerContext context2 = getTriggerContext(date);
        assertEquals(calendar.getTime(), trigger.nextExecutionTime(context2));
    }

    @Test
    public void testMonthlyTriggerInLongMonth() throws Exception {
        CronTrigger trigger = new CronTrigger("0 0 0 31 * *");
        calendar.set(Calendar.MONTH, 9);
        calendar.set(Calendar.DAY_OF_MONTH, 30);
        Date date = calendar.getTime();
        calendar.set(Calendar.DAY_OF_MONTH, 31);
        calendar.set(Calendar.HOUR_OF_DAY, 0);
        calendar.set(Calendar.MINUTE, 0);
        calendar.set(Calendar.SECOND, 0);
        TriggerContext context = getTriggerContext(date);
        assertEquals(calendar.getTime(), trigger.nextExecutionTime(context));
    }

    @Test
    public void testMonthlyTriggerInShortMonth() throws Exception {
        CronTrigger trigger = new CronTrigger("0 0 0 1 * *");
        calendar.set(Calendar.MONTH, 9);
        calendar.set(Calendar.DAY_OF_MONTH, 30);
        Date date = calendar.getTime();
        calendar.set(Calendar.MONTH, 10);
        calendar.set(Calendar.DAY_OF_MONTH, 1);
        calendar.set(Calendar.HOUR_OF_DAY, 0);
        calendar.set(Calendar.MINUTE, 0);
        calendar.set(Calendar.SECOND, 0);
        TriggerContext context = getTriggerContext(date);
        assertEquals(calendar.getTime(), trigger.nextExecutionTime(context));
    }

    @Test
    public void testIncrementDayOfWeekByOne() throws Exception {
        CronTrigger trigger = new CronTrigger("* * * * * 2");
        calendar.set(Calendar.DAY_OF_WEEK, 2);
        Date date = calendar.getTime();
        calendar.add(Calendar.DAY_OF_WEEK, 1);
        calendar.set(Calendar.HOUR_OF_DAY, 0);
        calendar.set(Calendar.MINUTE, 0);
        calendar.set(Calendar.SECOND, 0);
        TriggerContext context = getTriggerContext(date);
        assertEquals(calendar.getTime(), trigger.nextExecutionTime(context));
        assertEquals(Calendar.TUESDAY, calendar.get(Calendar.DAY_OF_WEEK));
    }

    @Test
    public void testIncrementDayOfWeekAndRollover() throws Exception {
        CronTrigger trigger = new CronTrigger("* * * * * 2");
        calendar.set(Calendar.DAY_OF_WEEK, 4);
        Date date = calendar.getTime();
        calendar.add(Calendar.DAY_OF_MONTH, 6);
        calendar.set(Calendar.HOUR_OF_DAY, 0);
        calendar.set(Calendar.MINUTE, 0);
        calendar.set(Calendar.SECOND, 0);
        TriggerContext context = getTriggerContext(date);
        assertEquals(calendar.getTime(), trigger.nextExecutionTime(context));
        assertEquals(Calendar.TUESDAY, calendar.get(Calendar.DAY_OF_WEEK));
    }

    @Test
    public void testSpecificMinuteSecond() throws Exception {
        CronTrigger trigger = new CronTrigger("2 5 * * * *");
        calendar.set(Calendar.MINUTE, 4);
        Date date = calendar.getTime();
        calendar.add(Calendar.MINUTE, 1);
        calendar.set(Calendar.SECOND, 2);
        TriggerContext context1 = getTriggerContext(date);
        assertEquals(calendar.getTime(), date = trigger.nextExecutionTime(context1));
        calendar.add(Calendar.HOUR, 1);
        TriggerContext context2 = getTriggerContext(date);
        assertEquals(calendar.getTime(), date = trigger.nextExecutionTime(context2));
    }

    @Test
    public void testSpecificMinuteHour() throws Exception {
        CronTrigger trigger = new CronTrigger("* 5 10 * * *");
        calendar.set(Calendar.MINUTE, 4);
        calendar.set(Calendar.HOUR_OF_DAY, 9);
        Date date = calendar.getTime();
        calendar.add(Calendar.MINUTE, 1);
        calendar.add(Calendar.HOUR_OF_DAY, 1);
        calendar.set(Calendar.SECOND, 0);
        TriggerContext context1 = getTriggerContext(date);
        assertEquals(calendar.getTime(), date = trigger.nextExecutionTime(context1));
        // next trigger is in one second because second is wildcard
        calendar.add(Calendar.SECOND, 1);
        TriggerContext context2 = getTriggerContext(date);
        assertEquals(calendar.getTime(), date = trigger.nextExecutionTime(context2));
    }

    @Test
    public void testWeekDaySequence() throws Exception {
        CronTrigger trigger = new CronTrigger("0 0 7 ? * MON-FRI");
        // This is a Saturday
        calendar.set(2009, 8, 26);
        date = calendar.getTime();
        // 7 am is the trigger time
        calendar.set(Calendar.HOUR_OF_DAY, 7);
        calendar.set(Calendar.MINUTE, 0);
        calendar.set(Calendar.SECOND, 0);
        // Add two days because we start on Saturday
        calendar.add(Calendar.DAY_OF_MONTH, 2);
        TriggerContext context1 = getTriggerContext(date);
        assertEquals(calendar.getTime(), date = trigger.nextExecutionTime(context1));
        // Next day is a week day so add one
        calendar.add(Calendar.DAY_OF_MONTH, 1);
        TriggerContext context2 = getTriggerContext(date);
        assertEquals(calendar.getTime(), date = trigger.nextExecutionTime(context2));
        calendar.add(Calendar.DAY_OF_MONTH, 1);
        TriggerContext context3 = getTriggerContext(date);
        assertEquals(calendar.getTime(), date = trigger.nextExecutionTime(context3));
    }

    @Test
    public void testDayOfWeekIndifferent() throws Exception {
        CronTrigger trigger1 = new CronTrigger("* * * 2 * *");
        CronTrigger trigger2 = new CronTrigger("* * * 2 * ?");
        assertEquals(trigger1, trigger2);
    }

    @Test
    public void testSecondIncrementer() throws Exception {
        CronTrigger trigger1 = new CronTrigger("57,59 * * * * *");
        CronTrigger trigger2 = new CronTrigger("57/2 * * * * *");
        assertEquals(trigger1, trigger2);
    }

    @Test
    public void testSecondIncrementerWithRange() throws Exception {
        CronTrigger trigger1 = new CronTrigger("1,3,5 * * * * *");
        CronTrigger trigger2 = new CronTrigger("1-6/2 * * * * *");
        assertEquals(trigger1, trigger2);
    }

    @Test
    public void testHourIncrementer() throws Exception {
        CronTrigger trigger1 = new CronTrigger("* * 4,8,12,16,20 * * *");
        CronTrigger trigger2 = new CronTrigger("* * 4/4 * * *");
        assertEquals(trigger1, trigger2);
    }

    @Test
    public void testDayNames() throws Exception {
        CronTrigger trigger1 = new CronTrigger("* * * * * 0-6");
        CronTrigger trigger2 = new CronTrigger("* * * * * TUE,WED,THU,FRI,SAT,SUN,MON");
        assertEquals(trigger1, trigger2);
    }

    @Test
    public void testSundayIsZero() throws Exception {
        CronTrigger trigger1 = new CronTrigger("* * * * * 0");
        CronTrigger trigger2 = new CronTrigger("* * * * * SUN");
        assertEquals(trigger1, trigger2);
    }

    @Test
    public void testSundaySynonym() throws Exception {
        CronTrigger trigger1 = new CronTrigger("* * * * * 0");
        CronTrigger trigger2 = new CronTrigger("* * * * * 7");
        assertEquals(trigger1, trigger2);
    }

    @Test
    public void testMonthNames() throws Exception {
        CronTrigger trigger1 = new CronTrigger("* * * * 0-11 *");
        CronTrigger trigger2 = new CronTrigger("* * * * FEB,JAN,MAR,APR,MAY,JUN,JUL,AUG,SEP,OCT,NOV,DEC *");
        assertEquals(trigger1, trigger2);
    }

    @Test
    public void testMonthNamesMixedCase() throws Exception {
        CronTrigger trigger1 = new CronTrigger("* * * * 1 *");
        CronTrigger trigger2 = new CronTrigger("* * * * Feb *");
        assertEquals(trigger1, trigger2);
    }

    @Test
    public void testWhitespace() throws Exception {
        CronTrigger trigger1 = new CronTrigger("* * * * 1 *");
        CronTrigger trigger2 = new CronTrigger("* * * * 1 *");
        assertEquals(trigger1, trigger2);
    }

    /**
     * @param trigger
     * @param calendar
     */
    private void assertMatchesNextSecond(CronTrigger trigger, Calendar calendar) {
        Date date = calendar.getTime();
        roundup(calendar);
        TriggerContext context = getTriggerContext(date);
        assertEquals(calendar.getTime(), trigger.nextExecutionTime(context));
    }

    private static TriggerContext getTriggerContext(Date lastCompletionTime) {
        SimpleTriggerContext context = new SimpleTriggerContext();
        context.update(null, null, lastCompletionTime);
        return context;
    }

}
commented out test failing nightly snapshot
org.springframework.context/src/test/java/org/springframework/scheduling/support/CronTriggerTests.java
commented out test failing nightly snapshot
Java
apache-2.0
a1322d62d1dc723305ff8fc042103b9d14530f60
0
alibaba/java-dns-cache-manipulator,alibaba/java-dns-cache-manipulator,alibaba/java-dns-cache-manipulator,alibaba/java-dns-cache-manipulator
package com.alibaba.dcm.tool;

import com.alibaba.dcm.agent.DcmAgent;
import com.sun.tools.attach.*;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import org.apache.commons.cli.*;
import org.apache.commons.io.FileUtils;

import javax.annotation.Nonnull;
import java.io.File;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.util.Iterator;
import java.util.List;
import java.util.Scanner;

import static java.lang.System.exit;

/**
 * @author Jerry Lee (oldratlee at gmail dot com)
 * @since 1.4.0
 */
public class DcmTool {
    static final String DCM_TOOLS_TMP_FILE_KEY = "DCM_TOOLS_TMP_FILE";
    static final String DCM_TOOLS_AGENT_JAR_KEY = "DCM_TOOLS_AGENT_JAR";

    private static final String DCM_AGENT_SUCCESS_MARK_LINE = "!!DCM SUCCESS!!";

    private final static List<String> actionList = DcmAgent.getActionList();

    public static void main(@Nonnull String[] args) throws Exception {
        final String tmpFile = getConfig(DCM_TOOLS_TMP_FILE_KEY);
        final String agentJar = getConfig(DCM_TOOLS_AGENT_JAR_KEY);

        final CommandLine cmd = parseCommandLine(args);

        final String[] arguments = cmd.getArgs();
        if (arguments.length < 1) {
            System.out.println("No Action! Available action: " + actionList);
            exit(2);
        }

        final String action = arguments[0].trim();
        if (!actionList.contains(action)) {
            throw new IllegalStateException("Unknown action " + action + ". Available action: " + actionList);
        }

        final String pid;
        if (cmd.hasOption('p')) {
            pid = cmd.getOptionValue('p');
        } else {
            pid = selectProcess();
        }

        doDcmActionViaAgent(tmpFile, agentJar, arguments, action, pid);
    }

    @Nonnull
    private static CommandLine parseCommandLine(@Nonnull String[] args) throws ParseException {
        final Options options = new Options();
        options.addOption("p", "pid", true, "java process id to attach");
        options.addOption("h", "help", false, "show help");

        CommandLineParser parser = new DefaultParser();
        CommandLine cmd = parser.parse(options, args);

        if (cmd.hasOption('h')) {
            HelpFormatter hf = new HelpFormatter();
            hf.printHelp("Options", options);
            exit(0);
        }
        return cmd;
    }

    private static void doDcmActionViaAgent(
            @Nonnull String tmpFile, @Nonnull String agentJar,
            @Nonnull String[] arguments, @Nonnull String action, @Nonnull String pid)
            throws AttachNotSupportedException, IOException, AgentLoadException, AgentInitializationException {
        final StringBuilder agentArgument = new StringBuilder();
        agentArgument.append(action);
        for (int i = 1; i < arguments.length; i++) {
            String s = arguments[i];
            agentArgument.append(' ').append(s);
        }
        agentArgument.append(" file ").append(tmpFile);

        VirtualMachine vm = null; // target java process pid
        boolean actionSuccess;
        try {
            vm = VirtualMachine.attach(pid);
            vm.loadAgent(agentJar, agentArgument.toString()); // loadAgent method will wait to agentmain finished.
            actionSuccess = printDcmResult(tmpFile);
        } finally {
            if (null != vm) {
                vm.detach();
            }
        }

        if (!actionSuccess) {
            exit(1);
        }
    }

    private static boolean printDcmResult(@Nonnull String tmpFile) throws IOException {
        boolean actionSuccess = false;

        final List<String> lines = FileUtils.readLines(new File(tmpFile), "UTF-8");
        final int lastIdx = lines.size() - 1;
        final String lastLine = lines.get(lastIdx);
        if (DCM_AGENT_SUCCESS_MARK_LINE.equals(lastLine)) {
            lines.remove(lastIdx);
            actionSuccess = true;
        }
        for (String line : lines) {
            System.out.println(line);
        }

        return actionSuccess;
    }

    ///////////////////////////////////////////////
    // util methods
    ///////////////////////////////////////////////

    @Nonnull
    private static String getConfig(@Nonnull String name) {
        String var = System.getenv(name);
        if (var == null || var.trim().length() == 0) {
            var = System.getProperty(name);
        }
        if (var == null || var.trim().length() == 0) {
            throw new IllegalStateException("fail to var " + name + ", is absent or blank string!");
        }
        return var;
    }

    @Nonnull
    @SuppressFBWarnings("DM_DEFAULT_ENCODING")
    private static String selectProcess() {
        System.out.println("Which java process to attache:");

        final List<VirtualMachineDescriptor> list = VirtualMachine.list();
        // remove current process
        for (Iterator<VirtualMachineDescriptor> iterator = list.iterator(); iterator.hasNext(); ) {
            VirtualMachineDescriptor vm = iterator.next();
            if (vm.id().equals(pid())) iterator.remove();
        }

        for (int i = 0; i < list.size(); i++) {
            final VirtualMachineDescriptor vm = list.get(i);
            System.out.printf("%d) %-5s %s%n", i + 1, vm.id(), vm.displayName());
        }

        Scanner in = new Scanner(System.in);
        while (true) {
            System.out.print("?# ");
            final String select = in.nextLine();
            try {
                final int idx = Integer.parseInt(select);
                if (idx > 0 && idx <= list.size()) {
                    return list.get(idx - 1).id();
                }
                System.out.println("Invalid selection!");
            } catch (NumberFormatException e) {
                System.out.println("Invalid input, not number!");
            }
        }
    }

    @Nonnull
    static String pid() {
        final String name = ManagementFactory.getRuntimeMXBean().getName();
        final int idx = name.indexOf("@");
        return name.substring(0, idx);
    }
}
tool/src/main/java/com/alibaba/dcm/tool/DcmTool.java
package com.alibaba.dcm.tool;

import com.alibaba.dcm.agent.DcmAgent;
import com.sun.tools.attach.VirtualMachine;
import com.sun.tools.attach.VirtualMachineDescriptor;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import org.apache.commons.cli.*;
import org.apache.commons.io.FileUtils;

import java.io.File;
import java.lang.management.ManagementFactory;
import java.util.Iterator;
import java.util.List;
import java.util.Scanner;

import static java.lang.System.exit;

/**
 * @author Jerry Lee (oldratlee at gmail dot com)
 * @since 1.4.0
 */
public class DcmTool {
    static final String DCM_AGENT_SUCCESS_MARK_LINE = "!!DCM SUCCESS!!";

    static final String DCM_TOOLS_TMP_FILE_KEY = "DCM_TOOLS_TMP_FILE";
    static final String DCM_TOOLS_AGENT_JAR_KEY = "DCM_TOOLS_AGENT_JAR";

    final static List<String> actionList = DcmAgent.getActionList();

    public static void main(String[] args) throws Exception {
        final String tmpFile = getConfig(DCM_TOOLS_TMP_FILE_KEY);
        final String agentJar = getConfig(DCM_TOOLS_AGENT_JAR_KEY);

        final Options options = new Options();
        options.addOption("p", "pid", true, "java process id to attach");
        options.addOption("h", "help", false, "show help");

        CommandLineParser parser = new DefaultParser();
        CommandLine cmd = parser.parse(options, args);

        if (cmd.hasOption('h')) {
            HelpFormatter hf = new HelpFormatter();
            hf.printHelp("Options", options);
            return;
        }

        final String[] arguments = cmd.getArgs();
        if (arguments.length < 1) {
            System.out.println("No Action! Available action: " + actionList);
            exit(2);
        }

        final String action = arguments[0].trim();
        if (!actionList.contains(action)) {
            throw new IllegalStateException("Unknown action " + action + ". Available action: " + actionList);
        }

        final String pid;
        if (cmd.hasOption('p')) {
            pid = cmd.getOptionValue('p');
        } else {
            pid = selectProcess();
        }

        StringBuilder agentArgument = new StringBuilder();
        agentArgument.append(action);
        for (int i = 1; i < arguments.length; i++) {
            String s = arguments[i];
            agentArgument.append(' ').append(s);
        }
        agentArgument.append(" file ").append(tmpFile);

        VirtualMachine vm = null; // target java process pid
        boolean actionSuccess = false;
        try {
            vm = VirtualMachine.attach(pid);
            vm.loadAgent(agentJar, agentArgument.toString()); // loadAgent method will wait to agentmain finished.

            final List<String> lines = FileUtils.readLines(new File(tmpFile), "UTF-8");
            final int lastIdx = lines.size() - 1;
            final String lastLine = lines.get(lastIdx);
            if (DCM_AGENT_SUCCESS_MARK_LINE.equals(lastLine)) {
                lines.remove(lastIdx);
                actionSuccess = true;
            }
            for (String line : lines) {
                System.out.println(line);
            }
        } finally {
            if (null != vm) {
                vm.detach();
            }
        }

        if (!actionSuccess) {
            exit(1);
        }
    }

    static String getConfig(String name) {
        String var = System.getenv(name);
        if (var == null || var.trim().length() == 0) {
            var = System.getProperty(name);
        }
        if (var == null || var.trim().length() == 0) {
            throw new IllegalStateException("fail to var " + name + ", is absent or blank string!");
        }
        return var;
    }

    @SuppressFBWarnings("DM_DEFAULT_ENCODING")
    static String selectProcess() {
        System.out.println("Which java process to attache:");

        final List<VirtualMachineDescriptor> list = VirtualMachine.list();
        // remove current process
        for (Iterator<VirtualMachineDescriptor> iterator = list.iterator(); iterator.hasNext(); ) {
            VirtualMachineDescriptor vm = iterator.next();
            if (vm.id().equals(pid())) iterator.remove();
        }

        for (int i = 0; i < list.size(); i++) {
            final VirtualMachineDescriptor vm = list.get(i);
            System.out.printf("%d) %-5s %s%n", i + 1, vm.id(), vm.displayName());
        }

        Scanner in = new Scanner(System.in);
        while (true) {
            System.out.print("?# ");
            final String select = in.nextLine();
            try {
                final int idx = Integer.parseInt(select);
                if (idx > 0 && idx <= list.size()) {
                    return list.get(idx - 1).id();
                }
                System.out.println("Invalid selection!");
            } catch (NumberFormatException e) {
                System.out.println("Invalid input, not number!");
            }
        }
    }

    static String pid() {
        final String name = ManagementFactory.getRuntimeMXBean().getName();
        final int idx = name.indexOf("@");
        return name.substring(0, idx);
    }
}
= refactor: split big method `DcmTool.main`
tool/src/main/java/com/alibaba/dcm/tool/DcmTool.java
= refactor: split big method `DcmTool.main`
Java
apache-2.0
4c8d4d108021f12f79b352092d07ba5c2f424ffb
0
colczr/sakai,udayg/sakai,buckett/sakai-gitflow,surya-janani/sakai,whumph/sakai,tl-its-umich-edu/sakai,willkara/sakai,surya-janani/sakai,willkara/sakai,frasese/sakai,ouit0408/sakai,clhedrick/sakai,Fudan-University/sakai,zqian/sakai,ktakacs/sakai,surya-janani/sakai,zqian/sakai,OpenCollabZA/sakai,joserabal/sakai,wfuedu/sakai,duke-compsci290-spring2016/sakai,joserabal/sakai,liubo404/sakai,lorenamgUMU/sakai,rodriguezdevera/sakai,Fudan-University/sakai,pushyamig/sakai,bkirschn/sakai,colczr/sakai,colczr/sakai,joserabal/sakai,OpenCollabZA/sakai,rodriguezdevera/sakai,conder/sakai,puramshetty/sakai,udayg/sakai,frasese/sakai,frasese/sakai,clhedrick/sakai,whumph/sakai,bkirschn/sakai,OpenCollabZA/sakai,willkara/sakai,colczr/sakai,bkirschn/sakai,ktakacs/sakai,bkirschn/sakai,conder/sakai,noondaysun/sakai,lorenamgUMU/sakai,ktakacs/sakai,liubo404/sakai,pushyamig/sakai,bzhouduke123/sakai,joserabal/sakai,hackbuteer59/sakai,pushyamig/sakai,puramshetty/sakai,noondaysun/sakai,colczr/sakai,OpenCollabZA/sakai,duke-compsci290-spring2016/sakai,bkirschn/sakai,tl-its-umich-edu/sakai,lorenamgUMU/sakai,kingmook/sakai,OpenCollabZA/sakai,liubo404/sakai,udayg/sakai,kingmook/sakai,rodriguezdevera/sakai,bzhouduke123/sakai,kwedoff1/sakai,bzhouduke123/sakai,bzhouduke123/sakai,ktakacs/sakai,Fudan-University/sakai,kingmook/sakai,introp-software/sakai,introp-software/sakai,clhedrick/sakai,zqian/sakai,pushyamig/sakai,conder/sakai,ouit0408/sakai,udayg/sakai,duke-compsci290-spring2016/sakai,colczr/sakai,hackbuteer59/sakai,introp-software/sakai,surya-janani/sakai,conder/sakai,ouit0408/sakai,colczr/sakai,frasese/sakai,joserabal/sakai,surya-janani/sakai,buckett/sakai-gitflow,puramshetty/sakai,tl-its-umich-edu/sakai,conder/sakai,frasese/sakai,noondaysun/sakai,whumph/sakai,noondaysun/sakai,willkara/sakai,bkirschn/sakai,udayg/sakai,rodriguezdevera/sakai,noondaysun/sakai,pushyamig/sakai,tl-its-umich-edu/sakai,rodriguezdevera/sakai,hackbuteer59/sakai,zqian/sakai,Fudan-University/sakai,noondaysun/sakai,kingmook/sakai,zqian/sakai,whumph/sakai,duke-compsci290-spring2016/sakai,hackbuteer59/sakai,whumph/sakai,joserabal/sakai,kwedoff1/sakai,hackbuteer59/sakai,clhedrick/sakai,udayg/sakai,frasese/sakai,kingmook/sakai,noondaysun/sakai,tl-its-umich-edu/sakai,pushyamig/sakai,ouit0408/sakai,tl-its-umich-edu/sakai,introp-software/sakai,liubo404/sakai,duke-compsci290-spring2016/sakai,udayg/sakai,willkara/sakai,joserabal/sakai,Fudan-University/sakai,pushyamig/sakai,duke-compsci290-spring2016/sakai,puramshetty/sakai,frasese/sakai,clhedrick/sakai,introp-software/sakai,wfuedu/sakai,buckett/sakai-gitflow,bzhouduke123/sakai,Fudan-University/sakai,whumph/sakai,Fudan-University/sakai,introp-software/sakai,whumph/sakai,kwedoff1/sakai,kwedoff1/sakai,whumph/sakai,hackbuteer59/sakai,ktakacs/sakai,kingmook/sakai,kingmook/sakai,surya-janani/sakai,hackbuteer59/sakai,lorenamgUMU/sakai,wfuedu/sakai,buckett/sakai-gitflow,wfuedu/sakai,surya-janani/sakai,surya-janani/sakai,hackbuteer59/sakai,ktakacs/sakai,introp-software/sakai,liubo404/sakai,lorenamgUMU/sakai,frasese/sakai,duke-compsci290-spring2016/sakai,liubo404/sakai,OpenCollabZA/sakai,kingmook/sakai,puramshetty/sakai,duke-compsci290-spring2016/sakai,ktakacs/sakai,zqian/sakai,wfuedu/sakai,ouit0408/sakai,puramshetty/sakai,wfuedu/sakai,OpenCollabZA/sakai,OpenCollabZA/sakai,conder/sakai,bzhouduke123/sakai,lorenamgUMU/sakai,tl-its-umich-edu/sakai,ouit0408/sakai,willkara/sakai,rodriguezdevera/sakai,lorenamgUMU/sakai,kwedoff1/sakai,buckett/sakai-gitflow,wfuedu/sakai,buckett/sakai-gitflow,liubo404/sakai,pushyamig/sakai,zqian/sa
kai,puramshetty/sakai,kwedoff1/sakai,rodriguezdevera/sakai,kwedoff1/sakai,liubo404/sakai,tl-its-umich-edu/sakai,bzhouduke123/sakai,lorenamgUMU/sakai,clhedrick/sakai,puramshetty/sakai,udayg/sakai,kwedoff1/sakai,ouit0408/sakai,noondaysun/sakai,clhedrick/sakai,buckett/sakai-gitflow,bzhouduke123/sakai,clhedrick/sakai,introp-software/sakai,zqian/sakai,willkara/sakai,willkara/sakai,Fudan-University/sakai,conder/sakai,conder/sakai,bkirschn/sakai,rodriguezdevera/sakai,colczr/sakai,bkirschn/sakai,buckett/sakai-gitflow,wfuedu/sakai,ouit0408/sakai,ktakacs/sakai,joserabal/sakai
/**********************************************************************************
 * $URL: https://source.sakaiproject.org/svn/sam/trunk/component/src/java/org/sakaiproject/tool/assessment/facade/AssessmentGradingFacadeQueries.java $
 * $Id: AssessmentGradingFacadeQueries.java 9348 2006-05-13 06:14:57Z [email protected] $
 ***********************************************************************************
 *
 * Copyright (c) 2004, 2005, 2006 The Sakai Foundation.
 *
 * Licensed under the Educational Community License, Version 1.0 (the"License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.opensource.org/licenses/ecl1.php
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 **********************************************************************************/

package org.sakaiproject.tool.assessment.facade;

import java.io.InputStream;
import java.io.File;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

import org.hibernate.Criteria;
import org.hibernate.HibernateException;
import org.hibernate.Query;
import org.hibernate.Session;
import org.hibernate.criterion.Criterion;
import org.hibernate.criterion.Disjunction;
import org.hibernate.criterion.Expression;
import org.hibernate.criterion.Order;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.sakaiproject.tool.assessment.services.PersistenceService;
import org.sakaiproject.tool.assessment.data.dao.assessment.PublishedAssessmentData;
import org.sakaiproject.tool.assessment.data.dao.assessment.PublishedItemData;
import org.sakaiproject.tool.assessment.data.dao.grading.AssessmentGradingData;
import org.sakaiproject.tool.assessment.data.dao.grading.ItemGradingData;
import org.sakaiproject.tool.assessment.data.dao.grading.MediaData;
import org.sakaiproject.tool.assessment.data.ifc.assessment.EvaluationModelIfc;
import org.sakaiproject.tool.assessment.data.ifc.assessment.PublishedAssessmentIfc;
import org.sakaiproject.tool.assessment.data.ifc.grading.AssessmentGradingIfc;
import org.sakaiproject.tool.assessment.data.ifc.grading.ItemGradingIfc;
import org.springframework.orm.hibernate3.HibernateCallback;
import org.springframework.orm.hibernate3.support.HibernateDaoSupport;

public class AssessmentGradingFacadeQueries extends HibernateDaoSupport implements AssessmentGradingFacadeQueriesAPI{

  private static Log log = LogFactory.getLog(AssessmentGradingFacadeQueries.class);

  public AssessmentGradingFacadeQueries () {
  }

  public List getTotalScores(final String publishedId, String which) {
    try {
      // sectionSet of publishedAssessment is defined as lazy loading in
      // Hibernate OR map, so we need to initialize them. Unfortunately our
      // spring-1.0.2.jar does not support HibernateTemplate.intialize(Object)
      // so we need to do it ourselves
      PublishedAssessmentData assessment =PersistenceService.getInstance().getPublishedAssessmentFacadeQueries().
        loadPublishedAssessment(new Long(publishedId));
      HashSet sectionSet = PersistenceService.getInstance().
        getPublishedAssessmentFacadeQueries().getSectionSetForAssessment(assessment);
      assessment.setSectionSet(sectionSet);

      // proceed to get totalScores
      // Object[] objects = new Object[2];
      // objects[0] = new Long(publishedId);
      // objects[1] = new Boolean(true);
      // Type[] types = new Type[2];
      // types[0] = Hibernate.LONG;
      // types[1] = Hibernate.BOOLEAN;

      final HibernateCallback hcb = new HibernateCallback(){
        public Object doInHibernate(Session session) throws HibernateException, SQLException {
          Query q = session.createQuery(
            "from AssessmentGradingData a where a.publishedAssessmentId=? and a.forGrade=? order by a.agentId ASC, a.finalScore DESC, a.submittedDate DESC");
          q.setLong(0, Long.parseLong(publishedId));
          q.setBoolean(1, true);
          return q.list();
        };
      };
      List list = getHibernateTemplate().executeFind(hcb);

      // List list = getHibernateTemplate().find(
      // "from AssessmentGradingData a where a.publishedAssessmentId=? and a.forGrade=? order by agentId ASC, finalScore DESC, submittedDate DESC",
      // objects, types);

      // last submission
      if (which.equals(EvaluationModelIfc.LAST_SCORE.toString())) {
        final HibernateCallback hcb2 = new HibernateCallback(){
          public Object doInHibernate(Session session) throws HibernateException, SQLException {
            Query q = session.createQuery(
              "from AssessmentGradingData a where a.publishedAssessmentId=? and a.forGrade=? order by a.agentId ASC, a.submittedDate DESC");
            q.setLong(0, Long.parseLong(publishedId));
            q.setBoolean(1, true);
            return q.list();
          };
        };
        list = getHibernateTemplate().executeFind(hcb2);
        // list = getHibernateTemplate().find(
        // "from AssessmentGradingData a where a.publishedAssessmentId=? and a.forGrade=? order by agentId ASC, submittedDate DESC",
        // objects, types);
      }

      if (which.equals(EvaluationModelIfc.ALL_SCORE.toString())) {
        return list;
      }
      else { // only take highest or latest
        Iterator items = list.iterator();
        ArrayList newlist = new ArrayList();
        String agentid = null;
        AssessmentGradingData data = (AssessmentGradingData) items.next();
        // daisyf add the following line on 12/15/04
        data.setPublishedAssessmentId(assessment.getPublishedAssessmentId());
        agentid = data.getAgentId();
        newlist.add(data);
        while (items.hasNext()) {
          while (items.hasNext()) {
            data = (AssessmentGradingData) items.next();
            if (!data.getAgentId().equals(agentid)) {
              agentid = data.getAgentId();
              newlist.add(data);
              break;
            }
          }
        }
        return newlist;
      }
    } catch (Exception e) {
      e.printStackTrace();
      return new ArrayList();
    }
  }

  public List getAllSubmissions(final String publishedId) {
    // Object[] objects = new Object[1];
    // objects[0] = new Long(publishedId);
    // Type[] types = new Type[1];
    // types[0] = Hibernate.LONG;

    final HibernateCallback hcb = new HibernateCallback(){
      public Object doInHibernate(Session session) throws HibernateException, SQLException {
        Query q = session.createQuery(
          "from AssessmentGradingData a where a.publishedAssessmentId=? and a.forGrade=1");
        q.setLong(0, Long.parseLong(publishedId));
        return q.list();
      };
    };
    return getHibernateTemplate().executeFind(hcb);

    // List list = getHibernateTemplate().find("from AssessmentGradingData a where a.publishedAssessmentId=? and a.forGrade=1", objects, types);
    // return list;
  }

  public HashMap getItemScores(Long publishedId, final Long itemId, String which) {
    try {
      ArrayList scores = (ArrayList) getTotalScores(publishedId.toString(), which);
      HashMap map = new HashMap();
      //List list = new ArrayList();

      // make final for callback to access
      final Iterator iter = scores.iterator();

      HibernateCallback hcb = new HibernateCallback() {
        public Object doInHibernate(Session session) throws HibernateException, SQLException {
          Criteria criteria = session.createCriteria(ItemGradingData.class);
          Disjunction disjunction = Expression.disjunction();

          /** make list from AssessmentGradingData ids */
          List gradingIdList = new ArrayList();
          while (iter.hasNext()){
            AssessmentGradingData data = (AssessmentGradingData) iter.next();
            gradingIdList.add(data.getAssessmentGradingId());
          }

          /** create or disjunctive expression for (in clauses) */
          List tempList = new ArrayList();
          for (int i = 0; i < gradingIdList.size(); i += 50){
            if (i + 50 > gradingIdList.size()){
              tempList = gradingIdList.subList(i, gradingIdList.size());
              disjunction.add(Expression.in("assessmentGradingId", tempList));
            }
            else{
              tempList = gradingIdList.subList(i, i + 50);
              disjunction.add(Expression.in("assessmentGradingId", tempList));
            }
          }

          if (itemId.equals(new Long(0))) {
            criteria.add(disjunction);
            //criteria.add(Expression.isNotNull("submittedDate"));
          }
          else {
            /** create logical and between the pubCriterion and the disjunction criterion */
            //Criterion pubCriterion = Expression.eq("publishedItem.itemId", itemId);
            Criterion pubCriterion = Expression.eq("publishedItemId", itemId);
            criteria.add(Expression.and(pubCriterion, disjunction));
            //criteria.add(Expression.isNotNull("submittedDate"));
          }
          criteria.addOrder(Order.asc("agentId"));
          criteria.addOrder(Order.desc("submittedDate"));
          //return criteria.list();
          //large list cause out of memory error (java heap space)
          return criteria.setMaxResults(10000).list();
        }
      };
      List temp = (List) getHibernateTemplate().execute(hcb);

      Iterator iter2 = temp.iterator();
      while (iter2.hasNext()) {
        ItemGradingData data = (ItemGradingData) iter2.next();
        ArrayList thisone = (ArrayList) map.get(data.getPublishedItemId());
        if (thisone == null)
          thisone = new ArrayList();
        thisone.add(data);
        map.put(data.getPublishedItemId(), thisone);
      }
      return map;
    } catch (Exception e) {
      e.printStackTrace();
      return new HashMap();
    }
  }

  /**
   * This returns a hashmap of all the latest item entries, keyed by
   * item id for easy retrieval.
   * return (Long publishedItemId, ArrayList itemGradingData)
   */
  public HashMap getLastItemGradingData(final Long publishedId, final String agentId) {
    try {
      // Object[] objects = new Object[2];
      // objects[0] = publishedId;
      // objects[1] = agentId;
      // Type[] types = new Type[2];
      // types[0] = Hibernate.LONG;
      // types[1] = Hibernate.STRING;

      final HibernateCallback hcb = new HibernateCallback(){
        public Object doInHibernate(Session session) throws HibernateException, SQLException {
          Query q = session.createQuery("from AssessmentGradingData a where a.publishedAssessmentId=? and a.agentId=? order by a.submittedDate DESC");
          q.setLong(0, publishedId.longValue());
          q.setString(1, agentId);
          return q.list();
        };
      };
      ArrayList scores = (ArrayList) getHibernateTemplate().executeFind(hcb);

      // ArrayList scores = (ArrayList) getHibernateTemplate().find("from AssessmentGradingData a where a.publishedAssessmentId=? and a.agentId=? order by submittedDate DESC", objects, types);

      HashMap map = new HashMap();
      if (scores.isEmpty())
        return new HashMap();
      AssessmentGradingData gdata = (AssessmentGradingData) scores.toArray()[0];
      // initialize itemGradingSet
      gdata.setItemGradingSet(getItemGradingSet(gdata.getAssessmentGradingId()));
      if (gdata.getForGrade().booleanValue())
        return new HashMap();
      Iterator iter = gdata.getItemGradingSet().iterator();
      while (iter.hasNext()) {
        ItemGradingData data = (ItemGradingData) iter.next();
        ArrayList thisone = (ArrayList) map.get(data.getPublishedItemId());
        if (thisone == null)
          thisone = new ArrayList();
        thisone.add(data);
        map.put(data.getPublishedItemId(), thisone);
      }
      return map;
    } catch (Exception e) {
      e.printStackTrace();
      return new HashMap();
    }
  }

  /**
   * This returns a hashmap of all the submitted items, keyed by
   * item id for easy retrieval.
   */
  public HashMap getStudentGradingData(String assessmentGradingId) {
    try {
      HashMap map = new HashMap();
      AssessmentGradingData gdata = load(new Long(assessmentGradingId));
      gdata.setItemGradingSet(getItemGradingSet(gdata.getAssessmentGradingId()));
      log.debug("****#6, gdata="+gdata);
      log.debug("****#7, item size="+gdata.getItemGradingSet().size());
      Iterator iter = gdata.getItemGradingSet().iterator();
      while (iter.hasNext()) {
        ItemGradingData data = (ItemGradingData) iter.next();
        ArrayList thisone = (ArrayList) map.get(data.getPublishedItemId());
        if (thisone == null)
          thisone = new ArrayList();
        thisone.add(data);
        map.put(data.getPublishedItemId(), thisone);
      }
      return map;
    } catch (Exception e) {
      e.printStackTrace();
      return new HashMap();
    }
  }

  public HashMap getSubmitData(final Long publishedId, final String agentId) {
    try {
      // Object[] objects = new Object[3];
      // objects[0] = publishedId;
      // objects[1] = agentId;
      // objects[2] = new Boolean(true);
      // Type[] types = new Type[3];
      // types[0] = Hibernate.LONG;
      // types[1] = Hibernate.STRING;
      // types[2] = Hibernate.BOOLEAN;

      final HibernateCallback hcb = new HibernateCallback(){
        public Object doInHibernate(Session session) throws HibernateException, SQLException {
          Query q = session.createQuery("from AssessmentGradingData a where a.publishedAssessmentId=? and a.agentId=? and a.forGrade=? order by a.submittedDate DESC");
          q.setLong(0, publishedId.longValue());
          q.setString(1, agentId);
          q.setBoolean(2, true);
          return q.list();
        };
      };
      ArrayList scores = (ArrayList) getHibernateTemplate().executeFind(hcb);

      // ArrayList scores = (ArrayList) getHibernateTemplate().find("from AssessmentGradingData a where a.publishedAssessmentId=? and a.agentId=? and a.forGrade=? order by submittedDate DESC", objects, types);

      HashMap map = new HashMap();
      if (scores.isEmpty())
        return new HashMap();
      AssessmentGradingData gdata = (AssessmentGradingData) scores.toArray()[0];
      gdata.setItemGradingSet(getItemGradingSet(gdata.getAssessmentGradingId()));
      Iterator iter = gdata.getItemGradingSet().iterator();
      while (iter.hasNext()) {
        ItemGradingData data = (ItemGradingData) iter.next();
        ArrayList thisone = (ArrayList) map.get(data.getPublishedItemId());
        if (thisone == null)
          thisone = new ArrayList();
        thisone.add(data);
        map.put(data.getPublishedItemId(), thisone);
      }
      return map;
    } catch (Exception e) {
      e.printStackTrace();
      return new HashMap();
    }
  }

  public Long add(AssessmentGradingData a) {
    int retryCount = PersistenceService.getInstance().getRetryCount().intValue();
    while (retryCount > 0){
      try {
        getHibernateTemplate().save(a);
        retryCount = 0;
      }
      catch (Exception e) {
        log.warn("problem adding assessmentGrading: "+e.getMessage());
        retryCount = PersistenceService.getInstance().retryDeadlock(e, retryCount);
      }
    }
    return a.getAssessmentGradingId();
  }

  public int getSubmissionSizeOfPublishedAssessment(Long publishedAssessmentId){
    List size = getHibernateTemplate().find(
      "select count(a) from AssessmentGradingData a where a.forGrade=1 and a.publishedAssessmentId=?"+ publishedAssessmentId);
    Iterator iter = size.iterator();
    if (iter.hasNext()){
      int i = ((Integer)iter.next()).intValue();
      return i;
    }
    else{
      return 0;
    }
  }

  public HashMap getSubmissionSizeOfAllPublishedAssessments(){
    HashMap h = new HashMap();
    List list = getHibernateTemplate().find(
      "select new PublishedAssessmentData(a.publishedAssessmentId, count(a)) from AssessmentGradingData a where a.forGrade=1 group by a.publishedAssessmentId");
    Iterator iter = list.iterator();
    while (iter.hasNext()){
      PublishedAssessmentData o = (PublishedAssessmentData)iter.next();
      h.put(o.getPublishedAssessmentId(), new Integer(o.getSubmissionSize()));
    }
    return h;
  }

  public Long saveMedia(byte[] media, String mimeType){
    log.debug("****"+AgentFacade.getAgentString()+"saving media...size="+media.length+" "+(new Date()));
    MediaData mediaData = new MediaData(media, mimeType);
    int retryCount = PersistenceService.getInstance().getRetryCount().intValue();
    while (retryCount > 0){
      try {
        getHibernateTemplate().save(mediaData);
        retryCount = 0;
      }
      catch (Exception e) {
        log.warn("problem saving media with mimeType: "+e.getMessage());
        retryCount = PersistenceService.getInstance().retryDeadlock(e, retryCount);
      }
    }
    log.debug("****"+AgentFacade.getAgentString()+"saved media."+(new Date()));
    return mediaData.getMediaId();
  }

  public Long saveMedia(MediaData mediaData){
    log.debug("****"+mediaData.getFilename()+" saving media...size="+mediaData.getFileSize()+" "+(new Date()));
    int retryCount = PersistenceService.getInstance().getRetryCount().intValue();
    while (retryCount > 0){
      try {
        getHibernateTemplate().save(mediaData);
        retryCount = 0;
      }
      catch (Exception e) {
        log.warn("problem saving media: "+e.getMessage());
        retryCount = PersistenceService.getInstance().retryDeadlock(e, retryCount);
      }
    }
    log.debug("****"+mediaData.getFilename()+" saved media."+(new Date()));
    return mediaData.getMediaId();
  }

  public void removeMediaById(Long mediaId){
    String mediaLocation = null;
    Session session = null;
    try{
      session = getSessionFactory().openSession();
      Connection conn = session.connection();
      log.debug("****Connection="+conn);
      String query0="select LOCATION from SAM_MEDIA_T where MEDIAID=?";
      PreparedStatement statement0 = conn.prepareStatement(query0);
      statement0.setLong(1, mediaId.longValue());
      ResultSet rs =statement0.executeQuery();
      if (rs.next()){
        mediaLocation = rs.getString("LOCATION");
      }
      log.debug("****mediaLocation="+mediaLocation);
      String query="delete from SAM_MEDIA_T where MEDIAID=?";
      PreparedStatement statement = conn.prepareStatement(query);
      statement.setLong(1, mediaId.longValue());
      statement.executeUpdate();
    }
    catch(Exception e){
      log.warn(e.getMessage());
    }
    finally{
      try{
        if (session !=null) session.close();
      }
      catch(Exception ex){
        log.warn(ex.getMessage());
      }
    }
    try{
      if (mediaLocation != null){
        File mediaFile = new File(mediaLocation);
        mediaFile.delete();
      }
    }
    catch (Exception e) {
      log.warn("problem removing file="+e.getMessage());
    }
  }

  public MediaData getMedia(Long mediaId){
    MediaData mediaData = (MediaData) getHibernateTemplate().load(MediaData.class, mediaId);
    if (mediaData != null){
      String mediaLocation = mediaData.getLocation();
      if (mediaLocation == null || (mediaLocation.trim()).equals("")){
        mediaData.setMedia(getMediaStream(mediaId));
      }
    }
    return mediaData;
  }

  public ArrayList getMediaArray(final Long itemGradingId){
    log.debug("*** itemGradingId ="+itemGradingId);
    ArrayList a = new ArrayList();
    final HibernateCallback hcb = new HibernateCallback(){
      public Object doInHibernate(Session session) throws HibernateException, SQLException {
        Query q = session.createQuery("from MediaData m where m.itemGradingData.itemGradingId=?");
        q.setLong(0, itemGradingId.longValue());
        return q.list();
      };
    };
    List list = getHibernateTemplate().executeFind(hcb);

    for (int i=0;i<list.size();i++){
      a.add((MediaData)list.get(i));
    }
    log.debug("*** no. of media ="+a.size());
    return a;
  }

  public ArrayList getMediaArray(ItemGradingData item){
    ArrayList a = new ArrayList();
    List list = getHibernateTemplate().find(
      "from MediaData m where m.itemGradingData=?", item );
    for (int i=0;i<list.size();i++){
      a.add((MediaData)list.get(i));
    }
    log.debug("*** no. of media ="+a.size());
    return a;
  }

  public List getMediaArray(Long publishedId, final Long publishedItemId, String which) {
    try {
      HashMap itemScores = (HashMap) getItemScores(publishedId, publishedItemId, which);
      final List list = (List) itemScores.get(publishedItemId);
      log.debug("list size list.size() = " + list.size());

      HibernateCallback hcb = new HibernateCallback() {
        public Object doInHibernate(Session session) throws HibernateException, SQLException {
          Criteria criteria = session.createCriteria(MediaData.class);
          Disjunction disjunction = Expression.disjunction();

          /** make list from AssessmentGradingData ids */
          List itemGradingIdList = new ArrayList();
          for (int i = 0; i < list.size() ; i++) {
            ItemGradingIfc itemGradingData = (ItemGradingIfc) list.get(i);
            itemGradingIdList.add(itemGradingData.getItemGradingId());
          }

          /** create or disjunctive expression for (in clauses) */
          List tempList = new ArrayList();
          for (int i = 0; i < itemGradingIdList.size(); i += 50){
            if (i + 50 > itemGradingIdList.size()){
              tempList = itemGradingIdList.subList(i, itemGradingIdList.size());
              disjunction.add(Expression.in("itemGradingData.itemGradingId", tempList));
            }
            else{
              tempList = itemGradingIdList.subList(i, i + 50);
              disjunction.add(Expression.in("itemGradingData.itemGradingId", tempList));
            }
          }
          criteria.add(disjunction);
          return criteria.setMaxResults(10000).list();
        }
      };
      return (List) getHibernateTemplate().execute(hcb);
    } catch (Exception e) {
      e.printStackTrace();
      return new ArrayList();
    }
  }

  public ItemGradingData getLastItemGradingDataByAgent(
      final Long publishedItemId, final String agentId) {
    final HibernateCallback hcb = new HibernateCallback(){
      public Object doInHibernate(Session session) throws HibernateException, SQLException {
        Query q = session.createQuery("from ItemGradingData i where i.publishedItemId=? and i.agentId=?");
        q.setLong(0, publishedItemId.longValue());
        q.setString(1, agentId);
        return q.list();
      };
    };
    List itemGradings = getHibernateTemplate().executeFind(hcb);

    // List itemGradings = getHibernateTemplate().find(
    // "from ItemGradingData i where i.publishedItemId=? and i.agentId=?",
    // new Object[] { publishedItemId, agentId },
    // new org.hibernate.type.Type[] { Hibernate.LONG, Hibernate.STRING });

    if (itemGradings.size() == 0)
      return null;
    return (ItemGradingData) itemGradings.get(0);
  }

  public ItemGradingData getItemGradingData(
      final Long assessmentGradingId, final Long publishedItemId) {
    log.debug("****assessmentGradingId="+assessmentGradingId);
    log.debug("****publishedItemId="+publishedItemId);
    final HibernateCallback hcb = new HibernateCallback(){
      public Object doInHibernate(Session session) throws HibernateException, SQLException {
        Query q = session.createQuery(
          "from ItemGradingData i where i.assessmentGradingId = ? and i.publishedItemId=?");
        q.setLong(0, assessmentGradingId.longValue());
        q.setLong(1, publishedItemId.longValue());
        return q.list();
      };
    };
    List itemGradings = getHibernateTemplate().executeFind(hcb);

    // List itemGradings = getHibernateTemplate().find(
    // "from ItemGradingData i where i.assessmentGradingId = ? and i.publishedItemId=?",
    // new Object[] { assessmentGradingId, publishedItemId },
    // new org.hibernate.type.Type[] { Hibernate.LONG, Hibernate.LONG });

    if (itemGradings.size() == 0)
      return null;
    return (ItemGradingData) itemGradings.get(0);
  }

  public AssessmentGradingData load(Long id) {
    return (AssessmentGradingData) getHibernateTemplate().load(AssessmentGradingData.class, id);
  }

  public ItemGradingData getItemGrading(Long id) {
    return (ItemGradingData) getHibernateTemplate().load(ItemGradingData.class, id);
  }

  public AssessmentGradingData getLastSavedAssessmentGradingByAgentId(final Long publishedAssessmentId, final String agentIdString) {
    AssessmentGradingData ag = null;

    final HibernateCallback hcb = new HibernateCallback(){
      public Object doInHibernate(Session session) throws HibernateException, SQLException {
        Query q = session.createQuery(
          "from AssessmentGradingData a where a.publishedAssessmentId=? and a.agentId=? and a.forGrade=? order by a.submittedDate desc");
        q.setLong(0, publishedAssessmentId.longValue());
        q.setString(1, agentIdString);
        q.setBoolean(2, false);
        return q.list();
      };
    };
    List assessmentGradings = getHibernateTemplate().executeFind(hcb);

    // List assessmentGradings = getHibernateTemplate().find(
    // "from AssessmentGradingData a where a.publishedAssessmentId=? and a.agentId=? and a.forGrade=? order by a.submittedDate desc",
    // new Object[] { publishedAssessmentId, agentIdString, Boolean.FALSE },
    // new org.hibernate.type.Type[] { Hibernate.LONG, Hibernate.STRING, Hibernate.BOOLEAN });

    if (assessmentGradings.size() != 0){
      ag = (AssessmentGradingData) assessmentGradings.get(0);
      ag.setItemGradingSet(getItemGradingSet(ag.getAssessmentGradingId()));
    }
    return ag;
  }

  public AssessmentGradingData getLastAssessmentGradingByAgentId(final Long publishedAssessmentId, final String agentIdString) {
    AssessmentGradingData ag = null;

    final HibernateCallback hcb = new HibernateCallback(){
      public Object doInHibernate(Session session) throws HibernateException, SQLException {
        Query q = session.createQuery(
          "from AssessmentGradingData a where a.publishedAssessmentId=? and a.agentId=? order by a.submittedDate desc");
        q.setLong(0, publishedAssessmentId.longValue());
        q.setString(1, agentIdString);
        return q.list();
      };
    };
    List assessmentGradings = getHibernateTemplate().executeFind(hcb);

    // List assessmentGradings = getHibernateTemplate().find(
    // "from AssessmentGradingData a where a.publishedAssessmentId=? and a.agentId=? order by a.submittedDate desc",
    // new Object[] { publishedAssessmentId, agentIdString },
    // new org.hibernate.type.Type[] { Hibernate.LONG, Hibernate.STRING });

    if (assessmentGradings.size() != 0){
      ag = (AssessmentGradingData) assessmentGradings.get(0);
      ag.setItemGradingSet(getItemGradingSet(ag.getAssessmentGradingId()));
    }
    return ag;
  }

  public void saveItemGrading(ItemGradingIfc item) {
    int retryCount = PersistenceService.getInstance().getRetryCount().intValue();
    while (retryCount > 0){
      try {
        getHibernateTemplate().saveOrUpdate((ItemGradingData)item);
        retryCount = 0;
      }
      catch (Exception e) {
        log.warn("problem saving itemGrading: "+e.getMessage());
        retryCount = PersistenceService.getInstance().retryDeadlock(e, retryCount);
      }
    }
  }

  public void saveOrUpdateAssessmentGrading(AssessmentGradingIfc assessment) {
    int retryCount = PersistenceService.getInstance().getRetryCount().intValue();
    while (retryCount > 0){
      try {
        /* for testing the catch block - daisyf
        if (retryCount >2)
          throw new Exception("uncategorized SQLException for SQL []; SQL state [61000]; error code [60]; ORA-00060: deadlock detected while waiting for resource");
        */
        getHibernateTemplate().saveOrUpdate((AssessmentGradingData)assessment);
        retryCount = 0;
      }
      catch (Exception e) {
        log.warn("problem inserting/updating assessmentGrading: "+e.getMessage());
        retryCount = PersistenceService.getInstance().retryDeadlock(e, retryCount);
      }
    }
  }

  private byte[] getMediaStream(Long mediaId){
    byte[] b = new byte[4000];
    Session session = null;
    Connection conn = null;
    InputStream in = null;
    try{
      session = getSessionFactory().openSession();
      conn = session.connection();
      log.debug("****Connection="+conn);
      String query="select MEDIA from SAM_MEDIA_T where MEDIAID=?";
      PreparedStatement statement = conn.prepareStatement(query);
      statement.setLong(1, mediaId.longValue());
      ResultSet rs = statement.executeQuery();
      if (rs.next()){
        java.lang.Object o = rs.getObject("MEDIA");
        if (o!=null){
          in = rs.getBinaryStream("MEDIA");
          in.mark(0);
          int ch;
          int len=0;
          while ((ch=in.read())!=-1){
            len++;
          }
          b = new byte[len];
          in.reset();
          in.read(b,0,len);
        }
      }
    }
    catch(Exception e){
      log.warn(e.getMessage());
    }
    finally{
      try{
        if (session !=null) session.close();
        if (in !=null) in.close();
        if (conn !=null) conn.close();
      }
      catch(Exception ex){
        log.warn(ex.getMessage());
      }
    }
    return b;
  }

  public List getAssessmentGradingIds(final Long publishedItemId){
    final HibernateCallback hcb = new HibernateCallback(){
      public Object doInHibernate(Session session) throws HibernateException, SQLException {
        Query q = session.createQuery("select g.assessmentGradingId from "+
                                      " ItemGradingData g where g.publishedItemId=?");
        q.setLong(0, publishedItemId.longValue());
        return q.list();
      };
    };
    return getHibernateTemplate().executeFind(hcb);

    // return getHibernateTemplate().find(
    // "select g.assessmentGradingId from "+
    // " ItemGradingData g where g.publishedItemId=?",
    // new Object[] { publishedItemId },
    // new org.hibernate.type.Type[] { Hibernate.LONG });
  }

  public AssessmentGradingIfc getHighestAssessmentGrading(
      final Long publishedAssessmentId, final String agentId) {
    AssessmentGradingData ag = null;
    final String query ="from AssessmentGradingData a "+
                        " where a.publishedAssessmentId=? and "+
                        " a.agentId=? order by a.finalScore desc";

    final HibernateCallback hcb = new HibernateCallback(){
      public Object doInHibernate(Session session) throws HibernateException, SQLException {
        Query q = session.createQuery(query);
        q.setLong(0, publishedAssessmentId.longValue());
        q.setString(1, agentId);
        return q.list();
      };
    };
    List assessmentGradings = getHibernateTemplate().executeFind(hcb);

    // List assessmentGradings = getHibernateTemplate().find(query,
    // new Object[] { publishedAssessmentId, agentId },
    // new org.hibernate.type.Type[] { Hibernate.LONG, Hibernate.STRING });

    if (assessmentGradings.size() != 0){
      ag = (AssessmentGradingData) assessmentGradings.get(0);
      ag.setItemGradingSet(getItemGradingSet(ag.getAssessmentGradingId()));
    }
    return ag;
  }

  public List getLastAssessmentGradingList(final Long publishedAssessmentId){
    final String query = "from AssessmentGradingData a where a.publishedAssessmentId=? order by a.agentId asc, a.submittedDate desc";

    final HibernateCallback hcb = new HibernateCallback(){
      public Object doInHibernate(Session session) throws HibernateException, SQLException {
        Query q = session.createQuery(query);
        q.setLong(0, publishedAssessmentId.longValue());
        return q.list();
      };
    };
    List assessmentGradings = getHibernateTemplate().executeFind(hcb);

    // List assessmentGradings = getHibernateTemplate().find(query,
    // new Object[] { publishedAssessmentId },
    // new org.hibernate.type.Type[] { Hibernate.LONG });

    ArrayList l = new ArrayList();
    String currentAgent="";
    for (int i=0; i<assessmentGradings.size(); i++){
      AssessmentGradingData g = (AssessmentGradingData)assessmentGradings.get(i);
      if (!currentAgent.equals(g.getAgentId())){
        l.add(g);
        currentAgent = g.getAgentId();
      }
    }
    return l;
  }

  public List getLastSubmittedAssessmentGradingList(final Long publishedAssessmentId){
    final String query = "from AssessmentGradingData a where a.publishedAssessmentId=? and a.forGrade=? order by a.agentId asc, a.submittedDate desc";

    final HibernateCallback hcb = new HibernateCallback(){
      public Object doInHibernate(Session session) throws HibernateException, SQLException {
        Query q = session.createQuery(query);
        q.setLong(0, publishedAssessmentId.longValue());
        q.setBoolean(1, true);
        return q.list();
      };
    };
    List assessmentGradings = getHibernateTemplate().executeFind(hcb);

    ArrayList l = new ArrayList();
    String currentAgent="";
    for (int i=0; i<assessmentGradings.size(); i++){
      AssessmentGradingData g = (AssessmentGradingData)assessmentGradings.get(i);
      if (!currentAgent.equals(g.getAgentId())){
        l.add(g);
        currentAgent = g.getAgentId();
      }
    }
    return l;
  }

  public List getHighestAssessmentGradingList(final Long publishedAssessmentId){
    final String query = "from AssessmentGradingData a where a.publishedAssessmentId=? order by a.agentId asc, a.finalScore desc";

    final HibernateCallback hcb = new HibernateCallback(){
      public Object doInHibernate(Session session) throws HibernateException, SQLException {
        Query q = session.createQuery(query);
        q.setLong(0, publishedAssessmentId.longValue());
        return q.list();
      };
    };
    List assessmentGradings = getHibernateTemplate().executeFind(hcb);

    // List assessmentGradings = getHibernateTemplate().find(query,
    // new Object[] { publishedAssessmentId },
    // new org.hibernate.type.Type[] { Hibernate.LONG });

    ArrayList l = new ArrayList();
    String currentAgent="";
    for (int i=0; i<assessmentGradings.size(); i++){
      AssessmentGradingData g = (AssessmentGradingData)assessmentGradings.get(i);
      if (!currentAgent.equals(g.getAgentId())){
        l.add(g);
        currentAgent = g.getAgentId();
      }
    }
    return l;
  }

  // build a Hashmap (Long publishedItemId, ArrayList assessmentGradingIds)
  // containing the item submission of the last AssessmentGrading
  // (regardless of users who submitted it) of a given published assessment
  public HashMap getLastAssessmentGradingByPublishedItem(final Long publishedAssessmentId){
    HashMap h = new HashMap();
    final String query = "select new AssessmentGradingData("+
                         " a.assessmentGradingId, p.itemId, "+
                         " a.agentId, a.finalScore, a.submittedDate) "+
                         " from ItemGradingData i, AssessmentGradingData a,"+
                         " PublishedItemData p where "+
                         " i.assessmentGradingId = a.assessmentGradingId and i.publishedItemId = p.itemId and "+
                         " a.publishedAssessmentId=? " +
                         " order by a.agentId asc, a.submittedDate desc";

    final HibernateCallback hcb = new HibernateCallback(){
      public Object doInHibernate(Session session) throws HibernateException, SQLException {
        Query q = session.createQuery(query);
        q.setLong(0, publishedAssessmentId.longValue());
        return q.list();
      };
    };
    List assessmentGradings = getHibernateTemplate().executeFind(hcb);

    // List assessmentGradings = getHibernateTemplate().find(query,
    // new Object[] { publishedAssessmentId },
    // new org.hibernate.type.Type[] { Hibernate.LONG });

    // ArrayList l = new ArrayList();
    String currentAgent="";
    Date submittedDate = null;
    for (int i=0; i<assessmentGradings.size(); i++){
      AssessmentGradingData g = (AssessmentGradingData)assessmentGradings.get(i);
      Long itemId = g.getPublishedItemId();
      Long gradingId = g.getAssessmentGradingId();
      log.debug("**** itemId="+itemId+", gradingId="+gradingId+", agentId="+g.getAgentId()+", score="+g.getFinalScore());
      if ( i==0 ){
        currentAgent = g.getAgentId();
        submittedDate = g.getSubmittedDate();
      }
      if (currentAgent.equals(g.getAgentId())
          && ((submittedDate==null && g.getSubmittedDate()==null)
              || (submittedDate!=null && submittedDate.equals(g.getSubmittedDate())))){
        Object o = h.get(itemId);
        if (o != null)
          ((ArrayList) o).add(gradingId);
        else{
          ArrayList gradingIds = new ArrayList();
          gradingIds.add(gradingId);
          h.put(itemId, gradingIds);
        }
      }
      if (!currentAgent.equals(g.getAgentId())){
        currentAgent = g.getAgentId();
        submittedDate = g.getSubmittedDate();
      }
    }
    return h;
  }

  // build a Hashmap (Long publishedItemId, ArrayList assessmentGradingIds)
  // containing the item submission of the highest AssessmentGrading
  // (regardless of users who submitted it) of a given published assessment
  public HashMap getHighestAssessmentGradingByPublishedItem(final Long publishedAssessmentId){
    HashMap h = new HashMap();
    final String query = "select new AssessmentGradingData("+
                         " a.assessmentGradingId, p.itemId, "+
                         " a.agentId, a.finalScore, a.submittedDate) "+
                         " from ItemGradingData i, AssessmentGradingData a, "+
                         " PublishedItemData p where "+
                         " i.assessmentGradingId = a.assessmentGradingId and i.publishedItemId = p.itemId and "+
                         " a.publishedAssessmentId=? " +
                         " order by a.agentId asc, a.finalScore desc";

    final HibernateCallback hcb = new HibernateCallback(){
      public Object doInHibernate(Session session) throws HibernateException, SQLException {
        Query q = session.createQuery(query);
        q.setLong(0, publishedAssessmentId.longValue());
        return q.list();
      };
    };
    List assessmentGradings = getHibernateTemplate().executeFind(hcb);

    // List assessmentGradings = getHibernateTemplate().find(query,
    // new Object[] { publishedAssessmentId },
    // new org.hibernate.type.Type[] { Hibernate.LONG });

    // ArrayList l = new ArrayList();
    String currentAgent="";
    Float finalScore = null;
    for (int i=0; i<assessmentGradings.size(); i++){
      AssessmentGradingData g = (AssessmentGradingData)assessmentGradings.get(i);
      Long itemId = g.getPublishedItemId();
      Long gradingId = g.getAssessmentGradingId();
      log.debug("**** itemId="+itemId+", gradingId="+gradingId+", agentId="+g.getAgentId()+", score="+g.getFinalScore());
      if ( i==0 ){
        currentAgent = g.getAgentId();
        finalScore = g.getFinalScore();
      }
      if (currentAgent.equals(g.getAgentId())
          && ((finalScore==null && g.getFinalScore()==null)
              || (finalScore!=null && finalScore.equals(g.getFinalScore())))){
        Object o = h.get(itemId);
        if (o != null)
          ((ArrayList) o).add(gradingId);
        else{
          ArrayList gradingIds = new ArrayList();
          gradingIds.add(gradingId);
          h.put(itemId, gradingIds);
        }
      }
      if (!currentAgent.equals(g.getAgentId())){
        currentAgent = g.getAgentId();
        finalScore = g.getFinalScore();
      }
    }
    return h;
  }

  public Set getItemGradingSet(final Long assessmentGradingId){
    final String query = "from ItemGradingData i where i.assessmentGradingId=?";

    final HibernateCallback hcb = new HibernateCallback(){
      public Object doInHibernate(Session session) throws HibernateException, SQLException {
        Query q = session.createQuery(query);
        q.setLong(0, assessmentGradingId.longValue());
        return q.list();
      };
    };
    List itemGradings = getHibernateTemplate().executeFind(hcb);

    // List itemGradings = getHibernateTemplate().find(query,
    // new Object[] { assessmentGradingId },
    // new org.hibernate.type.Type[] { Hibernate.LONG });

    HashSet s = new HashSet();
    for (int i=0; i<itemGradings.size();i++){
      s.add(itemGradings.get(i));
    }
    return s;
  }

  public HashMap getAssessmentGradingByItemGradingId(final Long publishedAssessmentId){
    List aList = getAllSubmissions(publishedAssessmentId.toString());
    HashMap aHash = new HashMap();
    for (int j=0; j<aList.size();j++){
      AssessmentGradingData a = (AssessmentGradingData)aList.get(j);
      aHash.put(a.getAssessmentGradingId(), a);
    }

    final String query = "select new ItemGradingData(i.itemGradingId, a.assessmentGradingId) "+
                         " from ItemGradingData i, AssessmentGradingData a "+
                         " where i.assessmentGradingId=a.assessmentGradingId "+
                         " and a.publishedAssessmentId=?";

    final HibernateCallback hcb = new HibernateCallback(){
      public Object doInHibernate(Session session) throws HibernateException, SQLException {
        Query q = session.createQuery(query);
        q.setLong(0, publishedAssessmentId.longValue());
        return q.list();
      };
    };
    List l = getHibernateTemplate().executeFind(hcb);

    // List l = getHibernateTemplate().find(query,
    // new Object[] { publishedAssessmentId },
    // new org.hibernate.type.Type[] { Hibernate.LONG });

    //System.out.println("****** assessmentGradinghash="+l.size());
    HashMap h = new HashMap();
    for (int i=0; i<l.size();i++){
      ItemGradingData o = (ItemGradingData)l.get(i);
      h.put(o.getItemGradingId(), (AssessmentGradingData)aHash.get(o.getAssessmentGradingId()));
    }
    return h;
  }

  public void deleteAll(Collection c){
    int retryCount = PersistenceService.getInstance().getRetryCount().intValue();
    while (retryCount > 0){
      try {
        getHibernateTemplate().deleteAll(c);
        retryCount = 0;
      }
      catch (Exception e) {
        log.warn("problem inserting assessmentGrading: "+e.getMessage());
        retryCount = PersistenceService.getInstance().retryDeadlock(e, retryCount);
      }
    }
  }

  public void saveOrUpdateAll(Collection c) {
    int retryCount = PersistenceService.getInstance().getRetryCount().intValue();
    while (retryCount > 0){
      try {
        getHibernateTemplate().saveOrUpdateAll(c);
        retryCount = 0;
      }
      catch (Exception e) {
        log.warn("problem inserting assessmentGrading: "+e.getMessage());
        retryCount = PersistenceService.getInstance().retryDeadlock(e, retryCount);
      }
    }
  }

  public PublishedAssessmentIfc getPublishedAssessmentByAssessmentGradingId(final Long assessmentGradingId){
    PublishedAssessmentIfc pub = null;
    final String query = "select p from PublishedAssessmentData p, AssessmentGradingData a "+
                         " where a.publishedAssessmentId=p.publishedAssessmentId and a.assessmentGradingId=?";

    final HibernateCallback hcb = new HibernateCallback(){
      public Object doInHibernate(Session session) throws HibernateException, SQLException {
        Query q = session.createQuery(query);
        q.setLong(0, assessmentGradingId.longValue());
        return q.list();
      };
    };
    List pubList = getHibernateTemplate().executeFind(hcb);

    // List pubList = getHibernateTemplate().find(query,
    // new Object[] { assessmentGradingId },
    // new org.hibernate.type.Type[] { Hibernate.LONG });

    if (pubList!=null && pubList.size()>0)
      pub = (PublishedAssessmentIfc) pubList.get(0);
    return pub;
  }

  public PublishedAssessmentIfc getPublishedAssessmentByPublishedItemId(final Long publishedItemId){
    PublishedAssessmentIfc pub = null;
    final String query = "select p from PublishedAssessmentData p, PublishedItemData i "+
                         " where p.publishedAssessmentId=i.section.assessment.publishedAssessmentId and i.itemId=?";

    final HibernateCallback hcb = new HibernateCallback(){
      public Object doInHibernate(Session session) throws HibernateException, SQLException {
        Query q = session.createQuery(query);
        q.setLong(0, publishedItemId.longValue());
        return q.list();
      };
    };
    List pubList = getHibernateTemplate().executeFind(hcb);
    if (pubList!=null && pubList.size()>0)
      pub = (PublishedAssessmentIfc) pubList.get(0);
    return pub;
  }

  public ArrayList getLastItemGradingDataPosition(final Long assessmentGradingId, final String agentId) {
    ArrayList position = new ArrayList();
    try {
      final HibernateCallback hcb = new HibernateCallback(){
        public Object doInHibernate(Session session) throws HibernateException, SQLException {
          Query q = session.createQuery("select s.sequence " +
            " from ItemGradingData i, PublishedItemData pi, PublishedSectionData s " +
            " where i.agentId = ? and i.assessmentGradingId = ?
" + " and pi.itemId = i.publishedItemId " + " and pi.section.id = s.id " + " group by i.publishedItemId, s.sequence, pi.sequence " + " order by s.sequence desc , pi.sequence desc"); q.setString(0, agentId); q.setLong(1, assessmentGradingId.longValue()); return q.list(); }; }; ArrayList list = (ArrayList) getHibernateTemplate().executeFind(hcb); if ( list.size() == 0) { position.add(new Integer(0)); position.add(new Integer(0)); } else { Integer sequence = (Integer) list.get(0); Integer nextSequence; int count = 1; for (int i = 1; i < list.size(); i++) { log.debug("i = " + i); nextSequence = (Integer) list.get(i); if (sequence.equals(nextSequence)) { log.debug("equal"); count++; } else { break; } } log.debug("sequence = " + sequence); log.debug("count = " + count); position.add(sequence); position.add(new Integer(count)); } return position; } catch (Exception e) { e.printStackTrace(); position.add(new Integer(0)); position.add(new Integer(0)); return position; } } public List getItemGradingIds(final Long assessmentGradingId){ final HibernateCallback hcb = new HibernateCallback(){ public Object doInHibernate(Session session) throws HibernateException, SQLException { Query q = session.createQuery("select i.publishedItemId from "+ " ItemGradingData i where i.assessmentGradingId=?"); q.setLong(0, assessmentGradingId.longValue()); return q.list(); }; }; return getHibernateTemplate().executeFind(hcb); } public HashSet getItemSet(final Long publishedAssessmentId, final Long sectionId) { HashSet itemSet = new HashSet(); final HibernateCallback hcb = new HibernateCallback(){ public Object doInHibernate(Session session) throws HibernateException, SQLException { Query q = session.createQuery( "select distinct p " + "from PublishedItemData p, AssessmentGradingData a, ItemGradingData i " + "where a.publishedAssessmentId=? and a.forGrade=? and p.section.id=? " + "and i.assessmentGradingId = a.assessmentGradingId " + "and p.itemId = i.publishedItemId "); q.setLong(0, publishedAssessmentId.longValue()); q.setBoolean(1, true); q.setLong(2, sectionId.longValue()); return q.list(); }; }; List assessmentGradings = getHibernateTemplate().executeFind(hcb); Iterator iter = assessmentGradings.iterator(); PublishedItemData publishedItemData; while(iter.hasNext()) { publishedItemData = (PublishedItemData) iter.next(); log.debug("itemId = " + publishedItemData.getItemId()); itemSet.add(publishedItemData); } return itemSet; } }
samigo/samigo-services/src/java/org/sakaiproject/tool/assessment/facade/AssessmentGradingFacadeQueries.java
/********************************************************************************** * $URL: https://source.sakaiproject.org/svn/sam/trunk/component/src/java/org/sakaiproject/tool/assessment/facade/AssessmentGradingFacadeQueries.java $ * $Id: AssessmentGradingFacadeQueries.java 9348 2006-05-13 06:14:57Z [email protected] $ *********************************************************************************** * * Copyright (c) 2004, 2005, 2006 The Sakai Foundation. * * Licensed under the Educational Community License, Version 1.0 (the"License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl1.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * **********************************************************************************/ package org.sakaiproject.tool.assessment.facade; import java.io.InputStream; import java.io.File; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import org.hibernate.Criteria; import org.hibernate.HibernateException; import org.hibernate.Query; import org.hibernate.Session; import org.hibernate.criterion.Criterion; import org.hibernate.criterion.Disjunction; import org.hibernate.criterion.Expression; import org.hibernate.criterion.Order; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.sakaiproject.tool.assessment.services.PersistenceService; import org.sakaiproject.tool.assessment.data.dao.assessment.PublishedAssessmentData; import org.sakaiproject.tool.assessment.data.dao.assessment.PublishedItemData; import org.sakaiproject.tool.assessment.data.dao.grading.AssessmentGradingData; import org.sakaiproject.tool.assessment.data.dao.grading.ItemGradingData; import org.sakaiproject.tool.assessment.data.dao.grading.MediaData; import org.sakaiproject.tool.assessment.data.ifc.assessment.EvaluationModelIfc; import org.sakaiproject.tool.assessment.data.ifc.assessment.PublishedAssessmentIfc; import org.sakaiproject.tool.assessment.data.ifc.grading.AssessmentGradingIfc; import org.sakaiproject.tool.assessment.data.ifc.grading.ItemGradingIfc; import org.springframework.orm.hibernate3.HibernateCallback; import org.springframework.orm.hibernate3.support.HibernateDaoSupport; public class AssessmentGradingFacadeQueries extends HibernateDaoSupport implements AssessmentGradingFacadeQueriesAPI{ private static Log log = LogFactory.getLog(AssessmentGradingFacadeQueries.class); public AssessmentGradingFacadeQueries () { } public List getTotalScores(final String publishedId, String which) { try { // sectionSet of publishedAssessment is defined as lazy loading in // Hibernate OR map, so we need to initialize them. Unfortunately our // spring-1.0.2.jar does not support HibernateTemplate.intialize(Object) // so we need to do it ourselves PublishedAssessmentData assessment =PersistenceService.getInstance().getPublishedAssessmentFacadeQueries(). 
loadPublishedAssessment(new Long(publishedId)); HashSet sectionSet = PersistenceService.getInstance(). getPublishedAssessmentFacadeQueries().getSectionSetForAssessment(assessment); assessment.setSectionSet(sectionSet); // proceed to get totalScores // Object[] objects = new Object[2]; // objects[0] = new Long(publishedId); // objects[1] = new Boolean(true); // Type[] types = new Type[2]; // types[0] = Hibernate.LONG; // types[1] = Hibernate.BOOLEAN; final HibernateCallback hcb = new HibernateCallback(){ public Object doInHibernate(Session session) throws HibernateException, SQLException { Query q = session.createQuery( "from AssessmentGradingData a where a.publishedAssessmentId=? and a.forGrade=? order by a.agentId ASC, a.finalScore DESC, a.submittedDate DESC"); q.setLong(0, Long.parseLong(publishedId)); q.setBoolean(1, true); return q.list(); }; }; List list = getHibernateTemplate().executeFind(hcb); // List list = getHibernateTemplate().find( // "from AssessmentGradingData a where a.publishedAssessmentId=? and a.forGrade=? order by agentId ASC, finalScore DESC, submittedDate DESC", // objects, types); // last submission if (which.equals(EvaluationModelIfc.LAST_SCORE.toString())) { final HibernateCallback hcb2 = new HibernateCallback(){ public Object doInHibernate(Session session) throws HibernateException, SQLException { Query q = session.createQuery( "from AssessmentGradingData a where a.publishedAssessmentId=? and a.forGrade=? order by a.agentId ASC, a.submittedDate DESC"); q.setLong(0, Long.parseLong(publishedId)); q.setBoolean(1, true); return q.list(); }; }; list = getHibernateTemplate().executeFind(hcb2); // list = getHibernateTemplate().find( // "from AssessmentGradingData a where a.publishedAssessmentId=? and a.forGrade=? order by agentId ASC, submittedDate DESC", // objects, types); } if (which.equals(EvaluationModelIfc.ALL_SCORE.toString())) { return list; } else { // only take highest or latest Iterator items = list.iterator(); ArrayList newlist = new ArrayList(); String agentid = null; AssessmentGradingData data = (AssessmentGradingData) items.next(); // daisyf add the following line on 12/15/04 data.setPublishedAssessmentId(assessment.getPublishedAssessmentId()); agentid = data.getAgentId(); newlist.add(data); while (items.hasNext()) { while (items.hasNext()) { data = (AssessmentGradingData) items.next(); if (!data.getAgentId().equals(agentid)) { agentid = data.getAgentId(); newlist.add(data); break; } } } return newlist; } } catch (Exception e) { e.printStackTrace(); return new ArrayList(); } } public List getAllSubmissions(final String publishedId) { // Object[] objects = new Object[1]; // objects[0] = new Long(publishedId); // Type[] types = new Type[1]; // types[0] = Hibernate.LONG; final HibernateCallback hcb = new HibernateCallback(){ public Object doInHibernate(Session session) throws HibernateException, SQLException { Query q = session.createQuery( "from AssessmentGradingData a where a.publishedAssessmentId=? and a.forGrade=1"); q.setLong(0, Long.parseLong(publishedId)); return q.list(); }; }; return getHibernateTemplate().executeFind(hcb); // List list = getHibernateTemplate().find("from AssessmentGradingData a where a.publishedAssessmentId=? 
and a.forGrade=1", objects, types); // return list; } public HashMap getItemScores(Long publishedId, final Long itemId, String which) { try { ArrayList scores = (ArrayList) getTotalScores(publishedId.toString(), which); HashMap map = new HashMap(); //List list = new ArrayList(); // make final for callback to access final Iterator iter = scores.iterator(); HibernateCallback hcb = new HibernateCallback() { public Object doInHibernate(Session session) throws HibernateException, SQLException { Criteria criteria = session.createCriteria(ItemGradingData.class); Disjunction disjunction = Expression.disjunction(); /** make list from AssessmentGradingData ids */ List gradingIdList = new ArrayList(); while (iter.hasNext()){ AssessmentGradingData data = (AssessmentGradingData) iter.next(); gradingIdList.add(data.getAssessmentGradingId()); } /** create or disjunctive expression for (in clauses) */ List tempList = new ArrayList(); for (int i = 0; i < gradingIdList.size(); i += 50){ if (i + 50 > gradingIdList.size()){ tempList = gradingIdList.subList(i, gradingIdList.size()); disjunction.add(Expression.in("assessmentGradingId", tempList)); } else{ tempList = gradingIdList.subList(i, i + 50); disjunction.add(Expression.in("assessmentGradingId", tempList)); } } if (itemId.equals(new Long(0))) { criteria.add(disjunction); //criteria.add(Expression.isNotNull("submittedDate")); } else { /** create logical and between the pubCriterion and the disjunction criterion */ //Criterion pubCriterion = Expression.eq("publishedItem.itemId", itemId); Criterion pubCriterion = Expression.eq("publishedItemId", itemId); criteria.add(Expression.and(pubCriterion, disjunction)); //criteria.add(Expression.isNotNull("submittedDate")); } criteria.addOrder(Order.asc("agentId")); criteria.addOrder(Order.desc("submittedDate")); //return criteria.list(); //large list cause out of memory error (java heap space) return criteria.setMaxResults(10000).list(); } }; List temp = (List) getHibernateTemplate().execute(hcb); Iterator iter2 = temp.iterator(); while (iter2.hasNext()) { ItemGradingData data = (ItemGradingData) iter2.next(); ArrayList thisone = (ArrayList) map.get(data.getPublishedItemId()); if (thisone == null) thisone = new ArrayList(); thisone.add(data); map.put(data.getPublishedItemId(), thisone); } return map; } catch (Exception e) { e.printStackTrace(); return new HashMap(); } } /** * This returns a hashmap of all the latest item entries, keyed by * item id for easy retrieval. * return (Long publishedItemId, ArrayList itemGradingData) */ public HashMap getLastItemGradingData(final Long publishedId, final String agentId) { try { // Object[] objects = new Object[2]; // objects[0] = publishedId; // objects[1] = agentId; // Type[] types = new Type[2]; // types[0] = Hibernate.LONG; // types[1] = Hibernate.STRING; final HibernateCallback hcb = new HibernateCallback(){ public Object doInHibernate(Session session) throws HibernateException, SQLException { Query q = session.createQuery("from AssessmentGradingData a where a.publishedAssessmentId=? and a.agentId=? order by a.submittedDate DESC"); q.setLong(0, publishedId.longValue()); q.setString(1, agentId); return q.list(); }; }; ArrayList scores = (ArrayList) getHibernateTemplate().executeFind(hcb); // ArrayList scores = (ArrayList) getHibernateTemplate().find("from AssessmentGradingData a where a.publishedAssessmentId=? and a.agentId=? 
order by submittedDate DESC", objects, types); HashMap map = new HashMap(); if (scores.isEmpty()) return new HashMap(); AssessmentGradingData gdata = (AssessmentGradingData) scores.toArray()[0]; // initialize itemGradingSet gdata.setItemGradingSet(getItemGradingSet(gdata.getAssessmentGradingId())); if (gdata.getForGrade().booleanValue()) return new HashMap(); Iterator iter = gdata.getItemGradingSet().iterator(); while (iter.hasNext()) { ItemGradingData data = (ItemGradingData) iter.next(); ArrayList thisone = (ArrayList) map.get(data.getPublishedItemId()); if (thisone == null) thisone = new ArrayList(); thisone.add(data); map.put(data.getPublishedItemId(), thisone); } return map; } catch (Exception e) { e.printStackTrace(); return new HashMap(); } } /** * This returns a hashmap of all the submitted items, keyed by * item id for easy retrieval. */ public HashMap getStudentGradingData(String assessmentGradingId) { try { HashMap map = new HashMap(); AssessmentGradingData gdata = load(new Long(assessmentGradingId)); gdata.setItemGradingSet(getItemGradingSet(gdata.getAssessmentGradingId())); log.debug("****#6, gdata="+gdata); log.debug("****#7, item size="+gdata.getItemGradingSet().size()); Iterator iter = gdata.getItemGradingSet().iterator(); while (iter.hasNext()) { ItemGradingData data = (ItemGradingData) iter.next(); ArrayList thisone = (ArrayList) map.get(data.getPublishedItemId()); if (thisone == null) thisone = new ArrayList(); thisone.add(data); map.put(data.getPublishedItemId(), thisone); } return map; } catch (Exception e) { e.printStackTrace(); return new HashMap(); } } public HashMap getSubmitData(final Long publishedId, final String agentId) { try { // Object[] objects = new Object[3]; // objects[0] = publishedId; // objects[1] = agentId; // objects[2] = new Boolean(true); // Type[] types = new Type[3]; // types[0] = Hibernate.LONG; // types[1] = Hibernate.STRING; // types[2] = Hibernate.BOOLEAN; final HibernateCallback hcb = new HibernateCallback(){ public Object doInHibernate(Session session) throws HibernateException, SQLException { Query q = session.createQuery("from AssessmentGradingData a where a.publishedAssessmentId=? and a.agentId=? and a.forGrade=? order by a.submittedDate DESC"); q.setLong(0, publishedId.longValue()); q.setString(1, agentId); q.setBoolean(2, true); return q.list(); }; }; ArrayList scores = (ArrayList) getHibernateTemplate().executeFind(hcb); // ArrayList scores = (ArrayList) getHibernateTemplate().find("from AssessmentGradingData a where a.publishedAssessmentId=? and a.agentId=? and a.forGrade=? 
order by submittedDate DESC", objects, types); HashMap map = new HashMap(); if (scores.isEmpty()) return new HashMap(); AssessmentGradingData gdata = (AssessmentGradingData) scores.toArray()[0]; gdata.setItemGradingSet(getItemGradingSet(gdata.getAssessmentGradingId())); Iterator iter = gdata.getItemGradingSet().iterator(); while (iter.hasNext()) { ItemGradingData data = (ItemGradingData) iter.next(); ArrayList thisone = (ArrayList) map.get(data.getPublishedItemId()); if (thisone == null) thisone = new ArrayList(); thisone.add(data); map.put(data.getPublishedItemId(), thisone); } return map; } catch (Exception e) { e.printStackTrace(); return new HashMap(); } } public Long add(AssessmentGradingData a) { int retryCount = PersistenceService.getInstance().getRetryCount().intValue(); while (retryCount > 0){ try { getHibernateTemplate().save(a); retryCount = 0; } catch (Exception e) { log.warn("problem adding assessmentGrading: "+e.getMessage()); retryCount = PersistenceService.getInstance().retryDeadlock(e, retryCount); } } return a.getAssessmentGradingId(); } public int getSubmissionSizeOfPublishedAssessment(Long publishedAssessmentId){ List size = getHibernateTemplate().find( "select count(a) from AssessmentGradingData a where a.forGrade=1 and a.publishedAssessmentId=?"+ publishedAssessmentId); Iterator iter = size.iterator(); if (iter.hasNext()){ int i = ((Integer)iter.next()).intValue(); return i; } else{ return 0; } } public HashMap getSubmissionSizeOfAllPublishedAssessments(){ HashMap h = new HashMap(); List list = getHibernateTemplate().find( "select new PublishedAssessmentData(a.publishedAssessmentId, count(a)) from AssessmentGradingData a where a.forGrade=1 group by a.publishedAssessmentId"); Iterator iter = list.iterator(); while (iter.hasNext()){ PublishedAssessmentData o = (PublishedAssessmentData)iter.next(); h.put(o.getPublishedAssessmentId(), new Integer(o.getSubmissionSize())); } return h; } public Long saveMedia(byte[] media, String mimeType){ log.debug("****"+AgentFacade.getAgentString()+"saving media...size="+media.length+" "+(new Date())); MediaData mediaData = new MediaData(media, mimeType); int retryCount = PersistenceService.getInstance().getRetryCount().intValue(); while (retryCount > 0){ try { getHibernateTemplate().save(mediaData); retryCount = 0; } catch (Exception e) { log.warn("problem saving media with mimeType: "+e.getMessage()); retryCount = PersistenceService.getInstance().retryDeadlock(e, retryCount); } } log.debug("****"+AgentFacade.getAgentString()+"saved media."+(new Date())); return mediaData.getMediaId(); } public Long saveMedia(MediaData mediaData){ log.debug("****"+mediaData.getFilename()+" saving media...size="+mediaData.getFileSize()+" "+(new Date())); int retryCount = PersistenceService.getInstance().getRetryCount().intValue(); while (retryCount > 0){ try { getHibernateTemplate().save(mediaData); retryCount = 0; } catch (Exception e) { log.warn("problem saving media: "+e.getMessage()); retryCount = PersistenceService.getInstance().retryDeadlock(e, retryCount); } } log.debug("****"+mediaData.getFilename()+" saved media."+(new Date())); return mediaData.getMediaId(); } public void removeMediaById(Long mediaId){ String mediaLocation = null; Session session = null; try{ session = getSessionFactory().openSession(); Connection conn = session.connection(); log.debug("****Connection="+conn); String query0="select LOCATION from SAM_MEDIA_T where MEDIAID=?"; PreparedStatement statement0 = conn.prepareStatement(query0); statement0.setLong(1, 
mediaId.longValue()); ResultSet rs =statement0.executeQuery(); if (rs.next()){ mediaLocation = rs.getString("LOCATION"); } log.debug("****mediaLocation="+mediaLocation); String query="delete from SAM_MEDIA_T where MEDIAID=?"; PreparedStatement statement = conn.prepareStatement(query); statement.setLong(1, mediaId.longValue()); statement.executeUpdate(); } catch(Exception e){ log.warn(e.getMessage()); } finally{ try{ if (session !=null) session.close(); } catch(Exception ex){ log.warn(ex.getMessage()); } } try{ if (mediaLocation != null){ File mediaFile = new File(mediaLocation); mediaFile.delete(); } } catch (Exception e) { log.warn("problem removing file="+e.getMessage()); } } public MediaData getMedia(Long mediaId){ MediaData mediaData = (MediaData) getHibernateTemplate().load(MediaData.class, mediaId); if (mediaData != null){ String mediaLocation = mediaData.getLocation(); if (mediaLocation == null || (mediaLocation.trim()).equals("")){ mediaData.setMedia(getMediaStream(mediaId)); } } return mediaData; } public ArrayList getMediaArray(final Long itemGradingId){ log.debug("*** itemGradingId ="+itemGradingId); ArrayList a = new ArrayList(); final HibernateCallback hcb = new HibernateCallback(){ public Object doInHibernate(Session session) throws HibernateException, SQLException { Query q = session.createQuery("from MediaData m where m.itemGradingData.itemGradingId=?"); q.setLong(0, itemGradingId.longValue()); return q.list(); }; }; List list = getHibernateTemplate().executeFind(hcb); for (int i=0;i<list.size();i++){ a.add((MediaData)list.get(i)); } log.debug("*** no. of media ="+a.size()); return a; } public ArrayList getMediaArray(ItemGradingData item){ ArrayList a = new ArrayList(); List list = getHibernateTemplate().find( "from MediaData m where m.itemGradingData=?", item ); for (int i=0;i<list.size();i++){ a.add((MediaData)list.get(i)); } log.debug("*** no. 
of media ="+a.size()); return a; } public List getMediaArray(Long publishedId, final Long publishedItemId, String which) { try { HashMap itemScores = (HashMap) getItemScores(publishedId, publishedItemId, which); final List list = (List) itemScores.get(publishedItemId); log.debug("list size list.size() = " + list.size()); HibernateCallback hcb = new HibernateCallback() { public Object doInHibernate(Session session) throws HibernateException, SQLException { Criteria criteria = session.createCriteria(MediaData.class); Disjunction disjunction = Expression.disjunction(); /** make list from AssessmentGradingData ids */ List itemGradingIdList = new ArrayList(); for (int i = 0; i < list.size() ; i++) { ItemGradingIfc itemGradingData = (ItemGradingIfc) list.get(i); itemGradingIdList.add(itemGradingData.getItemGradingId()); } /** create or disjunctive expression for (in clauses) */ List tempList = new ArrayList(); for (int i = 0; i < itemGradingIdList.size(); i += 50){ if (i + 50 > itemGradingIdList.size()){ tempList = itemGradingIdList.subList(i, itemGradingIdList.size()); disjunction.add(Expression.in("itemGradingData.itemGradingId", tempList)); } else{ tempList = itemGradingIdList.subList(i, i + 50); disjunction.add(Expression.in("itemGradingData.itemGradingId", tempList)); } } criteria.add(disjunction); return criteria.setMaxResults(10000).list(); } }; return (List) getHibernateTemplate().execute(hcb); } catch (Exception e) { e.printStackTrace(); return new ArrayList(); } } public ItemGradingData getLastItemGradingDataByAgent( final Long publishedItemId, final String agentId) { final HibernateCallback hcb = new HibernateCallback(){ public Object doInHibernate(Session session) throws HibernateException, SQLException { Query q = session.createQuery("from ItemGradingData i where i.publishedItemId=? and i.agentId=?"); q.setLong(0, publishedItemId.longValue()); q.setString(1, agentId); return q.list(); }; }; List itemGradings = getHibernateTemplate().executeFind(hcb); // List itemGradings = getHibernateTemplate().find( // "from ItemGradingData i where i.publishedItemId=? and i.agentId=?", // new Object[] { publishedItemId, agentId }, // new org.hibernate.type.Type[] { Hibernate.LONG, Hibernate.STRING }); if (itemGradings.size() == 0) return null; return (ItemGradingData) itemGradings.get(0); } public ItemGradingData getItemGradingData( final Long assessmentGradingId, final Long publishedItemId) { log.debug("****assessmentGradingId="+assessmentGradingId); log.debug("****publishedItemId="+publishedItemId); final HibernateCallback hcb = new HibernateCallback(){ public Object doInHibernate(Session session) throws HibernateException, SQLException { Query q = session.createQuery( "from ItemGradingData i where i.assessmentGradingId = ? and i.publishedItemId=?"); q.setLong(0, assessmentGradingId.longValue()); q.setLong(1, publishedItemId.longValue()); return q.list(); }; }; List itemGradings = getHibernateTemplate().executeFind(hcb); // List itemGradings = getHibernateTemplate().find( // "from ItemGradingData i where i.assessmentGradingId = ? 
and i.publishedItemId=?", // new Object[] { assessmentGradingId, publishedItemId }, // new org.hibernate.type.Type[] { Hibernate.LONG, Hibernate.LONG }); if (itemGradings.size() == 0) return null; return (ItemGradingData) itemGradings.get(0); } public AssessmentGradingData load(Long id) { return (AssessmentGradingData) getHibernateTemplate().load(AssessmentGradingData.class, id); } public ItemGradingData getItemGrading(Long id) { return (ItemGradingData) getHibernateTemplate().load(ItemGradingData.class, id); } public AssessmentGradingData getLastSavedAssessmentGradingByAgentId(final Long publishedAssessmentId, final String agentIdString) { AssessmentGradingData ag = null; final HibernateCallback hcb = new HibernateCallback(){ public Object doInHibernate(Session session) throws HibernateException, SQLException { Query q = session.createQuery( "from AssessmentGradingData a where a.publishedAssessmentId=? and a.agentId=? and a.forGrade=? order by a.submittedDate desc"); q.setLong(0, publishedAssessmentId.longValue()); q.setString(1, agentIdString); q.setBoolean(2, false); return q.list(); }; }; List assessmentGradings = getHibernateTemplate().executeFind(hcb); // List assessmentGradings = getHibernateTemplate().find( // "from AssessmentGradingData a where a.publishedAssessmentId=? and a.agentId=? and a.forGrade=? order by a.submittedDate desc", // new Object[] { publishedAssessmentId, agentIdString, Boolean.FALSE }, // new org.hibernate.type.Type[] { Hibernate.LONG, Hibernate.STRING, Hibernate.BOOLEAN }); if (assessmentGradings.size() != 0){ ag = (AssessmentGradingData) assessmentGradings.get(0); ag.setItemGradingSet(getItemGradingSet(ag.getAssessmentGradingId())); } return ag; } public AssessmentGradingData getLastAssessmentGradingByAgentId(final Long publishedAssessmentId, final String agentIdString) { AssessmentGradingData ag = null; final HibernateCallback hcb = new HibernateCallback(){ public Object doInHibernate(Session session) throws HibernateException, SQLException { Query q = session.createQuery( "from AssessmentGradingData a where a.publishedAssessmentId=? and a.agentId=? order by a.submittedDate desc"); q.setLong(0, publishedAssessmentId.longValue()); q.setString(1, agentIdString); return q.list(); }; }; List assessmentGradings = getHibernateTemplate().executeFind(hcb); // List assessmentGradings = getHibernateTemplate().find( // "from AssessmentGradingData a where a.publishedAssessmentId=? and a.agentId=? 
order by a.submittedDate desc", // new Object[] { publishedAssessmentId, agentIdString }, // new org.hibernate.type.Type[] { Hibernate.LONG, Hibernate.STRING }); if (assessmentGradings.size() != 0){ ag = (AssessmentGradingData) assessmentGradings.get(0); ag.setItemGradingSet(getItemGradingSet(ag.getAssessmentGradingId())); } return ag; } public void saveItemGrading(ItemGradingIfc item) { int retryCount = PersistenceService.getInstance().getRetryCount().intValue(); while (retryCount > 0){ try { getHibernateTemplate().saveOrUpdate((ItemGradingData)item); retryCount = 0; } catch (Exception e) { log.warn("problem saving itemGrading: "+e.getMessage()); retryCount = PersistenceService.getInstance().retryDeadlock(e, retryCount); } } } public void saveOrUpdateAssessmentGrading(AssessmentGradingIfc assessment) { int retryCount = PersistenceService.getInstance().getRetryCount().intValue(); while (retryCount > 0){ try { /* for testing the catch block - daisyf if (retryCount >2) throw new Exception("uncategorized SQLException for SQL []; SQL state [61000]; error code [60]; ORA-00060: deadlock detected while waiting for resource"); */ getHibernateTemplate().saveOrUpdate((AssessmentGradingData)assessment); retryCount = 0; } catch (Exception e) { log.warn("problem inserting/updating assessmentGrading: "+e.getMessage()); retryCount = PersistenceService.getInstance().retryDeadlock(e, retryCount); } } } private byte[] getMediaStream(Long mediaId){ byte[] b = new byte[4000]; Session session = null; InputStream in = null; try{ session = getSessionFactory().openSession(); Connection conn = session.connection(); log.debug("****Connection="+conn); String query="select MEDIA from SAM_MEDIA_T where MEDIAID=?"; PreparedStatement statement = conn.prepareStatement(query); statement.setLong(1, mediaId.longValue()); ResultSet rs = statement.executeQuery(); if (rs.next()){ java.lang.Object o = rs.getObject("MEDIA"); if (o!=null){ in = rs.getBinaryStream("MEDIA"); in.mark(0); int ch; int len=0; while ((ch=in.read())!=-1){ len++; } b = new byte[len]; in.reset(); in.read(b,0,len); } } } catch(Exception e){ log.warn(e.getMessage()); } finally{ try{ if (session !=null) session.close(); if (in !=null) in.close(); } catch(Exception ex){ log.warn(ex.getMessage()); } } return b; } public List getAssessmentGradingIds(final Long publishedItemId){ final HibernateCallback hcb = new HibernateCallback(){ public Object doInHibernate(Session session) throws HibernateException, SQLException { Query q = session.createQuery("select g.assessmentGradingId from "+ " ItemGradingData g where g.publishedItemId=?"); q.setLong(0, publishedItemId.longValue()); return q.list(); }; }; return getHibernateTemplate().executeFind(hcb); // return getHibernateTemplate().find( // "select g.assessmentGradingId from "+ // " ItemGradingData g where g.publishedItemId=?", // new Object[] { publishedItemId }, // new org.hibernate.type.Type[] { Hibernate.LONG }); } public AssessmentGradingIfc getHighestAssessmentGrading( final Long publishedAssessmentId, final String agentId) { AssessmentGradingData ag = null; final String query ="from AssessmentGradingData a "+ " where a.publishedAssessmentId=? and "+ " a.agentId=? 
order by a.finalScore desc"; final HibernateCallback hcb = new HibernateCallback(){ public Object doInHibernate(Session session) throws HibernateException, SQLException { Query q = session.createQuery(query); q.setLong(0, publishedAssessmentId.longValue()); q.setString(1, agentId); return q.list(); }; }; List assessmentGradings = getHibernateTemplate().executeFind(hcb); // List assessmentGradings = getHibernateTemplate().find(query, // new Object[] { publishedAssessmentId, agentId }, // new org.hibernate.type.Type[] { Hibernate.LONG, Hibernate.STRING }); if (assessmentGradings.size() != 0){ ag = (AssessmentGradingData) assessmentGradings.get(0); ag.setItemGradingSet(getItemGradingSet(ag.getAssessmentGradingId())); } return ag; } public List getLastAssessmentGradingList(final Long publishedAssessmentId){ final String query = "from AssessmentGradingData a where a.publishedAssessmentId=? order by a.agentId asc, a.submittedDate desc"; final HibernateCallback hcb = new HibernateCallback(){ public Object doInHibernate(Session session) throws HibernateException, SQLException { Query q = session.createQuery(query); q.setLong(0, publishedAssessmentId.longValue()); return q.list(); }; }; List assessmentGradings = getHibernateTemplate().executeFind(hcb); // List assessmentGradings = getHibernateTemplate().find(query, // new Object[] { publishedAssessmentId }, // new org.hibernate.type.Type[] { Hibernate.LONG }); ArrayList l = new ArrayList(); String currentAgent=""; for (int i=0; i<assessmentGradings.size(); i++){ AssessmentGradingData g = (AssessmentGradingData)assessmentGradings.get(i); if (!currentAgent.equals(g.getAgentId())){ l.add(g); currentAgent = g.getAgentId(); } } return l; } public List getLastSubmittedAssessmentGradingList(final Long publishedAssessmentId){ final String query = "from AssessmentGradingData a where a.publishedAssessmentId=? and a.forGrade=? order by a.agentId asc, a.submittedDate desc"; final HibernateCallback hcb = new HibernateCallback(){ public Object doInHibernate(Session session) throws HibernateException, SQLException { Query q = session.createQuery(query); q.setLong(0, publishedAssessmentId.longValue()); q.setBoolean(1, true); return q.list(); }; }; List assessmentGradings = getHibernateTemplate().executeFind(hcb); ArrayList l = new ArrayList(); String currentAgent=""; for (int i=0; i<assessmentGradings.size(); i++){ AssessmentGradingData g = (AssessmentGradingData)assessmentGradings.get(i); if (!currentAgent.equals(g.getAgentId())){ l.add(g); currentAgent = g.getAgentId(); } } return l; } public List getHighestAssessmentGradingList(final Long publishedAssessmentId){ final String query = "from AssessmentGradingData a where a.publishedAssessmentId=? 
order by a.agentId asc, a.finalScore desc"; final HibernateCallback hcb = new HibernateCallback(){ public Object doInHibernate(Session session) throws HibernateException, SQLException { Query q = session.createQuery(query); q.setLong(0, publishedAssessmentId.longValue()); return q.list(); }; }; List assessmentGradings = getHibernateTemplate().executeFind(hcb); // List assessmentGradings = getHibernateTemplate().find(query, // new Object[] { publishedAssessmentId }, // new org.hibernate.type.Type[] { Hibernate.LONG }); ArrayList l = new ArrayList(); String currentAgent=""; for (int i=0; i<assessmentGradings.size(); i++){ AssessmentGradingData g = (AssessmentGradingData)assessmentGradings.get(i); if (!currentAgent.equals(g.getAgentId())){ l.add(g); currentAgent = g.getAgentId(); } } return l; } // build a Hashmap (Long publishedItemId, ArrayList assessmentGradingIds) // containing the item submission of the last AssessmentGrading // (regardless of users who submitted it) of a given published assessment public HashMap getLastAssessmentGradingByPublishedItem(final Long publishedAssessmentId){ HashMap h = new HashMap(); final String query = "select new AssessmentGradingData("+ " a.assessmentGradingId, p.itemId, "+ " a.agentId, a.finalScore, a.submittedDate) "+ " from ItemGradingData i, AssessmentGradingData a,"+ " PublishedItemData p where "+ " i.assessmentGradingId = a.assessmentGradingId and i.publishedItemId = p.itemId and "+ " a.publishedAssessmentId=? " + " order by a.agentId asc, a.submittedDate desc"; final HibernateCallback hcb = new HibernateCallback(){ public Object doInHibernate(Session session) throws HibernateException, SQLException { Query q = session.createQuery(query); q.setLong(0, publishedAssessmentId.longValue()); return q.list(); }; }; List assessmentGradings = getHibernateTemplate().executeFind(hcb); // List assessmentGradings = getHibernateTemplate().find(query, // new Object[] { publishedAssessmentId }, // new org.hibernate.type.Type[] { Hibernate.LONG }); // ArrayList l = new ArrayList(); String currentAgent=""; Date submittedDate = null; for (int i=0; i<assessmentGradings.size(); i++){ AssessmentGradingData g = (AssessmentGradingData)assessmentGradings.get(i); Long itemId = g.getPublishedItemId(); Long gradingId = g.getAssessmentGradingId(); log.debug("**** itemId="+itemId+", gradingId="+gradingId+", agentId="+g.getAgentId()+", score="+g.getFinalScore()); if ( i==0 ){ currentAgent = g.getAgentId(); submittedDate = g.getSubmittedDate(); } if (currentAgent.equals(g.getAgentId()) && ((submittedDate==null && g.getSubmittedDate()==null) || (submittedDate!=null && submittedDate.equals(g.getSubmittedDate())))){ Object o = h.get(itemId); if (o != null) ((ArrayList) o).add(gradingId); else{ ArrayList gradingIds = new ArrayList(); gradingIds.add(gradingId); h.put(itemId, gradingIds); } } if (!currentAgent.equals(g.getAgentId())){ currentAgent = g.getAgentId(); submittedDate = g.getSubmittedDate(); } } return h; } // build a Hashmap (Long publishedItemId, ArrayList assessmentGradingIds) // containing the item submission of the highest AssessmentGrading // (regardless of users who submitted it) of a given published assessment public HashMap getHighestAssessmentGradingByPublishedItem(final Long publishedAssessmentId){ HashMap h = new HashMap(); final String query = "select new AssessmentGradingData("+ " a.assessmentGradingId, p.itemId, "+ " a.agentId, a.finalScore, a.submittedDate) "+ " from ItemGradingData i, AssessmentGradingData a, "+ " PublishedItemData p where "+ " 
i.assessmentGradingId = a.assessmentGradingId and i.publishedItemId = p.itemId and "+ " a.publishedAssessmentId=? " + " order by a.agentId asc, a.finalScore desc"; final HibernateCallback hcb = new HibernateCallback(){ public Object doInHibernate(Session session) throws HibernateException, SQLException { Query q = session.createQuery(query); q.setLong(0, publishedAssessmentId.longValue()); return q.list(); }; }; List assessmentGradings = getHibernateTemplate().executeFind(hcb); // List assessmentGradings = getHibernateTemplate().find(query, // new Object[] { publishedAssessmentId }, // new org.hibernate.type.Type[] { Hibernate.LONG }); // ArrayList l = new ArrayList(); String currentAgent=""; Float finalScore = null; for (int i=0; i<assessmentGradings.size(); i++){ AssessmentGradingData g = (AssessmentGradingData)assessmentGradings.get(i); Long itemId = g.getPublishedItemId(); Long gradingId = g.getAssessmentGradingId(); log.debug("**** itemId="+itemId+", gradingId="+gradingId+", agentId="+g.getAgentId()+", score="+g.getFinalScore()); if ( i==0 ){ currentAgent = g.getAgentId(); finalScore = g.getFinalScore(); } if (currentAgent.equals(g.getAgentId()) && ((finalScore==null && g.getFinalScore()==null) || (finalScore!=null && finalScore.equals(g.getFinalScore())))){ Object o = h.get(itemId); if (o != null) ((ArrayList) o).add(gradingId); else{ ArrayList gradingIds = new ArrayList(); gradingIds.add(gradingId); h.put(itemId, gradingIds); } } if (!currentAgent.equals(g.getAgentId())){ currentAgent = g.getAgentId(); finalScore = g.getFinalScore(); } } return h; } public Set getItemGradingSet(final Long assessmentGradingId){ final String query = "from ItemGradingData i where i.assessmentGradingId=?"; final HibernateCallback hcb = new HibernateCallback(){ public Object doInHibernate(Session session) throws HibernateException, SQLException { Query q = session.createQuery(query); q.setLong(0, assessmentGradingId.longValue()); return q.list(); }; }; List itemGradings = getHibernateTemplate().executeFind(hcb); // List itemGradings = getHibernateTemplate().find(query, // new Object[] { assessmentGradingId }, // new org.hibernate.type.Type[] { Hibernate.LONG }); HashSet s = new HashSet(); for (int i=0; i<itemGradings.size();i++){ s.add(itemGradings.get(i)); } return s; } public HashMap getAssessmentGradingByItemGradingId(final Long publishedAssessmentId){ List aList = getAllSubmissions(publishedAssessmentId.toString()); HashMap aHash = new HashMap(); for (int j=0; j<aList.size();j++){ AssessmentGradingData a = (AssessmentGradingData)aList.get(j); aHash.put(a.getAssessmentGradingId(), a); } final String query = "select new ItemGradingData(i.itemGradingId, a.assessmentGradingId) "+ " from ItemGradingData i, AssessmentGradingData a "+ " where i.assessmentGradingId=a.assessmentGradingId "+ " and a.publishedAssessmentId=?"; final HibernateCallback hcb = new HibernateCallback(){ public Object doInHibernate(Session session) throws HibernateException, SQLException { Query q = session.createQuery(query); q.setLong(0, publishedAssessmentId.longValue()); return q.list(); }; }; List l = getHibernateTemplate().executeFind(hcb); // List l = getHibernateTemplate().find(query, // new Object[] { publishedAssessmentId }, // new org.hibernate.type.Type[] { Hibernate.LONG }); //System.out.println("****** assessmentGradinghash="+l.size()); HashMap h = new HashMap(); for (int i=0; i<l.size();i++){ ItemGradingData o = (ItemGradingData)l.get(i); h.put(o.getItemGradingId(), 
(AssessmentGradingData)aHash.get(o.getAssessmentGradingId())); } return h; } public void deleteAll(Collection c){ int retryCount = PersistenceService.getInstance().getRetryCount().intValue(); while (retryCount > 0){ try { getHibernateTemplate().deleteAll(c); retryCount = 0; } catch (Exception e) { log.warn("problem inserting assessmentGrading: "+e.getMessage()); retryCount = PersistenceService.getInstance().retryDeadlock(e, retryCount); } } } public void saveOrUpdateAll(Collection c) { int retryCount = PersistenceService.getInstance().getRetryCount().intValue(); while (retryCount > 0){ try { getHibernateTemplate().saveOrUpdateAll(c); retryCount = 0; } catch (Exception e) { log.warn("problem inserting assessmentGrading: "+e.getMessage()); retryCount = PersistenceService.getInstance().retryDeadlock(e, retryCount); } } } public PublishedAssessmentIfc getPublishedAssessmentByAssessmentGradingId(final Long assessmentGradingId){ PublishedAssessmentIfc pub = null; final String query = "select p from PublishedAssessmentData p, AssessmentGradingData a "+ " where a.publishedAssessmentId=p.publishedAssessmentId and a.assessmentGradingId=?"; final HibernateCallback hcb = new HibernateCallback(){ public Object doInHibernate(Session session) throws HibernateException, SQLException { Query q = session.createQuery(query); q.setLong(0, assessmentGradingId.longValue()); return q.list(); }; }; List pubList = getHibernateTemplate().executeFind(hcb); // List pubList = getHibernateTemplate().find(query, // new Object[] { assessmentGradingId }, // new org.hibernate.type.Type[] { Hibernate.LONG }); if (pubList!=null && pubList.size()>0) pub = (PublishedAssessmentIfc) pubList.get(0); return pub; } public PublishedAssessmentIfc getPublishedAssessmentByPublishedItemId(final Long publishedItemId){ PublishedAssessmentIfc pub = null; final String query = "select p from PublishedAssessmentData p, PublishedItemData i "+ " where p.publishedAssessmentId=i.section.assessment.publishedAssessmentId and i.itemId=?"; final HibernateCallback hcb = new HibernateCallback(){ public Object doInHibernate(Session session) throws HibernateException, SQLException { Query q = session.createQuery(query); q.setLong(0, publishedItemId.longValue()); return q.list(); }; }; List pubList = getHibernateTemplate().executeFind(hcb); if (pubList!=null && pubList.size()>0) pub = (PublishedAssessmentIfc) pubList.get(0); return pub; } public ArrayList getLastItemGradingDataPosition(final Long assessmentGradingId, final String agentId) { ArrayList position = new ArrayList(); try { final HibernateCallback hcb = new HibernateCallback(){ public Object doInHibernate(Session session) throws HibernateException, SQLException { Query q = session.createQuery("select s.sequence " + " from ItemGradingData i, PublishedItemData pi, PublishedSectionData s " + " where i.agentId = ? and i.assessmentGradingId = ? 
" + " and pi.itemId = i.publishedItemId " + " and pi.section.id = s.id " + " group by i.publishedItemId, s.sequence, pi.sequence " + " order by s.sequence desc , pi.sequence desc"); q.setString(0, agentId); q.setLong(1, assessmentGradingId.longValue()); return q.list(); }; }; ArrayList list = (ArrayList) getHibernateTemplate().executeFind(hcb); if ( list.size() == 0) { position.add(new Integer(0)); position.add(new Integer(0)); } else { Integer sequence = (Integer) list.get(0); Integer nextSequence; int count = 1; for (int i = 1; i < list.size(); i++) { log.debug("i = " + i); nextSequence = (Integer) list.get(i); if (sequence.equals(nextSequence)) { log.debug("equal"); count++; } else { break; } } log.debug("sequence = " + sequence); log.debug("count = " + count); position.add(sequence); position.add(new Integer(count)); } return position; } catch (Exception e) { e.printStackTrace(); position.add(new Integer(0)); position.add(new Integer(0)); return position; } } public List getItemGradingIds(final Long assessmentGradingId){ final HibernateCallback hcb = new HibernateCallback(){ public Object doInHibernate(Session session) throws HibernateException, SQLException { Query q = session.createQuery("select i.publishedItemId from "+ " ItemGradingData i where i.assessmentGradingId=?"); q.setLong(0, assessmentGradingId.longValue()); return q.list(); }; }; return getHibernateTemplate().executeFind(hcb); } public HashSet getItemSet(final Long publishedAssessmentId, final Long sectionId) { HashSet itemSet = new HashSet(); final HibernateCallback hcb = new HibernateCallback(){ public Object doInHibernate(Session session) throws HibernateException, SQLException { Query q = session.createQuery( "select distinct p " + "from PublishedItemData p, AssessmentGradingData a, ItemGradingData i " + "where a.publishedAssessmentId=? and a.forGrade=? and p.section.id=? " + "and i.assessmentGradingId = a.assessmentGradingId " + "and p.itemId = i.publishedItemId "); q.setLong(0, publishedAssessmentId.longValue()); q.setBoolean(1, true); q.setLong(2, sectionId.longValue()); return q.list(); }; }; List assessmentGradings = getHibernateTemplate().executeFind(hcb); Iterator iter = assessmentGradings.iterator(); PublishedItemData publishedItemData; while(iter.hasNext()) { publishedItemData = (PublishedItemData) iter.next(); log.debug("itemId = " + publishedItemData.getItemId()); itemSet.add(publishedItemData); } return itemSet; } }
SAK-6379 git-svn-id: 574bb14f304dbe16c01253ed6697ea749724087f@14982 66ffb92e-73f9-0310-93c1-f5514f145a0a
samigo/samigo-services/src/java/org/sakaiproject/tool/assessment/facade/AssessmentGradingFacadeQueries.java
SAK-6379
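A pattern that recurs throughout the AssessmentGradingFacadeQueries class above is the retry-on-deadlock loop around every Hibernate write: fetch a retry budget from PersistenceService, attempt the operation, and on failure let retryDeadlock decide whether another attempt is allowed. Below is a sketch of that loop factored into one place. PersistenceService.getInstance(), getRetryCount(), and retryDeadlock(Exception, int) are the source's own API; the DeadlockRetry class and its Runnable-based signature are assumptions for illustration, and retryDeadlock is assumed (from its usage above) to return a decremented count for retryable errors and 0 otherwise.

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.sakaiproject.tool.assessment.services.PersistenceService;

public final class DeadlockRetry {

    private static final Log log = LogFactory.getLog(DeadlockRetry.class);

    /** Runs a persistence operation, retrying while the deadlock budget allows. */
    public static void run(Runnable work) {
        int retryCount = PersistenceService.getInstance().getRetryCount().intValue();
        while (retryCount > 0) {
            try {
                work.run();
                retryCount = 0; // success: leave the loop
            } catch (Exception e) {
                log.warn("problem running persistence work: " + e.getMessage());
                // Assumed behavior, inferred from the call sites above.
                retryCount = PersistenceService.getInstance().retryDeadlock(e, retryCount);
            }
        }
    }
}

A call site would then shrink to DeadlockRetry.run(new Runnable() { public void run() { getHibernateTemplate().save(a); } });, keeping the retry policy in one place instead of a dozen copies.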
Java
apache-2.0
7c0314728342747b9380f201789ad6afe733cca8
0
collectivemedia/celos,collectivemedia/celos,collectivemedia/celos
package com.collective.celos;

import java.net.URI;
import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Check in HDFS for a data dependency.
 */
public class HDFSCheckTrigger implements Trigger {

    private final ScheduledTimeFormatter formatter = new ScheduledTimeFormatter();
    private final FileSystem fs;
    private final String rawPathString;
    private final String fsString;

    private static final Map<String, FileSystem> cachedFSs = new HashMap<>();

    public HDFSCheckTrigger(String rawPathString, String fsString) throws Exception {
        this.rawPathString = Util.requireNonNull(rawPathString);
        this.fsString = Util.requireNonNull(fsString);
        if (cachedFSs.containsKey(fsString)) {
            this.fs = cachedFSs.get(fsString);
        } else {
            this.fs = FileSystem.get(new URI(fsString), new Configuration());
            cachedFSs.put(fsString, this.fs);
        }
    }

    @Override
    public boolean isDataAvailable(ScheduledTime now, ScheduledTime t) throws Exception {
        Path path = new Path(formatter.replaceTimeTokens(rawPathString, t));
        return fs.exists(path);
    }

    public String getFsString() {
        return fsString;
    }

    public String getRawPathString() {
        return rawPathString;
    }
}
src/main/java/com/collective/celos/HDFSCheckTrigger.java
package com.collective.celos;

import java.net.URI;
import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Check in HDFS for a data dependency.
 */
public class HDFSCheckTrigger implements Trigger {

    private final ScheduledTimeFormatter formatter = new ScheduledTimeFormatter();
    private final FileSystem fs;
    private final String rawPathString;
    private final String fsString;

    private static final Map<String, FileSystem> cachedFSs = new HashMap<>();

    public HDFSCheckTrigger(String rawPathString, String fsString) throws Exception {
        this.rawPathString = Util.requireNonNull(rawPathString);
        this.fsString = Util.requireNonNull(fsString);
        synchronized (cachedFSs) {
            if (cachedFSs.containsKey(fsString)) {
                this.fs = cachedFSs.get(fsString);
            } else {
                this.fs = FileSystem.get(new URI(fsString), new Configuration());
                cachedFSs.put(fsString, this.fs);
            }
        }
    }

    @Override
    public boolean isDataAvailable(ScheduledTime now, ScheduledTime t) throws Exception {
        Path path = new Path(formatter.replaceTimeTokens(rawPathString, t));
        return fs.exists(path);
    }

    public String getFsString() {
        return fsString;
    }

    public String getRawPathString() {
        return rawPathString;
    }
}
Remove unneeded synchronized block.
src/main/java/com/collective/celos/HDFSCheckTrigger.java
Remove unneeded synchronized block.
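The celos commit above drops the synchronized block around the static cachedFSs HashMap. If HDFSCheckTrigger instances can be constructed from multiple threads, an unsynchronized HashMap is racy; a ConcurrentHashMap closes that gap without an explicit lock. The sketch below is a hypothetical alternative, not the project's code: it assumes Java 8+ for computeIfAbsent, and the FileSystemCache name is invented. Note also that Hadoop's own FileSystem.get already caches instances per scheme and authority unless that cache is disabled in the Configuration, which may be why the explicit lock was judged unneeded here.

import java.net.URI;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public final class FileSystemCache {

    private static final ConcurrentMap<String, FileSystem> CACHE = new ConcurrentHashMap<>();

    /** Returns a cached FileSystem for the URI string, creating it at most once per key. */
    public static FileSystem forUri(String fsString) {
        return CACHE.computeIfAbsent(fsString, s -> {
            try {
                return FileSystem.get(new URI(s), new Configuration());
            } catch (Exception e) {
                throw new RuntimeException("cannot create FileSystem for " + s, e);
            }
        });
    }
}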
Java
apache-2.0
6bcfd7309e948034fbd1d8922afb1a369c08ba7b
0
eginez/incubator-groovy,guangying945/incubator-groovy,rabbitcount/incubator-groovy,pickypg/incubator-groovy,apache/incubator-groovy,antoaravinth/incubator-groovy,pledbrook/incubator-groovy,ebourg/groovy-core,russel/groovy,adjohnson916/incubator-groovy,kidaa/incubator-groovy,EPadronU/incubator-groovy,paulk-asert/incubator-groovy,samanalysis/incubator-groovy,ebourg/groovy-core,alien11689/incubator-groovy,taoguan/incubator-groovy,mariogarcia/groovy-core,kidaa/incubator-groovy,christoph-frick/groovy-core,sagarsane/incubator-groovy,shils/incubator-groovy,dpolivaev/groovy,sagarsane/groovy-core,apache/groovy,paplorinc/incubator-groovy,pledbrook/incubator-groovy,paulk-asert/incubator-groovy,aim-for-better/incubator-groovy,paulk-asert/groovy,pickypg/incubator-groovy,mariogarcia/groovy-core,pickypg/incubator-groovy,alien11689/groovy-core,alien11689/incubator-groovy,apache/incubator-groovy,i55ac/incubator-groovy,sagarsane/groovy-core,adjohnson916/groovy-core,russel/incubator-groovy,ChanJLee/incubator-groovy,yukangguo/incubator-groovy,jwagenleitner/groovy,bsideup/incubator-groovy,christoph-frick/groovy-core,avafanasiev/groovy,PascalSchumacher/incubator-groovy,mariogarcia/groovy-core,groovy/groovy-core,gillius/incubator-groovy,paulk-asert/groovy,pickypg/incubator-groovy,aaronzirbes/incubator-groovy,traneHead/groovy-core,nkhuyu/incubator-groovy,paulk-asert/incubator-groovy,tkruse/incubator-groovy,avafanasiev/groovy,gillius/incubator-groovy,taoguan/incubator-groovy,alien11689/groovy-core,i55ac/incubator-groovy,aim-for-better/incubator-groovy,shils/incubator-groovy,graemerocher/incubator-groovy,sagarsane/incubator-groovy,rlovtangen/groovy-core,rlovtangen/groovy-core,genqiang/incubator-groovy,pledbrook/incubator-groovy,paulk-asert/groovy,taoguan/incubator-groovy,EPadronU/incubator-groovy,russel/groovy,rabbitcount/incubator-groovy,ebourg/incubator-groovy,graemerocher/incubator-groovy,rabbitcount/incubator-groovy,ebourg/incubator-groovy,PascalSchumacher/incubator-groovy,i55ac/incubator-groovy,eginez/incubator-groovy,avafanasiev/groovy,aaronzirbes/incubator-groovy,graemerocher/incubator-groovy,paulk-asert/incubator-groovy,adjohnson916/groovy-core,rabbitcount/incubator-groovy,apache/groovy,guangying945/incubator-groovy,upadhyayap/incubator-groovy,bsideup/groovy-core,yukangguo/incubator-groovy,antoaravinth/incubator-groovy,armsargis/groovy,graemerocher/incubator-groovy,guangying945/incubator-groovy,nkhuyu/incubator-groovy,shils/groovy,tkruse/incubator-groovy,dpolivaev/groovy,shils/incubator-groovy,yukangguo/incubator-groovy,rlovtangen/groovy-core,guangying945/incubator-groovy,rlovtangen/groovy-core,ChanJLee/incubator-groovy,paulk-asert/groovy,jwagenleitner/groovy,sagarsane/incubator-groovy,dpolivaev/groovy,apache/groovy,pledbrook/incubator-groovy,shils/incubator-groovy,eginez/incubator-groovy,kenzanmedia/incubator-groovy,shils/groovy,aim-for-better/incubator-groovy,alien11689/groovy-core,paplorinc/incubator-groovy,upadhyayap/incubator-groovy,ebourg/incubator-groovy,eginez/incubator-groovy,gillius/incubator-groovy,EPadronU/incubator-groovy,paulk-asert/incubator-groovy,shils/groovy,jwagenleitner/incubator-groovy,kenzanmedia/incubator-groovy,genqiang/incubator-groovy,nkhuyu/incubator-groovy,PascalSchumacher/incubator-groovy,apache/incubator-groovy,PascalSchumacher/incubator-groovy,nobeans/incubator-groovy,tkruse/incubator-groovy,kidaa/incubator-groovy,rlovtangen/groovy-core,traneHead/groovy-core,samanalysis/incubator-groovy,kenzanmedia/incubator-groovy,samanalysis/incubator-groovy,armsargis/groovy,yukangguo/incubat
or-groovy,ChanJLee/incubator-groovy,fpavageau/groovy,mariogarcia/groovy-core,sagarsane/groovy-core,russel/incubator-groovy,christoph-frick/groovy-core,bsideup/incubator-groovy,jwagenleitner/incubator-groovy,adjohnson916/groovy-core,adjohnson916/incubator-groovy,christoph-frick/groovy-core,armsargis/groovy,fpavageau/groovy,gillius/incubator-groovy,alien11689/incubator-groovy,fpavageau/groovy,ebourg/incubator-groovy,genqiang/incubator-groovy,sagarsane/incubator-groovy,ebourg/groovy-core,aaronzirbes/incubator-groovy,alien11689/groovy-core,ebourg/groovy-core,mariogarcia/groovy-core,traneHead/groovy-core,adjohnson916/groovy-core,traneHead/groovy-core,paplorinc/incubator-groovy,antoaravinth/incubator-groovy,aim-for-better/incubator-groovy,alien11689/groovy-core,russel/incubator-groovy,jwagenleitner/groovy,ChanJLee/incubator-groovy,bsideup/groovy-core,nobeans/incubator-groovy,jwagenleitner/incubator-groovy,taoguan/incubator-groovy,apache/incubator-groovy,bsideup/groovy-core,antoaravinth/incubator-groovy,shils/groovy,i55ac/incubator-groovy,kenzanmedia/incubator-groovy,alien11689/incubator-groovy,kidaa/incubator-groovy,sagarsane/groovy-core,russel/groovy,aaronzirbes/incubator-groovy,sagarsane/groovy-core,bsideup/incubator-groovy,apache/groovy,adjohnson916/incubator-groovy,russel/groovy,nobeans/incubator-groovy,avafanasiev/groovy,PascalSchumacher/incubator-groovy,samanalysis/incubator-groovy,nobeans/incubator-groovy,upadhyayap/incubator-groovy,jwagenleitner/incubator-groovy,adjohnson916/groovy-core,genqiang/incubator-groovy,adjohnson916/incubator-groovy,groovy/groovy-core,armsargis/groovy,christoph-frick/groovy-core,bsideup/groovy-core,russel/incubator-groovy,groovy/groovy-core,groovy/groovy-core,tkruse/incubator-groovy,paplorinc/incubator-groovy,dpolivaev/groovy,jwagenleitner/groovy,groovy/groovy-core,ebourg/groovy-core,upadhyayap/incubator-groovy,bsideup/incubator-groovy,nkhuyu/incubator-groovy,EPadronU/incubator-groovy,fpavageau/groovy
/* * Copyright 2003-2009 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.codehaus.groovy.control; import groovy.lang.GroovyClassLoader; import org.codehaus.groovy.ast.*; import org.codehaus.groovy.ast.expr.*; import org.codehaus.groovy.ast.stmt.BlockStatement; import org.codehaus.groovy.ast.stmt.CatchStatement; import org.codehaus.groovy.ast.stmt.ForStatement; import org.codehaus.groovy.ast.stmt.Statement; import org.codehaus.groovy.classgen.Verifier; import org.codehaus.groovy.control.messages.ExceptionMessage; import org.codehaus.groovy.syntax.Types; import org.codehaus.groovy.GroovyBugError; import org.objectweb.asm.Opcodes; import java.io.File; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; import java.net.URLConnection; import java.util.*; /** * Visitor to resolve Types and convert VariableExpression to * ClassExpressions if needed. The ResolveVisitor will try to * find the Class for a ClassExpression and will print an error if * it fails to do so. Constructions like C[], foo as C, (C) foo * will force creation of a ClassExpression for C * <p/> * Note: the method to start the resolving is startResolving(ClassNode, SourceUnit). * * @author Jochen Theodorou */ public class ResolveVisitor extends ClassCodeExpressionTransformer { private ClassNode currentClass; // note: BigInteger and BigDecimal are also imported by default public static final String[] DEFAULT_IMPORTS = {"java.lang.", "java.io.", "java.net.", "java.util.", "groovy.lang.", "groovy.util."}; private CompilationUnit compilationUnit; private Map cachedClasses = new HashMap(); private static final Object NO_CLASS = new Object(); private static final Object SCRIPT = new Object(); private SourceUnit source; private VariableScope currentScope; private boolean isTopLevelProperty = true; private boolean inPropertyExpression = false; private boolean inClosure = false; private boolean isSpecialConstructorCall = false; private Map<String, GenericsType> genericParameterNames = new HashMap<String, GenericsType>(); /** * we use ConstructedClassWithPackage to limit the resolving the compiler * does when combining package names and class names. The idea is * that if we use a package, then we do not want to replace the * '.' with a '$' for the package part, only for the class name * part. There is also the case of an imported class, so this logic * can't be done in these cases...
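* For example (illustrative, not from the original source): with prefix "java.util." and class name part "Map.Entry", only "Map.Entry" may be mangled to "Map$Entry"; the dots of the prefix are left alone.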
*/ private static class ConstructedClassWithPackage extends ClassNode { String prefix; String className; public ConstructedClassWithPackage(String pkg, String name) { super(pkg+name, Opcodes.ACC_PUBLIC,ClassHelper.OBJECT_TYPE); isPrimaryNode = false; this.prefix = pkg; this.className = name; } public String getName() { if (redirect()!=this) return super.getName(); return prefix+className; } public boolean hasPackageName() { if (redirect()!=this) return super.hasPackageName(); return className.indexOf('.')!=-1; } public String setName(String name) { if (redirect()!=this) { return super.setName(name); } else { throw new GroovyBugError("ConstructedClassWithPackage#setName should not be called"); } } } /** * we use LowerCaseClass to limit the resolving the compiler * does for vanilla names starting with a lower case letter. The idea is * that if we use a vanilla name with a lower case letter, this * is in most cases not a class. If it is a class the class needs to be * imported explicitly. The effect is that in an expression like * "def foo = bar" we do not have to use a loadClass call to check the * name foo and bar for being classes. Instead we will ask the module * for an alias for this name which is much faster. */ private static class LowerCaseClass extends ClassNode { String className; public LowerCaseClass(String name) { super(name, Opcodes.ACC_PUBLIC,ClassHelper.OBJECT_TYPE); isPrimaryNode = false; this.className = name; } public String getName() { if (redirect()!=this) return super.getName(); return className; } public boolean hasPackageName() { if (redirect()!=this) return super.hasPackageName(); return false; } public String setName(String name) { if (redirect()!=this) { return super.setName(name); } else { throw new GroovyBugError("LowerCaseClass#setName should not be called"); } } } public ResolveVisitor(CompilationUnit cu) { compilationUnit = cu; } public void startResolving(ClassNode node, SourceUnit source) { this.source = source; visitClass(node); } protected void visitConstructorOrMethod(MethodNode node, boolean isConstructor) { VariableScope oldScope = currentScope; currentScope = node.getVariableScope(); Map<String, GenericsType> oldPNames = genericParameterNames; genericParameterNames = new HashMap<String, GenericsType>(genericParameterNames); resolveGenericsHeader(node.getGenericsTypes()); Parameter[] paras = node.getParameters(); for (Parameter p : paras) { p.setInitialExpression(transform(p.getInitialExpression())); resolveOrFail(p.getType(), p.getType()); visitAnnotations(p); } ClassNode[] exceptions = node.getExceptions(); for (ClassNode t : exceptions) { resolveOrFail(t, node); } resolveOrFail(node.getReturnType(), node); super.visitConstructorOrMethod(node, isConstructor); genericParameterNames = oldPNames; currentScope = oldScope; } public void visitField(FieldNode node) { ClassNode t = node.getType(); resolveOrFail(t, node); super.visitField(node); } public void visitProperty(PropertyNode node) { ClassNode t = node.getType(); resolveOrFail(t, node); super.visitProperty(node); } private boolean resolveToInner (ClassNode type) { // we do not do our name mangling to find an inner class // if the type is a ConstructedClassWithPackage, because in this case we // are resolving the name at a different place already if (type instanceof ConstructedClassWithPackage) return false; String name = type.getName(); String saved = name; while (true) { int len = name.lastIndexOf('.'); if (len == -1) break; name = name.substring(0,len) + "$" + name.substring(len+1);
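            // Illustrative example (not in the original source): for "a.b.C.D" this
            // loop tries "a.b.C$D", then "a.b$C$D", then "a$b$C$D" before giving up.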
type.setName(name); if (resolve(type)) return true; } if(resolveToInnerEnum (type)) return true; type.setName(saved); return false; } private boolean resolveToInnerEnum (ClassNode type) { // GROOVY-3110: It may be an inner enum defined by this class itself, in which case it does not need to be // explicitly qualified by the currentClass name String name = type.getName(); if(currentClass != type && !name.contains(".") && type.getClass().equals(ClassNode.class)) { type.setName(currentClass.getName() + "$" + name); if (resolve(type)) return true; } return false; } private void resolveOrFail(ClassNode type, String msg, ASTNode node) { if (resolve(type)) return; if (resolveToInner(type)) return; addError("unable to resolve class " + type.getName() + " " + msg, node); } private void resolveOrFail(ClassNode type, ASTNode node, boolean prefereImports) { resolveGenericsTypes(type.getGenericsTypes()); if (prefereImports && resolveAliasFromModule(type)) return; resolveOrFail(type, node); } private void resolveOrFail(ClassNode type, ASTNode node) { resolveOrFail(type, "", node); } private boolean resolve(ClassNode type) { return resolve(type, true, true, true); } private boolean resolve(ClassNode type, boolean testModuleImports, boolean testDefaultImports, boolean testStaticInnerClasses) { resolveGenericsTypes(type.getGenericsTypes()); if (type.isResolved() || type.isPrimaryClassNode()) return true; if (type.isArray()) { ClassNode element = type.getComponentType(); boolean resolved = resolve(element, testModuleImports, testDefaultImports, testStaticInnerClasses); if (resolved) { ClassNode cn = element.makeArray(); type.setRedirect(cn); } return resolved; } // test if vanilla name is current class name if (currentClass == type) return true; if (genericParameterNames.get(type.getName()) != null) { GenericsType gt = genericParameterNames.get(type.getName()); type.setRedirect(gt.getType()); type.setGenericsTypes(new GenericsType[]{gt}); type.setGenericsPlaceHolder(true); return true; } if (currentClass.getNameWithoutPackage().equals(type.getName())) { type.setRedirect(currentClass); return true; } return resolveFromModule(type, testModuleImports) || resolveFromCompileUnit(type) || resolveFromDefaultImports(type, testDefaultImports) || resolveFromStaticInnerClasses(type, testStaticInnerClasses) || resolveFromClassCache(type) || resolveToClass(type) || resolveToScript(type); } private boolean resolveFromClassCache(ClassNode type) { String name = type.getName(); Object val = cachedClasses.get(name); if (val == null || val == NO_CLASS) { return false; } else { type.setRedirect((ClassNode)val); return true; } } // NOTE: copied from GroovyClassLoader private long getTimeStamp(Class cls) { return Verifier.getTimestamp(cls); } // NOTE: copied from GroovyClassLoader private boolean isSourceNewer(URL source, Class cls) { try { long lastMod; // Special handling for file:// protocol, as getLastModified() often reports // incorrect results (-1) if (source.getProtocol().equals("file")) { // Coerce the file URL to a File String path = source.getPath().replace('/', File.separatorChar).replace('|', ':'); File file = new File(path); lastMod = file.lastModified(); } else { URLConnection conn = source.openConnection(); lastMod = conn.getLastModified(); conn.getInputStream().close(); } return lastMod > getTimeStamp(cls); } catch (IOException e) { // if the stream can't be opened, let's keep the old reference return false; } } private boolean resolveToScript(ClassNode type) { String name = type.getName(); // We do not need to 
check instances of LowerCaseClass // to be a script, because unless there was an import // for this we do not look up these cases. This was a decision // made on the mailing list. To ensure we will not visit this // method again we set a NO_CLASS for this name if (type instanceof LowerCaseClass) { cachedClasses.put(name, NO_CLASS); } if (cachedClasses.get(name) == NO_CLASS) return false; if (cachedClasses.get(name) == SCRIPT) cachedClasses.put(name, NO_CLASS); if (name.startsWith("java.")) return type.isResolved(); //TODO: don't ignore inner static classes completely if (name.indexOf('$') != -1) return type.isResolved(); ModuleNode module = currentClass.getModule(); if (module.hasPackageName() && name.indexOf('.') == -1) return type.isResolved(); // try to find a script from classpath GroovyClassLoader gcl = compilationUnit.getClassLoader(); URL url = null; try { url = gcl.getResourceLoader().loadGroovySource(name); } catch (MalformedURLException e) { // fall through and let the URL be null } if (url != null) { if (type.isResolved()) { Class cls = type.getTypeClass(); // if the file is not newer we don't want to recompile if (!isSourceNewer(url, cls)) return true; // since we came to this, we want to recompile cachedClasses.remove(type.getName()); type.setRedirect(null); } SourceUnit su = compilationUnit.addSource(url); currentClass.getCompileUnit().addClassNodeToCompile(type, su); return true; } // type may be resolved through the classloader before return type.isResolved(); } private String replaceLastPoint(String name) { int lastPoint = name.lastIndexOf('.'); name = new StringBuffer() .append(name.substring(0, lastPoint)) .append("$") .append(name.substring(lastPoint + 1)) .toString(); return name; } private boolean resolveFromStaticInnerClasses(ClassNode type, boolean testStaticInnerClasses) { // a class consisting of a vanilla name can never be // a static inner class, because at least one dot is // required for this. Example: foo.bar -> foo$bar if (type instanceof LowerCaseClass) return false; // try to resolve a public static inner class' name testStaticInnerClasses &= type.hasPackageName(); if (testStaticInnerClasses) { if (type instanceof ConstructedClassWithPackage) { // we replace '.' only in the className part // with '$' to find an inner class. The case that // the package is really a class is handled elsewhere ConstructedClassWithPackage tmp = (ConstructedClassWithPackage) type; String name = ((ConstructedClassWithPackage) type).className; tmp.className = replaceLastPoint(name); if (resolve(tmp, false, true, true)) { type.setRedirect(tmp.redirect()); return true; } tmp.className = name; } else { String name = type.getName(); String replacedPointType = replaceLastPoint(name); type.setName(replacedPointType); if (resolve(type, false, true, true)) return true; type.setName(name); } } return false; } private boolean resolveFromDefaultImports(ClassNode type, boolean testDefaultImports) { // test default imports testDefaultImports &= !type.hasPackageName(); // we do not try to resolve a vanilla name starting with a lower case // letter against a default import, because we know that the // default packages do not contain classes like these testDefaultImports &= !(type instanceof LowerCaseClass); if (testDefaultImports) { for (int i = 0, size = DEFAULT_IMPORTS.length; i < size; i++) { String packagePrefix = DEFAULT_IMPORTS[i]; String name = type.getName(); // We limit the inner class lookups here by using ConstructedClassWithPackage.
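            // (Illustrative, not in the original source: for name "Map" and
            // packagePrefix "java.util." the candidate becomes "java.util.Map";
            // any later '$' mangling would touch only the "Map" part.)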
// This way only the name will change, the packagePrefix will // not be included in the lookup. The case where the // packagePrefix is really a class is handled elsewhere. // WARNING: This code does not expect a class that has a static // inner class in DEFAULT_IMPORTS ConstructedClassWithPackage tmp = new ConstructedClassWithPackage(packagePrefix,name); if (resolve(tmp, false, false, false)) { type.setRedirect(tmp.redirect()); return true; } } String name = type.getName(); if (name.equals("BigInteger")) { type.setRedirect(ClassHelper.BigInteger_TYPE); return true; } else if (name.equals("BigDecimal")) { type.setRedirect(ClassHelper.BigDecimal_TYPE); return true; } } return false; } private boolean resolveFromCompileUnit(ClassNode type) { // look into the compile unit if there is a class with that name CompileUnit compileUnit = currentClass.getCompileUnit(); if (compileUnit == null) return false; ClassNode cuClass = compileUnit.getClass(type.getName()); if (cuClass != null) { if (type != cuClass) type.setRedirect(cuClass); return true; } return false; } private void ambiguousClass(ClassNode type, ClassNode iType, String name) { if (type.getName().equals(iType.getName())) { addError("reference to " + name + " is ambiguous, both class " + type.getName() + " and " + iType.getName() + " match", type); } else { type.setRedirect(iType); } } private boolean resolveAliasFromModule(ClassNode type) { // In case of getting a ConstructedClassWithPackage here we do not do checks for partial // matches with imported classes. The ConstructedClassWithPackage is already a constructed // node and any subclass resolving will then take place elsewhere if (type instanceof ConstructedClassWithPackage) return false; ModuleNode module = currentClass.getModule(); if (module == null) return false; String name = type.getName(); // check module node imports aliases // the while loop enables a check for inner classes which are not fully imported, // but visible as the surrounding class is imported and the inner class is public/protected static String pname = name; int index = name.length(); /* * we have a name foo.bar and an import foo.foo. This means foo.bar is possibly * foo.foo.bar rather than foo.bar. So we cut at the dot in foo.bar and * check foo against the imports */ while (true) { pname = name.substring(0, index); ClassNode aliasedNode = module.getImportType(pname); if (aliasedNode != null) { if (pname.length() == name.length()) { // full match // We can compare here by length, because pname is always // a substring of name, so same length means they are equal. type.setRedirect(aliasedNode); return true; } else { //partial match // At this point we know that we have a match for pname. This may // mean that name[pname.length()..<-1] is a static inner class. // For this the rest of the name does not need any dots in its name. // It is either completely an inner static class or it is not. // Since we do not want to have useless lookups we create the name // completely and use a ConstructedClassWithPackage to prevent lookups against the package.
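                        // Illustrative (not in the original source): with "import a.b.Foo"
                        // and name "Foo.Bar.Baz", pname is "Foo", so className becomes
                        // "Foo$Bar$Baz" and is looked up in the package "a.b.".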
String className = aliasedNode.getNameWithoutPackage() + '$' + name.substring(pname.length()+1).replace('.', '$'); ConstructedClassWithPackage tmp = new ConstructedClassWithPackage(aliasedNode.getPackageName()+".", className); if (resolve(tmp, true, true, false)) { type.setRedirect(tmp.redirect()); return true; } } } index = pname.lastIndexOf('.'); if (index == -1) break; } return false; } private boolean resolveFromModule(ClassNode type, boolean testModuleImports) { // we decided that if we have a vanilla name starting with a lower case // letter we will not try to resolve this name against .* // imports. Instead a full import is needed for these. // resolveAliasFromModule will do this check for us. This method // also checks whether the module contains a class of this name in // the same package. This check is not done for vanilla names starting // with a lower case letter anymore if (type instanceof LowerCaseClass) { return resolveAliasFromModule(type); } String name = type.getName(); ModuleNode module = currentClass.getModule(); if (module == null) return false; boolean newNameUsed = false; // we add a package if there is none yet and the module has one. But we // do not add that if the type is a ConstructedClassWithPackage: its // hasPackageName() looks only at ConstructedClassWithPackage#className // and ignores any dots in the prefix, so we check for // ConstructedClassWithPackage explicitly here. if (!type.hasPackageName() && module.hasPackageName() && !(type instanceof ConstructedClassWithPackage)) { type.setName(module.getPackageName() + name); newNameUsed = true; } // look into the module node if there is a class with that name List<ClassNode> moduleClasses = module.getClasses(); for (ClassNode mClass : moduleClasses) { if (mClass.getName().equals(type.getName())) { if (mClass != type) type.setRedirect(mClass); return true; } } if (newNameUsed) type.setName(name); if (testModuleImports) { if (resolveAliasFromModule(type)) return true; if (module.hasPackageName()) { // check the package this class is defined in. The usage of ConstructedClassWithPackage here // means that the module package will not be involved when the // compiler tries to find an inner class. ConstructedClassWithPackage tmp = new ConstructedClassWithPackage(module.getPackageName(),name); if (resolve(tmp, false, false, false)) { type.setRedirect(tmp.redirect()); return true; } } // check module node imports packages for (ImportNode importNode : module.getStarImports()) { String packagePrefix = importNode.getPackageName(); // We limit the inner class lookups here by using ConstructedClassWithPackage. // This way only the name will change, the packagePrefix will // not be included in the lookup. The case where the // packagePrefix is really a class is handled elsewhere. ConstructedClassWithPackage tmp = new ConstructedClassWithPackage(packagePrefix, name); if (resolve(tmp, false, false, true)) { ambiguousClass(type, tmp, name); type.setRedirect(tmp.redirect()); return true; } } } return false; } private boolean resolveToClass(ClassNode type) { String name = type.getName(); // We do not need to check instances of LowerCaseClass // to be a Class, because unless there was an import // for this we do not look up these cases. This was a decision // made on the mailing list.
To ensure we will not visit this // method again we set a NO_CLASS for this name if (type instanceof LowerCaseClass) { cachedClasses.put(name,NO_CLASS); } // We use the class cache cachedClasses here to prevent // calls to ClassLoader#loadClass. Disabling this cache will // cause a major performance hit. Unlike at the end of this // method we do not return true or false depending on whether we // want to recompile or not. If the class was cached, then // we do not want to recompile; recompilation is already // scheduled in that case Object cached = cachedClasses.get(name); if (cached == NO_CLASS) return false; // cached == SCRIPT should not happen here! if (cached == SCRIPT) throw new GroovyBugError("name "+name+" was marked as script, but was not resolved as such"); if (cached != null) return true; if (currentClass.getModule().hasPackageName() && name.indexOf('.') == -1) return false; GroovyClassLoader loader = compilationUnit.getClassLoader(); Class cls; try { // NOTE: it's important to do no lookup against script files // here since the GroovyClassLoader would create a new CompilationUnit cls = loader.loadClass(name, false, true); } catch (ClassNotFoundException cnfe) { cachedClasses.put(name, SCRIPT); return false; } catch (CompilationFailedException cfe) { compilationUnit.getErrorCollector().addErrorAndContinue(new ExceptionMessage(cfe, true, source)); return false; } //TODO: the case of a NoClassDefFoundError needs a bit more research // a simple recompilation is not possible it seems. The current class // we are searching for is there, so we should mark that somehow. // Basically the missing class needs to be completely compiled before // we can again search for the current name. /*catch (NoClassDefFoundError ncdfe) { cachedClasses.put(name,SCRIPT); return false; }*/ if (cls == null) return false; ClassNode cn = ClassHelper.make(cls); cachedClasses.put(name, cn); type.setRedirect(cn); //NOTE: we might return false here even if we found a class, // because we want to give a possible script a chance to // recompile. This can only be done if the loader was not // the instance defining the class. return cls.getClassLoader() == loader; } public Expression transform(Expression exp) { if (exp == null) return null; Expression ret = null; if (exp instanceof VariableExpression) { ret = transformVariableExpression((VariableExpression) exp); } else if (exp.getClass() == PropertyExpression.class) { ret = transformPropertyExpression((PropertyExpression) exp); } else if (exp instanceof DeclarationExpression) { ret = transformDeclarationExpression((DeclarationExpression) exp); } else if (exp instanceof BinaryExpression) { ret = transformBinaryExpression((BinaryExpression) exp); } else if (exp instanceof MethodCallExpression) { ret = transformMethodCallExpression((MethodCallExpression) exp); } else if (exp instanceof ClosureExpression) { ret = transformClosureExpression((ClosureExpression) exp); } else if (exp instanceof ConstructorCallExpression) { ret = transformConstructorCallExpression((ConstructorCallExpression) exp); } else if (exp instanceof AnnotationConstantExpression) { ret = transformAnnotationConstantExpression((AnnotationConstantExpression) exp); } else { resolveOrFail(exp.getType(), exp); ret = exp.transformExpression(this); } if (ret!=null && ret!=exp) ret.setSourcePosition(exp); return ret; } private String lookupClassName(PropertyExpression pe) { boolean doInitialClassTest=true; String name = ""; // this loop builds a name from right to left, each name part // separated by "."
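        // Illustrative (not in the original source): for the property expression
        // foo.bar.Baz this loop yields the candidate name "foo.bar.Baz"; whether
        // that really denotes a class is decided by the caller via resolve().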
for (Expression it = pe; it != null; it = ((PropertyExpression) it).getObjectExpression()) { if (it instanceof VariableExpression) { VariableExpression ve = (VariableExpression) it; // stop at super and this if (ve.isSuperExpression() || ve.isThisExpression()) { return null; } String varName = ve.getName(); if (doInitialClassTest) { // we are at the first name part. This is the rightmost part. // If this part is in lower case, then we do not need a class // check. Other parts of the property expression will be tested // by a different method call to this method, so foo.Bar.bar // can still be resolved to the class foo.Bar and the static // field bar. if (!testVanillaNameForClass(varName)) return null; doInitialClassTest = false; name = varName; } else { name = varName + "." + name; } break; } // anything other than PropertyExpressions or // VariableExpressions will stop resolving else if (it.getClass() != PropertyExpression.class) { return null; } else { PropertyExpression current = (PropertyExpression) it; String propertyPart = current.getPropertyAsString(); // the class property stops resolving, dynamic property names too if (propertyPart == null || propertyPart.equals("class")) { return null; } if (doInitialClassTest) { // we are at the first name part. This is the rightmost part. // If this part is in lower case, then we do not need a class // check. Other parts of the property expression will be tested // by a different method call to this method, so foo.Bar.bar // can still be resolved to the class foo.Bar and the static // field bar. if (!testVanillaNameForClass(propertyPart)) return null; doInitialClassTest = false; name = propertyPart; } else { name = propertyPart + "." + name; } } } if (name.length() == 0) return null; return name; } // iterate from the innermost to the outer and check for classes // this check will ignore a .class property, for example Integer.class will be // a PropertyExpression with the ClassExpression of Integer as objectExpression // and class as property private Expression correctClassClassChain(PropertyExpression pe) { LinkedList<Expression> stack = new LinkedList<Expression>(); ClassExpression found = null; for (Expression it = pe; it != null; it = ((PropertyExpression) it).getObjectExpression()) { if (it instanceof ClassExpression) { found = (ClassExpression) it; break; } else if (!(it.getClass() == PropertyExpression.class)) { return pe; } stack.addFirst(it); } if (found == null) return pe; if (stack.isEmpty()) return pe; Object stackElement = stack.removeFirst(); if (!(stackElement.getClass() == PropertyExpression.class)) return pe; PropertyExpression classPropertyExpression = (PropertyExpression) stackElement; String propertyNamePart = classPropertyExpression.getPropertyAsString(); if (propertyNamePart == null || !propertyNamePart.equals("class")) return pe; found.setSourcePosition(classPropertyExpression); if (stack.isEmpty()) return found; stackElement = stack.removeFirst(); if (!(stackElement.getClass() == PropertyExpression.class)) return pe; PropertyExpression classPropertyExpressionContainer = (PropertyExpression) stackElement; classPropertyExpressionContainer.setObjectExpression(found); return pe; } protected Expression transformPropertyExpression(PropertyExpression pe) { boolean itlp = isTopLevelProperty; boolean ipe = inPropertyExpression; Expression objectExpression = pe.getObjectExpression(); inPropertyExpression = true; isTopLevelProperty = (objectExpression.getClass() != PropertyExpression.class); objectExpression =
transform(objectExpression); // we handle the property part as if it were not part of the property inPropertyExpression = false; Expression property = transform(pe.getProperty()); isTopLevelProperty = itlp; inPropertyExpression = ipe; boolean spreadSafe = pe.isSpreadSafe(); PropertyExpression old = pe; pe = new PropertyExpression(objectExpression, property, pe.isSafe()); pe.setSpreadSafe(spreadSafe); pe.setSourcePosition(old); String className = lookupClassName(pe); if (className != null) { ClassNode type = ClassHelper.make(className); if (resolve(type)) { Expression ret = new ClassExpression(type); ret.setSourcePosition(pe); return ret; } } if (objectExpression instanceof ClassExpression && pe.getPropertyAsString() != null) { // possibly an inner class ClassExpression ce = (ClassExpression) objectExpression; ClassNode type = ClassHelper.make(ce.getType().getName() + "$" + pe.getPropertyAsString()); if (resolve(type, false, false, false)) { Expression ret = new ClassExpression(type); ret.setSourcePosition(ce); return ret; } } Expression ret = pe; if (isTopLevelProperty) ret = correctClassClassChain(pe); return ret; } protected Expression transformVariableExpression(VariableExpression ve) { Variable v = ve.getAccessedVariable(); if (v instanceof DynamicVariable){ String name = ve.getName(); ClassNode t = ClassHelper.make(name); // asking isResolved here allows us to check whether a primitive // type name like "int" was used to make t. In such a case // we have nothing left to do. boolean isClass = t.isResolved(); if (!isClass) { // It was not a primitive type, so next we see if the name, // which is a vanilla name, starts with a lower case letter. // In that case we change it to a LowerCaseClass to let the // compiler skip the resolving at several places in this class. if (Character.isLowerCase(name.charAt(0))) { t = new LowerCaseClass(name); } isClass = resolve(t); if(!isClass) isClass = resolveToInnerEnum(t); } if (isClass) { // the name is a type so remove it from the scoping // as it is only a class variable; it is only in // referencedClassVariables, but must be removed // for each parent scope too for (VariableScope scope = currentScope; scope != null && !scope.isRoot(); scope = scope.getParent()) { if (scope.isRoot()) break; if (scope.removeReferencedClassVariable(ve.getName()) == null) break; } ClassExpression ce = new ClassExpression(t); ce.setSourcePosition(ve); return ce; } } resolveOrFail(ve.getType(), ve); return ve; } private boolean testVanillaNameForClass(String name) { if (name==null || name.length()==0) return false; return !Character.isLowerCase(name.charAt(0)); } protected Expression transformBinaryExpression(BinaryExpression be) { Expression left = transform(be.getLeftExpression()); int type = be.getOperation().getType(); if ((type == Types.ASSIGNMENT_OPERATOR || type == Types.EQUAL) && left instanceof ClassExpression) { ClassExpression ce = (ClassExpression) left; String error = "you tried to assign a value to the class '" + ce.getType().getName() + "'"; if (ce.getType().isScript()) { error += ". Do you have a script with this name?"; } addError(error, be.getLeftExpression()); return be; } if (left instanceof ClassExpression && be.getRightExpression() instanceof ListExpression) { // we have C[] if the list is empty -> should be an array then!
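            // Illustrative (not in the original source): an expression like "Integer[]"
            // arrives here as the class expression Integer subscripted with an empty
            // list and is rewritten below into a ClassExpression of the array type.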
ListExpression list = (ListExpression) be.getRightExpression(); if (list.getExpressions().isEmpty()) { return new ClassExpression(left.getType().makeArray()); } } Expression right = transform(be.getRightExpression()); be.setLeftExpression(left); be.setRightExpression(right); return be; } protected Expression transformClosureExpression(ClosureExpression ce) { boolean oldInClosure = inClosure; inClosure = true; Parameter[] paras = ce.getParameters(); if (paras != null) { for (Parameter para : paras) { ClassNode t = para.getType(); resolveOrFail(t, ce); if (para.hasInitialExpression()) { Object initialVal = para.getInitialExpression(); if (initialVal instanceof Expression) { transform((Expression) initialVal); } } } } Statement code = ce.getCode(); if (code != null) code.visit(this); inClosure = oldInClosure; return ce; } protected Expression transformConstructorCallExpression(ConstructorCallExpression cce) { ClassNode type = cce.getType(); resolveOrFail(type, cce); isSpecialConstructorCall = cce.isSpecialCall(); Expression ret = cce.transformExpression(this); isSpecialConstructorCall = false; return ret; } protected Expression transformMethodCallExpression(MethodCallExpression mce) { Expression args = transform(mce.getArguments()); Expression method = transform(mce.getMethod()); Expression object = transform(mce.getObjectExpression()); MethodCallExpression result = new MethodCallExpression(object, method, args); result.setSafe(mce.isSafe()); result.setImplicitThis(mce.isImplicitThis()); result.setSpreadSafe(mce.isSpreadSafe()); result.setSourcePosition(mce); return result; } protected Expression transformDeclarationExpression(DeclarationExpression de) { Expression oldLeft = de.getLeftExpression(); Expression left = transform(oldLeft); if (left instanceof ClassExpression) { ClassExpression ce = (ClassExpression) left; addError("you tried to assign a value to the class " + ce.getType().getName(), oldLeft); return de; } Expression right = transform(de.getRightExpression()); if (right == de.getRightExpression()) return de; DeclarationExpression newDeclExpr = new DeclarationExpression(left, de.getOperation(), right); newDeclExpr.setSourcePosition(de); return newDeclExpr; } protected Expression transformAnnotationConstantExpression(AnnotationConstantExpression ace) { AnnotationNode an = (AnnotationNode) ace.getValue(); ClassNode type = an.getClassNode(); resolveOrFail(type, ", unable to find class for annotation", an); for (Map.Entry<String, Expression> member : an.getMembers().entrySet()) { member.setValue(transform(member.getValue())); } return ace; } public void visitAnnotations(AnnotatedNode node) { List<AnnotationNode> annotations = node.getAnnotations(); if (annotations.isEmpty()) return; for (AnnotationNode an : annotations) { // skip built-in properties if (an.isBuiltIn()) continue; resolveOrFail(an.getClassNode(), ", unable to find class for annotation", an); for (Map.Entry<String, Expression> member : an.getMembers().entrySet()) { Expression newValue = transform(member.getValue()); newValue = transformInlineConstants(newValue); member.setValue(newValue); checkAnnotationMemberValue(newValue); } } } // resolve constant-looking expressions statically (do here as gets transformed away later) private Expression transformInlineConstants(Expression exp) { if (exp instanceof PropertyExpression) { PropertyExpression pe = (PropertyExpression) exp; if (pe.getObjectExpression() instanceof ClassExpression) { ClassExpression ce = (ClassExpression) pe.getObjectExpression(); ClassNode type = 
ce.getType(); if (type.isEnum()) return exp; FieldNode fn = type.getField(pe.getPropertyAsString()); if (fn != null && !fn.isEnum() && fn.isStatic() && fn.isFinal()) { if (fn.getInitialValueExpression() instanceof ConstantExpression) { return fn.getInitialValueExpression(); } } } } else if (exp instanceof ListExpression) { ListExpression le = (ListExpression) exp; ListExpression result = new ListExpression(); for (Expression e : le.getExpressions()) { result.addExpression(transformInlineConstants(e)); } return result; } else if (exp instanceof AnnotationConstantExpression) { ConstantExpression ce = (ConstantExpression) exp; if (ce.getValue() instanceof AnnotationNode) { // replicate a little bit of AnnotationVisitor here // because we can't wait until later to do this AnnotationNode an = (AnnotationNode) ce.getValue(); for (Map.Entry<String, Expression> member : an.getMembers().entrySet()) { member.setValue(transformInlineConstants(member.getValue())); } } } return exp; } private void checkAnnotationMemberValue(Expression newValue) { if (newValue instanceof PropertyExpression) { PropertyExpression pe = (PropertyExpression) newValue; if (!(pe.getObjectExpression() instanceof ClassExpression)) { addError("unable to find class '" + pe.getText() + "' for annotation attribute constant", pe.getObjectExpression()); } } else if (newValue instanceof ListExpression) { ListExpression le = (ListExpression) newValue; for (Expression e : le.getExpressions()) { checkAnnotationMemberValue(e); } } } public void visitClass(ClassNode node) { ClassNode oldNode = currentClass; currentClass = node; resolveGenericsHeader(node.getGenericsTypes()); ModuleNode module = node.getModule(); if (!module.hasImportsResolved()) { List l = module.getImports(); for (ImportNode importNode : module.getImports()) { ClassNode type = importNode.getType(); if (resolve(type, false, false, true)) continue; addError("unable to resolve class " + type.getName(), type); } for (ImportNode importNode : module.getStaticStarImports().values()) { ClassNode type = importNode.getType(); if (resolve(type, false, false, true)) continue; // May be this type belongs in the same package as the node that is doing the // static import. In that case, the package may not have been explicitly specified. // Try with the node's package too. If still not found, revert to original type name. if (type.getPackageName() == null && node.getPackageName() != null) { String oldTypeName = type.getName(); type.setName(node.getPackageName() + "." 
+ oldTypeName); if (resolve(type, false, false, true)) continue; type.setName(oldTypeName); } addError("unable to resolve class " + type.getName(), type); } for (ImportNode importNode : module.getStaticImports().values()) { ClassNode type = importNode.getType(); if (resolve(type, true, true, true)) continue; addError("unable to resolve class " + type.getName(), type); } for (ImportNode importNode : module.getStaticStarImports().values()) { ClassNode type = importNode.getType(); if (resolve(type, true, true, true)) continue; addError("unable to resolve class " + type.getName(), type); } module.setImportsResolved(true); } ClassNode sn = node.getUnresolvedSuperClass(); if (sn != null) resolveOrFail(sn, node, true); for (ClassNode anInterface : node.getInterfaces()) { resolveOrFail(anInterface, node, true); } super.visitClass(node); currentClass = oldNode; } public void visitCatchStatement(CatchStatement cs) { resolveOrFail(cs.getExceptionType(), cs); if (cs.getExceptionType() == ClassHelper.DYNAMIC_TYPE) { cs.getVariable().setType(ClassHelper.make(Exception.class)); } super.visitCatchStatement(cs); } public void visitForLoop(ForStatement forLoop) { resolveOrFail(forLoop.getVariableType(), forLoop); super.visitForLoop(forLoop); } public void visitBlockStatement(BlockStatement block) { VariableScope oldScope = currentScope; currentScope = block.getVariableScope(); super.visitBlockStatement(block); currentScope = oldScope; } protected SourceUnit getSourceUnit() { return source; } private void resolveGenericsTypes(GenericsType[] types) { if (types == null) return; currentClass.setUsingGenerics(true); for (GenericsType type : types) { resolveGenericsType(type); } } private void resolveGenericsHeader(GenericsType[] types) { if (types == null) return; currentClass.setUsingGenerics(true); for (GenericsType type : types) { ClassNode classNode = type.getType(); String name = type.getName(); ClassNode[] bounds = type.getUpperBounds(); if (bounds != null) { boolean nameAdded = false; for (ClassNode upperBound : bounds) { if (!nameAdded && upperBound != null || !resolve(classNode)) { genericParameterNames.put(name, type); type.setPlaceholder(true); classNode.setRedirect(upperBound); nameAdded = true; } resolveOrFail(upperBound, classNode); } } else { genericParameterNames.put(name, type); classNode.setRedirect(ClassHelper.OBJECT_TYPE); type.setPlaceholder(true); } } } private void resolveGenericsType(GenericsType genericsType) { if (genericsType.isResolved()) return; currentClass.setUsingGenerics(true); ClassNode type = genericsType.getType(); // save name before redirect String name = type.getName(); ClassNode[] bounds = genericsType.getUpperBounds(); if (!genericParameterNames.containsKey(name)) { if (bounds != null) { for (ClassNode upperBound : bounds) { resolveOrFail(upperBound, genericsType); type.setRedirect(upperBound); resolveGenericsTypes(upperBound.getGenericsTypes()); } } else if (genericsType.isWildcard()) { type.setRedirect(ClassHelper.OBJECT_TYPE); } else { resolveOrFail(type, genericsType); } } else { GenericsType gt = genericParameterNames.get(name); type.setRedirect(gt.getType()); genericsType.setPlaceholder(true); } if (genericsType.getLowerBound() != null) { resolveOrFail(genericsType.getLowerBound(), genericsType); } resolveGenericsTypes(type.getGenericsTypes()); genericsType.setResolved(genericsType.getType().isResolved()); } }
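// Illustrative usage sketch (not part of the original file): during compilation the
// visitor is created with the current CompilationUnit and run once per class node:
//   ResolveVisitor rv = new ResolveVisitor(compilationUnit);
//   rv.startResolving(classNode, sourceUnit); // resolves all type references in classNode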
src/main/org/codehaus/groovy/control/ResolveVisitor.java
/* * Copyright 2003-2009 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.codehaus.groovy.control; import groovy.lang.GroovyClassLoader; import org.codehaus.groovy.ast.*; import org.codehaus.groovy.ast.expr.*; import org.codehaus.groovy.ast.stmt.BlockStatement; import org.codehaus.groovy.ast.stmt.CatchStatement; import org.codehaus.groovy.ast.stmt.ForStatement; import org.codehaus.groovy.ast.stmt.Statement; import org.codehaus.groovy.classgen.Verifier; import org.codehaus.groovy.control.messages.ExceptionMessage; import org.codehaus.groovy.syntax.Types; import org.codehaus.groovy.GroovyBugError; import org.objectweb.asm.Opcodes; import java.io.File; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; import java.net.URLConnection; import java.util.*; /** * Visitor to resolve Types and convert VariableExpression to * ClassExpressions if needed. The ResolveVisitor will try to * find the Class for a ClassExpression and prints an error if * it fails to do so. Constructions like C[], foo as C, (C) foo * will force creation of a ClassExpression for C * <p/> * Note: the method to start the resolving is startResolving(ClassNode, SourceUnit). * * @author Jochen Theodorou */ public class ResolveVisitor extends ClassCodeExpressionTransformer { private ClassNode currentClass; // note: BigInteger and BigDecimal are also imported by default public static final String[] DEFAULT_IMPORTS = {"java.lang.", "java.io.", "java.net.", "java.util.", "groovy.lang.", "groovy.util."}; private CompilationUnit compilationUnit; private Map cachedClasses = new HashMap(); private static final Object NO_CLASS = new Object(); private static final Object SCRIPT = new Object(); private SourceUnit source; private VariableScope currentScope; private boolean isTopLevelProperty = true; private boolean inPropertyExpression = false; private boolean inClosure = false; private boolean isSpecialConstructorCall = false; private Map<String, GenericsType> genericParameterNames = new HashMap<String, GenericsType>(); /** * we use ConstructedClassWithPackage to limit the resolving the compiler * does when combining package names and class names. The idea * that if we use a package, then we do not want to replace the * '.' with a '$' for the package part, only for the class name * part. There is also the case of a imported class, so this logic * can't be done in these cases... 
*/ private static class ConstructedClassWithPackage extends ClassNode { String prefix; String className; public ConstructedClassWithPackage(String pkg, String name) { super(pkg+name, Opcodes.ACC_PUBLIC,ClassHelper.OBJECT_TYPE); isPrimaryNode = false; this.prefix = pkg; this.className = name; } public String getName() { if (redirect()!=this) return super.getName(); return prefix+className; } public boolean hasPackageName() { if (redirect()!=this) return super.hasPackageName(); return className.indexOf('.')!=-1; } public String setName(String name) { if (redirect()!=this) { return super.setName(name); } else { throw new GroovyBugError("ConstructedClassWithPackage#setName should not be called"); } } } /** * we use LowerCaseClass to limit the resolving the compiler * does for vanilla names starting with a lower case letter. The idea * that if we use a vanilla name with a lower case letter, that this * is in most cases no class. If it is a class the class needs to be * imported explicitly. The efffect is that in an expression like * "def foo = bar" we do not have to use a loadClass call to check the * name foo and bar for being classes. Instead we will ask the module * for an alias for this name which is much faster. */ private static class LowerCaseClass extends ClassNode { String className; public LowerCaseClass(String name) { super(name, Opcodes.ACC_PUBLIC,ClassHelper.OBJECT_TYPE); isPrimaryNode = false; this.className = name; } public String getName() { if (redirect()!=this) return super.getName(); return className; } public boolean hasPackageName() { if (redirect()!=this) return super.hasPackageName(); return false; } public String setName(String name) { if (redirect()!=this) { return super.setName(name); } else { throw new GroovyBugError("ConstructedClassWithPackage#setName should not be called"); } } } public ResolveVisitor(CompilationUnit cu) { compilationUnit = cu; } public void startResolving(ClassNode node, SourceUnit source) { this.source = source; visitClass(node); } protected void visitConstructorOrMethod(MethodNode node, boolean isConstructor) { VariableScope oldScope = currentScope; currentScope = node.getVariableScope(); Map<String, GenericsType> oldPNames = genericParameterNames; genericParameterNames = new HashMap<String, GenericsType>(genericParameterNames); resolveGenericsHeader(node.getGenericsTypes()); Parameter[] paras = node.getParameters(); for (Parameter p : paras) { p.setInitialExpression(transform(p.getInitialExpression())); resolveOrFail(p.getType(), p.getType()); visitAnnotations(p); } ClassNode[] exceptions = node.getExceptions(); for (ClassNode t : exceptions) { resolveOrFail(t, node); } resolveOrFail(node.getReturnType(), node); super.visitConstructorOrMethod(node, isConstructor); genericParameterNames = oldPNames; currentScope = oldScope; } public void visitField(FieldNode node) { ClassNode t = node.getType(); resolveOrFail(t, node); super.visitField(node); } public void visitProperty(PropertyNode node) { ClassNode t = node.getType(); resolveOrFail(t, node); super.visitProperty(node); } private boolean resolveToInner (ClassNode type) { // we do not do our name mangling to find an inner class // if the type is a ConstructedClassWithPackage, because in this case we // are resolving the name at a different place already if (type instanceof ConstructedClassWithPackage) return false; String name = type.getName(); String saved = name; while (true) { int len = name.lastIndexOf('.'); if (len == -1) break; name = name.substring(0,len) + "$" + name.substring(len+1); 
type.setName(name); if (resolve(type)) return true; } if(resolveToInnerEnum (type)) return true; type.setName(saved); return false; } private boolean resolveToInnerEnum (ClassNode type) { // GROOVY-3110: It may be an inner enum defined by this class itself, in which case it does not need to be // explicitly qualified by the currentClass name String name = type.getName(); if(currentClass != type && !name.contains(".") && type.getClass().equals(ClassNode.class)) { type.setName(currentClass.getName() + "$" + name); if (resolve(type)) return true; } return false; } private void resolveOrFail(ClassNode type, String msg, ASTNode node) { if (resolve(type)) return; if (resolveToInner(type)) return; addError("unable to resolve class " + type.getName() + " " + msg, node); } private void resolveOrFail(ClassNode type, ASTNode node, boolean prefereImports) { resolveGenericsTypes(type.getGenericsTypes()); if (prefereImports && resolveAliasFromModule(type)) return; resolveOrFail(type, node); } private void resolveOrFail(ClassNode type, ASTNode node) { resolveOrFail(type, "", node); } private boolean resolve(ClassNode type) { return resolve(type, true, true, true); } private boolean resolve(ClassNode type, boolean testModuleImports, boolean testDefaultImports, boolean testStaticInnerClasses) { resolveGenericsTypes(type.getGenericsTypes()); if (type.isResolved() || type.isPrimaryClassNode()) return true; if (type.isArray()) { ClassNode element = type.getComponentType(); boolean resolved = resolve(element, testModuleImports, testDefaultImports, testStaticInnerClasses); if (resolved) { ClassNode cn = element.makeArray(); type.setRedirect(cn); } return resolved; } // test if vanilla name is current class name if (currentClass == type) return true; if (genericParameterNames.get(type.getName()) != null) { GenericsType gt = genericParameterNames.get(type.getName()); type.setRedirect(gt.getType()); type.setGenericsTypes(new GenericsType[]{gt}); type.setGenericsPlaceHolder(true); return true; } if (currentClass.getNameWithoutPackage().equals(type.getName())) { type.setRedirect(currentClass); return true; } return resolveFromModule(type, testModuleImports) || resolveFromCompileUnit(type) || resolveFromDefaultImports(type, testDefaultImports) || resolveFromStaticInnerClasses(type, testStaticInnerClasses) || resolveFromClassCache(type) || resolveToClass(type) || resolveToScript(type); } private boolean resolveFromClassCache(ClassNode type) { String name = type.getName(); Object val = cachedClasses.get(name); if (val == null || val == NO_CLASS) { return false; } else { type.setRedirect((ClassNode)val); return true; } } // NOTE: copied from GroovyClassLoader private long getTimeStamp(Class cls) { return Verifier.getTimestamp(cls); } // NOTE: copied from GroovyClassLoader private boolean isSourceNewer(URL source, Class cls) { try { long lastMod; // Special handling for file:// protocol, as getLastModified() often reports // incorrect results (-1) if (source.getProtocol().equals("file")) { // Coerce the file URL to a File String path = source.getPath().replace('/', File.separatorChar).replace('|', ':'); File file = new File(path); lastMod = file.lastModified(); } else { URLConnection conn = source.openConnection(); lastMod = conn.getLastModified(); conn.getInputStream().close(); } return lastMod > getTimeStamp(cls); } catch (IOException e) { // if the stream can't be opened, let's keep the old reference return false; } } private boolean resolveToScript(ClassNode type) { String name = type.getName(); // We do not need to 
check instances of LowerCaseClass // to be a script, because unless there was an import for // for this we do not lookup these cases. This was a decision // made on the mailing list. To ensure we will not visit this // method again we set a NO_CLASS for this name if (type instanceof LowerCaseClass) { cachedClasses.put(name, NO_CLASS); } if (cachedClasses.get(name) == NO_CLASS) return false; if (cachedClasses.get(name) == SCRIPT) cachedClasses.put(name, NO_CLASS); if (name.startsWith("java.")) return type.isResolved(); //TODO: don't ignore inner static classes completely if (name.indexOf('$') != -1) return type.isResolved(); ModuleNode module = currentClass.getModule(); if (module.hasPackageName() && name.indexOf('.') == -1) return type.isResolved(); // try to find a script from classpath GroovyClassLoader gcl = compilationUnit.getClassLoader(); URL url = null; try { url = gcl.getResourceLoader().loadGroovySource(name); } catch (MalformedURLException e) { // fall through and let the URL be null } if (url != null) { if (type.isResolved()) { Class cls = type.getTypeClass(); // if the file is not newer we don't want to recompile if (!isSourceNewer(url, cls)) return true; // since we came to this, we want to recompile cachedClasses.remove(type.getName()); type.setRedirect(null); } SourceUnit su = compilationUnit.addSource(url); currentClass.getCompileUnit().addClassNodeToCompile(type, su); return true; } // type may be resolved through the classloader before return type.isResolved(); } private String replaceLastPoint(String name) { int lastPoint = name.lastIndexOf('.'); name = new StringBuffer() .append(name.substring(0, lastPoint)) .append("$") .append(name.substring(lastPoint + 1)) .toString(); return name; } private boolean resolveFromStaticInnerClasses(ClassNode type, boolean testStaticInnerClasses) { // a class consisting of a vanilla name can never be // a static inner class, because at least one dot is // required for this. Example: foo.bar -> foo$bar if (type instanceof LowerCaseClass) return false; // try to resolve a public static inner class' name testStaticInnerClasses &= type.hasPackageName(); if (testStaticInnerClasses) { if (type instanceof ConstructedClassWithPackage) { // we replace '.' only in the className part // with '$' to find an inner class. The case that // the package is really a class is handled else where ConstructedClassWithPackage tmp = (ConstructedClassWithPackage) type; String name = ((ConstructedClassWithPackage) type).className; tmp.className = replaceLastPoint(name); if (resolve(tmp, false, true, true)) { type.setRedirect(tmp.redirect()); return true; } tmp.className = name; } else { String name = type.getName(); String replacedPointType = replaceLastPoint(name); type.setName(replacedPointType); if (resolve(type, false, true, true)) return true; type.setName(name); } } return false; } private boolean resolveFromDefaultImports(ClassNode type, boolean testDefaultImports) { // test default imports testDefaultImports &= !type.hasPackageName(); // we do not resolve a vanilla name starting with a lower case letter // try to resolve against adefault import, because we know that the // default packages do not contain classes like these testDefaultImports &= !(type instanceof LowerCaseClass); if (testDefaultImports) { for (int i = 0, size = DEFAULT_IMPORTS.length; i < size; i++) { String packagePrefix = DEFAULT_IMPORTS[i]; String name = type.getName(); // We limit the inner class lookups here by using ConstructedClassWithPackage. 
// This way only the name will change, the packagePrefix will // not be included in the lookup. The case where the // packagePrefix is really a class is handled else where. // WARNING: This code does not expect a class that has an static // inner class in DEFAULT_IMPORTS ConstructedClassWithPackage tmp = new ConstructedClassWithPackage(packagePrefix,name); if (resolve(tmp, false, false, false)) { type.setRedirect(tmp.redirect()); return true; } } String name = type.getName(); if (name.equals("BigInteger")) { type.setRedirect(ClassHelper.BigInteger_TYPE); return true; } else if (name.equals("BigDecimal")) { type.setRedirect(ClassHelper.BigDecimal_TYPE); return true; } } return false; } private boolean resolveFromCompileUnit(ClassNode type) { // look into the compile unit if there is a class with that name CompileUnit compileUnit = currentClass.getCompileUnit(); if (compileUnit == null) return false; ClassNode cuClass = compileUnit.getClass(type.getName()); if (cuClass != null) { if (type != cuClass) type.setRedirect(cuClass); return true; } return false; } private void ambiguousClass(ClassNode type, ClassNode iType, String name) { if (type.getName().equals(iType.getName())) { addError("reference to " + name + " is ambiguous, both class " + type.getName() + " and " + iType.getName() + " match", type); } else { type.setRedirect(iType); } } private boolean resolveAliasFromModule(ClassNode type) { // In case of getting a ConstructedClassWithPackage here we do not do checks for partial // matches with imported classes. The ConstructedClassWithPackage is already a constructed // node and any subclass resolving will then take elsewhere place if (type instanceof ConstructedClassWithPackage) return false; ModuleNode module = currentClass.getModule(); if (module == null) return false; String name = type.getName(); // check module node imports aliases // the while loop enables a check for inner classes which are not fully imported, // but visible as the surrounding class is imported and the inner class is public/protected static String pname = name; int index = name.length(); /* * we have a name foo.bar and an import foo.foo. This means foo.bar is possibly * foo.foo.bar rather than foo.bar. This means to cut at the dot in foo.bar and * foo for import */ while (true) { pname = name.substring(0, index); ClassNode aliasedNode = module.getImportType(pname); if (aliasedNode != null) { if (pname.length() == name.length()) { // full match // We can compare here by length, because pname is always // a sbustring of name, so same length means they are equal. type.setRedirect(aliasedNode); return true; } else { //partial match // At this point we know that we have a match for pname. This may // mean, that name[pname.length()..<-1] is a static inner class. // For this the rest of the name does not need any dots in its name. // It is either completely a inner static class or it is not. // Since we do not want to have useless lookups we create the name // completely and use a ConstructedClassWithPackage to prevent lookups against the package. 
String className = aliasedNode.getNameWithoutPackage() + '$' + name.substring(pname.length()+1).replace('.', '$'); ConstructedClassWithPackage tmp = new ConstructedClassWithPackage(aliasedNode.getPackageName()+".", className); if (resolve(tmp, true, true, false)) { type.setRedirect(tmp.redirect()); return true; } } } index = pname.lastIndexOf('.'); if (index == -1) break; } return false; } private boolean resolveFromModule(ClassNode type, boolean testModuleImports) { // we decided that if we have a vanilla name starting with a lower case // letter, we will not try to resolve this name against .* // imports. Instead a full import is needed for these. // resolveAliasFromModule will do this check for us. This method // also checks that the module contains a class in the same package // with this name. This check is not done for vanilla names starting // with a lower case letter anymore if (type instanceof LowerCaseClass) { return resolveAliasFromModule(type); } String name = type.getName(); ModuleNode module = currentClass.getModule(); if (module == null) return false; boolean newNameUsed = false; // we add a package if there is none yet and the module has one. But we // do not add that if the type is a ConstructedClassWithPackage. // ConstructedClassWithPackage#hasPackageName() will return true if its className has no dots, // but the prefix may have them and the code there ignores that // fact, so we check here for ConstructedClassWithPackage. if (!type.hasPackageName() && module.hasPackageName() && !(type instanceof ConstructedClassWithPackage)) { type.setName(module.getPackageName() + name); newNameUsed = true; } // look into the module node if there is a class with that name List<ClassNode> moduleClasses = module.getClasses(); for (ClassNode mClass : moduleClasses) { if (mClass.getName().equals(type.getName())) { if (mClass != type) type.setRedirect(mClass); return true; } } if (newNameUsed) type.setName(name); if (testModuleImports) { if (resolveAliasFromModule(type)) return true; if (module.hasPackageName()) { // check the package this class is defined in. The usage of ConstructedClassWithPackage here // means that the module package will not be involved when the // compiler tries to find an inner class. ConstructedClassWithPackage tmp = new ConstructedClassWithPackage(module.getPackageName(),name); if (resolve(tmp, false, false, false)) { type.setRedirect(tmp.redirect()); return true; } } // check module node imports packages for (ImportNode importNode : module.getStarImports()) { String packagePrefix = importNode.getPackageName(); // We limit the inner class lookups here by using ConstructedClassWithPackage. // This way only the name will change, the packagePrefix will // not be included in the lookup. The case where the // packagePrefix is really a class is handled elsewhere. ConstructedClassWithPackage tmp = new ConstructedClassWithPackage(packagePrefix, name); if (resolve(tmp, false, false, true)) { ambiguousClass(type, tmp, name); type.setRedirect(tmp.redirect()); return true; } } } return false; } private boolean resolveToClass(ClassNode type) { String name = type.getName(); // We do not need to check instances of LowerCaseClass // to be a Class, because unless there was an import // for this we do not look up these cases. This was a decision // made on the mailing list.
To ensure we will not visit this // method again we set a NO_CLASS for this name if (type instanceof LowerCaseClass) { cachedClasses.put(name,NO_CLASS); } // We use the class cache cachedClasses here to prevent // calls to ClassLoader#loadClass. Disabling this cache will // cause a major performance hit. Unlike at the end of this // method we do not return true or false depending on whether we // want to recompile or not. If the class was cached, then // we do not want to recompile; recompilation is already // scheduled then Object cached = cachedClasses.get(name); if (cached == NO_CLASS) return false; // cached == SCRIPT should not happen here! if (cached == SCRIPT) throw new GroovyBugError("name "+name+" was marked as script, but was not resolved as such"); if (cached != null) return true; if (currentClass.getModule().hasPackageName() && name.indexOf('.') == -1) return false; GroovyClassLoader loader = compilationUnit.getClassLoader(); Class cls; try { // NOTE: it's important to do no lookup against script files // here since the GroovyClassLoader would create a new CompilationUnit cls = loader.loadClass(name, false, true); } catch (ClassNotFoundException cnfe) { cachedClasses.put(name, SCRIPT); return false; } catch (CompilationFailedException cfe) { compilationUnit.getErrorCollector().addErrorAndContinue(new ExceptionMessage(cfe, true, source)); return false; } //TODO: the case of a NoClassDefFoundError needs a bit more research; // it seems a simple recompilation is not possible. The current class // we are searching for is there, so we should mark that somehow. // Basically the missing class needs to be completely compiled before // we can again search for the current name. /*catch (NoClassDefFoundError ncdfe) { cachedClasses.put(name,SCRIPT); return false; }*/ if (cls == null) return false; ClassNode cn = ClassHelper.make(cls); cachedClasses.put(name, cn); type.setRedirect(cn); //NOTE: we might return false here even if we found a class, // because we want to give a possible script a chance to // recompile. This can only be done if the loader was not // the instance defining the class. return cls.getClassLoader() == loader; } public Expression transform(Expression exp) { if (exp == null) return null; Expression ret = null; if (exp instanceof VariableExpression) { ret = transformVariableExpression((VariableExpression) exp); } else if (exp.getClass() == PropertyExpression.class) { ret = transformPropertyExpression((PropertyExpression) exp); } else if (exp instanceof DeclarationExpression) { ret = transformDeclarationExpression((DeclarationExpression) exp); } else if (exp instanceof BinaryExpression) { ret = transformBinaryExpression((BinaryExpression) exp); } else if (exp instanceof MethodCallExpression) { ret = transformMethodCallExpression((MethodCallExpression) exp); } else if (exp instanceof ClosureExpression) { ret = transformClosureExpression((ClosureExpression) exp); } else if (exp instanceof ConstructorCallExpression) { ret = transformConstructorCallExpression((ConstructorCallExpression) exp); } else if (exp instanceof AnnotationConstantExpression) { ret = transformAnnotationConstantExpression((AnnotationConstantExpression) exp); } else { resolveOrFail(exp.getType(), exp); ret = exp.transformExpression(this); } if (ret!=null && ret!=exp) ret.setSourcePosition(exp); return ret; } private String lookupClassName(PropertyExpression pe) { boolean doInitialClassTest=true; String name = ""; // this loop builds a name from right to left, each name part // separated by "."
for (Expression it = pe; it != null; it = ((PropertyExpression) it).getObjectExpression()) { if (it instanceof VariableExpression) { VariableExpression ve = (VariableExpression) it; // stop at super and this if (ve.isSuperExpression() || ve.isThisExpression()) { return null; } String varName = ve.getName(); if (doInitialClassTest) { // we are at the first name part. This is the rightmost part. // If this part is in lower case, then we do not need a class // check. Other parts of the property expression will be tested // by a separate call to this method, so foo.Bar.bar // can still be resolved to the class foo.Bar and the static // field bar. if (!testVanillaNameForClass(varName)) return null; doInitialClassTest = false; name = varName; } else { name = varName + "." + name; } break; } // anything other than PropertyExpressions or // VariableExpressions will stop resolving else if (it.getClass() != PropertyExpression.class) { return null; } else { PropertyExpression current = (PropertyExpression) it; String propertyPart = current.getPropertyAsString(); // the class property stops resolving, dynamic property names too if (propertyPart == null || propertyPart.equals("class")) { return null; } if (doInitialClassTest) { // we are at the first name part. This is the rightmost part. // If this part is in lower case, then we do not need a class // check. Other parts of the property expression will be tested // by a separate call to this method, so foo.Bar.bar // can still be resolved to the class foo.Bar and the static // field bar. if (!testVanillaNameForClass(propertyPart)) return null; doInitialClassTest = false; name = propertyPart; } else { name = propertyPart + "." + name; } } } if (name.length() == 0) return null; return name; } // iterate from the innermost to the outermost and check for classes // this check will ignore a .class property; for example, Integer.class will be // a PropertyExpression with the ClassExpression of Integer as objectExpression // and class as property private Expression correctClassClassChain(PropertyExpression pe) { LinkedList<Expression> stack = new LinkedList<Expression>(); ClassExpression found = null; for (Expression it = pe; it != null; it = ((PropertyExpression) it).getObjectExpression()) { if (it instanceof ClassExpression) { found = (ClassExpression) it; break; } else if (!(it.getClass() == PropertyExpression.class)) { return pe; } stack.addFirst(it); } if (found == null) return pe; if (stack.isEmpty()) return pe; Object stackElement = stack.removeFirst(); if (!(stackElement.getClass() == PropertyExpression.class)) return pe; PropertyExpression classPropertyExpression = (PropertyExpression) stackElement; String propertyNamePart = classPropertyExpression.getPropertyAsString(); if (propertyNamePart == null || !propertyNamePart.equals("class")) return pe; found.setSourcePosition(classPropertyExpression); if (stack.isEmpty()) return found; stackElement = stack.removeFirst(); if (!(stackElement.getClass() == PropertyExpression.class)) return pe; PropertyExpression classPropertyExpressionContainer = (PropertyExpression) stackElement; classPropertyExpressionContainer.setObjectExpression(found); return pe; } protected Expression transformPropertyExpression(PropertyExpression pe) { boolean itlp = isTopLevelProperty; boolean ipe = inPropertyExpression; Expression objectExpression = pe.getObjectExpression(); inPropertyExpression = true; isTopLevelProperty = (objectExpression.getClass() != PropertyExpression.class); objectExpression =
transform(objectExpression); // we handle the property part as if it were not part of the property inPropertyExpression = false; Expression property = transform(pe.getProperty()); isTopLevelProperty = itlp; inPropertyExpression = ipe; boolean spreadSafe = pe.isSpreadSafe(); PropertyExpression old = pe; pe = new PropertyExpression(objectExpression, property, pe.isSafe()); pe.setSpreadSafe(spreadSafe); pe.setSourcePosition(old); String className = lookupClassName(pe); if (className != null) { ClassNode type = ClassHelper.make(className); if (resolve(type)) { Expression ret = new ClassExpression(type); ret.setSourcePosition(pe); return ret; } } if (objectExpression instanceof ClassExpression && pe.getPropertyAsString() != null) { // possibly an inner class ClassExpression ce = (ClassExpression) objectExpression; ClassNode type = ClassHelper.make(ce.getType().getName() + "$" + pe.getPropertyAsString()); if (resolve(type, false, false, false)) { Expression ret = new ClassExpression(type); ret.setSourcePosition(ce); return ret; } } Expression ret = pe; if (isTopLevelProperty) ret = correctClassClassChain(pe); return ret; } protected Expression transformVariableExpression(VariableExpression ve) { Variable v = ve.getAccessedVariable(); if (v instanceof DynamicVariable){ String name = ve.getName(); ClassNode t = ClassHelper.make(name); // asking isResolved here allows us to check whether a primitive // type name like "int" was used to make t. In such a case // we have nothing left to do. boolean isClass = t.isResolved(); if (!isClass) { // It was not a primitive type, so next we see if the name, // which is a vanilla name, starts with a lower case letter. // In that case we change it to a LowerCaseClass to let the // compiler skip the resolving at several places in this class. if (Character.isLowerCase(name.charAt(0))) { t = new LowerCaseClass(name); } isClass = resolve(t); if(!isClass) isClass = resolveToInnerEnum(t); } if (isClass) { // the name is a type, so remove it from the scoping: // as it is only a class variable, it is only in // referencedClassVariables, but must be removed // for each parent scope too for (VariableScope scope = currentScope; scope != null && !scope.isRoot(); scope = scope.getParent()) { if (scope.isRoot()) break; if (scope.removeReferencedClassVariable(ve.getName()) == null) break; } ClassExpression ce = new ClassExpression(t); ce.setSourcePosition(ve); return ce; } } resolveOrFail(ve.getType(), ve); return ve; } private boolean testVanillaNameForClass(String name) { if (name==null || name.length()==0) return false; return !Character.isLowerCase(name.charAt(0)); } protected Expression transformBinaryExpression(BinaryExpression be) { Expression left = transform(be.getLeftExpression()); int type = be.getOperation().getType(); if ((type == Types.ASSIGNMENT_OPERATOR || type == Types.EQUAL) && left instanceof ClassExpression) { ClassExpression ce = (ClassExpression) left; String error = "you tried to assign a value to the class '" + ce.getType().getName() + "'"; if (ce.getType().isScript()) { error += ". Do you have a script with this name?"; } addError(error, be.getLeftExpression()); return be; } if (left instanceof ClassExpression && be.getRightExpression() instanceof ListExpression) { // we have C[] if the list is empty -> should be an array then!
ListExpression list = (ListExpression) be.getRightExpression(); if (list.getExpressions().isEmpty()) { return new ClassExpression(left.getType().makeArray()); } } Expression right = transform(be.getRightExpression()); be.setLeftExpression(left); be.setRightExpression(right); return be; } protected Expression transformClosureExpression(ClosureExpression ce) { boolean oldInClosure = inClosure; inClosure = true; Parameter[] paras = ce.getParameters(); if (paras != null) { for (Parameter para : paras) { ClassNode t = para.getType(); resolveOrFail(t, ce); if (para.hasInitialExpression()) { Object initialVal = para.getInitialExpression(); if (initialVal instanceof Expression) { transform((Expression) initialVal); } } } } Statement code = ce.getCode(); if (code != null) code.visit(this); inClosure = oldInClosure; return ce; } protected Expression transformConstructorCallExpression(ConstructorCallExpression cce) { ClassNode type = cce.getType(); resolveOrFail(type, cce); isSpecialConstructorCall = cce.isSpecialCall(); Expression ret = cce.transformExpression(this); isSpecialConstructorCall = false; return ret; } protected Expression transformMethodCallExpression(MethodCallExpression mce) { Expression args = transform(mce.getArguments()); Expression method = transform(mce.getMethod()); Expression object = transform(mce.getObjectExpression()); MethodCallExpression result = new MethodCallExpression(object, method, args); result.setSafe(mce.isSafe()); result.setImplicitThis(mce.isImplicitThis()); result.setSpreadSafe(mce.isSpreadSafe()); result.setSourcePosition(mce); return result; } protected Expression transformDeclarationExpression(DeclarationExpression de) { Expression oldLeft = de.getLeftExpression(); Expression left = transform(oldLeft); if (left instanceof ClassExpression) { ClassExpression ce = (ClassExpression) left; addError("you tried to assign a value to the class " + ce.getType().getName(), oldLeft); return de; } Expression right = transform(de.getRightExpression()); if (right == de.getRightExpression()) return de; DeclarationExpression newDeclExpr = new DeclarationExpression(left, de.getOperation(), right); newDeclExpr.setSourcePosition(de); return newDeclExpr; } protected Expression transformAnnotationConstantExpression(AnnotationConstantExpression ace) { AnnotationNode an = (AnnotationNode) ace.getValue(); ClassNode type = an.getClassNode(); resolveOrFail(type, ", unable to find class for annotation", an); for (Map.Entry<String, Expression> member : an.getMembers().entrySet()) { member.setValue(transform(member.getValue())); } return ace; } public void visitAnnotations(AnnotatedNode node) { List<AnnotationNode> annotations = node.getAnnotations(); if (annotations.isEmpty()) return; for (AnnotationNode an : annotations) { // skip built-in properties if (an.isBuiltIn()) continue; resolveOrFail(an.getClassNode(), ", unable to find class for annotation", an); for (Map.Entry<String, Expression> member : an.getMembers().entrySet()) { Expression newValue = transform(member.getValue()); newValue = transformInlineConstants(newValue); member.setValue(newValue); checkAnnotationMemberValue(newValue); } } } // resolve constant-looking expressions statically (do here as gets transformed away later) private Expression transformInlineConstants(Expression exp) { if (exp instanceof PropertyExpression) { PropertyExpression pe = (PropertyExpression) exp; if (pe.getObjectExpression() instanceof ClassExpression) { ClassExpression ce = (ClassExpression) pe.getObjectExpression(); ClassNode type = 
ce.getType(); if (type.isEnum()) return exp; FieldNode fn = type.getField(pe.getPropertyAsString()); if (fn != null && !fn.isEnum() && fn.isStatic() && fn.isFinal()) { if (fn.getInitialValueExpression() instanceof ConstantExpression) { return fn.getInitialValueExpression(); } } } } else if (exp instanceof ListExpression) { ListExpression le = (ListExpression) exp; ListExpression result = new ListExpression(); for (Expression e : le.getExpressions()) { result.addExpression(transformInlineConstants(e)); } return result; } else if (exp instanceof AnnotationConstantExpression) { ConstantExpression ce = (ConstantExpression) exp; if (ce.getValue() instanceof AnnotationNode) { // replicate a little bit of AnnotationVisitor here // because we can't wait until later to do this AnnotationNode an = (AnnotationNode) ce.getValue(); for (Map.Entry<String, Expression> member : an.getMembers().entrySet()) { member.setValue(transformInlineConstants(member.getValue())); } } } return exp; } private void checkAnnotationMemberValue(Expression newValue) { if (newValue instanceof PropertyExpression) { PropertyExpression pe = (PropertyExpression) newValue; if (!(pe.getObjectExpression() instanceof ClassExpression)) { addError("unable to find class '" + pe.getText() + "' for annotation attribute constant", pe.getObjectExpression()); } } else if (newValue instanceof ListExpression) { ListExpression le = (ListExpression) newValue; for (Expression e : le.getExpressions()) { checkAnnotationMemberValue(e); } } } public void visitClass(ClassNode node) { ClassNode oldNode = currentClass; currentClass = node; resolveGenericsHeader(node.getGenericsTypes()); ModuleNode module = node.getModule(); if (!module.hasImportsResolved()) { for (ImportNode importNode : module.getImports()) { ClassNode type = importNode.getType(); if (resolve(type, false, false, true)) continue; addError("unable to resolve class " + type.getName(), type); } for (ImportNode importNode : module.getStaticStarImports().values()) { ClassNode type = importNode.getType(); if (resolve(type, false, false, true)) continue; // Maybe this type belongs in the same package as the node that is doing the // static import. In that case, the package may not have been explicitly specified. // Try with the node's package too. If still not found, revert to the original type name. if (type.getPackageName() == null && node.getPackageName() != null) { String oldTypeName = type.getName(); type.setName(node.getPackageName() + "."
+ oldTypeName); if (resolve(type, false, false, true)) continue; type.setName(oldTypeName); } addError("unable to resolve class " + type.getName(), type); } for (ImportNode importNode : module.getStaticImports().values()) { ClassNode type = importNode.getType(); if (resolve(type, true, true, true)) continue; addError("unable to resolve class " + type.getName(), type); } for (ImportNode importNode : module.getStaticStarImports().values()) { ClassNode type = importNode.getType(); if (resolve(type, true, true, true)) continue; addError("unable to resolve class " + type.getName(), type); } module.setImportsResolved(true); } ClassNode sn = node.getUnresolvedSuperClass(); if (sn != null) resolveOrFail(sn, node, true); for (ClassNode anInterface : node.getInterfaces()) { resolveOrFail(anInterface, node, true); } super.visitClass(node); currentClass = oldNode; } public void visitCatchStatement(CatchStatement cs) { resolveOrFail(cs.getExceptionType(), cs); if (cs.getExceptionType() == ClassHelper.DYNAMIC_TYPE) { cs.getVariable().setType(ClassHelper.make(Exception.class)); } super.visitCatchStatement(cs); } public void visitForLoop(ForStatement forLoop) { resolveOrFail(forLoop.getVariableType(), forLoop); super.visitForLoop(forLoop); } public void visitBlockStatement(BlockStatement block) { VariableScope oldScope = currentScope; currentScope = block.getVariableScope(); super.visitBlockStatement(block); currentScope = oldScope; } protected SourceUnit getSourceUnit() { return source; } private void resolveGenericsTypes(GenericsType[] types) { if (types == null) return; currentClass.setUsingGenerics(true); for (GenericsType type : types) { resolveGenericsType(type); } } private void resolveGenericsHeader(GenericsType[] types) { if (types == null) return; currentClass.setUsingGenerics(true); for (GenericsType type : types) { ClassNode classNode = type.getType(); String name = classNode.getName(); ClassNode[] bounds = type.getUpperBounds(); if (bounds != null) { boolean nameAdded = false; for (ClassNode upperBound : bounds) { if (!nameAdded && upperBound != null || !resolve(classNode)) { genericParameterNames.put(name, type); type.setPlaceholder(true); classNode.setRedirect(upperBound); nameAdded = true; } resolveOrFail(upperBound, classNode); } } else { genericParameterNames.put(name, type); classNode.setRedirect(ClassHelper.OBJECT_TYPE); type.setPlaceholder(true); } } } private void resolveGenericsType(GenericsType genericsType) { if (genericsType.isResolved()) return; currentClass.setUsingGenerics(true); ClassNode type = genericsType.getType(); // save name before redirect String name = type.getName(); ClassNode[] bounds = genericsType.getUpperBounds(); if (!genericParameterNames.containsKey(name)) { if (bounds != null) { for (ClassNode upperBound : bounds) { resolveOrFail(upperBound, genericsType); type.setRedirect(upperBound); resolveGenericsTypes(upperBound.getGenericsTypes()); } } else if (genericsType.isWildcard()) { type.setRedirect(ClassHelper.OBJECT_TYPE); } else { resolveOrFail(type, genericsType); } } else { GenericsType gt = genericParameterNames.get(name); type.setRedirect(gt.getType()); genericsType.setPlaceholder(true); } if (genericsType.getLowerBound() != null) { resolveOrFail(genericsType.getLowerBound(), genericsType); } resolveGenericsTypes(type.getGenericsTypes()); genericsType.setResolved(genericsType.getType().isResolved()); } }
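The resolveFromDefaultImports path above walks a fixed list of package prefixes and takes the first one that yields a class, skipping lower-case vanilla names entirely. The following is a minimal standalone sketch of that lookup order; Class.forName stands in for the compiler's ClassNode resolution (a simplifying assumption, not Groovy's actual API), and the prefix list mirrors Groovy's DEFAULT_IMPORTS.

import java.util.Arrays;
import java.util.List;
import java.util.Optional;

// Minimal sketch of default-import resolution: for a vanilla name such as
// "File", each default package prefix is tried in order until a class loads.
// Class.forName is a hypothetical stand-in for the compiler's ClassNode lookup.
public class DefaultImportSketch {
    private static final List<String> DEFAULT_IMPORTS = Arrays.asList(
            "java.lang.", "java.util.", "java.io.", "java.net.",
            "groovy.lang.", "groovy.util.");

    static Optional<Class<?>> resolve(String vanillaName) {
        // Names starting with a lower case letter are skipped, matching
        // the LowerCaseClass shortcut used by the visitor above.
        if (Character.isLowerCase(vanillaName.charAt(0))) return Optional.empty();
        for (String prefix : DEFAULT_IMPORTS) {
            try {
                return Optional.of(Class.forName(prefix + vanillaName));
            } catch (ClassNotFoundException ignored) {
                // not in this package; try the next prefix
            }
        }
        return Optional.empty();
    }

    public static void main(String[] args) {
        System.out.println(resolve("File"));   // Optional[class java.io.File]
        System.out.println(resolve("list"));   // Optional.empty (lower case shortcut)
        System.out.println(resolve("Nope"));   // Optional.empty (no prefix matches)
    }
}

Note that first match wins here, whereas the star-import loop in resolveFromModule goes through ambiguousClass so that two packages supplying the same simple name are reported as a clash.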
GROOVY-3794 ResolveVisitor was caching the generic types with wrong generic type name as the key because of which some generics usage was failing in GParallelizer build. Fixed it. git-svn-id: aa43ce4553b005588bb3cc6c16966320b011facb@17755 a5544e8c-8a19-0410-ba12-f9af4593a198
src/main/org/codehaus/groovy/control/ResolveVisitor.java
GROOVY-3794 ResolveVisitor was caching the generic types with wrong generic type name as the key because of which some generics usage was failing in GParallelizer build. Fixed it.
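The message above describes caching generic types under the wrong key, namely the bare type-parameter name. A hedged illustration of why that key is too weak follows; the maps below are hypothetical stand-ins for the compiler's GenericsType table, used for illustration only, not Groovy's actual data structures.

import java.util.HashMap;
import java.util.Map;

// Two generic declarations both call their parameter "T" but bound it
// differently. Keyed by the bare name, the second put() silently replaces
// the first, so Box's T would resolve against Tag's bound. Qualifying the
// key by the declaring class keeps the entries apart.
public class GenericsKeySketch {
    public static void main(String[] args) {
        Map<String, String> byBareName = new HashMap<>();
        byBareName.put("T", "Number");       // from class Box<T extends Number>
        byBareName.put("T", "CharSequence"); // from class Tag<T extends CharSequence>
        System.out.println("Box's T bound: " + byBareName.get("T")); // CharSequence -- wrong

        Map<String, String> byQualifiedName = new HashMap<>();
        byQualifiedName.put("Box.T", "Number");
        byQualifiedName.put("Tag.T", "CharSequence");
        System.out.println("Box's T bound: " + byQualifiedName.get("Box.T")); // Number -- correct
    }
}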
Java
apache-2.0
f638870b120df65b6b4cceb51ee9bcff32c14e04
0
mohanaraosv/commons-dbcp,mohanaraosv/commons-dbcp,mohanaraosv/commons-dbcp
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.dbcp; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.Hashtable; import java.util.Stack; import junit.framework.TestCase; // XXX FIX ME XXX // this class still needs some cleanup, but at least // this consolidates most of the relevant test code // in a fairly re-usable fashion // XXX FIX ME XXX /** * Base test suite for DBCP pools. * * @author Rodney Waldhoff * @author Sean C. Sullivan * @author John McNally * @author Dirk Verbeeck * @version $Revision$ $Date$ */ public abstract class TestConnectionPool extends TestCase { public TestConnectionPool(String testName) { super(testName); } public void tearDown() throws Exception { super.tearDown(); // Close any connections opened by the test while (!connections.isEmpty()) { Connection conn = (Connection) connections.pop(); try { conn.close(); } catch (Exception ex) { // ignore } finally { conn = null; } } } protected abstract Connection getConnection() throws Exception; protected int getMaxActive() { return 10; } protected long getMaxWait() { return 100L; } /** Connections opened during the course of a test */ protected Stack connections = new Stack(); /** Acquire a connection and push it onto the connections stack */ protected Connection newConnection() throws Exception { Connection connection = getConnection(); connections.push(connection); return connection; } // ----------- Utility Methods --------------------------------- protected String getUsername(Connection conn) throws SQLException { Statement stmt = conn.createStatement(); ResultSet rs = stmt.executeQuery("select username"); if (rs.next()) { return rs.getString(1); } return null; } // ----------- tests --------------------------------- public void testClearWarnings() throws Exception { Connection[] c = new Connection[getMaxActive()]; for (int i = 0; i < c.length; i++) { c[i] = newConnection(); assertTrue(c[i] != null); // generate SQLWarning on connection c[i].prepareCall("warning"); } for (int i = 0; i < c.length; i++) { assertNotNull(c[i].getWarnings()); } for (int i = 0; i < c.length; i++) { c[i].close(); } for (int i = 0; i < c.length; i++) { c[i] = newConnection(); } for (int i = 0; i < c.length; i++) { // warnings should have been cleared by putting the connection back in the pool assertNull(c[i].getWarnings()); } for (int i = 0; i < c.length; i++) { c[i].close(); } } public void testIsClosed() throws Exception { for(int i=0;i<getMaxActive();i++) { Connection conn = newConnection(); assertNotNull(conn); assertTrue(!conn.isClosed()); PreparedStatement stmt = conn.prepareStatement("select * from dual"); assertNotNull(stmt); ResultSet rset = stmt.executeQuery(); 
assertNotNull(rset); assertTrue(rset.next()); rset.close(); stmt.close(); conn.close(); assertTrue(conn.isClosed()); } } /** * Verify the close method can be called multiple times on a single connection without * an exception being thrown. */ public void testCanCloseConnectionTwice() throws Exception { for (int i = 0; i < getMaxActive(); i++) { // loop to show we *can* close again once we've borrowed it from the pool again Connection conn = newConnection(); assertNotNull(conn); assertTrue(!conn.isClosed()); conn.close(); assertTrue(conn.isClosed()); conn.close(); assertTrue(conn.isClosed()); } } public void testCanCloseStatementTwice() throws Exception { Connection conn = newConnection(); assertNotNull(conn); assertTrue(!conn.isClosed()); for(int i=0;i<2;i++) { // loop to show we *can* close again once we've borrowed it from the pool again Statement stmt = conn.createStatement(); assertNotNull(stmt); assertFalse(isClosed(stmt)); stmt.close(); assertTrue(isClosed(stmt)); stmt.close(); assertTrue(isClosed(stmt)); stmt.close(); assertTrue(isClosed(stmt)); } conn.close(); } public void testCanClosePreparedStatementTwice() throws Exception { Connection conn = newConnection(); assertNotNull(conn); assertTrue(!conn.isClosed()); for(int i=0;i<2;i++) { // loop to show we *can* close again once we've borrowed it from the pool again PreparedStatement stmt = conn.prepareStatement("select * from dual"); assertNotNull(stmt); assertFalse(isClosed(stmt)); stmt.close(); assertTrue(isClosed(stmt)); stmt.close(); assertTrue(isClosed(stmt)); stmt.close(); assertTrue(isClosed(stmt)); } conn.close(); } public void testCanCloseCallableStatementTwice() throws Exception { Connection conn = newConnection(); assertNotNull(conn); assertTrue(!conn.isClosed()); for(int i=0;i<2;i++) { // loop to show we *can* close again once we've borrowed it from the pool again PreparedStatement stmt = conn.prepareCall("select * from dual"); assertNotNull(stmt); assertFalse(isClosed(stmt)); stmt.close(); assertTrue(isClosed(stmt)); stmt.close(); assertTrue(isClosed(stmt)); stmt.close(); assertTrue(isClosed(stmt)); } conn.close(); } public void testCanCloseResultSetTwice() throws Exception { Connection conn = newConnection(); assertNotNull(conn); assertTrue(!conn.isClosed()); for(int i=0;i<2;i++) { // loop to show we *can* close again once we've borrowed it from the pool again PreparedStatement stmt = conn.prepareStatement("select * from dual"); assertNotNull(stmt); ResultSet rset = stmt.executeQuery(); assertNotNull(rset); assertFalse(isClosed(rset)); rset.close(); assertTrue(isClosed(rset)); rset.close(); assertTrue(isClosed(rset)); rset.close(); assertTrue(isClosed(rset)); } conn.close(); } public void testBackPointers() throws Exception { // normal statement Connection conn = newConnection(); assertBackPointers(conn, conn.createStatement()); conn = newConnection(); assertBackPointers(conn, conn.createStatement(0, 0)); conn = newConnection(); assertBackPointers(conn, conn.createStatement(0, 0, 0)); // prepared statement conn = newConnection(); assertBackPointers(conn, conn.prepareStatement("select * from dual")); conn = newConnection(); assertBackPointers(conn, conn.prepareStatement("select * from dual", 0)); conn = newConnection(); assertBackPointers(conn, conn.prepareStatement("select * from dual", 0, 0)); conn = newConnection(); assertBackPointers(conn, conn.prepareStatement("select * from dual", 0, 0, 0)); conn = newConnection(); assertBackPointers(conn, conn.prepareStatement("select * from dual", new int[0])); conn = 
newConnection(); assertBackPointers(conn, conn.prepareStatement("select * from dual", new String[0])); // callable statement conn = newConnection(); assertBackPointers(conn, conn.prepareCall("select * from dual")); conn = newConnection(); assertBackPointers(conn, conn.prepareCall("select * from dual", 0, 0)); conn = newConnection(); assertBackPointers(conn, conn.prepareCall("select * from dual", 0, 0, 0)); } protected void assertBackPointers(Connection conn, Statement statement) throws SQLException { assertFalse(conn.isClosed()); assertFalse(isClosed(statement)); assertSame("statement.getConnection() should return the exact same connection instance that was used to create the statement", conn, statement.getConnection()); ResultSet resultSet = statement.getResultSet(); assertFalse(isClosed(resultSet)); assertSame("resultSet.getStatement() should return the exact same statement instance that was used to create the result set", statement, resultSet.getStatement()); ResultSet executeResultSet = statement.executeQuery("select * from dual"); assertFalse(isClosed(executeResultSet)); assertSame("resultSet.getStatement() should return the exact same statement instance that was used to create the result set", statement, executeResultSet.getStatement()); ResultSet keysResultSet = statement.getGeneratedKeys(); assertFalse(isClosed(keysResultSet)); assertSame("resultSet.getStatement() should return the exact same statement instance that was used to create the result set", statement, keysResultSet.getStatement()); ResultSet preparedResultSet = null; if (statement instanceof PreparedStatement) { PreparedStatement preparedStatement = (PreparedStatement) statement; preparedResultSet = preparedStatement.executeQuery(); assertFalse(isClosed(preparedResultSet)); assertSame("resultSet.getStatement() should return the exact same statement instance that was used to create the result set", statement, preparedResultSet.getStatement()); } resultSet.getStatement().getConnection().close(); assertTrue(conn.isClosed()); assertTrue(isClosed(statement)); assertTrue(isClosed(resultSet)); assertTrue(isClosed(executeResultSet)); assertTrue(isClosed(keysResultSet)); if (preparedResultSet != null) { assertTrue(isClosed(preparedResultSet)); } } public void testSimple() throws Exception { Connection conn = newConnection(); assertNotNull(conn); PreparedStatement stmt = conn.prepareStatement("select * from dual"); assertNotNull(stmt); ResultSet rset = stmt.executeQuery(); assertNotNull(rset); assertTrue(rset.next()); rset.close(); stmt.close(); conn.close(); } public void testRepeatedBorrowAndReturn() throws Exception { for(int i=0;i<100;i++) { Connection conn = newConnection(); assertNotNull(conn); PreparedStatement stmt = conn.prepareStatement("select * from dual"); assertNotNull(stmt); ResultSet rset = stmt.executeQuery(); assertNotNull(rset); assertTrue(rset.next()); rset.close(); stmt.close(); conn.close(); } } public void testSimple2() throws Exception { Connection conn = newConnection(); assertNotNull(conn); { PreparedStatement stmt = conn.prepareStatement("select * from dual"); assertNotNull(stmt); ResultSet rset = stmt.executeQuery(); assertNotNull(rset); assertTrue(rset.next()); rset.close(); stmt.close(); } { PreparedStatement stmt = conn.prepareStatement("select * from dual"); assertNotNull(stmt); ResultSet rset = stmt.executeQuery(); assertNotNull(rset); assertTrue(rset.next()); rset.close(); stmt.close(); } conn.close(); try { conn.createStatement(); fail("Can't use closed connections"); } catch(SQLException e) { // 
expected } conn = newConnection(); assertNotNull(conn); { PreparedStatement stmt = conn.prepareStatement("select * from dual"); assertNotNull(stmt); ResultSet rset = stmt.executeQuery(); assertNotNull(rset); assertTrue(rset.next()); rset.close(); stmt.close(); } { PreparedStatement stmt = conn.prepareStatement("select * from dual"); assertNotNull(stmt); ResultSet rset = stmt.executeQuery(); assertNotNull(rset); assertTrue(rset.next()); rset.close(); stmt.close(); } conn.close(); conn = null; } public void testPooling() throws Exception { // Grab a maximal set of open connections from the pool Connection[] c = new Connection[getMaxActive()]; Connection[] u = new Connection[getMaxActive()]; for (int i = 0; i < c.length; i++) { c[i] = newConnection(); if (c[i] instanceof DelegatingConnection) { u[i] = ((DelegatingConnection) c[i]).getInnermostDelegate(); } else { for (int j = 0; j <= i; j++) { c[j].close(); } return; // skip this test } } // Close connections one at a time and get new ones, making sure // the new ones come from the pool for (int i = 0; i < c.length; i++) { c[i].close(); Connection con = newConnection(); Connection underCon = ((DelegatingConnection) con).getInnermostDelegate(); assertTrue("Failed to get connection", underCon != null); boolean found = false; for (int j = 0; j < c.length; j++) { if (underCon == u[j]) { found = true; break; } } assertTrue("New connection not from pool", found); con.close(); } } public void testAutoCommitBehavior() throws Exception { Connection conn = newConnection(); assertNotNull(conn); assertTrue(conn.getAutoCommit()); conn.setAutoCommit(false); conn.close(); Connection conn2 = newConnection(); assertTrue( conn2.getAutoCommit() ); Connection conn3 = newConnection(); assertTrue( conn3.getAutoCommit() ); conn2.close(); conn3.close(); } /** @see "http://issues.apache.org/bugzilla/show_bug.cgi?id=12400" */ public void testConnectionsAreDistinct() throws Exception { Connection[] conn = new Connection[getMaxActive()]; for(int i=0;i<conn.length;i++) { conn[i] = newConnection(); for(int j=0;j<i;j++) { assertTrue(conn[j] != conn[i]); assertTrue(!conn[j].equals(conn[i])); } } for(int i=0;i<conn.length;i++) { conn[i].close(); } } public void testOpening() throws Exception { Connection[] c = new Connection[getMaxActive()]; // test that opening new connections is not closing previous for (int i = 0; i < c.length; i++) { c[i] = newConnection(); assertTrue(c[i] != null); for (int j = 0; j <= i; j++) { assertTrue(!c[j].isClosed()); } } for (int i = 0; i < c.length; i++) { c[i].close(); } } public void testClosing() throws Exception { Connection[] c = new Connection[getMaxActive()]; // open the maximum connections for (int i = 0; i < c.length; i++) { c[i] = newConnection(); } // close one of the connections c[0].close(); assertTrue(c[0].isClosed()); // get a new connection c[0] = newConnection(); for (int i = 0; i < c.length; i++) { c[i].close(); } } public void testMaxActive() throws Exception { Connection[] c = new Connection[getMaxActive()]; for (int i = 0; i < c.length; i++) { c[i] = newConnection(); assertTrue(c[i] != null); } try { newConnection(); fail("Allowed to open more than DefaultMaxActive connections."); } catch (java.sql.SQLException e) { // should only be able to open 10 connections, so this test should // throw an exception } for (int i = 0; i < c.length; i++) { c[i].close(); } } /** * DBCP-128: BasicDataSource.getConnection() * Connections don't work as hashtable keys */ public void testHashing() throws Exception { Connection con = 
getConnection(); Hashtable hash = new Hashtable(); hash.put(con, "test"); assertEquals("test", hash.get(con)); assertTrue(hash.containsKey(con)); assertTrue(hash.contains("test")); hash.clear(); con.close(); } public void testThreaded() { TestThread[] threads = new TestThread[getMaxActive()]; for(int i=0;i<threads.length;i++) { threads[i] = new TestThread(50,50); Thread t = new Thread(threads[i]); t.start(); } for(int i=0;i<threads.length;i++) { while(!(threads[i]).complete()) { try { Thread.sleep(100L); } catch(Exception e) { // ignored } } if(threads[i].failed()) { fail(); } } } class TestThread implements Runnable { java.util.Random _random = new java.util.Random(); boolean _complete = false; boolean _failed = false; int _iter = 100; int _delay = 50; public TestThread() { } public TestThread(int iter) { _iter = iter; } public TestThread(int iter, int delay) { _iter = iter; _delay = delay; } public boolean complete() { return _complete; } public boolean failed() { return _failed; } public void run() { for(int i=0;i<_iter;i++) { try { Thread.sleep(_random.nextInt(_delay)); } catch(Exception e) { // ignored } Connection conn = null; PreparedStatement stmt = null; ResultSet rset = null; try { conn = newConnection(); stmt = conn.prepareStatement("select 'literal', SYSDATE from dual"); rset = stmt.executeQuery(); try { Thread.sleep(_random.nextInt(_delay)); } catch(Exception e) { // ignored } } catch(Exception e) { e.printStackTrace(); _failed = true; _complete = true; break; } finally { try { if (rset != null) rset.close(); } catch(Exception e) { } try { if (stmt != null) stmt.close(); } catch(Exception e) { } try { if (conn != null) conn.close(); } catch(Exception e) { } } } _complete = true; } } // Bugzilla Bug 24328: PooledConnectionImpl ignores resultsetType // and Concurrency if statement pooling is not enabled // http://issues.apache.org/bugzilla/show_bug.cgi?id=24328 public void testPrepareStatementOptions() throws Exception { Connection conn = newConnection(); assertNotNull(conn); PreparedStatement stmt = conn.prepareStatement("select * from dual", ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE); assertNotNull(stmt); ResultSet rset = stmt.executeQuery(); assertNotNull(rset); assertTrue(rset.next()); assertEquals(ResultSet.TYPE_SCROLL_SENSITIVE, rset.getType()); assertEquals(ResultSet.CONCUR_UPDATABLE, rset.getConcurrency()); rset.close(); stmt.close(); conn.close(); } // Bugzilla Bug 24966: NullPointer with Oracle 9 driver // wrong order of passivate/close when a rset isn't closed public void testNoRsetClose() throws Exception { Connection conn = newConnection(); assertNotNull(conn); PreparedStatement stmt = conn.prepareStatement("test"); assertNotNull(stmt); ResultSet rset = stmt.getResultSet(); assertNotNull(rset); // forget to close the resultset: rset.close(); stmt.close(); conn.close(); } // Bugzilla Bug 26966: Connectionpool's connections always returns same public void testHashCode() throws Exception { Connection conn1 = newConnection(); assertNotNull(conn1); Connection conn2 = newConnection(); assertNotNull(conn2); assertTrue(conn1.hashCode() != conn2.hashCode()); } protected boolean isClosed(Statement statement) { try { statement.getWarnings(); return false; } catch (SQLException e) { // getWarnings throws an exception if the statement is // closed, but could throw an exception for other reasons; // in this case it is good enough to assume the statement // is closed return true; } } protected boolean isClosed(ResultSet resultSet) { try { resultSet.getWarnings();
return false; } catch (SQLException e) { // getWarnings throws an exception if the statement is // closed, but could throw an exception for other reasons; // in this case it is good enough to assume the result set // is closed return true; } } /** * Launches a group of 2 * getMaxActive() threads, each of which will attempt to obtain a connection * from the pool, hold it for <holdTime> ms, and then return it to the pool. If <loopOnce> is false, * threads will continue this process indefinitely. If <expectError> is true, exactly 1/2 of the * threads are expected to either throw exceptions or fail to complete. If <expectError> is false, * all threads are expected to complete successfully. * * @param holdTime time in ms that a thread holds a connection before returning it to the pool * @param expectError whether or not an error is expected * @param loopOnce whether threads should complete the borrow - hold - return cycle only once, or loop indefinitely * @param maxWait passed in by client - has no impact on the test itself, but does get reported * * @throws Exception */ protected void multipleThreads(final int holdTime, final boolean expectError, final boolean loopOnce, final long maxWait) throws Exception { long startTime = timeStamp(); final PoolTest[] pts = new PoolTest[2 * getMaxActive()]; // Catch Exception so we can stop all threads if one fails ThreadGroup threadGroup = new ThreadGroup("foo") { public void uncaughtException(Thread t, Throwable e) { for (int i = 0; i < pts.length; i++) { pts[i].stop(); } } }; for (int i = 0; i < pts.length; i++) { (pts[i] = new PoolTest(threadGroup, holdTime, expectError, loopOnce)).start(); } // Give all threads a chance to start and succeed Thread.sleep(300L); // Stop threads for (int i = 0; i < pts.length; i++) { pts[i].stop(); } /* * Wait for all threads to terminate. * This is essential to ensure that all threads have a chance to update success[0] * and to ensure that the variable is published correctly. */ int done=0; int failed=0; int didNotRun = 0; int loops=0; for (int i = 0; i < pts.length; i++) { final PoolTest poolTest = pts[i]; poolTest.thread.join(); loops += poolTest.loops; final String state = poolTest.state; if (DONE.equals(state)){ done++; } if (poolTest.loops == 0){ didNotRun++; } final Throwable thrown = poolTest.thrown; if (thrown != null) { failed++; if (!expectError || !(thrown instanceof SQLException)){ System.out.println("Unexpected error: "+thrown.getMessage()); } } } long time = timeStamp() - startTime; System.out.println("Multithread test time = " + time + " ms. Threads: " + pts.length + ". Loops: " + loops + ". Hold time: " + holdTime + ". Maxwait: " + maxWait + ". Done: " + done + ". Did not run: " + didNotRun + ". Failed: " + failed + ". expectError: " + expectError ); if (expectError) { // DBCP-318 is now fixed, so disable extra debug if (pts.length/2 != failed){ for (int i = 0; i < pts.length; i++) { PoolTest pt = pts[i]; System.out.println( "StartupDelay: " + (pt.started-pt.created) + ". ConnectStart: " + pt.preconnected + ". ConnectTime: " + (pt.connected > 0 ? Long.toString(pt.connected-pt.preconnected) : "-") + ". Runtime: " + (pt.ended-pt.started) + ". Loops: " + pt.loops + ". State: " + pt.state + ". thrown: "+ pt.thrown + ". (using nanoTime)" ); } } if (didNotRun > 0){ System.out.println("NOTE: some threads did not run the code: "+didNotRun); } // Perform initial sanity check: assertTrue("Expected some of the threads to fail",failed > 0); // Assume that threads that did not run would have timed out.
assertEquals("WARNING: Expected half the threads to fail",pts.length/2,failed+didNotRun); } else { assertEquals("Did not expect any threads to fail",0,failed); } } private static int currentThreadCount = 0; private static final String DONE = "Done"; protected class PoolTest implements Runnable { /** * The number of milliseconds to hold onto a database connection */ private final int connHoldTime; private volatile boolean isRun; private String state; // No need to be volatile if it is read after the thread finishes private final Thread thread; private Throwable thrown; // Debug for DBCP-318 private final long created; // When object was created private long started; // when thread started private long ended; // when thread ended private long preconnected; // just before connect private long connected; // when thread last connected private int loops = 0; private final boolean stopOnException; // If true, don't rethrow Exception private final boolean loopOnce; // If true, don't repeat loop public PoolTest(ThreadGroup threadGroup, int connHoldTime, boolean isStopOnException) { this(threadGroup, connHoldTime, isStopOnException, false); } private PoolTest(ThreadGroup threadGroup, int connHoldTime, boolean isStopOnException, boolean once) { this.loopOnce = once; this.connHoldTime = connHoldTime; stopOnException = isStopOnException; isRun = true; // Must be done here so main thread is guaranteed to be able to set it false thrown = null; thread = new Thread(threadGroup, this, "Thread+" + currentThreadCount++); thread.setDaemon(false); created = timeStamp(); } public void start(){ thread.start(); } public void run() { started = timeStamp(); try { while (isRun) { loops++; state = "Getting Connection"; preconnected = timeStamp(); Connection conn = getConnection(); connected = timeStamp(); state = "Using Connection"; assertNotNull(conn); PreparedStatement stmt = conn.prepareStatement("select * from dual"); assertNotNull(stmt); ResultSet rset = stmt.executeQuery(); assertNotNull(rset); assertTrue(rset.next()); state = "Holding Connection"; Thread.sleep(connHoldTime); state = "Closing ResultSet"; rset.close(); state = "Closing Statement"; stmt.close(); state = "Closing Connection"; conn.close(); state = "Closed"; if (loopOnce){ break; // Or could set isRun=false } } state = DONE; } catch (Throwable t) { thrown = t; if (!stopOnException) { throw new RuntimeException(); } } finally { ended = timeStamp(); } } public void stop() { isRun = false; } public Thread getThread() { return thread; } } long timeStamp() { return System.nanoTime() / 1000000; } }
src/test/org/apache/commons/dbcp/TestConnectionPool.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.dbcp; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.Hashtable; import java.util.Stack; import junit.framework.TestCase; // XXX FIX ME XXX // this class still needs some cleanup, but at least // this consolidates most of the relevant test code // in a fairly re-usable fashion // XXX FIX ME XXX /** * Base test suite for DBCP pools. * * @author Rodney Waldhoff * @author Sean C. Sullivan * @author John McNally * @author Dirk Verbeeck * @version $Revision$ $Date$ */ public abstract class TestConnectionPool extends TestCase { public TestConnectionPool(String testName) { super(testName); } public void tearDown() throws Exception { super.tearDown(); // Close any connections opened by the test while (!connections.isEmpty()) { Connection conn = (Connection) connections.pop(); try { conn.close(); } catch (Exception ex) { // ignore } finally { conn = null; } } } protected abstract Connection getConnection() throws Exception; protected int getMaxActive() { return 10; } protected long getMaxWait() { return 100L; } /** Connections opened during the course of a test */ protected Stack connections = new Stack(); /** Acquire a connection and push it onto the connections stack */ protected Connection newConnection() throws Exception { Connection connection = getConnection(); connections.push(connection); return connection; } // ----------- Utility Methods --------------------------------- protected String getUsername(Connection conn) throws SQLException { Statement stmt = conn.createStatement(); ResultSet rs = stmt.executeQuery("select username"); if (rs.next()) { return rs.getString(1); } return null; } // ----------- tests --------------------------------- public void testClearWarnings() throws Exception { Connection[] c = new Connection[getMaxActive()]; for (int i = 0; i < c.length; i++) { c[i] = newConnection(); assertTrue(c[i] != null); // generate SQLWarning on connection c[i].prepareCall("warning"); } for (int i = 0; i < c.length; i++) { assertNotNull(c[i].getWarnings()); } for (int i = 0; i < c.length; i++) { c[i].close(); } for (int i = 0; i < c.length; i++) { c[i] = newConnection(); } for (int i = 0; i < c.length; i++) { // warnings should have been cleared by putting the connection back in the pool assertNull(c[i].getWarnings()); } for (int i = 0; i < c.length; i++) { c[i].close(); } } public void testIsClosed() throws Exception { for(int i=0;i<getMaxActive();i++) { Connection conn = newConnection(); assertNotNull(conn); assertTrue(!conn.isClosed()); PreparedStatement stmt = conn.prepareStatement("select * from dual"); assertNotNull(stmt); ResultSet rset = stmt.executeQuery(); 
assertNotNull(rset); assertTrue(rset.next()); rset.close(); stmt.close(); conn.close(); assertTrue(conn.isClosed()); } } /** * Verify the close method can be called multiple times on a single connection without * an exception being thrown. */ public void testCanCloseConnectionTwice() throws Exception { for (int i = 0; i < getMaxActive(); i++) { // loop to show we *can* close again once we've borrowed it from the pool again Connection conn = newConnection(); assertNotNull(conn); assertTrue(!conn.isClosed()); conn.close(); assertTrue(conn.isClosed()); conn.close(); assertTrue(conn.isClosed()); } } public void testCanCloseStatementTwice() throws Exception { Connection conn = newConnection(); assertNotNull(conn); assertTrue(!conn.isClosed()); for(int i=0;i<2;i++) { // loop to show we *can* close again once we've borrowed it from the pool again Statement stmt = conn.createStatement(); assertNotNull(stmt); assertFalse(isClosed(stmt)); stmt.close(); assertTrue(isClosed(stmt)); stmt.close(); assertTrue(isClosed(stmt)); stmt.close(); assertTrue(isClosed(stmt)); } conn.close(); } public void testCanClosePreparedStatementTwice() throws Exception { Connection conn = newConnection(); assertNotNull(conn); assertTrue(!conn.isClosed()); for(int i=0;i<2;i++) { // loop to show we *can* close again once we've borrowed it from the pool again PreparedStatement stmt = conn.prepareStatement("select * from dual"); assertNotNull(stmt); assertFalse(isClosed(stmt)); stmt.close(); assertTrue(isClosed(stmt)); stmt.close(); assertTrue(isClosed(stmt)); stmt.close(); assertTrue(isClosed(stmt)); } conn.close(); } public void testCanCloseCallableStatementTwice() throws Exception { Connection conn = newConnection(); assertNotNull(conn); assertTrue(!conn.isClosed()); for(int i=0;i<2;i++) { // loop to show we *can* close again once we've borrowed it from the pool again PreparedStatement stmt = conn.prepareCall("select * from dual"); assertNotNull(stmt); assertFalse(isClosed(stmt)); stmt.close(); assertTrue(isClosed(stmt)); stmt.close(); assertTrue(isClosed(stmt)); stmt.close(); assertTrue(isClosed(stmt)); } conn.close(); } public void testCanCloseResultSetTwice() throws Exception { Connection conn = newConnection(); assertNotNull(conn); assertTrue(!conn.isClosed()); for(int i=0;i<2;i++) { // loop to show we *can* close again once we've borrowed it from the pool again PreparedStatement stmt = conn.prepareStatement("select * from dual"); assertNotNull(stmt); ResultSet rset = stmt.executeQuery(); assertNotNull(rset); assertFalse(isClosed(rset)); rset.close(); assertTrue(isClosed(rset)); rset.close(); assertTrue(isClosed(rset)); rset.close(); assertTrue(isClosed(rset)); } conn.close(); } public void testBackPointers() throws Exception { // normal statement Connection conn = newConnection(); assertBackPointers(conn, conn.createStatement()); conn = newConnection(); assertBackPointers(conn, conn.createStatement(0, 0)); conn = newConnection(); assertBackPointers(conn, conn.createStatement(0, 0, 0)); // prepared statement conn = newConnection(); assertBackPointers(conn, conn.prepareStatement("select * from dual")); conn = newConnection(); assertBackPointers(conn, conn.prepareStatement("select * from dual", 0)); conn = newConnection(); assertBackPointers(conn, conn.prepareStatement("select * from dual", 0, 0)); conn = newConnection(); assertBackPointers(conn, conn.prepareStatement("select * from dual", 0, 0, 0)); conn = newConnection(); assertBackPointers(conn, conn.prepareStatement("select * from dual", new int[0])); conn = 
newConnection(); assertBackPointers(conn, conn.prepareStatement("select * from dual", new String[0])); // callable statement conn = newConnection(); assertBackPointers(conn, conn.prepareCall("select * from dual")); conn = newConnection(); assertBackPointers(conn, conn.prepareCall("select * from dual", 0, 0)); conn = newConnection(); assertBackPointers(conn, conn.prepareCall("select * from dual", 0, 0, 0)); } protected void assertBackPointers(Connection conn, Statement statement) throws SQLException { assertFalse(conn.isClosed()); assertFalse(isClosed(statement)); assertSame("statement.getConnection() should return the exact same connection instance that was used to create the statement", conn, statement.getConnection()); ResultSet resultSet = statement.getResultSet(); assertFalse(isClosed(resultSet)); assertSame("resultSet.getStatement() should return the exact same statement instance that was used to create the result set", statement, resultSet.getStatement()); ResultSet executeResultSet = statement.executeQuery("select * from dual"); assertFalse(isClosed(executeResultSet)); assertSame("resultSet.getStatement() should return the exact same statement instance that was used to create the result set", statement, executeResultSet.getStatement()); ResultSet keysResultSet = statement.getGeneratedKeys(); assertFalse(isClosed(keysResultSet)); assertSame("resultSet.getStatement() should return the exact same statement instance that was used to create the result set", statement, keysResultSet.getStatement()); ResultSet preparedResultSet = null; if (statement instanceof PreparedStatement) { PreparedStatement preparedStatement = (PreparedStatement) statement; preparedResultSet = preparedStatement.executeQuery(); assertFalse(isClosed(preparedResultSet)); assertSame("resultSet.getStatement() should return the exact same statement instance that was used to create the result set", statement, preparedResultSet.getStatement()); } resultSet.getStatement().getConnection().close(); assertTrue(conn.isClosed()); assertTrue(isClosed(statement)); assertTrue(isClosed(resultSet)); assertTrue(isClosed(executeResultSet)); assertTrue(isClosed(keysResultSet)); if (preparedResultSet != null) { assertTrue(isClosed(preparedResultSet)); } } public void testSimple() throws Exception { Connection conn = newConnection(); assertNotNull(conn); PreparedStatement stmt = conn.prepareStatement("select * from dual"); assertNotNull(stmt); ResultSet rset = stmt.executeQuery(); assertNotNull(rset); assertTrue(rset.next()); rset.close(); stmt.close(); conn.close(); } public void testRepeatedBorrowAndReturn() throws Exception { for(int i=0;i<100;i++) { Connection conn = newConnection(); assertNotNull(conn); PreparedStatement stmt = conn.prepareStatement("select * from dual"); assertNotNull(stmt); ResultSet rset = stmt.executeQuery(); assertNotNull(rset); assertTrue(rset.next()); rset.close(); stmt.close(); conn.close(); } } public void testSimple2() throws Exception { Connection conn = newConnection(); assertNotNull(conn); { PreparedStatement stmt = conn.prepareStatement("select * from dual"); assertNotNull(stmt); ResultSet rset = stmt.executeQuery(); assertNotNull(rset); assertTrue(rset.next()); rset.close(); stmt.close(); } { PreparedStatement stmt = conn.prepareStatement("select * from dual"); assertNotNull(stmt); ResultSet rset = stmt.executeQuery(); assertNotNull(rset); assertTrue(rset.next()); rset.close(); stmt.close(); } conn.close(); try { conn.createStatement(); fail("Can't use closed connections"); } catch(SQLException e) { // 
expected } conn = newConnection(); assertNotNull(conn); { PreparedStatement stmt = conn.prepareStatement("select * from dual"); assertNotNull(stmt); ResultSet rset = stmt.executeQuery(); assertNotNull(rset); assertTrue(rset.next()); rset.close(); stmt.close(); } { PreparedStatement stmt = conn.prepareStatement("select * from dual"); assertNotNull(stmt); ResultSet rset = stmt.executeQuery(); assertNotNull(rset); assertTrue(rset.next()); rset.close(); stmt.close(); } conn.close(); conn = null; } public void testPooling() throws Exception { // Grab a maximal set of open connections from the pool Connection[] c = new Connection[getMaxActive()]; Connection[] u = new Connection[getMaxActive()]; for (int i = 0; i < c.length; i++) { c[i] = newConnection(); if (c[i] instanceof DelegatingConnection) { u[i] = ((DelegatingConnection) c[i]).getInnermostDelegate(); } else { for (int j = 0; j <= i; j++) { c[j].close(); } return; // skip this test } } // Close connections one at a time and get new ones, making sure // the new ones come from the pool for (int i = 0; i < c.length; i++) { c[i].close(); Connection con = newConnection(); Connection underCon = ((DelegatingConnection) con).getInnermostDelegate(); assertTrue("Failed to get connection", underCon != null); boolean found = false; for (int j = 0; j < c.length; j++) { if (underCon == u[j]) { found = true; break; } } assertTrue("New connection not from pool", found); con.close(); } } public void testAutoCommitBehavior() throws Exception { Connection conn = newConnection(); assertNotNull(conn); assertTrue(conn.getAutoCommit()); conn.setAutoCommit(false); conn.close(); Connection conn2 = newConnection(); assertTrue( conn2.getAutoCommit() ); Connection conn3 = newConnection(); assertTrue( conn3.getAutoCommit() ); conn2.close(); conn3.close(); } /** @see "http://issues.apache.org/bugzilla/show_bug.cgi?id=12400" */ public void testConnectionsAreDistinct() throws Exception { Connection[] conn = new Connection[getMaxActive()]; for(int i=0;i<conn.length;i++) { conn[i] = newConnection(); for(int j=0;j<i;j++) { assertTrue(conn[j] != conn[i]); assertTrue(!conn[j].equals(conn[i])); } } for(int i=0;i<conn.length;i++) { conn[i].close(); } } public void testOpening() throws Exception { Connection[] c = new Connection[getMaxActive()]; // test that opening new connections is not closing previous for (int i = 0; i < c.length; i++) { c[i] = newConnection(); assertTrue(c[i] != null); for (int j = 0; j <= i; j++) { assertTrue(!c[j].isClosed()); } } for (int i = 0; i < c.length; i++) { c[i].close(); } } public void testClosing() throws Exception { Connection[] c = new Connection[getMaxActive()]; // open the maximum connections for (int i = 0; i < c.length; i++) { c[i] = newConnection(); } // close one of the connections c[0].close(); assertTrue(c[0].isClosed()); // get a new connection c[0] = newConnection(); for (int i = 0; i < c.length; i++) { c[i].close(); } } public void testMaxActive() throws Exception { Connection[] c = new Connection[getMaxActive()]; for (int i = 0; i < c.length; i++) { c[i] = newConnection(); assertTrue(c[i] != null); } try { newConnection(); fail("Allowed to open more than DefaultMaxActive connections."); } catch (java.sql.SQLException e) { // should only be able to open 10 connections, so this test should // throw an exception } for (int i = 0; i < c.length; i++) { c[i].close(); } } /** * DBCP-128: BasicDataSource.getConnection() * Connections don't work as hashtable keys */ public void testHashing() throws Exception { Connection con = 
getConnection(); Hashtable hash = new Hashtable(); hash.put(con, "test"); assertEquals("test", hash.get(con)); assertTrue(hash.containsKey(con)); assertTrue(hash.contains("test")); hash.clear(); con.close(); } public void testThreaded() { TestThread[] threads = new TestThread[getMaxActive()]; for(int i=0;i<threads.length;i++) { threads[i] = new TestThread(50,50); Thread t = new Thread(threads[i]); t.start(); } for(int i=0;i<threads.length;i++) { while(!(threads[i]).complete()) { try { Thread.sleep(100L); } catch(Exception e) { // ignored } } if(threads[i].failed()) { fail(); } } } class TestThread implements Runnable { java.util.Random _random = new java.util.Random(); boolean _complete = false; boolean _failed = false; int _iter = 100; int _delay = 50; public TestThread() { } public TestThread(int iter) { _iter = iter; } public TestThread(int iter, int delay) { _iter = iter; _delay = delay; } public boolean complete() { return _complete; } public boolean failed() { return _failed; } public void run() { for(int i=0;i<_iter;i++) { try { Thread.sleep(_random.nextInt(_delay)); } catch(Exception e) { // ignored } Connection conn = null; PreparedStatement stmt = null; ResultSet rset = null; try { conn = newConnection(); stmt = conn.prepareStatement("select 'literal', SYSDATE from dual"); rset = stmt.executeQuery(); try { Thread.sleep(_random.nextInt(_delay)); } catch(Exception e) { // ignored } } catch(Exception e) { e.printStackTrace(); _failed = true; _complete = true; break; } finally { try { if (rset != null) rset.close(); } catch(Exception e) { } try { if (stmt != null) stmt.close(); } catch(Exception e) { } try { if (conn != null) conn.close(); } catch(Exception e) { } } } _complete = true; } } // Bugzilla Bug 24328: PooledConnectionImpl ignores resultsetType // and Concurrency if statement pooling is not enabled // http://issues.apache.org/bugzilla/show_bug.cgi?id=24328 public void testPrepareStatementOptions() throws Exception { Connection conn = newConnection(); assertNotNull(conn); PreparedStatement stmt = conn.prepareStatement("select * from dual", ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE); assertNotNull(stmt); ResultSet rset = stmt.executeQuery(); assertNotNull(rset); assertTrue(rset.next()); assertEquals(ResultSet.TYPE_SCROLL_SENSITIVE, rset.getType()); assertEquals(ResultSet.CONCUR_UPDATABLE, rset.getConcurrency()); rset.close(); stmt.close(); conn.close(); } // Bugzilla Bug 24966: NullPointer with Oracle 9 driver // wrong order of passivate/close when a rset isn't closed public void testNoRsetClose() throws Exception { Connection conn = newConnection(); assertNotNull(conn); PreparedStatement stmt = conn.prepareStatement("test"); assertNotNull(stmt); ResultSet rset = stmt.getResultSet(); assertNotNull(rset); // forget to close the resultset: rset.close(); stmt.close(); conn.close(); } // Bugzilla Bug 26966: Connectionpool's connections always returns same public void testHashCode() throws Exception { Connection conn1 = newConnection(); assertNotNull(conn1); Connection conn2 = newConnection(); assertNotNull(conn2); assertTrue(conn1.hashCode() != conn2.hashCode()); } protected boolean isClosed(Statement statement) { try { statement.getWarnings(); return false; } catch (SQLException e) { // getWarnings throws an exception if the statement is // closed, but could throw an exception for other reasons // in this case it is good enough to assume the statement // is closed return true; } } protected boolean isClosed(ResultSet resultSet) { try { resultSet.getWarnings();
return false; } catch (SQLException e) { // getWarnings throws an exception if the statement is // closed, but could throw an exception for other reasons // in this case it is good enough to assume the result set // is closed return true; } } /** * Launches a group of 2 * getMaxActive() threads, each of which will attempt to obtain a connection * from the pool, hold it for <holdTime> ms, and then return it to the pool. If <loopOnce> is false, * threads will continue this process indefinitely. If <expectError> is true, exactly 1/2 of the * threads are expected to either throw exceptions or fail to complete. If <expectError> is false, * all threads are expected to complete successfully. * * @param holdTime time in ms that a thread holds a connection before returning it to the pool * @param expectError whether or not an error is expected * @param loopOnce whether threads should complete the borrow - hold - return cycle only once, or loop indefinitely * @param maxWait passed in by client - has no impact on the test itself, but does get reported * * @throws Exception */ protected void multipleThreads(final int holdTime, final boolean expectError, final boolean loopOnce, final long maxWait) throws Exception { long startTime = timeStamp(); final PoolTest[] pts = new PoolTest[2 * getMaxActive()]; // Catch Exception so we can stop all threads if one fails ThreadGroup threadGroup = new ThreadGroup("foo") { public void uncaughtException(Thread t, Throwable e) { for (int i = 0; i < pts.length; i++) { pts[i].stop(); } } }; for (int i = 0; i < pts.length; i++) { (pts[i] = new PoolTest(threadGroup, holdTime, expectError, loopOnce)).start(); } // Give all threads a chance to start and succeed Thread.sleep(300L); // Stop threads for (int i = 0; i < pts.length; i++) { pts[i].stop(); } /* * Wait for all threads to terminate. * This is essential to ensure that all threads have a chance to update success[0] * and to ensure that the variable is published correctly. */ int done=0; int failed=0; int didNotRun = 0; int loops=0; for (int i = 0; i < pts.length; i++) { final PoolTest poolTest = pts[i]; poolTest.thread.join(); loops += poolTest.loops; final String state = poolTest.state; if (DONE.equals(state)){ done++; } if (poolTest.loops == 0){ didNotRun++; } final Throwable thrown = poolTest.thrown; if (thrown != null) { failed++; if (!expectError || !(thrown instanceof SQLException)){ System.out.println("Unexpected error: "+thrown.getMessage()); } } } long time = timeStamp() - startTime; System.out.println("Multithread test time = " + time + " ms. Threads: " + pts.length + ". Loops: " + loops + ". Hold time: " + holdTime + ". Maxwait: " + maxWait + ". Done: " + done + ". Did not run: " + didNotRun + ". Failed: " + failed + ". expectError: " + expectError ); if (expectError) { // DBCP-318 is now fixed, so disable extra debug if (pts.length/2 != failed){ for (int i = 0; i < pts.length; i++) { PoolTest pt = pts[i]; System.out.println( "StartupDelay: " + (pt.started-pt.created) + ". ConnectTime: " + (pt.connected > 0 ? Long.toString(pt.connected-pt.preconnected) : "-") + ". Runtime: " + (pt.ended-pt.started) + ". Loops: " + pt.loops + ". State: " + pt.state + ". thrown: "+ pt.thrown + ". (using nanoTime)" ); } } if (didNotRun > 0){ System.out.println("NOTE: some threads did not run the code: "+didNotRun); } // Perform initial sanity check: assertTrue("Expected some of the threads to fail",failed > 0); // Assume that threads that did not run would have timed out.
assertEquals("WARNING: Expected half the threads to fail",pts.length/2,failed+didNotRun); } else { assertEquals("Did not expect any threads to fail",0,failed); } } private static int currentThreadCount = 0; private static final String DONE = "Done"; protected class PoolTest implements Runnable { /** * The number of milliseconds to hold onto a database connection */ private final int connHoldTime; private volatile boolean isRun; private String state; // No need to be volatile if it is read after the thread finishes private final Thread thread; private Throwable thrown; // Debug for DBCP-318 private final long created; // When object was created private long started; // when thread started private long ended; // when thread ended private long preconnected; // just before connect private long connected; // when thread last connected private int loops = 0; private final boolean stopOnException; // If true, don't rethrow Exception private final boolean loopOnce; // If true, don't repeat loop public PoolTest(ThreadGroup threadGroup, int connHoldTime, boolean isStopOnException) { this(threadGroup, connHoldTime, isStopOnException, false); } private PoolTest(ThreadGroup threadGroup, int connHoldTime, boolean isStopOnException, boolean once) { this.loopOnce = once; this.connHoldTime = connHoldTime; stopOnException = isStopOnException; isRun = true; // Must be done here so main thread is guaranteed to be able to set it false thrown = null; thread = new Thread(threadGroup, this, "Thread+" + currentThreadCount++); thread.setDaemon(false); created = timeStamp(); } public void start(){ thread.start(); } public void run() { started = timeStamp(); try { while (isRun) { loops++; state = "Getting Connection"; preconnected = timeStamp(); Connection conn = getConnection(); connected = timeStamp(); state = "Using Connection"; assertNotNull(conn); PreparedStatement stmt = conn.prepareStatement("select * from dual"); assertNotNull(stmt); ResultSet rset = stmt.executeQuery(); assertNotNull(rset); assertTrue(rset.next()); state = "Holding Connection"; Thread.sleep(connHoldTime); state = "Closing ResultSet"; rset.close(); state = "Closing Statement"; stmt.close(); state = "Closing Connection"; conn.close(); state = "Closed"; if (loopOnce){ break; // Or could set isRun=false } } state = DONE; } catch (Throwable t) { thrown = t; if (!stopOnException) { throw new RuntimeException(); } } finally { ended = timeStamp(); } } public void stop() { isRun = false; } public Thread getThread() { return thread; } } long timeStamp() { return System.nanoTime() / 1000000; } }
More debug for Continuum fail git-svn-id: ad951d5a084f562764370c7a59da74380db26404@897906 13f79535-47bb-0310-9956-ffa450edef68
src/test/org/apache/commons/dbcp/TestConnectionPool.java
More debug for Continuum fail
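The multipleThreads harness in the record above boils down to a borrow-hold-return race: 2 * getMaxActive() threads compete for maxActive connections, and when each holds its connection longer than the pool's wait limit, half of them are expected to fail. The following is a minimal, self-contained sketch of that contention pattern with a plain Semaphore standing in for the pool; PoolContentionSketch and all the numbers here are illustrative, not DBCP code.

import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

public class PoolContentionSketch {
    public static void main(String[] args) throws InterruptedException {
        final int maxActive = 10;                       // pool capacity
        final Semaphore pool = new Semaphore(maxActive);
        final AtomicInteger failed = new AtomicInteger();
        Thread[] threads = new Thread[2 * maxActive];   // twice as many borrowers
        for (int i = 0; i < threads.length; i++) {
            threads[i] = new Thread(() -> {
                try {
                    // borrow with a short maxWait, as the pooled test does
                    if (pool.tryAcquire(50, TimeUnit.MILLISECONDS)) {
                        try {
                            Thread.sleep(200);          // hold time well above maxWait
                        } finally {
                            pool.release();             // return the "connection"
                        }
                    } else {
                        failed.incrementAndGet();       // timed out, like the expected SQLException
                    }
                } catch (InterruptedException ignored) { }
            });
            threads[i].start();
        }
        for (Thread t : threads) {
            t.join();
        }
        // With hold time >> maxWait, roughly half the borrowers fail,
        // matching the test's expectError accounting.
        System.out.println("failed " + failed.get() + " of " + threads.length);
    }
}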
Java
apache-2.0
10b2db82895bb2a1d58dab4a87f52b9c95dde8cb
0
nlnwa/broprox,nlnwa/broprox,nlnwa/broprox
package no.nb.nna.veidemann.chrome.client.codegen; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import com.google.gson.Gson; import com.google.gson.annotations.SerializedName; import java.net.URL; import java.nio.charset.StandardCharsets; import java.util.Base64; import java.util.List; import java.util.Objects; import com.squareup.javapoet.AnnotationSpec; import com.squareup.javapoet.ClassName; import com.squareup.javapoet.FieldSpec; import com.squareup.javapoet.MethodSpec; import com.squareup.javapoet.TypeSpec; import static javax.lang.model.element.Modifier.*; public class Codegen { static Gson gson = new Gson(); static String PACKAGE = "no.nb.nna.veidemann.chrome.client"; static ClassName CLIENT_CLASS = ClassName.get(PACKAGE + ".ws", "Cdp"); static String CHROME_VERSION = "64.0.3269.3"; public static void main(String args[]) throws IOException { String browserProtocol = "https://chromium.googlesource.com/chromium/src/+/" + CHROME_VERSION + "/third_party/WebKit/Source/core/inspector/browser_protocol.json?format=text"; String jsProtocol = "https://chromium.googlesource.com/v8/v8/+/chromium/" + CHROME_VERSION.split("\\.")[2] + "/src/inspector/js_protocol.json?format=text"; Protocol protocol = loadProtocol(browserProtocol); protocol.merge(loadProtocol(jsProtocol)); File outdir = args.length > 0 ? new File(args[0]) : null; protocol.gencode(outdir); } static Protocol loadProtocol(String url) throws IOException { try (InputStream stream = Base64.getDecoder().wrap(new URL(url).openStream()); InputStreamReader reader = new InputStreamReader(stream, StandardCharsets.UTF_8)) { return gson.fromJson(reader, Protocol.class); } } static ClassName buildStruct(TypeSpec.Builder b, String name, String description, List<Parameter> members, Protocol protocol, Domain domain) { String typeName = name.substring(0, 1).toUpperCase() + name.substring(1); TypeSpec.Builder typeSpec = TypeSpec.classBuilder(typeName) .addModifiers(PUBLIC, STATIC); StringBuilder fieldStrings = new StringBuilder(); if (description != null) { typeSpec.addJavadoc(description.replace("$", "$$") + "\n"); } for (Parameter member : members) { if (Objects.equals(member.name, "this")) { member.name = "this_"; } FieldSpec.Builder field = FieldSpec.builder(member.typeName(protocol, domain), member.name, PUBLIC); if (member.name.equals("this_")) { field.addAnnotation(AnnotationSpec.builder(SerializedName.class) .addMember("value", "$S", "this").build()); } if (member.description != null) { field.addJavadoc(member.description.replace("$", "$$") + "\n"); } typeSpec.addField(field.build()); if (fieldStrings.length() > 0) { fieldStrings.append(", "); } fieldStrings.append(member.name + "=\" + " + member.name + " + \""); } typeSpec.addMethod(MethodSpec.methodBuilder("toString") .addModifiers(PUBLIC) .returns(String.class) .addStatement("return \"" + typeName + "{" + fieldStrings + "}\"").build()); TypeSpec spec = typeSpec.build(); b.addType(typeSpec.build()); return ClassName.get(PACKAGE, domain.javaName, typeName); } public static String coalesce(String... strs) { for (String s : strs) { if (s != null) { return s; } } return null; } static String cap(String name) { return name.substring(0, 1).toUpperCase() + name.substring(1); } }
veidemann-chrome/veidemann-chrome-codegen/src/main/java/no/nb/nna/veidemann/chrome/client/codegen/Codegen.java
package no.nb.nna.veidemann.chrome.client.codegen; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import com.google.gson.Gson; import com.google.gson.annotations.SerializedName; import java.net.URL; import java.nio.charset.StandardCharsets; import java.util.Base64; import java.util.List; import java.util.Objects; import com.squareup.javapoet.AnnotationSpec; import com.squareup.javapoet.ClassName; import com.squareup.javapoet.FieldSpec; import com.squareup.javapoet.MethodSpec; import com.squareup.javapoet.TypeSpec; import static javax.lang.model.element.Modifier.*; public class Codegen { static Gson gson = new Gson(); static String PACKAGE = "no.nb.nna.veidemann.chrome.client"; static ClassName CLIENT_CLASS = ClassName.get(PACKAGE + ".ws", "Cdp"); static String CHROME_VERSION = "64.0.3260.2"; public static void main(String args[]) throws IOException { String browserProtocol = "https://chromium.googlesource.com/chromium/src/+/" + CHROME_VERSION + "/third_party/WebKit/Source/core/inspector/browser_protocol.json?format=text"; String jsProtocol = "https://chromium.googlesource.com/v8/v8/+/chromium/" + CHROME_VERSION.split("\\.")[2] + "/src/inspector/js_protocol.json?format=text"; Protocol protocol = loadProtocol(browserProtocol); protocol.merge(loadProtocol(jsProtocol)); File outdir = args.length > 0 ? new File(args[0]) : null; protocol.gencode(outdir); } static Protocol loadProtocol(String url) throws IOException { try (InputStream stream = Base64.getDecoder().wrap(new URL(url).openStream()); InputStreamReader reader = new InputStreamReader(stream, StandardCharsets.UTF_8)) { return gson.fromJson(reader, Protocol.class); } } static ClassName buildStruct(TypeSpec.Builder b, String name, String description, List<Parameter> members, Protocol protocol, Domain domain) { String typeName = name.substring(0, 1).toUpperCase() + name.substring(1); TypeSpec.Builder typeSpec = TypeSpec.classBuilder(typeName) .addModifiers(PUBLIC, STATIC); StringBuilder fieldStrings = new StringBuilder(); if (description != null) { typeSpec.addJavadoc(description.replace("$", "$$") + "\n"); } for (Parameter member : members) { if (Objects.equals(member.name, "this")) { member.name = "this_"; } FieldSpec.Builder field = FieldSpec.builder(member.typeName(protocol, domain), member.name, PUBLIC); if (member.name.equals("this_")) { field.addAnnotation(AnnotationSpec.builder(SerializedName.class) .addMember("value", "$S", "this").build()); } if (member.description != null) { field.addJavadoc(member.description.replace("$", "$$") + "\n"); } typeSpec.addField(field.build()); if (fieldStrings.length() > 0) { fieldStrings.append(", "); } fieldStrings.append(member.name + "=\" + " + member.name + " + \""); } typeSpec.addMethod(MethodSpec.methodBuilder("toString") .addModifiers(PUBLIC) .returns(String.class) .addStatement("return \"" + typeName + "{" + fieldStrings + "}\"").build()); TypeSpec spec = typeSpec.build(); b.addType(typeSpec.build()); return ClassName.get(PACKAGE, domain.javaName, typeName); } public static String coalesce(String... strs) { for (String s : strs) { if (s != null) { return s; } } return null; } static String cap(String name) { return name.substring(0, 1).toUpperCase() + name.substring(1); } }
Updated to newest Chrome debug protocol
veidemann-chrome/veidemann-chrome-codegen/src/main/java/no/nb/nna/veidemann/chrome/client/codegen/Codegen.java
Updated to newest Chrome debug protocol
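The loadProtocol helper in the record above relies on gitiles serving ?format=text responses Base64-encoded: Base64.getDecoder().wrap(...) decodes the stream on the fly, so Gson parses plain JSON without ever seeing the encoding. A self-contained illustration of the same trick against an in-memory payload (the JSON is a made-up stand-in; readAllBytes assumes Java 9+):

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class Base64StreamDemo {
    public static void main(String[] args) throws IOException {
        // Simulate a gitiles ?format=text response: the raw file, Base64-encoded.
        byte[] encoded = Base64.getEncoder()
                .encode("{\"version\":{\"major\":\"1\"}}".getBytes(StandardCharsets.UTF_8));
        // wrap() returns an InputStream that decodes transparently while reading,
        // which is exactly what lets loadProtocol hand the stream straight to Gson.
        try (InputStream in = Base64.getDecoder().wrap(new ByteArrayInputStream(encoded))) {
            System.out.println(new String(in.readAllBytes(), StandardCharsets.UTF_8));
        }
    }
}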
Java
bsd-3-clause
b0286ae5ff98f55b2fec18fc43064c72469ded62
0
davidB/jmonkeyengine,delftsre/jmonkeyengine,olafmaas/jmonkeyengine,yetanotherindie/jMonkey-Engine,yetanotherindie/jMonkey-Engine,yetanotherindie/jMonkey-Engine,aaronang/jmonkeyengine,skapi1992/jmonkeyengine,aaronang/jmonkeyengine,mbenson/jmonkeyengine,g-rocket/jmonkeyengine,rbottema/jmonkeyengine,zzuegg/jmonkeyengine,bertleft/jmonkeyengine,shurun19851206/jMonkeyEngine,GreenCubes/jmonkeyengine,g-rocket/jmonkeyengine,weilichuang/jmonkeyengine,phr00t/jmonkeyengine,rbottema/jmonkeyengine,wrvangeest/jmonkeyengine,Georgeto/jmonkeyengine,olafmaas/jmonkeyengine,bsmr-java/jmonkeyengine,nickschot/jmonkeyengine,Georgeto/jmonkeyengine,Georgeto/jmonkeyengine,GreenCubes/jmonkeyengine,tr0k/jmonkeyengine,zzuegg/jmonkeyengine,d235j/jmonkeyengine,delftsre/jmonkeyengine,mbenson/jmonkeyengine,zzuegg/jmonkeyengine,jMonkeyEngine/jmonkeyengine,danteinforno/jmonkeyengine,shurun19851206/jMonkeyEngine,weilichuang/jmonkeyengine,Georgeto/jmonkeyengine,g-rocket/jmonkeyengine,yetanotherindie/jMonkey-Engine,sandervdo/jmonkeyengine,delftsre/jmonkeyengine,amit2103/jmonkeyengine,GreenCubes/jmonkeyengine,sandervdo/jmonkeyengine,Georgeto/jmonkeyengine,sandervdo/jmonkeyengine,tr0k/jmonkeyengine,bsmr-java/jmonkeyengine,GreenCubes/jmonkeyengine,danteinforno/jmonkeyengine,sandervdo/jmonkeyengine,InShadow/jmonkeyengine,bertleft/jmonkeyengine,atomixnmc/jmonkeyengine,atomixnmc/jmonkeyengine,d235j/jmonkeyengine,danteinforno/jmonkeyengine,danteinforno/jmonkeyengine,shurun19851206/jMonkeyEngine,shurun19851206/jMonkeyEngine,aaronang/jmonkeyengine,InShadow/jmonkeyengine,yetanotherindie/jMonkey-Engine,mbenson/jmonkeyengine,bertleft/jmonkeyengine,InShadow/jmonkeyengine,shurun19851206/jMonkeyEngine,tr0k/jmonkeyengine,tr0k/jmonkeyengine,wrvangeest/jmonkeyengine,amit2103/jmonkeyengine,OpenGrabeso/jmonkeyengine,bsmr-java/jmonkeyengine,jMonkeyEngine/jmonkeyengine,nickschot/jmonkeyengine,davidB/jmonkeyengine,danteinforno/jmonkeyengine,skapi1992/jmonkeyengine,phr00t/jmonkeyengine,yetanotherindie/jMonkey-Engine,atomixnmc/jmonkeyengine,skapi1992/jmonkeyengine,weilichuang/jmonkeyengine,OpenGrabeso/jmonkeyengine,atomixnmc/jmonkeyengine,skapi1992/jmonkeyengine,amit2103/jmonkeyengine,olafmaas/jmonkeyengine,phr00t/jmonkeyengine,d235j/jmonkeyengine,mbenson/jmonkeyengine,davidB/jmonkeyengine,davidB/jmonkeyengine,weilichuang/jmonkeyengine,wrvangeest/jmonkeyengine,nickschot/jmonkeyengine,d235j/jmonkeyengine,danteinforno/jmonkeyengine,aaronang/jmonkeyengine,d235j/jmonkeyengine,weilichuang/jmonkeyengine,g-rocket/jmonkeyengine,mbenson/jmonkeyengine,shurun19851206/jMonkeyEngine,olafmaas/jmonkeyengine,phr00t/jmonkeyengine,OpenGrabeso/jmonkeyengine,OpenGrabeso/jmonkeyengine,jMonkeyEngine/jmonkeyengine,d235j/jmonkeyengine,g-rocket/jmonkeyengine,rbottema/jmonkeyengine,InShadow/jmonkeyengine,delftsre/jmonkeyengine,Georgeto/jmonkeyengine,mbenson/jmonkeyengine,OpenGrabeso/jmonkeyengine,davidB/jmonkeyengine,atomixnmc/jmonkeyengine,zzuegg/jmonkeyengine,amit2103/jmonkeyengine,g-rocket/jmonkeyengine,amit2103/jmonkeyengine,weilichuang/jmonkeyengine,amit2103/jmonkeyengine,bertleft/jmonkeyengine,jMonkeyEngine/jmonkeyengine,nickschot/jmonkeyengine,davidB/jmonkeyengine,bsmr-java/jmonkeyengine,rbottema/jmonkeyengine,atomixnmc/jmonkeyengine,OpenGrabeso/jmonkeyengine,wrvangeest/jmonkeyengine
/* * Copyright (c) 2009-2010 jMonkeyEngine * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * * Neither the name of 'jMonkeyEngine' nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package com.jme3.collision; import com.jme3.math.Triangle; import com.jme3.math.Vector3f; import com.jme3.scene.Geometry; import com.jme3.scene.Mesh; /** * A <code>CollisionResult</code> represents a single collision instance * between two {@link Collidable}. A collision check can result in many * collision instances (places where collision has occurred). 
* * @author Kirill Vainer */ public class CollisionResult implements Comparable<CollisionResult> { private Geometry geometry; private Vector3f contactPoint; private Vector3f contactNormal; private float distance; private int triangleIndex; public CollisionResult(Geometry geometry, Vector3f contactPoint, float distance, int triangleIndex) { this.geometry = geometry; this.contactPoint = contactPoint; this.distance = distance; this.triangleIndex = triangleIndex; } public CollisionResult(Vector3f contactPoint, float distance) { this.contactPoint = contactPoint; this.distance = distance; } public CollisionResult(){ } public void setGeometry(Geometry geom){ this.geometry = geom; } public void setContactNormal(Vector3f norm){ this.contactNormal = norm; } public void setContactPoint(Vector3f point){ this.contactPoint = point; } public void setDistance(float dist){ this.distance = dist; } public void setTriangleIndex(int index){ this.triangleIndex = index; } public Triangle getTriangle(Triangle store){ if (store == null) store = new Triangle(); Mesh m = geometry.getMesh(); m.getTriangle(triangleIndex, store); store.calculateCenter(); store.calculateNormal(); return store; } public int compareTo(CollisionResult other) { return Float.compare(distance, other.distance); } @Override public boolean equals(Object obj) { if(obj instanceof CollisionResult){ return ((CollisionResult)obj).compareTo(this) == 0; } return super.equals(obj); } public Vector3f getContactPoint() { return contactPoint; } public Vector3f getContactNormal() { return contactNormal; } public float getDistance() { return distance; } public Geometry getGeometry() { return geometry; } public int getTriangleIndex() { return triangleIndex; } }
engine/src/core/com/jme3/collision/CollisionResult.java
/* * Copyright (c) 2009-2010 jMonkeyEngine * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * * Neither the name of 'jMonkeyEngine' nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package com.jme3.collision; import com.jme3.math.Triangle; import com.jme3.math.Vector3f; import com.jme3.scene.Geometry; import com.jme3.scene.Mesh; /** * A <code>CollisionResult</code> represents a single collision instance * between two {@link Collidable}. A collision check can result in many * collision instances (places where collision has occurred). 
* * @author Kirill Vainer */ public class CollisionResult implements Comparable<CollisionResult> { private Geometry geometry; private Vector3f contactPoint; private Vector3f contactNormal; private float distance; private int triangleIndex; public CollisionResult(Geometry geometry, Vector3f contactPoint, float distance, int triangleIndex) { this.geometry = geometry; this.contactPoint = contactPoint; this.distance = distance; this.triangleIndex = triangleIndex; } public CollisionResult(Vector3f contactPoint, float distance) { this.contactPoint = contactPoint; this.distance = distance; } public CollisionResult(){ } public void setGeometry(Geometry geom){ this.geometry = geom; } public void setContactNormal(Vector3f norm){ this.contactNormal = norm; } public void setContactPoint(Vector3f point){ this.contactPoint = point; } public void setDistance(float dist){ this.distance = dist; } public void setTriangleIndex(int index){ this.triangleIndex = index; } public Triangle getTriangle(Triangle store){ if (store == null) store = new Triangle(); Mesh m = geometry.getMesh(); m.getTriangle(triangleIndex, store); store.calculateCenter(); store.calculateNormal(); return store; } public int compareTo(CollisionResult other) { if (distance < other.distance) return -1; else if (distance > other.distance) return 1; else return 0; } @Override public boolean equals(Object obj) { if(obj instanceof CollisionResult){ return ((CollisionResult)obj).compareTo(this) == 0; } return super.equals(obj); } public Vector3f getContactPoint() { return contactPoint; } public Vector3f getContactNormal() { return contactNormal; } public float getDistance() { return distance; } public Geometry getGeometry() { return geometry; } public int getTriangleIndex() { return triangleIndex; } }
* Fixed "IllegalArgumentException: Comparison method violates its general contract!" issue with CollisionResults git-svn-id: f9411aee4f13664f2fc428a5b3e824fe43a079a3@9618 75d07b2b-3a1a-0410-a2c5-0572b91ccdca
engine/src/core/com/jme3/collision/CollisionResult.java
* Fixed "IllegalArgumentException: Comparison method violates its general contract!" issue with CollisionResults
Java
bsd-3-clause
d02951bc01cbb46af08208cc59877f1d34a1b211
0
NCIP/c3pr,NCIP/c3pr,NCIP/c3pr
package edu.duke.cabig.c3pr.domain; import edu.duke.cabig.c3pr.AbstractTestCase; import edu.duke.cabig.c3pr.constants.ContactMechanismType; /** * The Class UserBasedRecipientTest. */ public class UserBasedRecipientTest extends AbstractTestCase{ /** The Constant INV. */ public static final String INV = "[email protected]"; /** The Constant STAFF. */ public static final String STAFF = "[email protected]"; /** * Test get email address for Investigator. */ public void testGetEmailAddressForInv(){ UserBasedRecipient userBasedRecipient = getUserBasedRecipientForInv(); String emails = userBasedRecipient.getEmailAddress(); assertTrue(emails.equals(INV)); } /** * Test get email address for staff. */ public void testGetEmailAddressForStaff(){ UserBasedRecipient userBasedRecipient = getUserBasedRecipientForStaff(); String emails = userBasedRecipient.getEmailAddress(); assertTrue(emails.equals(STAFF)); } public void testGetFullNameForStaff(){ UserBasedRecipient userBasedRecipient = getUserBasedRecipientForStaff(); String name = userBasedRecipient.getFullName(); assertTrue(name.equals("John Doe")); } public void testGetFullNameForInv(){ UserBasedRecipient userBasedRecipient = getUserBasedRecipientForInv(); String name = userBasedRecipient.getFullName(); assertTrue(name.equals("Jane Doe")); } /** * Gets the user based recipient for staff. * * @return the user based recipient for staff */ private UserBasedRecipient getUserBasedRecipientForStaff() { ResearchStaff researchStaff = new LocalResearchStaff(); ContactMechanism contactMechanism = new ContactMechanism(); contactMechanism.setType(ContactMechanismType.EMAIL); contactMechanism.setValue(STAFF); researchStaff.getContactMechanisms().add(contactMechanism); researchStaff.setFirstName("John"); researchStaff.setLastName("Doe"); researchStaff.setMiddleName("middle"); UserBasedRecipient userBasedRecipient = new UserBasedRecipient(); userBasedRecipient.setResearchStaff(researchStaff); return userBasedRecipient; } /** * Gets the user based recipient for inv. * * @return the user based recipient for inv */ private UserBasedRecipient getUserBasedRecipientForInv() { Investigator investigator = new LocalInvestigator(); ContactMechanism contactMechanismInv = new ContactMechanism(); contactMechanismInv.setType(ContactMechanismType.EMAIL); contactMechanismInv.setValue(INV); investigator.getContactMechanisms().add(contactMechanismInv); investigator.setFirstName("Jane"); investigator.setLastName("Doe"); investigator.setMiddleName("middle"); UserBasedRecipient userBasedRecipient = new UserBasedRecipient(); userBasedRecipient.setInvestigator(investigator); return userBasedRecipient; } }
codebase/projects/core/test/src/java/edu/duke/cabig/c3pr/domain/UserBasedRecipientTest.java
package edu.duke.cabig.c3pr.domain; import edu.duke.cabig.c3pr.AbstractTestCase; /** * The Class UserBasedRecipientTest. */ public class UserBasedRecipientTest extends AbstractTestCase{ /** The Constant INV. */ public static final String INV = "[email protected]"; /** The Constant STAFF. */ public static final String STAFF = "[email protected]"; /** * Test get email address for Investigator. */ public void testGetEmailAddressForInv(){ UserBasedRecipient userBasedRecipient = getUserBasedRecipientForInv(); String emails = userBasedRecipient.getEmailAddress(); assertTrue(emails.equals(INV)); } /** * Test get email address for staff. */ public void testGetEmailAddressForStaff(){ UserBasedRecipient userBasedRecipient = getUserBasedRecipientForStaff(); String emails = userBasedRecipient.getEmailAddress(); assertTrue(emails.equals(STAFF)); } public void testGetFullNameForStaff(){ UserBasedRecipient userBasedRecipient = getUserBasedRecipientForStaff(); String name = userBasedRecipient.getFullName(); assertTrue(name.equals("John Doe")); } public void testGetFullNameForInv(){ UserBasedRecipient userBasedRecipient = getUserBasedRecipientForInv(); String name = userBasedRecipient.getFullName(); assertTrue(name.equals("Jane Doe")); } /** * Gets the user based recipient for staff. * * @return the user based recipient for staff */ private UserBasedRecipient getUserBasedRecipientForStaff() { ResearchStaff researchStaff = new LocalResearchStaff(); ContactMechanism contactMechanism = new ContactMechanism(); contactMechanism.setType(ContactMechanismType.EMAIL); contactMechanism.setValue(STAFF); researchStaff.getContactMechanisms().add(contactMechanism); researchStaff.setFirstName("John"); researchStaff.setLastName("Doe"); researchStaff.setMiddleName("middle"); UserBasedRecipient userBasedRecipient = new UserBasedRecipient(); userBasedRecipient.setResearchStaff(researchStaff); return userBasedRecipient; } /** * Gets the user based recipient for inv. * * @return the user based recipient for inv */ private UserBasedRecipient getUserBasedRecipientForInv() { Investigator investigator = new LocalInvestigator(); ContactMechanism contactMechanismInv = new ContactMechanism(); contactMechanismInv.setType(ContactMechanismType.EMAIL); contactMechanismInv.setValue(INV); investigator.getContactMechanisms().add(contactMechanismInv); investigator.setFirstName("Jane"); investigator.setLastName("Doe"); investigator.setMiddleName("middle"); UserBasedRecipient userBasedRecipient = new UserBasedRecipient(); userBasedRecipient.setInvestigator(investigator); return userBasedRecipient; } }
UPDATED
codebase/projects/core/test/src/java/edu/duke/cabig/c3pr/domain/UserBasedRecipientTest.java
UPDATED
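This record only carries the test side; the getEmailAddress and getFullName implementations under test are not shown. Purely as a hedged sketch of the lookup the assertions imply - scanning a user's contact mechanisms for the EMAIL-typed entry - something like the following would satisfy them (all names and the sample address are hypothetical, not c3pr code; the record syntax needs Java 16+):

import java.util.List;

public class ContactLookupSketch {
    enum ContactMechanismType { EMAIL, PHONE }

    record ContactMechanism(ContactMechanismType type, String value) { }

    // Hypothetical reconstruction of what the tested getEmailAddress() likely
    // does: return the value of the first EMAIL-typed contact mechanism.
    static String emailAddress(List<ContactMechanism> mechanisms) {
        return mechanisms.stream()
                .filter(m -> m.type() == ContactMechanismType.EMAIL)
                .map(ContactMechanism::value)
                .findFirst()
                .orElse("");
    }

    public static void main(String[] args) {
        List<ContactMechanism> cms = List.of(
                new ContactMechanism(ContactMechanismType.PHONE, "555-0100"),
                new ContactMechanism(ContactMechanismType.EMAIL, "[email protected]"));
        System.out.println(emailAddress(cms)); // [email protected]
    }
}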
Java
bsd-3-clause
6ac39a5509a9ce4388a6680f7b66641134c82ae0
0
chocoteam/choco3,piyushsh/choco3,Tiger66639/choco3,PhilAndrew/choco3gwt,piyushsh/choco3,piyushsh/choco3,Tiger66639/choco3,chocoteam/choco3,Tiger66639/choco3,chocoteam/choco3,cp-profiler/choco3,PhilAndrew/choco3gwt,cp-profiler/choco3,cp-profiler/choco3,chocoteam/choco3,PhilAndrew/choco3gwt,cp-profiler/choco3,Tiger66639/choco3,piyushsh/choco3
/** * Copyright (c) 1999-2011, Ecole des Mines de Nantes * All rights reserved. * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the Ecole des Mines de Nantes nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package parser.flatzinc.ast; import org.slf4j.LoggerFactory; import parser.flatzinc.FZNException; import parser.flatzinc.ast.expression.EAnnotation; import parser.flatzinc.ast.expression.EArray; import parser.flatzinc.ast.expression.EIdentifier; import parser.flatzinc.ast.expression.Expression; import parser.flatzinc.ast.searches.IntSearch; import parser.flatzinc.ast.searches.Strategy; import parser.flatzinc.ast.searches.VarChoice; import parser.flatzinc.parser.FZNParser; import solver.Solver; import solver.objective.MaxObjectiveManager; import solver.objective.MinObjectiveManager; import solver.search.loop.AbstractSearchLoop; import solver.search.strategy.StrategyFactory; import solver.search.strategy.strategy.AbstractStrategy; import solver.search.strategy.strategy.StrategiesSequencer; import solver.variables.IntVar; import solver.variables.Variable; import java.util.List; /* * User : CPRUDHOM * Mail : cprudhom(a)emn.fr * Date : 12 janv. 2010 * Since : Choco 2.1.1 * * Class for solve goals definition based on flatzinc-like objects. 
* * A solve goal is defined with: * </br> 'solve annotations satisfy;' * </br> or 'solve annotations maximize expression;' * </br> or 'solve annotations minimize expression;' */ public class SolveGoal { final List<EAnnotation> annotations; final Resolution type; final Expression expr; public enum Resolution { SATISFY, MINIMIZE, MAXIMIZE } private enum Search { int_search, bool_search, set_search } public SolveGoal(FZNParser parser, List<EAnnotation> annotations, Resolution type, Expression expr) { this.annotations = annotations; this.type = type; this.expr = expr; defineGoal(parser.solver); } private void defineGoal(Solver solver) { if (annotations.size() > 0) { AbstractStrategy strategy = null; if (annotations.size() > 1) { throw new UnsupportedOperationException("SolveGoal:: wrong annotations size"); } else { EAnnotation annotation = annotations.get(0); if (annotation.id.value.equals("seq_search")) { EArray earray = (EArray) annotation.exps.get(0); AbstractStrategy[] strategies = new AbstractStrategy[earray.what.size()]; for (int i = 0; i < strategies.length; i++) { strategies[i] = readSearchAnnotation((EAnnotation) earray.getWhat_i(i), solver); } strategy = new StrategiesSequencer(solver.getEnvironment(), strategies); } else { strategy = readSearchAnnotation(annotation, solver); } solver.set(strategy); } } else { LoggerFactory.getLogger(SolveGoal.class).warn("% No search annotation. Set default."); Variable[] vars = solver.getVars(); IntVar[] ivars = new IntVar[vars.length]; for (int i = 0; i < ivars.length; i++) { ivars[i] = (IntVar) vars[i]; } /*ActivityBased abs = new ActivityBased(solver, ivars, 0.999d, 0.2d, 8, 1.1d, 1, 29091981L); solver.set(abs); if (type != Resolution.SATISFY) { solver.getSearchLoop().plugSearchMonitor(new ABSLNS(solver, ivars, 29091981L, abs, false, ivars.length / 2)); }*/ solver.set(StrategyFactory.random(ivars, solver.getEnvironment())); } AbstractSearchLoop search = solver.getSearchLoop(); switch (type) { case SATISFY: search.stopAtFirstSolution(true); break; case MAXIMIZE: IntVar max = expr.intVarValue(solver); MaxObjectiveManager maom = new MaxObjectiveManager(max); maom.setMeasures(solver.getMeasures()); search.setObjectivemanager(maom); // solver.setRestart(true); search.stopAtFirstSolution(false); break; case MINIMIZE: IntVar min = expr.intVarValue(solver); MinObjectiveManager miom = new MinObjectiveManager(min); miom.setMeasures(solver.getMeasures()); search.setObjectivemanager(miom); // solver.setRestart(true); search.stopAtFirstSolution(false); break; } } /** * Read search annotation and build corresponding strategy * * @param e {@link parser.flatzinc.ast.expression.EAnnotation} * @param solver solver within which the search is defined * @return the search strategy corresponding to the annotation */ private AbstractStrategy readSearchAnnotation(EAnnotation e, Solver solver) { Expression[] exps = new Expression[e.exps.size()]; e.exps.toArray(exps); Search search = Search.valueOf(e.id.value); VarChoice vchoice = VarChoice.valueOf(((EIdentifier) exps[1]).value); parser.flatzinc.ast.searches.Assignment assignment = parser.flatzinc.ast.searches.Assignment.valueOf(((EIdentifier) exps[2]).value); switch (search) { case int_search: case bool_search: IntVar[] scope = exps[0].toIntVarArray(solver); return IntSearch.build(scope, vchoice, assignment, Strategy.complete, solver); case set_search: default: LoggerFactory.getLogger(SolveGoal.class).error("Unknown search annotation " + e.toString()); throw new FZNException(); } } }
parser/src/main/java/parser/flatzinc/ast/SolveGoal.java
/** * Copyright (c) 1999-2011, Ecole des Mines de Nantes * All rights reserved. * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the Ecole des Mines de Nantes nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package parser.flatzinc.ast; import org.slf4j.LoggerFactory; import parser.flatzinc.FZNException; import parser.flatzinc.ast.expression.EAnnotation; import parser.flatzinc.ast.expression.EArray; import parser.flatzinc.ast.expression.EIdentifier; import parser.flatzinc.ast.expression.Expression; import parser.flatzinc.ast.searches.IntSearch; import parser.flatzinc.ast.searches.Strategy; import parser.flatzinc.ast.searches.VarChoice; import parser.flatzinc.parser.FZNParser; import solver.Solver; import solver.objective.MaxObjectiveManager; import solver.objective.MinObjectiveManager; import solver.search.loop.AbstractSearchLoop; import solver.search.loop.monitors.ABSLNS; import solver.search.strategy.enumerations.sorters.ActivityBased; import solver.search.strategy.strategy.AbstractStrategy; import solver.search.strategy.strategy.StrategiesSequencer; import solver.variables.IntVar; import solver.variables.Variable; import java.util.List; /* * User : CPRUDHOM * Mail : cprudhom(a)emn.fr * Date : 12 janv. 2010 * Since : Choco 2.1.1 * * Class for solve goals definition based on flatzinc-like objects. 
* * A solve goal is defined with: * </br> 'solve annotations satisfy;' * </br> or 'solve annotations maximize expression;' * </br> or 'solve annotations minimize expression;' */ public class SolveGoal { final List<EAnnotation> annotations; final Resolution type; final Expression expr; public enum Resolution { SATISFY, MINIMIZE, MAXIMIZE } private enum Search { int_search, bool_search, set_search } public SolveGoal(FZNParser parser, List<EAnnotation> annotations, Resolution type, Expression expr) { this.annotations = annotations; this.type = type; this.expr = expr; defineGoal(parser.solver); } private void defineGoal(Solver solver) { if (annotations.size() > 0) { AbstractStrategy strategy = null; if (annotations.size() > 1) { throw new UnsupportedOperationException("SolveGoal:: wrong annotations size"); } else { EAnnotation annotation = annotations.get(0); if (annotation.id.value.equals("seq_search")) { EArray earray = (EArray) annotation.exps.get(0); AbstractStrategy[] strategies = new AbstractStrategy[earray.what.size()]; for (int i = 0; i < strategies.length; i++) { strategies[i] = readSearchAnnotation((EAnnotation) earray.getWhat_i(i), solver); } strategy = new StrategiesSequencer(solver.getEnvironment(), strategies); } else { strategy = readSearchAnnotation(annotation, solver); } solver.set(strategy); } } else { LoggerFactory.getLogger(SolveGoal.class).warn("% No search annotation. Set default."); Variable[] vars = solver.getVars(); IntVar[] ivars = new IntVar[vars.length]; for (int i = 0; i < ivars.length; i++) { ivars[i] = (IntVar) vars[i]; } ActivityBased abs = new ActivityBased(solver, ivars, 0.999d, 0.2d, 8, 1.1d, 1, 29091981L); solver.set(abs); if (type != Resolution.SATISFY) { solver.getSearchLoop().plugSearchMonitor(new ABSLNS(solver, ivars, 29091981L, abs, false, ivars.length / 2)); } } AbstractSearchLoop search = solver.getSearchLoop(); switch (type) { case SATISFY: search.stopAtFirstSolution(true); break; case MAXIMIZE: IntVar max = expr.intVarValue(solver); MaxObjectiveManager maom = new MaxObjectiveManager(max); maom.setMeasures(solver.getMeasures()); search.setObjectivemanager(maom); // solver.setRestart(true); search.stopAtFirstSolution(false); break; case MINIMIZE: IntVar min = expr.intVarValue(solver); MinObjectiveManager miom = new MinObjectiveManager(min); miom.setMeasures(solver.getMeasures()); search.setObjectivemanager(miom); // solver.setRestart(true); search.stopAtFirstSolution(false); break; } } /** * Read search annotation and build corresponding strategy * * @param e {@link parser.flatzinc.ast.expression.EAnnotation} * @param solver solver within which the search is defined * @return the search strategy corresponding to the annotation */ private AbstractStrategy readSearchAnnotation(EAnnotation e, Solver solver) { Expression[] exps = new Expression[e.exps.size()]; e.exps.toArray(exps); Search search = Search.valueOf(e.id.value); VarChoice vchoice = VarChoice.valueOf(((EIdentifier) exps[1]).value); parser.flatzinc.ast.searches.Assignment assignment = parser.flatzinc.ast.searches.Assignment.valueOf(((EIdentifier) exps[2]).value); switch (search) { case int_search: case bool_search: IntVar[] scope = exps[0].toIntVarArray(solver); return IntSearch.build(scope, vchoice, assignment, Strategy.complete, solver); case set_search: default: LoggerFactory.getLogger(SolveGoal.class).error("Unknown search annotation " + e.toString()); throw new FZNException(); } } }
Change default search in MZN
parser/src/main/java/parser/flatzinc/ast/SolveGoal.java
Change default search in MZN
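In both versions above, readSearchAnnotation maps the FlatZinc annotation identifier straight onto an enum with Search.valueOf and switches on the result, so unknown annotations fail loudly rather than building the wrong strategy. A stripped-down, self-contained sketch of that dispatch idiom (names hypothetical, no Choco dependency):

public class AnnotationDispatchSketch {
    enum Search { int_search, bool_search, set_search }

    // Mirrors readSearchAnnotation: the identifier string is mapped directly
    // onto an enum constant; unknown ids surface with a clear message.
    static String dispatch(String annotationId) {
        final Search search;
        try {
            search = Search.valueOf(annotationId);
        } catch (IllegalArgumentException e) {
            throw new IllegalArgumentException("Unknown search annotation " + annotationId, e);
        }
        switch (search) {
            case int_search:
            case bool_search:
                return "build int/bool search";
            case set_search:
            default:
                return "set search unsupported";
        }
    }

    public static void main(String[] args) {
        System.out.println(dispatch("int_search"));
        System.out.println(dispatch("set_search"));
        try {
            dispatch("no_such_search");
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());
        }
    }
}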
Java
bsd-3-clause
f2c464718ae9abc2f4e9c737d33224411dfed4e8
0
dhis2/dhis2-android-sdk,dhis2/dhis2-android-sdk,dhis2/dhis2-android-sdk
/* * Copyright (c) 2017, University of Oslo * * All rights reserved. * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of the HISP project nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package org.hisp.dhis.android.core.indicator; import android.database.sqlite.SQLiteStatement; import android.support.annotation.NonNull; import org.hisp.dhis.android.core.arch.db.binders.StatementBinder; import org.hisp.dhis.android.core.common.LinkModelStore; import org.hisp.dhis.android.core.common.StoreFactory; import org.hisp.dhis.android.core.data.database.DatabaseAdapter; import static org.hisp.dhis.android.core.utils.StoreUtils.sqLiteBind; public final class DataSetIndicatorLinkStore { private DataSetIndicatorLinkStore() {} private static final StatementBinder<DataSetIndicatorLinkModel> BINDER = new StatementBinder<DataSetIndicatorLinkModel>() { @Override public void bindToStatement(@NonNull DataSetIndicatorLinkModel o, @NonNull SQLiteStatement sqLiteStatement) { sqLiteBind(sqLiteStatement, 1, o.dataSet()); sqLiteBind(sqLiteStatement, 2, o.indicator()); } }; public static LinkModelStore<DataSetIndicatorLinkModel> create(DatabaseAdapter databaseAdapter) { return StoreFactory.linkModelStore(databaseAdapter, DataSetIndicatorLinkModel.TABLE, new DataSetIndicatorLinkModel.Columns(), DataSetIndicatorLinkModel.Columns.DATA_SET, BINDER); } }
core/src/main/java/org/hisp/dhis/android/core/indicator/DataSetIndicatorLinkStore.java
/* * Copyright (c) 2017, University of Oslo * * All rights reserved. * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of the HISP project nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package org.hisp.dhis.android.core.indicator; import android.database.sqlite.SQLiteStatement; import android.support.annotation.NonNull; import org.hisp.dhis.android.core.arch.db.binders.StatementBinder; import org.hisp.dhis.android.core.arch.db.binders.WhereStatementBinder; import org.hisp.dhis.android.core.common.ObjectWithoutUidStore; import org.hisp.dhis.android.core.common.StoreFactory; import org.hisp.dhis.android.core.data.database.DatabaseAdapter; import static org.hisp.dhis.android.core.utils.StoreUtils.sqLiteBind; public final class DataSetIndicatorLinkStore { private DataSetIndicatorLinkStore() {} private static final StatementBinder<DataSetIndicatorLinkModel> BINDER = new StatementBinder<DataSetIndicatorLinkModel>() { @Override public void bindToStatement(@NonNull DataSetIndicatorLinkModel o, @NonNull SQLiteStatement sqLiteStatement) { sqLiteBind(sqLiteStatement, 1, o.dataSet()); sqLiteBind(sqLiteStatement, 2, o.indicator()); } }; private static final WhereStatementBinder<DataSetIndicatorLinkModel> WHERE_UPDATE_BINDER = new WhereStatementBinder<DataSetIndicatorLinkModel>() { @Override public void bindToUpdateWhereStatement(@NonNull DataSetIndicatorLinkModel o, @NonNull SQLiteStatement sqLiteStatement) { sqLiteBind(sqLiteStatement, 3, o.dataSet()); sqLiteBind(sqLiteStatement, 4, o.indicator()); } }; public static ObjectWithoutUidStore<DataSetIndicatorLinkModel> create(DatabaseAdapter databaseAdapter) { return StoreFactory.objectWithoutUidStore(databaseAdapter, DataSetIndicatorLinkModel.TABLE, new DataSetIndicatorLinkModel.Columns(), BINDER, WHERE_UPDATE_BINDER); } }
data-set-data-element-indicator-link-handlers: change DataSetIndicatorLinkStore type to linkStore type
core/src/main/java/org/hisp/dhis/android/core/indicator/DataSetIndicatorLinkStore.java
data-set-data-element-indicator-link-handlers: change DataSetIndicatorLinkStore type to linkStore type
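Both the old and new versions of the store delegate parameter binding to a StatementBinder, so the generic store never touches model fields directly. A self-contained sketch of the idiom with a fake statement standing in for Android's SQLiteStatement (all names here hypothetical; the record syntax needs Java 16+):

import java.util.ArrayList;
import java.util.List;

public class BinderSketch {
    // Stand-in for SQLiteStatement: records 1-based positional bindings.
    static class FakeStatement {
        final List<String> bound = new ArrayList<>();
        void bindString(int index, String value) {
            while (bound.size() < index) {
                bound.add(null);
            }
            bound.set(index - 1, value);
        }
    }

    record DataSetIndicatorLink(String dataSet, String indicator) { }

    // The binder idiom from the record: the model knows nothing about SQL and
    // the generic store knows nothing about fields; the binder bridges the two.
    interface StatementBinder<T> {
        void bindToStatement(T model, FakeStatement stmt);
    }

    public static void main(String[] args) {
        StatementBinder<DataSetIndicatorLink> binder = (o, stmt) -> {
            stmt.bindString(1, o.dataSet());
            stmt.bindString(2, o.indicator());
        };
        FakeStatement stmt = new FakeStatement();
        binder.bindToStatement(new DataSetIndicatorLink("ds1", "ind1"), stmt);
        System.out.println(stmt.bound); // [ds1, ind1]
    }
}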
Java
bsd-3-clause
285b5d0de738b743bf9fcbcff972f114c03d62f2
0
NCIP/cagrid,NCIP/cagrid,NCIP/cagrid,NCIP/cagrid
package gov.nih.nci.cagrid.introduce.portal; import gov.nih.nci.cagrid.common.portal.ErrorDialog; import gov.nih.nci.cagrid.common.portal.SplashScreen; import java.awt.Dimension; import java.awt.EventQueue; import java.io.File; import java.io.FileWriter; import javax.swing.JFrame; import javax.swing.JOptionPane; import org.jdom.Document; import org.jdom.Element; import org.projectmobius.common.MobiusException; import org.projectmobius.common.XMLUtilities; import org.projectmobius.portal.GridPortal; import org.projectmobius.portal.PortalResourceManager; public final class Introduce { public static final String DEFAULT_CONFIG_FILE = "conf/introduce/introduce-portal-conf.xml"; private static SplashScreen introduceSplash; public static void main(String[] args) { showIntroduceSplash(); if (args.length > 0) { showGridPortal(args[0]); } else { showGridPortal(null); } EventQueue.invokeLater(new IntroduceSplashCloser()); } private static void showIntroduceSplash() { try { introduceSplash = new SplashScreen("/introduceSplash.png"); } catch (Exception e) { } } private static void showGridPortal(String confFile) { try { GridPortal portal = null; if (confFile == null) { confFile = DEFAULT_CONFIG_FILE; } checkGlobusLocation(confFile); portal = new GridPortal(confFile); Dimension dim = PortalResourceManager.getInstance().getGridPortalConfig().getApplicationDimensions(); portal.setSize(dim); try { portal.pack(); } catch (Exception e) { portal.setIconImage(null); portal.setSize(dim); portal.pack(); } portal.setVisible(true); portal.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); } catch (MobiusException e) { // TODO Auto-generated catch block e.printStackTrace(); } } private static void checkGlobusLocation(String configFilename) { try { Document doc = XMLUtilities.fileNameToDocument(new File(configFilename).getAbsolutePath()); Element resource = (Element) doc.getRootElement().getChildren("resource").get(1); Element globusConfig = resource.getChild("introduce-portal-config").getChild("globusLocation"); if (globusConfig.getText() == null || globusConfig.getText().length() == 0) { try { String globusLocation = System.getenv("GLOBUS_LOCATION"); globusConfig.setText(globusLocation); } catch (Exception ex) { ex.printStackTrace(); // not using PortalUtils.showErrorMessage because at this // point, // there IS no grid portal instance yet String[] error = {"Error getting GLOBUS_LOCATION environment variable: ", ex.getMessage(), "Please set GLOBUS_LOCATION in preferences!"}; // JOptionPane.showMessageDialog(null, error, "Configuration // Error", JOptionPane.ERROR_MESSAGE); ErrorDialog.showErrorDialog(error); } } // write the configuration back out to disk FileWriter fw = new FileWriter(configFilename); fw.write(XMLUtilities.formatXML(XMLUtilities.documentToString(doc))); fw.flush(); fw.close(); } catch (Exception ex) { ex.printStackTrace(); String[] error = {"Error updating configuration:", ex.getMessage()}; JOptionPane.showMessageDialog(null, error, "Configuration Error", JOptionPane.ERROR_MESSAGE); } } private static final class IntroduceSplashCloser implements Runnable { public void run() { try { introduceSplash.dispose(); } catch (Exception e) { } } } }
cagrid-1-0/caGrid/projects/introduce/src/java/Portal/gov/nih/nci/cagrid/introduce/portal/Introduce.java
package gov.nih.nci.cagrid.introduce.portal; import gov.nih.nci.cagrid.common.portal.ErrorDialog; import gov.nih.nci.cagrid.common.portal.SplashScreen; import java.awt.Dimension; import java.awt.EventQueue; import java.io.File; import java.io.FileWriter; import javax.swing.JFrame; import javax.swing.JOptionPane; import org.jdom.Document; import org.jdom.Element; import org.projectmobius.common.MobiusException; import org.projectmobius.common.XMLUtilities; import org.projectmobius.portal.GridPortal; import org.projectmobius.portal.PortalResourceManager; public final class Introduce { public static final String DEFAULT_CONFIG_FILE = "conf/introduce/introduce-portal-conf.xml"; private static SplashScreen introduceSplash; public static void main(String[] args) { showIntroduceSplash(); if (args.length > 0) { showGridPortal(args[0]); } else { showGridPortal(null); } EventQueue.invokeLater(new IntroduceSplashCloser()); } private static void showIntroduceSplash() { try { introduceSplash = new SplashScreen("/introduceSplash.png"); } catch (Exception e) { } } private static void showGridPortal(String confFile) { try { GridPortal portal = null; if (confFile == null) { confFile = DEFAULT_CONFIG_FILE; } checkGlobusLocation(confFile); portal = new GridPortal(confFile); Dimension dim = PortalResourceManager.getInstance().getGridPortalConfig().getApplicationDimensions(); portal.setSize(dim); try { portal.pack(); } catch (Exception e) { portal.setIconImage(null); portal.pack(); } portal.setVisible(true); portal.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); } catch (MobiusException e) { // TODO Auto-generated catch block e.printStackTrace(); } } private static void checkGlobusLocation(String configFilename) { try { Document doc = XMLUtilities.fileNameToDocument(new File(configFilename).getAbsolutePath()); Element resource = (Element) doc.getRootElement().getChildren("resource").get(1); Element globusConfig = resource.getChild("introduce-portal-config").getChild("globusLocation"); if (globusConfig.getText() == null || globusConfig.getText().length() == 0) { try { String globusLocation = System.getenv("GLOBUS_LOCATION"); globusConfig.setText(globusLocation); } catch (Exception ex) { ex.printStackTrace(); // not using PortalUtils.showErrorMessage because at this // point, // there IS no grid portal instance yet String[] error = {"Error getting GLOBUS_LOCATION environment variable: ", ex.getMessage(), "Please set GLOBUS_LOCATION in preferences!"}; // JOptionPane.showMessageDialog(null, error, "Configuration // Error", JOptionPane.ERROR_MESSAGE); ErrorDialog.showErrorDialog(error); } } // write the configuration back out to disk FileWriter fw = new FileWriter(configFilename); fw.write(XMLUtilities.formatXML(XMLUtilities.documentToString(doc))); fw.flush(); fw.close(); } catch (Exception ex) { ex.printStackTrace(); String[] error = {"Error updating configuration:", ex.getMessage()}; JOptionPane.showMessageDialog(null, error, "Configuration Error", JOptionPane.ERROR_MESSAGE); } } private static final class IntroduceSplashCloser implements Runnable { public void run() { try { introduceSplash.dispose(); } catch (Exception e) { } } } }
*** empty log message ***
cagrid-1-0/caGrid/projects/introduce/src/java/Portal/gov/nih/nci/cagrid/introduce/portal/Introduce.java
*** empty log message ***
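checkGlobusLocation in this final record is a repair-on-startup routine: parse the XML config, fill an empty globusLocation from the GLOBUS_LOCATION environment variable, and persist the file. The record uses JDOM and Mobius classes; a rough equivalent using only JDK XML APIs might look like the following sketch (it assumes a conf.xml containing a globusLocation element exists; all names are illustrative):

import java.io.File;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.w3c.dom.Document;
import org.w3c.dom.Element;

public class EnvFallbackSketch {
    public static void main(String[] args) throws Exception {
        File conf = new File("conf.xml"); // assumed: <conf><globusLocation/></conf>
        Document doc = DocumentBuilderFactory.newInstance()
                .newDocumentBuilder().parse(conf);
        Element loc = (Element) doc.getElementsByTagName("globusLocation").item(0);
        // Repair on startup: an empty config value is filled from the
        // environment and written back, as checkGlobusLocation does.
        if (loc != null && loc.getTextContent().trim().isEmpty()) {
            String env = System.getenv("GLOBUS_LOCATION");
            if (env != null) {
                loc.setTextContent(env);
                TransformerFactory.newInstance().newTransformer()
                        .transform(new DOMSource(doc), new StreamResult(conf));
            }
        }
        System.out.println("globusLocation = "
                + (loc == null ? "<missing>" : loc.getTextContent()));
    }
}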