name (stringlengths 12-178) | code_snippet (stringlengths 8-36.5k) | score (float64 3.26-3.68)
---|---|---|
graphhopper_LMApproximator_forLandmarks_rdh
|
/**
*
* @param weighting
 *            the weighting used for the current path calculation, not necessarily the same as the one used for the LM preparation.
 *            All edge weights must be greater than or equal to those used for the preparation.
 */
public static LMApproximator forLandmarks(Graph g, Weighting weighting, LandmarkStorage lms, int activeLM) {
    return new LMApproximator(g, lms.getWeighting(), weighting, lms.getBaseNodes(), lms, activeLM, lms.getFactor(), false);
}
| 3.26 |
graphhopper_LMApproximator_setEpsilon_rdh
|
/**
 * A higher epsilon increases the approximation.
*/
public LMApproximator setEpsilon(double epsilon) {
this.epsilon = epsilon;
return this;
}
| 3.26 |
graphhopper_HeadingEdgeFilter_getHeadingOfGeometryNearPoint_rdh
|
/**
* Calculates the heading (in degrees) of the given edge in fwd direction near the given point. If the point is
* too far away from the edge (according to the maxDistance parameter) it returns Double.NaN.
*/
static double getHeadingOfGeometryNearPoint(EdgeIteratorState edgeState, GHPoint point, double maxDistance) {
final DistanceCalc calcDist = DistanceCalcEarth.DIST_EARTH;
double closestDistance = Double.POSITIVE_INFINITY;
PointList points = edgeState.fetchWayGeometry(FetchMode.ALL);
int closestPoint = -1;
for (int i = 1; i < points.size(); i++) {
double fromLat = points.getLat(i - 1);
double fromLon = points.getLon(i - 1);
double toLat = points.getLat(i);
double toLon = points.getLon(i);
// the 'distance' between the point and an edge segment is either the vertical distance to the segment or
// the distance to the closer one of the two endpoints. here we save one call to calcDist per segment,
// because each endpoint appears in two segments (except the first and last).
double distance = (calcDist.validEdgeDistance(point.lat, point.lon, fromLat, fromLon, toLat, toLon)) ? calcDist.calcDenormalizedDist(calcDist.calcNormalizedEdgeDistance(point.lat, point.lon, fromLat, fromLon, toLat, toLon)) : calcDist.calcDist(fromLat, fromLon, point.lat, point.lon);
if (i == (points.size() - 1))
distance = Math.min(distance, calcDist.calcDist(toLat, toLon, point.lat, point.lon));
if (distance > maxDistance)
continue;
if (distance < closestDistance) {
closestDistance = distance;
closestPoint = i;
}
}
if (closestPoint < 0)
return Double.NaN;
double fromLat = points.getLat(closestPoint - 1);
double fromLon = points.getLon(closestPoint - 1);
double toLat = points.getLat(closestPoint);
double toLon = points.getLon(closestPoint);
return AngleCalc.ANGLE_CALC.calcAzimuth(fromLat, fromLon, toLat, toLon);
}
| 3.26 |
graphhopper_Measurement_start_rdh
|
// Creates a properties file in the key=value format.
// Every value is one y-value in a separate diagram, with an identical x-value for every Measurement.start call.
void start(PMap args) throws IOException {
final String graphLocation = args.getString("graph.location", "");
final boolean useJson = args.getBool("measurement.json", false);
boolean cleanGraph = args.getBool("measurement.clean", false);
stopOnError = args.getBool("measurement.stop_on_error", false);
String summaryLocation = args.getString("measurement.summaryfile", "");
final String timeStamp = new SimpleDateFormat("yyyy-MM-dd_HH:mm:ss").format(new Date());
put("measurement.timestamp", timeStamp);
String propFolder = args.getString("measurement.folder", "");
if (!propFolder.isEmpty()) {
Files.createDirectories(Paths.get(propFolder));
}
String propFilename = args.getString("measurement.filename", "");
if (isEmpty(propFilename)) {
if (useJson) {
// if we start from IDE or otherwise jar was not built using maven the git commit id will be unknown
String id = (Constants.GIT_INFO != null) ? Constants.GIT_INFO.getCommitHash().substring(0, 8) : "unknown";
propFilename = ((("measurement_" + id) + "_") + timeStamp) + ".json";
} else {
propFilename = ("measurement_" + timeStamp) + ".properties";
}
}
final String propLocation = Paths.get(propFolder).resolve(propFilename).toString();
seed = args.getLong("measurement.seed", 123);
put("measurement.gitinfo", args.getString("measurement.gitinfo", ""));
int count = args.getInt("measurement.count", 5000);
put("measurement.name", args.getString("measurement.name", "no_name"));
put("measurement.map", args.getString("datareader.file", "unknown"));
final boolean useMeasurementTimeAsRefTime = args.getBool("measurement.use_measurement_time_as_ref_time", false);
if (useMeasurementTimeAsRefTime && (!useJson)) {
throw new IllegalArgumentException("Using measurement time as reference time only works with json files");
}
GraphHopper hopper = new GraphHopper() {
@Override
protected Map<String, PrepareContractionHierarchies.Result> prepareCH(boolean closeEarly, List<CHConfig> configsToPrepare) {
StopWatch sw = new StopWatch().start();
Map<String, PrepareContractionHierarchies.Result> result = super.prepareCH(closeEarly, configsToPrepare);
// note that we measure the total time of all (possibly edge&node) CH preparations
put(CH.PREPARE + "time", sw.stop().getMillis());
if (result.get("profile_no_tc") != null) {
int shortcuts = result.get("profile_no_tc").getCHStorage().getShortcuts();
put(CH.PREPARE + "node.shortcuts", shortcuts);
put(CH.PREPARE + "node.time", result.get("profile_no_tc").getTotalPrepareTime());
}
if (result.get("profile_tc") != null) {
int shortcuts = result.get("profile_tc").getCHStorage().getShortcuts();
put(CH.PREPARE + "edge.shortcuts", shortcuts);
put(CH.PREPARE + "edge.time", result.get("profile_tc").getTotalPrepareTime());
}
return result;
}
@Override
protected List<PrepareLandmarks> prepareLM(boolean closeEarly, List<LMConfig> configsToPrepare) {
List<PrepareLandmarks> prepareLandmarks = super.prepareLM(closeEarly, configsToPrepare);
for (PrepareLandmarks plm : prepareLandmarks) {
put(Landmark.PREPARE + "time", plm.getTotalPrepareTime());
}
return prepareLandmarks;
}
@Override
protected void cleanUp() {
StopWatch sw = new StopWatch().start();
super.cleanUp();
put("graph.subnetwork_removal_time_ms", sw.stop().getMillis());
}
@Override
protected void importOSM() {
StopWatch sw = new StopWatch().start();
super.importOSM();
sw.stop();
put("graph.import_time", sw.getSeconds());
put("graph.import_time_ms", sw.getMillis());
}
};
hopper.init(createConfigFromArgs(args));
if (cleanGraph) {
hopper.clean();
}
hopper.importOrLoad();
BaseGraph g = hopper.getBaseGraph();
EncodingManager v24 = hopper.getEncodingManager();
BooleanEncodedValue accessEnc = v24.getBooleanEncodedValue(VehicleAccess.key(vehicle));
boolean withTurnCosts = v24.hasTurnEncodedValue(TurnRestriction.key(vehicle));
StopWatch sw = new StopWatch().start();
try {
maxNode = g.getNodes();
final boolean runSlow = args.getBool("measurement.run_slow_routing", true);
printGraphDetails(g, vehicle);
measureGraphTraversal(g, accessEnc, count * 100);
measureLocationIndex(g, hopper.getLocationIndex(), count);
if (runSlow) {
boolean isCH = false;
boolean v30 = false;
measureRouting(hopper, new QuerySettings("routing", count / 20, isCH, v30).withInstructions());
measureRouting(hopper, new QuerySettings("routing_alt", count / 500, isCH, v30).alternative());
if (withTurnCosts) {
measureRouting(hopper, new QuerySettings("routing_edge", count / 20, isCH, v30).withInstructions().edgeBased());
// unfortunately alt routes are so slow that we cannot really afford many iterations
measureRouting(hopper, new QuerySettings("routing_edge_alt", count / 500, isCH, v30).edgeBased().alternative());
}
}
if (hopper.getLMPreparationHandler().isEnabled()) {
gcAndWait();
boolean isCH = false;
boolean isLM = true;
Helper.parseList(args.getString("measurement.lm.active_counts", "[4,8,12]")).stream().mapToInt(Integer::parseInt).forEach(activeLMCount -> {
measureRouting(hopper, new QuerySettings("routingLM" + activeLMCount, count / 20, isCH, isLM).withInstructions().activeLandmarks(activeLMCount));
measureRouting(hopper, new QuerySettings(("routingLM" + activeLMCount) + "_alt", count / 500, isCH, isLM).activeLandmarks(activeLMCount).alternative());
if (args.getBool("measurement.lm.edge_based", withTurnCosts)) {
measureRouting(hopper, new QuerySettings(("routingLM" + activeLMCount) + "_edge", count / 20, isCH, isLM).withInstructions().activeLandmarks(activeLMCount).edgeBased());
measureRouting(hopper, new QuerySettings(("routingLM" + activeLMCount) + "_alt_edge", count / 500, isCH, isLM).activeLandmarks(activeLMCount).edgeBased().alternative());
}
});
}
if (hopper.getCHPreparationHandler().isEnabled()) {
boolean isCH = true;
boolean isLM = false;
gcAndWait();
RoutingCHGraph nodeBasedCH = hopper.getCHGraphs().get("profile_no_tc");
if (nodeBasedCH != null) {
measureGraphTraversalCH(nodeBasedCH, count * 100);
gcAndWait();
measureRouting(hopper, new QuerySettings("routingCH", count, isCH, isLM).withInstructions().sod());
measureRouting(hopper, new QuerySettings("routingCH_alt", count / 100, isCH, isLM).withInstructions().sod().alternative());
measureRouting(hopper, new QuerySettings("routingCH_with_hints", count, isCH, isLM).withInstructions().sod().withPointHints());
measureRouting(hopper, new QuerySettings("routingCH_no_sod", count, isCH, isLM).withInstructions());
measureRouting(hopper, new QuerySettings("routingCH_no_instr", count, isCH, isLM).sod());
measureRouting(hopper, new QuerySettings("routingCH_full", count, isCH, isLM).withInstructions().withPointHints().sod().simplify().pathDetails());
// for some strange (jvm optimizations) reason adding these measurements reduced the measured time for routingCH_full... see #2056
measureRouting(hopper, new QuerySettings("routingCH_via_100", count / 100, isCH, isLM).withPoints(100).sod());
measureRouting(hopper, new QuerySettings("routingCH_via_100_full", count / 100, isCH, isLM).withPoints(100).sod().withInstructions().simplify().pathDetails());
}
RoutingCHGraph edgeBasedCH = hopper.getCHGraphs().get("profile_tc");
if (edgeBasedCH != null) {
measureRouting(hopper, new QuerySettings("routingCH_edge", count, isCH, isLM).edgeBased().withInstructions());
measureRouting(hopper, new QuerySettings("routingCH_edge_alt", count / 100, isCH, isLM).edgeBased().withInstructions().alternative());
measureRouting(hopper, new QuerySettings("routingCH_edge_no_instr", count, isCH, isLM).edgeBased());
measureRouting(hopper, new QuerySettings("routingCH_edge_full", count, isCH, isLM).edgeBased().withInstructions().withPointHints().simplify().pathDetails());
// for some strange (jvm optimizations) reason adding these measurements reduced the measured time for routingCH_edge_full... see #2056
measureRouting(hopper, new QuerySettings("routingCH_edge_via_100", count / 100, isCH, isLM).withPoints(100).edgeBased().sod());
measureRouting(hopper, new QuerySettings("routingCH_edge_via_100_full", count / 100, isCH, isLM).withPoints(100).edgeBased().sod().withInstructions().simplify().pathDetails());
}
}
measureCountryAreaIndex(count);
} catch (Exception ex) {
logger.error("Problem while measuring " + graphLocation, ex);
if (stopOnError)
System.exit(1);
put("error", ex.toString());
} finally {
put("gh.gitinfo", Constants.GIT_INFO != null ? Constants.GIT_INFO.toString() : "unknown");
put("measurement.count", count);
put("measurement.seed", seed);
put("measurement.time", sw.stop().getMillis());
gcAndWait();
put("measurement.totalMB", getTotalMB());
put("measurement.usedMB", getUsedMB());
if (!isEmpty(summaryLocation)) {
writeSummary(summaryLocation, propLocation);
}
if (useJson) {
storeJson(propLocation, useMeasurementTimeAsRefTime);
} else {
m0(propLocation);
}
}
}
| 3.26 |
graphhopper_Measurement_writeSummary_rdh
|
/**
* Writes a selection of measurement results to a single line in
* a file. Each run of the measurement class will append a new line.
*/
private void writeSummary(String summaryLocation, String propLocation) {
logger.info("writing summary to " + summaryLocation);
// choose properties that should be in summary here
String[] properties = new String[]{
        "graph.nodes", "graph.edges", "graph.import_time",
        CH.PREPARE + "time", CH.PREPARE + "node.time", CH.PREPARE + "edge.time",
        CH.PREPARE + "node.shortcuts", CH.PREPARE + "edge.shortcuts",
        Landmark.PREPARE + "time",
        "routing.distance_mean", "routing.mean", "routing.visited_nodes_mean",
        "routingCH.distance_mean", "routingCH.mean", "routingCH.visited_nodes_mean",
        "routingCH_no_instr.mean", "routingCH_full.mean",
        "routingCH_edge.distance_mean", "routingCH_edge.mean", "routingCH_edge.visited_nodes_mean",
        "routingCH_edge_no_instr.mean", "routingCH_edge_full.mean",
        "routingLM8.distance_mean", "routingLM8.mean", "routingLM8.visited_nodes_mean",
        "measurement.seed", "measurement.gitinfo", "measurement.timestamp"};
File f = new File(summaryLocation);
boolean writeHeader = !f.exists();
try (FileWriter writer = new FileWriter(f, true)) {
if (writeHeader)
writer.write(getSummaryHeader(properties));
writer.write(m1(properties, propLocation));
} catch (IOException e) {
logger.error("Could not write summary to file '{}'", summaryLocation, e);
}
}
| 3.26 |
graphhopper_BikeCommonPriorityParser_collect_rdh
|
/**
*
* @param weightToPrioMap
 *            associates a weight with every priority. This sorted map allows
 *            subclasses to 'insert' more important priorities as well as to overwrite previously determined priorities.
*/
void collect(ReaderWay way, double wayTypeSpeed, TreeMap<Double, PriorityCode> weightToPrioMap) {
String highway = way.getTag("highway");
if (isDesignated(way)) {
if ("path".equals(highway))
weightToPrioMap.put(100.0, VERY_NICE);
else
weightToPrioMap.put(100.0, PREFER);
}
if ("cycleway".equals(highway)) {
if (way.hasTag("foot", intendedValues) && (!way.hasTag("segregated", "yes")))
weightToPrioMap.put(100.0, PREFER);
else
weightToPrioMap.put(100.0, VERY_NICE);
}
double maxSpeed = Math.max(getMaxSpeed(way, false), getMaxSpeed(way, true));
if (preferHighwayTags.contains(highway) || (isValidSpeed(maxSpeed) && (maxSpeed <= 30))) {
if ((!isValidSpeed(maxSpeed)) || (maxSpeed < avoidSpeedLimit)) {
weightToPrioMap.put(40.0, PREFER);
if (way.hasTag("tunnel", intendedValues))
weightToPrioMap.put(40.0, UNCHANGED);
}
} else if (avoidHighwayTags.containsKey(highway) || ((isValidSpeed(maxSpeed) && (maxSpeed >= avoidSpeedLimit)) && (!"track".equals(highway)))) {
PriorityCode priorityCode = avoidHighwayTags.get(highway);
weightToPrioMap.put(50.0, priorityCode == null ? AVOID : priorityCode);
if (way.hasTag("tunnel", intendedValues)) {
PriorityCode worse = (priorityCode == null) ? BAD : priorityCode.worse().worse();
weightToPrioMap.put(50.0, worse == EXCLUDE ? REACH_DESTINATION : worse);
}
}
String cycleway = way.getFirstPriorityTag(Arrays.asList("cycleway", "cycleway:left", "cycleway:right", "cycleway:both"));
if (Arrays.asList("lane", "opposite_track", "shared_lane", "share_busway", "shoulder").contains(cycleway)) {
weightToPrioMap.put(100.0, SLIGHT_PREFER);
} else if ("track".equals(cycleway)) {
weightToPrioMap.put(100.0, PREFER);
}
if (way.hasTag("bicycle", "use_sidepath")) {
weightToPrioMap.put(100.0, REACH_DESTINATION);
}
if (pushingSectionsHighways.contains(highway) || "parking_aisle".equals(way.getTag("service"))) {
PriorityCode pushingSectionPrio = SLIGHT_AVOID;
if (way.hasTag("bicycle", "yes") || way.hasTag("bicycle", "permissive"))
pushingSectionPrio = PREFER;
if (isDesignated(way) && (!way.hasTag("highway", "steps")))
pushingSectionPrio = VERY_NICE;
if (way.hasTag("foot", "yes")) {
pushingSectionPrio = pushingSectionPrio.worse();
if (way.hasTag("segregated", "yes"))
pushingSectionPrio = pushingSectionPrio.better();
}
if (way.hasTag("highway", "steps")) {
pushingSectionPrio = BAD;
}
weightToPrioMap.put(100.0, pushingSectionPrio);
}
if (way.hasTag("railway", "tram"))
weightToPrioMap.put(50.0, AVOID_MORE);
if (way.hasTag("lcn", "yes"))
weightToPrioMap.put(100.0, PREFER);
String classBicycleValue = way.getTag(classBicycleKey);
if (classBicycleValue != null) {
// We assume that humans are better in classifying preferences compared to our algorithm above -> weight = 100
weightToPrioMap.put(100.0, convertClassValueToPriority(classBicycleValue));
} else {
String classBicycle = way.getTag("class:bicycle");
if (classBicycle != null)
weightToPrioMap.put(100.0, convertClassValueToPriority(classBicycle));
}
// Increase the priority for scenic routes or in case that maxspeed limits our average speed as compensation. See #630
if (way.hasTag("scenic", "yes") || ((maxSpeed > 0) && (maxSpeed <= wayTypeSpeed))) {
PriorityCode lastEntryValue = weightToPrioMap.lastEntry().getValue();
if (lastEntryValue.getValue() < BEST.getValue())
weightToPrioMap.put(110.0, lastEntryValue.better());
}
}
| 3.26 |
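The weightToPrioMap contract above boils down to TreeMap semantics: each rule writes an entry keyed by an "importance" weight, a later write with the same key overwrites the earlier one, and handlePriority (shown below) picks lastEntry(), i.e. the entry with the largest key. A minimal, self-contained sketch of that mechanism using plain JDK types; the weights and labels are illustrative only, not taken from the parser:

```java
import java.util.TreeMap;

public class PriorityMapSketch {
    public static void main(String[] args) {
        TreeMap<Double, String> weightToPrio = new TreeMap<>();
        weightToPrio.put(40.0, "PREFER");             // e.g. a road-class based rule
        weightToPrio.put(100.0, "VERY_NICE");         // a more important rule uses a higher key
        weightToPrio.put(100.0, "REACH_DESTINATION"); // same key: the later write overwrites the earlier one
        // like handlePriority(): the entry with the biggest key wins
        System.out.println(weightToPrio.lastEntry().getValue()); // prints REACH_DESTINATION
    }
}
```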
graphhopper_BikeCommonPriorityParser_addPushingSection_rdh
|
// TODO duplicated in average speed
void addPushingSection(String highway) {
pushingSectionsHighways.add(highway);
}
| 3.26 |
graphhopper_BikeCommonPriorityParser_handlePriority_rdh
|
/**
* In this method we prefer cycleways or roads with designated bike access and avoid big roads
 * or roads with trams or pedestrians.
*
* @return new priority based on priorityFromRelation and on the tags in ReaderWay.
*/
int handlePriority(ReaderWay way, double wayTypeSpeed, Integer priorityFromRelation) {
TreeMap<Double, PriorityCode> weightToPrioMap = new TreeMap<>();
if (priorityFromRelation == null)
weightToPrioMap.put(0.0, UNCHANGED);
else
weightToPrioMap.put(110.0, PriorityCode.valueOf(priorityFromRelation));
collect(way, wayTypeSpeed, weightToPrioMap);
// pick priority with biggest order value
return weightToPrioMap.lastEntry().getValue().getValue();
}
| 3.26 |
graphhopper_BikeCommonPriorityParser_convertClassValueToPriority_rdh
|
// Conversion of class value to priority. See http://wiki.openstreetmap.org/wiki/Class:bicycle
private PriorityCode convertClassValueToPriority(String tagvalue) {
int classvalue;
try {
classvalue = Integer.parseInt(tagvalue);
} catch (NumberFormatException e) {
return UNCHANGED;
}
switch (classvalue) {
case 3 :
return BEST;
case 2 :
return VERY_NICE;
case 1 :
return PREFER;
case -1 :
return SLIGHT_AVOID;
case -2 :
return AVOID;
case -3 :
return AVOID_MORE;
default :
return UNCHANGED;
}
}
| 3.26 |
graphhopper_AbstractPathDetailsBuilder_startInterval_rdh
|
/**
* It is only possible to open one interval at a time. Calling <code>startInterval</code> when
* the interval is already open results in an Exception.
*
* @param firstIndex
 *            the index at which the PathDetail starts
*/
public void startInterval(int firstIndex) {
Object value = getCurrentValue();
if (isOpen)
throw new IllegalStateException((("PathDetailsBuilder is already in an open state with value: " + currentDetail.getValue()) + " trying to open a new one with value: ") + value);
currentDetail = new PathDetail(value);
currentDetail.setFirst(firstIndex);
isOpen = true;
}
| 3.26 |
graphhopper_AbstractPathDetailsBuilder_endInterval_rdh
|
/**
 * Writes the interval to the pathDetails. Ending an interval multiple times is safe: we only
 * write the interval if it was open and not empty.
 *
 * @param lastIndex
 *            the index at which the PathDetail ends
*/
public void endInterval(int lastIndex) {
if (isOpen) {
currentDetail.setLast(lastIndex);
pathDetails.add(currentDetail);
}
isOpen = false;
}
| 3.26 |
graphhopper_LineIntIndex_findEdgeIdsInNeighborhood_rdh
|
/**
* This method collects edge ids from the neighborhood of a point and puts them into foundEntries.
* <p>
* If it is called with iteration = 0, it just looks in the tile the query point is in.
 * If it is called repeatedly with iteration = 0,1,2,.., it will look in additional tiles further and further
* from the start tile. (In a square that grows by one pixel in all four directions per iteration).
* <p>
* See discussion at issue #221.
* <p>
*/
public void findEdgeIdsInNeighborhood(double queryLat, double queryLon, int iteration, IntConsumer foundEntries) {
int x = keyAlgo.x(queryLon);
int y = keyAlgo.y(queryLat);
for (int yreg = -iteration; yreg <= iteration; yreg++) {
int subqueryY = y + yreg;
int subqueryXA = x - iteration;
int subqueryXB = x + iteration;
if ((((subqueryXA >= 0) && (subqueryY >= 0)) && (subqueryXA < indexStructureInfo.getParts())) && (subqueryY < indexStructureInfo.getParts())) {
long keyPart = keyAlgo.encode(subqueryXA, subqueryY) << (64 - keyAlgo.getBits());
fillIDs(keyPart, foundEntries);
}
if (((((iteration > 0) && (subqueryXB >= 0)) && (subqueryY >= 0)) && (subqueryXB < indexStructureInfo.getParts())) && (subqueryY < indexStructureInfo.getParts())) {
long keyPart = keyAlgo.encode(subqueryXB, subqueryY) << (64 - keyAlgo.getBits());
fillIDs(keyPart, foundEntries);
}
}
for (int xreg = (-iteration) + 1; xreg <= (iteration - 1); xreg++) {
int subqueryX = x + xreg;
int subqueryYA = y - iteration;
int subqueryYB = y + iteration;
if ((((subqueryX >= 0) && (subqueryYA >= 0)) && (subqueryX < indexStructureInfo.getParts())) && (subqueryYA < indexStructureInfo.getParts())) {
long keyPart = keyAlgo.encode(subqueryX, subqueryYA) << (64 - keyAlgo.getBits());
fillIDs(keyPart, foundEntries);
}
if ((((subqueryX >= 0) && (subqueryYB >= 0)) && (subqueryX < indexStructureInfo.getParts())) && (subqueryYB < indexStructureInfo.getParts())) {
long keyPart = keyAlgo.encode(subqueryX, subqueryYB) << (64 - keyAlgo.getBits());
fillIDs(keyPart, foundEntries);
}
}
}
| 3.26 |
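A hedged usage sketch of the growing-square search described above: only the findEdgeIdsInNeighborhood signature is taken from the snippet; the lineIndex instance, the ring limit of 4 and the stop-after-first-hit strategy are illustrative assumptions.

```java
// query rings of tiles around (lat, lon) until at least one edge id was reported
static void collectNearbyEdges(LineIntIndex lineIndex, double lat, double lon, java.util.function.IntConsumer edgeIds) {
    final boolean[] found = {false};
    for (int iteration = 0; iteration < 4 && !found[0]; iteration++) {
        // iteration 0 checks only the query tile, 1,2,.. add a one-pixel wider square each time
        lineIndex.findEdgeIdsInNeighborhood(lat, lon, iteration, edgeId -> {
            found[0] = true;
            edgeIds.accept(edgeId);
        });
    }
}
```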
graphhopper_GTFSError_compareTo_rdh
|
/**
* must be comparable to put into mapdb
*/
public int compareTo(GTFSError o) {
if ((this.file == null) && (o.file != null))
return -1;
else if ((this.file != null) && (o.file == null))
return 1;
int file = ((this.file == null) && (o.file == null)) ? 0 : String.CASE_INSENSITIVE_ORDER.compare(this.file, o.file);
if (file != 0)
return file;
int errorType = String.CASE_INSENSITIVE_ORDER.compare(this.errorType, o.errorType);
if (errorType != 0)
return errorType;
int affectedEntityId = ((this.affectedEntityId == null) && (o.affectedEntityId == null)) ? 0 : String.CASE_INSENSITIVE_ORDER.compare(this.affectedEntityId, o.affectedEntityId);
if (affectedEntityId != 0)
return affectedEntityId;
else
return Long.compare(this.line, o.line);
}
| 3.26 |
graphhopper_CHStorage_getNodes_rdh
|
/**
* The number of nodes of this storage.
*/
public int getNodes() {
return nodeCount;
}
| 3.26 |
graphhopper_CHStorage_getShortcuts_rdh
|
/**
* The number of shortcuts that were added to this storage
*/
public int getShortcuts() {
return shortcutCount;
}
| 3.26 |
graphhopper_CHStorage_toNodePointer_rdh
|
/**
* To use the node getters/setters you need to convert node IDs to a nodePointer first
*/
public long toNodePointer(int node) {
assert (node >= 0) && (node < nodeCount) : ("node not in bounds: [0, " + nodeCount) + "[";
return ((long) (node)) * nodeCHEntryBytes;
}
| 3.26 |
graphhopper_CHStorage_create_rdh
|
/**
 * Creates a new storage. Alternatively we could load an existing one using {@link #loadExisting()}.
* The number of nodes must be given here while the expected number of shortcuts can
* be given to prevent some memory allocations, but is not a requirement. When in doubt rather use a small value
* so the resulting files/byte arrays won't be unnecessarily large.
* todo: we could also trim down the shortcuts DataAccess when we are done adding shortcuts
*/
public void create(int nodes, int expectedShortcuts) {
if (nodeCount >= 0)
throw new IllegalStateException("CHStorage can only be created once");
if (nodes < 0)
throw new IllegalStateException("CHStorage must be created with a positive number of nodes");
nodesCH.create(((long) (nodes)) * nodeCHEntryBytes);
nodeCount = nodes;
for (int node = 0; node < nodes; node++)
setLastShortcut(toNodePointer(node), -1);
shortcuts.create(((long) (expectedShortcuts)) * shortcutEntryBytes);
}
| 3.26 |
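A small sketch of how create and the pointer conversions listed in this table fit together. How a CHStorage instance is constructed is not shown here, so chStorage is an assumed, already-built instance:

```java
static void initAndInspect(CHStorage chStorage, int nodes) {
    // a deliberately small expectedShortcuts value keeps the backing files small (see javadoc above)
    chStorage.create(nodes, nodes / 2);
    long nodePointer = chStorage.toNodePointer(0);   // node id -> byte offset, id must be < getNodes()
    if (chStorage.getShortcuts() > 0) {
        long shortcutPointer = chStorage.toShortcutPointer(0); // shortcut id -> byte offset
    }
}
```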
graphhopper_CHStorage_setLowShortcutWeightConsumer_rdh
|
/**
 * Sets a callback that is called for shortcuts that are below the minimum weight, e.g. used to find/log mapping errors
*/
public void setLowShortcutWeightConsumer(Consumer<LowWeightShortcut> lowWeightShortcutConsumer) {
this.lowShortcutWeightConsumer = lowWeightShortcutConsumer;
}
| 3.26 |
graphhopper_CHStorage_toShortcutPointer_rdh
|
/**
 * To use the shortcut getters/setters you need to convert shortcut IDs to a shortcutPointer first
 */
public long toShortcutPointer(int shortcut) {
assert shortcut < shortcutCount : ((("shortcut " + shortcut) + " not in bounds [0, ") + shortcutCount) + "[";
return ((long) (shortcut)) * shortcutEntryBytes;
}
| 3.26 |
graphhopper_DirectionResolver_resolveDirections_rdh
|
/**
*
* @param node
* the node for which the incoming/outgoing edges should be determined
* @param location
* the location next to the road relative to which the 'left' and 'right' side edges should be determined
* @see DirectionResolver
*/
public DirectionResolverResult resolveDirections(int node, GHPoint location) {
AdjacentEdges adjacentEdges = calcAdjEdges(node);
if (adjacentEdges.numStandardEdges == 0) {
return DirectionResolverResult.impossible();
}
if ((!adjacentEdges.hasInEdges()) || (!adjacentEdges.hasOutEdges())) {
return DirectionResolverResult.impossible();
}
if (adjacentEdges.nextPoints.isEmpty()) {
return DirectionResolverResult.impossible();
}
if (adjacentEdges.numZeroDistanceEdges > 0) {
// if we snap to a tower node that is adjacent to a barrier edge we apply no restrictions. this is the
// easiest thing to do, but maybe we need a more sophisticated handling of this case in the future.
return DirectionResolverResult.unrestricted();
}
Point v1 = new Point(nodeAccess.getLat(node), nodeAccess.getLon(node));
if (adjacentEdges.nextPoints.contains(v1)) {
// this might happen if a pillar node of an adjacent edge has the same coordinates as the snapped point,
// but this should be prevented by the map import already
throw new IllegalStateException("Pillar node of adjacent edge matches snapped point, this should not happen");
}
// we can classify the different cases by the number of different next points!
if (adjacentEdges.nextPoints.size() == 1) {
Point neighbor = adjacentEdges.nextPoints.iterator().next();
List<Edge> inEdges = adjacentEdges.getInEdges(neighbor);
List<Edge> outEdges = adjacentEdges.getOutEdges(neighbor);
assert (inEdges.size() > 0) && (outEdges.size() > 0) : "if there is only one next point there has to be an in edge and an out edge connected with it";
// if there are multiple edges going to the (single) next point we cannot return a reasonable result and
// leave this point unrestricted
if ((inEdges.size() > 1) || (outEdges.size() > 1)) {
return DirectionResolverResult.unrestricted();
}
// since there is only one next point we know this is the end of a dead end street so the right and left
// side are treated equally and for both cases we use the only possible edge ids.
return DirectionResolverResult.restricted(inEdges.get(0).edgeId, outEdges.get(0).edgeId, inEdges.get(0).edgeId, outEdges.get(0).edgeId);
} else if (adjacentEdges.nextPoints.size() == 2) {
Iterator<Point> iter = adjacentEdges.nextPoints.iterator();
Point p1 = iter.next();
Point p2 = iter.next();
List<Edge> in1 = adjacentEdges.getInEdges(p1);
List<Edge> in2 = adjacentEdges.getInEdges(p2);
List<Edge> out1 = adjacentEdges.getOutEdges(p1);
List<Edge> out2 = adjacentEdges.getOutEdges(p2);
if ((((in1.size() > 1) || (in2.size() > 1)) || (out1.size() > 1)) || (out2.size() > 1)) {
return DirectionResolverResult.unrestricted();
}
if (((in1.size() + in2.size()) == 0) || ((out1.size() + out2.size()) == 0)) {
throw new IllegalStateException("there has to be at least one in and one out edge when there are two next points");
}
if (((in1.size() + out1.size()) == 0) || ((in2.size() + out2.size()) == 0)) {
throw new IllegalStateException("there has to be at least one in or one out edge for each of the two next points");
}
Point locationPoint = new Point(location.lat, location.lon);
if (in1.isEmpty() || out2.isEmpty()) {
return resolveDirections(v1, locationPoint, in2.get(0), out1.get(0));
} else if (in2.isEmpty() || out1.isEmpty()) {
return resolveDirections(v1, locationPoint, in1.get(0), out2.get(0));
} else {
return resolveDirections(v1, locationPoint, in1.get(0), out2.get(0), in2.get(0).edgeId, out1.get(0).edgeId);
}
} else {
// we snapped to a junction, in this case we do not apply restrictions
// note: TOWER and PILLAR mostly occur when location is near the end of a dead end street or a sharp
// curve, like switchbacks in the mountains of Andorra
return DirectionResolverResult.unrestricted();
}
}
| 3.26 |
graphhopper_GHResponse_getErrors_rdh
|
/**
* This method returns all the explicitly added errors and the errors of all paths.
*/
public List<Throwable> getErrors() {
List<Throwable> list = new ArrayList<>();
list.addAll(errors);
for (ResponsePath p : responsePaths) {
list.addAll(p.getErrors());
}
return list;
}
| 3.26 |
graphhopper_GHResponse_hasErrors_rdh
|
/**
* This method returns true if one of the paths has an error or if the response itself is
* erroneous.
*/
public boolean hasErrors() {
if (!errors.isEmpty())
return true;
for (ResponsePath p : responsePaths) {
if (p.hasErrors())
return true;
}
return false;
}
| 3.26 |
graphhopper_GHResponse_getBest_rdh
|
/**
* Returns the best path.
*/
public ResponsePath getBest() {
if (responsePaths.isEmpty())
throw new RuntimeException("Cannot fetch best response if list is empty");
return responsePaths.get(0);
}
| 3.26 |
graphhopper_StorableProperties_m0_rdh
|
/**
 * Converts the specified value to a string before saving it.
*/
public synchronized StorableProperties m0(String key, Object val) {
if (!key.equals(toLowerCase(key)))
throw new IllegalArgumentException(("Do not use upper case keys (" + key) + ") for StorableProperties since 0.7");
map.put(key, val.toString());
return this;
}
| 3.26 |
graphhopper_EdgeBasedWitnessPathSearcher_initSearch_rdh
|
/**
* Deletes the shortest path tree that has been found so far and initializes a new witness path search for a given
* node to be contracted and source edge key.
*
* @param sourceEdgeKey
* the key of the original edge incoming to s from which the search starts
* @param sourceNode
* the neighbor node from which the search starts (s)
* @param centerNode
* the node to be contracted (x)
*/
public void initSearch(int sourceEdgeKey, int sourceNode, int centerNode, Stats stats) {
this.f0 = stats;
stats.numTrees++;
this.sourceNode = sourceNode;
this.centerNode = centerNode;
// set start entry
weights[sourceEdgeKey] = 0;
parents[sourceEdgeKey] = -1;
setAdjNodeAndPathToCenter(sourceEdgeKey, sourceNode, true);
changedEdgeKeys.add(sourceEdgeKey);
dijkstraHeap.insert(0, sourceEdgeKey);
}
| 3.26 |
graphhopper_EdgeBasedWitnessPathSearcher_runSearch_rdh
|
/**
* Runs a witness path search for a given target edge key. Results of previous searches (the shortest path tree) are
* reused and the previous search is extended if necessary. Note that you need to call
* {@link #initSearch(int, int, int, Stats)} before calling this method to initialize the search.
*
* @param targetNode
* the neighbor node that should be reached by the path (t)
* @param targetEdgeKey
* the original edge key outgoing from t where the search ends
* @param acceptedWeight
* Once we find a path with a weight smaller or equal to this we return the weight. The
* returned weight might be larger than the weight of the real shortest path. If there is
* no path with weight smaller than or equal to this we stop the search and return the weight
* of the best path found so far.
* @return the weight of the found path or {@link Double#POSITIVE_INFINITY} if no path was found
 */
public double runSearch(int targetNode, int targetEdgeKey, double acceptedWeight, int maxPolls) {
f0.numSearches++;
// first we check if we can already reach the target edge from the shortest path tree we discovered so far
PrepareGraphOrigEdgeIterator inIter = origInEdgeExplorer.setBaseNode(targetNode);
while (inIter.next()) {
final int edgeKey = GHUtility.reverseEdgeKey(inIter.getOrigEdgeKeyLast());
if (weights[edgeKey] == Double.POSITIVE_INFINITY)
continue;
double v2 = weights[edgeKey] + calcTurnWeight(edgeKey, targetNode, targetEdgeKey);
if ((v2 < acceptedWeight) || ((v2 == acceptedWeight) && ((parents[edgeKey] < 0) || (!isPathToCenter(parents[edgeKey])))))
return v2;
}
// run the search
while (((!dijkstraHeap.isEmpty()) && (numPolls < maxPolls)) && // we *could* use dijkstraHeap.peekKey() instead, but since it is cast to float this might be smaller than
// the actual weight in which case the search might continue and find a false witness path when there is
// an adjacent zero weight edge *and* u-turn costs are zero. we could check this explicitly somewhere,
// but we just use the exact weight here instead. #2564
(weights[dijkstraHeap.peekElement()] < acceptedWeight)) {
int currKey = dijkstraHeap.poll();
numPolls++;
final int v4 = getAdjNode(currKey);
PrepareGraphEdgeIterator v5 = outEdgeExplorer.setBaseNode(v4);
double foundWeight = Double.POSITIVE_INFINITY;
while (v5.next()) {
// in a few very special cases this is needed to prevent paths that start with a zero weight loop from
// being recognized as witnesses when there are double zero weight loops at the source node
if (((v4 == sourceNode) && (v5.getAdjNode() == sourceNode)) && (v5.getWeight() < MAX_ZERO_WEIGHT_LOOP))
continue;
final double v7 = (weights[currKey] + calcTurnWeight(currKey, v4, v5.getOrigEdgeKeyFirst())) + v5.getWeight();
if (Double.isInfinite(v7))
continue;
final int key = v5.getOrigEdgeKeyLast();
final boolean isPathToCenter = isPathToCenter(currKey) && (v5.getAdjNode() == centerNode);
if (weights[key] == Double.POSITIVE_INFINITY) {
weights[key] = v7;
parents[key] = currKey;
setAdjNodeAndPathToCenter(key, v5.getAdjNode(), isPathToCenter);
changedEdgeKeys.add(key);
dijkstraHeap.insert(v7, key);
if ((v5.getAdjNode() == targetNode) && ((!isPathToCenter(currKey)) || (parents[currKey] < 0)))
foundWeight = Math.min(foundWeight, v7 + calcTurnWeight(key, targetNode, targetEdgeKey));
} else if ((v7 < weights[key]) || // if weights are equal make sure we prefer witness paths over bridge paths
((v7 == weights[key]) && (!isPathToCenter(currKey)))) {
numUpdates++;
weights[key] = v7;
parents[key] = currKey;
setAdjNodeAndPathToCenter(key, v5.getAdjNode(), isPathToCenter);
dijkstraHeap.update(v7, key);
if ((v5.getAdjNode() == targetNode) && ((!isPathToCenter(currKey)) || (parents[currKey] < 0)))
foundWeight = Math.min(foundWeight, v7 + calcTurnWeight(key, targetNode, targetEdgeKey));
}
}
// note that we have to finish the iteration for the current node, otherwise we'll never check the
// remaining edges again
if (foundWeight <= acceptedWeight)
return foundWeight;
}
if (numPolls == maxPolls)
f0.numCapped++;
return Double.POSITIVE_INFINITY;
}
| 3.26 |
graphhopper_AbstractBidirCHAlgo_fillEdgesToUsingFilter_rdh
|
/**
*
* @see #fillEdgesFromUsingFilter(CHEdgeFilter)
*/
protected void fillEdgesToUsingFilter(CHEdgeFilter edgeFilter) {
// we temporarily ignore the additionalEdgeFilter
CHEdgeFilter tmpFilter = levelEdgeFilter;
levelEdgeFilter = edgeFilter;
finishedTo = !fillEdgesTo();
levelEdgeFilter = tmpFilter;
}
| 3.26 |
graphhopper_AbstractBidirCHAlgo_fillEdgesFromUsingFilter_rdh
|
/**
*
* @param edgeFilter
* edge filter used to fill edges. the {@link #levelEdgeFilter} reference will be set to
* edgeFilter by this method, so make sure edgeFilter does not use it directly.
*/
protected void fillEdgesFromUsingFilter(CHEdgeFilter edgeFilter) {
// we temporarily ignore the additionalEdgeFilter
CHEdgeFilter tmpFilter = levelEdgeFilter;
levelEdgeFilter = edgeFilter;
finishedFrom = !fillEdgesFrom();
levelEdgeFilter = tmpFilter;
}
| 3.26 |
graphhopper_MMapDataAccess_clean_rdh
|
/**
* Cleans up MappedByteBuffers. Be sure you bring the segments list in a consistent state
* afterwards.
* <p>
*
* @param from
* inclusive
* @param to
* exclusive
 */
private void clean(int from, int to) {
for (int i = from; i < to; i++) {
ByteBuffer bb = segments.get(i);
cleanMappedByteBuffer(bb);
segments.set(i, null);
}
}
| 3.26 |
graphhopper_MMapDataAccess_load_rdh
|
/**
* Load memory mapped files into physical memory.
*/
public void load(int percentage) {
if ((percentage < 0) || (percentage > 100))
throw new IllegalArgumentException((("Percentage for MMapDataAccess.load for " + getName()) + " must be in [0,100] but was ") + percentage);
int max = Math.round((segments.size() * percentage) / 100.0F);
for (int i = 0; i < max; i++) {
segments.get(i).load();
}
}
| 3.26 |
graphhopper_RoadDensityCalculator_calcRoadDensity_rdh
|
/**
*
* @param radius
* in meters
* @param calcRoadFactor
* weighting function. use this to define how different kinds of roads shall contribute to the calculated road density
* @return the road density in the vicinity of the given edge, i.e. the weighted road length divided by the squared radius
*/
public double calcRoadDensity(EdgeIteratorState edge, double radius, ToDoubleFunction<EdgeIteratorState> calcRoadFactor) {
visited.clear();
deque.head = deque.tail = 0;
double totalRoadWeight = 0;
NodeAccess na = graph.getNodeAccess();
int baseNode = edge.getBaseNode();
int v6 = edge.getAdjNode();
GHPoint center = new GHPoint(getLat(na, baseNode, v6), getLon(na, baseNode, v6));
deque.addLast(baseNode);
deque.addLast(v6);
visited.add(baseNode);
visited.add(v6);
// we just do a BFS search and sum up all the road lengths
final double radiusNormalized = DIST_PLANE.calcNormalizedDist(radius);
// for long tunnels or motorway sections where the distance between the exit points and the
// center is larger than the radius it is important to continue the search even outside the radius
final int minPolls = ((int) (radius / 2));
int polls = 0;
while (!deque.isEmpty()) {
int node = deque.removeFirst();
polls++;
double distance = DIST_PLANE.calcNormalizedDist(center.lat, center.lon, na.getLat(node), na.getLon(node));
if ((polls > minPolls) && (distance > radiusNormalized))
continue;
EdgeIterator iter = edgeExplorer.setBaseNode(node);
while (iter.next()) {
if (visited.contains(iter.getAdjNode()))
continue;
visited.add(iter.getAdjNode());
if (distance <= radiusNormalized)
totalRoadWeight += calcRoadFactor.applyAsDouble(iter);
deque.addLast(iter.getAdjNode());
}
}
return (totalRoadWeight / radius) / radius;
}
| 3.26 |
graphhopper_RoadDensityCalculator_calcRoadDensities_rdh
|
/**
* Loops over all edges of the graph and calls the given edgeHandler for each edge. This is done in parallel using
* the given number of threads. For every call we can calculate the road density using the provided thread local
* road density calculator.
*/
public static void calcRoadDensities(Graph graph, BiConsumer<RoadDensityCalculator, EdgeIteratorState> edgeHandler, int threads) {
ThreadLocal<RoadDensityCalculator> calculator = ThreadLocal.withInitial(() -> new RoadDensityCalculator(graph));
Stream<Runnable> roadDensityWorkers = IntStream.range(0, graph.getEdges()).mapToObj(i -> () -> {
EdgeIteratorState edge = graph.getEdgeIteratorState(i, Integer.MIN_VALUE);
edgeHandler.accept(calculator.get(), edge);
});
GHUtility.runConcurrently(roadDensityWorkers, threads);
}
| 3.26 |
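A usage sketch combining the two methods above; the 300 m radius and the constant road factor of 1.0 are illustrative choices, not library defaults:

```java
static double[] densities(Graph graph, int threads) {
    double[] result = new double[graph.getEdges()];
    RoadDensityCalculator.calcRoadDensities(graph, (calculator, edge) -> {
        // the ToDoubleFunction decides how much each road contributes, here every road counts as 1
        result[edge.getEdge()] = calculator.calcRoadDensity(edge, 300, e -> 1.0);
    }, threads);
    return result;
}
```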
graphhopper_ReferentialIntegrityError_compareTo_rdh
|
/**
* must be comparable to put into mapdb
*/
@Override
public int compareTo(GTFSError o) {
int compare = super.compareTo(o);
if (compare != 0)
return compare;
return this.badReference.compareTo(((ReferentialIntegrityError) (o)).badReference);
}
| 3.26 |
graphhopper_Path_getWeight_rdh
|
/**
* This weight will be updated during the algorithm. The initial value is maximum double.
*/
public double getWeight() {
return weight;
}
| 3.26 |
graphhopper_Path_calcEdges_rdh
|
/**
* Returns the list of all edges.
*/
public List<EdgeIteratorState> calcEdges() {
final List<EdgeIteratorState> v5 = new ArrayList<>(edgeIds.size());
if (edgeIds.isEmpty())
return v5;
forEveryEdge(new EdgeVisitor() {
@Override
public void next(EdgeIteratorState eb, int index, int prevEdgeId) {
v5.add(eb);
}
@Override
public void finish() {
}
});
return v5;
}
| 3.26 |
graphhopper_Path_forEveryEdge_rdh
|
/**
* Iterates over all edges in this path sorted from start to end and calls the visitor callback
* for every edge.
* <p>
*
* @param visitor
* callback to handle every edge. The edge is decoupled from the iterator and can
* be stored.
*/
public void forEveryEdge(EdgeVisitor visitor) {
int tmpNode = m0();
int len = edgeIds.size();
int prevEdgeId = EdgeIterator.NO_EDGE;
for (int i = 0; i < len; i++) {
EdgeIteratorState edgeBase = graph.getEdgeIteratorState(edgeIds.get(i), tmpNode);
if (edgeBase == null)
throw new IllegalStateException((((((("Edge " + edgeIds.get(i)) + " was empty when requested with node ") + tmpNode) +
", array index:") + i) + ", edges:") + edgeIds.size());
tmpNode = edgeBase.getBaseNode();
// more efficient swap, currently not implemented for virtual edges: visitor.next(edgeBase.detach(true), i);
edgeBase = graph.getEdgeIteratorState(edgeBase.getEdge(), tmpNode);
visitor.next(edgeBase, i, prevEdgeId);
prevEdgeId = edgeBase.getEdge();
}
visitor.finish();
}
| 3.26 |
graphhopper_Path_setFromNode_rdh
|
/**
 * We need to remember fromNode explicitly as it's not saved in any edgeId of edgeIds.
 */
public Path setFromNode(int from) {
fromNode = from;
return this;
}
| 3.26 |
graphhopper_Path_getDistance_rdh
|
/**
*
 * @return distance in meters
*/
public double getDistance() {
return distance;
}
| 3.26 |
graphhopper_Path_calcPoints_rdh
|
/**
 * This method calculates the list of points for this path
* <p>
*
* @return the geometry of this path
*/
public PointList calcPoints() {
final PointList points = new PointList(edgeIds.size() + 1, nodeAccess.is3D());
if (edgeIds.isEmpty()) {
if (isFound()) {
points.add(nodeAccess, endNode);
}
return points;
}
int tmpNode = m0();
points.add(nodeAccess, tmpNode);
forEveryEdge(new EdgeVisitor() {
@Override
public void next(EdgeIteratorState eb, int index, int prevEdgeId) {
PointList pl = eb.fetchWayGeometry(FetchMode.PILLAR_AND_ADJ);
for (int j = 0; j < pl.size(); j++) {
points.add(pl, j);
}
}
@Override
public void finish() {
}
});
return points;
}
| 3.26 |
graphhopper_Path_getTime_rdh
|
/**
*
* @return time in millis
*/
public long getTime() {
return time;
}
| 3.26 |
graphhopper_Path_getFinalEdge_rdh
|
/**
* Yields the final edge of the path
*/
public EdgeIteratorState getFinalEdge() {
return graph.getEdgeIteratorState(edgeIds.get(edgeIds.size() - 1), endNode);
}
| 3.26 |
graphhopper_Path_calcNodes_rdh
|
/**
*
* @return the uncached node indices of the tower nodes in this path.
*/
public IntIndexedContainer calcNodes() {
final IntArrayList nodes = new IntArrayList(edgeIds.size() + 1);
if (edgeIds.isEmpty()) {
if (isFound()) {
nodes.add(endNode);
}
return nodes;
}
int tmpNode = m0();
nodes.add(tmpNode);
forEveryEdge(new EdgeVisitor() {
@Override
public void next(EdgeIteratorState eb, int index, int prevEdgeId) {
nodes.add(eb.getAdjNode());
}
@Override
public void finish() {
}
});
return nodes;
}
| 3.26 |
graphhopper_Path_m0_rdh
|
/**
*
* @return the first node of this Path.
*/
private int m0() {
if (fromNode < 0)
throw new IllegalStateException("fromNode < 0 should not happen");
return fromNode;
}
| 3.26 |
graphhopper_OSMFileHeader_create_rdh
|
/**
* Constructor for XML Parser
*/
public static OSMFileHeader create(long id, XMLStreamReader parser) throws XMLStreamException {
OSMFileHeader header = new OSMFileHeader();
parser.nextTag();
return header;
}
| 3.26 |
graphhopper_CarAverageSpeedParser_applyBadSurfaceSpeed_rdh
|
/**
*
* @param way
* needed to retrieve tags
* @param speed
* speed guessed e.g. from the road type or other tags
* @return The assumed speed
*/
protected double applyBadSurfaceSpeed(ReaderWay way, double speed) {
// limit speed if bad surface
if (((badSurfaceSpeed > 0) && isValidSpeed(speed)) && (speed > badSurfaceSpeed)) {
String surface = way.getTag("surface", "");
int colonIndex = surface.indexOf(":");
if (colonIndex != (-1))
surface = surface.substring(0, colonIndex);
if (badSurfaceSpeedMap.contains(surface))
speed = badSurfaceSpeed;
}
return speed;
}
| 3.26 |
graphhopper_MaxHeight_create_rdh
|
/**
 * Currently allows storing values from 0.1 up to max = 0.1*2⁷ m, plus infinity. If a value lies between the
 * maximum and infinity it is assumed to be the maximum value.
 */
public static DecimalEncodedValue create() {
return new DecimalEncodedValueImpl(KEY, 7, 0, 0.1, false, false, true);
}
| 3.26 |
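For orientation, the arithmetic behind the javadoc: with 7 bits and a factor of 0.1 the nominal maximum is 0.1 * 2^7 = 12.8 m. Finite heights above that are assumed to be the maximum, and infinity is presumably encoded by a reserved raw value (the trailing boolean in the constructor), so "no limit" remains representable.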
graphhopper_OSMReaderConfig_setMaxWayPointDistance_rdh
|
/**
* This parameter affects the routine used to simplify the edge geometries (Ramer-Douglas-Peucker). Higher values mean
* more details are preserved. The default is 1 (meter). Simplification can be disabled by setting it to 0.
*/
public OSMReaderConfig setMaxWayPointDistance(double maxWayPointDistance) {
this.maxWayPointDistance = maxWayPointDistance;
return this;
}
| 3.26 |
graphhopper_OSMReaderConfig_setWorkerThreads_rdh
|
/**
* Sets the number of threads used for the OSM import
*/
public OSMReaderConfig setWorkerThreads(int workerThreads) {
this.workerThreads = workerThreads;
return this;
}
| 3.26 |
graphhopper_OSMReaderConfig_setElevationMaxWayPointDistance_rdh
|
/**
* Sets the max elevation discrepancy between way points and the simplified polyline in meters
*/
public OSMReaderConfig setElevationMaxWayPointDistance(double elevationMaxWayPointDistance) {
this.elevationMaxWayPointDistance = elevationMaxWayPointDistance;
return this;
}
| 3.26 |
graphhopper_OSMReaderConfig_setLongEdgeSamplingDistance_rdh
|
/**
* Sets the distance between elevation samples on long edges
*/
public OSMReaderConfig setLongEdgeSamplingDistance(double longEdgeSamplingDistance) {
this.longEdgeSamplingDistance = longEdgeSamplingDistance;
return this;
}
| 3.26 |
graphhopper_TurnCostStorage_set_rdh
|
/**
* Sets the turn cost at the viaNode when going from "fromEdge" to "toEdge"
*/
public void set(DecimalEncodedValue turnCostEnc, int fromEdge, int viaNode, int toEdge, double cost) {
long pointer = findOrCreateTurnCostEntry(fromEdge, viaNode, toEdge);
if (pointer < 0)
throw new IllegalStateException(((((((("Invalid pointer: " + pointer) + " at (") + fromEdge) + ", ") + viaNode) + ", ") + toEdge) + ")");
turnCostEnc.setDecimal(false, -1, createIntAccess(pointer), cost);
}
| 3.26 |
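A hedged usage sketch of set: the storage and the turn-cost encoded value are assumed to already exist, and using Double.POSITIVE_INFINITY to model a turn restriction assumes the encoded value is configured to support infinity:

```java
static void forbidTurn(TurnCostStorage turnCostStorage, DecimalEncodedValue turnCostEnc,
                       int fromEdge, int viaNode, int toEdge) {
    // an infinite cost effectively forbids the turn; a finite value would act as a penalty instead
    turnCostStorage.set(turnCostEnc, fromEdge, viaNode, toEdge, Double.POSITIVE_INFINITY);
}
```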
graphhopper_TurnCostStorage_getAllTurnCosts_rdh
|
// TODO: Maybe some of the stuff above could now be re-implemented in a simpler way with some of the stuff below.
// For now, I just wanted to iterate over all entries.
/**
* Returns an iterator over all entries.
*
* @return an iterator over all entries.
*/
public Iterator getAllTurnCosts() {
return new Itr();
}
| 3.26 |
graphhopper_HmmProbabilities_transitionLogProbability_rdh
|
/**
* Returns the logarithmic transition probability density for the given
* transition parameters.
*
* @param routeLength
* Length of the shortest route [m] between two
* consecutive map matching candidates.
* @param linearDistance
* Linear distance [m] between two consecutive GPS
* measurements.
*/
public double transitionLogProbability(double routeLength, double linearDistance) {
// Transition metric taken from Newson & Krumm.
double transitionMetric = Math.abs(linearDistance - routeLength);
return Distributions.logExponentialDistribution(beta, transitionMetric);
}
| 3.26 |
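For reference, with the transition metric x = |linearDistance - routeLength| the exponential density from Newson & Krumm gives log f(x) = log(1/beta) - x/beta = -log(beta) - x/beta, assuming logExponentialDistribution implements exactly that density.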
graphhopper_HmmProbabilities_emissionLogProbability_rdh
|
/**
* Returns the logarithmic emission probability density.
*
* @param distance
* Absolute distance [m] between GPS measurement and map
* matching candidate.
*/
public double emissionLogProbability(double distance) {
return Distributions.logNormalDistribution(sigma, distance);
}
| 3.26 |
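Similarly, assuming logNormalDistribution is the log of a zero-mean normal density with standard deviation sigma, the emission term is log f(d) = -log(sigma * sqrt(2 * pi)) - d^2 / (2 * sigma^2), where d is the distance between the GPS measurement and the map matching candidate.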
graphhopper_ShortcutUnpacker_visitOriginalEdgesFwd_rdh
|
/**
* Finds an edge/shortcut with the given id and adjNode and calls the visitor for each original edge that is
* packed inside this shortcut (or if an original edge is given simply calls the visitor on it).
*
* @param reverseOrder
* if true the original edges will be traversed in reverse order
*/
public void visitOriginalEdgesFwd(int edgeId, int adjNode, boolean reverseOrder, int prevOrNextEdgeId) {
doVisitOriginalEdges(edgeId, adjNode, reverseOrder, false, prevOrNextEdgeId);
}
| 3.26 |
graphhopper_RouterConfig_setCalcPoints_rdh
|
/**
 * This method enables GPS point calculation. If disabled, only the distance will be calculated.
*/
public void setCalcPoints(boolean calcPoints) {
this.calcPoints = calcPoints;
}
| 3.26 |
graphhopper_RouterConfig_setTimeoutMillis_rdh
|
/**
* Limits the runtime of routing requests to the given amount of milliseconds. This only works up to a certain
* precision, but should be sufficient to cancel long-running requests in most cases. The exact implementation of
* the timeout depends on the routing algorithm.
 */
public void setTimeoutMillis(long timeoutMillis) {
this.timeoutMillis = timeoutMillis;
}
| 3.26 |
graphhopper_RouterConfig_setSimplifyResponse_rdh
|
/**
* This method specifies if the returned path should be simplified or not, via Ramer-Douglas-Peucker
* or similar algorithm.
*/
public void setSimplifyResponse(boolean simplifyResponse) {
this.f0 = simplifyResponse;
}
| 3.26 |
graphhopper_InstructionsOutgoingEdges_getSpeed_rdh
|
/**
* Will return the tagged maxspeed, if available, if not, we use the average speed
* TODO: Should we rely only on the tagged maxspeed?
*/
private double getSpeed(EdgeIteratorState edge) {
double maxSpeed = edge.get(maxSpeedEnc);
if (Double.isInfinite(maxSpeed))
return (edge.getDistance() / weighting.calcEdgeMillis(edge, false)) * 3600;
return maxSpeed;
}
| 3.26 |
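The fallback branch is a unit conversion: getDistance() is in meters and calcEdgeMillis(...) in milliseconds, so distance/millis is m/ms = km/s and multiplying by 3600 yields km/h. For example, 100 m traversed in 9000 ms gives (100 / 9000) * 3600 = 40 km/h.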
graphhopper_InstructionsOutgoingEdges_getVisibleTurns_rdh
|
/**
* This method calculates the number of all outgoing edges, which could be considered the number of roads you see
* at the intersection. This excludes the road you are coming from and also inaccessible roads.
*/
public int getVisibleTurns() {
return 1 + visibleAlternativeTurns.size();
}
| 3.26 |
graphhopper_InstructionsOutgoingEdges_outgoingEdgesAreSlowerByFactor_rdh
|
/**
 * Checks if the outgoing edges are slower by the provided factor. If they are, this indicates that we are staying
* on the prominent street that one would follow anyway.
*/
public boolean outgoingEdgesAreSlowerByFactor(double factor) {
double tmpSpeed = getSpeed(currentEdge);
double pathSpeed = getSpeed(prevEdge);
// speed change indicates that we change road types
if (Math.abs(pathSpeed - tmpSpeed) >= 1) {
return false;
}
double maxSurroundingSpeed = -1;
for (EdgeIteratorState edge : allowedAlternativeTurns) {
tmpSpeed = getSpeed(edge);
if (tmpSpeed > maxSurroundingSpeed) {
maxSurroundingSpeed = tmpSpeed;
}
}
// surrounding streets need to be slower by a factor and call round() so that tiny differences are ignored
return Math.round(maxSurroundingSpeed * factor) < Math.round(pathSpeed);
}
| 3.26 |
graphhopper_InstructionsOutgoingEdges_isLeavingCurrentStreet_rdh
|
/**
* If the name and prevName changes this method checks if either the current street is continued on a
* different edge or if the edge we are turning onto is continued on a different edge.
* If either of these properties is true, we can be quite certain that a turn instruction should be provided.
*/
public boolean isLeavingCurrentStreet(String prevName, String name) {
if (InstructionsHelper.isNameSimilar(name, prevName)) {
return false;
}
boolean roadClassOrLinkChange = !isTheSameRoadClassAndLink(prevEdge, currentEdge);
for (EdgeIteratorState edge : allowedAlternativeTurns) {
String edgeName = edge.getName();
// leave the current street
if (InstructionsHelper.isNameSimilar(prevName, edgeName) || (roadClassOrLinkChange && isTheSameRoadClassAndLink(prevEdge, edge))) {
return true;
}
// enter a different street
if (InstructionsHelper.isNameSimilar(name, edgeName) || (roadClassOrLinkChange && isTheSameRoadClassAndLink(currentEdge, edge))) {
return true;
}
}
return false;
}
| 3.26 |
graphhopper_EdgeIterator_isValid_rdh
|
/**
* Checks if a given integer edge ID is valid or not. Edge IDs >= 0 are considered valid, while negative
* values are considered as invalid. However, some negative values are used as special values, e.g. {@link #NO_EDGE}.
*/
public static boolean isValid(int edgeId) {
return edgeId >= 0;
}
| 3.26 |
graphhopper_SRTMProvider_init_rdh
|
/**
* The URLs are a bit ugly and so we need to find out which area name a certain lat,lon
* coordinate has.
*/
private SRTMProvider init() {
try {
String strs[] = new String[]{ "Africa", "Australia", "Eurasia",
"Islands", "North_America", "South_America"
};
for (String str : strs) {
InputStream is = getClass().getResourceAsStream(str + "_names.txt");
for (String line : Helper.readFile(new InputStreamReader(is, Helper.UTF_CS))) {
int lat = Integer.parseInt(line.substring(1, 3));
if (line.substring(0, 1).charAt(0) == 'S')
lat = -lat;
int lon = Integer.parseInt(line.substring(4, 7));
if (line.substring(3, 4).charAt(0) == 'W')
lon = -lon;
int intKey = calcIntKey(lat, lon);
String key = areas.put(intKey, str);
if (key != null)
throw new IllegalStateException((((("do not overwrite existing! key " + intKey) + " ") + key) + " vs. ") + str);
}
}
return this;
} catch (Exception ex) {
throw new IllegalStateException("Cannot load area names from classpath", ex);
}
}
| 3.26 |
graphhopper_MaxLength_create_rdh
|
/**
 * Currently allows storing values from 0.1 up to max = 0.1*2⁷ m, plus infinity. If a value lies
 * between the maximum and infinity it is assumed to be the maximum value.
*/
public static DecimalEncodedValue create() {
return new DecimalEncodedValueImpl(KEY, 7, 0, 0.1, false, false, true);
}
| 3.26 |
graphhopper_IntsRef_isValid_rdh
|
/**
* Performs internal consistency checks.
* Always returns true (or throws IllegalStateException)
*/
public boolean isValid() {
if (ints == null) {
throw new IllegalStateException("ints is null");
}
if (length < 0) {
throw new IllegalStateException("length is negative: " + length);
}
if (length > ints.length) {
throw new IllegalStateException((("length is out of bounds: " + length) + ",ints.length=") + ints.length);
}
if (f0 < 0) {
throw new IllegalStateException("offset is negative: " + f0);
}
if (f0 > ints.length) {
throw new IllegalStateException((("offset out of bounds: " + f0) + ",ints.length=") + ints.length);
}
if ((f0 + length) < 0) {
throw new IllegalStateException((("offset+length is negative: offset=" + f0) + ",length=") + length);
}
if ((f0 + length) > ints.length) {
throw new IllegalStateException((((("offset+length out of bounds: offset=" + f0) + ",length=") + length) + ",ints.length=") + ints.length);
}
return true;
}
| 3.26 |
graphhopper_IntsRef_deepCopyOf_rdh
|
/**
* Creates a new IntsRef that points to a copy of the ints from
* <code>other</code>
* <p>
* The returned IntsRef will have a length of other.length
* and an offset of zero.
*/
public static IntsRef deepCopyOf(IntsRef other) {
return new IntsRef(Arrays.copyOfRange(other.ints, other.f0, other.f0 + other.length), 0, other.length);
}
| 3.26 |
graphhopper_IntsRef_compareTo_rdh
|
/**
* Signed int order comparison
*/
@Override
public int compareTo(IntsRef other) {
if (this == other)
return 0;
final int[] v8 = this.ints;
int aUpto = this.f0;
final int[] bInts = other.ints;
int bUpto = other.f0;
final int aStop = aUpto + Math.min(this.length, other.length);
while (aUpto < aStop) {
int aInt = v8[aUpto++];
int bInt = bInts[bUpto++];
if (aInt > bInt) {
return 1;
} else if (aInt < bInt) {
return -1;
}
}
// One is a prefix of the other, or they are equal:
return this.length - other.length;
}
| 3.26 |
graphhopper_KVStorage_add_rdh
|
/**
 * This method writes the specified entries (key-value pairs) into the storage. Please note that null keys or null
 * values are rejected. The Class of a value can only be: byte[], String, int, long, float or double
 * (or more precisely, their wrapper equivalents). For all other types an exception is thrown. The first call of add
 * assigns a Class to every key and future calls of add will throw an exception if this Class differs.
*
* @return entryPointer with which you can later fetch the entryMap via the get or getAll method
*/
public long add(final List<KeyValue> entries) {
if (entries == null)
throw new IllegalArgumentException("specified List must not be null");
if (entries.isEmpty())
return EMPTY_POINTER;
else if (entries.size() > 200)
throw new IllegalArgumentException("Cannot store more than 200 entries per entry");
// This is a very important "compression" mechanism because one OSM way is split into multiple edges and so we
// can often re-use the serialized key-value pairs of the previous edge.
if (isEquals(entries, lastEntries))
return lastEntryPointer;
// If the Class of a value is unknown it should already fail here, before we modify internal data. (see #2597#discussion_r896469840)
for (KeyValue kv : entries)
if (keyToIndex.get(kv.key) != null)
getBytesForValue(indexToClass.get(keyToIndex.get(kv.key)), kv.value);
lastEntries = entries;
lastEntryPointer = bytePointer;
vals.ensureCapacity(bytePointer + 1);
vals.setByte(bytePointer, ((byte) (entries.size())));
bytePointer = m0(bytePointer, entries);
if (bytePointer < 0)
throw new IllegalStateException("Negative bytePointer in KVStorage");
return lastEntryPointer;
}
| 3.26 |
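A hedged usage sketch of add: kvStorage is assumed to be an already constructed KVStorage, and the KeyValue(String, Object) constructor is an assumption inferred from the kv.key/kv.value accesses above (the "street_name" key is purely illustrative):

```java
// requires java.util.Arrays and java.util.List imports
static long storeStreetName(KVStorage kvStorage) {
    List<KeyValue> entries = Arrays.asList(
            new KeyValue("street_name", KVStorage.cutString("Hauptstraße")));
    long pointer = kvStorage.add(entries);
    // later: kvStorage.getMap(pointer).get("street_name") returns the stored value
    return pointer;
}
```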
graphhopper_KVStorage_getMap_rdh
|
/**
* Please note that this method ignores potentially different tags for forward and backward direction. To avoid this
* use {@link #getAll(long)} instead.
*/
public Map<String, Object> getMap(final long entryPointer) {
if (entryPointer < 0)
throw new IllegalStateException("Pointer to access KVStorage cannot be negative:" + entryPointer);
if (entryPointer == EMPTY_POINTER)
return Collections.emptyMap();
    int keyCount = vals.getByte(entryPointer) & 0xff;
    if (keyCount == 0)
        return Collections.emptyMap();
    HashMap<String, Object> map = new HashMap<>(keyCount);
    long tmpPointer = entryPointer + 1;
    AtomicInteger sizeOfObject = new AtomicInteger();
    for (int i = 0; i < keyCount; i++) {
        int currentKeyIndexRaw = vals.getShort(tmpPointer);
        int currentKeyIndex = currentKeyIndexRaw >>> 2;
        tmpPointer += 2;
        Object object = deserializeObj(sizeOfObject, tmpPointer, indexToClass.get(currentKeyIndex));
        tmpPointer += sizeOfObject.get();
        String key = indexToKey.get(currentKeyIndex);
        map.put(key, object);
}
return map;
}
| 3.26 |
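A minimal round-trip sketch of add and getMap. It assumes `kv` is an already created KVStorage instance and that KeyValue offers a (key, value) constructor; both are assumptions, not shown in the snippets above, and the tag names are made up.

List<KeyValue> entries = Arrays.asList(
        new KeyValue("street_name", "Main Street"),
        new KeyValue("max_height", 3.5));
long pointer = kv.add(entries);               // identical consecutive lists re-use the same pointer
Map<String, Object> map = kv.getMap(pointer); // {street_name=Main Street, max_height=3.5}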
graphhopper_KVStorage_isEquals_rdh
|
// compared to entries.equals(lastEntries) this method avoids a NPE if a value is null and throws an IAE instead
private boolean isEquals(List<KeyValue> entries, List<KeyValue> lastEntries) {
if ((lastEntries != null) && (entries.size() == lastEntries.size())) {
for (int i = 0; i < entries.size(); i++) {
KeyValue kv = entries.get(i);
if (kv.value == null)
throw new IllegalArgumentException(("value for key " + kv.key) + " cannot be null");
if (!kv.equals(lastEntries.get(i)))
return false;
}
return true;
}
return false;
}
| 3.26 |
graphhopper_KVStorage_deserializeObj_rdh
|
/**
* This method creates an Object (type Class) which is located at the specified pointer
*/
private Object deserializeObj(AtomicInteger sizeOfObject, long pointer, Class<?> clazz) {
if (hasDynLength(clazz)) {
    int valueLength = vals.getByte(pointer) & 0xff;
pointer++;
byte[] valueBytes = new byte[valueLength];
vals.getBytes(pointer, valueBytes, valueBytes.length);
if (sizeOfObject != null)
sizeOfObject.set(1 + valueLength);
// For String and byte[] we store the length and the value
if (clazz.equals(String.class))
return new String(valueBytes, Helper.UTF_CS);
else if (clazz.equals(byte[].class))
return valueBytes;
throw new IllegalArgumentException();
} else {
byte[] valueBytes = new byte[getFixLength(clazz)];
vals.getBytes(pointer, valueBytes, valueBytes.length);
if (clazz.equals(Integer.class)) {
if (sizeOfObject != null)
sizeOfObject.set(4);
return f0.toInt(valueBytes, 0);
} else if (clazz.equals(Long.class)) {
if (sizeOfObject != null)
sizeOfObject.set(8);
return f0.toLong(valueBytes, 0);
} else if (clazz.equals(Float.class)) {
            if (sizeOfObject != null)
                sizeOfObject.set(4);
return f0.toFloat(valueBytes, 0);
        } else if (clazz.equals(Double.class)) {
if (sizeOfObject != null)
sizeOfObject.set(8);
return f0.toDouble(valueBytes, 0);
} else {
throw new IllegalArgumentException("unknown class " + clazz);
}
}
}
| 3.26 |
graphhopper_KVStorage_cutString_rdh
|
/**
* This method limits the specified String value to the length currently accepted for values in the KVStorage.
*/
public static String cutString(String value) {
byte[] bytes = value.getBytes(Helper.UTF_CS);
// See #2609 and test why we use a value < 255
return bytes.length > 250 ? new String(bytes, 0, 250, Helper.UTF_CS) : value;
}
| 3.26 |
graphhopper_GpxConversions_calcAzimuth_rdh
|
/**
 * Return the azimuth in degrees based on the first track segment of this instruction. If this
 * instruction contains fewer than two points, the first point of the given next instruction is
 * used instead; if neither is possible, NaN is returned.
*/
public static double calcAzimuth(Instruction instruction, Instruction nextI) {
double nextLat;
double nextLon;
if (instruction.getPoints().size() >= 2) {
nextLat = instruction.getPoints().getLat(1);
nextLon = instruction.getPoints().getLon(1);
} else if ((nextI != null) && (instruction.getPoints().size() == 1)) {
nextLat = nextI.getPoints().getLat(0);
nextLon = nextI.getPoints().getLon(0);
} else {
    return Double.NaN;
}
double lat = instruction.getPoints().getLat(0);
double lon = instruction.getPoints().getLon(0);
return AC.calcAzimuth(lat, lon, nextLat, nextLon);
}
| 3.26 |
graphhopper_GpxConversions_calcDirection_rdh
|
/**
* Return the direction like 'NE' based on the first tracksegment of the instruction. If
* Instruction does not contain enough coordinate points, an empty string will be returned.
*/
public static String calcDirection(Instruction instruction, Instruction nextI) {
double azimuth = calcAzimuth(instruction, nextI);
if (Double.isNaN(azimuth))
    return "";
return AC.azimuth2compassPoint(azimuth);
}
| 3.26 |
graphhopper_AbstractAverageSpeedParser_isValidSpeed_rdh
|
/**
*
* @return <i>true</i> if the given speed is not {@link Double#NaN}
 */
protected static boolean isValidSpeed(double speed) {
return !Double.isNaN(speed);
}
| 3.26 |
graphhopper_AbstractAverageSpeedParser_m0_rdh
|
/**
*
* @return {@link Double#NaN} if no maxspeed found
*/
public static double m0(ReaderWay way, boolean bwd) {
double maxSpeed = OSMValueExtractor.stringToKmh(way.getTag("maxspeed"));
double directedMaxSpeed = OSMValueExtractor.stringToKmh(way.getTag(bwd ? "maxspeed:backward" : "maxspeed:forward"));
    return isValidSpeed(directedMaxSpeed) ? directedMaxSpeed : maxSpeed;
}
| 3.26 |
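A small sketch of the max-speed lookup above (m0 is the obfuscated name used in this snippet); the tag values are made up for illustration.

ReaderWay way = new ReaderWay(1L);
way.setTag("maxspeed", "50");
way.setTag("maxspeed:backward", "30");
double fwd = AbstractAverageSpeedParser.m0(way, false); // 50.0, falls back to the plain maxspeed tag
double bwd = AbstractAverageSpeedParser.m0(way, true);  // 30.0, the directed tag wins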
graphhopper_TurnCost_m0_rdh
|
/**
* This creates an EncodedValue specifically for the turn costs
*/
public static DecimalEncodedValue m0(String name, int maxTurnCosts) {
int turnBits = BitUtil.countBitValue(maxTurnCosts);
    return new DecimalEncodedValueImpl(key(name), turnBits, 0, 1, false, false, true);
}
| 3.26 |
graphhopper_AbstractBidirAlgo_finished_rdh
|
// a node from the overlap may not be on the best path!
// => when scanning an arc (v, w) in the forward search and w was already scanned in the reverse
// search, update the best known path weight μ if df(v) + l(v, w) + dr(w) < μ
protected boolean finished() {
    if (finishedFrom || finishedTo)
        return true;
    return currFrom.weight + currTo.weight >= bestWeight;
}
| 3.26 |
graphhopper_Instruction_setUseRawName_rdh
|
/**
* This method does not perform translation or combination with the sign - it just uses the
* provided name as instruction.
*/
public void setUseRawName() {
    rawName = true;
}
| 3.26 |
graphhopper_Instruction_getSign_rdh
|
/**
* The instruction for the person/driver to execute.
*/
public int getSign() {
    return sign;
}
| 3.26 |
graphhopper_OSMInputFile_setWorkerThreads_rdh
|
/**
* Currently only for pbf format. Default is number of cores.
*/
public OSMInputFile setWorkerThreads(int threads) {
f1 = threads;
return this;
}
| 3.26 |
graphhopper_HeadingResolver_getEdgesWithDifferentHeading_rdh
|
/**
* Returns a list of edge IDs of edges adjacent to the given base node that do *not* have the same or a similar
* heading as the given heading. If for example the tolerance is 45 degrees this method returns all edges for which
* the absolute difference to the given heading is greater than 45 degrees. The heading of an edge is defined as
* the direction of the first segment of an edge (adjacent and facing away from the base node).
*
* @param heading
* north based azimuth, between 0 and 360 degrees
* @see #setTolerance
*/
public IntArrayList getEdgesWithDifferentHeading(int baseNode, double heading) {
double xAxisAngle = AngleCalc.ANGLE_CALC.convertAzimuth2xaxisAngle(heading);
IntArrayList edges = new IntArrayList(1);
EdgeIterator iter = edgeExplorer.setBaseNode(baseNode);
while (iter.next()) {
PointList points = iter.fetchWayGeometry(FetchMode.ALL);
double orientation = AngleCalc.ANGLE_CALC.calcOrientation(points.getLat(0), points.getLon(0), points.getLat(1), points.getLon(1));
orientation = AngleCalc.ANGLE_CALC.alignOrientation(xAxisAngle, orientation);
double diff = Math.abs(orientation - xAxisAngle);
if (diff > toleranceRad)
edges.add(iter.getEdge());
}
return edges;
}
| 3.26 |
graphhopper_DistanceCalcEarth_calcCircumference_rdh
|
/**
 * Circumference of the earth at the given latitude
*/
public double calcCircumference(double lat) {
return ((2 * PI) * R) * cos(toRadians(lat));
}
| 3.26 |
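For orientation, and assuming R is the usual mean earth radius of about 6,371 km: the formula gives 2π·R ≈ 40,030 km at the equator and, since cos(60°) = 0.5, roughly 20,015 km at 60° latitude.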
graphhopper_DistanceCalcEarth_calcDist_rdh
|
/**
* Calculates distance of (from, to) in meter.
* <p>
 * http://en.wikipedia.org/wiki/Haversine_formula
 * a = sin²(Δlat/2) + cos(lat1)·cos(lat2)·sin²(Δlon/2)
 * c = 2·atan2(√a, √(1−a))
 * d = R·c
*/
@Override
public double calcDist(double fromLat, double fromLon, double toLat, double toLon) {
    double normedDist = calcNormalizedDist(fromLat, fromLon, toLat, toLon);
return (R * 2) * asin(sqrt(normedDist));
}
| 3.26 |
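A quick usage sketch with example coordinates (Berlin and Munich), using only the API shown above; the great-circle distance comes out at roughly 504 km.

DistanceCalcEarth calc = new DistanceCalcEarth();
double meters = calc.calcDist(52.5200, 13.4050, 48.1351, 11.5820);
// ~504_000 m; the shared instance DistanceCalcEarth.DIST_EARTH can be used instead of creating a new object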
graphhopper_DistanceCalcEarth_calcNormalizedDist_rdh
|
/**
* Returns the specified length in normalized meter.
*/
@Override
public double calcNormalizedDist(double dist) {
double tmp = sin((dist / 2) / R);
return tmp * tmp;
}
| 3.26 |
graphhopper_InstructionsFromEdges_calcInstructions_rdh
|
/**
*
* @return the list of instructions for this path.
*/
public static InstructionList calcInstructions(Path path, Graph graph, Weighting weighting, EncodedValueLookup evLookup, final Translation tr) {
final InstructionList ways = new InstructionList(tr);
if (path.isFound()) {
if (path.getEdgeCount() == 0) {
ways.add(new FinishInstruction(graph.getNodeAccess(), path.getEndNode()));
} else {
path.forEveryEdge(new InstructionsFromEdges(graph, weighting, evLookup, ways));
}
}
return ways;
}
| 3.26 |
graphhopper_GTFSFeed_fastDistance_rdh
|
/**
*
 * @return Equirectangular approximation to the distance in meters.
*/
public static double fastDistance(double lat0, double lon0, double lat1, double lon1) {
double midLat = (lat0 + lat1) / 2;
double xscale = Math.cos(Math.toRadians(midLat));
double dx = xscale * (lon1 - lon0);
double dy = lat1 - lat0;
return Math.sqrt((dx * dx) + (dy * dy)) * METERS_PER_DEGREE_LATITUDE;
}
| 3.26 |
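A short sketch of the approximation above; it is intended for small distances such as neighbouring stops, where the equirectangular projection error is negligible (coordinates made up).

double approx = GTFSFeed.fastDistance(52.5200, 13.4050, 52.5205, 13.4100);
// a few hundred metres; for stop-to-stop segments this is close enough to the haversine result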
graphhopper_GTFSFeed_getOrderedStopTimesForTrip_rdh
|
/**
* For the given trip ID, fetch all the stop times in order of increasing stop_sequence.
* This is an efficient iteration over a tree map.
*/
public Iterable<StopTime> getOrderedStopTimesForTrip(String trip_id) {
    Map<Fun.Tuple2, StopTime> tripStopTimes = stop_times.subMap(Fun.t2(trip_id, null), Fun.t2(trip_id, Fun.HI));
    return tripStopTimes.values();
}
| 3.26 |
graphhopper_GTFSFeed_loadFromZipfileOrDirectory_rdh
|
/**
* The order in which we load the tables is important for two reasons.
* 1. We must load feed_info first so we know the feed ID before loading any other entities. This could be relaxed
* by having entities point to the feed object rather than its ID String.
* 2. Referenced entities must be loaded before any entities that reference them. This is because we check
* referential integrity while the files are being loaded. This is done on the fly during loading because it allows
* us to associate a line number with errors in objects that don't have any other clear identifier.
*
* Interestingly, all references are resolvable when tables are loaded in alphabetical order.
*/
public void loadFromZipfileOrDirectory(File zip, String fid) throws IOException {
if (this.loaded)
throw new UnsupportedOperationException("Attempt to load GTFS into existing database");
new FeedInfo.Loader(this).loadTable(zip);
// maybe we should just point to the feed object itself instead of its ID, and null out its stoptimes map after loading
if (fid != null) {
    feedId = fid;
    LOG.info("Using explicitly specified feed ID '{}'.", feedId);
} else if (feedId == null || feedId.isEmpty()) {
    feedId = new File(zip.getName()).getName().replaceAll("\\.zip$", "");
    LOG.info("Feed ID is undefined, pester maintainers to include a feed ID. Using file name {}.", feedId); // TODO log an error, ideally feeds should include a feedID
} else {
    LOG.info("Feed ID is '{}'.", feedId);
}
db.getAtomicString("feed_id").set(feedId);
new Agency.Loader(this).loadTable(zip);
if (agency.isEmpty()) {
errors.add(new GeneralError("agency", 0, "agency_id", "Need at least one agency."));
}
// calendars and calendar dates are joined into services. This means a lot of manipulating service objects as
// they are loaded; since mapdb keys/values are immutable, load them in memory then copy them to MapDB once
// we're done loading them
Map<String, Service> v0 = new HashMap<>();
new Calendar.Loader(this, v0).loadTable(zip);
new CalendarDate.Loader(this, v0).loadTable(zip);
this.services.putAll(v0);
v0 = null;// free memory
// Same deal
Map<String, Fare> fares = new HashMap<>();
new FareAttribute.Loader(this, fares).loadTable(zip);
new FareRule.Loader(this, fares).loadTable(zip);
this.fares.putAll(fares);
fares = null;// free memory
new Route.Loader(this).loadTable(zip);
new ShapePoint.Loader(this).loadTable(zip);
new Stop.Loader(this).loadTable(zip);
new Transfer.Loader(this).loadTable(zip);
new Trip.Loader(this).loadTable(zip);
new Frequency.Loader(this).loadTable(zip);
new StopTime.Loader(this).loadTable(zip);
loaded = true;
}
| 3.26 |
graphhopper_GTFSFeed_clone_rdh
|
/**
* Cloning can be useful when you want to make only a few modifications to an existing feed.
* Keep in mind that this is a shallow copy, so you'll have to create new maps in the clone for tables you want
* to modify.
*/
@Override
public GTFSFeed clone() {
try {
return ((GTFSFeed) (super.clone()));
} catch (CloneNotSupportedException e) {
throw new RuntimeException(e);
}
}
| 3.26 |
graphhopper_GTFSFeed_getShape_rdh
|
/**
* Get the shape for the given shape ID
*/
public Shape getShape(String shape_id) {
Shape shape = new Shape(this, shape_id);
return shape.shape_dist_traveled.length > 0 ? shape : null;
}
| 3.26 |
graphhopper_GTFSFeed_getInterpolatedStopTimesForTrip_rdh
|
/**
* For the given trip ID, fetch all the stop times in order, and interpolate stop-to-stop travel times.
*/
public Iterable<StopTime> getInterpolatedStopTimesForTrip(String trip_id) throws FirstAndLastStopsDoNotHaveTimes {
// clone stop times so as not to modify base GTFS structures
    StopTime[] stopTimes = StreamSupport.stream(getOrderedStopTimesForTrip(trip_id).spliterator(), false)
            .map(st -> st.clone())
            .toArray(i -> new StopTime[i]);
// avoid having to make sure that the array has length below.
if (stopTimes.length == 0)
return Collections.emptyList();
// first pass: set all partially filled stop times
for (StopTime st : stopTimes) {
if ((st.arrival_time != Entity.INT_MISSING) && (st.departure_time == Entity.INT_MISSING)) {
st.departure_time = st.arrival_time;
}
    if (st.arrival_time == Entity.INT_MISSING && st.departure_time != Entity.INT_MISSING) {
        st.arrival_time = st.departure_time;
    }
}
// quick check: ensure that first and last stops have times.
// technically GTFS requires that both arrival_time and departure_time be filled at both the first and last stop,
// but we are slightly more lenient and only insist that one of them be filled at both the first and last stop.
// The meaning of the first stop's arrival time is unclear, and same for the last stop's departure time (except
// in the case of interlining).
// it's fine to just check departure time, as the above pass ensures that all stop times have either both
// arrival and departure times, or neither
if ((stopTimes[0].departure_time == Entity.INT_MISSING) || (stopTimes[stopTimes.length - 1].departure_time == Entity.INT_MISSING)) {
throw new FirstAndLastStopsDoNotHaveTimes();
}
// second pass: fill complete stop times
int startOfInterpolatedBlock = -1;
for (int stopTime = 0; stopTime < stopTimes.length; stopTime++) {
if ((stopTimes[stopTime].departure_time == Entity.INT_MISSING) && (startOfInterpolatedBlock == (-1))) {
startOfInterpolatedBlock = stopTime;
    } else if (stopTimes[stopTime].departure_time != Entity.INT_MISSING && startOfInterpolatedBlock != -1) {
// we have found the end of the interpolated section
int nInterpolatedStops = stopTime - startOfInterpolatedBlock;
double totalLengthOfInterpolatedSection = 0;
        double[] lengthOfInterpolatedSections = new double[nInterpolatedStops];
        for (int stopTimeToInterpolate = startOfInterpolatedBlock, i = 0; stopTimeToInterpolate < stopTime; stopTimeToInterpolate++, i++) {
            Stop start = stops.get(stopTimes[stopTimeToInterpolate - 1].stop_id);
            Stop end = stops.get(stopTimes[stopTimeToInterpolate].stop_id);
            double segLen = fastDistance(start.stop_lat, start.stop_lon, end.stop_lat, end.stop_lon);
totalLengthOfInterpolatedSection += segLen;
lengthOfInterpolatedSections[i] = segLen;
}
// add the segment post-last-interpolated-stop
Stop start = stops.get(stopTimes[stopTime - 1].stop_id);
Stop end = stops.get(stopTimes[stopTime].stop_id);
totalLengthOfInterpolatedSection += fastDistance(start.stop_lat, start.stop_lon, end.stop_lat, end.stop_lon);
int departureBeforeInterpolation = stopTimes[startOfInterpolatedBlock - 1].departure_time;
int arrivalAfterInterpolation = stopTimes[stopTime].arrival_time;
int totalTime = arrivalAfterInterpolation - departureBeforeInterpolation;
double lengthSoFar = 0;
for (int stopTimeToInterpolate = startOfInterpolatedBlock, i = 0; stopTimeToInterpolate < stopTime; stopTimeToInterpolate++ , i++) {
lengthSoFar += lengthOfInterpolatedSections[i];
int time = ((int) (departureBeforeInterpolation + (totalTime * (lengthSoFar / totalLengthOfInterpolatedSection))));
stopTimes[stopTimeToInterpolate].arrival_time = stopTimes[stopTimeToInterpolate].departure_time = time;
}
// we're done with this block
startOfInterpolatedBlock = -1;
}
}
return Arrays.asList(stopTimes);
}
| 3.26 |
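A worked example of the second pass (numbers invented): if the stop before an interpolated block departs at t = 600 s, the stop after it arrives at t = 900 s, and the straight-line segments through two intermediate stops measure 100 m, 100 m and 200 m, then totalTime = 300 s and totalLengthOfInterpolatedSection = 400 m. The intermediate stops are assigned t = 600 + 300·(100/400) = 675 s and t = 600 + 300·(200/400) = 750 s, i.e. times proportional to the distance covered so far.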
graphhopper_GTFSFeed_getTripGeometry_rdh
|
/**
* Returns a trip geometry object (LineString) for a given trip id.
* If the trip has a shape reference, this will be used for the geometry.
* Otherwise, the ordered stoptimes will be used.
*
* @param trip_id
* trip id of desired trip geometry
* @return the LineString representing the trip geometry.
* @see LineString
*/
public LineString getTripGeometry(String trip_id) {
CoordinateList coordinates = new CoordinateList();
    LineString ls = null;
Trip trip = trips.get(trip_id);
// If trip has shape_id, use it to generate geometry.
if (trip.shape_id != null) {
Shape shape = getShape(trip.shape_id);
if (shape != null)
ls = shape.geometry;
    }
    // Use the ordered stoptimes.
if (ls == null) {
ls = getStraightLineForStops(trip_id);
}
return ls;
}
| 3.26 |
graphhopper_PrepareEncoder_getScDirMask_rdh
|
/**
* A bitmask for two directions
*/
public static int getScDirMask() {
    return scDirMask;
}
| 3.26 |
graphhopper_AngleCalc_m0_rdh
|
/**
 * Change the representation of an orientation so that the difference to the given baseOrientation
 * is smaller than or equal to PI (180 degrees). This is achieved by adding or subtracting 2*PI,
 * so the direction of the orientation is not changed.
*/
public double m0(double baseOrientation, double orientation) {
double resultOrientation;
if (baseOrientation >= 0) {
if (orientation < ((-Math.PI) + baseOrientation))
resultOrientation = orientation + (2 * Math.PI);
else
resultOrientation = orientation;
} else if (orientation > ((+Math.PI) + baseOrientation))
resultOrientation = orientation - (2 * Math.PI);
else
resultOrientation = orientation;
return resultOrientation;
}
| 3.26 |
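The method is named alignOrientation in the AngleCalc API (see its use in getEdgesWithDifferentHeading above, where m0 is the obfuscated name in this snippet); a small numeric sketch of what it does:

double base = 3.0;    // an orientation just below +PI
double other = -3.0;  // nearly the same direction, but represented just above -PI
double aligned = AngleCalc.ANGLE_CALC.alignOrientation(base, other);
// aligned == -3.0 + 2*PI ≈ 3.2832, so |aligned - base| ≈ 0.28 instead of ≈ 6.0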
graphhopper_AngleCalc_convertAzimuth2xaxisAngle_rdh
|
/**
 * convert a north-based clockwise azimuth [0, 360] into an x-axis/east-based angle (-Pi, Pi)
*/
public double convertAzimuth2xaxisAngle(double azimuth) {
    if (Double.compare(azimuth, 360) > 0 || Double.compare(azimuth, 0) < 0) {
        throw new IllegalArgumentException("Azimuth " + azimuth + " must be in [0, 360]");
}
double angleXY = PI_2 - ((azimuth / 180.0) * Math.PI);
if (angleXY < (-Math.PI))
angleXY += 2 * Math.PI;
if (angleXY > Math.PI)
angleXY -= 2 * Math.PI;
return angleXY;
}
| 3.26 |
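A few sample conversions from compass azimuth to the x-axis based angle used internally, following directly from the formula above:

AngleCalc ac = AngleCalc.ANGLE_CALC;
double east  = ac.convertAzimuth2xaxisAngle(90);   // 0.0: east is the x-axis
double north = ac.convertAzimuth2xaxisAngle(0);    // +PI/2
double south = ac.convertAzimuth2xaxisAngle(180);  // -PI/2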
graphhopper_AngleCalc_calcAzimuth_rdh
|
/**
* Calculate the azimuth in degree for a line given by two coordinates. Direction in 'degree'
* where 0 is north, 90 is east, 180 is south and 270 is west.
*/
public double calcAzimuth(double lat1, double lon1, double lat2, double lon2) {
double orientation = (Math.PI / 2) - calcOrientation(lat1, lon1, lat2, lon2);
if (orientation < 0)
orientation += 2 * Math.PI;
return Math.toDegrees(Helper.round4(orientation)) % 360;
}
| 3.26 |
graphhopper_AngleCalc_calcOrientation_rdh
|
/**
* Return orientation of line relative to east.
* <p>
*
* @param exact
* If false the atan gets calculated faster, but it might contain small errors
* @return Orientation in interval -pi to +pi where 0 is east
*/
public double calcOrientation(double lat1, double lon1, double lat2, double lon2, boolean exact) {
    double shrinkFactor = cos(toRadians((lat1 + lat2) / 2));
if (exact)
return Math.atan2(lat2 - lat1, shrinkFactor * (lon2 - lon1));
else
return atan2(lat2 - lat1, shrinkFactor * (lon2 - lon1));
}
| 3.26 |
graphhopper_PbfRawBlob_getType_rdh
|
/**
* Gets the type of data represented by this blob. This corresponds to the type field in the
* blob header.
* <p>
*
* @return The blob type.
*/
public String getType() {
return type;
}
| 3.26 |