[osrm] 01/04: Imported Upstream version 5.4.2+ds
Bas Couwenberg
sebastic at debian.org
Thu Nov 3 18:13:32 UTC 2016
This is an automated email from the git hooks/post-receive script.
sebastic pushed a commit to branch master
in repository osrm.
commit 87d05decc8dcdbfdb261362d69e9500950bdb578
Author: Bas Couwenberg <sebastic at xs4all.nl>
Date: Thu Nov 3 18:52:21 2016 +0100
Imported Upstream version 5.4.2+ds
---
CHANGELOG.md | 9 +
features/step_definitions/matching.js | 108 +++++----
features/support/data_classes.js | 1 +
features/testbot/matching.feature | 113 ++++++++--
include/engine/api/route_api.hpp | 8 +-
include/engine/guidance/assemble_geometry.hpp | 67 ++++--
include/engine/routing_algorithms/map_matching.hpp | 245 +++++++++++----------
include/engine/routing_algorithms/routing_base.hpp | 3 +-
include/extractor/profile_properties.hpp | 2 +-
src/engine/guidance/assemble_overview.cpp | 54 ++---
src/engine/guidance/post_processing.cpp | 10 +-
src/extractor/guidance/intersection_handler.cpp | 38 +++-
12 files changed, 421 insertions(+), 237 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index a598963..f8b324b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,12 @@
+# 5.4.2
+ - Changes from 5.4.1
+ - Bugfixes
+ - #3032 Fixed a bug that could result in emitting `invalid` as an instruction type on sliproads with mode changes
+ - #3085 Fixed an outdated assertion that could throw without a cause for concern
+ - #3037 Fixed omitting the last coordinate for overview=simplified
+ - #3176 Fixed exposing wrong OSM ids in matching
+ - Fixes splitting logic in map matching
+
# 5.4.1
- Changes from 5.4.0
- Bugfixes
diff --git a/features/step_definitions/matching.js b/features/step_definitions/matching.js
index 41cd0ea..3fe3f39 100644
--- a/features/step_definitions/matching.js
+++ b/features/step_definitions/matching.js
@@ -1,5 +1,6 @@
+'use strict';
+
var util = require('util');
-var d3 = require('d3-queue');
var polyline = require('polyline');
module.exports = function () {
@@ -43,7 +44,28 @@ module.exports = function () {
if (res.statusCode === 200) {
if (headers.has('matchings')) {
- subMatchings = json.matchings.filter(m => !!m).map(sub => sub.matched_points);
+ subMatchings = [];
+
+ // find the first matched
+ let start_index = 0;
+ while (start_index < json.tracepoints.length && json.tracepoints[start_index] === null) start_index++;
+
+ var sub = [];
+ let prev_index = null;
+ for(var i = start_index; i < json.tracepoints.length; i++){
+ if (json.tracepoints[i] === null) continue;
+
+ let current_index = json.tracepoints[i].matchings_index;
+
+ if(prev_index !== current_index) {
+ if (sub.length > 0) subMatchings.push(sub);
+ sub = [];
+ prev_index = current_index;
+ }
+
+ sub.push(json.tracepoints[i].location);
+ }
+ subMatchings.push(sub);
}
if (headers.has('turns')) {
@@ -68,11 +90,11 @@ module.exports = function () {
if (headers.has('geometry')) {
if (json.matchings.length != 1) throw new Error('*** Checking geometry only supported for matchings with one subtrace');
- geometry = json.matchings[0].geometry;
+ geometry = json.matchings[0].geometry.coordinates;
}
if (headers.has('OSM IDs')) {
- if (json.matchings.length != 1) throw new Error('*** CHecking annotation only supported for matchings with one subtrace');
+ if (json.matchings.length != 1) throw new Error('*** Checking annotation only supported for matchings with one subtrace');
OSMIDs = this.OSMIDList(json.matchings[0]);
}
}
@@ -108,59 +130,55 @@ module.exports = function () {
var encodedResult = '',
extendedTarget = '';
- var q = d3.queue();
+ var testSubMatching = (sub, si) => {
+ var testSubNode = (ni) => {
+ var node = this.findNodeByName(sub[ni]),
+ outNode = subMatchings[si][ni];
- var testSubMatching = (sub, si, scb) => {
- if (si >= subMatchings.length) {
- ok = false;
- q.abort();
- scb();
- } else {
- var sq = d3.queue();
-
- var testSubNode = (ni, ncb) => {
- var node = this.findNodeByName(sub[ni]),
- outNode = subMatchings[si][ni];
-
- if (this.FuzzyMatch.matchLocation(outNode, node)) {
- encodedResult += sub[ni];
- extendedTarget += sub[ni];
- } else {
+ if (this.FuzzyMatch.matchLocation(outNode, node)) {
+ encodedResult += sub[ni];
+ extendedTarget += sub[ni];
+ } else {
+ if (outNode != null) {
encodedResult += util.format('? [%s,%s]', outNode[0], outNode[1]);
- extendedTarget += util.format('%s [%d,%d]', node.lat, node.lon);
- ok = false;
+ } else {
+ encodedResult += '?';
}
- ncb();
- };
-
- for (var i=0; i<sub.length; i++) {
- sq.defer(testSubNode, i);
+ extendedTarget += util.format('%s [%d,%d]', node.lat, node.lon);
+ ok = false;
}
+ };
- sq.awaitAll(scb);
+ for (var i=0; i<sub.length; i++) {
+ testSubNode(i);
}
};
- row.matchings.split(',').forEach((sub, si) => {
- q.defer(testSubMatching, sub, si);
- });
+ if (headers.has('matchings')) {
+ if (subMatchings.length != row.matchings.split(',').length) {
+ ok = false;
+ cb(new Error('*** table matchings and api response are not the same'));
+ }
- q.awaitAll(() => {
- if (ok) {
- if (headers.has('matchings')) {
- got.matchings = row.matchings;
- }
+ row.matchings.split(',').forEach((sub, si) => {
+ testSubMatching(sub, si);
+ });
+ }
- if (headers.has('timestamps')) {
- got.timestamps = row.timestamps;
- }
- } else {
- got.matchings = encodedResult;
- row.matchings = extendedTarget;
+ if (ok) {
+ if (headers.has('matchings')) {
+ got.matchings = row.matchings;
+ }
+
+ if (headers.has('timestamps')) {
+ got.timestamps = row.timestamps;
}
+ } else {
+ got.matchings = encodedResult;
+ row.matchings = extendedTarget;
+ }
- cb(null, got);
- });
+ cb(null, got);
};
if (row.request) {
diff --git a/features/support/data_classes.js b/features/support/data_classes.js
index a17141e..2ea8d4e 100644
--- a/features/support/data_classes.js
+++ b/features/support/data_classes.js
@@ -107,6 +107,7 @@ module.exports = {
}
matchLocation (got, want) {
+ if (got == null || want == null) return false;
return this.match(got[0], util.format('%d ~0.0025%', want.lon)) &&
this.match(got[1], util.format('%d ~0.0025%', want.lat));
}
diff --git a/features/testbot/matching.feature b/features/testbot/matching.feature
index ead4b6a..15dc34b 100644
--- a/features/testbot/matching.feature
+++ b/features/testbot/matching.feature
@@ -5,6 +5,8 @@ Feature: Basic Map Matching
Given the profile "testbot"
Given a grid size of 10 meters
Given the extract extra arguments "--generate-edge-lookup"
+ Given the query options
+ | geometries | geojson |
Scenario: Testbot - Map matching with outlier that has no candidate
Given a grid size of 100 meters
@@ -21,7 +23,7 @@ Feature: Basic Map Matching
When I match I should get
| trace | timestamps | matchings |
- | ab1d | 0 1 2 3 | abcd |
+ | ab1d | 0 1 2 3 | ad |
Scenario: Testbot - Map matching with trace splitting
Given the node map
@@ -102,13 +104,13 @@ Feature: Basic Map Matching
| fe | yes |
When I match I should get
- | trace | matchings |
- | dcba | hg,gf,fe |
- | efgh | ab,bc,cd |
+ | trace | matchings |
+ | dcba | hgfe |
+ | efgh | abcd |
Scenario: Testbot - Duration details
Given the query options
- | annotations | true |
+ | annotations | true |
Given the node map
| a | b | c | d | e | | g | h |
@@ -128,23 +130,47 @@ Feature: Basic Map Matching
When I match I should get
| trace | matchings | annotation |
- | abeh | abcedgh | 1:9.897633:1,0:0:0,1:10.008842:0,1:10.008842:0,1:10.008842:0,0:0:0,2:20.017685:0,1:10.008842:0 |
- | abci | abc,ci | 1:9.897633:1,0:0:0,1:10.008842:0,0:0.111209:0,1:10.010367:0 |
+ | abeh | abeh | 1:9.897633:1,0:0:0,1:10.008842:0,1:10.008842:0,1:10.008842:0,0:0:0,2:20.017685:0,1:10.008842:0 |
+ | abci | abci | 1:9.897633:1,0:0:0,1:10.008842:0,0:0.111209:0,1:10.010367:0 |
# The following is the same as the above, but separated for readability (line length)
When I match I should get
| trace | matchings | OSM IDs |
- | abeh | abcedgh | 1,2,3,2,3,4,5,4,5,6,7 |
- | abci | abc,ci | 1,2,3,2,3,8,3,8 |
+ | abeh | abeh | 1,2,3,2,3,4,5,4,5,6,7 |
+ | abci | abci | 1,2,3,2,3,8,3,8 |
+
+ Scenario: Testbot - Regression test for #3037
+ Given the query options
+ | overview | simplified |
+ | geometries | geojson |
+
+ Given the node map
+ | a | | b | | c |
+ | | | | | |
+ | | | | | |
+ | | | | | |
+ | e | | f | | g |
+
+ And the ways
+ | nodes | oneway |
+ | abc | yes |
+ | efg | yes |
+ | ae | yes |
+ | cg | yes |
+ | fb | yes |
+
+ When I match I should get
+ | trace | matchings | geometry |
+ | efbc | efbc | 1,0.99964,1.000178,0.99964,1.000178,1,1.000359,1 |
Scenario: Testbot - Geometry details
Given the query options
| overview | full |
- | geometries | polyline |
+ | geometries | geojson |
Given the node map
- | a | b | c |
- | | d | |
+ | a | | b | | c |
+ | | | d | | |
And the ways
| nodes | oneway |
@@ -152,5 +178,64 @@ Feature: Basic Map Matching
| bd | no |
When I match I should get
- | trace | matchings | geometry |
- | abd | abd | 1,1,1,1.00009,1,1.00009,0.99991,1.00009 |
+ | trace | matchings | geometry |
+ | abd | abd | 1,1,1.000179,1,1.000178,1,1.000178,0.99991 |
+
+ # Regression test 1 for issue 3176
+ Scenario: Testbot - multiple segments: properly expose OSM IDs
+ Given the query options
+ | annotations | true |
+
+ Given the node map
+ | a | 1 | b | | c | | d | | e | | f | 2 | g |
+
+ And the nodes
+ | node | id |
+ | a | 1 |
+ | b | 2 |
+ | c | 3 |
+ | d | 4 |
+ | e | 5 |
+ | f | 6 |
+ | g | 7 |
+
+ And the ways
+ | nodes | oneway |
+ | ab | no |
+ | bc | no |
+ | cd | no |
+ | de | no |
+ | ef | no |
+ | fg | no |
+
+ When I match I should get
+ | trace | OSM IDs |
+ | 12 | 1,2,3,4,5,6,7 |
+ | 21 | 7,6,5,4,3,2,1 |
+
+ # Regression test 2 for issue 3176
+ Scenario: Testbot - same edge: properly expose OSM IDs
+ Given the query options
+ | annotations | true |
+
+ Given the node map
+ | a | 1 | b | | c | | d | | e | 2 | f |
+
+ And the nodes
+ | node | id |
+ | a | 1 |
+ | b | 2 |
+ | c | 3 |
+ | d | 4 |
+ | e | 5 |
+ | f | 6 |
+
+ And the ways
+ | nodes | oneway |
+ | abcdef | no |
+
+ When I match I should get
+ | trace | OSM IDs |
+ | 12 | 1,2,3,4,5,6 |
+ | 21 | 6,5,4,3,2,1 |
+
diff --git a/include/engine/api/route_api.hpp b/include/engine/api/route_api.hpp
index 0ef32a1..95a4056 100644
--- a/include/engine/api/route_api.hpp
+++ b/include/engine/api/route_api.hpp
@@ -94,8 +94,12 @@ class RouteAPI : public BaseAPI
const bool reversed_source = source_traversed_in_reverse[idx];
const bool reversed_target = target_traversed_in_reverse[idx];
- auto leg_geometry = guidance::assembleGeometry(
- BaseAPI::facade, path_data, phantoms.source_phantom, phantoms.target_phantom);
+ auto leg_geometry = guidance::assembleGeometry(BaseAPI::facade,
+ path_data,
+ phantoms.source_phantom,
+ phantoms.target_phantom,
+ reversed_source,
+ reversed_target);
auto leg = guidance::assembleLeg(facade,
path_data,
leg_geometry,
diff --git a/include/engine/guidance/assemble_geometry.hpp b/include/engine/guidance/assemble_geometry.hpp
index fb9e8fa..14e6b5a 100644
--- a/include/engine/guidance/assemble_geometry.hpp
+++ b/include/engine/guidance/assemble_geometry.hpp
@@ -35,7 +35,9 @@ namespace guidance
inline LegGeometry assembleGeometry(const datafacade::BaseDataFacade &facade,
const std::vector<PathData> &leg_data,
const PhantomNode &source_node,
- const PhantomNode &target_node)
+ const PhantomNode &target_node,
+ const bool reversed_source,
+ const bool reversed_target)
{
LegGeometry geometry;
@@ -43,16 +45,30 @@ inline LegGeometry assembleGeometry(const datafacade::BaseDataFacade &facade,
geometry.segment_offsets.push_back(0);
geometry.locations.push_back(source_node.location);
- // Need to get the node ID preceding the source phantom node
- // TODO: check if this was traversed in reverse?
- std::vector<NodeID> reverse_geometry;
- facade.GetUncompressedGeometry(source_node.reverse_packed_geometry_id, reverse_geometry);
- geometry.osm_node_ids.push_back(facade.GetOSMNodeIDOfNode(
- reverse_geometry[reverse_geometry.size() - source_node.fwd_segment_position - 1]));
-
- std::vector<uint8_t> forward_datasource_vector;
- facade.GetUncompressedDatasources(source_node.forward_packed_geometry_id,
- forward_datasource_vector);
+ // u * v
+ // 0 -- 1 -- 2 -- 3
+ // fwd_segment_position: 1
+ // source node fwd: 1 1 -> 2 -> 3
+ // source node rev: 2 0 <- 1 <- 2
+ const auto source_segment_start_coordinate =
+ source_node.fwd_segment_position + (reversed_source ? 1 : 0);
+
+ // we don't save the first node id in the forward geometry, we need to get it as last coordinate from the reverse
+ // geometry
+ if (source_segment_start_coordinate == 0)
+ {
+ std::vector<NodeID> source_geometry;
+ facade.GetUncompressedGeometry(source_node.reverse_packed_geometry_id, source_geometry);
+ geometry.osm_node_ids.push_back(
+ facade.GetOSMNodeIDOfNode(source_geometry.back()));
+ }
+ else
+ {
+ std::vector<NodeID> source_geometry;
+ facade.GetUncompressedGeometry(source_node.forward_packed_geometry_id, source_geometry);
+ geometry.osm_node_ids.push_back(
+ facade.GetOSMNodeIDOfNode(source_geometry[source_segment_start_coordinate - 1]));
+ }
auto cumulative_distance = 0.;
auto current_distance = 0.;
@@ -94,12 +110,29 @@ inline LegGeometry assembleGeometry(const datafacade::BaseDataFacade &facade,
geometry.segment_offsets.push_back(geometry.locations.size());
geometry.locations.push_back(target_node.location);
- // Need to get the node ID following the destination phantom node
- // TODO: check if this was traversed in reverse??
- std::vector<NodeID> forward_geometry;
- facade.GetUncompressedGeometry(target_node.forward_packed_geometry_id, forward_geometry);
- geometry.osm_node_ids.push_back(
- facade.GetOSMNodeIDOfNode(forward_geometry[target_node.fwd_segment_position]));
+ // u * v
+ // 0 -- 1 -- 2 -- 3
+ // fwd_segment_position: 1
+ // target node fwd: 2 0 -> 1 -> 2
+ // target node rev: 1 1 <- 2 <- 3
+ const auto target_segment_end_coordinate =
+ target_node.fwd_segment_position + (reversed_target ? 0 : 1);
+ // we don't save the first node id in the forward geometry, we need to get it as last coordinate from the reverse
+ // geometry
+ if (target_segment_end_coordinate == 0)
+ {
+ std::vector<NodeID> target_geometry;
+ facade.GetUncompressedGeometry(target_node.reverse_packed_geometry_id, target_geometry);
+ geometry.osm_node_ids.push_back(
+ facade.GetOSMNodeIDOfNode(target_geometry.back()));
+ }
+ else
+ {
+ std::vector<NodeID> target_geometry;
+ facade.GetUncompressedGeometry(target_node.forward_packed_geometry_id, target_geometry);
+ geometry.osm_node_ids.push_back(
+ facade.GetOSMNodeIDOfNode(target_geometry[target_segment_end_coordinate - 1]));
+ }
BOOST_ASSERT(geometry.segment_distances.size() == geometry.segment_offsets.size() - 1);
BOOST_ASSERT(geometry.locations.size() > geometry.segment_distances.size());
diff --git a/include/engine/routing_algorithms/map_matching.hpp b/include/engine/routing_algorithms/map_matching.hpp
index 343e26a..a0029fb 100644
--- a/include/engine/routing_algorithms/map_matching.hpp
+++ b/include/engine/routing_algorithms/map_matching.hpp
@@ -176,24 +176,133 @@ class MapMatching final : public BasicRoutingInterface<DataFacadeT, MapMatching<
prev_unbroken_timestamps.push_back(initial_timestamp);
for (auto t = initial_timestamp + 1; t < candidates_list.size(); ++t)
{
- // breakage recover has removed all previous good points
- bool trace_split = prev_unbroken_timestamps.empty();
- // use temporal information if available to determine a split
- if (use_timestamps)
- {
- trace_split =
- trace_split ||
- (trace_timestamps[t] - trace_timestamps[prev_unbroken_timestamps.back()] >
- max_broken_time);
- }
- else
+ const bool gap_in_trace = [&, use_timestamps]() {
+ // use temporal information if available to determine a split
+ if (use_timestamps)
+ {
+ return trace_timestamps[t] - trace_timestamps[prev_unbroken_timestamps.back()] >
+ max_broken_time;
+ }
+ else
+ {
+ return t - prev_unbroken_timestamps.back() > MAX_BROKEN_STATES;
+ }
+ }();
+
+ if (!gap_in_trace)
{
- trace_split =
- trace_split || (t - prev_unbroken_timestamps.back() > MAX_BROKEN_STATES);
+ BOOST_ASSERT(!prev_unbroken_timestamps.empty());
+ const std::size_t prev_unbroken_timestamp = prev_unbroken_timestamps.back();
+
+ const auto &prev_viterbi = model.viterbi[prev_unbroken_timestamp];
+ const auto &prev_pruned = model.pruned[prev_unbroken_timestamp];
+ const auto &prev_unbroken_timestamps_list =
+ candidates_list[prev_unbroken_timestamp];
+ const auto &prev_coordinate = trace_coordinates[prev_unbroken_timestamp];
+
+ auto ¤t_viterbi = model.viterbi[t];
+ auto ¤t_pruned = model.pruned[t];
+ auto ¤t_parents = model.parents[t];
+ auto ¤t_lengths = model.path_distances[t];
+ const auto ¤t_timestamps_list = candidates_list[t];
+ const auto ¤t_coordinate = trace_coordinates[t];
+
+ const auto haversine_distance = util::coordinate_calculation::haversineDistance(
+ prev_coordinate, current_coordinate);
+ // assumes minimum of 0.1 m/s
+ const int duration_upper_bound =
+ ((haversine_distance + max_distance_delta) * 0.25) * 10;
+
+ // compute d_t for this timestamp and the next one
+ for (const auto s : util::irange<std::size_t>(0UL, prev_viterbi.size()))
+ {
+ if (prev_pruned[s])
+ {
+ continue;
+ }
+
+ for (const auto s_prime :
+ util::irange<std::size_t>(0UL, current_viterbi.size()))
+ {
+ const double emission_pr = emission_log_probabilities[t][s_prime];
+ double new_value = prev_viterbi[s] + emission_pr;
+ if (current_viterbi[s_prime] > new_value)
+ {
+ continue;
+ }
+
+ forward_heap.Clear();
+ reverse_heap.Clear();
+
+ double network_distance;
+ if (super::facade->GetCoreSize() > 0)
+ {
+ forward_core_heap.Clear();
+ reverse_core_heap.Clear();
+ network_distance = super::GetNetworkDistanceWithCore(
+ forward_heap,
+ reverse_heap,
+ forward_core_heap,
+ reverse_core_heap,
+ prev_unbroken_timestamps_list[s].phantom_node,
+ current_timestamps_list[s_prime].phantom_node,
+ duration_upper_bound);
+ }
+ else
+ {
+ network_distance = super::GetNetworkDistance(
+ forward_heap,
+ reverse_heap,
+ prev_unbroken_timestamps_list[s].phantom_node,
+ current_timestamps_list[s_prime].phantom_node);
+ }
+
+ // get distance diff between loc1/2 and locs/s_prime
+ const auto d_t = std::abs(network_distance - haversine_distance);
+
+ // very low probability transition -> prune
+ if (d_t >= max_distance_delta)
+ {
+ continue;
+ }
+
+ const double transition_pr = transition_log_probability(d_t);
+ new_value += transition_pr;
+
+ if (new_value > current_viterbi[s_prime])
+ {
+ current_viterbi[s_prime] = new_value;
+ current_parents[s_prime] = std::make_pair(prev_unbroken_timestamp, s);
+ current_lengths[s_prime] = network_distance;
+ current_pruned[s_prime] = false;
+ model.breakage[t] = false;
+ }
+ }
+ }
+
+ if (model.breakage[t])
+ {
+ // save start of breakage -> we need this as split point
+ if (t < breakage_begin)
+ {
+ breakage_begin = t;
+ }
+
+ BOOST_ASSERT(prev_unbroken_timestamps.size() > 0);
+ // remove both ends of the breakage
+ prev_unbroken_timestamps.pop_back();
+ }
+ else
+ {
+ prev_unbroken_timestamps.push_back(t);
+ }
}
- if (trace_split)
+ // breakage recover has removed all previous good points
+ const bool trace_split = prev_unbroken_timestamps.empty();
+
+ if (trace_split || gap_in_trace)
{
std::size_t split_index = t;
if (breakage_begin != map_matching::INVALID_STATE)
@@ -217,111 +326,9 @@ class MapMatching final : public BasicRoutingInterface<DataFacadeT, MapMatching<
// Important: We potentially go back here!
// However since t > new_start >= breakge_begin
// we can only reset trace_coordindates.size() times.
- t = new_start + 1;
- }
-
- BOOST_ASSERT(!prev_unbroken_timestamps.empty());
- const std::size_t prev_unbroken_timestamp = prev_unbroken_timestamps.back();
-
- const auto &prev_viterbi = model.viterbi[prev_unbroken_timestamp];
- const auto &prev_pruned = model.pruned[prev_unbroken_timestamp];
- const auto &prev_unbroken_timestamps_list = candidates_list[prev_unbroken_timestamp];
- const auto &prev_coordinate = trace_coordinates[prev_unbroken_timestamp];
-
- auto ¤t_viterbi = model.viterbi[t];
- auto ¤t_pruned = model.pruned[t];
- auto ¤t_parents = model.parents[t];
- auto ¤t_lengths = model.path_distances[t];
- const auto ¤t_timestamps_list = candidates_list[t];
- const auto ¤t_coordinate = trace_coordinates[t];
-
- const auto haversine_distance = util::coordinate_calculation::haversineDistance(
- prev_coordinate, current_coordinate);
- // assumes minumum of 0.1 m/s
- const int duration_uppder_bound =
- ((haversine_distance + max_distance_delta) * 0.25) * 10;
-
- // compute d_t for this timestamp and the next one
- for (const auto s : util::irange<std::size_t>(0UL, prev_viterbi.size()))
- {
- if (prev_pruned[s])
- {
- continue;
- }
-
- for (const auto s_prime : util::irange<std::size_t>(0UL, current_viterbi.size()))
- {
- const double emission_pr = emission_log_probabilities[t][s_prime];
- double new_value = prev_viterbi[s] + emission_pr;
- if (current_viterbi[s_prime] > new_value)
- {
- continue;
- }
-
- forward_heap.Clear();
- reverse_heap.Clear();
-
- double network_distance;
- if (super::facade->GetCoreSize() > 0)
- {
- forward_core_heap.Clear();
- reverse_core_heap.Clear();
- network_distance = super::GetNetworkDistanceWithCore(
- forward_heap,
- reverse_heap,
- forward_core_heap,
- reverse_core_heap,
- prev_unbroken_timestamps_list[s].phantom_node,
- current_timestamps_list[s_prime].phantom_node,
- duration_uppder_bound);
- }
- else
- {
- network_distance = super::GetNetworkDistance(
- forward_heap,
- reverse_heap,
- prev_unbroken_timestamps_list[s].phantom_node,
- current_timestamps_list[s_prime].phantom_node);
- }
-
- // get distance diff between loc1/2 and locs/s_prime
- const auto d_t = std::abs(network_distance - haversine_distance);
-
- // very low probability transition -> prune
- if (d_t >= max_distance_delta)
- {
- continue;
- }
-
- const double transition_pr = transition_log_probability(d_t);
- new_value += transition_pr;
-
- if (new_value > current_viterbi[s_prime])
- {
- current_viterbi[s_prime] = new_value;
- current_parents[s_prime] = std::make_pair(prev_unbroken_timestamp, s);
- current_lengths[s_prime] = network_distance;
- current_pruned[s_prime] = false;
- model.breakage[t] = false;
- }
- }
- }
-
- if (model.breakage[t])
- {
- // save start of breakage -> we need this as split point
- if (t < breakage_begin)
- {
- breakage_begin = t;
- }
-
- BOOST_ASSERT(prev_unbroken_timestamps.size() > 0);
- // remove both ends of the breakage
- prev_unbroken_timestamps.pop_back();
- }
- else
- {
- prev_unbroken_timestamps.push_back(t);
+ t = new_start;
+ // note: the head of the loop will call ++t, hence the next
+ // iteration will actually be on new_start+1
}
}
diff --git a/include/engine/routing_algorithms/routing_base.hpp b/include/engine/routing_algorithms/routing_base.hpp
index 6951209..cec5bef 100644
--- a/include/engine/routing_algorithms/routing_base.hpp
+++ b/include/engine/routing_algorithms/routing_base.hpp
@@ -215,12 +215,13 @@ template <class DataFacadeT, class Derived> class BasicRoutingInterface
const PhantomNodes &phantom_node_pair,
std::vector<PathData> &unpacked_path) const
{
+ BOOST_ASSERT(std::distance(packed_path_begin, packed_path_end) > 0);
+
const bool start_traversed_in_reverse =
(*packed_path_begin != phantom_node_pair.source_phantom.forward_segment_id.id);
const bool target_traversed_in_reverse =
(*std::prev(packed_path_end) != phantom_node_pair.target_phantom.forward_segment_id.id);
- BOOST_ASSERT(std::distance(packed_path_begin, packed_path_end) > 0);
std::stack<std::pair<NodeID, NodeID>> recursion_stack;
// We have to push the path in reverse order onto the stack because it's LIFO.
diff --git a/include/extractor/profile_properties.hpp b/include/extractor/profile_properties.hpp
index 58a191d..16e2d36 100644
--- a/include/extractor/profile_properties.hpp
+++ b/include/extractor/profile_properties.hpp
@@ -12,7 +12,7 @@ struct ProfileProperties
{
ProfileProperties()
: traffic_signal_penalty(0), u_turn_penalty(0), continue_straight_at_waypoint(true),
- use_turn_restrictions(false), left_hand_driving(false)
+ use_turn_restrictions(false), left_hand_driving(false)
{
}
diff --git a/src/engine/guidance/assemble_overview.cpp b/src/engine/guidance/assemble_overview.cpp
index 7070435..33921be 100644
--- a/src/engine/guidance/assemble_overview.cpp
+++ b/src/engine/guidance/assemble_overview.cpp
@@ -43,37 +43,11 @@ unsigned calculateOverviewZoomLevel(const std::vector<LegGeometry> &leg_geometri
return util::viewport::getFittedZoom(south_west, north_east);
}
-std::vector<util::Coordinate> simplifyGeometry(const std::vector<LegGeometry> &leg_geometries,
- const unsigned zoom_level)
-{
- std::vector<util::Coordinate> overview_geometry;
- auto leg_index = 0UL;
- for (const auto &geometry : leg_geometries)
- {
- auto simplified_geometry =
- douglasPeucker(geometry.locations.begin(), geometry.locations.end(), zoom_level);
- // not the last leg
- if (leg_index < leg_geometries.size() - 1)
- {
- simplified_geometry.pop_back();
- }
- overview_geometry.insert(
- overview_geometry.end(), simplified_geometry.begin(), simplified_geometry.end());
- }
- return overview_geometry;
-}
}
std::vector<util::Coordinate> assembleOverview(const std::vector<LegGeometry> &leg_geometries,
const bool use_simplification)
{
- if (use_simplification)
- {
- const auto zoom_level = std::min(18u, calculateOverviewZoomLevel(leg_geometries));
- return simplifyGeometry(leg_geometries, zoom_level);
- }
- BOOST_ASSERT(!use_simplification);
-
auto overview_size =
std::accumulate(leg_geometries.begin(),
leg_geometries.end(),
@@ -85,16 +59,34 @@ std::vector<util::Coordinate> assembleOverview(const std::vector<LegGeometry> &l
std::vector<util::Coordinate> overview_geometry;
overview_geometry.reserve(overview_size);
+ using GeometryIter = decltype(overview_geometry)::const_iterator;
+
auto leg_reverse_index = leg_geometries.size();
- for (const auto &geometry : leg_geometries)
- {
- auto begin = geometry.locations.begin();
- auto end = geometry.locations.end();
- if (--leg_reverse_index > 0)
+ const auto insert_without_overlap = [&leg_reverse_index, &overview_geometry](GeometryIter begin, GeometryIter end) {
+ // not the last leg
+ if (leg_reverse_index > 1)
{
+ --leg_reverse_index;
end = std::prev(end);
}
overview_geometry.insert(overview_geometry.end(), begin, end);
+ };
+
+ if (use_simplification)
+ {
+ const auto zoom_level = std::min(18u, calculateOverviewZoomLevel(leg_geometries));
+ for (const auto &geometry : leg_geometries)
+ {
+ const auto simplified = douglasPeucker(geometry.locations.begin(), geometry.locations.end(), zoom_level);
+ insert_without_overlap(simplified.begin(), simplified.end());
+ }
+ }
+ else
+ {
+ for (const auto &geometry : leg_geometries)
+ {
+ insert_without_overlap(geometry.locations.begin(), geometry.locations.end());
+ }
}
return overview_geometry;
diff --git a/src/engine/guidance/post_processing.cpp b/src/engine/guidance/post_processing.cpp
index 7f02b66..21f7a3d 100644
--- a/src/engine/guidance/post_processing.cpp
+++ b/src/engine/guidance/post_processing.cpp
@@ -1,5 +1,5 @@
-#include "engine/guidance/post_processing.hpp"
#include "extractor/guidance/turn_instruction.hpp"
+#include "engine/guidance/post_processing.hpp"
#include "engine/guidance/assemble_steps.hpp"
#include "engine/guidance/lane_processing.hpp"
@@ -260,7 +260,8 @@ void closeOffRoundabout(const bool on_roundabout,
BOOST_ASSERT(leavesRoundabout(steps[1].maneuver.instruction) ||
steps[1].maneuver.instruction.type == TurnType::StayOnRoundabout ||
steps[1].maneuver.instruction.type == TurnType::Suppressed ||
- steps[1].maneuver.instruction.type == TurnType::NoTurn);
+ steps[1].maneuver.instruction.type == TurnType::NoTurn ||
+ steps[1].maneuver.instruction.type == TurnType::UseLane);
steps[0].geometry_end = 1;
steps[1].geometry_begin = 0;
steps[1] = forwardInto(steps[1], steps[0]);
@@ -838,6 +839,11 @@ std::vector<RouteStep> collapseTurns(std::vector<RouteStep> steps)
::osrm::util::guidance::getTurnDirection(angle);
invalidateStep(steps[step_index]);
}
+ else
+ {
+ // the sliproad turn is incompatible. So we handle it as a turn
+ steps[one_back_index].maneuver.instruction.type = TurnType::Turn;
+ }
}
}
// Due to empty segments, we can get name-changes from A->A
diff --git a/src/extractor/guidance/intersection_handler.cpp b/src/extractor/guidance/intersection_handler.cpp
index 8d80138..5ab1899 100644
--- a/src/extractor/guidance/intersection_handler.cpp
+++ b/src/extractor/guidance/intersection_handler.cpp
@@ -510,17 +510,45 @@ std::size_t IntersectionHandler::findObviousTurn(const EdgeID via_edge,
!best_data.road_classification.IsRampClass()))
{
// Find left/right deviation
- const double left_deviation = angularDeviation(
- intersection[(best + 1) % intersection.size()].turn.angle, STRAIGHT_ANGLE);
+ // skipping over service roads
+ const std::size_t left_index = [&]() {
+ const auto index_candidate = (best + 1) % intersection.size();
+ if (index_candidate == 0)
+ return index_candidate;
+ const auto &candidate_data =
+ node_based_graph.GetEdgeData(intersection[index_candidate].turn.eid);
+ if (obvious_by_road_class(in_data.road_classification,
+ best_data.road_classification,
+ candidate_data.road_classification))
+ return (index_candidate + 1) % intersection.size();
+ else
+ return index_candidate;
+
+ }();
+ const auto right_index = [&]() {
+ BOOST_ASSERT(best > 0);
+ const auto index_candidate = best - 1;
+ if (index_candidate == 0)
+ return index_candidate;
+ const auto candidate_data =
+ node_based_graph.GetEdgeData(intersection[index_candidate].turn.eid);
+ if (obvious_by_road_class(in_data.road_classification,
+ best_data.road_classification,
+ candidate_data.road_classification))
+ return index_candidate - 1;
+ else
+ return index_candidate;
+ }();
+
+ const double left_deviation =
+ angularDeviation(intersection[left_index].turn.angle, STRAIGHT_ANGLE);
const double right_deviation =
- angularDeviation(intersection[best - 1].turn.angle, STRAIGHT_ANGLE);
+ angularDeviation(intersection[right_index].turn.angle, STRAIGHT_ANGLE);
if (best_deviation < MAXIMAL_ALLOWED_NO_TURN_DEVIATION &&
std::min(left_deviation, right_deviation) > FUZZY_ANGLE_DIFFERENCE)
return best;
- const auto left_index = (best + 1) % intersection.size();
- const auto right_index = best - 1;
const auto &left_data = node_based_graph.GetEdgeData(intersection[left_index].turn.eid);
const auto &right_data = node_based_graph.GetEdgeData(intersection[right_index].turn.eid);
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-grass/osrm.git
More information about the Pkg-grass-devel
mailing list