mirror of
git://projects.qi-hardware.com/nanomap.git
synced 2025-04-21 12:27:27 +03:00
add monav plugins from http://code.google.com/p/monav/ and use them to calculate routes
This commit is contained in:
242
monav/contractionhierarchies/binaryheap.h
Normal file
242
monav/contractionhierarchies/binaryheap.h
Normal file
@@ -0,0 +1,242 @@
|
||||
/*
|
||||
Copyright 2010 Christian Vetter veaac.fdirct@gmail.com
|
||||
|
||||
This file is part of MoNav.
|
||||
|
||||
MoNav is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
MoNav is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with MoNav. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
#ifndef BINARYHEAP_H_INCLUDED
|
||||
#define BINARYHEAP_H_INCLUDED
|
||||
|
||||
//Not compatible with non contiguous node ids
|
||||
|
||||
#include <cassert>
|
||||
#include <vector>
|
||||
#include <QHash>
|
||||
|
||||
// Dense NodeID -> Key index backed by a plain array.
// Requires contiguous node ids in [0, size); entries start out
// uninitialized (BinaryHeap::WasInserted guards against reading garbage).
template< typename NodeID, typename Key >
class ArrayStorage {
public:

	// Allocates an (uninitialized) slot for every possible node id.
	ArrayStorage( size_t size ) :
		positions( new Key[size] )
	{
	}

	~ArrayStorage()
	{
		delete[] positions;
	}

	// Unchecked access; node must be < size passed to the constructor.
	Key &operator[]( NodeID node )
	{
		return positions[node];
	}

	// Nothing to reset — stale entries are tolerated by the caller.
	void clear() {}

private:
	// Non-copyable: copying would double-delete "positions".
	// Declared but not defined, matching BinaryHeap's convention.
	ArrayStorage( const ArrayStorage& right );
	void operator=( const ArrayStorage& right );

	Key* positions;
};
|
||||
|
||||
template< typename NodeID, typename Key >
|
||||
class MapStorage {
|
||||
public:
|
||||
|
||||
MapStorage( size_t )
|
||||
{
|
||||
}
|
||||
|
||||
Key &operator[]( NodeID node )
|
||||
{
|
||||
return nodes[node];
|
||||
}
|
||||
|
||||
void clear()
|
||||
{
|
||||
nodes.clear();
|
||||
}
|
||||
|
||||
private:
|
||||
QHash< NodeID, Key > nodes;
|
||||
|
||||
};
|
||||
|
||||
// Addressable binary min-heap (priority queue) with decrease-key,
// as used by Dijkstra-style searches.
// Layout: heap[] is 1-based; heap[0] is a sentinel with weight 0 so
// Upheap() terminates without a bounds check (assumes non-negative weights).
// insertedNodes[] keeps every node ever inserted; a node's key == 0 marks
// it as removed while its weight/data stay queryable.
template < typename NodeID, typename Key, typename Weight, typename Data, typename IndexStorage = ArrayStorage< NodeID, Key > >
class BinaryHeap {
private:
	// Non-copyable: declared but not defined.
	BinaryHeap( const BinaryHeap& right );
	void operator=( const BinaryHeap& right );
public:
	typedef Weight WeightType;
	typedef Data DataType;

	// maxID: exclusive upper bound on node ids, used to size nodeIndex.
	BinaryHeap( size_t maxID )
		: nodeIndex( maxID ) {
		Clear();
	}

	// Empties the heap; re-establishes the weight-0 sentinel at heap[0].
	void Clear() {
		heap.resize( 1 );
		insertedNodes.clear();
		nodeIndex.clear();
		heap[0].weight = 0;
	}

	// Number of elements currently on the heap (sentinel excluded).
	Key Size() const {
		return ( Key )( heap.size() - 1 );
	}

	// Inserts a node; the node must not already be on the heap.
	void Insert( NodeID node, Weight weight, const Data &data ) {
		HeapElement element;
		element.index = ( NodeID ) insertedNodes.size();
		element.weight = weight;
		const Key key = ( Key ) heap.size();
		heap.push_back( element );
		insertedNodes.push_back( HeapNode( node, key, weight, data ) );
		nodeIndex[node] = element.index;
		Upheap( key );
		CheckHeap();
	}

	// Data of an inserted node; valid even after the node was removed.
	Data& GetData( NodeID node ) {
		const Key index = nodeIndex[node];
		return insertedNodes[index].data;
	}

	// Weight of an inserted node (mutable reference).
	Weight& GetKey( NodeID node ) {
		const Key index = nodeIndex[node];
		return insertedNodes[index].weight;
	}

	// True if the node was popped via DeleteMin()/DeleteAll().
	bool WasRemoved( NodeID node ) {
		assert( WasInserted( node ) );
		const Key index = nodeIndex[node];
		return insertedNodes[index].key == 0;
	}

	// True if Insert() was ever called for this node.
	// NOTE: with ArrayStorage, nodeIndex[node] is uninitialized for unseen
	// nodes; the node-equality check below guards against false positives.
	bool WasInserted( NodeID node ) {
		const Key index = nodeIndex[node];
		if ( index >= ( Key ) insertedNodes.size() )
			return false;
		return insertedNodes[index].node == node;
	}

	// Node with minimal weight; heap must be non-empty.
	NodeID Min() const {
		assert( heap.size() > 1 );
		return insertedNodes[heap[1].index].node;
	}

	// Pops the minimum node, marks it removed (key = 0) and returns it.
	NodeID DeleteMin() {
		assert( heap.size() > 1 );
		const Key removedIndex = heap[1].index;
		heap[1] = heap[heap.size()-1];
		heap.pop_back();
		if ( heap.size() > 1 )
			Downheap( 1 );
		insertedNodes[removedIndex].key = 0;
		CheckHeap();
		return insertedNodes[removedIndex].node;
	}

	// Marks every remaining element removed and empties the heap.
	void DeleteAll() {
		for ( typename std::vector< HeapElement >::iterator i = heap.begin() + 1, iend = heap.end(); i != iend; ++i )
			insertedNodes[i->index].key = 0;
		heap.resize( 1 );
		heap[0].weight = 0;
	}


	// Lowers the weight of a node that is still on the heap (key != 0).
	void DecreaseKey( NodeID node, Weight weight ) {
		const Key index = nodeIndex[node];
		Key key = insertedNodes[index].key;
		assert ( key != 0 );

		insertedNodes[index].weight = weight;
		heap[key].weight = weight;
		Upheap( key );
		CheckHeap();
	}

private:
	// Bookkeeping record for every node ever inserted.
	class HeapNode {
	public:
		HeapNode() {
		}
		HeapNode( NodeID n, Key k, Weight w, Data d )
			: node( n ), key( k ), weight( w ), data( d ) {
		}

		NodeID node;
		// Current position in heap[]; 0 means the node was removed.
		Key key;
		Weight weight;
		Data data;
	};
	// Entry in the actual heap array: index into insertedNodes + cached weight.
	struct HeapElement {
		Key index;
		Weight weight;
	};

	std::vector< HeapNode > insertedNodes;
	// 1-based heap; heap[0] is the sentinel.
	std::vector< HeapElement > heap;
	// NodeID -> index into insertedNodes.
	IndexStorage nodeIndex;

	// Sifts the element at "key" down until the min-heap property holds,
	// updating the back-pointers (HeapNode::key) of every moved element.
	void Downheap( Key key ) {
		const Key droppingIndex = heap[key].index;
		const Weight weight = heap[key].weight;
		Key nextKey = key << 1;
		while ( nextKey < ( Key ) heap.size() ) {
			const Key nextKeyOther = nextKey + 1;
			// pick the smaller of the two children
			if ( ( nextKeyOther < ( Key ) heap.size() ) )
				if ( heap[nextKey].weight > heap[nextKeyOther].weight )
					nextKey = nextKeyOther;

			if ( weight <= heap[nextKey].weight )
				break;

			heap[key] = heap[nextKey];
			insertedNodes[heap[key].index].key = key;
			key = nextKey;
			nextKey <<= 1;
		}
		heap[key].index = droppingIndex;
		heap[key].weight = weight;
		insertedNodes[droppingIndex].key = key;
	}

	// Sifts the element at "key" up; the weight-0 sentinel at heap[0]
	// guarantees termination without an explicit bounds check.
	void Upheap( Key key ) {
		const Key risingIndex = heap[key].index;
		const Weight weight = heap[key].weight;
		Key nextKey = key >> 1;
		while ( heap[nextKey].weight > weight ) {
			assert( nextKey != 0 );
			heap[key] = heap[nextKey];
			insertedNodes[heap[key].index].key = key;
			key = nextKey;
			nextKey >>= 1;
		}
		heap[key].index = risingIndex;
		heap[key].weight = weight;
		insertedNodes[risingIndex].key = key;
	}

	// Debug-only invariant check (currently disabled).
	void CheckHeap() {
		/*for ( Key i = 2; i < heap.size(); ++i ) {
			assert( heap[i].weight >= heap[i >> 1].weight );
		}*/
	}
};
|
||||
|
||||
#endif //#ifndef BINARYHEAP_H_INCLUDED
|
||||
162
monav/contractionhierarchies/blockcache.h
Normal file
162
monav/contractionhierarchies/blockcache.h
Normal file
@@ -0,0 +1,162 @@
|
||||
/*
|
||||
Copyright 2010 Christian Vetter veaac.fdirct@gmail.com
|
||||
|
||||
This file is part of MoNav.
|
||||
|
||||
MoNav is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
MoNav is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with MoNav. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
#ifndef BLOCKCACHE_H_INCLUDED
|
||||
#define BLOCKCACHE_H_INCLUDED
|
||||
|
||||
#include <cassert>
#include <limits>

#include <QFile>
#include <QHash>
#include <QtDebug>
|
||||
|
||||
// Block must have member function / variables:
|
||||
// variable id => block id
|
||||
// function void load( const unsigned char* buffer )
|
||||
template< class Block >
|
||||
class BlockCache{
|
||||
|
||||
public:
|
||||
|
||||
BlockCache()
|
||||
{
|
||||
m_cache = NULL;
|
||||
m_LRU = NULL;
|
||||
m_blocks = NULL;
|
||||
}
|
||||
|
||||
bool load( const QString& filename, int cacheBlocks, unsigned blockSize )
|
||||
{
|
||||
m_cacheBlocks = cacheBlocks;
|
||||
m_blockSize = blockSize;
|
||||
m_inputFile.setFileName( filename );
|
||||
if ( !m_inputFile.open( QIODevice::ReadOnly | QIODevice::Unbuffered ) ) {
|
||||
qCritical() << "failed to open file:" << m_inputFile.fileName();
|
||||
return false;
|
||||
}
|
||||
|
||||
m_cache = new unsigned char[( m_cacheBlocks + 1 ) * m_blockSize];
|
||||
m_LRU = new LRUEntry[m_cacheBlocks];
|
||||
m_blocks = new Block[m_cacheBlocks];
|
||||
|
||||
m_firstLoaded = -1;
|
||||
m_lastLoaded = -1;
|
||||
m_loadedCount = 0;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
void unload ( )
|
||||
{
|
||||
m_inputFile.close();
|
||||
if ( m_cache != NULL )
|
||||
delete[] m_cache;
|
||||
if ( m_LRU != NULL )
|
||||
delete[] m_LRU;
|
||||
if ( m_blocks != NULL )
|
||||
delete[] m_blocks;
|
||||
m_cache = NULL;
|
||||
m_LRU = NULL;
|
||||
m_blocks = NULL;
|
||||
m_index.clear();
|
||||
}
|
||||
|
||||
const Block* getBlock( unsigned block )
|
||||
{
|
||||
int cacheID = m_index.value( block, -1 );
|
||||
if ( cacheID == -1 )
|
||||
return loadBlock( block );
|
||||
|
||||
useBlock( cacheID );
|
||||
return m_blocks + cacheID;
|
||||
}
|
||||
|
||||
private:
|
||||
|
||||
const Block* loadBlock( unsigned block )
|
||||
{
|
||||
int freeBlock = m_loadedCount;
|
||||
// cache is full => select least recently used block
|
||||
if ( m_loadedCount == m_cacheBlocks ) {
|
||||
assert ( m_lastLoaded != -1 );
|
||||
freeBlock = m_lastLoaded;
|
||||
m_index.remove( m_blocks[freeBlock].id );
|
||||
useBlock( freeBlock );
|
||||
} else {
|
||||
//insert into the front of the list
|
||||
m_LRU[freeBlock].previousLoaded = -1;
|
||||
m_LRU[freeBlock].nextLoaded = m_firstLoaded;
|
||||
if ( m_firstLoaded != -1 )
|
||||
m_LRU[m_firstLoaded].previousLoaded = freeBlock;
|
||||
if ( m_lastLoaded == -1 )
|
||||
m_lastLoaded = freeBlock;
|
||||
m_firstLoaded = freeBlock;
|
||||
m_loadedCount++;
|
||||
}
|
||||
|
||||
//load block
|
||||
m_inputFile.seek( ( long long ) block * m_blockSize );
|
||||
m_inputFile.read( ( char* ) m_cache + freeBlock * m_blockSize, m_blockSize );
|
||||
m_blocks[freeBlock].load( block, m_cache + freeBlock * m_blockSize );
|
||||
m_index[block] = freeBlock;
|
||||
|
||||
return m_blocks + freeBlock;
|
||||
}
|
||||
|
||||
void useBlock( int cacheID )
|
||||
{
|
||||
assert( m_firstLoaded != -1 );
|
||||
if ( m_firstLoaded == cacheID )
|
||||
return;
|
||||
|
||||
LRUEntry& block = m_LRU[cacheID];
|
||||
|
||||
//remove block from the list to put it into the front
|
||||
if ( block.nextLoaded != -1 )
|
||||
m_LRU[block.nextLoaded].previousLoaded = block.previousLoaded;
|
||||
else
|
||||
m_lastLoaded = block.previousLoaded;
|
||||
|
||||
m_LRU[block.previousLoaded].nextLoaded = block.nextLoaded;
|
||||
|
||||
// insert block into the front
|
||||
m_LRU[m_firstLoaded].previousLoaded = cacheID;
|
||||
block.nextLoaded = m_firstLoaded;
|
||||
block.previousLoaded = -1;
|
||||
m_firstLoaded = cacheID;
|
||||
}
|
||||
|
||||
struct LRUEntry{
|
||||
int nextLoaded;
|
||||
int previousLoaded;
|
||||
};
|
||||
|
||||
Block* m_blocks;
|
||||
LRUEntry* m_LRU;
|
||||
unsigned char* m_cache;
|
||||
int m_firstLoaded;
|
||||
int m_lastLoaded;
|
||||
int m_loadedCount;
|
||||
int m_cacheBlocks;
|
||||
unsigned m_blockSize;
|
||||
QFile m_inputFile;
|
||||
QHash< unsigned, int > m_index;
|
||||
|
||||
};
|
||||
|
||||
#endif // BLOCKCACHE_H_INCLUDED
|
||||
600
monav/contractionhierarchies/compressedgraph.h
Normal file
600
monav/contractionhierarchies/compressedgraph.h
Normal file
@@ -0,0 +1,600 @@
|
||||
/*
|
||||
Copyright 2010 Christian Vetter veaac.fdirct@gmail.com
|
||||
|
||||
This file is part of MoNav.
|
||||
|
||||
MoNav is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
MoNav is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with MoNav. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
#ifndef COMPRESSEDGRAPH_H
|
||||
#define COMPRESSEDGRAPH_H
|
||||
|
||||
#include "interfaces/irouter.h"
|
||||
#include "utils/coordinates.h"
|
||||
#include "utils/bithelpers.h"
|
||||
#include "blockcache.h"
|
||||
#include <QString>
|
||||
#include <QFile>
|
||||
#include <algorithm>
|
||||
#include <vector>
|
||||
|
||||
class CompressedGraph {
|
||||
|
||||
public :
|
||||
|
||||
typedef unsigned NodeIterator;
|
||||
|
||||
protected:
|
||||
|
||||
//TYPES
|
||||
|
||||
struct Block {
|
||||
struct Settings {
|
||||
// adress blocks from the adjacent blocks array
|
||||
unsigned char blockBits;
|
||||
// address an entry in the adjacent blocks array
|
||||
//unsigned char adjacentBlockBits; ==> can be computed from adjacentBlockcount bitsNeeded( count - 1 )
|
||||
// address an internal node with a shortcut's middle
|
||||
//unsigned char internalBits; ==> can be computed from nodeCount bitsNeeded( count - 1 );
|
||||
// address an external node in another block
|
||||
unsigned char externalBits;
|
||||
// address the first edge of a node
|
||||
unsigned char firstEdgeBits;
|
||||
// bits used for the short weight class
|
||||
unsigned char shortWeightBits;
|
||||
// bits uses for the long weight class
|
||||
unsigned char longWeightBits;
|
||||
// bits used for the difference ( x - minX )
|
||||
unsigned char xBits;
|
||||
// bits used for the difference ( y - minY )
|
||||
unsigned char yBits;
|
||||
// minimal x value
|
||||
unsigned minX;
|
||||
// minimal y value
|
||||
unsigned minY;
|
||||
// #nodes => used for the size of firstEdges
|
||||
unsigned nodeCount;
|
||||
// #adjacent blocks => used for the size of adjacentBlocks
|
||||
unsigned adjacentBlockCount;
|
||||
} settings;
|
||||
|
||||
unsigned char adjacentBlockBits;
|
||||
unsigned char internalBits;
|
||||
|
||||
unsigned edges;
|
||||
unsigned adjacentBlocks;
|
||||
unsigned firstEdges;
|
||||
unsigned nodeCoordinates;
|
||||
|
||||
unsigned id;
|
||||
const unsigned char* buffer;
|
||||
|
||||
void load( unsigned id, const unsigned char* buffer )
|
||||
{
|
||||
CompressedGraph::loadBlock( this, id, buffer );
|
||||
}
|
||||
};
|
||||
|
||||
struct PathBlock {
|
||||
|
||||
struct DataItem {
|
||||
unsigned a;
|
||||
unsigned b;
|
||||
|
||||
DataItem()
|
||||
{
|
||||
a = b = 0;
|
||||
}
|
||||
|
||||
DataItem( const IRouter::Node& node )
|
||||
{
|
||||
assert( bits_needed( node.coordinate.x ) < 32 );
|
||||
a = node.coordinate.x << 1;
|
||||
a |= 1;
|
||||
b = node.coordinate.y;
|
||||
}
|
||||
|
||||
DataItem( const IRouter::Edge& description )
|
||||
{
|
||||
a = description.name;
|
||||
a <<= 1;
|
||||
a |= description.branchingPossible ? 1 : 0;
|
||||
a <<= 1;
|
||||
b = description.type;
|
||||
b <<= 16;
|
||||
b |= description.length;
|
||||
b <<= 8;
|
||||
b |= encode_integer< 4, 4 >( description.seconds );
|
||||
}
|
||||
|
||||
bool isNode() const
|
||||
{
|
||||
return ( a & 1 ) == 1;
|
||||
}
|
||||
|
||||
bool isEdge() const
|
||||
{
|
||||
return ( a & 1 ) == 0;
|
||||
}
|
||||
|
||||
IRouter::Node toNode()
|
||||
{
|
||||
IRouter::Node node;
|
||||
node.coordinate = UnsignedCoordinate( a >> 1, b );
|
||||
return node;
|
||||
}
|
||||
|
||||
IRouter::Edge toEdge()
|
||||
{
|
||||
IRouter::Edge edge;
|
||||
edge.name = a >> 2;
|
||||
edge.branchingPossible = ( a & 2 ) == 2;
|
||||
edge.type = b >> 24;
|
||||
edge.length = ( b >> 8 ) & ( ( 1u << 16 ) -1 );
|
||||
edge.seconds = decode_integer< 4, 4 >( b & 255 );
|
||||
return edge;
|
||||
}
|
||||
};
|
||||
|
||||
unsigned id;
|
||||
const unsigned char* buffer;
|
||||
|
||||
void load( unsigned id, const unsigned char* buffer )
|
||||
{
|
||||
CompressedGraph::loadPathBlock( this, id, buffer );
|
||||
}
|
||||
};
|
||||
|
||||
public:
|
||||
|
||||
// TYPES
|
||||
|
||||
struct Edge {
|
||||
NodeIterator source;
|
||||
NodeIterator target;
|
||||
struct Data {
|
||||
unsigned distance;
|
||||
bool shortcut : 1;
|
||||
bool forward : 1;
|
||||
bool backward : 1;
|
||||
bool unpacked : 1;
|
||||
bool reversed : 1;
|
||||
union {
|
||||
NodeIterator middle;
|
||||
unsigned id;
|
||||
};
|
||||
unsigned path;
|
||||
} data;
|
||||
|
||||
bool operator<( const Edge& right ) const {
|
||||
if ( source != right.source )
|
||||
return source < right.source;
|
||||
int l = ( data.forward ? -1 : 0 ) + ( data.backward ? 1 : 0 );
|
||||
int r = ( right.data.forward ? -1 : 0 ) + ( right.data.backward ? 1 : 0 );
|
||||
if ( l != r )
|
||||
return l < r;
|
||||
if ( target != right.target )
|
||||
return target < right.target;
|
||||
return data.distance < right.data.distance;
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
class EdgeIterator {
|
||||
|
||||
friend class CompressedGraph;
|
||||
|
||||
public:
|
||||
|
||||
EdgeIterator()
|
||||
{
|
||||
}
|
||||
|
||||
bool hasEdgesLeft()
|
||||
{
|
||||
return m_position < m_end;
|
||||
}
|
||||
|
||||
NodeIterator target() const { return m_target; }
|
||||
bool forward() const { return m_data.forward; }
|
||||
bool backward() const { return m_data.backward; }
|
||||
bool shortcut() const { return m_data.shortcut; }
|
||||
bool unpacked() const { return m_data.unpacked; }
|
||||
NodeIterator middle() const { return m_data.middle; }
|
||||
unsigned distance() const { return m_data.distance; }
|
||||
IRouter::Edge description() const { return IRouter::Edge( m_data.description.nameID, m_data.description.branchingPossible, m_data.description.type, 1, ( m_data.distance + 5 ) / 10 ); }
|
||||
#ifdef NDEBUG
|
||||
private:
|
||||
#endif
|
||||
|
||||
EdgeIterator( unsigned source, const Block& block, unsigned position, unsigned end ) :
|
||||
m_block( &block ), m_source( source ), m_position( position ), m_end( end )
|
||||
{
|
||||
}
|
||||
|
||||
const Block* m_block;
|
||||
NodeIterator m_target;
|
||||
NodeIterator m_source;
|
||||
unsigned m_position;
|
||||
unsigned m_end;
|
||||
|
||||
struct EdgeData {
|
||||
unsigned distance;
|
||||
bool shortcut : 1;
|
||||
bool forward : 1;
|
||||
bool backward : 1;
|
||||
bool unpacked : 1;
|
||||
bool reversed : 1;
|
||||
union {
|
||||
NodeIterator middle;
|
||||
struct {
|
||||
unsigned nameID : 30;
|
||||
bool branchingPossible : 1;
|
||||
unsigned type;
|
||||
} description;
|
||||
};
|
||||
unsigned path;
|
||||
} m_data;
|
||||
};
|
||||
|
||||
// FUNCTIONS
|
||||
|
||||
CompressedGraph()
|
||||
{
|
||||
m_loaded = false;
|
||||
}
|
||||
|
||||
~CompressedGraph()
|
||||
{
|
||||
if ( m_loaded )
|
||||
unloadGraph();
|
||||
}
|
||||
|
||||
bool loadGraph( QString filename, unsigned cacheSize )
|
||||
{
|
||||
if ( m_loaded )
|
||||
unloadGraph();
|
||||
QFile settingsFile( filename + "_config" );
|
||||
if ( !settingsFile.open( QIODevice::ReadOnly ) ) {
|
||||
qCritical() << "failed to open file:" << settingsFile.fileName();
|
||||
return false;
|
||||
}
|
||||
m_settings.read( settingsFile );
|
||||
if ( !m_blockCache.load( filename + "_edges", cacheSize / m_settings.blockSize / 2 + 1, m_settings.blockSize ) )
|
||||
return false;
|
||||
if ( !m_pathCache.load( filename + "_paths", cacheSize / m_settings.blockSize / 2 + 1, m_settings.blockSize ) )
|
||||
return false;
|
||||
m_loaded = true;
|
||||
return true;
|
||||
}
|
||||
|
||||
EdgeIterator edges( NodeIterator node )
|
||||
{
|
||||
unsigned blockID = nodeToBlock( node );
|
||||
unsigned internal = nodeToInternal( node );
|
||||
const Block* block = getBlock( blockID );
|
||||
return unpackFirstEdges( *block, internal );
|
||||
}
|
||||
|
||||
EdgeIterator findEdge( NodeIterator source, NodeIterator target, unsigned id )
|
||||
{
|
||||
if ( source < target )
|
||||
std::swap( source, target );
|
||||
EdgeIterator e = edges( source );
|
||||
while ( e.hasEdgesLeft() ) {
|
||||
unpackNextEdge( &e );
|
||||
if ( e.target() != target )
|
||||
continue;
|
||||
if ( e.shortcut() )
|
||||
continue;
|
||||
if ( id != 0 ) {
|
||||
id--;
|
||||
continue;
|
||||
}
|
||||
|
||||
return e;
|
||||
}
|
||||
assert( false );
|
||||
return e;
|
||||
}
|
||||
|
||||
void unpackNextEdge( EdgeIterator* edge )
|
||||
{
|
||||
const Block& block = *edge->m_block;
|
||||
EdgeIterator::EdgeData& edgeData = edge->m_data;
|
||||
const unsigned char* buffer = block.buffer + ( edge->m_position >> 3 );
|
||||
int offset = edge->m_position & 7;
|
||||
|
||||
// forward + backward flag
|
||||
bool forwardAndBackward = read_unaligned_unsigned( &buffer, 1, &offset ) != 0;
|
||||
if ( forwardAndBackward ) {
|
||||
edgeData.forward = true;
|
||||
edgeData.backward = true;
|
||||
} else {
|
||||
edgeData.forward = read_unaligned_unsigned( &buffer, 1, &offset ) != 0;
|
||||
edgeData.backward = !edgeData.forward;
|
||||
}
|
||||
|
||||
// target
|
||||
bool internalTarget = read_unaligned_unsigned( &buffer, 1, &offset ) != 0;
|
||||
if ( internalTarget ) {
|
||||
unsigned target = read_unaligned_unsigned( &buffer, bits_needed( edge->m_source ), &offset );
|
||||
edge->m_target = nodeFromDescriptor( block.id, target );
|
||||
} else {
|
||||
unsigned adjacentBlock = read_unaligned_unsigned( &buffer, block.adjacentBlockBits, &offset );
|
||||
unsigned target = read_unaligned_unsigned( &buffer, block.settings.externalBits, &offset );
|
||||
unsigned adjacentBlockPosition = block.adjacentBlocks + adjacentBlock * block.settings.blockBits;
|
||||
unsigned targetBlock = read_unaligned_unsigned( block.buffer + ( adjacentBlockPosition >> 3 ), block.settings.blockBits, adjacentBlockPosition & 7 );
|
||||
edge->m_target = nodeFromDescriptor( targetBlock, target );
|
||||
}
|
||||
|
||||
// weight
|
||||
bool longWeight = block.settings.shortWeightBits == block.settings.longWeightBits;
|
||||
if ( !longWeight )
|
||||
longWeight = read_unaligned_unsigned( &buffer, 1, &offset ) != 0;
|
||||
edgeData.distance = read_unaligned_unsigned( &buffer, longWeight ? block.settings.longWeightBits : block.settings.shortWeightBits, &offset );
|
||||
|
||||
// unpacked
|
||||
edgeData.unpacked = read_unaligned_unsigned( &buffer, 1, &offset ) != 0;
|
||||
if ( edgeData.unpacked ) {
|
||||
if ( forwardAndBackward )
|
||||
edgeData.reversed = read_unaligned_unsigned( &buffer, 1, &offset ) != 0;
|
||||
else
|
||||
edgeData.reversed = edgeData.backward;
|
||||
edgeData.path = read_unaligned_unsigned( &buffer, m_settings.pathBits, &offset );
|
||||
}
|
||||
|
||||
// shortcut
|
||||
edgeData.shortcut = read_unaligned_unsigned( &buffer, 1, &offset ) != 0;
|
||||
if ( edgeData.shortcut ) {
|
||||
if ( !edgeData.unpacked ) {
|
||||
unsigned middle = read_unaligned_unsigned( &buffer, block.internalBits, &offset );
|
||||
edgeData.middle = nodeFromDescriptor( block.id, middle );
|
||||
}
|
||||
}
|
||||
|
||||
// edge description
|
||||
if ( !edgeData.shortcut && !edgeData.unpacked ) {
|
||||
edgeData.description.type = read_unaligned_unsigned( &buffer, m_settings.typeBits, &offset );
|
||||
edgeData.description.nameID = read_unaligned_unsigned( &buffer, m_settings.nameBits, &offset );
|
||||
edgeData.description.branchingPossible = read_unaligned_unsigned( &buffer, 1, &offset );
|
||||
}
|
||||
|
||||
edge->m_position = ( buffer - block.buffer ) * 8 + offset;
|
||||
}
|
||||
|
||||
IRouter::Node node( NodeIterator node )
|
||||
{
|
||||
unsigned blockID = nodeToBlock( node );
|
||||
unsigned internal = nodeToInternal( node );
|
||||
const Block* block = getBlock( blockID );
|
||||
IRouter::Node result;
|
||||
unpackCoordinates( *block, internal, &result.coordinate );
|
||||
return result;
|
||||
}
|
||||
|
||||
unsigned numberOfNodes() const
|
||||
{
|
||||
return m_settings.numberOfNodes;
|
||||
}
|
||||
|
||||
unsigned numberOfEdges() const
|
||||
{
|
||||
return m_settings.numberOfEdges;
|
||||
}
|
||||
|
||||
template< class T, class S >
|
||||
void path( const EdgeIterator& edge, T path, S edges, bool forward )
|
||||
{
|
||||
assert( edge.unpacked() );
|
||||
unsigned pathBegin = path->size();
|
||||
unsigned edgesBegin = edges->size();
|
||||
int increase = edge.m_data.reversed ? -1 : 1;
|
||||
|
||||
IRouter::Node targetNode = node( edge.target() );
|
||||
unsigned pathID = edge.m_data.path;
|
||||
|
||||
if ( !forward ) {
|
||||
PathBlock::DataItem data = unpackPath( pathID );
|
||||
assert( data.isNode() );
|
||||
path->push_back( data.toNode().coordinate );
|
||||
}
|
||||
|
||||
pathID += increase;
|
||||
|
||||
while( true ) {
|
||||
PathBlock::DataItem data = unpackPath( pathID );
|
||||
if ( data.isEdge() ) {
|
||||
edges->push_back( data.toEdge() );
|
||||
pathID += increase;
|
||||
continue;
|
||||
}
|
||||
assert( data.isNode() );
|
||||
IRouter::Node node = data.toNode();
|
||||
if ( node.coordinate.x == targetNode.coordinate.x && node.coordinate.y == targetNode.coordinate.y )
|
||||
break;
|
||||
path->push_back( node.coordinate );
|
||||
pathID += increase;
|
||||
}
|
||||
|
||||
if ( forward ) {
|
||||
path->push_back( targetNode.coordinate );
|
||||
} else {
|
||||
std::reverse( path->begin() + pathBegin, path->end() );
|
||||
std::reverse( edges->begin() + edgesBegin, edges->end() );
|
||||
}
|
||||
|
||||
assert( edges->size() != ( int ) edgesBegin ); // at least one edge description has to be present
|
||||
}
|
||||
|
||||
protected:
|
||||
|
||||
// TYPES
|
||||
|
||||
struct GlobalSettings {
|
||||
unsigned blockSize;
|
||||
unsigned char internalBits;
|
||||
unsigned char pathBits;
|
||||
unsigned char typeBits;
|
||||
unsigned char nameBits;
|
||||
unsigned numberOfNodes;
|
||||
unsigned numberOfEdges;
|
||||
|
||||
void read( QFile& in )
|
||||
{
|
||||
in.read( ( char* ) &blockSize, sizeof( blockSize ) );
|
||||
in.read( ( char* ) &internalBits, sizeof( internalBits ) );
|
||||
in.read( ( char* ) &pathBits, sizeof( pathBits ) );
|
||||
in.read( ( char* ) &typeBits, sizeof( typeBits ) );
|
||||
in.read( ( char* ) &nameBits, sizeof( nameBits ) );
|
||||
in.read( ( char* ) &numberOfNodes, sizeof( numberOfNodes ) );
|
||||
in.read( ( char* ) &numberOfEdges, sizeof( numberOfEdges ) );
|
||||
}
|
||||
|
||||
void write( QFile& out )
|
||||
{
|
||||
out.write( ( const char* ) &blockSize, sizeof( blockSize ) );
|
||||
out.write( ( const char* ) &internalBits, sizeof( internalBits ) );
|
||||
out.write( ( const char* ) &pathBits, sizeof( pathBits ) );
|
||||
out.write( ( const char* ) &typeBits, sizeof( typeBits ) );
|
||||
out.write( ( const char* ) &nameBits, sizeof( nameBits ) );
|
||||
out.write( ( const char* ) &numberOfNodes, sizeof( numberOfNodes ) );
|
||||
out.write( ( const char* ) &numberOfEdges, sizeof( numberOfEdges ) );
|
||||
}
|
||||
};
|
||||
|
||||
struct nodeDescriptor {
|
||||
unsigned block;
|
||||
unsigned node;
|
||||
};
|
||||
|
||||
// FUNCTIONS
|
||||
|
||||
PathBlock::DataItem unpackPath( unsigned position ) {
|
||||
unsigned blockID = position / ( m_settings.blockSize / 8 );
|
||||
unsigned internal = ( position % ( m_settings.blockSize / 8 ) ) * 8;
|
||||
const PathBlock* block = getPathBlock( blockID );
|
||||
PathBlock::DataItem data;
|
||||
data.a = *( ( unsigned* ) ( block->buffer + internal ) );
|
||||
data.b = *( ( unsigned* ) ( block->buffer + internal + 4 ) );
|
||||
return data;
|
||||
}
|
||||
|
||||
void unpackCoordinates( const Block& block, unsigned node, UnsignedCoordinate* result )
|
||||
{
|
||||
unsigned position = block.nodeCoordinates + ( block.settings.xBits + block.settings.yBits ) * node;
|
||||
const unsigned char* buffer = block.buffer + ( position >> 3 );
|
||||
int offset = position & 7;
|
||||
result->x = read_unaligned_unsigned( &buffer, block.settings.xBits, &offset ) + block.settings.minX;
|
||||
result->y = read_unaligned_unsigned( buffer, block.settings.yBits, offset ) + block.settings.minY;
|
||||
}
|
||||
|
||||
EdgeIterator unpackFirstEdges( const Block& block, unsigned node )
|
||||
{
|
||||
unsigned position = block.firstEdges + block.settings.firstEdgeBits * node;
|
||||
const unsigned char* buffer = block.buffer + ( position >> 3 );
|
||||
int offset = position & 7;
|
||||
unsigned begin = read_unaligned_unsigned( &buffer, block.settings.firstEdgeBits, &offset );
|
||||
unsigned end = read_unaligned_unsigned( buffer, block.settings.firstEdgeBits, offset );
|
||||
return EdgeIterator( node, block, begin + block.edges, end + block.edges );
|
||||
}
|
||||
|
||||
const Block* getBlock( unsigned block )
|
||||
{
|
||||
return m_blockCache.getBlock( block );
|
||||
}
|
||||
|
||||
const PathBlock* getPathBlock( unsigned block )
|
||||
{
|
||||
return m_pathCache.getBlock( block );
|
||||
}
|
||||
|
||||
unsigned nodeToBlock( NodeIterator node )
|
||||
{
|
||||
return node >> m_settings.internalBits;
|
||||
}
|
||||
|
||||
unsigned nodeToInternal( NodeIterator node )
|
||||
{
|
||||
return read_bits( node, m_settings.internalBits );
|
||||
}
|
||||
|
||||
NodeIterator nodeFromDescriptor( nodeDescriptor node )
|
||||
{
|
||||
NodeIterator result = ( node.block << m_settings.internalBits ) | node.node;
|
||||
assert( nodeToBlock( result ) == node.block );
|
||||
assert( nodeToInternal( result ) == node.node );
|
||||
return result;
|
||||
}
|
||||
|
||||
NodeIterator nodeFromDescriptor( unsigned block, unsigned node )
|
||||
{
|
||||
NodeIterator result = ( block << m_settings.internalBits ) | node;
|
||||
assert( nodeToBlock( result ) == block );
|
||||
assert( nodeToInternal( result ) == node );
|
||||
return result;
|
||||
}
|
||||
|
||||
static void loadBlock( Block* block, unsigned blockID, const unsigned char* blockBuffer )
|
||||
{
|
||||
const unsigned char* buffer = blockBuffer;
|
||||
int offset = 0;
|
||||
|
||||
// read settings
|
||||
block->settings.blockBits = read_unaligned_unsigned( &buffer, 8, &offset );
|
||||
block->settings.externalBits = read_unaligned_unsigned( &buffer, 8, &offset );
|
||||
block->settings.firstEdgeBits = read_unaligned_unsigned( &buffer, 8, &offset );
|
||||
block->settings.shortWeightBits = read_unaligned_unsigned( &buffer, 8, &offset );
|
||||
block->settings.longWeightBits = read_unaligned_unsigned( &buffer, 8, &offset );
|
||||
block->settings.xBits = read_unaligned_unsigned( &buffer, 8, &offset );
|
||||
block->settings.yBits = read_unaligned_unsigned( &buffer, 8, &offset );
|
||||
block->settings.minX = read_unaligned_unsigned( &buffer, 32, &offset );
|
||||
block->settings.minY = read_unaligned_unsigned( &buffer, 32, &offset );
|
||||
block->settings.nodeCount = read_unaligned_unsigned( &buffer, 32, &offset );
|
||||
block->settings.adjacentBlockCount = read_unaligned_unsigned( &buffer, 32, &offset );
|
||||
|
||||
// set other values
|
||||
block->internalBits = bits_needed( block->settings.nodeCount - 1 );
|
||||
block->adjacentBlockBits = bits_needed( block->settings.adjacentBlockCount - 1 );
|
||||
block->id = blockID;
|
||||
block->buffer = blockBuffer;
|
||||
|
||||
// compute offsets
|
||||
block->nodeCoordinates = ( buffer - blockBuffer ) * 8 + offset;
|
||||
block->adjacentBlocks = block->nodeCoordinates + ( block->settings.xBits + block->settings.yBits ) * block->settings.nodeCount;
|
||||
block->firstEdges = block->adjacentBlocks + block->settings.blockBits * block->settings.adjacentBlockCount;
|
||||
block->edges = block->firstEdges + block->settings.firstEdgeBits * ( block->settings.nodeCount + 1 );
|
||||
}
|
||||
|
||||
// Initializes a path block descriptor; path blocks carry no header of their
// own, only an id and a pointer to their raw data.
static void loadPathBlock( PathBlock* block, unsigned blockID, const unsigned char* blockBuffer )
{
	block->buffer = blockBuffer;
	block->id = blockID;
}
|
||||
|
||||
void unloadGraph()
|
||||
{
|
||||
m_blockCache.unload();
|
||||
m_pathCache.unload();
|
||||
}
|
||||
|
||||
// VARIABLES
|
||||
|
||||
GlobalSettings m_settings;
|
||||
BlockCache< Block > m_blockCache;
|
||||
BlockCache< PathBlock > m_pathCache;
|
||||
bool m_loaded;
|
||||
};
|
||||
|
||||
#endif // COMPRESSEDGRAPH_H
|
||||
407
monav/contractionhierarchies/contractionhierarchiesclient.cpp
Normal file
407
monav/contractionhierarchies/contractionhierarchiesclient.cpp
Normal file
@@ -0,0 +1,407 @@
|
||||
/*
|
||||
Copyright 2010 Christian Vetter veaac.fdirct@gmail.com
|
||||
|
||||
This file is part of MoNav.
|
||||
|
||||
MoNav is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
MoNav is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with MoNav. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
#include "contractionhierarchiesclient.h"
|
||||
#include "utils/qthelpers.h"
|
||||
#include <QtDebug>
|
||||
#include <stack>
|
||||
#ifndef NOGUI
|
||||
#include <QMessageBox>
|
||||
#endif
|
||||
|
||||
// The search heaps are created lazily in LoadData(); start without them so
// that unload() can safely delete them.
ContractionHierarchiesClient::ContractionHierarchiesClient() :
	m_heapForward( NULL ),
	m_heapBackward( NULL )
{
}
|
||||
|
||||
// Frees the heaps and type list via unload().
ContractionHierarchiesClient::~ContractionHierarchiesClient()
{
	unload();
}
|
||||
|
||||
|
||||
// Human-readable plugin name used to identify this router.
QString ContractionHierarchiesClient::GetName()
{
	return QString( "Contraction Hierarchies" );
}
|
||||
|
||||
void ContractionHierarchiesClient::SetInputDirectory( const QString& dir )
|
||||
{
|
||||
m_directory = dir;
|
||||
}
|
||||
|
||||
void ContractionHierarchiesClient::ShowSettings()
|
||||
{
|
||||
#ifndef NOGUI
|
||||
QMessageBox::information( NULL, "Settings", "No settings available" );
|
||||
#endif
|
||||
}
|
||||
|
||||
void ContractionHierarchiesClient::unload()
|
||||
{
|
||||
if ( m_heapForward != NULL )
|
||||
delete m_heapForward;
|
||||
m_heapForward = NULL;
|
||||
if ( m_heapBackward != NULL )
|
||||
delete m_heapBackward;
|
||||
m_heapBackward = NULL;
|
||||
m_types.clear();
|
||||
}
|
||||
|
||||
bool ContractionHierarchiesClient::LoadData()
|
||||
{
|
||||
QString filename = fileInDirectory( m_directory,"Contraction Hierarchies" );
|
||||
unload();
|
||||
|
||||
if ( !m_graph.loadGraph( filename, 1024 * 1024 * 4 ) )
|
||||
return false;
|
||||
|
||||
m_namesFile.setFileName( filename + "_names" );
|
||||
if ( !openQFile( &m_namesFile, QIODevice::ReadOnly ) )
|
||||
return false;
|
||||
m_names = ( const char* ) m_namesFile.map( 0, m_namesFile.size() );
|
||||
if ( m_names == NULL )
|
||||
return false;
|
||||
m_namesFile.close();
|
||||
|
||||
m_heapForward = new Heap( m_graph.numberOfNodes() );
|
||||
m_heapBackward = new Heap( m_graph.numberOfNodes() );
|
||||
|
||||
QFile typeFile( filename + "_types" );
|
||||
if ( !openQFile( &typeFile, QIODevice::ReadOnly ) )
|
||||
return false;
|
||||
|
||||
QByteArray buffer = typeFile.readAll();
|
||||
QString types = QString::fromUtf8( buffer.constData() );
|
||||
m_types = types.split( ';' );
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool ContractionHierarchiesClient::GetRoute( double* distance, QVector< Node>* pathNodes, QVector< Edge >* pathEdges, const IGPSLookup::Result& source, const IGPSLookup::Result& target )
|
||||
{
|
||||
m_heapForward->Clear();
|
||||
m_heapBackward->Clear();
|
||||
|
||||
*distance = computeRoute( source, target, pathNodes, pathEdges );
|
||||
if ( *distance == std::numeric_limits< int >::max() )
|
||||
return false;
|
||||
|
||||
// is it shorter to drive along the edge?
|
||||
if ( target.source == source.source && target.target == source.target && source.edgeID == target.edgeID ) {
|
||||
EdgeIterator targetEdge = m_graph.findEdge( target.source, target.target, target.edgeID );
|
||||
double onEdgeDistance = fabs( target.percentage - source.percentage ) * targetEdge.distance();
|
||||
if ( onEdgeDistance < *distance ) {
|
||||
if ( ( targetEdge.forward() && targetEdge.backward() ) || source.percentage < target.percentage ) {
|
||||
pathNodes->clear();
|
||||
pathEdges->clear();
|
||||
pathNodes->push_back( source.nearestPoint );
|
||||
|
||||
QVector< Node > tempNodes;
|
||||
if ( targetEdge.unpacked() )
|
||||
m_graph.path( targetEdge, &tempNodes, pathEdges, target.target == targetEdge.target() );
|
||||
else
|
||||
pathEdges->push_back( targetEdge.description() );
|
||||
|
||||
if ( target.previousWayCoordinates < source.previousWayCoordinates ) {
|
||||
for ( unsigned pathID = target.previousWayCoordinates; pathID < source.previousWayCoordinates; pathID++ )
|
||||
pathNodes->push_back( tempNodes[pathID - 1] );
|
||||
std::reverse( pathNodes->begin() + 1, pathNodes->end() );
|
||||
} else {
|
||||
for ( unsigned pathID = source.previousWayCoordinates; pathID < target.previousWayCoordinates; pathID++ )
|
||||
pathNodes->push_back( tempNodes[pathID - 1] );
|
||||
}
|
||||
|
||||
pathNodes->push_back( target.nearestPoint );
|
||||
pathEdges->front().length = pathNodes->size() - 1;
|
||||
*distance = onEdgeDistance;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
*distance /= 10;
|
||||
return true;
|
||||
}
|
||||
|
||||
// Resolves one street-name id (a byte offset into the mapped names blob)
// into its UTF-8 decoded string.
bool ContractionHierarchiesClient::GetName( QString* result, unsigned name )
{
	*result = QString::fromUtf8( m_names + name );
	return true;
}
|
||||
|
||||
// Resolves a batch of street-name ids; result[i] corresponds to names[i].
bool ContractionHierarchiesClient::GetNames( QVector< QString >* result, QVector< unsigned > names )
{
	result->clear();
	result->reserve( names.size() );
	for ( int position = 0; position < names.size(); position++ )
		result->push_back( QString::fromUtf8( m_names + names[position] ) );
	return true;
}
|
||||
|
||||
// Resolves one way-type id into its name via the table read in LoadData().
bool ContractionHierarchiesClient::GetType( QString* result, unsigned type )
{
	*result = m_types[type];
	return true;
}
|
||||
|
||||
// Resolves a batch of way-type ids; result[i] corresponds to types[i].
bool ContractionHierarchiesClient::GetTypes( QVector< QString >* result, QVector< unsigned > types )
{
	result->clear();
	result->reserve( types.size() );
	for ( int position = 0; position < types.size(); position++ )
		result->push_back( m_types[types[position]] );
	return true;
}
|
||||
|
||||
template< class EdgeAllowed, class StallEdgeAllowed >
|
||||
void ContractionHierarchiesClient::computeStep( Heap* heapForward, Heap* heapBackward, const EdgeAllowed& edgeAllowed, const StallEdgeAllowed& stallEdgeAllowed, NodeIterator* middle, int* targetDistance ) {
|
||||
|
||||
const NodeIterator node = heapForward->DeleteMin();
|
||||
const int distance = heapForward->GetKey( node );
|
||||
|
||||
if ( heapForward->GetData( node ).stalled )
|
||||
return;
|
||||
|
||||
if ( heapBackward->WasInserted( node ) && !heapBackward->GetData( node ).stalled ) {
|
||||
const int newDistance = heapBackward->GetKey( node ) + distance;
|
||||
if ( newDistance < *targetDistance ) {
|
||||
*middle = node;
|
||||
*targetDistance = newDistance;
|
||||
}
|
||||
}
|
||||
|
||||
if ( distance > *targetDistance ) {
|
||||
heapForward->DeleteAll();
|
||||
return;
|
||||
}
|
||||
for ( EdgeIterator edge = m_graph.edges( node ); edge.hasEdgesLeft(); ) {
|
||||
m_graph.unpackNextEdge( &edge );
|
||||
const NodeIterator to = edge.target();
|
||||
const int edgeWeight = edge.distance();
|
||||
assert( edgeWeight > 0 );
|
||||
const int toDistance = distance + edgeWeight;
|
||||
|
||||
if ( stallEdgeAllowed( edge.forward(), edge.backward() ) && heapForward->WasInserted( to ) ) {
|
||||
const int shorterDistance = heapForward->GetKey( to ) + edgeWeight;
|
||||
if ( shorterDistance < distance ) {
|
||||
//perform a bfs starting at node
|
||||
//only insert nodes when a sub-optimal path can be proven
|
||||
//insert node into the stall queue
|
||||
heapForward->GetKey( node ) = shorterDistance;
|
||||
heapForward->GetData( node ).stalled = true;
|
||||
m_stallQueue.push( node );
|
||||
|
||||
while ( !m_stallQueue.empty() ) {
|
||||
//get node from the queue
|
||||
const NodeIterator stallNode = m_stallQueue.front();
|
||||
m_stallQueue.pop();
|
||||
const int stallDistance = heapForward->GetKey( stallNode );
|
||||
|
||||
//iterate over outgoing edges
|
||||
for ( EdgeIterator stallEdge = m_graph.edges( stallNode ); stallEdge.hasEdgesLeft(); ) {
|
||||
m_graph.unpackNextEdge( &stallEdge );
|
||||
//is edge outgoing/reached/stalled?
|
||||
if ( !edgeAllowed( stallEdge.forward(), stallEdge.backward() ) )
|
||||
continue;
|
||||
const NodeIterator stallTo = stallEdge.target();
|
||||
if ( !heapForward->WasInserted( stallTo ) )
|
||||
continue;
|
||||
if ( heapForward->GetData( stallTo ).stalled == true )
|
||||
continue;
|
||||
|
||||
const int stallToDistance = stallDistance + stallEdge.distance();
|
||||
//sub-optimal path found -> insert stallTo
|
||||
if ( stallToDistance < heapForward->GetKey( stallTo ) ) {
|
||||
if ( heapForward->WasRemoved( stallTo ) )
|
||||
heapForward->GetKey( stallTo ) = stallToDistance;
|
||||
else
|
||||
heapForward->DecreaseKey( stallTo, stallToDistance );
|
||||
|
||||
m_stallQueue.push( stallTo );
|
||||
heapForward->GetData( stallTo ).stalled = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if ( edgeAllowed( edge.forward(), edge.backward() ) ) {
|
||||
//New Node discovered -> Add to Heap + Node Info Storage
|
||||
if ( !heapForward->WasInserted( to ) )
|
||||
heapForward->Insert( to, toDistance, node );
|
||||
|
||||
//Found a shorter Path -> Update distance
|
||||
else if ( toDistance <= heapForward->GetKey( to ) ) {
|
||||
heapForward->DecreaseKey( to, toDistance );
|
||||
//new parent + unstall
|
||||
heapForward->GetData( to ).parent = node;
|
||||
heapForward->GetData( to ).stalled = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Runs the bidirectional CH query between the two edge positions, then
// reconstructs the route: geometry goes to pathNodes, edge descriptions to
// pathEdges. Returns the route length in internal units, or
// std::numeric_limits< int >::max() if the target is unreachable.
//
// Bug fix: the target-edge special case below used to flip
// reverseSourceDescription — a dead write, since that flag was already
// consumed — instead of reverseTargetDescription, which is what the
// following code reads. The symmetric source-side block flips the source
// flag, confirming the intent.
int ContractionHierarchiesClient::computeRoute( const IGPSLookup::Result& source, const IGPSLookup::Result& target, QVector< Node>* pathNodes, QVector< Edge >* pathEdges ) {
	EdgeIterator sourceEdge = m_graph.findEdge( source.source, source.target, source.edgeID );
	unsigned sourceWeight = sourceEdge.distance();
	EdgeIterator targetEdge = m_graph.findEdge( target.source, target.target, target.edgeID );
	unsigned targetWeight = targetEdge.distance();

	// insert source into heap: remaining part of the edge towards its target
	// node, and (on a two-way edge) the part back to its source node
	m_heapForward->Insert( source.target, sourceWeight - sourceWeight * source.percentage, source.target );
	if ( sourceEdge.backward() && sourceEdge.forward() && source.target != source.source )
		m_heapForward->Insert( source.source, sourceWeight * source.percentage, source.source );

	// insert target into heap, symmetrically
	m_heapBackward->Insert( target.source, targetWeight * target.percentage, target.source );
	if ( targetEdge.backward() && targetEdge.forward() && target.target != target.source )
		m_heapBackward->Insert( target.target, targetWeight - targetWeight * target.percentage, target.target );

	int targetDistance = std::numeric_limits< int >::max();
	NodeIterator middle = ( NodeIterator ) 0;
	AllowForwardEdge forward;
	AllowBackwardEdge backward;

	// alternate forward and backward search steps until both heaps run dry
	while ( m_heapForward->Size() + m_heapBackward->Size() > 0 ) {

		if ( m_heapForward->Size() > 0 )
			computeStep( m_heapForward, m_heapBackward, forward, backward, &middle, &targetDistance );

		if ( m_heapBackward->Size() > 0 )
			computeStep( m_heapBackward, m_heapForward, backward, forward, &middle, &targetDistance );

	}

	if ( targetDistance == std::numeric_limits< int >::max() )
		return std::numeric_limits< int >::max();

	// collect the forward search-tree path from the meeting node back to the
	// source; the root is its own parent
	std::stack< NodeIterator > stack;
	NodeIterator pathNode = middle;
	while ( true ) {
		NodeIterator parent = m_heapForward->GetData( pathNode ).parent;
		stack.push( pathNode );
		if ( parent == pathNode )
			break;
		pathNode = parent;
	}

	// emit the part of the source edge that belongs to the route
	pathNodes->push_back( source.nearestPoint );
	bool reverseSourceDescription = pathNode != source.target;
	if ( source.source == source.target && sourceEdge.backward() && sourceEdge.forward() && source.percentage < 0.5 )
		reverseSourceDescription = !reverseSourceDescription;
	if ( sourceEdge.unpacked() ) {
		bool unpackSourceForward = source.target != sourceEdge.target() ? reverseSourceDescription : !reverseSourceDescription;
		m_graph.path( sourceEdge, pathNodes, pathEdges, unpackSourceForward );
		if ( reverseSourceDescription ) {
			pathNodes->remove( 1, pathNodes->size() - 1 - source.previousWayCoordinates );
		} else {
			pathNodes->remove( 1, source.previousWayCoordinates - 1 );
		}
	} else {
		pathNodes->push_back( m_graph.node( pathNode ) );
		pathEdges->push_back( sourceEdge.description() );
	}
	pathEdges->front().length = pathNodes->size() - 1;

	// unpack the forward half of the route, shortcut by shortcut
	while ( stack.size() > 1 ) {
		const NodeIterator node = stack.top();
		stack.pop();
		unpackEdge( node, stack.top(), true, pathNodes, pathEdges );
	}

	// unpack the backward half, walking the backward search tree
	pathNode = middle;
	while ( true ) {
		NodeIterator parent = m_heapBackward->GetData( pathNode ).parent;
		if ( parent == pathNode )
			break;
		unpackEdge( parent, pathNode, false, pathNodes, pathEdges );
		pathNode = parent;
	}

	// emit the part of the target edge that belongs to the route
	int begin = pathNodes->size();
	bool reverseTargetDescription = pathNode != target.source;
	if ( target.source == target.target && targetEdge.backward() && targetEdge.forward() && target.percentage > 0.5 )
		reverseTargetDescription = !reverseTargetDescription; // was: reverseSourceDescription (copy-paste bug, dead write)
	if ( targetEdge.unpacked() ) {
		bool unpackTargetForward = target.target != targetEdge.target() ? reverseTargetDescription : !reverseTargetDescription;
		m_graph.path( targetEdge, pathNodes, pathEdges, unpackTargetForward );
		if ( reverseTargetDescription ) {
			pathNodes->resize( pathNodes->size() - target.previousWayCoordinates );
		} else {
			pathNodes->resize( begin + target.previousWayCoordinates - 1 );
		}
	} else {
		pathEdges->push_back( targetEdge.description() );
	}
	pathNodes->push_back( target.nearestPoint );
	pathEdges->back().length = pathNodes->size() - begin;

	return targetDistance;
}
|
||||
|
||||
bool ContractionHierarchiesClient::unpackEdge( const NodeIterator source, const NodeIterator target, bool forward, QVector< Node >* pathNodes, QVector< Edge >* pathEdges ) {
|
||||
EdgeIterator shortestEdge;
|
||||
|
||||
unsigned distance = std::numeric_limits< unsigned >::max();
|
||||
for ( EdgeIterator edge = m_graph.edges( source ); edge.hasEdgesLeft(); ) {
|
||||
m_graph.unpackNextEdge( &edge );
|
||||
if ( edge.target() != target )
|
||||
continue;
|
||||
if ( forward && !edge.forward() )
|
||||
continue;
|
||||
if ( !forward && !edge.backward() )
|
||||
continue;
|
||||
if ( edge.distance() > distance )
|
||||
continue;
|
||||
distance = edge.distance();
|
||||
shortestEdge = edge;
|
||||
}
|
||||
|
||||
if ( shortestEdge.unpacked() ) {
|
||||
m_graph.path( shortestEdge, pathNodes, pathEdges, forward );
|
||||
return true;
|
||||
}
|
||||
|
||||
if ( !shortestEdge.shortcut() ) {
|
||||
pathEdges->push_back( shortestEdge.description() );
|
||||
if ( forward )
|
||||
pathNodes->push_back( m_graph.node( target ).coordinate );
|
||||
else
|
||||
pathNodes->push_back( m_graph.node( source ).coordinate );
|
||||
return true;
|
||||
}
|
||||
|
||||
const NodeIterator middle = shortestEdge.middle();
|
||||
|
||||
if ( forward ) {
|
||||
unpackEdge( middle, source, false, pathNodes, pathEdges );
|
||||
unpackEdge( middle, target, true, pathNodes, pathEdges );
|
||||
return true;
|
||||
} else {
|
||||
unpackEdge( middle, target, false, pathNodes, pathEdges );
|
||||
unpackEdge( middle, source, true, pathNodes, pathEdges );
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
Q_EXPORT_PLUGIN2( contractionhierarchiesclient, ContractionHierarchiesClient )
|
||||
|
||||
93
monav/contractionhierarchies/contractionhierarchiesclient.h
Normal file
93
monav/contractionhierarchies/contractionhierarchiesclient.h
Normal file
@@ -0,0 +1,93 @@
|
||||
/*
|
||||
Copyright 2010 Christian Vetter veaac.fdirct@gmail.com
|
||||
|
||||
This file is part of MoNav.
|
||||
|
||||
MoNav is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
MoNav is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with MoNav. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
#ifndef CONTRACTIONHIERARCHIESCLIENT_H
|
||||
#define CONTRACTIONHIERARCHIESCLIENT_H
|
||||
|
||||
#include <QObject>
|
||||
#include <QStringList>
|
||||
#include "interfaces/irouter.h"
|
||||
#include "binaryheap.h"
|
||||
#include "compressedgraph.h"
|
||||
#include <queue>
|
||||
|
||||
class ContractionHierarchiesClient : public QObject, public IRouter
|
||||
{
|
||||
Q_OBJECT
|
||||
Q_INTERFACES( IRouter )
|
||||
public:
|
||||
ContractionHierarchiesClient();
|
||||
virtual ~ContractionHierarchiesClient();
|
||||
|
||||
virtual QString GetName();
|
||||
virtual void SetInputDirectory( const QString& dir );
|
||||
virtual void ShowSettings();
|
||||
virtual bool LoadData();
|
||||
virtual bool GetRoute( double* distance, QVector< Node>* pathNodes, QVector< Edge >* pathEdges, const IGPSLookup::Result& source, const IGPSLookup::Result& target );
|
||||
virtual bool GetName( QString* result, unsigned name );
|
||||
virtual bool GetNames( QVector< QString >* result, QVector< unsigned > names );
|
||||
virtual bool GetType( QString* result, unsigned type );
|
||||
virtual bool GetTypes( QVector< QString >* result, QVector< unsigned > types );
|
||||
|
||||
protected:
|
||||
struct HeapData {
|
||||
CompressedGraph::NodeIterator parent;
|
||||
bool stalled: 1;
|
||||
HeapData( CompressedGraph::NodeIterator p ) {
|
||||
parent = p;
|
||||
stalled = false;
|
||||
}
|
||||
};
|
||||
|
||||
class AllowForwardEdge {
|
||||
public:
|
||||
bool operator()( bool forward, bool /*backward*/ ) const {
|
||||
return forward;
|
||||
}
|
||||
};
|
||||
|
||||
class AllowBackwardEdge {
|
||||
public:
|
||||
bool operator()( bool /*forward*/, bool backward ) const {
|
||||
return backward;
|
||||
}
|
||||
};
|
||||
|
||||
typedef CompressedGraph::NodeIterator NodeIterator;
|
||||
typedef CompressedGraph::EdgeIterator EdgeIterator;
|
||||
typedef BinaryHeap< NodeIterator, int, int, HeapData, MapStorage< NodeIterator, unsigned > > Heap;
|
||||
|
||||
CompressedGraph m_graph;
|
||||
const char* m_names;
|
||||
QFile m_namesFile;
|
||||
Heap* m_heapForward;
|
||||
Heap* m_heapBackward;
|
||||
std::queue< NodeIterator > m_stallQueue;
|
||||
QString m_directory;
|
||||
QStringList m_types;
|
||||
|
||||
void unload();
|
||||
template< class EdgeAllowed, class StallEdgeAllowed >
|
||||
void computeStep( Heap* heapForward, Heap* heapBackward, const EdgeAllowed& edgeAllowed, const StallEdgeAllowed& stallEdgeAllowed, NodeIterator* middle, int* targetDistance );
|
||||
int computeRoute( const IGPSLookup::Result& source, const IGPSLookup::Result& target, QVector< Node>* pathNodes, QVector< Edge >* pathEdges );
|
||||
bool unpackEdge( const NodeIterator source, const NodeIterator target, bool forward, QVector< Node>* pathNodes, QVector< Edge >* pathEdges );
|
||||
|
||||
};
|
||||
|
||||
#endif // CONTRACTIONHIERARCHIESCLIENT_H
|
||||
@@ -0,0 +1,25 @@
|
||||
# Build the Contraction Hierarchies router as a plugin library.
TEMPLATE = lib
CONFIG += plugin
#CONFIG += debug
DESTDIR = ..

unix {
	# favour speed over the default -O2; the unused-function warnings come
	# from header-only helpers shared between plugins
	QMAKE_CXXFLAGS_RELEASE -= -O2
	QMAKE_CXXFLAGS_RELEASE += -O3 \
		-Wno-unused-function
	QMAKE_CXXFLAGS_DEBUG += -Wno-unused-function
}

HEADERS += \
	utils/coordinates.h \
	utils/config.h \
	blockcache.h \
	binaryheap.h \
	interfaces/irouter.h \
	contractionhierarchiesclient.h \
	compressedgraph.h \
	interfaces/igpslookup.h \
	utils/bithelpers.h \
	utils/qthelpers.h

SOURCES += \
	contractionhierarchiesclient.cpp
|
||||
659
monav/contractionhierarchies/contractor.h
Normal file
659
monav/contractionhierarchies/contractor.h
Normal file
@@ -0,0 +1,659 @@
|
||||
/*
|
||||
Copyright 2010 Christian Vetter veaac.fdirct@gmail.com
|
||||
|
||||
This file is part of MoNav.
|
||||
|
||||
MoNav is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
MoNav is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with MoNav. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
#ifndef CONTRACTOR_H_INCLUDED
|
||||
#define CONTRACTOR_H_INCLUDED
|
||||
#include <vector>
|
||||
#include <omp.h>
|
||||
#include <limits>
|
||||
#include "utils/qthelpers.h"
|
||||
#include "dynamicgraph.h"
|
||||
#include "binaryheap.h"
|
||||
#include "utils/config.h"
|
||||
|
||||
class Contractor {
|
||||
|
||||
public:
|
||||
|
||||
struct Witness {
|
||||
NodeID source;
|
||||
NodeID target;
|
||||
NodeID middle;
|
||||
};
|
||||
|
||||
private:
|
||||
|
||||
struct _EdgeData {
|
||||
unsigned distance;
|
||||
unsigned originalEdges : 29;
|
||||
bool shortcut : 1;
|
||||
bool forward : 1;
|
||||
bool backward : 1;
|
||||
union {
|
||||
NodeID middle; // shortcut
|
||||
unsigned id; // original edge
|
||||
};
|
||||
} data;
|
||||
|
||||
struct _HeapData {
|
||||
};
|
||||
|
||||
typedef DynamicGraph< _EdgeData > _DynamicGraph;
|
||||
typedef BinaryHeap< NodeID, NodeID, unsigned, _HeapData > _Heap;
|
||||
typedef _DynamicGraph::InputEdge _ImportEdge;
|
||||
|
||||
struct _ThreadData {
|
||||
_Heap heap;
|
||||
std::vector< _ImportEdge > insertedEdges;
|
||||
std::vector< Witness > witnessList;
|
||||
std::vector< NodeID > neighbours;
|
||||
_ThreadData( NodeID nodes ): heap( nodes ) {
|
||||
}
|
||||
};
|
||||
|
||||
struct _PriorityData {
|
||||
int depth;
|
||||
NodeID bias;
|
||||
_PriorityData() {
|
||||
depth = 0;
|
||||
}
|
||||
};
|
||||
|
||||
struct _ContractionInformation {
|
||||
int edgesDeleted;
|
||||
int edgesAdded;
|
||||
int originalEdgesDeleted;
|
||||
int originalEdgesAdded;
|
||||
_ContractionInformation() {
|
||||
edgesAdded = edgesDeleted = originalEdgesAdded = originalEdgesDeleted = 0;
|
||||
}
|
||||
};
|
||||
|
||||
struct _NodePartitionor {
|
||||
bool operator()( std::pair< NodeID, bool > nodeData ) {
|
||||
return !nodeData.second;
|
||||
}
|
||||
};
|
||||
|
||||
struct _LogItem {
|
||||
unsigned iteration;
|
||||
NodeID nodes;
|
||||
double contraction;
|
||||
double independent;
|
||||
double inserting;
|
||||
double removing;
|
||||
double updating;
|
||||
|
||||
_LogItem() {
|
||||
iteration = nodes = contraction = independent = inserting = removing = updating = 0;
|
||||
}
|
||||
|
||||
double GetTotalTime() const {
|
||||
return contraction + independent + inserting + removing + updating;
|
||||
}
|
||||
|
||||
void PrintStatistics() const {
|
||||
qDebug( "%d\t%d\t%lf\t%lf\t%lf\t%lf\t%lf", iteration, nodes, independent, contraction, inserting, removing, updating );
|
||||
}
|
||||
};
|
||||
|
||||
class _LogData {
|
||||
public:
|
||||
|
||||
std::vector < _LogItem > iterations;
|
||||
|
||||
unsigned GetNIterations() {
|
||||
return ( unsigned ) iterations.size();
|
||||
}
|
||||
|
||||
_LogItem GetSum() const {
|
||||
_LogItem sum;
|
||||
sum.iteration = ( unsigned ) iterations.size();
|
||||
|
||||
for ( int i = 0, e = ( int ) iterations.size(); i < e; ++i ) {
|
||||
sum.nodes += iterations[i].nodes;
|
||||
sum.contraction += iterations[i].contraction;
|
||||
sum.independent += iterations[i].independent;
|
||||
sum.inserting += iterations[i].inserting;
|
||||
sum.removing += iterations[i].removing;
|
||||
sum.updating += iterations[i].updating;
|
||||
}
|
||||
|
||||
return sum;
|
||||
}
|
||||
|
||||
void PrintHeader() const {
|
||||
qDebug( "Iteration\tNodes\tIndependent\tContraction\tInserting\tRemoving\tUpdating" );
|
||||
}
|
||||
|
||||
void PrintSummary() const {
|
||||
PrintHeader();
|
||||
GetSum().PrintStatistics();
|
||||
}
|
||||
|
||||
void Print() const {
|
||||
PrintHeader();
|
||||
for ( int i = 0, e = ( int ) iterations.size(); i < e; ++i )
|
||||
iterations[i].PrintStatistics();
|
||||
}
|
||||
|
||||
void Insert( const _LogItem& data ) {
|
||||
iterations.push_back( data );
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
public:
|
||||
|
||||
template< class InputEdge >
|
||||
Contractor( int nodes, const std::vector< InputEdge >& inputEdges ) {
|
||||
std::vector< _ImportEdge > edges;
|
||||
edges.reserve( 2 * inputEdges.size() );
|
||||
int skippedLargeEdges = 0;
|
||||
for ( typename std::vector< InputEdge >::const_iterator i = inputEdges.begin(), e = inputEdges.end(); i != e; ++i ) {
|
||||
_ImportEdge edge;
|
||||
edge.source = i->source;
|
||||
edge.target = i->target;
|
||||
edge.data.distance = std::max( i->distance * 10.0 + 0.5, 1.0 );
|
||||
if ( edge.data.distance > 24 * 60 * 60 * 10 ) {
|
||||
skippedLargeEdges++;
|
||||
continue;
|
||||
}
|
||||
edge.data.shortcut = false;
|
||||
edge.data.id = i - inputEdges.begin();
|
||||
edge.data.forward = true;
|
||||
edge.data.backward = i->bidirectional;
|
||||
edge.data.originalEdges = 1;
|
||||
|
||||
if ( edge.data.distance < 1 ) {
|
||||
qDebug() << edge.source << edge.target << edge.data.forward << edge.data.backward << edge.data.distance << edge.data.id << i->distance;
|
||||
}
|
||||
|
||||
if ( edge.source == edge.target ) {
|
||||
_loops.push_back( edge );
|
||||
continue;
|
||||
}
|
||||
|
||||
edges.push_back( edge );
|
||||
std::swap( edge.source, edge.target );
|
||||
edge.data.forward = i->bidirectional;
|
||||
edge.data.backward = true;
|
||||
edges.push_back( edge );
|
||||
}
|
||||
if ( skippedLargeEdges != 0 )
|
||||
qDebug( "Skipped %d edges with too large edge weight", skippedLargeEdges );
|
||||
std::sort( edges.begin(), edges.end() );
|
||||
|
||||
_graph = new _DynamicGraph( nodes, edges );
|
||||
|
||||
std::vector< _ImportEdge >().swap( edges );
|
||||
}
|
||||
|
||||
~Contractor() {
|
||||
delete _graph;
|
||||
}
|
||||
|
||||
void Run() {
|
||||
const NodeID numberOfNodes = _graph->GetNumberOfNodes();
|
||||
_LogData log;
|
||||
|
||||
int maxThreads = omp_get_max_threads();
|
||||
std::vector < _ThreadData* > threadData;
|
||||
for ( int threadNum = 0; threadNum < maxThreads; ++threadNum ) {
|
||||
threadData.push_back( new _ThreadData( numberOfNodes ) );
|
||||
}
|
||||
qDebug( "%d nodes, %d edges", numberOfNodes, _graph->GetNumberOfEdges() );
|
||||
qDebug( "using %d threads", maxThreads );
|
||||
|
||||
NodeID levelID = 0;
|
||||
NodeID iteration = 0;
|
||||
std::vector< std::pair< NodeID, bool > > remainingNodes( numberOfNodes );
|
||||
std::vector< double > nodePriority( numberOfNodes );
|
||||
std::vector< _PriorityData > nodeData( numberOfNodes );
|
||||
|
||||
//initialize the variables
|
||||
#pragma omp parallel for schedule ( guided )
|
||||
for ( int x = 0; x < ( int ) numberOfNodes; ++x )
|
||||
remainingNodes[x].first = x;
|
||||
std::random_shuffle( remainingNodes.begin(), remainingNodes.end() );
|
||||
for ( int x = 0; x < ( int ) numberOfNodes; ++x )
|
||||
nodeData[remainingNodes[x].first].bias = x;
|
||||
|
||||
qDebug( "Initialise Elimination PQ... " );
|
||||
_LogItem statistics0;
|
||||
statistics0.updating = _Timestamp();
|
||||
statistics0.iteration = 0;
|
||||
#pragma omp parallel
|
||||
{
|
||||
_ThreadData* data = threadData[omp_get_thread_num()];
|
||||
#pragma omp for schedule ( guided )
|
||||
for ( int x = 0; x < ( int ) numberOfNodes; ++x ) {
|
||||
nodePriority[x] = _Evaluate( data, &nodeData[x], x );
|
||||
}
|
||||
}
|
||||
qDebug( "done" );
|
||||
|
||||
statistics0.updating = _Timestamp() - statistics0.updating;
|
||||
log.Insert( statistics0 );
|
||||
|
||||
log.PrintHeader();
|
||||
statistics0.PrintStatistics();
|
||||
|
||||
while ( levelID < numberOfNodes ) {
|
||||
_LogItem statistics;
|
||||
statistics.iteration = iteration++;
|
||||
const int last = ( int ) remainingNodes.size();
|
||||
|
||||
//determine independent node set
|
||||
double timeLast = _Timestamp();
|
||||
#pragma omp parallel
|
||||
{
|
||||
_ThreadData* const data = threadData[omp_get_thread_num()];
|
||||
#pragma omp for schedule ( guided )
|
||||
for ( int i = 0; i < last; ++i ) {
|
||||
const NodeID node = remainingNodes[i].first;
|
||||
remainingNodes[i].second = _IsIndependent( nodePriority, nodeData, data, node );
|
||||
}
|
||||
}
|
||||
_NodePartitionor functor;
|
||||
const std::vector < std::pair < NodeID, bool > >::const_iterator first = stable_partition( remainingNodes.begin(), remainingNodes.end(), functor );
|
||||
const int firstIndependent = first - remainingNodes.begin();
|
||||
statistics.nodes = last - firstIndependent;
|
||||
statistics.independent += _Timestamp() - timeLast;
|
||||
timeLast = _Timestamp();
|
||||
|
||||
//contract independent nodes
|
||||
#pragma omp parallel
|
||||
{
|
||||
_ThreadData* const data = threadData[omp_get_thread_num()];
|
||||
#pragma omp for schedule ( guided ) nowait
|
||||
for ( int position = firstIndependent ; position < last; ++position ) {
|
||||
NodeID x = remainingNodes[position].first;
|
||||
_Contract< false > ( data, x );
|
||||
nodePriority[x] = -1;
|
||||
}
|
||||
std::sort( data->insertedEdges.begin(), data->insertedEdges.end() );
|
||||
}
|
||||
statistics.contraction += _Timestamp() - timeLast;
|
||||
timeLast = _Timestamp();
|
||||
|
||||
#pragma omp parallel
|
||||
{
|
||||
_ThreadData* const data = threadData[omp_get_thread_num()];
|
||||
#pragma omp for schedule ( guided ) nowait
|
||||
for ( int position = firstIndependent ; position < last; ++position ) {
|
||||
NodeID x = remainingNodes[position].first;
|
||||
_DeleteIncommingEdges( data, x );
|
||||
}
|
||||
}
|
||||
statistics.removing += _Timestamp() - timeLast;
|
||||
timeLast = _Timestamp();
|
||||
|
||||
//insert new edges
|
||||
for ( int threadNum = 0; threadNum < maxThreads; ++threadNum ) {
|
||||
_ThreadData& data = *threadData[threadNum];
|
||||
for ( int i = 0; i < ( int ) data.insertedEdges.size(); ++i ) {
|
||||
const _ImportEdge& edge = data.insertedEdges[i];
|
||||
_graph->InsertEdge( edge.source, edge.target, edge.data );
|
||||
}
|
||||
std::vector< _ImportEdge >().swap( data.insertedEdges );
|
||||
}
|
||||
statistics.inserting += _Timestamp() - timeLast;
|
||||
timeLast = _Timestamp();
|
||||
|
||||
//update priorities
|
||||
#pragma omp parallel
|
||||
{
|
||||
_ThreadData* const data = threadData[omp_get_thread_num()];
|
||||
#pragma omp for schedule ( guided ) nowait
|
||||
for ( int position = firstIndependent ; position < last; ++position ) {
|
||||
NodeID x = remainingNodes[position].first;
|
||||
_UpdateNeighbours( &nodePriority, &nodeData, data, x );
|
||||
}
|
||||
}
|
||||
statistics.updating += _Timestamp() - timeLast;
|
||||
timeLast = _Timestamp();
|
||||
|
||||
//output some statistics
|
||||
statistics.PrintStatistics();
|
||||
//qDebug( wxT( "Printed" ) );
|
||||
|
||||
//remove contracted nodes from the pool
|
||||
levelID += last - firstIndependent;
|
||||
remainingNodes.resize( firstIndependent );
|
||||
std::vector< std::pair< NodeID, bool > >( remainingNodes ).swap( remainingNodes );
|
||||
log.Insert( statistics );
|
||||
}
|
||||
|
||||
for ( int threadNum = 0; threadNum < maxThreads; threadNum++ ) {
|
||||
_witnessList.insert( _witnessList.end(), threadData[threadNum]->witnessList.begin(), threadData[threadNum]->witnessList.end() );
|
||||
delete threadData[threadNum];
|
||||
}
|
||||
|
||||
log.PrintSummary();
|
||||
qDebug( "Total Time: %lf s", log.GetSum().GetTotalTime() );
|
||||
|
||||
}
|
||||
|
||||
template< class Edge >
|
||||
void GetEdges( std::vector< Edge >* edges ) {
|
||||
NodeID numberOfNodes = _graph->GetNumberOfNodes();
|
||||
for ( NodeID node = 0; node < numberOfNodes; ++node ) {
|
||||
for ( _DynamicGraph::EdgeIterator edge = _graph->BeginEdges( node ), endEdges = _graph->EndEdges( node ); edge != endEdges; ++edge ) {
|
||||
const NodeID target = _graph->GetTarget( edge );
|
||||
const _EdgeData& data = _graph->GetEdgeData( edge );
|
||||
Edge newEdge;
|
||||
newEdge.source = node;
|
||||
newEdge.target = target;
|
||||
newEdge.data.distance = data.distance;
|
||||
newEdge.data.shortcut = data.shortcut;
|
||||
if ( data.shortcut )
|
||||
newEdge.data.middle = data.middle;
|
||||
else
|
||||
newEdge.data.id = data.id;
|
||||
newEdge.data.forward = data.forward;
|
||||
newEdge.data.backward = data.backward;
|
||||
edges->push_back( newEdge );
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
template< class Edge >
|
||||
void GetLoops( std::vector< Edge >* edges ) {
|
||||
for ( unsigned i = 0; i < _loops.size(); i++ ) {
|
||||
Edge newEdge;
|
||||
newEdge.source = _loops[i].source;
|
||||
newEdge.target = _loops[i].target;
|
||||
newEdge.data.distance = _loops[i].data.distance;
|
||||
newEdge.data.shortcut = _loops[i].data.shortcut;
|
||||
newEdge.data.id = _loops[i].data.id;
|
||||
newEdge.data.forward = _loops[i].data.forward;
|
||||
newEdge.data.backward = _loops[i].data.backward;
|
||||
edges->push_back( newEdge );
|
||||
}
|
||||
}
|
||||
|
||||
void GetWitnessList( std::vector< Witness >& list ) {
|
||||
list = _witnessList;
|
||||
}
|
||||
|
||||
private:
|
||||
|
||||
double _Timestamp() {
|
||||
static Timer timer;
|
||||
return ( double ) timer.elapsed() / 1000;
|
||||
}
|
||||
|
||||
bool _ConstructCH( _DynamicGraph* _graph );
|
||||
|
||||
void _Dijkstra( const unsigned maxDistance, const int maxNodes, _ThreadData* const data ){
|
||||
|
||||
_Heap& heap = data->heap;
|
||||
|
||||
int nodes = 0;
|
||||
while ( heap.Size() > 0 ) {
|
||||
const NodeID node = heap.DeleteMin();
|
||||
const unsigned distance = heap.GetKey( node );
|
||||
if ( nodes++ > maxNodes )
|
||||
return;
|
||||
//Destination settled?
|
||||
if ( distance > maxDistance )
|
||||
return;
|
||||
|
||||
//iterate over all edges of node
|
||||
for ( _DynamicGraph::EdgeIterator edge = _graph->BeginEdges( node ), endEdges = _graph->EndEdges( node ); edge != endEdges; ++edge ) {
|
||||
const _EdgeData& data = _graph->GetEdgeData( edge );
|
||||
if ( !data.forward )
|
||||
continue;
|
||||
const NodeID to = _graph->GetTarget( edge );
|
||||
const unsigned toDistance = distance + data.distance;
|
||||
|
||||
//New Node discovered -> Add to Heap + Node Info Storage
|
||||
if ( !heap.WasInserted( to ) )
|
||||
heap.Insert( to, toDistance, _HeapData() );
|
||||
|
||||
//Found a shorter Path -> Update distance
|
||||
else if ( toDistance < heap.GetKey( to ) ) {
|
||||
heap.DecreaseKey( to, toDistance );
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Computes the contraction priority of a node via a simulated
// contraction; lower priorities are contracted earlier.
double _Evaluate( _ThreadData* const data, _PriorityData* const nodeData, NodeID node ){
	_ContractionInformation stats;

	//perform simulated contraction
	_Contract< true > ( data, node, &stats );

	// Priority = 2 * edge quotient + original-edge quotient + depth;
	// fall back to the depth alone when nothing would be deleted.
	double result;
	if ( stats.edgesDeleted == 0 || stats.originalEdgesDeleted == 0 ) {
		result = 1 * nodeData->depth;
	} else {
		const double edgeQuotient = (( double ) stats.edgesAdded ) / stats.edgesDeleted;
		const double originalQuotient = (( double ) stats.originalEdgesAdded ) / stats.originalEdgesDeleted;
		result = 2 * edgeQuotient + 1 * originalQuotient + 1 * nodeData->depth;
	}
	assert( result >= 0 );
	return result;
}
|
||||
|
||||
|
||||
// Contracts a single node: for every incoming edge it runs a bounded
// witness search ( _Dijkstra ) and inserts a shortcut pair whenever no
// witness path at most as short as the via-node path exists.
// With Simulate == true nothing is modified; only *stats is filled so
// that _Evaluate can derive a priority. With Simulate == false the new
// shortcuts are appended to data->insertedEdges ( deduplicated at the
// end ) — they are flushed into the graph later by the caller.
template< bool Simulate > bool _Contract( _ThreadData* const data, NodeID node, _ContractionInformation* const stats = NULL ) {
	_Heap& heap = data->heap;
	//std::vector< Witness >& witnessList = data->witnessList;
	// remember where this node's shortcuts start, for the dedup pass below
	int insertedEdgesSize = data->insertedEdges.size();
	std::vector< _ImportEdge >& insertedEdges = data->insertedEdges;

	// one witness search per incoming edge ( edges with .backward set )
	for ( _DynamicGraph::EdgeIterator inEdge = _graph->BeginEdges( node ), endInEdges = _graph->EndEdges( node ); inEdge != endInEdges; ++inEdge ) {
		const _EdgeData& inData = _graph->GetEdgeData( inEdge );
		const NodeID source = _graph->GetTarget( inEdge );
		if ( Simulate ) {
			assert( stats != NULL );
			stats->edgesDeleted++;
			stats->originalEdgesDeleted += inData.originalEdges;
		}
		if ( !inData.backward )
			continue;

		// seed the search at the in-neighbour; also seed the node itself
		// so paths through it are accounted for ( unless it is a loop )
		heap.Clear();
		heap.Insert( source, 0, _HeapData() );
		if ( node != source )
			heap.Insert( node, inData.distance, _HeapData() );
		unsigned maxDistance = 0;

		// seed every via-node target distance; maxDistance bounds the search
		for ( _DynamicGraph::EdgeIterator outEdge = _graph->BeginEdges( node ), endOutEdges = _graph->EndEdges( node ); outEdge != endOutEdges; ++outEdge ) {
			const _EdgeData& outData = _graph->GetEdgeData( outEdge );
			if ( !outData.forward )
				continue;
			const NodeID target = _graph->GetTarget( outEdge );
			const unsigned pathDistance = inData.distance + outData.distance;
			maxDistance = std::max( maxDistance, pathDistance );
			if ( !heap.WasInserted( target ) )
				heap.Insert( target, pathDistance, _HeapData() );
			else if ( pathDistance < heap.GetKey( target ) )
				heap.DecreaseKey( target, pathDistance );
		}

		// simulation uses a smaller node budget to stay cheap
		if ( Simulate )
			_Dijkstra( maxDistance, 500, data );
		else
			_Dijkstra( maxDistance, 1000, data );

		// a shortcut source -> target is needed iff the witness search did
		// not beat the via-node path distance
		for ( _DynamicGraph::EdgeIterator outEdge = _graph->BeginEdges( node ), endOutEdges = _graph->EndEdges( node ); outEdge != endOutEdges; ++outEdge ) {
			const _EdgeData& outData = _graph->GetEdgeData( outEdge );
			if ( !outData.forward )
				continue;
			const NodeID target = _graph->GetTarget( outEdge );
			const int pathDistance = inData.distance + outData.distance;
			const int distance = heap.GetKey( target );

			if ( pathDistance <= distance ) {
				if ( Simulate ) {
					assert( stats != NULL );
					// each shortcut is stored as a forward/backward pair
					stats->edgesAdded += 2;
					stats->originalEdgesAdded += 2 * ( outData.originalEdges + inData.originalEdges );
				} else {
					// materialize the forward shortcut ...
					_ImportEdge newEdge;
					newEdge.source = source;
					newEdge.target = target;
					newEdge.data.distance = pathDistance;
					newEdge.data.forward = true;
					newEdge.data.backward = false;
					newEdge.data.middle = node;
					newEdge.data.shortcut = true;
					newEdge.data.originalEdges = outData.originalEdges + inData.originalEdges;
					insertedEdges.push_back( newEdge );
					// ... and its reversed backward twin
					std::swap( newEdge.source, newEdge.target );
					newEdge.data.forward = false;
					newEdge.data.backward = true;
					insertedEdges.push_back( newEdge );
				}
			}
			/*else if ( !Simulate ) {
				Witness witness;
				witness.source = source;
				witness.target = target;
				witness.middle = node;
				witnessList.push_back( witness );
			}*/
		}
	}

	// merge duplicate shortcuts ( forward + backward twins between the same
	// pair ) by OR-ing their direction flags; compacts in place
	if ( !Simulate ) {
		for ( int i = insertedEdgesSize, iend = insertedEdges.size(); i < iend; i++ ) {
			bool found = false;
			for ( int other = i + 1 ; other < iend ; ++other ) {
				if ( insertedEdges[other].source != insertedEdges[i].source )
					continue;
				if ( insertedEdges[other].target != insertedEdges[i].target )
					continue;
				if ( insertedEdges[other].data.distance != insertedEdges[i].data.distance )
					continue;
				if ( insertedEdges[other].data.shortcut != insertedEdges[i].data.shortcut )
					continue;
				insertedEdges[other].data.forward |= insertedEdges[i].data.forward;
				insertedEdges[other].data.backward |= insertedEdges[i].data.backward;
				found = true;
				break;
			}
			if ( !found )
				insertedEdges[insertedEdgesSize++] = insertedEdges[i];
		}
		insertedEdges.resize( insertedEdgesSize );
	}

	return true;
}
|
||||
|
||||
// Removes all edges pointing to 'node' from its neighbours' adjacency
// lists, called after the node has been contracted.
// ( name keeps the historical "Incomming" spelling — part of the interface )
bool _DeleteIncommingEdges( _ThreadData* const data, NodeID node ) {
	std::vector< NodeID >& neighbours = data->neighbours;
	neighbours.clear();

	//collect all adjacent nodes, skipping self loops
	for ( _DynamicGraph::EdgeIterator edge = _graph->BeginEdges( node ) ; edge < _graph->EndEdges( node ) ; ++edge ) {
		const NodeID neighbour = _graph->GetTarget( edge );
		if ( neighbour != node )
			neighbours.push_back( neighbour );
	}

	//eliminate duplicate entries ( forward + backward edges )
	std::sort( neighbours.begin(), neighbours.end() );
	neighbours.erase( std::unique( neighbours.begin(), neighbours.end() ), neighbours.end() );

	for ( std::vector< NodeID >::const_iterator neighbour = neighbours.begin(); neighbour != neighbours.end(); ++neighbour )
		_graph->DeleteEdgesTo( *neighbour, node );

	return true;
}
|
||||
|
||||
bool _UpdateNeighbours( std::vector< double >* priorities, std::vector< _PriorityData >* const nodeData, _ThreadData* const data, NodeID node ) {
|
||||
std::vector< NodeID >& neighbours = data->neighbours;
|
||||
neighbours.clear();
|
||||
|
||||
//find all neighbours
|
||||
for ( _DynamicGraph::EdgeIterator e = _graph->BeginEdges( node ) ; e < _graph->EndEdges( node ) ; ++e ) {
|
||||
const NodeID u = _graph->GetTarget( e );
|
||||
if ( u == node )
|
||||
continue;
|
||||
neighbours.push_back( u );
|
||||
( *nodeData )[u].depth = std::max(( *nodeData )[node].depth + 1, ( *nodeData )[u].depth );
|
||||
}
|
||||
//eliminate duplicate entries ( forward + backward edges )
|
||||
std::sort( neighbours.begin(), neighbours.end() );
|
||||
neighbours.resize( std::unique( neighbours.begin(), neighbours.end() ) - neighbours.begin() );
|
||||
|
||||
for ( int i = 0, e = ( int ) neighbours.size(); i < e; ++i ) {
|
||||
const NodeID u = neighbours[i];
|
||||
( *priorities )[u] = _Evaluate( data, &( *nodeData )[u], u );
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool _IsIndependent( const std::vector< double >& priorities, const std::vector< _PriorityData >& nodeData, _ThreadData* const data, NodeID node ) {
|
||||
const double priority = priorities[node];
|
||||
|
||||
std::vector< NodeID >& neighbours = data->neighbours;
|
||||
neighbours.clear();
|
||||
|
||||
for ( _DynamicGraph::EdgeIterator e = _graph->BeginEdges( node ) ; e < _graph->EndEdges( node ) ; ++e ) {
|
||||
const NodeID target = _graph->GetTarget( e );
|
||||
const double targetPriority = priorities[target];
|
||||
assert( targetPriority >= 0 );
|
||||
//found a neighbour with lower priority?
|
||||
if ( priority > targetPriority )
|
||||
return false;
|
||||
//tie breaking
|
||||
if ( priority == targetPriority && nodeData[node].bias < nodeData[target].bias )
|
||||
return false;
|
||||
neighbours.push_back( target );
|
||||
}
|
||||
|
||||
std::sort( neighbours.begin(), neighbours.end() );
|
||||
neighbours.resize( std::unique( neighbours.begin(), neighbours.end() ) - neighbours.begin() );
|
||||
|
||||
//examine all neighbours that are at most 2 hops away
|
||||
for ( std::vector< NodeID >::const_iterator i = neighbours.begin(), lastNode = neighbours.end(); i != lastNode; ++i ) {
|
||||
const NodeID u = *i;
|
||||
|
||||
for ( _DynamicGraph::EdgeIterator e = _graph->BeginEdges( u ) ; e < _graph->EndEdges( u ) ; ++e ) {
|
||||
const NodeID target = _graph->GetTarget( e );
|
||||
|
||||
const double targetPriority = priorities[target];
|
||||
assert( targetPriority >= 0 );
|
||||
//found a neighbour with lower priority?
|
||||
if ( priority > targetPriority )
|
||||
return false;
|
||||
//tie breaking
|
||||
if ( priority == targetPriority && nodeData[node].bias < nodeData[target].bias )
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
_DynamicGraph* _graph;
|
||||
std::vector< Witness > _witnessList;
|
||||
std::vector< _ImportEdge > _loops;
|
||||
};
|
||||
|
||||
#endif // CONTRACTOR_H_INCLUDED
|
||||
1
monav/contractionhierarchies/interfaces
Symbolic link
1
monav/contractionhierarchies/interfaces
Symbolic link
@@ -0,0 +1 @@
|
||||
../interfaces/
|
||||
1
monav/contractionhierarchies/utils
Symbolic link
1
monav/contractionhierarchies/utils
Symbolic link
@@ -0,0 +1 @@
|
||||
../utils/
|
||||
Reference in New Issue
Block a user