statistical encoding
This commit is contained in:
parent
f8dee5b7d1
commit
ccf6641bad
41 changed files with 4543 additions and 1965 deletions
26
brouter-codec/pom.xml
Normal file
26
brouter-codec/pom.xml
Normal file
|
@ -0,0 +1,26 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
<!-- Maven module descriptor for brouter-codec: the statistical
     encoding/decoding library for routing-data micro-caches.
     Depends only on the sibling brouter-util module (plus junit for tests). -->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <parent>
    <groupId>org.btools</groupId>
    <artifactId>brouter</artifactId>
    <version>1.2</version>
    <relativePath>../pom.xml</relativePath>
  </parent>
  <artifactId>brouter-codec</artifactId>
  <packaging>jar</packaging>

  <dependencies>
    <dependency>
      <groupId>org.btools</groupId>
      <artifactId>brouter-util</artifactId>
      <version>${project.version}</version>
    </dependency>
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <scope>test</scope>
    </dependency>
  </dependencies>
</project>
|
31
brouter-codec/src/main/java/btools/codec/DataBuffers.java
Normal file
31
brouter-codec/src/main/java/btools/codec/DataBuffers.java
Normal file
|
@ -0,0 +1,31 @@
|
|||
package btools.codec;
|
||||
|
||||
/**
|
||||
* Container for some re-usable databuffers for the decoder
|
||||
*/
|
||||
public final class DataBuffers
|
||||
{
|
||||
public byte[] iobuffer;
|
||||
public byte[] tagbuf1 = new byte[256];
|
||||
public byte[] bbuf1 = new byte[65636];
|
||||
public int[] ibuf1 = new int[4096];
|
||||
public int[] ibuf2 = new int[2048];
|
||||
public int[] ibuf3 = new int[2048];
|
||||
public int[] alon = new int[2048];
|
||||
public int[] alat = new int[2048];
|
||||
|
||||
public DataBuffers()
|
||||
{
|
||||
this( new byte[65636] );
|
||||
}
|
||||
|
||||
/**
|
||||
* construct a set of databuffers except
|
||||
* for 'iobuffer', where the given array is used
|
||||
*/
|
||||
public DataBuffers( byte[] iobuffer )
|
||||
{
|
||||
this.iobuffer = iobuffer;
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,62 @@
|
|||
package btools.codec;
|
||||
|
||||
/**
|
||||
* Special integer fifo suitable for 3-pass encoding
|
||||
*/
|
||||
public class IntegerFifo3Pass
|
||||
{
|
||||
private int[] a;
|
||||
private int size;
|
||||
private int pos;
|
||||
|
||||
private int pass;
|
||||
|
||||
public IntegerFifo3Pass( int capacity )
|
||||
{
|
||||
a = capacity < 4 ? new int[4] : new int[capacity];
|
||||
}
|
||||
|
||||
/**
|
||||
* Starts a new encoding pass and resets the reading pointer
|
||||
* from the stats collected in pass2 and writes that to the given context
|
||||
*/
|
||||
public void init()
|
||||
{
|
||||
pass++;
|
||||
pos = 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* writes to the fifo in pass2
|
||||
*/
|
||||
public void add( int value )
|
||||
{
|
||||
if ( pass == 2 )
|
||||
{
|
||||
if ( size == a.length )
|
||||
{
|
||||
int[] aa = new int[2 * size];
|
||||
System.arraycopy( a, 0, aa, 0, size );
|
||||
a = aa;
|
||||
}
|
||||
a[size++] = value;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* reads from the fifo in pass3 (in pass1/2 returns just 1)
|
||||
*/
|
||||
public int getNext()
|
||||
{
|
||||
return pass == 3 ? get( pos++ ) : 1;
|
||||
}
|
||||
|
||||
private int get( int idx )
|
||||
{
|
||||
if ( idx >= size )
|
||||
{
|
||||
throw new IndexOutOfBoundsException( "list size=" + size + " idx=" + idx );
|
||||
}
|
||||
return a[idx];
|
||||
}
|
||||
}
|
|
@ -0,0 +1,87 @@
|
|||
package btools.codec;
|
||||
|
||||
/**
|
||||
* Simple container for a list of lists of integers
|
||||
*/
|
||||
public class LinkedListContainer
|
||||
{
|
||||
private int[] ia; // prev, data, prev, data, ...
|
||||
private int size;
|
||||
private int[] startpointer; // 0=void, odd=head-data-cell
|
||||
private int listpointer;
|
||||
|
||||
/**
|
||||
* Construct a container for the given number of lists
|
||||
*
|
||||
* If no default-buffer is given, an int[nlists*4] is constructed,
|
||||
* able to hold 2 entries per list on average
|
||||
*
|
||||
* @param nlists the number of lists
|
||||
* @param defaultbuffer an optional data array for re-use (gets replaced if too small)
|
||||
*/
|
||||
public LinkedListContainer( int nlists, int[] defaultbuffer )
|
||||
{
|
||||
ia = defaultbuffer == null ? new int[nlists*4] : defaultbuffer;
|
||||
startpointer = new int[nlists];
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a data element to the given list
|
||||
*
|
||||
* @param listNr the list to add the data to
|
||||
* @param data the data value
|
||||
*/
|
||||
public void addDataElement( int listNr, int data )
|
||||
{
|
||||
if ( size + 2 > ia.length )
|
||||
{
|
||||
resize();
|
||||
}
|
||||
ia[size++] = startpointer[ listNr ];
|
||||
startpointer[ listNr ] = size;
|
||||
ia[size++] = data;
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize a list for reading
|
||||
*
|
||||
* @param listNr the list to initialize
|
||||
* @return the number of entries in that list
|
||||
*/
|
||||
public int initList( int listNr )
|
||||
{
|
||||
int cnt = 0;
|
||||
int lp = listpointer = startpointer[ listNr ];
|
||||
while( lp != 0 )
|
||||
{
|
||||
lp = ia[ lp-1 ];
|
||||
cnt++;
|
||||
}
|
||||
return cnt;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a data element from the list previously initialized.
|
||||
* Data elements are return in reverse order (lifo)
|
||||
*
|
||||
* @return the data element
|
||||
* @throws IllegalArgumentException if no more element
|
||||
*/
|
||||
public int getDataElement()
|
||||
{
|
||||
if ( listpointer == 0 )
|
||||
{
|
||||
throw new IllegalArgumentException( "no more element!" );
|
||||
}
|
||||
int data = ia[ listpointer ];
|
||||
listpointer = ia[ listpointer-1 ];
|
||||
return data;
|
||||
}
|
||||
|
||||
private void resize()
|
||||
{
|
||||
int[] ia2 = new int[2*ia.length];
|
||||
System.arraycopy( ia, 0, ia2, 0, ia.length );
|
||||
ia = ia2;
|
||||
}
|
||||
}
|
306
brouter-codec/src/main/java/btools/codec/MicroCache.java
Normal file
306
brouter-codec/src/main/java/btools/codec/MicroCache.java
Normal file
|
@ -0,0 +1,306 @@
|
|||
package btools.codec;

import btools.util.ByteDataWriter;

/**
 * a micro-cache is a data cache for an area of some square kilometers or some
 * hundreds or thousands nodes
 *
 * This is the basic io-unit: always a full microcache is loaded from the
 * data-file if a node is requested at a position not yet covered by the caches
 * already loaded
 *
 * The nodes are represented in a compact way (typical 20-50 bytes per node),
 * but in a way that they do not depend on each other, and garbage collection is
 * supported to remove the nodes already consumed from the cache.
 *
 * The cache-internal data representation is different from that in the
 * data-files, where a cache is encoded as a whole, allowing more
 * redundancy-removal for a more compact encoding
 */
public class MicroCache extends ByteDataWriter
{
  protected int[] faid;  // shrunk 32-bit node ids; the binary search in getAndClear relies on ascending order
  protected int[] fapos; // end offset (exclusive) of node i's body in ab; top bit set = node deleted
  protected int size = 0; // number of nodes currently in the cache

  private int delcount = 0; // number of nodes marked deleted
  private int delbytes = 0; // total bytes occupied by deleted nodes
  private int p2size; // next power of 2 of size

  // cache control: a virgin cache can be
  // put to ghost state for later recovery
  public boolean virgin = true;
  public boolean ghost = false;

  public static boolean debug = false;

  protected MicroCache( byte[] ab )
  {
    super( ab );
  }

  /** @return an empty placeholder cache; its id/encode methods all throw */
  public static MicroCache emptyCache()
  {
    return new MicroCache( null ); // TODO: singleton?
  }

  /**
   * Reset deletion book-keeping for the given node count and recompute
   * p2size as the largest power of two not exceeding 'size'.
   */
  protected void init( int size )
  {
    this.size = size;
    delcount = 0;
    delbytes = 0;
    p2size = 0x40000000;
    while (p2size > size)
      p2size >>= 1;
  }

  /**
   * Register the node just written (its body ends at the current write
   * offset inherited from ByteDataWriter) under the given 64-bit id.
   */
  public void finishNode( long id )
  {
    fapos[size] = aboffset;
    faid[size] = shrinkId( id );
    size++;
  }

  /** Roll the write offset back to the start of the node being written. */
  public void discardNode()
  {
    aboffset = startPos( size );
  }

  /** @return the number of nodes in this cache */
  public int getSize()
  {
    return size;
  }

  /** @return the size of the backing data array in bytes (0 for the empty cache) */
  public int getDataSize()
  {
    return ab == null ? 0 : ab.length;
  }

  /**
   * Set the internal reader (aboffset, aboffsetEnd) to the body data for the given id
   *
   * If a node is not found in an empty cache, this is usually an edge-effect
   * (data-file does not exist or neighboured data-files of different age),
   * but it can as well be a symptom of a node-identity breaking bug.
   *
   * Current implementation always returns false for not-found, however, for
   * regression testing, at least for the case that is most likely a bug
   * (node found but marked as deleted = ready for garbage collection
   * = already consumed) the RunException should be re-enabled
   *
   * @return true if id was found
   */
  public boolean getAndClear( long id64 )
  {
    if ( size == 0 )
    {
      return false;
    }
    int id = shrinkId( id64 );
    int[] a = faid;
    int offset = p2size;
    int n = 0;

    // branch-free binary search: halve the probe offset each round,
    // keeping n at the largest index with a[n] <= id
    while (offset > 0)
    {
      int nn = n + offset;
      if ( nn < size && a[nn] <= id )
      {
        n = nn;
      }
      offset >>= 1;
    }
    if ( a[n] == id )
    {
      if ( ( fapos[n] & 0x80000000 ) == 0 )
      {
        aboffset = startPos( n );
        aboffsetEnd = fapos[n];
        fapos[n] |= 0x80000000; // mark deleted
        delbytes += aboffsetEnd - aboffset;
        delcount++;
        return true;
      }
      else // .. marked as deleted
      {
        // throw new RuntimeException( "MicroCache: node already consumed: id=" + id );
      }
    }
    return false;
  }

  /** @return the start offset of node n's body (previous node's end, delete-flag masked off) */
  protected int startPos( int n )
  {
    return n > 0 ? fapos[n - 1] & 0x7fffffff : 0;
  }

  /**
   * Garbage-collect: if more than 'threshold' nodes are marked deleted,
   * compact the data array and index arrays down to the surviving nodes.
   * A collected cache is no longer 'virgin'.
   */
  public void collect( int threshold )
  {
    if ( delcount > threshold )
    {
      virgin = false;

      int nsize = size - delcount;
      if ( nsize == 0 )
      {
        faid = null;
        fapos = null;
      }
      else
      {
        int[] nfaid = new int[nsize];
        int[] nfapos = new int[nsize];
        int idx = 0;

        byte[] nab = new byte[ab.length - delbytes];
        int nab_off = 0;
        for ( int i = 0; i < size; i++ )
        {
          int pos = fapos[i];
          if ( ( pos & 0x80000000 ) == 0 ) // keep only non-deleted nodes
          {
            int start = startPos( i );
            int end = fapos[i];
            int len = end - start;
            System.arraycopy( ab, start, nab, nab_off, len );
            nfaid[idx] = faid[i];
            nab_off += len;
            nfapos[idx] = nab_off;
            idx++;
          }
        }
        faid = nfaid;
        fapos = nfapos;
        ab = nab;
      }
      init( nsize );
    }
  }

  /**
   * Leave ghost state: un-delete all nodes so the cache content is
   * fully available again.
   */
  public void unGhost()
  {
    ghost = false;
    delcount = 0;
    delbytes = 0;
    for ( int i = 0; i < size; i++ )
    {
      fapos[i] &= 0x7fffffff; // clear deleted flags
    }
  }

  /**
   * @return the 64-bit global id for the given cache-position
   */
  public long getIdForIndex( int i )
  {
    int id32 = faid[i];
    return expandId( id32 );
  }

  /**
   * expand a 32-bit micro-cache-internal id into a 64-bit (lon|lat) global-id
   *
   * @see #shrinkId
   */
  public long expandId( int id32 )
  {
    throw new IllegalArgumentException( "expandId for empty cache" );
  }

  /**
   * shrink a 64-bit (lon|lat) global-id into a 32-bit micro-cache-internal id
   *
   * @see #expandId
   */
  public int shrinkId( long id64 )
  {
    throw new IllegalArgumentException( "shrinkId for empty cache" );
  }

  /**
   * @return true if the given lon/lat position is internal for that micro-cache
   */
  public boolean isInternal( int ilon, int ilat )
  {
    throw new IllegalArgumentException( "isInternal for empty cache" );
  }

  /**
   * (statistically) encode the micro-cache into the format used in the datafiles
   *
   * @param buffer
   *          byte array to encode into (considered big enough)
   * @return the size of the encoded data
   */
  public int encodeMicroCache( byte[] buffer )
  {
    throw new IllegalArgumentException( "encodeMicroCache for empty cache" );
  }

  /**
   * Compare the content of this microcache to another
   *
   * @return null if equals, else a diff-report
   */
  public String compareWith( MicroCache mc )
  {
    String msg = _compareWith( mc );
    if ( msg != null )
    {
      StringBuilder sb = new StringBuilder( msg );
      sb.append( "\nencode cache:\n" ).append( summary() );
      sb.append( "\ndecode cache:\n" ).append( mc.summary() );
      return sb.toString();
    }
    return null;
  }

  // one-line-per-node dump of the index arrays, used in diff-reports
  private String summary()
  {
    StringBuilder sb = new StringBuilder( "size=" + size + " aboffset=" + aboffset );
    for ( int i = 0; i < size; i++ )
    {
      sb.append( "\nidx=" + i + " faid=" + faid[i] + " fapos=" + fapos[i] );
    }
    return sb.toString();
  }

  // compares sizes, ids, body bytes and end-offsets; returns the first
  // difference found as a message, or null if fully equal
  private String _compareWith( MicroCache mc )
  {
    if ( size != mc.size )
    {
      return "size missmatch: " + size + "->" + mc.size;
    }
    for ( int i = 0; i < size; i++ )
    {
      if ( faid[i] != mc.faid[i] )
      {
        return "faid missmatch at index " + i + ":" + faid[i] + "->" + mc.faid[i];
      }
      int start = i > 0 ? fapos[i - 1] : 0;
      int end = fapos[i] < mc.fapos[i] ? fapos[i] : mc.fapos[i];
      int len = end - start;
      for ( int offset = 0; offset < len; offset++ )
      {
        if ( mc.ab.length <= start + offset )
        {
          return "data buffer too small";
        }
        if ( ab[start + offset] != mc.ab[start + offset] )
        {
          return "data missmatch at index " + i + " offset=" + offset;
        }
      }
      if ( fapos[i] != mc.fapos[i] )
      {
        return "fapos missmatch at index " + i + ":" + fapos[i] + "->" + mc.fapos[i];
      }
    }
    if ( aboffset != mc.aboffset )
    {
      return "datasize missmatch: " + aboffset + "->" + mc.aboffset;
    }
    return null;
  }
}
|
99
brouter-codec/src/main/java/btools/codec/MicroCache1.java
Normal file
99
brouter-codec/src/main/java/btools/codec/MicroCache1.java
Normal file
|
@ -0,0 +1,99 @@
|
|||
package btools.codec;

import btools.util.ByteDataWriter;

/**
 * MicroCache1 is the old data format as of brouter 1.1 that does not allow to
 * filter out unaccessable nodes at the beginning of the cache pipeline
 *
 * Kept for backward compatibility
 */
public final class MicroCache1 extends MicroCache
{
  // cell base coordinates derived from the 80-per-degree tile index
  private int lonIdxBase;
  private int latIdxBase;

  /**
   * Writer constructor: prepare an empty cache of the given capacity
   * for the 80-grid tile (lonIdx80, latIdx80).
   */
  public MicroCache1( int size, byte[] databuffer, int lonIdx80, int latIdx80 ) throws Exception
  {
    super( databuffer ); // sets ab=databuffer, aboffset=0
    faid = new int[size];
    fapos = new int[size];
    this.size = 0;
    lonIdxBase = ( lonIdx80 / 5 ) * 62500 + 31250;
    latIdxBase = ( latIdx80 / 5 ) * 62500 + 31250;
  }

  /**
   * Reader constructor: decode a cache from the given raw file buffer.
   * Strips the per-node headers (id + body-size) so that only the net
   * body data remains in 'ab'.
   */
  public MicroCache1( byte[] databuffer, int lonIdx80, int latIdx80 ) throws Exception
  {
    super( databuffer ); // sets ab=databuffer, aboffset=0
    lonIdxBase = ( lonIdx80 / 5 ) * 62500 + 31250;
    latIdxBase = ( latIdx80 / 5 ) * 62500 + 31250;

    size = readInt();

    // get net size
    int nbytes = 0;
    for ( int i = 0; i < size; i++ )
    {
      aboffset += 4; // skip the 4-byte node id
      int bodySize = readVarLengthUnsigned();
      aboffset += bodySize;
      nbytes += bodySize;
    }

    // new array with only net data
    byte[] nab = new byte[nbytes];
    aboffset = 4; // rewind to just after the size-int
    int noffset = 0;
    faid = new int[size];
    fapos = new int[size];

    for ( int i = 0; i < size; i++ )
    {
      faid[i] = readInt() ^ 0x8000; // flip lat-sign for correct ordering

      int bodySize = readVarLengthUnsigned();
      System.arraycopy( ab, aboffset, nab, noffset, bodySize );
      aboffset += bodySize;
      noffset += bodySize;
      fapos[i] = noffset;
    }

    ab = nab;
    aboffset = noffset;
    init( size );
  }

  @Override
  public long expandId( int id32 )
  {
    // high 16 bits = signed lon offset, low 16 bits = lat offset (sign-flipped)
    int lon32 = lonIdxBase + (short) ( id32 >> 16 );
    int lat32 = latIdxBase + (short) ( ( id32 & 0xffff ) ^ 0x8000 );
    return ( (long) lon32 ) << 32 | lat32;
  }

  @Override
  public int shrinkId( long id64 )
  {
    int lon32 = (int) ( id64 >> 32 );
    int lat32 = (int) ( id64 & 0xffffffff );
    return ( lon32 - lonIdxBase ) << 16 | ( ( ( lat32 - latIdxBase ) & 0xffff ) ^ 0x8000 );
  }

  /**
   * Encode back into the v1 file format: node count, then per node
   * (id with lat-sign flipped back, var-length body size, body bytes).
   */
  @Override
  public int encodeMicroCache( byte[] buffer )
  {
    ByteDataWriter dos = new ByteDataWriter( buffer );
    dos.writeInt( size );
    for ( int n = 0; n < size; n++ )
    {
      dos.writeInt( faid[n] ^ 0x8000 );
      int start = n > 0 ? fapos[n - 1] : 0;
      int end = fapos[n];
      int len = end - start;
      dos.writeVarLengthUnsigned( len );
      dos.write( ab, start, len );
    }
    return dos.size();
  }
}
|
444
brouter-codec/src/main/java/btools/codec/MicroCache2.java
Normal file
444
brouter-codec/src/main/java/btools/codec/MicroCache2.java
Normal file
|
@ -0,0 +1,444 @@
|
|||
package btools.codec;

import java.util.BitSet;
import java.util.HashMap;

import btools.util.ByteArrayUnifier;
import btools.util.ByteDataReader;

/**
 * MicroCache2 is the new format that uses statistical encoding and
 * is able to do access filtering and waypoint matching during encoding
 */
public final class MicroCache2 extends MicroCache
{
  // south-west corner and edge length of this cache's cell, in coordinate units
  private int lonBase;
  private int latBase;
  private int cellsize;

  /**
   * Writer constructor: prepare an empty cache of the given capacity
   * for the cell (lonIdx, latIdx) on a grid with 'divisor' cells per degree.
   */
  public MicroCache2( int size, byte[] databuffer, int lonIdx, int latIdx, int divisor ) throws Exception
  {
    super( databuffer ); // sets ab=databuffer, aboffset=0

    faid = new int[size];
    fapos = new int[size];
    this.size = 0;
    cellsize = 1000000 / divisor;
    lonBase = lonIdx*cellsize;
    latBase = latIdx*cellsize;
  }

  /**
   * Read 'len' bytes at the current read position, de-duplicated through
   * the given unifier so equal byte sequences share one array instance.
   */
  public byte[] readUnified( int len, ByteArrayUnifier u )
  {
    byte[] b = u.unify( ab, aboffset, len );
    aboffset += len;
    return b;
  }

  /**
   * Decoder constructor: statistically decode a cache from
   * dataBuffers.iobuffer into the internal representation, filtering out
   * links rejected by 'wayValidator' and feeding link geometry to
   * 'waypointMatcher' (both optional).
   */
  public MicroCache2( DataBuffers dataBuffers, int lonIdx, int latIdx, int divisor, TagValueValidator wayValidator, WaypointMatcher waypointMatcher ) throws Exception
  {
    super( null );
    cellsize = 1000000 / divisor;
    lonBase = lonIdx*cellsize;
    latBase = latIdx*cellsize;

    StatCoderContext bc = new StatCoderContext( dataBuffers.iobuffer );

    // decode the value dictionaries written at the start of the stream;
    // order must match the encoder in encodeMicroCache
    TagValueCoder wayTagCoder = new TagValueCoder( bc, dataBuffers.tagbuf1, wayValidator );
    TagValueCoder nodeTagCoder = new TagValueCoder( bc, dataBuffers.tagbuf1, null );
    NoisyDiffCoder nodeIdxDiff = new NoisyDiffCoder( bc );
    NoisyDiffCoder nodeEleDiff = new NoisyDiffCoder( bc );
    NoisyDiffCoder extLonDiff = new NoisyDiffCoder(bc);
    NoisyDiffCoder extLatDiff = new NoisyDiffCoder(bc);
    NoisyDiffCoder transEleDiff = new NoisyDiffCoder( bc );

    size = bc.decodeNoisyNumber( 5 );
    // re-use scratch arrays where large enough, else allocate
    faid = size > dataBuffers.ibuf2.length ? new int[size] : dataBuffers.ibuf2;
    fapos = size > dataBuffers.ibuf3.length ? new int[size] : dataBuffers.ibuf3;

    int[] alon = size > dataBuffers.alon.length ? new int[size] : dataBuffers.alon;
    int[] alat = size > dataBuffers.alat.length ? new int[size] : dataBuffers.alat;

    if ( debug ) System.out.println( "*** decoding cache of size=" + size );

    bc.decodeSortedArray( faid, 0, size, 0x20000000, 0 );

    for( int n = 0; n<size; n++ )
    {
      long id64 = expandId( faid[n] );
      alon[n] = (int)(id64 >> 32);
      alat[n] = (int)(id64 & 0xffffffff);
    }

    int netdatasize = bc.decodeNoisyNumber( 10 );
    ab = netdatasize > dataBuffers.bbuf1.length ? new byte[netdatasize] : dataBuffers.bbuf1;
    aboffset = 0;
    BitSet validNodes = new BitSet( size ); // nodes with at least one accepted link
    int finaldatasize = 0;

    // collects, per target node, the source nodes of accepted internal
    // forward links, to append the reverse links afterwards
    LinkedListContainer reverseLinks = new LinkedListContainer( size, dataBuffers.ibuf1 );

    int selev = 0;
    for( int n=0; n<size; n++ ) // loop over nodes
    {
      int ilon = alon[n];
      int ilat = alat[n];

      // future feature escape (turn restrictions?)
      for(;;)
      {
        int featureId = bc.decodeVarBits();
        if ( featureId == 0 ) break;
        int bitsize = bc.decodeNoisyNumber( 5 );
        for( int i=0; i< bitsize; i++ ) bc.decodeBit(); // just skip
      }

      selev += nodeEleDiff.decodeSignedValue();
      writeShort( (short) selev );
      writeVarBytes( nodeTagCoder.decodeTagValueSet() );

      int links = bc.decodeNoisyNumber( 1 );
      if ( debug ) System.out.println( "*** decoding node with links=" + links );
      for( int li=0; li<links; li++ )
      {
        int startPointer = aboffset;
        int sizeoffset = writeSizePlaceHolder();
        int nodeIdx = n + nodeIdxDiff.decodeSignedValue();

        int dlon_remaining;
        int dlat_remaining;

        boolean isReverse = false;
        if ( nodeIdx != n ) // internal (forward-) link
        {
          writeVarLengthSigned( dlon_remaining = alon[nodeIdx] - ilon );
          writeVarLengthSigned( dlat_remaining = alat[nodeIdx] - ilat );
        }
        else
        {
          isReverse = bc.decodeBit();
          writeVarLengthSigned( dlon_remaining = extLonDiff.decodeSignedValue() );
          writeVarLengthSigned( dlat_remaining = extLatDiff.decodeSignedValue() );
        }
        // null when the validator rejects the way -> link gets dropped below
        byte[] wayTags = wayTagCoder.decodeTagValueSet();

        if ( wayTags != null )
        {
          validNodes.set( n, true ); // mark source-node valid
          if ( nodeIdx != n ) // valid internal (forward-) link
          {
            reverseLinks.addDataElement( nodeIdx, n ); // register reverse link
            finaldatasize += 1 + aboffset-startPointer; // reserve place for reverse
            validNodes.set( nodeIdx, true ); // mark target-node valid
          }
        }

        writeModeAndDesc( isReverse, wayTags );
        if ( !isReverse ) // write geometry for forward links only
        {
          WaypointMatcher matcher = wayTags == null ? null : waypointMatcher;
          if ( matcher != null ) matcher.startNode( ilon, ilat );
          int ilontarget = ilon + dlon_remaining;
          int ilattarget = ilat + dlat_remaining;

          int transcount = bc.decodeVarBits();
          if ( debug ) System.out.println( "*** decoding geometry with count=" + transcount );
          int count = transcount+1;
          for( int i=0; i<transcount; i++ )
          {
            // transition points are coded as deltas predicted from the
            // average remaining step towards the link target
            int dlon = bc.decodePredictedValue( dlon_remaining/count );
            int dlat = bc.decodePredictedValue( dlat_remaining/count );
            dlon_remaining -= dlon;
            dlat_remaining -= dlat;
            count--;
            writeVarLengthSigned( dlon );
            writeVarLengthSigned( dlat );
            writeVarLengthSigned( transEleDiff.decodeSignedValue() );

            if ( matcher != null ) matcher.transferNode( ilontarget - dlon_remaining, ilattarget - dlat_remaining );
          }
          if ( matcher != null ) matcher.endNode( ilontarget, ilattarget );
        }
        if ( wayTags == null )
        {
          aboffset = startPointer; // not a valid link, delete it
        }
        else
        {
          injectSize( sizeoffset );
        }
      }
      fapos[n] = aboffset;
    }

    // calculate final data size
    int finalsize = 0;
    for( int i=0; i<size; i++ )
    {
      int startpos = i > 0 ? fapos[i-1] : 0;
      int endpos = fapos[i];
      if ( validNodes.get( i ) )
      {
        finaldatasize += endpos-startpos;
        finalsize++;
      }
    }
    // append the reverse links at the end of each node
    byte[] abOld = ab;
    int[] faidOld = faid;
    int[] faposOld = fapos;
    int sizeOld = size;
    ab = new byte[finaldatasize];
    faid = new int[finalsize];
    fapos = new int[finalsize];
    aboffset = 0;
    size = 0;

    for( int n=0; n<sizeOld; n++ )
    {
      if ( !validNodes.get( n ) )
      {
        continue; // drop nodes without any accepted link
      }
      int startpos = n > 0 ? faposOld[n-1] : 0;
      int endpos = faposOld[n];
      int len = endpos-startpos;
      System.arraycopy( abOld, startpos, ab, aboffset, len );
      if ( debug ) System.out.println( "*** copied " + len + " bytes from " + aboffset + " for node " + n );
      aboffset += len;

      int cnt = reverseLinks.initList( n );
      if ( debug ) System.out.println( "*** appending " + cnt + " reverse links for node " + n );

      for( int ri = 0; ri < cnt; ri++ )
      {
        int nodeIdx = reverseLinks.getDataElement();
        int sizeoffset = writeSizePlaceHolder();
        writeVarLengthSigned( alon[nodeIdx] - alon[n] );
        writeVarLengthSigned( alat[nodeIdx] - alat[n] );
        writeModeAndDesc( true, null ); // reverse links carry no own description
        injectSize( sizeoffset );
      }
      faid[size] = faidOld[n];
      fapos[size] = aboffset;
      size++;
    }
    init( size );
  }

  /**
   * De-interleave the 32-bit id: odd bit positions are the lon offset,
   * even bit positions the lat offset, relative to the cell base.
   */
  @Override
  public long expandId( int id32 )
  {
    int dlon = 0;
    int dlat = 0;

    for( int bm = 1; bm < 0x8000; bm <<= 1 )
    {
      if ( (id32 & 1) != 0 ) dlon |= bm;
      if ( (id32 & 2) != 0 ) dlat |= bm;
      id32 >>= 2;
    }

    int lon32 = lonBase + dlon;
    int lat32 = latBase + dlat;

    return ((long)lon32)<<32 | lat32;
  }

  /**
   * Bit-interleave the cell-relative lon/lat offsets (15 bits each) into
   * one 32-bit id; this z-order keeps nearby nodes numerically close.
   */
  @Override
  public int shrinkId( long id64 )
  {
    int lon32 = (int)(id64 >> 32);
    int lat32 = (int)(id64 & 0xffffffff);
    int dlon = lon32 - lonBase;
    int dlat = lat32 - latBase;
    int id32 = 0;

    for( int bm = 0x4000; bm > 0; bm >>= 1 )
    {
      id32 <<= 2;
      if ( ( dlon & bm ) != 0 ) id32 |= 1;
      if ( ( dlat & bm ) != 0 ) id32 |= 2;
    }
    return id32;
  }

  @Override
  public boolean isInternal( int ilon, int ilat )
  {
    return ilon >= lonBase && ilon < lonBase + cellsize
        && ilat >= latBase && ilat < latBase + cellsize;
  }

  /**
   * Statistically encode this cache into 'buffer' using 3 passes:
   * pass 1 counts, pass 2 collects statistics, pass 3 writes the
   * final bit stream. Internal reverse links are not encoded (they
   * are reconstructed by the decoder).
   *
   * @return the size of the encoded data in bytes
   */
  @Override
  public int encodeMicroCache( byte[] buffer )
  {
    HashMap<Long,Integer> idMap = new HashMap<Long,Integer>();
    for( int n=0; n<size; n++ ) // loop over nodes
    {
      idMap.put( Long.valueOf( expandId( faid[n] ) ), Integer.valueOf( n ) );
    }

    IntegerFifo3Pass linkCounts = new IntegerFifo3Pass( 256 );
    IntegerFifo3Pass transCounts = new IntegerFifo3Pass( 256 );

    TagValueCoder wayTagCoder = new TagValueCoder();
    TagValueCoder nodeTagCoder = new TagValueCoder();
    NoisyDiffCoder nodeIdxDiff = new NoisyDiffCoder();
    NoisyDiffCoder nodeEleDiff = new NoisyDiffCoder();
    NoisyDiffCoder extLonDiff = new NoisyDiffCoder();
    NoisyDiffCoder extLatDiff = new NoisyDiffCoder();
    NoisyDiffCoder transEleDiff = new NoisyDiffCoder();

    int netdatasize = 0;

    for(int pass=1;; pass++) // 3 passes: counters, stat-collection, encoding
    {
      boolean dostats = pass == 3;
      boolean dodebug = debug && pass == 3;

      if ( pass < 3 ) netdatasize = fapos[size-1];

      StatCoderContext bc = new StatCoderContext( buffer );

      linkCounts.init();
      transCounts.init();

      // dictionary/header section; the decoder constructor reads these
      // back in exactly this order
      wayTagCoder.encodeDictionary( bc );
      if ( dostats ) bc.assignBits( "wayTagDictionary" );
      nodeTagCoder.encodeDictionary( bc );
      if ( dostats ) bc.assignBits( "nodeTagDictionary" );
      nodeIdxDiff.encodeDictionary( bc );
      nodeEleDiff.encodeDictionary( bc );
      extLonDiff.encodeDictionary( bc );
      extLatDiff.encodeDictionary( bc );
      transEleDiff.encodeDictionary( bc );
      if ( dostats ) bc.assignBits( "noisebits" );
      bc.encodeNoisyNumber( size, 5 );
      if ( dostats ) bc.assignBits( "nodecount" );
      bc.encodeSortedArray( faid, 0, size, 0x20000000, 0 );
      if ( dostats ) bc.assignBits( "node-positions" );
      bc.encodeNoisyNumber( netdatasize, 10 ); // net-size
      if ( dostats ) bc.assignBits( "netdatasize" );
      if ( dodebug ) System.out.println( "*** encoding cache of size=" + size );
      int lastSelev = 0;

      for( int n=0; n<size; n++ ) // loop over nodes
      {
        aboffset = startPos( n );
        aboffsetEnd = fapos[n];
        if ( dodebug ) System.out.println( "*** encoding node " + n + " from " + aboffset + " to " + aboffsetEnd );

        // future feature escape (turn restrictions?)
        bc.encodeVarBits( 0 );

        int selev = readShort();
        nodeEleDiff.encodeSignedValue( selev - lastSelev );
        if ( dostats ) bc.assignBits( "nodeele" );
        lastSelev = selev;
        nodeTagCoder.encodeTagValueSet( readVarBytes() );
        if ( dostats ) bc.assignBits( "nodeTagIdx" );
        // link count of this node as determined in the previous pass
        int nlinks = linkCounts.getNext();
        if ( dodebug ) System.out.println( "*** nlinks=" + nlinks );
        bc.encodeNoisyNumber( nlinks, 1 );
        if ( dostats ) bc.assignBits( "link-counts" );

        long id64 = expandId( faid[n] );
        int ilon = (int)(id64 >> 32);
        int ilat = (int)(id64 & 0xffffffff);

        nlinks = 0;
        while( hasMoreData() ) // loop over links
        {
          // read link data
          int startPointer = aboffset;
          int endPointer = getEndPointer();

          int ilonlink = ilon + readVarLengthSigned();
          int ilatlink = ilat + readVarLengthSigned();

          int sizecode = readVarLengthUnsigned();
          boolean isReverse = ( sizecode & 1 ) != 0;
          int descSize = sizecode >> 1;
          byte[] description = null;
          if ( descSize > 0 )
          {
            description = new byte[descSize];
            readFully( description );
          }

          boolean isInternal = isInternal( ilonlink, ilatlink );
          if ( isReverse && isInternal )
          {
            if ( dodebug ) System.out.println( "*** NOT encoding link reverse=" + isReverse + " internal=" + isInternal );
            netdatasize -= aboffset-startPointer;
            continue; // do not encode internal reverse links
          }
          if ( dodebug ) System.out.println( "*** encoding link reverse=" + isReverse + " internal=" + isInternal );
          nlinks++;

          if ( isInternal )
          {
            long link64 = ((long)ilonlink)<<32 | ilatlink;
            Integer idx = idMap.get( Long.valueOf( link64 ) );
            if ( idx == null ) throw new RuntimeException( "ups: internal not found?" );
            int nodeIdx = idx.intValue();
            if ( dodebug ) System.out.println( "*** target nodeIdx=" + nodeIdx );
            if ( nodeIdx == n ) throw new RuntimeException( "ups: self ref?" );
            nodeIdxDiff.encodeSignedValue( nodeIdx - n );
            if ( dostats ) bc.assignBits( "nodeIdx" );
          }
          else
          {
            // external link: nodeIdx delta 0 flags it, then explicit coords
            nodeIdxDiff.encodeSignedValue( 0 );
            bc.encodeBit( isReverse );
            extLonDiff.encodeSignedValue( ilonlink - ilon );
            extLatDiff.encodeSignedValue( ilatlink - ilat );
            if ( dostats ) bc.assignBits( "externalNode" );
          }
          wayTagCoder.encodeTagValueSet( description );
          if ( dostats ) bc.assignBits( "wayDescIdx" );

          if ( !isReverse )
          {
            byte[] geometry = readDataUntil( endPointer );
            // write transition nodes
            int count = transCounts.getNext();
            if ( dodebug ) System.out.println( "*** encoding geometry with count=" + count );
            bc.encodeVarBits( count++ );
            if ( dostats ) bc.assignBits( "transcount" );
            int transcount = 0;
            if ( geometry != null )
            {
              int dlon_remaining = ilonlink - ilon;
              int dlat_remaining = ilatlink - ilat;

              ByteDataReader r = new ByteDataReader( geometry );
              while ( r.hasMoreData() )
              {
                transcount++;

                int dlon = r.readVarLengthSigned();
                int dlat = r.readVarLengthSigned();
                bc.encodePredictedValue( dlon, dlon_remaining/count );
                bc.encodePredictedValue( dlat, dlat_remaining/count );
                dlon_remaining -= dlon;
                dlat_remaining -= dlat;
                if ( count > 1 ) count--;
                if ( dostats ) bc.assignBits( "transpos" );
                transEleDiff.encodeSignedValue( r.readVarLengthSigned() );
                if ( dostats ) bc.assignBits( "transele" );
              }
            }
            transCounts.add( transcount );
          }
        }
        linkCounts.add( nlinks );
      }
      if ( pass == 3 )
      {
        return bc.getEncodedLength();
      }
    }
  }
|
||||
}
|
92
brouter-codec/src/main/java/btools/codec/NoisyDiffCoder.java
Normal file
92
brouter-codec/src/main/java/btools/codec/NoisyDiffCoder.java
Normal file
|
@ -0,0 +1,92 @@
|
|||
package btools.codec;

/**
 * Encoder/Decoder for signed integers that automatically detects the typical
 * range of these numbers to determine a noisy-bit count as a very simple
 * dictionary.
 *
 * Adapted for 3-pass encoding (counters -> statistics -> encoding)
 * but doesn't do anything at pass 1.
 */
public final class NoisyDiffCoder
{
  private int tot;             // total number of magnitudes counted in pass 2
  private int[] freqs;         // freqs[i] = count of magnitudes >= 2^i (lazily allocated)
  private int noisybits;       // number of least-significant bits treated as noise
  private StatCoderContext bc; // current coder context (set per pass / on decode)
  private int pass;            // encoding pass counter (1..3); stays 0 for a decoder

  /**
   * Create a decoder and read the noisy-bit count from the given context.
   */
  public NoisyDiffCoder( StatCoderContext bc )
  {
    noisybits = bc.decodeVarBits();
    this.bc = bc;
  }

  /**
   * Create an encoder for 3-pass-encoding.
   */
  public NoisyDiffCoder()
  {
  }

  /**
   * encodes a signed int (pass3 only, stats collection in pass2)
   */
  public void encodeSignedValue( int value )
  {
    if ( pass == 3 )
    {
      bc.encodeNoisyDiff( value, noisybits );
    }
    else if ( pass == 2 )
    {
      // collect statistics on the magnitude only; the sign is handled
      // by encodeNoisyDiff itself in pass 3
      count( value < 0 ? -value : value );
    }
  }

  /**
   * decodes a signed int
   */
  public int decodeSignedValue()
  {
    return bc.decodeNoisyDiff( noisybits );
  }

  /**
   * Starts a new encoding pass and (in pass3) calculates the noisy-bit count
   * from the stats collected in pass2 and writes that to the given context.
   */
  public void encodeDictionary( StatCoderContext bc )
  {
    if ( ++pass == 3 )
    {
      // how many noisy bits? advance while at least half of the observed
      // magnitudes still populate the current bit level (freqs[i] counts
      // magnitudes >= 2^i); the `tot > 0` guard also protects against a
      // null freqs array when nothing was counted in pass 2
      for ( noisybits = 0; noisybits < 14 && tot > 0; noisybits++ )
      {
        if ( freqs[noisybits] < ( tot >> 1 ) )
          break;
      }
      bc.encodeVarBits( noisybits );
    }
    this.bc = bc;
  }

  // update the per-bit-level frequency histogram for one magnitude:
  // freqs[i] is incremented for every i with value >= 2^i
  private void count( int value )
  {
    if ( freqs == null )
      freqs = new int[14];
    int bm = 1;
    for ( int i = 0; i < 14; i++ )
    {
      if ( value < bm )
        break;
      else
        freqs[i]++;
      bm <<= 1;
    }
    tot++;
  }
}
|
291
brouter-codec/src/main/java/btools/codec/StatCoderContext.java
Normal file
291
brouter-codec/src/main/java/btools/codec/StatCoderContext.java
Normal file
|
@ -0,0 +1,291 @@
|
|||
package btools.codec;

import java.util.TreeMap;

import btools.util.BitCoderContext;

/**
 * A BitCoderContext with additional de-/encoding primitives:
 * noisy (split low/high bit) numbers, predicted values and sorted
 * integer arrays, plus simple bit-usage statistics for analysing
 * the encoded stream.
 *
 * NOTE(review): the statistics are kept in a static, unsynchronized map,
 * so assignBits/getBitReport are not thread-safe.
 */
public final class StatCoderContext extends BitCoderContext
{
  private static TreeMap<String, long[]> statsPerName;
  private long lastbitpos = 0;

  public StatCoderContext( byte[] ab )
  {
    super( ab );
  }

  /**
   * assign the de-/encoded bits since the last call assignBits to the given
   * name. Used for encoding statistics
   *
   * @see #getBitReport
   */
  public void assignBits( String name )
  {
    long bitpos = getBitPosition();
    if ( statsPerName == null )
    {
      statsPerName = new TreeMap<String, long[]>();
    }
    long[] stats = statsPerName.get( name );
    if ( stats == null )
    {
      stats = new long[2];
      statsPerName.put( name, stats );
    }
    stats[0] += bitpos - lastbitpos; // bits consumed since last assignment
    stats[1] += 1;                   // number of assignments
    lastbitpos = bitpos;
  }

  /**
   * Get a textual report on the bit-statistics and reset them.
   * Returns an empty string if no statistics have been collected.
   *
   * @see #assignBits
   */
  public static String getBitReport()
  {
    if ( statsPerName == null )
    {
      return ""; // fix: previously NPE'd when assignBits was never called
    }
    StringBuilder sb = new StringBuilder();
    for ( String name : statsPerName.keySet() )
    {
      long[] stats = statsPerName.get( name );
      sb.append( name + " count=" + stats[1] + " bits=" + stats[0] + "\n" );
    }
    statsPerName = null;
    return sb.toString();
  }

  /**
   * encode an unsigned integer with some of its least significant bits
   * considered noisy
   *
   * @throws IllegalArgumentException if value is negative
   * @see #decodeNoisyNumber
   */
  public void encodeNoisyNumber( int value, int noisybits )
  {
    if ( value < 0 )
    {
      // fix: message previously referenced the wrong method name
      throw new IllegalArgumentException( "encodeNoisyNumber expects positive value" );
    }
    if ( noisybits > 0 )
    {
      // the low `noisybits` bits are stored verbatim, the rest var-length
      int mask = 0xffffffff >>> ( 32 - noisybits );
      encodeBounded( mask, value & mask );
      value >>= noisybits;
    }
    encodeVarBits( value );
  }

  /**
   * decode an unsigned integer with some of its least significant bits
   * considered noisy
   *
   * @see #encodeNoisyNumber
   */
  public int decodeNoisyNumber( int noisybits )
  {
    int value = 0;
    if ( noisybits > 0 )
    {
      int mask = 0xffffffff >>> ( 32 - noisybits );
      value = decodeBounded( mask );
    }
    return value | ( decodeVarBits() << noisybits );
  }

  /**
   * encode a signed integer with some of its least significant bits
   * considered noisy
   *
   * @see #decodeNoisyDiff
   */
  public void encodeNoisyDiff( int value, int noisybits )
  {
    if ( noisybits > 0 )
    {
      // bias by half the noise range so the verbatim low bits are unsigned
      value += 1 << ( noisybits - 1 );
      int mask = 0xffffffff >>> ( 32 - noisybits );
      encodeBounded( mask, value & mask );
      value >>= noisybits;
    }
    encodeVarBits( value < 0 ? -value : value );
    if ( value != 0 )
    {
      encodeBit( value < 0 ); // sign bit only for a non-zero high part
    }
  }

  /**
   * decode a signed integer with some of its least significant bits
   * considered noisy
   *
   * @see #encodeNoisyDiff
   */
  public int decodeNoisyDiff( int noisybits )
  {
    int value = 0;
    if ( noisybits > 0 )
    {
      int mask = 0xffffffff >>> ( 32 - noisybits );
      value = decodeBounded( mask ) - ( 1 << ( noisybits - 1 ) );
    }
    int val2 = decodeVarBits() << noisybits;
    if ( val2 != 0 )
    {
      if ( decodeBit() )
      {
        val2 = -val2;
      }
    }
    return value + val2;
  }

  /**
   * encode a signed integer with the typical range and median taken from the
   * predicted value
   *
   * @see #decodePredictedValue
   */
  public void encodePredictedValue( int value, int predictor )
  {
    // derive the noisy-bit count from the magnitude of the predictor
    int p = predictor < 0 ? -predictor : predictor;
    int noisybits = 0;

    while (p > 2)
    {
      noisybits++;
      p >>= 1;
    }
    encodeNoisyDiff( value - predictor, noisybits );
  }

  /**
   * decode a signed integer with the typical range and median taken from the
   * predicted value
   *
   * @see #encodePredictedValue
   */
  public int decodePredictedValue( int predictor )
  {
    int p = predictor < 0 ? -predictor : predictor;
    int noisybits = 0;
    while (p > 2)
    {
      noisybits++;
      p >>= 1;
    }
    return predictor + decodeNoisyDiff( noisybits );
  }

  /**
   * encode an integer-array making use of the fact that it is sorted. This is
   * done, starting with the most significant bit, by recursively encoding the
   * number of values with the current bit being 0. This yields a number of
   * bits per value that only depends on the typical distance between
   * subsequent values and also benefits from clustering.
   *
   * @param values
   *          the array to encode
   * @param offset
   *          position in this array where to start
   * @param subsize
   *          number of values to encode
   * @param nextbit
   *          bitmask with the most significant bit set to 1
   * @param mask
   *          should be 0
   */
  public void encodeSortedArray( int[] values, int offset, int subsize, int nextbit, int mask )
  {
    if ( subsize == 1 ) // last-choice shortcut: emit remaining bits verbatim
    {
      while (nextbit != 0)
      {
        encodeBit( ( values[offset] & nextbit ) != 0 );
        nextbit >>= 1;
      }
      return; // clarity fix: previously reached via the nextbit==0 check below
    }
    if ( nextbit == 0 )
    {
      return;
    }

    int data = mask & values[offset];
    mask |= nextbit;

    // count 0-bit-fraction: values sharing the current prefix with bit == 0
    int i = offset;
    int end = subsize + offset;
    for ( ; i < end; i++ )
    {
      if ( ( values[i] & mask ) != data )
      {
        break;
      }
    }
    int size1 = i - offset;
    int size2 = subsize - size1;

    encodeBounded( subsize, size1 );
    if ( size1 > 0 )
    {
      encodeSortedArray( values, offset, size1, nextbit >> 1, mask );
    }
    if ( size2 > 0 )
    {
      encodeSortedArray( values, i, size2, nextbit >> 1, mask );
    }
  }

  /**
   * @see #encodeSortedArray
   *
   * @param values
   *          the array to decode into
   * @param offset
   *          position in this array where to start
   * @param subsize
   *          number of values to decode
   * @param nextbit
   *          bitmask with the most significant bit set to 1
   * @param value
   *          should be 0
   */
  public void decodeSortedArray( int[] values, int offset, int subsize, int nextbit, int value )
  {
    if ( subsize == 1 ) // last-choice shortcut: read remaining bits verbatim
    {
      while (nextbit != 0)
      {
        if ( decodeBit() )
        {
          value |= nextbit;
        }
        nextbit >>= 1;
      }
      values[offset] = value;
      return;
    }
    if ( nextbit == 0 )
    {
      // all remaining values share the fully-resolved prefix
      while (subsize-- > 0)
      {
        values[offset++] = value;
      }
      return;
    }

    int size1 = decodeBounded( subsize );
    int size2 = subsize - size1;

    if ( size1 > 0 )
    {
      decodeSortedArray( values, offset, size1, nextbit >> 1, value );
    }
    if ( size2 > 0 )
    {
      decodeSortedArray( values, offset + size1, size2, nextbit >> 1, value | nextbit );
    }
  }

}
|
235
brouter-codec/src/main/java/btools/codec/TagValueCoder.java
Normal file
235
brouter-codec/src/main/java/btools/codec/TagValueCoder.java
Normal file
|
@ -0,0 +1,235 @@
|
|||
package btools.codec;

import java.util.HashMap;
import java.util.PriorityQueue;

import btools.util.BitCoderContext;

/**
 * Encoder/Decoder for way-/node-descriptions
 *
 * It detects identical descriptions and sorts them
 * into a huffman-tree according to their frequencies
 *
 * Adapted for 3-pass encoding (counters -> statistics -> encoding )
 * but doesn't do anything at pass1
 */
public final class TagValueCoder
{
  private HashMap<TagValueSet, TagValueSet> identityMap; // dedup map (encoder side)
  private Object tree;          // decoded huffman tree: TreeNode or byte[] leaf
  private BitCoderContext bc;   // current coder context (set per pass / on decode)
  private int pass;             // encoding pass counter (1..3)

  /**
   * Encode one tag-value set: pass 2 collects frequencies,
   * pass 3 writes the huffman code assigned by encodeDictionary.
   */
  public void encodeTagValueSet( byte[] data )
  {
    if ( pass == 1 )
    {
      return;
    }
    TagValueSet tvsProbe = new TagValueSet();
    tvsProbe.data = data;
    TagValueSet tvs = identityMap.get( tvsProbe );
    if ( pass == 3 )
    {
      // range/code were assigned during TagValueSet.encode in encodeDictionary
      bc.encodeBounded( tvs.range - 1, tvs.code );
    }
    else if ( pass == 2 )
    {
      if ( tvs == null )
      {
        tvs = tvsProbe;
        identityMap.put( tvs, tvs );
      }
      tvs.frequency++;
    }
  }

  /**
   * Decode one tag-value set by walking the huffman tree bit by bit
   * until a leaf (a byte[] description, possibly null) is reached.
   */
  public byte[] decodeTagValueSet()
  {
    Object node = tree;
    while (node instanceof TreeNode)
    {
      TreeNode tn = (TreeNode) node;
      boolean nextBit = bc.decodeBit();
      node = nextBit ? tn.child2 : tn.child1;
    }
    return (byte[]) node;
  }

  /**
   * Start a new encoding pass; in pass 3, build the huffman tree from the
   * pass-2 frequencies and write it to the given context.
   */
  public void encodeDictionary( BitCoderContext bc )
  {
    if ( ++pass == 3 )
    {
      // classic huffman construction: repeatedly merge the two
      // least-frequent nodes (PriorityQueue orders by frequency)
      PriorityQueue<TagValueSet> queue = new PriorityQueue<TagValueSet>( identityMap.values() );
      while (queue.size() > 1)
      {
        TagValueSet node = new TagValueSet();
        node.child1 = queue.poll();
        node.child2 = queue.poll();
        node.frequency = node.child1.frequency + node.child2.frequency;
        queue.add( node );
      }
      TagValueSet root = queue.poll();
      root.encode( bc, 1, 0 );
    }
    this.bc = bc;
  }

  /**
   * Create a decoder: reads the huffman tree from the context.
   *
   * @param buffer scratch buffer for re-encoding leaf descriptions
   * @param validator optional filter; leaves it rejects decode to null
   */
  public TagValueCoder( BitCoderContext bc, byte[] buffer, TagValueValidator validator )
  {
    tree = decodeTree( bc, buffer, validator );
    this.bc = bc;
  }

  /**
   * Create an encoder for 3-pass-encoding.
   */
  public TagValueCoder()
  {
    identityMap = new HashMap<TagValueSet, TagValueSet>();
  }

  // recursively read the huffman tree; leaves are tag-value byte arrays
  // re-encoded into `buffer` (prefixed with a dummy reverse bit), or null
  // for the empty description or when the validator rejects access
  private Object decodeTree( BitCoderContext bc, byte[] buffer, TagValueValidator validator )
  {
    boolean isNode = bc.decodeBit();
    if ( isNode )
    {
      TreeNode node = new TreeNode();
      node.child1 = decodeTree( bc, buffer, validator );
      node.child2 = decodeTree( bc, buffer, validator );
      return node;
    }
    BitCoderContext target = null;
    for ( ;; )
    {
      int delta = bc.decodeVarBits();
      if ( target == null )
      {
        if ( delta == 0 )
          return null; // empty leaf
        target = new BitCoderContext( buffer );
        target.encodeBit( false ); // dummy reverse bit
      }
      target.encodeVarBits( delta );
      if ( delta == 0 )
        break; // delta==0 terminates the (delta,data) pair list
      int data = bc.decodeVarBits();
      target.encodeVarBits( data );
    }
    int len = target.getEncodedLength();
    byte[] res = new byte[len];
    System.arraycopy( buffer, 0, res, 0, len );

    if ( validator == null || validator.accessAllowed( res ) )
    {
      return res;
    }
    return null;
  }

  /**
   * Inner node of the decoded huffman tree; children are either
   * TreeNodes or byte[] leaves.
   */
  public static final class TreeNode
  {
    public Object child1;
    public Object child2;
  }

  /**
   * A distinct tag-value description with its frequency; doubles as a
   * huffman-tree node during dictionary construction.
   */
  public static final class TagValueSet implements Comparable<TagValueSet>
  {
    public byte[] data;      // raw description; null for the empty set
    public int frequency;    // occurrence count from pass 2
    public int code;         // assigned huffman code (leaf only)
    public int range;        // 2^depth, the code's value range (leaf only)
    public TagValueSet child1;
    public TagValueSet child2;

    /**
     * Recursively write this (sub-)tree to the context, assigning
     * code/range to each leaf on the way down.
     */
    public void encode( BitCoderContext bc, int range, int code )
    {
      this.range = range;
      this.code = code;
      boolean isNode = child1 != null;
      bc.encodeBit( isNode );
      if ( isNode )
      {
        child1.encode( bc, range << 1, code );
        child2.encode( bc, range << 1, code + range );
      }
      else
      {
        if ( data == null )
        {
          bc.encodeVarBits( 0 ); // empty leaf marker
          return;
        }
        BitCoderContext src = new BitCoderContext( data );
        if ( src.decodeBit() )
        {
          throw new IllegalArgumentException( "cannot encode reverse bit!" );
        }
        for ( ;; )
        {
          int delta = src.decodeVarBits();
          bc.encodeVarBits( delta );
          if ( delta == 0 )
          {
            break;
          }
          // NOTE: local `data` shadows the byte[] field of the same name
          int data = src.decodeVarBits();
          bc.encodeVarBits( data );
        }
      }
    }

    @Override
    public boolean equals( Object o )
    {
      if ( o instanceof TagValueSet )
      {
        TagValueSet tvs = (TagValueSet) o;
        if ( data == null )
        {
          return tvs.data == null;
        }
        if ( tvs.data == null )
        {
          return data == null;
        }
        if ( data.length != tvs.data.length )
        {
          return false;
        }
        for ( int i = 0; i < data.length; i++ )
        {
          if ( data[i] != tvs.data[i] )
          {
            return false;
          }
        }
        return true;
      }
      return false;
    }

    @Override
    public int hashCode()
    {
      if ( data == null )
      {
        return 0;
      }
      int h = 17;
      for ( int i = 0; i < data.length; i++ )
      {
        h = ( h << 8 ) + data[i];
      }
      return h;
    }

    // orders by frequency for the huffman PriorityQueue;
    // NOTE: intentionally not consistent with equals
    @Override
    public int compareTo( TagValueSet tvs )
    {
      if ( frequency < tvs.frequency )
        return -1;
      if ( frequency > tvs.frequency )
        return 1;
      return 0;
    }
  }
}
|
|
@ -0,0 +1,11 @@
|
|||
package btools.codec;


/**
 * Filter used while decoding the tag-value dictionary: descriptions
 * not accessible in the current routing profile can be dropped early.
 */
public interface TagValueValidator
{
  /**
   * @param tagValueSet the way description to check
   * @return true if access is allowed in the current profile
   */
  public boolean accessAllowed( byte[] tagValueSet );
}
|
|
@ -0,0 +1,13 @@
|
|||
package btools.codec;

/**
 * a waypoint matcher gets way geometries
 * from the decoder to find the closest
 * matches to the waypoints
 */
public interface WaypointMatcher
{
  // begin a new way geometry at the given start node (coordinates in
  // the project's integer lon/lat representation)
  void startNode( int ilon, int ilat );

  // an intermediate (transfer) node of the current way
  void transferNode( int ilon, int ilat );

  // the end node, completing the current way geometry
  void endNode( int ilon, int ilat );
}
|
|
@ -0,0 +1,52 @@
|
|||
package btools.codec;

import org.junit.Assert;
import org.junit.Test;

/**
 * Tests for LinkedListContainer: fills many lists in two interleaved
 * passes and checks that each list returns its elements in LIFO order.
 */
public class LinkedListContainerTest
{
  @Test
  public void linkedListTest1()
  {
    int nlists = 553;

    LinkedListContainer llc = new LinkedListContainer( nlists, null );

    // first pass: 10 elements per list, list by list
    for ( int ln = 0; ln < nlists; ln++ )
    {
      for ( int i = 0; i < 10; i++ )
      {
        llc.addDataElement( ln, ln * i );
      }
    }

    // second pass: same elements again, interleaved across lists
    for ( int i = 0; i < 10; i++ )
    {
      for ( int ln = 0; ln < nlists; ln++ )
      {
        llc.addDataElement( ln, ln * i );
      }
    }

    for ( int ln = 0; ln < nlists; ln++ )
    {
      int cnt = llc.initList( ln );
      Assert.assertTrue( "list size test", cnt == 20 );

      // elements come back in reverse insertion order (LIFO),
      // hence the i % 10 pattern over both passes
      for ( int i = 19; i >= 0; i-- )
      {
        int data = llc.getDataElement();
        Assert.assertTrue( "data value test", data == ln * ( i % 10 ) );
      }
    }

    // reading past the end of the last list must fail
    try
    {
      llc.getDataElement();
      Assert.fail( "no more elements expected" );
    }
    catch (IllegalArgumentException e)
    {
    }
  }
}
|
|
@ -0,0 +1,127 @@
|
|||
package btools.codec;

import java.util.Arrays;
import java.util.Random;

import org.junit.Assert;
import org.junit.Test;

/**
 * Round-trip tests for the StatCoderContext encoding primitives:
 * noisy numbers, noisy diffs, predicted values and sorted arrays.
 */
public class StatCoderContextTest
{
  @Test
  public void noisyVarBitsEncodeDecodeTest()
  {
    byte[] ab = new byte[40000];
    StatCoderContext ctx = new StatCoderContext( ab );
    for ( int noisybits = 0; noisybits < 12; noisybits++ )
    {
      for ( int i = 0; i < 1000; i++ )
      {
        ctx.encodeNoisyNumber( i, noisybits );
      }
    }
    // decode from a fresh context over the same buffer
    ctx = new StatCoderContext( ab );

    for ( int noisybits = 0; noisybits < 12; noisybits++ )
    {
      for ( int i = 0; i < 1000; i++ )
      {
        int value = ctx.decodeNoisyNumber( noisybits );
        if ( value != i )
        {
          Assert.fail( "value mismatch: noisybits=" + noisybits + " i=" + i + " value=" + value );
        }
      }
    }
  }

  @Test
  public void noisySignedVarBitsEncodeDecodeTest()
  {
    byte[] ab = new byte[80000];
    StatCoderContext ctx = new StatCoderContext( ab );
    for ( int noisybits = 0; noisybits < 12; noisybits++ )
    {
      // negative values included: diffs are signed
      for ( int i = -1000; i < 1000; i++ )
      {
        ctx.encodeNoisyDiff( i, noisybits );
      }
    }
    ctx = new StatCoderContext( ab );

    for ( int noisybits = 0; noisybits < 12; noisybits++ )
    {
      for ( int i = -1000; i < 1000; i++ )
      {
        int value = ctx.decodeNoisyDiff( noisybits );
        if ( value != i )
        {
          Assert.fail( "value mismatch: noisybits=" + noisybits + " i=" + i + " value=" + value );
        }
      }
    }
  }

  @Test
  public void predictedValueEncodeDecodeTest()
  {
    byte[] ab = new byte[80000];
    StatCoderContext ctx = new StatCoderContext( ab );
    // cover predictors both near and far from the actual value
    for ( int value = -100; value < 100; value += 5 )
    {
      for ( int predictor = -200; predictor < 200; predictor += 7 )
      {
        ctx.encodePredictedValue( value, predictor );
      }
    }
    ctx = new StatCoderContext( ab );

    for ( int value = -100; value < 100; value += 5 )
    {
      for ( int predictor = -200; predictor < 200; predictor += 7 )
      {
        int decodedValue = ctx.decodePredictedValue( predictor );
        if ( value != decodedValue )
        {
          Assert.fail( "value mismatch: value=" + value + " predictor=" + predictor + " decodedValue=" + decodedValue );
        }
      }
    }
  }

  @Test
  public void sortedArrayEncodeDecodeTest()
  {
    Random rand = new Random();
    int size = 1000000;
    int[] values = new int[size];
    for ( int i = 0; i < size; i++ )
    {
      // keep values within the 28-bit range covered by nextbit=0x08000000
      values[i] = rand.nextInt() & 0x0fffffff;
    }
    values[5] = 175384; // force collision
    values[8] = 175384;

    values[15] = 275384; // force neighbours
    values[18] = 275385;

    Arrays.sort( values );

    byte[] ab = new byte[3000000];
    StatCoderContext ctx = new StatCoderContext( ab );
    ctx.encodeSortedArray( values, 0, size, 0x08000000, 0 );

    ctx = new StatCoderContext( ab );

    int[] decodedValues = new int[size];
    ctx.decodeSortedArray( decodedValues, 0, size, 0x08000000, 0 );

    for ( int i = 0; i < size; i++ )
    {
      if ( values[i] != decodedValues[i] )
      {
        Assert.fail( "mismatch at i=" + i + " " + values[i] + "<>" + decodedValues[i] );
      }
    }
  }
}
|
|
@ -17,6 +17,11 @@
|
|||
<artifactId>brouter-util</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.btools</groupId>
|
||||
<artifactId>brouter-codec</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.btools</groupId>
|
||||
<artifactId>brouter-mapaccess</artifactId>
|
||||
|
|
|
@ -18,7 +18,7 @@ final class MatchedWaypoint
|
|||
public OsmNodeNamed crosspoint;
|
||||
public OsmNodeNamed waypoint;
|
||||
public double radius;
|
||||
public int cost;
|
||||
public boolean hasUpdate;
|
||||
|
||||
public void writeToStream( DataOutput dos ) throws IOException
|
||||
{
|
||||
|
|
|
@ -129,7 +129,7 @@ final class OsmPath implements OsmLinkHolder
|
|||
|
||||
MessageData msgData = new MessageData();
|
||||
|
||||
OsmTransferNode transferNode = link.decodeFirsttransfer();
|
||||
OsmTransferNode transferNode = link.decodeFirsttransfer( p1 );
|
||||
OsmNode targetNode = link.targetNode;
|
||||
for(;;)
|
||||
{
|
||||
|
|
|
@ -140,6 +140,27 @@ public final class RoutingContext implements DistanceChecker
|
|||
}
|
||||
}
|
||||
|
||||
/**
 * Remove nogo areas that contain one of the given waypoints: a route
 * must be able to start/end there, so such a nogo cannot be honoured.
 */
public void cleanNogolist( List<OsmNodeNamed> waypoints )
{
  if ( nogopoints == null ) return;
  List<OsmNodeNamed> nogos = new ArrayList<OsmNodeNamed>();
  for( OsmNodeNamed nogo : nogopoints )
  {
    // presumably radius is stored in degrees and 111894. is the
    // meters-per-degree scale used elsewhere in this file — TODO confirm
    int radiusInMeter = (int)(nogo.radius * 111894.);
    boolean goodGuy = true;
    for( OsmNodeNamed wp : waypoints )
    {
      if ( wp.calcDistance( nogo ) < radiusInMeter )
      {
        goodGuy = false; // a waypoint lies inside this nogo -> drop it
        break;
      }
    }
    if ( goodGuy ) nogos.add( nogo );
  }
  nogopoints = nogos;
}
|
||||
|
||||
public long[] getNogoChecksums()
|
||||
{
|
||||
long[] cs = new long[3];
|
||||
|
@ -215,8 +236,7 @@ public final class RoutingContext implements DistanceChecker
|
|||
// calculate remaining distance
|
||||
if ( s2 < 0. )
|
||||
{
|
||||
double distance = d > 0. ? -s2 / d : 0.;
|
||||
wayfraction = d > 0. ? distance / d : 0.;
|
||||
wayfraction = -s2 / (d*d);
|
||||
double xm = x2 - wayfraction*dx;
|
||||
double ym = y2 - wayfraction*dy;
|
||||
ilonshortest = (int)(xm / coslat6 + nogo.ilon);
|
||||
|
|
|
@ -3,12 +3,13 @@ package btools.router;
|
|||
import java.io.File;
|
||||
import java.io.FileWriter;
|
||||
import java.io.IOException;
|
||||
import java.io.PrintWriter;
|
||||
import java.io.StringWriter;
|
||||
import java.io.Writer;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
|
||||
import btools.expressions.BExpressionContext;
|
||||
import btools.expressions.BExpressionContextGlobal;
|
||||
import btools.expressions.BExpressionContextNode;
|
||||
import btools.expressions.BExpressionContextWay;
|
||||
|
@ -28,6 +29,7 @@ public class RoutingEngine extends Thread
|
|||
private boolean finished = false;
|
||||
|
||||
protected List<OsmNodeNamed> waypoints = null;
|
||||
protected List<MatchedWaypoint> matchedWaypoints;
|
||||
private int linksProcessed = 0;
|
||||
|
||||
protected OsmTrack foundTrack = new OsmTrack();
|
||||
|
@ -85,8 +87,8 @@ public class RoutingEngine extends Thread
|
|||
BExpressionMetaData meta = new BExpressionMetaData();
|
||||
|
||||
BExpressionContextGlobal expctxGlobal = new BExpressionContextGlobal( meta );
|
||||
rc.expctxWay = new BExpressionContextWay( rc.serversizing ? 262144 : 4096, meta );
|
||||
rc.expctxNode = new BExpressionContextNode( rc.serversizing ? 16384 : 1024, meta );
|
||||
rc.expctxWay = new BExpressionContextWay( rc.serversizing ? 262144 : 8192, meta );
|
||||
rc.expctxNode = new BExpressionContextNode( rc.serversizing ? 16384 : 2048, meta );
|
||||
|
||||
meta.readMetaData( new File( profileDir, "lookups.dat" ) );
|
||||
|
||||
|
@ -111,6 +113,7 @@ public class RoutingEngine extends Thread
|
|||
{
|
||||
infoLogWriter.write( s );
|
||||
infoLogWriter.write( '\n' );
|
||||
infoLogWriter.flush();
|
||||
}
|
||||
catch( IOException io )
|
||||
{
|
||||
|
@ -119,6 +122,14 @@ public class RoutingEngine extends Thread
|
|||
}
|
||||
}
|
||||
|
||||
private void logThrowable( Throwable t )
|
||||
{
|
||||
StringWriter sw = new StringWriter();
|
||||
PrintWriter pw = new PrintWriter(sw);
|
||||
t.printStackTrace(pw);
|
||||
logInfo( sw.toString() );
|
||||
}
|
||||
|
||||
public void run()
|
||||
{
|
||||
doRun( 0 );
|
||||
|
@ -135,6 +146,9 @@ public class RoutingEngine extends Thread
|
|||
logInfo( "start request at " + new Date() );
|
||||
}
|
||||
|
||||
// delete nogos with waypoints in them
|
||||
routingContext.cleanNogolist( waypoints );
|
||||
|
||||
startTime = System.currentTimeMillis();
|
||||
this.maxRunningTime = maxRunningTime;
|
||||
int nsections = waypoints.size() - 1;
|
||||
|
@ -205,14 +219,14 @@ public class RoutingEngine extends Thread
|
|||
{
|
||||
errorMessage = e instanceof IllegalArgumentException ? e.getMessage() : e.toString();
|
||||
logInfo( "Exception (linksProcessed=" + linksProcessed + ": " + errorMessage );
|
||||
e.printStackTrace();
|
||||
logThrowable( e );
|
||||
}
|
||||
catch( Error e)
|
||||
{
|
||||
String hint = cleanOnOOM();
|
||||
errorMessage = e.toString() + hint;
|
||||
logInfo( "Error (linksProcessed=" + linksProcessed + ": " + errorMessage );
|
||||
e.printStackTrace();
|
||||
logThrowable( e );
|
||||
}
|
||||
finally
|
||||
{
|
||||
|
@ -250,14 +264,14 @@ public class RoutingEngine extends Thread
|
|||
{
|
||||
errorMessage = e instanceof IllegalArgumentException ? e.getMessage() : e.toString();
|
||||
logInfo( "Exception (linksProcessed=" + linksProcessed + ": " + errorMessage );
|
||||
e.printStackTrace();
|
||||
logThrowable( e );
|
||||
}
|
||||
catch( Error e)
|
||||
{
|
||||
String hint = cleanOnOOM();
|
||||
errorMessage = e.toString() + hint;
|
||||
logInfo( "Error (linksProcessed=" + linksProcessed + ": " + errorMessage );
|
||||
e.printStackTrace();
|
||||
logThrowable( e );
|
||||
}
|
||||
finally
|
||||
{
|
||||
|
@ -290,7 +304,7 @@ public class RoutingEngine extends Thread
|
|||
private OsmTrack findTrack( OsmTrack[] refTracks, OsmTrack[] lastTracks )
|
||||
{
|
||||
OsmTrack totaltrack = new OsmTrack();
|
||||
MatchedWaypoint[] wayointIds = new MatchedWaypoint[waypoints.size()];
|
||||
int nUnmatched = waypoints.size();
|
||||
|
||||
// check for a track for that target
|
||||
OsmTrack nearbyTrack = null;
|
||||
|
@ -299,20 +313,27 @@ public class RoutingEngine extends Thread
|
|||
nearbyTrack = OsmTrack.readBinary( routingContext.rawTrackPath, waypoints.get( waypoints.size()-1), routingContext.getNogoChecksums() );
|
||||
if ( nearbyTrack != null )
|
||||
{
|
||||
wayointIds[waypoints.size()-1] = nearbyTrack.endPoint;
|
||||
nUnmatched--;
|
||||
}
|
||||
}
|
||||
|
||||
// match waypoints to nodes
|
||||
for( int i=0; i<waypoints.size(); i++ )
|
||||
if ( matchedWaypoints == null ) // could exist from the previous alternative level
|
||||
{
|
||||
if ( wayointIds[i] == null )
|
||||
matchedWaypoints = new ArrayList<MatchedWaypoint>();
|
||||
for( int i=0; i<nUnmatched; i++ )
|
||||
{
|
||||
wayointIds[i] = matchNodeForPosition( waypoints.get(i) );
|
||||
MatchedWaypoint mwp = new MatchedWaypoint();
|
||||
mwp.waypoint = waypoints.get(i);
|
||||
matchedWaypoints.add( mwp );
|
||||
}
|
||||
matchWaypointsToNodes( matchedWaypoints );
|
||||
if ( nearbyTrack != null )
|
||||
{
|
||||
matchedWaypoints.add( nearbyTrack.endPoint );
|
||||
}
|
||||
}
|
||||
|
||||
for( int i=0; i<waypoints.size() -1; i++ )
|
||||
for( int i=0; i<matchedWaypoints.size() -1; i++ )
|
||||
{
|
||||
if ( lastTracks[i] != null )
|
||||
{
|
||||
|
@ -320,7 +341,7 @@ public class RoutingEngine extends Thread
|
|||
refTracks[i].addNodes( lastTracks[i] );
|
||||
}
|
||||
|
||||
OsmTrack seg = searchTrack( wayointIds[i], wayointIds[i+1], i == waypoints.size()-2 ? nearbyTrack : null, refTracks[i] );
|
||||
OsmTrack seg = searchTrack( matchedWaypoints.get(i), matchedWaypoints.get(i+1), i == matchedWaypoints.size()-2 ? nearbyTrack : null, refTracks[i] );
|
||||
if ( seg == null ) return null;
|
||||
totaltrack.appendTrack( seg );
|
||||
lastTracks[i] = seg;
|
||||
|
@ -328,6 +349,50 @@ public class RoutingEngine extends Thread
|
|||
return totaltrack;
|
||||
}
|
||||
|
||||
// geometric position matching finding the nearest routable way-section
|
||||
private void matchWaypointsToNodes( List<MatchedWaypoint> unmatchedWaypoints )
|
||||
{
|
||||
resetCache();
|
||||
nodesCache.waypointMatcher = new WaypointMatcherImpl( unmatchedWaypoints, 250. );
|
||||
for( MatchedWaypoint mwp : unmatchedWaypoints )
|
||||
{
|
||||
preloadPosition( mwp.waypoint );
|
||||
}
|
||||
|
||||
// preliminary-hack: use old stuff if not yet matched
|
||||
for( int i=0; i<unmatchedWaypoints.size(); i++)
|
||||
{
|
||||
MatchedWaypoint mwp = unmatchedWaypoints.get(i);
|
||||
if ( mwp.crosspoint == null )
|
||||
{
|
||||
System.out.println( "name=" + mwp.waypoint.name + " NOT matched r=" + mwp.radius * 111894. );
|
||||
unmatchedWaypoints.set(i, matchNodeForPosition( mwp.waypoint ) );
|
||||
}
|
||||
else
|
||||
{
|
||||
System.out.println( "name=" + mwp.waypoint.name + " matched r=" + mwp.radius * 111894. );
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void preloadPosition( OsmNode n )
|
||||
{
|
||||
int d = 12500;
|
||||
nodesCache.first_file_access_failed = false;
|
||||
nodesCache.first_file_access_name = null;
|
||||
nodesCache.loadSegmentFor( n.ilon, n.ilat );
|
||||
if ( nodesCache.first_file_access_failed )
|
||||
{
|
||||
throw new IllegalArgumentException( "datafile " + nodesCache.first_file_access_name + " not found" );
|
||||
}
|
||||
for( int idxLat=-1; idxLat<=1; idxLat++ )
|
||||
for( int idxLon=-1; idxLon<=1; idxLon++ )
|
||||
{
|
||||
nodesCache.loadSegmentFor( n.ilon + d*idxLon , n.ilat +d*idxLat );
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// geometric position matching finding the nearest routable way-section
|
||||
private MatchedWaypoint matchNodeForPosition( OsmNodeNamed wp )
|
||||
{
|
||||
|
@ -406,7 +471,6 @@ public class RoutingEngine extends Thread
|
|||
mwp.node1 = n;
|
||||
mwp.node2 = nextNode;
|
||||
mwp.radius = wp.radius;
|
||||
mwp.cost = testPath.cost;
|
||||
mwp.crosspoint = new OsmNodeNamed();
|
||||
mwp.crosspoint.ilon = routingContext.ilonshortest;
|
||||
mwp.crosspoint.ilat = routingContext.ilatshortest;
|
||||
|
@ -510,8 +574,7 @@ public class RoutingEngine extends Thread
|
|||
private void resetCache()
|
||||
{
|
||||
nodesMap = new OsmNodesMap();
|
||||
BExpressionContext ctx = routingContext.expctxWay;
|
||||
nodesCache = new NodesCache(segmentDir, nodesMap, ctx.meta.lookupVersion, ctx.meta.lookupMinorVersion, routingContext.carMode, routingContext.forceSecondaryData, nodesCache );
|
||||
nodesCache = new NodesCache(segmentDir, nodesMap, routingContext.expctxWay, routingContext.carMode, routingContext.forceSecondaryData, nodesCache );
|
||||
}
|
||||
|
||||
private OsmNode getStartNode( long startId )
|
||||
|
|
|
@ -0,0 +1,140 @@
|
|||
package btools.router;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import btools.codec.WaypointMatcher;
|
||||
import btools.mapaccess.OsmNode;
|
||||
|
||||
/**
|
||||
* the WaypointMatcher is feeded by the decoder with geoemtries of ways that are
|
||||
* already check for allowed access according to the current routing profile
|
||||
*
|
||||
* It matches these geometries against the list of waypoints to find the best
|
||||
* match for each waypoint
|
||||
*/
|
||||
public final class WaypointMatcherImpl implements WaypointMatcher
|
||||
{
|
||||
private List<MatchedWaypoint> waypoints;
|
||||
|
||||
private int lonStart;
|
||||
private int latStart;
|
||||
private boolean anyUpdate;
|
||||
private int lonLast;
|
||||
private int latLast;
|
||||
|
||||
public WaypointMatcherImpl( List<MatchedWaypoint> waypoints, double maxDistance )
|
||||
{
|
||||
this.waypoints = waypoints;
|
||||
for ( MatchedWaypoint mwp : waypoints )
|
||||
{
|
||||
mwp.radius = maxDistance / 111894.; // 6378000. / 57.;
|
||||
}
|
||||
}
|
||||
|
||||
private void checkSegment( int lon1, int lat1, int lon2, int lat2 )
|
||||
{
|
||||
// todo: bounding-box pre-filter
|
||||
|
||||
double l = ( lat2 - 90000000 ) * 0.00000001234134;
|
||||
double l2 = l * l;
|
||||
double l4 = l2 * l2;
|
||||
double coslat = 1. - l2 + l4 / 6.;
|
||||
double coslat6 = coslat * 0.000001;
|
||||
|
||||
double dx = ( lon2 - lon1 ) * coslat6;
|
||||
double dy = ( lat2 - lat1 ) * 0.000001;
|
||||
double d = Math.sqrt( dy * dy + dx * dx );
|
||||
if ( d == 0. )
|
||||
return;
|
||||
|
||||
for ( MatchedWaypoint mwp : waypoints )
|
||||
{
|
||||
OsmNodeNamed wp = mwp.waypoint;
|
||||
|
||||
double x1 = ( lon1 - wp.ilon ) * coslat6;
|
||||
double y1 = ( lat1 - wp.ilat ) * 0.000001;
|
||||
double x2 = ( lon2 - wp.ilon ) * coslat6;
|
||||
double y2 = ( lat2 - wp.ilat ) * 0.000001;
|
||||
double r12 = x1 * x1 + y1 * y1;
|
||||
double r22 = x2 * x2 + y2 * y2;
|
||||
double radius = Math.abs( r12 < r22 ? y1 * dx - x1 * dy : y2 * dx - x2 * dy ) / d;
|
||||
|
||||
if ( radius < mwp.radius )
|
||||
{
|
||||
double s1 = x1 * dx + y1 * dy;
|
||||
double s2 = x2 * dx + y2 * dy;
|
||||
|
||||
if ( s1 < 0. )
|
||||
{
|
||||
s1 = -s1;
|
||||
s2 = -s2;
|
||||
}
|
||||
if ( s2 > 0. )
|
||||
{
|
||||
radius = Math.sqrt( s1 < s2 ? r12 : r22 );
|
||||
if ( radius > mwp.radius )
|
||||
continue;
|
||||
}
|
||||
// new match for that waypoint
|
||||
mwp.radius = radius; // shortest distance to way
|
||||
mwp.hasUpdate = true;
|
||||
anyUpdate = true;
|
||||
// calculate crosspoint
|
||||
if ( mwp.crosspoint == null )
|
||||
mwp.crosspoint = new OsmNodeNamed();
|
||||
if ( s2 < 0. )
|
||||
{
|
||||
double wayfraction = -s2 / ( d * d );
|
||||
double xm = x2 - wayfraction * dx;
|
||||
double ym = y2 - wayfraction * dy;
|
||||
mwp.crosspoint.ilon = (int) ( xm / coslat6 + wp.ilon );
|
||||
mwp.crosspoint.ilat = (int) ( ym / 0.000001 + wp.ilat );
|
||||
}
|
||||
else if ( s1 > s2 )
|
||||
{
|
||||
mwp.crosspoint.ilon = lon2;
|
||||
mwp.crosspoint.ilat = lat2;
|
||||
}
|
||||
else
|
||||
{
|
||||
mwp.crosspoint.ilon = lon1;
|
||||
mwp.crosspoint.ilat = lat1;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void startNode( int ilon, int ilat )
|
||||
{
|
||||
lonLast = lonStart = ilon;
|
||||
latLast = latStart = ilat;
|
||||
anyUpdate = false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void transferNode( int ilon, int ilat )
|
||||
{
|
||||
checkSegment( lonLast, latLast, ilon, ilat );
|
||||
lonLast = ilon;
|
||||
latLast = ilat;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void endNode( int ilon, int ilat )
|
||||
{
|
||||
checkSegment( lonLast, latLast, ilon, ilat );
|
||||
if ( anyUpdate )
|
||||
{
|
||||
for ( MatchedWaypoint mwp : waypoints )
|
||||
{
|
||||
if ( mwp.hasUpdate )
|
||||
{
|
||||
mwp.hasUpdate = false;
|
||||
mwp.node1 = new OsmNode( lonStart, latStart );
|
||||
mwp.node2 = new OsmNode( ilon, ilat );
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -17,6 +17,11 @@
|
|||
<artifactId>brouter-util</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.btools</groupId>
|
||||
<artifactId>brouter-codec</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
|
|
|
@ -6,9 +6,11 @@
|
|||
|
||||
package btools.expressions;
|
||||
|
||||
import btools.codec.TagValueValidator;
|
||||
|
||||
|
||||
public final class BExpressionContextWay extends BExpressionContext
|
||||
|
||||
public final class BExpressionContextWay extends BExpressionContext implements TagValueValidator
|
||||
{
|
||||
private static String[] buildInVariables =
|
||||
{ "costfactor", "turncost", "uphillcostfactor", "downhillcostfactor", "initialcost", "nodeaccessgranted", "initialclassifier", "trafficsourcedensity", "istrafficbackbone" };
|
||||
|
@ -43,4 +45,15 @@ public final class BExpressionContextWay extends BExpressionContext
|
|||
{
|
||||
super( "way", hashSize, meta );
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean accessAllowed( byte[] description )
|
||||
{
|
||||
evaluate( false, description, null );
|
||||
boolean ok = getCostfactor() < 10000.;
|
||||
evaluate( true, description, null );
|
||||
ok |= getCostfactor() < 10000.;
|
||||
return ok;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -17,6 +17,11 @@
|
|||
<artifactId>brouter-util</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.btools</groupId>
|
||||
<artifactId>brouter-codec</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.btools</groupId>
|
||||
<artifactId>brouter-expressions</artifactId>
|
||||
|
|
|
@ -6,10 +6,13 @@
|
|||
package btools.mapcreator;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
|
||||
import btools.util.ByteDataWriter;
|
||||
import btools.codec.MicroCache;
|
||||
import btools.codec.MicroCache1;
|
||||
import btools.codec.MicroCache2;
|
||||
|
||||
public class OsmNodeP extends OsmLinkP implements Comparable<OsmNodeP>
|
||||
public class OsmNodeP extends OsmLinkP
|
||||
{
|
||||
public static final int SIGNLON_BITMASK = 0x80;
|
||||
public static final int SIGNLAT_BITMASK = 0x40;
|
||||
|
@ -28,7 +31,6 @@ public class OsmNodeP extends OsmLinkP implements Comparable<OsmNodeP>
|
|||
*/
|
||||
public int ilon;
|
||||
|
||||
|
||||
/**
|
||||
* The elevation
|
||||
*/
|
||||
|
@ -38,6 +40,8 @@ public class OsmNodeP extends OsmLinkP implements Comparable<OsmNodeP>
|
|||
public final static int NO_TUNNEL_BIT = 2;
|
||||
public final static int BORDER_BIT = 4;
|
||||
public final static int TRAFFIC_BIT = 8;
|
||||
public final static int ANY_WAY_BIT = 16;
|
||||
public final static int MULTI_WAY_BIT = 32;
|
||||
|
||||
public byte bits = 0;
|
||||
|
||||
|
@ -63,7 +67,6 @@ public class OsmNodeP extends OsmLinkP implements Comparable<OsmNodeP>
|
|||
return selev / 4.;
|
||||
}
|
||||
|
||||
|
||||
// populate and return the inherited link, if available,
|
||||
// else create a new one
|
||||
public OsmLinkP createLink( OsmNodeP source )
|
||||
|
@ -82,7 +85,6 @@ public class OsmNodeP extends OsmLinkP implements Comparable<OsmNodeP>
|
|||
return link;
|
||||
}
|
||||
|
||||
|
||||
// memory-squeezing-hack: OsmLinkP's "previous" also used as firstlink..
|
||||
|
||||
public void addLink( OsmLinkP link )
|
||||
|
@ -101,19 +103,14 @@ public class OsmNodeP extends OsmLinkP implements Comparable<OsmNodeP>
|
|||
return null;
|
||||
}
|
||||
|
||||
public void writeNodeData( ByteDataWriter os, byte[] abBuf ) throws IOException
|
||||
public void writeNodeData1( MicroCache1 mc ) throws IOException
|
||||
{
|
||||
int lonIdx = ilon/62500;
|
||||
int latIdx = ilat/62500;
|
||||
|
||||
// buffer the body to first calc size
|
||||
ByteDataWriter os2 = new ByteDataWriter( abBuf );
|
||||
os2.writeShort( getSElev() );
|
||||
mc.writeShort( getSElev() );
|
||||
|
||||
// hack: write node-desc as link tag (copy cycleway-bits)
|
||||
byte[] nodeDescription = getNodeDecsription();
|
||||
|
||||
for( OsmLinkP link0 = getFirstLink(); link0 != null; link0 = link0.getNext( this ) )
|
||||
for ( OsmLinkP link0 = getFirstLink(); link0 != null; link0 = link0.getNext( this ) )
|
||||
{
|
||||
int ilonref = ilon;
|
||||
int ilatref = ilat;
|
||||
|
@ -123,7 +120,7 @@ public class OsmNodeP extends OsmLinkP implements Comparable<OsmNodeP>
|
|||
int skipDetailBit = link0.descriptionBitmap == null ? SKIPDETAILS_BITMASK : 0;
|
||||
|
||||
// first pass just to see if that link is consistent
|
||||
while( link != null )
|
||||
while (link != null)
|
||||
{
|
||||
OsmNodeP target = link.getTarget( origin );
|
||||
if ( !target.isTransferNode() )
|
||||
|
@ -131,23 +128,26 @@ public class OsmNodeP extends OsmLinkP implements Comparable<OsmNodeP>
|
|||
break;
|
||||
}
|
||||
// next link is the one (of two), does does'nt point back
|
||||
for( link = target.getFirstLink(); link != null; link = link.getNext( target ) )
|
||||
for ( link = target.getFirstLink(); link != null; link = link.getNext( target ) )
|
||||
{
|
||||
if ( link.getTarget( target ) != origin ) break;
|
||||
if ( link.getTarget( target ) != origin )
|
||||
break;
|
||||
}
|
||||
origin = target;
|
||||
}
|
||||
if ( link == null ) continue; // dead end
|
||||
if ( link == null )
|
||||
continue; // dead end
|
||||
|
||||
if ( skipDetailBit == 0)
|
||||
if ( skipDetailBit == 0 )
|
||||
{
|
||||
link = link0;
|
||||
origin = this;
|
||||
}
|
||||
byte[] lastDescription = null;
|
||||
while( link != null )
|
||||
while (link != null)
|
||||
{
|
||||
if ( link.descriptionBitmap == null && skipDetailBit == 0 ) throw new IllegalArgumentException( "missing way description...");
|
||||
if ( link.descriptionBitmap == null && skipDetailBit == 0 )
|
||||
throw new IllegalArgumentException( "missing way description..." );
|
||||
|
||||
OsmNodeP target = link.getTarget( origin );
|
||||
int tranferbit = target.isTransferNode() ? TRANSFERNODE_BITMASK : 0;
|
||||
|
@ -164,19 +164,25 @@ public class OsmNodeP extends OsmLinkP implements Comparable<OsmNodeP>
|
|||
boolean equalsCurrent = abLen == lastLen;
|
||||
if ( equalsCurrent )
|
||||
{
|
||||
for( int i=0; i<abLen; i++ )
|
||||
for ( int i = 0; i < abLen; i++ )
|
||||
{
|
||||
byte b = ab[i];
|
||||
if ( i == inverseBitByteIndex && inverseDirection ) b ^= 1;
|
||||
if ( b != lastDescription[i] ) { equalsCurrent = false; break; }
|
||||
if ( i == inverseBitByteIndex && inverseDirection )
|
||||
b ^= 1;
|
||||
if ( b != lastDescription[i] )
|
||||
{
|
||||
equalsCurrent = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if ( !equalsCurrent )
|
||||
{
|
||||
writedescbit = WRITEDESC_BITMASK;
|
||||
lastDescription = new byte[abLen];
|
||||
System.arraycopy( ab, 0, lastDescription, 0 , abLen );
|
||||
if ( inverseDirection ) lastDescription[inverseBitByteIndex] ^= 1;
|
||||
System.arraycopy( ab, 0, lastDescription, 0, abLen );
|
||||
if ( inverseDirection )
|
||||
lastDescription[inverseBitByteIndex] ^= 1;
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -186,102 +192,237 @@ public class OsmNodeP extends OsmLinkP implements Comparable<OsmNodeP>
|
|||
int dlat = target.ilat - ilatref;
|
||||
ilonref = target.ilon;
|
||||
ilatref = target.ilat;
|
||||
if ( dlon < 0 ) { bm |= SIGNLON_BITMASK; dlon = - dlon; }
|
||||
if ( dlat < 0 ) { bm |= SIGNLAT_BITMASK; dlat = - dlat; }
|
||||
os2.writeByte( bm );
|
||||
if ( dlon < 0 )
|
||||
{
|
||||
bm |= SIGNLON_BITMASK;
|
||||
dlon = -dlon;
|
||||
}
|
||||
if ( dlat < 0 )
|
||||
{
|
||||
bm |= SIGNLAT_BITMASK;
|
||||
dlat = -dlat;
|
||||
}
|
||||
mc.writeByte( bm );
|
||||
|
||||
int blon = os2.writeVarLengthUnsigned( dlon );
|
||||
int blat = os2.writeVarLengthUnsigned( dlat );
|
||||
mc.writeVarLengthUnsigned( dlon );
|
||||
mc.writeVarLengthUnsigned( dlat );
|
||||
|
||||
if ( writedescbit != 0 )
|
||||
{
|
||||
// write the way description, code direction into the first bit
|
||||
os2.writeByte( lastDescription.length );
|
||||
os2.write( lastDescription );
|
||||
mc.writeByte( lastDescription.length );
|
||||
mc.write( lastDescription );
|
||||
}
|
||||
if ( nodedescbit != 0 )
|
||||
{
|
||||
os2.writeByte( nodeDescription.length );
|
||||
os2.write( nodeDescription );
|
||||
mc.writeByte( nodeDescription.length );
|
||||
mc.write( nodeDescription );
|
||||
nodeDescription = null;
|
||||
}
|
||||
|
||||
link.descriptionBitmap = null; // mark link as written
|
||||
|
||||
if ( tranferbit == 0)
|
||||
if ( tranferbit == 0 )
|
||||
{
|
||||
break;
|
||||
}
|
||||
os2.writeVarLengthSigned( target.getSElev() -getSElev() );
|
||||
mc.writeVarLengthSigned( target.getSElev() - getSElev() );
|
||||
// next link is the one (of two), does does'nt point back
|
||||
for( link = target.getFirstLink(); link != null; link = link.getNext( target ) )
|
||||
for ( link = target.getFirstLink(); link != null; link = link.getNext( target ) )
|
||||
{
|
||||
if ( link.getTarget( target ) != origin ) break;
|
||||
if ( link.getTarget( target ) != origin )
|
||||
break;
|
||||
}
|
||||
if ( link == null ) throw new RuntimeException( "follow-up link not found for transfer-node!" );
|
||||
if ( link == null )
|
||||
throw new RuntimeException( "follow-up link not found for transfer-node!" );
|
||||
origin = target;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// calculate the body size
|
||||
int bodySize = os2.size();
|
||||
public void writeNodeData( MicroCache mc ) throws IOException
|
||||
{
|
||||
boolean valid = true;
|
||||
if ( mc instanceof MicroCache1 )
|
||||
{
|
||||
writeNodeData1( (MicroCache1) mc );
|
||||
}
|
||||
else if ( mc instanceof MicroCache2 )
|
||||
{
|
||||
valid = writeNodeData2( (MicroCache2) mc );
|
||||
}
|
||||
else
|
||||
throw new IllegalArgumentException( "unknown cache version: " + mc.getClass() );
|
||||
if ( valid )
|
||||
{
|
||||
mc.finishNode( getIdFromPos() );
|
||||
}
|
||||
else
|
||||
{
|
||||
mc.discardNode();
|
||||
}
|
||||
}
|
||||
|
||||
os.ensureCapacity( bodySize + 8 );
|
||||
public boolean writeNodeData2( MicroCache2 mc ) throws IOException
|
||||
{
|
||||
boolean hasLinks = false;
|
||||
mc.writeShort( getSElev() );
|
||||
mc.writeVarBytes( getNodeDecsription() );
|
||||
|
||||
os.writeShort( (short)(ilon - lonIdx*62500 - 31250) );
|
||||
os.writeShort( (short)(ilat - latIdx*62500 - 31250) );
|
||||
// buffer internal reverse links
|
||||
ArrayList<OsmNodeP> internalReverse = new ArrayList<OsmNodeP>();
|
||||
|
||||
os.writeVarLengthUnsigned( bodySize );
|
||||
os.write( abBuf, 0, bodySize );
|
||||
for ( OsmLinkP link0 = getFirstLink(); link0 != null; link0 = link0.getNext( this ) )
|
||||
{
|
||||
OsmLinkP link = link0;
|
||||
OsmNodeP origin = this;
|
||||
OsmNodeP target = null;
|
||||
|
||||
// first pass just to see if that link is consistent
|
||||
while (link != null)
|
||||
{
|
||||
target = link.getTarget( origin );
|
||||
if ( !target.isTransferNode() )
|
||||
{
|
||||
break;
|
||||
}
|
||||
// next link is the one (of two), does does'nt point back
|
||||
for ( link = target.getFirstLink(); link != null; link = link.getNext( target ) )
|
||||
{
|
||||
if ( link.getTarget( target ) != origin )
|
||||
break;
|
||||
}
|
||||
|
||||
if ( link != null && link.descriptionBitmap != link0.descriptionBitmap )
|
||||
{
|
||||
throw new IllegalArgumentException( "assertion failed: description change along transfer nodes" );
|
||||
}
|
||||
|
||||
origin = target;
|
||||
}
|
||||
if ( link == null )
|
||||
continue; // dead end
|
||||
if ( target == this )
|
||||
continue; // self-ref
|
||||
hasLinks = true;
|
||||
|
||||
// internal reverse links later
|
||||
boolean isReverse = link0.isReverse( this );
|
||||
if ( isReverse )
|
||||
{
|
||||
if ( mc.isInternal( target.ilon, target.ilat ) )
|
||||
{
|
||||
internalReverse.add( target );
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// write link data
|
||||
int sizeoffset = mc.writeSizePlaceHolder();
|
||||
mc.writeVarLengthSigned( target.ilon - ilon );
|
||||
mc.writeVarLengthSigned( target.ilat - ilat );
|
||||
mc.writeModeAndDesc( isReverse, link0.descriptionBitmap );
|
||||
if ( !isReverse ) // write geometry for forward links only
|
||||
{
|
||||
link = link0;
|
||||
origin = this;
|
||||
while (link != null)
|
||||
{
|
||||
OsmNodeP tranferNode = link.getTarget( origin );
|
||||
if ( !tranferNode.isTransferNode() )
|
||||
{
|
||||
break;
|
||||
}
|
||||
mc.writeVarLengthSigned( tranferNode.ilon - origin.ilon );
|
||||
mc.writeVarLengthSigned( tranferNode.ilat - origin.ilat );
|
||||
mc.writeVarLengthSigned( tranferNode.getSElev() - origin.getSElev() );
|
||||
|
||||
// next link is the one (of two), does does'nt point back
|
||||
for ( link = tranferNode.getFirstLink(); link != null; link = link.getNext( tranferNode ) )
|
||||
{
|
||||
if ( link.getTarget( tranferNode ) != origin )
|
||||
break;
|
||||
}
|
||||
if ( link == null )
|
||||
throw new RuntimeException( "follow-up link not found for transfer-node!" );
|
||||
origin = tranferNode;
|
||||
}
|
||||
}
|
||||
mc.injectSize( sizeoffset );
|
||||
}
|
||||
|
||||
while (internalReverse.size() > 0)
|
||||
{
|
||||
int nextIdx = 0;
|
||||
if ( internalReverse.size() > 1 )
|
||||
{
|
||||
int max32 = Integer.MIN_VALUE;
|
||||
for ( int i = 0; i < internalReverse.size(); i++ )
|
||||
{
|
||||
int id32 = mc.shrinkId( internalReverse.get( i ).getIdFromPos() );
|
||||
if ( id32 > max32 )
|
||||
{
|
||||
max32 = id32;
|
||||
nextIdx = i;
|
||||
}
|
||||
}
|
||||
}
|
||||
OsmNodeP target = internalReverse.remove( nextIdx );
|
||||
int sizeoffset = mc.writeSizePlaceHolder();
|
||||
mc.writeVarLengthSigned( target.ilon - ilon );
|
||||
mc.writeVarLengthSigned( target.ilat - ilat );
|
||||
mc.writeModeAndDesc( true, null );
|
||||
mc.injectSize( sizeoffset );
|
||||
}
|
||||
return hasLinks;
|
||||
}
|
||||
|
||||
public String toString2()
|
||||
{
|
||||
return (ilon-180000000) + "_" + (ilat-90000000) + "_" + (selev/4);
|
||||
return ( ilon - 180000000 ) + "_" + ( ilat - 90000000 ) + "_" + ( selev / 4 );
|
||||
}
|
||||
|
||||
public long getIdFromPos()
|
||||
{
|
||||
return ((long)ilon)<<32 | ilat;
|
||||
return ( (long) ilon ) << 32 | ilat;
|
||||
}
|
||||
|
||||
public boolean isBorderNode()
|
||||
{
|
||||
return (bits & BORDER_BIT) != 0;
|
||||
return ( bits & BORDER_BIT ) != 0;
|
||||
}
|
||||
|
||||
public boolean hasTraffic()
|
||||
{
|
||||
return (bits & TRAFFIC_BIT) != 0;
|
||||
return ( bits & TRAFFIC_BIT ) != 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Not really count the ways, just detect if more than one
|
||||
*/
|
||||
public void incWayCount()
|
||||
{
|
||||
if ( ( bits & ANY_WAY_BIT ) != 0 )
|
||||
{
|
||||
bits |= MULTI_WAY_BIT;
|
||||
}
|
||||
bits |= ANY_WAY_BIT;
|
||||
}
|
||||
|
||||
public boolean isTransferNode()
|
||||
{
|
||||
return (bits & BORDER_BIT) == 0 && _linkCnt() == 2;
|
||||
return ( bits & BORDER_BIT ) == 0 && ( bits & MULTI_WAY_BIT ) == 0 && _linkCnt() == 2;
|
||||
}
|
||||
|
||||
private int _linkCnt()
|
||||
{
|
||||
int cnt = 0;
|
||||
|
||||
for( OsmLinkP link = getFirstLink(); link != null; link = link.getNext( this ) )
|
||||
for ( OsmLinkP link = getFirstLink(); link != null; link = link.getNext( this ) )
|
||||
{
|
||||
cnt++;
|
||||
}
|
||||
return cnt;
|
||||
}
|
||||
|
||||
/**
|
||||
* Compares two OsmNodes for position ordering.
|
||||
*
|
||||
* @return -1,0,1 depending an comparson result
|
||||
*/
|
||||
public int compareTo( OsmNodeP n )
|
||||
{
|
||||
long id1 = getIdFromPos();
|
||||
long id2 = n.getIdFromPos();
|
||||
if ( id1 < id2 ) return -1;
|
||||
if ( id1 > id2 ) return 1;
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,21 +1,20 @@
|
|||
package btools.mapcreator;
|
||||
|
||||
import java.io.BufferedInputStream;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.DataInputStream;
|
||||
import java.io.DataOutputStream;
|
||||
import java.io.EOFException;
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.RandomAccessFile;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.TreeMap;
|
||||
|
||||
import btools.expressions.BExpressionContextNode;
|
||||
import btools.codec.DataBuffers;
|
||||
import btools.codec.MicroCache;
|
||||
import btools.codec.MicroCache1;
|
||||
import btools.codec.MicroCache2;
|
||||
import btools.codec.StatCoderContext;
|
||||
import btools.expressions.BExpressionContextWay;
|
||||
import btools.expressions.BExpressionMetaData;
|
||||
import btools.util.ByteArrayUnifier;
|
||||
import btools.util.ByteDataWriter;
|
||||
import btools.util.CompactLongMap;
|
||||
import btools.util.CompactLongSet;
|
||||
import btools.util.Crc32;
|
||||
|
@ -24,11 +23,9 @@ import btools.util.FrozenLongSet;
|
|||
import btools.util.LazyArrayOfLists;
|
||||
|
||||
/**
|
||||
* WayLinker finally puts the pieces together
|
||||
* to create the rd5 files. For each 5*5 tile,
|
||||
* the corresponding nodefile and wayfile is read,
|
||||
* plus the (global) bordernodes file, and an rd5
|
||||
* is written
|
||||
* WayLinker finally puts the pieces together to create the rd5 files. For each
|
||||
* 5*5 tile, the corresponding nodefile and wayfile is read, plus the (global)
|
||||
* bordernodes file, and an rd5 is written
|
||||
*
|
||||
* @author ab
|
||||
*/
|
||||
|
@ -53,13 +50,16 @@ public class WayLinker extends MapCreatorBase
|
|||
private long creationTimeStamp;
|
||||
|
||||
private BExpressionContextWay expctxWay;
|
||||
private BExpressionContextNode expctxNode;
|
||||
|
||||
private ByteArrayUnifier abUnifier;
|
||||
|
||||
private int minLon;
|
||||
private int minLat;
|
||||
|
||||
private int microCacheEncoding = 2;
|
||||
private int divisor = microCacheEncoding == 2 ? 32 : 80;
|
||||
private int cellsize = 1000000 / divisor;
|
||||
|
||||
private void reset()
|
||||
{
|
||||
minLon = -1;
|
||||
|
@ -68,18 +68,21 @@ public class WayLinker extends MapCreatorBase
|
|||
borderSet = new CompactLongSet();
|
||||
}
|
||||
|
||||
public static void main(String[] args) throws Exception
|
||||
public static void main( String[] args ) throws Exception
|
||||
{
|
||||
System.out.println("*** WayLinker: Format a region of an OSM map for routing");
|
||||
if (args.length != 7)
|
||||
System.out.println( "*** WayLinker: Format a region of an OSM map for routing" );
|
||||
if ( args.length != 7 )
|
||||
{
|
||||
System.out.println("usage: java WayLinker <node-tiles-in> <way-tiles-in> <bordernodes> <lookup-file> <profile-file> <data-tiles-out> <data-tiles-suffix> ");
|
||||
System.out
|
||||
.println( "usage: java WayLinker <node-tiles-in> <way-tiles-in> <bordernodes> <lookup-file> <profile-file> <data-tiles-out> <data-tiles-suffix> " );
|
||||
return;
|
||||
}
|
||||
new WayLinker().process( new File( args[0] ), new File( args[1] ), new File( args[2] ), new File( args[3] ), new File( args[4] ), new File( args[5] ), args[6] );
|
||||
new WayLinker().process( new File( args[0] ), new File( args[1] ), new File( args[2] ), new File( args[3] ), new File( args[4] ), new File(
|
||||
args[5] ), args[6] );
|
||||
}
|
||||
|
||||
public void process( File nodeTilesIn, File wayTilesIn, File borderFileIn, File lookupFile, File profileFile, File dataTilesOut, String dataTilesSuffix ) throws Exception
|
||||
public void process( File nodeTilesIn, File wayTilesIn, File borderFileIn, File lookupFile, File profileFile, File dataTilesOut,
|
||||
String dataTilesSuffix ) throws Exception
|
||||
{
|
||||
this.nodeTilesIn = nodeTilesIn;
|
||||
this.trafficTilesIn = new File( "traffic" );
|
||||
|
@ -90,15 +93,13 @@ public class WayLinker extends MapCreatorBase
|
|||
BExpressionMetaData meta = new BExpressionMetaData();
|
||||
|
||||
// read lookup + profile for lookup-version + access-filter
|
||||
expctxWay = new BExpressionContextWay( meta);
|
||||
expctxNode = new BExpressionContextNode( meta);
|
||||
expctxWay = new BExpressionContextWay( meta );
|
||||
meta.readMetaData( lookupFile );
|
||||
|
||||
lookupVersion = meta.lookupVersion;
|
||||
lookupMinorVersion = meta.lookupMinorVersion;
|
||||
|
||||
expctxWay.parseFile( profileFile, "global" );
|
||||
expctxNode.parseFile( profileFile, "global" );
|
||||
|
||||
creationTimeStamp = System.currentTimeMillis();
|
||||
|
||||
|
@ -144,12 +145,12 @@ public class WayLinker extends MapCreatorBase
|
|||
@Override
|
||||
public void nextNode( NodeData data ) throws Exception
|
||||
{
|
||||
OsmNodeP n = data.description == null ? new OsmNodeP() : new OsmNodePT(data.description);
|
||||
OsmNodeP n = data.description == null ? new OsmNodeP() : new OsmNodePT( data.description );
|
||||
n.ilon = data.ilon;
|
||||
n.ilat = data.ilat;
|
||||
n.selev = data.selev;
|
||||
|
||||
if ( readingBorder || (!borderSet.contains( data.nid )) )
|
||||
if ( readingBorder || ( !borderSet.contains( data.nid ) ) )
|
||||
{
|
||||
nodesMap.fastPut( data.nid, n );
|
||||
}
|
||||
|
@ -162,10 +163,12 @@ public class WayLinker extends MapCreatorBase
|
|||
}
|
||||
|
||||
// remember the segment coords
|
||||
int min_lon = (n.ilon / 5000000 ) * 5000000;
|
||||
int min_lat = (n.ilat / 5000000 ) * 5000000;
|
||||
if ( minLon == -1 ) minLon = min_lon;
|
||||
if ( minLat == -1 ) minLat = min_lat;
|
||||
int min_lon = ( n.ilon / 5000000 ) * 5000000;
|
||||
int min_lat = ( n.ilat / 5000000 ) * 5000000;
|
||||
if ( minLon == -1 )
|
||||
minLon = min_lon;
|
||||
if ( minLat == -1 )
|
||||
minLat = min_lat;
|
||||
if ( minLat != min_lat || minLon != min_lon )
|
||||
throw new IllegalArgumentException( "inconsistent node: " + n.ilon + " " + n.ilat );
|
||||
}
|
||||
|
@ -181,20 +184,24 @@ public class WayLinker extends MapCreatorBase
|
|||
boolean ok = expctxWay.getCostfactor() < 10000.;
|
||||
expctxWay.evaluate( true, description, null );
|
||||
ok |= expctxWay.getCostfactor() < 10000.;
|
||||
if ( !ok ) return;
|
||||
if ( !ok )
|
||||
return;
|
||||
|
||||
byte wayBits = 0;
|
||||
expctxWay.decode( description );
|
||||
if ( !expctxWay.getBooleanLookupValue( "bridge" ) ) wayBits |= OsmNodeP.NO_BRIDGE_BIT;
|
||||
if ( !expctxWay.getBooleanLookupValue( "tunnel" ) ) wayBits |= OsmNodeP.NO_TUNNEL_BIT;
|
||||
if ( !expctxWay.getBooleanLookupValue( "bridge" ) )
|
||||
wayBits |= OsmNodeP.NO_BRIDGE_BIT;
|
||||
if ( !expctxWay.getBooleanLookupValue( "tunnel" ) )
|
||||
wayBits |= OsmNodeP.NO_TUNNEL_BIT;
|
||||
|
||||
OsmNodeP n1 = null;
|
||||
OsmNodeP n2 = null;
|
||||
for (int i=0; i<way.nodes.size(); i++)
|
||||
for ( int i = 0; i < way.nodes.size(); i++ )
|
||||
{
|
||||
long nid = way.nodes.get(i);
|
||||
long nid = way.nodes.get( i );
|
||||
n1 = n2;
|
||||
n2 = nodesMap.get( nid );
|
||||
|
||||
if ( n1 != null && n2 != null && n1 != n2 )
|
||||
{
|
||||
OsmLinkP link = n2.createLink( n1 );
|
||||
|
@ -208,10 +215,17 @@ public class WayLinker extends MapCreatorBase
|
|||
lastTraffic = traffic;
|
||||
}
|
||||
link.descriptionBitmap = description;
|
||||
|
||||
if ( n1.ilon / cellsize != n2.ilon / cellsize || n1.ilat / cellsize != n2.ilat / cellsize )
|
||||
{
|
||||
n2.incWayCount(); // force first node after cell-change to be a
|
||||
// network node
|
||||
}
|
||||
}
|
||||
if ( n2 != null )
|
||||
{
|
||||
n2.bits |= wayBits;
|
||||
n2.incWayCount();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -219,33 +233,37 @@ public class WayLinker extends MapCreatorBase
|
|||
@Override
|
||||
public void wayFileEnd( File wayfile ) throws Exception
|
||||
{
|
||||
int ncaches = divisor * divisor;
|
||||
int indexsize = ncaches * 4;
|
||||
|
||||
nodesMap = null;
|
||||
borderSet = null;
|
||||
trafficMap = null;
|
||||
|
||||
byte[] abBuf = new byte[1024*1024];
|
||||
byte[] abBuf2 = new byte[10*1024*1024];
|
||||
byte[] abBuf1 = new byte[10 * 1024 * 1024];
|
||||
byte[] abBuf2 = new byte[10 * 1024 * 1024];
|
||||
|
||||
int maxLon = minLon + 5000000;
|
||||
int maxLat = minLat + 5000000;
|
||||
|
||||
// write segment data to individual files
|
||||
{
|
||||
int nLonSegs = (maxLon - minLon)/1000000;
|
||||
int nLatSegs = (maxLat - minLat)/1000000;
|
||||
int nLonSegs = ( maxLon - minLon ) / 1000000;
|
||||
int nLatSegs = ( maxLat - minLat ) / 1000000;
|
||||
|
||||
// sort the nodes into segments
|
||||
LazyArrayOfLists<OsmNodeP> seglists = new LazyArrayOfLists<OsmNodeP>(nLonSegs*nLatSegs);
|
||||
for( OsmNodeP n : nodesList )
|
||||
LazyArrayOfLists<OsmNodeP> seglists = new LazyArrayOfLists<OsmNodeP>( nLonSegs * nLatSegs );
|
||||
for ( OsmNodeP n : nodesList )
|
||||
{
|
||||
if ( n == null || n.getFirstLink() == null || n.isTransferNode() ) continue;
|
||||
if ( n.ilon < minLon || n.ilon >= maxLon
|
||||
|| n.ilat < minLat || n.ilat >= maxLat ) continue;
|
||||
int lonIdx = (n.ilon-minLon)/1000000;
|
||||
int latIdx = (n.ilat-minLat)/1000000;
|
||||
if ( n == null || n.getFirstLink() == null || n.isTransferNode() )
|
||||
continue;
|
||||
if ( n.ilon < minLon || n.ilon >= maxLon || n.ilat < minLat || n.ilat >= maxLat )
|
||||
continue;
|
||||
int lonIdx = ( n.ilon - minLon ) / 1000000;
|
||||
int latIdx = ( n.ilat - minLat ) / 1000000;
|
||||
|
||||
int tileIndex = lonIdx * nLatSegs + latIdx;
|
||||
seglists.getList(tileIndex).add( n );
|
||||
seglists.getList( tileIndex ).add( n );
|
||||
}
|
||||
nodesList = null;
|
||||
seglists.trimAll();
|
||||
|
@ -258,72 +276,111 @@ public class WayLinker extends MapCreatorBase
|
|||
int[] fileHeaderCrcs = new int[25];
|
||||
|
||||
// write 5*5 index dummy
|
||||
for( int i55=0; i55<25; i55++)
|
||||
for ( int i55 = 0; i55 < 25; i55++ )
|
||||
{
|
||||
os.writeLong( 0 );
|
||||
}
|
||||
long filepos = 200L;
|
||||
|
||||
// sort further in 1/80-degree squares
|
||||
for( int lonIdx = 0; lonIdx < nLonSegs; lonIdx++ )
|
||||
// sort further in 1/divisor-degree squares
|
||||
for ( int lonIdx = 0; lonIdx < nLonSegs; lonIdx++ )
|
||||
{
|
||||
for( int latIdx = 0; latIdx < nLatSegs; latIdx++ )
|
||||
for ( int latIdx = 0; latIdx < nLatSegs; latIdx++ )
|
||||
{
|
||||
int tileIndex = lonIdx * nLatSegs + latIdx;
|
||||
if ( seglists.getSize(tileIndex) > 0 )
|
||||
if ( seglists.getSize( tileIndex ) > 0 )
|
||||
{
|
||||
List<OsmNodeP> nlist = seglists.getList(tileIndex);
|
||||
List<OsmNodeP> nlist = seglists.getList( tileIndex );
|
||||
|
||||
LazyArrayOfLists<OsmNodeP> subs = new LazyArrayOfLists<OsmNodeP>(6400);
|
||||
byte[][] subByteArrays = new byte[6400][];
|
||||
for( int ni=0; ni<nlist.size(); ni++ )
|
||||
LazyArrayOfLists<OsmNodeP> subs = new LazyArrayOfLists<OsmNodeP>( ncaches );
|
||||
byte[][] subByteArrays = new byte[ncaches][];
|
||||
for ( int ni = 0; ni < nlist.size(); ni++ )
|
||||
{
|
||||
OsmNodeP n = nlist.get(ni);
|
||||
int subLonIdx = (n.ilon - minLon) / 12500 - 80*lonIdx;
|
||||
int subLatIdx = (n.ilat - minLat) / 12500 - 80*latIdx;
|
||||
int si = subLatIdx*80 + subLonIdx;
|
||||
subs.getList(si).add( n );
|
||||
OsmNodeP n = nlist.get( ni );
|
||||
int subLonIdx = ( n.ilon - minLon ) / cellsize - divisor * lonIdx;
|
||||
int subLatIdx = ( n.ilat - minLat ) / cellsize - divisor * latIdx;
|
||||
int si = subLatIdx * divisor + subLonIdx;
|
||||
subs.getList( si ).add( n );
|
||||
}
|
||||
subs.trimAll();
|
||||
int[] posIdx = new int[6400];
|
||||
int pos = 25600;
|
||||
for( int si=0; si<6400; si++)
|
||||
{
|
||||
List<OsmNodeP> subList = subs.getList(si);
|
||||
if ( subList.size() > 0 )
|
||||
{
|
||||
Collections.sort( subList );
|
||||
int[] posIdx = new int[ncaches];
|
||||
int pos = indexsize;
|
||||
|
||||
ByteDataWriter dos = new ByteDataWriter( abBuf2 );
|
||||
|
||||
dos.writeInt( subList.size() );
|
||||
for( int ni=0; ni<subList.size(); ni++ )
|
||||
for ( int si = 0; si < ncaches; si++ )
|
||||
{
|
||||
OsmNodeP n = subList.get(ni);
|
||||
n.writeNodeData( dos, abBuf );
|
||||
List<OsmNodeP> subList = subs.getList( si );
|
||||
int size = subList.size();
|
||||
if ( size > 0 )
|
||||
{
|
||||
OsmNodeP n0 = subList.get( 0 );
|
||||
int lonIdxDiv = n0.ilon / cellsize;
|
||||
int latIdxDiv = n0.ilat / cellsize;
|
||||
MicroCache mc = microCacheEncoding == 0 ? new MicroCache1( size, abBuf2, lonIdxDiv, latIdxDiv ) : new MicroCache2( size, abBuf2,
|
||||
lonIdxDiv, latIdxDiv, divisor );
|
||||
|
||||
// sort via treemap
|
||||
TreeMap<Integer, OsmNodeP> sortedList = new TreeMap<Integer, OsmNodeP>();
|
||||
for ( OsmNodeP n : subList )
|
||||
{
|
||||
long longId = n.getIdFromPos();
|
||||
int shrinkid = mc.shrinkId( longId );
|
||||
if ( mc.expandId( shrinkid ) != longId )
|
||||
{
|
||||
throw new IllegalArgumentException( "inconstistent shrinking: " + longId );
|
||||
}
|
||||
sortedList.put( Integer.valueOf( shrinkid ), n );
|
||||
}
|
||||
|
||||
for ( OsmNodeP n : sortedList.values() )
|
||||
{
|
||||
n.writeNodeData( mc );
|
||||
}
|
||||
if ( mc.getSize() > 0 )
|
||||
{
|
||||
byte[] subBytes;
|
||||
for ( ;; )
|
||||
{
|
||||
int len = mc.encodeMicroCache( abBuf1 );
|
||||
subBytes = new byte[len];
|
||||
System.arraycopy( abBuf1, 0, subBytes, 0, len );
|
||||
|
||||
// cross-check the encoding: re-instantiate the cache
|
||||
MicroCache mc2 = microCacheEncoding == 0 ? new MicroCache1( subBytes, lonIdxDiv, latIdxDiv ) : new MicroCache2( new DataBuffers(
|
||||
subBytes ), lonIdxDiv, latIdxDiv, divisor, null, null );
|
||||
// ..and check if still the same
|
||||
String diffMessage = mc.compareWith( mc2 );
|
||||
if ( diffMessage != null )
|
||||
{
|
||||
if ( MicroCache.debug )
|
||||
throw new RuntimeException( "encoding crosscheck failed: " + diffMessage );
|
||||
else
|
||||
MicroCache.debug = true;
|
||||
}
|
||||
else
|
||||
break;
|
||||
}
|
||||
byte[] subBytes = dos.toByteArray();
|
||||
pos += subBytes.length + 4; // reserve 4 bytes for crc
|
||||
subByteArrays[si] = subBytes;
|
||||
}
|
||||
}
|
||||
posIdx[si] = pos;
|
||||
}
|
||||
|
||||
byte[] abSubIndex = compileSubFileIndex( posIdx );
|
||||
fileHeaderCrcs[tileIndex] = Crc32.crc( abSubIndex, 0, abSubIndex.length );
|
||||
os.write( abSubIndex, 0, abSubIndex.length );
|
||||
for( int si=0; si<6400; si++)
|
||||
for ( int si = 0; si < ncaches; si++ )
|
||||
{
|
||||
byte[] ab = subByteArrays[si];
|
||||
if ( ab != null )
|
||||
{
|
||||
os.write( ab );
|
||||
os.writeInt( Crc32.crc( ab, 0 , ab.length ) );
|
||||
os.writeInt( Crc32.crc( ab, 0, ab.length ) ^ microCacheEncoding );
|
||||
}
|
||||
}
|
||||
filepos += pos;
|
||||
}
|
||||
fileIndex[ tileIndex ] = filepos;
|
||||
fileIndex[tileIndex] = filepos;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -331,8 +388,8 @@ public class WayLinker extends MapCreatorBase
|
|||
|
||||
// write extra data: timestamp + index-checksums
|
||||
os.writeLong( creationTimeStamp );
|
||||
os.writeInt( Crc32.crc( abFileIndex, 0, abFileIndex.length ) );
|
||||
for( int i55=0; i55<25; i55++)
|
||||
os.writeInt( Crc32.crc( abFileIndex, 0, abFileIndex.length ) ^ microCacheEncoding );
|
||||
for ( int i55 = 0; i55 < 25; i55++ )
|
||||
{
|
||||
os.writeInt( fileHeaderCrcs[i55] );
|
||||
}
|
||||
|
@ -344,13 +401,14 @@ public class WayLinker extends MapCreatorBase
|
|||
ra.write( abFileIndex, 0, abFileIndex.length );
|
||||
ra.close();
|
||||
}
|
||||
System.out.println( "**** codec stats: *******\n" + StatCoderContext.getBitReport() );
|
||||
}
|
||||
|
||||
private byte[] compileFileIndex( long[] fileIndex, short lookupVersion, short lookupMinorVersion ) throws Exception
|
||||
{
|
||||
ByteArrayOutputStream bos = new ByteArrayOutputStream( );
|
||||
ByteArrayOutputStream bos = new ByteArrayOutputStream();
|
||||
DataOutputStream dos = new DataOutputStream( bos );
|
||||
for( int i55=0; i55<25; i55++)
|
||||
for ( int i55 = 0; i55 < 25; i55++ )
|
||||
{
|
||||
long versionPrefix = i55 == 1 ? lookupMinorVersion : lookupVersion;
|
||||
versionPrefix <<= 48;
|
||||
|
@ -362,9 +420,9 @@ public class WayLinker extends MapCreatorBase
|
|||
|
||||
private byte[] compileSubFileIndex( int[] posIdx ) throws Exception
|
||||
{
|
||||
ByteArrayOutputStream bos = new ByteArrayOutputStream( );
|
||||
ByteArrayOutputStream bos = new ByteArrayOutputStream();
|
||||
DataOutputStream dos = new DataOutputStream( bos );
|
||||
for( int si=0; si<6400; si++)
|
||||
for ( int si = 0; si < posIdx.length; si++ )
|
||||
{
|
||||
dos.writeInt( posIdx[si] );
|
||||
}
|
||||
|
|
|
@ -17,5 +17,15 @@
|
|||
<artifactId>brouter-util</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.btools</groupId>
|
||||
<artifactId>brouter-codec</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.btools</groupId>
|
||||
<artifactId>brouter-expressions</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
</project>
|
||||
|
|
|
@ -1,271 +0,0 @@
|
|||
/**
|
||||
* cache for a single square
|
||||
*
|
||||
* @author ab
|
||||
*/
|
||||
package btools.mapaccess;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import btools.util.ByteDataReader;
|
||||
import btools.util.Crc32;
|
||||
|
||||
final class MicroCache extends ByteDataReader
|
||||
{
|
||||
private int[] faid;
|
||||
private int[] fapos;
|
||||
private int size = 0;
|
||||
private int delcount = 0;
|
||||
private int delbytes = 0;
|
||||
private int p2size; // next power of 2 of size
|
||||
|
||||
// the object parsing position and length
|
||||
private int aboffsetEnd;
|
||||
|
||||
private int lonIdxBase;
|
||||
private int latIdxBase;
|
||||
|
||||
// cache control: a virgin cache can be
|
||||
// put to ghost state for later recovery
|
||||
boolean virgin = true;
|
||||
boolean ghost = false;
|
||||
|
||||
public MicroCache( OsmFile segfile, int lonIdx80, int latIdx80, byte[] iobuffer ) throws Exception
|
||||
{
|
||||
super( null );
|
||||
int lonDegree = lonIdx80/80;
|
||||
int latDegree = latIdx80/80;
|
||||
|
||||
lonIdxBase = (lonIdx80/5)*62500 + 31250;
|
||||
latIdxBase = (latIdx80/5)*62500 + 31250;
|
||||
|
||||
int subIdx = (latIdx80-80*latDegree)*80 + (lonIdx80-80*lonDegree);
|
||||
|
||||
{
|
||||
ab = iobuffer;
|
||||
int asize = segfile.getDataInputForSubIdx(subIdx, ab);
|
||||
|
||||
if ( asize == 0 )
|
||||
{
|
||||
ab = null;
|
||||
return;
|
||||
}
|
||||
if ( asize > iobuffer.length )
|
||||
{
|
||||
ab = new byte[asize];
|
||||
asize = segfile.getDataInputForSubIdx(subIdx, ab);
|
||||
}
|
||||
aboffset = 0;
|
||||
size = readInt();
|
||||
|
||||
// get net size
|
||||
int nbytes = 0;
|
||||
for(int i = 0; i<size; i++)
|
||||
{
|
||||
aboffset += 4;
|
||||
int bodySize = readVarLengthUnsigned();
|
||||
|
||||
aboffset += bodySize;
|
||||
nbytes += bodySize;
|
||||
}
|
||||
|
||||
int crc = Crc32.crc( ab, 0, aboffset );
|
||||
if ( crc != readInt() )
|
||||
{
|
||||
throw new IOException( "checkum error" );
|
||||
}
|
||||
|
||||
// new array with only net data
|
||||
byte[] nab = new byte[nbytes];
|
||||
aboffset = 4;
|
||||
int noffset = 0;
|
||||
faid = new int[size];
|
||||
fapos = new int[size];
|
||||
p2size = 0x40000000;
|
||||
while( p2size > size ) p2size >>= 1;
|
||||
|
||||
for(int i = 0; i<size; i++)
|
||||
{
|
||||
faid[i] = readInt() ^ 0x8000; // flip lat-sign for correct ordering
|
||||
|
||||
int bodySize = readVarLengthUnsigned();
|
||||
fapos[i] = noffset;
|
||||
System.arraycopy( ab, aboffset, nab, noffset, bodySize );
|
||||
aboffset += bodySize;
|
||||
noffset += bodySize;
|
||||
}
|
||||
|
||||
ab = nab;
|
||||
}
|
||||
}
|
||||
|
||||
public int getSize()
|
||||
{
|
||||
return size;
|
||||
}
|
||||
|
||||
public int getDataSize()
|
||||
{
|
||||
return ab == null ? 0 : ab.length;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the internal reader (aboffset, ablength)
|
||||
* to the body data for the given id
|
||||
*
|
||||
* @return true if id was found
|
||||
*
|
||||
* Throws an exception if that id was already requested
|
||||
* as an early detector for identity problems
|
||||
*/
|
||||
private boolean getAndClear( long id64 )
|
||||
{
|
||||
if ( size == 0 )
|
||||
{
|
||||
return false;
|
||||
}
|
||||
int id = shrinkId( id64 );
|
||||
int[] a = faid;
|
||||
int offset = p2size;
|
||||
int n = 0;
|
||||
|
||||
|
||||
while ( offset> 0 )
|
||||
{
|
||||
int nn = n + offset;
|
||||
if ( nn < size && a[nn] <= id )
|
||||
{
|
||||
n = nn;
|
||||
}
|
||||
offset >>= 1;
|
||||
}
|
||||
if ( a[n] == id )
|
||||
{
|
||||
if ( ( fapos[n] & 0x80000000 ) == 0 )
|
||||
{
|
||||
aboffset = fapos[n];
|
||||
int ablength = ( n+1 < size ? fapos[n+1] & 0x7fffffff : ab.length ) - aboffset;
|
||||
aboffsetEnd = aboffset + ablength;
|
||||
fapos[n] |= 0x80000000; // mark deleted
|
||||
delbytes+= ablength;
|
||||
delcount++;
|
||||
return true;
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new RuntimeException( "MicroCache: node already consumed: id=" + id );
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Fill a hollow node with it's body data
|
||||
*/
|
||||
public void fillNode( OsmNode node, OsmNodesMap nodesMap, DistanceChecker dc, boolean doCollect )
|
||||
{
|
||||
long id = node.getIdFromPos();
|
||||
if ( getAndClear( id ) )
|
||||
{
|
||||
node.parseNodeBody( this, nodesMap, dc );
|
||||
}
|
||||
|
||||
if ( doCollect && delcount > size / 2 ) // garbage collection
|
||||
{
|
||||
collect();
|
||||
}
|
||||
}
|
||||
|
||||
void collect()
|
||||
{
|
||||
if ( delcount > 0 )
|
||||
{
|
||||
virgin = false;
|
||||
|
||||
int nsize = size - delcount;
|
||||
if ( nsize == 0 )
|
||||
{
|
||||
faid = null;
|
||||
fapos = null;
|
||||
}
|
||||
else
|
||||
{
|
||||
int[] nfaid = new int[nsize];
|
||||
int[] nfapos = new int[nsize];
|
||||
int idx = 0;
|
||||
|
||||
byte[] nab = new byte[ab.length - delbytes];
|
||||
int nab_off = 0;
|
||||
for( int i=0; i<size; i++ )
|
||||
{
|
||||
int pos = fapos[i];
|
||||
if ( ( pos & 0x80000000 ) == 0 )
|
||||
{
|
||||
int ablength = ( i+1 < size ? fapos[i+1] & 0x7fffffff : ab.length ) - pos;
|
||||
System.arraycopy( ab, pos, nab, nab_off, ablength );
|
||||
nfaid[idx] = faid[i];
|
||||
nfapos[idx] = nab_off;
|
||||
nab_off += ablength;
|
||||
idx++;
|
||||
}
|
||||
}
|
||||
faid = nfaid;
|
||||
fapos = nfapos;
|
||||
ab = nab;
|
||||
}
|
||||
size = nsize;
|
||||
delcount = 0;
|
||||
delbytes = 0;
|
||||
p2size = 0x40000000;
|
||||
while( p2size > size ) p2size >>= 1;
|
||||
}
|
||||
}
|
||||
|
||||
void unGhost()
|
||||
{
|
||||
ghost = false;
|
||||
delcount = 0;
|
||||
delbytes = 0;
|
||||
for( int i=0; i<size; i++ )
|
||||
{
|
||||
fapos[i] &= 0x7fffffff; // clear deleted flags
|
||||
}
|
||||
}
|
||||
|
||||
public List<OsmNode> getPositions( OsmNodesMap nodesMap )
|
||||
{
|
||||
ArrayList<OsmNode> positions = new ArrayList<OsmNode>();
|
||||
|
||||
for( int i=0; i<size; i++ )
|
||||
{
|
||||
int id32 = faid[i];
|
||||
long id64 = expandId( id32 );
|
||||
OsmNode n = new OsmNode( id64 );
|
||||
n.setHollow();
|
||||
nodesMap.put( n );
|
||||
positions.add( n );
|
||||
}
|
||||
return positions;
|
||||
}
|
||||
|
||||
private long expandId( int id32 )
|
||||
{
|
||||
int lon32 = lonIdxBase + (short)(id32 >> 16);
|
||||
int lat32 = latIdxBase + (short)((id32 & 0xffff) ^ 0x8000);
|
||||
return ((long)lon32)<<32 | lat32;
|
||||
}
|
||||
|
||||
private int shrinkId( long id64 )
|
||||
{
|
||||
int lon32 = (int)(id64 >> 32);
|
||||
int lat32 = (int)(id64 & 0xffffffff);
|
||||
return (lon32 - lonIdxBase)<<16 | ( ( (lat32 - latIdxBase) & 0xffff) ^ 0x8000);
|
||||
}
|
||||
|
||||
public boolean hasMoreData()
|
||||
{
|
||||
return aboffset < aboffsetEnd;
|
||||
}
|
||||
}
|
|
@ -11,26 +11,34 @@ import java.util.ArrayList;
|
|||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
|
||||
import btools.codec.DataBuffers;
|
||||
import btools.codec.MicroCache;
|
||||
import btools.codec.WaypointMatcher;
|
||||
import btools.expressions.BExpressionContextWay;
|
||||
|
||||
public final class NodesCache
|
||||
{
|
||||
private File segmentDir;
|
||||
private File secondarySegmentsDir = null;
|
||||
|
||||
private OsmNodesMap nodesMap;
|
||||
private BExpressionContextWay expCtxWay;
|
||||
private int lookupVersion;
|
||||
private int lookupMinorVersion;
|
||||
private boolean carMode;
|
||||
private boolean forceSecondaryData;
|
||||
private String currentFileName;
|
||||
|
||||
private HashMap<String,PhysicalFile> fileCache;
|
||||
private byte[] iobuffer;
|
||||
private HashMap<String, PhysicalFile> fileCache;
|
||||
private DataBuffers dataBuffers;
|
||||
|
||||
private OsmFile[][] fileRows;
|
||||
private ArrayList<MicroCache> segmentList = new ArrayList<MicroCache>();
|
||||
|
||||
public DistanceChecker distanceChecker;
|
||||
|
||||
public WaypointMatcher waypointMatcher;
|
||||
|
||||
public boolean oom_carsubset_hint = false;
|
||||
public boolean first_file_access_failed = false;
|
||||
public String first_file_access_name;
|
||||
|
@ -38,34 +46,37 @@ public final class NodesCache
|
|||
private long cacheSum = 0;
|
||||
private boolean garbageCollectionEnabled = false;
|
||||
|
||||
|
||||
public NodesCache( String segmentDir, OsmNodesMap nodesMap, int lookupVersion, int minorVersion, boolean carMode, boolean forceSecondaryData, NodesCache oldCache )
|
||||
public NodesCache( String segmentDir, OsmNodesMap nodesMap, BExpressionContextWay ctxWay, boolean carMode, boolean forceSecondaryData,
|
||||
NodesCache oldCache )
|
||||
{
|
||||
this.segmentDir = new File( segmentDir );
|
||||
this.nodesMap = nodesMap;
|
||||
this.lookupVersion = lookupVersion;
|
||||
this.lookupMinorVersion = minorVersion;
|
||||
this.expCtxWay = ctxWay;
|
||||
this.lookupVersion = ctxWay.meta.lookupVersion;
|
||||
this.lookupMinorVersion = ctxWay.meta.lookupMinorVersion;
|
||||
this.carMode = carMode;
|
||||
this.forceSecondaryData = forceSecondaryData;
|
||||
|
||||
first_file_access_failed = false;
|
||||
first_file_access_name = null;
|
||||
|
||||
if ( !this.segmentDir.isDirectory() ) throw new RuntimeException( "segment directory " + segmentDir + " does not exist" );
|
||||
if ( !this.segmentDir.isDirectory() )
|
||||
throw new RuntimeException( "segment directory " + segmentDir + " does not exist" );
|
||||
|
||||
if ( oldCache != null )
|
||||
{
|
||||
fileCache = oldCache.fileCache;
|
||||
iobuffer = oldCache.iobuffer;
|
||||
dataBuffers = oldCache.dataBuffers;
|
||||
oom_carsubset_hint = oldCache.oom_carsubset_hint;
|
||||
secondarySegmentsDir = oldCache.secondarySegmentsDir;
|
||||
|
||||
// re-use old, virgin caches
|
||||
fileRows = oldCache.fileRows;
|
||||
for( OsmFile[] fileRow : fileRows )
|
||||
for ( OsmFile[] fileRow : fileRows )
|
||||
{
|
||||
if ( fileRow == null ) continue;
|
||||
for( OsmFile osmf : fileRow )
|
||||
if ( fileRow == null )
|
||||
continue;
|
||||
for ( OsmFile osmf : fileRow )
|
||||
{
|
||||
cacheSum += osmf.setGhostState();
|
||||
}
|
||||
|
@ -73,9 +84,9 @@ public final class NodesCache
|
|||
}
|
||||
else
|
||||
{
|
||||
fileCache = new HashMap<String,PhysicalFile>(4);
|
||||
fileCache = new HashMap<String, PhysicalFile>( 4 );
|
||||
fileRows = new OsmFile[180][];
|
||||
iobuffer = new byte[65636];
|
||||
dataBuffers = new DataBuffers();
|
||||
secondarySegmentsDir = StorageConfigHelper.getSecondarySegmentDir( segmentDir );
|
||||
}
|
||||
}
|
||||
|
@ -91,7 +102,8 @@ public final class NodesCache
|
|||
if ( secondarySegmentsDir != null && !f.exists() )
|
||||
{
|
||||
File f2 = new File( secondarySegmentsDir, filename );
|
||||
if ( f2.exists() ) return f2;
|
||||
if ( f2.exists() )
|
||||
return f2;
|
||||
}
|
||||
return f;
|
||||
}
|
||||
|
@ -100,24 +112,30 @@ public final class NodesCache
|
|||
// clean all ghosts and enable garbage collection
|
||||
private void checkEnableCacheCleaning()
|
||||
{
|
||||
if ( cacheSum < 200000 || garbageCollectionEnabled ) return;
|
||||
if ( cacheSum < 500000 || garbageCollectionEnabled )
|
||||
return;
|
||||
|
||||
for( int i=0; i<fileRows.length; i++ )
|
||||
for ( int i = 0; i < fileRows.length; i++ )
|
||||
{
|
||||
OsmFile[] fileRow = fileRows[i];
|
||||
if ( fileRow == null ) continue;
|
||||
if ( fileRow == null )
|
||||
continue;
|
||||
int nghosts = 0;
|
||||
for( OsmFile osmf : fileRow )
|
||||
for ( OsmFile osmf : fileRow )
|
||||
{
|
||||
if ( osmf.ghost ) nghosts++;
|
||||
else osmf.cleanAll();
|
||||
if ( osmf.ghost )
|
||||
nghosts++;
|
||||
else
|
||||
osmf.cleanAll();
|
||||
}
|
||||
if ( nghosts == 0 ) continue;
|
||||
int j=0;
|
||||
OsmFile[] frow = new OsmFile[fileRow.length-nghosts];
|
||||
for( OsmFile osmf : fileRow )
|
||||
if ( nghosts == 0 )
|
||||
continue;
|
||||
int j = 0;
|
||||
OsmFile[] frow = new OsmFile[fileRow.length - nghosts];
|
||||
for ( OsmFile osmf : fileRow )
|
||||
{
|
||||
if ( osmf.ghost ) continue;
|
||||
if ( osmf.ghost )
|
||||
continue;
|
||||
frow[j++] = osmf;
|
||||
}
|
||||
fileRows[i] = frow;
|
||||
|
@ -135,14 +153,12 @@ public final class NodesCache
|
|||
{
|
||||
try
|
||||
{
|
||||
int lonIdx80 = ilon/12500;
|
||||
int latIdx80 = ilat/12500;
|
||||
int lonDegree = lonIdx80/80;
|
||||
int latDegree = latIdx80/80;
|
||||
int lonDegree = ilon / 1000000;
|
||||
int latDegree = ilat / 1000000;
|
||||
OsmFile osmf = null;
|
||||
OsmFile[] fileRow = fileRows[latDegree];
|
||||
int ndegrees = fileRow == null ? 0 : fileRow.length;
|
||||
for( int i=0; i<ndegrees; i++ )
|
||||
for ( int i = 0; i < ndegrees; i++ )
|
||||
{
|
||||
if ( fileRow[i].lonDegree == lonDegree )
|
||||
{
|
||||
|
@ -153,8 +169,8 @@ public final class NodesCache
|
|||
if ( osmf == null )
|
||||
{
|
||||
osmf = fileForSegment( lonDegree, latDegree );
|
||||
OsmFile[] newFileRow = new OsmFile[ndegrees+1];
|
||||
for( int i=0; i<ndegrees; i++ )
|
||||
OsmFile[] newFileRow = new OsmFile[ndegrees + 1];
|
||||
for ( int i = 0; i < ndegrees; i++ )
|
||||
{
|
||||
newFileRow[i] = fileRow[i];
|
||||
}
|
||||
|
@ -163,51 +179,66 @@ public final class NodesCache
|
|||
}
|
||||
osmf.ghost = false;
|
||||
currentFileName = osmf.filename;
|
||||
if ( osmf.microCaches == null )
|
||||
|
||||
if ( !osmf.hasData() )
|
||||
{
|
||||
return null;
|
||||
}
|
||||
int subIdx = (latIdx80-80*latDegree)*80 + (lonIdx80-80*lonDegree);
|
||||
MicroCache segment = osmf.microCaches[subIdx];
|
||||
|
||||
MicroCache segment = osmf.getMicroCache( ilon, ilat );
|
||||
if ( segment == null )
|
||||
{
|
||||
// nodesMap.removeCompleteNodes();
|
||||
|
||||
checkEnableCacheCleaning();
|
||||
segment = osmf.createMicroCache( ilon, ilat, dataBuffers, expCtxWay, waypointMatcher );
|
||||
|
||||
segment = new MicroCache( osmf, lonIdx80, latIdx80, iobuffer );
|
||||
cacheSum += segment.getDataSize();
|
||||
osmf.microCaches[subIdx] = segment;
|
||||
if ( segment.getSize() > 0 )
|
||||
{
|
||||
segmentList.add( segment );
|
||||
}
|
||||
}
|
||||
else if ( segment.ghost )
|
||||
{
|
||||
segment.unGhost();
|
||||
if ( segment.getSize() > 0 )
|
||||
{
|
||||
segmentList.add( segment );
|
||||
}
|
||||
}
|
||||
return segment;
|
||||
}
|
||||
catch( RuntimeException re )
|
||||
catch (RuntimeException re)
|
||||
{
|
||||
throw re;
|
||||
}
|
||||
catch( Exception e )
|
||||
catch (Exception e)
|
||||
{
|
||||
throw new RuntimeException( "error reading datafile " + currentFileName + ": " + e );
|
||||
throw new RuntimeException( "error reading datafile " + currentFileName + ": ", e );
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public boolean obtainNonHollowNode( OsmNode node )
|
||||
{
|
||||
if ( !node.isHollow() ) return true;
|
||||
if ( !node.isHollow() )
|
||||
return true;
|
||||
|
||||
MicroCache segment = getSegmentFor( node.ilon, node.ilat );
|
||||
if ( segment == null )
|
||||
{
|
||||
return false;
|
||||
}
|
||||
segment.fillNode( node, nodesMap, distanceChecker, garbageCollectionEnabled );
|
||||
|
||||
long id = node.getIdFromPos();
|
||||
if ( segment.getAndClear( id ) )
|
||||
{
|
||||
node.parseNodeBody( segment, nodesMap, distanceChecker );
|
||||
}
|
||||
|
||||
if ( garbageCollectionEnabled ) // garbage collection
|
||||
{
|
||||
segment.collect( segment.getSize() >> 1 );
|
||||
}
|
||||
|
||||
return !node.isHollow();
|
||||
}
|
||||
|
||||
|
@ -215,13 +246,12 @@ public final class NodesCache
|
|||
{
|
||||
int lonMod5 = lonDegree % 5;
|
||||
int latMod5 = latDegree % 5;
|
||||
int tileIndex = lonMod5 * 5 + latMod5;
|
||||
|
||||
int lon = lonDegree - 180 - lonMod5;
|
||||
String slon = lon < 0 ? "W" + (-lon) : "E" + lon;
|
||||
String slon = lon < 0 ? "W" + ( -lon ) : "E" + lon;
|
||||
int lat = latDegree - 90 - latMod5;
|
||||
|
||||
String slat = lat < 0 ? "S" + (-lat) : "N" + lat;
|
||||
String slat = lat < 0 ? "S" + ( -lat ) : "N" + lat;
|
||||
String filenameBase = slon + "_" + slat;
|
||||
|
||||
currentFileName = filenameBase + ".rd5/cd5";
|
||||
|
@ -233,25 +263,26 @@ public final class NodesCache
|
|||
if ( carMode )
|
||||
{
|
||||
File carFile = getFileFromSegmentDir( "carsubset/" + filenameBase + ".cd5" );
|
||||
if ( carFile.exists() ) f = carFile;
|
||||
if ( carFile.exists() )
|
||||
f = carFile;
|
||||
}
|
||||
if ( f == null )
|
||||
{
|
||||
File fullFile = getFileFromSegmentDir( filenameBase + ".rd5" );
|
||||
if ( fullFile.exists() ) f = fullFile;
|
||||
if ( carMode && f != null ) oom_carsubset_hint = true;
|
||||
if ( fullFile.exists() )
|
||||
f = fullFile;
|
||||
if ( carMode && f != null )
|
||||
oom_carsubset_hint = true;
|
||||
}
|
||||
if ( f != null )
|
||||
{
|
||||
currentFileName = f.getName();
|
||||
ra = new PhysicalFile( f, iobuffer, lookupVersion, lookupMinorVersion );
|
||||
ra = new PhysicalFile( f, dataBuffers, lookupVersion, lookupMinorVersion );
|
||||
}
|
||||
fileCache.put( filenameBase, ra );
|
||||
}
|
||||
ra = fileCache.get( filenameBase );
|
||||
OsmFile osmf = new OsmFile( ra, tileIndex, iobuffer );
|
||||
osmf.lonDegree = lonDegree;
|
||||
osmf.latDegree = latDegree;
|
||||
OsmFile osmf = new OsmFile( ra, lonDegree, latDegree, dataBuffers );
|
||||
|
||||
if ( first_file_access_name == null )
|
||||
{
|
||||
|
@ -265,24 +296,34 @@ public final class NodesCache
|
|||
public List<OsmNode> getAllNodes()
|
||||
{
|
||||
List<OsmNode> all = new ArrayList<OsmNode>();
|
||||
for( MicroCache segment : segmentList )
|
||||
for ( MicroCache segment : segmentList )
|
||||
{
|
||||
List<OsmNode> positions = segment.getPositions( nodesMap );
|
||||
ArrayList<OsmNode> positions = new ArrayList<OsmNode>();
|
||||
int size = segment.getSize();
|
||||
|
||||
for ( int i = 0; i < size; i++ )
|
||||
{
|
||||
long id = segment.getIdForIndex( i );
|
||||
OsmNode n = new OsmNode( id );
|
||||
n.setHollow();
|
||||
nodesMap.put( n );
|
||||
positions.add( n );
|
||||
}
|
||||
all.addAll( positions );
|
||||
}
|
||||
return all;
|
||||
}
|
||||
|
||||
|
||||
public void close()
|
||||
{
|
||||
for( PhysicalFile f: fileCache.values() )
|
||||
for ( PhysicalFile f : fileCache.values() )
|
||||
{
|
||||
try
|
||||
{
|
||||
if ( f != null ) f.ra.close();
|
||||
if ( f != null )
|
||||
f.ra.close();
|
||||
}
|
||||
catch( IOException ioe )
|
||||
catch (IOException ioe)
|
||||
{
|
||||
// ignore
|
||||
}
|
||||
|
|
|
@ -8,6 +8,12 @@ package btools.mapaccess;
|
|||
import java.io.IOException;
|
||||
import java.io.RandomAccessFile;
|
||||
|
||||
import btools.codec.DataBuffers;
|
||||
import btools.codec.MicroCache;
|
||||
import btools.codec.MicroCache1;
|
||||
import btools.codec.MicroCache2;
|
||||
import btools.codec.TagValueValidator;
|
||||
import btools.codec.WaypointMatcher;
|
||||
import btools.util.ByteDataReader;
|
||||
import btools.util.Crc32;
|
||||
|
||||
|
@ -17,7 +23,7 @@ final class OsmFile
|
|||
private long fileOffset;
|
||||
|
||||
private int[] posIdx;
|
||||
public MicroCache[] microCaches;
|
||||
private MicroCache[] microCaches;
|
||||
|
||||
public int lonDegree;
|
||||
public int latDegree;
|
||||
|
@ -26,25 +32,44 @@ final class OsmFile
|
|||
|
||||
public boolean ghost = false;
|
||||
|
||||
public OsmFile( PhysicalFile rafile, int tileIndex, byte[] iobuffer ) throws Exception
|
||||
private int divisor;
|
||||
private int cellsize;
|
||||
private int ncaches;
|
||||
private int indexsize;
|
||||
|
||||
public OsmFile( PhysicalFile rafile, int lonDegree, int latDegree, DataBuffers dataBuffers ) throws Exception
|
||||
{
|
||||
this.lonDegree = lonDegree;
|
||||
this.latDegree = latDegree;
|
||||
int lonMod5 = lonDegree % 5;
|
||||
int latMod5 = latDegree % 5;
|
||||
int tileIndex = lonMod5 * 5 + latMod5;
|
||||
|
||||
if ( rafile != null )
|
||||
{
|
||||
divisor = rafile.divisor;
|
||||
|
||||
cellsize = 1000000 / divisor;
|
||||
ncaches = divisor * divisor;
|
||||
indexsize = ncaches * 4;
|
||||
|
||||
byte[] iobuffer = dataBuffers.iobuffer;
|
||||
filename = rafile.fileName;
|
||||
|
||||
long[] index = rafile.fileIndex;
|
||||
fileOffset = tileIndex > 0 ? index[ tileIndex-1 ] : 200L;
|
||||
if ( fileOffset == index[ tileIndex] ) return; // empty
|
||||
fileOffset = tileIndex > 0 ? index[tileIndex - 1] : 200L;
|
||||
if ( fileOffset == index[tileIndex] )
|
||||
return; // empty
|
||||
|
||||
is = rafile.ra;
|
||||
posIdx = new int[6400];
|
||||
microCaches = new MicroCache[6400];
|
||||
posIdx = new int[ncaches];
|
||||
microCaches = new MicroCache[ncaches];
|
||||
is.seek( fileOffset );
|
||||
is.readFully( iobuffer, 0, 25600 );
|
||||
is.readFully( iobuffer, 0, indexsize );
|
||||
|
||||
if ( rafile.fileHeaderCrcs != null )
|
||||
{
|
||||
int headerCrc = Crc32.crc( iobuffer, 0, 25600 );
|
||||
int headerCrc = Crc32.crc( iobuffer, 0, indexsize );
|
||||
if ( rafile.fileHeaderCrcs[tileIndex] != headerCrc )
|
||||
{
|
||||
throw new IOException( "sub index checksum error" );
|
||||
|
@ -52,23 +77,47 @@ final class OsmFile
|
|||
}
|
||||
|
||||
ByteDataReader dis = new ByteDataReader( iobuffer );
|
||||
for( int i=0; i<6400; i++ )
|
||||
for ( int i = 0; i < ncaches; i++ )
|
||||
{
|
||||
posIdx[i] = dis.readInt();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public boolean hasData()
|
||||
{
|
||||
return microCaches != null;
|
||||
}
|
||||
|
||||
public MicroCache getMicroCache( int ilon, int ilat )
|
||||
{
|
||||
int lonIdx = ilon / cellsize;
|
||||
int latIdx = ilat / cellsize;
|
||||
int subIdx = ( latIdx - divisor * latDegree ) * divisor + ( lonIdx - divisor * lonDegree );
|
||||
return microCaches[subIdx];
|
||||
}
|
||||
|
||||
public MicroCache createMicroCache( int ilon, int ilat, DataBuffers dataBuffers, TagValueValidator wayValidator, WaypointMatcher waypointMatcher )
|
||||
throws Exception
|
||||
{
|
||||
int lonIdx = ilon / cellsize;
|
||||
int latIdx = ilat / cellsize;
|
||||
MicroCache segment = createMicroCache( lonIdx, latIdx, dataBuffers, wayValidator, waypointMatcher, true );
|
||||
int subIdx = ( latIdx - divisor * latDegree ) * divisor + ( lonIdx - divisor * lonDegree );
|
||||
microCaches[subIdx] = segment;
|
||||
return segment;
|
||||
}
|
||||
|
||||
private int getPosIdx( int idx )
|
||||
{
|
||||
return idx == -1 ? 25600 : posIdx[idx];
|
||||
return idx == -1 ? indexsize : posIdx[idx];
|
||||
}
|
||||
|
||||
public int getDataInputForSubIdx( int subIdx, byte[] iobuffer ) throws Exception
|
||||
{
|
||||
int startPos = getPosIdx(subIdx-1);
|
||||
int endPos = getPosIdx(subIdx);
|
||||
int size = endPos-startPos;
|
||||
int startPos = getPosIdx( subIdx - 1 );
|
||||
int endPos = getPosIdx( subIdx );
|
||||
int size = endPos - startPos;
|
||||
if ( size > 0 )
|
||||
{
|
||||
is.seek( fileOffset + startPos );
|
||||
|
@ -80,16 +129,50 @@ final class OsmFile
|
|||
return size;
|
||||
}
|
||||
|
||||
public MicroCache createMicroCache( int lonIdx, int latIdx, DataBuffers dataBuffers, TagValueValidator wayValidator,
|
||||
WaypointMatcher waypointMatcher, boolean reallyDecode ) throws Exception
|
||||
{
|
||||
int subIdx = ( latIdx - divisor * latDegree ) * divisor + ( lonIdx - divisor * lonDegree );
|
||||
|
||||
byte[] ab = dataBuffers.iobuffer;
|
||||
int asize = getDataInputForSubIdx( subIdx, ab );
|
||||
|
||||
if ( asize == 0 )
|
||||
{
|
||||
return MicroCache.emptyCache();
|
||||
}
|
||||
if ( asize > ab.length )
|
||||
{
|
||||
ab = new byte[asize];
|
||||
asize = getDataInputForSubIdx( subIdx, ab );
|
||||
}
|
||||
// hack: the checksum contains the information
|
||||
// which type of microcache we have
|
||||
|
||||
int crcData = Crc32.crc( ab, 0, asize - 4 );
|
||||
int crcFooter = new ByteDataReader( ab, asize - 4 ).readInt();
|
||||
if ( crcData == crcFooter )
|
||||
{
|
||||
return reallyDecode ? new MicroCache1( ab, lonIdx, latIdx ) : null;
|
||||
}
|
||||
if ( ( crcData ^ 2 ) == crcFooter )
|
||||
{
|
||||
return reallyDecode ? new MicroCache2( dataBuffers, lonIdx, latIdx, divisor, wayValidator, waypointMatcher ) : null;
|
||||
}
|
||||
throw new IOException( "checkum error" );
|
||||
}
|
||||
|
||||
// set this OsmFile to ghost-state:
|
||||
long setGhostState()
|
||||
{
|
||||
long sum = 0;
|
||||
ghost = true;
|
||||
int nc = microCaches == null ? 0 : microCaches.length;
|
||||
for( int i=0; i< nc; i++ )
|
||||
for ( int i = 0; i < nc; i++ )
|
||||
{
|
||||
MicroCache mc = microCaches[i];
|
||||
if ( mc == null ) continue;
|
||||
if ( mc == null )
|
||||
continue;
|
||||
if ( mc.virgin )
|
||||
{
|
||||
mc.ghost = true;
|
||||
|
@ -106,17 +189,18 @@ final class OsmFile
|
|||
void cleanAll()
|
||||
{
|
||||
int nc = microCaches == null ? 0 : microCaches.length;
|
||||
for( int i=0; i< nc; i++ )
|
||||
for ( int i = 0; i < nc; i++ )
|
||||
{
|
||||
MicroCache mc = microCaches[i];
|
||||
if ( mc == null ) continue;
|
||||
if ( mc == null )
|
||||
continue;
|
||||
if ( mc.ghost )
|
||||
{
|
||||
microCaches[i] = null;
|
||||
}
|
||||
else
|
||||
{
|
||||
mc.collect();
|
||||
mc.collect( 0 );
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -5,6 +5,8 @@
|
|||
*/
|
||||
package btools.mapaccess;
|
||||
|
||||
import btools.util.ByteDataReader;
|
||||
|
||||
|
||||
public class OsmLink
|
||||
{
|
||||
|
@ -14,32 +16,77 @@ public class OsmLink
|
|||
*/
|
||||
public byte[] descriptionBitmap;
|
||||
|
||||
/**
|
||||
* The target is either the next link or the target node
|
||||
*/
|
||||
public OsmNode targetNode;
|
||||
|
||||
public OsmLink next;
|
||||
|
||||
public byte[] firsttransferBytes;
|
||||
public OsmLinkHolder firstlinkholder = null;
|
||||
|
||||
final public OsmTransferNode decodeFirsttransfer()
|
||||
public byte[] geometry;
|
||||
|
||||
public boolean counterLinkWritten;
|
||||
|
||||
public boolean hasNewGeometry; // preliminary
|
||||
|
||||
public byte state;
|
||||
|
||||
public void setGeometry( byte[] geometry )
|
||||
{
|
||||
return firsttransferBytes == null ? null : OsmTransferNode.decode( firsttransferBytes );
|
||||
this.geometry = geometry;
|
||||
hasNewGeometry = true;
|
||||
}
|
||||
|
||||
final public OsmTransferNode decodeFirsttransfer( OsmNode sourceNode )
|
||||
{
|
||||
if ( geometry == null ) return null;
|
||||
if ( hasNewGeometry )
|
||||
{
|
||||
OsmTransferNode firstTransferNode = null;
|
||||
OsmTransferNode lastTransferNode = null;
|
||||
OsmNode startnode = counterLinkWritten ? targetNode : sourceNode;
|
||||
ByteDataReader r = new ByteDataReader( geometry );
|
||||
int olon = startnode.ilon;
|
||||
int olat = startnode.ilat;
|
||||
int oselev = startnode.selev;
|
||||
while ( r.hasMoreData() )
|
||||
{
|
||||
OsmTransferNode trans = new OsmTransferNode();
|
||||
trans.ilon = olon + r.readVarLengthSigned();
|
||||
trans.ilat = olat + r.readVarLengthSigned();
|
||||
trans.descriptionBitmap = descriptionBitmap;
|
||||
trans.selev = (short)(oselev + r.readVarLengthSigned());
|
||||
olon = trans.ilon;
|
||||
olat = trans.ilat;
|
||||
oselev = trans.selev;
|
||||
if ( counterLinkWritten ) // reverse chaining
|
||||
{
|
||||
trans.next = firstTransferNode;
|
||||
firstTransferNode = trans;
|
||||
}
|
||||
else
|
||||
{
|
||||
if ( lastTransferNode == null )
|
||||
{
|
||||
firstTransferNode = trans;
|
||||
}
|
||||
else
|
||||
{
|
||||
lastTransferNode.next = trans;
|
||||
}
|
||||
lastTransferNode = trans;
|
||||
}
|
||||
}
|
||||
return firstTransferNode;
|
||||
}
|
||||
return OsmTransferNode.decode( geometry );
|
||||
}
|
||||
|
||||
final public void encodeFirsttransfer( OsmTransferNode firsttransfer )
|
||||
{
|
||||
if ( firsttransfer == null ) firsttransferBytes = null;
|
||||
else firsttransferBytes = OsmTransferNode.encode( firsttransfer );
|
||||
if ( firsttransfer == null ) geometry = null;
|
||||
else geometry = OsmTransferNode.encode( firsttransfer );
|
||||
}
|
||||
|
||||
public boolean counterLinkWritten;
|
||||
|
||||
public byte state;
|
||||
|
||||
public OsmLinkHolder firstlinkholder = null;
|
||||
|
||||
final public void addLinkHolder( OsmLinkHolder holder )
|
||||
{
|
||||
if ( firstlinkholder != null ) { holder.setNextForLink( firstlinkholder ); }
|
||||
|
|
|
@ -5,10 +5,11 @@
|
|||
*/
|
||||
package btools.mapaccess;
|
||||
|
||||
import btools.codec.MicroCache;
|
||||
import btools.codec.MicroCache1;
|
||||
import btools.codec.MicroCache2;
|
||||
import btools.util.ByteArrayUnifier;
|
||||
|
||||
|
||||
|
||||
public class OsmNode implements OsmPos
|
||||
{
|
||||
public static final int EXTERNAL_BITMASK = 0x80; // old semantic
|
||||
|
@ -33,8 +34,8 @@ public class OsmNode implements OsmPos
|
|||
|
||||
public OsmNode( long id )
|
||||
{
|
||||
ilon = (int)(id >> 32);
|
||||
ilat = (int)(id & 0xffffffff);
|
||||
ilon = (int) ( id >> 32 );
|
||||
ilat = (int) ( id & 0xffffffff );
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -75,7 +76,6 @@ public class OsmNode implements OsmPos
|
|||
return selev / 4.;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* The links to other nodes
|
||||
*/
|
||||
|
@ -91,14 +91,15 @@ public class OsmNode implements OsmPos
|
|||
else
|
||||
{
|
||||
OsmLink l = firstlink;
|
||||
while( l.next != null ) l = l.next;
|
||||
while (l.next != null)
|
||||
l = l.next;
|
||||
l.next = link;
|
||||
}
|
||||
}
|
||||
|
||||
private OsmLink getCompatibleLink( int ilon, int ilat, boolean counterLinkWritten, int state )
|
||||
{
|
||||
for( OsmLink l = firstlink; l != null; l = l.next )
|
||||
for ( OsmLink l = firstlink; l != null; l = l.next )
|
||||
{
|
||||
if ( counterLinkWritten == l.counterLinkWritten && l.state == state )
|
||||
{
|
||||
|
@ -112,7 +113,7 @@ public class OsmNode implements OsmPos
|
|||
}
|
||||
// second try ignoring counterLinkWritten
|
||||
// (border links are written in both directions)
|
||||
for( OsmLink l = firstlink; l != null; l = l.next )
|
||||
for ( OsmLink l = firstlink; l != null; l = l.next )
|
||||
{
|
||||
if ( l.state == state )
|
||||
{
|
||||
|
@ -129,15 +130,15 @@ public class OsmNode implements OsmPos
|
|||
|
||||
public int calcDistance( OsmPos p )
|
||||
{
|
||||
double l = (ilat-90000000) * 0.00000001234134;
|
||||
double l2 = l*l;
|
||||
double l4 = l2*l2;
|
||||
double coslat = 1.- l2 + l4 / 6.;
|
||||
double l = ( ilat - 90000000 ) * 0.00000001234134;
|
||||
double l2 = l * l;
|
||||
double l4 = l2 * l2;
|
||||
double coslat = 1. - l2 + l4 / 6.;
|
||||
|
||||
double dlat = (ilat - p.getILat() )/1000000.;
|
||||
double dlon = (ilon - p.getILon() )/1000000. * coslat;
|
||||
double d = Math.sqrt( dlat*dlat + dlon*dlon ) * (6378000. / 57.3);
|
||||
return (int)(d + 1.0 );
|
||||
double dlat = ( ilat - p.getILat() ) / 1000000.;
|
||||
double dlon = ( ilon - p.getILon() ) / 1000000. * coslat;
|
||||
double d = Math.sqrt( dlat * dlat + dlon * dlon ) * ( 6378000. / 57.3 );
|
||||
return (int) ( d + 1.0 );
|
||||
}
|
||||
|
||||
public String toString()
|
||||
|
@ -145,13 +146,111 @@ public class OsmNode implements OsmPos
|
|||
return "" + getIdFromPos();
|
||||
}
|
||||
|
||||
public void parseNodeBody( MicroCache is, OsmNodesMap hollowNodes, DistanceChecker dc )
|
||||
public void parseNodeBody( MicroCache mc, OsmNodesMap hollowNodes, DistanceChecker dc )
|
||||
{
|
||||
if ( mc instanceof MicroCache1 )
|
||||
{
|
||||
parseNodeBody1( (MicroCache1) mc, hollowNodes, dc );
|
||||
}
|
||||
else if ( mc instanceof MicroCache2 )
|
||||
{
|
||||
parseNodeBody2( (MicroCache2) mc, hollowNodes, dc );
|
||||
}
|
||||
else
|
||||
throw new IllegalArgumentException( "unknown cache version: " + mc.getClass() );
|
||||
}
|
||||
|
||||
public void parseNodeBody2( MicroCache2 mc, OsmNodesMap hollowNodes, DistanceChecker dc )
|
||||
{
|
||||
ByteArrayUnifier abUnifier = hollowNodes.getByteArrayUnifier();
|
||||
|
||||
selev = mc.readShort();
|
||||
int nodeDescSize = mc.readVarLengthUnsigned();
|
||||
nodeDescription = nodeDescSize == 0 ? null : mc.readUnified( nodeDescSize, abUnifier );
|
||||
|
||||
while (mc.hasMoreData())
|
||||
{
|
||||
// read link data
|
||||
int endPointer = mc.getEndPointer();
|
||||
int linklon = ilon + mc.readVarLengthSigned();
|
||||
int linklat = ilat + mc.readVarLengthSigned();
|
||||
int sizecode = mc.readVarLengthUnsigned();
|
||||
boolean isReverse = ( sizecode & 1 ) != 0;
|
||||
byte[] description = null;
|
||||
int descSize = sizecode >> 1;
|
||||
if ( descSize > 0 )
|
||||
{
|
||||
description = mc.readUnified( descSize, abUnifier );
|
||||
}
|
||||
byte[] geometry = mc.readDataUntil( endPointer );
|
||||
|
||||
// preliminary hack: way-point-matching not here (done at decoding time)
|
||||
if ( dc != null )
|
||||
continue;
|
||||
|
||||
if ( linklon == ilon && linklat == ilat )
|
||||
{
|
||||
continue; // skip self-ref
|
||||
}
|
||||
|
||||
// first check the known links for that target
|
||||
OsmLink link = getCompatibleLink( linklon, linklat, isReverse, 2 );
|
||||
if ( link == null ) // .. not found, then check the hollow nodes
|
||||
{
|
||||
long targetNodeId = ( (long) linklon ) << 32 | linklat;
|
||||
OsmNode tn = hollowNodes.get( targetNodeId ); // target node
|
||||
if ( tn == null ) // node not yet known, create a new hollow proxy
|
||||
{
|
||||
tn = new OsmNode( linklon, linklat );
|
||||
tn.setHollow();
|
||||
hollowNodes.put( tn );
|
||||
}
|
||||
link = new OsmLink();
|
||||
link.targetNode = tn;
|
||||
link.counterLinkWritten = isReverse;
|
||||
link.state = 1;
|
||||
addLink( link );
|
||||
}
|
||||
|
||||
// now we have a link with a target node -> get the reverse link
|
||||
OsmLink rlink = link.targetNode.getCompatibleLink( ilon, ilat, !isReverse, 1 );
|
||||
if ( rlink == null ) // .. not found, create it
|
||||
{
|
||||
rlink = new OsmLink();
|
||||
rlink.targetNode = this;
|
||||
rlink.counterLinkWritten = !isReverse;
|
||||
rlink.state = 2;
|
||||
link.targetNode.addLink( rlink );
|
||||
}
|
||||
|
||||
if ( !isReverse )
|
||||
{
|
||||
// we have the data for that link, so fill both the link ..
|
||||
link.descriptionBitmap = description;
|
||||
link.setGeometry( geometry );
|
||||
|
||||
// .. and the reverse
|
||||
if ( rlink.counterLinkWritten )
|
||||
{
|
||||
rlink.descriptionBitmap = description;
|
||||
rlink.setGeometry( geometry );
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
if ( dc == null )
|
||||
{
|
||||
hollowNodes.remove( this );
|
||||
}
|
||||
}
|
||||
|
||||
public void parseNodeBody1( MicroCache1 is, OsmNodesMap hollowNodes, DistanceChecker dc )
|
||||
{
|
||||
ByteArrayUnifier abUnifier = hollowNodes.getByteArrayUnifier();
|
||||
|
||||
selev = is.readShort();
|
||||
|
||||
while( is.hasMoreData() )
|
||||
while (is.hasMoreData())
|
||||
{
|
||||
int ilonref = ilon;
|
||||
int ilatref = ilat;
|
||||
|
@ -162,55 +261,63 @@ public class OsmNode implements OsmPos
|
|||
int linklon;
|
||||
int linklat;
|
||||
byte[] description = null;
|
||||
for(;;)
|
||||
for ( ;; )
|
||||
{
|
||||
int bitField = is.readByte();
|
||||
int dlon = is.readVarLengthUnsigned();
|
||||
int dlat = is.readVarLengthUnsigned();
|
||||
if ( (bitField & SIGNLON_BITMASK) != 0 ) { dlon = -dlon;}
|
||||
if ( (bitField & SIGNLAT_BITMASK) != 0 ) { dlat = -dlat;}
|
||||
if ( ( bitField & SIGNLON_BITMASK ) != 0 )
|
||||
{
|
||||
dlon = -dlon;
|
||||
}
|
||||
if ( ( bitField & SIGNLAT_BITMASK ) != 0 )
|
||||
{
|
||||
dlat = -dlat;
|
||||
}
|
||||
linklon = ilonref + dlon;
|
||||
linklat = ilatref + dlat;
|
||||
ilonref = linklon;
|
||||
ilatref = linklat;
|
||||
// read variable length or old 8 byte fixed, and ensure that 8 bytes is only fixed
|
||||
if ( (bitField & WRITEDESC_BITMASK ) != 0 )
|
||||
// read variable length or old 8 byte fixed, and ensure that 8 bytes is
|
||||
// only fixed
|
||||
if ( ( bitField & WRITEDESC_BITMASK ) != 0 )
|
||||
{
|
||||
byte[] ab = new byte[is.readByte()];
|
||||
is.readFully( ab );
|
||||
description = abUnifier.unify( ab );
|
||||
}
|
||||
if ( (bitField & NODEDESC_BITMASK ) != 0 )
|
||||
if ( ( bitField & NODEDESC_BITMASK ) != 0 )
|
||||
{
|
||||
byte[] ab = new byte[is.readByte()];
|
||||
is.readFully( ab );
|
||||
nodeDescription = abUnifier.unify( ab );
|
||||
}
|
||||
if ( (bitField & RESERVED1_BITMASK ) != 0 )
|
||||
if ( ( bitField & RESERVED1_BITMASK ) != 0 )
|
||||
{
|
||||
byte[] ab = new byte[is.readByte()];
|
||||
is.readFully( ab );
|
||||
}
|
||||
if ( (bitField & RESERVED2_BITMASK ) != 0 )
|
||||
if ( ( bitField & RESERVED2_BITMASK ) != 0 )
|
||||
{
|
||||
byte[] ab = new byte[is.readByte()];
|
||||
is.readFully( ab );
|
||||
}
|
||||
if ( (bitField & SKIPDETAILS_BITMASK ) != 0 )
|
||||
if ( ( bitField & SKIPDETAILS_BITMASK ) != 0 )
|
||||
{
|
||||
counterLinkWritten = true;
|
||||
}
|
||||
|
||||
if ( description == null && !counterLinkWritten ) throw new IllegalArgumentException( "internal error: missing way description!" );
|
||||
if ( description == null && !counterLinkWritten )
|
||||
throw new IllegalArgumentException( "internal error: missing way description!" );
|
||||
|
||||
boolean isTransfer = (bitField & TRANSFERNODE_BITMASK ) != 0;
|
||||
boolean isTransfer = ( bitField & TRANSFERNODE_BITMASK ) != 0;
|
||||
if ( isTransfer )
|
||||
{
|
||||
OsmTransferNode trans = new OsmTransferNode();
|
||||
trans.ilon = linklon;
|
||||
trans.ilat = linklat;
|
||||
trans.descriptionBitmap = description;
|
||||
trans.selev = (short)(selev + is.readVarLengthSigned());
|
||||
trans.selev = (short) ( selev + is.readVarLengthSigned() );
|
||||
if ( lastTransferNode == null )
|
||||
{
|
||||
firstTransferNode = trans;
|
||||
|
@ -245,11 +352,11 @@ public class OsmNode implements OsmPos
|
|||
OsmLink link = getCompatibleLink( linklon, linklat, counterLinkWritten, 2 );
|
||||
if ( link == null ) // .. not found, then check the hollow nodes
|
||||
{
|
||||
long targetNodeId = ((long)linklon)<<32 | linklat;
|
||||
long targetNodeId = ( (long) linklon ) << 32 | linklat;
|
||||
OsmNode tn = hollowNodes.get( targetNodeId ); // target node
|
||||
if ( tn == null ) // node not yet known, create a new hollow proxy
|
||||
{
|
||||
tn = new OsmNode(linklon, linklat);
|
||||
tn = new OsmNode( linklon, linklat );
|
||||
tn.setHollow();
|
||||
hollowNodes.put( tn );
|
||||
}
|
||||
|
@ -275,15 +382,16 @@ public class OsmNode implements OsmPos
|
|||
{
|
||||
// we have the data for that link, so fill both the link ..
|
||||
link.descriptionBitmap = description;
|
||||
link.encodeFirsttransfer(firstTransferNode);
|
||||
link.encodeFirsttransfer( firstTransferNode );
|
||||
|
||||
// .. and the reverse
|
||||
if ( rlink.counterLinkWritten )
|
||||
{
|
||||
rlink.descriptionBitmap = description; // default for no transfer-nodes
|
||||
rlink.descriptionBitmap = description; // default for no
|
||||
// transfer-nodes
|
||||
OsmTransferNode previous = null;
|
||||
OsmTransferNode rtrans = null;
|
||||
for( OsmTransferNode trans = firstTransferNode; trans != null; trans = trans.next )
|
||||
for ( OsmTransferNode trans = firstTransferNode; trans != null; trans = trans.next )
|
||||
{
|
||||
if ( previous == null )
|
||||
{
|
||||
|
@ -301,7 +409,7 @@ public class OsmNode implements OsmPos
|
|||
rtrans.descriptionBitmap = description;
|
||||
previous = rtrans;
|
||||
}
|
||||
rlink.encodeFirsttransfer(rtrans);
|
||||
rlink.encodeFirsttransfer( rtrans );
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -324,7 +432,7 @@ public class OsmNode implements OsmPos
|
|||
|
||||
public long getIdFromPos()
|
||||
{
|
||||
return ((long)ilon)<<32 | ilat;
|
||||
return ( (long) ilon ) << 32 | ilat;
|
||||
}
|
||||
|
||||
public void unlinkLink( OsmLink link )
|
||||
|
@ -334,7 +442,7 @@ public class OsmNode implements OsmPos
|
|||
firstlink = link.next;
|
||||
return;
|
||||
}
|
||||
for( OsmLink l = firstlink; l != null; l = l.next )
|
||||
for ( OsmLink l = firstlink; l != null; l = l.next )
|
||||
{
|
||||
if ( l.next == link )
|
||||
{
|
||||
|
@ -349,14 +457,14 @@ public class OsmNode implements OsmPos
|
|||
{
|
||||
if ( o instanceof OsmNode )
|
||||
{
|
||||
OsmNode n = (OsmNode)o;
|
||||
OsmNode n = (OsmNode) o;
|
||||
return n.ilon == ilon && n.ilat == ilat;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode( )
|
||||
public int hashCode()
|
||||
{
|
||||
return ilon + ilat;
|
||||
}
|
||||
|
|
|
@ -5,8 +5,11 @@
|
|||
*/
|
||||
package btools.mapaccess;
|
||||
|
||||
import java.io.*;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.RandomAccessFile;
|
||||
|
||||
import btools.codec.DataBuffers;
|
||||
import btools.util.ByteDataReader;
|
||||
import btools.util.Crc32;
|
||||
|
||||
|
@ -21,6 +24,8 @@ final public class PhysicalFile
|
|||
|
||||
String fileName;
|
||||
|
||||
public int divisor = 80;
|
||||
|
||||
/**
|
||||
* Checks the integrity of the file using the build-in checksums
|
||||
*
|
||||
|
@ -31,36 +36,47 @@ final public class PhysicalFile
|
|||
PhysicalFile pf = null;
|
||||
try
|
||||
{
|
||||
byte[] iobuffer = new byte[65636];
|
||||
pf = new PhysicalFile( f, new byte[65636], -1, -1 );
|
||||
for( int tileIndex=0; tileIndex<25; tileIndex++ )
|
||||
DataBuffers dataBuffers = new DataBuffers();
|
||||
pf = new PhysicalFile( f, dataBuffers, -1, -1 );
|
||||
int div = pf.divisor;
|
||||
for ( int lonDegree = 0; lonDegree < 5; lonDegree++ ) // does'nt really matter..
|
||||
{
|
||||
OsmFile osmf = new OsmFile( pf, tileIndex, iobuffer );
|
||||
if ( osmf.microCaches != null )
|
||||
for( int lonIdx80=0; lonIdx80<80; lonIdx80++ )
|
||||
for( int latIdx80=0; latIdx80<80; latIdx80++ )
|
||||
new MicroCache( osmf, lonIdx80, latIdx80, iobuffer );
|
||||
for ( int latDegree = 0; latDegree < 5; latDegree++ ) // ..where on earth we are
|
||||
{
|
||||
OsmFile osmf = new OsmFile( pf, lonDegree, latDegree, dataBuffers );
|
||||
if ( osmf.hasData() )
|
||||
for ( int lonIdx = 0; lonIdx < div; lonIdx++ )
|
||||
for ( int latIdx = 0; latIdx < div; latIdx++ )
|
||||
osmf.createMicroCache( lonDegree * div + lonIdx, latDegree * div + latIdx, dataBuffers, null, null, false );
|
||||
}
|
||||
}
|
||||
catch( IllegalArgumentException iae )
|
||||
}
|
||||
catch (IllegalArgumentException iae)
|
||||
{
|
||||
return iae.getMessage();
|
||||
}
|
||||
catch( Exception e )
|
||||
catch (Exception e)
|
||||
{
|
||||
return e.toString();
|
||||
}
|
||||
finally
|
||||
{
|
||||
if ( pf != null ) try{ pf.ra.close(); } catch( Exception ee ) {}
|
||||
if ( pf != null )
|
||||
try
|
||||
{
|
||||
pf.ra.close();
|
||||
}
|
||||
catch (Exception ee)
|
||||
{
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
public PhysicalFile( File f, byte[] iobuffer, int lookupVersion, int lookupMinorVersion ) throws Exception
|
||||
public PhysicalFile( File f, DataBuffers dataBuffers, int lookupVersion, int lookupMinorVersion ) throws Exception
|
||||
{
|
||||
fileName = f.getName();
|
||||
|
||||
byte[] iobuffer = dataBuffers.iobuffer;
|
||||
ra = new RandomAccessFile( f, "r" );
|
||||
ra.readFully( iobuffer, 0, 200 );
|
||||
fileIndexCrc = Crc32.crc( iobuffer, 0, 200 );
|
||||
|
@ -99,7 +115,17 @@ final public class PhysicalFile
|
|||
ra.readFully( iobuffer, 0, extraLen );
|
||||
dis = new ByteDataReader( iobuffer );
|
||||
creationTime = dis.readLong();
|
||||
if ( dis.readInt() != fileIndexCrc )
|
||||
|
||||
int crcData = dis.readInt();
|
||||
if ( crcData == fileIndexCrc )
|
||||
{
|
||||
divisor = 80; // old format
|
||||
}
|
||||
else if ( (crcData ^ 2) == fileIndexCrc )
|
||||
{
|
||||
divisor = 32; // new format
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new IOException( "top index checksum error" );
|
||||
}
|
||||
|
|
|
@ -157,7 +157,7 @@ public class BInstallerView extends View
|
|||
private void startDownload( int tileIndex, boolean isCd5 )
|
||||
{
|
||||
String namebase = baseNameForTile( tileIndex );
|
||||
String baseurl = "http://brouter.de/brouter/segments3/";
|
||||
String baseurl = "http://brouter.de/brouter/segments4/";
|
||||
currentDownloadFile = namebase + (isCd5 ? ".cd5" : ".rd5" );
|
||||
String url = baseurl + (isCd5 ? "carsubset/" : "" ) + currentDownloadFile;
|
||||
isDownloading = true;
|
||||
|
@ -606,7 +606,7 @@ float tx, ty;
|
|||
// download the file
|
||||
input = connection.getInputStream();
|
||||
|
||||
int slidx = surl.lastIndexOf( "segments3/" );
|
||||
int slidx = surl.lastIndexOf( "segments4/" );
|
||||
fname = baseDir + "/brouter/segments3/" + surl.substring( slidx+10 );
|
||||
tmp_file = new File( fname + "_tmp" );
|
||||
if ( new File( fname ).exists() ) return "internal error: file exists: " + fname;
|
||||
|
@ -633,6 +633,15 @@ float tx, ty;
|
|||
try { Thread.sleep( dt ); } catch( InterruptedException ie ) {}
|
||||
}
|
||||
}
|
||||
publishProgress( 101 );
|
||||
String check_result = PhysicalFile.checkFileIntegrity( tmp_file );
|
||||
if ( check_result != null ) return check_result;
|
||||
|
||||
if ( !tmp_file.renameTo( new File( fname ) ) )
|
||||
{
|
||||
return "Could not rename to " + fname;
|
||||
}
|
||||
return null;
|
||||
} catch (Exception e) {
|
||||
return e.toString();
|
||||
} finally {
|
||||
|
@ -647,15 +656,6 @@ float tx, ty;
|
|||
if (connection != null)
|
||||
connection.disconnect();
|
||||
}
|
||||
publishProgress( 101 );
|
||||
String check_result = PhysicalFile.checkFileIntegrity( tmp_file );
|
||||
if ( check_result != null ) return check_result;
|
||||
|
||||
if ( !tmp_file.renameTo( new File( fname ) ) )
|
||||
{
|
||||
return "Could not rename to " + fname;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
finally
|
||||
{
|
||||
|
|
|
@ -13,7 +13,6 @@ import java.util.List;
|
|||
import java.util.StringTokenizer;
|
||||
import java.util.TreeMap;
|
||||
|
||||
import btools.memrouter.TwinRoutingEngine;
|
||||
import btools.router.OsmNodeNamed;
|
||||
import btools.router.OsmTrack;
|
||||
import btools.router.RoutingContext;
|
||||
|
@ -102,7 +101,7 @@ public class RouteServer extends Thread
|
|||
RoutingContext rc = handler.readRoutingContext();
|
||||
List<OsmNodeNamed> wplist = handler.readWayPointList();
|
||||
|
||||
cr = new TwinRoutingEngine( null, null, serviceContext.segmentDir, wplist, rc );
|
||||
cr = new RoutingEngine( null, null, serviceContext.segmentDir, wplist, rc );
|
||||
cr.quite = true;
|
||||
cr.doRun( maxRunningTime );
|
||||
|
||||
|
|
|
@ -0,0 +1,32 @@
|
|||
package btools.server;
|
||||
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
|
||||
import org.junit.Assert;
|
||||
import org.junit.Test;
|
||||
|
||||
import btools.mapaccess.PhysicalFile;
|
||||
|
||||
public class IntegrityCheckTest
|
||||
{
|
||||
private File workingDir;
|
||||
|
||||
@Test
|
||||
public void integrityTest() throws Exception
|
||||
{
|
||||
URL resulturl = this.getClass().getResource( "/testtrack0.gpx" );
|
||||
Assert.assertTrue( "reference result not found: ", resulturl != null );
|
||||
File resultfile = new File( resulturl.getFile() );
|
||||
workingDir = resultfile.getParentFile();
|
||||
|
||||
File segmentDir = new File( workingDir, "/../../../brouter-map-creator/target/test-classes/tmp/segments" );
|
||||
File[] files = segmentDir.listFiles();
|
||||
|
||||
for ( File f : files )
|
||||
{
|
||||
PhysicalFile.checkFileIntegrity( f );
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -1,10 +1,11 @@
|
|||
package btools.util;
|
||||
|
||||
public final class BitCoderContext
|
||||
{
|
||||
|
||||
public class BitCoderContext
|
||||
{
|
||||
private byte[] ab;
|
||||
private int idx = -1;
|
||||
private int bm = 0x100 ; // byte mask
|
||||
private int bm = 0x100; // byte mask
|
||||
private int b;
|
||||
|
||||
public BitCoderContext( byte[] ab )
|
||||
|
@ -12,85 +13,138 @@ package btools.util;
|
|||
this.ab = ab;
|
||||
}
|
||||
|
||||
// encode a distance with a variable bit length
|
||||
// (poor mans huffman tree)
|
||||
// 1 -> 0
|
||||
// 01 -> 1 + following 1-bit word ( 1..2 )
|
||||
// 001 -> 3 + following 2-bit word ( 3..6 )
|
||||
// 0001 -> 7 + following 3-bit word ( 7..14 ) etc.
|
||||
|
||||
public void encodeVarBits( int value )
|
||||
/**
|
||||
* encode a distance with a variable bit length
|
||||
* (poor mans huffman tree)
|
||||
* 1 -> 0
|
||||
* 01 -> 1 + following 1-bit word ( 1..2 )
|
||||
* 001 -> 3 + following 2-bit word ( 3..6 )
|
||||
* 0001 -> 7 + following 3-bit word ( 7..14 ) etc.
|
||||
*
|
||||
* @see #decodeVarBits
|
||||
*/
|
||||
public final void encodeVarBits( int value )
|
||||
{
|
||||
int range = 0;
|
||||
while ( value > range )
|
||||
while (value > range)
|
||||
{
|
||||
encodeBit( false );
|
||||
value -= range+1;
|
||||
range = 2*range + 1;
|
||||
value -= range + 1;
|
||||
range = 2 * range + 1;
|
||||
}
|
||||
encodeBit( true );
|
||||
encode( range, value );
|
||||
encodeBounded( range, value );
|
||||
}
|
||||
|
||||
// twin to encodeDistance
|
||||
public int decodeVarBits()
|
||||
/**
|
||||
* @see #encodeVarBits
|
||||
*/
|
||||
public final int decodeVarBits()
|
||||
{
|
||||
int range = 0;
|
||||
int value = 0;
|
||||
while ( !decodeBit() )
|
||||
while (!decodeBit())
|
||||
{
|
||||
value += range+1;
|
||||
range = 2*range + 1;
|
||||
value += range + 1;
|
||||
range = 2 * range + 1;
|
||||
}
|
||||
return value + decode( range );
|
||||
return value + decodeBounded( range );
|
||||
}
|
||||
|
||||
public void encodeBit( boolean value )
|
||||
public final void encodeBit( boolean value )
|
||||
{
|
||||
if ( bm == 0x100 ) { bm = 1; ab[++idx] = 0; }
|
||||
if ( value ) ab[idx] |= bm;
|
||||
if ( bm == 0x100 )
|
||||
{
|
||||
bm = 1;
|
||||
ab[++idx] = 0;
|
||||
}
|
||||
if ( value )
|
||||
ab[idx] |= bm;
|
||||
bm <<= 1;
|
||||
}
|
||||
|
||||
public boolean decodeBit()
|
||||
public final boolean decodeBit()
|
||||
{
|
||||
if ( bm == 0x100 ) { bm = 1; b = ab[++idx]; }
|
||||
boolean value = ( (b & bm) != 0 );
|
||||
if ( bm == 0x100 )
|
||||
{
|
||||
bm = 1;
|
||||
b = ab[++idx];
|
||||
}
|
||||
boolean value = ( ( b & bm ) != 0 );
|
||||
bm <<= 1;
|
||||
return value;
|
||||
}
|
||||
|
||||
// encode a symbol with number of bits according to maxvalue
|
||||
public void encode( int max, int value )
|
||||
/**
|
||||
* encode an integer in the range 0..max (inclusive).
|
||||
* For max = 2^n-1, this just encodes n bits, but in general
|
||||
* this is variable length encoding, with the shorter codes
|
||||
* for the central value range
|
||||
*/
|
||||
public final void encodeBounded( int max, int value )
|
||||
{
|
||||
int im = 1; // integer mask
|
||||
while( max != 0 )
|
||||
while (im <= max)
|
||||
{
|
||||
if ( bm == 0x100 ) { bm = 1; ab[++idx] = 0; }
|
||||
if ( (value & im) != 0 ) ab[idx] |= bm;
|
||||
max >>= 1;
|
||||
if ( bm == 0x100 )
|
||||
{
|
||||
bm = 1;
|
||||
ab[++idx] = 0;
|
||||
}
|
||||
if ( ( value & im ) != 0 )
|
||||
{
|
||||
ab[idx] |= bm;
|
||||
max -= im;
|
||||
}
|
||||
bm <<= 1;
|
||||
im <<= 1;
|
||||
}
|
||||
}
|
||||
|
||||
public int getEncodedLength()
|
||||
{
|
||||
return idx+1;
|
||||
}
|
||||
|
||||
public int decode( int max )
|
||||
/**
|
||||
* decode an integer in the range 0..max (inclusive).
|
||||
* @see #encodeBounded
|
||||
*/
|
||||
public final int decodeBounded( int max )
|
||||
{
|
||||
int value = 0;
|
||||
int im = 1; // integer mask
|
||||
while( max != 0 )
|
||||
while (( value | im ) <= max)
|
||||
{
|
||||
if ( bm == 0x100 ) { bm = 1; b = ab[++idx]; }
|
||||
if ( (b & bm) != 0 ) value |= im;
|
||||
max >>= 1;
|
||||
if ( bm == 0x100 )
|
||||
{
|
||||
bm = 1;
|
||||
b = ab[++idx];
|
||||
}
|
||||
if ( ( b & bm ) != 0 )
|
||||
value |= im;
|
||||
bm <<= 1;
|
||||
im <<= 1;
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the encoded length in bytes
|
||||
*/
|
||||
public final int getEncodedLength()
|
||||
{
|
||||
return idx + 1;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the encoded length in bits
|
||||
*/
|
||||
public final long getBitPosition()
|
||||
{
|
||||
long bitpos = idx << 3;
|
||||
int m = bm;
|
||||
while (m > 1)
|
||||
{
|
||||
bitpos++;
|
||||
m >>= 1;
|
||||
}
|
||||
return bitpos;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -9,11 +9,7 @@ public final class ByteArrayUnifier
|
|||
public ByteArrayUnifier( int size, boolean validateImmutability )
|
||||
{
|
||||
this.size = size;
|
||||
|
||||
if ( !Boolean.getBoolean( "disableByteArrayUnifification" ) )
|
||||
{
|
||||
byteArrayCache = new byte[size][];
|
||||
}
|
||||
if ( validateImmutability ) crcCrosscheck = new int[size];
|
||||
}
|
||||
|
||||
|
@ -26,21 +22,25 @@ public final class ByteArrayUnifier
|
|||
*/
|
||||
public byte[] unify( byte[] ab )
|
||||
{
|
||||
if ( byteArrayCache == null ) return ab;
|
||||
return unify( ab, 0, ab.length );
|
||||
}
|
||||
|
||||
int n = ab.length;
|
||||
int crc = Crc32.crc( ab, 0, n );
|
||||
int idx = (crc & 0xfffffff) % size;
|
||||
public byte[] unify( byte[] ab, int offset, int len )
|
||||
{
|
||||
int crc = Crc32.crc( ab, offset, len );
|
||||
int idx = ( crc & 0xfffffff ) % size;
|
||||
byte[] abc = byteArrayCache[idx];
|
||||
if ( abc != null && abc.length == n )
|
||||
if ( abc != null && abc.length == len )
|
||||
{
|
||||
int i = 0;
|
||||
while( i < n )
|
||||
while (i < len)
|
||||
{
|
||||
if ( ab[i] != abc[i] ) break;
|
||||
if ( ab[offset + i] != abc[i] )
|
||||
break;
|
||||
i++;
|
||||
}
|
||||
if ( i == n ) return abc;
|
||||
if ( i == len )
|
||||
return abc;
|
||||
}
|
||||
if ( crcCrosscheck != null )
|
||||
{
|
||||
|
@ -48,11 +48,14 @@ public final class ByteArrayUnifier
|
|||
{
|
||||
byte[] abold = byteArrayCache[idx];
|
||||
int crcold = Crc32.crc( abold, 0, abold.length );
|
||||
if ( crcold != crcCrosscheck[idx] ) throw new IllegalArgumentException( "ByteArrayUnifier: immutablity validation failed!" );
|
||||
if ( crcold != crcCrosscheck[idx] )
|
||||
throw new IllegalArgumentException( "ByteArrayUnifier: immutablity validation failed!" );
|
||||
}
|
||||
crcCrosscheck[idx] = crc;
|
||||
}
|
||||
byteArrayCache[idx] = ab;
|
||||
return ab;
|
||||
byte[] nab = new byte[len];
|
||||
System.arraycopy( ab, offset, nab, 0, len );
|
||||
byteArrayCache[idx] = nab;
|
||||
return nab;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -10,10 +10,19 @@ public class ByteDataReader
|
|||
{
|
||||
protected byte[] ab;
|
||||
protected int aboffset;
|
||||
protected int aboffsetEnd;
|
||||
|
||||
public ByteDataReader( byte[] byteArray )
|
||||
{
|
||||
ab = byteArray;
|
||||
aboffsetEnd = ab == null ? 0 : ab.length;
|
||||
}
|
||||
|
||||
public ByteDataReader( byte[] byteArray, int offset )
|
||||
{
|
||||
ab = byteArray;
|
||||
aboffset = offset;
|
||||
aboffsetEnd = ab == null ? 0 : ab.length;
|
||||
}
|
||||
|
||||
public final int readInt()
|
||||
|
@ -57,6 +66,41 @@ public class ByteDataReader
|
|||
return (short)( (i1 << 8) | i0);
|
||||
}
|
||||
|
||||
/**
|
||||
* Read a size value and return a pointer to the end of a data section of that size
|
||||
*
|
||||
* @return the pointer to the first byte after that section
|
||||
*/
|
||||
public int getEndPointer()
|
||||
{
|
||||
int size = readVarLengthUnsigned();
|
||||
return aboffset + size;
|
||||
}
|
||||
|
||||
public byte[] readDataUntil( int endPointer )
|
||||
{
|
||||
int size = endPointer - aboffset;
|
||||
if ( size == 0 )
|
||||
{
|
||||
return null;
|
||||
}
|
||||
byte[] data = new byte[size];
|
||||
readFully( data );
|
||||
return data;
|
||||
}
|
||||
|
||||
public byte[] readVarBytes()
|
||||
{
|
||||
int len = readVarLengthUnsigned();
|
||||
if ( len == 0 )
|
||||
{
|
||||
return null;
|
||||
}
|
||||
byte[] bytes = new byte[len];
|
||||
readFully( bytes );
|
||||
return bytes;
|
||||
}
|
||||
|
||||
public final int readVarLengthSigned()
|
||||
{
|
||||
int v = readVarLengthUnsigned();
|
||||
|
@ -83,6 +127,11 @@ public class ByteDataReader
|
|||
aboffset += ta.length;
|
||||
}
|
||||
|
||||
public boolean hasMoreData()
|
||||
{
|
||||
return aboffset < aboffsetEnd;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString()
|
||||
{
|
||||
|
|
|
@ -6,14 +6,11 @@
|
|||
package btools.util;
|
||||
|
||||
|
||||
public final class ByteDataWriter
|
||||
public class ByteDataWriter extends ByteDataReader
|
||||
{
|
||||
private byte[] ab;
|
||||
private int aboffset;
|
||||
|
||||
public ByteDataWriter( byte[] byteArray )
|
||||
{
|
||||
ab = byteArray;
|
||||
super ( byteArray );
|
||||
}
|
||||
|
||||
public void writeInt( int v )
|
||||
|
@ -64,10 +61,31 @@ public final class ByteDataWriter
|
|||
aboffset += len;
|
||||
}
|
||||
|
||||
public void ensureCapacity( int len )
|
||||
public void writeVarBytes( byte[] sa )
|
||||
{
|
||||
// TODO
|
||||
if ( sa == null )
|
||||
{
|
||||
writeVarLengthUnsigned( 0 );
|
||||
}
|
||||
else
|
||||
{
|
||||
int len = sa.length;
|
||||
writeVarLengthUnsigned( len );
|
||||
write( sa, 0, len );
|
||||
}
|
||||
}
|
||||
|
||||
public void writeModeAndDesc( boolean isReverse, byte[] sa )
|
||||
{
|
||||
int len = sa == null ? 0 : sa.length;
|
||||
int sizecode = len << 1 | ( isReverse ? 1 : 0 );
|
||||
writeVarLengthUnsigned( sizecode );
|
||||
if ( len > 0 )
|
||||
{
|
||||
write( sa, 0, len );
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public byte[] toByteArray()
|
||||
{
|
||||
|
@ -76,6 +94,39 @@ public final class ByteDataWriter
|
|||
return c;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Just reserves a single byte and return it' offset.
|
||||
* Used in conjunction with injectVarLengthUnsigned
|
||||
* to efficiently write a size prefix
|
||||
*
|
||||
* @return the offset of the placeholder
|
||||
*/
|
||||
public int writeSizePlaceHolder()
|
||||
{
|
||||
return aboffset++;
|
||||
}
|
||||
|
||||
public void injectSize( int sizeoffset )
|
||||
{
|
||||
int size = 0;
|
||||
int datasize = aboffset-sizeoffset-1;
|
||||
int v = datasize;
|
||||
do
|
||||
{
|
||||
v >>= 7;
|
||||
size++;
|
||||
}
|
||||
while( v != 0 );
|
||||
if ( size > 1 ) // doesn't fit -> shift the data after the placeholder
|
||||
{
|
||||
System.arraycopy( ab, sizeoffset+1, ab, sizeoffset+size, datasize );
|
||||
}
|
||||
aboffset = sizeoffset;
|
||||
writeVarLengthUnsigned( datasize );
|
||||
aboffset = sizeoffset + size + datasize;
|
||||
}
|
||||
|
||||
public int writeVarLengthSigned( int v )
|
||||
{
|
||||
return writeVarLengthUnsigned( v < 0 ? ( (-v) << 1 ) | 1 : v << 1 );
|
||||
|
@ -100,12 +151,4 @@ public final class ByteDataWriter
|
|||
return aboffset;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString()
|
||||
{
|
||||
StringBuilder sb = new StringBuilder( "[" );
|
||||
for( int i=0; i<ab.length; i++ ) sb.append( i == 0 ? " " : ", " ).append( Integer.toString( ab[i] ) );
|
||||
sb.append( " ]" );
|
||||
return sb.toString();
|
||||
}
|
||||
}
|
|
@ -1,8 +1,5 @@
|
|||
package btools.util;
|
||||
|
||||
import java.util.Random;
|
||||
import java.util.HashSet;
|
||||
|
||||
import org.junit.Assert;
|
||||
import org.junit.Test;
|
||||
|
||||
|
@ -13,16 +10,44 @@ public class BitCoderContextTest
|
|||
{
|
||||
byte[] ab = new byte[4000];
|
||||
BitCoderContext ctx = new BitCoderContext( ab );
|
||||
for( int i=0; i<1000; i++ )
|
||||
for ( int i = 0; i < 1000; i++ )
|
||||
{
|
||||
ctx.encodeVarBits( i );
|
||||
}
|
||||
ctx = new BitCoderContext( ab );
|
||||
|
||||
for( int i=0; i<1000; i++ )
|
||||
for ( int i = 0; i < 1000; i++ )
|
||||
{
|
||||
int value = ctx.decodeVarBits();
|
||||
Assert.assertTrue( "distance value mismatch", value == i );
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void boundedEncodeDecodeTest()
|
||||
{
|
||||
byte[] ab = new byte[581969];
|
||||
BitCoderContext ctx = new BitCoderContext( ab );
|
||||
for ( int max = 1; max < 1000; max++ )
|
||||
{
|
||||
for ( int val = 0; val <= max; val++ )
|
||||
{
|
||||
ctx.encodeBounded( max, val );
|
||||
}
|
||||
}
|
||||
|
||||
ctx = new BitCoderContext( ab );
|
||||
|
||||
for ( int max = 1; max < 1000; max++ )
|
||||
{
|
||||
for ( int val = 0; val <= max; val++ )
|
||||
{
|
||||
int valDecoded = ctx.decodeBounded( max );
|
||||
if ( valDecoded != val )
|
||||
{
|
||||
Assert.fail( "mismatch at max=" + max + " " + valDecoded + "<>" + val );
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
4
pom.xml
4
pom.xml
|
@ -6,16 +6,18 @@
|
|||
<artifactId>brouter</artifactId>
|
||||
<version>1.2</version>
|
||||
<packaging>pom</packaging>
|
||||
<url>http://brensche.de/brouter/</url>
|
||||
<url>http://brouter.de/brouter/</url>
|
||||
<name>brouter</name>
|
||||
<description>configurable OSM offline router with elevation awareness, Java + Android</description>
|
||||
|
||||
<modules>
|
||||
<module>brouter-util</module>
|
||||
<module>brouter-codec</module>
|
||||
<module>brouter-expressions</module>
|
||||
<module>brouter-mapaccess</module>
|
||||
<module>brouter-core</module>
|
||||
<module>brouter-map-creator</module>
|
||||
<!-- <module>brouter-mem-router</module> -->
|
||||
<module>brouter-server</module>
|
||||
<module>brouter-routing-app</module>
|
||||
</modules>
|
||||
|
|
Loading…
Reference in a new issue