diff --git a/brouter-codec/pom.xml b/brouter-codec/pom.xml
new file mode 100644
index 0000000..ccf8518
--- /dev/null
+++ b/brouter-codec/pom.xml
@@ -0,0 +1,26 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <groupId>org.btools</groupId>
+    <artifactId>brouter</artifactId>
+    <version>1.2</version>
+    <relativePath>../pom.xml</relativePath>
+  </parent>
+
+  <artifactId>brouter-codec</artifactId>
+  <packaging>jar</packaging>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.btools</groupId>
+      <artifactId>brouter-util</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+</project>
diff --git a/brouter-codec/src/main/java/btools/codec/DataBuffers.java b/brouter-codec/src/main/java/btools/codec/DataBuffers.java
new file mode 100644
index 0000000..4ee1038
--- /dev/null
+++ b/brouter-codec/src/main/java/btools/codec/DataBuffers.java
@@ -0,0 +1,31 @@
+package btools.codec;
+
+/**
+ * Container for some re-usable databuffers for the decoder
+ */
+public final class DataBuffers
+{
+ public byte[] iobuffer;
+ public byte[] tagbuf1 = new byte[256];
+ public byte[] bbuf1 = new byte[65636];
+ public int[] ibuf1 = new int[4096];
+ public int[] ibuf2 = new int[2048];
+ public int[] ibuf3 = new int[2048];
+ public int[] alon = new int[2048];
+ public int[] alat = new int[2048];
+
+ public DataBuffers()
+ {
+ this( new byte[65636] );
+ }
+
+ /**
+ * construct a set of databuffers except
+ * for 'iobuffer', where the given array is used
+ */
+ public DataBuffers( byte[] iobuffer )
+ {
+ this.iobuffer = iobuffer;
+ }
+
+}
diff --git a/brouter-codec/src/main/java/btools/codec/IntegerFifo3Pass.java b/brouter-codec/src/main/java/btools/codec/IntegerFifo3Pass.java
new file mode 100644
index 0000000..cdb186b
--- /dev/null
+++ b/brouter-codec/src/main/java/btools/codec/IntegerFifo3Pass.java
@@ -0,0 +1,62 @@
+package btools.codec;
+
+/**
+ * Special integer fifo suitable for 3-pass encoding
+ */
+public class IntegerFifo3Pass
+{
+ private int[] a;
+ private int size;
+ private int pos;
+
+ private int pass;
+
+ public IntegerFifo3Pass( int capacity )
+ {
+ a = capacity < 4 ? new int[4] : new int[capacity];
+ }
+
+ /**
+   * Starts a new encoding pass and resets the reading pointer
+ */
+ public void init()
+ {
+ pass++;
+ pos = 0;
+ }
+
+ /**
+ * writes to the fifo in pass2
+ */
+ public void add( int value )
+ {
+ if ( pass == 2 )
+ {
+ if ( size == a.length )
+ {
+ int[] aa = new int[2 * size];
+ System.arraycopy( a, 0, aa, 0, size );
+ a = aa;
+ }
+ a[size++] = value;
+ }
+ }
+
+ /**
+ * reads from the fifo in pass3 (in pass1/2 returns just 1)
+ */
+ public int getNext()
+ {
+ return pass == 3 ? get( pos++ ) : 1;
+ }
+
+ private int get( int idx )
+ {
+ if ( idx >= size )
+ {
+ throw new IndexOutOfBoundsException( "list size=" + size + " idx=" + idx );
+ }
+ return a[idx];
+ }
+}
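
Not part of the patch: a minimal usage sketch of the 3-pass contract. Values added during pass 2 come back from getNext() in pass 3, while passes 1 and 2 only see the dummy value 1. The class and methods are exactly those defined above; the surrounding main() is illustrative only.

    import btools.codec.IntegerFifo3Pass;

    public class Fifo3PassSketch
    {
      public static void main( String[] args )
      {
        IntegerFifo3Pass counts = new IntegerFifo3Pass( 8 );
        for ( int pass = 1; pass <= 3; pass++ )
        {
          counts.init(); // advance to the next pass and rewind the read pointer
          for ( int item = 0; item < 3; item++ )
          {
            int predicted = counts.getNext(); // 1 in pass 1/2, the pass-2 count in pass 3
            int counted = item * 2;           // whatever gets counted while encoding this item
            counts.add( counted );            // only stored during pass 2
            System.out.println( "pass=" + pass + " predicted=" + predicted + " counted=" + counted );
          }
        }
      }
    }
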
diff --git a/brouter-codec/src/main/java/btools/codec/LinkedListContainer.java b/brouter-codec/src/main/java/btools/codec/LinkedListContainer.java
new file mode 100644
index 0000000..4004444
--- /dev/null
+++ b/brouter-codec/src/main/java/btools/codec/LinkedListContainer.java
@@ -0,0 +1,87 @@
+package btools.codec;
+
+/**
+ * Simple container for a list of lists of integers
+ */
+public class LinkedListContainer
+{
+ private int[] ia; // prev, data, prev, data, ...
+ private int size;
+ private int[] startpointer; // 0=void, odd=head-data-cell
+ private int listpointer;
+
+ /**
+ * Construct a container for the given number of lists
+ *
+ * If no default-buffer is given, an int[nlists*4] is constructed,
+ * able to hold 2 entries per list on average
+ *
+ * @param nlists the number of lists
+ * @param defaultbuffer an optional data array for re-use (gets replaced if too small)
+ */
+ public LinkedListContainer( int nlists, int[] defaultbuffer )
+ {
+ ia = defaultbuffer == null ? new int[nlists*4] : defaultbuffer;
+ startpointer = new int[nlists];
+ }
+
+ /**
+ * Add a data element to the given list
+ *
+ * @param listNr the list to add the data to
+ * @param data the data value
+ */
+ public void addDataElement( int listNr, int data )
+ {
+ if ( size + 2 > ia.length )
+ {
+ resize();
+ }
+ ia[size++] = startpointer[ listNr ];
+ startpointer[ listNr ] = size;
+ ia[size++] = data;
+ }
+
+ /**
+ * Initialize a list for reading
+ *
+ * @param listNr the list to initialize
+ * @return the number of entries in that list
+ */
+ public int initList( int listNr )
+ {
+ int cnt = 0;
+ int lp = listpointer = startpointer[ listNr ];
+ while( lp != 0 )
+ {
+ lp = ia[ lp-1 ];
+ cnt++;
+ }
+ return cnt;
+ }
+
+ /**
+ * Get a data element from the list previously initialized.
+   * Data elements are returned in reverse order (lifo)
+   *
+   * @return the data element
+   * @throws IllegalArgumentException if there is no more element
+ */
+ public int getDataElement()
+ {
+ if ( listpointer == 0 )
+ {
+ throw new IllegalArgumentException( "no more element!" );
+ }
+ int data = ia[ listpointer ];
+ listpointer = ia[ listpointer-1 ];
+ return data;
+ }
+
+ private void resize()
+ {
+ int[] ia2 = new int[2*ia.length];
+ System.arraycopy( ia, 0, ia2, 0, ia.length );
+ ia = ia2;
+ }
+}
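
Not part of the patch: a small sketch of the read protocol. initList() returns the element count, and getDataElement() then delivers that list's elements in reverse insertion order.

    import btools.codec.LinkedListContainer;

    public class LinkedListSketch
    {
      public static void main( String[] args )
      {
        LinkedListContainer llc = new LinkedListContainer( 2, null );
        llc.addDataElement( 0, 7 );
        llc.addDataElement( 1, 8 );
        llc.addDataElement( 0, 9 );

        int cnt = llc.initList( 0 );                  // 2 elements in list 0
        for ( int i = 0; i < cnt; i++ )
        {
          System.out.println( llc.getDataElement() ); // prints 9, then 7 (lifo)
        }
      }
    }
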
diff --git a/brouter-codec/src/main/java/btools/codec/MicroCache.java b/brouter-codec/src/main/java/btools/codec/MicroCache.java
new file mode 100644
index 0000000..11d547e
--- /dev/null
+++ b/brouter-codec/src/main/java/btools/codec/MicroCache.java
@@ -0,0 +1,306 @@
+package btools.codec;
+
+import btools.util.ByteDataWriter;
+
+/**
+ * a micro-cache is a data cache for an area of some square kilometers or some
+ * hundreds or thousands of nodes
+ *
+ * This is the basic io-unit: a full micro-cache is always loaded from the
+ * data-file if a node is requested at a position not yet covered by the caches
+ * already loaded
+ *
+ * The nodes are represented in a compact way (typically 20-50 bytes per node),
+ * but in a way that they do not depend on each other, and garbage collection is
+ * supported to remove the nodes already consumed from the cache.
+ *
+ * The cache-internal data representation is different from that in the
+ * data-files, where a cache is encoded as a whole, allowing more
+ * redundancy-removal for a more compact encoding
+ */
+public class MicroCache extends ByteDataWriter
+{
+ protected int[] faid;
+ protected int[] fapos;
+ protected int size = 0;
+
+ private int delcount = 0;
+ private int delbytes = 0;
+  private int p2size; // highest power of 2 not exceeding size (start offset of the binary search)
+
+ // cache control: a virgin cache can be
+ // put to ghost state for later recovery
+ public boolean virgin = true;
+ public boolean ghost = false;
+
+ public static boolean debug = false;
+
+ protected MicroCache( byte[] ab )
+ {
+ super( ab );
+ }
+
+ public static MicroCache emptyCache()
+ {
+ return new MicroCache( null ); // TODO: singleton?
+ }
+
+ protected void init( int size )
+ {
+ this.size = size;
+ delcount = 0;
+ delbytes = 0;
+ p2size = 0x40000000;
+ while (p2size > size)
+ p2size >>= 1;
+ }
+
+ public void finishNode( long id )
+ {
+ fapos[size] = aboffset;
+ faid[size] = shrinkId( id );
+ size++;
+ }
+
+ public void discardNode()
+ {
+ aboffset = startPos( size );
+ }
+
+ public int getSize()
+ {
+ return size;
+ }
+
+ public int getDataSize()
+ {
+ return ab == null ? 0 : ab.length;
+ }
+
+ /**
+ * Set the internal reader (aboffset, aboffsetEnd) to the body data for the given id
+ *
+ * If a node is not found in an empty cache, this is usually an edge-effect
+   * (data-file does not exist or neighbouring data-files of different age),
+   * but it can as well be a symptom of a node-identity breaking bug.
+ *
+ * Current implementation always returns false for not-found, however, for
+ * regression testing, at least for the case that is most likely a bug
+ * (node found but marked as deleted = ready for garbage collection
+   * = already consumed) the RuntimeException should be re-enabled
+ *
+ * @return true if id was found
+ */
+ public boolean getAndClear( long id64 )
+ {
+ if ( size == 0 )
+ {
+ return false;
+ }
+ int id = shrinkId( id64 );
+ int[] a = faid;
+ int offset = p2size;
+ int n = 0;
+
+ while (offset > 0)
+ {
+ int nn = n + offset;
+ if ( nn < size && a[nn] <= id )
+ {
+ n = nn;
+ }
+ offset >>= 1;
+ }
+ if ( a[n] == id )
+ {
+ if ( ( fapos[n] & 0x80000000 ) == 0 )
+ {
+ aboffset = startPos( n );
+ aboffsetEnd = fapos[n];
+ fapos[n] |= 0x80000000; // mark deleted
+ delbytes += aboffsetEnd - aboffset;
+ delcount++;
+ return true;
+ }
+ else // .. marked as deleted
+ {
+ // throw new RuntimeException( "MicroCache: node already consumed: id=" + id );
+ }
+ }
+ return false;
+ }
+
+ protected int startPos( int n )
+ {
+ return n > 0 ? fapos[n - 1] & 0x7fffffff : 0;
+ }
+
+ public void collect( int threshold )
+ {
+ if ( delcount > threshold )
+ {
+ virgin = false;
+
+ int nsize = size - delcount;
+ if ( nsize == 0 )
+ {
+ faid = null;
+ fapos = null;
+ }
+ else
+ {
+ int[] nfaid = new int[nsize];
+ int[] nfapos = new int[nsize];
+ int idx = 0;
+
+ byte[] nab = new byte[ab.length - delbytes];
+ int nab_off = 0;
+ for ( int i = 0; i < size; i++ )
+ {
+ int pos = fapos[i];
+ if ( ( pos & 0x80000000 ) == 0 )
+ {
+ int start = startPos( i );
+ int end = fapos[i];
+ int len = end - start;
+ System.arraycopy( ab, start, nab, nab_off, len );
+ nfaid[idx] = faid[i];
+ nab_off += len;
+ nfapos[idx] = nab_off;
+ idx++;
+ }
+ }
+ faid = nfaid;
+ fapos = nfapos;
+ ab = nab;
+ }
+ init( nsize );
+ }
+ }
+
+ public void unGhost()
+ {
+ ghost = false;
+ delcount = 0;
+ delbytes = 0;
+ for ( int i = 0; i < size; i++ )
+ {
+ fapos[i] &= 0x7fffffff; // clear deleted flags
+ }
+ }
+
+ /**
+ * @return the 64-bit global id for the given cache-position
+ */
+ public long getIdForIndex( int i )
+ {
+ int id32 = faid[i];
+ return expandId( id32 );
+ }
+
+ /**
+ * expand a 32-bit micro-cache-internal id into a 64-bit (lon|lat) global-id
+ *
+ * @see #shrinkId
+ */
+ public long expandId( int id32 )
+ {
+ throw new IllegalArgumentException( "expandId for empty cache" );
+ }
+
+ /**
+   * shrink a 64-bit (lon|lat) global-id into a 32-bit micro-cache-internal id
+ *
+ * @see #expandId
+ */
+ public int shrinkId( long id64 )
+ {
+ throw new IllegalArgumentException( "shrinkId for empty cache" );
+ }
+
+ /**
+ * @return true if the given lon/lat position is internal for that micro-cache
+ */
+ public boolean isInternal( int ilon, int ilat )
+ {
+ throw new IllegalArgumentException( "isInternal for empty cache" );
+ }
+
+ /**
+   * (statistically) encode the micro-cache into the format used in the datafiles
+ *
+ * @param buffer
+ * byte array to encode into (considered big enough)
+ * @return the size of the encoded data
+ */
+ public int encodeMicroCache( byte[] buffer )
+ {
+ throw new IllegalArgumentException( "encodeMicroCache for empty cache" );
+ }
+
+ /**
+ * Compare the content of this microcache to another
+ *
+   * @return null if equal, else a diff-report
+ */
+ public String compareWith( MicroCache mc )
+ {
+ String msg = _compareWith( mc );
+ if ( msg != null )
+ {
+ StringBuilder sb = new StringBuilder( msg );
+ sb.append( "\nencode cache:\n" ).append( summary() );
+ sb.append( "\ndecode cache:\n" ).append( mc.summary() );
+ return sb.toString();
+ }
+ return null;
+ }
+
+ private String summary()
+ {
+ StringBuilder sb = new StringBuilder( "size=" + size + " aboffset=" + aboffset );
+ for ( int i = 0; i < size; i++ )
+ {
+ sb.append( "\nidx=" + i + " faid=" + faid[i] + " fapos=" + fapos[i] );
+ }
+ return sb.toString();
+ }
+
+ private String _compareWith( MicroCache mc )
+ {
+ if ( size != mc.size )
+ {
+      return "size mismatch: " + size + "->" + mc.size;
+ }
+ for ( int i = 0; i < size; i++ )
+ {
+ if ( faid[i] != mc.faid[i] )
+ {
+        return "faid mismatch at index " + i + ":" + faid[i] + "->" + mc.faid[i];
+ }
+ int start = i > 0 ? fapos[i - 1] : 0;
+ int end = fapos[i] < mc.fapos[i] ? fapos[i] : mc.fapos[i];
+ int len = end - start;
+ for ( int offset = 0; offset < len; offset++ )
+ {
+ if ( mc.ab.length <= start + offset )
+ {
+ return "data buffer too small";
+ }
+ if ( ab[start + offset] != mc.ab[start + offset] )
+ {
+          return "data mismatch at index " + i + " offset=" + offset;
+ }
+ }
+ if ( fapos[i] != mc.fapos[i] )
+ {
+        return "fapos mismatch at index " + i + ":" + fapos[i] + "->" + mc.fapos[i];
+ }
+ }
+ if ( aboffset != mc.aboffset )
+ {
+      return "datasize mismatch: " + aboffset + "->" + mc.aboffset;
+ }
+ return null;
+ }
+}
diff --git a/brouter-codec/src/main/java/btools/codec/MicroCache1.java b/brouter-codec/src/main/java/btools/codec/MicroCache1.java
new file mode 100644
index 0000000..9e13d07
--- /dev/null
+++ b/brouter-codec/src/main/java/btools/codec/MicroCache1.java
@@ -0,0 +1,99 @@
+package btools.codec;
+
+import btools.util.ByteDataWriter;
+
+/**
+ * MicroCache1 is the old data format as of brouter 1.1 that does not allow
+ * filtering out inaccessible nodes at the beginning of the cache pipeline
+ *
+ * Kept for backward compatibility
+ */
+public final class MicroCache1 extends MicroCache
+{
+ private int lonIdxBase;
+ private int latIdxBase;
+
+ public MicroCache1( int size, byte[] databuffer, int lonIdx80, int latIdx80 ) throws Exception
+ {
+ super( databuffer ); // sets ab=databuffer, aboffset=0
+ faid = new int[size];
+ fapos = new int[size];
+ this.size = 0;
+ lonIdxBase = ( lonIdx80 / 5 ) * 62500 + 31250;
+ latIdxBase = ( latIdx80 / 5 ) * 62500 + 31250;
+ }
+
+ public MicroCache1( byte[] databuffer, int lonIdx80, int latIdx80 ) throws Exception
+ {
+ super( databuffer ); // sets ab=databuffer, aboffset=0
+ lonIdxBase = ( lonIdx80 / 5 ) * 62500 + 31250;
+ latIdxBase = ( latIdx80 / 5 ) * 62500 + 31250;
+
+ size = readInt();
+
+ // get net size
+ int nbytes = 0;
+ for ( int i = 0; i < size; i++ )
+ {
+ aboffset += 4;
+ int bodySize = readVarLengthUnsigned();
+ aboffset += bodySize;
+ nbytes += bodySize;
+ }
+
+ // new array with only net data
+ byte[] nab = new byte[nbytes];
+ aboffset = 4;
+ int noffset = 0;
+ faid = new int[size];
+ fapos = new int[size];
+
+ for ( int i = 0; i < size; i++ )
+ {
+ faid[i] = readInt() ^ 0x8000; // flip lat-sign for correct ordering
+
+ int bodySize = readVarLengthUnsigned();
+ System.arraycopy( ab, aboffset, nab, noffset, bodySize );
+ aboffset += bodySize;
+ noffset += bodySize;
+ fapos[i] = noffset;
+ }
+
+ ab = nab;
+ aboffset = noffset;
+ init( size );
+ }
+
+ @Override
+ public long expandId( int id32 )
+ {
+ int lon32 = lonIdxBase + (short) ( id32 >> 16 );
+ int lat32 = latIdxBase + (short) ( ( id32 & 0xffff ) ^ 0x8000 );
+ return ( (long) lon32 ) << 32 | lat32;
+ }
+
+ @Override
+ public int shrinkId( long id64 )
+ {
+ int lon32 = (int) ( id64 >> 32 );
+ int lat32 = (int) ( id64 & 0xffffffff );
+ return ( lon32 - lonIdxBase ) << 16 | ( ( ( lat32 - latIdxBase ) & 0xffff ) ^ 0x8000 );
+ }
+
+ @Override
+ public int encodeMicroCache( byte[] buffer )
+ {
+ ByteDataWriter dos = new ByteDataWriter( buffer );
+ dos.writeInt( size );
+ for ( int n = 0; n < size; n++ )
+ {
+ dos.writeInt( faid[n] ^ 0x8000 );
+ int start = n > 0 ? fapos[n - 1] : 0;
+ int end = fapos[n];
+ int len = end - start;
+ dos.writeVarLengthUnsigned( len );
+ dos.write( ab, start, len );
+ }
+ return dos.size();
+ }
+}
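
Not part of the patch: a round-trip sketch of the id compression above. The 32-bit internal id packs the longitude offset to the cell base into the upper 16 bits and the latitude offset (sign bit flipped for correct ordering) into the lower 16 bits; the grid indexes and offsets below are arbitrary example values.

    import btools.codec.MicroCache1;

    public class MicroCache1IdSketch
    {
      public static void main( String[] args ) throws Exception
      {
        // encoder-side cache; the lonIdx80/latIdx80 grid indexes are arbitrary example values
        MicroCache1 mc = new MicroCache1( 16, new byte[1024], 602, 425 );

        int lon32 = ( 602 / 5 ) * 62500 + 31250 + 1000; // lonIdxBase + 1000
        int lat32 = ( 425 / 5 ) * 62500 + 31250 - 2000; // latIdxBase - 2000
        long id64 = ( (long) lon32 ) << 32 | lat32;

        int id32 = mc.shrinkId( id64 );
        System.out.println( id64 == mc.expandId( id32 ) ); // true: lossless within the 16-bit offset range
      }
    }
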
diff --git a/brouter-codec/src/main/java/btools/codec/MicroCache2.java b/brouter-codec/src/main/java/btools/codec/MicroCache2.java
new file mode 100644
index 0000000..f6971ea
--- /dev/null
+++ b/brouter-codec/src/main/java/btools/codec/MicroCache2.java
@@ -0,0 +1,444 @@
+package btools.codec;
+
+import java.util.BitSet;
+import java.util.HashMap;
+
+import btools.util.ByteArrayUnifier;
+import btools.util.ByteDataReader;
+
+/**
+ * MicroCache2 is the new format that uses statistical encoding and
+ * is able to do access filtering and waypoint matching during decoding
+ */
+public final class MicroCache2 extends MicroCache
+{
+ private int lonBase;
+ private int latBase;
+ private int cellsize;
+
+ public MicroCache2( int size, byte[] databuffer, int lonIdx, int latIdx, int divisor ) throws Exception
+ {
+ super( databuffer ); // sets ab=databuffer, aboffset=0
+
+ faid = new int[size];
+ fapos = new int[size];
+ this.size = 0;
+ cellsize = 1000000 / divisor;
+ lonBase = lonIdx*cellsize;
+ latBase = latIdx*cellsize;
+ }
+
+ public byte[] readUnified( int len, ByteArrayUnifier u )
+ {
+ byte[] b = u.unify( ab, aboffset, len );
+ aboffset += len;
+ return b;
+ }
+
+ public MicroCache2( DataBuffers dataBuffers, int lonIdx, int latIdx, int divisor, TagValueValidator wayValidator, WaypointMatcher waypointMatcher ) throws Exception
+ {
+ super( null );
+ cellsize = 1000000 / divisor;
+ lonBase = lonIdx*cellsize;
+ latBase = latIdx*cellsize;
+
+ StatCoderContext bc = new StatCoderContext( dataBuffers.iobuffer );
+
+ TagValueCoder wayTagCoder = new TagValueCoder( bc, dataBuffers.tagbuf1, wayValidator );
+ TagValueCoder nodeTagCoder = new TagValueCoder( bc, dataBuffers.tagbuf1, null );
+ NoisyDiffCoder nodeIdxDiff = new NoisyDiffCoder( bc );
+ NoisyDiffCoder nodeEleDiff = new NoisyDiffCoder( bc );
+ NoisyDiffCoder extLonDiff = new NoisyDiffCoder(bc);
+ NoisyDiffCoder extLatDiff = new NoisyDiffCoder(bc);
+ NoisyDiffCoder transEleDiff = new NoisyDiffCoder( bc );
+
+ size = bc.decodeNoisyNumber( 5 );
+ faid = size > dataBuffers.ibuf2.length ? new int[size] : dataBuffers.ibuf2;
+ fapos = size > dataBuffers.ibuf3.length ? new int[size] : dataBuffers.ibuf3;
+
+ int[] alon = size > dataBuffers.alon.length ? new int[size] : dataBuffers.alon;
+ int[] alat = size > dataBuffers.alat.length ? new int[size] : dataBuffers.alat;
+
+ if ( debug ) System.out.println( "*** decoding cache of size=" + size );
+
+ bc.decodeSortedArray( faid, 0, size, 0x20000000, 0 );
+
+    for( int n = 0; n<size; n++ )
+    {
+      long id64 = expandId( faid[n] );
+      alon[n] = (int)(id64 >> 32);
+      alat[n] = (int)(id64 & 0xffffffff);
+    }
+
+ int netdatasize = bc.decodeNoisyNumber( 10 );
+ ab = netdatasize > dataBuffers.bbuf1.length ? new byte[netdatasize] : dataBuffers.bbuf1;
+ aboffset = 0;
+ BitSet validNodes = new BitSet( size );
+ int finaldatasize = 0;
+
+ LinkedListContainer reverseLinks = new LinkedListContainer( size, dataBuffers.ibuf1 );
+
+ int selev = 0;
+    for( int n=0; n<size; n++ ) // loop over nodes
+    {
+      // ... per-node decoding of elevation, node tags, links and transition-node geometry ...
+    }
+  }
+
+  @Override
+  public long expandId( int id32 )
+  {
+    int dlon = 0;
+    int dlat = 0;
+
+    for( int bm = 1; bm < 0x8000; bm <<= 1 )
+    {
+      if ( ( id32 & 1 ) != 0 ) dlon |= bm;
+      if ( ( id32 & 2 ) != 0 ) dlat |= bm;
+      id32 >>= 2;
+    }
+
+    int lon32 = lonBase + dlon;
+    int lat32 = latBase + dlat;
+
+    return ((long)lon32)<<32 | lat32;
+  }
+
+ @Override
+ public int shrinkId( long id64 )
+ {
+ int lon32 = (int)(id64 >> 32);
+ int lat32 = (int)(id64 & 0xffffffff);
+ int dlon = lon32 - lonBase;
+ int dlat = lat32 - latBase;
+ int id32 = 0;
+
+ for( int bm = 0x4000; bm > 0; bm >>= 1 )
+ {
+ id32 <<= 2;
+ if ( ( dlon & bm ) != 0 ) id32 |= 1;
+ if ( ( dlat & bm ) != 0 ) id32 |= 2;
+ }
+ return id32;
+ }
+
+ @Override
+ public boolean isInternal( int ilon, int ilat )
+ {
+ return ilon >= lonBase && ilon < lonBase + cellsize
+ && ilat >= latBase && ilat < latBase + cellsize;
+ }
+
+ @Override
+ public int encodeMicroCache( byte[] buffer )
+ {
+    HashMap<Long, Integer> idMap = new HashMap<Long, Integer>();
+ for( int n=0; n> 32);
+ int ilat = (int)(id64 & 0xffffffff);
+
+ nlinks = 0;
+ while( hasMoreData() ) // loop over links
+ {
+ // read link data
+ int startPointer = aboffset;
+ int endPointer = getEndPointer();
+
+ int ilonlink = ilon + readVarLengthSigned();
+ int ilatlink = ilat + readVarLengthSigned();
+
+ int sizecode = readVarLengthUnsigned();
+ boolean isReverse = ( sizecode & 1 ) != 0;
+ int descSize = sizecode >> 1;
+ byte[] description = null;
+ if ( descSize > 0 )
+ {
+ description = new byte[descSize];
+ readFully( description );
+ }
+
+ boolean isInternal = isInternal( ilonlink, ilatlink );
+ if ( isReverse && isInternal )
+ {
+ if ( dodebug ) System.out.println( "*** NOT encoding link reverse=" + isReverse + " internal=" + isInternal );
+ netdatasize -= aboffset-startPointer;
+ continue; // do not encode internal reverse links
+ }
+ if ( dodebug ) System.out.println( "*** encoding link reverse=" + isReverse + " internal=" + isInternal );
+ nlinks++;
+
+ if ( isInternal )
+ {
+ long link64 = ((long)ilonlink)<<32 | ilatlink;
+ Integer idx = idMap.get( Long.valueOf( link64 ) );
+ if ( idx == null ) throw new RuntimeException( "ups: internal not found?" );
+ int nodeIdx = idx.intValue();
+ if ( dodebug ) System.out.println( "*** target nodeIdx=" + nodeIdx );
+ if ( nodeIdx == n ) throw new RuntimeException( "ups: self ref?" );
+ nodeIdxDiff.encodeSignedValue( nodeIdx - n );
+ if ( dostats ) bc.assignBits( "nodeIdx" );
+ }
+ else
+ {
+ nodeIdxDiff.encodeSignedValue( 0 );
+ bc.encodeBit( isReverse );
+ extLonDiff.encodeSignedValue( ilonlink - ilon );
+ extLatDiff.encodeSignedValue( ilatlink - ilat );
+ if ( dostats ) bc.assignBits( "externalNode" );
+ }
+ wayTagCoder.encodeTagValueSet( description );
+ if ( dostats ) bc.assignBits( "wayDescIdx" );
+
+ if ( !isReverse )
+ {
+ byte[] geometry = readDataUntil( endPointer );
+ // write transition nodes
+ int count = transCounts.getNext();
+ if ( dodebug ) System.out.println( "*** encoding geometry with count=" + count );
+ bc.encodeVarBits( count++ );
+ if ( dostats ) bc.assignBits( "transcount" );
+ int transcount = 0;
+ if ( geometry != null )
+ {
+ int dlon_remaining = ilonlink - ilon;
+ int dlat_remaining = ilatlink - ilat;
+
+ ByteDataReader r = new ByteDataReader( geometry );
+ while ( r.hasMoreData() )
+ {
+ transcount++;
+
+ int dlon = r.readVarLengthSigned();
+ int dlat = r.readVarLengthSigned();
+ bc.encodePredictedValue( dlon, dlon_remaining/count );
+ bc.encodePredictedValue( dlat, dlat_remaining/count );
+ dlon_remaining -= dlon;
+ dlat_remaining -= dlat;
+ if ( count > 1 ) count--;
+ if ( dostats ) bc.assignBits( "transpos" );
+ transEleDiff.encodeSignedValue( r.readVarLengthSigned() );
+ if ( dostats ) bc.assignBits( "transele" );
+ }
+ }
+ transCounts.add( transcount );
+ }
+ }
+ linkCounts.add( nlinks );
+ }
+ if ( pass == 3 )
+ {
+ return bc.getEncodedLength();
+ }
+ }
+ }
+}
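
Not part of the patch: the MicroCache2 id scheme bit-interleaves the longitude and latitude offsets relative to the cell base, so spatially close nodes get numerically close ids, which helps keep the sorted id array cheap to encode. A small sketch with arbitrary cell indexes:

    import btools.codec.MicroCache2;

    public class MicroCache2IdSketch
    {
      public static void main( String[] args ) throws Exception
      {
        int divisor = 32; // example: 1000000/32 = 31250 micro-degrees per cell
        MicroCache2 mc = new MicroCache2( 16, new byte[1024], 240, 172, divisor );

        int lonBase = 240 * ( 1000000 / divisor );
        int latBase = 172 * ( 1000000 / divisor );

        // dlon=5 (binary 101) and dlat=3 (binary 011) interleave to 011011 = 27:
        // latitude bits land on the odd, longitude bits on the even bit positions
        int id32 = mc.shrinkId( ( (long) ( lonBase + 5 ) ) << 32 | ( latBase + 3 ) );
        System.out.println( Integer.toBinaryString( id32 ) ); // "11011"
      }
    }
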
diff --git a/brouter-codec/src/main/java/btools/codec/NoisyDiffCoder.java b/brouter-codec/src/main/java/btools/codec/NoisyDiffCoder.java
new file mode 100644
index 0000000..2dcde92
--- /dev/null
+++ b/brouter-codec/src/main/java/btools/codec/NoisyDiffCoder.java
@@ -0,0 +1,92 @@
+package btools.codec;
+
+/**
+ * Encoder/Decoder for signed integers that automatically detects the typical
+ * range of these numbers to determine a noisy-bit count as a very simple
+ * dictionary
+ *
+ * Adapted for 3-pass encoding (counters -> statistics -> encoding )
+ * but doesn't do anything at pass1
+ */
+public final class NoisyDiffCoder
+{
+ private int tot;
+ private int[] freqs;
+ private int noisybits;
+ private StatCoderContext bc;
+ private int pass;
+
+ /**
+   * Create a decoder and read the noisy-bit count from the given context
+ */
+ public NoisyDiffCoder( StatCoderContext bc )
+ {
+ noisybits = bc.decodeVarBits();
+ this.bc = bc;
+ }
+
+ /**
+ * Create an encoder for 3-pass-encoding
+ */
+ public NoisyDiffCoder()
+ {
+ }
+
+ /**
+ * encodes a signed int (pass3 only, stats collection in pass2)
+ */
+ public void encodeSignedValue( int value )
+ {
+ if ( pass == 3 )
+ {
+ bc.encodeNoisyDiff( value, noisybits );
+ }
+ else if ( pass == 2 )
+ {
+ count( value < 0 ? -value : value );
+ }
+ }
+
+ /**
+ * decodes a signed int
+ */
+ public int decodeSignedValue()
+ {
+ return bc.decodeNoisyDiff( noisybits );
+ }
+
+ /**
+ * Starts a new encoding pass and (in pass3) calculates the noisy-bit count
+ * from the stats collected in pass2 and writes that to the given context
+ */
+ public void encodeDictionary( StatCoderContext bc )
+ {
+ if ( ++pass == 3 )
+ {
+ // how many noisy bits?
+ for ( noisybits = 0; noisybits < 14 && tot > 0; noisybits++ )
+ {
+ if ( freqs[noisybits] < ( tot >> 1 ) )
+ break;
+ }
+ bc.encodeVarBits( noisybits );
+ }
+ this.bc = bc;
+ }
+
+ private void count( int value )
+ {
+ if ( freqs == null )
+ freqs = new int[14];
+ int bm = 1;
+ for ( int i = 0; i < 14; i++ )
+ {
+ if ( value < bm )
+ break;
+ else
+ freqs[i]++;
+ bm <<= 1;
+ }
+ tot++;
+ }
+}
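
Not part of the patch: a round-trip sketch of the 3-pass protocol. encodeDictionary() starts each pass; pass 2 collects the value statistics, pass 3 writes the noisy-bit count followed by the values, and the decoding constructor reads that count back.

    import btools.codec.NoisyDiffCoder;
    import btools.codec.StatCoderContext;

    public class NoisyDiffCoderSketch
    {
      public static void main( String[] args )
      {
        int[] diffs = { 3, -7, 120, -5, 0, 44 };
        byte[] ab = new byte[256];

        NoisyDiffCoder coder = new NoisyDiffCoder();
        for ( int pass = 1; pass <= 3; pass++ )
        {
          StatCoderContext bc = new StatCoderContext( ab );
          coder.encodeDictionary( bc );   // pass 3 writes the noisy-bit count
          for ( int d : diffs )
          {
            coder.encodeSignedValue( d ); // pass 2 counts, pass 3 encodes
          }
        }

        StatCoderContext in = new StatCoderContext( ab );
        NoisyDiffCoder decoder = new NoisyDiffCoder( in ); // reads the noisy-bit count
        for ( int d : diffs )
        {
          System.out.println( decoder.decodeSignedValue() + " (expected " + d + ")" );
        }
      }
    }
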
diff --git a/brouter-codec/src/main/java/btools/codec/StatCoderContext.java b/brouter-codec/src/main/java/btools/codec/StatCoderContext.java
new file mode 100644
index 0000000..6b2ade8
--- /dev/null
+++ b/brouter-codec/src/main/java/btools/codec/StatCoderContext.java
@@ -0,0 +1,291 @@
+package btools.codec;
+
+import java.util.TreeMap;
+
+import btools.util.BitCoderContext;
+
+public final class StatCoderContext extends BitCoderContext
+{
+  private static TreeMap<String, long[]> statsPerName;
+ private long lastbitpos = 0;
+
+ public StatCoderContext( byte[] ab )
+ {
+ super( ab );
+ }
+
+ /**
+ * assign the de-/encoded bits since the last call assignBits to the given
+ * name. Used for encoding statistics
+ *
+ * @see #getBitReport
+ */
+ public void assignBits( String name )
+ {
+ long bitpos = getBitPosition();
+ if ( statsPerName == null )
+ {
+      statsPerName = new TreeMap<String, long[]>();
+ }
+ long[] stats = statsPerName.get( name );
+ if ( stats == null )
+ {
+ stats = new long[2];
+ statsPerName.put( name, stats );
+ }
+ stats[0] += bitpos - lastbitpos;
+ stats[1] += 1;
+ lastbitpos = bitpos;
+ }
+
+ /**
+ * Get a textual report on the bit-statistics
+ *
+ * @see #assignBits
+ */
+ public static String getBitReport()
+ {
+ StringBuilder sb = new StringBuilder();
+ for ( String name : statsPerName.keySet() )
+ {
+ long[] stats = statsPerName.get( name );
+ sb.append( name + " count=" + stats[1] + " bits=" + stats[0] + "\n" );
+ }
+ statsPerName = null;
+ return sb.toString();
+ }
+
+ /**
+   * encode an unsigned integer with some of the least significant bits
+ * considered noisy
+ *
+ * @see #decodeNoisyNumber
+ */
+ public void encodeNoisyNumber( int value, int noisybits )
+ {
+ if ( value < 0 )
+ {
+ throw new IllegalArgumentException( "encodeVarBits expects positive value" );
+ }
+ if ( noisybits > 0 )
+ {
+ int mask = 0xffffffff >>> ( 32 - noisybits );
+ encodeBounded( mask, value & mask );
+ value >>= noisybits;
+ }
+ encodeVarBits( value );
+ }
+
+ /**
+   * decode an unsigned integer with some of the least significant bits
+ * considered noisy
+ *
+ * @see #encodeNoisyNumber
+ */
+ public int decodeNoisyNumber( int noisybits )
+ {
+ int value = 0;
+ if ( noisybits > 0 )
+ {
+ int mask = 0xffffffff >>> ( 32 - noisybits );
+ value = decodeBounded( mask );
+ }
+ return value | ( decodeVarBits() << noisybits );
+ }
+
+ /**
+   * encode a signed integer with some of the least significant bits considered
+ * noisy
+ *
+ * @see #decodeNoisyDiff
+ */
+ public void encodeNoisyDiff( int value, int noisybits )
+ {
+ if ( noisybits > 0 )
+ {
+ value += 1 << ( noisybits - 1 );
+ int mask = 0xffffffff >>> ( 32 - noisybits );
+ encodeBounded( mask, value & mask );
+ value >>= noisybits;
+ }
+ encodeVarBits( value < 0 ? -value : value );
+ if ( value != 0 )
+ {
+ encodeBit( value < 0 );
+ }
+ }
+
+ /**
+   * decode a signed integer with some of the least significant bits considered
+ * noisy
+ *
+ * @see #encodeNoisyDiff
+ */
+ public int decodeNoisyDiff( int noisybits )
+ {
+ int value = 0;
+ if ( noisybits > 0 )
+ {
+ int mask = 0xffffffff >>> ( 32 - noisybits );
+ value = decodeBounded( mask ) - ( 1 << ( noisybits - 1 ) );
+ }
+ int val2 = decodeVarBits() << noisybits;
+ if ( val2 != 0 )
+ {
+ if ( decodeBit() )
+ {
+ val2 = -val2;
+ }
+ }
+ return value + val2;
+ }
+
+ /**
+ * encode a signed integer with the typical range and median taken from the
+ * predicted value
+ *
+ * @see #decodePredictedValue
+ */
+ public void encodePredictedValue( int value, int predictor )
+ {
+ int p = predictor < 0 ? -predictor : predictor;
+ int noisybits = 0;
+
+ while (p > 2)
+ {
+ noisybits++;
+ p >>= 1;
+ }
+ encodeNoisyDiff( value - predictor, noisybits );
+ }
+
+ /**
+ * decode a signed integer with the typical range and median taken from the
+ * predicted value
+ *
+ * @see #encodePredictedValue
+ */
+ public int decodePredictedValue( int predictor )
+ {
+ int p = predictor < 0 ? -predictor : predictor;
+ int noisybits = 0;
+ while (p > 2)
+ {
+ noisybits++;
+ p >>= 1;
+ }
+ return predictor + decodeNoisyDiff( noisybits );
+ }
+
+ /**
+ * encode an integer-array making use of the fact that it is sorted. This is
+ * done, starting with the most significant bit, by recursively encoding the
+   * number of values with the current bit being 0. This yields a number of
+   * bits per value that only depends on the typical distance between subsequent
+   * values and also benefits from clustering of the values
+ *
+ * @param values
+ * the array to encode
+ * @param offset
+ * position in this array where to start
+ * @param subsize
+ * number of values to encode
+ * @param nextbit
+ * bitmask with the most significant bit set to 1
+ * @param mask
+ * should be 0
+ */
+ public void encodeSortedArray( int[] values, int offset, int subsize, int nextbit, int mask )
+ {
+ if ( subsize == 1 ) // last-choice shortcut
+ {
+ while (nextbit != 0)
+ {
+ encodeBit( ( values[offset] & nextbit ) != 0 );
+ nextbit >>= 1;
+ }
+ }
+ if ( nextbit == 0 )
+ {
+ return;
+ }
+
+ int data = mask & values[offset];
+ mask |= nextbit;
+
+ // count 0-bit-fraction
+ int i = offset;
+ int end = subsize + offset;
+ for ( ; i < end; i++ )
+ {
+ if ( ( values[i] & mask ) != data )
+ {
+ break;
+ }
+ }
+ int size1 = i - offset;
+ int size2 = subsize - size1;
+
+ encodeBounded( subsize, size1 );
+ if ( size1 > 0 )
+ {
+ encodeSortedArray( values, offset, size1, nextbit >> 1, mask );
+ }
+ if ( size2 > 0 )
+ {
+ encodeSortedArray( values, i, size2, nextbit >> 1, mask );
+ }
+ }
+
+ /**
+ * @see #encodeSortedArray
+ *
+ * @param values
+ * the array to encode
+ * @param offset
+ * position in this array where to start
+ * @param subsize
+ * number of values to encode
+ * @param nextbit
+ * bitmask with the most significant bit set to 1
+ * @param value
+ * should be 0
+ */
+ public void decodeSortedArray( int[] values, int offset, int subsize, int nextbit, int value )
+ {
+ if ( subsize == 1 ) // last-choice shortcut
+ {
+ while (nextbit != 0)
+ {
+ if ( decodeBit() )
+ {
+ value |= nextbit;
+ }
+ nextbit >>= 1;
+ }
+ values[offset] = value;
+ return;
+ }
+ if ( nextbit == 0 )
+ {
+ while (subsize-- > 0)
+ {
+ values[offset++] = value;
+ }
+ return;
+ }
+
+ int size1 = decodeBounded( subsize );
+ int size2 = subsize - size1;
+
+ if ( size1 > 0 )
+ {
+ decodeSortedArray( values, offset, size1, nextbit >> 1, value );
+ }
+ if ( size2 > 0 )
+ {
+ decodeSortedArray( values, offset + size1, size2, nextbit >> 1, value | nextbit );
+ }
+ }
+
+}
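
Not part of the patch: a small sketch combining the noisy-number coding with the sorted-array coder and the bit-statistics hooks defined above.

    import btools.codec.StatCoderContext;

    public class StatCoderSketch
    {
      public static void main( String[] args )
      {
        byte[] ab = new byte[64];
        StatCoderContext out = new StatCoderContext( ab );

        out.encodeNoisyNumber( 1234, 4 );  // low 4 bits raw, 1234>>4 as var-bits
        out.assignBits( "number" );
        out.encodeSortedArray( new int[] { 3, 17, 17, 42 }, 0, 4, 0x20, 0 );
        out.assignBits( "sortedArray" );

        StatCoderContext in = new StatCoderContext( ab );
        System.out.println( in.decodeNoisyNumber( 4 ) ); // 1234
        int[] a = new int[4];
        in.decodeSortedArray( a, 0, 4, 0x20, 0 );
        System.out.println( a[0] + " " + a[1] + " " + a[2] + " " + a[3] ); // 3 17 17 42

        System.out.println( StatCoderContext.getBitReport() ); // bits spent per assigned name
      }
    }
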
diff --git a/brouter-codec/src/main/java/btools/codec/TagValueCoder.java b/brouter-codec/src/main/java/btools/codec/TagValueCoder.java
new file mode 100644
index 0000000..b667299
--- /dev/null
+++ b/brouter-codec/src/main/java/btools/codec/TagValueCoder.java
@@ -0,0 +1,235 @@
+package btools.codec;
+
+import java.util.HashMap;
+import java.util.PriorityQueue;
+
+import btools.util.BitCoderContext;
+
+/**
+ * Encoder/Decoder for way-/node-descriptions
+ *
+ * It detects identical descriptions and sorts them
+ * into a huffman-tree according to their frequencies
+ *
+ * Adapted for 3-pass encoding (counters -> statistics -> encoding )
+ * but doesn't do anything at pass1
+ */
+public final class TagValueCoder
+{
+  private HashMap<TagValueSet, TagValueSet> identityMap;
+ private Object tree;
+ private BitCoderContext bc;
+ private int pass;
+
+ public void encodeTagValueSet( byte[] data )
+ {
+ if ( pass == 1 )
+ {
+ return;
+ }
+ TagValueSet tvsProbe = new TagValueSet();
+ tvsProbe.data = data;
+ TagValueSet tvs = identityMap.get( tvsProbe );
+ if ( pass == 3 )
+ {
+ bc.encodeBounded( tvs.range - 1, tvs.code );
+ }
+ else if ( pass == 2 )
+ {
+ if ( tvs == null )
+ {
+ tvs = tvsProbe;
+ identityMap.put( tvs, tvs );
+ }
+ tvs.frequency++;
+ }
+ }
+
+ public byte[] decodeTagValueSet()
+ {
+ Object node = tree;
+ while (node instanceof TreeNode)
+ {
+ TreeNode tn = (TreeNode) node;
+ boolean nextBit = bc.decodeBit();
+ node = nextBit ? tn.child2 : tn.child1;
+ }
+ return (byte[]) node;
+ }
+
+ public void encodeDictionary( BitCoderContext bc )
+ {
+ if ( ++pass == 3 )
+ {
+      PriorityQueue<TagValueSet> queue = new PriorityQueue<TagValueSet>( identityMap.values() );
+ while (queue.size() > 1)
+ {
+ TagValueSet node = new TagValueSet();
+ node.child1 = queue.poll();
+ node.child2 = queue.poll();
+ node.frequency = node.child1.frequency + node.child2.frequency;
+ queue.add( node );
+ }
+ TagValueSet root = queue.poll();
+ root.encode( bc, 1, 0 );
+ }
+ this.bc = bc;
+ }
+
+ public TagValueCoder( BitCoderContext bc, byte[] buffer, TagValueValidator validator )
+ {
+ tree = decodeTree( bc, buffer, validator );
+ this.bc = bc;
+ }
+
+ public TagValueCoder()
+ {
+    identityMap = new HashMap<TagValueSet, TagValueSet>();
+ }
+
+ private Object decodeTree( BitCoderContext bc, byte[] buffer, TagValueValidator validator )
+ {
+ boolean isNode = bc.decodeBit();
+ if ( isNode )
+ {
+ TreeNode node = new TreeNode();
+ node.child1 = decodeTree( bc, buffer, validator );
+ node.child2 = decodeTree( bc, buffer, validator );
+ return node;
+ }
+ BitCoderContext target = null;
+ for ( ;; )
+ {
+ int delta = bc.decodeVarBits();
+ if ( target == null )
+ {
+ if ( delta == 0 )
+ return null;
+ target = new BitCoderContext( buffer );
+ target.encodeBit( false ); // dummy reverse bit
+ }
+ target.encodeVarBits( delta );
+ if ( delta == 0 )
+ break;
+ int data = bc.decodeVarBits();
+ target.encodeVarBits( data );
+ }
+ int len = target.getEncodedLength();
+ byte[] res = new byte[len];
+ System.arraycopy( buffer, 0, res, 0, len );
+
+ if ( validator == null || validator.accessAllowed( res ) )
+ {
+ return res;
+ }
+ return null;
+ }
+
+ public static final class TreeNode
+ {
+ public Object child1;
+ public Object child2;
+ }
+
+  public static final class TagValueSet implements Comparable<TagValueSet>
+ {
+ public byte[] data;
+ public int frequency;
+ public int code;
+ public int range;
+ public TagValueSet child1;
+ public TagValueSet child2;
+
+ public void encode( BitCoderContext bc, int range, int code )
+ {
+ this.range = range;
+ this.code = code;
+ boolean isNode = child1 != null;
+ bc.encodeBit( isNode );
+ if ( isNode )
+ {
+ child1.encode( bc, range << 1, code );
+ child2.encode( bc, range << 1, code + range );
+ }
+ else
+ {
+ if ( data == null )
+ {
+ bc.encodeVarBits( 0 );
+ return;
+ }
+ BitCoderContext src = new BitCoderContext( data );
+ if ( src.decodeBit() )
+ {
+ throw new IllegalArgumentException( "cannot encode reverse bit!" );
+ }
+ for ( ;; )
+ {
+ int delta = src.decodeVarBits();
+ bc.encodeVarBits( delta );
+ if ( delta == 0 )
+ {
+ break;
+ }
+ int data = src.decodeVarBits();
+ bc.encodeVarBits( data );
+ }
+ }
+ }
+
+ @Override
+ public boolean equals( Object o )
+ {
+ if ( o instanceof TagValueSet )
+ {
+ TagValueSet tvs = (TagValueSet) o;
+ if ( data == null )
+ {
+ return tvs.data == null;
+ }
+ if ( tvs.data == null )
+ {
+ return data == null;
+ }
+ if ( data.length != tvs.data.length )
+ {
+ return false;
+ }
+ for ( int i = 0; i < data.length; i++ )
+ {
+ if ( data[i] != tvs.data[i] )
+ {
+ return false;
+ }
+ }
+ return true;
+ }
+ return false;
+ }
+
+ @Override
+ public int hashCode()
+ {
+ if ( data == null )
+ {
+ return 0;
+ }
+ int h = 17;
+ for ( int i = 0; i < data.length; i++ )
+ {
+ h = ( h << 8 ) + data[i];
+ }
+ return h;
+ }
+
+ @Override
+ public int compareTo( TagValueSet tvs )
+ {
+ if ( frequency < tvs.frequency )
+ return -1;
+ if ( frequency > tvs.frequency )
+ return 1;
+ return 0;
+ }
+ }
+}
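
Not part of the patch: a round-trip sketch of the 3-pass huffman coding. Tag-value descriptions are themselves small bit streams (a reverse-direction bit, then (delta,value) var-bit pairs terminated by a zero delta), so the helper below builds two valid descriptions by hand; that helper is illustrative only and not part of the library.

    import java.util.Arrays;

    import btools.codec.TagValueCoder;
    import btools.util.BitCoderContext;

    public class TagValueCoderSketch
    {
      private static byte[] description( int... deltaValuePairs )
      {
        byte[] buf = new byte[64];
        BitCoderContext ctx = new BitCoderContext( buf );
        ctx.encodeBit( false );                        // reverse-direction bit
        for ( int i = 0; i < deltaValuePairs.length; i += 2 )
        {
          ctx.encodeVarBits( deltaValuePairs[i] );     // key-index delta (> 0)
          ctx.encodeVarBits( deltaValuePairs[i + 1] ); // value index
        }
        ctx.encodeVarBits( 0 );                        // terminator
        return Arrays.copyOf( buf, ctx.getEncodedLength() );
      }

      public static void main( String[] args )
      {
        byte[] frequent = description( 2, 4 );
        byte[] rare = description( 6, 1 );
        byte[][] ways = { frequent, frequent, frequent, rare };

        byte[] ab = new byte[256];
        TagValueCoder encoder = new TagValueCoder();
        for ( int pass = 1; pass <= 3; pass++ )
        {
          BitCoderContext bc = new BitCoderContext( ab );
          encoder.encodeDictionary( bc );   // pass 3 writes the huffman tree
          for ( byte[] w : ways )
          {
            encoder.encodeTagValueSet( w ); // pass 2 counts, pass 3 writes the code
          }
        }

        BitCoderContext in = new BitCoderContext( ab );
        TagValueCoder decoder = new TagValueCoder( in, new byte[64], null );
        System.out.println( Arrays.equals( decoder.decodeTagValueSet(), frequent ) ); // true
      }
    }
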
diff --git a/brouter-codec/src/main/java/btools/codec/TagValueValidator.java b/brouter-codec/src/main/java/btools/codec/TagValueValidator.java
new file mode 100644
index 0000000..7c7693e
--- /dev/null
+++ b/brouter-codec/src/main/java/btools/codec/TagValueValidator.java
@@ -0,0 +1,11 @@
+package btools.codec;
+
+
+public interface TagValueValidator
+{
+ /**
+ * @param tagValueSet the way description to check
+ * @return true if access is allowed in the current profile
+ */
+ public boolean accessAllowed( byte[] tagValueSet );
+}
diff --git a/brouter-codec/src/main/java/btools/codec/WaypointMatcher.java b/brouter-codec/src/main/java/btools/codec/WaypointMatcher.java
new file mode 100644
index 0000000..0438ac1
--- /dev/null
+++ b/brouter-codec/src/main/java/btools/codec/WaypointMatcher.java
@@ -0,0 +1,13 @@
+package btools.codec;
+
+/**
+ * a waypoint matcher gets way geometries
+ * from the decoder to find the closest
+ * matches to the waypoints
+ */
+public interface WaypointMatcher
+{
+ void startNode( int ilon, int ilat );
+ void transferNode( int ilon, int ilat );
+ void endNode( int ilon, int ilat );
+}
diff --git a/brouter-codec/src/test/java/btools/codec/LinkedListContainerTest.java b/brouter-codec/src/test/java/btools/codec/LinkedListContainerTest.java
new file mode 100644
index 0000000..8f8f623
--- /dev/null
+++ b/brouter-codec/src/test/java/btools/codec/LinkedListContainerTest.java
@@ -0,0 +1,52 @@
+package btools.codec;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+public class LinkedListContainerTest
+{
+ @Test
+ public void linkedListTest1()
+ {
+ int nlists = 553;
+
+ LinkedListContainer llc = new LinkedListContainer( nlists, null );
+
+ for ( int ln = 0; ln < nlists; ln++ )
+ {
+ for ( int i = 0; i < 10; i++ )
+ {
+ llc.addDataElement( ln, ln * i );
+ }
+ }
+
+ for ( int i = 0; i < 10; i++ )
+ {
+ for ( int ln = 0; ln < nlists; ln++ )
+ {
+ llc.addDataElement( ln, ln * i );
+ }
+ }
+
+ for ( int ln = 0; ln < nlists; ln++ )
+ {
+ int cnt = llc.initList( ln );
+ Assert.assertTrue( "list size test", cnt == 20 );
+
+ for ( int i = 19; i >= 0; i-- )
+ {
+ int data = llc.getDataElement();
+ Assert.assertTrue( "data value test", data == ln * ( i % 10 ) );
+ }
+ }
+
+ try
+ {
+ llc.getDataElement();
+ Assert.fail( "no more elements expected" );
+ }
+ catch (IllegalArgumentException e)
+ {
+ }
+ }
+}
diff --git a/brouter-codec/src/test/java/btools/codec/StatCoderContextTest.java b/brouter-codec/src/test/java/btools/codec/StatCoderContextTest.java
new file mode 100644
index 0000000..2ee1af6
--- /dev/null
+++ b/brouter-codec/src/test/java/btools/codec/StatCoderContextTest.java
@@ -0,0 +1,127 @@
+package btools.codec;
+
+import java.util.Arrays;
+import java.util.Random;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+public class StatCoderContextTest
+{
+ @Test
+ public void noisyVarBitsEncodeDecodeTest()
+ {
+ byte[] ab = new byte[40000];
+ StatCoderContext ctx = new StatCoderContext( ab );
+ for ( int noisybits = 0; noisybits < 12; noisybits++ )
+ {
+ for ( int i = 0; i < 1000; i++ )
+ {
+ ctx.encodeNoisyNumber( i, noisybits );
+ }
+ }
+ ctx = new StatCoderContext( ab );
+
+ for ( int noisybits = 0; noisybits < 12; noisybits++ )
+ {
+ for ( int i = 0; i < 1000; i++ )
+ {
+ int value = ctx.decodeNoisyNumber( noisybits );
+ if ( value != i )
+ {
+ Assert.fail( "value mismatch: noisybits=" + noisybits + " i=" + i + " value=" + value );
+ }
+ }
+ }
+ }
+
+ @Test
+ public void noisySignedVarBitsEncodeDecodeTest()
+ {
+ byte[] ab = new byte[80000];
+ StatCoderContext ctx = new StatCoderContext( ab );
+ for ( int noisybits = 0; noisybits < 12; noisybits++ )
+ {
+ for ( int i = -1000; i < 1000; i++ )
+ {
+ ctx.encodeNoisyDiff( i, noisybits );
+ }
+ }
+ ctx = new StatCoderContext( ab );
+
+ for ( int noisybits = 0; noisybits < 12; noisybits++ )
+ {
+ for ( int i = -1000; i < 1000; i++ )
+ {
+ int value = ctx.decodeNoisyDiff( noisybits );
+ if ( value != i )
+ {
+ Assert.fail( "value mismatch: noisybits=" + noisybits + " i=" + i + " value=" + value );
+ }
+ }
+ }
+ }
+
+ @Test
+ public void predictedValueEncodeDecodeTest()
+ {
+ byte[] ab = new byte[80000];
+ StatCoderContext ctx = new StatCoderContext( ab );
+ for ( int value = -100; value < 100; value += 5 )
+ {
+ for ( int predictor = -200; predictor < 200; predictor += 7 )
+ {
+ ctx.encodePredictedValue( value, predictor );
+ }
+ }
+ ctx = new StatCoderContext( ab );
+
+ for ( int value = -100; value < 100; value += 5 )
+ {
+ for ( int predictor = -200; predictor < 200; predictor += 7 )
+ {
+ int decodedValue = ctx.decodePredictedValue( predictor );
+ if ( value != decodedValue )
+ {
+ Assert.fail( "value mismatch: value=" + value + " predictor=" + predictor + " decodedValue=" + decodedValue );
+ }
+ }
+ }
+ }
+
+ @Test
+ public void sortedArrayEncodeDecodeTest()
+ {
+ Random rand = new Random();
+ int size = 1000000;
+ int[] values = new int[size];
+ for ( int i = 0; i < size; i++ )
+ {
+ values[i] = rand.nextInt() & 0x0fffffff;
+ }
+ values[5] = 175384; // force collision
+ values[8] = 175384;
+
+ values[15] = 275384; // force neighbours
+ values[18] = 275385;
+
+ Arrays.sort( values );
+
+ byte[] ab = new byte[3000000];
+ StatCoderContext ctx = new StatCoderContext( ab );
+ ctx.encodeSortedArray( values, 0, size, 0x08000000, 0 );
+
+ ctx = new StatCoderContext( ab );
+
+ int[] decodedValues = new int[size];
+ ctx.decodeSortedArray( decodedValues, 0, size, 0x08000000, 0 );
+
+ for ( int i = 0; i < size; i++ )
+ {
+ if ( values[i] != decodedValues[i] )
+ {
+ Assert.fail( "mismatch at i=" + i + " " + values[i] + "<>" + decodedValues[i] );
+ }
+ }
+ }
+}
diff --git a/brouter-core/pom.xml b/brouter-core/pom.xml
index f111c6f..8a486b2 100644
--- a/brouter-core/pom.xml
+++ b/brouter-core/pom.xml
@@ -17,6 +17,11 @@
       <artifactId>brouter-util</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.btools</groupId>
+      <artifactId>brouter-codec</artifactId>
+      <version>${project.version}</version>
+    </dependency>
     <dependency>
       <groupId>org.btools</groupId>
       <artifactId>brouter-mapaccess</artifactId>
diff --git a/brouter-core/src/main/java/btools/router/MatchedWaypoint.java b/brouter-core/src/main/java/btools/router/MatchedWaypoint.java
index cb59d90..055ccd9 100644
--- a/brouter-core/src/main/java/btools/router/MatchedWaypoint.java
+++ b/brouter-core/src/main/java/btools/router/MatchedWaypoint.java
@@ -18,7 +18,7 @@ final class MatchedWaypoint
public OsmNodeNamed crosspoint;
public OsmNodeNamed waypoint;
public double radius;
- public int cost;
+ public boolean hasUpdate;
public void writeToStream( DataOutput dos ) throws IOException
{
diff --git a/brouter-core/src/main/java/btools/router/OsmPath.java b/brouter-core/src/main/java/btools/router/OsmPath.java
index 4e20fb6..e7bb104 100644
--- a/brouter-core/src/main/java/btools/router/OsmPath.java
+++ b/brouter-core/src/main/java/btools/router/OsmPath.java
@@ -129,7 +129,7 @@ final class OsmPath implements OsmLinkHolder
MessageData msgData = new MessageData();
- OsmTransferNode transferNode = link.decodeFirsttransfer();
+ OsmTransferNode transferNode = link.decodeFirsttransfer( p1 );
OsmNode targetNode = link.targetNode;
for(;;)
{
diff --git a/brouter-core/src/main/java/btools/router/RoutingContext.java b/brouter-core/src/main/java/btools/router/RoutingContext.java
index 67b548f..91acb69 100644
--- a/brouter-core/src/main/java/btools/router/RoutingContext.java
+++ b/brouter-core/src/main/java/btools/router/RoutingContext.java
@@ -140,6 +140,27 @@ public final class RoutingContext implements DistanceChecker
}
}
+  public void cleanNogolist( List<OsmNodeNamed> waypoints )
+ {
+ if ( nogopoints == null ) return;
+    List<OsmNodeNamed> nogos = new ArrayList<OsmNodeNamed>();
+ for( OsmNodeNamed nogo : nogopoints )
+ {
+ int radiusInMeter = (int)(nogo.radius * 111894.);
+ boolean goodGuy = true;
+ for( OsmNodeNamed wp : waypoints )
+ {
+ if ( wp.calcDistance( nogo ) < radiusInMeter )
+ {
+ goodGuy = false;
+ break;
+ }
+ }
+ if ( goodGuy ) nogos.add( nogo );
+ }
+ nogopoints = nogos;
+ }
+
public long[] getNogoChecksums()
{
long[] cs = new long[3];
@@ -215,8 +236,7 @@ public final class RoutingContext implements DistanceChecker
// calculate remaining distance
if ( s2 < 0. )
{
- double distance = d > 0. ? -s2 / d : 0.;
- wayfraction = d > 0. ? distance / d : 0.;
+ wayfraction = -s2 / (d*d);
double xm = x2 - wayfraction*dx;
double ym = y2 - wayfraction*dy;
ilonshortest = (int)(xm / coslat6 + nogo.ilon);
diff --git a/brouter-core/src/main/java/btools/router/RoutingEngine.java b/brouter-core/src/main/java/btools/router/RoutingEngine.java
index 74f3c91..8ad6398 100644
--- a/brouter-core/src/main/java/btools/router/RoutingEngine.java
+++ b/brouter-core/src/main/java/btools/router/RoutingEngine.java
@@ -3,12 +3,13 @@ package btools.router;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
+import java.io.PrintWriter;
+import java.io.StringWriter;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
-import btools.expressions.BExpressionContext;
import btools.expressions.BExpressionContextGlobal;
import btools.expressions.BExpressionContextNode;
import btools.expressions.BExpressionContextWay;
@@ -28,6 +29,7 @@ public class RoutingEngine extends Thread
private boolean finished = false;
   protected List<OsmNodeNamed> waypoints = null;
+  protected List<MatchedWaypoint> matchedWaypoints;
private int linksProcessed = 0;
protected OsmTrack foundTrack = new OsmTrack();
@@ -85,8 +87,8 @@ public class RoutingEngine extends Thread
BExpressionMetaData meta = new BExpressionMetaData();
BExpressionContextGlobal expctxGlobal = new BExpressionContextGlobal( meta );
- rc.expctxWay = new BExpressionContextWay( rc.serversizing ? 262144 : 4096, meta );
- rc.expctxNode = new BExpressionContextNode( rc.serversizing ? 16384 : 1024, meta );
+ rc.expctxWay = new BExpressionContextWay( rc.serversizing ? 262144 : 8192, meta );
+ rc.expctxNode = new BExpressionContextNode( rc.serversizing ? 16384 : 2048, meta );
meta.readMetaData( new File( profileDir, "lookups.dat" ) );
@@ -111,6 +113,7 @@ public class RoutingEngine extends Thread
{
infoLogWriter.write( s );
infoLogWriter.write( '\n' );
+ infoLogWriter.flush();
}
catch( IOException io )
{
@@ -119,6 +122,14 @@ public class RoutingEngine extends Thread
}
}
+ private void logThrowable( Throwable t )
+ {
+ StringWriter sw = new StringWriter();
+ PrintWriter pw = new PrintWriter(sw);
+ t.printStackTrace(pw);
+ logInfo( sw.toString() );
+ }
+
public void run()
{
doRun( 0 );
@@ -135,6 +146,9 @@ public class RoutingEngine extends Thread
logInfo( "start request at " + new Date() );
}
+ // delete nogos with waypoints in them
+ routingContext.cleanNogolist( waypoints );
+
startTime = System.currentTimeMillis();
this.maxRunningTime = maxRunningTime;
int nsections = waypoints.size() - 1;
@@ -205,14 +219,14 @@ public class RoutingEngine extends Thread
{
errorMessage = e instanceof IllegalArgumentException ? e.getMessage() : e.toString();
logInfo( "Exception (linksProcessed=" + linksProcessed + ": " + errorMessage );
- e.printStackTrace();
+ logThrowable( e );
}
catch( Error e)
{
String hint = cleanOnOOM();
errorMessage = e.toString() + hint;
logInfo( "Error (linksProcessed=" + linksProcessed + ": " + errorMessage );
- e.printStackTrace();
+ logThrowable( e );
}
finally
{
@@ -250,14 +264,14 @@ public class RoutingEngine extends Thread
{
errorMessage = e instanceof IllegalArgumentException ? e.getMessage() : e.toString();
logInfo( "Exception (linksProcessed=" + linksProcessed + ": " + errorMessage );
- e.printStackTrace();
+ logThrowable( e );
}
catch( Error e)
{
String hint = cleanOnOOM();
errorMessage = e.toString() + hint;
logInfo( "Error (linksProcessed=" + linksProcessed + ": " + errorMessage );
- e.printStackTrace();
+ logThrowable( e );
}
finally
{
@@ -290,7 +304,7 @@ public class RoutingEngine extends Thread
private OsmTrack findTrack( OsmTrack[] refTracks, OsmTrack[] lastTracks )
{
OsmTrack totaltrack = new OsmTrack();
- MatchedWaypoint[] wayointIds = new MatchedWaypoint[waypoints.size()];
+ int nUnmatched = waypoints.size();
// check for a track for that target
OsmTrack nearbyTrack = null;
@@ -299,20 +313,27 @@ public class RoutingEngine extends Thread
nearbyTrack = OsmTrack.readBinary( routingContext.rawTrackPath, waypoints.get( waypoints.size()-1), routingContext.getNogoChecksums() );
if ( nearbyTrack != null )
{
- wayointIds[waypoints.size()-1] = nearbyTrack.endPoint;
- }
- }
-
- // match waypoints to nodes
- for( int i=0; i();
+ for( int i=0; i unmatchedWaypoints )
+ {
+ resetCache();
+ nodesCache.waypointMatcher = new WaypointMatcherImpl( unmatchedWaypoints, 250. );
+ for( MatchedWaypoint mwp : unmatchedWaypoints )
+ {
+ preloadPosition( mwp.waypoint );
+ }
+
+ // preliminary-hack: use old stuff if not yet matched
+    for( int i=0; i<unmatchedWaypoints.size(); i++ )
diff --git a/brouter-core/src/main/java/btools/router/WaypointMatcherImpl.java b/brouter-core/src/main/java/btools/router/WaypointMatcherImpl.java
new file mode 100644
--- /dev/null
+++ b/brouter-core/src/main/java/btools/router/WaypointMatcherImpl.java
+package btools.router;
+
+import java.util.List;
+
+import btools.codec.WaypointMatcher;
+import btools.mapaccess.OsmNode;
+
+final class WaypointMatcherImpl implements WaypointMatcher
+{
+  private List<MatchedWaypoint> waypoints;
+
+ private int lonStart;
+ private int latStart;
+ private boolean anyUpdate;
+ private int lonLast;
+ private int latLast;
+
+  public WaypointMatcherImpl( List<MatchedWaypoint> waypoints, double maxDistance )
+ {
+ this.waypoints = waypoints;
+ for ( MatchedWaypoint mwp : waypoints )
+ {
+ mwp.radius = maxDistance / 111894.; // 6378000. / 57.;
+ }
+ }
+
+ private void checkSegment( int lon1, int lat1, int lon2, int lat2 )
+ {
+ // todo: bounding-box pre-filter
+
+ double l = ( lat2 - 90000000 ) * 0.00000001234134;
+ double l2 = l * l;
+ double l4 = l2 * l2;
+ double coslat = 1. - l2 + l4 / 6.;
+ double coslat6 = coslat * 0.000001;
+
+ double dx = ( lon2 - lon1 ) * coslat6;
+ double dy = ( lat2 - lat1 ) * 0.000001;
+ double d = Math.sqrt( dy * dy + dx * dx );
+ if ( d == 0. )
+ return;
+
+ for ( MatchedWaypoint mwp : waypoints )
+ {
+ OsmNodeNamed wp = mwp.waypoint;
+
+ double x1 = ( lon1 - wp.ilon ) * coslat6;
+ double y1 = ( lat1 - wp.ilat ) * 0.000001;
+ double x2 = ( lon2 - wp.ilon ) * coslat6;
+ double y2 = ( lat2 - wp.ilat ) * 0.000001;
+ double r12 = x1 * x1 + y1 * y1;
+ double r22 = x2 * x2 + y2 * y2;
+ double radius = Math.abs( r12 < r22 ? y1 * dx - x1 * dy : y2 * dx - x2 * dy ) / d;
+
+ if ( radius < mwp.radius )
+ {
+ double s1 = x1 * dx + y1 * dy;
+ double s2 = x2 * dx + y2 * dy;
+
+ if ( s1 < 0. )
+ {
+ s1 = -s1;
+ s2 = -s2;
+ }
+ if ( s2 > 0. )
+ {
+ radius = Math.sqrt( s1 < s2 ? r12 : r22 );
+ if ( radius > mwp.radius )
+ continue;
+ }
+ // new match for that waypoint
+ mwp.radius = radius; // shortest distance to way
+ mwp.hasUpdate = true;
+ anyUpdate = true;
+ // calculate crosspoint
+ if ( mwp.crosspoint == null )
+ mwp.crosspoint = new OsmNodeNamed();
+ if ( s2 < 0. )
+ {
+ double wayfraction = -s2 / ( d * d );
+ double xm = x2 - wayfraction * dx;
+ double ym = y2 - wayfraction * dy;
+ mwp.crosspoint.ilon = (int) ( xm / coslat6 + wp.ilon );
+ mwp.crosspoint.ilat = (int) ( ym / 0.000001 + wp.ilat );
+ }
+ else if ( s1 > s2 )
+ {
+ mwp.crosspoint.ilon = lon2;
+ mwp.crosspoint.ilat = lat2;
+ }
+ else
+ {
+ mwp.crosspoint.ilon = lon1;
+ mwp.crosspoint.ilat = lat1;
+ }
+ }
+ }
+ }
+
+ @Override
+ public void startNode( int ilon, int ilat )
+ {
+ lonLast = lonStart = ilon;
+ latLast = latStart = ilat;
+ anyUpdate = false;
+ }
+
+ @Override
+ public void transferNode( int ilon, int ilat )
+ {
+ checkSegment( lonLast, latLast, ilon, ilat );
+ lonLast = ilon;
+ latLast = ilat;
+ }
+
+ @Override
+ public void endNode( int ilon, int ilat )
+ {
+ checkSegment( lonLast, latLast, ilon, ilat );
+ if ( anyUpdate )
+ {
+ for ( MatchedWaypoint mwp : waypoints )
+ {
+ if ( mwp.hasUpdate )
+ {
+ mwp.hasUpdate = false;
+ mwp.node1 = new OsmNode( lonStart, latStart );
+ mwp.node2 = new OsmNode( ilon, ilat );
+ }
+ }
+ }
+ }
+}
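
Not part of the patch: the projection test in checkSegment is easier to follow with plain coordinates. radius is the perpendicular distance to the segment's line, and the s1/s2 scalar products detect when the waypoint projects outside the segment, in which case the distance to the nearer endpoint counts. A standalone sketch of the same arithmetic, without any brouter types:

    public class SegmentDistanceSketch
    {
      // distance of point (px,py) to the segment (x1,y1)-(x2,y2),
      // mirroring the r12/r22/s1/s2 logic of checkSegment above
      static double distanceToSegment( double px, double py, double x1, double y1, double x2, double y2 )
      {
        double dx = x2 - x1, dy = y2 - y1;
        double d = Math.sqrt( dx * dx + dy * dy );
        double ax1 = x1 - px, ay1 = y1 - py; // waypoint -> first endpoint
        double ax2 = x2 - px, ay2 = y2 - py; // waypoint -> second endpoint
        double r12 = ax1 * ax1 + ay1 * ay1;
        double r22 = ax2 * ax2 + ay2 * ay2;
        // perpendicular distance to the segment's line (cross product / length)
        double radius = Math.abs( r12 < r22 ? ay1 * dx - ax1 * dy : ay2 * dx - ax2 * dy ) / d;
        double s1 = ax1 * dx + ay1 * dy;
        double s2 = ax2 * dx + ay2 * dy;
        if ( s1 < 0. ) { s1 = -s1; s2 = -s2; }
        if ( s2 > 0. ) // projection falls outside -> nearer endpoint counts
        {
          radius = Math.sqrt( s1 < s2 ? r12 : r22 );
        }
        return radius;
      }

      public static void main( String[] args )
      {
        System.out.println( distanceToSegment( 0, 1, -1, 0, 1, 0 ) ); // 1.0 (foot of perpendicular on the segment)
        System.out.println( distanceToSegment( 3, 0, -1, 0, 1, 0 ) ); // 2.0 (beyond the second endpoint)
      }
    }
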
diff --git a/brouter-expressions/pom.xml b/brouter-expressions/pom.xml
index a1ce01e..86d2ac9 100644
--- a/brouter-expressions/pom.xml
+++ b/brouter-expressions/pom.xml
@@ -17,6 +17,11 @@
       <artifactId>brouter-util</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.btools</groupId>
+      <artifactId>brouter-codec</artifactId>
+      <version>${project.version}</version>
+    </dependency>
     <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
diff --git a/brouter-expressions/src/main/java/btools/expressions/BExpressionContextWay.java b/brouter-expressions/src/main/java/btools/expressions/BExpressionContextWay.java
index 703172d..ae8e07e 100644
--- a/brouter-expressions/src/main/java/btools/expressions/BExpressionContextWay.java
+++ b/brouter-expressions/src/main/java/btools/expressions/BExpressionContextWay.java
@@ -6,9 +6,11 @@
package btools.expressions;
+import btools.codec.TagValueValidator;
-public final class BExpressionContextWay extends BExpressionContext
+
+public final class BExpressionContextWay extends BExpressionContext implements TagValueValidator
{
private static String[] buildInVariables =
{ "costfactor", "turncost", "uphillcostfactor", "downhillcostfactor", "initialcost", "nodeaccessgranted", "initialclassifier", "trafficsourcedensity", "istrafficbackbone" };
@@ -43,4 +45,15 @@ public final class BExpressionContextWay extends BExpressionContext
{
super( "way", hashSize, meta );
}
+
+ @Override
+ public boolean accessAllowed( byte[] description )
+ {
+ evaluate( false, description, null );
+ boolean ok = getCostfactor() < 10000.;
+ evaluate( true, description, null );
+ ok |= getCostfactor() < 10000.;
+ return ok;
+ }
+
}
diff --git a/brouter-map-creator/pom.xml b/brouter-map-creator/pom.xml
index 4ff1261..9be4c89 100644
--- a/brouter-map-creator/pom.xml
+++ b/brouter-map-creator/pom.xml
@@ -17,6 +17,11 @@
       <artifactId>brouter-util</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.btools</groupId>
+      <artifactId>brouter-codec</artifactId>
+      <version>${project.version}</version>
+    </dependency>
     <dependency>
       <groupId>org.btools</groupId>
       <artifactId>brouter-expressions</artifactId>
diff --git a/brouter-map-creator/src/main/java/btools/mapcreator/OsmNodeP.java b/brouter-map-creator/src/main/java/btools/mapcreator/OsmNodeP.java
index cdbf73f..41dfeb3 100644
--- a/brouter-map-creator/src/main/java/btools/mapcreator/OsmNodeP.java
+++ b/brouter-map-creator/src/main/java/btools/mapcreator/OsmNodeP.java
@@ -1,287 +1,428 @@
-/**
- * Container for an osm node (pre-pocessor version)
- *
- * @author ab
- */
-package btools.mapcreator;
-
-import java.io.IOException;
-
-import btools.util.ByteDataWriter;
-
-public class OsmNodeP extends OsmLinkP implements Comparable
-{
- public static final int SIGNLON_BITMASK = 0x80;
- public static final int SIGNLAT_BITMASK = 0x40;
- public static final int TRANSFERNODE_BITMASK = 0x20;
- public static final int WRITEDESC_BITMASK = 0x10;
- public static final int SKIPDETAILS_BITMASK = 0x08;
- public static final int NODEDESC_BITMASK = 0x04;
-
- /**
- * The latitude
- */
- public int ilat;
-
- /**
- * The longitude
- */
- public int ilon;
-
-
- /**
- * The elevation
- */
- public short selev;
-
- public final static int NO_BRIDGE_BIT = 1;
- public final static int NO_TUNNEL_BIT = 2;
- public final static int BORDER_BIT = 4;
- public final static int TRAFFIC_BIT = 8;
-
- public byte bits = 0;
-
- // interface OsmPos
- public int getILat()
- {
- return ilat;
- }
-
- public int getILon()
- {
- return ilon;
- }
-
- public short getSElev()
- {
- // if all bridge or all tunnel, elevation=no-data
- return ( bits & NO_BRIDGE_BIT ) == 0 || ( bits & NO_TUNNEL_BIT ) == 0 ? Short.MIN_VALUE : selev;
- }
-
- public double getElev()
- {
- return selev / 4.;
- }
-
-
- // populate and return the inherited link, if available,
- // else create a new one
- public OsmLinkP createLink( OsmNodeP source )
- {
- if ( sourceNode == null && targetNode == null )
- {
- // inherited instance is available, use this
- sourceNode = source;
- targetNode = this;
- source.addLink( this );
- return this;
- }
- OsmLinkP link = new OsmLinkP( source, this );
- addLink( link );
- source.addLink( link );
- return link;
- }
-
-
- // memory-squeezing-hack: OsmLinkP's "previous" also used as firstlink..
-
- public void addLink( OsmLinkP link )
- {
- link.setNext( previous, this );
- previous = link;
- }
-
- public OsmLinkP getFirstLink()
- {
- return sourceNode == null && targetNode == null ? previous : this;
- }
-
- public byte[] getNodeDecsription()
- {
- return null;
- }
-
- public void writeNodeData( ByteDataWriter os, byte[] abBuf ) throws IOException
- {
- int lonIdx = ilon/62500;
- int latIdx = ilat/62500;
-
- // buffer the body to first calc size
- ByteDataWriter os2 = new ByteDataWriter( abBuf );
- os2.writeShort( getSElev() );
-
- // hack: write node-desc as link tag (copy cycleway-bits)
- byte[] nodeDescription = getNodeDecsription();
-
- for( OsmLinkP link0 = getFirstLink(); link0 != null; link0 = link0.getNext( this ) )
- {
- int ilonref = ilon;
- int ilatref = ilat;
-
- OsmLinkP link = link0;
- OsmNodeP origin = this;
- int skipDetailBit = link0.descriptionBitmap == null ? SKIPDETAILS_BITMASK : 0;
-
- // first pass just to see if that link is consistent
- while( link != null )
- {
- OsmNodeP target = link.getTarget( origin );
- if ( !target.isTransferNode() )
- {
- break;
- }
- // next link is the one (of two), does does'nt point back
- for( link = target.getFirstLink(); link != null; link = link.getNext( target ) )
- {
- if ( link.getTarget( target ) != origin ) break;
- }
- origin = target;
- }
- if ( link == null ) continue; // dead end
-
- if ( skipDetailBit == 0)
- {
- link = link0;
- origin = this;
- }
- byte[] lastDescription = null;
- while( link != null )
- {
- if ( link.descriptionBitmap == null && skipDetailBit == 0 ) throw new IllegalArgumentException( "missing way description...");
-
- OsmNodeP target = link.getTarget( origin );
- int tranferbit = target.isTransferNode() ? TRANSFERNODE_BITMASK : 0;
- int nodedescbit = nodeDescription != null ? NODEDESC_BITMASK : 0;
-
- int writedescbit = 0;
- if ( skipDetailBit == 0 ) // check if description changed
- {
- int inverseBitByteIndex = 0;
- boolean inverseDirection = link.isReverse( origin );
- byte[] ab = link.descriptionBitmap;
- int abLen = ab.length;
- int lastLen = lastDescription == null ? 0 : lastDescription.length;
- boolean equalsCurrent = abLen == lastLen;
- if ( equalsCurrent )
- {
- for( int i=0; i id2 ) return 1;
- return 0;
- }
-}
+/**
+ * Container for an osm node (pre-processor version)
+ *
+ * @author ab
+ */
+package btools.mapcreator;
+
+import java.io.IOException;
+import java.util.ArrayList;
+
+import btools.codec.MicroCache;
+import btools.codec.MicroCache1;
+import btools.codec.MicroCache2;
+
+public class OsmNodeP extends OsmLinkP
+{
+ public static final int SIGNLON_BITMASK = 0x80;
+ public static final int SIGNLAT_BITMASK = 0x40;
+ public static final int TRANSFERNODE_BITMASK = 0x20;
+ public static final int WRITEDESC_BITMASK = 0x10;
+ public static final int SKIPDETAILS_BITMASK = 0x08;
+ public static final int NODEDESC_BITMASK = 0x04;
+
+ /**
+ * The latitude
+ */
+ public int ilat;
+
+ /**
+ * The longitude
+ */
+ public int ilon;
+
+ /**
+ * The elevation in steps of 0.25 m
+ */
+ public short selev;
+
+ public final static int NO_BRIDGE_BIT = 1;
+ public final static int NO_TUNNEL_BIT = 2;
+ public final static int BORDER_BIT = 4;
+ public final static int TRAFFIC_BIT = 8;
+ public final static int ANY_WAY_BIT = 16;
+ public final static int MULTI_WAY_BIT = 32;
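+ // the flags above are collected per node in the 'bits' field below;
+ // ANY_WAY/MULTI_WAY detect junction nodes, BORDER marks tile-border nodes,
+ // and the bridge/tunnel bits control whether an elevation is written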
+
+ public byte bits = 0;
+
+ // interface OsmPos
+ public int getILat()
+ {
+ return ilat;
+ }
+
+ public int getILon()
+ {
+ return ilon;
+ }
+
+ public short getSElev()
+ {
+ // if all bridge or all tunnel, elevation=no-data
+ return ( bits & NO_BRIDGE_BIT ) == 0 || ( bits & NO_TUNNEL_BIT ) == 0 ? Short.MIN_VALUE : selev;
+ }
+
+ public double getElev()
+ {
+ return selev / 4.;
+ }
+
+ // populate and return the inherited link, if available,
+ // else create a new one
+ public OsmLinkP createLink( OsmNodeP source )
+ {
+ if ( sourceNode == null && targetNode == null )
+ {
+ // inherited instance is available, use this
+ sourceNode = source;
+ targetNode = this;
+ source.addLink( this );
+ return this;
+ }
+ OsmLinkP link = new OsmLinkP( source, this );
+ addLink( link );
+ source.addLink( link );
+ return link;
+ }
+
+ // memory-squeezing hack: OsmLinkP's "previous" is also used as the first link
+
+ public void addLink( OsmLinkP link )
+ {
+ link.setNext( previous, this );
+ previous = link;
+ }
+
+ public OsmLinkP getFirstLink()
+ {
+ return sourceNode == null && targetNode == null ? previous : this;
+ }
+
+ public byte[] getNodeDecsription()
+ {
+ return null;
+ }
+
+ public void writeNodeData1( MicroCache1 mc ) throws IOException
+ {
+ mc.writeShort( getSElev() );
+
+ // hack: write node-desc as link tag (copy cycleway-bits)
+ byte[] nodeDescription = getNodeDecsription();
+
+ for ( OsmLinkP link0 = getFirstLink(); link0 != null; link0 = link0.getNext( this ) )
+ {
+ int ilonref = ilon;
+ int ilatref = ilat;
+
+ OsmLinkP link = link0;
+ OsmNodeP origin = this;
+ int skipDetailBit = link0.descriptionBitmap == null ? SKIPDETAILS_BITMASK : 0;
+
+ // first pass just to see if that link is consistent
+ while (link != null)
+ {
+ OsmNodeP target = link.getTarget( origin );
+ if ( !target.isTransferNode() )
+ {
+ break;
+ }
+ // next link is the one (of two) that doesn't point back
+ for ( link = target.getFirstLink(); link != null; link = link.getNext( target ) )
+ {
+ if ( link.getTarget( target ) != origin )
+ break;
+ }
+ origin = target;
+ }
+ if ( link == null )
+ continue; // dead end
+
+ if ( skipDetailBit == 0 )
+ {
+ link = link0;
+ origin = this;
+ }
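+ // second pass: encode the chain of transfer nodes up to the next
+ // network node (for skip-detail links only the final hop is written)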
+ byte[] lastDescription = null;
+ while (link != null)
+ {
+ if ( link.descriptionBitmap == null && skipDetailBit == 0 )
+ throw new IllegalArgumentException( "missing way description..." );
+
+ OsmNodeP target = link.getTarget( origin );
+ int tranferbit = target.isTransferNode() ? TRANSFERNODE_BITMASK : 0;
+ int nodedescbit = nodeDescription != null ? NODEDESC_BITMASK : 0;
+
+ int writedescbit = 0;
+ if ( skipDetailBit == 0 ) // check if description changed
+ {
+ int inverseBitByteIndex = 0;
+ boolean inverseDirection = link.isReverse( origin );
+ byte[] ab = link.descriptionBitmap;
+ int abLen = ab.length;
+ int lastLen = lastDescription == null ? 0 : lastDescription.length;
+ boolean equalsCurrent = abLen == lastLen;
+ if ( equalsCurrent )
+ {
+ for ( int i = 0; i < abLen; i++ )
+ {
+ byte b = ab[i];
+ if ( i == inverseBitByteIndex && inverseDirection )
+ b ^= 1;
+ if ( b != lastDescription[i] )
+ {
+ equalsCurrent = false;
+ break;
+ }
+ }
+ }
+ if ( !equalsCurrent )
+ {
+ writedescbit = WRITEDESC_BITMASK;
+ lastDescription = new byte[abLen];
+ System.arraycopy( ab, 0, lastDescription, 0, abLen );
+ if ( inverseDirection )
+ lastDescription[inverseBitByteIndex] ^= 1;
+ }
+
+ }
+
+ int bm = tranferbit | writedescbit | nodedescbit | skipDetailBit;
+ int dlon = target.ilon - ilonref;
+ int dlat = target.ilat - ilatref;
+ ilonref = target.ilon;
+ ilatref = target.ilat;
+ if ( dlon < 0 )
+ {
+ bm |= SIGNLON_BITMASK;
+ dlon = -dlon;
+ }
+ if ( dlat < 0 )
+ {
+ bm |= SIGNLAT_BITMASK;
+ dlat = -dlat;
+ }
+ mc.writeByte( bm );
+
+ mc.writeVarLengthUnsigned( dlon );
+ mc.writeVarLengthUnsigned( dlat );
+
+ if ( writedescbit != 0 )
+ {
+ // write the way description; the travel direction is coded into its first bit
+ mc.writeByte( lastDescription.length );
+ mc.write( lastDescription );
+ }
+ if ( nodedescbit != 0 )
+ {
+ mc.writeByte( nodeDescription.length );
+ mc.write( nodeDescription );
+ nodeDescription = null;
+ }
+
+ link.descriptionBitmap = null; // mark link as written
+
+ if ( tranferbit == 0 )
+ {
+ break;
+ }
+ mc.writeVarLengthSigned( target.getSElev() - getSElev() );
+ // next link is the one (of two) that doesn't point back
+ for ( link = target.getFirstLink(); link != null; link = link.getNext( target ) )
+ {
+ if ( link.getTarget( target ) != origin )
+ break;
+ }
+ if ( link == null )
+ throw new RuntimeException( "follow-up link not found for transfer-node!" );
+ origin = target;
+ }
+ }
+ }
+
+ public void writeNodeData( MicroCache mc ) throws IOException
+ {
+ boolean valid = true;
+ if ( mc instanceof MicroCache1 )
+ {
+ writeNodeData1( (MicroCache1) mc );
+ }
+ else if ( mc instanceof MicroCache2 )
+ {
+ valid = writeNodeData2( (MicroCache2) mc );
+ }
+ else
+ throw new IllegalArgumentException( "unknown cache version: " + mc.getClass() );
+ if ( valid )
+ {
+ mc.finishNode( getIdFromPos() );
+ }
+ else
+ {
+ mc.discardNode();
+ }
+ }
+
+ public boolean writeNodeData2( MicroCache2 mc ) throws IOException
+ {
+ boolean hasLinks = false;
+ mc.writeShort( getSElev() );
+ mc.writeVarBytes( getNodeDecsription() );
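+ // per-node format: elevation, node tags, then one size-prefixed record
+ // per link with the delta-coded target position, the way tags and
+ // (for forward links) the transfer-node geometry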
+
+ // buffer internal reverse links
+ ArrayList<OsmNodeP> internalReverse = new ArrayList<OsmNodeP>();
+
+ for ( OsmLinkP link0 = getFirstLink(); link0 != null; link0 = link0.getNext( this ) )
+ {
+ OsmLinkP link = link0;
+ OsmNodeP origin = this;
+ OsmNodeP target = null;
+
+ // first pass just to see if that link is consistent
+ while (link != null)
+ {
+ target = link.getTarget( origin );
+ if ( !target.isTransferNode() )
+ {
+ break;
+ }
+ // next link is the one (of two) that doesn't point back
+ for ( link = target.getFirstLink(); link != null; link = link.getNext( target ) )
+ {
+ if ( link.getTarget( target ) != origin )
+ break;
+ }
+
+ if ( link != null && link.descriptionBitmap != link0.descriptionBitmap )
+ {
+ throw new IllegalArgumentException( "assertion failed: description change along transfer nodes" );
+ }
+
+ origin = target;
+ }
+ if ( link == null )
+ continue; // dead end
+ if ( target == this )
+ continue; // self-ref
+ hasLinks = true;
+
+ // internal reverse links later
+ boolean isReverse = link0.isReverse( this );
+ if ( isReverse )
+ {
+ if ( mc.isInternal( target.ilon, target.ilat ) )
+ {
+ internalReverse.add( target );
+ continue;
+ }
+ }
+
+ // write link data
+ int sizeoffset = mc.writeSizePlaceHolder();
+ mc.writeVarLengthSigned( target.ilon - ilon );
+ mc.writeVarLengthSigned( target.ilat - ilat );
+ mc.writeModeAndDesc( isReverse, link0.descriptionBitmap );
+ if ( !isReverse ) // write geometry for forward links only
+ {
+ link = link0;
+ origin = this;
+ while (link != null)
+ {
+ OsmNodeP tranferNode = link.getTarget( origin );
+ if ( !tranferNode.isTransferNode() )
+ {
+ break;
+ }
+ mc.writeVarLengthSigned( tranferNode.ilon - origin.ilon );
+ mc.writeVarLengthSigned( tranferNode.ilat - origin.ilat );
+ mc.writeVarLengthSigned( tranferNode.getSElev() - origin.getSElev() );
+
+ // next link is the one (of two) that doesn't point back
+ for ( link = tranferNode.getFirstLink(); link != null; link = link.getNext( tranferNode ) )
+ {
+ if ( link.getTarget( tranferNode ) != origin )
+ break;
+ }
+ if ( link == null )
+ throw new RuntimeException( "follow-up link not found for transfer-node!" );
+ origin = tranferNode;
+ }
+ }
+ mc.injectSize( sizeoffset );
+ }
+
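+ // finally write the buffered internal reverse links, largest shrinked id
+ // first, as bare records without way tags or geometry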
+ while (internalReverse.size() > 0)
+ {
+ int nextIdx = 0;
+ if ( internalReverse.size() > 1 )
+ {
+ int max32 = Integer.MIN_VALUE;
+ for ( int i = 0; i < internalReverse.size(); i++ )
+ {
+ int id32 = mc.shrinkId( internalReverse.get( i ).getIdFromPos() );
+ if ( id32 > max32 )
+ {
+ max32 = id32;
+ nextIdx = i;
+ }
+ }
+ }
+ OsmNodeP target = internalReverse.remove( nextIdx );
+ int sizeoffset = mc.writeSizePlaceHolder();
+ mc.writeVarLengthSigned( target.ilon - ilon );
+ mc.writeVarLengthSigned( target.ilat - ilat );
+ mc.writeModeAndDesc( true, null );
+ mc.injectSize( sizeoffset );
+ }
+ return hasLinks;
+ }
+
+ public String toString2()
+ {
+ return ( ilon - 180000000 ) + "_" + ( ilat - 90000000 ) + "_" + ( selev / 4 );
+ }
+
+ public long getIdFromPos()
+ {
+ return ( (long) ilon ) << 32 | ilat;
+ }
+
+ public boolean isBorderNode()
+ {
+ return ( bits & BORDER_BIT ) != 0;
+ }
+
+ public boolean hasTraffic()
+ {
+ return ( bits & TRAFFIC_BIT ) != 0;
+ }
+
+ /**
+ * Does not really count the ways, just detects whether there is more than one
+ */
+ public void incWayCount()
+ {
+ if ( ( bits & ANY_WAY_BIT ) != 0 )
+ {
+ bits |= MULTI_WAY_BIT;
+ }
+ bits |= ANY_WAY_BIT;
+ }
+
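+ // a transfer node is a pure geometry node: not on a tile border,
+ // belonging to only one way and connected to exactly two links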
+ public boolean isTransferNode()
+ {
+ return ( bits & BORDER_BIT ) == 0 && ( bits & MULTI_WAY_BIT ) == 0 && _linkCnt() == 2;
+ }
+
+ private int _linkCnt()
+ {
+ int cnt = 0;
+
+ for ( OsmLinkP link = getFirstLink(); link != null; link = link.getNext( this ) )
+ {
+ cnt++;
+ }
+ return cnt;
+ }
+
+}
diff --git a/brouter-map-creator/src/main/java/btools/mapcreator/WayLinker.java b/brouter-map-creator/src/main/java/btools/mapcreator/WayLinker.java
index a1d3f02..3b5bddd 100644
--- a/brouter-map-creator/src/main/java/btools/mapcreator/WayLinker.java
+++ b/brouter-map-creator/src/main/java/btools/mapcreator/WayLinker.java
@@ -1,374 +1,432 @@
-package btools.mapcreator;
-
-import java.io.BufferedInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.EOFException;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.RandomAccessFile;
-import java.util.Collections;
-import java.util.List;
-
-import btools.expressions.BExpressionContextNode;
-import btools.expressions.BExpressionContextWay;
-import btools.expressions.BExpressionMetaData;
-import btools.util.ByteArrayUnifier;
-import btools.util.ByteDataWriter;
-import btools.util.CompactLongMap;
-import btools.util.CompactLongSet;
-import btools.util.Crc32;
-import btools.util.FrozenLongMap;
-import btools.util.FrozenLongSet;
-import btools.util.LazyArrayOfLists;
-
-/**
- * WayLinker finally puts the pieces together
- * to create the rd5 files. For each 5*5 tile,
- * the corresponding nodefile and wayfile is read,
- * plus the (global) bordernodes file, and an rd5
- * is written
- *
- * @author ab
- */
-public class WayLinker extends MapCreatorBase
-{
- private File nodeTilesIn;
- private File trafficTilesIn;
- private File dataTilesOut;
- private File borderFileIn;
-
- private String dataTilesSuffix;
-
- private boolean readingBorder;
-
- private CompactLongMap<OsmNodeP> nodesMap;
- private OsmTrafficMap trafficMap;
- private List<OsmNodeP> nodesList;
- private CompactLongSet borderSet;
- private short lookupVersion;
- private short lookupMinorVersion;
-
- private long creationTimeStamp;
-
- private BExpressionContextWay expctxWay;
- private BExpressionContextNode expctxNode;
-
- private ByteArrayUnifier abUnifier;
-
- private int minLon;
- private int minLat;
-
- private void reset()
- {
- minLon = -1;
- minLat = -1;
- nodesMap = new CompactLongMap<OsmNodeP>();
- borderSet = new CompactLongSet();
- }
-
- public static void main(String[] args) throws Exception
- {
- System.out.println("*** WayLinker: Format a region of an OSM map for routing");
- if (args.length != 7)
- {
- System.out.println("usage: java WayLinker <node-tiles-in> <way-tiles-in> <border-file> <lookup-file> <profile-file> <data-tiles-out> <data-tiles-suffix>");
- return;
- }
- new WayLinker().process( new File( args[0] ), new File( args[1] ), new File( args[2] ), new File( args[3] ), new File( args[4] ), new File( args[5] ), args[6] );
- }
-
- public void process( File nodeTilesIn, File wayTilesIn, File borderFileIn, File lookupFile, File profileFile, File dataTilesOut, String dataTilesSuffix ) throws Exception
- {
- this.nodeTilesIn = nodeTilesIn;
- this.trafficTilesIn = new File( "traffic" );
- this.dataTilesOut = dataTilesOut;
- this.borderFileIn = borderFileIn;
- this.dataTilesSuffix = dataTilesSuffix;
-
- BExpressionMetaData meta = new BExpressionMetaData();
-
- // read lookup + profile for lookup-version + access-filter
- expctxWay = new BExpressionContextWay( meta);
- expctxNode = new BExpressionContextNode( meta);
- meta.readMetaData( lookupFile );
-
- lookupVersion = meta.lookupVersion;
- lookupMinorVersion = meta.lookupMinorVersion;
-
- expctxWay.parseFile( profileFile, "global" );
- expctxNode.parseFile( profileFile, "global" );
-
- creationTimeStamp = System.currentTimeMillis();
-
- abUnifier = new ByteArrayUnifier( 16384, false );
-
- // then process all segments
- new WayIterator( this, true ).processDir( wayTilesIn, ".wt5" );
- }
-
- @Override
- public void wayFileStart( File wayfile ) throws Exception
- {
- // process corresponding node-file, if any
- File nodeFile = fileFromTemplate( wayfile, nodeTilesIn, "u5d" );
- if ( nodeFile.exists() )
- {
- reset();
-
- // read the border file
- readingBorder = true;
- new NodeIterator( this, false ).processFile( borderFileIn );
- borderSet = new FrozenLongSet( borderSet );
-
- // read this tile's nodes
- readingBorder = false;
- new NodeIterator( this, true ).processFile( nodeFile );
-
- // freeze the nodes-map
- FrozenLongMap<OsmNodeP> nodesMapFrozen = new FrozenLongMap<OsmNodeP>( nodesMap );
- nodesMap = nodesMapFrozen;
- nodesList = nodesMapFrozen.getValueList();
- }
-
- // read a traffic-file, if any
- File trafficFile = fileFromTemplate( wayfile, trafficTilesIn, "trf" );
- if ( trafficFile.exists() )
- {
- trafficMap = new OsmTrafficMap();
- trafficMap.load( trafficFile, minLon, minLat, minLon + 5000000, minLat + 5000000, false );
- }
- }
-
- @Override
- public void nextNode( NodeData data ) throws Exception
- {
- OsmNodeP n = data.description == null ? new OsmNodeP() : new OsmNodePT(data.description);
- n.ilon = data.ilon;
- n.ilat = data.ilat;
- n.selev = data.selev;
-
- if ( readingBorder || (!borderSet.contains( data.nid )) )
- {
- nodesMap.fastPut( data.nid, n );
- }
-
- if ( readingBorder )
- {
- n.bits |= OsmNodeP.BORDER_BIT;
- borderSet.fastAdd( data.nid );
- return;
- }
-
- // remember the segment coords
- int min_lon = (n.ilon / 5000000 ) * 5000000;
- int min_lat = (n.ilat / 5000000 ) * 5000000;
- if ( minLon == -1 ) minLon = min_lon;
- if ( minLat == -1 ) minLat = min_lat;
- if ( minLat != min_lat || minLon != min_lon )
- throw new IllegalArgumentException( "inconsistent node: " + n.ilon + " " + n.ilat );
- }
-
- @Override
- public void nextWay( WayData way ) throws Exception
- {
- byte[] description = abUnifier.unify( way.description );
- int lastTraffic = 0;
-
- // filter according to profile
- expctxWay.evaluate( false, description, null );
- boolean ok = expctxWay.getCostfactor() < 10000.;
- expctxWay.evaluate( true, description, null );
- ok |= expctxWay.getCostfactor() < 10000.;
- if ( !ok ) return;
-
- byte wayBits = 0;
- expctxWay.decode( description );
- if ( !expctxWay.getBooleanLookupValue( "bridge" ) ) wayBits |= OsmNodeP.NO_BRIDGE_BIT;
- if ( !expctxWay.getBooleanLookupValue( "tunnel" ) ) wayBits |= OsmNodeP.NO_TUNNEL_BIT;
-
- OsmNodeP n1 = null;
- OsmNodeP n2 = null;
- for (int i=0; i seglists = new LazyArrayOfLists(nLonSegs*nLatSegs);
- for( OsmNodeP n : nodesList )
- {
- if ( n == null || n.getFirstLink() == null || n.isTransferNode() ) continue;
- if ( n.ilon < minLon || n.ilon >= maxLon
- || n.ilat < minLat || n.ilat >= maxLat ) continue;
- int lonIdx = (n.ilon-minLon)/1000000;
- int latIdx = (n.ilat-minLat)/1000000;
-
- int tileIndex = lonIdx * nLatSegs + latIdx;
- seglists.getList(tileIndex).add( n );
- }
- nodesList = null;
- seglists.trimAll();
-
- // open the output file
- File outfile = fileFromTemplate( wayfile, dataTilesOut, dataTilesSuffix );
- DataOutputStream os = createOutStream( outfile );
-
- long[] fileIndex = new long[25];
- int[] fileHeaderCrcs = new int[25];
-
- // write 5*5 index dummy
- for( int i55=0; i55<25; i55++)
- {
- os.writeLong( 0 );
- }
- long filepos = 200L;
-
- // sort further in 1/80-degree squares
- for( int lonIdx = 0; lonIdx < nLonSegs; lonIdx++ )
- {
- for( int latIdx = 0; latIdx < nLatSegs; latIdx++ )
- {
- int tileIndex = lonIdx * nLatSegs + latIdx;
- if ( seglists.getSize(tileIndex) > 0 )
- {
- List nlist = seglists.getList(tileIndex);
-
- LazyArrayOfLists subs = new LazyArrayOfLists(6400);
- byte[][] subByteArrays = new byte[6400][];
- for( int ni=0; ni subList = subs.getList(si);
- if ( subList.size() > 0 )
- {
- Collections.sort( subList );
-
- ByteDataWriter dos = new ByteDataWriter( abBuf2 );
-
- dos.writeInt( subList.size() );
- for( int ni=0; ni nodesMap;
+ private OsmTrafficMap trafficMap;
+ private List<OsmNodeP> nodesList;
+ private CompactLongSet borderSet;
+ private short lookupVersion;
+ private short lookupMinorVersion;
+
+ private long creationTimeStamp;
+
+ private BExpressionContextWay expctxWay;
+
+ private ByteArrayUnifier abUnifier;
+
+ private int minLon;
+ private int minLat;
+
+ private int microCacheEncoding = 2;
+ private int divisor = microCacheEncoding == 2 ? 32 : 80;
+ private int cellsize = 1000000 / divisor;
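+ // encoding 2 (MicroCache2) packs each 1-degree square into a 32x32 grid
+ // of micro-caches; the older encodings use an 80x80 grid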
+
+ private void reset()
+ {
+ minLon = -1;
+ minLat = -1;
+ nodesMap = new CompactLongMap<OsmNodeP>();
+ borderSet = new CompactLongSet();
+ }
+
+ public static void main( String[] args ) throws Exception
+ {
+ System.out.println( "*** WayLinker: Format a region of an OSM map for routing" );
+ if ( args.length != 7 )
+ {
+ System.out
+ .println( "usage: java WayLinker <node-tiles-in> <way-tiles-in> <border-file> <lookup-file> <profile-file> <data-tiles-out> <data-tiles-suffix>" );
+ return;
+ }
+ new WayLinker().process( new File( args[0] ), new File( args[1] ), new File( args[2] ), new File( args[3] ), new File( args[4] ), new File(
+ args[5] ), args[6] );
+ }
+
+ public void process( File nodeTilesIn, File wayTilesIn, File borderFileIn, File lookupFile, File profileFile, File dataTilesOut,
+ String dataTilesSuffix ) throws Exception
+ {
+ this.nodeTilesIn = nodeTilesIn;
+ this.trafficTilesIn = new File( "traffic" );
+ this.dataTilesOut = dataTilesOut;
+ this.borderFileIn = borderFileIn;
+ this.dataTilesSuffix = dataTilesSuffix;
+
+ BExpressionMetaData meta = new BExpressionMetaData();
+
+ // read lookup + profile for lookup-version + access-filter
+ expctxWay = new BExpressionContextWay( meta );
+ meta.readMetaData( lookupFile );
+
+ lookupVersion = meta.lookupVersion;
+ lookupMinorVersion = meta.lookupMinorVersion;
+
+ expctxWay.parseFile( profileFile, "global" );
+
+ creationTimeStamp = System.currentTimeMillis();
+
+ abUnifier = new ByteArrayUnifier( 16384, false );
+
+ // then process all segments
+ new WayIterator( this, true ).processDir( wayTilesIn, ".wt5" );
+ }
+
+ @Override
+ public void wayFileStart( File wayfile ) throws Exception
+ {
+ // process corresponding node-file, if any
+ File nodeFile = fileFromTemplate( wayfile, nodeTilesIn, "u5d" );
+ if ( nodeFile.exists() )
+ {
+ reset();
+
+ // read the border file
+ readingBorder = true;
+ new NodeIterator( this, false ).processFile( borderFileIn );
+ borderSet = new FrozenLongSet( borderSet );
+
+ // read this tile's nodes
+ readingBorder = false;
+ new NodeIterator( this, true ).processFile( nodeFile );
+
+ // freeze the nodes-map
+ FrozenLongMap<OsmNodeP> nodesMapFrozen = new FrozenLongMap<OsmNodeP>( nodesMap );
+ nodesMap = nodesMapFrozen;
+ nodesList = nodesMapFrozen.getValueList();
+ }
+
+ // read a traffic-file, if any
+ File trafficFile = fileFromTemplate( wayfile, trafficTilesIn, "trf" );
+ if ( trafficFile.exists() )
+ {
+ trafficMap = new OsmTrafficMap();
+ trafficMap.load( trafficFile, minLon, minLat, minLon + 5000000, minLat + 5000000, false );
+ }
+ }
+
+ @Override
+ public void nextNode( NodeData data ) throws Exception
+ {
+ OsmNodeP n = data.description == null ? new OsmNodeP() : new OsmNodePT( data.description );
+ n.ilon = data.ilon;
+ n.ilat = data.ilat;
+ n.selev = data.selev;
+
+ if ( readingBorder || ( !borderSet.contains( data.nid ) ) )
+ {
+ nodesMap.fastPut( data.nid, n );
+ }
+
+ if ( readingBorder )
+ {
+ n.bits |= OsmNodeP.BORDER_BIT;
+ borderSet.fastAdd( data.nid );
+ return;
+ }
+
+ // remember the segment coords
+ int min_lon = ( n.ilon / 5000000 ) * 5000000;
+ int min_lat = ( n.ilat / 5000000 ) * 5000000;
+ if ( minLon == -1 )
+ minLon = min_lon;
+ if ( minLat == -1 )
+ minLat = min_lat;
+ if ( minLat != min_lat || minLon != min_lon )
+ throw new IllegalArgumentException( "inconsistent node: " + n.ilon + " " + n.ilat );
+ }
+
+ @Override
+ public void nextWay( WayData way ) throws Exception
+ {
+ byte[] description = abUnifier.unify( way.description );
+ int lastTraffic = 0;
+
+ // filter according to profile
+ expctxWay.evaluate( false, description, null );
+ boolean ok = expctxWay.getCostfactor() < 10000.;
+ expctxWay.evaluate( true, description, null );
+ ok |= expctxWay.getCostfactor() < 10000.;
+ if ( !ok )
+ return;
+
+ byte wayBits = 0;
+ expctxWay.decode( description );
+ if ( !expctxWay.getBooleanLookupValue( "bridge" ) )
+ wayBits |= OsmNodeP.NO_BRIDGE_BIT;
+ if ( !expctxWay.getBooleanLookupValue( "tunnel" ) )
+ wayBits |= OsmNodeP.NO_TUNNEL_BIT;
+
+ OsmNodeP n1 = null;
+ OsmNodeP n2 = null;
+ for ( int i = 0; i < way.nodes.size(); i++ )
+ {
+ long nid = way.nodes.get( i );
+ n1 = n2;
+ n2 = nodesMap.get( nid );
+
+ if ( n1 != null && n2 != null && n1 != n2 )
+ {
+ OsmLinkP link = n2.createLink( n1 );
+
+ int traffic = trafficMap == null ? 0 : trafficMap.getTrafficClass( n1.getIdFromPos(), n2.getIdFromPos() );
+ if ( traffic != lastTraffic )
+ {
+ expctxWay.decode( description );
+ expctxWay.addLookupValue( "estimated_traffic_class", traffic == 0 ? 0 : traffic + 1 );
+ description = abUnifier.unify( expctxWay.encode() );
+ lastTraffic = traffic;
+ }
+ link.descriptionBitmap = description;
+
+ if ( n1.ilon / cellsize != n2.ilon / cellsize || n1.ilat / cellsize != n2.ilat / cellsize )
+ {
+ n2.incWayCount(); // force first node after cell-change to be a
+ // network node
+ }
+ }
+ if ( n2 != null )
+ {
+ n2.bits |= wayBits;
+ n2.incWayCount();
+ }
+ }
+ }
+
+ @Override
+ public void wayFileEnd( File wayfile ) throws Exception
+ {
+ int ncaches = divisor * divisor;
+ int indexsize = ncaches * 4;
+
+ nodesMap = null;
+ borderSet = null;
+ trafficMap = null;
+
+ byte[] abBuf1 = new byte[10 * 1024 * 1024];
+ byte[] abBuf2 = new byte[10 * 1024 * 1024];
+
+ int maxLon = minLon + 5000000;
+ int maxLat = minLat + 5000000;
+
+ // write segment data to individual files
+ {
+ int nLonSegs = ( maxLon - minLon ) / 1000000;
+ int nLatSegs = ( maxLat - minLat ) / 1000000;
+
+ // sort the nodes into segments
+ LazyArrayOfLists<OsmNodeP> seglists = new LazyArrayOfLists<OsmNodeP>( nLonSegs * nLatSegs );
+ for ( OsmNodeP n : nodesList )
+ {
+ if ( n == null || n.getFirstLink() == null || n.isTransferNode() )
+ continue;
+ if ( n.ilon < minLon || n.ilon >= maxLon || n.ilat < minLat || n.ilat >= maxLat )
+ continue;
+ int lonIdx = ( n.ilon - minLon ) / 1000000;
+ int latIdx = ( n.ilat - minLat ) / 1000000;
+
+ int tileIndex = lonIdx * nLatSegs + latIdx;
+ seglists.getList( tileIndex ).add( n );
+ }
+ nodesList = null;
+ seglists.trimAll();
+
+ // open the output file
+ File outfile = fileFromTemplate( wayfile, dataTilesOut, dataTilesSuffix );
+ DataOutputStream os = createOutStream( outfile );
+
+ long[] fileIndex = new long[25];
+ int[] fileHeaderCrcs = new int[25];
+
+ // write 5*5 index dummy
+ for ( int i55 = 0; i55 < 25; i55++ )
+ {
+ os.writeLong( 0 );
+ }
+ long filepos = 200L;
+
+ // sort further in 1/divisor-degree squares
+ for ( int lonIdx = 0; lonIdx < nLonSegs; lonIdx++ )
+ {
+ for ( int latIdx = 0; latIdx < nLatSegs; latIdx++ )
+ {
+ int tileIndex = lonIdx * nLatSegs + latIdx;
+ if ( seglists.getSize( tileIndex ) > 0 )
+ {
+ List<OsmNodeP> nlist = seglists.getList( tileIndex );
+
+ LazyArrayOfLists<OsmNodeP> subs = new LazyArrayOfLists<OsmNodeP>( ncaches );
+ byte[][] subByteArrays = new byte[ncaches][];
+ for ( int ni = 0; ni < nlist.size(); ni++ )
+ {
+ OsmNodeP n = nlist.get( ni );
+ int subLonIdx = ( n.ilon - minLon ) / cellsize - divisor * lonIdx;
+ int subLatIdx = ( n.ilat - minLat ) / cellsize - divisor * latIdx;
+ int si = subLatIdx * divisor + subLonIdx;
+ subs.getList( si ).add( n );
+ }
+ subs.trimAll();
+ int[] posIdx = new int[ncaches];
+ int pos = indexsize;
+
+ for ( int si = 0; si < ncaches; si++ )
+ {
+ List<OsmNodeP> subList = subs.getList( si );
+ int size = subList.size();
+ if ( size > 0 )
+ {
+ OsmNodeP n0 = subList.get( 0 );
+ int lonIdxDiv = n0.ilon / cellsize;
+ int latIdxDiv = n0.ilat / cellsize;
+ MicroCache mc = microCacheEncoding == 0 ? new MicroCache1( size, abBuf2, lonIdxDiv, latIdxDiv ) : new MicroCache2( size, abBuf2,
+ lonIdxDiv, latIdxDiv, divisor );
+
+ // sort via treemap
+ TreeMap<Integer, OsmNodeP> sortedList = new TreeMap<Integer, OsmNodeP>();
+ for ( OsmNodeP n : subList )
+ {
+ long longId = n.getIdFromPos();
+ int shrinkid = mc.shrinkId( longId );
+ if ( mc.expandId( shrinkid ) != longId )
+ {
+ throw new IllegalArgumentException( "inconstistent shrinking: " + longId );
+ }
+ sortedList.put( Integer.valueOf( shrinkid ), n );
+ }
+
+ for ( OsmNodeP n : sortedList.values() )
+ {
+ n.writeNodeData( mc );
+ }
+ if ( mc.getSize() > 0 )
+ {
+ byte[] subBytes;
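+ // encode, then decode again and compare; on a mismatch enable
+ // MicroCache.debug and retry to get a detailed diff before giving up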
+ for ( ;; )
+ {
+ int len = mc.encodeMicroCache( abBuf1 );
+ subBytes = new byte[len];
+ System.arraycopy( abBuf1, 0, subBytes, 0, len );
+
+ // cross-check the encoding: re-instantiate the cache
+ MicroCache mc2 = microCacheEncoding == 0 ? new MicroCache1( subBytes, lonIdxDiv, latIdxDiv ) : new MicroCache2( new DataBuffers(
+ subBytes ), lonIdxDiv, latIdxDiv, divisor, null, null );
+ // ..and check if still the same
+ String diffMessage = mc.compareWith( mc2 );
+ if ( diffMessage != null )
+ {
+ if ( MicroCache.debug )
+ throw new RuntimeException( "encoding crosscheck failed: " + diffMessage );
+ else
+ MicroCache.debug = true;
+ }
+ else
+ break;
+ }
+ pos += subBytes.length + 4; // reserve 4 bytes for crc
+ subByteArrays[si] = subBytes;
+ }
+ }
+ posIdx[si] = pos;
+ }
+
+ byte[] abSubIndex = compileSubFileIndex( posIdx );
+ fileHeaderCrcs[tileIndex] = Crc32.crc( abSubIndex, 0, abSubIndex.length );
+ os.write( abSubIndex, 0, abSubIndex.length );
+ for ( int si = 0; si < ncaches; si++ )
+ {
+ byte[] ab = subByteArrays[si];
+ if ( ab != null )
+ {
+ os.write( ab );
+ os.writeInt( Crc32.crc( ab, 0, ab.length ) ^ microCacheEncoding );
+ }
+ }
+ filepos += pos;
+ }
+ fileIndex[tileIndex] = filepos;
+ }
+ }
+
+ byte[] abFileIndex = compileFileIndex( fileIndex, lookupVersion, lookupMinorVersion );
+
+ // write extra data: timestamp + index-checksums
+ os.writeLong( creationTimeStamp );
+ os.writeInt( Crc32.crc( abFileIndex, 0, abFileIndex.length ) ^ microCacheEncoding );
+ for ( int i55 = 0; i55 < 25; i55++ )
+ {
+ os.writeInt( fileHeaderCrcs[i55] );
+ }
+
+ os.close();
+
+ // re-open random-access to write file-index
+ RandomAccessFile ra = new RandomAccessFile( outfile, "rw" );
+ ra.write( abFileIndex, 0, abFileIndex.length );
+ ra.close();
+ }
+ System.out.println( "**** codec stats: *******\n" + StatCoderContext.getBitReport() );
+ }
+
+ private byte[] compileFileIndex( long[] fileIndex, short lookupVersion, short lookupMinorVersion ) throws Exception
+ {
+ ByteArrayOutputStream bos = new ByteArrayOutputStream();
+ DataOutputStream dos = new DataOutputStream( bos );
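+ // the top 16 bits of every index entry carry the lookup version
+ // (entry 1 carries the minor version instead)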
+ for ( int i55 = 0; i55 < 25; i55++ )
+ {
+ long versionPrefix = i55 == 1 ? lookupMinorVersion : lookupVersion;
+ versionPrefix <<= 48;
+ dos.writeLong( fileIndex[i55] | versionPrefix );
+ }
+ dos.close();
+ return bos.toByteArray();
+ }
+
+ private byte[] compileSubFileIndex( int[] posIdx ) throws Exception
+ {
+ ByteArrayOutputStream bos = new ByteArrayOutputStream();
+ DataOutputStream dos = new DataOutputStream( bos );
+ for ( int si = 0; si < posIdx.length; si++ )
+ {
+ dos.writeInt( posIdx[si] );
+ }
+ dos.close();
+ return bos.toByteArray();
+ }
+}
diff --git a/brouter-mapaccess/pom.xml b/brouter-mapaccess/pom.xml
index 4b7405c..0ff9336 100644
--- a/brouter-mapaccess/pom.xml
+++ b/brouter-mapaccess/pom.xml
@@ -17,5 +17,15 @@
     <artifactId>brouter-util</artifactId>
     <version>${project.version}</version>
   </dependency>
+  <dependency>
+    <groupId>org.btools</groupId>
+    <artifactId>brouter-codec</artifactId>
+    <version>${project.version}</version>
+  </dependency>
+  <dependency>
+    <groupId>org.btools</groupId>
+    <artifactId>brouter-expressions</artifactId>
+    <version>${project.version}</version>
+  </dependency>
diff --git a/brouter-mapaccess/src/main/java/btools/mapaccess/MicroCache.java b/brouter-mapaccess/src/main/java/btools/mapaccess/MicroCache.java
deleted file mode 100644
index b48f514..0000000
--- a/brouter-mapaccess/src/main/java/btools/mapaccess/MicroCache.java
+++ /dev/null
@@ -1,271 +0,0 @@
-/**
- * cache for a single square
- *
- * @author ab
- */
-package btools.mapaccess;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import btools.util.ByteDataReader;
-import btools.util.Crc32;
-
-final class MicroCache extends ByteDataReader
-{
- private int[] faid;
- private int[] fapos;
- private int size = 0;
- private int delcount = 0;
- private int delbytes = 0;
- private int p2size; // next power of 2 of size
-
- // the object parsing position and length
- private int aboffsetEnd;
-
- private int lonIdxBase;
- private int latIdxBase;
-
- // cache control: a virgin cache can be
- // put to ghost state for later recovery
- boolean virgin = true;
- boolean ghost = false;
-
- public MicroCache( OsmFile segfile, int lonIdx80, int latIdx80, byte[] iobuffer ) throws Exception
- {
- super( null );
- int lonDegree = lonIdx80/80;
- int latDegree = latIdx80/80;
-
- lonIdxBase = (lonIdx80/5)*62500 + 31250;
- latIdxBase = (latIdx80/5)*62500 + 31250;
-
- int subIdx = (latIdx80-80*latDegree)*80 + (lonIdx80-80*lonDegree);
-
- {
- ab = iobuffer;
- int asize = segfile.getDataInputForSubIdx(subIdx, ab);
-
- if ( asize == 0 )
- {
- ab = null;
- return;
- }
- if ( asize > iobuffer.length )
- {
- ab = new byte[asize];
- asize = segfile.getDataInputForSubIdx(subIdx, ab);
- }
- aboffset = 0;
- size = readInt();
-
- // get net size
- int nbytes = 0;
- for(int i = 0; i size ) p2size >>= 1;
-
- for(int i = 0; i 0 )
- {
- int nn = n + offset;
- if ( nn < size && a[nn] <= id )
- {
- n = nn;
- }
- offset >>= 1;
- }
- if ( a[n] == id )
- {
- if ( ( fapos[n] & 0x80000000 ) == 0 )
- {
- aboffset = fapos[n];
- int ablength = ( n+1 < size ? fapos[n+1] & 0x7fffffff : ab.length ) - aboffset;
- aboffsetEnd = aboffset + ablength;
- fapos[n] |= 0x80000000; // mark deleted
- delbytes+= ablength;
- delcount++;
- return true;
- }
- else
- {
- throw new RuntimeException( "MicroCache: node already consumed: id=" + id );
- }
- }
- return false;
- }
-
- /**
- * Fill a hollow node with it's body data
- */
- public void fillNode( OsmNode node, OsmNodesMap nodesMap, DistanceChecker dc, boolean doCollect )
- {
- long id = node.getIdFromPos();
- if ( getAndClear( id ) )
- {
- node.parseNodeBody( this, nodesMap, dc );
- }
-
- if ( doCollect && delcount > size / 2 ) // garbage collection
- {
- collect();
- }
- }
-
- void collect()
- {
- if ( delcount > 0 )
- {
- virgin = false;
-
- int nsize = size - delcount;
- if ( nsize == 0 )
- {
- faid = null;
- fapos = null;
- }
- else
- {
- int[] nfaid = new int[nsize];
- int[] nfapos = new int[nsize];
- int idx = 0;
-
- byte[] nab = new byte[ab.length - delbytes];
- int nab_off = 0;
- for( int i=0; i size ) p2size >>= 1;
- }
- }
-
- void unGhost()
- {
- ghost = false;
- delcount = 0;
- delbytes = 0;
- for( int i=0; i getPositions( OsmNodesMap nodesMap )
- {
- ArrayList<OsmNode> positions = new ArrayList<OsmNode>();
-
- for( int i=0; i> 16);
- int lat32 = latIdxBase + (short)((id32 & 0xffff) ^ 0x8000);
- return ((long)lon32)<<32 | lat32;
- }
-
- private int shrinkId( long id64 )
- {
- int lon32 = (int)(id64 >> 32);
- int lat32 = (int)(id64 & 0xffffffff);
- return (lon32 - lonIdxBase)<<16 | ( ( (lat32 - latIdxBase) & 0xffff) ^ 0x8000);
- }
-
- public boolean hasMoreData()
- {
- return aboffset < aboffsetEnd;
- }
-}
diff --git a/brouter-mapaccess/src/main/java/btools/mapaccess/NodesCache.java b/brouter-mapaccess/src/main/java/btools/mapaccess/NodesCache.java
index 034ff4c..b4ee2fa 100644
--- a/brouter-mapaccess/src/main/java/btools/mapaccess/NodesCache.java
+++ b/brouter-mapaccess/src/main/java/btools/mapaccess/NodesCache.java
@@ -1,291 +1,332 @@
-/**
- * Efficient cache of osm nodes
- *
- * @author ab
- */
-package btools.mapaccess;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-
-public final class NodesCache
-{
- private File segmentDir;
- private File secondarySegmentsDir = null;
-
- private OsmNodesMap nodesMap;
- private int lookupVersion;
- private int lookupMinorVersion;
- private boolean carMode;
- private boolean forceSecondaryData;
- private String currentFileName;
-
- private HashMap<String, PhysicalFile> fileCache;
- private byte[] iobuffer;
-
- private OsmFile[][] fileRows;
- private ArrayList<MicroCache> segmentList = new ArrayList<MicroCache>();
-
- public DistanceChecker distanceChecker;
-
- public boolean oom_carsubset_hint = false;
- public boolean first_file_access_failed = false;
- public String first_file_access_name;
-
- private long cacheSum = 0;
- private boolean garbageCollectionEnabled = false;
-
-
- public NodesCache( String segmentDir, OsmNodesMap nodesMap, int lookupVersion, int minorVersion, boolean carMode, boolean forceSecondaryData, NodesCache oldCache )
- {
- this.segmentDir = new File( segmentDir );
- this.nodesMap = nodesMap;
- this.lookupVersion = lookupVersion;
- this.lookupMinorVersion = minorVersion;
- this.carMode = carMode;
- this.forceSecondaryData = forceSecondaryData;
-
- first_file_access_failed = false;
- first_file_access_name = null;
-
- if ( !this.segmentDir.isDirectory() ) throw new RuntimeException( "segment directory " + segmentDir + " does not exist" );
-
- if ( oldCache != null )
- {
- fileCache = oldCache.fileCache;
- iobuffer = oldCache.iobuffer;
- oom_carsubset_hint = oldCache.oom_carsubset_hint;
- secondarySegmentsDir = oldCache.secondarySegmentsDir;
-
- // re-use old, virgin caches
- fileRows = oldCache.fileRows;
- for( OsmFile[] fileRow : fileRows )
- {
- if ( fileRow == null ) continue;
- for( OsmFile osmf : fileRow )
- {
- cacheSum += osmf.setGhostState();
- }
- }
- }
- else
- {
- fileCache = new HashMap<String, PhysicalFile>(4);
- fileRows = new OsmFile[180][];
- iobuffer = new byte[65636];
- secondarySegmentsDir = StorageConfigHelper.getSecondarySegmentDir( segmentDir );
- }
- }
-
- private File getFileFromSegmentDir( String filename )
- {
- if ( forceSecondaryData )
- {
- return new File( secondarySegmentsDir, filename );
- }
-
- File f = new File( segmentDir, filename );
- if ( secondarySegmentsDir != null && !f.exists() )
- {
- File f2 = new File( secondarySegmentsDir, filename );
- if ( f2.exists() ) return f2;
- }
- return f;
- }
-
- // if the cache sum exceeded a threshold,
- // clean all ghosts and enable garbage collection
- private void checkEnableCacheCleaning()
- {
- if ( cacheSum < 200000 || garbageCollectionEnabled ) return;
-
- for( int i=0; i getAllNodes()
- {
- List<OsmNode> all = new ArrayList<OsmNode>();
- for( MicroCache segment : segmentList )
- {
- List<OsmNode> positions = segment.getPositions( nodesMap );
- all.addAll( positions );
- }
- return all;
- }
-
-
- public void close()
- {
- for( PhysicalFile f: fileCache.values() )
- {
- try
- {
- if ( f != null ) f.ra.close();
- }
- catch( IOException ioe )
- {
- // ignore
- }
- }
- }
-}
+/**
+ * Efficient cache of osm nodes
+ *
+ * @author ab
+ */
+package btools.mapaccess;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+import btools.codec.DataBuffers;
+import btools.codec.MicroCache;
+import btools.codec.WaypointMatcher;
+import btools.expressions.BExpressionContextWay;
+
+public final class NodesCache
+{
+ private File segmentDir;
+ private File secondarySegmentsDir = null;
+
+ private OsmNodesMap nodesMap;
+ private BExpressionContextWay expCtxWay;
+ private int lookupVersion;
+ private int lookupMinorVersion;
+ private boolean carMode;
+ private boolean forceSecondaryData;
+ private String currentFileName;
+
+ private HashMap<String, PhysicalFile> fileCache;
+ private DataBuffers dataBuffers;
+
+ private OsmFile[][] fileRows;
+ private ArrayList<MicroCache> segmentList = new ArrayList<MicroCache>();
+
+ public DistanceChecker distanceChecker;
+
+ public WaypointMatcher waypointMatcher;
+
+ public boolean oom_carsubset_hint = false;
+ public boolean first_file_access_failed = false;
+ public String first_file_access_name;
+
+ private long cacheSum = 0;
+ private boolean garbageCollectionEnabled = false;
+
+ public NodesCache( String segmentDir, OsmNodesMap nodesMap, BExpressionContextWay ctxWay, boolean carMode, boolean forceSecondaryData,
+ NodesCache oldCache )
+ {
+ this.segmentDir = new File( segmentDir );
+ this.nodesMap = nodesMap;
+ this.expCtxWay = ctxWay;
+ this.lookupVersion = ctxWay.meta.lookupVersion;
+ this.lookupMinorVersion = ctxWay.meta.lookupMinorVersion;
+ this.carMode = carMode;
+ this.forceSecondaryData = forceSecondaryData;
+
+ first_file_access_failed = false;
+ first_file_access_name = null;
+
+ if ( !this.segmentDir.isDirectory() )
+ throw new RuntimeException( "segment directory " + segmentDir + " does not exist" );
+
+ if ( oldCache != null )
+ {
+ fileCache = oldCache.fileCache;
+ dataBuffers = oldCache.dataBuffers;
+ oom_carsubset_hint = oldCache.oom_carsubset_hint;
+ secondarySegmentsDir = oldCache.secondarySegmentsDir;
+
+ // re-use old, virgin caches
+ fileRows = oldCache.fileRows;
+ for ( OsmFile[] fileRow : fileRows )
+ {
+ if ( fileRow == null )
+ continue;
+ for ( OsmFile osmf : fileRow )
+ {
+ cacheSum += osmf.setGhostState();
+ }
+ }
+ }
+ else
+ {
+ fileCache = new HashMap<String, PhysicalFile>( 4 );
+ fileRows = new OsmFile[180][];
+ dataBuffers = new DataBuffers();
+ secondarySegmentsDir = StorageConfigHelper.getSecondarySegmentDir( segmentDir );
+ }
+ }
+
+ private File getFileFromSegmentDir( String filename )
+ {
+ if ( forceSecondaryData )
+ {
+ return new File( secondarySegmentsDir, filename );
+ }
+
+ File f = new File( segmentDir, filename );
+ if ( secondarySegmentsDir != null && !f.exists() )
+ {
+ File f2 = new File( secondarySegmentsDir, filename );
+ if ( f2.exists() )
+ return f2;
+ }
+ return f;
+ }
+
+ // if the cache sum exceeded a threshold,
+ // clean all ghosts and enable garbage collection
+ private void checkEnableCacheCleaning()
+ {
+ if ( cacheSum < 500000 || garbageCollectionEnabled )
+ return;
+
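+ // clean all non-ghost files, drop the ghost ones from the rows and
+ // enable garbage collection for subsequent node lookups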
+ for ( int i = 0; i < fileRows.length; i++ )
+ {
+ OsmFile[] fileRow = fileRows[i];
+ if ( fileRow == null )
+ continue;
+ int nghosts = 0;
+ for ( OsmFile osmf : fileRow )
+ {
+ if ( osmf.ghost )
+ nghosts++;
+ else
+ osmf.cleanAll();
+ }
+ if ( nghosts == 0 )
+ continue;
+ int j = 0;
+ OsmFile[] frow = new OsmFile[fileRow.length - nghosts];
+ for ( OsmFile osmf : fileRow )
+ {
+ if ( osmf.ghost )
+ continue;
+ frow[j++] = osmf;
+ }
+ fileRows[i] = frow;
+ }
+ garbageCollectionEnabled = true;
+ }
+
+ public int loadSegmentFor( int ilon, int ilat )
+ {
+ MicroCache mc = getSegmentFor( ilon, ilat );
+ return mc == null ? 0 : mc.getSize();
+ }
+
+ public MicroCache getSegmentFor( int ilon, int ilat )
+ {
+ try
+ {
+ int lonDegree = ilon / 1000000;
+ int latDegree = ilat / 1000000;
+ OsmFile osmf = null;
+ OsmFile[] fileRow = fileRows[latDegree];
+ int ndegrees = fileRow == null ? 0 : fileRow.length;
+ for ( int i = 0; i < ndegrees; i++ )
+ {
+ if ( fileRow[i].lonDegree == lonDegree )
+ {
+ osmf = fileRow[i];
+ break;
+ }
+ }
+ if ( osmf == null )
+ {
+ osmf = fileForSegment( lonDegree, latDegree );
+ OsmFile[] newFileRow = new OsmFile[ndegrees + 1];
+ for ( int i = 0; i < ndegrees; i++ )
+ {
+ newFileRow[i] = fileRow[i];
+ }
+ newFileRow[ndegrees] = osmf;
+ fileRows[latDegree] = newFileRow;
+ }
+ osmf.ghost = false;
+ currentFileName = osmf.filename;
+
+ if ( !osmf.hasData() )
+ {
+ return null;
+ }
+
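+ // decode the micro-cache for this cell on first access; a ghost
+ // segment kept from a previous run is revived instead of decoded again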
+ MicroCache segment = osmf.getMicroCache( ilon, ilat );
+ if ( segment == null )
+ {
+ checkEnableCacheCleaning();
+ segment = osmf.createMicroCache( ilon, ilat, dataBuffers, expCtxWay, waypointMatcher );
+
+ cacheSum += segment.getDataSize();
+ if ( segment.getSize() > 0 )
+ {
+ segmentList.add( segment );
+ }
+ }
+ else if ( segment.ghost )
+ {
+ segment.unGhost();
+ if ( segment.getSize() > 0 )
+ {
+ segmentList.add( segment );
+ }
+ }
+ return segment;
+ }
+ catch (RuntimeException re)
+ {
+ throw re;
+ }
+ catch (Exception e)
+ {
+ throw new RuntimeException( "error reading datafile " + currentFileName + ": ", e );
+ }
+ }
+
+ public boolean obtainNonHollowNode( OsmNode node )
+ {
+ if ( !node.isHollow() )
+ return true;
+
+ MicroCache segment = getSegmentFor( node.ilon, node.ilat );
+ if ( segment == null )
+ {
+ return false;
+ }
+
+ long id = node.getIdFromPos();
+ if ( segment.getAndClear( id ) )
+ {
+ node.parseNodeBody( segment, nodesMap, distanceChecker );
+ }
+
+ if ( garbageCollectionEnabled ) // garbage collection
+ {
+ segment.collect( segment.getSize() >> 1 );
+ }
+
+ return !node.isHollow();
+ }
+
+ private OsmFile fileForSegment( int lonDegree, int latDegree ) throws Exception
+ {
+ int lonMod5 = lonDegree % 5;
+ int latMod5 = latDegree % 5;
+
+ int lon = lonDegree - 180 - lonMod5;
+ String slon = lon < 0 ? "W" + ( -lon ) : "E" + lon;
+ int lat = latDegree - 90 - latMod5;
+
+ String slat = lat < 0 ? "S" + ( -lat ) : "N" + lat;
+ String filenameBase = slon + "_" + slat;
+
+ currentFileName = filenameBase + ".rd5/cd5";
+
+ PhysicalFile ra = null;
+ if ( !fileCache.containsKey( filenameBase ) )
+ {
+ File f = null;
+ if ( carMode )
+ {
+ File carFile = getFileFromSegmentDir( "carsubset/" + filenameBase + ".cd5" );
+ if ( carFile.exists() )
+ f = carFile;
+ }
+ if ( f == null )
+ {
+ File fullFile = getFileFromSegmentDir( filenameBase + ".rd5" );
+ if ( fullFile.exists() )
+ f = fullFile;
+ if ( carMode && f != null )
+ oom_carsubset_hint = true;
+ }
+ if ( f != null )
+ {
+ currentFileName = f.getName();
+ ra = new PhysicalFile( f, dataBuffers, lookupVersion, lookupMinorVersion );
+ }
+ fileCache.put( filenameBase, ra );
+ }
+ ra = fileCache.get( filenameBase );
+ OsmFile osmf = new OsmFile( ra, lonDegree, latDegree, dataBuffers );
+
+ if ( first_file_access_name == null )
+ {
+ first_file_access_name = currentFileName;
+ first_file_access_failed = osmf.filename == null;
+ }
+
+ return osmf;
+ }
+
+ public List<OsmNode> getAllNodes()
+ {
+ List<OsmNode> all = new ArrayList<OsmNode>();
+ for ( MicroCache segment : segmentList )
+ {
+ ArrayList<OsmNode> positions = new ArrayList<OsmNode>();
+ int size = segment.getSize();
+
+ for ( int i = 0; i < size; i++ )
+ {
+ long id = segment.getIdForIndex( i );
+ OsmNode n = new OsmNode( id );
+ n.setHollow();
+ nodesMap.put( n );
+ positions.add( n );
+ }
+ all.addAll( positions );
+ }
+ return all;
+ }
+
+ public void close()
+ {
+ for ( PhysicalFile f : fileCache.values() )
+ {
+ try
+ {
+ if ( f != null )
+ f.ra.close();
+ }
+ catch (IOException ioe)
+ {
+ // ignore
+ }
+ }
+ }
+}
diff --git a/brouter-mapaccess/src/main/java/btools/mapaccess/OsmFile.java b/brouter-mapaccess/src/main/java/btools/mapaccess/OsmFile.java
index 22726ff..9c1b381 100644
--- a/brouter-mapaccess/src/main/java/btools/mapaccess/OsmFile.java
+++ b/brouter-mapaccess/src/main/java/btools/mapaccess/OsmFile.java
@@ -1,124 +1,208 @@
-/**
- * cache for a single square
- *
- * @author ab
- */
-package btools.mapaccess;
-
-import java.io.IOException;
-import java.io.RandomAccessFile;
-
-import btools.util.ByteDataReader;
-import btools.util.Crc32;
-
-final class OsmFile
-{
- private RandomAccessFile is = null;
- private long fileOffset;
-
- private int[] posIdx;
- public MicroCache[] microCaches;
-
- public int lonDegree;
- public int latDegree;
-
- public String filename;
-
- public boolean ghost = false;
-
- public OsmFile( PhysicalFile rafile, int tileIndex, byte[] iobuffer ) throws Exception
- {
- if ( rafile != null )
- {
- filename = rafile.fileName;
-
- long[] index = rafile.fileIndex;
- fileOffset = tileIndex > 0 ? index[ tileIndex-1 ] : 200L;
- if ( fileOffset == index[ tileIndex] ) return; // empty
-
- is = rafile.ra;
- posIdx = new int[6400];
- microCaches = new MicroCache[6400];
- is.seek( fileOffset );
- is.readFully( iobuffer, 0, 25600 );
-
- if ( rafile.fileHeaderCrcs != null )
- {
- int headerCrc = Crc32.crc( iobuffer, 0, 25600 );
- if ( rafile.fileHeaderCrcs[tileIndex] != headerCrc )
- {
- throw new IOException( "sub index checksum error" );
- }
- }
-
- ByteDataReader dis = new ByteDataReader( iobuffer );
- for( int i=0; i<6400; i++ )
- {
- posIdx[i] = dis.readInt();
- }
- }
- }
-
- private int getPosIdx( int idx )
- {
- return idx == -1 ? 25600 : posIdx[idx];
- }
-
- public int getDataInputForSubIdx( int subIdx, byte[] iobuffer ) throws Exception
- {
- int startPos = getPosIdx(subIdx-1);
- int endPos = getPosIdx(subIdx);
- int size = endPos-startPos;
- if ( size > 0 )
- {
- is.seek( fileOffset + startPos );
- if ( size <= iobuffer.length )
- {
- is.readFully( iobuffer, 0, size );
- }
- }
- return size;
- }
-
- // set this OsmFile to ghost-state:
- long setGhostState()
- {
- long sum = 0;
- ghost = true;
- int nc = microCaches == null ? 0 : microCaches.length;
- for( int i=0; i< nc; i++ )
- {
- MicroCache mc = microCaches[i];
- if ( mc == null ) continue;
- if ( mc.virgin )
- {
- mc.ghost = true;
- sum += mc.getDataSize();
- }
- else
- {
- microCaches[i] = null;
- }
- }
- return sum;
- }
-
- void cleanAll()
- {
- int nc = microCaches == null ? 0 : microCaches.length;
- for( int i=0; i< nc; i++ )
- {
- MicroCache mc = microCaches[i];
- if ( mc == null ) continue;
- if ( mc.ghost )
- {
- microCaches[i] = null;
- }
- else
- {
- mc.collect();
- }
- }
- }
-
-}
+/**
+ * cache for a single square
+ *
+ * @author ab
+ */
+package btools.mapaccess;
+
+import java.io.IOException;
+import java.io.RandomAccessFile;
+
+import btools.codec.DataBuffers;
+import btools.codec.MicroCache;
+import btools.codec.MicroCache1;
+import btools.codec.MicroCache2;
+import btools.codec.TagValueValidator;
+import btools.codec.WaypointMatcher;
+import btools.util.ByteDataReader;
+import btools.util.Crc32;
+
+final class OsmFile
+{
+ private RandomAccessFile is = null;
+ private long fileOffset;
+
+ private int[] posIdx;
+ private MicroCache[] microCaches;
+
+ public int lonDegree;
+ public int latDegree;
+
+ public String filename;
+
+ public boolean ghost = false;
+
+ private int divisor;
+ private int cellsize;
+ private int ncaches;
+ private int indexsize;
+
+ public OsmFile( PhysicalFile rafile, int lonDegree, int latDegree, DataBuffers dataBuffers ) throws Exception
+ {
+ this.lonDegree = lonDegree;
+ this.latDegree = latDegree;
+ int lonMod5 = lonDegree % 5;
+ int latMod5 = latDegree % 5;
+ int tileIndex = lonMod5 * 5 + latMod5;
+
+ if ( rafile != null )
+ {
+ divisor = rafile.divisor;
+
+ cellsize = 1000000 / divisor;
+ ncaches = divisor * divisor;
+ indexsize = ncaches * 4;
+
+ byte[] iobuffer = dataBuffers.iobuffer;
+ filename = rafile.fileName;
+
+ long[] index = rafile.fileIndex;
+ fileOffset = tileIndex > 0 ? index[tileIndex - 1] : 200L;
+ if ( fileOffset == index[tileIndex] )
+ return; // empty
+
+ is = rafile.ra;
+ posIdx = new int[ncaches];
+ microCaches = new MicroCache[ncaches];
+ is.seek( fileOffset );
+ is.readFully( iobuffer, 0, indexsize );
+
+ if ( rafile.fileHeaderCrcs != null )
+ {
+ int headerCrc = Crc32.crc( iobuffer, 0, indexsize );
+ if ( rafile.fileHeaderCrcs[tileIndex] != headerCrc )
+ {
+ throw new IOException( "sub index checksum error" );
+ }
+ }
+
+ ByteDataReader dis = new ByteDataReader( iobuffer );
+ for ( int i = 0; i < ncaches; i++ )
+ {
+ posIdx[i] = dis.readInt();
+ }
+ }
+ }
+
+ public boolean hasData()
+ {
+ return microCaches != null;
+ }
+
+ public MicroCache getMicroCache( int ilon, int ilat )
+ {
+ int lonIdx = ilon / cellsize;
+ int latIdx = ilat / cellsize;
+ int subIdx = ( latIdx - divisor * latDegree ) * divisor + ( lonIdx - divisor * lonDegree );
+ return microCaches[subIdx];
+ }
+
+ public MicroCache createMicroCache( int ilon, int ilat, DataBuffers dataBuffers, TagValueValidator wayValidator, WaypointMatcher waypointMatcher )
+ throws Exception
+ {
+ int lonIdx = ilon / cellsize;
+ int latIdx = ilat / cellsize;
+ MicroCache segment = createMicroCache( lonIdx, latIdx, dataBuffers, wayValidator, waypointMatcher, true );
+ int subIdx = ( latIdx - divisor * latDegree ) * divisor + ( lonIdx - divisor * lonDegree );
+ microCaches[subIdx] = segment;
+ return segment;
+ }
+
+ private int getPosIdx( int idx )
+ {
+ return idx == -1 ? indexsize : posIdx[idx];
+ }
+
+ public int getDataInputForSubIdx( int subIdx, byte[] iobuffer ) throws Exception
+ {
+ int startPos = getPosIdx( subIdx - 1 );
+ int endPos = getPosIdx( subIdx );
+ int size = endPos - startPos;
+ if ( size > 0 )
+ {
+ is.seek( fileOffset + startPos );
+ if ( size <= iobuffer.length )
+ {
+ is.readFully( iobuffer, 0, size );
+ }
+ }
+ return size;
+ }
+
+ public MicroCache createMicroCache( int lonIdx, int latIdx, DataBuffers dataBuffers, TagValueValidator wayValidator,
+ WaypointMatcher waypointMatcher, boolean reallyDecode ) throws Exception
+ {
+ int subIdx = ( latIdx - divisor * latDegree ) * divisor + ( lonIdx - divisor * lonDegree );
+
+ byte[] ab = dataBuffers.iobuffer;
+ int asize = getDataInputForSubIdx( subIdx, ab );
+
+ if ( asize == 0 )
+ {
+ return MicroCache.emptyCache();
+ }
+ if ( asize > ab.length )
+ {
+ ab = new byte[asize];
+ asize = getDataInputForSubIdx( subIdx, ab );
+ }
+ // hack: the checksum contains the information
+ // which type of microcache we have
+
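+ // plain crc matches -> MicroCache1 format, crc^2 matches -> MicroCache2 format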
+ int crcData = Crc32.crc( ab, 0, asize - 4 );
+ int crcFooter = new ByteDataReader( ab, asize - 4 ).readInt();
+ if ( crcData == crcFooter )
+ {
+ return reallyDecode ? new MicroCache1( ab, lonIdx, latIdx ) : null;
+ }
+ if ( ( crcData ^ 2 ) == crcFooter )
+ {
+ return reallyDecode ? new MicroCache2( dataBuffers, lonIdx, latIdx, divisor, wayValidator, waypointMatcher ) : null;
+ }
+ throw new IOException( "checkum error" );
+ }
+
+ // set this OsmFile to ghost-state:
+ long setGhostState()
+ {
+ long sum = 0;
+ ghost = true;
+ int nc = microCaches == null ? 0 : microCaches.length;
+ for ( int i = 0; i < nc; i++ )
+ {
+ MicroCache mc = microCaches[i];
+ if ( mc == null )
+ continue;
+ if ( mc.virgin )
+ {
+ mc.ghost = true;
+ sum += mc.getDataSize();
+ }
+ else
+ {
+ microCaches[i] = null;
+ }
+ }
+ return sum;
+ }
+
+ void cleanAll()
+ {
+ int nc = microCaches == null ? 0 : microCaches.length;
+ for ( int i = 0; i < nc; i++ )
+ {
+ MicroCache mc = microCaches[i];
+ if ( mc == null )
+ continue;
+ if ( mc.ghost )
+ {
+ microCaches[i] = null;
+ }
+ else
+ {
+ mc.collect( 0 );
+ }
+ }
+ }
+
+}
diff --git a/brouter-mapaccess/src/main/java/btools/mapaccess/OsmLink.java b/brouter-mapaccess/src/main/java/btools/mapaccess/OsmLink.java
index 3243f81..b967251 100644
--- a/brouter-mapaccess/src/main/java/btools/mapaccess/OsmLink.java
+++ b/brouter-mapaccess/src/main/java/btools/mapaccess/OsmLink.java
@@ -5,6 +5,8 @@
*/
package btools.mapaccess;
+import btools.util.ByteDataReader;
+
public class OsmLink
{
@@ -14,32 +16,77 @@ public class OsmLink
*/
public byte[] descriptionBitmap;
- /**
- * The target is either the next link or the target node
- */
public OsmNode targetNode;
public OsmLink next;
- public byte[] firsttransferBytes;
+ public OsmLinkHolder firstlinkholder = null;
- final public OsmTransferNode decodeFirsttransfer()
+ public byte[] geometry;
+
+ public boolean counterLinkWritten;
+
+ public boolean hasNewGeometry; // preliminary
+
+ public byte state;
+
+ public void setGeometry( byte[] geometry )
{
- return firsttransferBytes == null ? null : OsmTransferNode.decode( firsttransferBytes );
+ this.geometry = geometry;
+ hasNewGeometry = true;
+ }
+
+ final public OsmTransferNode decodeFirsttransfer( OsmNode sourceNode )
+ {
+ if ( geometry == null ) return null;
+ if ( hasNewGeometry )
+ {
+ OsmTransferNode firstTransferNode = null;
+ OsmTransferNode lastTransferNode = null;
+ OsmNode startnode = counterLinkWritten ? targetNode : sourceNode;
+ ByteDataReader r = new ByteDataReader( geometry );
+ int olon = startnode.ilon;
+ int olat = startnode.ilat;
+ int oselev = startnode.selev;
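+ // the geometry is a sequence of var-length signed deltas (lon, lat,
+ // elevation) relative to the previous point; for counter-links the
+ // chain is rebuilt in reverse order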
+ while ( r.hasMoreData() )
+ {
+ OsmTransferNode trans = new OsmTransferNode();
+ trans.ilon = olon + r.readVarLengthSigned();
+ trans.ilat = olat + r.readVarLengthSigned();
+ trans.descriptionBitmap = descriptionBitmap;
+ trans.selev = (short)(oselev + r.readVarLengthSigned());
+ olon = trans.ilon;
+ olat = trans.ilat;
+ oselev = trans.selev;
+ if ( counterLinkWritten ) // reverse chaining
+ {
+ trans.next = firstTransferNode;
+ firstTransferNode = trans;
+ }
+ else
+ {
+ if ( lastTransferNode == null )
+ {
+ firstTransferNode = trans;
+ }
+ else
+ {
+ lastTransferNode.next = trans;
+ }
+ lastTransferNode = trans;
+ }
+ }
+ return firstTransferNode;
+ }
+ return OsmTransferNode.decode( geometry );
}
final public void encodeFirsttransfer( OsmTransferNode firsttransfer )
{
- if ( firsttransfer == null ) firsttransferBytes = null;
- else firsttransferBytes = OsmTransferNode.encode( firsttransfer );
+ if ( firsttransfer == null ) geometry = null;
+ else geometry = OsmTransferNode.encode( firsttransfer );
}
- public boolean counterLinkWritten;
-
- public byte state;
-
- public OsmLinkHolder firstlinkholder = null;
-
final public void addLinkHolder( OsmLinkHolder holder )
{
if ( firstlinkholder != null ) { holder.setNextForLink( firstlinkholder ); }
diff --git a/brouter-mapaccess/src/main/java/btools/mapaccess/OsmNode.java b/brouter-mapaccess/src/main/java/btools/mapaccess/OsmNode.java
index f736a65..04bc83e 100644
--- a/brouter-mapaccess/src/main/java/btools/mapaccess/OsmNode.java
+++ b/brouter-mapaccess/src/main/java/btools/mapaccess/OsmNode.java
@@ -1,363 +1,471 @@
-/**
- * Container for an osm node
- *
- * @author ab
- */
-package btools.mapaccess;
-
-import btools.util.ByteArrayUnifier;
-
-
-
-public class OsmNode implements OsmPos
-{
- public static final int EXTERNAL_BITMASK = 0x80; // old semantic
- public static final int SIGNLON_BITMASK = 0x80;
- public static final int SIGNLAT_BITMASK = 0x40;
- public static final int TRANSFERNODE_BITMASK = 0x20;
- public static final int WRITEDESC_BITMASK = 0x10;
- public static final int SKIPDETAILS_BITMASK = 0x08;
- public static final int NODEDESC_BITMASK = 0x04;
- public static final int RESERVED1_BITMASK = 0x02;
- public static final int RESERVED2_BITMASK = 0x01;
-
- public OsmNode()
- {
- }
-
- public OsmNode( int ilon, int ilat )
- {
- this.ilon = ilon;
- this.ilat = ilat;
- }
-
- public OsmNode( long id )
- {
- ilon = (int)(id >> 32);
- ilat = (int)(id & 0xffffffff);
- }
-
- /**
- * The latitude
- */
- public int ilat;
-
- /**
- * The longitude
- */
- public int ilon;
-
- /**
- * The elevation
- */
- public short selev;
-
- public byte[] nodeDescription;
-
- // interface OsmPos
- public int getILat()
- {
- return ilat;
- }
-
- public int getILon()
- {
- return ilon;
- }
-
- public short getSElev()
- {
- return selev;
- }
-
- public double getElev()
- {
- return selev / 4.;
- }
-
-
- /**
- * The links to other nodes
- */
- public OsmLink firstlink = null;
-
- // preliminary: keep forward order to avoid regressions
- public void addLink( OsmLink link )
- {
- if ( firstlink == null )
- {
- firstlink = link;
- }
- else
- {
- OsmLink l = firstlink;
- while( l.next != null ) l = l.next;
- l.next = link;
- }
- }
-
- private OsmLink getCompatibleLink( int ilon, int ilat, boolean counterLinkWritten, int state )
- {
- for( OsmLink l = firstlink; l != null; l = l.next )
- {
- if ( counterLinkWritten == l.counterLinkWritten && l.state == state )
- {
- OsmNode t = l.targetNode;
- if ( t.ilon == ilon && t.ilat == ilat )
- {
- l.state = 0;
- return l;
- }
- }
- }
- // second try ignoring counterLinkWritten
- // (border links are written in both directions)
- for( OsmLink l = firstlink; l != null; l = l.next )
- {
- if ( l.state == state )
- {
- OsmNode t = l.targetNode;
- if ( t.ilon == ilon && t.ilat == ilat )
- {
- l.state = 0;
- return l;
- }
- }
- }
- return null;
- }
-
- public int calcDistance( OsmPos p )
- {
- double l = (ilat-90000000) * 0.00000001234134;
- double l2 = l*l;
- double l4 = l2*l2;
- double coslat = 1.- l2 + l4 / 6.;
-
- double dlat = (ilat - p.getILat() )/1000000.;
- double dlon = (ilon - p.getILon() )/1000000. * coslat;
- double d = Math.sqrt( dlat*dlat + dlon*dlon ) * (6378000. / 57.3);
- return (int)(d + 1.0 );
- }
-
- public String toString()
- {
- return "" + getIdFromPos();
- }
-
- public void parseNodeBody( MicroCache is, OsmNodesMap hollowNodes, DistanceChecker dc )
- {
- ByteArrayUnifier abUnifier = hollowNodes.getByteArrayUnifier();
-
- selev = is.readShort();
-
- while( is.hasMoreData() )
- {
- int ilonref = ilon;
- int ilatref = ilat;
-
- boolean counterLinkWritten = false;
- OsmTransferNode firstTransferNode = null;
- OsmTransferNode lastTransferNode = null;
- int linklon;
- int linklat;
- byte[] description = null;
- for(;;)
- {
- int bitField = is.readByte();
- int dlon = is.readVarLengthUnsigned();
- int dlat = is.readVarLengthUnsigned();
- if ( (bitField & SIGNLON_BITMASK) != 0 ) { dlon = -dlon;}
- if ( (bitField & SIGNLAT_BITMASK) != 0 ) { dlat = -dlat;}
- linklon = ilonref + dlon;
- linklat = ilatref + dlat;
- ilonref = linklon;
- ilatref = linklat;
- // read variable length or old 8 byte fixed, and ensure that 8 bytes is only fixed
- if ( (bitField & WRITEDESC_BITMASK ) != 0 )
- {
- byte[] ab = new byte[is.readByte()];
- is.readFully( ab );
- description = abUnifier.unify( ab );
- }
- if ( (bitField & NODEDESC_BITMASK ) != 0 )
- {
- byte[] ab = new byte[is.readByte()];
- is.readFully( ab );
- nodeDescription = abUnifier.unify( ab );
- }
- if ( (bitField & RESERVED1_BITMASK ) != 0 )
- {
- byte[] ab = new byte[is.readByte()];
- is.readFully( ab );
- }
- if ( (bitField & RESERVED2_BITMASK ) != 0 )
- {
- byte[] ab = new byte[is.readByte()];
- is.readFully( ab );
- }
- if ( (bitField & SKIPDETAILS_BITMASK ) != 0 )
- {
- counterLinkWritten = true;
- }
-
- if ( description == null && !counterLinkWritten ) throw new IllegalArgumentException( "internal error: missing way description!" );
-
- boolean isTransfer = (bitField & TRANSFERNODE_BITMASK ) != 0;
- if ( isTransfer )
- {
- OsmTransferNode trans = new OsmTransferNode();
- trans.ilon = linklon;
- trans.ilat = linklat;
- trans.descriptionBitmap = description;
- trans.selev = (short)(selev + is.readVarLengthSigned());
- if ( lastTransferNode == null )
- {
- firstTransferNode = trans;
- }
- else
- {
- lastTransferNode.next = trans;
- }
- lastTransferNode = trans;
- }
- else
- {
- break;
- }
- }
-
- // performance shortcut: ignore link if out of reach
- if ( dc != null && !counterLinkWritten )
- {
- if ( !dc.isWithinRadius( ilon, ilat, firstTransferNode, linklon, linklat ) )
- {
- continue;
- }
- }
-
- if ( linklon == ilon && linklat == ilat )
- {
- continue; // skip self-ref
- }
-
- // first check the known links for that target
- OsmLink link = getCompatibleLink( linklon, linklat, counterLinkWritten, 2 );
- if ( link == null ) // .. not found, then check the hollow nodes
- {
- long targetNodeId = ((long)linklon)<<32 | linklat;
- OsmNode tn = hollowNodes.get( targetNodeId ); // target node
- if ( tn == null ) // node not yet known, create a new hollow proxy
- {
- tn = new OsmNode(linklon, linklat);
- tn.setHollow();
- hollowNodes.put( tn );
- }
- link = new OsmLink();
- link.targetNode = tn;
- link.counterLinkWritten = counterLinkWritten;
- link.state = 1;
- addLink( link );
- }
-
- // now we have a link with a target node -> get the reverse link
- OsmLink rlink = link.targetNode.getCompatibleLink( ilon, ilat, !counterLinkWritten, 1 );
- if ( rlink == null ) // .. not found, create it
- {
- rlink = new OsmLink();
- rlink.targetNode = this;
- rlink.counterLinkWritten = !counterLinkWritten;
- rlink.state = 2;
- link.targetNode.addLink( rlink );
- }
-
- if ( !counterLinkWritten )
- {
- // we have the data for that link, so fill both the link ..
- link.descriptionBitmap = description;
- link.encodeFirsttransfer(firstTransferNode);
-
- // .. and the reverse
- if ( rlink.counterLinkWritten )
- {
- rlink.descriptionBitmap = description; // default for no transfer-nodes
- OsmTransferNode previous = null;
- OsmTransferNode rtrans = null;
- for( OsmTransferNode trans = firstTransferNode; trans != null; trans = trans.next )
- {
- if ( previous == null )
- {
- rlink.descriptionBitmap = trans.descriptionBitmap;
- }
- else
- {
- previous.descriptionBitmap = trans.descriptionBitmap;
- }
- rtrans = new OsmTransferNode();
- rtrans.ilon = trans.ilon;
- rtrans.ilat = trans.ilat;
- rtrans.selev = trans.selev;
- rtrans.next = previous;
- rtrans.descriptionBitmap = description;
- previous = rtrans;
- }
- rlink.encodeFirsttransfer(rtrans);
- }
- }
-
- }
- if ( dc == null )
- {
- hollowNodes.remove( this );
- }
- }
-
- public boolean isHollow()
- {
- return selev == -12345;
- }
-
- public void setHollow()
- {
- selev = -12345;
- }
-
- public long getIdFromPos()
- {
- return ((long)ilon)<<32 | ilat;
- }
-
- public void unlinkLink( OsmLink link )
- {
- if ( link == firstlink )
- {
- firstlink = link.next;
- return;
- }
- for( OsmLink l = firstlink; l != null; l = l.next )
- {
- if ( l.next == link )
- {
- l.next = link.next;
- return;
- }
- }
- }
-
- @Override
- public boolean equals( Object o )
- {
- if ( o instanceof OsmNode )
- {
- OsmNode n = (OsmNode)o;
- return n.ilon == ilon && n.ilat == ilat;
- }
- return false;
- }
-
- @Override
- public int hashCode( )
- {
- return ilon + ilat;
- }
-}
+/**
+ * Container for an osm node
+ *
+ * @author ab
+ */
+package btools.mapaccess;
+
+import btools.codec.MicroCache;
+import btools.codec.MicroCache1;
+import btools.codec.MicroCache2;
+import btools.util.ByteArrayUnifier;
+
+public class OsmNode implements OsmPos
+{
+ public static final int EXTERNAL_BITMASK = 0x80; // old semantic
+ public static final int SIGNLON_BITMASK = 0x80;
+ public static final int SIGNLAT_BITMASK = 0x40;
+ public static final int TRANSFERNODE_BITMASK = 0x20;
+ public static final int WRITEDESC_BITMASK = 0x10;
+ public static final int SKIPDETAILS_BITMASK = 0x08;
+ public static final int NODEDESC_BITMASK = 0x04;
+ public static final int RESERVED1_BITMASK = 0x02;
+ public static final int RESERVED2_BITMASK = 0x01;
+
+ public OsmNode()
+ {
+ }
+
+ public OsmNode( int ilon, int ilat )
+ {
+ this.ilon = ilon;
+ this.ilat = ilat;
+ }
+
+ public OsmNode( long id )
+ {
+ ilon = (int) ( id >> 32 );
+ ilat = (int) ( id & 0xffffffff );
+ }
+
+ /**
+ * The latitude
+ */
+ public int ilat;
+
+ /**
+ * The longitude
+ */
+ public int ilon;
+
+ /**
+ * The elevation
+ */
+ public short selev;
+
+ public byte[] nodeDescription;
+
+ // interface OsmPos
+ public int getILat()
+ {
+ return ilat;
+ }
+
+ public int getILon()
+ {
+ return ilon;
+ }
+
+ public short getSElev()
+ {
+ return selev;
+ }
+
+ public double getElev()
+ {
+ return selev / 4.;
+ }
+
+ /**
+ * The links to other nodes
+ */
+ public OsmLink firstlink = null;
+
+ // preliminary: keep links in forward order to avoid regressions
+ public void addLink( OsmLink link )
+ {
+ if ( firstlink == null )
+ {
+ firstlink = link;
+ }
+ else
+ {
+ OsmLink l = firstlink;
+ while (l.next != null)
+ l = l.next;
+ l.next = link;
+ }
+ }
+
+ private OsmLink getCompatibleLink( int ilon, int ilat, boolean counterLinkWritten, int state )
+ {
+ for ( OsmLink l = firstlink; l != null; l = l.next )
+ {
+ if ( counterLinkWritten == l.counterLinkWritten && l.state == state )
+ {
+ OsmNode t = l.targetNode;
+ if ( t.ilon == ilon && t.ilat == ilat )
+ {
+ l.state = 0;
+ return l;
+ }
+ }
+ }
+ // second try ignoring counterLinkWritten
+ // (border links are written in both directions)
+ for ( OsmLink l = firstlink; l != null; l = l.next )
+ {
+ if ( l.state == state )
+ {
+ OsmNode t = l.targetNode;
+ if ( t.ilon == ilon && t.ilat == ilat )
+ {
+ l.state = 0;
+ return l;
+ }
+ }
+ }
+ return null;
+ }
+
+ public int calcDistance( OsmPos p )
+ {
+ double l = ( ilat - 90000000 ) * 0.00000001234134;
+ double l2 = l * l;
+ double l4 = l2 * l2;
+ double coslat = 1. - l2 + l4 / 6.;
+
+ double dlat = ( ilat - p.getILat() ) / 1000000.;
+ double dlon = ( ilon - p.getILon() ) / 1000000. * coslat;
+ double d = Math.sqrt( dlat * dlat + dlon * dlon ) * ( 6378000. / 57.3 );
+ return (int) ( d + 1.0 );
+ }
+
+ public String toString()
+ {
+ return "" + getIdFromPos();
+ }
+
+ public void parseNodeBody( MicroCache mc, OsmNodesMap hollowNodes, DistanceChecker dc )
+ {
+ if ( mc instanceof MicroCache1 )
+ {
+ parseNodeBody1( (MicroCache1) mc, hollowNodes, dc );
+ }
+ else if ( mc instanceof MicroCache2 )
+ {
+ parseNodeBody2( (MicroCache2) mc, hollowNodes, dc );
+ }
+ else
+ throw new IllegalArgumentException( "unknown cache version: " + mc.getClass() );
+ }
+
+ public void parseNodeBody2( MicroCache2 mc, OsmNodesMap hollowNodes, DistanceChecker dc )
+ {
+ ByteArrayUnifier abUnifier = hollowNodes.getByteArrayUnifier();
+
+ selev = mc.readShort();
+ int nodeDescSize = mc.readVarLengthUnsigned();
+ nodeDescription = nodeDescSize == 0 ? null : mc.readUnified( nodeDescSize, abUnifier );
+
+ while (mc.hasMoreData())
+ {
+ // read link data
+ int endPointer = mc.getEndPointer();
+ int linklon = ilon + mc.readVarLengthSigned();
+ int linklat = ilat + mc.readVarLengthSigned();
+ int sizecode = mc.readVarLengthUnsigned();
+ boolean isReverse = ( sizecode & 1 ) != 0;
+ byte[] description = null;
+ int descSize = sizecode >> 1;
+ if ( descSize > 0 )
+ {
+ description = mc.readUnified( descSize, abUnifier );
+ }
+ byte[] geometry = mc.readDataUntil( endPointer );
+
+ // preliminary hack: way-point-matching not here (done at decoding time)
+ if ( dc != null )
+ continue;
+
+ if ( linklon == ilon && linklat == ilat )
+ {
+ continue; // skip self-ref
+ }
+
+ // first check the known links for that target
+ OsmLink link = getCompatibleLink( linklon, linklat, isReverse, 2 );
+ if ( link == null ) // .. not found, then check the hollow nodes
+ {
+ long targetNodeId = ( (long) linklon ) << 32 | linklat;
+ OsmNode tn = hollowNodes.get( targetNodeId ); // target node
+ if ( tn == null ) // node not yet known, create a new hollow proxy
+ {
+ tn = new OsmNode( linklon, linklat );
+ tn.setHollow();
+ hollowNodes.put( tn );
+ }
+ link = new OsmLink();
+ link.targetNode = tn;
+ link.counterLinkWritten = isReverse;
+ link.state = 1;
+ addLink( link );
+ }
+
+ // now we have a link with a target node -> get the reverse link
+ OsmLink rlink = link.targetNode.getCompatibleLink( ilon, ilat, !isReverse, 1 );
+ if ( rlink == null ) // .. not found, create it
+ {
+ rlink = new OsmLink();
+ rlink.targetNode = this;
+ rlink.counterLinkWritten = !isReverse;
+ rlink.state = 2;
+ link.targetNode.addLink( rlink );
+ }
+
+ if ( !isReverse )
+ {
+ // we have the data for that link, so fill both the link ..
+ link.descriptionBitmap = description;
+ link.setGeometry( geometry );
+
+ // .. and the reverse
+ if ( rlink.counterLinkWritten )
+ {
+ rlink.descriptionBitmap = description;
+ rlink.setGeometry( geometry );
+ }
+ }
+
+ }
+ if ( dc == null )
+ {
+ hollowNodes.remove( this );
+ }
+ }
+
+ public void parseNodeBody1( MicroCache1 is, OsmNodesMap hollowNodes, DistanceChecker dc )
+ {
+ ByteArrayUnifier abUnifier = hollowNodes.getByteArrayUnifier();
+
+ selev = is.readShort();
+
+ while (is.hasMoreData())
+ {
+ int ilonref = ilon;
+ int ilatref = ilat;
+
+ boolean counterLinkWritten = false;
+ OsmTransferNode firstTransferNode = null;
+ OsmTransferNode lastTransferNode = null;
+ int linklon;
+ int linklat;
+ byte[] description = null;
+ for ( ;; )
+ {
+ int bitField = is.readByte();
+ int dlon = is.readVarLengthUnsigned();
+ int dlat = is.readVarLengthUnsigned();
+ if ( ( bitField & SIGNLON_BITMASK ) != 0 )
+ {
+ dlon = -dlon;
+ }
+ if ( ( bitField & SIGNLAT_BITMASK ) != 0 )
+ {
+ dlat = -dlat;
+ }
+ linklon = ilonref + dlon;
+ linklat = ilatref + dlat;
+ ilonref = linklon;
+ ilatref = linklat;
+ // read variable length or old 8 byte fixed, and ensure that 8 bytes is
+ // only fixed
+ if ( ( bitField & WRITEDESC_BITMASK ) != 0 )
+ {
+ byte[] ab = new byte[is.readByte()];
+ is.readFully( ab );
+ description = abUnifier.unify( ab );
+ }
+ if ( ( bitField & NODEDESC_BITMASK ) != 0 )
+ {
+ byte[] ab = new byte[is.readByte()];
+ is.readFully( ab );
+ nodeDescription = abUnifier.unify( ab );
+ }
+ if ( ( bitField & RESERVED1_BITMASK ) != 0 )
+ {
+ byte[] ab = new byte[is.readByte()];
+ is.readFully( ab );
+ }
+ if ( ( bitField & RESERVED2_BITMASK ) != 0 )
+ {
+ byte[] ab = new byte[is.readByte()];
+ is.readFully( ab );
+ }
+ if ( ( bitField & SKIPDETAILS_BITMASK ) != 0 )
+ {
+ counterLinkWritten = true;
+ }
+
+ if ( description == null && !counterLinkWritten )
+ throw new IllegalArgumentException( "internal error: missing way description!" );
+
+ boolean isTransfer = ( bitField & TRANSFERNODE_BITMASK ) != 0;
+ if ( isTransfer )
+ {
+ OsmTransferNode trans = new OsmTransferNode();
+ trans.ilon = linklon;
+ trans.ilat = linklat;
+ trans.descriptionBitmap = description;
+ trans.selev = (short) ( selev + is.readVarLengthSigned() );
+ if ( lastTransferNode == null )
+ {
+ firstTransferNode = trans;
+ }
+ else
+ {
+ lastTransferNode.next = trans;
+ }
+ lastTransferNode = trans;
+ }
+ else
+ {
+ break;
+ }
+ }
+
+ // performance shortcut: ignore link if out of reach
+ if ( dc != null && !counterLinkWritten )
+ {
+ if ( !dc.isWithinRadius( ilon, ilat, firstTransferNode, linklon, linklat ) )
+ {
+ continue;
+ }
+ }
+
+ if ( linklon == ilon && linklat == ilat )
+ {
+ continue; // skip self-ref
+ }
+
+ // first check the known links for that target
+ OsmLink link = getCompatibleLink( linklon, linklat, counterLinkWritten, 2 );
+ if ( link == null ) // .. not found, then check the hollow nodes
+ {
+ long targetNodeId = ( (long) linklon ) << 32 | linklat;
+ OsmNode tn = hollowNodes.get( targetNodeId ); // target node
+ if ( tn == null ) // node not yet known, create a new hollow proxy
+ {
+ tn = new OsmNode( linklon, linklat );
+ tn.setHollow();
+ hollowNodes.put( tn );
+ }
+ link = new OsmLink();
+ link.targetNode = tn;
+ link.counterLinkWritten = counterLinkWritten;
+ link.state = 1;
+ addLink( link );
+ }
+
+ // now we have a link with a target node -> get the reverse link
+ OsmLink rlink = link.targetNode.getCompatibleLink( ilon, ilat, !counterLinkWritten, 1 );
+ if ( rlink == null ) // .. not found, create it
+ {
+ rlink = new OsmLink();
+ rlink.targetNode = this;
+ rlink.counterLinkWritten = !counterLinkWritten;
+ rlink.state = 2;
+ link.targetNode.addLink( rlink );
+ }
+
+ if ( !counterLinkWritten )
+ {
+ // we have the data for that link, so fill both the link ..
+ link.descriptionBitmap = description;
+ link.encodeFirsttransfer( firstTransferNode );
+
+ // .. and the reverse
+ if ( rlink.counterLinkWritten )
+ {
+ rlink.descriptionBitmap = description; // default for no transfer-nodes
+ OsmTransferNode previous = null;
+ OsmTransferNode rtrans = null;
+ for ( OsmTransferNode trans = firstTransferNode; trans != null; trans = trans.next )
+ {
+ if ( previous == null )
+ {
+ rlink.descriptionBitmap = trans.descriptionBitmap;
+ }
+ else
+ {
+ previous.descriptionBitmap = trans.descriptionBitmap;
+ }
+ rtrans = new OsmTransferNode();
+ rtrans.ilon = trans.ilon;
+ rtrans.ilat = trans.ilat;
+ rtrans.selev = trans.selev;
+ rtrans.next = previous;
+ rtrans.descriptionBitmap = description;
+ previous = rtrans;
+ }
+ rlink.encodeFirsttransfer( rtrans );
+ }
+ }
+
+ }
+ if ( dc == null )
+ {
+ hollowNodes.remove( this );
+ }
+ }
+
+ public boolean isHollow()
+ {
+ return selev == -12345;
+ }
+
+ public void setHollow()
+ {
+ selev = -12345;
+ }
+
+ public long getIdFromPos()
+ {
+ return ( (long) ilon ) << 32 | ilat;
+ }
+
+ public void unlinkLink( OsmLink link )
+ {
+ if ( link == firstlink )
+ {
+ firstlink = link.next;
+ return;
+ }
+ for ( OsmLink l = firstlink; l != null; l = l.next )
+ {
+ if ( l.next == link )
+ {
+ l.next = link.next;
+ return;
+ }
+ }
+ }
+
+ @Override
+ public boolean equals( Object o )
+ {
+ if ( o instanceof OsmNode )
+ {
+ OsmNode n = (OsmNode) o;
+ return n.ilon == ilon && n.ilat == ilat;
+ }
+ return false;
+ }
+
+ @Override
+ public int hashCode()
+ {
+ return ilon + ilat;
+ }
+}
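
For reference, the MicroCache2 link record consumed by parseNodeBody2 above is just a size-prefixed run of plain ByteDataReader primitives (see the ByteDataReader/ByteDataWriter hunks further down). A minimal decode sketch, illustrative only; linkData, recordStart, ilon and ilat are placeholder names, not part of the patch:

    ByteDataReader r = new ByteDataReader( linkData, recordStart ); // positioned at one link record
    int endPointer = r.getEndPointer();              // var-length size prefix -> first byte after this record
    int linklon = ilon + r.readVarLengthSigned();    // target position, delta-coded against this node
    int linklat = ilat + r.readVarLengthSigned();
    int sizecode = r.readVarLengthUnsigned();        // low bit: reverse flag, remaining bits: description length
    boolean isReverse = ( sizecode & 1 ) != 0;
    byte[] description = null;
    int descSize = sizecode >> 1;
    if ( descSize > 0 )
    {
      description = new byte[descSize];
      r.readFully( description );
    }
    byte[] geometry = r.readDataUntil( endPointer ); // whatever remains is the transfer-node geometry (may be null)
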
diff --git a/brouter-mapaccess/src/main/java/btools/mapaccess/PhysicalFile.java b/brouter-mapaccess/src/main/java/btools/mapaccess/PhysicalFile.java
index 7fd85ad..2c4641b 100644
--- a/brouter-mapaccess/src/main/java/btools/mapaccess/PhysicalFile.java
+++ b/brouter-mapaccess/src/main/java/btools/mapaccess/PhysicalFile.java
@@ -5,8 +5,11 @@
*/
package btools.mapaccess;
-import java.io.*;
+import java.io.File;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import btools.codec.DataBuffers;
import btools.util.ByteDataReader;
import btools.util.Crc32;
@@ -21,6 +24,8 @@ final public class PhysicalFile
String fileName;
+ public int divisor = 80;
+
/**
* Checks the integrity of the file using the build-in checksums
*
@@ -28,39 +33,50 @@ final public class PhysicalFile
*/
public static String checkFileIntegrity( File f )
{
- PhysicalFile pf = null;
- try
- {
- byte[] iobuffer = new byte[65636];
- pf = new PhysicalFile( f, new byte[65636], -1, -1 );
- for( int tileIndex=0; tileIndex<25; tileIndex++ )
- {
- OsmFile osmf = new OsmFile( pf, tileIndex, iobuffer );
- if ( osmf.microCaches != null )
- for( int lonIdx80=0; lonIdx80<80; lonIdx80++ )
- for( int latIdx80=0; latIdx80<80; latIdx80++ )
- new MicroCache( osmf, lonIdx80, latIdx80, iobuffer );
- }
- }
- catch( IllegalArgumentException iae )
- {
- return iae.getMessage();
- }
- catch( Exception e )
- {
- return e.toString();
- }
- finally
- {
- if ( pf != null ) try{ pf.ra.close(); } catch( Exception ee ) {}
- }
- return null;
+ PhysicalFile pf = null;
+ try
+ {
+ DataBuffers dataBuffers = new DataBuffers();
+ pf = new PhysicalFile( f, dataBuffers, -1, -1 );
+ int div = pf.divisor;
+ for ( int lonDegree = 0; lonDegree < 5; lonDegree++ ) // doesn't really matter..
+ {
+ for ( int latDegree = 0; latDegree < 5; latDegree++ ) // ..where on earth we are
+ {
+ OsmFile osmf = new OsmFile( pf, lonDegree, latDegree, dataBuffers );
+ if ( osmf.hasData() )
+ for ( int lonIdx = 0; lonIdx < div; lonIdx++ )
+ for ( int latIdx = 0; latIdx < div; latIdx++ )
+ osmf.createMicroCache( lonDegree * div + lonIdx, latDegree * div + latIdx, dataBuffers, null, null, false );
+ }
+ }
+ }
+ catch (IllegalArgumentException iae)
+ {
+ return iae.getMessage();
+ }
+ catch (Exception e)
+ {
+ return e.toString();
+ }
+ finally
+ {
+ if ( pf != null )
+ try
+ {
+ pf.ra.close();
+ }
+ catch (Exception ee)
+ {
+ }
+ }
+ return null;
}
- public PhysicalFile( File f, byte[] iobuffer, int lookupVersion, int lookupMinorVersion ) throws Exception
+ public PhysicalFile( File f, DataBuffers dataBuffers, int lookupVersion, int lookupMinorVersion ) throws Exception
{
fileName = f.getName();
-
+ byte[] iobuffer = dataBuffers.iobuffer;
ra = new RandomAccessFile( f, "r" );
ra.readFully( iobuffer, 0, 200 );
fileIndexCrc = Crc32.crc( iobuffer, 0, 200 );
@@ -99,7 +115,17 @@ final public class PhysicalFile
ra.readFully( iobuffer, 0, extraLen );
dis = new ByteDataReader( iobuffer );
creationTime = dis.readLong();
- if ( dis.readInt() != fileIndexCrc )
+
+ int crcData = dis.readInt();
+ if ( crcData == fileIndexCrc )
+ {
+ divisor = 80; // old format
+ }
+ else if ( (crcData ^ 2) == fileIndexCrc )
+ {
+ divisor = 32; // new format
+ }
+ else
{
throw new IOException( "top index checksum error" );
}
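
The divisor detection above doubles as a format check: the extra header section stores the creation time plus the top-index CRC, and a new-format file stores that CRC with bit 1 flipped, so readers without this patch reject it as corrupt instead of misreading the finer 32-subtile grid. On the encoder side this presumably comes down to something like the following sketch (newFormat is an assumed flag and the writer shown is a sketch, not the map-creator's actual code):

    ByteDataWriter extra = new ByteDataWriter( new byte[12] );          // 8 bytes creation time + 4 bytes crc
    extra.writeLong( creationTime );                                    // read back via dis.readLong()
    extra.writeInt( newFormat ? ( fileIndexCrc ^ 2 ) : fileIndexCrc );  // flipped bit marks divisor 32
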
diff --git a/brouter-routing-app/src/main/java/btools/routingapp/BInstallerView.java b/brouter-routing-app/src/main/java/btools/routingapp/BInstallerView.java
index 258dada..61da4cf 100644
--- a/brouter-routing-app/src/main/java/btools/routingapp/BInstallerView.java
+++ b/brouter-routing-app/src/main/java/btools/routingapp/BInstallerView.java
@@ -157,7 +157,7 @@ public class BInstallerView extends View
private void startDownload( int tileIndex, boolean isCd5 )
{
String namebase = baseNameForTile( tileIndex );
- String baseurl = "http://brouter.de/brouter/segments3/";
+ String baseurl = "http://brouter.de/brouter/segments4/";
currentDownloadFile = namebase + (isCd5 ? ".cd5" : ".rd5" );
String url = baseurl + (isCd5 ? "carsubset/" : "" ) + currentDownloadFile;
isDownloading = true;
@@ -606,7 +606,7 @@ float tx, ty;
// download the file
input = connection.getInputStream();
- int slidx = surl.lastIndexOf( "segments3/" );
+ int slidx = surl.lastIndexOf( "segments4/" );
fname = baseDir + "/brouter/segments3/" + surl.substring( slidx+10 );
tmp_file = new File( fname + "_tmp" );
if ( new File( fname ).exists() ) return "internal error: file exists: " + fname;
@@ -633,6 +633,15 @@ float tx, ty;
try { Thread.sleep( dt ); } catch( InterruptedException ie ) {}
}
}
+ publishProgress( 101 );
+ String check_result = PhysicalFile.checkFileIntegrity( tmp_file );
+ if ( check_result != null ) return check_result;
+
+ if ( !tmp_file.renameTo( new File( fname ) ) )
+ {
+ return "Could not rename to " + fname;
+ }
+ return null;
} catch (Exception e) {
return e.toString();
} finally {
@@ -647,15 +656,6 @@ float tx, ty;
if (connection != null)
connection.disconnect();
}
- publishProgress( 101 );
- String check_result = PhysicalFile.checkFileIntegrity( tmp_file );
- if ( check_result != null ) return check_result;
-
- if ( !tmp_file.renameTo( new File( fname ) ) )
- {
- return "Could not rename to " + fname;
- }
- return null;
}
finally
{
diff --git a/brouter-server/src/main/java/btools/server/RouteServer.java b/brouter-server/src/main/java/btools/server/RouteServer.java
index 95394be..01abf05 100644
--- a/brouter-server/src/main/java/btools/server/RouteServer.java
+++ b/brouter-server/src/main/java/btools/server/RouteServer.java
@@ -13,7 +13,6 @@ import java.util.List;
import java.util.StringTokenizer;
import java.util.TreeMap;
-import btools.memrouter.TwinRoutingEngine;
import btools.router.OsmNodeNamed;
import btools.router.OsmTrack;
import btools.router.RoutingContext;
@@ -102,7 +101,7 @@ public class RouteServer extends Thread
RoutingContext rc = handler.readRoutingContext();
List wplist = handler.readWayPointList();
- cr = new TwinRoutingEngine( null, null, serviceContext.segmentDir, wplist, rc );
+ cr = new RoutingEngine( null, null, serviceContext.segmentDir, wplist, rc );
cr.quite = true;
cr.doRun( maxRunningTime );
diff --git a/brouter-server/src/test/java/btools/server/IntegrityCheckTest.java b/brouter-server/src/test/java/btools/server/IntegrityCheckTest.java
new file mode 100644
index 0000000..5637a9f
--- /dev/null
+++ b/brouter-server/src/test/java/btools/server/IntegrityCheckTest.java
@@ -0,0 +1,32 @@
+package btools.server;
+
+import java.io.File;
+import java.net.URL;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+import btools.mapaccess.PhysicalFile;
+
+public class IntegrityCheckTest
+{
+ private File workingDir;
+
+ @Test
+ public void integrityTest() throws Exception
+ {
+ URL resulturl = this.getClass().getResource( "/testtrack0.gpx" );
+ Assert.assertTrue( "reference result not found: ", resulturl != null );
+ File resultfile = new File( resulturl.getFile() );
+ workingDir = resultfile.getParentFile();
+
+ File segmentDir = new File( workingDir, "/../../../brouter-map-creator/target/test-classes/tmp/segments" );
+ File[] files = segmentDir.listFiles();
+
+ for ( File f : files )
+ {
+ String result = PhysicalFile.checkFileIntegrity( f );
+ Assert.assertNull( "integrity check failed for " + f + ": " + result, result );
+ }
+ }
+
+}
diff --git a/brouter-util/src/main/java/btools/util/BitCoderContext.java b/brouter-util/src/main/java/btools/util/BitCoderContext.java
index 8ae77de..297d768 100644
--- a/brouter-util/src/main/java/btools/util/BitCoderContext.java
+++ b/brouter-util/src/main/java/btools/util/BitCoderContext.java
@@ -1,96 +1,150 @@
package btools.util;
- public final class BitCoderContext
+
+public class BitCoderContext
+{
+ private byte[] ab;
+ private int idx = -1;
+ private int bm = 0x100; // byte mask
+ private int b;
+
+ public BitCoderContext( byte[] ab )
{
- private byte[] ab;
- private int idx = -1;
- private int bm = 0x100 ; // byte mask
- private int b;
-
- public BitCoderContext( byte[] ab )
- {
- this.ab = ab;
- }
-
- // encode a distance with a variable bit length
- // (poor mans huffman tree)
- // 1 -> 0
- // 01 -> 1 + following 1-bit word ( 1..2 )
- // 001 -> 3 + following 2-bit word ( 3..6 )
- // 0001 -> 7 + following 3-bit word ( 7..14 ) etc.
-
- public void encodeVarBits( int value )
- {
- int range = 0;
- while ( value > range )
- {
- encodeBit( false );
- value -= range+1;
- range = 2*range + 1;
- }
- encodeBit( true );
- encode( range, value );
- }
-
- // twin to encodeDistance
- public int decodeVarBits()
- {
- int range = 0;
- int value = 0;
- while ( !decodeBit() )
- {
- value += range+1;
- range = 2*range + 1;
- }
- return value + decode( range );
- }
-
- public void encodeBit( boolean value )
- {
- if ( bm == 0x100 ) { bm = 1; ab[++idx] = 0; }
- if ( value ) ab[idx] |= bm;
- bm <<= 1;
- }
-
- public boolean decodeBit()
- {
- if ( bm == 0x100 ) { bm = 1; b = ab[++idx]; }
- boolean value = ( (b & bm) != 0 );
- bm <<= 1;
- return value;
- }
-
- // encode a symbol with number of bits according to maxvalue
- public void encode( int max, int value )
- {
- int im = 1; // integer mask
- while( max != 0 )
- {
- if ( bm == 0x100 ) { bm = 1; ab[++idx] = 0; }
- if ( (value & im) != 0 ) ab[idx] |= bm;
- max >>= 1;
- bm <<= 1;
- im <<= 1;
- }
- }
-
- public int getEncodedLength()
- {
- return idx+1;
- }
-
- public int decode( int max )
- {
- int value = 0;
- int im = 1; // integer mask
- while( max != 0 )
- {
- if ( bm == 0x100 ) { bm = 1; b = ab[++idx]; }
- if ( (b & bm) != 0 ) value |= im;
- max >>= 1;
- bm <<= 1;
- im <<= 1;
- }
- return value;
- }
+ this.ab = ab;
}
+
+ /**
+ * encode an unsigned integer with a variable bit length
+ * (poor man's huffman tree)
+ * 1 -> 0
+ * 01 -> 1 + following 1-bit word ( 1..2 )
+ * 001 -> 3 + following 2-bit word ( 3..6 )
+ * 0001 -> 7 + following 3-bit word ( 7..14 ) etc.
+ *
+ * @see #decodeVarBits
+ */
+ public final void encodeVarBits( int value )
+ {
+ int range = 0;
+ while (value > range)
+ {
+ encodeBit( false );
+ value -= range + 1;
+ range = 2 * range + 1;
+ }
+ encodeBit( true );
+ encodeBounded( range, value );
+ }
+
+ /**
+ * @see #encodeVarBits
+ */
+ public final int decodeVarBits()
+ {
+ int range = 0;
+ int value = 0;
+ while (!decodeBit())
+ {
+ value += range + 1;
+ range = 2 * range + 1;
+ }
+ return value + decodeBounded( range );
+ }
+
+ public final void encodeBit( boolean value )
+ {
+ if ( bm == 0x100 )
+ {
+ bm = 1;
+ ab[++idx] = 0;
+ }
+ if ( value )
+ ab[idx] |= bm;
+ bm <<= 1;
+ }
+
+ public final boolean decodeBit()
+ {
+ if ( bm == 0x100 )
+ {
+ bm = 1;
+ b = ab[++idx];
+ }
+ boolean value = ( ( b & bm ) != 0 );
+ bm <<= 1;
+ return value;
+ }
+
+ /**
+ * encode an integer in the range 0..max (inclusive).
+ * For max = 2^n-1, this just encodes n bits, but in general
+ * this is variable length encoding, with the shorter codes
+ * for the central value range
+ */
+ public final void encodeBounded( int max, int value )
+ {
+ int im = 1; // integer mask
+ while (im <= max)
+ {
+ if ( bm == 0x100 )
+ {
+ bm = 1;
+ ab[++idx] = 0;
+ }
+ if ( ( value & im ) != 0 )
+ {
+ ab[idx] |= bm;
+ max -= im;
+ }
+ bm <<= 1;
+ im <<= 1;
+ }
+ }
+
+ /**
+ * decode an integer in the range 0..max (inclusive).
+ * @see #encodeBounded
+ */
+ public final int decodeBounded( int max )
+ {
+ int value = 0;
+ int im = 1; // integer mask
+ while (( value | im ) <= max)
+ {
+ if ( bm == 0x100 )
+ {
+ bm = 1;
+ b = ab[++idx];
+ }
+ if ( ( b & bm ) != 0 )
+ value |= im;
+ bm <<= 1;
+ im <<= 1;
+ }
+ return value;
+ }
+
+ /**
+ * @return the encoded length in bytes
+ */
+ public final int getEncodedLength()
+ {
+ return idx + 1;
+ }
+
+ /**
+ * @return the encoded length in bits
+ */
+ public final long getBitPosition()
+ {
+ long bitpos = idx << 3;
+ int m = bm;
+ while (m > 1)
+ {
+ bitpos++;
+ m >>= 1;
+ }
+ return bitpos;
+ }
+
+}
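
To make the variable-bit scheme concrete: 0 encodes as the single bit 1, and 5 encodes as the prefix 001 followed by the 2-bit bounded remainder 2 (5 = 3 + 2). A minimal round-trip sketch, assuming a sufficiently large buffer:

    byte[] ab = new byte[64];
    BitCoderContext enc = new BitCoderContext( ab );
    enc.encodeVarBits( 0 );    // -> "1"
    enc.encodeVarBits( 5 );    // -> "001" + encodeBounded( 3, 2 )
    enc.encodeVarBits( 200 );
    int bytesUsed = enc.getEncodedLength();

    BitCoderContext dec = new BitCoderContext( ab );  // decoding re-reads the same buffer
    int a = dec.decodeVarBits();  // 0
    int b = dec.decodeVarBits();  // 5
    int c = dec.decodeVarBits();  // 200
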
diff --git a/brouter-util/src/main/java/btools/util/ByteArrayUnifier.java b/brouter-util/src/main/java/btools/util/ByteArrayUnifier.java
index a43388d..97e9fc9 100644
--- a/brouter-util/src/main/java/btools/util/ByteArrayUnifier.java
+++ b/brouter-util/src/main/java/btools/util/ByteArrayUnifier.java
@@ -9,11 +9,7 @@ public final class ByteArrayUnifier
public ByteArrayUnifier( int size, boolean validateImmutability )
{
this.size = size;
-
- if ( !Boolean.getBoolean( "disableByteArrayUnifification" ) )
- {
- byteArrayCache = new byte[size][];
- }
+ byteArrayCache = new byte[size][];
if ( validateImmutability ) crcCrosscheck = new int[size];
}
@@ -26,33 +22,40 @@ public final class ByteArrayUnifier
*/
public byte[] unify( byte[] ab )
{
- if ( byteArrayCache == null ) return ab;
-
- int n = ab.length;
- int crc = Crc32.crc( ab, 0, n );
- int idx = (crc & 0xfffffff) % size;
- byte[] abc = byteArrayCache[idx];
- if ( abc != null && abc.length == n )
+ return unify( ab, 0, ab.length );
+ }
+
+ public byte[] unify( byte[] ab, int offset, int len )
+ {
+ int crc = Crc32.crc( ab, offset, len );
+ int idx = ( crc & 0xfffffff ) % size;
+ byte[] abc = byteArrayCache[idx];
+ if ( abc != null && abc.length == len )
+ {
+ int i = 0;
+ while (i < len)
{
- int i = 0;
- while( i < n )
- {
- if ( ab[i] != abc[i] ) break;
- i++;
- }
- if ( i == n ) return abc;
+ if ( ab[offset + i] != abc[i] )
+ break;
+ i++;
}
- if ( crcCrosscheck != null )
+ if ( i == len )
+ return abc;
+ }
+ if ( crcCrosscheck != null )
+ {
+ if ( byteArrayCache[idx] != null )
{
- if ( byteArrayCache[idx] != null )
- {
- byte[] abold = byteArrayCache[idx];
- int crcold = Crc32.crc( abold, 0, abold.length );
- if ( crcold != crcCrosscheck[idx] ) throw new IllegalArgumentException( "ByteArrayUnifier: immutablity validation failed!" );
- }
- crcCrosscheck[idx] = crc;
+ byte[] abold = byteArrayCache[idx];
+ int crcold = Crc32.crc( abold, 0, abold.length );
+ if ( crcold != crcCrosscheck[idx] )
+ throw new IllegalArgumentException( "ByteArrayUnifier: immutability validation failed!" );
}
- byteArrayCache[idx] = ab;
- return ab;
+ crcCrosscheck[idx] = crc;
+ }
+ byte[] nab = new byte[len];
+ System.arraycopy( ab, offset, nab, 0, len );
+ byteArrayCache[idx] = nab;
+ return nab;
}
}
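
Typical use of the new offset/length variant is to unify a way description straight out of a larger read buffer; identical content then normally yields the identical byte[] instance, which saves memory and makes identity comparisons cheap. A sketch with assumed variables iobuffer, pos and len:

    ByteArrayUnifier unifier = new ByteArrayUnifier( 16384, false ); // cache size, no immutability crosscheck
    byte[] d1 = unifier.unify( iobuffer, pos, len );
    byte[] d2 = unifier.unify( iobuffer, pos, len );
    // d1 == d2 here: the second call finds the cached copy (barring an intervening hash-slot eviction)
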
diff --git a/brouter-util/src/main/java/btools/util/ByteDataReader.java b/brouter-util/src/main/java/btools/util/ByteDataReader.java
index 7bc2dd8..ccacfab 100644
--- a/brouter-util/src/main/java/btools/util/ByteDataReader.java
+++ b/brouter-util/src/main/java/btools/util/ByteDataReader.java
@@ -10,10 +10,19 @@ public class ByteDataReader
{
protected byte[] ab;
protected int aboffset;
+ protected int aboffsetEnd;
public ByteDataReader( byte[] byteArray )
{
ab = byteArray;
+ aboffsetEnd = ab == null ? 0 : ab.length;
+ }
+
+ public ByteDataReader( byte[] byteArray, int offset )
+ {
+ ab = byteArray;
+ aboffset = offset;
+ aboffsetEnd = ab == null ? 0 : ab.length;
}
public final int readInt()
@@ -57,6 +66,41 @@ public class ByteDataReader
return (short)( (i1 << 8) | i0);
}
+ /**
+ * Read a size value and return a pointer to the end of a data section of that size
+ *
+ * @return the pointer to the first byte after that section
+ */
+ public int getEndPointer()
+ {
+ int size = readVarLengthUnsigned();
+ return aboffset + size;
+ }
+
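+ /**
+ * Read all bytes up to the given end pointer (as obtained from getEndPointer),
+ * or null if the section is empty
+ */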
+ public byte[] readDataUntil( int endPointer )
+ {
+ int size = endPointer - aboffset;
+ if ( size == 0 )
+ {
+ return null;
+ }
+ byte[] data = new byte[size];
+ readFully( data );
+ return data;
+ }
+
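+ /**
+ * Read a size-prefixed byte array (the counterpart of ByteDataWriter.writeVarBytes);
+ * a size of 0 decodes to null
+ */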
+ public byte[] readVarBytes()
+ {
+ int len = readVarLengthUnsigned();
+ if ( len == 0 )
+ {
+ return null;
+ }
+ byte[] bytes = new byte[len];
+ readFully( bytes );
+ return bytes;
+ }
+
public final int readVarLengthSigned()
{
int v = readVarLengthUnsigned();
@@ -83,6 +127,11 @@ public class ByteDataReader
aboffset += ta.length;
}
+ public boolean hasMoreData()
+ {
+ return aboffset < aboffsetEnd;
+ }
+
@Override
public String toString()
{
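
readVarBytes pairs with ByteDataWriter.writeVarBytes in the next hunk: a var-length size prefix followed by the payload, with size 0 standing for null. A minimal round trip, illustrative only:

    byte[] buf = new byte[256];
    ByteDataWriter w = new ByteDataWriter( buf );
    w.writeVarBytes( "highway=cycleway".getBytes() ); // size prefix + payload
    w.writeVarBytes( null );                          // encoded as a single 0 byte

    ByteDataReader r = new ByteDataReader( buf );
    byte[] tags = r.readVarBytes();                   // the payload again
    byte[] none = r.readVarBytes();                   // null
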
diff --git a/brouter-util/src/main/java/btools/util/ByteDataWriter.java b/brouter-util/src/main/java/btools/util/ByteDataWriter.java
index 0fb6e70..130c551 100644
--- a/brouter-util/src/main/java/btools/util/ByteDataWriter.java
+++ b/brouter-util/src/main/java/btools/util/ByteDataWriter.java
@@ -6,33 +6,30 @@
package btools.util;
-public final class ByteDataWriter
+public class ByteDataWriter extends ByteDataReader
{
- private byte[] ab;
- private int aboffset;
-
public ByteDataWriter( byte[] byteArray )
{
- ab = byteArray;
+ super ( byteArray );
}
public void writeInt( int v )
{
- ab[aboffset++] = (byte)( (v >> 24) & 0xff );
+ ab[aboffset++] = (byte)( (v >> 24) & 0xff );
ab[aboffset++] = (byte)( (v >> 16) & 0xff );
- ab[aboffset++] = (byte)( (v >> 8) & 0xff );
+ ab[aboffset++] = (byte)( (v >> 8) & 0xff );
ab[aboffset++] = (byte)( (v ) & 0xff );
}
public void writeLong( long v )
{
- ab[aboffset++] = (byte)( (v >> 56) & 0xff );
+ ab[aboffset++] = (byte)( (v >> 56) & 0xff );
ab[aboffset++] = (byte)( (v >> 48) & 0xff );
- ab[aboffset++] = (byte)( (v >> 40) & 0xff );
+ ab[aboffset++] = (byte)( (v >> 40) & 0xff );
ab[aboffset++] = (byte)( (v >> 32) & 0xff );
- ab[aboffset++] = (byte)( (v >> 24) & 0xff );
+ ab[aboffset++] = (byte)( (v >> 24) & 0xff );
ab[aboffset++] = (byte)( (v >> 16) & 0xff );
- ab[aboffset++] = (byte)( (v >> 8) & 0xff );
+ ab[aboffset++] = (byte)( (v >> 8) & 0xff );
ab[aboffset++] = (byte)( (v ) & 0xff );
}
@@ -51,31 +48,85 @@ public final class ByteDataWriter
ab[aboffset++] = (byte)( (v >> 8) & 0xff );
ab[aboffset++] = (byte)( (v ) & 0xff );
}
-
+
public void write( byte[] sa )
{
System.arraycopy( sa, 0, ab, aboffset, sa.length );
aboffset += sa.length;
}
-
+
public void write( byte[] sa, int offset, int len )
{
System.arraycopy( sa, offset, ab, aboffset, len );
aboffset += len;
}
- public void ensureCapacity( int len )
+ public void writeVarBytes( byte[] sa )
{
- // TODO
+ if ( sa == null )
+ {
+ writeVarLengthUnsigned( 0 );
+ }
+ else
+ {
+ int len = sa.length;
+ writeVarLengthUnsigned( len );
+ write( sa, 0, len );
+ }
}
+ public void writeModeAndDesc( boolean isReverse, byte[] sa )
+ {
+ int len = sa == null ? 0 : sa.length;
+ int sizecode = len << 1 | ( isReverse ? 1 : 0 );
+ writeVarLengthUnsigned( sizecode );
+ if ( len > 0 )
+ {
+ write( sa, 0, len );
+ }
+ }
+
+
public byte[] toByteArray()
{
byte[] c = new byte[aboffset];
System.arraycopy( ab, 0, c, 0, aboffset );
return c;
}
-
+
+
+ /**
+ * Just reserves a single byte and returns its offset.
+ * Used in conjunction with injectSize
+ * to efficiently write a size prefix.
+ *
+ * @return the offset of the placeholder
+ */
+ public int writeSizePlaceHolder()
+ {
+ return aboffset++;
+ }
+
+ public void injectSize( int sizeoffset )
+ {
+ int size = 0;
+ int datasize = aboffset-sizeoffset-1;
+ int v = datasize;
+ do
+ {
+ v >>= 7;
+ size++;
+ }
+ while( v != 0 );
+ if ( size > 1 ) // doesn't fit -> shift the data after the placeholder
+ {
+ System.arraycopy( ab, sizeoffset+1, ab, sizeoffset+size, datasize );
+ }
+ aboffset = sizeoffset;
+ writeVarLengthUnsigned( datasize );
+ aboffset = sizeoffset + size + datasize;
+ }
+
public int writeVarLengthSigned( int v )
{
return writeVarLengthUnsigned( v < 0 ? ( (-v) << 1 ) | 1 : v << 1 );
@@ -83,29 +134,21 @@ public final class ByteDataWriter
public int writeVarLengthUnsigned( int v )
{
- int start = aboffset;
- do
- {
- int i7 = v & 0x7f;
- v >>= 7;
- if ( v != 0 ) i7 |= 0x80;
+ int start = aboffset;
+ do
+ {
+ int i7 = v & 0x7f;
+ v >>= 7;
+ if ( v != 0 ) i7 |= 0x80;
ab[aboffset++] = (byte)( i7 & 0xff );
- }
- while( v != 0 );
- return aboffset - start;
+ }
+ while( v != 0 );
+ return aboffset - start;
}
public int size()
{
return aboffset;
}
-
- @Override
- public String toString()
- {
- StringBuilder sb = new StringBuilder( "[" );
- for( int i=0; i" + val );
+ }
+ }
+ }
+ }
}
diff --git a/pom.xml b/pom.xml
index d810356..fbdb11e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -6,16 +6,18 @@
brouter
1.2
pom
- http://brensche.de/brouter/
+ http://brouter.de/brouter/
brouter
configurable OSM offline router with elevation awareness, Java + Android
brouter-util
+ brouter-codec
brouter-expressions
brouter-mapaccess
brouter-core
brouter-map-creator
+
brouter-server
brouter-routing-app