proof of concept delta-rd5

This commit is contained in:
Arndt Brenschede 2019-09-08 13:35:12 +02:00
parent c32d3dc165
commit a69fb1c99a
4 changed files with 209 additions and 10 deletions

View file

@ -314,4 +314,56 @@ public class MicroCache extends ByteDataWriter
} }
return null; return null;
} }
// Fills this (empty) MicroCache with the delta between two versions of the
// same tile: every node that is changed, added or deleted in mc2 relative
// to mc1. Implemented as a sorted-merge over the two id arrays (faid),
// which are presumably sorted ascending -- TODO confirm against the encoder.
//
// Resulting entries (data written into this cache via write()):
// - changed or added node: the node's full data bytes copied from mc2
// - deleted node: an entry with zero data bytes
// - unchanged node: no entry at all (skipped via continue)
public void calcDelta( MicroCache mc1, MicroCache mc2 )
{
int idx1 = 0;
int idx2 = 0;
while( idx1 < mc1.size || idx2 < mc2.size )
{
// MAX_VALUE sentinel once one side is exhausted, so the other side drains
int id1 = idx1 < mc1.size ? mc1.faid[idx1] : Integer.MAX_VALUE;
int id2 = idx2 < mc2.size ? mc2.faid[idx2] : Integer.MAX_VALUE;
int id;
if ( id1 >= id2 )
{
// node present in mc2: either new (id1 > id2) or possibly changed (id1 == id2)
id = id2;
// fapos[i] is the END offset of entry i, so the start is the previous end
int start2 = idx2 > 0 ? mc2.fapos[idx2 - 1] : 0;
int len2 = mc2.fapos[idx2++] - start2;
if ( id1 == id2 )
{
// id exists in both caches, compare data
int start1 = idx1 > 0 ? mc1.fapos[idx1 - 1] : 0;
int len1 = mc1.fapos[idx1++] - start1;
if ( len1 == len2 )
{
// byte-wise comparison of the two entries
int i = 0;
while( i<len1 )
{
if ( mc1.ab[start1+i] != mc2.ab[start2+i] )
{
break;
}
i++;
}
if ( i == len1 )
{
continue; // same data -> do nothing
}
}
}
// changed or added node -> copy the new data into the delta
write( mc2.ab, start2, len2 );
}
else
{
idx1++;
id = id1; // deleted node: registered below with zero data bytes
}
// register the entry; aboffset is the current end of the written data
fapos[size] = aboffset;
faid[size] = id;
size++;
}
}
} }

View file

@ -87,10 +87,14 @@ public final class MicroCache2 extends MicroCache
// future escapes (turn restrictions?) // future escapes (turn restrictions?)
short trExceptions = 0; short trExceptions = 0;
for(;;)
{
int featureId = bc.decodeVarBits(); int featureId = bc.decodeVarBits();
if ( featureId == 0 ) break; if ( featureId == 13 )
{
fapos[n] = aboffset;
continue; // empty node escape (delta files only)
}
while( featureId != 0 )
{
int bitsize = bc.decodeNoisyNumber( 5 ); int bitsize = bc.decodeNoisyNumber( 5 );
if ( featureId == 2 ) // exceptions to turn-restriction if ( featureId == 2 ) // exceptions to turn-restriction
@ -113,6 +117,7 @@ public final class MicroCache2 extends MicroCache
{ {
for( int i=0; i< bitsize; i++ ) bc.decodeBit(); // unknown feature, just skip for( int i=0; i< bitsize; i++ ) bc.decodeBit(); // unknown feature, just skip
} }
featureId = bc.decodeVarBits();
} }
writeBoolean( false ); writeBoolean( false );
@ -147,7 +152,8 @@ public final class MicroCache2 extends MicroCache
TagValueWrapper wayTags = wayTagCoder.decodeTagValueSet(); TagValueWrapper wayTags = wayTagCoder.decodeTagValueSet();
if ( wayTags != null ) boolean linkValid = wayTags != null || wayValidator == null;
if ( linkValid )
{ {
int startPointer = aboffset; int startPointer = aboffset;
sizeoffset = writeSizePlaceHolder(); sizeoffset = writeSizePlaceHolder();
@ -162,7 +168,7 @@ public final class MicroCache2 extends MicroCache
finaldatasize += 1 + aboffset-startPointer; // reserve place for reverse finaldatasize += 1 + aboffset-startPointer; // reserve place for reverse
validBits[ nodeIdx >> 5 ] |= 1 << nodeIdx; // mark target-node valid validBits[ nodeIdx >> 5 ] |= 1 << nodeIdx; // mark target-node valid
} }
writeModeAndDesc( isReverse, wayTags.data ); writeModeAndDesc( isReverse, wayTags == null ? null : wayTags.data );
} }
if ( !isReverse ) // write geometry for forward links only if ( !isReverse ) // write geometry for forward links only
@ -200,7 +206,7 @@ public final class MicroCache2 extends MicroCache
} }
if ( matcher != null ) matcher.end(); if ( matcher != null ) matcher.end();
} }
if ( wayTags != null ) if ( linkValid )
{ {
injectSize( sizeoffset ); injectSize( sizeoffset );
} }
@ -375,6 +381,12 @@ public final class MicroCache2 extends MicroCache
int ilon = (int)(id64 >> 32); int ilon = (int)(id64 >> 32);
int ilat = (int)(id64 & 0xffffffff); int ilat = (int)(id64 & 0xffffffff);
if ( aboffset == aboffsetEnd )
{
bc.encodeVarBits( 13 ); // empty node escape (delta files only)
continue;
}
// write turn restrictions // write turn restrictions
while( readBoolean() ) while( readBoolean() )
{ {
@ -430,7 +442,10 @@ public final class MicroCache2 extends MicroCache
readFully( description ); readFully( description );
} }
boolean isInternal = isInternal( ilonlink, ilatlink ); long link64 = ((long)ilonlink)<<32 | ilatlink;
Integer idx = idMap.get( Long.valueOf( link64 ) );
boolean isInternal = idx != null;
if ( isReverse && isInternal ) if ( isReverse && isInternal )
{ {
if ( dodebug ) System.out.println( "*** NOT encoding link reverse=" + isReverse + " internal=" + isInternal ); if ( dodebug ) System.out.println( "*** NOT encoding link reverse=" + isReverse + " internal=" + isInternal );
@ -442,9 +457,6 @@ public final class MicroCache2 extends MicroCache
if ( isInternal ) if ( isInternal )
{ {
long link64 = ((long)ilonlink)<<32 | ilatlink;
Integer idx = idMap.get( Long.valueOf( link64 ) );
if ( idx == null ) throw new RuntimeException( "ups: internal not found?" );
int nodeIdx = idx.intValue(); int nodeIdx = idx.intValue();
if ( dodebug ) System.out.println( "*** target nodeIdx=" + nodeIdx ); if ( dodebug ) System.out.println( "*** target nodeIdx=" + nodeIdx );
if ( nodeIdx == n ) throw new RuntimeException( "ups: self ref?" ); if ( nodeIdx == n ) throw new RuntimeException( "ups: self ref?" );

View file

@ -62,6 +62,11 @@ public final class TagValueCoder
{ {
if ( ++pass == 3 ) if ( ++pass == 3 )
{ {
if ( identityMap.size() == 0 )
{
TagValueSet dummy = new TagValueSet();
identityMap.put( dummy, dummy );
}
PriorityQueue<TagValueSet> queue = new PriorityQueue<TagValueSet>(2*identityMap.size(), new TagValueSet.FrequencyComparator()); PriorityQueue<TagValueSet> queue = new PriorityQueue<TagValueSet>(2*identityMap.size(), new TagValueSet.FrequencyComparator());
queue.addAll(identityMap.values()); queue.addAll(identityMap.values());
while (queue.size() > 1) while (queue.size() > 1)

View file

@ -0,0 +1,130 @@
/**
 * Proof of concept for delta rd5's
 *
 * @author ab
 */
package btools.mapaccess;

import java.io.File;

import btools.codec.DataBuffers;
import btools.codec.MicroCache;
import btools.codec.MicroCache2;
import btools.codec.StatCoderContext;

public final class Rd5DiffTool
{
  public static void main( String[] args ) throws Exception
  {
    diff2files( new File( args[0] ), new File( args[1] ) );
  }

  /**
   * Compute the delta between 2 RD5 files and
   * show statistics on the expected size of the delta file.
   *
   * @param f1 the base (older) RD5 file
   * @param f2 the updated (newer) RD5 file
   * @throws Exception on I/O or decoding errors, or if the delta-encoding
   *                   cross-check detects a content mismatch
   */
  public static void diff2files( File f1, File f2 ) throws Exception
  {
    byte[] abBuf1 = new byte[10 * 1024 * 1024];
    byte[] abBuf2 = new byte[10 * 1024 * 1024];

    int nodesTotal = 0;
    int nodesDiff = 0;
    long bytesDiff = 0L;

    PhysicalFile pf1 = null;
    PhysicalFile pf2 = null;
    try
    {
      DataBuffers dataBuffers = new DataBuffers();
      pf1 = new PhysicalFile( f1, dataBuffers, -1, -1 );
      pf2 = new PhysicalFile( f2, dataBuffers, -1, -1 );
      int div = pf1.divisor;
      for ( int lonDegree = 0; lonDegree < 5; lonDegree++ ) // doesn't really matter..
      {
        for ( int latDegree = 0; latDegree < 5; latDegree++ ) // ..where on earth we are
        {
          OsmFile osmf1 = new OsmFile( pf1, lonDegree, latDegree, dataBuffers );
          OsmFile osmf2 = new OsmFile( pf2, lonDegree, latDegree, dataBuffers );

          for ( int lonIdx = 0; lonIdx < div; lonIdx++ )
          {
            for ( int latIdx = 0; latIdx < div; latIdx++ )
            {
              int lonIdxDiv = lonDegree * div + lonIdx;
              int latIdxDiv = latDegree * div + latIdx;

              // decode the corresponding tile from each file (empty cache if absent)
              MicroCache mc1 = osmf1.hasData() ?
                  osmf1.createMicroCache( lonIdxDiv, latIdxDiv, dataBuffers, null, null, true, null )
                  : MicroCache.emptyCache();
              MicroCache mc2 = osmf2.hasData() ?
                  osmf2.createMicroCache( lonIdxDiv, latIdxDiv, dataBuffers, null, null, true, null )
                  : MicroCache.emptyCache();

              // build the per-tile delta
              MicroCache mc = new MicroCache2( mc1.getSize() + mc2.getSize(), abBuf2, lonIdxDiv, latIdxDiv, div );
              mc.calcDelta( mc1, mc2 );
              nodesTotal += mc2.getSize();

              if ( mc.getSize() > 0 )
              {
                int len = mc.encodeMicroCache( abBuf1 );
                byte[] bytes = new byte[len];
                System.arraycopy( abBuf1, 0, bytes, 0, len );
                bytesDiff += len;
                nodesDiff += mc.getSize();

                // cross-check the encoding: re-instantiate the cache
                MicroCache mcCheck = new MicroCache2( new StatCoderContext( bytes ), new DataBuffers( null ), lonIdxDiv, latIdxDiv, div, null, null );

                // ..and check if still the same
                if ( mc.size() != mcCheck.size() )
                {
                  // mc.compareWith finds link-ordering differences,
                  // so we compare only if there's also a size mismatch...
                  String diffMessage = mc.compareWith( mcCheck );
                  if ( diffMessage != null )
                  {
                    throw new RuntimeException( "files differ: " + diffMessage );
                  }
                }
              }
            }
          }
        }
      }
      System.out.println( "nodesTotal=" + nodesTotal + " nodesDiff=" + nodesDiff + " bytesDiff=" + bytesDiff );
    }
    finally
    {
      closeQuietly( pf1 );
      closeQuietly( pf2 );
    }
  }

  // Best-effort close of the underlying random-access file; a failure while
  // closing must not mask an exception already propagating from the try-block.
  private static void closeQuietly( PhysicalFile pf )
  {
    if ( pf != null )
    {
      try
      {
        pf.ra.close();
      }
      catch (Exception ignored)
      {
        // intentionally ignored: nothing useful to do on close failure here
      }
    }
  }
}