some more cleanup and performance squeezing

This commit is contained in:
Arndt 2016-08-26 08:40:52 +02:00
parent 12d8cae46f
commit 686d693103
9 changed files with 145 additions and 78 deletions

View file

@ -3,8 +3,8 @@ package btools.codec;
import java.util.BitSet;
import java.util.HashMap;
import btools.util.ByteArrayUnifier;
import btools.util.ByteDataReader;
import btools.util.IByteArrayUnifier;
/**
* MicroCache2 is the new format that uses statistical encoding and
@ -28,7 +28,7 @@ public final class MicroCache2 extends MicroCache
latBase = latIdx*cellsize;
}
public byte[] readUnified( int len, ByteArrayUnifier u )
public byte[] readUnified( int len, IByteArrayUnifier u )
{
byte[] b = u.unify( ab, aboffset, len );
aboffset += len;

View file

@ -14,15 +14,16 @@ import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.StringTokenizer;
import java.util.TreeMap;
import btools.util.BitCoderContext;
import btools.util.Crc32;
import java.util.Random;
import btools.util.IByteArrayUnifier;
public abstract class BExpressionContext
public abstract class BExpressionContext implements IByteArrayUnifier
{
private static final String CONTEXT_TAG = "---context:";
@ -70,6 +71,11 @@ public abstract class BExpressionContext
return hashBucketVars[idx];
}
/**
 * Returns the cached value of a build-in variable for the inverse
 * (reversed) travel direction of the current hash bucket.
 *
 * The variables cache keeps two slots per logical bucket: an even index
 * for the forward direction and the adjacent odd index (| 1) for the
 * inverse direction.
 *
 * NOTE(review): assumes currentHashBucket addresses the doubled cache
 * (hashBucket2 << 1 [ | 1 ]) so OR-ing bit 0 selects the inverse slot
 * of the same bucket — confirm against evaluate( boolean, byte[] ).
 *
 * @param idx position of the variable inside the per-bucket float array
 * @return the cached variable value for the inverse direction
 */
protected final float getInverseBuildInVariable( int idx )
{
return arrayBuildInVariablesCache[currentHashBucket | 1][idx];
}
private int linenr;
public BExpressionMetaData meta;
@ -98,7 +104,7 @@ public abstract class BExpressionContext
// create the expression cache
_arrayBitmap = new byte[hashSize][];
_arrayCrc = new int[hashSize];
arrayBuildInVariablesCache = new float[hashSize][];
arrayBuildInVariablesCache = new float[hashSize << 1][];
}
/**
@ -249,9 +255,14 @@ public abstract class BExpressionContext
lookupDataFrozen = true;
}
public void evaluate( int[] lookupData2 )
/**
 * Evaluates the expression list against the given lookup data.
 *
 * Publishes the supplied array into the shared {@code lookupData} field
 * (no defensive copy — the caller's array is used directly) and then
 * runs the private no-arg {@code evaluate()} over it.
 *
 * @param lookupData2 decoded lookup values to evaluate; retained by reference
 */
public final void evaluate( int[] lookupData2 )
{
lookupData = lookupData2;
evaluate();
}
private void evaluate()
{
int n = expressionList.size();
for( int expidx = 0; expidx < n; expidx++ )
{
@ -268,32 +279,62 @@ public abstract class BExpressionContext
return "requests=" + requests + " requests2=" + requests2 + " cachemisses=" + cachemisses;
}
/**
 * Returns a byte array holding the same content as ab[offset..offset+len).
 *
 * If the cache bucket selected by the content's CRC already contains an
 * array of equal length and content, that shared instance is returned so
 * equal descriptions can later be compared by identity. Otherwise a fresh
 * copy of the requested range is returned.
 *
 * NOTE(review): this method never writes into _arrayBitmap itself —
 * presumably the buckets are populated by evaluate(); confirm.
 */
@Override
public final byte[] unify( byte[] ab, int offset, int len )
{
  int checksum = Crc32.crc( ab, offset, len );
  int bucket = ( checksum & 0xfffffff ) % _arrayBitmap.length;
  byte[] cached = _arrayBitmap[bucket];
  if ( cached != null && cached.length == len )
  {
    boolean identical = true;
    for ( int i = 0; i < len; i++ )
    {
      if ( cached[i] != ab[offset + i] )
      {
        identical = false;
        break;
      }
    }
    if ( identical )
    {
      return cached;
    }
  }
  byte[] copy = new byte[len];
  System.arraycopy( ab, offset, copy, 0, len );
  return copy;
}
/**
* evaluates the data in the given byte array
*
* @return true if the data is equivalent to the last call's data
*/
public void evaluate( boolean inverseDirection, byte[] ab )
public final void evaluate( boolean inverseDirection, byte[] ab )
{
requests++;
lookupDataValid = false; // this is an assertion for a nasty pitfall
// calc hash bucket from crc
int crc = Crc32.crcWithInverseBit( ab, inverseDirection );
int crc = Crc32.crc( ab, 0, ab.length );
int hashSize = _arrayBitmap.length;
currentHashBucket = ( crc & 0xfffffff ) % hashSize;
int hashBucket2 = ( crc & 0xfffffff ) % hashSize;
int hashBucket01 = hashBucket2 << 1;
int hashBucket02 = hashBucket01 | 1;
int hashBucket = inverseDirection ? hashBucket02 : hashBucket01;
int nBuildInVars = buildInVariableIdx.length;
hashBucketVars = arrayBuildInVariablesCache[currentHashBucket];
if ( hashBucketVars == null )
if ( hashBucket != currentHashBucket )
{
hashBucketVars = new float[nBuildInVars];
arrayBuildInVariablesCache[currentHashBucket] = hashBucketVars;
currentHashBucket = hashBucket;
hashBucketVars = arrayBuildInVariablesCache[hashBucket];
if ( hashBucketVars == null )
{
arrayBuildInVariablesCache[hashBucket01] = new float[buildInVariableIdx.length];
arrayBuildInVariablesCache[hashBucket02] = new float[buildInVariableIdx.length];
hashBucketVars = arrayBuildInVariablesCache[hashBucket];
}
}
if ( crc == _arrayCrc[currentHashBucket] )
if ( crc == _arrayCrc[hashBucket2] )
{
byte[] abBucket = _arrayBitmap[currentHashBucket];
byte[] abBucket = _arrayBitmap[hashBucket2];
if ( ab == abBucket ) // fast identity check
{
return;
@ -322,17 +363,31 @@ public abstract class BExpressionContext
}
cachemisses++;
_arrayBitmap[currentHashBucket] = ab;
_arrayCrc[currentHashBucket] = crc;
_arrayBitmap[hashBucket2] = ab;
_arrayCrc[hashBucket2] = crc;
decode( lookupData, inverseDirection, ab );
evaluate( lookupData );
int nBuildInVars = buildInVariableIdx.length;
// forward direction
decode( lookupData, false, ab );
evaluate();
float[] vars = arrayBuildInVariablesCache[hashBucket01];
for ( int vi = 0; vi < nBuildInVars; vi++ )
{
int idx = buildInVariableIdx[vi];
hashBucketVars[vi] = idx == -1 ? 0.f : variableData[idx];
vars[vi] = idx == -1 ? 0.f : variableData[idx];
}
// inverse direction
lookupData[0] = 2; // inverse shortcut: reuse decoding
evaluate();
vars = arrayBuildInVariablesCache[hashBucket02];
for ( int vi = 0; vi < nBuildInVars; vi++ )
{
int idx = buildInVariableIdx[vi];
vars[vi] = idx == -1 ? 0.f : variableData[idx];
}
}
public void dumpStatistics()

View file

@ -32,6 +32,8 @@ public final class BExpressionContextWay extends BExpressionContext implements T
public float getPriorityClassifier() { return getBuildInVariable(9); }
public float getClassifierMask() { return getBuildInVariable(10); }
public float getInverseCostfactor() { return getInverseBuildInVariable(0); }
public BExpressionContextWay( BExpressionMetaData meta )
{
super( "way", meta );
@ -54,8 +56,7 @@ public final class BExpressionContextWay extends BExpressionContext implements T
float minCostFactor = getCostfactor();
if ( minCostFactor >= 9999.f )
{
evaluate( true, description );
float reverseCostFactor = getCostfactor();
float reverseCostFactor = getInverseCostfactor();
if ( reverseCostFactor < minCostFactor )
{
minCostFactor = reverseCostFactor;

View file

@ -215,7 +215,7 @@ public final class NodesCache
long id = node.getIdFromPos();
if ( segment.getAndClear( id ) )
{
node.parseNodeBody( segment, nodesMap );
node.parseNodeBody( segment, nodesMap, expCtxWay );
}
if ( garbageCollectionEnabled ) // garbage collection

View file

@ -7,7 +7,9 @@ package btools.mapaccess;
import btools.codec.MicroCache;
import btools.codec.MicroCache2;
import btools.expressions.BExpressionContextWay;
import btools.util.ByteArrayUnifier;
import btools.util.IByteArrayUnifier;
public class OsmNode implements OsmPos
{
@ -45,22 +47,22 @@ public class OsmNode implements OsmPos
public byte[] nodeDescription;
// interface OsmPos
public int getILat()
/**
 * OsmPos interface: latitude in the internal integer coordinate system.
 * NOTE(review): presumably micro-degrees with an offset (see the
 * 90000000 constant in calcDistance) — confirm.
 */
public final int getILat()
{
return ilat;
}
public int getILon()
/**
 * OsmPos interface: longitude in the internal integer coordinate system.
 */
public final int getILon()
{
return ilon;
}
public short getSElev()
/**
 * OsmPos interface: raw (scaled) elevation value.
 * The value -12345 is a sentinel meaning "hollow" (see isHollow()).
 */
public final short getSElev()
{
return selev;
}
public double getElev()
/**
 * Elevation converted from the scaled short value.
 * NOTE(review): selev is divided by 4, i.e. presumably stored in
 * quarter-meters — confirm against the encoder.
 */
public final double getElev()
{
return selev / 4.;
}
@ -71,7 +73,7 @@ public class OsmNode implements OsmPos
public OsmLink firstlink = null;
// preliminary: in forward order to avoid regressions
public void addLink( OsmLink link )
public final void addLink( OsmLink link )
{
if ( firstlink == null )
{
@ -117,7 +119,7 @@ public class OsmNode implements OsmPos
return null;
}
public int calcDistance( OsmPos p )
public final int calcDistance( OsmPos p )
{
double l = ( ilat - 90000000 ) * 0.00000001234134;
double l2 = l * l;
@ -135,17 +137,17 @@ public class OsmNode implements OsmPos
return "" + getIdFromPos();
}
public void parseNodeBody( MicroCache mc, OsmNodesMap hollowNodes )
public final void parseNodeBody( MicroCache mc, OsmNodesMap hollowNodes, IByteArrayUnifier expCtxWay )
{
if ( mc instanceof MicroCache2 )
{
parseNodeBody2( (MicroCache2) mc, hollowNodes );
parseNodeBody2( (MicroCache2) mc, hollowNodes, expCtxWay );
}
else
throw new IllegalArgumentException( "unknown cache version: " + mc.getClass() );
}
public void parseNodeBody2( MicroCache2 mc, OsmNodesMap hollowNodes )
public final void parseNodeBody2( MicroCache2 mc, OsmNodesMap hollowNodes, IByteArrayUnifier expCtxWay )
{
ByteArrayUnifier abUnifier = hollowNodes.getByteArrayUnifier();
@ -165,7 +167,7 @@ public class OsmNode implements OsmPos
int descSize = sizecode >> 1;
if ( descSize > 0 )
{
description = mc.readUnified( descSize, abUnifier );
description = mc.readUnified( descSize, expCtxWay );
}
byte[] geometry = mc.readDataUntil( endPointer );
@ -221,22 +223,22 @@ public class OsmNode implements OsmPos
}
public boolean isHollow()
/**
 * Whether this node is still "hollow", i.e. only a position placeholder
 * whose body has not been parsed yet. Encoded via the -12345 sentinel
 * in the scaled elevation field.
 */
public final boolean isHollow()
{
return selev == -12345;
}
public void setHollow()
/**
 * Marks this node as hollow by writing the -12345 sentinel into the
 * scaled elevation field (the value isHollow() tests for).
 */
public final void setHollow()
{
selev = -12345;
}
public long getIdFromPos()
/**
 * Packs (ilon, ilat) into a single 64-bit id: ilon in the high 32 bits,
 * ilat in the low 32 bits.
 * NOTE(review): ilat is sign-extended before the OR; this is only
 * correct if ilat is never negative — presumably guaranteed by the
 * offset coordinate encoding, confirm.
 */
public final long getIdFromPos()
{
return ( (long) ilon ) << 32 | ilat;
}
public void unlinkLink( OsmLink link )
public final void unlinkLink( OsmLink link )
{
if ( link == firstlink )
{
@ -254,7 +256,7 @@ public class OsmNode implements OsmPos
}
@Override
public boolean equals( Object o )
public final boolean equals( Object o )
{
if ( o instanceof OsmNode )
{
@ -265,7 +267,7 @@ public class OsmNode implements OsmPos
}
@Override
public int hashCode()
/**
 * Hash consistent with equals(): nodes with equal (ilon, ilat) hash
 * equally. Plain int addition (overflow wraps harmlessly).
 */
public final int hashCode()
{
return ilon + ilat;
}

View file

@ -1,6 +1,6 @@
package btools.util;
public final class ByteArrayUnifier
public final class ByteArrayUnifier implements IByteArrayUnifier
{
private byte[][] byteArrayCache;
private int[] crcCrosscheck;

View file

@ -107,18 +107,44 @@ public class ByteDataReader
return ( v & 1 ) == 0 ? v >> 1 : -(v >> 1 );
}
/**
 * Legacy reference implementation of the LEB128-style variable-length
 * unsigned decoder: 7 payload bits per byte, little-endian groups, the
 * high bit of each byte flags continuation. Kept alongside the
 * optimized readVarLengthUnsigned() for comparison.
 *
 * @return the decoded unsigned value (as a Java int)
 */
public final int readVarLengthUnsigned_old()
{
  int value = 0;
  int shift = 0;
  byte b;
  do
  {
    b = ab[aboffset++];
    // low 7 bits are payload; place them at the current position
    value |= ( b & 0x7f ) << shift;
    shift += 7;
  }
  while ( ( b & 0x80 ) != 0 ); // high bit set => another byte follows
  return value;
}
public final int readVarLengthUnsigned()
{
int v = 0;
int shift = 0;
for(;;)
{
int i7 = ab[aboffset++] & 0xff;
v |= (( i7 & 0x7f ) << shift);
if ( ( i7 & 0x80 ) == 0 ) break;
shift += 7;
}
return v;
byte b;
int v = (b=ab[aboffset++]) & 0x7f;
if ( b >= 0 ) return v;
v |= ( (b=ab[aboffset++]) & 0x7f ) << 7;
if ( b >= 0 ) return v;
v |= ( (b=ab[aboffset++]) & 0x7f ) << 14;
if ( b >= 0 ) return v;
v |= ( (b=ab[aboffset++]) & 0x7f ) << 21;
if ( b >= 0 ) return v;
v |= ( (b=ab[aboffset++]) & 0x7f ) << 28;
if ( b >= 0 ) return v;
v |= ( (b=ab[aboffset++]) & 0x7f ) << 35;
if ( b >= 0 ) return v;
v |= ( (b=ab[aboffset++]) & 0x7f ) << 42;
if ( b >= 0 ) return v;
v |= ( (b=ab[aboffset++]) & 0x7f ) << 49;
if ( b >= 0 ) return v;
v |= ( (b=ab[aboffset++]) & 0x7f ) << 56;
return v;
}
public final void readFully( byte[] ta )

View file

@ -5,20 +5,8 @@ package btools.util;
*
* @author ab
*/
public class Crc32
public final class Crc32
{
public static int crc( long bitmap )
{
int crc = 0xFFFFFFFF;
long bm = bitmap;
for( int j=0; j<8; j++ )
{
crc = (crc >>> 8) ^ crctable[(crc ^ (int)bm) & 0xff];
bm >>= 8;
}
return crc;
}
public static int crc( byte[] ab, int offset, int len )
{
int crc = 0xFFFFFFFF;
@ -30,17 +18,6 @@ public class Crc32
return crc;
}
public static int crcWithInverseBit( byte[] ab, boolean isInverse )
{
int crc = 0xFFFFFF ^ ( isInverse ? 0x990951ba : 0x706af48f ); // inverse is webbed into crc...
int end = ab.length;
for( int j=0; j<end; j++ )
{
crc = (crc >>> 8) ^ crctable[(crc ^ ab[j]) & 0xff];
}
return isInverse ? crc | 0x80000000 : crc & 0x7fffffff; // ... and set as high bit
}
private static int[] crctable = {
0x00000000, 0x77073096, 0xee0e612c, 0x990951ba, 0x076dc419, 0x706af48f, 0xe963a535, 0x9e6495a3,
0x0edb8832, 0x79dcb8a4, 0xe0d5e91e, 0x97d2d988, 0x09b64c2b, 0x7eb17cbd, 0xe7b82d07, 0x90bf1d91,

View file

@ -0,0 +1,6 @@
package btools.util;
/**
 * Strategy interface for de-duplicating byte-array content: an
 * implementation returns an array equal in content to the requested
 * range, preferably a shared canonical instance so equal payloads can
 * later be compared by identity.
 */
public interface IByteArrayUnifier
{
/**
 * @param ab     source buffer
 * @param offset start of the range within ab
 * @param len    number of bytes in the range
 * @return an array with the same content as ab[offset..offset+len)
 */
byte[] unify( byte[] ab, int offset, int len );
}