feat: move pmtiles to its own file

Signed-off-by: Andrew Calcutt <acalcutt@techidiots.net>
This commit is contained in:
Andrew Calcutt 2023-10-07 18:27:56 -04:00
parent ca51104ece
commit 4772f0efdc
5 changed files with 426 additions and 401 deletions

View file

@ -7,9 +7,8 @@ import path from 'path';
import { fileURLToPath } from 'url'; import { fileURLToPath } from 'url';
import request from 'request'; import request from 'request';
import { server } from './server.js'; import { server } from './server.js';
import { GetPMtilesInfo } from './utils.js';
import MBTiles from '@mapbox/mbtiles'; import MBTiles from '@mapbox/mbtiles';
import { GetPMtilesInfo } from './pmtiles_adapter.js';
const __filename = fileURLToPath(import.meta.url); const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename); const __dirname = path.dirname(__filename);
@ -104,7 +103,7 @@ const startWithPMTiles = async (pmtilesFile) => {
} }
const info = await GetPMtilesInfo(pmtilesFile); const info = await GetPMtilesInfo(pmtilesFile);
const metadata = info.metadata const metadata = info.metadata;
console.log(info); console.log(info);
@ -168,7 +167,6 @@ const startWithPMTiles = async (pmtilesFile) => {
} }
return startServer(null, config); return startServer(null, config);
}; };
const startWithMBTiles = (mbtilesFile) => { const startWithMBTiles = (mbtilesFile) => {

304
src/pmtiles_adapter.js Normal file
View file

@ -0,0 +1,304 @@
import fs from 'node:fs';
import * as fflate from 'fflate';
import PMTiles from 'pmtiles';
/**
 * Read and parse the fixed-size (127-byte) PMTiles header at the start
 * of a .pmtiles file.
 * @param pmtilesFile - path to the .pmtiles file
 * @returns the parsed header object
 */
export const GetPMtilesHeader = async (pmtilesFile) => {
  const headerBytes = await ReadBytes(pmtilesFile, 0, 127);
  return PMTiles.bytesToHeader(headerBytes, undefined);
};
/**
 * Build TileJSON-style metadata for a PMTiles file by combining the
 * embedded JSON metadata with fields that only exist in the binary
 * header (format, bounds, center, zoom range).
 * @param pmtilesFile - path to the .pmtiles file
 * @returns {{header: object, metadata: object}}
 */
export const GetPMtilesInfo = async (pmtilesFile) => {
  //Get metadata from pmtiles file
  const header = await GetPMtilesHeader(pmtilesFile);
  const metadataBytes = await ReadBytes(
    pmtilesFile,
    header.jsonMetadataOffset,
    header.jsonMetadataLength,
  );
  const metadataDecomp = await GetPMtilesDecompress(header, metadataBytes);
  const dec = new TextDecoder('utf-8');
  const metadata = JSON.parse(dec.decode(metadataDecomp));

  //Add missing metadata from header
  metadata['format'] = GetPmtilesTileType(header.tileType).type;
  if (
    header.minLat != 0 &&
    header.minLon != 0 &&
    header.maxLat != 0 &&
    header.maxLon != 0
  ) {
    // TileJSON bounds order is [west, south, east, north] =
    // [minLon, minLat, maxLon, maxLat]; the original had lat/lon swapped.
    metadata['bounds'] = [
      header.minLon,
      header.minLat,
      header.maxLon,
      header.maxLat,
    ];
  }
  if (header.centerLon != 0 && header.centerLat != 0) {
    // TileJSON center is [lon, lat, zoom]; the original duplicated
    // centerLat where the zoom belongs.
    metadata['center'] = [
      header.centerLon,
      header.centerLat,
      header.centerZoom,
    ];
  }
  metadata['minzoom'] = header.minZoom;
  metadata['maxzoom'] = header.maxZoom;
  return { header: header, metadata: metadata };
};
/**
 * Fetch a single tile from a PMTiles file.
 *
 * Walks the directory structure: starts at the root directory and, when
 * an entry points to a leaf directory, descends into it — up to four
 * levels in total.
 *
 * @param pmtilesFile - path to the .pmtiles file
 * @param z - zoom level
 * @param x - tile column
 * @param y - tile row
 * @returns {{data: Buffer, header: object}} raw tile bytes plus
 *   Content-Type headers, or undefined when the tile does not exist.
 *   NOTE(review): the bytes are returned without decompression here —
 *   confirm callers handle internally-gzipped tiles.
 */
export const GetPMtilesTile = async (pmtilesFile, z, x, y) => {
  const tile_id = PMTiles.zxyToTileId(z, x, y);
  const header = await GetPMtilesHeader(pmtilesFile);
  if (z < header.minZoom || z > header.maxZoom) {
    return undefined;
  }
  // Search starts at the root directory; these are re-targeted to a
  // leaf directory when the found entry is a directory pointer.
  let rootDirectoryOffset = header.rootDirectoryOffset;
  let rootDirectoryLength = header.rootDirectoryLength;
  for (let depth = 0; depth <= 3; depth++) {
    const RootDirectoryBytes = await ReadBytes(
      pmtilesFile,
      rootDirectoryOffset,
      rootDirectoryLength,
    );
    const RootDirectoryBytesaDecomp = await GetPMtilesDecompress(
      header,
      RootDirectoryBytes,
    );
    const Directory = deserializeIndex(RootDirectoryBytesaDecomp);
    const entry = PMTiles.findTile(Directory, tile_id);
    if (entry) {
      if (entry.runLength > 0) {
        // runLength > 0: the entry addresses tile data directly.
        const EntryBytesArrayBuff = await ReadBytes(
          pmtilesFile,
          header.tileDataOffset + entry.offset,
          entry.length,
        );
        const EntryBytes = ArrayBufferToBuffer(EntryBytesArrayBuff);
        const EntryTileType = GetPmtilesTileType(header.tileType);
        return { data: EntryBytes, header: EntryTileType.header };
      } else {
        // runLength === 0: the entry addresses a leaf directory;
        // descend into it on the next iteration.
        rootDirectoryOffset = header.leafDirectoryOffset + entry.offset;
        rootDirectoryLength = entry.length;
      }
    } else {
      return undefined;
    }
  }
  // Implicitly returns undefined if the directory tree is deeper than
  // the four levels this loop supports.
};
/**
 * Map a PMTiles header tile-type code to a format name and the HTTP
 * Content-Type headers to serve it with.
 * @param typenum - tile type code from the PMTiles header (0-5)
 * @returns {{type: string, header: object}} format name and headers;
 *   header is empty for Unknown/unrecognized codes
 */
function GetPmtilesTileType(typenum) {
  let head = {};
  let tileType;
  switch (typenum) {
    case 1:
      tileType = 'pbf';
      head['Content-Type'] = 'application/x-protobuf';
      break;
    case 2:
      tileType = 'png';
      head['Content-Type'] = 'image/png';
      break;
    case 3:
      tileType = 'jpg';
      head['Content-Type'] = 'image/jpeg';
      break;
    case 4:
      tileType = 'webp';
      head['Content-Type'] = 'image/webp';
      break;
    case 5:
      tileType = 'avif';
      head['Content-Type'] = 'image/avif';
      break;
    case 0:
    default:
      // Previously an out-of-range code left `type` undefined; treat
      // anything unrecognized as Unknown instead.
      tileType = 'Unknown';
      break;
  }
  return { type: tileType, header: head };
}
/**
 * Copy the contents of a Node Buffer into a new, independent
 * ArrayBuffer (mutating the source afterwards does not affect it).
 * @param buffer - source Buffer
 * @returns a fresh ArrayBuffer holding the same bytes
 */
function BufferToArrayBuffer(buffer) {
  const copy = new Uint8Array(buffer.length);
  copy.set(buffer);
  return copy.buffer;
}
/**
 * Copy an ArrayBuffer's contents into a new, independent Node Buffer
 * (mutating the source afterwards does not affect it).
 * @param ab - source ArrayBuffer
 * @returns a fresh Buffer holding the same bytes
 */
function ArrayBufferToBuffer(ab) {
  // Buffer.from(TypedArray) copies the data, unlike Buffer.from(ArrayBuffer)
  // which would share the underlying memory.
  return Buffer.from(new Uint8Array(ab));
}
/**
 * Read `size` bytes from `filePath` starting at byte `offset`.
 * Positions past end-of-file are left zero-filled (Buffer.alloc zeroes
 * the buffer and a short read does not overwrite the tail).
 * @param filePath - path of the file to read
 * @param offset - byte offset to start reading at
 * @param size - number of bytes to read
 * @returns an ArrayBuffer containing the bytes read
 */
const ReadBytes = async (filePath, offset, size) => {
  const sharedBuffer = Buffer.alloc(size);
  const fd = fs.openSync(filePath, 'r'); // file descriptor
  try {
    // A single positioned read suffices; the original looped but always
    // broke out after the first full-buffer read anyway.
    await ReadFileBytes(fd, sharedBuffer, offset);
  } finally {
    fs.closeSync(fd); // the original leaked this descriptor
  }
  return BufferToArrayBuffer(sharedBuffer);
};
/**
 * Promise wrapper around fs.read: fill `sharedBuffer` (from index 0,
 * up to its full length) from file descriptor `fd`, starting at file
 * position `offset`.
 * @param fd - open file descriptor
 * @param sharedBuffer - destination Buffer
 * @param offset - file position to read from
 * @returns a Promise that resolves once the read completes
 */
function ReadFileBytes(fd, sharedBuffer, offset) {
  return new Promise((resolve, reject) => {
    const onDone = (err) => (err ? reject(err) : resolve());
    fs.read(fd, sharedBuffer, 0, sharedBuffer.length, offset, onDone);
  });
}
/**
 * Decompress an internally-compressed PMTiles buffer according to the
 * compression scheme declared in the header.
 * @param header - parsed PMTiles header (supplies internalCompression)
 * @param buffer - raw bytes to decompress
 * @returns the decompressed bytes (the input itself when uncompressed)
 * @throws when the compression scheme is not supported
 */
export const GetPMtilesDecompress = async (header, buffer) => {
  switch (header.internalCompression) {
    case PMTiles.Compression.None:
    case PMTiles.Compression.Unknown:
      return buffer;
    case PMTiles.Compression.Gzip:
      return fflate.decompressSync(new Uint8Array(buffer));
    default:
      throw Error('Compression method not supported');
  }
};
/**
 * Combine two unsigned 32-bit halves into a single JS number.
 * @param low - low 32 bits
 * @param high - high 32 bits
 * @returns high * 2^32 + low
 */
function toNum(low, high) {
  const TWO_POW_32 = 0x100000000;
  return (high >>> 0) * TWO_POW_32 + (low >>> 0);
}
/**
 * Continue decoding a varint whose value exceeds 28 bits.
 *
 * Called by readVarint after it has accumulated the low bits into `l`;
 * this reads up to 6 more bytes to build the high 32 bits, then
 * recombines both halves via toNum (JS bitwise math truncates at 32
 * bits, so the halves must be merged arithmetically).
 *
 * @param l - low bits already accumulated by readVarint
 * @param p - reader state: { buf: Uint8Array, pos: number }
 * @returns the decoded integer as a JS number
 * @throws when the varint runs past 10 bytes
 */
function readVarintRemainder(l, p) {
  const buf = p.buf;
  let h, b;
  b = buf[p.pos++];
  // readVarint already folded this byte's low 4 bits (0x0f) into `l`;
  // only bits 4-6 (mask 0x70) belong to the high half.
  h = (b & 0x70) >> 4;
  if (b < 0x80) return toNum(l, h);
  b = buf[p.pos++];
  h |= (b & 0x7f) << 3;
  if (b < 0x80) return toNum(l, h);
  b = buf[p.pos++];
  h |= (b & 0x7f) << 10;
  if (b < 0x80) return toNum(l, h);
  b = buf[p.pos++];
  h |= (b & 0x7f) << 17;
  if (b < 0x80) return toNum(l, h);
  b = buf[p.pos++];
  h |= (b & 0x7f) << 24;
  if (b < 0x80) return toNum(l, h);
  b = buf[p.pos++];
  // 10th byte contributes only a single remaining bit.
  h |= (b & 0x01) << 31;
  if (b < 0x80) return toNum(l, h);
  throw new Error('Expected varint not more than 10 bytes');
}
/**
 * Decode a little-endian base-128 varint from the reader state,
 * advancing p.pos past the consumed bytes.
 *
 * Values up to 28 bits are handled inline; larger values are delegated
 * to readVarintRemainder because JS bitwise operators truncate to
 * 32 bits.
 *
 * @param p - reader state: { buf: Uint8Array, pos: number }
 * @returns the decoded integer as a JS number
 */
export function readVarint(p) {
  const buf = p.buf;
  let val, b;
  b = buf[p.pos++];
  val = b & 0x7f;
  if (b < 0x80) return val;
  b = buf[p.pos++];
  val |= (b & 0x7f) << 7;
  if (b < 0x80) return val;
  b = buf[p.pos++];
  val |= (b & 0x7f) << 14;
  if (b < 0x80) return val;
  b = buf[p.pos++];
  val |= (b & 0x7f) << 21;
  if (b < 0x80) return val;
  // 5th byte: only its low 4 bits fit in the 32-bit `val`. pos is
  // deliberately NOT advanced — readVarintRemainder re-reads this byte
  // to extract its high bits (mask 0x70).
  b = buf[p.pos];
  val |= (b & 0x0f) << 28;
  return readVarintRemainder(val, p);
}
/**
 * Decode a PMTiles directory: a varint entry count followed by four
 * column-encoded arrays (delta-encoded tileIds, runLengths, lengths,
 * offsets).
 * @param buffer - decompressed directory bytes
 * @returns array of { tileId, offset, length, runLength } entries
 */
function deserializeIndex(buffer) {
  const reader = { buf: new Uint8Array(buffer), pos: 0 };
  const numEntries = readVarint(reader);
  const entries = [];

  // Column 1: tileIds, delta-encoded against the previous id.
  let lastId = 0;
  for (let i = 0; i < numEntries; i++) {
    lastId += readVarint(reader);
    entries.push({ tileId: lastId, offset: 0, length: 0, runLength: 1 });
  }

  // Column 2: run lengths.
  for (const entry of entries) {
    entry.runLength = readVarint(reader);
  }

  // Column 3: byte lengths.
  for (const entry of entries) {
    entry.length = readVarint(reader);
  }

  // Column 4: offsets; a stored 0 means "directly after the previous
  // entry", otherwise the stored value is offset + 1.
  for (let i = 0; i < numEntries; i++) {
    const raw = readVarint(reader);
    if (raw === 0 && i > 0) {
      entries[i].offset = entries[i - 1].offset + entries[i - 1].length;
    } else {
      entries[i].offset = raw - 1;
    }
  }
  return entries;
}

View file

@ -7,11 +7,11 @@ import zlib from 'zlib';
import clone from 'clone'; import clone from 'clone';
import express from 'express'; import express from 'express';
import MBTiles from '@mapbox/mbtiles'; import MBTiles from '@mapbox/mbtiles';
import PMTiles from 'pmtiles';
import Pbf from 'pbf'; import Pbf from 'pbf';
import { VectorTile } from '@mapbox/vector-tile'; import { VectorTile } from '@mapbox/vector-tile';
import { getTileUrls, fixTileJSONCenter, GetPMtilesInfo, GetPMtilesTile } from './utils.js'; import { getTileUrls, fixTileJSONCenter } from './utils.js';
import { GetPMtilesInfo, GetPMtilesTile } from './pmtiles_adapter.js';
export const serve_data = { export const serve_data = {
init: (options, repo) => { init: (options, repo) => {
@ -54,10 +54,10 @@ export const serve_data = {
if (tileJSONExtension === 'pmtiles') { if (tileJSONExtension === 'pmtiles') {
let isGzipped; let isGzipped;
let tileinfo = await GetPMtilesTile(item.source, z, x, y); let tileinfo = await GetPMtilesTile(item.source, z, x, y);
let data = tileinfo.data let data = tileinfo.data;
let headers = tileinfo.header let headers = tileinfo.header;
console.log(data) console.log(data);
console.log(headers) console.log(headers);
if (data == undefined) { if (data == undefined) {
return res.status(404).send('Not found'); return res.status(404).send('Not found');
} else { } else {
@ -109,75 +109,73 @@ export const serve_data = {
return res.status(200).send(data); return res.status(200).send(data);
} }
} else { } else {
item.source.getTile(z, x, y, (err, data, headers) => { item.source.getTile(z, x, y, (err, data, headers) => {
let isGzipped; let isGzipped;
if (err) { if (err) {
if (/does not exist/.test(err.message)) { if (/does not exist/.test(err.message)) {
return res.status(204).send(); return res.status(204).send();
} else {
return res
.status(500)
.header('Content-Type', 'text/plain')
.send(err.message);
}
} else { } else {
return res if (data == null) {
.status(500) return res.status(404).send('Not found');
.header('Content-Type', 'text/plain') } else {
.send(err.message); if (tileJSONFormat === 'pbf') {
} isGzipped =
} else { data.slice(0, 2).indexOf(Buffer.from([0x1f, 0x8b])) === 0;
if (data == null) { if (options.dataDecoratorFunc) {
return res.status(404).send('Not found'); if (isGzipped) {
} else { data = zlib.unzipSync(data);
if (tileJSONFormat === 'pbf') { isGzipped = false;
isGzipped = }
data.slice(0, 2).indexOf(Buffer.from([0x1f, 0x8b])) === 0; data = options.dataDecoratorFunc(id, 'data', data, z, x, y);
if (options.dataDecoratorFunc) { }
}
if (format === 'pbf') {
headers['Content-Type'] = 'application/x-protobuf';
} else if (format === 'geojson') {
headers['Content-Type'] = 'application/json';
if (isGzipped) { if (isGzipped) {
data = zlib.unzipSync(data); data = zlib.unzipSync(data);
isGzipped = false; isGzipped = false;
} }
data = options.dataDecoratorFunc(id, 'data', data, z, x, y);
}
}
if (format === 'pbf') {
headers['Content-Type'] = 'application/x-protobuf';
} else if (format === 'geojson') {
headers['Content-Type'] = 'application/json';
if (isGzipped) { const tile = new VectorTile(new Pbf(data));
data = zlib.unzipSync(data); const geojson = {
isGzipped = false; type: 'FeatureCollection',
} features: [],
};
const tile = new VectorTile(new Pbf(data)); for (const layerName in tile.layers) {
const geojson = { const layer = tile.layers[layerName];
type: 'FeatureCollection', for (let i = 0; i < layer.length; i++) {
features: [], const feature = layer.feature(i);
}; const featureGeoJSON = feature.toGeoJSON(x, y, z);
for (const layerName in tile.layers) { featureGeoJSON.properties.layer = layerName;
const layer = tile.layers[layerName]; geojson.features.push(featureGeoJSON);
for (let i = 0; i < layer.length; i++) { }
const feature = layer.feature(i);
const featureGeoJSON = feature.toGeoJSON(x, y, z);
featureGeoJSON.properties.layer = layerName;
geojson.features.push(featureGeoJSON);
} }
data = JSON.stringify(geojson);
} }
data = JSON.stringify(geojson); delete headers['ETag']; // do not trust the tile ETag -- regenerate
} headers['Content-Encoding'] = 'gzip';
delete headers['ETag']; // do not trust the tile ETag -- regenerate res.set(headers);
headers['Content-Encoding'] = 'gzip';
res.set(headers);
if (!isGzipped) { if (!isGzipped) {
data = zlib.gzipSync(data); data = zlib.gzipSync(data);
isGzipped = true; isGzipped = true;
} }
return res.status(200).send(data); return res.status(200).send(data);
}
} }
} });
}
});
}
}, },
); );

View file

@ -18,7 +18,8 @@ import MBTiles from '@mapbox/mbtiles';
import polyline from '@mapbox/polyline'; import polyline from '@mapbox/polyline';
import proj4 from 'proj4'; import proj4 from 'proj4';
import request from 'request'; import request from 'request';
import { getFontsPbf, getTileUrls, fixTileJSONCenter, GetPMtilesInfo } from './utils.js'; import { getFontsPbf, getTileUrls, fixTileJSONCenter } from './utils.js';
import { GetPMtilesInfo } from './pmtiles_adapter.js';
const FLOAT_PATTERN = '[+-]?(?:\\d+|\\d+.?\\d+)'; const FLOAT_PATTERN = '[+-]?(?:\\d+|\\d+.?\\d+)';
const PATH_PATTERN = const PATH_PATTERN =
@ -1451,7 +1452,8 @@ export const serve_rendered = {
// how to do this for multiple sources with different proj4 defs? // how to do this for multiple sources with different proj4 defs?
const to3857 = proj4('EPSG:3857'); const to3857 = proj4('EPSG:3857');
const toDataProj = proj4(metadata.proj4); const toDataProj = proj4(metadata.proj4);
repoobj.dataProjWGStoInternalWGS = (xy) => to3857.inverse(toDataProj.forward(xy)); repoobj.dataProjWGStoInternalWGS = (xy) =>
to3857.inverse(toDataProj.forward(xy));
} }
const type = source.type; const type = source.type;
@ -1465,9 +1467,11 @@ export const serve_rendered = {
delete source.scheme; delete source.scheme;
console.log(source); console.log(source);
if (!attributionOverride && if (
!attributionOverride &&
source.attribution && source.attribution &&
source.attribution.length > 0) { source.attribution.length > 0
) {
if (!tileJSON.attribution.includes(source.attribution)) { if (!tileJSON.attribution.includes(source.attribution)) {
if (tileJSON.attribution.length > 0) { if (tileJSON.attribution.length > 0) {
tileJSON.attribution += ' | '; tileJSON.attribution += ' | ';
@ -1483,52 +1487,59 @@ export const serve_rendered = {
if (!mbtilesFileStats.isFile() || mbtilesFileStats.size === 0) { if (!mbtilesFileStats.isFile() || mbtilesFileStats.size === 0) {
throw Error(`Not valid MBTiles file: ${mbtilesFile}`); throw Error(`Not valid MBTiles file: ${mbtilesFile}`);
} }
map.sources[name] = new MBTiles(mbtilesFile + '?mode=ro', (err) => { map.sources[name] = new MBTiles(
map.sources[name].getInfo((err, info) => { mbtilesFile + '?mode=ro',
if (err) { (err) => {
console.error(err); map.sources[name].getInfo((err, info) => {
return; if (err) {
} console.error(err);
return;
if (!repoobj.dataProjWGStoInternalWGS && info.proj4) {
// how to do this for multiple sources with different proj4 defs?
const to3857 = proj4('EPSG:3857');
const toDataProj = proj4(info.proj4);
repoobj.dataProjWGStoInternalWGS = (xy) =>
to3857.inverse(toDataProj.forward(xy));
}
const type = source.type;
info['extension'] = 'mbtiles';
Object.assign(source, info);
source.type = type;
source.tiles = [
// meta url which will be detected when requested
`mbtiles://${name}/{z}/{x}/{y}.${info.format || 'pbf'}`,
];
delete source.scheme;
if (options.dataDecoratorFunc) {
source = options.dataDecoratorFunc(name, 'tilejson', source);
}
if (
!attributionOverride &&
source.attribution &&
source.attribution.length > 0
) {
if (!tileJSON.attribution.includes(source.attribution)) {
if (tileJSON.attribution.length > 0) {
tileJSON.attribution += ' | ';
}
tileJSON.attribution += source.attribution;
} }
}
resolve(); if (!repoobj.dataProjWGStoInternalWGS && info.proj4) {
}); // how to do this for multiple sources with different proj4 defs?
}); const to3857 = proj4('EPSG:3857');
const toDataProj = proj4(info.proj4);
repoobj.dataProjWGStoInternalWGS = (xy) =>
to3857.inverse(toDataProj.forward(xy));
}
const type = source.type;
info['extension'] = 'mbtiles';
Object.assign(source, info);
source.type = type;
source.tiles = [
// meta url which will be detected when requested
`mbtiles://${name}/{z}/{x}/{y}.${info.format || 'pbf'}`,
];
delete source.scheme;
if (options.dataDecoratorFunc) {
source = options.dataDecoratorFunc(
name,
'tilejson',
source,
);
}
if (
!attributionOverride &&
source.attribution &&
source.attribution.length > 0
) {
if (!tileJSON.attribution.includes(source.attribution)) {
if (tileJSON.attribution.length > 0) {
tileJSON.attribution += ' | ';
}
tileJSON.attribution += source.attribution;
}
}
resolve();
});
},
);
}), }),
) );
} }
} }
} }

View file

@ -2,11 +2,8 @@
import path from 'path'; import path from 'path';
import fs from 'node:fs'; import fs from 'node:fs';
import * as fflate from 'fflate';
import clone from 'clone'; import clone from 'clone';
import glyphCompose from '@mapbox/glyph-pbf-composite'; import glyphCompose from '@mapbox/glyph-pbf-composite';
import PMTiles from 'pmtiles';
/** /**
* Generate new URL object * Generate new URL object
@ -165,286 +162,3 @@ export const getFontsPbf = (
return Promise.all(queue).then((values) => glyphCompose.combine(values)); return Promise.all(queue).then((values) => glyphCompose.combine(values));
}; };
function ReadFileBytes(fd, sharedBuffer, offset) {
return new Promise((resolve, reject) => {
fs.read(
fd,
sharedBuffer,
0,
sharedBuffer.length,
offset,
(err) => {
if(err) { return reject(err); }
resolve();
}
);
});
}
const ReadBytes = async (filePath, offset, size) => {
const sharedBuffer = Buffer.alloc(size);
const stats = fs.statSync(filePath); // file details
const fd = fs.openSync(filePath); // file descriptor
let bytesRead = 0; // how many bytes were read
let end = size;
for(let i = 0; i < size; i++) {
let postion = offset + i
await ReadFileBytes(fd, sharedBuffer, postion);
bytesRead = (i + 1) * size;
if(bytesRead > stats.size) {
// When we reach the end of file,
// we have to calculate how many bytes were actually read
end = size - (bytesRead - stats.size);
}
if(bytesRead === size) {break;}
}
return BufferToArrayBuffer(sharedBuffer);
}
function BufferToArrayBuffer(buffer) {
const arrayBuffer = new ArrayBuffer(buffer.length);
const view = new Uint8Array(arrayBuffer);
for (let i = 0; i < buffer.length; ++i) {
view[i] = buffer[i];
}
return arrayBuffer;
}
function ArrayBufferToBuffer(ab) {
var buffer = Buffer.alloc(ab.byteLength);
var view = new Uint8Array(ab);
for (var i = 0; i < buffer.length; ++i) {
buffer[i] = view[i];
}
return buffer;
}
const PMTilesLocalSource = class {
constructor(file) {
this.file = file;
}
getKey() {
return this.file.name;
}
async getBytes(offset, length) {
const blob = this.file.slice(offset, offset + length);
return { data: blob };
}
};
export const GetPMtilesHeader = async (pmtilesFile) => {
var buffer = await ReadBytes(pmtilesFile, 0, 127)
const header = PMTiles.bytesToHeader(buffer, undefined)
return header
}
export const GetPMtilesDecompress = async (header, buffer) => {
const compression = header.internalCompression;
var decompressed;
if (compression === PMTiles.Compression.None || compression === PMTiles.Compression.Unknown) {
decompressed = buffer;
} else if (compression === PMTiles.Compression.Gzip) {
decompressed = fflate.decompressSync(new Uint8Array(buffer));
} else {
throw Error("Compression method not supported");
}
return decompressed
}
export const GetPMtilesInfo = async (pmtilesFile) => {
var header = await GetPMtilesHeader(pmtilesFile)
const jsonMetadataOffset = header.jsonMetadataOffset;
const jsonMetadataLength = header.jsonMetadataLength;
const compression = header.internalCompression;
const metadataBytes = await ReadBytes(pmtilesFile, jsonMetadataOffset, jsonMetadataLength)
const metadataDecomp = await GetPMtilesDecompress(header, metadataBytes)
const dec = new TextDecoder("utf-8");
const metadata = JSON.parse(dec.decode(metadataDecomp));
var tileType
switch (header.tileType) {
case 0:
tileType = "Unknown"
break;
case 1:
tileType = "pbf"
break;
case 2:
tileType = "png"
break;
case 3:
tileType = "jpg"
break;
case 4:
tileType = "webp"
break;
case 5:
tileType = "avif"
break;
}
metadata['format'] = tileType;
if(header.minLat != 0 && header.minLon != 0 && header.maxLat != 0 && header.maxLon != 0) {
const bounds = [header.minLat, header.minLon, header.maxLat, header.maxLon]
metadata['bounds'] = bounds;
}
if(header.centerLon != 0 && header.centerLat != 0) {
const center = [header.centerLon, header.centerLat, header.centerLat]
metadata['center'] = center;
}
metadata['minzoom'] = header.minZoom;
metadata['maxzoom'] = header.maxZoom;
return { header: header, metadata: metadata };
}
function toNum(low, high) {
return (high >>> 0) * 0x100000000 + (low >>> 0);
}
function readVarintRemainder(l, p) {
const buf = p.buf;
let h, b;
b = buf[p.pos++];
h = (b & 0x70) >> 4;
if (b < 0x80) return toNum(l, h);
b = buf[p.pos++];
h |= (b & 0x7f) << 3;
if (b < 0x80) return toNum(l, h);
b = buf[p.pos++];
h |= (b & 0x7f) << 10;
if (b < 0x80) return toNum(l, h);
b = buf[p.pos++];
h |= (b & 0x7f) << 17;
if (b < 0x80) return toNum(l, h);
b = buf[p.pos++];
h |= (b & 0x7f) << 24;
if (b < 0x80) return toNum(l, h);
b = buf[p.pos++];
h |= (b & 0x01) << 31;
if (b < 0x80) return toNum(l, h);
throw new Error("Expected varint not more than 10 bytes");
}
export function readVarint(p) {
const buf = p.buf;
let val, b;
b = buf[p.pos++];
val = b & 0x7f;
if (b < 0x80) return val;
b = buf[p.pos++];
val |= (b & 0x7f) << 7;
if (b < 0x80) return val;
b = buf[p.pos++];
val |= (b & 0x7f) << 14;
if (b < 0x80) return val;
b = buf[p.pos++];
val |= (b & 0x7f) << 21;
if (b < 0x80) return val;
b = buf[p.pos];
val |= (b & 0x0f) << 28;
return readVarintRemainder(val, p);
}
function deserializeIndex(buffer) {
const p = { buf: new Uint8Array(buffer), pos: 0 };
const numEntries = readVarint(p);
var entries = [];
let lastId = 0;
for (let i = 0; i < numEntries; i++) {
const v = readVarint(p);
entries.push({ tileId: lastId + v, offset: 0, length: 0, runLength: 1 });
lastId += v;
}
for (let i = 0; i < numEntries; i++) {
entries[i].runLength = readVarint(p);
}
for (let i = 0; i < numEntries; i++) {
entries[i].length = readVarint(p);
}
for (let i = 0; i < numEntries; i++) {
const v = readVarint(p);
if (v === 0 && i > 0) {
entries[i].offset = entries[i - 1].offset + entries[i - 1].length;
} else {
entries[i].offset = v - 1;
}
}
return entries;
}
export const GetPMtilesTile = async (pmtilesFile, z, x, y) => {
const tile_id = PMTiles.zxyToTileId(z, x, y);
const header = await GetPMtilesHeader(pmtilesFile)
if (z < header.minZoom || z > header.maxZoom) {
return undefined;
}
let rootDirectoryOffset = header.rootDirectoryOffset;
let rootDirectoryLength = header.rootDirectoryLength;
for (let depth = 0; depth <= 3; depth++) {
const RootDirectoryBytes = await ReadBytes(pmtilesFile, rootDirectoryOffset, rootDirectoryLength)
const RootDirectoryBytesaDecomp = await GetPMtilesDecompress(header, RootDirectoryBytes)
const Directory = deserializeIndex(RootDirectoryBytesaDecomp)
const entry = PMTiles.findTile(Directory, tile_id);
if (entry) {
if (entry.runLength > 0) {
const EntryBytesArrayBuff = await ReadBytes(pmtilesFile, header.tileDataOffset + entry.offset, entry.length)
const EntryBytes = ArrayBufferToBuffer(EntryBytesArrayBuff)
//const EntryDecomp = await GetPMtilesDecompress(header, EntryBytes)
const EntryTileType = GetPmtilesTileType(header.tileType)
return {data: EntryBytes, header: EntryTileType.header}
} else {
rootDirectoryOffset = header.leafDirectoryOffset + entry.offset;
rootDirectoryLength = entry.length;
}
} else {
return undefined;
}
}
}
function GetPmtilesTileType(typenum) {
let head = {};
let tileType
switch (typenum) {
case 0:
tileType = "Unknown"
break;
case 1:
tileType = "pbf"
head['Content-Type'] = 'application/x-protobuf';
break;
case 2:
tileType = "png"
head['Content-Type'] = 'image/png';
break;
case 3:
tileType = "jpg"
head['Content-Type'] = 'image/jpeg';
break;
case 4:
tileType = "webp"
head['Content-Type'] = 'image/webp';
break;
case 5:
tileType = "avif"
head['Content-Type'] = 'image/avif';
break;
}
return {type: tileType, header: head}
}