feat: failed attempt to load big files
Signed-off-by: Andrew Calcutt <acalcutt@techidiots.net>
parent 3df423e55f
commit b1488c65bc
1 changed file with 75 additions and 13 deletions
src/utils.js: 88 lines changed
@@ -6,6 +6,7 @@ import fs from 'node:fs';
import clone from 'clone';
import glyphCompose from '@mapbox/glyph-pbf-composite';
import PMTiles from 'pmtiles';
import { decompressSync } from "fflate";

/**
 * Generate new URL object
@@ -165,6 +166,53 @@ export const getFontsPbf = (
  return Promise.all(queue).then((values) => glyphCompose.combine(values));
};

function readBytes(fd, sharedBuffer) {
  return new Promise((resolve, reject) => {
    fs.read(
      fd,
      sharedBuffer,
      0,
      sharedBuffer.length,
      null,
      (err) => {
        if(err) { return reject(err); }
        resolve();
      }
    );
  });
}

const ReadBytes = async (filePath, offset, size) => {
  const sharedBuffer = Buffer.alloc(size);
  const stats = fs.statSync(filePath); // file details
  const fd = fs.openSync(filePath); // file descriptor
  let bytesRead = 0; // how many bytes were read
  let end = size;

  for(let i = offset; i < Math.ceil(stats.size / size); i++) {
    await readBytes(fd, sharedBuffer);
    bytesRead = (i + 1) * size;
    if(bytesRead > stats.size) {
      // When we reach the end of file,
      // we have to calculate how many bytes were actually read
      end = size - (bytesRead - stats.size);
    }
    if(bytesRead === size) {break;}
  }

  return sharedBuffer;
}

function BufferToArrayBuffer(buffer) {
  const arrayBuffer = new ArrayBuffer(buffer.length);
  const view = new Uint8Array(arrayBuffer);
  for (let i = 0; i < buffer.length; ++i) {
    view[i] = buffer[i];
  }
  const v = new DataView(arrayBuffer);
  return arrayBuffer;
}

const PMTilesLocalSource = class {
  constructor(file) {
    this.file = file;
@@ -178,19 +226,33 @@ const PMTilesLocalSource = class {
  }
};

export const GetPMtilesInfo = async (pmtilesFile) => {
  const buffer = fs.readFileSync(pmtilesFile)
  const arrayBuffer = new ArrayBuffer(buffer.length);
  const view = new Uint8Array(arrayBuffer);
  for (let i = 0; i < buffer.length; ++i) {
    view[i] = buffer[i];
  }
  let source = new PMTilesLocalSource(arrayBuffer);
  let pmtiles = new PMTiles.PMTiles(source);

  const header = await pmtiles.getHeader();
  const metadata = await pmtiles.getMetadata();

export const GetPMtilesInfo = async (pmtilesFile) => {
  var buffer = await ReadBytes(pmtilesFile, 0, 16384)
  const headerBuf = BufferToArrayBuffer(buffer);
  const header = PMTiles.bytesToHeader(headerBuf, undefined)
  const compression = header.internalCompression
  console.log(header);

  const jsonMetadataOffset = header.jsonMetadataOffset;
  const jsonMetadataLength = header.jsonMetadataLength;
  var metadataBytes = await ReadBytes(pmtilesFile, jsonMetadataOffset, jsonMetadataLength)
  const metadataBuf = BufferToArrayBuffer(metadataBytes);

  console.log(metadataBuf)
  var metadata;
  if (compression === PMTiles.Compression.None || compression === PMTiles.Compression.Unknown) {
    metadata = metadataBuf;
  } else if (compression === PMTiles.Compression.Gzip) {
    metadata = decompressSync(new Uint8Array(metadataBuf));
    console.log(metadata)
  } else {
    throw Error("Compression method not supported");
  }

  const bounds = [header.minLat, header.minLon, header.maxLat, header.maxLon]
  const center = [header.centerLon, header.centerLat, header.centerZoom]
  return { source: pmtiles, header: header, metadata: metadata, bounds: bounds, center: center };
  const center = [header.centerLon, header.centerLat, header.centerLat]
  return { header: header, metadata: metadata, bounds: bounds, center: center };
}
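Note (not part of the commit): the diff replaces the readFileSync-based GetPMtilesInfo with a version that reads only the header and metadata byte ranges, but the committed ReadBytes passes null as the position argument to fs.read, so each call reads from the file's current position rather than seeking to the requested offset, and BufferToArrayBuffer copies the buffer byte by byte. A minimal sketch of an offset-based range read with fs/promises, using an illustrative helper name readRange that is not from this repository:

import { open } from 'node:fs/promises';

// Illustrative helper, not from the commit: read `length` bytes starting
// at byte `offset` without loading the whole file into memory.
async function readRange(filePath, offset, length) {
  const fd = await open(filePath, 'r');
  try {
    const buffer = Buffer.alloc(length);
    // Passing `offset` as the position argument makes the read start there.
    const { bytesRead } = await fd.read(buffer, 0, length, offset);
    // Return an ArrayBuffer covering exactly the bytes that were read,
    // instead of copying the Buffer element by element.
    return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + bytesRead);
  } finally {
    await fd.close();
  }
}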
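In the new code path, metadata ends up holding raw bytes: either the ArrayBuffer produced by BufferToArrayBuffer or the Uint8Array returned by decompressSync. If a parsed metadata object is wanted, those bytes still have to be decoded and JSON-parsed. A short sketch, assuming gzip-compressed JSON metadata and an illustrative helper name parseMetadata:

import { decompressSync } from 'fflate';

// Illustrative helper, not from the commit: turn the metadata bytes read
// from the archive into a plain object.
function parseMetadata(metadataBuf, isGzipped) {
  const bytes = new Uint8Array(metadataBuf);
  const raw = isGzipped ? decompressSync(bytes) : bytes;
  return JSON.parse(new TextDecoder('utf-8').decode(raw));
}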