Revert "feat: failed attempt to load big files"

This reverts commit 727187bd7a2327c9df0b62bbf04cfc523554996f.
Andrew Calcutt 2023-10-07 01:25:39 -04:00
parent b1488c65bc
commit 1984c143ac


@@ -6,7 +6,6 @@ import fs from 'node:fs';
 import clone from 'clone';
 import glyphCompose from '@mapbox/glyph-pbf-composite';
 import PMTiles from 'pmtiles';
-import { decompressSync } from "fflate";
 
 /**
  * Generate new URL object
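Note on the dropped import: decompressSync was only needed by the removed manual-header path in the hunks below, since PMTiles typically stores its embedded JSON metadata gzip-compressed and the restored code lets pmtiles.getMetadata() handle that internally. For reference, a minimal standalone sketch of round-tripping gzipped JSON with fflate (illustration only, not project code):

import { gzipSync, decompressSync, strToU8, strFromU8 } from 'fflate';

// decompressSync auto-detects gzip/zlib/raw-deflate input, which is why
// the removed code below could pass the raw metadata bytes straight to it.
const gz = gzipSync(strToU8('{"name":"demo"}'));
const metadata = JSON.parse(strFromU8(decompressSync(gz)));
console.log(metadata.name); // "demo"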
@@ -166,53 +165,6 @@ export const getFontsPbf = (
   return Promise.all(queue).then((values) => glyphCompose.combine(values));
 };
 
-function readBytes(fd, sharedBuffer) {
-  return new Promise((resolve, reject) => {
-    fs.read(
-      fd,
-      sharedBuffer,
-      0,
-      sharedBuffer.length,
-      null,
-      (err) => {
-        if(err) { return reject(err); }
-        resolve();
-      }
-    );
-  });
-}
-
-const ReadBytes = async (filePath, offset, size) => {
-  const sharedBuffer = Buffer.alloc(size);
-  const stats = fs.statSync(filePath); // file details
-  const fd = fs.openSync(filePath); // file descriptor
-  let bytesRead = 0; // how many bytes were read
-  let end = size;
-  for(let i = offset; i < Math.ceil(stats.size / size); i++) {
-    await readBytes(fd, sharedBuffer);
-    bytesRead = (i + 1) * size;
-    if(bytesRead > stats.size) {
-      // When we reach the end of file,
-      // we have to calculate how many bytes were actually read
-      end = size - (bytesRead - stats.size);
-    }
-    if(bytesRead === size) {break;}
-  }
-  return sharedBuffer;
-}
-
-function BufferToArrayBuffer(buffer) {
-  const arrayBuffer = new ArrayBuffer(buffer.length);
-  const view = new Uint8Array(arrayBuffer);
-  for (let i = 0; i < buffer.length; ++i) {
-    view[i] = buffer[i];
-  }
-  const v = new DataView(arrayBuffer);
-  return arrayBuffer;
-}
-
 const PMTilesLocalSource = class {
   constructor(file) {
     this.file = file;
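Review note on the removed helpers above: readBytes passes null as the position argument of fs.read, so every call reads from the advancing file position rather than a caller-chosen offset, and ReadBytes reuses a single sharedBuffer while treating the byte offset as a chunk index, so ranged reads (such as the metadata read below) never land where intended. That is the likely reason the big-file attempt failed. A positional range read would look roughly like this (readRange is a hypothetical sketch, not code from this repository):

import { open } from 'node:fs/promises';

// Read `size` bytes starting at byte `offset` without loading the whole file.
async function readRange(filePath, offset, size) {
  const fh = await open(filePath, 'r');
  try {
    const buf = Buffer.alloc(size);
    // An explicit position argument makes fs read at the offset; passing
    // null (as the reverted helper did) reads wherever the descriptor is.
    const { bytesRead } = await fh.read(buf, 0, size, offset);
    return buf.subarray(0, bytesRead);
  } finally {
    await fh.close();
  }
}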
@@ -226,33 +178,19 @@ const PMTilesLocalSource = class {
   }
 };
 
 export const GetPMtilesInfo = async (pmtilesFile) => {
-  var buffer = await ReadBytes(pmtilesFile, 0, 16384)
-  const headerBuf = BufferToArrayBuffer(buffer);
-  const header = PMTiles.bytesToHeader(headerBuf, undefined)
-  const compression = header.internalCompression
-  console.log(header);
-  const jsonMetadataOffset = header.jsonMetadataOffset;
-  const jsonMetadataLength = header.jsonMetadataLength;
-  var metadataBytes = await ReadBytes(pmtilesFile, jsonMetadataOffset, jsonMetadataLength)
-  const metadataBuf = BufferToArrayBuffer(metadataBytes);
-  console.log(metadataBuf)
-  var metadata;
-  if (compression === PMTiles.Compression.None || compression === PMTiles.Compression.Unknown) {
-    metadata = metadataBuf;
-  } else if (compression === PMTiles.Compression.Gzip) {
-    metadata = decompressSync(new Uint8Array(metadataBuf));
-    console.log(metadata)
-  } else {
-    throw Error("Compression method not supported");
+  const buffer = fs.readFileSync(pmtilesFile)
+  const arrayBuffer = new ArrayBuffer(buffer.length);
+  const view = new Uint8Array(arrayBuffer);
+  for (let i = 0; i < buffer.length; ++i) {
+    view[i] = buffer[i];
   }
+  let source = new PMTilesLocalSource(arrayBuffer);
+  let pmtiles = new PMTiles.PMTiles(source);
+  const header = await pmtiles.getHeader();
+  const metadata = await pmtiles.getMetadata();
   const bounds = [header.minLat, header.minLon, header.maxLat, header.maxLon]
-  const center = [header.centerLon, header.centerLat, header.centerLat]
-  return { header: header, metadata: metadata, bounds: bounds, center: center };
+  const center = [header.centerLon, header.centerLat, header.centerZoom]
+  return { source: pmtiles, header: header, metadata: metadata, bounds: bounds, center: center };
 }
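The restored version goes back to reading the whole file into memory (the limitation the reverted commit had tried to remove), and it also returns the pmtiles source and builds center from header.centerZoom rather than a duplicated header.centerLat. One simplification that is not part of this commit: a Node Buffer is already a view over an ArrayBuffer, so the byte-copy loop could become a slice (a behaviorally equivalent sketch, demo path assumed):

import fs from 'node:fs';

const pmtilesFile = './data/demo.pmtiles'; // hypothetical path
const buffer = fs.readFileSync(pmtilesFile);
// Slice by byteOffset/byteLength so only this Buffer's bytes are copied,
// not the whole (possibly pooled) underlying ArrayBuffer.
const arrayBuffer = buffer.buffer.slice(
  buffer.byteOffset,
  buffer.byteOffset + buffer.byteLength,
);

Hypothetical usage of the restored export:

const info = await GetPMtilesInfo('./data/demo.pmtiles');
console.log(info.header.minZoom, info.header.maxZoom, info.bounds, info.center);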