Merge pull request #555 from zod/pbfparser

Replace XML parser with pbfparser
afischerdev 2023-05-20 15:37:12 +02:00 committed by GitHub
commit 14b1ece960
13 changed files with 116 additions and 366 deletions

View file

@@ -2,11 +2,12 @@ plugins {
   id 'java-library'
 }
 
 dependencies {
   implementation project(':brouter-codec')
   implementation project(':brouter-util')
   implementation project(':brouter-expressions')
+  implementation group: 'org.openstreetmap.osmosis', name: 'osmosis-osm-binary', version: '0.48.3'
   testImplementation('junit:junit:4.13.1')
 }

View file

@@ -5,14 +5,18 @@ import com.google.protobuf.InvalidProtocolBufferException;
 import org.openstreetmap.osmosis.osmbinary.Fileformat;
 import org.openstreetmap.osmosis.osmbinary.Osmformat;
 
+import btools.util.LongList;
+
 import java.io.IOException;
-import java.util.*;
-import java.util.logging.Level;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
 import java.util.zip.DataFormatException;
 import java.util.zip.Inflater;
-
-import btools.util.LongList;
 
 /**
  * Converts PBF block data into decoded entities ready to be passed into an Osmosis pipeline. This
  * class is designed to be passed into a pool of worker threads to allow multi-threaded decoding.
@@ -82,8 +86,8 @@ public class BPbfBlobDecoder {
     // Build the list of active and unsupported features in the file.
     List<String> supportedFeatures = Arrays.asList("OsmSchema-V0.6", "DenseNodes");
-    List<String> activeFeatures = new ArrayList<String>();
-    List<String> unsupportedFeatures = new ArrayList<String>();
+    List<String> activeFeatures = new ArrayList<>();
+    List<String> unsupportedFeatures = new ArrayList<>();
     for (String feature : header.getRequiredFeaturesList()) {
       if (supportedFeatures.contains(feature)) {
         activeFeatures.add(feature);
@@ -106,7 +110,7 @@ public class BPbfBlobDecoder {
     Iterator<Integer> keyIterator = keys.iterator();
     Iterator<Integer> valueIterator = values.iterator();
     if (keyIterator.hasNext()) {
-      Map<String, String> tags = new HashMap<String, String>();
+      Map<String, String> tags = new HashMap<>();
       while (keyIterator.hasNext()) {
         String key = fieldDecoder.decodeString(keyIterator.next());
         String value = fieldDecoder.decodeString(valueIterator.next());
@@ -155,7 +159,7 @@ public class BPbfBlobDecoder {
       int valueIndex = keysValuesIterator.next();
 
       if (tags == null) {
-        tags = new HashMap<String, String>();
+        tags = new HashMap<>();
       }
       tags.put(fieldDecoder.decodeString(keyIndex), fieldDecoder.decodeString(valueIndex));
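Editor's note: the tag-decoding hunks above follow the usual OSM PBF convention, as far as I understand it: an entity's keys and values arrive as parallel lists of integer indices into the block's string table, and the decoder pairs the two iterators. A minimal, self-contained sketch of that pairing (hypothetical string table, indices and class name, not the real decoder state):

```
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

public class TagPairingSketch {
  public static void main(String[] args) {
    // hypothetical per-block string table; index 0 is conventionally unused
    String[] stringTable = {"", "highway", "residential", "name", "Main Street"};
    // parallel index lists, as they would come out of the decoded block
    List<Integer> keys = Arrays.asList(1, 3);
    List<Integer> values = Arrays.asList(2, 4);

    Map<String, String> tags = new HashMap<>();
    Iterator<Integer> keyIterator = keys.iterator();
    Iterator<Integer> valueIterator = values.iterator();
    while (keyIterator.hasNext()) {
      // same pairing as in the hunk above, with a plain array lookup
      // standing in for fieldDecoder.decodeString(...)
      tags.put(stringTable[keyIterator.next()], stringTable[valueIterator.next()]);
    }
    System.out.println(tags); // {name=Main Street, highway=residential}
  }
}
```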

View file

@@ -1,10 +1,17 @@
 package btools.mapcreator;
 
+import org.openstreetmap.osmosis.osmbinary.Fileformat;
+
+import java.io.BufferedInputStream;
 import java.io.BufferedReader;
+import java.io.DataInputStream;
+import java.io.EOFException;
 import java.io.File;
 import java.io.FileInputStream;
-import java.io.InputStreamReader;
-import java.util.zip.GZIPInputStream;
+import java.util.HashMap;
+import java.util.Map;
+
+import btools.util.LongList;
 
 /**
  * Parser for OSM data
@@ -22,179 +29,118 @@ public class OsmParser extends MapCreatorBase {
                       NodeListener nListener,
                       WayListener wListener,
                       RelationListener rListener) throws Exception {
     this.nListener = nListener;
     this.wListener = wListener;
     this.rListener = rListener;
 
-    if (mapFile == null) {
-      _br = new BufferedReader(new InputStreamReader(System.in));
-    } else {
-      if (mapFile.getName().endsWith(".gz")) {
-        _br = new BufferedReader(new InputStreamReader(new GZIPInputStream(new FileInputStream(mapFile))));
-      } else {
-        _br = new BufferedReader(new InputStreamReader(new FileInputStream(mapFile)));
-      }
-    }
-
-    for (; ; ) {
-      String line = _br.readLine();
-      if (line == null) break;
-
-      if (checkNode(line)) continue;
-      if (checkWay(line)) continue;
-      if (checkRelation(line)) continue;
-      if (checkChangeset(line)) continue;
-    }
-
-    if (mapFile != null) {
-      _br.close();
-    }
-  }
-
-  private boolean checkNode(String line) throws Exception {
-    int idx0 = line.indexOf("<node id=\"");
-    if (idx0 < 0) return false;
-    idx0 += 10;
-    int idx1 = line.indexOf('"', idx0);
-    long nodeId = Long.parseLong(line.substring(idx0, idx1));
-
-    int idx2 = line.indexOf(" lat=\"");
-    if (idx2 < 0) return false;
-    idx2 += 6;
-    int idx3 = line.indexOf('"', idx2);
-    double lat = Double.parseDouble(line.substring(idx2, idx3));
-
-    int idx4 = line.indexOf(" lon=\"");
-    if (idx4 < 0) return false;
-    idx4 += 6;
-    int idx5 = line.indexOf('"', idx4);
-    double lon = Double.parseDouble(line.substring(idx4, idx5));
-
-    NodeData n = new NodeData(nodeId, lon, lat);
-
-    if (!line.endsWith("/>")) {
-      // read additional tags
-      for (; ; ) {
-        String l2 = _br.readLine();
-        if (l2 == null) return false;
-        int i2;
-        if ((i2 = l2.indexOf("<tag k=\"")) >= 0) { // property-tag
-          i2 += 8;
-          int ri2 = l2.indexOf('"', i2);
-          String key = l2.substring(i2, ri2);
-          i2 = l2.indexOf(" v=\"", ri2);
-          if (i2 >= 0) {
-            i2 += 4;
-            int ri3 = l2.indexOf('"', i2);
-            String value = l2.substring(i2, ri3);
-            n.putTag(key, value);
-          }
-        } else if (l2.indexOf("</node>") >= 0) { // end-tag
-          break;
-        }
-      }
-    }
-    nListener.nextNode(n);
-    return true;
-  }
-
-  private boolean checkWay(String line) throws Exception {
-    int idx0 = line.indexOf("<way id=\"");
-    if (idx0 < 0) return false;
-
-    idx0 += 9;
-    int idx1 = line.indexOf('"', idx0);
-    long id = Long.parseLong(line.substring(idx0, idx1));
-
-    WayData w = new WayData(id);
-
-    // read the nodes
-    for (; ; ) {
-      String l2 = _br.readLine();
-      if (l2 == null) return false;
-      int i2;
-      if ((i2 = l2.indexOf("<nd ref=\"")) >= 0) { // node reference
-        i2 += 9;
-        int ri2 = l2.indexOf('"', i2);
-        long nid = Long.parseLong(l2.substring(i2, ri2));
-        w.nodes.add(nid);
-      } else if ((i2 = l2.indexOf("<tag k=\"")) >= 0) { // property-tag
-        i2 += 8;
-        int ri2 = l2.indexOf('"', i2);
-        String key = l2.substring(i2, ri2);
-        i2 = l2.indexOf(" v=\"", ri2);
-        if (i2 >= 0) {
-          i2 += 4;
-          int ri3 = l2.indexOf('"', i2);
-          String value = l2.substring(i2, ri3);
-          w.putTag(key, value);
-        }
-      } else if (l2.indexOf("</way>") >= 0) { // end-tag
-        break;
-      }
-    }
-    wListener.nextWay(w);
-    return true;
-  }
-
-  private boolean checkChangeset(String line) throws Exception {
-    int idx0 = line.indexOf("<changeset id=\"");
-    if (idx0 < 0) return false;
-
-    if (!line.endsWith("/>")) {
-      int loopcheck = 0;
-      for (; ; ) {
-        String l2 = _br.readLine();
-        if (l2.indexOf("</changeset>") >= 0 || ++loopcheck > 10000) break;
-      }
-    }
-    return true;
-  }
-
-  private boolean checkRelation(String line) throws Exception {
-    int idx0 = line.indexOf("<relation id=\"");
-    if (idx0 < 0) return false;
-    idx0 += 14;
-    int idx1 = line.indexOf('"', idx0);
-    long rid = Long.parseLong(line.substring(idx0, idx1));
-
-    RelationData r = new RelationData(rid);
-
-    // read the nodes
-    for (; ; ) {
-      String l2 = _br.readLine();
-      if (l2 == null) return false;
-
-      int i2;
-      if ((i2 = l2.indexOf("<member type=\"way\" ref=\"")) >= 0) { // node reference
-        i2 += 24;
-        int ri2 = l2.indexOf('"', i2);
-        long wid = Long.parseLong(l2.substring(i2, ri2));
-        r.ways.add(wid);
-      } else if ((i2 = l2.indexOf("<tag k=\"")) >= 0) { // property-tag
-        i2 += 8;
-        int ri2 = l2.indexOf('"', i2);
-        String key = l2.substring(i2, ri2);
-        i2 = l2.indexOf(" v=\"", ri2);
-        if (i2 >= 0) {
-          i2 += 4;
-          int ri3 = l2.indexOf('"', i2);
-          String value = l2.substring(i2, ri3);
-          r.putTag(key, value);
-        }
-      } else if (l2.indexOf("</relation>") >= 0) { // end-tag
-        break;
-      }
-    }
-    rListener.nextRelation(r);
-    return true;
-  }
-}
+    System.out.println("*** PBF Parsing: " + mapFile);
+
+    // once more for testing
+    int rawBlobCount = 0;
+
+    long bytesRead = 0L;
+    Boolean avoidMapPolling = Boolean.getBoolean("avoidMapPolling");
+    if (!avoidMapPolling) {
+      // wait for file to become available
+      while (!mapFile.exists()) {
+        System.out.println("--- waiting for " + mapFile + " to become available");
+        Thread.sleep(10000);
+      }
+    }
+
+    long currentSize = mapFile.length();
+    long currentSizeTime = System.currentTimeMillis();
+
+    DataInputStream dis = new DataInputStream(new BufferedInputStream(new FileInputStream(mapFile)));
+    for (; ; ) {
+      if (!avoidMapPolling) {
+        // continue reading if either more then a 100 MB unread, or the current-size is known for more than 2 Minutes
+        while (currentSize - bytesRead < 100000000L) {
+          long newSize = mapFile.length();
+          if (newSize != currentSize) {
+            currentSize = newSize;
+            currentSizeTime = System.currentTimeMillis();
+          } else if (System.currentTimeMillis() - currentSizeTime > 120000) {
+            break;
+          }
+          if (currentSize - bytesRead < 100000000L) {
+            System.out.println("--- waiting for more data, currentSize=" + currentSize + " bytesRead=" + bytesRead);
+            Thread.sleep(10000);
+          }
+        }
+      }
+
+      int headerLength;
+      try {
+        headerLength = dis.readInt();
+        bytesRead += 4;
+      } catch (EOFException e) {
+        break;
+      }
+
+      byte[] headerBuffer = new byte[headerLength];
+      dis.readFully(headerBuffer);
+      bytesRead += headerLength;
+      Fileformat.BlobHeader blobHeader = Fileformat.BlobHeader.parseFrom(headerBuffer);
+
+      byte[] blobData = new byte[blobHeader.getDatasize()];
+      dis.readFully(blobData);
+      bytesRead += blobData.length;
+
+      new BPbfBlobDecoder(blobHeader.getType(), blobData, this).process();
+
+      rawBlobCount++;
+    }
+    dis.close();
+    System.out.println("read raw blobs: " + rawBlobCount);
+  }
+
+  public void addNode(long nid, Map<String, String> tags, double lat, double lon) {
+    NodeData n = new NodeData(nid, lon, lat);
+    n.setTags((HashMap<String, String>) tags);
+    try {
+      nListener.nextNode(n);
+    } catch (Exception e) {
+      throw new RuntimeException("error writing node: " + e);
+    }
+  }
+
+  public void addWay(long wid, Map<String, String> tags, LongList nodes) {
+    WayData w = new WayData(wid, nodes);
+    w.setTags((HashMap<String, String>) tags);
+    try {
+      wListener.nextWay(w);
+    } catch (Exception e) {
+      throw new RuntimeException("error writing way: " + e);
+    }
+  }
+
+  public void addRelation(long rid, Map<String, String> tags, LongList wayIds, LongList fromWid, LongList toWid, LongList viaNid) {
+    RelationData r = new RelationData(rid, wayIds);
+    r.setTags((HashMap<String, String>) tags);
+    try {
+      rListener.nextRelation(r);
+      if (fromWid == null || toWid == null || viaNid == null || viaNid.size() != 1) {
+        // dummy-TR for each viaNid
+        for (int vi = 0; vi < (viaNid == null ? 0 : viaNid.size()); vi++) {
+          rListener.nextRestriction(r, 0L, 0L, viaNid.get(vi));
+        }
+        return;
+      }
+      for (int fi = 0; fi < fromWid.size(); fi++) {
+        for (int ti = 0; ti < toWid.size(); ti++) {
+          rListener.nextRestriction(r, fromWid.get(fi), toWid.get(ti), viaNid.get(0));
+        }
+      }
+    } catch (Exception e) {
+      throw new RuntimeException("error writing relation", e);
+    }
+  }
+}
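Editor's note: the new readMap loop relies on the standard OSM PBF framing, a 4-byte big-endian length, then a Fileformat.BlobHeader of that length, then getDatasize() bytes of blob payload. A minimal sketch of the same framing that only lists blob types (hypothetical class name PbfBlobLister and a command-line file argument; payload decoding, done by BPbfBlobDecoder in the real code, is omitted):

```
import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.EOFException;
import java.io.FileInputStream;

import org.openstreetmap.osmosis.osmbinary.Fileformat;

public class PbfBlobLister {
  public static void main(String[] args) throws Exception {
    try (DataInputStream dis = new DataInputStream(
        new BufferedInputStream(new FileInputStream(args[0])))) {
      int blobs = 0;
      for (; ; ) {
        int headerLength;
        try {
          headerLength = dis.readInt(); // 4-byte big-endian header length
        } catch (EOFException e) {
          break; // clean end of file
        }
        byte[] headerBuffer = new byte[headerLength];
        dis.readFully(headerBuffer);
        Fileformat.BlobHeader blobHeader = Fileformat.BlobHeader.parseFrom(headerBuffer);

        // read (and here ignore) the payload; readMap hands it to BPbfBlobDecoder
        byte[] blobData = new byte[blobHeader.getDatasize()];
        dis.readFully(blobData);

        System.out.println("blob " + (++blobs) + ": type=" + blobHeader.getType()
            + " size=" + blobData.length);
      }
    }
  }
}
```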

View file

@@ -9,8 +9,10 @@ import java.net.URL;
 public class MapcreatorTest {
   @Test
   public void mapcreatorTest() throws Exception {
-    URL mapurl = this.getClass().getResource("/dreieich.osm.gz");
-    Assert.assertNotNull("test-osm-map dreieich.osm not found", mapurl);
+    System.setProperty("avoidMapPolling", "true");
+
+    URL mapurl = this.getClass().getResource("/dreieich.pbf");
+    Assert.assertNotNull("test-osm-map dreieich.pbf not found", mapurl);
     File mapFile = new File(mapurl.getFile());
     File workingDir = mapFile.getParentFile();
     File profileDir = new File(workingDir, "/../../../../misc/profiles2");
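Editor's note: the test sets avoidMapPolling as a JVM system property because OsmParser reads it with Boolean.getBoolean, which consults system properties (not environment variables); without it the parser would poll for the test fixture as if it were a still-growing planet dump. A tiny sketch of that flag handling (hypothetical demo class, not part of the test):

```
public class AvoidMapPollingDemo {
  public static void main(String[] args) {
    // equivalent to passing -DavoidMapPolling=true on the java command line
    System.setProperty("avoidMapPolling", "true");

    // Boolean.getBoolean looks up the system property and parses it as a boolean
    boolean avoidMapPolling = Boolean.getBoolean("avoidMapPolling");
    System.out.println("avoidMapPolling = " + avoidMapPolling); // prints true
  }
}
```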

Binary file not shown.

View file

@@ -13,43 +13,6 @@ also build them yourself from an OSM dump (e.g. planet or [GeoFabrik
 extract](https://download.geofabrik.de/))
 
-## Build the pbfparser
-
-First, there are two file formats available to download OSM data: `bzip`-ed
-XML files (very large) and `.pbf`
-([Protobuf](https://github.com/protocolbuffers/protobuf) format) which is much
-more efficient. If you want to use the latter one, you will have to build the
-`pbfparser` (located in `misc/pbfparser` first):
-
-* Download [the latest
-  version](https://github.com/openstreetmap/osmosis/releases)
-  of [Osmosis](https://wiki.openstreetmap.org/wiki/Osmosis) and unzip it
-  somewhere.
-* Copy the `lib/default/protobuf-java-*.jar` and
-  `lib/default/osmosis-osm-binary-*.jar` files from the unzipped Osmosis
-  archive to `misc/pbfparser/protobuf.jar` and `misc/pbfparser/osmosis.jar`.
-* Build BRouter and copy
-  `brouter-server/build/libs/brouter-*-all.jar` to
-  `misc/pbfparser/brouter.jar`.
-* You can build the `pbfparser` using, in the `misc/pbfparser/`
-  folder,
-
-```
-javac -d . -cp "brouter.jar:protobuf.jar:osmosis.jar" *.java
-```
-
-* Finally, you can build a `jar` file from these files using
-
-```
-jar cf pbfparser.jar btools/**/*.class
-```
-
-_Note:_ If the `jar` file is not properly created, everything else will seem
-to work normally but there will not be any data extracted from the OSM data
-dump. You can check what is actually inside the built `jar` file using
-`jar tf pbfparser.jar`.
-
 ## Run the map creation script
 
 If you want to have elevation information in the generated segments files, you

View file

@@ -1,3 +0,0 @@
-*.jar
-*.BAK
-btools/

View file

@@ -1,136 +0,0 @@
-package btools.mapcreator;
-
-import java.io.*;
-import java.util.*;
-import java.util.zip.*;
-
-import btools.util.*;
-
-import org.openstreetmap.osmosis.osmbinary.Fileformat;
-
-/**
- * Parser for OSM data
- *
- * @author ab
- */
-public class OsmParser extends MapCreatorBase {
-  private BufferedReader _br;
-
-  private NodeListener nListener;
-  private WayListener wListener;
-  private RelationListener rListener;
-
-  public void readMap(File mapFile,
-                      NodeListener nListener,
-                      WayListener wListener,
-                      RelationListener rListener) throws Exception {
-    this.nListener = nListener;
-    this.wListener = wListener;
-    this.rListener = rListener;
-
-    System.out.println("*** PBF Parsing: " + mapFile);
-
-    // once more for testing
-    int rawBlobCount = 0;
-
-    long bytesRead = 0L;
-
-    // wait for file to become available
-    while (!mapFile.exists()) {
-      System.out.println("--- waiting for " + mapFile + " to become available");
-      Thread.sleep(10000);
-    }
-
-    long currentSize = mapFile.length();
-    long currentSizeTime = System.currentTimeMillis();
-
-    DataInputStream dis = new DataInputStream(new BufferedInputStream(new FileInputStream(mapFile)));
-    for (; ; ) {
-      // continue reading if either more then a 100 MB unread, or the current-size is known for more then 2 Minutes
-      while (currentSize - bytesRead < 100000000L) {
-        long newSize = mapFile.length();
-        if (newSize != currentSize) {
-          currentSize = newSize;
-          currentSizeTime = System.currentTimeMillis();
-        } else if (System.currentTimeMillis() - currentSizeTime > 120000) {
-          break;
-        }
-        if (currentSize - bytesRead < 100000000L) {
-          System.out.println("--- waiting for more data, currentSize=" + currentSize + " bytesRead=" + bytesRead);
-          Thread.sleep(10000);
-        }
-      }
-
-      int headerLength;
-      try {
-        headerLength = dis.readInt();
-        bytesRead += 4;
-      } catch (EOFException e) {
-        break;
-      }
-
-      byte[] headerBuffer = new byte[headerLength];
-      dis.readFully(headerBuffer);
-      bytesRead += headerLength;
-      Fileformat.BlobHeader blobHeader = Fileformat.BlobHeader.parseFrom(headerBuffer);
-
-      byte[] blobData = new byte[blobHeader.getDatasize()];
-      dis.readFully(blobData);
-      bytesRead += blobData.length;
-
-      new BPbfBlobDecoder(blobHeader.getType(), blobData, this).process();
-
-      rawBlobCount++;
-    }
-    dis.close();
-    System.out.println("read raw blobs: " + rawBlobCount);
-  }
-
-  public void addNode(long nid, Map<String, String> tags, double lat, double lon) {
-    NodeData n = new NodeData(nid, lon, lat);
-    n.setTags(tags);
-    try {
-      nListener.nextNode(n);
-    } catch (Exception e) {
-      throw new RuntimeException("error writing node: " + e);
-    }
-  }
-
-  public void addWay(long wid, Map<String, String> tags, LongList nodes) {
-    WayData w = new WayData(wid, nodes);
-    w.setTags((HashMap<String, String>) tags);
-    try {
-      wListener.nextWay(w);
-    } catch (Exception e) {
-      throw new RuntimeException("error writing way: " + e);
-    }
-  }
-
-  public void addRelation(long rid, Map<String, String> tags, LongList wayIds, LongList fromWid, LongList toWid, LongList viaNid) {
-    RelationData r = new RelationData(rid, wayIds);
-    r.setTags((HashMap<String, String>) tags);
-    try {
-      rListener.nextRelation(r);
-      if (fromWid == null || toWid == null || viaNid == null || viaNid.size() != 1) {
-        // dummy-TR for each viaNid
-        for (int vi = 0; vi < (viaNid == null ? 0 : viaNid.size()); vi++) {
-          rListener.nextRestriction(r, 0L, 0L, viaNid.get(vi));
-        }
-        return;
-      }
-      for (int fi = 0; fi < fromWid.size(); fi++) {
-        for (int ti = 0; ti < toWid.size(); ti++) {
-          rListener.nextRestriction(r, fromWid.get(fi), toWid.get(ti), viaNid.get(0));
-        }
-      }
-    } catch (Exception e) {
-      throw new RuntimeException("error writing relation", e);
-    }
-  }
-}

View file

@@ -1,23 +0,0 @@
-The pbf-parse is not included in the regular source tree
-to avoid the library dependencies to "osmosis" and "protobuf"
-
-In order to run the mapcreator from a pbf-file (as it is
-done in the process_pbf_planet.sh script included in
-the git-repo), you have to build yourself the "pbfparser.jar"
-by doing the following:
-
--> get osmosis from https://bretth.dev.openstreetmap.org/osmosis-build/osmosis-latest.zip
--> copy lib/default/osmosis-osm-binary-*.jar in the archive to osmosis.jar in
-   this folder
--> copy lib/default/protobuf-java-*.jar in the archive to protobuf.jar in this
-   folder
--> copy the brouter-server/build/libs/brouter-...-all.jar to
-   brouter.jar in this folder
--> compile the PBF-Parser using:
-   javac -d . -cp protobuf.jar:osmosis.jar:brouter.jar *.java
--> pack all the compiled class files together in a jar
-   "pbfparser.jar" with "jar cf pbfparser.jar btools/**/*.class"
-
-Alternatively, just for testing you can run the Mapcreator against a *xml.bz2 Database-Extract,
-then you don't need the pbf-parser. However, the XML-Parser does not (yet) parse
-Turn-Restrictions, so really just for testing...

View file

@@ -1 +0,0 @@
-javac -d . -cp pbfparser.jar;brouter.jar BPbfFieldDecoder.java BPbfBlobDecoder.java OsmParser.java

View file

@@ -24,14 +24,11 @@ touch lastmaprun.date
 rm -rf /var/www/brouter/segments4_lastrun
 
-JAVA='/java/bin/java -Xmx2600m -Xms2600m -Xmn32m'
+JAVA='java -Xmx2600m -Xms2600m -Xmn32m'
 
 BROUTER_PROFILES=$(realpath "../../profiles2")
 BROUTER_JAR=$(realpath $(ls ../../../brouter-server/build/libs/brouter-*-all.jar))
-OSMOSIS_JAR=$(realpath "../../pbfparser/osmosis.jar")
-PROTOBUF_JAR=$(realpath "../../pbfparser/protobuf.jar")
-PBFPARSER_JAR=$(realpath "../../pbfparser/pbfparser.jar")
 PLANET_FILE=${PLANET_FILE:-$(realpath "./planet-latest.osm.pbf")}
 
 # Download SRTM zip files from
@@ -43,7 +40,7 @@ SRTM_PATH="/private-backup/srtm"
 mkdir tmp
 cd tmp
 
 mkdir nodetiles
-${JAVA} -cp "${OSMOSIS_JAR}:${PROTOBUF_JAR}:${PBFPARSER_JAR}:${BROUTER_JAR}" btools.mapcreator.OsmCutter ${BROUTER_PROFILES}/lookups.dat nodetiles ways.dat relations.dat restrictions.dat ${BROUTER_PROFILES}/all.brf ${PLANET_FILE}
+${JAVA} -cp ${BROUTER_JAR} -DavoidMapPolling=true btools.mapcreator.OsmCutter ${BROUTER_PROFILES}/lookups.dat nodetiles ways.dat relations.dat restrictions.dat ${BROUTER_PROFILES}/all.brf ${PLANET_FILE}
 
 mkdir ftiles
 ${JAVA} -cp ${BROUTER_JAR} -Ddeletetmpfiles=true -DuseDenseMaps=true btools.mapcreator.NodeFilter nodetiles ways.dat ftiles

View file

@@ -15,7 +15,7 @@ mkdir waytiles
 mkdir waytiles55
 mkdir nodes55
 
-../../jdk8/bin/java -Xmx6144M -Xms6144M -Xmn256M -cp ../pbfparser.jar:../brouter_fc.jar -Ddeletetmpfiles=true -DuseDenseMaps=true btools.util.StackSampler btools.mapcreator.OsmFastCutter ../lookups.dat nodetiles waytiles nodes55 waytiles55 bordernids.dat relations.dat restrictions.dat ../all.brf ../trekking.brf ../softaccess.brf ../planet-new.osm.pbf
+../../jdk8/bin/java -Xmx6144M -Xms6144M -Xmn256M -cp ../brouter_fc.jar -Ddeletetmpfiles=true -DuseDenseMaps=true btools.util.StackSampler btools.mapcreator.OsmFastCutter ../lookups.dat nodetiles waytiles nodes55 waytiles55 bordernids.dat relations.dat restrictions.dat ../all.brf ../trekking.brf ../softaccess.brf ../planet-new.osm.pbf
 
 mv ../planet-latest.osm.pbf ../planet-old.osm.pbf
 mv ../planet-new.osm.pbf ../planet-latest.osm.pbf