forked from MapComplete/MapComplete
Optimize geojsonsource: do not calculate all tiles when unneeded
parent 5d4e98dcf9
commit 4fd233e557
1 changed file with 47 additions and 43 deletions
@@ -25,7 +25,7 @@ export default class GeoJsonSource implements FeatureSource {
        let url = flayer.layerDef.source.geojsonSource.replace("{layer}", flayer.layerDef.id);
        this.name = "GeoJsonSource of " + url;
        const zoomLevel = flayer.layerDef.source.geojsonZoomLevel;

        this.isOsmCache = flayer.layerDef.source.isOsmCacheLayer;

        this.features = new UIEventSource<{ feature: any; freshness: Date }[]>([])
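For context, the layer's source block drives everything this diff touches: geojsonSource is the URL template (with "{layer}" substituted above), geojsonZoomLevel marks the source as pre-tiled at a fixed zoom, and isOsmCacheLayer flags cached OSM data. A hedged sketch of such a config, with field names taken from the lines above; the URL shape and its tile placeholders are assumptions for illustration, not something this diff shows:

// Illustrative only: a layer-source object with the fields read in the constructor above.
// The URL and its {z}/{x}/{y} placeholders are hypothetical.
const exampleGeojsonSource = {
    geojsonSource: "https://example.org/cache/{layer}_{z}_{x}_{y}.geojson",
    geojsonZoomLevel: 14,      // the cache is pre-split into zoom-14 tiles
    isOsmCacheLayer: true,     // features carry _last_edit:* metadata from the cache
};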
@@ -50,7 +50,7 @@ export default class GeoJsonSource implements FeatureSource {
     * @param locationControl
     * @constructor
     */
-    public static ConstructMultiSource(flayers: { isDisplayed: UIEventSource<boolean>, layerDef: LayerConfig }[], locationControl: UIEventSource<Loc>): GeoJsonSource[] {
+    public static ConstructMultiSource(flayers: { isDisplayed: UIEventSource<boolean>, layerDef: LayerConfig }[], locationControl: UIEventSource<Loc>): FeatureSource[] {

        const flayersPerSource = new Map<string, { isDisplayed: UIEventSource<boolean>, layerDef: LayerConfig }[]>();
        for (const flayer of flayers) {
@@ -65,7 +65,7 @@ export default class GeoJsonSource implements FeatureSource {
            flayersPerSource.get(url).push(flayer)
        }

-        const sources: GeoJsonSource[] = []
+        const sources: FeatureSource[] = []

        flayersPerSource.forEach((flayers, key) => {
            if (flayers.length == 1) {
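These two hunks only widen the declared types: callers of ConstructMultiSource never need anything GeoJsonSource-specific, so exposing the FeatureSource interface keeps them decoupled from the concrete loader. A minimal sketch of that idea, using simplified stand-ins for the real UIEventSource and FeatureSource types (the actual interfaces in MapComplete carry more members):

// Simplified stand-ins, for illustration only.
interface UIEventSourceLike<T> {
    data: T;
    addCallbackAndRun(f: (t: T) => void): void;
}

interface FeatureSourceLike {
    name: string;
    features: UIEventSourceLike<{ feature: any; freshness: Date }[]>;
}

// A consumer that only renders or counts features does not care whether they came
// from a plain GeoJSON file, a tiled OSM cache or some other backend.
function logFeatureCount(source: FeatureSourceLike): void {
    source.features.addCallbackAndRun(fs =>
        console.log(source.name, "now has", fs.length, "features"));
}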
@@ -112,7 +112,19 @@ export default class GeoJsonSource implements FeatureSource {
        }

        const neededTiles = locationControl.map(
-            _ => {
+            location => {
+                if (!flayer.isDisplayed.data) {
+                    // No need to download! - the layer is disabled
+                    return undefined;
+                }
+
+                if (location.zoom < flayer.layerDef.minzoom ||
+                    location.zoom > flayer.layerDef.maxzoom) {
+                    // No need to download! - the layer is disabled
+                    console.log("Not loading layers for " + url, "zoom" + location.zoom, " not between", flayer.layerDef.minzoom, "and", flayer.layerDef.maxzoom)
+                    return undefined;
+                }
+
                // Yup, this is cheating to just get the bounds here
                const bounds = State.state.leafletMap.data.getBounds()
                const tileRange = Utils.TileRangeBetween(zoomLevel, bounds.getNorth(), bounds.getEast(), bounds.getSouth(), bounds.getWest())
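This is the heart of the optimization: the mapping from the current location to the needed tiles now returns undefined before any tile arithmetic happens when the layer is hidden or outside its zoom window. The arithmetic being skipped is ordinary slippy-map tile math; below is a self-contained sketch of what a helper like Utils.TileRangeBetween presumably computes. The function names and return shape here are assumptions, not MapComplete's actual code:

// Sketch: convert a lat/lon bounding box at a given zoom into X/Y tile index ranges.
function lonToTileX(lon: number, zoom: number): number {
    return Math.floor(((lon + 180) / 360) * Math.pow(2, zoom));
}

function latToTileY(lat: number, zoom: number): number {
    const rad = (lat * Math.PI) / 180;
    return Math.floor(
        ((1 - Math.log(Math.tan(rad) + 1 / Math.cos(rad)) / Math.PI) / 2) * Math.pow(2, zoom)
    );
}

function tileRangeBetween(zoom: number, north: number, east: number, south: number, west: number) {
    const xStart = lonToTileX(west, zoom);
    const xEnd = lonToTileX(east, zoom);
    const yStart = latToTileY(north, zoom); // tile Y grows towards the south
    const yEnd = latToTileY(south, zoom);
    return {
        zoom, xStart, xEnd, yStart, yEnd,
        total: (xEnd - xStart + 1) * (yEnd - yStart + 1), // number of tiles the viewport covers
    };
}

// At zoom 14 a region-sized viewport already spans thousands of tiles,
// which is exactly the work the early returns above avoid for hidden layers.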
@@ -126,14 +138,6 @@ export default class GeoJsonSource implements FeatureSource {
            if (needed === undefined) {
                return;
            }
-            if (!flayer.isDisplayed.data) {
-                // No need to download! - the layer is disabled
-                return;
-            }
-
-            if (locationControl.data.zoom < flayer.layerDef.minzoom) {
-                return;
-            }

            needed.forEach(neededTile => {
                if (loadedTiles.has(neededTile)) {
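With the visibility and zoom guards moved into the tile computation above, the consumer of neededTiles is left with only two cheap checks: bail out when the source signalled undefined, and skip tiles that were already fetched. A minimal sketch of that consumption pattern, assuming loadedTiles is a set of tile identifiers and loadTile is a hypothetical download trigger:

// Sketch of the simplified consumer; names and types are assumptions.
const loadedTiles = new Set<string>();

function onNeededTilesChanged(
    needed: string[] | undefined,
    loadTile: (tileId: string) => void
): void {
    if (needed === undefined) {
        // The producer already decided nothing should load (hidden layer, wrong zoom).
        return;
    }
    needed.forEach(neededTile => {
        if (loadedTiles.has(neededTile)) {
            return; // this tile was downloaded earlier; never fetch it twice
        }
        loadedTiles.add(neededTile);
        loadTile(neededTile);
    });
}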
@@ -153,42 +157,42 @@ export default class GeoJsonSource implements FeatureSource {
        const self = this;
        Utils.downloadJson(url)
            .then(json => {
                if (json.elements === [] && json.remarks.indexOf("runtime error") > 0) {
                    self.onFail("Runtime error (timeout)", url)
                    return;
                }
                const time = new Date();
                const newFeatures: { feature: any, freshness: Date } [] = []
                let i = 0;
                let skipped = 0;
                for (const feature of json.features) {
                    if (feature.properties.id === undefined) {
                        feature.properties.id = url + "/" + i;
                        feature.id = url + "/" + i;
                        i++;
                    }
                    if (self.seenids.has(feature.properties.id)) {
                        skipped++;
                        continue;
                    }
                    self.seenids.add(feature.properties.id)

                    let freshness: Date = time;
                    if (feature.properties["_last_edit:timestamp"] !== undefined) {
                        freshness = new Date(feature.properties["_last_edit:timestamp"])
                    }

                    newFeatures.push({feature: feature, freshness: freshness})
                }
                console.debug("Downloaded " + newFeatures.length + " new features and " + skipped + " already seen features from " + url);

                if (newFeatures.length == 0) {
                    return;
                }

                eventSource.setData(eventSource.data.concat(newFeatures))

            }).catch(msg => self.onFail(msg, url))
        }

    }
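The download handler above dedupes across repeated tile downloads with seenids and prefers the cached _last_edit:timestamp over the download time when computing freshness. A standalone sketch of that per-feature bookkeeping, with simplified types and a hypothetical helper name:

// Sketch only: the feature/property shapes are trimmed down for illustration.
type AnyGeoJsonFeature = { id?: string; properties: Record<string, any> };

function tagWithFreshness(
    features: AnyGeoJsonFeature[],
    url: string,
    seenids: Set<string>
): { feature: AnyGeoJsonFeature; freshness: Date }[] {
    const downloadTime = new Date();
    const fresh: { feature: AnyGeoJsonFeature; freshness: Date }[] = [];
    let i = 0;
    for (const feature of features) {
        // Features without an id get a synthetic, URL-based one so they can be deduplicated.
        if (feature.properties.id === undefined) {
            feature.properties.id = url + "/" + i;
            feature.id = url + "/" + i;
            i++;
        }
        if (seenids.has(feature.properties.id)) {
            continue; // already emitted by an earlier download of an overlapping tile
        }
        seenids.add(feature.properties.id);

        // Cache layers carry the real edit date; fall back to "now" otherwise.
        const timestamp = feature.properties["_last_edit:timestamp"];
        fresh.push({
            feature,
            freshness: timestamp !== undefined ? new Date(timestamp) : downloadTime,
        });
    }
    return fresh;
}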