Merge master

Pieter Vander Vennet 2024-02-26 16:11:41 +01:00
commit 89a0be8903
150 changed files with 4201 additions and 9581 deletions

View file

@ -13,8 +13,15 @@ export default abstract class Script {
ScriptUtils.fixUtils()
const args = [...process.argv]
args.splice(0, 2)
const start = new Date()
this.main(args)
.then((_) => console.log("All done"))
.then((_) => {
const end = new Date()
const millisNeeded = end.getTime() - start.getTime()
const green = (s) => "\x1b[92m" + s + "\x1b[0m"
console.log(green("All done! (" + millisNeeded + " ms)"))
})
.catch((e) => console.log("ERROR:", e))
}

View file

@ -1,571 +0,0 @@
/**
* Generates a collection of geojson files based on an overpass query for a given theme
*/
import { Utils } from "../src/Utils"
import { Overpass } from "../src/Logic/Osm/Overpass"
import { existsSync, readFileSync, writeFileSync } from "fs"
import { TagsFilter } from "../src/Logic/Tags/TagsFilter"
import { Or } from "../src/Logic/Tags/Or"
import { AllKnownLayouts } from "../src/Customizations/AllKnownLayouts"
import * as OsmToGeoJson from "osmtogeojson"
import MetaTagging from "../src/Logic/MetaTagging"
import { UIEventSource } from "../src/Logic/UIEventSource"
import { TileRange, Tiles } from "../src/Models/TileRange"
import LayoutConfig from "../src/Models/ThemeConfig/LayoutConfig"
import ScriptUtils from "./ScriptUtils"
import PerLayerFeatureSourceSplitter from "../src/Logic/FeatureSource/PerLayerFeatureSourceSplitter"
import FilteredLayer from "../src/Models/FilteredLayer"
import StaticFeatureSource from "../src/Logic/FeatureSource/Sources/StaticFeatureSource"
import Constants from "../src/Models/Constants"
import { GeoOperations } from "../src/Logic/GeoOperations"
import SimpleMetaTaggers, { ReferencingWaysMetaTagger } from "../src/Logic/SimpleMetaTagger"
import FilteringFeatureSource from "../src/Logic/FeatureSource/Sources/FilteringFeatureSource"
import { Feature } from "geojson"
import { BBox } from "../src/Logic/BBox"
import { FeatureSource } from "../src/Logic/FeatureSource/FeatureSource"
import OsmObjectDownloader from "../src/Logic/Osm/OsmObjectDownloader"
import FeaturePropertiesStore from "../src/Logic/FeatureSource/Actors/FeaturePropertiesStore"
ScriptUtils.fixUtils()
function createOverpassObject(theme: LayoutConfig, backend: string) {
let filters: TagsFilter[] = []
let extraScripts: string[] = []
for (const layer of theme.layers) {
if (typeof layer === "string") {
throw "A layer was not expanded!"
}
if (layer.doNotDownload) {
continue
}
if (!layer.source) {
continue
}
if (layer.source.geojsonSource) {
// This layer defines a geoJson-source
// Should it be cached?
if (layer.source.isOsmCacheLayer !== true) {
continue
}
}
filters.push(layer.source.osmTags)
}
filters = Utils.NoNull(filters)
extraScripts = Utils.NoNull(extraScripts)
if (filters.length + extraScripts.length === 0) {
throw "Nothing to download! The theme doesn't declare anything to download"
}
return new Overpass(new Or(filters), extraScripts, backend, new UIEventSource<number>(60))
}
function rawJsonName(targetDir: string, x: number, y: number, z: number): string {
return targetDir + "_" + z + "_" + x + "_" + y + ".json"
}
function geoJsonName(targetDir: string, x: number, y: number, z: number): string {
return targetDir + "_" + z + "_" + x + "_" + y + ".geojson"
}
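// For illustration (made-up arguments): rawJsonName("cache/toerisme", 8356, 5468, 14)
// yields "cache/toerisme_14_8356_5468.json"; geoJsonName follows the same "<targetDir>_<z>_<x>_<y>" pattern with ".geojson"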
/// Downloads the tiles in the given tile range from Overpass and saves them to disk
async function downloadRaw(
targetdir: string,
r: TileRange,
theme: LayoutConfig
): Promise<{ failed: number; skipped: number }> {
let downloaded = 0
let failed = 0
let skipped = 0
const startTime = new Date().getTime()
for (let x = r.xstart; x <= r.xend; x++) {
for (let y = r.ystart; y <= r.yend; y++) {
downloaded++
const filename = rawJsonName(targetdir, x, y, r.zoomlevel)
if (existsSync(filename)) {
console.log("Already exists (not downloading again): ", filename)
skipped++
continue
}
const runningSeconds = (new Date().getTime() - startTime) / 1000
const resting = failed + (r.total - downloaded)
const perTile = runningSeconds / (downloaded - skipped)
const estimated = Math.floor(resting * perTile)
console.log(
"total: ",
downloaded,
"/",
r.total,
"failed: ",
failed,
"skipped: ",
skipped,
"running time: ",
Utils.toHumanTime(runningSeconds) + "s",
"estimated left: ",
Utils.toHumanTime(estimated),
"(" + Math.floor(perTile) + "s/tile)"
)
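// Tiles.tile_bounds gives the two opposite corners of the tile as [lat, lon] pairs;
// taking the min/max below turns them into a south/west/north/east bbox for the Overpass query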
const boundsArr = Tiles.tile_bounds(r.zoomlevel, x, y)
const bounds = {
north: Math.max(boundsArr[0][0], boundsArr[1][0]),
south: Math.min(boundsArr[0][0], boundsArr[1][0]),
east: Math.max(boundsArr[0][1], boundsArr[1][1]),
west: Math.min(boundsArr[0][1], boundsArr[1][1]),
}
const overpass = createOverpassObject(
theme,
Constants.defaultOverpassUrls[failed % Constants.defaultOverpassUrls.length]
)
const url = overpass.buildQuery(
"[bbox:" +
bounds.south +
"," +
bounds.west +
"," +
bounds.north +
"," +
bounds.east +
"]"
)
try {
const json = await Utils.downloadJson(url)
if ((<string>json.remark ?? "").startsWith("runtime error")) {
console.error("Got a runtime error: ", json.remark)
failed++
} else if (json.elements.length === 0) {
console.log("Got an empty response! Writing anyway")
}
console.log(
"Got the response - writing ",
json.elements.length,
" elements to ",
filename
)
writeFileSync(filename, JSON.stringify(json, null, " "))
} catch (err) {
console.log(url)
console.log(
"Could not download - probably hit the rate limit; waiting a bit. (" + err + ")"
)
failed++
await ScriptUtils.sleep(1)
}
}
}
return { failed: failed, skipped: skipped }
}
/*
* Downloads extra geojson sources and returns the features.
* Extra geojson layers should not be tiled
*/
async function downloadExtraData(theme: LayoutConfig) /* : any[] */ {
const allFeatures: any[] = []
for (const layer of theme.layers) {
if (!layer.source?.geojsonSource) {
continue
}
const source = layer.source.geojsonSource
if (layer.source.isOsmCacheLayer !== undefined && layer.source.isOsmCacheLayer !== false) {
// Cached layers are not considered here
continue
}
if (source.startsWith("https://api.openstreetmap.org/api/0.6/notes.json")) {
// We ignore map notes
continue
}
console.log("Downloading extra data: ", source)
await Utils.downloadJson(source).then((json) => allFeatures.push(...json.features))
}
return allFeatures
}
function loadAllTiles(
targetdir: string,
r: TileRange,
theme: LayoutConfig,
extraFeatures: any[]
): FeatureSource {
let allFeatures = [...extraFeatures]
let processed = 0
for (let x = r.xstart; x <= r.xend; x++) {
for (let y = r.ystart; y <= r.yend; y++) {
processed++
const filename = rawJsonName(targetdir, x, y, r.zoomlevel)
console.log(" Loading and processing", processed, "/", r.total, filename)
if (!existsSync(filename)) {
console.error("Not found - and not downloaded. Run this script again!: " + filename)
continue
}
// We read the raw OSM-file and convert it to a geojson
const rawOsm = JSON.parse(readFileSync(filename, { encoding: "utf8" }))
// Create and save the geojson file - which is the main chunk of the data
const geojson = OsmToGeoJson.default(rawOsm)
console.log(" which as", geojson.features.length, "features")
allFeatures.push(...geojson.features)
}
}
return StaticFeatureSource.fromGeojson(allFeatures)
}
/**
* Splits the loaded features into tiles per layer and writes them to disk
*/
async function sliceToTiles(
allFeatures: FeatureSource,
theme: LayoutConfig,
targetdir: string,
pointsOnlyLayers: string[],
clip: boolean,
targetzoomLevel: number = 9
) {
const skippedLayers = new Set<string>()
const indexedFeatures: Map<string, any> = new Map<string, any>()
let indexisBuilt = false
const osmObjectDownloader = new OsmObjectDownloader()
function buildIndex() {
for (const f of allFeatures.features.data) {
indexedFeatures.set(f.properties.id, f)
}
indexisBuilt = true
}
function getFeatureById(id) {
if (!indexisBuilt) {
buildIndex()
}
return indexedFeatures.get(id)
}
const flayers: FilteredLayer[] = theme.layers.map((l) => new FilteredLayer(l))
const perLayer = new PerLayerFeatureSourceSplitter(flayers, allFeatures)
for (const [layerId, source] of perLayer.perLayer) {
const layer = flayers.find((flayer) => flayer.layerDef.id === layerId).layerDef
const targetZoomLevel = layer.source.geojsonZoomLevel ?? targetzoomLevel
if (layer.source.geojsonSource && layer.source.isOsmCacheLayer !== true) {
console.log("Skipping layer ", layerId, ": not a caching layer")
skippedLayers.add(layer.id)
continue
}
const flayer: FilteredLayer = new FilteredLayer(layer)
console.log(
"Handling layer ",
layerId,
"which has",
source.features.data.length,
"features"
)
if (source.features.data.length === 0) {
continue
}
const featureProperties: FeaturePropertiesStore = new FeaturePropertiesStore(source)
MetaTagging.addMetatags(
source.features.data,
{
getFeaturesWithin: (_) => {
return <any>[allFeatures.features.data]
},
getFeatureById: getFeatureById,
},
layer,
theme,
osmObjectDownloader,
featureProperties,
{
includeDates: false,
includeNonDates: true,
evaluateStrict: true,
}
)
while (SimpleMetaTaggers.country.runningTasks.size > 0) {
console.log(
"Still waiting for ",
SimpleMetaTaggers.country.runningTasks.size,
" features which don't have a country yet"
)
await ScriptUtils.sleep(250)
}
const createdTiles = []
// At this point, we have all the features of the entire area.
// However, we want to export them per tile of a fixed size, so we spread them into per-tile bounding boxes to split them up
const features = source.features.data
const perBbox = GeoOperations.spreadIntoBboxes(features, targetZoomLevel)
for (let [tileIndex, features] of perBbox) {
const bbox = BBox.fromTileIndex(tileIndex).asGeoJson({})
console.log("Got tile:", tileIndex, layer.id)
if (features.length === 0) {
continue
}
const filteredTile = new FilteringFeatureSource(
flayer,
new StaticFeatureSource(features)
)
console.log(
"Tile " +
layer.id +
"." +
tileIndex +
" contains " +
filteredTile.features.data.length +
" features after filtering (" +
features.length +
") features before"
)
if (filteredTile.features.data.length === 0) {
continue
}
let strictlyCalculated = 0
let featureCount = 0
for (const feature of features) {
// Some cleanup
if (layer.calculatedTags !== undefined) {
// Evaluate all the calculated tags strictly
const calculatedTagKeys = layer.calculatedTags.map((ct) => ct[0])
featureCount++
const props = feature.properties
for (const calculatedTagKey of calculatedTagKeys) {
const strict = props[calculatedTagKey]
if (props.hasOwnProperty(calculatedTagKey)) {
delete props[calculatedTagKey]
}
props[calculatedTagKey] = strict
strictlyCalculated++
if (strictlyCalculated % 100 === 0) {
console.log(
"Strictly calculated ",
strictlyCalculated,
"values for tile",
tileIndex,
": now at ",
featureCount,
"/",
filteredTile.features.data.length,
"examle value: ",
strict
)
}
}
}
delete feature["bbox"]
}
if (clip) {
console.log("Clipping features")
features = [].concat(
...features.map((f: Feature) => GeoOperations.clipWith(<any>f, bbox))
)
}
// Let's save this tile!
const [z, x, y] = Tiles.tile_from_index(tileIndex)
// console.log("Writing tile ", z, x, y, layerId)
const targetPath = geoJsonName(targetdir + "_" + layerId, x, y, z)
createdTiles.push(tileIndex)
// This is the geojson file containing all features for this tile
writeFileSync(
targetPath,
JSON.stringify(
{
type: "FeatureCollection",
features,
},
null,
" "
)
)
console.log("Written tile", targetPath, "with", filteredTile.features.data.length)
}
// All the tiles are written at this point
// Only thing left to do is to create the index
const path = targetdir + "_" + layerId + "_" + targetZoomLevel + "_overview.json"
const perX = {}
createdTiles
.map((i) => Tiles.tile_from_index(i))
.forEach(([z, x, y]) => {
const key = "" + x
if (perX[key] === undefined) {
perX[key] = []
}
perX[key].push(y)
})
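// The overview index maps every x-coordinate onto the y-coordinates for which a tile exists,
// e.g. (made-up values): { "8356": [5467, 5468], "8357": [5468] }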
console.log("Written overview: ", path, "with ", createdTiles.length, "tiles")
writeFileSync(path, JSON.stringify(perX))
// And, if needed, to create a points-only layer
if (pointsOnlyLayers.indexOf(layer.id) >= 0) {
const filtered = new FilteringFeatureSource(flayer, source)
const features = filtered.features.data
const points = features.map((feature) => GeoOperations.centerpoint(feature))
console.log("Writing points overview for ", layerId)
const targetPath = targetdir + "_" + layerId + "_points.geojson"
// This is the geojson file containing all features for this tile
writeFileSync(
targetPath,
JSON.stringify(
{
type: "FeatureCollection",
features: points,
},
null,
" "
)
)
}
}
const skipped = Array.from(skippedLayers)
if (skipped.length > 0) {
console.warn(
"Did not save any cache files for layers " +
skipped.join(", ") +
" as these didn't set the flag `isOsmCache` to true"
)
}
}
export async function main(args: string[]) {
console.log("Cache builder started with args ", args.join(" "))
ReferencingWaysMetaTagger.enabled = false
if (args.length < 6) {
console.error(
"Expected arguments are: theme zoomlevel targetdirectory lat0 lon0 lat1 lon1 [--generate-point-overview layer-name,layer-name,...] [--force-zoom-level z] [--clip]" +
"--force-zoom-level causes non-cached-layers to be donwnloaded\n" +
"--clip will erase parts of the feature falling outside of the bounding box"
)
return
}
const themeName = args[0]
const zoomlevel = Number(args[1])
console.log(
"Target zoomlevel for the tiles is",
zoomlevel,
"; this can be overridden by the individual layers"
)
const targetdir = args[2] + "/" + themeName
if (!existsSync(args[2])) {
console.log("Directory not found")
throw `The directory ${args[2]} does not exist`
}
const lat0 = Number(args[3])
const lon0 = Number(args[4])
const lat1 = Number(args[5])
const lon1 = Number(args[6])
const clip = args.indexOf("--clip") >= 0
if (isNaN(lat0)) {
throw "The first number (a latitude) is not a valid number"
}
if (isNaN(lon0)) {
throw "The second number (a longitude) is not a valid number"
}
if (isNaN(lat1)) {
throw "The third number (a latitude) is not a valid number"
}
if (isNaN(lon1)) {
throw "The fourth number (a longitude) is not a valid number"
}
const tileRange = Tiles.TileRangeBetween(zoomlevel, lat0, lon0, lat1, lon1)
if (isNaN(tileRange.total)) {
throw "Something has gone wrong: tilerange is NAN"
}
if (tileRange.total === 0) {
console.log("Tilerange has zero tiles - this is probably an error")
return
}
const theme = AllKnownLayouts.allKnownLayouts.get(themeName)
if (theme === undefined) {
const keys = Array.from(AllKnownLayouts.allKnownLayouts.keys())
console.error("The theme " + themeName + " was not found; try one of ", keys)
return
}
theme.layers = theme.layers.filter(
(l) =>
Constants.priviliged_layers.indexOf(<any>l.id) < 0 && !l.id.startsWith("note_import_")
)
console.log("Layers to download:", theme.layers.map((l) => l.id).join(", "))
let generatePointLayersFor = []
if (args[7] == "--generate-point-overview") {
if (args[8] === undefined) {
throw "--generate-point-overview needs a list of layers to generate the overview for (or * for all)"
} else if (args[8] === "*") {
generatePointLayersFor = theme.layers.map((l) => l.id)
} else {
generatePointLayersFor = args[8].split(",")
}
console.log(
"Also generating a point overview for layers ",
generatePointLayersFor.join(",")
)
}
{
const index = args.indexOf("--force-zoom-level")
if (index >= 0) {
const forcedZoomLevel = Number(args[index + 1])
for (const layer of theme.layers) {
layer.source.geojsonSource = "https://127.0.0.1/cache_{layer}_{z}_{x}_{y}.geojson"
layer.source.isOsmCacheLayer = true
layer.source.geojsonZoomLevel = forcedZoomLevel
}
}
}
let failed = 0
do {
try {
const cachingResult = await downloadRaw(targetdir, tileRange, theme)
failed = cachingResult.failed
if (failed > 0) {
await ScriptUtils.sleep(30000)
}
} catch (e) {
console.error(e)
return
}
} while (failed > 0)
const extraFeatures = await downloadExtraData(theme)
const allFeaturesSource = loadAllTiles(targetdir, tileRange, theme, extraFeatures)
await sliceToTiles(allFeaturesSource, theme, targetdir, generatePointLayersFor, clip, zoomlevel)
}
let args = [...process.argv]
if (!args[1]?.endsWith("test/TestAll.ts")) {
args.splice(0, 2)
try {
main(args)
.then(() => console.log("All done!"))
.catch((e) => console.error("Error building cache:", e))
} catch (e) {
console.error("Error building cache:", e)
}
}

View file

@ -10,6 +10,7 @@ import {
PrevalidateTheme,
ValidateLayer,
ValidateThemeAndLayers,
ValidateThemeEnsemble,
} from "../src/Models/ThemeConfig/Conversion/Validation"
import { Translation } from "../src/UI/i18n/Translation"
import { PrepareLayer } from "../src/Models/ThemeConfig/Conversion/PrepareLayer"
@ -29,6 +30,8 @@ import LayerConfig from "../src/Models/ThemeConfig/LayerConfig"
import PointRenderingConfig from "../src/Models/ThemeConfig/PointRenderingConfig"
import { ConversionContext } from "../src/Models/ThemeConfig/Conversion/ConversionContext"
import { GenerateFavouritesLayer } from "./generateFavouritesLayer"
import LayoutConfig from "../src/Models/ThemeConfig/LayoutConfig"
import { TagsFilter } from "../src/Logic/Tags/TagsFilter"
// This script scans 'src/assets/layers/*.json' for layer definition files and 'src/assets/themes/*.json' for theme definition files.
// It spits out an overview of those to be used to load them
@ -367,7 +370,6 @@ class LayerOverviewUtils extends Script {
?.split(",") ?? []
)
const start = new Date()
const forceReload = args.some((a) => a == "--force")
const licensePaths = new Set<string>()
@ -397,6 +399,10 @@ class LayerOverviewUtils extends Script {
themeWhitelist
)
new ValidateThemeEnsemble().convertStrict(
Array.from(sharedThemes.values()).map((th) => new LayoutConfig(th, true))
)
if (recompiledThemes.length > 0) {
writeFileSync(
"./src/assets/generated/known_layers.json",
@ -458,17 +464,8 @@ class LayerOverviewUtils extends Script {
)
}
const end = new Date()
const millisNeeded = end.getTime() - start.getTime()
if (AllSharedLayers.getSharedLayersConfigs().size == 0) {
console.error(
"This was a bootstrapping-run. Run generate layeroverview again!(" +
millisNeeded +
" ms)"
)
} else {
const green = (s) => "\x1b[92m" + s + "\x1b[0m"
console.log(green("All done! (" + millisNeeded + " ms)"))
console.error("This was a bootstrapping-run. Run generate layeroverview again!")
}
}

View file

@ -279,6 +279,7 @@ async function generateCsp(
"https://www.openstreetmap.org",
"https://api.openstreetmap.org",
"https://pietervdvn.goatcounter.com",
"https://cache.mapcomplete.org",
].concat(...(await eliUrls()))
SpecialVisualizations.specialVisualizations.forEach((sv) => {

View file

@ -1,42 +0,0 @@
hosted.mapcomplete.org {
root * public/
file_server
header {
+Permissions-Policy "interest-cohort=()"
+Report-To `\{"group":"csp-endpoint", "max_age": 86400,"endpoints": [\{"url": "https://report.mapcomplete.org/csp"}], "include_subdomains": true}`
}
}
countrycoder.mapcomplete.org {
root * tiles/
file_server
header {
+Permissions-Policy "interest-cohort=()"
+Access-Control-Allow-Origin https://hosted.mapcomplete.org https://dev.mapcomplete.org https://mapcomplete.org
}
}
report.mapcomplete.org {
reverse_proxy {
to http://127.0.0.1:2600
}
}
studio.mapcomplete.org {
reverse_proxy {
to http://127.0.0.1:1235
}
}
bounce.mapcomplete.org {
reverse_proxy {
to http://127.0.0.1:1236
}
}
mapcomplete.osm.be {
reverse_proxy {
to http://127.0.0.1:1236
}
}

View file

@ -1,7 +0,0 @@
{
"store": "console",
"allowedOrigin": null,
"port": 2600,
"domainWhitelist": ["localhost:10179", "localhost:2600","hosted.mapcomplete.org", "dev.mapcomplete.org", "mapcomplete.org","*"],
"sourceBlacklist": ["chrome-extension://gighmmpiobklfepjocnamgkkbiglidom"]
}

View file

@ -16,9 +16,9 @@ npm run test &&
npm run prepare-deploy &&
zip dist.zip -r dist/* &&
mv config.json.bu config.json &&
scp ./scripts/hetzner/config/* hetzner:/root/ &&
scp ./Docs/ServerConfig/hetzner/* hetzner:/root/ &&
rsync -rzh --progress dist.zip hetzner:/root/ &&
echo "Upload completed, deploying config and booting" &&
ssh hetzner -t "unzip dist.zip && rm dist.zip && rm -rf public/ && mv dist public && caddy stop && caddy start" &&
rm dist.zip
# rm dist.zip
npm run clean

View file

@ -0,0 +1,306 @@
import { TagsFilter } from "../../src/Logic/Tags/TagsFilter"
import { Tag } from "../../src/Logic/Tags/Tag"
import { And } from "../../src/Logic/Tags/And"
import Script from "../Script"
import fs from "fs"
import { Or } from "../../src/Logic/Tags/Or"
import { RegexTag } from "../../src/Logic/Tags/RegexTag"
import { ValidateThemeEnsemble } from "../../src/Models/ThemeConfig/Conversion/Validation"
import { AllKnownLayouts } from "../../src/Customizations/AllKnownLayouts"
import { OsmObject } from "../../src/Logic/Osm/OsmObject"
class LuaSnippets {
public static helpers = [
"function countTbl(tbl)\n" +
" local c = 0\n" +
" for n in pairs(tbl) do \n" +
" c = c + 1 \n" +
" end\n" +
" return c\n" +
"end",
].join("\n")
public static isPolygonFeature(): { blacklist: TagsFilter; whitelisted: TagsFilter } {
const dict = OsmObject.polygonFeatures
const or: TagsFilter[] = []
const blacklisted: TagsFilter[] = []
dict.forEach(({ values, blacklist }, k) => {
if (blacklist) {
if (values === undefined) {
blacklisted.push(new RegexTag(k, /.+/is))
return
}
values.forEach((v) => {
blacklisted.push(new RegexTag(k, v))
})
return
}
if (values === undefined || values === null) {
or.push(new RegexTag(k, /.+/is))
return
}
values.forEach((v) => {
or.push(new RegexTag(k, v))
})
})
console.log(
"Polygon features are:",
or.map((t) => t.asHumanString(false, false, {}))
)
return { blacklist: new Or(blacklisted), whitelisted: new Or(or) }
}
public static toLuaFilter(tag: TagsFilter, useParens: boolean = false): string {
if (tag instanceof Tag) {
return `object.tags["${tag.key}"] == "${tag.value}"`
}
if (tag instanceof And) {
const expr = tag.and.map((t) => this.toLuaFilter(t, true)).join(" and ")
if (useParens) {
return "(" + expr + ")"
}
return expr
}
if (tag instanceof Or) {
const expr = tag.or.map((t) => this.toLuaFilter(t, true)).join(" or ")
if (useParens) {
return "(" + expr + ")"
}
return expr
}
if (tag instanceof RegexTag) {
let expr = LuaSnippets.regexTagToLua(tag)
if (useParens) {
expr = "(" + expr + ")"
}
return expr
}
let msg = "Could not handle" + tag.asHumanString(false, false, {})
console.error(msg)
throw msg
}
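// A worked example (hypothetical filter; output follows the rules in toLuaFilter and regexTagToLua below):
// toLuaFilter(new And([new Tag("amenity", "drinking_water"), new RegexTag("access", "private", true)]))
// becomes: object.tags["amenity"] == "drinking_water" and (object.tags["access"] ~= "private")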
private static regexTagToLua(tag: RegexTag) {
if (typeof tag.value === "string" && tag.invert) {
return `object.tags["${tag.key}"] ~= "${tag.value}"`
}
if (typeof tag.value === "string" && !tag.invert) {
return `object.tags["${tag.key}"] == "${tag.value}"`
}
let v: string = (<RegExp>tag.value).source.replace(/\\\//g, "/")
if ("" + tag.value === "/.+/is" && !tag.invert) {
return `object.tags["${tag.key}"] ~= nil`
}
if ("" + tag.value === "/.+/is" && tag.invert) {
return `object.tags["${tag.key}"] == nil`
}
if (tag.matchesEmpty && !tag.invert) {
return `object.tags["${tag.key}"] == nil or object.tags["${tag.key}"] == ""`
}
if (tag.matchesEmpty && tag.invert) {
return `object.tags["${tag.key}"] ~= nil or object.tags["${tag.key}"] ~= ""`
}
let head = "^((.*;)?"
let tail = "(;.*)?)$"
if (v.startsWith(head)) {
v = "(" + v.substring(head.length)
}
if (v.endsWith(tail)) {
v = v.substring(0, v.length - tail.length) + ")"
// We basically remove the optional parts at the start and the end, as string.find has this freedom anyway.
// This might result in _some_ incorrect values ending up in the database (e.g. when matching 'friture', it might also match "abc;foo_friture_bar;xyz"), but the frontend will filter this out
}
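// For example, a list-value filter such as "^((.*;)?friture(;.*)?)$" is reduced to "(friture)" here,
// so the Lua side only needs a plain string.find on the value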
if (v.indexOf(")?") > 0) {
throw (
"LUA regexes have a bad support for (optional) capture groups, as such, " +
v +
" is not supported"
)
}
if (tag.invert) {
return `object.tags["${tag.key}"] == nil or not string.find(object.tags["${tag.key}"], "${v}")`
}
return `(object.tags["${tag.key}"] ~= nil and string.find(object.tags["${tag.key}"], "${v}"))`
}
}
class GenerateLayerLua {
private readonly _id: string
private readonly _tags: TagsFilter
private readonly _foundInThemes: string[]
constructor(id: string, tags: TagsFilter, foundInThemes: string[] = []) {
this._tags = tags
this._id = id
this._foundInThemes = foundInThemes
}
public generateTables(): string {
if (!this._tags) {
return undefined
}
return [
`db_tables.pois_${this._id} = osm2pgsql.define_table({`,
this._foundInThemes ? "-- used in themes: " + this._foundInThemes.join(", ") : "",
` name = 'pois_${this._id}',`,
" ids = { type = 'any', type_column = 'osm_type', id_column = 'osm_id' },",
" columns = {",
" { column = 'tags', type = 'jsonb' },",
" { column = 'geom', type = 'point', projection = 4326, not_null = true },",
" }",
"})",
"",
`db_tables.lines_${this._id} = osm2pgsql.define_table({`,
this._foundInThemes ? "-- used in themes: " + this._foundInThemes.join(", ") : "",
` name = 'lines_${this._id}',`,
" ids = { type = 'any', type_column = 'osm_type', id_column = 'osm_id' },",
" columns = {",
" { column = 'tags', type = 'jsonb' },",
" { column = 'geom', type = 'linestring', projection = 4326, not_null = true },",
" }",
"})",
`db_tables.polygons_${this._id} = osm2pgsql.define_table({`,
this._foundInThemes ? "-- used in themes: " + this._foundInThemes.join(", ") : "",
` name = 'polygons_${this._id}',`,
" ids = { type = 'any', type_column = 'osm_type', id_column = 'osm_id' },",
" columns = {",
" { column = 'tags', type = 'jsonb' },",
" { column = 'geom', type = 'polygon', projection = 4326, not_null = true },",
" }",
"})",
"",
].join("\n")
}
}
class GenerateBuildDbScript extends Script {
constructor() {
super("Generates a .lua-file to use with osm2pgsql")
}
async main(args: string[]) {
const allNeededLayers = new ValidateThemeEnsemble().convertStrict(
AllKnownLayouts.allKnownLayouts.values()
)
const generators: GenerateLayerLua[] = []
allNeededLayers.forEach(({ tags, foundInTheme }, layerId) => {
generators.push(new GenerateLayerLua(layerId, tags, foundInTheme))
})
const script = [
"local db_tables = {}",
LuaSnippets.helpers,
...generators.map((g) => g.generateTables()),
this.generateProcessPoi(allNeededLayers),
this.generateProcessWay(allNeededLayers),
].join("\n\n\n")
const path = "build_db.lua"
fs.writeFileSync(path, script, "utf-8")
console.log("Written", path)
console.log(
allNeededLayers.size +
" layers will be created with 3 tables each. Make sure to set 'max_connections' to at least " +
(10 + 3 * allNeededLayers.size)
)
}
private earlyAbort() {
return [" if countTbl(object.tags) == 0 then", " return", " end", ""].join("\n")
}
private generateProcessPoi(
allNeededLayers: Map<string, { tags: TagsFilter; foundInTheme: string[] }>
) {
const body: string[] = []
allNeededLayers.forEach(({ tags }, layerId) => {
body.push(this.insertInto(tags, layerId, "pois_").join("\n"))
})
return [
"function osm2pgsql.process_node(object)",
this.earlyAbort(),
" local geom = object:as_point()",
" local matches_filter = false",
body.join("\n"),
"end",
].join("\n")
}
/**
* Generates the Lua lines that compute 'matches_filter' for the given tags and, when it matches, insert the object into the table '<tableprefix><layerId>'
* @param tags
* @param layerId
* @param tableprefix
* @private
*/
private insertInto(
tags: TagsFilter,
layerId: string,
tableprefix: "pois_" | "lines_" | "polygons_"
) {
const filter = LuaSnippets.toLuaFilter(tags)
return [
" matches_filter = " + filter,
" if matches_filter then",
" db_tables." + tableprefix + layerId + ":insert({",
" geom = geom,",
" tags = object.tags",
" })",
" end",
]
}
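/**
* Generates the Lua way-handler: closed ways that are tagged area=yes, or that match the polygon
* whitelist (and do not match the line blacklist), are inserted into the polygons_* tables;
* all other ways end up in the lines_* tables.
*/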
private generateProcessWay(allNeededLayers: Map<string, { tags: TagsFilter }>) {
const bodyLines: string[] = []
allNeededLayers.forEach(({ tags }, layerId) => {
bodyLines.push(this.insertInto(tags, layerId, "lines_").join("\n"))
})
const bodyPolygons: string[] = []
allNeededLayers.forEach(({ tags }, layerId) => {
bodyPolygons.push(this.insertInto(tags, layerId, "polygons_").join("\n"))
})
const isPolygon = LuaSnippets.isPolygonFeature()
return [
"function process_polygon(object, geom)",
" local matches_filter",
...bodyPolygons,
"end",
"function process_linestring(object, geom)",
" local matches_filter",
...bodyLines,
"end",
"",
"function osm2pgsql.process_way(object)",
this.earlyAbort(),
" local object_is_line = not object.is_closed or " +
LuaSnippets.toLuaFilter(isPolygon.blacklist),
` local object_is_area = object.is_closed and (object.tags["area"] == "yes" or (not object_is_line and ${LuaSnippets.toLuaFilter(
isPolygon.whitelisted,
true
)}))`,
" if object_is_area then",
" process_polygon(object, object:as_polygon())",
" else",
" process_linestring(object, object:as_linestring())",
" end",
"end",
].join("\n")
}
}
new GenerateBuildDbScript().run()

View file

@ -0,0 +1,217 @@
import { Client } from "pg"
import { Tiles } from "../../src/Models/TileRange"
import { Server } from "../server"
/**
* Just the osm2pgsql default database
*/
interface PoiDatabaseMeta {
attributes
current_timestamp
db_format
flat_node_file
import_timestamp
output
prefix
replication_base_url
replication_sequence_number
replication_timestamp
style
updatable
version
}
/**
* Connects to a PostGIS database and returns how many items there are within the given BBOX
*/
class OsmPoiDatabase {
private static readonly prefixes: ReadonlyArray<string> = ["pois", "lines", "polygons"]
private readonly _client: Client
private isConnected = false
private supportedLayers: Set<string> = undefined
private supportedLayersDate: Date = undefined
private metaCache: PoiDatabaseMeta = undefined
private metaCacheDate: Date = undefined
constructor(connectionString: string) {
this._client = new Client(connectionString)
}
async getCount(
layer: string,
bbox: [[number, number], [number, number]] = undefined
): Promise<{ count: number; lat: number; lon: number }> {
if (!this.isConnected) {
await this._client.connect()
this.isConnected = true
}
let total: number = 0
let latSum = 0
let lonSum = 0
for (const prefix of OsmPoiDatabase.prefixes) {
let query =
"SELECT COUNT(*), ST_AsText(ST_Centroid(ST_Collect(geom))) FROM " +
prefix +
"_" +
layer
if (bbox) {
query += ` WHERE ST_MakeEnvelope (${bbox[0][0]}, ${bbox[0][1]}, ${bbox[1][0]}, ${bbox[1][1]}, 4326) ~ geom`
}
const result = await this._client.query(query)
const count = Number(result.rows[0].count)
let point = result.rows[0].st_astext
if (count === 0) {
continue
}
total += count
if (!point) {
continue
}
point = point.substring(6, point.length - 1)
const [lon, lat] = point.split(" ")
latSum += lat * count
lonSum += lon * count
}
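// Combine the per-table centroids into a single, count-weighted centroid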
return { count: total, lat: latSum / total, lon: lonSum / total }
}
disconnect() {
this._client.end()
}
async getLayers(): Promise<Set<string>> {
if (
this.supportedLayers !== undefined &&
new Date().getTime() - this.supportedLayersDate.getTime() < 1000 * 60 * 60 * 24
) {
return this.supportedLayers
}
const q =
"SELECT table_name \n" +
"FROM information_schema.tables \n" +
"WHERE table_schema = 'public' AND table_name LIKE 'lines_%';"
const result = await this._client.query(q)
const layers = result.rows.map((r) => r.table_name.substring("lines_".length))
this.supportedLayers = new Set(layers)
this.supportedLayersDate = new Date()
return this.supportedLayers
}
async getMeta(): Promise<PoiDatabaseMeta> {
const now = new Date()
if (this.metaCache !== undefined) {
const diffSec = (now.getTime() - this.metaCacheDate.getTime()) / 1000
if (diffSec < 120) {
return this.metaCache
}
}
const result = await this._client.query("SELECT * FROM public.osm2pgsql_properties")
const meta = {}
for (const { property, value } of result.rows) {
meta[property] = value
}
this.metaCacheDate = now
this.metaCache = <any>meta
return this.metaCache
}
}
class CachedSqlCount {
private readonly _cache: Record<
string,
Record<
number,
{
date: Date
entry: { count: number; lat: number; lon: number }
}
>
> = {}
private readonly _poiDatabase: OsmPoiDatabase
private readonly _maxAge: number
constructor(poiDatabase: OsmPoiDatabase, maxAge: number) {
this._poiDatabase = poiDatabase
this._maxAge = maxAge
}
public async getCount(
layer: string,
tileId: number
): Promise<{ count: number; lat: number; lon: number }> {
const cachedEntry = this._cache[layer]?.[tileId]
if (cachedEntry) {
const age = (new Date().getTime() - cachedEntry.date.getTime()) / 1000
if (age < this._maxAge) {
return cachedEntry.entry
}
}
const bbox = Tiles.tile_bounds_lon_lat(...Tiles.tile_from_index(tileId))
const count = await this._poiDatabase.getCount(layer, bbox)
if (!this._cache[layer]) {
this._cache[layer] = {}
}
this._cache[layer][tileId] = { entry: count, date: new Date() }
return count
}
}
const connectionString = "postgresql://user:password@localhost:5444/osm-poi"
const tcs = new OsmPoiDatabase(connectionString)
const withCache = new CachedSqlCount(tcs, 14 * 60 * 60 * 24)
new Server(2345, { ignorePathPrefix: ["summary"] }, [
{
mustMatch: "status.json",
mimetype: "application/json",
handle: async (path: string) => {
const layers = await tcs.getLayers()
const meta = await tcs.getMeta()
return JSON.stringify({ meta, layers: Array.from(layers) })
},
},
{
mustMatch: /[a-zA-Z0-9+_-]+\/[0-9]+\/[0-9]+\/[0-9]+\.json/,
mimetype: "application/json", // "application/vnd.geo+json",
async handle(path) {
const [layers, z, x, y] = path.split(".")[0].split("/")
let sum = 0
let properties: Record<string, number> = {}
const availableLayers = await tcs.getLayers()
let latSum = 0
let lonSum = 0
for (const layer of layers.split("+")) {
if (!availableLayers.has(layer)) {
continue
}
const count = await withCache.getCount(
layer,
Tiles.tile_index(Number(z), Number(x), Number(y))
)
properties[layer] = count.count
if (count.count !== 0) {
latSum += count.lat * count.count
lonSum += count.lon * count.count
sum += count.count
}
}
properties["lon"] = lonSum / sum
properties["lat"] = latSum / sum
return JSON.stringify({ ...properties, total: sum })
},
},
])
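// A request such as "GET /drinking_water+toilet/14/8356/5468.json" (made-up layers and tile coordinates)
// would be answered with a JSON object of the shape { drinking_water: 12, toilet: 3, lon: 3.22, lat: 51.21, total: 15 }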
console.log(
">>>",
await tcs.getCount("drinking_water", [
[3.194358020772171, 51.228073636083394],
[3.2839964396059145, 51.172701162680994],
])
)

View file

@ -1,5 +0,0 @@
#! /bin/bash
# npm run generate:layeroverview
cd ../..
ts-node scripts/generateCache.ts postal_codes 8 /home/pietervdvn/Downloads/postal_codes 49.69606181911566 2.373046875 51.754240074033525 6.459960937499999 --generate-point-overview '*' --force-zoom-level 1

View file

@ -1,5 +1,5 @@
import Script from "../scripts/Script"
import { Server } from "../scripts/server"
import { Server } from "./server"
import { Utils } from "../src/Utils"
import parse from "node-html-parser"
class ServerLdScrape extends Script {
@ -8,20 +8,24 @@ class ServerLdScrape extends Script {
}
async main(args: string[]): Promise<void> {
const port = Number(args[0] ?? 2346)
const cache: Record<string, any> = []
const cache: Record<string, { date: Date; contents: any }> = {}
new Server(port, {}, [
{
mustMatch: "extractgraph",
mimetype: "application/ld+json",
async handle(content, searchParams: URLSearchParams) {
const url = searchParams.get("url")
console.log("Fetching", url)
if (cache[url]) {
return JSON.stringify(cache[url])
if (cache[url] !== undefined) {
const { date, contents } = cache[url]
// In seconds
const tdiff = (new Date().getTime() - date.getTime()) / 1000
if (tdiff < 24 * 60 * 60) {
return contents
}
}
const dloaded = await Utils.download(url, {
"User-Agent":
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/118.0.0.0 Safari/537.36", // "MapComplete/openstreetmap scraper; pietervdvn@posteo.net; https://github.com/pietervdvn/MapComplete",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1500.52 Safari/537.36", // MapComplete/openstreetmap scraper; pietervdvn@posteo.net; https://github.com/pietervdvn/MapComplete",
})
// return dloaded
const parsed = parse(dloaded)
@ -41,8 +45,6 @@ class ServerLdScrape extends Script {
console.error(e)
}
}
return JSON.stringify({})
},
},
])