Merge develop

This commit is contained in:
Pieter Vander Vennet 2025-01-29 21:19:11 +01:00
commit 3fe1f39c46
457 changed files with 6866 additions and 218533 deletions

View file

@ -0,0 +1,133 @@
import Script from "./Script"
import { CommunityResource } from "../src/Logic/Web/CommunityIndex"
import { Utils } from "../src/Utils"
import { FeatureCollection, MultiPolygon, Polygon } from "geojson"
import { existsSync, mkdirSync, writeFileSync } from "fs"
import { GeoOperations } from "../src/Logic/GeoOperations"
import { Tiles } from "../src/Models/TileRange"
import ScriptUtils from "./ScriptUtils"
class DownloadCommunityIndex extends Script {
    constructor() {
        super("Updates the community index")
    }

    printHelp() {
        console.log("Arguments are:\noutputdirectory")
    }

    /** Zoom level at which the per-tile geojson files are generated */
    private static targetZoomlevel: number = 6
    /** Base URL of the upstream osm-community-index distribution files */
    private static upstreamUrl: string = "https://raw.githubusercontent.com/osmlab/osm-community-index/main/dist/"

    /**
     * Prunes away unnecessary fields from a CommunityResource,
     * keeping only what the UI actually needs (id, languages, account, type and resolved texts).
     * @private
     */
    private static stripResource(r: Readonly<CommunityResource>): CommunityResource {
        return {
            id: r.id,
            languageCodes: r.languageCodes,
            account: r.account,
            type: r.type,
            resolved: {
                name: r.resolved.name,
                description: r.resolved.description,
                url: r.resolved.url
            }
        }
    }

    /**
     * Strips every resource in the record and drops unwanted channel types
     * (twitter/x/facebook are omitted entirely).
     * @private
     */
    private static stripResourcesObj(resources: Readonly<Record<string, Readonly<CommunityResource>>>) {
        const stripped: Record<string, CommunityResource> = {}
        for (const k in resources) {
            const type = resources[k].type
            if (type === "twitter" || type === "facebook" || type === "x") {
                // These channels are toxic nowadays - we simply omit them
                continue
            }
            stripped[k] = DownloadCommunityIndex.stripResource(resources[k])
        }
        return stripped
    }

    /**
     * Downloads the upstream community index, writes:
     * - `global.json` (the worldwide "Q2" resources),
     * - one icon `.svg` per encountered resource type,
     * - `local.geojson` (all non-global features),
     * - one clipped `tile_z_x_y.geojson` per non-empty tile at the target zoom level,
     * - `tile_6_overview.json` mapping x → list of y for the written tiles.
     *
     * @param targetDirectory directory to write into; created (recursively) when missing
     */
    public static async update(targetDirectory: string) {
        const data = await Utils.downloadJson<FeatureCollection<Polygon | MultiPolygon, {
            resources: Record<string, CommunityResource>,
            nameEn: string,
            id: string
        }>>(DownloadCommunityIndex.upstreamUrl + "completeFeatureCollection.json"
        )
        if (!existsSync(targetDirectory)) {
            // recursive: also create missing parent directories instead of throwing
            mkdirSync(targetDirectory, { recursive: true })
        }
        const features = data.features
        // "Q2" is the wikidata-id for planet Earth, i.e. the global entry
        const global = features.find(
            f => f.id === "Q2"
        )
        if (global === undefined) {
            throw "Upstream community index does not contain the global entry (id Q2); refusing to continue"
        }
        const globalProperties = DownloadCommunityIndex.stripResourcesObj(global.properties.resources)
        writeFileSync(targetDirectory + "/global.json", JSON.stringify(globalProperties), "utf8")
        console.log("Written global properties")
        // Collect every resource type that occurs, so we can mirror the matching icons
        const types = new Set<string>()
        for (const f of features) {
            const res = f.properties.resources
            for (const k in res) {
                types.add(res[k].type)
            }
        }
        for (const type of types) {
            const url = `${DownloadCommunityIndex.upstreamUrl}img/${type}.svg`
            await ScriptUtils.DownloadFileTo(url, `${targetDirectory}/${type}.svg`)
        }
        const local = features.filter(f => f.id !== "Q2")
        const spread = GeoOperations.spreadIntoBboxes(local, DownloadCommunityIndex.targetZoomlevel)
        let written = 0
        let skipped = 0
        // x → list of y-coordinates of the tiles that were actually written
        const writtenTilesOverview: Record<number, number[]> = {}
        // BUGFIX: was `targetDirectory + "local.geojson"`, which is missing the path
        // separator and thus wrote e.g. `outdirlocal.geojson` next to the directory
        writeFileSync(targetDirectory + "/local.geojson", JSON.stringify({ type: "FeatureCollection", features: local }))
        for (const tileIndex of spread.keys()) {
            // renamed from `features` to avoid shadowing the outer variable
            const tileFeatures = spread.get(tileIndex)
            const clipped = GeoOperations.clipAllInBox(tileFeatures, tileIndex)
            if (clipped.length === 0) {
                skipped++
                continue
            }
            for (const f of clipped) {
                f.properties.resources = DownloadCommunityIndex.stripResourcesObj(f.properties.resources)
            }
            const [z, x, y] = Tiles.tile_from_index(tileIndex)
            const path = `${targetDirectory}/tile_${z}_${x}_${y}.geojson`
            clipped.forEach((f) => {
                // the bbox is redundant once clipped; drop it to keep files small
                delete f.bbox
            })
            writeFileSync(path, JSON.stringify({ type: "FeatureCollection", features: clipped }), "utf8")
            written++
            let yList = writtenTilesOverview[x]
            if (!yList) {
                yList = []
                writtenTilesOverview[x] = yList
            }
            yList.push(y)
            console.log(`Written tile ${path}`)
        }
        console.log(`Created ${written} tiles, skipped ${skipped}`)
        writeFileSync(targetDirectory + "/tile_6_overview.json", JSON.stringify(writtenTilesOverview), "utf8")
        console.log("Created overview file")
    }

    /**
     * Entry point: expects the output directory as the single argument;
     * prints help and exits when it is missing.
     */
    async main(args: string[]): Promise<void> {
        const path = args[0]
        if (!path) {
            this.printHelp()
            return
        }
        await DownloadCommunityIndex.update(path)
    }
}
new DownloadCommunityIndex().run()

View file

@ -235,7 +235,6 @@ function extractHintsFrom(
)
}
}
}
return hints
}

View file

@ -30,6 +30,7 @@ import TableOfContents from "../src/UI/Base/TableOfContents"
import MarkdownUtils from "../src/Utils/MarkdownUtils"
import { parse as parse_html } from "node-html-parser"
import { AvailableRasterLayers } from "../src/Models/RasterLayers"
import { ImmutableStore } from "../src/Logic/UIEventSource"
/**
* Converts a markdown-file into a .json file, which a walkthrough/slideshow element can use
@ -135,6 +136,11 @@ export class GenerateDocs extends Script {
async main(args: string[]) {
console.log("Starting documentation generation...")
ScriptUtils.fixUtils()
this.WriteMarkdownFile("./Docs/SpecialRenderings.md", SpecialVisualizations.HelpMessage(), [
"src/UI/SpecialVisualizations.ts"
])
if (!existsSync("./Docs/Themes")) {
mkdirSync("./Docs/Themes")
}
@ -165,13 +171,12 @@ export class GenerateDocs extends Script {
ScriptUtils.erasableLog("Written docs for theme", theme.id)
})
this.WriteMarkdownFile("./Docs/SpecialRenderings.md", SpecialVisualizations.HelpMessage(), [
"src/UI/SpecialVisualizations.ts",
])
this.WriteMarkdownFile(
"./Docs/CalculatedTags.md",
["# Metatags", SimpleMetaTaggers.HelpText(), ExtraFunctions.HelpText()].join("\n"),
["src/Logic/SimpleMetaTagger.ts", "src/Logic/ExtraFunctions.ts"]
["src/Logic/SimpleMetaTagger.ts", "src/Logic/ExtraFunctions.ts"],
{ noTableOfContents: false }
)
this.WriteMarkdownFile("./Docs/SpecialInputElements.md", Validators.HelpText(), [
"src/UI/InputElement/Validators.ts",
@ -211,7 +216,7 @@ export class GenerateDocs extends Script {
markdown: string,
autogenSource: string[],
options?: {
noTableOfContents: boolean
noTableOfContents?: boolean
}
): void {
for (const source of autogenSource) {
@ -245,7 +250,10 @@ export class GenerateDocs extends Script {
const generatedFrom = [
"This document is autogenerated from",
autogenSource
.map((s) => `[${s}](https://github.com/pietervdvn/MapComplete/blob/develop/${s})`)
.map(
(s) =>
`[${s}](https://source.mapcomplete.org/MapComplete/MapComplete/src/branch/develop/${s})`
)
.join(", "),
].join(" ")
@ -253,7 +261,7 @@ export class GenerateDocs extends Script {
}
private generateHotkeyDocs() {
new ThemeViewState(new ThemeConfig(<any>bookcases), new Set())
new ThemeViewState(new ThemeConfig(<any>bookcases), new ImmutableStore(new Set()))
this.WriteMarkdownFile("./Docs/Hotkeys.md", Hotkeys.generateDocumentation(), [
"src/UI/Base/Hotkeys.ts",
])

View file

@ -338,6 +338,7 @@ class GenerateLayouts extends Script {
"https://api.panoramax.xyz",
"https://panoramax.mapcomplete.org",
"https://data.velopark.be",
"https://data.mapcomplete.org"
].concat(...(await this.eliUrls()))
SpecialVisualizations.specialVisualizations.forEach((sv) => {

View file

@ -130,9 +130,9 @@ ${changeset}`
const changesObj = new Changes(
{
osmConnection,
reportError: (err) => console.error(err)
reportError: (err) => console.error(err),
},
false,
false
)
const all: ErrorMessage[] = []

View file

@ -7,19 +7,24 @@ import { Utils } from "../src/Utils"
import { LayerConfigJson } from "../src/Models/ThemeConfig/Json/LayerConfigJson"
import { FilterConfigOptionJson } from "../src/Models/ThemeConfig/Json/FilterConfigJson"
import { TagUtils } from "../src/Logic/Tags/TagUtils"
import { TagRenderingConfigJson } from "../src/Models/ThemeConfig/Json/TagRenderingConfigJson"
import { openSync, readSync } from "node:fs"
import { QuestionableTagRenderingConfigJson } from "../src/Models/ThemeConfig/Json/QuestionableTagRenderingConfigJson"
class NsiLogos extends Script {
constructor() {
super("Contains various subcommands for NSI logo maintainance")
}
private async downloadLogo(nsiItem: NSIItem, type: string, basePath: string, alreadyDownloaded: Map<string, string>) {
if(nsiItem === undefined){
private async downloadLogo(
nsiItem: NSIItem,
type: string,
basePath: string,
alreadyDownloaded: Map<string, string>
) {
if (nsiItem === undefined) {
return false
}
if(alreadyDownloaded.has(nsiItem.id)){
if (alreadyDownloaded.has(nsiItem.id)) {
return false
}
try {
@ -54,7 +59,7 @@ class NsiLogos extends Script {
await ScriptUtils.DownloadFileTo(logos.facebook, path)
// Validate
const content = readFileSync(path, "utf8")
if (content.startsWith("{\"error\"")) {
if (content.startsWith('{"error"')) {
unlinkSync(path)
console.error("Attempted to fetch", logos.facebook, " but this gave an error")
} else {
@ -109,13 +114,16 @@ class NsiLogos extends Script {
const stepcount = 50
for (let i = 0; i < items.length; i += stepcount) {
if (downloadCount > 0 || i % 200 === 0) {
console.log(i + "/" + items.length, `downloaded ${downloadCount}; failed ${errored}; skipped ${skipped} for NSI type ${type}`)
console.log(
i + "/" + items.length,
`downloaded ${downloadCount}; failed ${errored}; skipped ${skipped} for NSI type ${type}`
)
}
const results = await Promise.all(
Utils.TimesT(stepcount, (j) => j).map(async (j) => {
return await this.downloadLogo(items[i + j], type, basePath, alreadyDownloaded)
}),
})
)
for (let j = 0; j < results.length; j++) {
let didDownload = results[j]
@ -129,7 +137,12 @@ class NsiLogos extends Script {
continue
}
console.log("Retrying", items[i + j].id, type)
didDownload = await this.downloadLogo(items[i + j], type, basePath, alreadyDownloaded)
didDownload = await this.downloadLogo(
items[i + j],
type,
basePath,
alreadyDownloaded
)
if (didDownload === "error") {
errored++
console.log("Failed again:", items[i + j].id)
@ -139,7 +152,8 @@ class NsiLogos extends Script {
}
}
return {
downloadCount, errored,
downloadCount,
errored,
}
}
@ -149,20 +163,30 @@ class NsiLogos extends Script {
const filterOptions: FilterConfigOptionJson[] = items.map((item) => {
return {
question: item.displayName,
icon: nsi.getIconUrl(item, type),
icon: nsi.getIconUrl(item),
osmTags: NameSuggestionIndex.asFilterTags(item),
}
})
const mappings = items.map((item) => ({
if: NameSuggestionIndex.asFilterTags(item),
then: nsi.getIconUrl(item, type),
}))
const mappings = items
.map((item) => ({
if: NameSuggestionIndex.asFilterTags(item),
then: nsi.getIconUrl(item),
}))
.filter((mapping) => mapping.then !== undefined)
console.log("Checking for shadow-mappings... This will take a while")
let deleted = 0
for (let i = mappings.length - 1; i >= 0; i--) {
const condition = TagUtils.Tag(mappings[i].if)
if (i % 100 === 0) {
console.log("Checking for shadow-mappings...", i, "/", mappings.length)
console.log(
"Checking for shadow-mappings...",
i,
"/",
mappings.length,
"deleted",
deleted
)
}
const shadowsSomething = mappings.some((m, j) => {
if (i === j) {
@ -173,6 +197,7 @@ class NsiLogos extends Script {
// If this one matches, the other one will match as well
// We can thus remove this one in favour of the other one
if (shadowsSomething) {
deleted++
mappings.splice(i, 1)
}
}
@ -229,16 +254,19 @@ class NsiLogos extends Script {
private static readonly path: string = "./public/assets/data/nsi/logos"
private static headers: Readonly<Record<string, ReadonlyArray<ReadonlyArray<number>>>> = {
"png": [[137, 80, 78, 71, 13, 10, 26, 10]],
"jpg": [[255, 216], [255, 232]],
"gif": [[71, 73]],
png: [[137, 80, 78, 71, 13, 10, 26, 10]],
jpg: [
[255, 216],
[255, 232],
],
gif: [[71, 73]],
}
private static downloadedFiles(): Map<string, string> {
const allFiles = ScriptUtils.readDirRecSync(NsiLogos.path, 1)
const ids = new Map<string, string>()
for (const f of allFiles) {
const match = f.match("^.*/\([a-zA-Z0-9-]+\)\(.[a-z]{3}\)?")
const match = f.match("^.*/([a-zA-Z0-9-]+)(.[a-z]{3})?")
const id = match[1]
ids.set(id, f)
}
@ -262,7 +290,7 @@ class NsiLogos extends Script {
const allFiles = ScriptUtils.readDirRecSync(NsiLogos.path, 1)
let pruned = 0
for (const f of allFiles) {
const match = f.match("^.*/\([a-zA-Z0-9-]+\)\(.[a-z]{3}\)?")
const match = f.match("^.*/([a-zA-Z0-9-]+)(.[a-z]{3})?")
const id = match[1]
if (!ids.has(id)) {
console.log("Obsolete file:", f, id)
@ -271,8 +299,6 @@ class NsiLogos extends Script {
}
}
console.log("Removed ", pruned, "files")
}
private startsWith(buffer: Buffer, header: ReadonlyArray<number>): boolean {
@ -283,8 +309,11 @@ class NsiLogos extends Script {
return doesMatch
}
private startsWithAnyOf(buffer: Buffer, headers: ReadonlyArray<ReadonlyArray<number>>): boolean {
return headers.some(header => this.startsWith(buffer, header))
private startsWithAnyOf(
buffer: Buffer,
headers: ReadonlyArray<ReadonlyArray<number>>
): boolean {
return headers.some((header) => this.startsWith(buffer, header))
}
private async addExtensions() {
@ -297,8 +326,7 @@ class NsiLogos extends Script {
const fd = openSync(f, "r")
const buffer = Buffer.alloc(10)
const num = readSync(fd, buffer, 0, 10, null)
if (num === 0)
throw "INvalid file:" + f
if (num === 0) throw "INvalid file:" + f
let matchFound = false
for (const format in NsiLogos.headers) {
@ -318,16 +346,23 @@ class NsiLogos extends Script {
unlinkSync(f)
continue
}
throw "No format found for " + f + buffer.slice(0, 10).join(" ") + " ascii: " + text.slice(0, 40)
throw (
"No format found for " +
f +
buffer.slice(0, 10).join(" ") +
" ascii: " +
text.slice(0, 40)
)
}
}
private async patchNsiFile(){
private async patchNsiFile() {
const files = NsiLogos.downloadedFiles()
const path = "./public/assets/data/nsi/nsi.min.json"
const pathOut = "./public/assets/data/nsi/nsi.patched.json"
let path = "./public/assets/data/nsi/nsi.min.json"
const otherPath = "./assets/data/nsi/nsi.min.json"
if (existsSync(otherPath) && !existsSync(path)) {
path = otherPath
}
const nsi = JSON.parse(readFileSync(path, "utf8"))
const types = nsi.nsi
@ -335,34 +370,33 @@ class NsiLogos extends Script {
const t: NSIItem[] = types[k].items
for (const nsiItem of t) {
const file = files.get(nsiItem.id)
if(!file){
delete nsiItem.fromTemplate
if (!file) {
continue
}
const extension = file.match(".*\.\([a-z]{3}\)")[1]
const extension = file.match(/.*\.([a-z]{3})/)[1]
nsiItem["ext"] = extension
delete nsiItem.fromTemplate
}
}
writeFileSync(path, JSON.stringify(nsi), "utf8")
}
private commands: Record<string, { f: () => Promise<void>, doc?: string }> = {
"download": { f: () => this.download(), doc: "Download all icons" },
"generateRenderings": {
private commands: Record<string, { f: () => Promise<void>; doc?: string }> = {
download: { f: () => this.download(), doc: "Download all icons" },
generateRenderings: {
f: () => this.generateRenderings(),
doc: "Generates the layer files 'nsi_brand' and 'nsi_operator' which allows to reuse the icons in renderings",
},
"prune": { f: () => NsiLogos.prune(), doc: "Remove no longer needed files" },
"addExtensions": {
prune: { f: () => NsiLogos.prune(), doc: "Remove no longer needed files" },
addExtensions: {
f: () => this.addExtensions(),
doc: "Inspects all files without an extension; might remove invalid files",
},
"patch": {
patch: {
f: () => this.patchNsiFile(),
doc: "Reads nsi.min.json, adds the 'ext' (extension) field to every relevant entry"
doc: "Reads nsi.min.json, adds the 'ext' (extension) field to every relevant entry",
},
"all": {
all: {
doc: "Run `download`, `generateRenderings`, `prune` and `addExtensions`",
f: async () => {
await NsiLogos.prune()
@ -389,7 +423,6 @@ class NsiLogos extends Script {
}
for (const command of args) {
const c = this.commands[command]
if (!c) {
console.log("Unrecognized command:", c)

View file

@ -1,13 +1,12 @@
import * as fs from "fs"
import StaticFeatureSource from "../Logic/FeatureSource/Sources/StaticFeatureSource"
import * as readline from "readline"
import ScriptUtils from "./ScriptUtils"
import { Utils } from "../Utils"
import Script from "./Script"
import { BBox } from "../Logic/BBox"
import { GeoOperations } from "../Logic/GeoOperations"
import { Tiles } from "../Models/TileRange"
import { Feature } from "geojson"
import { features } from "monaco-editor/esm/metadata"
/**
* This script slices a big newline-delimeted geojson file into tiled geojson
@ -96,34 +95,15 @@ class Slice extends Script {
features: Feature[],
tileIndex: number,
outputDirectory: string,
doSlice: boolean,
doClip: boolean,
handled: number,
maxNumberOfTiles: number
) {
): boolean {
if (doClip) {
features = GeoOperations.clipAllInBox(features, tileIndex)
}
const [z, x, y] = Tiles.tile_from_index(tileIndex)
const path = `${outputDirectory}/tile_${z}_${x}_${y}.geojson`
const box = BBox.fromTileIndex(tileIndex)
if (doSlice) {
features = Utils.NoNull(
features.map((f) => {
const bbox = box.asGeoJson({})
const properties = {
...f.properties,
id: (f.properties?.id ?? "") + "_" + z + "_" + x + "_" + y,
}
if (GeoOperations.completelyWithin(bbox, <any>f)) {
bbox.properties = properties
return bbox
}
const intersection = GeoOperations.intersect(f, box.asGeoJson({}))
if (intersection) {
intersection.properties = properties
}
return intersection
})
)
}
features.forEach((f) => {
delete f.bbox
})
@ -177,7 +157,7 @@ class Slice extends Script {
}
console.log("Using directory ", outputDirectory)
let allFeatures: any[]
let allFeatures: Feature[]
if (inputFile.endsWith(".geojson")) {
console.log("Detected geojson")
allFeatures = await this.readFeaturesFromGeoJson(inputFile)
@ -202,18 +182,16 @@ class Slice extends Script {
}
const maxNumberOfTiles = Math.pow(2, zoomlevel) * Math.pow(2, zoomlevel)
let handled = 0
StaticFeatureSource.fromGeojson(allFeatures).features.addCallbackAndRun((feats) => {
GeoOperations.slice(zoomlevel, feats).forEach((tileData, tileIndex) => {
handled = handled + 1
this.handleTileData(
tileData,
tileIndex,
outputDirectory,
doSlice,
handled,
maxNumberOfTiles
)
})
GeoOperations.slice(zoomlevel, features).forEach((tileData, tileIndex) => {
handled = handled + 1
this.handleTileData(
tileData,
tileIndex,
outputDirectory,
doSlice,
handled,
maxNumberOfTiles
)
})
}
}