forked from MapComplete/MapComplete

Chore: formatting

parent 35eff07c80
commit c08fe03ed0

422 changed files with 31594 additions and 43019 deletions
		|  | @ -136,7 +136,7 @@ class StatsDownloader { | |||
|             ScriptUtils.erasableLog( | ||||
|                 `Downloading stats for ${year}-${month}-${day}, page ${page} ${url}` | ||||
|             ) | ||||
|             const result = await Utils.downloadJson<{features: [], next: string}>(url, headers) | ||||
|             const result = await Utils.downloadJson<{ features: []; next: string }>(url, headers) | ||||
|             page++ | ||||
|             allFeatures.push(...result.features) | ||||
|             if (result.features === undefined) { | ||||
|  | @ -206,7 +206,7 @@ class GenerateSeries extends Script { | |||
|             targetDir + "/changeset-metadata", | ||||
|             targetDir + "/mapcomplete-changes/", | ||||
|             { | ||||
|                 zoomlevel: 8 | ||||
|                 zoomlevel: 8, | ||||
|             } | ||||
|         ) | ||||
|     } | ||||
|  | @ -250,7 +250,7 @@ class GenerateSeries extends Script { | |||
|             (p) => p.startsWith("stats.") && p.endsWith(".json") | ||||
|         ) | ||||
|         let allFeatures: ChangeSetData[] = allPaths.flatMap( | ||||
|                 (path) => JSON.parse(readFileSync(sourceDir + "/" + path, "utf-8")).features | ||||
|             (path) => JSON.parse(readFileSync(sourceDir + "/" + path, "utf-8")).features | ||||
|         ) | ||||
|         allFeatures = allFeatures.filter( | ||||
|             (f) => | ||||
|  | @ -269,7 +269,9 @@ class GenerateSeries extends Script { | |||
|                     f.properties.editor.toLowerCase().startsWith("mapcomplete")) | ||||
|         ) | ||||
| 
|         allFeatures = allFeatures.filter((f) => f.properties.metadata?.theme !== "EMPTY CS" && f.geometry.coordinates.length > 0) | ||||
|         allFeatures = allFeatures.filter( | ||||
|             (f) => f.properties.metadata?.theme !== "EMPTY CS" && f.geometry.coordinates.length > 0 | ||||
|         ) | ||||
|         const centerpointsAll = allFeatures.map((f) => { | ||||
|             const centerpoint = GeoOperations.centerpoint(f) | ||||
|             const c = centerpoint.geometry.coordinates | ||||
|  | @ -277,9 +279,9 @@ class GenerateSeries extends Script { | |||
|             centerpoint.geometry.coordinates = [c[1], c[0]] | ||||
|             return centerpoint | ||||
|         }) | ||||
|         const centerpoints = centerpointsAll.filter(p => { | ||||
|             const bbox= BBox.get(p) | ||||
|             if(bbox.minLat === -90 && bbox.maxLat === -90){ | ||||
|         const centerpoints = centerpointsAll.filter((p) => { | ||||
|             const bbox = BBox.get(p) | ||||
|             if (bbox.minLat === -90 && bbox.maxLat === -90) { | ||||
|                 // Due to some bug somewhere, those invalid bboxes might appear if the latitude is < 90
|                 // This crashes the 'spreadIntoBBoxes
|                 // As workaround, we simply ignore them for now
|  |  | |||
|  | @ -1,7 +1,7 @@ | |||
| import * as fs from "fs" | ||||
| import { existsSync, lstatSync, readdirSync, readFileSync } from "fs" | ||||
| import { Utils } from "../src/Utils" | ||||
| import {https} from "follow-redirects" | ||||
| import { https } from "follow-redirects" | ||||
| import { LayoutConfigJson } from "../src/Models/ThemeConfig/Json/LayoutConfigJson" | ||||
| import { LayerConfigJson } from "../src/Models/ThemeConfig/Json/LayerConfigJson" | ||||
| import xml2js from "xml2js" | ||||
|  | @ -175,7 +175,7 @@ export default class ScriptUtils { | |||
|         const requestPromise = new Promise((resolve, reject) => { | ||||
|             try { | ||||
|                 headers = headers ?? {} | ||||
|                 if(!headers.Accept){ | ||||
|                 if (!headers.Accept) { | ||||
|                     headers.accept ??= "application/json" | ||||
|                 } | ||||
|                 ScriptUtils.erasableLog(" > ScriptUtils.Download(", url, ")") | ||||
|  | @ -222,15 +222,12 @@ export default class ScriptUtils { | |||
|             } | ||||
|         }) | ||||
|         const timeoutPromise = new Promise<any>((resolve, reject) => { | ||||
|             setTimeout( | ||||
|                 () => { | ||||
|                     if(timeoutSecs === undefined){ | ||||
|                         return // No resolve
|                     } | ||||
|                    resolve("timeout") | ||||
|                 }, | ||||
|                 (timeoutSecs ?? 10) * 1000 | ||||
|             ) | ||||
|             setTimeout(() => { | ||||
|                 if (timeoutSecs === undefined) { | ||||
|                     return // No resolve
|                 } | ||||
|                 resolve("timeout") | ||||
|             }, (timeoutSecs ?? 10) * 1000) | ||||
|         }) | ||||
|         return Promise.race([requestPromise, timeoutPromise]) | ||||
|     } | ||||
|  |  | |||
|  | @ -35,7 +35,6 @@ class DownloadNsiLogos extends Script { | |||
|             return false | ||||
|         } | ||||
| 
| 
|         if (!logos) { | ||||
|             return false | ||||
|         } | ||||
|  | @ -51,7 +50,8 @@ class DownloadNsiLogos extends Script { | |||
|             do { | ||||
|                 ttl-- | ||||
|                 const dloaded = await Utils.downloadAdvanced(url, { | ||||
|                     "User-Agent": "MapComplete NSI scraper/0.1 (https://github.com/pietervdvn/MapComplete; pietervdvn@posteo.net)" | ||||
|                     "User-Agent": | ||||
|                         "MapComplete NSI scraper/0.1 (https://github.com/pietervdvn/MapComplete; pietervdvn@posteo.net)", | ||||
|                 }) | ||||
|                 const redirect: string | undefined = dloaded["redirect"] | ||||
|                 if (redirect) { | ||||
|  | @ -77,7 +77,6 @@ class DownloadNsiLogos extends Script { | |||
|         } | ||||
| 
|         return false | ||||
| 
|     } | ||||
| 
|     async main(): Promise<void> { | ||||
|  | @ -94,12 +93,14 @@ class DownloadNsiLogos extends Script { | |||
|             if (i % 100 === 0) { | ||||
|                 console.log(i + "/" + items.length, "downloaded " + downloadCount) | ||||
|             } | ||||
|             await Promise.all(Utils.TimesT(stepcount, j => j).map(async j => { | ||||
|                 const downloaded = await this.downloadLogo(items[i + j], type, basePath) | ||||
|                 if (downloaded) { | ||||
|                     downloadCount++ | ||||
|                 } | ||||
|             })) | ||||
|             await Promise.all( | ||||
|                 Utils.TimesT(stepcount, (j) => j).map(async (j) => { | ||||
|                     const downloaded = await this.downloadLogo(items[i + j], type, basePath) | ||||
|                     if (downloaded) { | ||||
|                         downloadCount++ | ||||
|                     } | ||||
|                 }) | ||||
|             ) | ||||
|         } | ||||
|     } | ||||
| } | ||||
|  |  | |||
|  | @ -207,8 +207,12 @@ function extractHintsFrom( | |||
|             validators: Validators, | ||||
|             Constants: Constants, | ||||
|         }) | ||||
|         if(hints["suggestions"]?.indexOf(null) >= 0){ | ||||
|             throw "A suggestion generated 'null' for "+path.join(".")+". Check the docstring, specifically 'suggestions'. Pay attention to double commas" | ||||
|         if (hints["suggestions"]?.indexOf(null) >= 0) { | ||||
|             throw ( | ||||
|                 "A suggestion generated 'null' for " + | ||||
|                 path.join(".") + | ||||
|                 ". Check the docstring, specifically 'suggestions'. Pay attention to double commas" | ||||
|             ) | ||||
|         } | ||||
|     } | ||||
|     return hints | ||||
|  |  | |||
|  | @ -56,15 +56,15 @@ class ToSlideshowJson { | |||
|                 sections.push(currentSection) | ||||
|                 currentSection = [] | ||||
|             } | ||||
|             line = line.replace("src=\"../../public/", "src=\"./") | ||||
|             line = line.replace("src=\"../../", "src=\"./") | ||||
|             line = line.replace('src="../../public/', 'src="./') | ||||
|             line = line.replace('src="../../', 'src="./') | ||||
|             currentSection.push(line) | ||||
|         } | ||||
|         sections.push(currentSection) | ||||
|         writeFileSync( | ||||
|             this._target, | ||||
|             JSON.stringify({ | ||||
|                 sections: sections.map((s) => s.join("\n")).filter((s) => s.length > 0) | ||||
|                 sections: sections.map((s) => s.join("\n")).filter((s) => s.length > 0), | ||||
|             }) | ||||
|         ) | ||||
|     } | ||||
|  | @ -83,7 +83,7 @@ class WikiPageGenerator { | |||
| 
|     generate() { | ||||
|         let wikiPage = | ||||
|             "{|class=\"wikitable sortable\"\n" + | ||||
|             '{|class="wikitable sortable"\n' + | ||||
|             "! Name, link !! Genre !! Covered region !! Language !! Description !! Free materials !! Image\n" + | ||||
|             "|-" | ||||
| 
|  | @ -141,7 +141,7 @@ export class GenerateDocs extends Script { | |||
|         } | ||||
| 
|         this.WriteFile("./Docs/Tags_format.md", TagUtils.generateDocs(), [ | ||||
|             "src/Logic/Tags/TagUtils.ts" | ||||
|             "src/Logic/Tags/TagUtils.ts", | ||||
|         ]) | ||||
| 
|         new ToSlideshowJson( | ||||
|  | @ -166,24 +166,24 @@ export class GenerateDocs extends Script { | |||
|         }) | ||||
| 
|         this.WriteMarkdownFile("./Docs/SpecialRenderings.md", SpecialVisualizations.HelpMessage(), [ | ||||
|             "src/UI/SpecialVisualizations.ts" | ||||
|             "src/UI/SpecialVisualizations.ts", | ||||
|         ]) | ||||
|         this.WriteFile( | ||||
|             "./Docs/CalculatedTags.md", | ||||
|             new Combine([ | ||||
|                 new Title("Metatags", 1), | ||||
|                 SimpleMetaTaggers.HelpText(), | ||||
|                 ExtraFunctions.HelpText() | ||||
|                 ExtraFunctions.HelpText(), | ||||
|             ]).SetClass("flex-col"), | ||||
|             ["src/Logic/SimpleMetaTagger.ts", "src/Logic/ExtraFunctions.ts"] | ||||
|         ) | ||||
|         this.WriteFile("./Docs/SpecialInputElements.md", Validators.HelpText(), [ | ||||
|             "src/UI/InputElement/Validators.ts" | ||||
|             "src/UI/InputElement/Validators.ts", | ||||
|         ]) | ||||
| 
|         this.WriteFile("./Docs/ChangesetMeta.md", Changes.getDocs(), [ | ||||
|             "src/Logic/Osm/Changes.ts", | ||||
|             "src/Logic/Osm/ChangesetHandler.ts" | ||||
|             "src/Logic/Osm/ChangesetHandler.ts", | ||||
|         ]) | ||||
|         new WikiPageGenerator().generate() | ||||
| 
|  | @ -208,17 +208,16 @@ export class GenerateDocs extends Script { | |||
|         let md = new Combine([ | ||||
|             Translations.W(html), | ||||
|             "\n\nThis document is autogenerated from " + | ||||
|             autogenSource | ||||
|                 .map( | ||||
|                     (file) => | ||||
|                         `[${file}](https://github.com/pietervdvn/MapComplete/blob/develop/${file})` | ||||
|                 ) | ||||
|                 .join(", ") | ||||
|                 autogenSource | ||||
|                     .map( | ||||
|                         (file) => | ||||
|                             `[${file}](https://github.com/pietervdvn/MapComplete/blob/develop/${file})` | ||||
|                     ) | ||||
|                     .join(", "), | ||||
|         ]).AsMarkdown() | ||||
|         this.WriteMarkdownFile(filename, md, autogenSource, options) | ||||
|     } | ||||
| 
| 
|     private WriteMarkdownFile( | ||||
|         filename: string, | ||||
|         markdown: string, | ||||
|  | @ -240,11 +239,10 @@ export class GenerateDocs extends Script { | |||
|             } | ||||
|         } | ||||
| 
| 
|         let md = markdown | ||||
| 
|         if (options?.noTableOfContents !== false) { | ||||
|            md = TableOfContents.insertTocIntoMd(md) | ||||
|             md = TableOfContents.insertTocIntoMd(md) | ||||
|         } | ||||
| 
|         md.replace(/\n\n\n+/g, "\n\n") | ||||
|  | @ -294,7 +292,7 @@ export class GenerateDocs extends Script { | |||
|         } | ||||
| 
|         this.WriteFile("./Docs/builtin_units.md", new Combine([new Title("Units", 1), ...els]), [ | ||||
|             `assets/layers/unit/unit.json` | ||||
|             `assets/layers/unit/unit.json`, | ||||
|         ]) | ||||
|     } | ||||
| 
|  | @ -419,7 +417,7 @@ export class GenerateDocs extends Script { | |||
|             builtinsPerLayer.set(layer.id, usedBuiltins) | ||||
|         } | ||||
| 
|         let docs =` | ||||
|         let docs = ` | ||||
|             # Index of builtin TagRenderings | ||||
|             ## Existing builtin tagrenderings | ||||
|         ` | ||||
|  | @ -428,7 +426,7 @@ export class GenerateDocs extends Script { | |||
|             docs += ` | ||||
|             ### ${builtin} | ||||
| 
|             ${usedByLayers.map(item => " - "+item).join("\n")} | ||||
|             ${usedByLayers.map((item) => " - " + item).join("\n")} | ||||
|             ` | ||||
|         } | ||||
|         this.WriteMarkdownFile("./Docs/BuiltinIndex.md", docs, ["assets/layers/*.json"]) | ||||
|  | @ -467,7 +465,7 @@ export class GenerateDocs extends Script { | |||
|                     theme.title, | ||||
|                     "(", | ||||
|                     new Link(theme.id, "https://mapcomplete.org/" + theme.id), | ||||
|                     ")" | ||||
|                     ")", | ||||
|                 ]), | ||||
|                 2 | ||||
|             ), | ||||
|  | @ -479,7 +477,7 @@ export class GenerateDocs extends Script { | |||
|                     .map((l) => new Link(l.id, "../Layers/" + l.id + ".md")) | ||||
|             ), | ||||
|             "Available languages:", | ||||
|             new List(theme.language.filter((ln) => ln !== "_context")) | ||||
|             new List(theme.language.filter((ln) => ln !== "_context")), | ||||
|         ]).SetClass("flex flex-col") | ||||
|         this.WriteFile( | ||||
|             "./Docs/Themes/" + theme.id + ".md", | ||||
|  | @ -557,7 +555,7 @@ export class GenerateDocs extends Script { | |||
|                 Array.from(AllSharedLayers.sharedLayers.keys()).map( | ||||
|                     (id) => new Link(id, "./Layers/" + id + ".md") | ||||
|                 ) | ||||
|             ) | ||||
|             ), | ||||
|         ]) | ||||
|         this.WriteFile("./Docs/BuiltinLayers.md", el, ["src/Customizations/AllKnownLayouts.ts"]) | ||||
|     } | ||||
|  |  | |||
|  | @ -284,13 +284,16 @@ class LayerOverviewUtils extends Script { | |||
|         doesImageExist: DoesImageExist, | ||||
|         bootstrapTagRenderings: Map<string, QuestionableTagRenderingConfigJson> = null | ||||
|     ): Map<string, QuestionableTagRenderingConfigJson> { | ||||
|         const prepareLayer = new PrepareLayer({ | ||||
|             tagRenderings: bootstrapTagRenderings, | ||||
|             sharedLayers: null, | ||||
|             publicLayers: null, | ||||
|         }, { | ||||
|             addTagRenderingsToContext: true | ||||
|         }) | ||||
|         const prepareLayer = new PrepareLayer( | ||||
|             { | ||||
|                 tagRenderings: bootstrapTagRenderings, | ||||
|                 sharedLayers: null, | ||||
|                 publicLayers: null, | ||||
|             }, | ||||
|             { | ||||
|                 addTagRenderingsToContext: true, | ||||
|             } | ||||
|         ) | ||||
| 
|         const path = "assets/layers/questions/questions.json" | ||||
|         const sharedQuestions = this.parseLayer(doesImageExist, prepareLayer, path).raw | ||||
|  |  | |||
|  | @ -11,18 +11,19 @@ import NameSuggestionIndex from "../src/Logic/Web/NameSuggestionIndex" | |||
| import TagInfo from "../src/Logic/Web/TagInfo" | ||||
| 
| class Utilities { | ||||
|     static mapValues<X extends string | number, T, TOut>(record: Record<X, T>, f: ((t: T) => TOut)): Record<X, TOut> { | ||||
|     static mapValues<X extends string | number, T, TOut>( | ||||
|         record: Record<X, T>, | ||||
|         f: (t: T) => TOut | ||||
|     ): Record<X, TOut> { | ||||
|         const newR = <Record<X, TOut>>{} | ||||
|         for (const x in record) { | ||||
|             newR[x] = f(record[x]) | ||||
|         } | ||||
|         return newR | ||||
|     } | ||||
| 
| } | ||||
| 
| class GenerateStats extends Script { | ||||
| 
|     async createOptimizationFile(includeTags = true) { | ||||
|         ScriptUtils.fixUtils() | ||||
|         const layers = <LayerConfigJson[]>known_layers.layers | ||||
|  | @ -72,15 +73,15 @@ class GenerateStats extends Script { | |||
|                     tagTotal.set(key, new Map<string, number>()) | ||||
|                     await Promise.all( | ||||
|                         Array.from(values).map(async (value) => { | ||||
|                                 const tagData = await TagInfo.global.getStats(key, value) | ||||
|                                 const count = tagData.data.find((item) => item.type === "all").count | ||||
|                                 tagTotal.get(key).set(value, count) | ||||
|                                 console.log(key + "=" + value, "-->", count) | ||||
|                             } | ||||
|                         ) | ||||
|                             const tagData = await TagInfo.global.getStats(key, value) | ||||
|                             const count = tagData.data.find((item) => item.type === "all").count | ||||
|                             tagTotal.get(key).set(value, count) | ||||
|                             console.log(key + "=" + value, "-->", count) | ||||
|                         }) | ||||
|                     ) | ||||
|                 } | ||||
|             })) | ||||
|             }) | ||||
|         ) | ||||
|         writeFileSync( | ||||
|             "./src/assets/key_totals.json", | ||||
|             JSON.stringify( | ||||
|  | @ -88,7 +89,7 @@ class GenerateStats extends Script { | |||
|                     "#": "Generated with generateStats.ts", | ||||
|                     date: new Date().toISOString(), | ||||
|                     keys: Utils.MapToObj(keyTotal, (t) => t), | ||||
|                     tags: Utils.MapToObj(tagTotal, (v) => Utils.MapToObj(v, (t) => t)) | ||||
|                     tags: Utils.MapToObj(tagTotal, (v) => Utils.MapToObj(v, (t) => t)), | ||||
|                 }, | ||||
|                 null, | ||||
|                 "  " | ||||
|  | @ -97,7 +98,9 @@ class GenerateStats extends Script { | |||
|     } | ||||
| 
|     private summarizeNSI(sourcefile: string, pathNoExtension: string): void { | ||||
|         const data = <Record<string, Record<string, number>>>JSON.parse(readFileSync(sourcefile, "utf8")) | ||||
|         const data = <Record<string, Record<string, number>>>( | ||||
|             JSON.parse(readFileSync(sourcefile, "utf8")) | ||||
|         ) | ||||
| 
|         const allCountries: Set<string> = new Set() | ||||
|         for (const brand in data) { | ||||
|  | @ -112,8 +115,7 @@ class GenerateStats extends Script { | |||
|         } | ||||
| 
|         const pathOut = pathNoExtension + ".summarized.json" | ||||
|         writeFileSync(pathOut, JSON.stringify( | ||||
|             data, null, "  "), "utf8") | ||||
|         writeFileSync(pathOut, JSON.stringify(data, null, "  "), "utf8") | ||||
|         console.log("Written", pathOut) | ||||
| 
|         const allBrands = Object.keys(data) | ||||
|  | @ -122,7 +124,8 @@ class GenerateStats extends Script { | |||
|             const summary = <Record<string, number>>{} | ||||
|             for (const brand of allBrands) { | ||||
|                 const count = data[brand][country] | ||||
|                 if (count > 2) { // Eéntje is geentje
|                 if (count > 2) { | ||||
|                     // Eéntje is geentje
|                     // We ignore count == 1 as they are rather exceptional
|                     summary[brand] = data[brand][country] | ||||
|                 } | ||||
|  | @ -134,17 +137,24 @@ class GenerateStats extends Script { | |||
|         } | ||||
|     } | ||||
| 
| 
|     async createNameSuggestionIndexFile(basepath: string, type: "brand" | "operator" | string) { | ||||
|         const path = basepath + type + ".json" | ||||
|         let allBrands = <Record<string, Record<string, number>>>{} | ||||
|         if (existsSync(path)) { | ||||
|             allBrands = JSON.parse(readFileSync(path, "utf8")) | ||||
|             console.log("Loaded", Object.keys(allBrands).length, " previously loaded " + type,"from",path) | ||||
|             console.log( | ||||
|                 "Loaded", | ||||
|                 Object.keys(allBrands).length, | ||||
|                 " previously loaded " + type, | ||||
|                 "from", | ||||
|                 path | ||||
|             ) | ||||
|         } | ||||
|         let skipped = 0 | ||||
|         const allBrandNames: string[] = Utils.Dedup(NameSuggestionIndex.allPossible(type).map(item => item.tags[type])) | ||||
|         const missingBrandNames : string[] = [] | ||||
|         const allBrandNames: string[] = Utils.Dedup( | ||||
|             NameSuggestionIndex.allPossible(type).map((item) => item.tags[type]) | ||||
|         ) | ||||
|         const missingBrandNames: string[] = [] | ||||
|         for (let i = 0; i < allBrandNames.length; i++) { | ||||
|             const brand = allBrandNames[i] | ||||
|             if (!!allBrands[brand] && Object.keys(allBrands[brand]).length == 0) { | ||||
|  | @ -165,20 +175,32 @@ class GenerateStats extends Script { | |||
|                 } | ||||
|             } | ||||
|             missingBrandNames.push(brand) | ||||
| 
|         } | ||||
|         const batchSize = 101 | ||||
|         for (let i = 0; i < missingBrandNames.length; i += batchSize) { | ||||
|             console.warn( | ||||
|                 "Downloading", | ||||
|                 batchSize, | ||||
|                 "items: ", | ||||
|                 i + "/" + missingBrandNames.length, | ||||
|                 "; skipped", | ||||
|                 skipped, | ||||
|                 "total:", | ||||
|                 allBrandNames.length | ||||
|             ) | ||||
| 
|             console.warn("Downloading",batchSize,"items: ", i + "/" + (missingBrandNames.length), "; skipped", skipped, "total:",allBrandNames.length) | ||||
| 
|             const distributions = await Promise.all(Utils.TimesT(batchSize, async j => { | ||||
|                 await ScriptUtils.sleep(j * 250) | ||||
|                 return TagInfo.getGlobalDistributionsFor(type, missingBrandNames[i + j]) | ||||
|             })) | ||||
|             const distributions = await Promise.all( | ||||
|                 Utils.TimesT(batchSize, async (j) => { | ||||
|                     await ScriptUtils.sleep(j * 250) | ||||
|                     return TagInfo.getGlobalDistributionsFor(type, missingBrandNames[i + j]) | ||||
|                 }) | ||||
|             ) | ||||
|             for (let j = 0; j < distributions.length; j++) { | ||||
|                 const brand = missingBrandNames[i + j] | ||||
|                 const distribution: Record<string, number> = Utilities.mapValues(distributions[j], s => s.data.find(t => t.type === "all").count) | ||||
|                 const distribution: Record<string, number> = Utilities.mapValues( | ||||
|                     distributions[j], | ||||
|                     (s) => s.data.find((t) => t.type === "all").count | ||||
|                 ) | ||||
|                 allBrands[brand] = distribution | ||||
|             } | ||||
|             writeFileSync(path, JSON.stringify(allBrands), "utf8") | ||||
|  | @ -188,7 +210,9 @@ class GenerateStats extends Script { | |||
|     } | ||||
| 
|     constructor() { | ||||
|         super("Downloads stats on osmSource-tags and keys from tagInfo. There are two usecases with separate outputs:\n 1. To optimize the query before sending it to overpass (generates ./src/assets/key_totals.json) \n 2. To amend the Name Suggestion Index ") | ||||
|         super( | ||||
|             "Downloads stats on osmSource-tags and keys from tagInfo. There are two usecases with separate outputs:\n 1. To optimize the query before sending it to overpass (generates ./src/assets/key_totals.json) \n 2. To amend the Name Suggestion Index " | ||||
|         ) | ||||
|     } | ||||
| 
|     async main(_: string[]) { | ||||
|  | @ -199,11 +223,7 @@ class GenerateStats extends Script { | |||
|             await this.createNameSuggestionIndexFile(basepath, type) | ||||
|             this.summarizeNSI(basepath + type + ".json", "./public/assets/data/nsi/stats/" + type) | ||||
|         } | ||||
| 
| 
|     } | ||||
| 
| } | ||||
| 
| 
| new GenerateStats().run() | ||||
|  |  | |||
|  | @ -431,7 +431,7 @@ function transformTranslation( | |||
|  */ | ||||
| function sortKeys(o: object): object { | ||||
|     const keys = Object.keys(o) | ||||
|     keys.sort((a,b) => (""+a) < (""+b) ? -1 : 1) | ||||
|     keys.sort((a, b) => ("" + a < "" + b ? -1 : 1)) | ||||
|     const nw = {} | ||||
|     for (const key of keys) { | ||||
|         const v = o[key] | ||||
|  | @ -582,9 +582,7 @@ function MergeTranslation(source: any, target: any, language: string, context: s | |||
|             // Add the translation
 | ||||
|             if (targetV === undefined) { | ||||
|                 if (typeof target === "string") { | ||||
|                     throw ( | ||||
|                         `Trying to merge a translation for ${language} into a fixed string at ${context} for key ${key}` | ||||
|                     ) | ||||
|                     throw `Trying to merge a translation for ${language} into a fixed string at ${context} for key ${key}` | ||||
|                 } | ||||
|                 target[key] = source[key] | ||||
|                 continue | ||||
|  |  | |||
|  | @ -2,32 +2,28 @@ import Script from "./Script" | |||
| import { Server } from "./server" | ||||
| import ScriptUtils from "./ScriptUtils" | ||||
| 
| class OpenProxy  extends Script{ | ||||
| 
| class OpenProxy extends Script { | ||||
|     constructor() { | ||||
|         super("Allows any MapComplete-related domain to access the open internet via the proxy. No caching is done") | ||||
|         super( | ||||
|             "Allows any MapComplete-related domain to access the open internet via the proxy. No caching is done" | ||||
|         ) | ||||
|     } | ||||
|    async main(args: string[]): Promise<void> { | ||||
|        new Server(1237,{ | ||||
| 
|        },[ | ||||
|            { | ||||
|                mustMatch: "json", | ||||
|                mimetype: "application/json", | ||||
|                handle: async (_, params) => { | ||||
|                    const url = decodeURIComponent(params.get("url")) | ||||
|                    let content = await ScriptUtils.Download(url) | ||||
|                    while(content["redirect"]){ | ||||
|                        content = await ScriptUtils.Download(content["redirect"]) | ||||
|                    } | ||||
|                    return content["content"] | ||||
|                } | ||||
|            } | ||||
|        ]) | ||||
|     async main(args: string[]): Promise<void> { | ||||
|         new Server(1237, {}, [ | ||||
|             { | ||||
|                 mustMatch: "json", | ||||
|                 mimetype: "application/json", | ||||
|                 handle: async (_, params) => { | ||||
|                     const url = decodeURIComponent(params.get("url")) | ||||
|                     let content = await ScriptUtils.Download(url) | ||||
|                     while (content["redirect"]) { | ||||
|                         content = await ScriptUtils.Download(content["redirect"]) | ||||
|                     } | ||||
|                     return content["content"] | ||||
|                 }, | ||||
|             }, | ||||
|         ]) | ||||
|     } | ||||
| 
| 
| 
| } | ||||
| 
| new OpenProxy().run() | ||||
|  |  | |||
|  | @ -38,7 +38,6 @@ class OsmPoiDatabase { | |||
|         this._client = new Client(connectionString) | ||||
|     } | ||||
| 
| 
|     async getCount( | ||||
|         layer: string, | ||||
|         bbox: [[number, number], [number, number]] = undefined | ||||
|  |  | |||
|  | @ -33,16 +33,16 @@ async function main(args: string[]) { | |||
|         const json = JSON.parse(rawContents) | ||||
|         Utils.WalkPath(path, json, (found) => { | ||||
|             removedLanguages.push(file) | ||||
|             console.log("Removing ",found) | ||||
|             console.log("Removing ", found) | ||||
|             return undefined | ||||
|         }) | ||||
|         const lastChar = rawContents.endsWith("\n") ? "\n" : "" | ||||
|         fs.writeFileSync(file, JSON.stringify(json, null, "    ") + lastChar) | ||||
|     } | ||||
|     if(removedLanguages.length === 0){ | ||||
|      console.warn("No items removed. Doublecheck the paths") | ||||
|     }else{ | ||||
|     console.log("Removed items in "+removedLanguages.join(", ")) | ||||
|     if (removedLanguages.length === 0) { | ||||
|         console.warn("No items removed. Doublecheck the paths") | ||||
|     } else { | ||||
|         console.log("Removed items in " + removedLanguages.join(", ")) | ||||
|     } | ||||
| } | ||||
| 
|  |  | |||
|  | @ -91,7 +91,7 @@ export class Server { | |||
| 
|                 try { | ||||
|                     const result = await handler.handle(path, url.searchParams) | ||||
|                     if(result === undefined){ | ||||
|                     if (result === undefined) { | ||||
|                         res.writeHead(500) | ||||
|                         res.write("Could not fetch this website, probably blocked by them") | ||||
|                         res.end() | ||||
|  | @ -109,7 +109,7 @@ export class Server { | |||
|                     } | ||||
|                     const extraHeaders = handler.addHeaders ?? {} | ||||
|                     res.writeHead(200, { "Content-Type": handler.mimetype, ...extraHeaders }) | ||||
|                     res.write(""+result) | ||||
|                     res.write("" + result) | ||||
|                     res.end() | ||||
|                 } catch (e) { | ||||
|                     console.error("Could not handle request:", e) | ||||
|  |  | |||
|  | @ -4,23 +4,21 @@ import parse from "node-html-parser" | |||
| import ScriptUtils from "./ScriptUtils" | ||||
| 
| class ServerLdScrape extends Script { | ||||
| 
|     constructor() { | ||||
|         super("Starts a server which fetches a webpage and returns embedded LD+JSON") | ||||
|     } | ||||
| 
|     private static async attemptDownload(url: string) { | ||||
|         const host = new URL(url).host | ||||
|         const random = Math.floor(Math.random()*100) | ||||
|         const random1 = Math.floor(Math.random()*100) | ||||
|         const random = Math.floor(Math.random() * 100) | ||||
|         const random1 = Math.floor(Math.random() * 100) | ||||
| 
|         const headers = [ | ||||
|             { | ||||
|                 "User-Agent": | ||||
|                     `Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.${random}.${random1} Safari/537.36`, | ||||
|                 "accept": "application/html" | ||||
|             } | ||||
|            /* { | ||||
|                 "User-Agent": `Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.${random}.${random1} Safari/537.36`, | ||||
|                 accept: "application/html", | ||||
|             }, | ||||
|             /* { | ||||
|                 "User-Agent": "MapComplete/openstreetmap scraper; pietervdvn@posteo.net; https://github.com/pietervdvn/MapComplete", | ||||
|                 "accept": "application/html" | ||||
|             }, | ||||
|  | @ -44,12 +42,7 @@ class ServerLdScrape extends Script { | |||
|         ] | ||||
|         for (let i = 0; i < headers.length; i++) { | ||||
|             try { | ||||
| 
|                 return await ScriptUtils.Download( | ||||
|                     url, | ||||
|                     headers[i], | ||||
|                     10 | ||||
|                 ) | ||||
|                 return await ScriptUtils.Download(url, headers[i], 10) | ||||
|             } catch (e) { | ||||
|                 console.error("Could not download", url, "with headers", headers[i], "due to", e) | ||||
|             } | ||||
|  | @ -64,7 +57,7 @@ class ServerLdScrape extends Script { | |||
|                 mustMatch: "extractgraph", | ||||
|                 mimetype: "application/ld+json", | ||||
|                 addHeaders: { | ||||
|                     "Cache-control": "max-age=3600, public" | ||||
|                     "Cache-control": "max-age=3600, public", | ||||
|                 }, | ||||
|                 async handle(content, searchParams: URLSearchParams) { | ||||
|                     const url = searchParams.get("url") | ||||
|  | @ -78,15 +71,15 @@ class ServerLdScrape extends Script { | |||
|                         } | ||||
|                     } | ||||
|                     let dloaded: { content: string } | { redirect: string } | "timeout" = { | ||||
|                         redirect: url | ||||
|                         redirect: url, | ||||
|                     } | ||||
| 
|                     do { | ||||
|                         dloaded = await ServerLdScrape.attemptDownload(dloaded["redirect"]) | ||||
|                         if (dloaded === "timeout") { | ||||
|                             return "{\"#\":\"timout reached\"}" | ||||
|                             return '{"#":"timout reached"}' | ||||
|                         } | ||||
|                         if(dloaded === undefined){ | ||||
|                         if (dloaded === undefined) { | ||||
|                             return undefined | ||||
|                         } | ||||
|                     } while (dloaded["redirect"]) | ||||
|  | @ -116,8 +109,8 @@ class ServerLdScrape extends Script { | |||
|                             console.error(e) | ||||
|                         } | ||||
|                     } | ||||
|                 } | ||||
|             } | ||||
|                 }, | ||||
|             }, | ||||
|         ]) | ||||
|     } | ||||
| } | ||||
|  |  | |||
|  | @ -65,8 +65,10 @@ class Compare extends Script { | |||
| 
|     async main(args: string[]): Promise<void> { | ||||
|         let [velopark, osm, key] = args | ||||
|         if(velopark === undefined || osm === undefined){ | ||||
|             console.log("Needed argument: velopark.geojson osm.geojson [key]\nThe key is optional and will be `ref:velopark` by default\nUse overpass to get a geojson with ref:velopark") | ||||
|         if (velopark === undefined || osm === undefined) { | ||||
|             console.log( | ||||
|                 "Needed argument: velopark.geojson osm.geojson [key]\nThe key is optional and will be `ref:velopark` by default\nUse overpass to get a geojson with ref:velopark" | ||||
|             ) | ||||
|             return | ||||
|         } | ||||
|         key ??= "ref:velopark" | ||||
|  |  | |||
|  | @ -7,32 +7,42 @@ interface DiffItem { | |||
|     /** | ||||
|      * Velopark-id | ||||
|      */ | ||||
|     "ref": string, | ||||
|     "osmid": OsmId, | ||||
|     "distance": number, | ||||
|     "diffs": { | ||||
|         key: string, | ||||
|     ref: string | ||||
|     osmid: OsmId | ||||
|     distance: number | ||||
|     diffs: { | ||||
|         key: string | ||||
|         /** | ||||
|          * The value in OpenStreetMap | ||||
|          * Might be undefined if OSM doesn't have an appropriate value | ||||
|          */ | ||||
|         osm?: string, | ||||
|         velopark: string | number } [] | ||||
|         osm?: string | ||||
|         velopark: string | number | ||||
|     }[] | ||||
| } | ||||
| 
| export class DiffToCsv extends Script { | ||||
|     constructor() { | ||||
|         super("Converts a 'report.diff' to a CSV file for people who prefer LibreOffice Calc (or other Spreadsheet Software)") | ||||
|         super( | ||||
|             "Converts a 'report.diff' to a CSV file for people who prefer LibreOffice Calc (or other Spreadsheet Software)" | ||||
|         ) | ||||
|     } | ||||
| 
|     async main(args: string[]): Promise<void> { | ||||
|         const file = args[0] ?? "report_diff.json" | ||||
|         const json = <{diffs:DiffItem[], distanceBinds: number[]}> JSON.parse(readFileSync(file, "utf8")) | ||||
|         const json = <{ diffs: DiffItem[]; distanceBinds: number[] }>( | ||||
|             JSON.parse(readFileSync(file, "utf8")) | ||||
|         ) | ||||
|         const diffs = json.diffs | ||||
|         const allKeys = Utils.Dedup(diffs.flatMap(item => item.diffs.map(d => d.key))) | ||||
|         const allKeys = Utils.Dedup(diffs.flatMap((item) => item.diffs.map((d) => d.key))) | ||||
|         allKeys.sort() | ||||
| 
|         const header = ["osm_id","velopark_id", "distance",...allKeys.flatMap(k => ["osm:"+k, "velopark:"+k])] | ||||
|         const header = [ | ||||
|             "osm_id", | ||||
|             "velopark_id", | ||||
|             "distance", | ||||
|             ...allKeys.flatMap((k) => ["osm:" + k, "velopark:" + k]), | ||||
|         ] | ||||
|         const lines = [header] | ||||
|         for (const diffItem of diffs) { | ||||
|             const line = [] | ||||
|  | @ -43,19 +53,16 @@ export class DiffToCsv extends Script { | |||
| 
|             const d = diffItem.diffs | ||||
|             for (const k of allKeys) { | ||||
|                 const found = d.find(i => i.key === k) | ||||
|                 if(!found){ | ||||
|                     line.push("","") | ||||
|                 const found = d.find((i) => i.key === k) | ||||
|                 if (!found) { | ||||
|                     line.push("", "") | ||||
|                     continue | ||||
|                 } | ||||
|                 line.push(found.osm, found.velopark) | ||||
|             } | ||||
| 
|         } | ||||
|         const path = "report_diff.csv" | ||||
|         writeFileSync(path, | ||||
|             lines.map(l => l.join(",")).join("\n") | ||||
|             ,"utf8") | ||||
|         writeFileSync(path, lines.map((l) => l.join(",")).join("\n"), "utf8") | ||||
|         console.log("Written", path) | ||||
|     } | ||||
| } | ||||
|  |  | |||
|  | @ -23,9 +23,9 @@ class VeloParkToGeojson extends Script { | |||
|             JSON.stringify( | ||||
|                 extension === ".geojson" | ||||
|                     ? { | ||||
|                         type: "FeatureCollection", | ||||
|                         features | ||||
|                     } | ||||
|                           type: "FeatureCollection", | ||||
|                           features, | ||||
|                       } | ||||
|                     : features, | ||||
|                 null, | ||||
|                 "    " | ||||
|  | @ -34,10 +34,9 @@ class VeloParkToGeojson extends Script { | |||
|         console.log("Written", file, "(" + features.length, " features)") | ||||
|     } | ||||
| 
|     private static async downloadDataFor( | ||||
|         url: string | ||||
|     ): Promise<Feature[]> { | ||||
|         const cachePath = "/home/pietervdvn/data/velopark_cache_refined/" + url.replace(/[/:.]/g, "_") | ||||
|     private static async downloadDataFor(url: string): Promise<Feature[]> { | ||||
|         const cachePath = | ||||
|             "/home/pietervdvn/data/velopark_cache_refined/" + url.replace(/[/:.]/g, "_") | ||||
|         if (fs.existsSync(cachePath)) { | ||||
|             return JSON.parse(fs.readFileSync(cachePath, "utf8")) | ||||
|         } | ||||
|  | @ -50,7 +49,7 @@ class VeloParkToGeojson extends Script { | |||
|             if (Object.keys(sectionInfo).length === 0) { | ||||
|                 console.warn("No result for", url) | ||||
|             } | ||||
|             if(!sectionInfo.geometry?.coordinates){ | ||||
|             if (!sectionInfo.geometry?.coordinates) { | ||||
|                 throw "Invalid properties!" | ||||
|             } | ||||
|             allVelopark.push(sectionInfo) | ||||
|  | @ -70,8 +69,8 @@ class VeloParkToGeojson extends Script { | |||
|         const allVelopark: Feature[] = [] | ||||
|         const batchSize = 50 | ||||
|         for (let i = 0; i < allVeloparkRaw.length; i += batchSize) { | ||||
|             await Promise.all(Utils.TimesT(batchSize, j => j).map( | ||||
|                 async j => { | ||||
|             await Promise.all( | ||||
|                 Utils.TimesT(batchSize, (j) => j).map(async (j) => { | ||||
|                     const f = allVeloparkRaw[i + j] | ||||
|                     if (!f) { | ||||
|                         return | ||||
|  | @ -83,9 +82,8 @@ class VeloParkToGeojson extends Script { | |||
|                         console.error("Loading ", f.url, " failed due to", e) | ||||
|                         failed++ | ||||
|                     } | ||||
|                 } | ||||
|             )) | ||||
| 
|                 }) | ||||
|             ) | ||||
|         } | ||||
|         console.log( | ||||
|             "Fetching data done, got ", | ||||
|  | @ -140,7 +138,7 @@ class VeloParkToGeojson extends Script { | |||
|     private static async createDiff(allVelopark: Feature[]) { | ||||
|         const bboxBelgium = new BBox([ | ||||
|             [2.51357303225, 49.5294835476], | ||||
|             [6.15665815596, 51.4750237087] | ||||
|             [6.15665815596, 51.4750237087], | ||||
|         ]) | ||||
| 
|         const alreadyLinkedQuery = new Overpass( | ||||
|  |  | |||