forked from MapComplete/MapComplete

Chore: reformat all files with prettier

parent 5757ae5dea
commit d008dcb54d

214 changed files with 8926 additions and 8196 deletions
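This is a pure formatting pass: the hunks below change only whitespace, quoting and line wrapping, with no behavioural changes. A sweep like this is typically produced by running prettier over the whole tree (for example `npx prettier --write .`) and enforced afterwards with `npx prettier --check .` in CI. The prettier configuration itself is not part of this diff; judging from the changes (semicolons dropped, spaces added inside braces, four-space indentation, wrapping near 100 characters, trailing commas in multiline literals, parenthesised arrow arguments), a plausible configuration would look roughly like the sketch below. This is inferred from the diff, not taken from the repository's actual .prettierrc:

    {
        "semi": false,
        "tabWidth": 4,
        "printWidth": 100,
        "trailingComma": "es5",
        "arrowParens": "always"
    }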
				
			
@@ -1,6 +1,6 @@
 import ScriptUtils from "./ScriptUtils"
 import { appendFileSync, readFileSync, writeFileSync } from "fs"
-import OsmObjectDownloader from "../Logic/Osm/OsmObjectDownloader";
+import OsmObjectDownloader from "../Logic/Osm/OsmObjectDownloader"
 
 ScriptUtils.fixUtils()
 
@@ -16,7 +16,7 @@ export default abstract class Script {
         this.main(args).then((_) => console.log("All done"))
     }
 
-    public printHelp(){
+    public printHelp() {
         console.log(this._docs)
     }
 }
 
@@ -1,14 +1,14 @@
 import Script from "./Script"
 import fs from "fs"
-import {Feature} from "geojson"
-import {GeoOperations} from "../Logic/GeoOperations"
-import {Utils} from "../Utils"
-import {OsmObject} from "../Logic/Osm/OsmObject"
-import {OsmId} from "../Models/OsmFeature"
+import { Feature } from "geojson"
+import { GeoOperations } from "../Logic/GeoOperations"
+import { Utils } from "../Utils"
+import { OsmObject } from "../Logic/Osm/OsmObject"
+import { OsmId } from "../Models/OsmFeature"
 import ScriptUtils from "./ScriptUtils"
-import OsmObjectDownloader from "../Logic/Osm/OsmObjectDownloader";
-import PhoneValidator from "../UI/InputElement/Validators/PhoneValidator";
-import UrlValidator from "../UI/InputElement/Validators/UrlValidator";
+import OsmObjectDownloader from "../Logic/Osm/OsmObjectDownloader"
+import PhoneValidator from "../UI/InputElement/Validators/PhoneValidator"
+import UrlValidator from "../UI/InputElement/Validators/UrlValidator"
 
 interface PossibleMatch {
     /**
@@ -48,15 +48,12 @@ export class Conflate extends Script {
     }
 
     private static toXml(changedObjects: OsmObject[]): string {
-
         return [
             "<?xml version='1.0' encoding='UTF-8'?>",
             "<osm version=\"0.6\" generator='mapcomplete-conflate-script'>",
-            ...changedObjects.map(obj =>
-                obj.ChangesetXML(undefined, ' action="modify" ')
-            ),
-            "</osm>"
-        ].join("\n");
+            ...changedObjects.map((obj) => obj.ChangesetXML(undefined, ' action="modify" ')),
+            "</osm>",
+        ].join("\n")
     }
 
     async main(args: string[]): Promise<void> {
@@ -84,10 +81,10 @@ export class Conflate extends Script {
         }
 
         const external_features: Feature[] = JSON.parse(
-            fs.readFileSync(external_file_path, {encoding: "utf-8"})
+            fs.readFileSync(external_file_path, { encoding: "utf-8" })
         ).features
         const osm_features: Feature[] = JSON.parse(
-            fs.readFileSync(osm_file_path, {encoding: "utf-8"})
+            fs.readFileSync(osm_file_path, { encoding: "utf-8" })
         ).features
 
         const bestMatches = await this.calculateMatches(external_features, osm_features, max_range)
@@ -106,9 +103,9 @@ export class Conflate extends Script {
         ]
 
         const changedObjects: OsmObject[] = []
-        for (const {match, replayed} of bestMatches) {
-            const {external_feature, d, osm_feature} = match
-            const {possibly_imported, certainly_imported, resting_properties} = replayed
+        for (const { match, replayed } of bestMatches) {
+            const { external_feature, d, osm_feature } = match
+            const { possibly_imported, certainly_imported, resting_properties } = replayed
             const status = resting_properties["status"]
             delete resting_properties["status"]
             if (Object.keys(resting_properties).length === 0) {
@@ -125,7 +122,7 @@ export class Conflate extends Script {
             ])
 
             const osmObj = await new OsmObjectDownloader().DownloadObjectAsync(id)
-            if(osmObj === "deleted"){
+            if (osmObj === "deleted") {
                 return
             }
             for (const key in resting_properties) {
@@ -142,13 +139,10 @@ export class Conflate extends Script {
             match_lengths.map((l) => l.join("\t")).join("\n")
         )
 
-        fs.writeFileSync(targetDir + "/changeset.xml",
-            Conflate.toXml(changedObjects)
-        )
+        fs.writeFileSync(targetDir + "/changeset.xml", Conflate.toXml(changedObjects))
 
-
-        fs.writeFileSync(targetDir +
-            "/unmatched.geojson",
+        fs.writeFileSync(
+            targetDir + "/unmatched.geojson",
             JSON.stringify(
                 {
                     type: "FeatureCollection",
@@ -166,7 +160,9 @@ export class Conflate extends Script {
         osmName: string,
         osmId: OsmId
     ): Promise<{ earliestDateOfImport; latestDateOfImport }> {
-        const history = await new OsmObjectDownloader().DownloadHistory(osmId).AsPromise((h) => h.length > 0)
+        const history = await new OsmObjectDownloader()
+            .DownloadHistory(osmId)
+            .AsPromise((h) => h.length > 0)
         let earliest: Date = undefined
         let latest: Date = undefined
         for (const historyElement of history) {
@@ -196,7 +192,7 @@ export class Conflate extends Script {
             this.latestDate = latest
         }
 
-        return {earliestDateOfImport: earliest, latestDateOfImport: latest}
+        return { earliestDateOfImport: earliest, latestDateOfImport: latest }
     }
 
     private findPossibleMatchesFor(
@@ -232,7 +228,7 @@ export class Conflate extends Script {
         }
         const cachePath = this.historyCacheDir + "/urls/    " + url.replace(/[/\\:]/g, "_")
         if (fs.existsSync(cachePath)) {
-            return JSON.parse(fs.readFileSync(cachePath, {encoding: "utf-8"}))
+            return JSON.parse(fs.readFileSync(cachePath, { encoding: "utf-8" }))
         }
         let online: boolean | string = false
         try {
@@ -244,7 +240,7 @@ export class Conflate extends Script {
                 console.log("Maybe trying the homepage will help?")
             }
         }
-        fs.writeFileSync(cachePath, JSON.stringify(online, null, "  "), {encoding: "utf-8"})
+        fs.writeFileSync(cachePath, JSON.stringify(online, null, "  "), { encoding: "utf-8" })
         return online
     }
 
@@ -255,7 +251,8 @@ export class Conflate extends Script {
         url = url.replace("http://", "https://")
         try {
             const result = await ScriptUtils.Download(url, {
-                "User-agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/114.0"
+                "User-agent":
+                    "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/114.0",
             })
             if (result["redirect"]) {
                 if (result["redirect"].startsWith("/")) {
@@ -280,10 +277,12 @@ export class Conflate extends Script {
             fs.mkdirSync(this.historyCacheDir)
         }
         if (fs.existsSync(cachePath)) {
-            return JSON.parse(fs.readFileSync(cachePath, {encoding: "utf-8"}))
+            return JSON.parse(fs.readFileSync(cachePath, { encoding: "utf-8" }))
         }
-        const history = await new OsmObjectDownloader().DownloadHistory(id).AsPromise((l) => l.length > 0)
-        fs.writeFileSync(cachePath, JSON.stringify(history, null, "  "), {encoding: "utf-8"})
+        const history = await new OsmObjectDownloader()
+            .DownloadHistory(id)
+            .AsPromise((l) => l.length > 0)
+        fs.writeFileSync(cachePath, JSON.stringify(history, null, "  "), { encoding: "utf-8" })
         return history
     }
 
@@ -328,7 +327,7 @@ export class Conflate extends Script {
         let certainly_imported = match.d < 0.0001
         let possibly_imported = false
 
-        const resting_properties = {...match.external_feature.properties}
+        const resting_properties = { ...match.external_feature.properties }
         await this.normalize(resting_properties)
 
         for (const historyElement of history) {
 
@@ -1,37 +1,34 @@
 /**
  * Generates a collection of geojson files based on an overpass query for a given theme
  */
-import {Utils} from "../Utils"
-import {Overpass} from "../Logic/Osm/Overpass"
-import {existsSync, readFileSync, writeFileSync} from "fs"
-import {TagsFilter} from "../Logic/Tags/TagsFilter"
-import {Or} from "../Logic/Tags/Or"
-import {AllKnownLayouts} from "../Customizations/AllKnownLayouts"
+import { Utils } from "../Utils"
+import { Overpass } from "../Logic/Osm/Overpass"
+import { existsSync, readFileSync, writeFileSync } from "fs"
+import { TagsFilter } from "../Logic/Tags/TagsFilter"
+import { Or } from "../Logic/Tags/Or"
+import { AllKnownLayouts } from "../Customizations/AllKnownLayouts"
 import * as OsmToGeoJson from "osmtogeojson"
 import MetaTagging from "../Logic/MetaTagging"
-import {UIEventSource} from "../Logic/UIEventSource"
-import {TileRange, Tiles} from "../Models/TileRange"
+import { UIEventSource } from "../Logic/UIEventSource"
+import { TileRange, Tiles } from "../Models/TileRange"
 import LayoutConfig from "../Models/ThemeConfig/LayoutConfig"
 import ScriptUtils from "./ScriptUtils"
 import PerLayerFeatureSourceSplitter from "../Logic/FeatureSource/PerLayerFeatureSourceSplitter"
 import FilteredLayer from "../Models/FilteredLayer"
 import StaticFeatureSource from "../Logic/FeatureSource/Sources/StaticFeatureSource"
 import Constants from "../Models/Constants"
-import {GeoOperations} from "../Logic/GeoOperations"
-import SimpleMetaTaggers, {ReferencingWaysMetaTagger} from "../Logic/SimpleMetaTagger"
+import { GeoOperations } from "../Logic/GeoOperations"
+import SimpleMetaTaggers, { ReferencingWaysMetaTagger } from "../Logic/SimpleMetaTagger"
 import FilteringFeatureSource from "../Logic/FeatureSource/Sources/FilteringFeatureSource"
-import {Feature} from "geojson"
-import {BBox} from "../Logic/BBox"
-import {FeatureSource} from "../Logic/FeatureSource/FeatureSource";
-import OsmObjectDownloader from "../Logic/Osm/OsmObjectDownloader";
-import FeaturePropertiesStore from "../Logic/FeatureSource/Actors/FeaturePropertiesStore";
+import { Feature } from "geojson"
+import { BBox } from "../Logic/BBox"
+import { FeatureSource } from "../Logic/FeatureSource/FeatureSource"
+import OsmObjectDownloader from "../Logic/Osm/OsmObjectDownloader"
+import FeaturePropertiesStore from "../Logic/FeatureSource/Actors/FeaturePropertiesStore"
 
 ScriptUtils.fixUtils()
 
-function createOverpassObject(
-    theme: LayoutConfig,
-    backend: string
-) {
+function createOverpassObject(theme: LayoutConfig, backend: string) {
     let filters: TagsFilter[] = []
     let extraScripts: string[] = []
     for (const layer of theme.layers) {
@@ -59,12 +56,7 @@ function createOverpassObject(
     if (filters.length + extraScripts.length === 0) {
         throw "Nothing to download! The theme doesn't declare anything to download"
     }
-    return new Overpass(
-        new Or(filters),
-        extraScripts,
-        backend,
-        new UIEventSource<number>(60),
-    )
+    return new Overpass(new Or(filters), extraScripts, backend, new UIEventSource<number>(60))
 }
 
 function rawJsonName(targetDir: string, x: number, y: number, z: number): string {
@@ -79,7 +71,7 @@ function geoJsonName(targetDir: string, x: number, y: number, z: number): string
 async function downloadRaw(
     targetdir: string,
     r: TileRange,
-    theme: LayoutConfig,
+    theme: LayoutConfig
 ): Promise<{ failed: number; skipped: number }> {
     let downloaded = 0
     let failed = 0
@@ -127,14 +119,14 @@ async function downloadRaw(
             )
             const url = overpass.buildQuery(
                 "[bbox:" +
-                bounds.south +
-                "," +
-                bounds.west +
-                "," +
-                bounds.north +
-                "," +
-                bounds.east +
-                "]"
+                    bounds.south +
+                    "," +
+                    bounds.west +
+                    "," +
+                    bounds.north +
+                    "," +
+                    bounds.east +
+                    "]"
             )
 
             try {
@@ -164,7 +156,7 @@ async function downloadRaw(
         }
     }
 
-    return {failed: failed, skipped: skipped}
+    return { failed: failed, skipped: skipped }
 }
 
 /*
@@ -174,7 +166,7 @@ async function downloadRaw(
 async function downloadExtraData(theme: LayoutConfig) /* : any[] */ {
     const allFeatures: any[] = []
     for (const layer of theme.layers) {
-        if(!layer.source?.geojsonSource){
+        if (!layer.source?.geojsonSource) {
             continue
         }
         const source = layer.source.geojsonSource
@@ -182,7 +174,7 @@ async function downloadExtraData(theme: LayoutConfig) /* : any[] */ {
             // Cached layers are not considered here
             continue
         }
-        if(source.startsWith("https://api.openstreetmap.org/api/0.6/notes.json")){
+        if (source.startsWith("https://api.openstreetmap.org/api/0.6/notes.json")) {
             // We ignore map notes
             continue
         }
@@ -211,7 +203,7 @@ function loadAllTiles(
             }
 
             // We read the raw OSM-file and convert it to a geojson
-            const rawOsm = JSON.parse(readFileSync(filename, {encoding: "utf8"}))
+            const rawOsm = JSON.parse(readFileSync(filename, { encoding: "utf8" }))
 
             // Create and save the geojson file - which is the main chunk of the data
             const geojson = OsmToGeoJson.default(rawOsm)
@@ -254,80 +246,76 @@ async function sliceToTiles(
         return indexedFeatures.get(id)
     }
 
-
     const flayers: FilteredLayer[] = theme.layers.map((l) => new FilteredLayer(l))
-    const perLayer = new PerLayerFeatureSourceSplitter(
-        flayers,
-        allFeatures,
-    )
+    const perLayer = new PerLayerFeatureSourceSplitter(flayers, allFeatures)
     for (const [layerId, source] of perLayer.perLayer) {
-            const layer = flayers.find(flayer => flayer.layerDef.id === layerId).layerDef
-            const targetZoomLevel = layer.source.geojsonZoomLevel ?? targetzoomLevel
-
-            if (layer.source.geojsonSource && layer.source.isOsmCacheLayer !== true) {
-                console.log("Skipping layer ", layerId, ": not a caching layer")
-                skippedLayers.add(layer.id)
-                continue
-            }
-            const flayer: FilteredLayer = new FilteredLayer(layer)
-            console.log(
-                "Handling layer ",
-                layerId,
-                "which has",
-                source.features.data.length,
-                "features"
-            )
-            if (source.features.data.length === 0) {
-                continue
-            }
-            const featureProperties: FeaturePropertiesStore = new FeaturePropertiesStore(source)
-
-            MetaTagging.addMetatags(
-                source.features.data,
-                {
-                    getFeaturesWithin: (_) => {
-                        return <any>[allFeatures.features.data]
-                    },
-                    getFeatureById: getFeatureById,
-                },
-                layer,
-                theme,
-                osmObjectDownloader,
-                featureProperties,
-                {
-                    includeDates: false,
-                    includeNonDates: true,
-                    evaluateStrict: true,
-                }
-            )
-
-            while (SimpleMetaTaggers.country.runningTasks.size > 0) {
-                console.log(
-                    "Still waiting for ",
-                    SimpleMetaTaggers.country.runningTasks.size,
-                    " features which don't have a country yet"
-                )
-                await ScriptUtils.sleep(250)
-            }
-
-            const createdTiles = []
-            // At this point, we have all the features of the entire area.
-            // However, we want to export them per tile of a fixed size, so we use a dynamicTileSOurce to split it up
-            const features = source.features.data
-            const perBbox = GeoOperations.spreadIntoBboxes(features, targetZoomLevel)
-
-            for (let [tileIndex, features] of perBbox) {
-                const bbox = BBox.fromTileIndex(tileIndex).asGeoJson({})
-                console.log("Got tile:", tileIndex, layer.id)
-                if (features.length === 0) {
-                    continue
-                }
-                const filteredTile = new FilteringFeatureSource(
-                    flayer,
-                    new StaticFeatureSource(features)
-                )
-                console.log(
-                    "Tile " +
+        const layer = flayers.find((flayer) => flayer.layerDef.id === layerId).layerDef
+        const targetZoomLevel = layer.source.geojsonZoomLevel ?? targetzoomLevel
+
+        if (layer.source.geojsonSource && layer.source.isOsmCacheLayer !== true) {
+            console.log("Skipping layer ", layerId, ": not a caching layer")
+            skippedLayers.add(layer.id)
+            continue
+        }
+        const flayer: FilteredLayer = new FilteredLayer(layer)
+        console.log(
+            "Handling layer ",
+            layerId,
+            "which has",
+            source.features.data.length,
+            "features"
+        )
+        if (source.features.data.length === 0) {
+            continue
+        }
+        const featureProperties: FeaturePropertiesStore = new FeaturePropertiesStore(source)
+
+        MetaTagging.addMetatags(
+            source.features.data,
+            {
+                getFeaturesWithin: (_) => {
+                    return <any>[allFeatures.features.data]
+                },
+                getFeatureById: getFeatureById,
+            },
+            layer,
+            theme,
+            osmObjectDownloader,
+            featureProperties,
+            {
+                includeDates: false,
+                includeNonDates: true,
+                evaluateStrict: true,
+            }
+        )
+
+        while (SimpleMetaTaggers.country.runningTasks.size > 0) {
+            console.log(
+                "Still waiting for ",
+                SimpleMetaTaggers.country.runningTasks.size,
+                " features which don't have a country yet"
+            )
+            await ScriptUtils.sleep(250)
+        }
+
+        const createdTiles = []
+        // At this point, we have all the features of the entire area.
+        // However, we want to export them per tile of a fixed size, so we use a dynamicTileSOurce to split it up
+        const features = source.features.data
+        const perBbox = GeoOperations.spreadIntoBboxes(features, targetZoomLevel)
+
+        for (let [tileIndex, features] of perBbox) {
+            const bbox = BBox.fromTileIndex(tileIndex).asGeoJson({})
+            console.log("Got tile:", tileIndex, layer.id)
+            if (features.length === 0) {
+                continue
+            }
+            const filteredTile = new FilteringFeatureSource(
+                flayer,
+                new StaticFeatureSource(features)
+            )
+            console.log(
+                "Tile " +
                     layer.id +
                     "." +
                     tileIndex +
@@ -336,128 +324,121 @@ async function sliceToTiles(
                     " features after filtering (" +
                     features.length +
                     ") features before"
-                )
-                if (filteredTile.features.data.length === 0) {
-                    continue
-                }
-
-                let strictlyCalculated = 0
-                let featureCount = 0
-
-                for (const feature of features) {
-                    // Some cleanup
-
-                    if (layer.calculatedTags !== undefined) {
-                        // Evaluate all the calculated tags strictly
-                        const calculatedTagKeys = layer.calculatedTags.map(
-                            (ct) => ct[0]
-                        )
-                        featureCount++
-                        const props = feature.properties
-                        for (const calculatedTagKey of calculatedTagKeys) {
-                            const strict = props[calculatedTagKey]
-
-                            if (props.hasOwnProperty(calculatedTagKey)) {
-                                delete props[calculatedTagKey]
-                            }
-
-                            props[calculatedTagKey] = strict
-                            strictlyCalculated++
-                            if (strictlyCalculated % 100 === 0) {
-                                console.log(
-                                    "Strictly calculated ",
-                                    strictlyCalculated,
-                                    "values for tile",
-                                    tileIndex,
-                                    ": now at ",
-                                    featureCount,
-                                    "/",
-                                    filteredTile.features.data.length,
-                                    "examle value: ",
-                                    strict
-                                )
-                            }
-                        }
-                    }
-                    delete feature["bbox"]
-                }
-
-                if (clip) {
-                    console.log("Clipping features")
-                    features = [].concat(
-                        ...features.map((f: Feature) => GeoOperations.clipWith(<any>f, bbox))
-                    )
-                }
-                // Lets save this tile!
-                const [z, x, y] = Tiles.tile_from_index(tileIndex)
-                // console.log("Writing tile ", z, x, y, layerId)
-                const targetPath = geoJsonName(targetdir + "_" + layerId, x, y, z)
-                createdTiles.push(tileIndex)
-                // This is the geojson file containing all features for this tile
-                writeFileSync(
-                    targetPath,
-                    JSON.stringify(
-                        {
-                            type: "FeatureCollection",
-                            features,
-                        },
-                        null,
-                        " "
-                    )
-                )
-                console.log("Written tile", targetPath, "with", filteredTile.features.data.length)
-            }
-
-
-            // All the tiles are written at this point
-            // Only thing left to do is to create the index
-            const path = targetdir + "_" + layerId + "_" + targetZoomLevel + "_overview.json"
-            const perX = {}
-            createdTiles
-                .map((i) => Tiles.tile_from_index(i))
-                .forEach(([z, x, y]) => {
-                    const key = "" + x
-                    if (perX[key] === undefined) {
-                        perX[key] = []
-                    }
-                    perX[key].push(y)
-                })
-            console.log("Written overview: ", path, "with ", createdTiles.length, "tiles")
-            writeFileSync(path, JSON.stringify(perX))
-
-            // And, if needed, to create a points-only layer
-            if (pointsOnlyLayers.indexOf(layer.id) >= 0) {
-                const filtered = new FilteringFeatureSource(
-                    flayer,
-                    source
-                )
-                const features = filtered.features.data
-
-                const points = features.map((feature) => GeoOperations.centerpoint(feature))
-                console.log("Writing points overview for ", layerId)
-                const targetPath = targetdir + "_" + layerId + "_points.geojson"
-                // This is the geojson file containing all features for this tile
-                writeFileSync(
-                    targetPath,
-                    JSON.stringify(
-                        {
-                            type: "FeatureCollection",
-                            features: points,
-                        },
-                        null,
-                        " "
-                    )
-                )
-            }
+            )
+            if (filteredTile.features.data.length === 0) {
+                continue
+            }
+
+            let strictlyCalculated = 0
+            let featureCount = 0
+
+            for (const feature of features) {
+                // Some cleanup
+
+                if (layer.calculatedTags !== undefined) {
+                    // Evaluate all the calculated tags strictly
+                    const calculatedTagKeys = layer.calculatedTags.map((ct) => ct[0])
+                    featureCount++
+                    const props = feature.properties
+                    for (const calculatedTagKey of calculatedTagKeys) {
+                        const strict = props[calculatedTagKey]
+
+                        if (props.hasOwnProperty(calculatedTagKey)) {
+                            delete props[calculatedTagKey]
+                        }
+
+                        props[calculatedTagKey] = strict
+                        strictlyCalculated++
+                        if (strictlyCalculated % 100 === 0) {
+                            console.log(
+                                "Strictly calculated ",
+                                strictlyCalculated,
+                                "values for tile",
+                                tileIndex,
+                                ": now at ",
+                                featureCount,
+                                "/",
+                                filteredTile.features.data.length,
+                                "examle value: ",
+                                strict
+                            )
+                        }
+                    }
+                }
+                delete feature["bbox"]
+            }
+
+            if (clip) {
+                console.log("Clipping features")
+                features = [].concat(
+                    ...features.map((f: Feature) => GeoOperations.clipWith(<any>f, bbox))
+                )
+            }
+            // Lets save this tile!
+            const [z, x, y] = Tiles.tile_from_index(tileIndex)
+            // console.log("Writing tile ", z, x, y, layerId)
+            const targetPath = geoJsonName(targetdir + "_" + layerId, x, y, z)
+            createdTiles.push(tileIndex)
+            // This is the geojson file containing all features for this tile
+            writeFileSync(
+                targetPath,
+                JSON.stringify(
+                    {
+                        type: "FeatureCollection",
+                        features,
+                    },
+                    null,
+                    " "
+                )
+            )
+            console.log("Written tile", targetPath, "with", filteredTile.features.data.length)
+        }
+
+        // All the tiles are written at this point
+        // Only thing left to do is to create the index
+        const path = targetdir + "_" + layerId + "_" + targetZoomLevel + "_overview.json"
+        const perX = {}
+        createdTiles
+            .map((i) => Tiles.tile_from_index(i))
+            .forEach(([z, x, y]) => {
+                const key = "" + x
+                if (perX[key] === undefined) {
+                    perX[key] = []
+                }
+                perX[key].push(y)
+            })
+        console.log("Written overview: ", path, "with ", createdTiles.length, "tiles")
+        writeFileSync(path, JSON.stringify(perX))
+
+        // And, if needed, to create a points-only layer
+        if (pointsOnlyLayers.indexOf(layer.id) >= 0) {
+            const filtered = new FilteringFeatureSource(flayer, source)
+            const features = filtered.features.data
+
+            const points = features.map((feature) => GeoOperations.centerpoint(feature))
+            console.log("Writing points overview for ", layerId)
+            const targetPath = targetdir + "_" + layerId + "_points.geojson"
+            // This is the geojson file containing all features for this tile
+            writeFileSync(
+                targetPath,
+                JSON.stringify(
+                    {
+                        type: "FeatureCollection",
+                        features: points,
+                    },
+                    null,
+                    " "
+                )
+            )
+        }
     }
 
     const skipped = Array.from(skippedLayers)
     if (skipped.length > 0) {
         console.warn(
             "Did not save any cache files for layers " +
-            skipped.join(", ") +
-            " as these didn't set the flag `isOsmCache` to true"
+                skipped.join(", ") +
+                " as these didn't set the flag `isOsmCache` to true"
         )
     }
 }
@@ -468,14 +449,18 @@ export async function main(args: string[]) {
     if (args.length < 6) {
         console.error(
             "Expected arguments are: theme zoomlevel targetdirectory lat0 lon0 lat1 lon1 [--generate-point-overview layer-name,layer-name,...] [--force-zoom-level z] [--clip]" +
-            "--force-zoom-level causes non-cached-layers to be donwnloaded\n" +
-            "--clip will erase parts of the feature falling outside of the bounding box"
+                "--force-zoom-level causes non-cached-layers to be donwnloaded\n" +
+                "--clip will erase parts of the feature falling outside of the bounding box"
         )
         return
     }
     const themeName = args[0]
     const zoomlevel = Number(args[1])
-    console.log("Target zoomlevel for the tiles is",zoomlevel,"; this can be overridden by the individual layers")
+    console.log(
+        "Target zoomlevel for the tiles is",
+        zoomlevel,
+        "; this can be overridden by the individual layers"
+    )
 
     const targetdir = args[2] + "/" + themeName
     if (!existsSync(args[2])) {
@@ -522,8 +507,11 @@ export async function main(args: string[]) {
         return
     }
 
-    theme.layers = theme.layers.filter(l => Constants.priviliged_layers.indexOf(<any> l.id) < 0 && !l.id.startsWith("note_import_"))
-    console.log("Layers to download:", theme.layers.map(l => l.id).join(", "))
+    theme.layers = theme.layers.filter(
+        (l) =>
+            Constants.priviliged_layers.indexOf(<any>l.id) < 0 && !l.id.startsWith("note_import_")
+    )
+    console.log("Layers to download:", theme.layers.map((l) => l.id).join(", "))
 
     let generatePointLayersFor = []
     if (args[7] == "--generate-point-overview") {
@@ -553,14 +541,13 @@ export async function main(args: string[]) {
 
     let failed = 0
     do {
-        try{
-
-        const cachingResult = await downloadRaw(targetdir, tileRange, theme)
-        failed = cachingResult.failed
-        if (failed > 0) {
-            await ScriptUtils.sleep(30000)
-        }
-        }catch(e){
+        try {
+            const cachingResult = await downloadRaw(targetdir, tileRange, theme)
+            failed = cachingResult.failed
+            if (failed > 0) {
+                await ScriptUtils.sleep(30000)
+            }
+        } catch (e) {
             console.error(e)
             return
         }
 
@@ -356,12 +356,16 @@ class LayerOverviewUtils extends Script {
             const context = "While building builtin layer " + sharedLayerPath
             const fixed = prepLayer.convertStrict(parsed, context)
 
-            if(!fixed.source){
-                console.error(sharedLayerPath,"has no source configured:",fixed)
-                throw sharedLayerPath+" layer has no source configured"
+            if (!fixed.source) {
+                console.error(sharedLayerPath, "has no source configured:", fixed)
+                throw sharedLayerPath + " layer has no source configured"
             }
 
-            if (typeof fixed.source !== "string" && fixed.source["osmTags"] && fixed.source["osmTags"]["and"] === undefined) {
+            if (
+                typeof fixed.source !== "string" &&
+                fixed.source["osmTags"] &&
+                fixed.source["osmTags"]["and"] === undefined
+            ) {
                 fixed.source["osmTags"] = { and: [fixed.source["osmTags"]] }
             }
 
@@ -426,8 +430,8 @@ class LayerOverviewUtils extends Script {
         })
 
         const skippedThemes: string[] = []
-        for (let i = 0; i < themeFiles.length; i++){
-            const themeInfo = themeFiles[i];
+        for (let i = 0; i < themeFiles.length; i++) {
+            const themeInfo = themeFiles[i]
             const themePath = themeInfo.path
             let themeFile = themeInfo.parsed
             console.log(`Validating ${i}/${themeFiles.length} '${themeInfo.parsed.id}'`)
 
@@ -280,7 +280,11 @@ async function createLandingPage(layout: LayoutConfig, manifest, whiteIcons, alr
         .replace(
             /<!-- DESCRIPTION START -->.*<!-- DESCRIPTION END -->/s,
             asLangSpan(layout.shortDescription)
-        )  .replace(/<!-- IMAGE-START -->.*<!-- IMAGE-END -->/s, "<img class='p-8 h-32 w-32 self-start' src='"+ icon+"' />")
+        )
+        .replace(
+            /<!-- IMAGE-START -->.*<!-- IMAGE-END -->/s,
+            "<img class='p-8 h-32 w-32 self-start' src='" + icon + "' />"
+        )
 
         .replace(
             '<script type="module" src="./index.ts"></script>',
 
@@ -227,10 +227,10 @@ export class GenerateLicenseInfo extends Script {
 
             licenses.sort((a, b) => (a.path < b.path ? -1 : 1))
             const path = dir + "/license_info.json"
-            if(licenses.length === 0){
-                console.log("Removing",path,"as it is empty")
+            if (licenses.length === 0) {
+                console.log("Removing", path, "as it is empty")
                 // No need to _actually_ unlik, this is done above
-            }else{
+            } else {
                 writeFileSync(path, JSON.stringify(licenses, null, 2))
             }
         })
 
@@ -1,6 +1,6 @@
 import * as fs from "fs"
-import {existsSync, mkdirSync, readFileSync, writeFileSync} from "fs"
-import {Utils} from "../Utils"
+import { existsSync, mkdirSync, readFileSync, writeFileSync } from "fs"
+import { Utils } from "../Utils"
 import ScriptUtils from "./ScriptUtils"
 
 const knownLanguages = ["en", "nl", "de", "fr", "es", "gl", "ca"]
@@ -12,7 +12,7 @@ class TranslationPart {
         const files = ScriptUtils.readDirRecSync(path, 1).filter((file) => file.endsWith(".json"))
         const rootTranslation = new TranslationPart()
         for (const file of files) {
-            const content = JSON.parse(readFileSync(file, {encoding: "utf8"}))
+            const content = JSON.parse(readFileSync(file, { encoding: "utf8" }))
             rootTranslation.addTranslation(file.substr(0, file.length - ".json".length), content)
         }
         return rootTranslation
@@ -52,10 +52,10 @@ class TranslationPart {
             if (typeof v != "string") {
                 console.error(
                     `Non-string object at ${context} in translation while trying to add the translation ` +
-                    JSON.stringify(v) +
-                    ` to '` +
-                    translationsKey +
-                    "'. The offending object which _should_ be a translation is: ",
+                        JSON.stringify(v) +
+                        ` to '` +
+                        translationsKey +
+                        "'. The offending object which _should_ be a translation is: ",
                     v,
                     "\n\nThe current object is (only showing en):",
                     this.toJson(),
@@ -94,9 +94,9 @@ class TranslationPart {
             if (noTranslate !== undefined) {
                 console.log(
                     "Ignoring some translations for " +
-                    context +
-                    ": " +
-                    dontTranslateKeys.join(", ")
+                        context +
+                        ": " +
+                        dontTranslateKeys.join(", ")
                 )
             }
         }
@@ -242,14 +242,14 @@ class TranslationPart {
                 }
                 subparts = subparts.map((p) => p.split(/\(.*\)/)[0])
                 for (const subpart of subparts) {
-                    neededSubparts.add({part: subpart, usedByLanguage: lang})
+                    neededSubparts.add({ part: subpart, usedByLanguage: lang })
                 }
             }
         })
 
         // Actually check for the needed sub-parts, e.g. that {key} isn't translated into {sleutel}
         this.contents.forEach((value, key) => {
-            neededSubparts.forEach(({part, usedByLanguage}) => {
+            neededSubparts.forEach(({ part, usedByLanguage }) => {
                 if (typeof value !== "string") {
                     return
                 }
@@ -659,8 +659,12 @@ function mergeLayerTranslations() {
     const layerFiles = ScriptUtils.getLayerFiles()
     for (const layerFile of layerFiles) {
         mergeLayerTranslation(layerFile.parsed, layerFile.path, loadTranslationFilesFrom("layers"))
-        const endsWithNewline = readFileSync(layerFile.path, {encoding: "utf8"})?.endsWith("\n") ?? true
-        writeFileSync(layerFile.path, JSON.stringify(layerFile.parsed, null, "  ") + (endsWithNewline ? "\n" : "")) // layers use 2 spaces
+        const endsWithNewline =
+            readFileSync(layerFile.path, { encoding: "utf8" })?.endsWith("\n") ?? true
+        writeFileSync(
+            layerFile.path,
+            JSON.stringify(layerFile.parsed, null, "  ") + (endsWithNewline ? "\n" : "")
+        ) // layers use 2 spaces
     }
 }
 
@@ -675,8 +679,12 @@ function mergeThemeTranslations() {
 
         const allTranslations = new TranslationPart()
         allTranslations.recursiveAdd(config, themeFile.path)
-        const endsWithNewline = readFileSync(themeFile.path, {encoding: "utf8"})?.endsWith("\n") ?? true
-        writeFileSync(themeFile.path, JSON.stringify(config, null, "  ") + (endsWithNewline ? "\n" : "")) // Themefiles use 2 spaces
+        const endsWithNewline =
+            readFileSync(themeFile.path, { encoding: "utf8" })?.endsWith("\n") ?? true
+        writeFileSync(
+            themeFile.path,
+            JSON.stringify(config, null, "  ") + (endsWithNewline ? "\n" : "")
+        ) // Themefiles use 2 spaces
     }
 }
 
@@ -695,8 +703,11 @@ if (!themeOverwritesWeblate) {
         questionsPath,
         loadTranslationFilesFrom("shared-questions")
     )
-    const endsWithNewline = readFileSync(questionsPath, {encoding: "utf8"}).endsWith("\n")
-    writeFileSync(questionsPath, JSON.stringify(questionsParsed, null, "  ") + (endsWithNewline ? "\n" : ""))
+    const endsWithNewline = readFileSync(questionsPath, { encoding: "utf8" }).endsWith("\n")
+    writeFileSync(
+        questionsPath,
+        JSON.stringify(questionsParsed, null, "  ") + (endsWithNewline ? "\n" : "")
+    )
 } else {
     console.log("Ignore weblate")
 }
@@ -707,13 +718,13 @@ const l2 = generateTranslationsObjectFrom(
     "themes"
 )
 const l3 = generateTranslationsObjectFrom(
-    [{path: questionsPath, parsed: questionsParsed}],
+    [{ path: questionsPath, parsed: questionsParsed }],
     "shared-questions"
 )
 
 const usedLanguages: string[] = Utils.Dedup(l1.concat(l2).concat(l3)).filter((v) => v !== "*")
 usedLanguages.sort()
-fs.writeFileSync("./assets/used_languages.json", JSON.stringify({languages: usedLanguages}))
+fs.writeFileSync("./assets/used_languages.json", JSON.stringify({ languages: usedLanguages }))
 
 if (!themeOverwritesWeblate) {
     // Generates the core translations
 
@@ -1,54 +1,48 @@
 import fs from "fs"
-import {OH} from "../../UI/OpeningHours/OpeningHours";
+import { OH } from "../../UI/OpeningHours/OpeningHours"
 
-const cashpunten = JSON.parse(fs.readFileSync("/home/pietervdvn/Downloads/cash_punten.json", "utf8")).data
+const cashpunten = JSON.parse(
+    fs.readFileSync("/home/pietervdvn/Downloads/cash_punten.json", "utf8")
+).data
 
 const features: any[] = []
-const weekdays = [
-    "MO",
-    "TU",
-    "WE",
-    "TH",
-    "FR",
-    "SA",
-    "SU"
-]
+const weekdays = ["MO", "TU", "WE", "TH", "FR", "SA", "SU"]
 for (const atm of cashpunten) {
     const properties = {
-        "amenity": "atm",
+        amenity: "atm",
         "addr:street": atm.adr_street,
         "addr:housenumber": atm.adr_street_number,
-        "phone": <string>atm.phone_number,
-        "operator": "Batopin",
+        phone: <string>atm.phone_number,
+        operator: "Batopin",
         network: "CASH",
         fee: "no",
-        "speech_output": "yes",
-        "brand": "CASH",
+        speech_output: "yes",
+        brand: "CASH",
         website: "https://batopin.be",
-        "source": "https://batopin.be",
+        source: "https://batopin.be",
         "brand:wikidata": "Q112875867",
         "operator:wikidata": "Q97142699",
-        "currency:EUR": "yes"
+        "currency:EUR": "yes",
     }
     features.push({
-        geometry: {type: "Point", coordinates: [atm.adr_longitude, atm.adr_latitude]},
+        geometry: { type: "Point", coordinates: [atm.adr_longitude, atm.adr_latitude] },
         properties: {
-            tags: properties
-        }
+            tags: properties,
+        },
     })
 
     switch (atm.accessibility) {
         case "Green":
-            properties["wheelchair"] = "yes";
-            break;
+            properties["wheelchair"] = "yes"
+            break
         case "Orange":
-            properties["wheelchair"] = "limited";
-            break;
+            properties["wheelchair"] = "limited"
+            break
         case "Red":
-            properties["wheelchair"] = "no";
-            break;
+            properties["wheelchair"] = "no"
+            break
         default:
-            break;
+            break
     }
     delete atm.accessibility
 
@@ -57,18 +51,20 @@ for (const atm of cashpunten) {
         delete atm.deposit_cash
     }
 
-    if (!weekdays.some(wd => atm.regular_hours[wd] !== "00:00-00:00")) {
+    if (!weekdays.some((wd) => atm.regular_hours[wd] !== "00:00-00:00")) {
         properties["opening_hours"] = "24/7"
         delete atm.regular_hours
     } else {
-        const rules = weekdays.filter(wd => atm.regular_hours[wd] !== undefined).map(wd => wd[0] + wd.toLowerCase()[1] + " " + atm.regular_hours[wd]).join(";")
+        const rules = weekdays
+            .filter((wd) => atm.regular_hours[wd] !== undefined)
+            .map((wd) => wd[0] + wd.toLowerCase()[1] + " " + atm.regular_hours[wd])
+            .join(";")
         properties["opening_hours"] = OH.ToString(OH.MergeTimes(OH.Parse(rules)))
         delete atm.regular_hours
    }
 
     delete atm.special_hours // Only one data point has this
 
-
     delete atm.location_language
     delete atm.location_name
     delete atm.shop_code
@@ -88,5 +84,4 @@ for (const atm of cashpunten) {
     break
 }
 
-
-fs.writeFileSync("atms.geojson", JSON.stringify({type: "FeatureCollection", features}))
+fs.writeFileSync("atms.geojson", JSON.stringify({ type: "FeatureCollection", features }))