Merge branch 'develop' into RobinLinde-patch-10

Robin van der Linde 2023-07-17 22:34:16 +02:00
commit ff8442f90b
Signed by untrusted user: Robin-van-der-Linde
GPG key ID: 53956B3252478F0D
654 changed files with 17365 additions and 15965 deletions

@@ -0,0 +1,62 @@
import known_themes from "../assets/generated/known_themes.json"
import LayoutConfig from "../Models/ThemeConfig/LayoutConfig"
import { LayoutConfigJson } from "../Models/ThemeConfig/Json/LayoutConfigJson"
/**
* Somewhat of a dictionary, which lazily parses needed themes
*/
export class AllKnownLayoutsLazy {
private readonly dict: Map<string, { data: LayoutConfig } | { func: () => LayoutConfig }> =
new Map()
constructor() {
for (const layoutConfigJson of known_themes["themes"]) {
this.dict.set(layoutConfigJson.id, {
func: () => {
const layout = new LayoutConfig(<LayoutConfigJson>layoutConfigJson, true)
for (let i = 0; i < layout.layers.length; i++) {
let layer = layout.layers[i]
if (typeof layer === "string") {
throw "Layer " + layer + " was not expanded in " + layout.id
}
}
return layout
},
})
}
}
public get(key: string): LayoutConfig {
const thunk = this.dict.get(key)
if (thunk === undefined) {
return undefined
}
if (thunk["data"]) {
return thunk["data"]
}
const layout = thunk["func"]()
this.dict.set(key, { data: layout })
return layout
}
public keys() {
return this.dict.keys()
}
public values() {
return Array.from(this.keys()).map((k) => this.get(k))
}
}
export class AllKnownLayouts {
public static allKnownLayouts: AllKnownLayoutsLazy = new AllKnownLayoutsLazy()
static AllPublicLayers() {
const layers = [].concat(
...this.allKnownLayouts
.values()
.filter((layout) => !layout.hideFromOverview)
.map((layout) => layout.layers)
)
return layers
}
}
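
For illustration only (not part of this commit): a hypothetical usage of the lazy dictionary above. The first `get()` for a theme id runs the stored thunk and parses the theme; the result is then cached as `{ data: ... }`, so later calls return the same `LayoutConfig` without re-parsing. The theme id "cyclofix" and the import path are assumptions.

import { AllKnownLayouts } from "./AllKnownLayouts"

// First access: the thunk runs and the theme JSON is parsed into a LayoutConfig
const cyclofix = AllKnownLayouts.allKnownLayouts.get("cyclofix")
// Second access: served from the cached { data: ... } entry, no re-parse
const again = AllKnownLayouts.allKnownLayouts.get("cyclofix")
console.log(cyclofix === again) // true - the same parsed object is returned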

@@ -0,0 +1,68 @@
import LayerConfig from "../Models/ThemeConfig/LayerConfig"
import { Utils } from "../Utils"
import known_themes from "../assets/generated/known_layers.json"
import { LayerConfigJson } from "../Models/ThemeConfig/Json/LayerConfigJson"
import { AllKnownLayouts } from "./AllKnownLayouts"
export class AllSharedLayers {
public static sharedLayers: Map<string, LayerConfig> = AllSharedLayers.getSharedLayers()
public static getSharedLayersConfigs(): Map<string, LayerConfigJson> {
const sharedLayers = new Map<string, LayerConfigJson>()
for (const layer of known_themes.layers) {
// @ts-ignore
sharedLayers.set(layer.id, layer)
}
return sharedLayers
}
private static getSharedLayers(): Map<string, LayerConfig> {
const sharedLayers = new Map<string, LayerConfig>()
for (const layer of known_themes.layers) {
try {
// @ts-ignore
const parsed = new LayerConfig(layer, "shared_layers")
sharedLayers.set(layer.id, parsed)
} catch (e) {
if (!Utils.runningFromConsole) {
console.error(
"CRITICAL: Could not parse a layer configuration!",
layer.id,
" due to",
e
)
}
}
}
return sharedLayers
}
public static AllPublicLayers(options?: {
includeInlineLayers: true | boolean
}): LayerConfig[] {
const allLayers: LayerConfig[] = []
const seendIds = new Set<string>()
AllSharedLayers.sharedLayers.forEach((layer, key) => {
seendIds.add(key)
allLayers.push(layer)
})
if (options?.includeInlineLayers ?? true) {
const publicLayouts = Array.from(AllKnownLayouts.allKnownLayouts.values()).filter(
(l) => !l.hideFromOverview
)
for (const layout of publicLayouts) {
if (layout.hideFromOverview) {
continue
}
for (const layer of layout.layers) {
if (seendIds.has(layer.id)) {
continue
}
seendIds.add(layer.id)
allLayers.push(layer)
}
}
}
return allLayers
}
}

@@ -0,0 +1,71 @@
import { Utils } from "../Utils"
import TagRenderingConfig from "../Models/ThemeConfig/TagRenderingConfig"
import { TagRenderingConfigJson } from "../Models/ThemeConfig/Json/TagRenderingConfigJson"
import BaseUIElement from "../UI/BaseUIElement"
import Combine from "../UI/Base/Combine"
import Title from "../UI/Base/Title"
import { FixedUiElement } from "../UI/Base/FixedUiElement"
import List from "../UI/Base/List"
export default class SharedTagRenderings {
public static SharedTagRendering: Map<string, TagRenderingConfig> =
SharedTagRenderings.generatedSharedFields()
public static SharedTagRenderingJson: Map<string, TagRenderingConfigJson> =
SharedTagRenderings.generatedSharedFieldsJsons()
private static generatedSharedFields(): Map<string, TagRenderingConfig> {
const configJsons = SharedTagRenderings.generatedSharedFieldsJsons()
const d = new Map<string, TagRenderingConfig>()
for (const key of Array.from(configJsons.keys())) {
try {
d.set(
key,
new TagRenderingConfig(configJsons.get(key), `SharedTagRenderings.${key}`)
)
} catch (e) {
if (!Utils.runningFromConsole) {
console.error(
"BUG: could not parse",
key,
" from questions.json - this error happened during the build step of the SharedTagRenderings",
e
)
}
}
}
return d
}
private static generatedSharedFieldsJsons(): Map<string, TagRenderingConfigJson> {
const dict = new Map<string, TagRenderingConfigJson>()
// TODO FIXME
return dict
}
public static HelpText(): BaseUIElement {
return new Combine([
new Combine([
new Title("Builtin questions", 1),
"The following items can be easily reused in your layers",
]).SetClass("flex flex-col"),
...Array.from(SharedTagRenderings.SharedTagRendering.keys()).map((key) => {
const tr = SharedTagRenderings.SharedTagRendering.get(key)
let mappings: BaseUIElement = undefined
if (tr.mappings?.length > 0) {
mappings = new List(tr.mappings.map((m) => m.then.textFor("en")))
}
return new Combine([
new Title(key),
tr.render?.textFor("en"),
tr.question?.textFor("en") ??
new FixedUiElement("Read-only tagrendering").SetClass("font-bold"),
mappings,
]).SetClass("flex flex-col")
}),
]).SetClass("flex flex-col")
}
}

@@ -0,0 +1,45 @@
import { Store, UIEventSource } from "../UIEventSource"
import { Utils } from "../../Utils"
import { RasterLayerPolygon, RasterLayerUtils } from "../../Models/RasterLayers"
/**
* When a user pans around on the map, they might pan out of the range of the current background raster layer.
* This actor will then quickly select a (best) raster layer of the same category which is available
*/
export default class BackgroundLayerResetter {
constructor(
currentBackgroundLayer: UIEventSource<RasterLayerPolygon>,
availableLayers: Store<RasterLayerPolygon[]>
) {
if (Utils.runningFromConsole) {
return
}
// Change the baseLayer back to OSM if we go out of the current range of the layer
availableLayers.addCallbackAndRunD((availableLayers) => {
// We only check on move/on change of the availableLayers
const currentBgPolygon: RasterLayerPolygon | undefined = currentBackgroundLayer.data
if (currentBgPolygon === undefined) {
return
}
if (availableLayers.findIndex((available) => currentBgPolygon == available) >= 0) {
// Still available!
return
}
console.log("Current layer properties:", currentBgPolygon)
// Oops, we panned out of range for this layer!
// What is the 'best' map of the same category which is available?
const availableInSameCat = RasterLayerUtils.SelectBestLayerAccordingTo(
availableLayers,
currentBgPolygon?.properties?.category
)
if (!availableInSameCat) {
return
}
console.log("Selecting a different layer:", availableInSameCat.properties.id)
currentBackgroundLayer.setData(availableInSameCat)
})
}
}

@@ -0,0 +1,37 @@
import { Changes } from "../Osm/Changes"
import FeaturePropertiesStore from "../FeatureSource/Actors/FeaturePropertiesStore"
/**
* Applies tag changes onto the featureStore
*/
export default class ChangeToElementsActor {
constructor(changes: Changes, allElements: FeaturePropertiesStore) {
changes.pendingChanges.addCallbackAndRun((changes) => {
for (const change of changes) {
const id = change.type + "/" + change.id
if (!allElements.has(id)) {
continue // Ignored as the geometryFixer will introduce this
}
const src = allElements.getStore(id)
let changed = false
for (const kv of change.tags ?? []) {
// Apply tag changes and ping the consumers
const k = kv.k
let v = kv.v
if (v === "") {
v = undefined
}
if (src.data[k] === v) {
continue
}
changed = true
src.data[k] = v
}
if (changed) {
src.ping()
}
}
})
}
}
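
For illustration only (not part of this commit): the shape of a pending change as consumed by the loop above, inferred from `change.type`, `change.id` and `change.tags`. The values are made up; an empty string for `v` means the key gets removed from the feature's properties.

const examplePendingChange = {
    type: "node", // combined with the id below into the lookup key "node/42"
    id: 42,
    tags: [
        { k: "amenity", v: "bench" }, // sets amenity=bench on the feature
        { k: "fixme", v: "" }, // empty value: the 'fixme' key is deleted
    ],
}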

@@ -0,0 +1,264 @@
import { QueryParameters } from "../Web/QueryParameters"
import { BBox } from "../BBox"
import Constants from "../../Models/Constants"
import { GeoLocationState } from "../State/GeoLocationState"
import { UIEventSource } from "../UIEventSource"
import { Feature, LineString, Point } from "geojson"
import { FeatureSource, WritableFeatureSource } from "../FeatureSource/FeatureSource"
import { LocalStorageSource } from "../Web/LocalStorageSource"
import { GeoOperations } from "../GeoOperations"
import { OsmTags } from "../../Models/OsmFeature"
import StaticFeatureSource from "../FeatureSource/Sources/StaticFeatureSource"
import { MapProperties } from "../../Models/MapProperties"
/**
* The geolocation-handler takes a map-location and a geolocation state.
* It'll move the map as appropriate given the state of the geolocation-API
* It will also copy the geolocation into the appropriate FeatureSource to display on the map
*/
export default class GeoLocationHandler {
public readonly geolocationState: GeoLocationState
/**
* The location as delivered by the GPS, wrapped as FeatureSource
*/
public currentUserLocation: FeatureSource
/**
* All previously visited points (as 'Point'-objects), with their metadata
*/
public historicalUserLocations: WritableFeatureSource<Feature<Point>>
/**
* A featureSource containing a single linestring which has the GPS-history of the user.
* However, metadata (such as when every single point was visited) is lost here (but is kept in `historicalUserLocations`).
* Note that this featureSource is _derived_ from 'historicalUserLocations'
*/
public readonly historicalUserLocationsTrack: FeatureSource
/**
* The last moment that the map has moved
*/
public readonly mapHasMoved: UIEventSource<Date | undefined> = new UIEventSource<
Date | undefined
>(undefined)
private readonly selectedElement: UIEventSource<any>
private readonly mapProperties?: MapProperties
private readonly gpsLocationHistoryRetentionTime?: UIEventSource<number>
constructor(
geolocationState: GeoLocationState,
selectedElement: UIEventSource<any>,
mapProperties?: MapProperties,
gpsLocationHistoryRetentionTime?: UIEventSource<number>
) {
this.geolocationState = geolocationState
const mapLocation = mapProperties.location
this.selectedElement = selectedElement
this.mapProperties = mapProperties
this.gpsLocationHistoryRetentionTime = gpsLocationHistoryRetentionTime
// Did an interaction move the map?
let self = this
let initTime = new Date()
mapLocation.addCallbackD((_) => {
if (new Date().getTime() - initTime.getTime() < 250) {
return
}
self.mapHasMoved.setData(new Date())
return true // Unsubscribe
})
const latLonGivenViaUrl =
QueryParameters.wasInitialized("lat") || QueryParameters.wasInitialized("lon")
if (latLonGivenViaUrl) {
// The URL counts as a 'user interaction'
this.mapHasMoved.setData(new Date())
}
this.geolocationState.currentGPSLocation.addCallbackAndRunD((_) => {
const timeSinceLastRequest =
(new Date().getTime() - (geolocationState.requestMoment.data?.getTime() ?? 0)) / 1000
if (!this.mapHasMoved.data) {
// The map hasn't moved yet; we received our first coordinates, so let's move there!
self.MoveMapToCurrentLocation()
}
if (
timeSinceLastRequest < Constants.zoomToLocationTimeout &&
(this.mapHasMoved.data === undefined ||
this.mapHasMoved.data.getTime() <
geolocationState.requestMoment.data?.getTime())
) {
// still within request time and the map hasn't moved since requesting to jump to the current location
self.MoveMapToCurrentLocation()
}
if (!this.geolocationState.allowMoving.data) {
// Yup, the map is locked to the bound location: move automatically
self.MoveMapToCurrentLocation()
return
}
})
geolocationState.allowMoving.syncWith(mapProperties.allowMoving, true)
this.CopyGeolocationIntoMapstate()
this.historicalUserLocationsTrack = this.initUserLocationTrail()
}
/**
* Move the map to the GPS-location, except when:
* - there is a selected element
* - the location is out of the locked bounds
* - the GPS-location is NULL-island
* @constructor
*/
public MoveMapToCurrentLocation() {
const newLocation = this.geolocationState.currentGPSLocation.data
const mapLocation = this.mapProperties.location
// We got a new location.
// Do we move the map to it?
if (this.selectedElement.data !== undefined) {
// Nope, there is something selected, so we don't move to the current GPS-location
return
}
if (newLocation.latitude === 0 && newLocation.longitude === 0) {
console.debug("Not moving to GPS-location: it is null island")
return
}
// We check that the GPS location is not out of bounds
const bounds = this.mapProperties.maxbounds.data
if (bounds !== undefined) {
// 'bounds' contains the coordinates of the area the map is locked to
const inRange = new BBox(bounds).contains([newLocation.longitude, newLocation.latitude])
if (!inRange) {
return
}
}
console.trace("Moving the map to the GPS-location")
mapLocation.setData({
lon: newLocation.longitude,
lat: newLocation.latitude,
})
const zoom = this.mapProperties.zoom
zoom.setData(Math.min(Math.max(zoom.data, 14), 18))
this.mapHasMoved.setData(new Date())
this.geolocationState.requestMoment.setData(undefined)
}
private CopyGeolocationIntoMapstate() {
const features: UIEventSource<Feature[]> = new UIEventSource<Feature[]>([])
this.currentUserLocation = new StaticFeatureSource(features)
const keysToCopy = ["speed", "accuracy", "altitude", "altitudeAccuracy", "heading"]
let i = 0
this.geolocationState.currentGPSLocation.addCallbackAndRun((location) => {
if (location === undefined) {
return
}
const properties = {
id: "gps-" + i,
"user:location": "yes",
date: new Date().toISOString(),
}
i++
for (const k of keysToCopy) {
// For some weird reason, the 'Object.keys' method doesn't work for the 'location: GeolocationCoordinates'-object and will thus not copy all the properties when using {...location}
// As such, they are copied here
if (location[k]) {
properties[k] = location[k]
}
}
const feature = <Feature>{
type: "Feature",
properties,
geometry: {
type: "Point",
coordinates: [location.longitude, location.latitude],
},
}
features.setData([feature])
})
}
private initUserLocationTrail() {
const features = LocalStorageSource.GetParsed<Feature[]>("gps_location_history", [])
const now = new Date().getTime()
features.data = features.data.filter((ff) => {
if (ff.properties === undefined) {
return false
}
const point_time = new Date(ff.properties["date"])
return (
now - point_time.getTime() <
1000 * (this.gpsLocationHistoryRetentionTime?.data ?? 24 * 60 * 60 * 1000)
)
})
features.ping()
let i = 0
this.currentUserLocation?.features?.addCallbackAndRunD(([location]: [Feature<Point>]) => {
if (location === undefined) {
return
}
const previousLocation = <Feature<Point>>features.data[features.data.length - 1]
if (previousLocation !== undefined) {
const previousLocationFreshness = new Date(previousLocation.properties.date)
const d = GeoOperations.distanceBetween(
<[number, number]>previousLocation.geometry.coordinates,
<[number, number]>location.geometry.coordinates
)
let timeDiff = Number.MAX_VALUE // in seconds
const olderLocation = features.data[features.data.length - 2]
if (olderLocation !== undefined) {
const olderLocationFreshness = new Date(olderLocation.properties.date)
timeDiff =
(new Date(previousLocationFreshness).getTime() -
new Date(olderLocationFreshness).getTime()) /
1000
}
if (d < 20 && timeDiff < 60) {
// Do not append changes of less than 20m - it's probably noise anyway
return
}
}
const feature = JSON.parse(JSON.stringify(location))
feature.properties.id = "gps/" + features.data.length
i++
features.data.push(feature)
features.ping()
})
this.historicalUserLocations = <any>new StaticFeatureSource(features)
const asLine = features.map((allPoints) => {
if (allPoints === undefined || allPoints.length < 2) {
return []
}
const feature: Feature<LineString, OsmTags> = {
type: "Feature",
properties: {
id: "location_track",
"_date:now": new Date().toISOString(),
},
geometry: {
type: "LineString",
coordinates: allPoints.map(
(ff: Feature<Point>) => <[number, number]>ff.geometry.coordinates
),
},
}
return [feature]
})
return new StaticFeatureSource(asLine)
}
}

@@ -0,0 +1,64 @@
import { UIEventSource } from "../UIEventSource"
import LayoutConfig from "../../Models/ThemeConfig/LayoutConfig"
import { LocalStorageSource } from "../Web/LocalStorageSource"
import { QueryParameters } from "../Web/QueryParameters"
/**
* This actor is responsible to set the map location.
* It will attempt to
* - Set the map to the position as passed in by the query parameters (if available)
* - Set the map to the position remembered in LocalStorage (if available)
* - Set the map to the layout default
*
* Additionally, it will save the map location to local storage
*/
export default class InitialMapPositioning {
public zoom: UIEventSource<number>
public location: UIEventSource<{ lon: number; lat: number }>
constructor(layoutToUse: LayoutConfig) {
function localStorageSynced(
key: string,
deflt: number,
docs: string
): UIEventSource<number> {
const localStorage = LocalStorageSource.Get(key)
const previousValue = localStorage.data
const src = UIEventSource.asFloat(
QueryParameters.GetQueryParameter(key, "" + deflt, docs).syncWith(localStorage)
)
if (src.data === deflt) {
const prev = Number(previousValue)
if (!isNaN(prev)) {
src.setData(prev)
}
}
return src
}
// -- Location control initialization
this.zoom = localStorageSynced(
"z",
layoutToUse?.startZoom ?? 1,
"The initial/current zoom level"
)
const lat = localStorageSynced(
"lat",
layoutToUse?.startLat ?? 0,
"The initial/current latitude"
)
const lon = localStorageSynced(
"lon",
layoutToUse?.startLon ?? 0,
"The initial/current longitude of the app"
)
this.location = new UIEventSource({ lon: lon.data, lat: lat.data })
this.location.addCallbackD((loc) => {
lat.setData(loc.lat)
lon.setData(loc.lon)
})
// Note: this syncs only in one direction
}
}

@@ -0,0 +1,97 @@
import { BBox } from "../BBox"
import { Store } from "../UIEventSource"
import ThemeViewState from "../../Models/ThemeViewState"
import Constants from "../../Models/Constants"
export type FeatureViewState =
| "no-data"
| "zoom-to-low"
| "has-visible-feature"
| "all-filtered-away"
export default class NoElementsInViewDetector {
public readonly hasFeatureInView: Store<FeatureViewState>
constructor(themeViewState: ThemeViewState) {
const state = themeViewState
const minZoom = Math.min(
...themeViewState.layout.layers
.filter((l) => Constants.priviliged_layers.indexOf(<any>l.id) < 0)
.filter((l) => !l.id.startsWith("note_import"))
.map((l) => l.minzoom)
)
const mapProperties = themeViewState.mapProperties
const priviliged: Set<string> = new Set(Constants.priviliged_layers)
this.hasFeatureInView = state.mapProperties.bounds.stabilized(100).map(
(bbox) => {
if (!bbox) {
return undefined
}
if (mapProperties.zoom.data < minZoom) {
// Not a single layer will display anything as the zoom is too low
return "zoom-to-low"
}
let minzoomWithData = 9999
for (const [layerName, source] of themeViewState.perLayerFiltered) {
if (priviliged.has(layerName)) {
continue
}
const feats = source.features.data
if (!(feats?.length > 0)) {
// Nope, no data loaded
continue
}
const layer = themeViewState.layout.getLayer(layerName)
if (mapProperties.zoom.data < layer.minzoom) {
minzoomWithData = Math.min(minzoomWithData, layer.minzoom)
continue
}
if (!state.layerState.filteredLayers.get(layerName).isDisplayed.data) {
continue
}
for (const feat of feats) {
if (BBox.get(feat).overlapsWith(bbox)) {
// We found at least one item which has visible data
return "has-visible-feature"
}
}
}
// If we arrive here, data might have been filtered away
for (const [layerName, source] of themeViewState.perLayerFiltered) {
if (priviliged.has(layerName)) {
continue
}
const layer = themeViewState.layout.getLayer(layerName)
if (mapProperties.zoom.data < layer.minzoom) {
continue
}
const feats = source.features.data
if (!(feats?.length > 0)) {
// Nope, no data loaded
continue
}
for (const feat of feats) {
if (BBox.get(feat).overlapsWith(bbox)) {
// We found at least one item, but as we didn't find it before, it is filtered away
return "all-filtered-away"
}
}
}
return "no-data"
},
[
...Array.from(themeViewState.perLayerFiltered.values()).map((f) => f.features),
mapProperties.zoom,
...Array.from(state.layerState.filteredLayers.values()).map((fl) => fl.isDisplayed),
]
)
}
}
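
For illustration only (not part of this commit): a hypothetical consumer of the detector above. `themeViewState` and the import paths are assumptions; the callback receives one of the four `FeatureViewState` values computed above.

import NoElementsInViewDetector from "./NoElementsInViewDetector"
import ThemeViewState from "../../Models/ThemeViewState"

declare const themeViewState: ThemeViewState // assumed to be provided by the calling code

const detector = new NoElementsInViewDetector(themeViewState)
detector.hasFeatureInView.addCallbackAndRunD((state) => {
    if (state === "zoom-to-low") {
        console.log("Zoom in further to load data for this theme")
    } else if (state === "no-data" || state === "all-filtered-away") {
        console.log("Nothing to show in the current view:", state)
    }
})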

@@ -0,0 +1,74 @@
import { Changes } from "../Osm/Changes"
import Constants from "../../Models/Constants"
import { UIEventSource } from "../UIEventSource"
import { Utils } from "../../Utils"
import { Feature } from "geojson"
export default class PendingChangesUploader {
private lastChange: Date
constructor(changes: Changes, selectedFeature: UIEventSource<Feature>) {
const self = this
this.lastChange = new Date()
changes.pendingChanges.addCallback(() => {
self.lastChange = new Date()
window.setTimeout(() => {
const diff = (new Date().getTime() - self.lastChange.getTime()) / 1000
if (Constants.updateTimeoutSec >= diff - 1) {
changes.flushChanges("Flushing changes due to timeout")
}
}, Constants.updateTimeoutSec * 1000)
})
selectedFeature.stabilized(10000).addCallback((feature) => {
if (feature === undefined) {
// The popup got closed - we flush
changes.flushChanges("Flushing changes due to popup closed")
}
})
if (Utils.runningFromConsole) {
return
}
document.addEventListener("mouseout", (e) => {
// @ts-ignore
if (!e.toElement && !e.relatedTarget) {
changes.flushChanges("Flushing changes due to focus lost")
}
})
document.onfocus = () => {
changes.flushChanges("OnFocus")
}
document.onblur = () => {
changes.flushChanges("OnFocus")
}
try {
document.addEventListener(
"visibilitychange",
() => {
changes.flushChanges("Visibility change")
},
false
)
} catch (e) {
console.warn("Could not register visibility change listener", e)
}
function onunload(e) {
if (changes.pendingChanges.data.length == 0) {
return
}
changes.flushChanges("onbeforeunload - probably closing or something similar")
e.preventDefault()
return "Saving your last changes..."
}
window.onbeforeunload = onunload
// https://stackoverflow.com/questions/3239834/window-onbeforeunload-not-working-on-the-ipad#4824156
window.addEventListener("pagehide", onunload)
}
}

@@ -0,0 +1,9 @@
Actors
======
An **actor** is a module which converts one UIEventSource into another while performing some logic.
Typically, it only exposes a constructor taking some UIEventSources (and configuration) and a few fields which are
UIEventSources themselves.
An actor should _never_ have a dependency on 'State' and should _never_ import it.
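
A minimal sketch of this pattern (illustrative, not part of this commit): the class name is made up, but the shape follows the convention above - take a `UIEventSource` in the constructor, perform the logic in a callback, and expose the derived value as another `UIEventSource`, without importing any global state.

import { UIEventSource } from "../UIEventSource"

export default class UppercaseActor {
    // The derived event source this actor exposes
    public readonly uppercased: UIEventSource<string> = new UIEventSource<string>("")

    constructor(source: UIEventSource<string>) {
        // Perform the logic whenever the input changes (and once on startup)
        source.addCallbackAndRun((txt) => this.uppercased.setData(txt?.toUpperCase() ?? ""))
    }
}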

@@ -0,0 +1,174 @@
/**
* This actor will download the latest version of the selected element from OSM and update the tags if necessary.
*/
import { UIEventSource } from "../UIEventSource"
import { Changes } from "../Osm/Changes"
import { OsmConnection } from "../Osm/OsmConnection"
import LayoutConfig from "../../Models/ThemeConfig/LayoutConfig"
import SimpleMetaTagger from "../SimpleMetaTagger"
import FeaturePropertiesStore from "../FeatureSource/Actors/FeaturePropertiesStore"
import { Feature } from "geojson"
import { OsmTags } from "../../Models/OsmFeature"
import OsmObjectDownloader from "../Osm/OsmObjectDownloader"
import { IndexedFeatureSource } from "../FeatureSource/FeatureSource"
import { Utils } from "../../Utils"
export default class SelectedElementTagsUpdater {
private static readonly metatags = new Set([
"timestamp",
"version",
"changeset",
"user",
"uid",
"id",
])
private readonly state: {
selectedElement: UIEventSource<Feature>
featureProperties: FeaturePropertiesStore
changes: Changes
osmConnection: OsmConnection
layout: LayoutConfig
osmObjectDownloader: OsmObjectDownloader
indexedFeatures: IndexedFeatureSource
}
constructor(state: {
selectedElement: UIEventSource<Feature>
featureProperties: FeaturePropertiesStore
indexedFeatures: IndexedFeatureSource
changes: Changes
osmConnection: OsmConnection
layout: LayoutConfig
osmObjectDownloader: OsmObjectDownloader
}) {
this.state = state
state.osmConnection.isLoggedIn.addCallbackAndRun((isLoggedIn) => {
if (!isLoggedIn && !Utils.runningFromConsole) {
return
}
this.installCallback()
// We only have to do this once...
return true
})
}
private installCallback() {
const state = this.state
state.selectedElement.addCallbackAndRunD(async (s) => {
let id = s.properties?.id
if (!id) {
return
}
const backendUrl = state.osmConnection._oauth_config.url
if (id.startsWith(backendUrl)) {
id = id.substring(backendUrl.length)
}
if (!(id.startsWith("way") || id.startsWith("node") || id.startsWith("relation"))) {
// This object is _not_ from OSM, so we skip it!
return
}
if (id.indexOf("-") >= 0) {
// This is a new object
return
}
try {
const osmObject = await state.osmObjectDownloader.DownloadObjectAsync(id)
if (osmObject === "deleted") {
console.debug("The current selected element has been deleted upstream!", id)
const currentTagsSource = state.featureProperties.getStore(id)
currentTagsSource.data["_deleted"] = "yes"
currentTagsSource.addCallbackAndRun((tags) => console.trace("Tags are", tags))
currentTagsSource.ping()
return
}
const latestTags = osmObject.tags
const newGeometry = osmObject.asGeoJson()?.geometry
const oldFeature = state.indexedFeatures.featuresById.data.get(id)
const oldGeometry = oldFeature?.geometry
if (oldGeometry !== undefined && !Utils.SameObject(newGeometry, oldGeometry)) {
console.log("Detected a difference in geometry for ", id)
oldFeature.geometry = newGeometry
state.featureProperties.getStore(id)?.ping()
}
this.applyUpdate(latestTags, id)
console.log("Updated", id)
} catch (e) {
console.warn("Could not update", id, " due to", e)
}
})
}
private applyUpdate(latestTags: OsmTags, id: string) {
const state = this.state
try {
const leftRightSensitive = state.layout.isLeftRightSensitive()
if (leftRightSensitive) {
SimpleMetaTagger.removeBothTagging(latestTags)
}
const pendingChanges = state.changes.pendingChanges.data
.filter((change) => change.type + "/" + change.id === id)
.filter((change) => change.tags !== undefined)
for (const pendingChange of pendingChanges) {
const tagChanges = pendingChange.tags
for (const tagChange of tagChanges) {
const key = tagChange.k
const v = tagChange.v
if (v === undefined || v === "") {
delete latestTags[key]
} else {
latestTags[key] = v
}
}
}
// With the changes applied, we merge them onto the upstream object
let somethingChanged = false
const currentTagsSource = state.featureProperties.getStore(id)
const currentTags = currentTagsSource.data
for (const key in latestTags) {
let osmValue = latestTags[key]
if (typeof osmValue === "number") {
osmValue = "" + osmValue
}
const localValue = currentTags[key]
if (localValue !== osmValue) {
somethingChanged = true
currentTags[key] = osmValue
}
}
for (const currentKey in currentTags) {
if (currentKey.startsWith("_")) {
continue
}
if (SelectedElementTagsUpdater.metatags.has(currentKey)) {
continue
}
if (currentKey in latestTags) {
continue
}
console.log("Removing key as deleted upstream", currentKey)
delete currentTags[currentKey]
somethingChanged = true
}
if (somethingChanged) {
console.log("Detected upstream changes to the object when opening it, updating...")
currentTagsSource.ping()
} else {
console.debug("Fetched latest tags for ", id, "but detected no changes")
}
} catch (e) {
console.error("Updating the tags of selected element ", id, "failed due to", e)
}
}
}

@@ -0,0 +1,60 @@
import { Store, UIEventSource } from "../UIEventSource"
import Locale from "../../UI/i18n/Locale"
import Combine from "../../UI/Base/Combine"
import { Utils } from "../../Utils"
import LayerConfig from "../../Models/ThemeConfig/LayerConfig"
import { Feature } from "geojson"
import FeaturePropertiesStore from "../FeatureSource/Actors/FeaturePropertiesStore"
import SvelteUIElement from "../../UI/Base/SvelteUIElement"
import TagRenderingAnswer from "../../UI/Popup/TagRendering/TagRenderingAnswer.svelte"
import { SpecialVisualizationState } from "../../UI/SpecialVisualization"
export default class TitleHandler {
constructor(
selectedElement: Store<Feature>,
selectedLayer: Store<LayerConfig>,
allElements: FeaturePropertiesStore,
state: SpecialVisualizationState
) {
const currentTitle: Store<string> = selectedElement.map(
(selected) => {
const defaultTitle = state.layout?.title?.txt ?? "MapComplete"
if (selected === undefined || selectedLayer.data === undefined) {
return defaultTitle
}
const tags = selected.properties
const layer = selectedLayer.data
if (layer.title === undefined) {
return defaultTitle
}
const tagsSource =
allElements.getStore(tags.id) ?? new UIEventSource<Record<string, string>>(tags)
const title = new SvelteUIElement(TagRenderingAnswer, {
tags: tagsSource,
state,
config: layer.title,
selectedElement: selectedElement.data,
layer,
})
return (
new Combine([defaultTitle, " | ", title]).ConstructElement()?.textContent ??
defaultTitle
)
},
[Locale.language, selectedLayer]
)
currentTitle.addCallbackAndRunD((title) => {
if (Utils.runningFromConsole) {
return
}
try {
document.title = title
} catch (e) {
console.error(e)
}
})
}
}

src/Logic/BBox.ts

@@ -0,0 +1,291 @@
import * as turf from "@turf/turf"
import { TileRange, Tiles } from "../Models/TileRange"
import { GeoOperations } from "./GeoOperations"
import { Feature, Polygon } from "geojson"
export class BBox {
static global: BBox = new BBox([
[-180, -90],
[180, 90],
])
readonly maxLat: number
readonly maxLon: number
readonly minLat: number
readonly minLon: number
/***
* Coordinates should be [[lon, lat],[lon, lat]]
* @param coordinates
*/
constructor(coordinates) {
this.maxLat = -90
this.maxLon = -180
this.minLat = 90
this.minLon = 180
for (const coordinate of coordinates) {
this.maxLon = Math.max(this.maxLon, coordinate[0])
this.maxLat = Math.max(this.maxLat, coordinate[1])
this.minLon = Math.min(this.minLon, coordinate[0])
this.minLat = Math.min(this.minLat, coordinate[1])
}
this.maxLon = Math.min(this.maxLon, 180)
this.maxLat = Math.min(this.maxLat, 90)
this.minLon = Math.max(this.minLon, -180)
this.minLat = Math.max(this.minLat, -90)
this.check()
}
static fromLeafletBounds(bounds) {
return new BBox([
[bounds.getWest(), bounds.getNorth()],
[bounds.getEast(), bounds.getSouth()],
])
}
static get(feature): BBox {
if (feature.bbox?.overlapsWith === undefined) {
const turfBbox: number[] = turf.bbox(feature)
feature.bbox = new BBox([
[turfBbox[0], turfBbox[1]],
[turfBbox[2], turfBbox[3]],
])
}
return feature.bbox
}
static bboxAroundAll(bboxes: BBox[]): BBox {
let maxLat: number = -90
let maxLon: number = -180
let minLat: number = 90
let minLon: number = 180
for (const bbox of bboxes) {
maxLat = Math.max(maxLat, bbox.maxLat)
maxLon = Math.max(maxLon, bbox.maxLon)
minLat = Math.min(minLat, bbox.minLat)
minLon = Math.min(minLon, bbox.minLon)
}
return new BBox([
[maxLon, maxLat],
[minLon, minLat],
])
}
/**
* Calculates the BBox based on a slippy map tile number
*
* const bbox = BBox.fromTile(16, 32754, 21785)
* bbox.minLon // => -0.076904296875
* bbox.maxLon // => -0.0714111328125
* bbox.minLat // => 51.5292513551899
* bbox.maxLat // => 51.53266860674158
*/
static fromTile(z: number, x: number, y: number): BBox {
return new BBox(Tiles.tile_bounds_lon_lat(z, x, y))
}
static fromTileIndex(i: number): BBox {
if (i === 0) {
return BBox.global
}
return BBox.fromTile(...Tiles.tile_from_index(i))
}
public unionWith(other: BBox) {
return new BBox([
[Math.max(this.maxLon, other.maxLon), Math.max(this.maxLat, other.maxLat)],
[Math.min(this.minLon, other.minLon), Math.min(this.minLat, other.minLat)],
])
}
/**
* Constructs a tilerange which fully contains this bbox (thus might be a bit larger)
* @param zoomlevel
*/
public containingTileRange(zoomlevel: number): TileRange {
return Tiles.TileRangeBetween(zoomlevel, this.minLat, this.minLon, this.maxLat, this.maxLon)
}
public overlapsWith(other: BBox) {
if (this.maxLon < other.minLon) {
return false
}
if (this.maxLat < other.minLat) {
return false
}
if (this.minLon > other.maxLon) {
return false
}
return this.minLat <= other.maxLat
}
public isContainedIn(other: BBox) {
if (this.maxLon > other.maxLon) {
return false
}
if (this.maxLat > other.maxLat) {
return false
}
if (this.minLon < other.minLon) {
return false
}
if (this.minLat < other.minLat) {
return false
}
return true
}
squarify(): BBox {
const w = this.maxLon - this.minLon
const h = this.maxLat - this.minLat
const s = Math.sqrt(w * h)
const lon = (this.maxLon + this.minLon) / 2
const lat = (this.maxLat + this.minLat) / 2
// We want a square with more-or-less the same surface area,
// so the new side 's' should satisfy s * s = w * h, i.e. s = sqrt(w * h)
return new BBox([
[lon - s / 2, lat - s / 2],
[lon + s / 2, lat + s / 2],
])
}
isNearby(location: [number, number], maxRange: number): boolean {
if (this.contains(location)) {
return true
}
const [lon, lat] = location
// We 'project' the point onto the nearest edges. If it is close to both a horizontal _and_ a vertical edge, it is nearby
// Vertically nearby: either within the [minLat, maxLat] range or at most maxRange away
const nearbyVertical =
(this.minLat <= lat &&
this.maxLat >= lat &&
GeoOperations.distanceBetween(location, [lon, this.minLat]) <= maxRange) ||
GeoOperations.distanceBetween(location, [lon, this.maxLat]) <= maxRange
if (!nearbyVertical) {
return false
}
const nearbyHorizontal =
(this.minLon <= lon &&
this.maxLon >= lon &&
GeoOperations.distanceBetween(location, [this.minLon, lat]) <= maxRange) ||
GeoOperations.distanceBetween(location, [this.maxLon, lat]) <= maxRange
return nearbyHorizontal
}
getEast() {
return this.maxLon
}
getNorth() {
return this.maxLat
}
getWest() {
return this.minLon
}
getSouth() {
return this.minLat
}
contains(lonLat: [number, number]) {
return (
this.minLat <= lonLat[1] &&
lonLat[1] <= this.maxLat &&
this.minLon <= lonLat[0] &&
lonLat[0] <= this.maxLon
)
}
pad(factor: number, maxIncrease = 2): BBox {
const latDiff = Math.min(maxIncrease / 2, Math.abs(this.maxLat - this.minLat) * factor)
const lonDiff = Math.min(maxIncrease / 2, Math.abs(this.maxLon - this.minLon) * factor)
return new BBox([
[this.minLon - lonDiff, this.minLat - latDiff],
[this.maxLon + lonDiff, this.maxLat + latDiff],
])
}
padAbsolute(degrees: number): BBox {
return new BBox([
[this.minLon - degrees, this.minLat - degrees],
[this.maxLon + degrees, this.maxLat + degrees],
])
}
toLngLat(): [[number, number], [number, number]] {
return [
[this.minLon, this.minLat],
[this.maxLon, this.maxLat],
]
}
public asGeojsonCached() {
if (this["geojsonCache"] === undefined) {
this["geojsonCache"] = this.asGeoJson({})
}
return this["geojsonCache"]
}
public asGeoJson<T = {}>(properties?: T): Feature<Polygon, T> {
return {
type: "Feature",
properties: properties,
geometry: this.asGeometry(),
}
}
public asGeometry(): Polygon {
return {
type: "Polygon",
coordinates: [
[
[this.minLon, this.minLat],
[this.maxLon, this.minLat],
[this.maxLon, this.maxLat],
[this.minLon, this.maxLat],
[this.minLon, this.minLat],
],
],
}
}
/**
* Expands the BBox so that it contains complete tiles for the given zoomlevel
* @param zoomlevel
*/
expandToTileBounds(zoomlevel: number): BBox {
if (zoomlevel === undefined) {
return this
}
const ul = Tiles.embedded_tile(this.minLat, this.minLon, zoomlevel)
const lr = Tiles.embedded_tile(this.maxLat, this.maxLon, zoomlevel)
const boundsul = Tiles.tile_bounds_lon_lat(ul.z, ul.x, ul.y)
const boundslr = Tiles.tile_bounds_lon_lat(lr.z, lr.x, lr.y)
return new BBox([].concat(boundsul, boundslr))
}
toMercator(): { minLat: number; maxLat: number; minLon: number; maxLon: number } {
const [minLon, minLat] = GeoOperations.ConvertWgs84To900913([this.minLon, this.minLat])
const [maxLon, maxLat] = GeoOperations.ConvertWgs84To900913([this.maxLon, this.maxLat])
return {
minLon,
maxLon,
minLat,
maxLat,
}
}
private check() {
if (isNaN(this.maxLon) || isNaN(this.maxLat) || isNaN(this.minLon) || isNaN(this.minLat)) {
console.trace("BBox with NaN detected:", this)
throw "BBOX has NAN"
}
}
}
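
For illustration only (not part of this commit): a small usage sketch of the BBox helper above. The coordinates and tile numbers are made up.

import { BBox } from "./BBox"

const box = new BBox([
    [3.2, 50.8],
    [3.3, 50.9],
])
const tile = BBox.fromTile(14, 8337, 5471) // some slippy-map tile
console.log(box.overlapsWith(tile), box.contains([3.25, 50.85]))
console.log(box.pad(0.1).toLngLat()) // slightly padded [[minLon, minLat], [maxLon, maxLat]]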

@@ -0,0 +1,50 @@
/// Given a feature source, calculates a list of OSM-contributors who mapped the latest versions
import { Store, UIEventSource } from "./UIEventSource"
import { BBox } from "./BBox"
import GeoIndexedStore from "./FeatureSource/Actors/GeoIndexedStore"
export default class ContributorCount {
public readonly Contributors: UIEventSource<Map<string, number>> = new UIEventSource<
Map<string, number>
>(new Map<string, number>())
private readonly perLayer: ReadonlyMap<string, GeoIndexedStore>
private lastUpdate: Date = undefined
constructor(state: {
mapProperties: { bounds: Store<BBox> }
dataIsLoading: Store<boolean>
perLayer: ReadonlyMap<string, GeoIndexedStore>
}) {
this.perLayer = state.perLayer
const self = this
state.mapProperties.bounds.mapD(
(bbox) => {
self.update(bbox)
},
[state.dataIsLoading]
)
}
private update(bbox: BBox) {
const now = new Date()
if (
this.lastUpdate !== undefined &&
now.getTime() - this.lastUpdate.getTime() < 1000 * 60
) {
return
}
this.lastUpdate = now
const featuresList = [].concat(
Array.from(this.perLayer.values()).map((fs) => fs.GetFeaturesWithin(bbox))
)
const hist = new Map<string, number>()
for (const list of featuresList) {
for (const feature of list) {
const contributor = feature.properties["_last_edit:contributor"]
const count = hist.get(contributor) ?? 0
hist.set(contributor, count + 1)
}
}
this.Contributors.setData(hist)
}
}

@@ -0,0 +1,243 @@
import LayoutConfig from "../Models/ThemeConfig/LayoutConfig"
import { QueryParameters } from "./Web/QueryParameters"
import { AllKnownLayouts } from "../Customizations/AllKnownLayouts"
import { FixedUiElement } from "../UI/Base/FixedUiElement"
import { Utils } from "../Utils"
import Combine from "../UI/Base/Combine"
import { SubtleButton } from "../UI/Base/SubtleButton"
import BaseUIElement from "../UI/BaseUIElement"
import { UIEventSource } from "./UIEventSource"
import { LocalStorageSource } from "./Web/LocalStorageSource"
import LZString from "lz-string"
import { FixLegacyTheme } from "../Models/ThemeConfig/Conversion/LegacyJsonConvert"
import { LayerConfigJson } from "../Models/ThemeConfig/Json/LayerConfigJson"
import known_layers from "../assets/generated/known_layers.json"
import { PrepareTheme } from "../Models/ThemeConfig/Conversion/PrepareTheme"
import licenses from "../assets/generated/license_info.json"
import TagRenderingConfig from "../Models/ThemeConfig/TagRenderingConfig"
import { FixImages } from "../Models/ThemeConfig/Conversion/FixImages"
import Svg from "../Svg"
import questions from "../assets/generated/layers/questions.json"
import {
DoesImageExist,
PrevalidateTheme,
ValidateTagRenderings,
ValidateThemeAndLayers,
} from "../Models/ThemeConfig/Conversion/Validation"
import { DesugaringContext } from "../Models/ThemeConfig/Conversion/Conversion"
import { RewriteSpecial } from "../Models/ThemeConfig/Conversion/PrepareLayer"
import { TagRenderingConfigJson } from "../Models/ThemeConfig/Json/TagRenderingConfigJson"
import Hash from "./Web/Hash"
export default class DetermineLayout {
private static readonly _knownImages = new Set(Array.from(licenses).map((l) => l.path))
private static readonly loadCustomThemeParam = QueryParameters.GetQueryParameter(
"userlayout",
"false",
"If not 'false', a custom (non-official) theme is loaded. This custom layout can be done in multiple ways: \n\n- The hash of the URL contains a base64-encoded .json-file containing the theme definition\n- The hash of the URL contains a lz-compressed .json-file, as generated by the custom theme generator\n- The parameter itself is an URL, in which case that URL will be downloaded. It should point to a .json of a theme"
)
public static getCustomDefinition(): string {
const layoutFromBase64 = decodeURIComponent(DetermineLayout.loadCustomThemeParam.data)
if (layoutFromBase64.startsWith("http")) {
return layoutFromBase64
}
if (layoutFromBase64 !== "false") {
// We have to load something from the hash (or from disk)
const hash = Hash.hash.data
try {
JSON.parse(atob(hash))
return atob(hash)
} catch (e) {
// We try to decode with lz-string
JSON.parse(Utils.UnMinify(LZString.decompressFromBase64(hash)))
return Utils.UnMinify(LZString.decompressFromBase64(hash))
}
}
return undefined
}
/**
* Gets the correct layout for this website
*/
public static async GetLayout(): Promise<LayoutConfig | undefined> {
const layoutFromBase64 = decodeURIComponent(DetermineLayout.loadCustomThemeParam.data)
if (layoutFromBase64.startsWith("http")) {
return await DetermineLayout.LoadRemoteTheme(layoutFromBase64)
}
if (layoutFromBase64 !== "false") {
// We have to load something from the hash (or from disk)
return DetermineLayout.LoadLayoutFromHash(DetermineLayout.loadCustomThemeParam)
}
let layoutId: string = undefined
const path = window.location.pathname.split("/").slice(-1)[0]
if (path !== "theme.html" && path !== "") {
layoutId = path
if (path.endsWith(".html")) {
layoutId = path.substr(0, path.length - 5)
}
console.log("Using layout", layoutId)
}
layoutId = QueryParameters.GetQueryParameter(
"layout",
layoutId,
"The layout to load into MapComplete"
).data
const layout = AllKnownLayouts.allKnownLayouts.get(layoutId?.toLowerCase())
if (layout === undefined) {
throw "No builtin map theme with name " + layoutId + " exists"
}
return layout
}
public static LoadLayoutFromHash(userLayoutParam: UIEventSource<string>): LayoutConfig | null {
let hash = location.hash.substr(1)
let json: any
// layoutFromBase64 contains the name of the theme. This is partly to do tracking with goat counter
const dedicatedHashFromLocalStorage = LocalStorageSource.Get(
"user-layout-" + userLayoutParam.data?.replace(" ", "_")
)
if (dedicatedHashFromLocalStorage.data?.length < 10) {
dedicatedHashFromLocalStorage.setData(undefined)
}
const hashFromLocalStorage = LocalStorageSource.Get("last-loaded-user-layout")
if (hash.length < 10) {
hash = dedicatedHashFromLocalStorage.data ?? hashFromLocalStorage.data
} else {
console.log("Saving hash to local storage")
hashFromLocalStorage.setData(hash)
dedicatedHashFromLocalStorage.setData(hash)
}
try {
json = JSON.parse(atob(hash))
} catch (e) {
// We try to decode with lz-string
json = JSON.parse(Utils.UnMinify(LZString.decompressFromBase64(hash)))
}
const layoutToUse = DetermineLayout.prepCustomTheme(json)
userLayoutParam.setData(layoutToUse.id)
return layoutToUse
}
public static ShowErrorOnCustomTheme(
intro: string = "Error: could not parse the custom layout:",
error: BaseUIElement,
json?: any
) {
new Combine([
intro,
error.SetClass("alert"),
new SubtleButton(Svg.back_svg(), "Go back to the theme overview", {
url: window.location.protocol + "//" + window.location.host + "/index.html",
newTab: false,
}),
json !== undefined
? new SubtleButton(Svg.download_svg(), "Download the JSON file").onClick(() => {
Utils.offerContentsAsDownloadableFile(
JSON.stringify(json, null, " "),
"theme_definition.json"
)
})
: undefined,
])
.SetClass("flex flex-col clickable")
.AttachTo("maindiv")
}
private static getSharedTagRenderings(): Map<string, TagRenderingConfigJson> {
const dict = new Map<string, TagRenderingConfigJson>()
for (const tagRendering of questions.tagRenderings) {
dict.set(tagRendering.id, tagRendering)
}
return dict
}
private static prepCustomTheme(json: any, sourceUrl?: string, forceId?: string): LayoutConfig {
if (json.layers === undefined && json.tagRenderings !== undefined) {
const iconTr = json.mapRendering.map((mr) => mr.icon).find((icon) => icon !== undefined)
const icon = new TagRenderingConfig(iconTr).render.txt
json = {
id: json.id,
description: json.description,
descriptionTail: {
en: "<div class='alert'>Layer only mode.</div> The loaded custom theme actually isn't a custom theme, but only contains a layer.",
},
icon,
title: json.name,
layers: [json],
}
}
const knownLayersDict = new Map<string, LayerConfigJson>()
for (const key in known_layers.layers) {
const layer = known_layers.layers[key]
knownLayersDict.set(layer.id, <LayerConfigJson>layer)
}
const convertState: DesugaringContext = {
tagRenderings: DetermineLayout.getSharedTagRenderings(),
sharedLayers: knownLayersDict,
publicLayers: new Set<string>(),
}
json = new FixLegacyTheme().convertStrict(json, "While loading a dynamic theme")
const raw = json
json = new FixImages(DetermineLayout._knownImages).convertStrict(
json,
"While fixing the images"
)
json.enableNoteImports = json.enableNoteImports ?? false
json = new PrepareTheme(convertState).convertStrict(json, "While preparing a dynamic theme")
console.log("The layoutconfig is ", json)
json.id = forceId ?? json.id
{
let { errors } = new PrevalidateTheme().convert(json, "validation")
if (errors.length > 0) {
throw "Detected errors: " + errors.join("\n")
}
}
{
let { errors } = new ValidateThemeAndLayers(
new DoesImageExist(new Set<string>(), (_) => true),
"",
false
).convert(json, "validation")
if (errors.length > 0) {
throw "Detected errors: " + errors.join("\n")
}
}
return new LayoutConfig(json, false, {
definitionRaw: JSON.stringify(raw, null, " "),
definedAtUrl: sourceUrl,
})
}
private static async LoadRemoteTheme(link: string): Promise<LayoutConfig | null> {
console.log("Downloading map theme from ", link)
new FixedUiElement(`Downloading the theme from the <a href="${link}">link</a>...`).AttachTo(
"maindiv"
)
let parsed = await Utils.downloadJson(link)
let forcedId = parsed.id
const url = new URL(link)
if (!(url.hostname === "localhost" || url.hostname === "127.0.0.1")) {
forcedId = link
}
console.log("Loaded remote link:", link)
return DetermineLayout.prepCustomTheme(parsed, link, forcedId)
}
}
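
For illustration only (not part of this commit): the plain base64 variant of the `userlayout` mechanism described above. A custom theme definition is JSON-stringified, base64-encoded and put into the URL hash; `LoadLayoutFromHash` will then `atob()` and `JSON.parse()` it (the lz-compressed form is the fallback path). The host name and theme id below are made up.

const customTheme = { id: "my_custom_theme" /* ..., title, layers, ... */ }
const shareUrl =
    "https://example.org/theme.html" +
    "?userlayout=" + customTheme.id +
    "#" + btoa(JSON.stringify(customTheme))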

src/Logic/ExtraFunctions.ts

@@ -0,0 +1,518 @@
import { GeoOperations } from "./GeoOperations"
import Combine from "../UI/Base/Combine"
import BaseUIElement from "../UI/BaseUIElement"
import List from "../UI/Base/List"
import Title from "../UI/Base/Title"
import { BBox } from "./BBox"
import { Feature, Geometry, MultiPolygon, Polygon } from "geojson"
import { GeoJSONFeature } from "maplibre-gl"
export interface ExtraFuncParams {
/**
* Gets all the features from the given layer within the given BBOX.
* Note that more features then requested can be given back.
* Format: [ [ geojson, geojson, geojson, ... ], [geojson, ...], ...]
*/
getFeaturesWithin: (
layerId: string,
bbox: BBox
) => Feature<Geometry, Record<string, string>>[][]
getFeatureById: (id: string) => Feature<Geometry, Record<string, string>>
}
/**
* Describes a function that is added to a geojson object in order to calculate calculated tags
*/
interface ExtraFunction {
readonly _name: string
readonly _args: string[]
readonly _doc: string
readonly _f: (params: ExtraFuncParams, feat: Feature<Geometry, any>) => any
}
class EnclosingFunc implements ExtraFunction {
_name = "enclosingFeatures"
_doc = [
"Gives a list of all features in the specified layers which fully contain this object. Returned features will always be (multi)polygons. (LineStrings and Points from the other layers are ignored)",
"",
"The result is a list of features: `{feat: Polygon}[]`",
"This function will never return the feature itself.",
].join("\n")
_args = [
"...layerIds - one or more layer ids of the layer from which every feature is checked for overlap)",
]
_f(params: ExtraFuncParams, feat: Feature<Geometry, any>) {
return (...layerIds: string[]) => {
const result: { feat: any }[] = []
const bbox = BBox.get(feat)
const seenIds = new Set<string>()
seenIds.add(feat.properties.id)
for (const layerId of layerIds) {
const otherFeaturess = params.getFeaturesWithin(layerId, bbox)
if (otherFeaturess === undefined) {
continue
}
if (otherFeaturess.length === 0) {
continue
}
for (const otherFeatures of otherFeaturess) {
for (const otherFeature of otherFeatures) {
if (seenIds.has(otherFeature.properties.id)) {
continue
}
seenIds.add(otherFeature.properties.id)
if (
otherFeature.geometry.type !== "Polygon" &&
otherFeature.geometry.type !== "MultiPolygon"
) {
continue
}
if (
GeoOperations.completelyWithin(
<Feature>feat,
<Feature<Polygon | MultiPolygon, any>>otherFeature
)
) {
result.push({ feat: otherFeature })
}
}
}
}
return result
}
}
}
class OverlapFunc implements ExtraFunction {
_name = "overlapWith"
_doc = [
"Gives a list of features from the specified layer which this feature (partly) overlaps with. A point which is embedded in the feature is detected as well.",
"If the current feature is a point, all features that this point is embeded in are given.",
"",
"The returned value is `{ feat: GeoJSONFeature, overlap: number}[]` where `overlap` is the overlapping surface are (in m²) for areas, the overlapping length (in meter) if the current feature is a line or `undefined` if the current feature is a point.",
"The resulting list is sorted in descending order by overlap. The feature with the most overlap will thus be the first in the list.",
"",
"For example to get all objects which overlap or embed from a layer, use `_contained_climbing_routes_properties=overlapWith(feat)('climbing_route')`",
"",
"Also see [enclosingFeatures](#enclosingFeatures) which can be used to get all objects which fully contain this feature",
].join("\n")
_args = [
"...layerIds - one or more layer ids of the layer from which every feature is checked for overlap)",
]
_f(params, feat) {
return (...layerIds: string[]) => {
const result: { feat: any; overlap: number }[] = []
const seenIds = new Set<string>()
const bbox = BBox.get(feat)
for (const layerId of layerIds) {
const otherFeaturess = params.getFeaturesWithin(layerId, bbox)
if (otherFeaturess === undefined) {
continue
}
if (otherFeaturess.length === 0) {
continue
}
for (const otherFeatures of otherFeaturess) {
const overlap = GeoOperations.calculateOverlap(feat, otherFeatures)
for (const overlappingFeature of overlap) {
if (seenIds.has(overlappingFeature.feat.properties.id)) {
continue
}
seenIds.add(overlappingFeature.feat.properties.id)
result.push(overlappingFeature)
}
}
}
result.sort((a, b) => b.overlap - a.overlap)
return result
}
}
}
class IntersectionFunc implements ExtraFunction {
_name = "intersectionsWith"
_doc =
"Gives the intersection points with selected features. Only works with (Multi)Polygons and LineStrings.\n\n" +
"Returns a `{feat: GeoJson, intersections: [number,number][]}` where `feat` is the full, original feature. This list is in random order.\n\n" +
"If the current feature is a point, this function will return an empty list.\n" +
"Points from other layers are ignored - even if the points are parts of the current linestring."
_args = [
"...layerIds - one or more layer ids of the layer from which every feature is checked for intersection)",
]
_f(params: ExtraFuncParams, feat) {
return (...layerIds: string[]) => {
const result: { feat: any; intersections: [number, number][] }[] = []
const bbox = BBox.get(feat)
for (const layerId of layerIds) {
const otherLayers = params.getFeaturesWithin(layerId, bbox)
if (otherLayers === undefined) {
continue
}
if (otherLayers.length === 0) {
continue
}
for (const otherFeatures of otherLayers) {
for (const otherFeature of otherFeatures) {
const intersections = GeoOperations.LineIntersections(
feat,
<Feature<any, Record<string, string>>>otherFeature
)
if (intersections.length === 0) {
continue
}
result.push({ feat: otherFeature, intersections })
}
}
}
return result
}
}
}
class DistanceToFunc implements ExtraFunction {
_name = "distanceTo"
_doc =
"Calculates the distance between the feature and a specified point in meter. The input should either be a pair of coordinates, a geojson feature or the ID of an object"
_args = ["feature OR featureID OR longitude", "undefined OR latitude"]
_f(featuresPerLayer, feature) {
return (arg0, lat) => {
if (arg0 === undefined) {
return undefined
}
if (typeof arg0 === "number") {
// Feature._lon and ._lat are conveniently placed there by one of the other metatags
return GeoOperations.distanceBetween(
[arg0, lat],
GeoOperations.centerpointCoordinates(feature)
)
}
if (typeof arg0 === "string") {
// This is an identifier
const feature = featuresPerLayer.getFeatureById(arg0)
if (feature === undefined) {
return undefined
}
arg0 = feature
}
// arg0 is probably a geojsonfeature
return GeoOperations.distanceBetween(
GeoOperations.centerpointCoordinates(arg0),
GeoOperations.centerpointCoordinates(feature)
)
}
}
}
class ClosestObjectFunc implements ExtraFunction {
_name = "closest"
_doc =
"Given either a list of geojson features or a single layer name, gives the single object which is nearest to the feature. In the case of ways/polygons, only the centerpoint is considered. Returns a single geojson feature or undefined if nothing is found (or not yet loaded)"
_args = ["list of features or a layer name or '*' to get all features"]
_f(params, feature) {
return (features) =>
ClosestNObjectFunc.GetClosestNFeatures(params, feature, features)?.[0]?.feat
}
}
class ClosestNObjectFunc implements ExtraFunction {
_name = "closestn"
_doc =
"Given either a list of geojson features or a single layer name, gives the n closest objects which are nearest to the feature (excluding the feature itself). In the case of ways/polygons, only the centerpoint is considered. " +
"Returns a list of `{feat: geojson, distance:number}` the empty list if nothing is found (or not yet loaded)\n\n" +
"If a 'unique tag key' is given, the tag with this key will only appear once (e.g. if 'name' is given, all features will have a different name)"
_args = [
"list of features or layer name or '*' to get all features",
"amount of features",
"unique tag key (optional)",
"maxDistanceInMeters (optional)",
]
/**
* Gets the closest N features, sorted by ascending distance.
*
* @param params: The link to mapcomplete state
* @param feature: The central feature under consideration
* @param features: The other features
* @param options: maxFeatures: The maximum amount of features to be returned. Default: 1; uniqueTag: returned features are not allowed to have the same value for this key; maxDistance: stop searching if it is too far away (in meter). Default: 500m
* @constructor
* @private
*/
static GetClosestNFeatures(
params: ExtraFuncParams,
feature: any,
features: string | Feature[],
options?: { maxFeatures?: number; uniqueTag?: string | undefined; maxDistance?: number }
): { feat: any; distance: number }[] {
const maxFeatures = options?.maxFeatures ?? 1
const maxDistance = options?.maxDistance ?? 500
const uniqueTag: string | undefined = options?.uniqueTag
let allFeatures: Feature[][]
if (typeof features === "string") {
const name = features
const bbox = GeoOperations.bbox(
GeoOperations.buffer(GeoOperations.bbox(feature), maxDistance)
)
allFeatures = params.getFeaturesWithin(name, new BBox(bbox.geometry.coordinates))
} else {
allFeatures = [features]
}
if (features === undefined) {
return
}
const selfCenter = GeoOperations.centerpointCoordinates(feature)
let closestFeatures: { feat: any; distance: number }[] = []
for (const feats of allFeatures) {
for (const otherFeature of feats) {
if (
otherFeature === feature ||
otherFeature.properties.id === feature.properties.id
) {
continue // We ignore self
}
const distance = GeoOperations.distanceBetween(
GeoOperations.centerpointCoordinates(otherFeature),
selfCenter
)
if (distance === undefined || distance === null || isNaN(distance)) {
console.error(
"Could not calculate the distance between",
feature,
"and",
otherFeature
)
throw "Undefined distance!"
}
if (distance === 0) {
console.trace(
"Got a suspiciously zero distance between",
otherFeature,
"and self-feature",
feature
)
}
if (distance > maxDistance) {
continue
}
if (closestFeatures.length === 0) {
// This is the first matching feature we find - always add it
closestFeatures.push({
feat: otherFeature,
distance: distance,
})
continue
}
if (
closestFeatures.length >= maxFeatures &&
closestFeatures[maxFeatures - 1].distance < distance
) {
// The last feature of the list (and thus the furthest away) is still closer than this one
// No use in checking further, as we already have enough features!
continue
}
let targetIndex = closestFeatures.length
for (let i = 0; i < closestFeatures.length; i++) {
const closestFeature = closestFeatures[i]
if (uniqueTag !== undefined) {
const uniqueTagsMatch =
otherFeature.properties[uniqueTag] !== undefined &&
closestFeature.feat.properties[uniqueTag] ===
otherFeature.properties[uniqueTag]
if (uniqueTagsMatch) {
targetIndex = -1
if (closestFeature.distance > distance) {
// This is a very special situation:
// We want to see the tag `uniquetag=some_value` only once in the entire list (e.g. to prevent road segments with identical names from filling up the list of 'names of nearby roads')
// At this point, we have found a closer segment with the same, identical tag,
// so we replace it directly
closestFeatures[i] = { feat: otherFeature, distance: distance }
}
break
}
}
if (closestFeature.distance > distance) {
targetIndex = i
if (uniqueTag !== undefined) {
const uniqueValue = otherFeature.properties[uniqueTag]
// We might still have some other values later on with the same uniquetag that have to be cleaned
for (let j = i; j < closestFeatures.length; j++) {
if (closestFeatures[j].feat.properties[uniqueTag] === uniqueValue) {
closestFeatures.splice(j, 1)
}
}
}
break
}
}
if (targetIndex == -1) {
continue // value is already swapped by the unique tag
}
if (targetIndex < maxFeatures) {
// insert and drop one
closestFeatures.splice(targetIndex, 0, {
feat: otherFeature,
distance: distance,
})
if (closestFeatures.length >= maxFeatures) {
closestFeatures.splice(maxFeatures, 1)
}
} else {
// Overwrite the last element
closestFeatures[targetIndex] = {
feat: otherFeature,
distance: distance,
}
}
}
}
return closestFeatures
}
_f(params, feature) {
return (features, amount, uniqueTag, maxDistanceInMeters) => {
let distance: number = Number(maxDistanceInMeters)
if (isNaN(distance)) {
distance = undefined
}
return ClosestNObjectFunc.GetClosestNFeatures(params, feature, features, {
maxFeatures: Number(amount),
uniqueTag: uniqueTag,
maxDistance: distance,
})
}
}
}
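// A minimal usage sketch of `closestn` inside a layer's `calculatedTags`, assuming a theme that also
// loads a layer with id 'bench' and wants at most three distinctly-named benches within 250 meters:
//
//   "calculatedTags": [
//     "_nearby_benches=closestn(feat)('bench', 3, 'name', 250)"
//   ]
//
// Each entry of the result has the shape { feat, distance }, sorted by ascending distance.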
class GetParsed implements ExtraFunction {
_name = "get"
_doc =
"Gets the property of the feature, parses it (as JSON) and returns it. Might return 'undefined' if not defined, null, ..."
_args = ["key"]
_f(params, feat) {
return (key) => {
const value = feat.properties[key]
if (value === undefined) {
return undefined
}
if (typeof value !== "string") {
return value
}
try {
const parsed = JSON.parse(value)
if (parsed === null) {
return undefined
}
return parsed
} catch (e) {
console.warn(
"Could not parse property " + key + " due to: " + e + ", the value is " + value
)
return undefined
}
}
}
}
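// A minimal sketch of `get`, assuming a feature carrying a JSON-encoded property `capacity:disabled`:
//
//   "_capacity_disabled=get(feat)('capacity:disabled')"
//
// The result is the parsed JSON value, or `undefined` when the property is missing, `null`,
// or not valid JSON.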
export type ExtraFuncType = (typeof ExtraFunctions.types)[number]
export class ExtraFunctions {
static readonly intro = new Combine([
new Title("Calculating tags with Javascript", 2),
"In some cases, it is useful to have some tags calculated based on other properties. Some useful tags are available by default (e.g. `lat`, `lon`, `_country`), as detailed above.",
"It is also possible to calculate your own tags - but this requires some javascript knowledge.",
"",
"Before proceeding, some warnings:",
new List([
"DO NOT DO THIS AS BEGINNER",
"**Only do this if all other techniques fail** This should _not_ be done to create a rendering effect, only to calculate a specific value",
"**THIS MIGHT BE DISABLED WITHOUT ANY NOTICE ON UNOFFICIAL THEMES** As unofficial themes might be loaded from the internet, this is the equivalent of injecting arbitrary code into the client. It'll be disabled if abuse occurs.",
]),
"To enable this feature, add a field `calculatedTags` in the layer object, e.g.:",
"````",
'"calculatedTags": [',
' "_someKey=javascript-expression",',
' "name=feat.properties.name ?? feat.properties.ref ?? feat.properties.operator",',
" \"_distanceCloserThen3Km=distanceTo(feat)( some_lon, some_lat) < 3 ? 'yes' : 'no'\" ",
" ]",
"````",
"",
"The above code will be executed for every feature in the layer. The feature is accessible as `feat` and is an amended geojson object:",
new List([
"`area` contains the surface area (in square meters) of the object",
"`lat` and `lon` contain the latitude and longitude",
]),
"Some advanced functions are available as well. Due to technical reasons, they should be used as `funcname(feat)(arguments)`.",
])
.SetClass("flex-col")
.AsMarkdown()
static readonly types = [
"distanceTo",
"overlapWith",
"enclosingFeatures",
"intersectionsWith",
"closest",
"closestn",
"get",
] as const
private static readonly allFuncs = [
new DistanceToFunc(),
new OverlapFunc(),
new EnclosingFunc(),
new IntersectionFunc(),
new ClosestObjectFunc(),
new ClosestNObjectFunc(),
new GetParsed(),
]
public static constructHelpers(
params: ExtraFuncParams
): Record<ExtraFuncType, (feature: Feature) => Function> {
const record: Record<string, (feature: GeoJSONFeature) => Function> = {}
for (const f of ExtraFunctions.allFuncs) {
if (this.types.indexOf(<any>f._name) < 0) {
throw "Invalid extraFunc-type: " + f._name
}
record[f._name] = (feat) => f._f(params, feat)
}
return record
}
public static HelpText(): BaseUIElement {
const elems = []
for (const func of ExtraFunctions.allFuncs) {
elems.push(new Title(func._name, 3), func._doc, new List(func._args ?? [], true))
}
return new Combine([
ExtraFunctions.intro,
new List(ExtraFunctions.allFuncs.map((func) => `[${func._name}](#${func._name})`)),
...elems,
])
}
}
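// A minimal sketch of how these helpers are wired up, assuming an `ExtraFuncParams` implementation
// named `params` and a GeoJSON feature `feat`:
//
//   const helpers = ExtraFunctions.constructHelpers(params)
//   const nearbyNames = helpers["closestn"](feat)("bench", 3, "name", 250)
//   const parsedCapacity = helpers["get"](feat)("capacity")
//
// Every helper follows the `funcname(feat)(arguments)` calling convention described above.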

View file

@ -0,0 +1,112 @@
import { FeatureSource } from "../FeatureSource"
import { UIEventSource } from "../../UIEventSource"
/**
 * Constructs a UIEventSource for the properties of every Feature, indexed by id
*/
export default class FeaturePropertiesStore {
private readonly _elements = new Map<string, UIEventSource<Record<string, string>>>()
constructor(...sources: FeatureSource[]) {
for (const source of sources) {
this.trackFeatureSource(source)
}
}
public getStore(id: string): UIEventSource<Record<string, string>> {
return this._elements.get(id)
}
public trackFeatureSource(source: FeatureSource) {
const self = this
source.features.addCallbackAndRunD((features) => {
for (const feature of features) {
const id = feature.properties.id
if (id === undefined) {
console.trace("Error: feature without ID:", feature)
throw "Error: feature without ID"
}
const source = self._elements.get(id)
if (source === undefined) {
self._elements.set(id, new UIEventSource<any>(feature.properties))
continue
}
if (source.data === feature.properties) {
continue
}
// Update the tags in the old store and link them
const changeMade = FeaturePropertiesStore.mergeTags(source.data, feature.properties)
feature.properties = source.data
if (changeMade) {
source.ping()
}
}
})
}
/**
* Overwrites the tags of the old properties object, returns true if a change was made.
     * Metatags are overridden if they are present in the new properties, but they are never removed
* @param oldProperties
* @param newProperties
* @private
*/
private static mergeTags(
oldProperties: Record<string, any>,
newProperties: Record<string, any>
): boolean {
let changeMade = false
for (const oldPropertiesKey in oldProperties) {
            // Delete properties from the old record if they are no longer in the new properties
if (oldPropertiesKey.startsWith("_")) {
continue
}
if (newProperties[oldPropertiesKey] === undefined) {
changeMade = true
delete oldProperties[oldPropertiesKey]
}
}
// Copy all properties from the new record into the old
for (const newPropertiesKey in newProperties) {
const v = newProperties[newPropertiesKey]
if (oldProperties[newPropertiesKey] !== v) {
oldProperties[newPropertiesKey] = v
changeMade = true
}
}
return changeMade
}
// noinspection JSUnusedGlobalSymbols
public addAlias(oldId: string, newId: string): void {
if (newId === undefined) {
// We removed the node/way/relation with type 'type' and id 'oldId' on openstreetmap!
const element = this._elements.get(oldId)
element.data._deleted = "yes"
element.ping()
return
}
if (oldId == newId) {
return
}
const element = this._elements.get(oldId)
if (element === undefined) {
// Element to rewrite not found, probably a node or relation that is not rendered
return
}
element.data.id = newId
this._elements.set(newId, element)
element.ping()
}
has(id: string) {
return this._elements.has(id)
}
}
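// A minimal usage sketch, assuming a FeatureSource `source` whose features all carry an `id` property:
//
//   const store = new FeaturePropertiesStore(source)
//   const tags = store.getStore("node/42")
//   tags?.addCallbackAndRunD((properties) => console.log("Tags changed:", properties))
//
// `getStore` returns `undefined` for ids that have not been seen yet.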

View file

@ -0,0 +1,53 @@
import { FeatureSource, FeatureSourceForLayer } from "../FeatureSource"
import { Feature } from "geojson"
import { BBox } from "../../BBox"
import { GeoOperations } from "../../GeoOperations"
import { Store } from "../../UIEventSource"
import FilteredLayer from "../../../Models/FilteredLayer"
/**
* Allows the retrieval of all features in the requested BBox; useful for one-shot queries;
*
* Use a ClippedFeatureSource for a continuously updating featuresource
*/
export default class GeoIndexedStore implements FeatureSource {
public features: Store<Feature[]>
constructor(features: FeatureSource | Store<Feature[]>) {
this.features = features["features"] ?? features
}
/**
* Gets the current features within the given bbox.
*
* @param bbox
* @constructor
*/
public GetFeaturesWithin(bbox: BBox): Feature[] {
const bboxFeature = bbox.asGeojsonCached()
return this.features.data.filter((f) => {
if (f.geometry.type === "Point") {
return bbox.contains(<[number, number]>f.geometry.coordinates)
}
if (f.geometry.type === "LineString") {
const intersection = GeoOperations.intersect(
BBox.get(f).asGeojsonCached(),
bboxFeature
)
return intersection !== undefined
}
if (f.geometry.type === "Polygon" || f.geometry.type === "MultiPolygon") {
return GeoOperations.intersect(f, bboxFeature) !== undefined
}
return GeoOperations.intersect(f, bboxFeature) !== undefined
})
}
}
export class GeoIndexedStoreForLayer extends GeoIndexedStore implements FeatureSourceForLayer {
readonly layer: FilteredLayer
constructor(features: FeatureSource | Store<Feature[]>, layer: FilteredLayer) {
super(features)
this.layer = layer
}
}
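// A minimal usage sketch, assuming a FeatureSource `source` and an (illustrative) tile at zoom level 15:
//
//   const indexed = new GeoIndexedStore(source)
//   const inTile = indexed.GetFeaturesWithin(BBox.fromTile(15, 16744, 10940))
//
// Any BBox works; the tile numbers above are purely illustrative.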

View file

@ -0,0 +1,83 @@
import { FeatureSource } from "../FeatureSource"
import { Feature } from "geojson"
import TileLocalStorage from "./TileLocalStorage"
import { GeoOperations } from "../../GeoOperations"
import FeaturePropertiesStore from "./FeaturePropertiesStore"
import { UIEventSource } from "../../UIEventSource"
import { Utils } from "../../../Utils"
class SingleTileSaver {
private readonly _storage: UIEventSource<Feature[]>
private readonly _registeredIds = new Set<string>()
private readonly _featureProperties: FeaturePropertiesStore
private readonly _isDirty = new UIEventSource(false)
constructor(
storage: UIEventSource<Feature[]> & { flush: () => void },
featureProperties: FeaturePropertiesStore
) {
this._storage = storage
this._featureProperties = featureProperties
this._isDirty.stabilized(1000).addCallbackD((isDirty) => {
if (!isDirty) {
return
}
// 'isDirty' is set when tags of some object have changed
storage.flush()
this._isDirty.setData(false)
})
}
public saveFeatures(features: Feature[]) {
if (Utils.sameList(features, this._storage.data)) {
return
}
for (const feature of features) {
const id = feature.properties.id
if (this._registeredIds.has(id)) {
continue
}
this._featureProperties.getStore(id)?.addCallbackAndRunD(() => {
this._isDirty.setData(true)
})
this._registeredIds.add(id)
}
this._storage.setData(features)
}
}
/***
* Saves all the features that are passed in to localstorage, so they can be retrieved on the next run
*
* The data is saved in a tiled way on a fixed zoomlevel and is retrievable per layer.
*
* Also see the sibling class
*/
export default class SaveFeatureSourceToLocalStorage {
constructor(
backend: string,
layername: string,
zoomlevel: number,
features: FeatureSource,
featureProperties: FeaturePropertiesStore,
maxCacheAge: number
) {
const storage = TileLocalStorage.construct<Feature[]>(backend, layername, maxCacheAge)
const singleTileSavers: Map<number, SingleTileSaver> = new Map<number, SingleTileSaver>()
features.features.addCallbackAndRunD((features) => {
const sliced = GeoOperations.slice(zoomlevel, features)
sliced.forEach((features, tileIndex) => {
let tileSaver = singleTileSavers.get(tileIndex)
if (tileSaver === undefined) {
const src = storage.getTileSource(tileIndex)
tileSaver = new SingleTileSaver(src, featureProperties)
singleTileSavers.set(tileIndex, tileSaver)
}
                // Don't cache not-yet-uploaded features - they'll be cached once they receive their id
features = features.filter((f) => !f.properties.id.match(/(node|way)\/-[0-9]+/))
tileSaver.saveFeatures(features)
})
})
}
}
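// A minimal wiring sketch, assuming a FeatureSource `benches`, a FeaturePropertiesStore `featureProperties`
// and a cache age of one day (the maxCacheAge is passed through to TileLocalStorage in seconds):
//
//   new SaveFeatureSourceToLocalStorage(
//       "https://www.openstreetmap.org",
//       "bench",
//       15,
//       benches,
//       featureProperties,
//       24 * 60 * 60
//   )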

View file

@ -0,0 +1,108 @@
import { IdbLocalStorage } from "../../Web/IdbLocalStorage"
import { UIEventSource } from "../../UIEventSource"
/**
* A class which allows to read/write a tile to local storage.
*
* Does the heavy lifting for LocalStorageFeatureSource and SaveFeatureToLocalStorage.
*
* Note: OSM-features with a negative id are ignored
*/
export default class TileLocalStorage<T> {
private static perLayer: Record<string, TileLocalStorage<any>> = {}
private static readonly useIndexedDb = typeof indexedDB !== "undefined"
private readonly _layername: string
private readonly inUse = new UIEventSource(false)
private readonly cachedSources: Record<number, UIEventSource<T> & { flush: () => void }> = {}
private readonly _maxAgeSeconds: number
private constructor(layername: string, maxAgeSeconds: number) {
this._layername = layername
this._maxAgeSeconds = maxAgeSeconds
}
public static construct<T>(
backend: string,
layername: string,
maxAgeS: number
): TileLocalStorage<T> {
const key = backend + "_" + layername
const cached = TileLocalStorage.perLayer[key]
if (cached) {
return cached
}
const tls = new TileLocalStorage<T>(key, maxAgeS)
TileLocalStorage.perLayer[key] = tls
return tls
}
/**
* Constructs a UIEventSource element which is synced with localStorage.
* Supports 'flush'
*/
public getTileSource(tileIndex: number): UIEventSource<T> & { flush: () => void } {
const cached = this.cachedSources[tileIndex]
if (cached) {
return cached
}
const src = <UIEventSource<T> & { flush: () => void }>(
UIEventSource.FromPromise(this.GetIdb(tileIndex))
)
src.flush = () => this.SetIdb(tileIndex, src.data)
src.addCallbackD((data) => this.SetIdb(tileIndex, data))
this.cachedSources[tileIndex] = src
return src
}
private async SetIdb(tileIndex: number, data: any): Promise<void> {
if (!TileLocalStorage.useIndexedDb) {
return
}
try {
await this.inUse.AsPromise((inUse) => !inUse)
this.inUse.setData(true)
await IdbLocalStorage.SetDirectly(this._layername + "_" + tileIndex, data)
await IdbLocalStorage.SetDirectly(
this._layername + "_" + tileIndex + "_date",
Date.now()
)
this.inUse.setData(false)
} catch (e) {
console.error(
"Could not save tile to indexed-db: ",
e,
"tileIndex is:",
tileIndex,
"for layer",
this._layername,
"data is",
data
)
}
}
private async GetIdb(tileIndex: number): Promise<any> {
if (!TileLocalStorage.useIndexedDb) {
return undefined
}
const date = <any>(
await IdbLocalStorage.GetDirectly(this._layername + "_" + tileIndex + "_date")
)
        const maxAge = this._maxAgeSeconds
        // _maxAgeSeconds is expressed in seconds, whereas the time difference is in milliseconds
        const timeDiff = Date.now() - date
        if (timeDiff >= maxAge * 1000) {
console.debug("Dropping cache for", this._layername, tileIndex, "out of date")
await IdbLocalStorage.SetDirectly(this._layername + "_" + tileIndex, undefined)
return undefined
}
const data = await IdbLocalStorage.GetDirectly(this._layername + "_" + tileIndex)
return <any>data
}
    invalidate(zoomlevel: number, tileIndex: number) {
this.getTileSource(tileIndex).setData(undefined)
}
}
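// A minimal usage sketch, assuming IndexedDB is available in the environment:
//
//   const storage = TileLocalStorage.construct<Feature[]>("https://www.openstreetmap.org", "bench", 24 * 60 * 60)
//   const tile = storage.getTileSource(Tiles.tile_index(15, 16744, 10940))
//   tile.addCallbackAndRunD((features) => console.log("Cached features:", features?.length))
//   tile.flush() // force a write of the current data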

View file

@ -0,0 +1,24 @@
import { Store, UIEventSource } from "../UIEventSource"
import FilteredLayer from "../../Models/FilteredLayer"
import { Feature } from "geojson"
export interface FeatureSource<T extends Feature = Feature> {
features: Store<T[]>
}
export interface WritableFeatureSource<T extends Feature = Feature> extends FeatureSource<T> {
features: UIEventSource<T[]>
}
/**
* A feature source which only contains features for the defined layer
*/
export interface FeatureSourceForLayer<T extends Feature = Feature> extends FeatureSource<T> {
readonly layer: FilteredLayer
}
/**
* A feature source which is aware of the indexes it contains
*/
export interface IndexedFeatureSource extends FeatureSource {
readonly featuresById: Store<Map<string, Feature>>
}
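// A minimal sketch of a conforming implementation, assuming ImmutableStore (from UIEventSource)
// exposes the read-only Store interface as used elsewhere in this codebase:
//
//   class FixedFeatureSource implements FeatureSource {
//       features: Store<Feature[]> = new ImmutableStore<Feature[]>([])
//   }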

View file

@ -0,0 +1,122 @@
import { FeatureSource } from "./FeatureSource"
import FilteredLayer from "../../Models/FilteredLayer"
import SimpleFeatureSource from "./Sources/SimpleFeatureSource"
import { Feature } from "geojson"
import { UIEventSource } from "../UIEventSource"
/**
* In some rare cases, some elements are shown on multiple layers (when 'passthrough' is enabled)
* If this is the case, multiple objects with a different _matching_layer_id are generated.
* In any case, this featureSource marks the objects with _matching_layer_id
*/
export default class PerLayerFeatureSourceSplitter<T extends FeatureSource = FeatureSource> {
public readonly perLayer: ReadonlyMap<string, T>
constructor(
layers: FilteredLayer[],
upstream: FeatureSource,
options?: {
constructStore?: (features: UIEventSource<Feature[]>, layer: FilteredLayer) => T
handleLeftovers?: (featuresWithoutLayer: Feature[]) => void
}
) {
const knownLayers = new Map<string, T>()
/**
* Keeps track of the ids that are included per layer.
* Used to know if the downstream feature source needs to be pinged
*/
let layerIndexes: ReadonlySet<string>[] = layers.map((_) => new Set<string>())
this.perLayer = knownLayers
const layerSources = new Map<string, UIEventSource<Feature[]>>()
const constructStore =
options?.constructStore ?? ((store, layer) => new SimpleFeatureSource(layer, store))
for (const layer of layers) {
const src = new UIEventSource<Feature[]>([])
layerSources.set(layer.layerDef.id, src)
knownLayers.set(layer.layerDef.id, <T>constructStore(src, layer))
}
upstream.features.addCallbackAndRunD((features) => {
if (layers === undefined) {
return
}
// We try to figure out (for each feature) in which feature store it should be saved.
const featuresPerLayer = new Map<string, Feature[]>()
/**
* Indexed on layer-position
* Will be true if a new id pops up
*/
const hasChanged: boolean[] = layers.map((_) => false)
const newIndices: Set<string>[] = layers.map((_) => new Set<string>())
const noLayerFound: Feature[] = []
for (const layer of layers) {
featuresPerLayer.set(layer.layerDef.id, [])
}
for (const f of features) {
let foundALayer = false
for (let i = 0; i < layers.length; i++) {
const layer = layers[i]
if (!layer.layerDef?.source) {
console.error(
"PerLayerFeatureSourceSplitter got a layer without a source:",
layer.layerDef.id
)
continue
}
if (layer.layerDef.source.osmTags.matchesProperties(f.properties)) {
const id = f.properties.id
// We have found our matching layer!
const previousIndex = layerIndexes[i]
hasChanged[i] = hasChanged[i] || !previousIndex.has(id)
newIndices[i].add(id)
featuresPerLayer.get(layer.layerDef.id).push(f)
foundALayer = true
if (!layer.layerDef.passAllFeatures) {
// If not 'passAllFeatures', we are done for this feature
break
}
}
}
if (!foundALayer) {
noLayerFound.push(f)
}
}
// At this point, we have our features per layer as a list
// We assign them to the correct featureSources
for (let i = 0; i < layers.length; i++) {
const layer = layers[i]
const id = layer.layerDef.id
const features = featuresPerLayer.get(id)
if (features === undefined) {
// No such features for this layer
continue
}
if (!hasChanged[i] && layerIndexes[i].size === newIndices[i].size) {
                    // No new id has been added and the sizes are the same (thus nothing has been removed either)
// We can safely assume that no changes were made
continue
}
layerSources.get(id).setData(features)
}
layerIndexes = newIndices
            // At last, the leftovers are handled
if (options?.handleLeftovers !== undefined && noLayerFound.length > 0) {
options.handleLeftovers(noLayerFound)
}
})
}
public forEach(f: (featureSource: T) => void) {
for (const fs of this.perLayer.values()) {
f(fs)
}
}
}
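// A minimal usage sketch, assuming an array of FilteredLayer `filteredLayers` and an upstream
// FeatureSource `allFeatures`:
//
//   const split = new PerLayerFeatureSourceSplitter(filteredLayers, allFeatures, {
//       handleLeftovers: (features) => console.warn(features.length, "features did not match any layer"),
//   })
//   split.forEach((perLayerSource) => console.log(perLayerSource.features.data.length))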

View file

@ -0,0 +1,90 @@
/**
* Applies geometry changes from 'Changes' onto every feature of a featureSource
*/
import { Changes } from "../../Osm/Changes"
import { UIEventSource } from "../../UIEventSource"
import { FeatureSource, IndexedFeatureSource } from "../FeatureSource"
import { ChangeDescription, ChangeDescriptionTools } from "../../Osm/Actions/ChangeDescription"
import { Feature } from "geojson"
import { Utils } from "../../../Utils"
export default class ChangeGeometryApplicator implements FeatureSource {
public readonly features: UIEventSource<Feature[]> = new UIEventSource<Feature[]>([])
private readonly source: IndexedFeatureSource
private readonly changes: Changes
constructor(source: IndexedFeatureSource, changes: Changes) {
this.source = source
this.changes = changes
this.features = new UIEventSource<Feature[]>(undefined)
const self = this
source.features.addCallbackAndRunD((_) => self.update())
changes.allChanges.addCallbackAndRunD((_) => self.update())
}
private update() {
const upstreamFeatures = this.source.features.data
const upstreamIds = this.source.featuresById.data
const changesToApply = this.changes.allChanges.data?.filter(
(ch) =>
// Does upstream have this element? If not, we skip
upstreamIds.has(ch.type + "/" + ch.id) &&
// Are any (geometry) changes defined?
ch.changes !== undefined &&
// Ignore new elements, they are handled by the NewGeometryFromChangesFeatureSource
ch.id > 0
)
if (changesToApply === undefined || changesToApply.length === 0) {
// No changes to apply!
// Pass the original feature and lets continue our day
this.features.setData(upstreamFeatures)
return
}
const changesPerId = new Map<string, ChangeDescription[]>()
for (const ch of changesToApply) {
const key = ch.type + "/" + ch.id
if (changesPerId.has(key)) {
changesPerId.get(key).push(ch)
} else {
changesPerId.set(key, [ch])
}
}
const newFeatures: Feature[] = []
for (const feature of upstreamFeatures) {
const changesForFeature = changesPerId.get(feature.properties.id)
if (changesForFeature === undefined) {
// No changes for this element - simply pass it along to downstream
newFeatures.push(feature)
continue
}
// Allright! We have a feature to rewrite!
const copy = {
...feature,
}
// We only apply the last change as that one'll have the latest geometry
const change = changesForFeature[changesForFeature.length - 1]
copy.geometry = ChangeDescriptionTools.getGeojsonGeometry(change)
if (Utils.SameObject(copy.geometry, feature.geometry)) {
// No actual changes: pass along the original
newFeatures.push(feature)
continue
}
console.log(
"Applying a geometry change onto:",
feature,
"The change is:",
change,
"which becomes:",
copy
)
newFeatures.push(copy)
}
this.features.setData(newFeatures)
}
}
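// A minimal wiring sketch, assuming an IndexedFeatureSource `indexedFeatures` and a Changes object `changes`:
//
//   const withPendingGeometry = new ChangeGeometryApplicator(indexedFeatures, changes)
//   withPendingGeometry.features.addCallbackAndRunD((features) => renderOnMap(features))
//
// `renderOnMap` is a stand-in for whatever consumes the rewritten features downstream.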

View file

@ -0,0 +1,19 @@
import { FeatureSource } from "../FeatureSource"
import { Feature, Polygon } from "geojson"
import StaticFeatureSource from "./StaticFeatureSource"
import { GeoOperations } from "../../GeoOperations"
/**
* Returns a clipped version of the original geojson. Ways which partially intersect the given feature will be split up
*
* Also @see: GeoOperations.spreadIntoBboxes
*/
export default class ClippedFeatureSource extends StaticFeatureSource {
constructor(features: FeatureSource, clipTo: Feature<Polygon>) {
super(
features.features.mapD((features) => {
return [].concat(features.map((feature) => GeoOperations.clipWith(feature, clipTo)))
})
)
}
}
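// A minimal usage sketch, assuming a FeatureSource `roads` and a Feature<Polygon> `studyArea` to clip against:
//
//   const clipped = new ClippedFeatureSource(roads, studyArea)
//   clipped.features.addCallbackAndRunD((fs) => console.log("Clipped features:", fs.length))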

View file

@ -0,0 +1,91 @@
import { Store, UIEventSource } from "../../UIEventSource"
import { FeatureSource, IndexedFeatureSource } from "../FeatureSource"
import { Feature } from "geojson"
import { Utils } from "../../../Utils"
/**
 * Merges features from multiple feature sources into a single, indexed feature source
 */
export default class FeatureSourceMerger implements IndexedFeatureSource {
public features: UIEventSource<Feature[]> = new UIEventSource([])
public readonly featuresById: Store<Map<string, Feature>>
private readonly _featuresById: UIEventSource<Map<string, Feature>>
private readonly _sources: FeatureSource[] = []
/**
* Merges features from different featureSources.
* In case that multiple features have the same id, the latest `_version_number` will be used. Otherwise, we will take the last one
*/
constructor(...sources: FeatureSource[]) {
this._featuresById = new UIEventSource<Map<string, Feature>>(new Map<string, Feature>())
this.featuresById = this._featuresById
const self = this
sources = Utils.NoNull(sources)
for (let source of sources) {
source.features.addCallback(() => {
self.addData(sources.map((s) => s.features.data))
})
}
this.addData(sources.map((s) => s.features.data))
this._sources = sources
}
public addSource(source: FeatureSource) {
if (!source) {
return
}
this._sources.push(source)
source.features.addCallbackAndRun(() => {
this.addData(this._sources.map((s) => s.features.data))
})
}
protected addData(featuress: Feature[][]) {
featuress = Utils.NoNull(featuress)
let somethingChanged = false
const all: Map<string, Feature> = new Map()
const unseen = new Set<string>()
// We seed the dictionary with the previously loaded features
const oldValues = this.features.data ?? []
for (const oldValue of oldValues) {
all.set(oldValue.properties.id, oldValue)
unseen.add(oldValue.properties.id)
}
for (const features of featuress) {
for (const f of features) {
const id = f.properties.id
unseen.delete(id)
if (!all.has(id)) {
// This is a new feature
somethingChanged = true
all.set(id, f)
continue
}
// This value has been seen already, either in a previous run or by a previous datasource
// Let's figure out if something changed
const oldV = all.get(id)
if (oldV == f) {
continue
}
all.set(id, f)
somethingChanged = true
}
}
somethingChanged ||= unseen.size > 0
unseen.forEach((id) => all.delete(id))
if (!somethingChanged) {
// We don't bother triggering an update
return
}
const newList = []
all.forEach((value, key) => {
newList.push(value)
})
this.features.setData(newList)
this._featuresById.setData(all)
}
}
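// A minimal usage sketch, assuming two FeatureSources `fromCache` and `fromOverpass` which may contain
// overlapping ids, plus a third source `anotherSource` attached later:
//
//   const merged = new FeatureSourceMerger(fromCache, fromOverpass)
//   merged.featuresById.addCallbackAndRunD((byId) => console.log("Distinct features:", byId.size))
//   merged.addSource(anotherSource) // sources can also be attached after construction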

View file

@ -0,0 +1,113 @@
import { Store, UIEventSource } from "../../UIEventSource"
import FilteredLayer from "../../../Models/FilteredLayer"
import { FeatureSource } from "../FeatureSource"
import { Feature } from "geojson"
import { GlobalFilter } from "../../../Models/GlobalFilter"
export default class FilteringFeatureSource implements FeatureSource {
public features: UIEventSource<Feature[]> = new UIEventSource([])
private readonly upstream: FeatureSource
private readonly _fetchStore?: (id: string) => Store<Record<string, string>>
private readonly _globalFilters?: Store<GlobalFilter[]>
private readonly _alreadyRegistered = new Set<Store<any>>()
private readonly _is_dirty = new UIEventSource(false)
private readonly _layer: FilteredLayer
private previousFeatureSet: Set<any> = undefined
constructor(
layer: FilteredLayer,
upstream: FeatureSource,
fetchStore?: (id: string) => Store<Record<string, string>>,
globalFilters?: Store<GlobalFilter[]>,
metataggingUpdated?: Store<any>
) {
this.upstream = upstream
this._fetchStore = fetchStore
this._layer = layer
this._globalFilters = globalFilters
const self = this
upstream.features.addCallback(() => {
self.update()
})
layer.appliedFilters.forEach((value) =>
value.addCallback((_) => {
self.update()
})
)
this._is_dirty.stabilized(1000).addCallbackAndRunD((dirty) => {
if (dirty) {
self.update()
}
})
metataggingUpdated?.addCallback((_) => {
self._is_dirty.setData(true)
})
globalFilters?.addCallback((_) => {
self.update()
})
this.update()
}
private update() {
const self = this
const layer = this._layer
const features: Feature[] = this.upstream.features.data ?? []
const includedFeatureIds = new Set<string>()
const globalFilters = self._globalFilters?.data?.map((f) => f)
const newFeatures = (features ?? []).filter((f) => {
self.registerCallback(f.properties.id)
if (!layer.isShown(f.properties, globalFilters)) {
return false
}
includedFeatureIds.add(f.properties.id)
return true
})
const previousSet = this.previousFeatureSet
this._is_dirty.setData(false)
// Is there any difference between the two sets?
if (previousSet !== undefined && previousSet.size === includedFeatureIds.size) {
// The size of the sets is the same - they _might_ be identical
const newItemFound = Array.from(includedFeatureIds).some((id) => !previousSet.has(id))
if (!newItemFound) {
// We know that:
// - The sets have the same size
// - Every item from the new set has been found in the old set
// which means they are identical!
return
}
}
        // Something new has been found (or something was deleted)!
        this.previousFeatureSet = includedFeatureIds
        this.features.setData(newFeatures)
}
private registerCallback(featureId: string) {
if (this._fetchStore === undefined) {
return
}
const src = this._fetchStore(featureId)
if (src == undefined) {
return
}
if (this._alreadyRegistered.has(src)) {
return
}
this._alreadyRegistered.add(src)
const self = this
// Add a callback as a changed tag might change the filter
src.addCallbackAndRunD((_) => {
self._is_dirty.setData(true)
})
}
}
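// A minimal wiring sketch, assuming a FilteredLayer `benchLayer`, its upstream FeatureSource `benchFeatures`
// and a FeaturePropertiesStore `featureProperties` as defined earlier:
//
//   const filtered = new FilteringFeatureSource(
//       benchLayer,
//       benchFeatures,
//       (id) => featureProperties.getStore(id)
//   )
//   filtered.features.addCallbackAndRunD((fs) => console.log("Currently shown:", fs.length))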

View file

@ -0,0 +1,150 @@
/**
* Fetches a geojson file somewhere and passes it along
*/
import { Store, UIEventSource } from "../../UIEventSource"
import { Utils } from "../../../Utils"
import { FeatureSource } from "../FeatureSource"
import { BBox } from "../../BBox"
import { GeoOperations } from "../../GeoOperations"
import { Feature } from "geojson"
import LayerConfig from "../../../Models/ThemeConfig/LayerConfig"
import { Tiles } from "../../../Models/TileRange"
export default class GeoJsonSource implements FeatureSource {
public readonly features: Store<Feature[]>
private readonly seenids: Set<string>
private readonly idKey?: string
public constructor(
layer: LayerConfig,
options?: {
zxy?: number | [number, number, number] | BBox
featureIdBlacklist?: Set<string>
isActive?: Store<boolean>
}
) {
if (layer.source.geojsonZoomLevel !== undefined && options?.zxy === undefined) {
throw "Dynamic layers are not supported. Use 'DynamicGeoJsonTileSource instead"
}
this.idKey = layer.source.idKey
this.seenids = options?.featureIdBlacklist ?? new Set<string>()
let url = layer.source.geojsonSource.replace("{layer}", layer.id)
let zxy = options?.zxy
if (zxy !== undefined) {
let tile_bbox: BBox
if (typeof zxy === "number") {
zxy = Tiles.tile_from_index(zxy)
}
if (zxy instanceof BBox) {
tile_bbox = zxy
} else {
const [z, x, y] = zxy
tile_bbox = BBox.fromTile(z, x, y)
url = url
.replace("{z}", "" + z)
.replace("{x}", "" + x)
.replace("{y}", "" + y)
}
let bounds: Record<"minLat" | "maxLat" | "minLon" | "maxLon", number> = tile_bbox
if (layer.source.mercatorCrs) {
bounds = tile_bbox.toMercator()
}
url = url
.replace("{y_min}", "" + bounds.minLat)
.replace("{y_max}", "" + bounds.maxLat)
.replace("{x_min}", "" + bounds.minLon)
.replace("{x_max}", "" + bounds.maxLon)
}
const eventsource = new UIEventSource<Feature[]>([])
if (options?.isActive !== undefined) {
options.isActive.addCallbackAndRunD(async (active) => {
if (!active) {
return
}
this.LoadJSONFrom(url, eventsource, layer)
.then((fs) => console.debug("Loaded", fs.length, "features from", url))
.catch((err) => console.warn("Could not load ", url, "due to", err))
return true // data is loaded, we can safely unregister
})
} else {
this.LoadJSONFrom(url, eventsource, layer)
.then((fs) => console.debug("Loaded", fs.length, "features from", url))
.catch((err) => console.warn("Could not load ", url, "due to", err))
}
this.features = eventsource
}
/**
* Init the download, write into the specified event source for the given layer.
* Note this method caches the requested geojson for five minutes
*/
private async LoadJSONFrom(
url: string,
eventSource: UIEventSource<Feature[]>,
layer: LayerConfig,
options?: {
maxCacheAgeSec?: number | 300
}
): Promise<Feature[]> {
const self = this
let json = await Utils.downloadJsonCached(url, (options?.maxCacheAgeSec ?? 300) * 1000)
if (json.features === undefined || json.features === null) {
json.features = []
}
if (layer.source.mercatorCrs) {
json = GeoOperations.GeoJsonToWGS84(json)
}
const time = new Date()
const newFeatures: Feature[] = []
let i = 0
let skipped = 0
for (const feature of json.features) {
if (feature.geometry.type === "Point") {
// See https://github.com/maproulette/maproulette-backend/issues/242
feature.geometry.coordinates = feature.geometry.coordinates.map(Number)
}
const props = feature.properties
for (const key in props) {
if (props[key] === null) {
delete props[key]
}
if (typeof props[key] !== "string") {
                    // Make sure all the values are strings; non-string values crash other parts of the code
props[key] = JSON.stringify(props[key])
}
}
if (self.idKey !== undefined) {
props.id = props[self.idKey]
}
if (props.id === undefined) {
props.id = url + "/" + i
feature.id = url + "/" + i
i++
}
if (self.seenids.has(props.id)) {
skipped++
continue
}
self.seenids.add(props.id)
let freshness: Date = time
if (feature.properties["_last_edit:timestamp"] !== undefined) {
freshness = new Date(props["_last_edit:timestamp"])
}
newFeatures.push(feature)
}
eventSource.setData(newFeatures)
return newFeatures
}
}
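// A minimal usage sketch, assuming a LayerConfig `benchLayer` whose source points to a single
// (non-tiled) geojson file:
//
//   const source = new GeoJsonSource(benchLayer)
//   source.features.addCallbackAndRunD((fs) => console.log("Loaded", fs.length, "features"))
//
// For tiled geojson sources (i.e. with `geojsonZoomLevel` set), use DynamicGeoJsonTileSource instead.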

View file

@ -0,0 +1,60 @@
import LayoutConfig from "../../../Models/ThemeConfig/LayoutConfig"
import { WritableFeatureSource } from "../FeatureSource"
import { ImmutableStore, Store, UIEventSource } from "../../UIEventSource"
import { Feature, Point } from "geojson"
import { TagUtils } from "../../Tags/TagUtils"
import BaseUIElement from "../../../UI/BaseUIElement"
import { Utils } from "../../../Utils"
/**
* Highly specialized feature source.
 * Based on a lon/lat UIEventSource, will generate the corresponding feature with the correct properties
*/
export class LastClickFeatureSource implements WritableFeatureSource {
public readonly features: UIEventSource<Feature[]> = new UIEventSource<Feature[]>([])
constructor(location: Store<{ lon: number; lat: number }>, layout: LayoutConfig) {
const allPresets: BaseUIElement[] = []
for (const layer of layout.layers)
for (let i = 0; i < (layer.presets ?? []).length; i++) {
const preset = layer.presets[i]
const tags = new ImmutableStore(TagUtils.KVtoProperties(preset.tags))
const { html } = layer.mapRendering[0].RenderIcon(tags, false, {
noSize: true,
includeBadges: false,
})
allPresets.push(html)
}
const renderings = Utils.Dedup(
allPresets.map((uiElem) =>
Utils.runningFromConsole ? "" : uiElem.ConstructElement().innerHTML
)
)
let i = 0
location.addCallbackAndRunD(({ lon, lat }) => {
const properties = {
lastclick: "yes",
id: "last_click_" + i,
has_note_layer: layout.layers.some((l) => l.id === "note") ? "yes" : "no",
has_presets: layout.layers.some((l) => l.presets?.length > 0) ? "yes" : "no",
renderings: renderings.join(""),
number_of_presets: "" + renderings.length,
first_preset: renderings[0],
}
i++
const point = <Feature<Point>>{
type: "Feature",
properties,
geometry: {
type: "Point",
coordinates: [lon, lat],
},
}
this.features.setData([point])
})
}
}

View file

@ -0,0 +1,171 @@
import GeoJsonSource from "./GeoJsonSource"
import LayerConfig from "../../../Models/ThemeConfig/LayerConfig"
import { FeatureSource } from "../FeatureSource"
import { Or } from "../../Tags/Or"
import FeatureSwitchState from "../../State/FeatureSwitchState"
import OverpassFeatureSource from "./OverpassFeatureSource"
import { Store, UIEventSource } from "../../UIEventSource"
import OsmFeatureSource from "./OsmFeatureSource"
import FeatureSourceMerger from "./FeatureSourceMerger"
import DynamicGeoJsonTileSource from "../TiledFeatureSource/DynamicGeoJsonTileSource"
import { BBox } from "../../BBox"
import LocalStorageFeatureSource from "../TiledFeatureSource/LocalStorageFeatureSource"
import FullNodeDatabaseSource from "../TiledFeatureSource/FullNodeDatabaseSource"
/**
* This source will fetch the needed data from various sources for the given layout.
*
 * Note that special layers (with `source=null`) will be ignored
*/
export default class LayoutSource extends FeatureSourceMerger {
private readonly _isLoading: UIEventSource<boolean> = new UIEventSource<boolean>(false)
/**
* Indicates if a data source is loading something
*/
public readonly isLoading: Store<boolean> = this._isLoading
constructor(
layers: LayerConfig[],
featureSwitches: FeatureSwitchState,
mapProperties: { bounds: Store<BBox>; zoom: Store<number> },
backend: string,
isDisplayed: (id: string) => Store<boolean>,
fullNodeDatabaseSource?: FullNodeDatabaseSource
) {
const { bounds, zoom } = mapProperties
// remove all 'special' layers
layers = layers.filter((layer) => layer.source !== null && layer.source !== undefined)
const geojsonlayers = layers.filter((layer) => layer.source.geojsonSource !== undefined)
const osmLayers = layers.filter((layer) => layer.source.geojsonSource === undefined)
const fromCache = osmLayers.map(
(l) =>
new LocalStorageFeatureSource(backend, l.id, 15, mapProperties, {
isActive: isDisplayed(l.id),
maxAge: l.maxAgeOfCache,
})
)
const overpassSource = LayoutSource.setupOverpass(
backend,
osmLayers,
bounds,
zoom,
featureSwitches
)
const osmApiSource = LayoutSource.setupOsmApiSource(
osmLayers,
bounds,
zoom,
backend,
featureSwitches,
fullNodeDatabaseSource
)
const geojsonSources: FeatureSource[] = geojsonlayers.map((l) =>
LayoutSource.setupGeojsonSource(l, mapProperties, isDisplayed(l.id))
)
super(overpassSource, osmApiSource, ...geojsonSources, ...fromCache)
const self = this
function setIsLoading() {
const loading = overpassSource?.runningQuery?.data || osmApiSource?.isRunning?.data
self._isLoading.setData(loading)
}
overpassSource?.runningQuery?.addCallbackAndRun((_) => setIsLoading())
osmApiSource?.isRunning?.addCallbackAndRun((_) => setIsLoading())
}
private static setupGeojsonSource(
layer: LayerConfig,
mapProperties: { zoom: Store<number>; bounds: Store<BBox> },
isActive?: Store<boolean>
): FeatureSource {
const source = layer.source
isActive = mapProperties.zoom.map(
(z) => (isActive?.data ?? true) && z >= layer.minzoom,
[isActive]
)
if (source.geojsonZoomLevel === undefined) {
// This is a 'load everything at once' geojson layer
return new GeoJsonSource(layer, { isActive })
} else {
return new DynamicGeoJsonTileSource(layer, mapProperties, { isActive })
}
}
private static setupOsmApiSource(
osmLayers: LayerConfig[],
bounds: Store<BBox>,
zoom: Store<number>,
backend: string,
featureSwitches: FeatureSwitchState,
fullNodeDatabase: FullNodeDatabaseSource
): OsmFeatureSource | undefined {
if (osmLayers.length == 0) {
return undefined
}
const minzoom = Math.min(...osmLayers.map((layer) => layer.minzoom))
const isActive = zoom.mapD((z) => {
if (z < minzoom) {
                // We are zoomed out beyond the minzoom of every layer
                console.debug("Disabling OSM-API source: zoom < minzoom")
return false
}
// Overpass should handle this if zoomed out a bit
return z > featureSwitches.overpassMaxZoom.data
})
const allowedFeatures = new Or(osmLayers.map((l) => l.source.osmTags)).optimize()
if (typeof allowedFeatures === "boolean") {
throw "Invalid filter to init OsmFeatureSource: it optimizes away to " + allowedFeatures
}
return new OsmFeatureSource({
allowedFeatures,
bounds,
backend,
isActive,
patchRelations: true,
fullNodeDatabase,
})
}
private static setupOverpass(
backend: string,
osmLayers: LayerConfig[],
bounds: Store<BBox>,
zoom: Store<number>,
featureSwitches: FeatureSwitchState
): OverpassFeatureSource | undefined {
if (osmLayers.length == 0) {
return undefined
}
const minzoom = Math.min(...osmLayers.map((layer) => layer.minzoom))
const isActive = zoom.mapD((z) => {
if (z < minzoom) {
                // We are zoomed out beyond the minzoom of every layer
console.debug("Disabling overpass source: zoom < minzoom")
return false
}
return z <= featureSwitches.overpassMaxZoom.data
})
return new OverpassFeatureSource(
{
zoom,
bounds,
layers: osmLayers,
widenFactor: featureSwitches.layoutToUse.widenFactor,
overpassUrl: featureSwitches.overpassUrl,
overpassTimeout: featureSwitches.overpassTimeout,
overpassMaxZoom: featureSwitches.overpassMaxZoom,
},
{
padToTiles: zoom.map((zoom) => Math.min(15, zoom + 1)),
isActive,
}
)
}
}

View file

@ -0,0 +1,129 @@
import { Changes } from "../../Osm/Changes"
import { OsmNode, OsmRelation, OsmWay } from "../../Osm/OsmObject"
import { IndexedFeatureSource, WritableFeatureSource } from "../FeatureSource"
import { UIEventSource } from "../../UIEventSource"
import { ChangeDescription } from "../../Osm/Actions/ChangeDescription"
import { OsmId, OsmTags } from "../../../Models/OsmFeature"
import { Feature } from "geojson"
import OsmObjectDownloader from "../../Osm/OsmObjectDownloader"
export class NewGeometryFromChangesFeatureSource implements WritableFeatureSource {
// This class name truly puts the 'Java' into 'Javascript'
/**
* A feature source containing exclusively new elements.
*
* These elements are probably created by the 'SimpleAddUi' which generates a new point, but the import functionality might create a line or polygon too.
* Other sources of new points are e.g. imports from nodes
*/
public readonly features: UIEventSource<Feature[]> = new UIEventSource<Feature[]>([])
constructor(changes: Changes, allElementStorage: IndexedFeatureSource, backendUrl: string) {
const seenChanges = new Set<ChangeDescription>()
const features = this.features.data
const self = this
const backend = changes.backend
changes.pendingChanges.addCallbackAndRunD((changes) => {
if (changes.length === 0) {
return
}
let somethingChanged = false
function add(feature) {
feature.id = feature.properties.id
features.push(feature)
somethingChanged = true
}
for (const change of changes) {
if (seenChanges.has(change)) {
// Already handled
continue
}
seenChanges.add(change)
if (change.tags === undefined) {
// If tags is undefined, this is probably a new point that is part of a split road
continue
}
console.log("Handling pending change")
if (change.id > 0) {
// This is an already existing object
// In _most_ of the cases, this means that this _isn't_ a new object
// However, when a point is snapped to an already existing point, we have to create a representation for this point!
// For this, we introspect the change
if (allElementStorage.featuresById.data.has(change.type + "/" + change.id)) {
// The current point already exists, we don't have to do anything here
continue
}
console.debug("Detected a reused point")
// The 'allElementsStore' does _not_ have this point yet, so we have to create it
new OsmObjectDownloader(backend)
.DownloadObjectAsync(change.type + "/" + change.id)
.then((feat) => {
console.log("Got the reused point:", feat)
if (feat === "deleted") {
throw "Panic: snapping to a point, but this point has been deleted in the meantime"
}
for (const kv of change.tags) {
feat.tags[kv.k] = kv.v
}
const geojson = feat.asGeoJson()
self.features.data.push(geojson)
self.features.ping()
})
continue
} else if (change.changes === undefined) {
                    // The geometry is not described - not a new point or geometry change, but probably a tag change on a newly created point
// Not something that should be handled here
continue
}
try {
const tags: OsmTags & { id: OsmId & string } = {
id: <OsmId & string>(change.type + "/" + change.id),
}
for (const kv of change.tags) {
tags[kv.k] = kv.v
}
tags["_backend"] = backendUrl
switch (change.type) {
case "node":
const n = new OsmNode(change.id)
n.tags = tags
n.lat = change.changes["lat"]
n.lon = change.changes["lon"]
const geojson = n.asGeoJson()
add(geojson)
break
case "way":
const w = new OsmWay(change.id)
w.tags = tags
w.nodes = change.changes["nodes"]
w.coordinates = change.changes["coordinates"].map(([lon, lat]) => [
lat,
lon,
])
add(w.asGeoJson())
break
case "relation":
const r = new OsmRelation(change.id)
r.tags = tags
r.members = change.changes["members"]
add(r.asGeoJson())
break
}
} catch (e) {
console.error("Could not generate a new geometry to render on screen for:", e)
}
}
if (somethingChanged) {
self.features.ping()
}
})
}
}

View file

@ -0,0 +1,212 @@
import { Utils } from "../../../Utils"
import OsmToGeoJson from "osmtogeojson"
import { ImmutableStore, Store, UIEventSource } from "../../UIEventSource"
import { Tiles } from "../../../Models/TileRange"
import { BBox } from "../../BBox"
import { TagsFilter } from "../../Tags/TagsFilter"
import { Feature } from "geojson"
import FeatureSourceMerger from "../Sources/FeatureSourceMerger"
import OsmObjectDownloader from "../../Osm/OsmObjectDownloader"
import FullNodeDatabaseSource from "../TiledFeatureSource/FullNodeDatabaseSource"
/**
* If a tile is needed (requested via the UIEventSource in the constructor), will download the appropriate tile and pass it via 'handleTile'
*/
export default class OsmFeatureSource extends FeatureSourceMerger {
private readonly _bounds: Store<BBox>
private readonly isActive: Store<boolean>
private readonly _backend: string
private readonly allowedTags: TagsFilter
private options: {
bounds: Store<BBox>
readonly allowedFeatures: TagsFilter
backend?: "https://openstreetmap.org/" | string
/**
* If given: this featureSwitch will not update if the store contains 'false'
*/
isActive?: Store<boolean>
patchRelations?: true | boolean
fullNodeDatabase?: FullNodeDatabaseSource
}
public readonly isRunning: UIEventSource<boolean> = new UIEventSource<boolean>(false)
private readonly _downloadedTiles: Set<number> = new Set<number>()
private readonly _downloadedData: Feature[][] = []
private readonly _patchRelations: boolean
/**
* Downloads data directly from the OSM-api within the given bounds.
* All features which match the TagsFilter 'allowedFeatures' are kept and converted into geojson
*/
constructor(options: {
bounds: Store<BBox>
readonly allowedFeatures: TagsFilter
backend?: "https://openstreetmap.org/" | string
/**
* If given: this featureSwitch will not update if the store contains 'false'
*/
isActive?: Store<boolean>
patchRelations?: true | boolean
fullNodeDatabase?: FullNodeDatabaseSource
}) {
super()
this.options = options
this._bounds = options.bounds
this.allowedTags = options.allowedFeatures
this.isActive = options.isActive ?? new ImmutableStore(true)
this._backend = options.backend ?? "https://www.openstreetmap.org"
this._bounds.addCallbackAndRunD((bbox) => this.loadData(bbox))
this._patchRelations = options?.patchRelations ?? true
}
private async loadData(bbox: BBox) {
if (this.isActive?.data === false) {
console.log("OsmFeatureSource: not triggering: inactive")
return
}
const z = 15
const neededTiles = Tiles.tileRangeFrom(bbox, z)
if (neededTiles.total == 0) {
return
}
this.isRunning.setData(true)
try {
const tileNumbers = Tiles.MapRange(neededTiles, (x, y) => {
return Tiles.tile_index(z, x, y)
})
await Promise.all(tileNumbers.map((i) => this.LoadTile(...Tiles.tile_from_index(i))))
} catch (e) {
console.error(e)
} finally {
this.isRunning.setData(false)
}
}
private registerFeatures(features: Feature[]): void {
this._downloadedData.push(features)
super.addData(this._downloadedData)
}
/**
* The requested tile might only contain part of the relation.
*
* This method will download the full relation and return it as geojson if it was incomplete.
* If the feature is already complete (or is not a relation), the feature will be returned as is
*/
private async patchIncompleteRelations(
feature: { properties: { id: string } },
originalJson: { elements: { type: "node" | "way" | "relation"; id: number }[] }
): Promise<any> {
if (!feature.properties.id.startsWith("relation") || !this._patchRelations) {
return feature
}
const relationSpec = originalJson.elements.find(
(f) => "relation/" + f.id === feature.properties.id
)
const members: { type: string; ref: number }[] = relationSpec["members"]
for (const member of members) {
const isFound = originalJson.elements.some(
(f) => f.id === member.ref && f.type === member.type
)
if (isFound) {
continue
}
// This member is missing. We redownload the entire relation instead
console.debug("Fetching incomplete relation " + feature.properties.id)
const dfeature = await new OsmObjectDownloader(this._backend).DownloadObjectAsync(
feature.properties.id
)
if (dfeature === "deleted") {
console.warn(
"This relation has been deleted in the meantime: ",
feature.properties.id
)
return
}
return dfeature.asGeoJson()
}
return feature
}
private async LoadTile(z: number, x: number, y: number): Promise<void> {
console.log("OsmFeatureSource: loading ", z, x, y, "from", this._backend)
if (z >= 22) {
throw "This is an absurd high zoom level"
}
if (z < 15) {
throw `Zoom ${z} is too much for OSM to handle! Use a higher zoom level!`
}
const index = Tiles.tile_index(z, x, y)
if (this._downloadedTiles.has(index)) {
return
}
this._downloadedTiles.add(index)
const bbox = BBox.fromTile(z, x, y)
const url = `${this._backend}/api/0.6/map?bbox=${bbox.minLon},${bbox.minLat},${bbox.maxLon},${bbox.maxLat}`
let error = undefined
try {
const osmJson = await Utils.downloadJsonCached(url, 2000)
try {
this.options?.fullNodeDatabase?.handleOsmJson(osmJson, z, x, y)
let features = <Feature<any, { id: string }>[]>OsmToGeoJson(
osmJson,
// @ts-ignore
{
flatProperties: true,
}
).features
// The geojson contains _all_ features at the given location
// We only keep what is needed
features = features.filter((feature) =>
this.allowedTags.matchesProperties(feature.properties)
)
for (let i = 0; i < features.length; i++) {
features[i] = await this.patchIncompleteRelations(features[i], osmJson)
}
features = Utils.NoNull(features)
features.forEach((f) => {
f.properties["_backend"] = this._backend
})
this.registerFeatures(features)
} catch (e) {
console.error(
"PANIC: got the tile from the OSM-api, but something crashed handling this tile"
)
error = e
}
} catch (e) {
console.error(
"Could not download tile",
z,
x,
y,
"due to",
e,
e === "rate limited" ? "; stopping now" : "; retrying with smaller bounds"
)
if (e === "rate limited") {
return
}
await Promise.all([
this.LoadTile(z + 1, x * 2, y * 2),
this.LoadTile(z + 1, 1 + x * 2, y * 2),
this.LoadTile(z + 1, x * 2, 1 + y * 2),
this.LoadTile(z + 1, 1 + x * 2, 1 + y * 2),
])
}
if (error !== undefined) {
throw error
}
}
}

View file

@ -0,0 +1,218 @@
import { Feature } from "geojson"
import { FeatureSource } from "../FeatureSource"
import { ImmutableStore, Store, UIEventSource } from "../../UIEventSource"
import LayerConfig from "../../../Models/ThemeConfig/LayerConfig"
import { Or } from "../../Tags/Or"
import { Overpass } from "../../Osm/Overpass"
import { Utils } from "../../../Utils"
import { TagsFilter } from "../../Tags/TagsFilter"
import { BBox } from "../../BBox"
/**
* A wrapper around the 'Overpass'-object.
* It has more logic and will automatically fetch the data for the right bbox and the active layers
*/
export default class OverpassFeatureSource implements FeatureSource {
/**
* The last loaded features, as geojson
*/
public readonly features: UIEventSource<Feature[]> = new UIEventSource(undefined)
public readonly runningQuery: UIEventSource<boolean> = new UIEventSource<boolean>(false)
public readonly timeout: UIEventSource<number> = new UIEventSource<number>(0)
private readonly retries: UIEventSource<number> = new UIEventSource<number>(0)
private readonly state: {
readonly zoom: Store<number>
readonly layers: LayerConfig[]
readonly widenFactor: number
readonly overpassUrl: Store<string[]>
readonly overpassTimeout: Store<number>
readonly bounds: Store<BBox>
}
private readonly _isActive: Store<boolean>
private readonly padToZoomLevel?: Store<number>
private _lastQueryBBox: BBox
constructor(
state: {
readonly layers: LayerConfig[]
readonly widenFactor: number
readonly zoom: Store<number>
readonly overpassUrl: Store<string[]>
readonly overpassTimeout: Store<number>
readonly overpassMaxZoom: Store<number>
readonly bounds: Store<BBox>
},
options?: {
padToTiles?: Store<number>
isActive?: Store<boolean>
}
) {
this.state = state
this._isActive = options?.isActive ?? new ImmutableStore(true)
this.padToZoomLevel = options?.padToTiles
const self = this
state.bounds.addCallbackD((_) => {
self.updateAsyncIfNeeded()
})
}
/**
* Creates the 'Overpass'-object for the given layers
* @param interpreterUrl
* @param layersToDownload
* @constructor
* @private
*/
private GetFilter(interpreterUrl: string, layersToDownload: LayerConfig[]): Overpass {
let filters: TagsFilter[] = layersToDownload.map((layer) => layer.source.osmTags)
filters = Utils.NoNull(filters)
if (filters.length === 0) {
return undefined
}
return new Overpass(new Or(filters), [], interpreterUrl, this.state.overpassTimeout)
}
/**
*
* @private
*/
private async updateAsyncIfNeeded(): Promise<void> {
if (!this._isActive?.data) {
console.log("OverpassFeatureSource: not triggering as not active")
return
}
if (this.runningQuery.data) {
console.log("Still running a query, not updating")
return undefined
}
if (this.timeout.data > 0) {
console.log("Still in timeout - not updating")
return undefined
}
const requestedBounds = this.state.bounds.data
if (
this._lastQueryBBox !== undefined &&
requestedBounds.isContainedIn(this._lastQueryBBox)
) {
return undefined
}
const result = await this.updateAsync()
if (!result) {
return
}
const [bounds, date, updatedLayers] = result
this._lastQueryBBox = bounds
}
/**
* Download the relevant data from overpass. Attempt to use a different server; only downloads the relevant layers
* @private
*/
private async updateAsync(): Promise<[BBox, Date, LayerConfig[]]> {
let data: any = undefined
let date: Date = undefined
let lastUsed = 0
const layersToDownload = []
for (const layer of this.state.layers) {
if (typeof layer === "string") {
throw "A layer was not expanded!"
}
if (layer.source === undefined) {
continue
}
if (this.state.zoom.data < layer.minzoom) {
continue
}
if (layer.doNotDownload) {
continue
}
if (layer.source === null) {
// This is a special layer. Should not have been here
console.warn(
"OverpassFeatureSource received a layer for which the source is null:",
layer.id
)
continue
}
if (layer.source.geojsonSource !== undefined) {
// Not our responsibility to download this layer!
continue
}
layersToDownload.push(layer)
}
if (layersToDownload.length == 0) {
return
}
const self = this
const overpassUrls = self.state.overpassUrl.data
if (overpassUrls === undefined || overpassUrls.length === 0) {
throw "Panic: overpassFeatureSource didn't receive any overpassUrls"
}
        // Note: the bounds are updated between attempts, in case the user zoomed around
let bounds: BBox
do {
try {
bounds = this.state.bounds.data
?.pad(this.state.widenFactor)
?.expandToTileBounds(this.padToZoomLevel?.data)
if (bounds === undefined) {
return undefined
}
const overpass = this.GetFilter(overpassUrls[lastUsed], layersToDownload)
if (overpass === undefined) {
return undefined
}
this.runningQuery.setData(true)
;[data, date] = await overpass.queryGeoJson(bounds)
} catch (e) {
self.retries.data++
self.retries.ping()
console.error(`QUERY FAILED due to`, e)
await Utils.waitFor(1000)
if (lastUsed + 1 < overpassUrls.length) {
lastUsed++
console.log("Trying next time with", overpassUrls[lastUsed])
} else {
lastUsed = 0
self.timeout.setData(self.retries.data * 5)
while (self.timeout.data > 0) {
await Utils.waitFor(1000)
self.timeout.data--
self.timeout.ping()
}
}
}
} while (data === undefined && this._isActive.data)
try {
if (data === undefined) {
return undefined
}
// Some metatags are delivered by overpass _without_ underscore-prefix; we fix them below
// TODO FIXME re-enable this data.features.forEach((f) => SimpleMetaTaggers.objectMetaInfo.applyMetaTagsOnFeature(f))
console.log("Overpass returned", data.features.length, "features")
self.features.setData(data.features)
return [bounds, date, layersToDownload]
} catch (e) {
console.error("Got the overpass response, but could not process it: ", e, e.stack)
return undefined
} finally {
self.retries.setData(0)
self.runningQuery.setData(false)
}
}
}

View file

@ -0,0 +1,14 @@
import { UIEventSource } from "../../UIEventSource"
import FilteredLayer from "../../../Models/FilteredLayer"
import { FeatureSourceForLayer } from "../FeatureSource"
import { Feature } from "geojson"
export default class SimpleFeatureSource implements FeatureSourceForLayer {
public readonly features: UIEventSource<Feature[]>
public readonly layer: FilteredLayer
constructor(layer: FilteredLayer, featureSource?: UIEventSource<Feature[]>) {
this.layer = layer
this.features = featureSource ?? new UIEventSource<Feature[]>([])
}
}

View file

@ -0,0 +1,105 @@
import { FeatureSource } from "../FeatureSource"
import { Store, UIEventSource } from "../../UIEventSource"
import { Feature, Point } from "geojson"
import { GeoOperations } from "../../GeoOperations"
import { BBox } from "../../BBox"
export interface SnappingOptions {
/**
* If the distance is bigger then this amount, don't snap.
* In meter
*/
maxDistance: number
allowUnsnapped?: false | boolean
/**
* The snapped-to way will be written into this
*/
snappedTo?: UIEventSource<string>
/**
* The resulting snap coordinates will be written into this UIEventSource
*/
snapLocation?: UIEventSource<{ lon: number; lat: number }>
    /**
     * If the projected point is within `reusePointWithin` meters of an already existing point,
     * that existing point is reused instead of creating a new one
     */
reusePointWithin?: number
}
export default class SnappingFeatureSource implements FeatureSource {
public readonly features: Store<Feature<Point>[]>
/*Contains the id of the way it snapped to*/
public readonly snappedTo: Store<string>
private readonly _snappedTo: UIEventSource<string>
constructor(
snapTo: FeatureSource,
location: Store<{ lon: number; lat: number }>,
options: SnappingOptions
) {
const maxDistance = options?.maxDistance
this._snappedTo = options.snappedTo ?? new UIEventSource<string>(undefined)
this.snappedTo = this._snappedTo
const simplifiedFeatures = snapTo.features
.mapD((features) =>
features
.filter((feature) => feature.geometry.type !== "Point")
.map((f) => GeoOperations.forceLineString(<any>f))
)
.map(
(features) => {
const { lon, lat } = location.data
const loc: [number, number] = [lon, lat]
return features.filter((f) => BBox.get(f).isNearby(loc, maxDistance))
},
[location]
)
this.features = location.mapD(
({ lon, lat }) => {
const features = simplifiedFeatures.data
const loc: [number, number] = [lon, lat]
const maxDistance = (options?.maxDistance ?? 1000) / 1000
let bestSnap: Feature<Point, { "snapped-to": string; dist: number }> = undefined
for (const feature of features) {
if (feature.geometry.type !== "LineString") {
// TODO handle Polygons with holes
continue
}
const snapped = GeoOperations.nearestPoint(<any>feature, loc)
if (snapped.properties.dist > maxDistance) {
continue
}
if (
bestSnap === undefined ||
bestSnap.properties.dist > snapped.properties.dist
) {
snapped.properties["snapped-to"] = feature.properties.id
bestSnap = <any>snapped
}
}
this._snappedTo.setData(bestSnap?.properties?.["snapped-to"])
if (bestSnap === undefined && options?.allowUnsnapped) {
bestSnap = {
type: "Feature",
geometry: {
type: "Point",
coordinates: [lon, lat],
},
properties: {
"snapped-to": undefined,
dist: -1,
},
}
}
const c = bestSnap.geometry.coordinates
options?.snapLocation?.setData({ lon: c[0], lat: c[1] })
return [bestSnap]
},
[snapTo.features]
)
}
}
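A minimal usage sketch of the snapping source above; `waysToSnapTo` (a FeatureSource with candidate ways) and `currentLocation` (a store with the pointer location) are assumed to exist elsewhere:
// Snap the current location onto nearby ways, giving up beyond 25 meters
const snappedLocation = new UIEventSource<{ lon: number; lat: number }>(undefined)
const snapped = new SnappingFeatureSource(waysToSnapTo, currentLocation, {
    maxDistance: 25, // meters; beyond this, no snapping happens
    allowUnsnapped: true, // fall back to the raw location if nothing is close enough
    snapLocation: snappedLocation,
})
snapped.snappedTo.addCallbackAndRunD((wayId) => console.log("Snapped onto", wayId))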

View file

@ -0,0 +1,32 @@
import { FeatureSource } from "../FeatureSource"
import { ImmutableStore, Store } from "../../UIEventSource"
import { Feature } from "geojson"
/**
* A simple, read only feature store.
*/
export default class StaticFeatureSource<T extends Feature = Feature> implements FeatureSource<T> {
public readonly features: Store<T[]>
constructor(features: Store<T[]> | T[] | { features: T[] } | { features: Store<T[]> }) {
if (features === undefined) {
throw "Static feature source received undefined as source"
}
let feats: T[] | Store<T[]>
if (features["features"]) {
feats = features["features"]
} else {
feats = <T[] | Store<T[]>>features
}
if (Array.isArray(feats)) {
this.features = new ImmutableStore(feats)
} else {
this.features = feats
}
}
public static fromGeojson<T extends Feature>(geojson: T[]): StaticFeatureSource<T> {
return new StaticFeatureSource(geojson)
}
}
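For illustration, a static source wrapping a single hand-written point feature (the id and coordinates are made up):
const feat: Feature = {
    type: "Feature",
    properties: { id: "node/1" },
    geometry: { type: "Point", coordinates: [3.22, 51.21] },
}
const source = new StaticFeatureSource([feat])
source.features.addCallbackAndRunD((features) => console.log("Got", features.length, "feature(s)"))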

View file

@ -0,0 +1,43 @@
import { FeatureSource, FeatureSourceForLayer } from "../FeatureSource"
import StaticFeatureSource from "./StaticFeatureSource"
import { BBox } from "../../BBox"
import FilteredLayer from "../../../Models/FilteredLayer"
import { Store } from "../../UIEventSource"
import { Feature } from "geojson"
/**
* Results in a feature source which has all the elements that touch the given features
*/
export default class BBoxFeatureSource<T extends Feature = Feature> extends StaticFeatureSource<T> {
constructor(features: FeatureSource<T>, mustTouch: Store<BBox>) {
super(
features.features.mapD(
(features) => {
if (mustTouch.data === undefined) {
return features
}
const box = mustTouch.data
return features.filter((feature) => {
if (feature.geometry.type === "Point") {
return box.contains(<[number, number]>feature.geometry.coordinates)
}
return box.overlapsWith(BBox.get(feature))
})
},
[mustTouch]
)
)
}
}
export class BBoxFeatureSourceForLayer<T extends Feature = Feature>
extends BBoxFeatureSource<T>
implements FeatureSourceForLayer
{
readonly layer: FilteredLayer
constructor(features: FeatureSourceForLayer<T>, mustTouch: Store<BBox>) {
super(features, mustTouch)
this.layer = features.layer
}
}
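A short sketch: restrict an existing feature source to whatever touches the current map view (`allFeatures` and `mapProperties.bounds` are assumed to exist elsewhere):
const visible = new BBoxFeatureSource(allFeatures, mapProperties.bounds)
visible.features.addCallbackAndRunD((features) =>
    console.log(features.length, "features intersect the current view")
)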

View file

@ -0,0 +1,96 @@
import { Store } from "../../UIEventSource"
import DynamicTileSource from "./DynamicTileSource"
import { Utils } from "../../../Utils"
import GeoJsonSource from "../Sources/GeoJsonSource"
import { BBox } from "../../BBox"
import LayerConfig from "../../../Models/ThemeConfig/LayerConfig"
export default class DynamicGeoJsonTileSource extends DynamicTileSource {
private static whitelistCache = new Map<string, any>()
constructor(
layer: LayerConfig,
mapProperties: {
zoom: Store<number>
bounds: Store<BBox>
},
options?: {
isActive?: Store<boolean>
}
) {
const source = layer.source
if (source.geojsonZoomLevel === undefined) {
throw "Invalid layer: geojsonZoomLevel expected"
}
if (source.geojsonSource === undefined) {
throw "Invalid layer: geojsonSource expected"
}
console.log("Creating a dynamic geojson source for", layer.source.geojsonSource)
let whitelist = undefined
if (source.geojsonSource.indexOf("{x}_{y}.geojson") > 0) {
const whitelistUrl = source.geojsonSource
.replace("{z}", "" + source.geojsonZoomLevel)
.replace("{x}_{y}.geojson", "overview.json")
.replace("{layer}", layer.id)
if (DynamicGeoJsonTileSource.whitelistCache.has(whitelistUrl)) {
whitelist = DynamicGeoJsonTileSource.whitelistCache.get(whitelistUrl)
} else {
Utils.downloadJsonCached(whitelistUrl, 1000 * 60 * 60)
.then((json) => {
const data = new Map<number, Set<number>>()
for (const x in json) {
if (x === "zoom") {
continue
}
data.set(Number(x), new Set(json[x]))
}
console.log(
"The whitelist is",
data,
"based on ",
json,
"from",
whitelistUrl
)
whitelist = data
DynamicGeoJsonTileSource.whitelistCache.set(whitelistUrl, whitelist)
})
.catch((err) => {
console.warn("No whitelist found for ", layer.id, err)
})
}
}
const blackList = new Set<string>()
super(
source.geojsonZoomLevel,
(zxy) => {
if (whitelist !== undefined) {
const isWhiteListed = whitelist.get(zxy[1])?.has(zxy[2])
if (!isWhiteListed) {
console.debug(
"Not downloading tile",
zxy,
"for layer",
layer.id,
"as it is not on the whitelist"
)
return undefined
}
}
return new GeoJsonSource(layer, {
zxy,
featureIdBlacklist: blackList,
isActive: options?.isActive,
})
},
mapProperties,
{
isActive: options?.isActive,
}
)
}
}

View file

@ -0,0 +1,63 @@
import { Store, Stores } from "../../UIEventSource"
import { Tiles } from "../../../Models/TileRange"
import { BBox } from "../../BBox"
import { FeatureSource } from "../FeatureSource"
import FeatureSourceMerger from "../Sources/FeatureSourceMerger"
/***
* A tiled source which dynamically loads the required tiles at a fixed zoom level.
* A single featureSource will be initialised for every tile in view, which will later be merged into this featureSource
*/
export default class DynamicTileSource extends FeatureSourceMerger {
constructor(
zoomlevel: number,
constructSource: (tileIndex) => FeatureSource,
mapProperties: {
bounds: Store<BBox>
zoom: Store<number>
},
options?: {
isActive?: Store<boolean>
}
) {
super()
const loadedTiles = new Set<number>()
const neededTiles: Store<number[]> = Stores.ListStabilized(
mapProperties.bounds
.mapD(
(bounds) => {
const tileRange = Tiles.TileRangeBetween(
zoomlevel,
bounds.getNorth(),
bounds.getEast(),
bounds.getSouth(),
bounds.getWest()
)
if (tileRange.total > 10000) {
console.error(
"Got a really big tilerange, bounds and location might be out of sync"
)
return undefined
}
const needed = Tiles.MapRange(tileRange, (x, y) =>
Tiles.tile_index(zoomlevel, x, y)
).filter((i) => !loadedTiles.has(i))
if (needed.length === 0) {
return undefined
}
return needed
},
[options?.isActive, mapProperties.zoom]
)
.stabilized(250)
)
neededTiles.addCallbackAndRunD((neededIndexes) => {
for (const neededIndex of neededIndexes) {
loadedTiles.add(neededIndex)
super.addSource(constructSource(neededIndex))
}
})
}
}
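A minimal sketch of wiring up a dynamic tile source directly; the callback only logs which tile is requested and returns an empty source (`mapBounds` and `mapZoom` are assumed to be existing stores):
const tiled = new DynamicTileSource(
    14,
    (tile) => {
        console.log("Constructing a source for tile", tile)
        return new StaticFeatureSource([])
    },
    { bounds: mapBounds, zoom: mapZoom }
)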

View file

@ -0,0 +1,95 @@
import { OsmNode, OsmObject, OsmWay } from "../../Osm/OsmObject"
import { UIEventSource } from "../../UIEventSource"
import { BBox } from "../../BBox"
import StaticFeatureSource from "../Sources/StaticFeatureSource"
import { Tiles } from "../../../Models/TileRange"
export default class FullNodeDatabaseSource {
private readonly loadedTiles = new Map<number, Map<number, OsmNode>>()
private readonly nodeByIds = new Map<number, OsmNode>()
private readonly parentWays = new Map<number, UIEventSource<OsmWay[]>>()
private smallestZoom = 99
private largestZoom = 0
public handleOsmJson(osmJson: any, z: number, x: number, y: number): void {
const allObjects = OsmObject.ParseObjects(osmJson.elements)
const nodesById = new Map<number, OsmNode>()
this.smallestZoom = Math.min(this.smallestZoom, z)
this.largestZoom = Math.max(this.largestZoom, z)
for (const osmObj of allObjects) {
if (osmObj.type !== "node") {
continue
}
const osmNode = <OsmNode>osmObj
nodesById.set(osmNode.id, osmNode)
this.nodeByIds.set(osmNode.id, osmNode)
}
for (const osmObj of allObjects) {
if (osmObj.type !== "way") {
continue
}
const osmWay = <OsmWay>osmObj
for (const nodeId of osmWay.nodes) {
if (!this.parentWays.has(nodeId)) {
const src = new UIEventSource<OsmWay[]>([])
this.parentWays.set(nodeId, src)
src.addCallback((parentWays) => {
const tgs = nodesById.get(nodeId).tags
tgs["parent_ways"] = JSON.stringify(parentWays.map((w) => w.tags))
tgs["parent_way_ids"] = JSON.stringify(parentWays.map((w) => w.id))
})
}
const src = this.parentWays.get(nodeId)
src.data.push(osmWay)
src.ping()
}
}
const asGeojsonFeatures = Array.from(nodesById.values()).map((osmNode) =>
osmNode.asGeoJson()
)
const featureSource = new StaticFeatureSource(asGeojsonFeatures)
const tileId = Tiles.tile_index(z, x, y)
this.loadedTiles.set(tileId, nodesById)
}
/**
* Returns the OsmNode with the corresponding id (undefined if not found)
* Note that this OsmNode will have the calculated tags 'parent_ways' and 'parent_way_ids', which are respectively stringified lists of the parent way tags and ids
* @param id
* @constructor
*/
public GetNode(id: number): OsmNode {
return this.nodeByIds.get(id)
}
/**
* Gets all the ways that the given node is a part of
* @param nodeId
* @constructor
*/
public GetParentWays(nodeId: number): UIEventSource<OsmWay[]> {
return this.parentWays.get(nodeId)
}
/**
* Gets (at least) all nodes which are part of the given bbox; might also return some nodes that fall outside of the bbox but are close by
* @param bbox
*/
getNodesWithin(bbox: BBox): Map<number, OsmNode> {
const allById = new Map<number, OsmNode>()
for (let z = this.smallestZoom; z < this.largestZoom; z++) {
const range = Tiles.tileRangeFrom(bbox, z)
Tiles.MapRange(range, (x, y) => {
const tileId = Tiles.tile_index(z, x, y)
const nodesById = this.loadedTiles.get(tileId)
nodesById?.forEach((v, k) => allById.set(k, v))
})
}
return allById
}
}
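A usage sketch, assuming `osmJson` holds the raw response of an OSM-API map call for tile 16/33638/21998 (made-up tile numbers) and that node 1234567 is contained in it:
const db = new FullNodeDatabaseSource()
db.handleOsmJson(osmJson, 16, 33638, 21998)
const parentWays = db.GetParentWays(1234567) // hypothetical node id
parentWays?.addCallbackAndRunD((ways) =>
    console.log("Node 1234567 is contained in", ways.length, "ways")
)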

View file

@ -0,0 +1,44 @@
import DynamicTileSource from "./DynamicTileSource"
import { Store } from "../../UIEventSource"
import { BBox } from "../../BBox"
import TileLocalStorage from "../Actors/TileLocalStorage"
import { Feature } from "geojson"
import StaticFeatureSource from "../Sources/StaticFeatureSource"
export default class LocalStorageFeatureSource extends DynamicTileSource {
constructor(
backend: string,
layername: string,
zoomlevel: number,
mapProperties: {
bounds: Store<BBox>
zoom: Store<number>
},
options?: {
isActive?: Store<boolean>
maxAge?: number // In seconds
}
) {
const storage = TileLocalStorage.construct<Feature[]>(
backend,
layername,
options?.maxAge ?? 24 * 60 * 60
)
super(
zoomlevel,
(tileIndex) =>
new StaticFeatureSource(
storage.getTileSource(tileIndex).mapD((features) => {
if (features.length === undefined) {
console.trace("These are not features:", features)
storage.invalidate(zoomlevel, tileIndex)
return []
}
return features.filter((f) => !f.properties.id.match(/(node|way)\/-[0-9]+/))
})
),
mapProperties,
options
)
}
}

1049
src/Logic/GeoOperations.ts Normal file

File diff suppressed because it is too large

View file

@ -0,0 +1,90 @@
import { Mapillary } from "./Mapillary"
import { WikimediaImageProvider } from "./WikimediaImageProvider"
import { Imgur } from "./Imgur"
import GenericImageProvider from "./GenericImageProvider"
import { Store, UIEventSource } from "../UIEventSource"
import ImageProvider, { ProvidedImage } from "./ImageProvider"
import { WikidataImageProvider } from "./WikidataImageProvider"
/**
* A generic 'from the interwebz' image picker, without attribution
*/
export default class AllImageProviders {
public static ImageAttributionSource: ImageProvider[] = [
Imgur.singleton,
Mapillary.singleton,
WikidataImageProvider.singleton,
WikimediaImageProvider.singleton,
new GenericImageProvider(
[].concat(
...Imgur.defaultValuePrefix,
...WikimediaImageProvider.commonsPrefixes,
...Mapillary.valuePrefixes
)
),
]
private static providersByName = {
imgur: Imgur.singleton,
mapillary: Mapillary.singleton,
wikidata: WikidataImageProvider.singleton,
wikimedia: WikimediaImageProvider.singleton,
}
public static byName(name: string) {
return AllImageProviders.providersByName[name.toLowerCase()]
}
public static defaultKeys = [].concat(
AllImageProviders.ImageAttributionSource.map((provider) => provider.defaultKeyPrefixes)
)
private static _cache: Map<string, UIEventSource<ProvidedImage[]>> = new Map<
string,
UIEventSource<ProvidedImage[]>
>()
public static LoadImagesFor(
tags: Store<Record<string, string>>,
tagKey?: string[]
): Store<ProvidedImage[]> {
if (tags.data.id === undefined) {
return undefined
}
const cacheKey = tags.data.id + tagKey
const cached = this._cache.get(cacheKey)
if (cached !== undefined) {
return cached
}
const source = new UIEventSource([])
this._cache.set(cacheKey, source)
const allSources = []
for (const imageProvider of AllImageProviders.ImageAttributionSource) {
let prefixes = imageProvider.defaultKeyPrefixes
if (tagKey !== undefined) {
prefixes = tagKey
}
const singleSource = imageProvider.GetRelevantUrls(tags, {
prefixes: prefixes,
})
allSources.push(singleSource)
singleSource.addCallbackAndRunD((_) => {
const all: ProvidedImage[] = [].concat(...allSources.map((source) => source.data))
const uniq = []
const seen = new Set<string>()
for (const img of all) {
if (seen.has(img.url)) {
continue
}
seen.add(img.url)
uniq.push(img)
}
source.setData(uniq)
})
}
return source
}
}
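A usage sketch; `tagsStore` is assumed to be the live tag store of a selected feature (it must contain an `id`):
const images = AllImageProviders.LoadImagesFor(tagsStore)
images?.addCallbackAndRunD((imgs) =>
    console.log("Found", imgs.length, "image(s):", imgs.map((i) => i.url))
)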

View file

@ -0,0 +1,41 @@
import ImageProvider, { ProvidedImage } from "./ImageProvider"
export default class GenericImageProvider extends ImageProvider {
public defaultKeyPrefixes: string[] = ["image"]
private readonly _valuePrefixBlacklist: string[]
public constructor(valuePrefixBlacklist: string[]) {
super()
this._valuePrefixBlacklist = valuePrefixBlacklist
}
async ExtractUrls(key: string, value: string): Promise<Promise<ProvidedImage>[]> {
if (this._valuePrefixBlacklist.some((prefix) => value.startsWith(prefix))) {
return []
}
try {
new URL(value)
} catch (_) {
// Not a valid URL
return []
}
return [
Promise.resolve({
key: key,
url: value,
provider: this,
}),
]
}
SourceIcon(backlinkSource?: string) {
return undefined
}
public DownloadAttribution(url: string) {
return undefined
}
}

View file

@ -0,0 +1,68 @@
import { Store, UIEventSource } from "../UIEventSource"
import BaseUIElement from "../../UI/BaseUIElement"
import { LicenseInfo } from "./LicenseInfo"
import { Utils } from "../../Utils"
export interface ProvidedImage {
url: string
key: string
provider: ImageProvider
}
export default abstract class ImageProvider {
public abstract readonly defaultKeyPrefixes: string[]
public abstract SourceIcon(backlinkSource?: string): BaseUIElement
/**
* Given a properties object, maps it onto _all_ the available pictures for this imageProvider
*/
public GetRelevantUrls(
allTags: Store<any>,
options?: {
prefixes?: string[]
}
): UIEventSource<ProvidedImage[]> {
const prefixes = options?.prefixes ?? this.defaultKeyPrefixes
if (prefixes === undefined) {
throw "No `defaultKeyPrefixes` defined by this image provider"
}
const relevantUrls = new UIEventSource<
{ url: string; key: string; provider: ImageProvider }[]
>([])
const seenValues = new Set<string>()
allTags.addCallbackAndRunD((tags) => {
for (const key in tags) {
if (!prefixes.some((prefix) => key.startsWith(prefix))) {
continue
}
const values = Utils.NoEmpty(tags[key]?.split(";")?.map((v) => v.trim()) ?? [])
for (const value of values) {
if (seenValues.has(value)) {
continue
}
seenValues.add(value)
this.ExtractUrls(key, value).then((promises) => {
for (const promise of promises ?? []) {
if (promise === undefined) {
continue
}
promise.then((providedImage) => {
if (providedImage === undefined) {
return
}
relevantUrls.data.push(providedImage)
relevantUrls.ping()
})
}
})
}
}
})
return relevantUrls
}
public abstract ExtractUrls(key: string, value: string): Promise<Promise<ProvidedImage>[]>
public abstract DownloadAttribution(url: string): Promise<LicenseInfo>
}
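To illustrate the contract, a hypothetical provider which simply passes the tag value through as a URL; this class is not part of the codebase, purely a sketch:
class PassthroughImageProvider extends ImageProvider {
    public readonly defaultKeyPrefixes = ["image"]
    SourceIcon(): BaseUIElement {
        return undefined
    }
    async ExtractUrls(key: string, value: string): Promise<Promise<ProvidedImage>[]> {
        // Treat the raw tag value as a directly usable image URL
        return [Promise.resolve({ key, url: value, provider: this })]
    }
    async DownloadAttribution(_: string): Promise<LicenseInfo> {
        return new LicenseInfo()
    }
}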

View file

@ -0,0 +1,141 @@
import ImageProvider, { ProvidedImage } from "./ImageProvider"
import BaseUIElement from "../../UI/BaseUIElement"
import { Utils } from "../../Utils"
import Constants from "../../Models/Constants"
import { LicenseInfo } from "./LicenseInfo"
export class Imgur extends ImageProvider {
public static readonly defaultValuePrefix = ["https://i.imgur.com"]
public static readonly singleton = new Imgur()
public readonly defaultKeyPrefixes: string[] = ["image"]
private constructor() {
super()
}
static uploadMultiple(
title: string,
description: string,
blobs: FileList,
handleSuccessfullUpload: (imageURL: string) => Promise<void>,
allDone: () => void,
onFail: (reason: string) => void,
offset: number = 0
) {
if (blobs.length == offset) {
allDone()
return
}
const blob = blobs.item(offset)
const self = this
this.uploadImage(
title,
description,
blob,
async (imageUrl) => {
await handleSuccessfullUpload(imageUrl)
self.uploadMultiple(
title,
description,
blobs,
handleSuccessfullUpload,
allDone,
onFail,
offset + 1
)
},
onFail
)
}
static uploadImage(
title: string,
description: string,
blob: File,
handleSuccessfullUpload: (imageURL: string) => Promise<void>,
onFail: (reason: string) => void
) {
const apiUrl = "https://api.imgur.com/3/image"
const apiKey = Constants.ImgurApiKey
const formData = new FormData()
formData.append("image", blob)
formData.append("title", title)
formData.append("description", description)
const settings: RequestInit = {
method: "POST",
body: formData,
redirect: "follow",
headers: new Headers({
Authorization: `Client-ID ${apiKey}`,
Accept: "application/json",
}),
}
// Response contains stringified JSON
// Image URL available at response.data.link
fetch(apiUrl, settings)
.then(async function (response) {
const content = await response.json()
await handleSuccessfullUpload(content.data.link)
})
.catch((reason) => {
console.log("Uploading to IMGUR failed", reason)
// @ts-ignore
onFail(reason)
})
}
SourceIcon(): BaseUIElement {
return undefined
}
public async ExtractUrls(key: string, value: string): Promise<Promise<ProvidedImage>[]> {
if (Imgur.defaultValuePrefix.some((prefix) => value.startsWith(prefix))) {
return [
Promise.resolve({
url: value,
key: key,
provider: this,
}),
]
}
return []
}
/**
* Download the attribution and license info for the picture at the given URL
*
* const data = {"data":{"id":"I9t6B7B","title":"Station Knokke","description":"author:Pieter Vander Vennet\r\nlicense:CC-BY 4.0\r\nosmid:node\/9812712386","datetime":1655052078,"type":"image\/jpeg","animated":false,"width":2400,"height":1795,"size":910872,"views":2,"bandwidth":1821744,"vote":null,"favorite":false,"nsfw":false,"section":null,"account_url":null,"account_id":null,"is_ad":false,"in_most_viral":false,"has_sound":false,"tags":[],"ad_type":0,"ad_url":"","edited":"0","in_gallery":false,"link":"https:\/\/i.imgur.com\/I9t6B7B.jpg","ad_config":{"safeFlags":["not_in_gallery","share"],"highRiskFlags":[],"unsafeFlags":["sixth_mod_unsafe"],"wallUnsafeFlags":[],"showsAds":false,"showAdLevel":1}},"success":true,"status":200}
* Utils.injectJsonDownloadForTests("https://api.imgur.com/3/image/E0RuAK3", data)
* const licenseInfo = await Imgur.singleton.DownloadAttribution("https://i.imgur.com/E0RuAK3.jpg")
* const expected = new LicenseInfo()
* expected.licenseShortName = "CC-BY 4.0"
* expected.artist = "Pieter Vander Vennet"
* licenseInfo // => expected
*/
public async DownloadAttribution(url: string): Promise<LicenseInfo> {
const hash = url.substr("https://i.imgur.com/".length).split(".jpg")[0]
const apiUrl = "https://api.imgur.com/3/image/" + hash
const response = await Utils.downloadJsonCached(apiUrl, 365 * 24 * 60 * 60, {
Authorization: "Client-ID " + Constants.ImgurApiKey,
})
const descr: string = response.data.description ?? ""
const data: any = {}
for (const tag of descr.split("\n")) {
const kv = tag.split(":")
const k = kv[0]
data[k] = kv[1]?.replace(/\r/g, "")
}
const licenseInfo = new LicenseInfo()
licenseInfo.licenseShortName = data.license
licenseInfo.artist = data.author
return licenseInfo
}
}

View file

@ -0,0 +1,43 @@
import { UIEventSource } from "../UIEventSource"
import { Imgur } from "./Imgur"
export default class ImgurUploader {
public readonly queue: UIEventSource<string[]> = new UIEventSource<string[]>([])
public readonly failed: UIEventSource<string[]> = new UIEventSource<string[]>([])
public readonly success: UIEventSource<string[]> = new UIEventSource<string[]>([])
public maxFileSizeInMegabytes = 10
private readonly _handleSuccessUrl: (string) => Promise<void>
constructor(handleSuccessUrl: (string) => Promise<void>) {
this._handleSuccessUrl = handleSuccessUrl
}
public uploadMany(title: string, description: string, files: FileList): void {
for (let i = 0; i < files.length; i++) {
this.queue.data.push(files.item(i).name)
}
this.queue.ping()
const self = this
this.queue.setData([...self.queue.data])
Imgur.uploadMultiple(
title,
description,
files,
async function (url) {
console.log("File saved at", url)
self.success.data.push(url)
self.success.ping()
await self._handleSuccessUrl(url)
},
function () {
console.log("All uploads completed")
},
function (failReason) {
console.log("Upload failed due to ", failReason)
self.failed.setData([...self.failed.data, failReason])
}
)
}
}
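A usage sketch; `fileList` is assumed to come from a file input, and the success callback would typically add the resulting URL to the feature's tags:
const uploader = new ImgurUploader(async (url) => {
    console.log("Image is available at", url)
})
uploader.success.addCallbackAndRunD((urls) => console.log(urls.length, "uploads succeeded"))
uploader.uploadMany("Image of a bench", "CC0", fileList)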

View file

@ -0,0 +1,12 @@
export class LicenseInfo {
title: string = ""
artist: string = ""
license: string = undefined
licenseShortName: string = ""
usageTerms: string = ""
attributionRequired: boolean = false
copyrighted: boolean = false
credit: string = ""
description: string = ""
informationLocation: URL = undefined
}

View file

@ -0,0 +1,115 @@
import ImageProvider, { ProvidedImage } from "./ImageProvider"
import BaseUIElement from "../../UI/BaseUIElement"
import Svg from "../../Svg"
import { Utils } from "../../Utils"
import { LicenseInfo } from "./LicenseInfo"
import Constants from "../../Models/Constants"
export class Mapillary extends ImageProvider {
public static readonly singleton = new Mapillary()
private static readonly valuePrefix = "https://a.mapillary.com"
public static readonly valuePrefixes = [
Mapillary.valuePrefix,
"http://mapillary.com",
"https://mapillary.com",
"http://www.mapillary.com",
"https://www.mapillary.com",
]
defaultKeyPrefixes = ["mapillary", "image"]
/**
* Indicates whether both URLs refer to the same image
* Ignores the 'stp' parameter
*
* const a = "https://scontent-bru2-1.xx.fbcdn.net/m1/v/t6/An8xm5SGLt20ETziNqzhhBd8b8S5GHLiIu8N6BbyqHFohFAQoaJJPG8i5yQiSwjYmEqXSfVeoCmpiyBJICEkQK98JOB21kkJoBS8VdhYa-Ty93lBnznQyesJBtKcb32foGut2Hgt10hEMWJbE3dDgA?stp=s1024x768&ccb=10-5&oh=00_AT-ZGTXHzihoaQYBILmEiAEKR64z_IWiTlcAYq_D7Ka0-Q&oe=6278C456&_nc_sid=122ab1"
* const b = "https://scontent-bru2-1.xx.fbcdn.net/m1/v/t6/An8xm5SGLt20ETziNqzhhBd8b8S5GHLiIu8N6BbyqHFohFAQoaJJPG8i5yQiSwjYmEqXSfVeoCmpiyBJICEkQK98JOB21kkJoBS8VdhYa-Ty93lBnznQyesJBtKcb32foGut2Hgt10hEMWJbE3dDgA?stp=s256x192&ccb=10-5&oh=00_AT9BZ1Rpc9zbY_uNu92A_4gj1joiy1b6VtgtLIu_7wh9Bg&oe=6278C456&_nc_sid=122ab1"
* Mapillary.sameUrl(a, b) => true
*/
static sameUrl(a: string, b: string): boolean {
if (a === b) {
return true
}
try {
const aUrl = new URL(a)
const bUrl = new URL(b)
if (aUrl.host !== bUrl.host || aUrl.pathname !== bUrl.pathname) {
return false
}
let allSame = true
aUrl.searchParams.forEach((value, key) => {
if (key === "stp") {
// This is the key indicating the image size on mapillary; we ignore it
return
}
if (value !== bUrl.searchParams.get(key)) {
allSame = false
return
}
})
return allSame
} catch (e) {
console.debug("Could not compare ", a, "and", b, "due to", e)
}
return false
}
/**
* Returns the correct key for API v4.0
*/
private static ExtractKeyFromURL(value: string): number {
let key: string
const newApiFormat = value.match(/https?:\/\/www.mapillary.com\/app\/\?pKey=([0-9]*)/)
if (newApiFormat !== null) {
key = newApiFormat[1]
} else if (value.startsWith(Mapillary.valuePrefix)) {
key = value.substring(0, value.lastIndexOf("?")).substring(value.lastIndexOf("/") + 1)
} else if (value.match("[0-9]*")) {
key = value
}
const keyAsNumber = Number(key)
if (!isNaN(keyAsNumber)) {
return keyAsNumber
}
return undefined
}
SourceIcon(backlinkSource?: string): BaseUIElement {
return Svg.mapillary_svg()
}
async ExtractUrls(key: string, value: string): Promise<Promise<ProvidedImage>[]> {
return [this.PrepareUrlAsync(key, value)]
}
public async DownloadAttribution(url: string): Promise<LicenseInfo> {
const license = new LicenseInfo()
license.artist = "Contributor name unavailable"
license.license = "CC BY-SA 4.0"
// license.license = "Creative Commons Attribution-ShareAlike 4.0 International License";
license.attributionRequired = true
return license
}
private async PrepareUrlAsync(key: string, value: string): Promise<ProvidedImage> {
const mapillaryId = Mapillary.ExtractKeyFromURL(value)
if (mapillaryId === undefined) {
return undefined
}
const metadataUrl =
"https://graph.mapillary.com/" +
mapillaryId +
"?fields=thumb_1024_url&&access_token=" +
Constants.mapillary_client_token_v4
const response = await Utils.downloadJsonCached(metadataUrl, 60 * 60)
const url = <string>response["thumb_1024_url"]
return {
url: url,
provider: this,
key: key,
}
}
}

View file

@ -0,0 +1,54 @@
import ImageProvider, { ProvidedImage } from "./ImageProvider"
import BaseUIElement from "../../UI/BaseUIElement"
import Svg from "../../Svg"
import { WikimediaImageProvider } from "./WikimediaImageProvider"
import Wikidata from "../Web/Wikidata"
export class WikidataImageProvider extends ImageProvider {
public static readonly singleton = new WikidataImageProvider()
public readonly defaultKeyPrefixes = ["wikidata"]
private constructor() {
super()
}
public SourceIcon(_?: string): BaseUIElement {
return Svg.wikidata_svg()
}
public async ExtractUrls(key: string, value: string): Promise<Promise<ProvidedImage>[]> {
const entity = await Wikidata.LoadWikidataEntryAsync(value)
if (entity === undefined) {
return []
}
const allImages: Promise<ProvidedImage>[] = []
// P18 is the claim 'depicted in this image'
for (const img of Array.from(entity.claims.get("P18") ?? [])) {
const promises = await WikimediaImageProvider.singleton.ExtractUrls(undefined, img)
allImages.push(...promises)
}
// P373 is 'commons category'
for (let cat of Array.from(entity.claims.get("P373") ?? [])) {
if (!cat.startsWith("Category:")) {
cat = "Category:" + cat
}
const promises = await WikimediaImageProvider.singleton.ExtractUrls(undefined, cat)
allImages.push(...promises)
}
const commons = entity.commons
if (
commons !== undefined &&
(commons.startsWith("Category:") || commons.startsWith("File:"))
) {
const promises = await WikimediaImageProvider.singleton.ExtractUrls(undefined, commons)
allImages.push(...promises)
}
return allImages
}
public DownloadAttribution(_: string): Promise<any> {
throw new Error("Method not implemented; shouldn't be needed!")
}
}

View file

@ -0,0 +1,174 @@
import ImageProvider, { ProvidedImage } from "./ImageProvider"
import BaseUIElement from "../../UI/BaseUIElement"
import Svg from "../../Svg"
import Link from "../../UI/Base/Link"
import { Utils } from "../../Utils"
import { LicenseInfo } from "./LicenseInfo"
import Wikimedia from "../Web/Wikimedia"
/**
* This module provides endpoints for wikimedia and others
*/
export class WikimediaImageProvider extends ImageProvider {
public static readonly singleton = new WikimediaImageProvider()
public static readonly commonsPrefixes = [
"https://commons.wikimedia.org/wiki/",
"https://upload.wikimedia.org",
"File:",
]
private readonly commons_key = "wikimedia_commons"
public readonly defaultKeyPrefixes = [this.commons_key, "image"]
private constructor() {
super()
}
private static ExtractFileName(url: string) {
if (!url.startsWith("http")) {
return url
}
const path = new URL(url).pathname
return path.substring(path.lastIndexOf("/") + 1)
}
private static PrepareUrl(value: string): string {
if (value.toLowerCase().startsWith("https://commons.wikimedia.org/wiki/")) {
return value
}
return `https://commons.wikimedia.org/wiki/Special:FilePath/${encodeURIComponent(
value
)}?width=500&height=400`
}
private static startsWithCommonsPrefix(value: string): boolean {
return WikimediaImageProvider.commonsPrefixes.some((prefix) => value.startsWith(prefix))
}
private static removeCommonsPrefix(value: string): string {
if (value.startsWith("https://upload.wikimedia.org/")) {
value = value.substring(value.lastIndexOf("/") + 1)
value = decodeURIComponent(value)
if (!value.startsWith("File:")) {
value = "File:" + value
}
return value
}
for (const prefix of WikimediaImageProvider.commonsPrefixes) {
if (value.startsWith(prefix)) {
let part = value.substr(prefix.length)
if (prefix.startsWith("http")) {
part = decodeURIComponent(part)
}
return part
}
}
return value
}
SourceIcon(backlink: string): BaseUIElement {
const img = Svg.wikimedia_commons_white_svg().SetStyle("width:2em;height: 2em")
if (backlink === undefined) {
return img
}
return new Link(
Svg.wikimedia_commons_white_svg(),
`https://commons.wikimedia.org/wiki/${backlink}`,
true
)
}
public PrepUrl(value: string): ProvidedImage {
value = WikimediaImageProvider.removeCommonsPrefix(value)
if (value.startsWith("File:")) {
return this.UrlForImage(value)
}
// As a last resort, assume this is a file
return this.UrlForImage("File:" + value)
}
public async ExtractUrls(key: string, value: string): Promise<Promise<ProvidedImage>[]> {
const hasCommonsPrefix = WikimediaImageProvider.startsWithCommonsPrefix(value)
if (key !== undefined && key !== this.commons_key && !hasCommonsPrefix) {
return []
}
value = WikimediaImageProvider.removeCommonsPrefix(value)
if (value.startsWith("Category:")) {
const urls = await Wikimedia.GetCategoryContents(value)
return urls
.filter((url) => url.startsWith("File:"))
.map((image) => Promise.resolve(this.UrlForImage(image)))
}
if (value.startsWith("File:")) {
return [Promise.resolve(this.UrlForImage(value))]
}
if (value.startsWith("http")) {
// Probably an error
return []
}
// As a last resort, assume this is a file
return [Promise.resolve(this.UrlForImage("File:" + value))]
}
public async DownloadAttribution(filename: string): Promise<LicenseInfo> {
filename = WikimediaImageProvider.ExtractFileName(filename)
if (filename === "") {
return undefined
}
const url =
"https://en.wikipedia.org/w/" +
"api.php?action=query&prop=imageinfo&iiprop=extmetadata&" +
"titles=" +
filename +
"&format=json&origin=*"
const data = await Utils.downloadJsonCached(url, 365 * 24 * 60 * 60)
const licenseInfo = new LicenseInfo()
const pageInfo = data.query.pages[-1]
if (pageInfo === undefined) {
return undefined
}
const license = (pageInfo.imageinfo ?? [])[0]?.extmetadata
if (license === undefined) {
console.warn(
    "The file",
    filename,
    "has no usable metadata or license attached... Please fix the license info file yourself!"
)
return undefined
}
let title = pageInfo.title
if (title.startsWith("File:")) {
title = title.substr("File:".length)
}
if (title.endsWith(".jpg") || title.endsWith(".png")) {
title = title.substring(0, title.length - 4)
}
licenseInfo.title = title
licenseInfo.artist = license.Artist?.value
licenseInfo.license = license.License?.value
licenseInfo.copyrighted = license.Copyrighted?.value
licenseInfo.attributionRequired = license.AttributionRequired?.value
licenseInfo.usageTerms = license.UsageTerms?.value
licenseInfo.licenseShortName = license.LicenseShortName?.value
licenseInfo.credit = license.Credit?.value
licenseInfo.description = license.ImageDescription?.value
licenseInfo.informationLocation = new URL("https://en.wikipedia.org/wiki/" + pageInfo.title)
return licenseInfo
}
private UrlForImage(image: string): ProvidedImage {
if (!image.startsWith("File:")) {
image = "File:" + image
}
return { url: WikimediaImageProvider.PrepareUrl(image), key: undefined, provider: this }
}
}

94
src/Logic/Maproulette.ts Normal file
View file

@ -0,0 +1,94 @@
import Constants from "../Models/Constants"
export default class Maproulette {
public static readonly STATUS_OPEN = 0
public static readonly STATUS_FIXED = 1
public static readonly STATUS_FALSE_POSITIVE = 2
public static readonly STATUS_SKIPPED = 3
public static readonly STATUS_DELETED = 4
public static readonly STATUS_ALREADY_FIXED = 5
public static readonly STATUS_TOO_HARD = 6
public static readonly STATUS_DISABLED = 9
public static readonly STATUS_MEANING = {
0: "Open",
1: "Fixed",
2: "False positive",
3: "Skipped",
4: "Deleted",
5: "Already fixed",
6: "Too hard",
9: "Disabled",
}
/*
* The API endpoint to use
*/
endpoint: string
/**
* The API key to use for all requests
*/
private readonly apiKey: string
public static singleton = new Maproulette()
/**
* Creates a new Maproulette instance
* @param endpoint The API endpoint to use
*/
constructor(endpoint: string = "https://maproulette.org/api/v2") {
this.endpoint = endpoint
this.apiKey = Constants.MaprouletteApiKey
}
/**
* Close a task; might throw an error
*
* Also see: https://maproulette.org/docs/swagger-ui/index.html?url=/assets/swagger.json&docExpansion=none#/Task/setTaskStatus
* @param taskId The task to close
* @param status A number indicating the status. Use MapRoulette.STATUS_*
* @param options Additional settings to pass. Refer to the API-docs for more information
*/
async closeTask(
taskId: number,
status = Maproulette.STATUS_FIXED,
options?: {
comment?: string
tags?: string
requestReview?: boolean
completionResponses?: Record<string, string>
}
): Promise<void> {
const response = await fetch(`${this.endpoint}/task/${taskId}/${status}`, {
method: "PUT",
headers: {
"Content-Type": "application/json",
apiKey: this.apiKey,
},
body: options !== undefined ? JSON.stringify(options) : undefined,
})
if (response.status !== 204) {
console.log(`Failed to close task: ${response.status}`)
throw `Failed to close task: ${response.status}`
}
}
/**
* Converts a status text into the corresponding number
*
* Maproulette.codeToIndex("Created") // => 0
* Maproulette.codeToIndex("qdsf") // => undefined
*
*/
public static codeToIndex(code: string): number | undefined {
if (code === "Created") {
return Maproulette.STATUS_OPEN
}
for (let i = 0; i < 9; i++) {
if (Maproulette.STATUS_MEANING["" + i] === code) {
return i
}
}
return undefined
}
}
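A usage sketch (inside an async function): close a MapRoulette task as fixed with a short comment; the task id is made up:
await Maproulette.singleton.closeTask(123456, Maproulette.STATUS_FIXED, {
    comment: "Fixed via MapComplete",
})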

314
src/Logic/MetaTagging.ts Normal file
View file

@ -0,0 +1,314 @@
import SimpleMetaTaggers, { MetataggingState, SimpleMetaTagger } from "./SimpleMetaTagger"
import { ExtraFuncParams, ExtraFunctions, ExtraFuncType } from "./ExtraFunctions"
import LayerConfig from "../Models/ThemeConfig/LayerConfig"
import { Feature } from "geojson"
import FeaturePropertiesStore from "./FeatureSource/Actors/FeaturePropertiesStore"
import LayoutConfig from "../Models/ThemeConfig/LayoutConfig"
import { GeoIndexedStoreForLayer } from "./FeatureSource/Actors/GeoIndexedStore"
import { IndexedFeatureSource } from "./FeatureSource/FeatureSource"
import OsmObjectDownloader from "./Osm/OsmObjectDownloader"
import { Utils } from "../Utils"
import { UIEventSource } from "./UIEventSource"
/**
* Metatagging adds various tags to the elements, e.g. lat, lon, surface area, ...
*
* All metatags start with an underscore
*/
export default class MetaTagging {
private static errorPrintCount = 0
private static readonly stopErrorOutputAt = 10
private static retaggingFuncCache = new Map<
string,
((feature: Feature, propertiesStore: UIEventSource<any>) => void)[]
>()
constructor(state: {
layout: LayoutConfig
osmObjectDownloader: OsmObjectDownloader
perLayer: ReadonlyMap<string, GeoIndexedStoreForLayer>
indexedFeatures: IndexedFeatureSource
featureProperties: FeaturePropertiesStore
}) {
const params = MetaTagging.createExtraFuncParams(state)
for (const layer of state.layout.layers) {
if (layer.source === null) {
continue
}
const featureSource = state.perLayer.get(layer.id)
featureSource.features?.stabilized(1000)?.addCallbackAndRunD((features) => {
if (!(features?.length > 0)) {
// No features to handle
return
}
console.debug(
"Recalculating metatags for layer ",
layer.id,
"due to a change in the upstream features. Contains ",
features.length,
"items"
)
MetaTagging.addMetatags(
features,
params,
layer,
state.layout,
state.osmObjectDownloader,
state.featureProperties
)
})
}
}
/**
* This method (re)calculates all metatags and calculated tags on every given feature.
* The given features should be part of the given layer
*
* Returns true if at least one feature has changed properties
*/
public static addMetatags(
features: Feature[],
params: ExtraFuncParams,
layer: LayerConfig,
layout: LayoutConfig,
osmObjectDownloader: OsmObjectDownloader,
featurePropertiesStores?: FeaturePropertiesStore,
options?: {
includeDates?: true | boolean
includeNonDates?: true | boolean
evaluateStrict?: false | boolean
}
): boolean {
if (features === undefined || features.length === 0) {
return
}
const metatagsToApply: SimpleMetaTagger[] = []
for (const metatag of SimpleMetaTaggers.metatags) {
if (metatag.includesDates) {
if (options?.includeDates ?? true) {
metatagsToApply.push(metatag)
}
} else {
if (options?.includeNonDates ?? true) {
metatagsToApply.push(metatag)
}
}
}
// The calculated functions - per layer - which add the new keys
// Calculated functions are defined by the layer
const layerFuncs = this.createRetaggingFunc(layer, ExtraFunctions.constructHelpers(params))
const state: MetataggingState = { layout, osmObjectDownloader }
let atLeastOneFeatureChanged = false
let strictlyEvaluated = 0
for (let i = 0; i < features.length; i++) {
const feature = features[i]
const tags = featurePropertiesStores?.getStore(feature.properties.id)
let somethingChanged = false
let definedTags = new Set(Object.getOwnPropertyNames(feature.properties))
for (const metatag of metatagsToApply) {
try {
if (!metatag.keys.some((key) => !(key in feature.properties))) {
// All keys are already defined, we probably already ran this one
// Note that we use 'key in properties', not 'properties[key] === undefined'. The latter will cause evaluation of lazy properties
continue
}
if (metatag.isLazy) {
if (!metatag.keys.some((key) => !definedTags.has(key))) {
// All keys are defined - lets skip!
continue
}
somethingChanged = true
metatag.applyMetaTagsOnFeature(feature, layer, tags, state)
if (options?.evaluateStrict) {
for (const key of metatag.keys) {
const evaluated = feature.properties[key]
if (evaluated !== undefined) {
strictlyEvaluated++
}
}
}
} else {
const newValueAdded = metatag.applyMetaTagsOnFeature(
feature,
layer,
tags,
state
)
/* Note that the expression:
* `somethingChanged = newValueAdded || metatag.applyMetaTagsOnFeature(feature, freshness)`
* Is WRONG
*
* IF something changed is `true` due to an earlier run, it will short-circuit and _not_ evaluate the right hand of the OR,
* thus not running an update!
*/
somethingChanged = newValueAdded || somethingChanged
}
} catch (e) {
console.error(
"Could not calculate metatag for ",
metatag.keys.join(","),
":",
e,
e.stack
)
}
}
if (layerFuncs !== undefined) {
try {
// We cannot do `somethingChanged || layerFuncs(feature)', due to the shortcutting behaviour it would not calculate the lazy functions
somethingChanged = layerFuncs(feature, tags) || somethingChanged
} catch (e) {
console.error(e)
}
}
if (somethingChanged) {
try {
tags?.ping()
} catch (e) {
console.error("Could not ping a store for a changed property due to", e)
}
atLeastOneFeatureChanged = true
}
}
return atLeastOneFeatureChanged
}
public static createExtraFuncParams(state: {
indexedFeatures: IndexedFeatureSource
perLayer: ReadonlyMap<string, GeoIndexedStoreForLayer>
}) {
return {
getFeatureById: (id) => state.indexedFeatures.featuresById.data.get(id),
getFeaturesWithin: (layerId, bbox) => {
if (layerId === "*" || layerId === null || layerId === undefined) {
const feats: Feature[][] = []
state.perLayer.forEach((layer) => {
feats.push(layer.GetFeaturesWithin(bbox))
})
return feats
}
return [state.perLayer.get(layerId).GetFeaturesWithin(bbox)]
},
}
}
/**
* Creates a function that calculates a property and adds this property onto the feature's properties
* @param specification
* @param helperFunctions
* @param layerId
* @private
*/
private static createFunctionForFeature(
[key, code, isStrict]: [string, string, boolean],
helperFunctions: Record<ExtraFuncType, (feature: Feature) => Function>,
layerId: string = "unknown layer"
): ((feature: Feature, propertiesStore?: UIEventSource<any>) => void) | undefined {
if (code === undefined) {
return undefined
}
const calculateAndAssign: (feat: Feature, store?: UIEventSource<any>) => string | any = (
feat,
store
) => {
try {
let result = new Function(
"feat",
"{" + ExtraFunctions.types.join(", ") + "}",
"return " + code + ";"
)(feat, helperFunctions)
if (result === "") {
result = undefined
}
const oldValue = feat.properties[key]
if (oldValue == result) {
return oldValue
}
delete feat.properties[key]
feat.properties[key] = result
store?.ping()
return result
} catch (e) {
if (MetaTagging.errorPrintCount < MetaTagging.stopErrorOutputAt) {
console.warn(
"Could not calculate a " +
(isStrict ? "strict " : "") +
" calculated tag for key " +
key +
" defined by " +
code +
" (in layer" +
layerId +
") due to \n" +
e +
"\n. Are you the theme creator? Doublecheck your code. Note that the metatags might not be stable on new features",
e,
e.stack
)
MetaTagging.errorPrintCount++
if (MetaTagging.errorPrintCount == MetaTagging.stopErrorOutputAt) {
console.error(
"Got ",
MetaTagging.stopErrorOutputAt,
" errors calculating this metatagging - stopping output now"
)
}
}
return undefined
}
}
if (isStrict) {
return calculateAndAssign
}
return (feature: Feature, store?: UIEventSource<any>) => {
delete feature.properties[key]
Utils.AddLazyProperty(feature.properties, key, () => calculateAndAssign(feature, store))
}
}
/**
* Creates the function which adds all the calculated tags to a feature. Called once per layer
*/
private static createRetaggingFunc(
layer: LayerConfig,
helpers: Record<ExtraFuncType, (feature: Feature) => Function>
): (feature: Feature, tags: UIEventSource<Record<string, any>>) => boolean {
const calculatedTags: [string, string, boolean][] = layer.calculatedTags
if (calculatedTags === undefined || calculatedTags.length === 0) {
return undefined
}
let functions: ((feature: Feature, propertiesStore?: UIEventSource<any>) => void)[] =
MetaTagging.retaggingFuncCache.get(layer.id)
if (functions === undefined) {
functions = calculatedTags.map((spec) =>
this.createFunctionForFeature(spec, helpers, layer.id)
)
MetaTagging.retaggingFuncCache.set(layer.id, functions)
}
return (feature: Feature, store: UIEventSource<Record<string, any>>) => {
const tags = feature.properties
if (tags === undefined) {
return
}
try {
for (const f of functions) {
f(feature, store)
}
} catch (e) {
console.error("Invalid syntax in calculated tags or some other error: ", e)
}
return true // Something changed
}
}
}

View file

@ -0,0 +1,202 @@
import { OsmNode, OsmRelation, OsmWay } from "../OsmObject"
/**
* Represents a single change to an object
*/
export interface ChangeDescription {
/**
* Metadata to be included in the changeset
*/
meta: {
/*
* The theme with which this changeset was made
*/
theme: string
/**
* The type of the change
*/
changeType: "answer" | "create" | "split" | "delete" | "move" | "import" | string | null
/**
* The motivation for the change, e.g. 'deleted because does not exist anymore'
*/
specialMotivation?: string
/**
* Added by Changes.ts
*/
distanceToObject?: number
}
/**
* Identifier of the object
*/
type: "node" | "way" | "relation"
/**
* Identifier of the object
* Negative for new objects
*/
id: number
/**
* All changes to tags
* v = "" or v = undefined to erase this tag
*
* Note that this list will only contain the _changes_ to the tags, not the full set of tags
*/
tags?: { k: string; v: string }[]
/**
* A change to the geometry:
* 1) Change of node location
* 2) Change of way geometry
* 3) Change of relation members (untested)
*/
changes?:
| {
lat: number
lon: number
}
| {
/* Coordinates are only used for rendering. They should be LON, LAT
* */
coordinates: [number, number][]
nodes: number[]
}
| {
members: { type: "node" | "way" | "relation"; ref: number; role: string }[]
}
/*
Set to delete the object
*/
doDelete?: boolean
}
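By way of example, a single answer that sets `amenity=bench` on an existing node could be described as follows (all values are made up):
const change: ChangeDescription = {
    type: "node",
    id: 42,
    tags: [{ k: "amenity", v: "bench" }],
    meta: {
        theme: "benches",
        changeType: "answer",
    },
}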
export class ChangeDescriptionTools {
/**
* Rewrites all the ids in a changeDescription
*
* // should rewrite the id of the changed object
* const change = <ChangeDescription> {
* id: -1234,
* type: "node",
* meta:{
* theme:"test",
* changeType: "answer"
* },
* tags:[
* {
* k: "key",
* v: "value"
* }
* ]
* }
* }
* const mapping = new Map<string, string>([["node/-1234", "node/42"]])
* const rewritten = ChangeDescriptionTools.rewriteIds(change, mapping)
* rewritten.id // => 42
*
* // should rewrite ids in nodes of a way
* const change = <ChangeDescription> {
* type: "way",
* id: 789,
* changes: {
* nodes: [-1, -2, -3, 68453],
* coordinates: []
* },
* meta:{
* theme:"test",
* changeType: "create"
* }
* }
* const mapping = new Map<string, string>([["node/-1", "node/42"],["node/-2", "node/43"],["node/-3", "node/44"]])
* const rewritten = ChangeDescriptionTools.rewriteIds(change, mapping)
* rewritten.id // => 789
* rewritten.changes["nodes"] // => [42,43,44, 68453]
*
* // should rewrite ids in relationship members
* const change = <ChangeDescription> {
* type: "way",
* id: 789,
* changes: {
* members: [{type: "way", ref: -1, role: "outer"},{type: "way", ref: 48, role: "outer"}],
* },
* meta:{
* theme:"test",
* changeType: "create"
* }
* }
* const mapping = new Map<string, string>([["way/-1", "way/42"],["node/-2", "node/43"],["node/-3", "node/44"]])
* const rewritten = ChangeDescriptionTools.rewriteIds(change, mapping)
* rewritten.id // => 789
* rewritten.changes["members"] // => [{type: "way", ref: 42, role: "outer"},{type: "way", ref: 48, role: "outer"}]
*
*/
public static rewriteIds(
change: ChangeDescription,
mappings: Map<string, string>
): ChangeDescription {
const key = change.type + "/" + change.id
const wayHasChangedNode = ((change.changes ?? {})["nodes"] ?? []).some((id) =>
mappings.has("node/" + id)
)
const relationHasChangedMembers = ((change.changes ?? {})["members"] ?? []).some(
(obj: { type: string; ref: number }) => mappings.has(obj.type + "/" + obj.ref)
)
const hasSomeChange = mappings.has(key) || wayHasChangedNode || relationHasChangedMembers
if (hasSomeChange) {
change = { ...change }
}
if (mappings.has(key)) {
const [_, newId] = mappings.get(key).split("/")
change.id = Number.parseInt(newId)
}
if (wayHasChangedNode) {
change.changes = { ...change.changes }
change.changes["nodes"] = change.changes["nodes"].map((id) => {
const key = "node/" + id
if (!mappings.has(key)) {
return id
}
const [_, newId] = mappings.get(key).split("/")
return Number.parseInt(newId)
})
}
if (relationHasChangedMembers) {
change.changes = { ...change.changes }
change.changes["members"] = change.changes["members"].map(
(obj: { type: string; ref: number }) => {
const key = obj.type + "/" + obj.ref
if (!mappings.has(key)) {
return obj
}
const [_, newId] = mappings.get(key).split("/")
return { ...obj, ref: Number.parseInt(newId) }
}
)
}
return change
}
public static getGeojsonGeometry(change: ChangeDescription): any {
switch (change.type) {
case "node":
const n = new OsmNode(change.id)
n.lat = change.changes["lat"]
n.lon = change.changes["lon"]
return n.asGeoJson().geometry
case "way":
const w = new OsmWay(change.id)
w.nodes = change.changes["nodes"]
w.coordinates = change.changes["coordinates"].map(([lon, lat]) => [lat, lon])
return w.asGeoJson().geometry
case "relation":
const r = new OsmRelation(change.id)
r.members = change.changes["members"]
return r.asGeoJson().geometry
}
}
}

View file

@ -0,0 +1,43 @@
import { ChangeDescription } from "./ChangeDescription"
import OsmChangeAction from "./OsmChangeAction"
export default class ChangeLocationAction extends OsmChangeAction {
private readonly _id: number
private readonly _newLonLat: [number, number]
private readonly _meta: { theme: string; reason: string }
constructor(
id: string,
newLonLat: [number, number],
meta: {
theme: string
reason: string
}
) {
super(id, true)
if (!id.startsWith("node/")) {
throw "Invalid ID: only 'node/number' is accepted"
}
this._id = Number(id.substring("node/".length))
this._newLonLat = newLonLat
this._meta = meta
}
protected async CreateChangeDescriptions(): Promise<ChangeDescription[]> {
const d: ChangeDescription = {
changes: {
lat: this._newLonLat[1],
lon: this._newLonLat[0],
},
type: "node",
id: this._id,
meta: {
changeType: "move",
theme: this._meta.theme,
specialMotivation: this._meta.reason,
},
}
return [d]
}
}
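A usage sketch: move an existing node to a newly surveyed position (the id and coordinates are made up):
const move = new ChangeLocationAction("node/42", [3.2217, 51.2101], {
    theme: "benches",
    reason: "Moved to the surveyed position",
})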

View file

@ -0,0 +1,89 @@
import OsmChangeAction from "./OsmChangeAction"
import { ChangeDescription } from "./ChangeDescription"
import { TagsFilter } from "../../Tags/TagsFilter"
import { OsmTags } from "../../../Models/OsmFeature"
export default class ChangeTagAction extends OsmChangeAction {
private readonly _elementId: string
/**
* The tags to apply onto the object
*/
private readonly _tagsFilter: TagsFilter
/**
* The current tags of the object to change
*/
private readonly _currentTags: Record<string, string> | OsmTags
private readonly _meta: { theme: string; changeType: string }
/**
*
* @param elementId: the element to change
* @param tagsFilter: the tags to apply
* @param currentTags: the current tags of the object
* @param meta: some metainformation
*/
constructor(
elementId: string,
tagsFilter: TagsFilter,
currentTags: Record<string, string>,
meta: {
theme: string
changeType: "answer" | "soft-delete" | "add-image" | string
}
) {
super(elementId, true)
this._elementId = elementId
this._tagsFilter = tagsFilter
this._currentTags = currentTags
this._meta = meta
}
/**
* Double-checks that no invalid values are added
*/
private static checkChange(kv: { k: string; v: string }): { k: string; v: string } {
const key = kv.k
const value = kv.v
if (key === undefined || key === null) {
console.error("Invalid key:", key)
return undefined
}
if (value === undefined || value === null) {
console.error("Invalid value for ", key, ":", value)
return undefined
}
if (typeof value !== "string") {
console.error("Invalid value for ", key, "as it is not a string:", value)
return undefined
}
if (
key.startsWith(" ") ||
value.startsWith(" ") ||
value.endsWith(" ") ||
key.endsWith(" ")
) {
console.warn("Tag starts with or ends with a space - trimming anyway")
}
return { k: key.trim(), v: value.trim() }
}
async CreateChangeDescriptions(): Promise<ChangeDescription[]> {
const changedTags: { k: string; v: string }[] = this._tagsFilter
.asChange(this._currentTags)
.map(ChangeTagAction.checkChange)
const typeId = this._elementId.split("/")
const type = typeId[0]
const id = Number(typeId[1])
return [
{
type: <"node" | "way" | "relation">type,
id: id,
tags: changedTags,
meta: this._meta,
},
]
}
}
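A usage sketch; `currentTags` is assumed to hold the current tags of the object being changed:
const answer = new ChangeTagAction("node/42", new Tag("amenity", "bench"), currentTags, {
    theme: "benches",
    changeType: "answer",
})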

View file

@ -0,0 +1,112 @@
import { OsmCreateAction, PreviewableAction } from "./OsmChangeAction"
import { Tag } from "../../Tags/Tag"
import { Changes } from "../Changes"
import { ChangeDescription } from "./ChangeDescription"
import CreateNewWayAction from "./CreateNewWayAction"
import CreateWayWithPointReuseAction, { MergePointConfig } from "./CreateWayWithPointReuseAction"
import { And } from "../../Tags/And"
import { TagUtils } from "../../Tags/TagUtils"
import { FeatureSource, IndexedFeatureSource } from "../../FeatureSource/FeatureSource"
import LayoutConfig from "../../../Models/ThemeConfig/LayoutConfig"
import { Position } from "geojson"
import FullNodeDatabaseSource from "../../FeatureSource/TiledFeatureSource/FullNodeDatabaseSource"
/**
* More or less the same as 'CreateNewWay', except that it'll try to reuse already existing points
*/
export default class CreateMultiPolygonWithPointReuseAction
extends OsmCreateAction
implements PreviewableAction
{
public newElementId: string = undefined
public newElementIdNumber: number = undefined
private readonly _tags: Tag[]
private readonly createOuterWay: CreateWayWithPointReuseAction
private readonly createInnerWays: CreateNewWayAction[]
private readonly geojsonPreview: any
private readonly theme: string
private readonly changeType: "import" | "create" | string
constructor(
tags: Tag[],
outerRingCoordinates: Position[],
innerRingsCoordinates: Position[][],
state: {
layout: LayoutConfig
changes: Changes
indexedFeatures: IndexedFeatureSource
fullNodeDatabase?: FullNodeDatabaseSource
},
config: MergePointConfig[],
changeType: "import" | "create" | string
) {
super(null, true)
this._tags = [...tags, new Tag("type", "multipolygon")]
this.changeType = changeType
this.theme = state?.layout?.id ?? ""
this.createOuterWay = new CreateWayWithPointReuseAction(
[],
<[number, number][]>outerRingCoordinates,
state,
config
)
this.createInnerWays = innerRingsCoordinates.map(
(ringCoordinates) =>
new CreateNewWayAction(
[],
ringCoordinates.map(([lon, lat]) => ({ lat, lon })),
{ theme: state?.layout?.id }
)
)
this.geojsonPreview = {
type: "Feature",
properties: TagUtils.changeAsProperties(new And(this._tags).asChange({})),
geometry: {
type: "Polygon",
coordinates: [outerRingCoordinates, ...innerRingsCoordinates],
},
}
}
protected async CreateChangeDescriptions(changes: Changes): Promise<ChangeDescription[]> {
const descriptions: ChangeDescription[] = []
descriptions.push(...(await this.createOuterWay.CreateChangeDescriptions(changes)))
for (const innerWay of this.createInnerWays) {
descriptions.push(...(await innerWay.CreateChangeDescriptions(changes)))
}
this.newElementIdNumber = changes.getNewID()
this.newElementId = "relation/" + this.newElementIdNumber
descriptions.push({
type: "relation",
id: this.newElementIdNumber,
tags: new And(this._tags).asChange({}),
meta: {
theme: this.theme,
changeType: this.changeType,
},
changes: {
members: [
{
type: "way",
ref: this.createOuterWay.newElementIdNumber,
role: "outer",
},
// @ts-ignore
...this.createInnerWays.map((a) => ({
type: "way",
ref: a.newElementIdNumber,
role: "inner",
})),
],
},
})
return descriptions
}
getPreview(): Promise<FeatureSource> {
return undefined
}
}

View file

@ -0,0 +1,178 @@
import { Tag } from "../../Tags/Tag"
import { OsmCreateAction } from "./OsmChangeAction"
import { Changes } from "../Changes"
import { ChangeDescription } from "./ChangeDescription"
import { And } from "../../Tags/And"
import { OsmWay } from "../OsmObject"
import { GeoOperations } from "../../GeoOperations"
export default class CreateNewNodeAction extends OsmCreateAction {
/**
* Maps previously created points onto their assigned ID, to reuse the point if uploaded
* "lat,lon" --> id
*/
private static readonly previouslyCreatedPoints = new Map<string, number>()
public newElementId: string = undefined
public newElementIdNumber: number = undefined
private readonly _basicTags: Tag[]
private readonly _lat: number
private readonly _lon: number
private readonly _snapOnto: OsmWay
private readonly _reusePointDistance: number
private readonly meta: {
changeType: "create" | "import"
theme: string
specialMotivation?: string
}
private readonly _reusePreviouslyCreatedPoint: boolean
constructor(
basicTags: Tag[],
lat: number,
lon: number,
options: {
allowReuseOfPreviouslyCreatedPoints?: boolean
snapOnto?: OsmWay
reusePointWithinMeters?: number
theme: string
changeType: "create" | "import" | null
specialMotivation?: string
}
) {
super(null, basicTags !== undefined && basicTags.length > 0)
this._basicTags = basicTags
this._lat = lat
this._lon = lon
if (lat === undefined || lon === undefined) {
throw "Lat or lon are undefined!"
}
this._snapOnto = options?.snapOnto
this._reusePointDistance = options?.reusePointWithinMeters ?? 1
this._reusePreviouslyCreatedPoint =
options?.allowReuseOfPreviouslyCreatedPoints ?? basicTags.length === 0
this.meta = {
theme: options.theme,
changeType: options.changeType,
specialMotivation: options.specialMotivation,
}
}
async CreateChangeDescriptions(changes: Changes): Promise<ChangeDescription[]> {
if (this._reusePreviouslyCreatedPoint) {
const key = this._lat + "," + this._lon
const prev = CreateNewNodeAction.previouslyCreatedPoints
if (prev.has(key)) {
this.newElementIdNumber = prev.get(key)
this.newElementId = "node/" + this.newElementIdNumber
return []
}
}
const id = changes.getNewID()
const properties = {
id: "node/" + id,
}
this.setElementId(id)
for (const kv of this._basicTags) {
if (typeof kv.value !== "string") {
throw (
"Invalid value: don't use non-string value in a preset. The tag " +
kv.key +
"=" +
kv.value +
" is not a string, the value is a " +
typeof kv.value
)
}
properties[kv.key] = kv.value
}
const newPointChange: ChangeDescription = {
tags: new And(this._basicTags).asChange(properties),
type: "node",
id: id,
changes: {
lat: this._lat,
lon: this._lon,
},
meta: this.meta,
}
if (this._snapOnto === undefined) {
return [newPointChange]
}
// Project the point onto the way
console.log("Snapping a node onto an existing way...")
const geojson = this._snapOnto.asGeoJson()
const projected = GeoOperations.nearestPoint(GeoOperations.outerRing(geojson), [
this._lon,
this._lat,
])
const projectedCoor = <[number, number]>projected.geometry.coordinates
const index = projected.properties.index
console.log("Attempting to snap:", { geojson, projected, projectedCoor, index })
// We check that it isn't close to an already existing point
let reusedPointId = undefined
let outerring: [number, number][]
if (geojson.geometry.type === "LineString") {
outerring = <[number, number][]>geojson.geometry.coordinates
} else if (geojson.geometry.type === "Polygon") {
outerring = <[number, number][]>geojson.geometry.coordinates[0]
}
const prev = outerring[index]
if (GeoOperations.distanceBetween(prev, projectedCoor) < this._reusePointDistance) {
// We reuse this point instead!
reusedPointId = this._snapOnto.nodes[index]
}
const next = outerring[index + 1]
if (GeoOperations.distanceBetween(next, projectedCoor) < this._reusePointDistance) {
// We reuse this point instead!
reusedPointId = this._snapOnto.nodes[index + 1]
}
if (reusedPointId !== undefined) {
this.setElementId(reusedPointId)
return [
{
tags: new And(this._basicTags).asChange(properties),
type: "node",
id: reusedPointId,
meta: this.meta,
},
]
}
const locations = [
...this._snapOnto.coordinates.map(([lat, lon]) => <[number, number]>[lon, lat]),
]
const ids = [...this._snapOnto.nodes]
locations.splice(index + 1, 0, [this._lon, this._lat])
ids.splice(index + 1, 0, id)
// Alright, we have to insert a new point into the way
return [
newPointChange,
{
type: "way",
id: this._snapOnto.id,
changes: {
coordinates: locations,
nodes: ids,
},
meta: this.meta,
},
]
}
private setElementId(id: number) {
this.newElementIdNumber = id
this.newElementId = "node/" + id
if (!this._reusePreviouslyCreatedPoint) {
return
}
const key = this._lat + "," + this._lon
CreateNewNodeAction.previouslyCreatedPoints.set(key, id)
}
}

View file

@ -0,0 +1,101 @@
import { ChangeDescription } from "./ChangeDescription"
import { OsmCreateAction } from "./OsmChangeAction"
import { Changes } from "../Changes"
import { Tag } from "../../Tags/Tag"
import CreateNewNodeAction from "./CreateNewNodeAction"
import { And } from "../../Tags/And"
export default class CreateNewWayAction extends OsmCreateAction {
public newElementId: string = undefined
public newElementIdNumber: number = undefined
private readonly coordinates: { nodeId?: number; lat: number; lon: number }[]
private readonly tags: Tag[]
private readonly _options: {
theme: string
}
/**
 * Creates a new way to upload to OSM
 * @param tags: the tags to apply to the way
 * @param coordinates: the coordinates; an entry might have a nodeId, in which case that node is reused
* @param options
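 *
 * A hypothetical usage sketch (the `changes` instance and the theme id are assumptions, not part of this file):
 *
 * const action = new CreateNewWayAction(
 *     [new Tag("barrier", "wall")],
 *     [{ lat: 51.21, lon: 3.22 }, { lat: 51.2101, lon: 3.2202 }],
 *     { theme: "grb" }
 * )
 * const descriptions = await action.CreateChangeDescriptions(changes)
 * // 'descriptions' now contains two newly created nodes plus the new way referring to them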
*/
constructor(
tags: Tag[],
coordinates: { nodeId?: number; lat: number; lon: number }[],
options: {
theme: string
}
) {
super(null, true)
this.coordinates = []
for (const coordinate of coordinates) {
/* The 'PointReuseAction' is a bit buggy and might generate duplicate ids.
We filter those here, as the CreateWayWithPointReuseAction delegates the actual creation to here.
Filtering here also prevents similar bugs in other actions
*/
if (
this.coordinates.length > 0 &&
coordinate.nodeId !== undefined &&
this.coordinates[this.coordinates.length - 1].nodeId === coordinate.nodeId
) {
// This is a duplicate id
console.warn(
"Skipping a node in createWay to avoid a duplicate node:",
coordinate,
"\nThe previous coordinates are: ",
this.coordinates
)
continue
}
this.coordinates.push(coordinate)
}
this.tags = tags
this._options = options
}
public async CreateChangeDescriptions(changes: Changes): Promise<ChangeDescription[]> {
const newElements: ChangeDescription[] = []
const pointIds: number[] = []
for (const coordinate of this.coordinates) {
if (coordinate.nodeId !== undefined) {
pointIds.push(coordinate.nodeId)
continue
}
const newPoint = new CreateNewNodeAction([], coordinate.lat, coordinate.lon, {
allowReuseOfPreviouslyCreatedPoints: true,
changeType: null,
theme: this._options.theme,
})
newElements.push(...(await newPoint.CreateChangeDescriptions(changes)))
pointIds.push(newPoint.newElementIdNumber)
}
// We have created (or reused) all the points!
// Time to create the actual way
const id = changes.getNewID()
this.newElementIdNumber = id
const newWay = <ChangeDescription>{
id,
type: "way",
meta: {
theme: this._options.theme,
changeType: "import",
},
tags: new And(this.tags).asChange({}),
changes: {
nodes: pointIds,
coordinates: this.coordinates.map((c) => [c.lon, c.lat]),
},
}
newElements.push(newWay)
this.newElementId = "way/" + id
return newElements
}
}

View file

@ -0,0 +1,382 @@
import { OsmCreateAction, PreviewableAction } from "./OsmChangeAction"
import { Tag } from "../../Tags/Tag"
import { Changes } from "../Changes"
import { ChangeDescription } from "./ChangeDescription"
import { BBox } from "../../BBox"
import { TagsFilter } from "../../Tags/TagsFilter"
import { GeoOperations } from "../../GeoOperations"
import { FeatureSource, IndexedFeatureSource } from "../../FeatureSource/FeatureSource"
import StaticFeatureSource from "../../FeatureSource/Sources/StaticFeatureSource"
import CreateNewNodeAction from "./CreateNewNodeAction"
import CreateNewWayAction from "./CreateNewWayAction"
import LayoutConfig from "../../../Models/ThemeConfig/LayoutConfig"
import FullNodeDatabaseSource from "../../FeatureSource/TiledFeatureSource/FullNodeDatabaseSource"
import { Position } from "geojson"
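/**
 * Describes when and how a target coordinate may be merged with an already existing, nearby OSM node.
 *
 * A hypothetical example (illustrative values only): reuse an existing entrance node if it lies within one meter of the new geometry:
 *
 * const entranceReuse: MergePointConfig = {
 *     withinRangeOfM: 1,
 *     ifMatches: new Tag("entrance", "yes"),
 *     mode: "reuse_osm_point",
 * }
 */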
export interface MergePointConfig {
withinRangeOfM: number
ifMatches: TagsFilter
mode: "reuse_osm_point" | "move_osm_point"
}
/**
 * CreateWayWithPointReuseAction will create a 'CoordinateInfo' for _every_ point in the way to be created.
*
* The CoordinateInfo indicates the action to take, e.g.:
*
 * - Create a new point
 * - Reuse an existing OSM point (and move it to the target location)
 * - Reuse an existing OSM point (and leave it where it is, snapping the new geometry onto it)
 * - Reuse another CoordinateInfo (and don't do anything else with it)
*
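 * As an illustration (hypothetical values), a coordinate which snaps onto a nearby existing node could be described as:
 *
 *   { lngLat: [3.22, 51.21], identicalTo: undefined, closebyNodes: [{ d: 0.7, node: <geojson feature of the OSM node>, config: <the matching MergePointConfig> }] }
 *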
*/
interface CoordinateInfo {
/**
* The new coordinate
*/
lngLat: [number, number]
/**
* If set: indicates that this point is identical to an earlier point in the way and that that point should be used.
* This is especially needed in closed ways, where the last CoordinateInfo will have '0' as identicalTo
*/
identicalTo?: number
/**
* Information about the closebyNode which might be reused
*/
closebyNodes?: {
/**
* Distance in meters between the target coordinate and this candidate coordinate
*/
d: number
node: any
config: MergePointConfig
}[]
}
/**
 * More or less the same as 'CreateNewWayAction', except that it'll try to reuse already existing points
*/
export default class CreateWayWithPointReuseAction
extends OsmCreateAction
implements PreviewableAction
{
public newElementId: string = undefined
public newElementIdNumber: number = undefined
private readonly _tags: Tag[]
/**
* lngLat-coordinates
* @private
*/
private readonly _coordinateInfo: CoordinateInfo[]
private readonly _state: {
layout: LayoutConfig
changes: Changes
indexedFeatures: IndexedFeatureSource
fullNodeDatabase?: FullNodeDatabaseSource
}
private readonly _config: MergePointConfig[]
constructor(
tags: Tag[],
coordinates: Position[],
state: {
layout: LayoutConfig
changes: Changes
indexedFeatures: IndexedFeatureSource
fullNodeDatabase?: FullNodeDatabaseSource
},
config: MergePointConfig[]
) {
super(null, true)
this._tags = tags
this._state = state
this._config = config
// The main logic of this class: the coordinateInfo contains all the changes
this._coordinateInfo = this.CalculateClosebyNodes(<[number, number][]>coordinates)
}
public async getPreview(): Promise<FeatureSource> {
const features = []
let geometryMoved = false
for (let i = 0; i < this._coordinateInfo.length; i++) {
const coordinateInfo = this._coordinateInfo[i]
if (coordinateInfo.identicalTo !== undefined) {
continue
}
if (
coordinateInfo.closebyNodes === undefined ||
coordinateInfo.closebyNodes.length === 0
) {
const newPoint = {
type: "Feature",
properties: {
newpoint: "yes",
id: "new-geometry-with-reuse-" + i,
},
geometry: {
type: "Point",
coordinates: coordinateInfo.lngLat,
},
}
features.push(newPoint)
continue
}
const reusedPoint = coordinateInfo.closebyNodes[0]
if (reusedPoint.config.mode === "move_osm_point") {
const moveDescription = {
type: "Feature",
properties: {
move: "yes",
"osm-id": reusedPoint.node.properties.id,
id: "new-geometry-move-existing" + i,
distance: GeoOperations.distanceBetween(
coordinateInfo.lngLat,
reusedPoint.node.geometry.coordinates
),
},
geometry: {
type: "LineString",
coordinates: [reusedPoint.node.geometry.coordinates, coordinateInfo.lngLat],
},
}
features.push(moveDescription)
} else {
// The geometry is moved, the point is reused
geometryMoved = true
const reuseDescription = {
type: "Feature",
properties: {
move: "no",
"osm-id": reusedPoint.node.properties.id,
id: "new-geometry-reuse-existing" + i,
distance: GeoOperations.distanceBetween(
coordinateInfo.lngLat,
reusedPoint.node.geometry.coordinates
),
},
geometry: {
type: "LineString",
coordinates: [coordinateInfo.lngLat, reusedPoint.node.geometry.coordinates],
},
}
features.push(reuseDescription)
}
}
if (geometryMoved) {
const coords: [number, number][] = []
for (const info of this._coordinateInfo) {
if (info.identicalTo !== undefined) {
coords.push(coords[info.identicalTo])
continue
}
if (info.closebyNodes === undefined || info.closebyNodes.length === 0) {
// No nearby node to reuse: keep the target coordinate itself
coords.push(info.lngLat)
continue
}
const closest = info.closebyNodes[0]
if (closest.config.mode === "reuse_osm_point") {
coords.push(closest.node.geometry.coordinates)
} else {
coords.push(info.lngLat)
}
}
const newGeometry = {
type: "Feature",
properties: {
"resulting-geometry": "yes",
id: "new-geometry",
},
geometry: {
type: "LineString",
coordinates: coords,
},
}
features.push(newGeometry)
}
return StaticFeatureSource.fromGeojson(features)
}
public async CreateChangeDescriptions(changes: Changes): Promise<ChangeDescription[]> {
const theme = this._state?.layout?.id
const allChanges: ChangeDescription[] = []
const nodeIdsToUse: { lat: number; lon: number; nodeId?: number }[] = []
for (let i = 0; i < this._coordinateInfo.length; i++) {
const info = this._coordinateInfo[i]
const lat = info.lngLat[1]
const lon = info.lngLat[0]
if (info.identicalTo !== undefined) {
nodeIdsToUse.push(nodeIdsToUse[info.identicalTo])
continue
}
if (info.closebyNodes === undefined || info.closebyNodes[0] === undefined) {
const newNodeAction = new CreateNewNodeAction([], lat, lon, {
allowReuseOfPreviouslyCreatedPoints: true,
changeType: null,
theme,
})
allChanges.push(...(await newNodeAction.CreateChangeDescriptions(changes)))
nodeIdsToUse.push({
lat,
lon,
nodeId: newNodeAction.newElementIdNumber,
})
continue
}
const closestPoint = info.closebyNodes[0]
const id = Number(closestPoint.node.properties.id.split("/")[1])
if (closestPoint.config.mode === "move_osm_point") {
allChanges.push({
type: "node",
id,
changes: {
lat,
lon,
},
meta: {
theme,
changeType: null,
},
})
}
nodeIdsToUse.push({ lat, lon, nodeId: id })
}
const newWay = new CreateNewWayAction(this._tags, nodeIdsToUse, {
theme,
})
allChanges.push(...(await newWay.CreateChangeDescriptions(changes)))
this.newElementId = newWay.newElementId
this.newElementIdNumber = newWay.newElementIdNumber
return allChanges
}
/**
 * Calculates, for every target coordinate, which nearby OSM nodes are candidates for reuse or moving: the main logic of this class.
*/
private CalculateClosebyNodes(coordinates: [number, number][]): CoordinateInfo[] {
const bbox = new BBox(coordinates)
const state = this._state
const allNodes = state.fullNodeDatabase?.getNodesWithin(bbox.pad(1.2)) ?? []
const maxDistance = Math.max(...this._config.map((c) => c.withinRangeOfM))
// Initialise coordinateInfo with undefined entries, with the same length as coordinates
const coordinateInfo: {
lngLat: [number, number]
identicalTo?: number
closebyNodes?: {
d: number
node: any
config: MergePointConfig
}[]
}[] = coordinates.map((_) => undefined)
// First loop: gather all information...
for (let i = 0; i < coordinates.length; i++) {
if (coordinateInfo[i] !== undefined) {
// Already seen, probably a duplicate coordinate
continue
}
const coor = coordinates[i]
// Check closeby (and probably identical) points further in the coordinate list, mark them as duplicate
for (let j = i + 1; j < coordinates.length; j++) {
// We look into the 'future' of the way and mark those 'future' locations as being the same as this location
// The continue just above will make sure they get ignored
// This code is important to 'close' ways
if (GeoOperations.distanceBetween(coor, coordinates[j]) < 0.1) {
coordinateInfo[j] = {
lngLat: coor,
identicalTo: i,
}
break
}
}
// Gather the actual info for this point
// Let's search applicable points and determine the merge mode
const closebyNodes: {
d: number
node: any
config: MergePointConfig
}[] = []
for (const node of allNodes) {
const center = node.geometry.coordinates
const d = GeoOperations.distanceBetween(coor, center)
if (d > maxDistance) {
continue
}
for (const config of this._config) {
if (d > config.withinRangeOfM) {
continue
}
if (!config.ifMatches.matchesProperties(node.properties)) {
continue
}
closebyNodes.push({ node, d, config })
}
}
// Sort by distance, closest first
closebyNodes.sort((n0, n1) => {
return n0.d - n1.d
})
coordinateInfo[i] = {
identicalTo: undefined,
lngLat: coor,
closebyNodes,
}
}
// Second loop: figure out which point moves where without creating conflicts
let conflictFree = true
do {
conflictFree = true
for (let i = 0; i < coordinateInfo.length; i++) {
const coorInfo = coordinateInfo[i]
if (coorInfo.identicalTo !== undefined) {
continue
}
if (coorInfo.closebyNodes === undefined || coorInfo.closebyNodes[0] === undefined) {
continue
}
for (let j = i + 1; j < coordinates.length; j++) {
const other = coordinateInfo[j]
if (other.closebyNodes === undefined || other.closebyNodes[0] === undefined) {
continue
}
if (coorInfo.closebyNodes[0] === undefined) {
continue
}
if (other.closebyNodes[0].node === coorInfo.closebyNodes[0].node) {
conflictFree = false
// We have found a conflict!
// We only keep the closest point
if (other.closebyNodes[0].d > coorInfo.closebyNodes[0].d) {
other.closebyNodes.shift()
} else {
coorInfo.closebyNodes.shift()
}
}
}
}
} while (!conflictFree)
return coordinateInfo
}
}

View file

@ -0,0 +1,99 @@
import { OsmObject } from "../OsmObject"
import OsmChangeAction from "./OsmChangeAction"
import { Changes } from "../Changes"
import { ChangeDescription } from "./ChangeDescription"
import ChangeTagAction from "./ChangeTagAction"
import { TagsFilter } from "../../Tags/TagsFilter"
import { And } from "../../Tags/And"
import { Tag } from "../../Tags/Tag"
import { OsmId } from "../../../Models/OsmFeature"
import { Utils } from "../../../Utils"
import OsmObjectDownloader from "../OsmObjectDownloader"
export default class DeleteAction extends OsmChangeAction {
private readonly _softDeletionTags: TagsFilter
private readonly meta: {
theme: string
specialMotivation: string
changeType: "deletion"
}
private readonly _id: OsmId
private readonly _hardDelete: boolean
constructor(
id: OsmId,
softDeletionTags: TagsFilter | undefined,
meta: {
theme: string
specialMotivation: string
},
hardDelete: boolean
) {
super(id, true)
this._id = id
this._hardDelete = hardDelete
this.meta = { ...meta, changeType: "deletion" }
if (softDeletionTags?.usedKeys()?.indexOf("fixme") >= 0) {
this._softDeletionTags = softDeletionTags
} else {
this._softDeletionTags = new And(
Utils.NoNull([
softDeletionTags,
new Tag(
"fixme",
`A mapcomplete user marked this feature to be deleted (${meta.specialMotivation})`
),
])
)
}
}
/**
*
* import {OsmNode} from "../OsmObject"
* import { ImmutableStore } from "../../UIEventSource";
* import { OsmConnection } from "../OsmConnection";
*
* const obj : OsmNode= new OsmNode(1)
* obj.tags = {id:"node/1",name:"Monte Piselli - San Giacomo"}
* const da = new DeleteAction("node/1", new Tag("man_made",""), {theme: "test", specialMotivation: "Testcase"}, true)
* const state = { dryRun: new ImmutableStore(true), osmConnection: new OsmConnection() }
* const descr = await da.CreateChangeDescriptions(new Changes(state), obj)
* descr[0] // => {doDelete: true, meta: {theme: "test", specialMotivation: "Testcase",changeType: "deletion"}, type: "node",id: 1 }
*
* // Must not crash if softDeletionTags are undefined
* const da = new DeleteAction("node/1", undefined, {theme: "test", specialMotivation: "Testcase"}, true)
* const obj : OsmNode= new OsmNode(1)
* obj.tags = {id:"node/1",name:"Monte Piselli - San Giacomo"}
* const state = { dryRun: new ImmutableStore(true), osmConnection: new OsmConnection() }
* const descr = await da.CreateChangeDescriptions(new Changes(state), obj)
* descr[0] // => {doDelete: true, meta: {theme: "test", specialMotivation: "Testcase", changeType: "deletion"}, type: "node",id: 1 }
*/
public async CreateChangeDescriptions(
changes: Changes,
object?: OsmObject
): Promise<ChangeDescription[]> {
const osmObject =
object ??
(await new OsmObjectDownloader(changes.backend, changes).DownloadObjectAsync(this._id))
if (osmObject === "deleted") {
// already deleted in the meantime - no more changes necessary
return []
}
if (this._hardDelete) {
return [
{
meta: this.meta,
doDelete: true,
type: osmObject.type,
id: osmObject.id,
},
]
} else {
return await new ChangeTagAction(this._id, this._softDeletionTags, osmObject.tags, {
...this.meta,
changeType: "soft-delete",
}).CreateChangeDescriptions()
}
}
}

View file

@ -0,0 +1,42 @@
/**
 * An action is a change to the OSM database.
 * It will generate some new/modified/deleted objects, which are all bundled by the 'Changes' object.
*/
import { Changes } from "../Changes"
import { ChangeDescription } from "./ChangeDescription"
import { FeatureSource } from "../../FeatureSource/FeatureSource"
export default abstract class OsmChangeAction {
public readonly trackStatistics: boolean
/**
* The ID of the object that is the center of this change.
 * Null if the action creates a new object (at initialization time);
 * undefined if such an id does not make sense.
*/
public readonly mainObjectId: string
private isUsed = false
constructor(mainObjectId: string, trackStatistics: boolean = true) {
this.trackStatistics = trackStatistics
this.mainObjectId = mainObjectId
}
public async Perform(changes: Changes) {
if (this.isUsed) {
throw "This ChangeAction is already used"
}
this.isUsed = true
return await this.CreateChangeDescriptions(changes)
}
protected abstract CreateChangeDescriptions(changes: Changes): Promise<ChangeDescription[]>
}
export abstract class OsmCreateAction extends OsmChangeAction {
public newElementId: string
public newElementIdNumber: number
}
export interface PreviewableAction {
getPreview(): Promise<FeatureSource>
}
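
// A minimal, hypothetical sketch of a concrete action, for illustration only (this class is not part of the
// original file): it adds a single 'fixme' tag to an existing node and produces exactly one ChangeDescription.
// The field names mirror how ChangeDescription is used elsewhere in this codebase.
export class ExampleTagOnlyAction extends OsmChangeAction {
    constructor(private readonly _nodeId: number, private readonly _theme: string) {
        super("node/" + _nodeId, true)
    }

    protected async CreateChangeDescriptions(_: Changes): Promise<ChangeDescription[]> {
        return [
            {
                type: "node",
                id: this._nodeId,
                // Tag changes are expressed as {k, v} pairs
                tags: [{ k: "fixme", v: "marked by an example action" }],
                // 'changeType: null' mirrors how other actions mark auxiliary changes
                meta: { theme: this._theme, changeType: null },
            },
        ]
    }
}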

View file

@ -0,0 +1,257 @@
import OsmChangeAction from "./OsmChangeAction"
import { Changes } from "../Changes"
import { ChangeDescription } from "./ChangeDescription"
import { OsmRelation, OsmWay } from "../OsmObject"
import OsmObjectDownloader from "../OsmObjectDownloader"
export interface RelationSplitInput {
relation: OsmRelation
originalWayId: number
allWayIdsInOrder: number[]
originalNodes: number[]
allWaysNodesInOrder: number[][]
}
abstract class AbstractRelationSplitHandler extends OsmChangeAction {
protected readonly _input: RelationSplitInput
protected readonly _theme: string
protected readonly _objectDownloader: OsmObjectDownloader
constructor(input: RelationSplitInput, theme: string, objectDownloader: OsmObjectDownloader) {
super("relation/" + input.relation.id, false)
this._input = input
this._theme = theme
this._objectDownloader = objectDownloader
}
/**
* Returns which node should border the member at the given index
*/
protected async targetNodeAt(i: number, first: boolean) {
const member = this._input.relation.members[i]
if (member === undefined) {
return undefined
}
if (member.type === "node") {
return member.ref
}
if (member.type === "way") {
const osmWay = <OsmWay>(
await this._objectDownloader.DownloadObjectAsync("way/" + member.ref)
)
const nodes = osmWay.nodes
if (first) {
return nodes[0]
} else {
return nodes[nodes.length - 1]
}
}
if (member.type === "relation") {
return undefined
}
return undefined
}
}
/**
* When a way is split and this way is part of a relation, the relation should be updated too to have the new segment if relevant.
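 *
 * A hypothetical input: way 100 with nodes [1, 2, 3, 4] was split into way 100 ([1, 2]) and a new way -1 ([2, 3, 4]), giving
 *   { relation, originalWayId: 100, allWayIdsInOrder: [100, -1], originalNodes: [1, 2, 3, 4], allWaysNodesInOrder: [[1, 2], [2, 3, 4]] }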
*/
export default class RelationSplitHandler extends AbstractRelationSplitHandler {
constructor(input: RelationSplitInput, theme: string, objectDownloader: OsmObjectDownloader) {
super(input, theme, objectDownloader)
}
async CreateChangeDescriptions(changes: Changes): Promise<ChangeDescription[]> {
if (this._input.relation.tags["type"] === "restriction") {
// This is a turn restriction
return new TurnRestrictionRSH(
this._input,
this._theme,
this._objectDownloader
).CreateChangeDescriptions(changes)
}
return new InPlaceReplacedmentRTSH(
this._input,
this._theme,
this._objectDownloader
).CreateChangeDescriptions(changes)
}
}
export class TurnRestrictionRSH extends AbstractRelationSplitHandler {
constructor(input: RelationSplitInput, theme: string, objectDownloader: OsmObjectDownloader) {
super(input, theme, objectDownloader)
}
public async CreateChangeDescriptions(changes: Changes): Promise<ChangeDescription[]> {
const relation = this._input.relation
const members = relation.members
const selfMembers = members.filter(
(m) => m.type === "way" && m.ref === this._input.originalWayId
)
if (selfMembers.length > 1) {
console.warn(
"Detected a turn restriction where this way has multiple occurances. This is an error"
)
}
const selfMember = selfMembers[0]
if (selfMember.role === "via") {
// A via way can be replaced in place
return new InPlaceReplacedmentRTSH(
this._input,
this._theme,
this._objectDownloader
).CreateChangeDescriptions(changes)
}
// We have to keep only the way with a common point with the rest of the relation
// Let's figure out which member is neighbouring our way
let commonStartPoint: number = await this.targetNodeAt(members.indexOf(selfMember), true)
let commonEndPoint: number = await this.targetNodeAt(members.indexOf(selfMember), false)
// In normal circumstances, only one of those should be defined
let commonPoint = commonStartPoint ?? commonEndPoint
// Let's select the way to keep
const idToKeep: { id: number } = this._input.allWaysNodesInOrder
.map((nodes, i) => ({
nodes: nodes,
id: this._input.allWayIdsInOrder[i],
}))
.filter((nodesId) => {
const nds = nodesId.nodes
return nds[0] == commonPoint || nds[nds.length - 1] == commonPoint
})[0]
if (idToKeep === undefined) {
console.error("No common point found, this was a broken turn restriction!", relation.id)
return []
}
const originalWayId = this._input.originalWayId
if (idToKeep.id === originalWayId) {
console.log("Turn_restriction fixer: the original ID can be kept, nothing to do")
return []
}
const newMembers: {
ref: number
type: "way" | "node" | "relation"
role: string
}[] = relation.members.map((m) => {
if (m.type === "way" && m.ref === originalWayId) {
return {
ref: idToKeep.id,
type: "way",
role: m.role,
}
}
return m
})
return [
{
type: "relation",
id: relation.id,
changes: {
members: newMembers,
},
meta: {
theme: this._theme,
changeType: "relation-fix:turn_restriction",
},
},
]
}
}
/**
* A simple strategy to split relations:
* -> Download the way members just before and just after the original way
* -> Make sure they are still aligned
*
* Note that the feature might appear multiple times.
*/
export class InPlaceReplacedmentRTSH extends AbstractRelationSplitHandler {
constructor(input: RelationSplitInput, theme: string, objectDownloader: OsmObjectDownloader) {
super(input, theme, objectDownloader)
}
async CreateChangeDescriptions(changes: Changes): Promise<ChangeDescription[]> {
const wayId = this._input.originalWayId
const relation = this._input.relation
const members = relation.members
const originalNodes = this._input.originalNodes
const firstNode = originalNodes[0]
const lastNode = originalNodes[originalNodes.length - 1]
const newMembers: { type: "node" | "way" | "relation"; ref: number; role: string }[] = []
for (let i = 0; i < members.length; i++) {
const member = members[i]
if (member.type !== "way" || member.ref !== wayId) {
newMembers.push(member)
continue
}
const nodeIdBefore = await this.targetNodeAt(i - 1, false)
const nodeIdAfter = await this.targetNodeAt(i + 1, true)
const firstNodeMatches = nodeIdBefore === undefined || nodeIdBefore === firstNode
const lastNodeMatches = nodeIdAfter === undefined || nodeIdAfter === lastNode
if (firstNodeMatches && lastNodeMatches) {
// We have the classic, forward situation
for (const wId of this._input.allWayIdsInOrder) {
newMembers.push({
ref: wId,
type: "way",
role: member.role,
})
}
continue
}
const firstNodeMatchesRev = nodeIdBefore === undefined || nodeIdBefore === lastNode
const lastNodeMatchesRev = nodeIdAfter === undefined || nodeIdAfter === firstNode
if (firstNodeMatchesRev || lastNodeMatchesRev) {
// We (probably) have a reversed, backward situation
for (let i1 = this._input.allWayIdsInOrder.length - 1; i1 >= 0; i1--) {
// Iterate BACKWARDS
const wId = this._input.allWayIdsInOrder[i1]
newMembers.push({
ref: wId,
type: "way",
role: member.role,
})
}
continue
}
// Hmm, alright... Something unexpected is going on, but let's not care too much
// Let's pretend this is forward-going
for (const wId of this._input.allWayIdsInOrder) {
newMembers.push({
ref: wId,
type: "way",
role: member.role,
})
}
}
return [
{
id: relation.id,
type: "relation",
changes: { members: newMembers },
meta: {
changeType: "relation-fix",
theme: this._theme,
},
},
]
}
}

View file

@ -0,0 +1,571 @@
import OsmChangeAction, { PreviewableAction } from "./OsmChangeAction"
import { Changes } from "../Changes"
import { ChangeDescription } from "./ChangeDescription"
import { Tag } from "../../Tags/Tag"
import { FeatureSource } from "../../FeatureSource/FeatureSource"
import { OsmNode, OsmObject, OsmWay } from "../OsmObject"
import { GeoOperations } from "../../GeoOperations"
import StaticFeatureSource from "../../FeatureSource/Sources/StaticFeatureSource"
import CreateNewNodeAction from "./CreateNewNodeAction"
import ChangeTagAction from "./ChangeTagAction"
import { And } from "../../Tags/And"
import { Utils } from "../../../Utils"
import { OsmConnection } from "../OsmConnection"
import { Feature } from "@turf/turf"
import { Geometry, LineString, Point } from "geojson"
import FullNodeDatabaseSource from "../../FeatureSource/TiledFeatureSource/FullNodeDatabaseSource"
export default class ReplaceGeometryAction extends OsmChangeAction implements PreviewableAction {
/**
* The target feature - mostly used for the metadata
*/
private readonly feature: any
private readonly state: {
osmConnection: OsmConnection
fullNodeDatabase?: FullNodeDatabaseSource
}
private readonly wayToReplaceId: string
private readonly theme: string
/**
* The target coordinates that should end up in OpenStreetMap.
* This is identical to either this.feature.geometry.coordinates or -in case of a polygon- feature.geometry.coordinates[0]
* Format: [lon, lat]
*/
private readonly targetCoordinates: [number, number][]
/**
* If a target coordinate is close to another target coordinate, 'identicalTo' will point to the first index.
*/
private readonly identicalTo: number[]
private readonly newTags: Tag[] | undefined
/**
* Not really the 'new' element, but the target that has been applied.
* Added for compatibility with other systems
*/
public readonly newElementId: string
constructor(
state: {
osmConnection: OsmConnection
fullNodeDatabase?: FullNodeDatabaseSource
},
feature: any,
wayToReplaceId: string,
options: {
theme: string
newTags?: Tag[]
}
) {
super(wayToReplaceId, false)
this.state = state
this.feature = feature
this.wayToReplaceId = wayToReplaceId
this.theme = options.theme
this.newElementId = wayToReplaceId
const geom = this.feature.geometry
let coordinates: [number, number][]
if (geom.type === "LineString") {
coordinates = geom.coordinates
} else if (geom.type === "Polygon") {
coordinates = geom.coordinates[0]
}
this.targetCoordinates = coordinates
this.identicalTo = coordinates.map((_) => undefined)
for (let i = 0; i < coordinates.length; i++) {
if (this.identicalTo[i] !== undefined) {
continue
}
for (let j = i + 1; j < coordinates.length; j++) {
const d = GeoOperations.distanceBetween(coordinates[i], coordinates[j])
if (d < 0.1) {
this.identicalTo[j] = i
}
}
}
this.newTags = options.newTags
}
public async getPreview(): Promise<FeatureSource> {
const { closestIds, allNodesById, detachedNodes, reprojectedNodes } =
await this.GetClosestIds()
const preview: Feature<Geometry>[] = closestIds.map((newId, i) => {
if (this.identicalTo[i] !== undefined) {
return undefined
}
if (newId === undefined) {
return {
type: "Feature",
properties: {
newpoint: "yes",
id: "replace-geometry-move-" + i,
},
geometry: {
type: "Point",
coordinates: this.targetCoordinates[i],
},
}
}
const origNode = allNodesById.get(newId)
return {
type: "Feature",
properties: {
move: "yes",
"osm-id": newId,
id: "replace-geometry-move-" + i,
"original-node-tags": JSON.stringify(origNode.tags),
},
geometry: {
type: "LineString",
coordinates: [[origNode.lon, origNode.lat], this.targetCoordinates[i]],
},
}
})
reprojectedNodes.forEach(({ newLat, newLon, nodeId }) => {
const origNode = allNodesById.get(nodeId)
const feature: Feature<LineString> = {
type: "Feature",
properties: {
move: "yes",
reprojection: "yes",
"osm-id": nodeId,
id: "replace-geometry-reproject-" + nodeId,
"original-node-tags": JSON.stringify(origNode.tags),
},
geometry: {
type: "LineString",
coordinates: [
[origNode.lon, origNode.lat],
[newLon, newLat],
],
},
}
preview.push(feature)
})
detachedNodes.forEach(({ reason }, id) => {
const origNode = allNodesById.get(id)
const feature: Feature<Point> = {
type: "Feature",
properties: {
detach: "yes",
id: "replace-geometry-detach-" + id,
"detach-reason": reason,
"original-node-tags": JSON.stringify(origNode.tags),
},
geometry: {
type: "Point",
coordinates: [origNode.lon, origNode.lat],
},
}
preview.push(feature)
})
return StaticFeatureSource.fromGeojson(Utils.NoNull(preview))
}
/**
 * For 'this.feature', gets the corresponding closest nodes that already exist.
*
* This method contains the main logic for this module, as it decides which node gets moved where.
*
*/
public async GetClosestIds(): Promise<{
// A list of the same length as targetCoordinates, containing which OSM-point to move. If undefined, a new point will be created
closestIds: number[]
allNodesById: Map<number, OsmNode>
osmWay: OsmWay
detachedNodes: Map<
number,
{
reason: string
hasTags: boolean
}
>
reprojectedNodes: Map<
number,
{
/*Move the node with this ID into the way as extra node, as it has some relation with the original object*/
projectAfterIndex: number
distance: number
newLat: number
newLon: number
nodeId: number
}
>
}> {
// TODO FIXME: if a new point has to be created, snap to already existing ways
const nodeDb = this.state.fullNodeDatabase
if (nodeDb === undefined) {
throw "PANIC: replaceGeometryAction needs the FullNodeDatabase, which is undefined. This should be initialized by having the 'type_node'-layer enabled in your theme. (NB: the replacebutton has type_node as dependency)"
}
const self = this
let parsed: OsmObject[]
{
// Gather the needed OsmObjects
const splitted = this.wayToReplaceId.split("/")
const type = splitted[0]
const idN = Number(splitted[1])
if (idN < 0 || type !== "way") {
throw "Invalid ID to conflate: " + this.wayToReplaceId
}
const url = `${
this.state.osmConnection?._oauth_config?.url ?? "https://openstreetmap.org"
}/api/0.6/${this.wayToReplaceId}/full`
const rawData = await Utils.downloadJsonCached(url, 1000)
parsed = OsmObject.ParseObjects(rawData.elements)
}
const allNodes = parsed.filter((o) => o.type === "node")
const osmWay = <OsmWay>parsed[parsed.length - 1]
if (osmWay.type !== "way") {
throw "WEIRD: expected an OSM-way as last element here!"
}
const allNodesById = new Map<number, OsmNode>()
for (const node of allNodes) {
allNodesById.set(node.id, <OsmNode>node)
}
/**
* For every already existing OSM-point, we calculate:
*
* - the distance to every target point.
 * - Whether this node has (other) parent ways, which might restrict movement
 * - Whether this node has tags set
 *
 * Having tags and/or being connected to another way indicates that there is some _relation_ with objects in the neighbourhood.
 *
 * The Replace-geometry action should try its best to honour these. Some 'wiggling' is allowed (e.g. moving an entrance a bit), but these relations should not be broken.
*/
const distances = new Map<
number /* osmId*/,
/** target coordinate index --> distance (or undefined if a duplicate)*/
number[]
>()
const nodeInfo = new Map<
number /* osmId*/,
{
distances: number[]
// Part of some other way then the one that should be replaced
partOfWay: boolean
hasTags: boolean
}
>()
for (const node of allNodes) {
const parentWays = nodeDb.GetParentWays(node.id)
if (parentWays === undefined) {
throw "PANIC: the way to replace has a node which has no parents at all. Is it deleted in the meantime?"
}
const parentWayIds = parentWays.data.map((w) => w.type + "/" + w.id)
const idIndex = parentWayIds.indexOf(this.wayToReplaceId)
if (idIndex < 0) {
throw "PANIC: the way to replace has a node, which is _not_ part of this was according to the node..."
}
parentWayIds.splice(idIndex, 1)
const partOfSomeWay = parentWayIds.length > 0
const hasTags = Object.keys(node.tags).length > 1
const nodeDistances = this.targetCoordinates.map((_) => undefined)
for (let i = 0; i < this.targetCoordinates.length; i++) {
if (this.identicalTo[i] !== undefined) {
continue
}
const targetCoordinate = this.targetCoordinates[i]
const cp = node.centerpoint()
const d = GeoOperations.distanceBetween(targetCoordinate, [cp[1], cp[0]])
if (d > 25) {
// This is too much to move
continue
}
if (d < 3 || !(hasTags || partOfSomeWay)) {
// If there is some relation: cap the move distance to 3m
nodeDistances[i] = d
}
}
distances.set(node.id, nodeDistances)
nodeInfo.set(node.id, {
distances: nodeDistances,
partOfWay: partOfSomeWay,
hasTags,
})
}
const closestIds = this.targetCoordinates.map((_) => undefined)
const unusedIds = new Map<
number,
{
reason: string
hasTags: boolean
}
>()
{
// Search best merge candidate
/**
* Then, we search the node that has to move the least distance and add this as mapping.
* We do this until no points are left
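             *
             * A worked (hypothetical) illustration: with target points T0 and T1 and existing nodes N5 (2m from T0)
             * and N7 (0.5m from T1, 4m from T0), the first iteration picks N7 (smallest move) and maps it onto T1;
             * the second iteration then maps N5 onto T0.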
*/
let candidate: number
let moveDistance: number
/**
 * The list of nodes that are _not_ used anymore, typically if there are fewer targetCoordinates than source coordinates
*/
do {
candidate = undefined
moveDistance = Infinity
distances.forEach((distances, nodeId) => {
const minDist = Math.min(...Utils.NoNull(distances))
if (moveDistance > minDist) {
// We have found a candidate to move
candidate = nodeId
moveDistance = minDist
}
})
if (candidate !== undefined) {
// We found a candidate... Search the corresponding target id:
let targetId: number = undefined
let lowestDistance = Number.MAX_VALUE
let nodeDistances = distances.get(candidate)
for (let i = 0; i < nodeDistances.length; i++) {
const d = nodeDistances[i]
if (d !== undefined && d < lowestDistance) {
lowestDistance = d
targetId = i
}
}
// This candidate's role is done; it can be removed from the distance matrix
distances.delete(candidate)
if (targetId !== undefined) {
// At this point, we have our target coordinate index: targetId!
// Let's map it...
closestIds[targetId] = candidate
// To indicate that this targetCoordinate is taken, we remove them from the distances matrix
distances.forEach((dists) => {
dists[targetId] = undefined
})
} else {
// Seems like all the targetCoordinates have found a source point
unusedIds.set(candidate, {
reason: "Unused by new way",
hasTags: nodeInfo.get(candidate).hasTags,
})
}
}
} while (candidate !== undefined)
}
// If there are still unused values in 'distances', they are definitively unused
distances.forEach((_, nodeId) => {
unusedIds.set(nodeId, {
reason: "Unused by new way",
hasTags: nodeInfo.get(nodeId).hasTags,
})
})
const reprojectedNodes = new Map<
number,
{
/*Move the node with this ID into the way as extra node, as it has some relation with the original object*/
projectAfterIndex: number
distance: number
newLat: number
newLon: number
nodeId: number
}
>()
{
// Let's check the unused ids: can they be detached, or do they signify some relation with the object?
unusedIds.forEach(({}, id) => {
const info = nodeInfo.get(id)
if (!(info.hasTags || info.partOfWay)) {
// Nothing special here, we detach
return
}
// The current node has tags and/or is attached to another way (e.g. a neighbouring building).
// We should project it onto the target geometry and move it to an appropriate place
const node = allNodesById.get(id)
// Project the node onto the target way to calculate the new coordinates
const way = <Feature<LineString>>{
type: "Feature",
properties: {},
geometry: {
type: "LineString",
coordinates: self.targetCoordinates,
},
}
const projected = GeoOperations.nearestPoint(way, [node.lon, node.lat])
reprojectedNodes.set(id, {
newLon: projected.geometry.coordinates[0],
newLat: projected.geometry.coordinates[1],
projectAfterIndex: projected.properties.index,
distance: projected.properties.dist,
nodeId: id,
})
})
reprojectedNodes.forEach((_, nodeId) => unusedIds.delete(nodeId))
}
return { closestIds, allNodesById, osmWay, detachedNodes: unusedIds, reprojectedNodes }
}
protected async CreateChangeDescriptions(changes: Changes): Promise<ChangeDescription[]> {
const nodeDb = this.state.fullNodeDatabase
if (nodeDb === undefined) {
throw "PANIC: replaceGeometryAction needs the FullNodeDatabase, which is undefined. This should be initialized by having the 'type_node'-layer enabled in your theme. (NB: the replacebutton has type_node as dependency)"
}
const { closestIds, osmWay, detachedNodes, reprojectedNodes } = await this.GetClosestIds()
const allChanges: ChangeDescription[] = []
const actualIdsToUse: number[] = []
for (let i = 0; i < closestIds.length; i++) {
if (this.identicalTo[i] !== undefined) {
const j = this.identicalTo[i]
actualIdsToUse.push(actualIdsToUse[j])
continue
}
const closestId = closestIds[i]
const [lon, lat] = this.targetCoordinates[i]
if (closestId === undefined) {
const newNodeAction = new CreateNewNodeAction([], lat, lon, {
allowReuseOfPreviouslyCreatedPoints: true,
theme: this.theme,
changeType: null,
})
const changeDescr = await newNodeAction.CreateChangeDescriptions(changes)
allChanges.push(...changeDescr)
actualIdsToUse.push(newNodeAction.newElementIdNumber)
} else {
const change = <ChangeDescription>{
id: closestId,
type: "node",
meta: {
theme: this.theme,
changeType: "move",
},
changes: { lon, lat },
}
actualIdsToUse.push(closestId)
allChanges.push(change)
}
}
console.log("Adding tags", this.newTags, "to conflated way nr", this.wayToReplaceId)
if (this.newTags !== undefined && this.newTags.length > 0) {
const addExtraTags = new ChangeTagAction(
this.wayToReplaceId,
new And(this.newTags),
osmWay.tags,
{
theme: this.theme,
changeType: "conflation",
}
)
allChanges.push(...(await addExtraTags.CreateChangeDescriptions()))
}
const newCoordinates = [...this.targetCoordinates]
{
// Add reprojected nodes to the way
const proj = Array.from(reprojectedNodes.values())
proj.sort((a, b) => {
// Sort descending
const diff = b.projectAfterIndex - a.projectAfterIndex
if (diff !== 0) {
return diff
}
return b.distance - a.distance
})
for (const reprojectedNode of proj) {
const change = <ChangeDescription>{
id: reprojectedNode.nodeId,
type: "node",
meta: {
theme: this.theme,
changeType: "move",
},
changes: { lon: reprojectedNode.newLon, lat: reprojectedNode.newLat },
}
allChanges.push(change)
actualIdsToUse.splice(
reprojectedNode.projectAfterIndex + 1,
0,
reprojectedNode.nodeId
)
newCoordinates.splice(reprojectedNode.projectAfterIndex + 1, 0, [
reprojectedNode.newLon,
reprojectedNode.newLat,
])
}
}
// Actually change the nodes of the way!
allChanges.push({
type: "way",
id: osmWay.id,
changes: {
nodes: actualIdsToUse,
coordinates: newCoordinates,
},
meta: {
theme: this.theme,
changeType: "conflation",
},
})
// Some nodes might need to be deleted
if (detachedNodes.size > 0) {
detachedNodes.forEach(({ hasTags, reason }, nodeId) => {
const parentWays = nodeDb.GetParentWays(nodeId)
const index = parentWays.data.map((w) => w.id).indexOf(osmWay.id)
if (index < 0) {
console.error(
"ReplaceGeometryAction is trying to detach node " +
nodeId +
", but it isn't listed as being part of way " +
osmWay.id
)
return
}
// We detached this node, so we unregister it
parentWays.data.splice(index, 1)
parentWays.ping()
if (hasTags) {
// Has tags: we leave this node alone
return
}
if (parentWays.data.length != 0) {
// Still part of other ways: we leave this node alone!
return
}
console.log("Removing node " + nodeId, "as it isn't needed anymore by any way")
allChanges.push({
meta: {
theme: this.theme,
changeType: "delete",
},
doDelete: true,
type: "node",
id: nodeId,
})
})
}
return allChanges
}
}

View file

@ -0,0 +1,306 @@
import { OsmWay } from "../OsmObject"
import { Changes } from "../Changes"
import { GeoOperations } from "../../GeoOperations"
import OsmChangeAction from "./OsmChangeAction"
import { ChangeDescription } from "./ChangeDescription"
import RelationSplitHandler from "./RelationSplitHandler"
import { Feature, LineString } from "geojson"
import OsmObjectDownloader from "../OsmObjectDownloader"
interface SplitInfo {
originalIndex?: number // or negative for new elements
lngLat: [number, number]
doSplit: boolean
}
export default class SplitAction extends OsmChangeAction {
private readonly wayId: string
private readonly _splitPointsCoordinates: [number, number][] // lon, lat
private readonly _meta: { theme: string; changeType: "split" }
private readonly _toleranceInMeters: number
private readonly _withNewCoordinates: (coordinates: [number, number][]) => void
/**
 * Creates change descriptions for splitting a way at the given points.
* Will attempt to reuse existing points
* @param wayId
* @param splitPointCoordinates: lon, lat
* @param meta
 * @param toleranceInMeters: if a split point is closer than this amount of meters to an existing point, the existing point will be used to split the line instead of a new point
* @param withNewCoordinates: an optional callback which will leak the new coordinates of the original way
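 *
 * A hypothetical usage sketch (the `changes` instance and the theme id are assumptions):
 *
 * const split = new SplitAction("way/1234", [[3.22, 51.21]], { theme: "cyclofix" }, 5)
 * await changes.applyAction(split)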
*/
constructor(
wayId: string,
splitPointCoordinates: [number, number][],
meta: { theme: string },
toleranceInMeters = 5,
withNewCoordinates?: (coordinates: [number, number][]) => void
) {
super(wayId, true)
this.wayId = wayId
this._splitPointsCoordinates = splitPointCoordinates
this._toleranceInMeters = toleranceInMeters
this._withNewCoordinates = withNewCoordinates
this._meta = { ...meta, changeType: "split" }
}
private static SegmentSplitInfo(splitInfo: SplitInfo[]): SplitInfo[][] {
const wayParts = []
let currentPart = []
for (const splitInfoElement of splitInfo) {
currentPart.push(splitInfoElement)
if (splitInfoElement.doSplit) {
// We have to do a split!
// We add the current element to currentPart, flush it and start the next part with this element
wayParts.push(currentPart)
currentPart = [splitInfoElement]
}
}
wayParts.push(currentPart)
return wayParts.filter((wp) => wp.length > 0)
}
async CreateChangeDescriptions(changes: Changes): Promise<ChangeDescription[]> {
const originalElement = <OsmWay>(
await new OsmObjectDownloader(changes.backend, changes).DownloadObjectAsync(this.wayId)
)
const originalNodes = originalElement.nodes
// First, calculate the splitpoints and remove points close to one another
const splitInfo = this.CalculateSplitCoordinates(originalElement, this._toleranceInMeters)
// Now we have a list with e.g.
// [ { originalIndex: 0}, {originalIndex: 1, doSplit: true}, {originalIndex: 2}, {originalIndex: undefined, doSplit: true}, {originalIndex: 3}]
// Let's change 'originalIndex' to the actual node id first (or assign a new id if needed):
for (const element of splitInfo) {
if (element.originalIndex >= 0) {
element.originalIndex = originalElement.nodes[element.originalIndex]
} else {
element.originalIndex = changes.getNewID()
}
}
// Next up is creating actual parts from this
const wayParts: SplitInfo[][] = SplitAction.SegmentSplitInfo(splitInfo)
// Alright! At this point, we have our new ways!
// Which one is the longest of them (and can keep the id)?
let longest = undefined
for (const wayPart of wayParts) {
if (longest === undefined) {
longest = wayPart
continue
}
if (wayPart.length > longest.length) {
longest = wayPart
}
}
const changeDescription: ChangeDescription[] = []
// Let's create the new nodes as needed
for (const element of splitInfo) {
if (element.originalIndex >= 0) {
continue
}
changeDescription.push({
type: "node",
id: element.originalIndex,
changes: {
lon: element.lngLat[0],
lat: element.lngLat[1],
},
meta: this._meta,
})
}
// The ids of all the ways (including the original)
const allWayIdsInOrder: number[] = []
const allWaysNodesInOrder: number[][] = []
// Let's create OsmWays based on them
for (const wayPart of wayParts) {
let isOriginal = wayPart === longest
if (isOriginal) {
// We change the existing way
const nodeIds = wayPart.map((p) => p.originalIndex)
const newCoordinates = wayPart.map((p) => p.lngLat)
changeDescription.push({
type: "way",
id: originalElement.id,
changes: {
coordinates: newCoordinates,
nodes: nodeIds,
},
meta: this._meta,
})
if (this._withNewCoordinates) {
this._withNewCoordinates(newCoordinates)
}
allWayIdsInOrder.push(originalElement.id)
allWaysNodesInOrder.push(nodeIds)
} else {
let id = changes.getNewID()
// Copy the tags from the original object onto the new
const kv = []
for (const k in originalElement.tags) {
if (!originalElement.tags.hasOwnProperty(k)) {
continue
}
if (k.startsWith("_") || k === "id") {
continue
}
kv.push({ k: k, v: originalElement.tags[k] })
}
const nodeIds = wayPart.map((p) => p.originalIndex)
if (nodeIds.length <= 1) {
console.error("Got a segment with only one node - skipping")
continue
}
changeDescription.push({
type: "way",
id: id,
tags: kv,
changes: {
coordinates: wayPart.map((p) => p.lngLat),
nodes: nodeIds,
},
meta: this._meta,
})
allWayIdsInOrder.push(id)
allWaysNodesInOrder.push(nodeIds)
}
}
// At last, we still have to check that we aren't part of a relation...
// At least, the order of the ways is identical, so we can keep the same roles
const downloader = new OsmObjectDownloader(changes.backend, changes)
const relations = await downloader.DownloadReferencingRelations(this.wayId)
for (const relation of relations) {
const changDescrs = await new RelationSplitHandler(
{
relation: relation,
allWayIdsInOrder: allWayIdsInOrder,
originalNodes: originalNodes,
allWaysNodesInOrder: allWaysNodesInOrder,
originalWayId: originalElement.id,
},
this._meta.theme,
downloader
).CreateChangeDescriptions(changes)
changeDescription.push(...changDescrs)
}
// And we have our objects!
// Time to upload
return changeDescription
}
/**
* Calculates the actual points to split
 * If another point is closer than ~5m, we reuse that point
*/
private CalculateSplitCoordinates(osmWay: OsmWay, toleranceInM = 5): SplitInfo[] {
const wayGeoJson = <Feature<LineString>>osmWay.asGeoJson()
// Should be [lon, lat][]
const originalPoints: [number, number][] = osmWay.coordinates.map((c) => [c[1], c[0]])
const allPoints: {
// lon, lat
coordinates: [number, number]
isSplitPoint: boolean
originalIndex?: number // Original index
dist: number // Distance from the nearest point on the original line
location: number // Distance from the start of the way
}[] = this._splitPointsCoordinates.map((c) => {
// From the turf.js docs:
// The properties object will contain three values:
// - `index`: closest point was found on nth line part,
// - `dist`: distance between pt and the closest point,
// `location`: distance along the line between start and the closest point.
let projected = GeoOperations.nearestPoint(wayGeoJson, c)
// c is lon lat
return {
coordinates: c,
isSplitPoint: true,
dist: projected.properties.dist,
location: projected.properties.location,
}
})
// We have a bunch of coordinates here: [ [lon, lat], [lon, lat], ... ]
// We project them onto the line (which should yield pretty much the same point) and add them to allPoints
for (let i = 0; i < originalPoints.length; i++) {
let originalPoint = originalPoints[i]
let projected = GeoOperations.nearestPoint(wayGeoJson, originalPoint)
allPoints.push({
coordinates: originalPoint,
isSplitPoint: false,
location: projected.properties.location,
originalIndex: i,
dist: projected.properties.dist,
})
}
// At this point, we have a list of both the split point and the old points, with some properties to discriminate between them
// We sort this list by location along the way, so that the split points end up in between the original points
allPoints.sort((a, b) => a.location - b.location)
for (let i = allPoints.length - 2; i >= 1; i--) {
// We 'merge' split points with already existing nodes if they are close enough, to avoid creating nodes that are very close together
// Note the loop bounds: we skip the first and the last element:
// The first and last element are always part of the original way and should be kept
// Furthermore, we run in reverse order as we'll delete elements on the go
const point = allPoints[i]
if (point.originalIndex !== undefined) {
// We keep the original points
continue
}
// At this point, we are looking at a newly requested split point
// Let's see which neighbouring (already existing) point is closest and, if it is within tolerance, mark that one as the split point instead
const nextPoint = allPoints[i + 1]
const prevPoint = allPoints[i - 1]
const distToNext = nextPoint.location - point.location
const distToPrev = point.location - prevPoint.location
if (distToNext * 1000 > toleranceInM && distToPrev * 1000 > toleranceInM) {
// Both are too far away to mark them as the split point
continue
}
let closest = nextPoint
if (distToNext > distToPrev) {
closest = prevPoint
}
// Ok, we have a closest point!
if (closest.originalIndex === 0 || closest.originalIndex === originalPoints.length - 1) {
// We can not split on the first or last points...
continue
}
closest.isSplitPoint = true
allPoints.splice(i, 1)
}
const splitInfo: SplitInfo[] = []
let nextId = -1 // Note: these IDs are overwritten later on, no need to use a global counter here
for (const p of allPoints) {
let index = p.originalIndex
if (index === undefined) {
index = nextId
nextId--
}
const splitInfoElement = {
originalIndex: index,
lngLat: p.coordinates,
doSplit: p.isSplitPoint,
}
splitInfo.push(splitInfoElement)
}
return splitInfo
}
}

603
src/Logic/Osm/Changes.ts Normal file
View file

@ -0,0 +1,603 @@
import { OsmNode, OsmObject, OsmRelation, OsmWay } from "./OsmObject"
import { Store, UIEventSource } from "../UIEventSource"
import Constants from "../../Models/Constants"
import OsmChangeAction from "./Actions/OsmChangeAction"
import { ChangeDescription, ChangeDescriptionTools } from "./Actions/ChangeDescription"
import { Utils } from "../../Utils"
import { LocalStorageSource } from "../Web/LocalStorageSource"
import SimpleMetaTagger from "../SimpleMetaTagger"
import { FeatureSource, IndexedFeatureSource } from "../FeatureSource/FeatureSource"
import { GeoLocationPointProperties } from "../State/GeoLocationState"
import { GeoOperations } from "../GeoOperations"
import { ChangesetHandler, ChangesetTag } from "./ChangesetHandler"
import { OsmConnection } from "./OsmConnection"
import FeaturePropertiesStore from "../FeatureSource/Actors/FeaturePropertiesStore"
import OsmObjectDownloader from "./OsmObjectDownloader"
/**
* Handles all changes made to OSM.
* Needs an authenticator via OsmConnection
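 *
 * A hypothetical usage sketch (state wiring is heavily simplified; imports are assumed):
 *
 * const changes = new Changes({ dryRun: new ImmutableStore(true), osmConnection: new OsmConnection() })
 * await changes.applyAction(someOsmChangeAction) // queues the resulting ChangeDescriptions
 * await changes.flushChanges("example upload") // uploads all pending changes in one go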
*/
export class Changes {
public readonly pendingChanges: UIEventSource<ChangeDescription[]> =
LocalStorageSource.GetParsed<ChangeDescription[]>("pending-changes", [])
public readonly allChanges = new UIEventSource<ChangeDescription[]>(undefined)
public readonly state: { allElements?: IndexedFeatureSource; osmConnection: OsmConnection }
public readonly extraComment: UIEventSource<string> = new UIEventSource(undefined)
public readonly backend: string
public readonly isUploading = new UIEventSource(false)
private readonly historicalUserLocations?: FeatureSource
private _nextId: number = -1 // Newly assigned IDs are negative
private readonly previouslyCreated: OsmObject[] = []
private readonly _leftRightSensitive: boolean
private readonly _changesetHandler: ChangesetHandler
constructor(
state: {
dryRun: Store<boolean>
allElements?: IndexedFeatureSource
featurePropertiesStore?: FeaturePropertiesStore
osmConnection: OsmConnection
historicalUserLocations?: FeatureSource
},
leftRightSensitive: boolean = false
) {
this._leftRightSensitive = leftRightSensitive
// We keep track of all changes as well
this.allChanges.setData([...this.pendingChanges.data])
// If a pending change contains a negative ID, continue counting from there
this._nextId = Math.min(-1, ...(this.pendingChanges.data?.map((pch) => pch.id) ?? []))
this.state = state
this.backend = state.osmConnection.Backend()
this._changesetHandler = new ChangesetHandler(
state.dryRun,
state.osmConnection,
state.featurePropertiesStore,
this
)
this.historicalUserLocations = state.historicalUserLocations
// Note: a changeset might be reused which was opened just before and might have already used some ids
// This doesn't matter however, as the '-1' is per piecewise upload, not global per changeset
}
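/**
 * Creates the XML body of an osmChange upload for the given objects.
 *
 * Sketch of the generated envelope (the per-element XML is delegated to OsmObject.ChangesetXML):
 *
 * <osmChange version='0.6' generator='Mapcomplete ...'>
 * <create>...newly created objects...</create>
 * <modify>...modified objects...</modify>
 * <delete>...deleted objects...</delete>
 * </osmChange>
 */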
static createChangesetFor(
csId: string,
allChanges: {
modifiedObjects: OsmObject[]
newObjects: OsmObject[]
deletedObjects: OsmObject[]
}
): string {
const changedElements = allChanges.modifiedObjects ?? []
const newElements = allChanges.newObjects ?? []
const deletedElements = allChanges.deletedObjects ?? []
let changes = `<osmChange version='0.6' generator='Mapcomplete ${Constants.vNumber}'>`
if (newElements.length > 0) {
changes +=
"\n<create>\n" +
newElements.map((e) => e.ChangesetXML(csId)).join("\n") +
"</create>"
}
if (changedElements.length > 0) {
changes +=
"\n<modify>\n" +
changedElements.map((e) => e.ChangesetXML(csId)).join("\n") +
"\n</modify>"
}
if (deletedElements.length > 0) {
changes +=
"\n<delete>\n" +
deletedElements.map((e) => e.ChangesetXML(csId)).join("\n") +
"\n</delete>"
}
changes += "</osmChange>"
return changes
}
private static GetNeededIds(changes: ChangeDescription[]) {
return Utils.Dedup(changes.filter((c) => c.id >= 0).map((c) => c.type + "/" + c.id))
}
/**
* Returns a new ID and updates the value for the next ID
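 * For example, with no pending changes the first call returns -1, the next one -2, and so on.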
*/
public getNewID() {
return this._nextId--
}
/**
* Uploads all the pending changes in one go.
* Triggered by the 'PendingChangeUploader'-actor in Actors
*/
public async flushChanges(flushreason: string = undefined): Promise<void> {
if (this.pendingChanges.data.length === 0) {
return
}
if (this.isUploading.data) {
console.log("Is already uploading... Abort")
return
}
console.log("Uploading changes due to: ", flushreason)
this.isUploading.setData(true)
try {
const csNumber = await this.flushChangesAsync()
this.isUploading.setData(false)
console.log("Changes flushed. Your changeset is " + csNumber)
} catch (e) {
this.isUploading.setData(false)
console.error("Flushing changes failed due to", e)
}
}
public async applyAction(action: OsmChangeAction): Promise<void> {
const changeDescriptions = await action.Perform(this)
changeDescriptions[0].meta.distanceToObject = this.calculateDistanceToChanges(
action,
changeDescriptions
)
this.applyChanges(changeDescriptions)
}
public applyChanges(changes: ChangeDescription[]) {
console.log("Received changes:", changes)
this.pendingChanges.data.push(...changes)
this.pendingChanges.ping()
this.allChanges.data.push(...changes)
this.allChanges.ping()
}
public CreateChangesetObjects(
changes: ChangeDescription[],
downloadedOsmObjects: OsmObject[]
): {
newObjects: OsmObject[]
modifiedObjects: OsmObject[]
deletedObjects: OsmObject[]
} {
const objects: Map<string, OsmObject> = new Map<string, OsmObject>()
const states: Map<string, "unchanged" | "created" | "modified" | "deleted"> = new Map()
for (const o of downloadedOsmObjects) {
objects.set(o.type + "/" + o.id, o)
states.set(o.type + "/" + o.id, "unchanged")
}
for (const o of this.previouslyCreated) {
objects.set(o.type + "/" + o.id, o)
states.set(o.type + "/" + o.id, "unchanged")
}
for (const change of changes) {
let changed = false
const id = change.type + "/" + change.id
if (!objects.has(id)) {
// The object hasn't been seen before, so it doesn't exist yet and is newly created by its very definition
if (change.id >= 0) {
// Might simply be a failed fetch for this object
throw "Did not get an object that should be known: " + id
}
if (change.changes === undefined) {
// This object is a change to a newly created object. However, we have not seen the creation change description yet!
throw "Not a creation of the object"
}
// This is a new object that should be created
states.set(id, "created")
console.log("Creating object for changeDescription", change)
let osmObj: OsmObject = undefined
switch (change.type) {
case "node":
const n = new OsmNode(change.id)
n.lat = change.changes["lat"]
n.lon = change.changes["lon"]
osmObj = n
break
case "way":
const w = new OsmWay(change.id)
w.nodes = change.changes["nodes"]
osmObj = w
break
case "relation":
const r = new OsmRelation(change.id)
r.members = change.changes["members"]
osmObj = r
break
}
if (osmObj === undefined) {
throw "Hmm? This is a bug"
}
objects.set(id, osmObj)
this.previouslyCreated.push(osmObj)
}
const state = states.get(id)
if (change.doDelete) {
if (state === "created") {
states.set(id, "unchanged")
} else {
states.set(id, "deleted")
}
}
const obj = objects.get(id)
// Apply tag changes
for (const kv of change.tags ?? []) {
const k = kv.k
let v = kv.v
if (v === "") {
v = undefined
}
const oldV = obj.tags[k]
if (oldV === v) {
continue
}
obj.tags[k] = v
changed = true
}
if (change.changes !== undefined) {
switch (change.type) {
case "node":
// @ts-ignore
const nlat = Utils.Round7(change.changes.lat)
// @ts-ignore
const nlon = Utils.Round7(change.changes.lon)
const n = <OsmNode>obj
if (n.lat !== nlat || n.lon !== nlon) {
console.log("Node moved:", n.lat, nlat, n.lon, nlon)
n.lat = nlat
n.lon = nlon
changed = true
}
break
case "way":
const nnodes = change.changes["nodes"]
const w = <OsmWay>obj
if (!Utils.Identical(nnodes, w.nodes)) {
w.nodes = nnodes
changed = true
}
break
case "relation":
const nmembers: {
type: "node" | "way" | "relation"
ref: number
role: string
}[] = change.changes["members"]
const r = <OsmRelation>obj
if (
!Utils.Identical(nmembers, r.members, (a, b) => {
return a.role === b.role && a.type === b.type && a.ref === b.ref
})
) {
r.members = nmembers
changed = true
}
break
}
}
if (changed && states.get(id) === "unchanged") {
states.set(id, "modified")
}
}
const result = {
newObjects: [],
modifiedObjects: [],
deletedObjects: [],
}
objects.forEach((v, id) => {
const state = states.get(id)
if (state === "created") {
result.newObjects.push(v)
}
if (state === "modified") {
result.modifiedObjects.push(v)
}
if (state === "deleted") {
result.deletedObjects.push(v)
}
})
console.debug(
"Calculated the pending changes: ",
result.newObjects.length,
"new; ",
result.modifiedObjects.length,
"modified;",
result.deletedObjects.length,
"deleted"
)
return result
}
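/**
* Determines how close the contributor physically was to the changed object(s), based on recently recorded GPS locations
* Returns the smallest distance found; returns undefined if no recent locations are known or if the action should not be tracked
*/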
private calculateDistanceToChanges(
change: OsmChangeAction,
changeDescriptions: ChangeDescription[]
) {
const locations = this.historicalUserLocations?.features?.data
if (locations === undefined) {
// No state loaded or no locations -> we can't calculate...
return
}
if (!change.trackStatistics) {
// Probably irrelevant, such as a new helper node
return
}
const now = new Date()
const recentLocationPoints = locations
.filter((feat) => feat.geometry.type === "Point")
.filter((feat) => {
const visitTime = new Date(
(<GeoLocationPointProperties>(<any>feat.properties)).date
)
// In seconds
const diff = (now.getTime() - visitTime.getTime()) / 1000
return diff < Constants.nearbyVisitTime
})
if (recentLocationPoints.length === 0) {
// Probably no GPS enabled/no fix
return
}
// The applicable points, contain information in their properties about location, time and GPS accuracy
// They are all GeoLocationPointProperties
// We walk every change and determine the closest distance possible
// Only if the change itself does _not_ contain any coordinates do we fall back and look up the original feature in the state
const changedObjectCoordinates: [number, number][] = []
{
const feature = this.state.allElements?.featuresById?.data.get(change.mainObjectId)
if (feature !== undefined) {
changedObjectCoordinates.push(GeoOperations.centerpointCoordinates(feature))
}
}
for (const changeDescription of changeDescriptions) {
const chng:
| { lat: number; lon: number }
| { coordinates: [number, number][] }
| { members } = changeDescription.changes
if (chng === undefined) {
continue
}
if (chng["lat"] !== undefined) {
changedObjectCoordinates.push([chng["lat"], chng["lon"]])
}
if (chng["coordinates"] !== undefined) {
changedObjectCoordinates.push(...chng["coordinates"])
}
}
return Math.min(
...changedObjectCoordinates.map((coor) =>
Math.min(
...recentLocationPoints.map((gpsPoint) => {
const otherCoor = GeoOperations.centerpointCoordinates(gpsPoint)
return GeoOperations.distanceBetween(coor, otherCoor)
})
)
)
)
}
/**
* Upload the selected changes to OSM.
* Returns 'true' if successful and if they can be removed
*/
private async flushSelectChanges(
pending: ChangeDescription[],
openChangeset: UIEventSource<number>
): Promise<boolean> {
const self = this
const neededIds = Changes.GetNeededIds(pending)
// We _do not_ pass in the Changes object itself - we want the data from OSM directly in order to apply the changes
const downloader = new OsmObjectDownloader(this.backend, undefined)
let osmObjects = await Promise.all<{ id: string; osmObj: OsmObject | "deleted" }>(
neededIds.map(async (id) => {
try {
const osmObj = await downloader.DownloadObjectAsync(id)
return { id, osmObj }
} catch (e) {
console.error(
"Could not download OSM-object",
id,
" dropping it from the changes (" + e + ")"
)
return undefined
}
})
)
osmObjects = Utils.NoNull(osmObjects)
for (const { osmObj, id } of osmObjects) {
if (osmObj === "deleted") {
pending = pending.filter((ch) => ch.type + "/" + ch.id !== id)
}
}
const objects = osmObjects
.filter((obj) => obj.osmObj !== "deleted")
.map((obj) => <OsmObject>obj.osmObj)
if (this._leftRightSensitive) {
objects.forEach((obj) => SimpleMetaTagger.removeBothTagging(obj.tags))
}
console.log("Got the fresh objects!", objects, "pending: ", pending)
if (pending.length == 0) {
console.log("No pending changes...")
return true
}
const perType = Array.from(
Utils.Hist(
pending
.filter(
(descr) =>
descr.meta.changeType !== undefined && descr.meta.changeType !== null
)
.map((descr) => descr.meta.changeType)
),
([key, count]) => ({
key: key,
value: count,
aggregate: true,
})
)
const motivations = pending
.filter((descr) => descr.meta.specialMotivation !== undefined)
.map((descr) => ({
key: descr.meta.changeType + ":" + descr.type + "/" + descr.id,
value: descr.meta.specialMotivation,
}))
const distances = Utils.NoNull(pending.map((descr) => descr.meta.distanceToObject))
distances.sort((a, b) => a - b)
const perBinCount = Constants.distanceToChangeObjectBins.map((_) => 0)
let j = 0
const maxDistances = Constants.distanceToChangeObjectBins
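// Build a histogram: perBinCount[i] counts how many changes were made within maxDistances[i] meter of the contributor;
// a bin of Number.MAX_VALUE acts as a catch-all for everything further away (see the key construction below)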
for (let i = 0; i < maxDistances.length; i++) {
const maxDistance = maxDistances[i]
// distances is sorted in ascending order, so as soon as one is too big, all the remaining elements will be bigger too
while (j < distances.length && distances[j] < maxDistance) {
perBinCount[i]++
j++
}
}
const perBinMessage = Utils.NoNull(
perBinCount.map((count, i) => {
if (count === 0) {
return undefined
}
const maxD = maxDistances[i]
let key = `change_within_${maxD}m`
if (maxD === Number.MAX_VALUE) {
key = `change_over_${maxDistances[i - 1]}m`
}
return {
key,
value: count,
aggregate: true,
}
})
)
// This method is only called with the ChangeDescriptions of a single theme
const theme = pending[0].meta.theme
let comment = "Adding data with #MapComplete for theme #" + theme
if (this.extraComment.data !== undefined) {
comment += "\n\n" + this.extraComment.data
}
const metatags: ChangesetTag[] = [
{
key: "comment",
value: comment,
},
{
key: "theme",
value: theme,
},
...perType,
...motivations,
...perBinMessage,
]
await this._changesetHandler.UploadChangeset(
(csId, remappings) => {
if (remappings.size > 0) {
console.log("Rewriting pending changes from", pending, "with", remappings)
pending = pending.map((ch) => ChangeDescriptionTools.rewriteIds(ch, remappings))
console.log("Result is", pending)
}
const changes: {
newObjects: OsmObject[]
modifiedObjects: OsmObject[]
deletedObjects: OsmObject[]
} = self.CreateChangesetObjects(pending, objects)
return Changes.createChangesetFor("" + csId, changes)
},
metatags,
openChangeset
)
console.log("Upload successfull!")
return true
}
private async flushChangesAsync(): Promise<void> {
const self = this
try {
// At last, we build the changeset and upload
const pending = self.pendingChanges.data
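// The pending changes are grouped per theme, as every theme keeps its own open changeset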
const pendingPerTheme = new Map<string, ChangeDescription[]>()
for (const changeDescription of pending) {
const theme = changeDescription.meta.theme
if (!pendingPerTheme.has(theme)) {
pendingPerTheme.set(theme, [])
}
pendingPerTheme.get(theme).push(changeDescription)
}
const successes = await Promise.all(
Array.from(pendingPerTheme, async ([theme, pendingChanges]) => {
try {
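// The id of the currently open changeset is stored as an OSM preference (a string),
// so it is converted to a number here (and back to a string when writing)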
const openChangeset = this.state.osmConnection
.GetPreference("current-open-changeset-" + theme)
.sync(
(str) => {
const n = Number(str)
if (isNaN(n)) {
return undefined
}
return n
},
[],
(n) => "" + n
)
console.log(
"Using current-open-changeset-" +
theme +
" from the preferences, got " +
openChangeset.data
)
return await self.flushSelectChanges(pendingChanges, openChangeset)
} catch (e) {
console.error("Could not upload some changes:", e)
return false
}
})
)
if (!successes.some((s) => s == false)) {
// All changes successful, so we clear the data!
this.pendingChanges.setData([])
}
} catch (e) {
console.error(
"Could not handle changes - probably an old, pending changeset in localstorage with an invalid format; erasing those",
e
)
self.pendingChanges.setData([])
} finally {
self.isUploading.setData(false)
}
}
}

View file

@ -0,0 +1,407 @@
import escapeHtml from "escape-html"
import UserDetails, { OsmConnection } from "./OsmConnection"
import { Store, UIEventSource } from "../UIEventSource"
import Locale from "../../UI/i18n/Locale"
import Constants from "../../Models/Constants"
import { Changes } from "./Changes"
import { Utils } from "../../Utils"
export interface ChangesetTag {
key: string
value: string | number
aggregate?: boolean
}
export class ChangesetHandler {
private readonly allElements: { addAlias: (id0: string, id1: string) => void }
private osmConnection: OsmConnection
private readonly changes: Changes
private readonly _dryRun: Store<boolean>
private readonly userDetails: UIEventSource<UserDetails>
private readonly backend: string
/**
* Contains previously rewritten IDs
* @private
*/
private readonly _remappings = new Map<string, string>()
constructor(
dryRun: Store<boolean>,
osmConnection: OsmConnection,
allElements: { addAlias: (id0: string, id1: string) => void } | undefined,
changes: Changes
) {
this.osmConnection = osmConnection
this.allElements = allElements
this.changes = changes
this._dryRun = dryRun
this.userDetails = osmConnection.userDetails
this.backend = osmConnection._oauth_config.url
if (dryRun) {
console.log("DRYRUN ENABLED")
}
}
/**
* Creates a new list which contains every key at most once
*
* ChangesetHandler.removeDuplicateMetaTags([{key: "k", value: "v"}, {key: "k0", value: "v0"}, {key: "k", value:"v"}] // => [{key: "k", value: "v"}, {key: "k0", value: "v0"}]
*/
private static removeDuplicateMetaTags(extraMetaTags: ChangesetTag[]): ChangesetTag[] {
const r: ChangesetTag[] = []
const seen = new Set<string>()
for (const extraMetaTag of extraMetaTags) {
if (seen.has(extraMetaTag.key)) {
continue
}
r.push(extraMetaTag)
seen.add(extraMetaTag.key)
}
return r
}
/**
* Inplace rewrite of extraMetaTags
* If the metatags contain a special motivation of the format "<change-type>:node/-<number>", this method will rewrite this negative number to the actual ID
* The key is changed _in place_; true will be returned if a change has been applied
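*
* // Illustrative example (hypothetical rewrite of "node/-1" to "node/42"):
* const tags = [{key: "import:node/-1", value: "yes"}]
* ChangesetHandler.rewriteMetaTags(tags, new Map([["node/-1", "node/42"]])) // => true
* tags[0].key // => "import:node/42"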
* @param extraMetaTags
* @param rewriteIds
* @public for testing purposes
*/
public static rewriteMetaTags(extraMetaTags: ChangesetTag[], rewriteIds: Map<string, string>) {
let hasChange = false
for (const tag of extraMetaTags) {
const match = tag.key.match(/^([a-zA-Z0-9_]+):(node\/-[0-9])$/)
if (match == null) {
continue
}
// This is a special motivation which has a negative ID -> we check for rewrites
const [_, reason, id] = match
if (rewriteIds.has(id)) {
tag.key = reason + ":" + rewriteIds.get(id)
hasChange = true
}
}
return hasChange
}
/**
* The full logic to upload a change to one or more elements.
*
* This method will attempt to reuse an existing, open changeset for this theme (or open one if none available).
* Then, it will upload a changes-xml within this changeset (and leave the changeset open)
* When the upload is successful, any necessary id-rewriting will be handled (aka: don't worry about that)
*
* If 'dryrun' is specified, the changeset XML will be printed to console instead of being uploaded
*
*/
public async UploadChangeset(
generateChangeXML: (csid: number, remappings: Map<string, string>) => string,
extraMetaTags: ChangesetTag[],
openChangeset: UIEventSource<number>
): Promise<void> {
if (
!extraMetaTags.some((tag) => tag.key === "comment") ||
!extraMetaTags.some((tag) => tag.key === "theme")
) {
throw "The meta tags should at least contain a `comment` and a `theme`"
}
extraMetaTags = [...extraMetaTags, ...this.defaultChangesetTags()]
extraMetaTags = ChangesetHandler.removeDuplicateMetaTags(extraMetaTags)
if (this.userDetails.data.csCount == 0) {
// The user became a contributor!
this.userDetails.data.csCount = 1
this.userDetails.ping()
}
if (this._dryRun.data) {
const changesetXML = generateChangeXML(123456, this._remappings)
console.log("Metatags are", extraMetaTags)
console.log(changesetXML)
return
}
if (openChangeset.data === undefined) {
// We have to open a new changeset
try {
const csId = await this.OpenChangeset(extraMetaTags)
openChangeset.setData(csId)
const changeset = generateChangeXML(csId, this._remappings)
console.log(
"Opened a new changeset (openChangeset.data is undefined):",
changeset,
extraMetaTags
)
const changes = await this.UploadChange(csId, changeset)
const hasSpecialMotivationChanges = ChangesetHandler.rewriteMetaTags(
extraMetaTags,
changes
)
if (hasSpecialMotivationChanges) {
// At this point, 'extraMetaTags' will have changed - we need to set the tags again
await this.UpdateTags(csId, extraMetaTags)
}
} catch (e) {
console.error("Could not open/upload changeset due to ", e)
openChangeset.setData(undefined)
}
} else {
// There still exists an open changeset (or at least we hope so)
// Let's check!
const csId = openChangeset.data
try {
const oldChangesetMeta = await this.GetChangesetMeta(csId)
if (!oldChangesetMeta.open) {
// Mark the CS as closed...
console.log("Could not fetch the metadata from the already open changeset")
openChangeset.setData(undefined)
// ... and try again. As the cs is closed, no recursive loop can exist
await this.UploadChangeset(generateChangeXML, extraMetaTags, openChangeset)
return
}
const rewritings = await this.UploadChange(
csId,
generateChangeXML(csId, this._remappings)
)
const rewrittenTags = this.RewriteTagsOf(
extraMetaTags,
rewritings,
oldChangesetMeta
)
await this.UpdateTags(csId, rewrittenTags)
} catch (e) {
console.warn("Could not upload, changeset is probably closed: ", e)
openChangeset.setData(undefined)
}
}
}
/**
* Given an existing changeset with metadata and extraMetaTags to add, will fuse them to a new set of metatags
* Does not yet send data
* @param extraMetaTags: new changeset tags to add/fuse with this changeset
* @param rewriteIds: the mapping of ids
* @param oldChangesetMeta: the metadata-object of the already existing changeset
*
* @public for testing purposes
*/
public RewriteTagsOf(
extraMetaTags: ChangesetTag[],
rewriteIds: Map<string, string>,
oldChangesetMeta: {
open: boolean
id: number
uid: number // User ID
changes_count: number
tags: any
}
): ChangesetTag[] {
// Note: extraMetaTags is where all the tags are collected into
// 'extraTagsById' is the same as 'extraMetaTags', but indexed by key
// Note that updates via 'extraTagsById.get(<key>).value = XYZ' are shared with extraMetaTags
const extraTagsById = new Map<string, ChangesetTag>()
for (const extraMetaTag of extraMetaTags) {
extraTagsById.set(extraMetaTag.key, extraMetaTag)
}
const oldCsTags = oldChangesetMeta.tags
for (const key in oldCsTags) {
const newMetaTag = extraTagsById.get(key)
const existingValue = oldCsTags[key]
if (newMetaTag !== undefined && newMetaTag.value === existingValue) {
continue
}
if (newMetaTag === undefined) {
extraMetaTags.push({
key: key,
value: oldCsTags[key],
})
continue
}
if (newMetaTag.aggregate) {
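// Aggregatable tags (typically counts) are summed with the value that is already present on the changeset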
let n = Number(newMetaTag.value)
if (isNaN(n)) {
n = 0
}
let o = Number(oldCsTags[key])
if (isNaN(o)) {
o = 0
}
// We _update_ the tag itself, as it'll be updated in 'extraMetaTags' straight away
newMetaTag.value = "" + (n + o)
} else {
// The old value is overwritten, thus we drop this old key
}
}
ChangesetHandler.rewriteMetaTags(extraMetaTags, rewriteIds)
return extraMetaTags
}
/**
* Parses a single id-rewrite from a diff-result XML element and returns it as a mapping [old id, new id],
* or undefined if the id did not change
* @param node: the XML-element, e.g. <node old_id="-1" new_id="9650458521" new_version="1"/>
* @param type
* @private
*/
private static parseIdRewrite(node: any, type: string): [string, string] {
const oldId = parseInt(node.attributes.old_id.value)
if (node.attributes.new_id === undefined) {
return [type + "/" + oldId, undefined]
}
const newId = parseInt(node.attributes.new_id.value)
// The actual mapping
const result: [string, string] = [type + "/" + oldId, type + "/" + newId]
if (oldId === newId) {
return undefined
}
return result
}
/**
* Given a diff-result XML of the form
* <diffResult version="0.6">
* <node old_id="-1" new_id="9650458521" new_version="1"/>
* <way old_id="-2" new_id="1050127772" new_version="1"/>
* </diffResult>,
* will:
*
* - create a mapping `{'node/-1' --> "node/9650458521", 'way/-2' --> "way/1050127772"}`
* - register the new ids as aliases in the allElements-store
* - remember the rewrites internally so that later changes can be remapped
* @param response
* @private
*/
private parseUploadChangesetResponse(response: XMLDocument): Map<string, string> {
const nodes = response.getElementsByTagName("node")
const mappings: [string, string][] = []
for (const node of Array.from(nodes)) {
const mapping = ChangesetHandler.parseIdRewrite(node, "node")
if (mapping !== undefined) {
mappings.push(mapping)
}
}
const ways = response.getElementsByTagName("way")
for (const way of Array.from(ways)) {
const mapping = ChangesetHandler.parseIdRewrite(way, "way")
if (mapping !== undefined) {
mappings.push(mapping)
}
}
for (const mapping of mappings) {
const [oldId, newId] = mapping
this.allElements?.addAlias(oldId, newId)
if (newId !== undefined) {
this._remappings.set(mapping[0], mapping[1])
}
}
return new Map<string, string>(mappings)
}
// noinspection JSUnusedLocalSymbols
private async CloseChangeset(changesetId: number = undefined): Promise<void> {
if (changesetId === undefined) {
return
}
await this.osmConnection.put("changeset/" + changesetId + "/close")
console.log("Closed changeset ", changesetId)
}
private async GetChangesetMeta(csId: number): Promise<{
id: number
open: boolean
uid: number
changes_count: number
tags: any
}> {
const url = `${this.backend}/api/0.6/changeset/${csId}`
const csData = await Utils.downloadJson(url)
return csData.elements[0]
}
/**
* Puts the specified tags onto the changesets as they are.
* This method will erase previously set tags
*/
private async UpdateTags(csId: number, tags: ChangesetTag[]) {
tags = ChangesetHandler.removeDuplicateMetaTags(tags)
tags = Utils.NoNull(tags).filter(
(tag) =>
tag.key !== undefined &&
tag.value !== undefined &&
tag.key !== "" &&
tag.value !== ""
)
const metadata = tags.map((kv) => `<tag k="${kv.key}" v="${escapeHtml(kv.value)}"/>`)
const content = [`<osm><changeset>`, metadata, `</changeset></osm>`].join("")
return this.osmConnection.put("changeset/" + csId, content, { "Content-Type": "text/xml" })
}
private defaultChangesetTags(): ChangesetTag[] {
return [
["created_by", `MapComplete ${Constants.vNumber}`],
["locale", Locale.language.data],
["host", `${window.location.origin}${window.location.pathname}`],
[
"source",
this.changes.state["currentUserLocation"]?.features?.data?.length > 0
? "survey"
: undefined,
],
["imagery", this.changes.state["backgroundLayer"]?.data?.id],
].map(([key, value]) => ({
key,
value,
aggregate: false,
}))
}
/**
* Opens a changeset with the specified tags
* @param changesetTags
* @constructor
* @private
*/
private async OpenChangeset(changesetTags: ChangesetTag[]): Promise<number> {
const metadata = changesetTags
.map((cstag) => [cstag.key, cstag.value])
.filter((kv) => (kv[1] ?? "") !== "")
.map((kv) => `<tag k="${kv[0]}" v="${escapeHtml(kv[1])}"/>`)
.join("\n")
const csId = await this.osmConnection.put(
"changeset/create",
[`<osm><changeset>`, metadata, `</changeset></osm>`].join(""),
{ "Content-Type": "text/xml" }
)
return Number(csId)
}
/**
* Upload a changesetXML
*/
private async UploadChange(
changesetId: number,
changesetXML: string
): Promise<Map<string, string>> {
const response = await this.osmConnection.post(
"changeset/" + changesetId + "/upload",
changesetXML,
{ "Content-Type": "text/xml" }
)
const changes = this.parseUploadChangesetResponse(response)
console.log("Uploaded changeset ", changesetId)
return changes
}
}

View file

@ -0,0 +1,30 @@
import { Utils } from "../../Utils"
import { BBox } from "../BBox"
export interface GeoCodeResult {
display_name: string
lat: number
lon: number
/**
* Format:
* [lat, lat, lon, lon]
*/
boundingbox: number[]
osm_type: "node" | "way" | "relation"
osm_id: string
}
export class Geocoding {
private static readonly host = "https://nominatim.openstreetmap.org/search?"
static async Search(query: string, bbox: BBox): Promise<GeoCodeResult[]> {
const b = bbox ?? BBox.global
const url =
Geocoding.host +
"format=json&limit=1&viewbox=" +
`${b.getEast()},${b.getNorth()},${b.getWest()},${b.getSouth()}` +
"&accept-language=nl&q=" +
query
return Utils.downloadJson(url)
}
}

View file

@ -0,0 +1,536 @@
import osmAuth from "osm-auth"
import { Store, Stores, UIEventSource } from "../UIEventSource"
import { OsmPreferences } from "./OsmPreferences"
import { Utils } from "../../Utils"
export default class UserDetails {
public loggedIn = false
public name = "Not logged in"
public uid: number
public csCount = 0
public img?: string
public unreadMessages = 0
public totalMessages: number = 0
public home: { lon: number; lat: number }
public backend: string
public account_created: string
public tracesCount: number = 0
public description: string
constructor(backend: string) {
this.backend = backend
}
}
export type OsmServiceState = "online" | "readonly" | "offline" | "unknown" | "unreachable"
export class OsmConnection {
public static readonly oauth_configs = {
osm: {
oauth_consumer_key: "hivV7ec2o49Two8g9h8Is1VIiVOgxQ1iYexCbvem",
oauth_secret: "wDBRTCem0vxD7txrg1y6p5r8nvmz8tAhET7zDASI",
url: "https://www.openstreetmap.org",
// OAUTH 1.0 application
// https://www.openstreetmap.org/user/Pieter%20Vander%20Vennet/oauth_clients/7404
},
"osm-test": {
oauth_consumer_key: "Zgr7EoKb93uwPv2EOFkIlf3n9NLwj5wbyfjZMhz2",
oauth_secret: "3am1i1sykHDMZ66SGq4wI2Z7cJMKgzneCHp3nctn",
url: "https://master.apis.dev.openstreetmap.org",
},
}
public auth
public userDetails: UIEventSource<UserDetails>
public isLoggedIn: Store<boolean>
public gpxServiceIsOnline: UIEventSource<OsmServiceState> = new UIEventSource<OsmServiceState>(
"unknown"
)
public apiIsOnline: UIEventSource<OsmServiceState> = new UIEventSource<OsmServiceState>(
"unknown"
)
public loadingStatus = new UIEventSource<"not-attempted" | "loading" | "error" | "logged-in">(
"not-attempted"
)
public preferencesHandler: OsmPreferences
public readonly _oauth_config: {
oauth_consumer_key: string
oauth_secret: string
url: string
}
private readonly _dryRun: Store<boolean>
private fakeUser: boolean
private _onLoggedIn: ((userDetails: UserDetails) => void)[] = []
private readonly _iframeMode: boolean
private readonly _singlePage: boolean
private isChecking = false
constructor(options?: {
dryRun?: Store<boolean>
fakeUser?: false | boolean
oauth_token?: UIEventSource<string>
// Used to keep multiple changesets open and to write to the correct changeset
singlePage?: boolean
osmConfiguration?: "osm" | "osm-test"
attemptLogin?: true | boolean
}) {
options = options ?? {}
this.fakeUser = options.fakeUser ?? false
this._singlePage = options.singlePage ?? true
this._oauth_config =
OsmConnection.oauth_configs[options.osmConfiguration ?? "osm"] ??
OsmConnection.oauth_configs.osm
console.debug("Using backend", this._oauth_config.url)
this._iframeMode = Utils.runningFromConsole ? false : window !== window.top
this.userDetails = new UIEventSource<UserDetails>(
new UserDetails(this._oauth_config.url),
"userDetails"
)
if (options.fakeUser) {
const ud = this.userDetails.data
ud.csCount = 5678
ud.loggedIn = true
ud.unreadMessages = 0
ud.name = "Fake user"
ud.totalMessages = 42
}
const self = this
this.UpdateCapabilities()
this.isLoggedIn = this.userDetails.map(
(user) =>
user.loggedIn &&
(self.apiIsOnline.data === "unknown" || self.apiIsOnline.data === "online"),
[this.apiIsOnline]
)
this.isLoggedIn.addCallback((isLoggedIn) => {
if (self.userDetails.data.loggedIn == false && isLoggedIn == true) {
// We have an inconsistency: the userdetails say we _didn't_ log in, but this actor says we do
// This means someone attempted to toggle this; so we attempt to login!
self.AttemptLogin()
}
})
this._dryRun = options.dryRun ?? new UIEventSource<boolean>(false)
this.updateAuthObject()
this.preferencesHandler = new OsmPreferences(
this.auth,
<any /*This is needed to make the tests work*/>this
)
if (options.oauth_token?.data !== undefined) {
console.log(options.oauth_token.data)
const self = this
this.auth.bootstrapToken(
options.oauth_token.data,
(x) => {
console.log("Called back: ", x)
self.AttemptLogin()
},
this.auth
)
options.oauth_token.setData(undefined)
}
if (this.auth.authenticated() && options.attemptLogin !== false) {
this.AttemptLogin() // Also updates the user badge
} else {
console.log("Not authenticated")
}
}
public GetPreference(
key: string,
defaultValue: string = undefined,
options?: {
documentation?: string
prefix?: string
}
): UIEventSource<string> {
return this.preferencesHandler.GetPreference(key, defaultValue, options)
}
public GetLongPreference(key: string, prefix: string = "mapcomplete-"): UIEventSource<string> {
return this.preferencesHandler.GetLongPreference(key, prefix)
}
public OnLoggedIn(action: (userDetails: UserDetails) => void) {
this._onLoggedIn.push(action)
}
public LogOut() {
this.auth.logout()
this.userDetails.data.loggedIn = false
this.userDetails.data.csCount = 0
this.userDetails.data.name = ""
this.userDetails.ping()
console.log("Logged out")
this.loadingStatus.setData("not-attempted")
}
/**
* The backend host, without path or trailing '/'
*
* new OsmConnection().Backend() // => "https://www.openstreetmap.org"
*/
public Backend(): string {
return this._oauth_config.url
}
public AttemptLogin() {
this.UpdateCapabilities()
this.loadingStatus.setData("loading")
if (this.fakeUser) {
this.loadingStatus.setData("logged-in")
console.log("AttemptLogin called, but ignored as fakeUser is set")
return
}
const self = this
console.log("Trying to log in...")
this.updateAuthObject()
this.auth.xhr(
{
method: "GET",
path: "/api/0.6/user/details",
},
function (err, details) {
if (err != null) {
console.log(err)
self.loadingStatus.setData("error")
if (err.status == 401) {
console.log("Clearing tokens...")
// Not authorized - our token probably got revoked
// Reset all the tokens
const tokens = [
"https://www.openstreetmap.orgoauth_request_token_secret",
"https://www.openstreetmap.orgoauth_token",
"https://www.openstreetmap.orgoauth_token_secret",
]
tokens.forEach((token) => localStorage.removeItem(token))
}
return
}
if (details == null) {
self.loadingStatus.setData("error")
return
}
self.CheckForMessagesContinuously()
// details is an XML DOM of user details
let userInfo = details.getElementsByTagName("user")[0]
// let moreDetails = new DOMParser().parseFromString(userInfo.innerHTML, "text/xml");
let data = self.userDetails.data
data.loggedIn = true
console.log("Login completed, userinfo is ", userInfo)
data.name = userInfo.getAttribute("display_name")
data.account_created = userInfo.getAttribute("account_created")
data.uid = Number(userInfo.getAttribute("id"))
data.csCount = Number.parseInt(
userInfo.getElementsByTagName("changesets")[0].getAttribute("count") ?? 0
)
data.tracesCount = Number.parseInt(
userInfo.getElementsByTagName("traces")[0].getAttribute("count") ?? 0
)
data.img = undefined
const imgEl = userInfo.getElementsByTagName("img")
if (imgEl !== undefined && imgEl[0] !== undefined) {
data.img = imgEl[0].getAttribute("href")
}
const description = userInfo.getElementsByTagName("description")
if (description !== undefined && description[0] !== undefined) {
data.description = description[0]?.innerHTML
}
const homeEl = userInfo.getElementsByTagName("home")
if (homeEl !== undefined && homeEl[0] !== undefined) {
const lat = parseFloat(homeEl[0].getAttribute("lat"))
const lon = parseFloat(homeEl[0].getAttribute("lon"))
data.home = { lat: lat, lon: lon }
}
self.loadingStatus.setData("logged-in")
const messages = userInfo
.getElementsByTagName("messages")[0]
.getElementsByTagName("received")[0]
data.unreadMessages = parseInt(messages.getAttribute("unread"))
data.totalMessages = parseInt(messages.getAttribute("count"))
self.userDetails.ping()
for (const action of self._onLoggedIn) {
action(self.userDetails.data)
}
self._onLoggedIn = []
}
)
}
/**
* Interact with the API.
*
* @param path: the path to query, without host and without '/api/0.6'. Example 'notes/1234/close'
*/
public async interact(
path: string,
method: "GET" | "POST" | "PUT" | "DELETE",
header?: Record<string, string | number>,
content?: string
): Promise<any> {
return new Promise((ok, error) => {
this.auth.xhr(
{
method,
options: {
header,
},
content,
path: `/api/0.6/${path}`,
},
function (err, response) {
if (err !== null) {
error(err)
} else {
ok(response)
}
}
)
})
}
public async post(
path: string,
content?: string,
header?: Record<string, string | number>
): Promise<any> {
return await this.interact(path, "POST", header, content)
}
public async put(
path: string,
content?: string,
header?: Record<string, string | number>
): Promise<any> {
return await this.interact(path, "PUT", header, content)
}
public async get(path: string, header?: Record<string, string | number>): Promise<any> {
return await this.interact(path, "GET", header)
}
public closeNote(id: number | string, text?: string): Promise<void> {
let textSuffix = ""
if ((text ?? "") !== "") {
textSuffix = "?text=" + encodeURIComponent(text)
}
if (this._dryRun.data) {
console.warn("Dryrun enabled - not actually closing note ", id, " with text ", text)
return new Promise((ok) => {
ok()
})
}
return this.post(`notes/${id}/close${textSuffix}`)
}
public reopenNote(id: number | string, text?: string): Promise<void> {
if (this._dryRun.data) {
console.warn("Dryrun enabled - not actually reopening note ", id, " with text ", text)
return new Promise((ok) => {
ok()
})
}
let textSuffix = ""
if ((text ?? "") !== "") {
textSuffix = "?text=" + encodeURIComponent(text)
}
return this.post(`notes/${id}/reopen${textSuffix}`)
}
public async openNote(lat: number, lon: number, text: string): Promise<{ id: number }> {
if (this._dryRun.data) {
console.warn("Dryrun enabled - not actually opening note with text ", text)
return new Promise<{ id: number }>((ok) => {
window.setTimeout(
() => ok({ id: Math.floor(Math.random() * 1000) }),
Math.random() * 5000
)
})
}
const auth = this.auth
const content = { lat, lon, text }
const response = await this.post("notes.json", JSON.stringify(content), {
"Content-Type": "application/json",
})
const parsed = JSON.parse(response)
const id = parsed.properties
console.log("OPENED NOTE", id)
return id
}
public async uploadGpxTrack(
gpx: string,
options: {
description: string
visibility: "private" | "public" | "trackable" | "identifiable"
filename?: string
/**
* Some words to give some properties;
*
* Note: these are called 'tags' on the wiki, but I opted to name them 'labels' instead as they aren't "key=value" tags, but just words.
*/
labels: string[]
}
): Promise<{ id: number }> {
if (this._dryRun.data) {
console.warn("Dryrun enabled - not actually uploading GPX ", gpx)
return new Promise<{ id: number }>((ok, error) => {
window.setTimeout(
() => ok({ id: Math.floor(Math.random() * 1000) }),
Math.random() * 5000
)
})
}
const contents = {
file: gpx,
description: options.description ?? "",
tags: options.labels?.join(",") ?? "",
visibility: options.visibility,
}
const extras = {
file:
'; filename="' +
(options.filename ?? "gpx_track_mapcomplete_" + new Date().toISOString()) +
'"\r\nContent-Type: application/gpx+xml',
}
const auth = this.auth
const boundary = "987654"
let body = ""
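// Hand-rolled multipart/form-data body: every entry of 'contents' becomes one part, separated by the boundary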
for (const key in contents) {
body += "--" + boundary + "\r\n"
body += 'Content-Disposition: form-data; name="' + key + '"'
if (extras[key] !== undefined) {
body += extras[key]
}
body += "\r\n\r\n"
body += contents[key] + "\r\n"
}
body += "--" + boundary + "--\r\n"
const response = await this.post("gpx/create", body, {
"Content-Type": "multipart/form-data; boundary=" + boundary,
"Content-Length": body.length,
})
const parsed = JSON.parse(response)
console.log("Uploaded GPX track", parsed)
return { id: parsed }
}
public addCommentToNote(id: number | string, text: string): Promise<void> {
if (this._dryRun.data) {
console.warn("Dryrun enabled - not actually adding comment ", text, "to note ", id)
return new Promise((ok) => {
ok()
})
}
if ((text ?? "") === "") {
throw "Invalid text!"
}
return new Promise((ok, error) => {
this.auth.xhr(
{
method: "POST",
path: `/api/0.6/notes/${id}/comment?text=${encodeURIComponent(text)}`,
},
function (err, _) {
if (err !== null) {
error(err)
} else {
ok()
}
}
)
})
}
private updateAuthObject() {
let pwaStandAloneMode = false
try {
if (Utils.runningFromConsole) {
pwaStandAloneMode = true
} else if (
window.matchMedia("(display-mode: standalone)").matches ||
window.matchMedia("(display-mode: fullscreen)").matches
) {
pwaStandAloneMode = true
}
} catch (e) {
console.warn(
"Detecting standalone mode failed",
e,
". Assuming in browser and not worrying furhter"
)
}
const standalone = this._iframeMode || pwaStandAloneMode || !this._singlePage
// In standalone mode, we DON'T use single page login, as 'redirecting' opens a new window anyway...
// Same for an iframe...
this.auth = new osmAuth({
oauth_consumer_key: this._oauth_config.oauth_consumer_key,
oauth_secret: this._oauth_config.oauth_secret,
url: this._oauth_config.url,
landing: standalone ? undefined : window.location.href,
singlepage: !standalone,
auto: true,
})
}
private CheckForMessagesContinuously() {
const self = this
if (this.isChecking) {
return
}
this.isChecking = true
Stores.Chronic(5 * 60 * 1000).addCallback((_) => {
if (self.isLoggedIn.data) {
console.log("Checking for messages")
self.AttemptLogin()
}
})
}
private UpdateCapabilities(): void {
const self = this
this.FetchCapabilities().then(({ api, gpx }) => {
self.apiIsOnline.setData(api)
self.gpxServiceIsOnline.setData(gpx)
})
}
private async FetchCapabilities(): Promise<{ api: OsmServiceState; gpx: OsmServiceState }> {
if (Utils.runningFromConsole) {
return { api: "online", gpx: "online" }
}
const result = await Utils.downloadAdvanced(this.Backend() + "/api/0.6/capabilities")
if (result["content"] === undefined) {
console.log("Something went wrong:", result)
return { api: "unreachable", gpx: "unreachable" }
}
const xmlRaw = result["content"]
const parsed = new DOMParser().parseFromString(xmlRaw, "text/xml")
const statusEl = parsed.getElementsByTagName("status")[0]
const api = <OsmServiceState>statusEl.getAttribute("api")
const gpx = <OsmServiceState>statusEl.getAttribute("gpx")
return { api, gpx }
}
}

src/Logic/Osm/OsmObject.ts
View file

@ -0,0 +1,396 @@
import { Utils } from "../../Utils"
import polygon_features from "../../assets/polygon-features.json"
import { OsmFeature, OsmId, OsmTags, WayId } from "../../Models/OsmFeature"
import OsmToGeoJson from "osmtogeojson"
import { Feature, LineString, Polygon } from "geojson"
export abstract class OsmObject {
private static defaultBackend = "https://www.openstreetmap.org/"
protected static backendURL = OsmObject.defaultBackend
private static polygonFeatures = OsmObject.constructPolygonFeatures()
type: "node" | "way" | "relation"
id: number
/**
* The OSM tags as simple object
*/
tags: OsmTags & { id: OsmId }
version: number
public changed: boolean = false
timestamp: Date
protected constructor(type: string, id: number) {
this.id = id
// @ts-ignore
this.type = type
this.tags = {
id: `${this.type}/${id}`,
}
}
public static ParseObjects(elements: any[]): OsmObject[] {
const objects: OsmObject[] = []
const allNodes: Map<number, OsmNode> = new Map<number, OsmNode>()
for (const element of elements) {
const type = element.type
const idN = element.id
let osmObject: OsmObject = null
switch (type) {
case "node":
const node = new OsmNode(idN)
allNodes.set(idN, node)
osmObject = node
node.SaveExtraData(element)
break
case "way":
osmObject = new OsmWay(idN)
const nodes = element.nodes.map((i) => allNodes.get(i))
osmObject.SaveExtraData(element, nodes)
break
case "relation":
osmObject = new OsmRelation(idN)
const allGeojsons = OsmToGeoJson(
{ elements },
// @ts-ignore
{
flatProperties: true,
}
)
const feature = allGeojsons.features.find(
(f) => f.id === osmObject.type + "/" + osmObject.id
)
osmObject.SaveExtraData(element, feature)
break
}
if (osmObject !== undefined && OsmObject.backendURL !== OsmObject.defaultBackend) {
osmObject.tags["_backend"] = OsmObject.backendURL
}
osmObject?.LoadData(element)
objects.push(osmObject)
}
return objects
}
/**
* Uses the list of polygon features to determine if the given tags are a polygon or not.
*
* OsmObject.isPolygon({"building":"yes"}) // => true
* OsmObject.isPolygon({"highway":"residential"}) // => false
* */
protected static isPolygon(tags: any): boolean {
for (const tagsKey in tags) {
if (!tags.hasOwnProperty(tagsKey)) {
continue
}
const polyGuide: { values: Set<string>; blacklist: boolean } =
OsmObject.polygonFeatures.get(tagsKey)
if (polyGuide === undefined) {
continue
}
if (polyGuide.values === null) {
// .values is null, thus merely _having_ this key is enough to be a polygon (or if blacklist, being a line)
return !polyGuide.blacklist
}
// is the key contained? Then we have a match if the value is contained
const doesMatch = polyGuide.values.has(tags[tagsKey])
if (polyGuide.blacklist) {
return !doesMatch
}
return doesMatch
}
return false
}
private static constructPolygonFeatures(): Map<
string,
{ values: Set<string>; blacklist: boolean }
> {
const result = new Map<string, { values: Set<string>; blacklist: boolean }>()
for (const polygonFeature of polygon_features) {
const key = polygonFeature.key
if (polygonFeature.polygon === "all") {
result.set(key, { values: null, blacklist: false })
continue
}
const blacklist = polygonFeature.polygon === "blacklist"
result.set(key, {
values: new Set<string>(polygonFeature.values),
blacklist: blacklist,
})
}
return result
}
// The centerpoint of the feature, as [lat, lon]
public abstract centerpoint(): [number, number]
public abstract asGeoJson(): any
abstract SaveExtraData(element: any, allElements: OsmObject[] | any)
/**
* Generates the changeset-XML for tags
* @constructor
*/
TagsXML(): string {
let tags = ""
for (const key in this.tags) {
if (key.startsWith("_")) {
continue
}
if (key === "id") {
continue
}
const v = this.tags[key]
if (v !== "" && v !== undefined) {
tags +=
' <tag k="' +
Utils.EncodeXmlValue(key) +
'" v="' +
Utils.EncodeXmlValue(this.tags[key]) +
'"/>\n'
}
}
return tags
}
abstract ChangesetXML(changesetId: string, header?: string): string
protected VersionXML() {
if (this.version === undefined) {
return ""
}
return 'version="' + this.version + '"'
}
protected LoadData(element: any): void {
if (element === undefined) {
return
}
this.tags = element?.tags ?? this.tags
const tgs = this.tags
tgs["id"] = <OsmId>(this.type + "/" + this.id)
this.version = element?.version
this.timestamp = element?.timestamp
if (element?.tags === undefined) {
// Simple node which is part of a way - not important
return
}
tgs["_last_edit:contributor"] = element.user
tgs["_last_edit:contributor:uid"] = element.uid
tgs["_last_edit:changeset"] = element.changeset
tgs["_last_edit:timestamp"] = element.timestamp
tgs["_version_number"] = element.version
}
}
export class OsmNode extends OsmObject {
lat: number
lon: number
constructor(id: number, extraData?) {
super("node", id)
this.LoadData(extraData)
}
/**
*
* const obj = new OsmNode(1234)
* obj.tags.key = "value"
* obj.lat = 1
* obj.lon = 2
* obj.ChangesetXML("123").trim() // => '<node id="1234" changeset="123" lat="1" lon="2">\n <tag k="key" v="value"/>\n </node>'
*
* @param changesetId
* @param header
* @constructor
*/
ChangesetXML(changesetId: string, header?: string): string {
let tags = this.TagsXML()
return ` <node id="${this.id}" ${header ?? ""} ${
changesetId ? ' changeset="' + changesetId + '" ' : ""
}${this.VersionXML()} lat="${this.lat}" lon="${this.lon}">
${tags} </node>
`
}
SaveExtraData(element) {
this.lat = element.lat
this.lon = element.lon
}
centerpoint(): [number, number] {
return [this.lat, this.lon]
}
asGeoJson(): OsmFeature {
return {
type: "Feature",
properties: this.tags,
geometry: {
type: "Point",
coordinates: [this.lon, this.lat],
},
}
}
}
export class OsmWay extends OsmObject {
nodes: number[] = []
// The coordinates of the way, [lat, lon][]
coordinates: [number, number][] = []
lat: number
lon: number
constructor(id: number, wayInfo?) {
super("way", id)
this.LoadData(wayInfo)
}
centerpoint(): [number, number] {
return [this.lat, this.lon]
}
/**
* const obj = new OsmWay(1234)
* obj.tags.key = "value"
* obj.ChangesetXML("123").trim() // => '<way id="1234" changeset="123" >\n <tag k="key" v="value"/>\n </way>'
*/
ChangesetXML(changesetId: string, header?: string): string {
let tags = this.TagsXML()
let nds = ""
for (const node in this.nodes) {
nds += ' <nd ref="' + this.nodes[node] + '"/>\n'
}
return ` <way id="${this.id}" ${header ?? ""} ${
changesetId ? 'changeset="' + changesetId + '" ' : ""
} ${this.VersionXML()}>
${nds}${tags} </way>
`
}
SaveExtraData(element, allNodes: OsmNode[]) {
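// The way's centerpoint is approximated as the arithmetic mean of the coordinates of its nodes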
let latSum = 0
let lonSum = 0
const nodeDict = new Map<number, OsmNode>()
for (const node of allNodes) {
nodeDict.set(node.id, node)
}
if (element.nodes === undefined) {
console.error("PANIC: no nodes!")
}
for (const nodeId of element.nodes) {
const node = nodeDict.get(nodeId)
if (node === undefined) {
console.error("Error: node ", nodeId, "not found in ", nodeDict)
// This is probably part of a relation which hasn't been fully downloaded
continue
}
this.coordinates.push(node.centerpoint())
latSum += node.lat
lonSum += node.lon
}
let count = this.coordinates.length
this.lat = latSum / count
this.lon = lonSum / count
this.nodes = element.nodes
}
public asGeoJson(): Feature<Polygon | LineString> & { properties: { id: WayId } } {
let coordinates: [number, number][] | [number, number][][] = this.coordinates.map(
([lat, lon]) => [lon, lat]
)
let geometry: LineString | Polygon
if (this.isPolygon()) {
geometry = {
type: "Polygon",
coordinates: [coordinates],
}
} else {
geometry = {
type: "LineString",
coordinates: coordinates,
}
}
return {
type: "Feature",
properties: <any>this.tags,
geometry,
}
}
private isPolygon(): boolean {
// Compare lat and lon separately, as the coordinate array might not be a reference to the same object
if (
this.coordinates[0][0] !== this.coordinates[this.coordinates.length - 1][0] ||
this.coordinates[0][1] !== this.coordinates[this.coordinates.length - 1][1]
) {
return false // Not closed
}
return OsmObject.isPolygon(this.tags)
}
}
export class OsmRelation extends OsmObject {
public members: {
type: "node" | "way" | "relation"
ref: number
role: string
}[]
private geojson = undefined
constructor(id: number, extraInfo?: any) {
super("relation", id)
this.LoadData(extraInfo)
}
centerpoint(): [number, number] {
return [0, 0] // TODO
}
ChangesetXML(changesetId: string, header?: string): string {
let members = ""
for (const member of this.members) {
members +=
' <member type="' +
member.type +
'" ref="' +
member.ref +
'" role="' +
member.role +
'"/>\n'
}
let tags = this.TagsXML()
let cs = ""
if (changesetId !== undefined) {
cs = `changeset="${changesetId}"`
}
return ` <relation id="${this.id}" ${header ?? ""} ${cs} ${this.VersionXML()}>
${members}${tags} </relation>
`
}
SaveExtraData(element, geojson) {
this.members = element.members
this.geojson = geojson
}
asGeoJson(): any {
if (this.geojson !== undefined) {
return this.geojson
}
throw "Not Implemented"
}
}

View file

@ -0,0 +1,243 @@
import { Utils } from "../../Utils"
import { OsmNode, OsmObject, OsmRelation, OsmWay } from "./OsmObject"
import { NodeId, OsmId, RelationId, WayId } from "../../Models/OsmFeature"
import { Store, UIEventSource } from "../UIEventSource"
import { ChangeDescription } from "./Actions/ChangeDescription"
/**
* The OSM-Object downloader downloads the latest version of the object, but applies 'pendingchanges' to them,
* so that we always have a consistent view
*/
export default class OsmObjectDownloader {
private readonly _changes?: {
readonly pendingChanges: UIEventSource<ChangeDescription[]>
readonly isUploading: Store<boolean>
}
private readonly backend: string
private historyCache = new Map<string, UIEventSource<OsmObject[]>>()
constructor(
backend: string = "https://www.openstreetmap.org",
changes?: {
readonly pendingChanges: UIEventSource<ChangeDescription[]>
readonly isUploading: Store<boolean>
}
) {
this._changes = changes
if (!backend.endsWith("/")) {
backend += "/"
}
if (!backend.startsWith("http")) {
throw "Backend URL must begin with http"
}
this.backend = backend
}
async DownloadObjectAsync(id: NodeId, maxCacheAgeInSecs?: number): Promise<OsmNode | "deleted">
async DownloadObjectAsync(id: WayId, maxCacheAgeInSecs?: number): Promise<OsmWay | "deleted">
async DownloadObjectAsync(
id: RelationId,
maxCacheAgeInSecs?: number
): Promise<OsmRelation | undefined>
async DownloadObjectAsync(id: OsmId, maxCacheAgeInSecs?: number): Promise<OsmObject | "deleted">
async DownloadObjectAsync(
id: string,
maxCacheAgeInSecs?: number
): Promise<OsmObject | "deleted">
async DownloadObjectAsync(id: string, maxCacheAgeInSecs?: number) {
// Wait until uploading is done
if (this._changes) {
await this._changes.isUploading.AsPromise((o) => o === false)
}
const splitted = id.split("/")
const type = splitted[0]
const idN = Number(splitted[1])
let obj: OsmObject | "deleted"
if (idN < 0) {
obj = this.constructObject(<"node" | "way" | "relation">type, idN)
} else {
obj = await this.RawDownloadObjectAsync(type, idN, maxCacheAgeInSecs)
}
if (obj === "deleted") {
return obj
}
return await this.applyPendingChanges(obj)
}
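/**
* Downloads (and caches) the full version history of the given object, as one OsmObject per version
*/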
public DownloadHistory(id: NodeId): UIEventSource<OsmNode[]>
public DownloadHistory(id: WayId): UIEventSource<OsmWay[]>
public DownloadHistory(id: RelationId): UIEventSource<OsmRelation[]>
public DownloadHistory(id: OsmId): UIEventSource<OsmObject[]>
public DownloadHistory(id: string): UIEventSource<OsmObject[]> {
if (this.historyCache.has(id)) {
return this.historyCache.get(id)
}
const splitted = id.split("/")
const type = splitted[0]
const idN = Number(splitted[1])
const src = new UIEventSource<OsmObject[]>([])
this.historyCache.set(id, src)
Utils.downloadJsonCached(
`${this.backend}api/0.6/${type}/${idN}/history`,
10 * 60 * 1000
).then((data) => {
const elements: any[] = data.elements
const osmObjects: OsmObject[] = []
for (const element of elements) {
let osmObject: OsmObject = null
element.nodes = []
switch (type) {
case "node":
osmObject = new OsmNode(idN, element)
break
case "way":
osmObject = new OsmWay(idN, element)
break
case "relation":
osmObject = new OsmRelation(idN, element)
break
}
osmObject?.SaveExtraData(element, [])
osmObjects.push(osmObject)
}
src.setData(osmObjects)
})
return src
}
/**
* Downloads the ways that are using this node.
* Beware: their geometry will be incomplete!
*/
public async DownloadReferencingWays(id: string): Promise<OsmWay[]> {
const data = await Utils.downloadJsonCached(`${this.backend}api/0.6/${id}/ways`, 60 * 1000)
return data.elements.map((wayInfo) => new OsmWay(wayInfo.id, wayInfo))
}
/**
* Downloads the relations that are using this feature.
* Beware: their geometry will be incomplete!
*/
public async DownloadReferencingRelations(id: string): Promise<OsmRelation[]> {
const data = await Utils.downloadJsonCached(
`${this.backend}api/0.6/${id}/relations`,
60 * 1000
)
return data.elements.map((wayInfo) => {
const rel = new OsmRelation(wayInfo.id, wayInfo)
rel.SaveExtraData(wayInfo, undefined)
return rel
})
}
private applyNodeChange(object: OsmNode, change: { lat: number; lon: number }) {
object.lat = change.lat
object.lon = change.lon
}
private applyWayChange(object: OsmWay, change: { nodes: number[]; coordinates }) {
object.nodes = change.nodes
object.coordinates = change.coordinates.map(([lat, lon]) => [lon, lat])
}
private applyRelationChange(
object: OsmRelation,
change: { members: { type: "node" | "way" | "relation"; ref: number; role: string }[] }
) {
object.members = change.members
}
private async applyPendingChanges(object: OsmObject): Promise<OsmObject | "deleted"> {
if (!this._changes) {
return object
}
const pendingChanges = this._changes.pendingChanges.data
for (const pendingChange of pendingChanges) {
if (object.id !== pendingChange.id || object.type !== pendingChange.type) {
continue
}
if (pendingChange.doDelete) {
return "deleted"
}
if (pendingChange.tags) {
for (const { k, v } of pendingChange.tags) {
if (v === undefined) {
delete object.tags[k]
} else {
object.tags[k] = v
}
}
}
if (pendingChange.changes) {
switch (pendingChange.type) {
case "node":
this.applyNodeChange(<OsmNode>object, <any>pendingChange.changes)
break
case "way":
this.applyWayChange(<OsmWay>object, <any>pendingChange.changes)
break
case "relation":
this.applyRelationChange(<OsmRelation>object, <any>pendingChange.changes)
break
}
}
}
return object
}
/**
* Creates an empty object of the specified type with the specified id.
* We assume that the pending changes will be applied on them, filling in details such as coordinates, tags, ...
*/
private constructObject(type: "node" | "way" | "relation", id: number): OsmObject {
switch (type) {
case "node":
return new OsmNode(id)
case "way":
return new OsmWay(id)
case "relation":
return new OsmRelation(id)
}
}
private async RawDownloadObjectAsync(
type: string,
idN: number,
maxCacheAgeInSecs?: number
): Promise<OsmObject | "deleted"> {
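// For ways and relations, '/full' is appended so that the referenced nodes (and members) are part of the response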
const full = type !== "node" ? "/full" : ""
const url = `${this.backend}api/0.6/${type}/${idN}${full}`
const rawData = await Utils.downloadJsonCachedAdvanced(
url,
(maxCacheAgeInSecs ?? 10) * 1000
)
if (rawData["error"] !== undefined && rawData["statuscode"] === 410) {
return "deleted"
}
// A full query might contain more than just the requested object (e.g. nodes that are part of a way, where we only want the way)
const parsed = OsmObject.ParseObjects(rawData["content"].elements)
// Let's fetch the object we need
for (const osmObject of parsed) {
if (osmObject.type !== type) {
continue
}
if (osmObject.id !== idN) {
continue
}
// Found the one!
return osmObject
}
throw "PANIC: requested object is not part of the response"
}
}

View file

@ -0,0 +1,277 @@
import { UIEventSource } from "../UIEventSource"
import UserDetails, { OsmConnection } from "./OsmConnection"
import { Utils } from "../../Utils"
export class OsmPreferences {
public preferences = new UIEventSource<Record<string, string>>({}, "all-osm-preferences")
private readonly preferenceSources = new Map<string, UIEventSource<string>>()
private auth: any
private userDetails: UIEventSource<UserDetails>
private longPreferences = {}
constructor(auth, osmConnection: OsmConnection) {
this.auth = auth
this.userDetails = osmConnection.userDetails
const self = this
osmConnection.OnLoggedIn(() => self.UpdatePreferences())
}
/**
* OSM preferences can be at most 255 chars
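* Longer values are therefore split into chunks of at most 255 characters, stored as "<prefix><key>-combined-<i>",
* while "<prefix><key>-combined-length" keeps track of how many chunks are in use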
* @param key
* @param prefix
* @constructor
*/
public GetLongPreference(key: string, prefix: string = "mapcomplete-"): UIEventSource<string> {
if (this.longPreferences[prefix + key] !== undefined) {
return this.longPreferences[prefix + key]
}
const source = new UIEventSource<string>(undefined, "long-osm-preference:" + prefix + key)
this.longPreferences[prefix + key] = source
const allStartWith = prefix + key + "-combined"
const subOptions = { prefix: "" }
// Gives the number of combined preferences
const length = this.GetPreference(allStartWith + "-length", "", subOptions)
if ((allStartWith + "-length").length > 255) {
throw (
"This preference key is too long, it has " +
key.length +
" characters, but at most " +
(255 - "-length".length - "-combined".length - prefix.length) +
" characters are allowed"
)
}
const self = this
source.addCallback((str) => {
if (str === undefined || str === "") {
return
}
if (str === null) {
console.error("Deleting " + allStartWith)
let count = parseInt(length.data)
for (let i = 0; i < count; i++) {
// Delete all the preferences
self.GetPreference(allStartWith + "-" + i, "", subOptions).setData("")
}
self.GetPreference(allStartWith + "-length", "", subOptions).setData("")
return
}
let i = 0
while (str !== "") {
if (str === undefined || str === "undefined") {
throw "Long pref became undefined?"
}
if (i > 100) {
throw "This long preference is getting very long... "
}
self.GetPreference(allStartWith + "-" + i, "", subOptions).setData(
str.substr(0, 255)
)
str = str.substr(255)
i++
}
length.setData("" + i) // We use I, the number of preference fields used
})
function updateData(l: number) {
if (Object.keys(self.preferences.data).length === 0) {
// The preferences are still empty - they are not yet updated, so we delay updating for now
return
}
const prefsCount = Number(l)
if (prefsCount > 100) {
throw "Length to long"
}
let str = ""
for (let i = 0; i < prefsCount; i++) {
const key = allStartWith + "-" + i
if (self.preferences.data[key] === undefined) {
console.warn(
"Detected a broken combined preference:",
key,
"is undefined",
self.preferences
)
}
str += self.preferences.data[key] ?? ""
}
source.setData(str)
}
length.addCallback((l) => {
updateData(Number(l))
})
this.preferences.addCallbackAndRun((_) => {
updateData(Number(length.data))
})
return source
}
public GetPreference(
key: string,
defaultValue: string = undefined,
options?: {
documentation?: string
prefix?: string
}
): UIEventSource<string> {
const prefix: string = options?.prefix ?? "mapcomplete-"
if (key.startsWith(prefix) && prefix !== "") {
console.trace(
"A preference was requested which has a duplicate prefix in its key. This is probably a bug"
)
}
key = prefix + key
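// Strip characters which tend to be problematic in preference keys (and in the URLs they end up in)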
key = key.replace(/[:\\\/"' {}.%]/g, "")
if (key.length >= 255) {
throw "Preferences: key length to big"
}
const cached = this.preferenceSources.get(key)
if (cached !== undefined) {
return cached
}
if (this.userDetails.data.loggedIn && this.preferences.data[key] === undefined) {
this.UpdatePreferences()
}
const pref = new UIEventSource<string>(
this.preferences.data[key] ?? defaultValue,
"osm-preference:" + key
)
pref.addCallback((v) => {
this.UploadPreference(key, v)
})
this.preferences.addCallbackD((allPrefs) => {
const v = allPrefs[key]
if (v === undefined) {
return
}
pref.setData(v)
})
this.preferenceSources.set(key, pref)
return pref
}
public ClearPreferences() {
let isRunning = false
const self = this
this.preferences.addCallback((prefs) => {
console.log("Cleaning preferences...")
if (Object.keys(prefs).length == 0) {
return
}
if (isRunning) {
return
}
isRunning = true
const prefixes = ["mapcomplete-"]
for (const key in prefs) {
const matches = prefixes.some((prefix) => key.startsWith(prefix))
if (matches) {
console.log("Clearing ", key)
self.GetPreference(key, "", { prefix: "" }).setData("")
}
}
isRunning = false
return
})
}
private UpdatePreferences() {
const self = this
this.auth.xhr(
{
method: "GET",
path: "/api/0.6/user/preferences",
},
function (error, value: XMLDocument) {
if (error) {
console.log("Could not load preferences", error)
return
}
const prefs = value.getElementsByTagName("preference")
for (let i = 0; i < prefs.length; i++) {
const pref = prefs[i]
const k = pref.getAttribute("k")
const v = pref.getAttribute("v")
self.preferences.data[k] = v
}
// We merge all the preferences: new keys are uploaded
// For differing values, the server overrides local changes
self.preferenceSources.forEach((preference, key) => {
const osmValue = self.preferences.data[key]
if (osmValue === undefined && preference.data !== undefined) {
// OSM doesn't know this value yet
self.UploadPreference(key, preference.data)
} else {
// OSM does have a value - set it
preference.setData(osmValue)
}
})
self.preferences.ping()
}
)
}
private UploadPreference(k: string, v: string) {
if (!this.userDetails.data.loggedIn) {
console.debug(`Not saving preference ${k}: user not logged in`)
return
}
if (this.preferences.data[k] === v) {
return
}
const self = this
console.debug("Updating preference", k, " to ", Utils.EllipsesAfter(v, 15))
if (v === undefined || v === "") {
this.auth.xhr(
{
method: "DELETE",
path: "/api/0.6/user/preferences/" + encodeURIComponent(k),
options: { header: { "Content-Type": "text/plain" } },
},
function (error) {
if (error) {
console.warn("Could not remove preference", error)
return
}
delete self.preferences.data[k]
self.preferences.ping()
console.debug("Preference ", k, "removed!")
}
)
return
}
this.auth.xhr(
{
method: "PUT",
path: "/api/0.6/user/preferences/" + encodeURIComponent(k),
options: { header: { "Content-Type": "text/plain" } },
content: v,
},
function (error) {
if (error) {
console.warn(`Could not set preference "${k}"`, error)
return
}
self.preferences.data[k] = v
self.preferences.ping()
console.debug(`Preference ${k} written!`)
}
)
}
}

src/Logic/Osm/Overpass.ts
View file

@ -0,0 +1,143 @@
import { TagsFilter } from "../Tags/TagsFilter"
import { Utils } from "../../Utils"
import { ImmutableStore, Store } from "../UIEventSource"
import { BBox } from "../BBox"
import osmtogeojson from "osmtogeojson"
import { FeatureCollection } from "@turf/turf"
/**
* Interfaces with Overpass to get all the latest data
*/
export class Overpass {
private _filter: TagsFilter
private readonly _interpreterUrl: string
private readonly _timeout: Store<number>
private readonly _extraScripts: string[]
private readonly _includeMeta: boolean
constructor(
filter: TagsFilter,
extraScripts: string[],
interpreterUrl: string,
timeout?: Store<number>,
includeMeta = true
) {
this._timeout = timeout ?? new ImmutableStore<number>(90)
this._interpreterUrl = interpreterUrl
const optimized = filter.optimize()
if (optimized === true || optimized === false) {
throw "Invalid filter: optimizes to true or false"
}
this._filter = optimized
this._extraScripts = extraScripts
this._includeMeta = includeMeta
}
public async queryGeoJson(bounds: BBox): Promise<[FeatureCollection, Date]> {
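// The global bbox setting is prepended to the query; e.g. (illustrative coordinates) a box
// around Brussels becomes "[bbox:50.8,4.3,50.9,4.4]" (south, west, north, east)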
const bbox =
"[bbox:" +
bounds.getSouth() +
"," +
bounds.getWest() +
"," +
bounds.getNorth() +
"," +
bounds.getEast() +
"]"
const query = this.buildScript(bbox)
return await this.ExecuteQuery(query)
}
public buildUrl(query: string) {
return `${this._interpreterUrl}?data=${encodeURIComponent(query)}`
}
public async ExecuteQuery(query: string): Promise<[FeatureCollection, Date]> {
const json = await Utils.downloadJson(this.buildUrl(query))
if (json.elements.length === 0 && json.remark !== undefined) {
console.warn("Timeout or other runtime error while querying overpass", json.remark)
throw `Runtime error (timeout or similar): ${json.remark}`
}
if (json.elements.length === 0) {
console.warn("No features for", json)
}
const geojson = osmtogeojson(json)
const osmTime = new Date(json.osm3s.timestamp_osm_base)
return [<any>geojson, osmTime]
}
/**
* Constructs the actual script to execute on Overpass
* 'PostCall' can be used to set an extra range, see 'AsOverpassTurboLink'
*
* import {Tag} from "../Tags/Tag";
*
* new Overpass(new Tag("key","value"), [], "").buildScript("{{bbox}}") // => `[out:json][timeout:90]{{bbox}};(nwr["key"="value"];);out body;out meta;>;out skel qt;`
*/
public buildScript(bbox: string, postCall: string = "", pretty = false): string {
const filters = this._filter.asOverpass()
let filter = ""
for (const filterOr of filters) {
if (pretty) {
filter += " "
}
filter += "nwr" + filterOr + postCall + ";"
if (pretty) {
filter += "\n"
}
}
for (const extraScript of this._extraScripts) {
filter += "(" + extraScript + ");"
}
return `[out:json][timeout:${this._timeout.data}]${bbox};(${filter});out body;${
this._includeMeta ? "out meta;" : ""
}>;out skel qt;`
}
/**
* Constructs the actual script to execute on Overpass with geocoding
* 'PostCall' can be used to set an extra range, see 'AsOverpassTurboLink'
*/
public buildScriptInArea(
area: { osm_type: "way" | "relation"; osm_id: number },
pretty = false
): string {
const filters = this._filter.asOverpass()
let filter = ""
for (const filterOr of filters) {
if (pretty) {
filter += " "
}
filter += "nwr" + filterOr + "(area.searchArea);"
if (pretty) {
filter += "\n"
}
}
for (const extraScript of this._extraScripts) {
filter += "(" + extraScript + ");"
}
let id = area.osm_id
if (area.osm_type === "relation") {
id += 3600000000
}
return `[out:json][timeout:${this._timeout.data}];
area(id:${id})->.searchArea;
(${filter});
out body;${this._includeMeta ? "out meta;" : ""}>;out skel qt;`
}
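// Illustrative example (made-up id, not from the original source): for {osm_type: "relation", osm_id: 52411}
// the area id becomes 3600052411, so the generated script starts with `area(id:3600052411)->.searchArea;`
// and every nwr-clause is restricted to that area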
public buildQuery(bbox: string) {
return this.buildUrl(this.buildScript(bbox))
}
/**
* Little helper method to quickly open overpass-turbo in the browser
*/
public static AsOverpassTurboLink(tags: TagsFilter) {
const overpass = new Overpass(tags, [], "", undefined, false)
const script = overpass.buildScript("", "({{bbox}})", true)
const url = "http://overpass-turbo.eu/?Q="
return url + encodeURIComponent(script)
}
}

View file

@ -0,0 +1,200 @@
export default class AspectedRouting {
public readonly name: string
public readonly description: string
public readonly units: string
public readonly program: any
public constructor(program) {
this.name = program.name
this.description = program.description
this.units = program.unit
this.program = JSON.parse(JSON.stringify(program))
delete this.program.name
delete this.program.description
delete this.program.unit
}
/**
* Interprets the given Aspected-routing program for the given properties
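*
* // Illustrative sketch (hypothetical program, not a doctest from the original source):
* // AspectedRouting.interpret({"$multiply": [{maxspeed: {"30": 0.9}}, {access: {no: 0}}]}, {maxspeed: "30"})
* // evaluates both dictionaries against the tags, drops the undefined result of the second one and returns "0.90"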
*/
public static interpret(program: any, properties: any) {
if (typeof program !== "object") {
return program
}
let functionName /*: string*/ = undefined
let functionArguments /*: any */ = undefined
let otherValues = {}
// @ts-ignore
Object.entries(program).forEach((tag) => {
const [key, value] = tag
if (key.startsWith("$")) {
functionName = key
functionArguments = value
} else {
otherValues[key] = value
}
})
if (functionName === undefined) {
return AspectedRouting.interpretAsDictionary(program, properties)
}
if (functionName === "$multiply") {
return AspectedRouting.multiplyScore(properties, functionArguments)
} else if (functionName === "$firstMatchOf") {
return AspectedRouting.getFirstMatchScore(properties, functionArguments)
} else if (functionName === "$min") {
return AspectedRouting.getMinValue(properties, functionArguments)
} else if (functionName === "$max") {
return AspectedRouting.getMaxValue(properties, functionArguments)
} else if (functionName === "$default") {
return AspectedRouting.defaultV(functionArguments, otherValues, properties)
} else {
console.error(
`Error: Program ${functionName} is not implemented yet. ${JSON.stringify(program)}`
)
}
}
/**
* Given a 'program' without function invocation, interprets it as a dictionary
*
* E.g., given the program
*
* {
* highway: {
* residential: 30,
* living_street: 20
* },
* surface: {
* sett : 0.9
* }
*
* }
*
* in combination with the tags {highway: residential},
*
* the result should be [30, undefined];
*
* For the tags {highway: residential, surface: sett} we should get [30, 0.9]
*
*
* @param program
* @param tags
* @return {(undefined|*)[]}
*/
private static interpretAsDictionary(program, tags) {
// @ts-ignore
return Object.entries(tags).map((tag) => {
const [key, value] = tag
const propertyValue = program[key]
if (propertyValue === undefined) {
return undefined
}
if (typeof propertyValue !== "object") {
return propertyValue
}
// @ts-ignore
return propertyValue[value]
})
}
private static defaultV(subProgram, otherArgs, tags) {
// @ts-ignore
const normalProgram = Object.entries(otherArgs)[0][1]
const value = AspectedRouting.interpret(normalProgram, tags)
if (value !== undefined) {
return value
}
return AspectedRouting.interpret(subProgram, tags)
}
/**
* Multiplies the default score with the proper values
* @param tags {object} the active tags to check against
* @param subprograms which should generate a list of values
* @returns score after multiplication
*/
private static multiplyScore(tags, subprograms) {
let number = 1
let subResults: any[]
if (subprograms.length !== undefined) {
subResults = AspectedRouting.concatMap(subprograms, (subprogram) =>
AspectedRouting.interpret(subprogram, tags)
)
} else {
subResults = AspectedRouting.interpret(subprograms, tags)
}
subResults.filter((r) => r !== undefined).forEach((r) => (number *= parseFloat(r)))
return number.toFixed(2)
}
private static getFirstMatchScore(tags, order: any) {
/*Order should be a list of arguments after evaluation*/
order = <string[]>AspectedRouting.interpret(order, tags)
for (let key of order) {
// @ts-ignore
for (let entry of Object.entries(JSON.parse(tags))) {
const [tagKey, value] = entry
if (key === tagKey) {
// We have a match... let's evaluate the subprogram
const evaluated = AspectedRouting.interpret(value, tags)
if (evaluated !== undefined) {
return evaluated
}
}
}
}
// Not a single match found...
return undefined
}
private static getMinValue(tags, subprogram) {
const minArr = subprogram
.map((part) => {
if (typeof part === "object") {
const calculatedValue = this.interpret(part, tags)
return parseFloat(calculatedValue)
} else {
return parseFloat(part)
}
})
.filter((v) => !isNaN(v))
return Math.min(...minArr)
}
private static getMaxValue(tags, subprogram) {
const maxArr = subprogram
.map((part) => {
if (typeof part === "object") {
return parseFloat(AspectedRouting.interpret(part, tags))
} else {
return parseFloat(part)
}
})
.filter((v) => !isNaN(v))
return Math.max(...maxArr)
}
private static concatMap(list, f): any[] {
const result = []
list = list.map(f)
for (const elem of list) {
if (elem.length !== undefined) {
// This is a list
result.push(...elem)
} else {
result.push(elem)
}
}
return result
}
public evaluate(properties) {
return AspectedRouting.interpret(this.program, properties)
}
}

View file

@ -0,0 +1,705 @@
import { GeoOperations } from "./GeoOperations"
import { Utils } from "../Utils"
import opening_hours from "opening_hours"
import Combine from "../UI/Base/Combine"
import BaseUIElement from "../UI/BaseUIElement"
import Title from "../UI/Base/Title"
import { FixedUiElement } from "../UI/Base/FixedUiElement"
import LayerConfig from "../Models/ThemeConfig/LayerConfig"
import { CountryCoder } from "latlon2country"
import Constants from "../Models/Constants"
import { TagUtils } from "./Tags/TagUtils"
import { Feature, LineString } from "geojson"
import { OsmTags } from "../Models/OsmFeature"
import { UIEventSource } from "./UIEventSource"
import LayoutConfig from "../Models/ThemeConfig/LayoutConfig"
import OsmObjectDownloader from "./Osm/OsmObjectDownloader"
/**
* All elements that are needed to perform metatagging
*/
export interface MetataggingState {
layout: LayoutConfig
osmObjectDownloader: OsmObjectDownloader
}
export abstract class SimpleMetaTagger {
public readonly keys: string[]
public readonly doc: string
public readonly isLazy: boolean
public readonly includesDates: boolean
/***
* A function that adds some extra data to a feature
* @param docs: what does this extra data do?
*/
protected constructor(docs: {
keys: string[]
doc: string
/**
* Set this flag if the data is volatile or date-based.
* It _won't_ be cached in this case
*/
includesDates?: boolean
isLazy?: boolean
cleanupRetagger?: boolean
}) {
this.keys = docs.keys
this.doc = docs.doc
this.isLazy = docs.isLazy
this.includesDates = docs.includesDates ?? false
if (!docs.cleanupRetagger) {
for (const key of docs.keys) {
if (!key.startsWith("_") && key.toLowerCase().indexOf("theme") < 0) {
throw `Incorrect key for a calculated meta value '${key}': it should start with underscore (_)`
}
}
}
}
/**
* Applies the metatag-calculation, returns 'true' if the upstream source needs to be pinged
* @param feature
* @param layer
* @param tagsStore
* @param state
*/
public abstract applyMetaTagsOnFeature(
feature: any,
layer: LayerConfig,
tagsStore: UIEventSource<Record<string, string>>,
state: MetataggingState
): boolean
}
export class ReferencingWaysMetaTagger extends SimpleMetaTagger {
/**
* Disable this metatagger, e.g. for caching or tests
* This is a bit of a work-around
*/
public static enabled = true
constructor() {
super({
keys: ["_referencing_ways"],
isLazy: true,
doc: "_referencing_ways contains - for a node - which ways use this node as a point in their geometry.",
})
}
public applyMetaTagsOnFeature(feature, layer, tags, state) {
if (!ReferencingWaysMetaTagger.enabled) {
return false
}
// This function has some extra code so that it also works for newly added points in SimpleAddUI.ts
const id = feature.properties.id
if (!id.startsWith("node/")) {
return false
}
Utils.AddLazyPropertyAsync(feature.properties, "_referencing_ways", async () => {
const referencingWays = await state.osmObjectDownloader.DownloadReferencingWays(id)
const wayIds = referencingWays.map((w) => "way/" + w.id)
wayIds.sort()
return wayIds.join(";")
})
return true
}
}
class CountryTagger extends SimpleMetaTagger {
private static readonly coder = new CountryCoder(
Constants.countryCoderEndpoint,
Utils.downloadJson
)
public runningTasks: Set<any> = new Set<any>()
constructor() {
super({
keys: ["_country"],
doc: "The country code of the property (with latlon2country)",
includesDates: false,
})
}
applyMetaTagsOnFeature(feature, _, tagsSource) {
let centerPoint: any = GeoOperations.centerpoint(feature)
const runningTasks = this.runningTasks
const lat = centerPoint.geometry.coordinates[1]
const lon = centerPoint.geometry.coordinates[0]
runningTasks.add(feature)
CountryTagger.coder
.GetCountryCodeAsync(lon, lat)
.then((countries) => {
if (!countries) {
console.warn("Country coder returned ", countries)
return
}
const oldCountry = feature.properties["_country"]
const newCountry = countries[0].trim().toLowerCase()
if (oldCountry !== newCountry) {
tagsSource.data["_country"] = newCountry
tagsSource?.ping()
}
})
.catch((e) => {
console.warn(e)
})
.finally(() => runningTasks.delete(feature))
return false
}
}
class InlineMetaTagger extends SimpleMetaTagger {
public readonly applyMetaTagsOnFeature: (
feature: any,
layer: LayerConfig,
tagsStore: UIEventSource<OsmTags>,
state: MetataggingState
) => boolean
constructor(
docs: {
keys: string[]
doc: string
/**
* Set this flag if the data is volatile or date-based.
* It _won't_ be cached in this case
*/
includesDates?: boolean
isLazy?: boolean
cleanupRetagger?: boolean
},
f: (
feature: any,
layer: LayerConfig,
tagsStore: UIEventSource<OsmTags>,
state: MetataggingState
) => boolean
) {
super(docs)
this.applyMetaTagsOnFeature = f
}
}
class RewriteMetaInfoTags extends SimpleMetaTagger {
constructor() {
super({
keys: [
"_last_edit:contributor",
"_last_edit:contributor:uid",
"_last_edit:changeset",
"_last_edit:timestamp",
"_version_number",
"_backend",
],
doc: "Information about the last edit of this object. This object will actually _rewrite_ some tags for features coming from overpass",
})
}
applyMetaTagsOnFeature(feature: Feature): boolean {
/*Note: also called by 'UpdateTagsFromOsmAPI'*/
const tgs = feature.properties
let movedSomething = false
function move(src: string, target: string) {
if (tgs[src] === undefined) {
return
}
tgs[target] = tgs[src]
delete tgs[src]
movedSomething = true
}
move("user", "_last_edit:contributor")
move("uid", "_last_edit:contributor:uid")
move("changeset", "_last_edit:changeset")
move("timestamp", "_last_edit:timestamp")
move("version", "_version_number")
feature.properties._backend = feature.properties._backend ?? "https://openstreetmap.org"
return movedSomething
}
}
export default class SimpleMetaTaggers {
/**
* A simple metatagger which rewrites various metatags as needed
*/
public static readonly objectMetaInfo = new RewriteMetaInfoTags()
public static country = new CountryTagger()
public static geometryType = new InlineMetaTagger(
{
keys: ["_geometry:type"],
doc: "Adds the geometry type as property. This is identical to the GeoJSON geometry type and is one of `Point`, `LineString`, `Polygon` and exceptionally `MultiPolygon` or `MultiLineString`",
},
(feature, _) => {
const changed = feature.properties["_geometry:type"] === feature.geometry.type
feature.properties["_geometry:type"] = feature.geometry.type
return changed
}
)
public static referencingWays = new ReferencingWaysMetaTagger()
private static readonly cardinalDirections = {
N: 0,
NNE: 22.5,
NE: 45,
ENE: 67.5,
E: 90,
ESE: 112.5,
SE: 135,
SSE: 157.5,
S: 180,
SSW: 202.5,
SW: 225,
WSW: 247.5,
W: 270,
WNW: 292.5,
NW: 315,
NNW: 337.5,
}
private static latlon = new InlineMetaTagger(
{
keys: ["_lat", "_lon"],
doc: "The latitude and longitude of the point (or centerpoint in the case of a way/area)",
},
(feature) => {
const centerPoint = GeoOperations.centerpoint(feature)
const lat = centerPoint.geometry.coordinates[1]
const lon = centerPoint.geometry.coordinates[0]
feature.properties["_lat"] = "" + lat
feature.properties["_lon"] = "" + lon
return true
}
)
private static layerInfo = new InlineMetaTagger(
{
doc: "The layer-id to which this feature belongs. Note that this might return any applicable layer if `passAllFeatures` is defined.",
keys: ["_layer"],
includesDates: false,
},
(feature, layer) => {
if (feature.properties._layer === layer.id) {
return false
}
feature.properties._layer = layer.id
return true
}
)
private static noBothButLeftRight = new InlineMetaTagger(
{
keys: [
"sidewalk:left",
"sidewalk:right",
"generic_key:left:property",
"generic_key:right:property",
],
doc: "Rewrites tags from 'generic_key:both:property' as 'generic_key:left:property' and 'generic_key:right:property' (and similar for sidewalk tagging). Note that these rewritten tags _will be reuploaded on a change_. To prevent too much unrelated retagging, this is only enabled if the layer has at least some lineRenderings with an offset defined",
includesDates: false,
cleanupRetagger: true,
},
(feature, layer) => {
if (!layer.lineRendering.some((lr) => lr.leftRightSensitive)) {
return
}
return SimpleMetaTaggers.removeBothTagging(feature.properties)
}
)
private static surfaceArea = new InlineMetaTagger(
{
keys: ["_surface"],
doc: "The surface area of the feature in square meters. Not set on points and ways",
isLazy: true,
},
(feature) => {
Utils.AddLazyProperty(feature.properties, "_surface", () => {
return "" + GeoOperations.surfaceAreaInSqMeters(feature)
})
return true
}
)
private static surfaceAreaHa = new InlineMetaTagger(
{
keys: ["_surface:ha"],
doc: "The surface area of the feature in hectare. Not set on points and ways",
isLazy: true,
},
(feature) => {
Utils.AddLazyProperty(feature.properties, "_surface:ha", () => {
const sqMeters = GeoOperations.surfaceAreaInSqMeters(feature)
return "" + Math.floor(sqMeters / 1000) / 10
})
return true
}
)
private static levels = new InlineMetaTagger(
{
doc: "Extract the 'level'-tag into a normalized, ';'-separated value",
keys: ["_level"],
},
(feature) => {
if (feature.properties["level"] === undefined) {
return false
}
const l = feature.properties["level"]
const newValue = TagUtils.LevelsParser(l).join(";")
if (l === newValue) {
return false
}
feature.properties["level"] = newValue
return true
}
)
private static canonicalize = new InlineMetaTagger(
{
doc: "If 'units' is defined in the layoutConfig, then this metatagger will rewrite the specified keys to have the canonical form (e.g. `1meter` will be rewritten to `1m`; `1` will be rewritten to `1m` as well)",
keys: ["Theme-defined keys"],
},
(feature, _, __, state) => {
const units = Utils.NoNull(
[].concat(...(state?.layout?.layers?.map((layer) => layer.units) ?? []))
)
if (units.length == 0) {
return
}
let rewritten = false
for (const key in feature.properties) {
if (!feature.properties.hasOwnProperty(key)) {
continue
}
for (const unit of units) {
if (unit === undefined) {
continue
}
if (unit.appliesToKeys === undefined) {
console.error("The unit ", unit, "has no appliesToKey defined")
continue
}
if (!unit.appliesToKeys.has(key)) {
continue
}
const value = feature.properties[key]
const denom = unit.findDenomination(value, () => feature.properties["_country"])
if (denom === undefined) {
// no valid value found
break
}
const [, denomination] = denom
const defaultDenom = unit.getDefaultDenomination(
() => feature.properties["_country"]
)
let canonical =
denomination?.canonicalValue(value, defaultDenom == denomination) ??
undefined
if (canonical === value) {
break
}
console.log("Rewritten ", key, ` from '${value}' into '${canonical}'`)
if (canonical === undefined && !unit.eraseInvalid) {
break
}
feature.properties[key] = canonical
rewritten = true
break
}
}
return rewritten
}
)
private static lngth = new InlineMetaTagger(
{
keys: ["_length", "_length:km"],
doc: "The total length of a feature in meters (and in kilometers, rounded to one decimal for '_length:km'). For a surface, the length of the perimeter",
},
(feature) => {
const l = GeoOperations.lengthInMeters(feature)
feature.properties["_length"] = "" + l
const km = Math.floor(l / 1000)
const kmRest = Math.round((l - km * 1000) / 100)
feature.properties["_length:km"] = "" + km + "." + kmRest
return true
}
)
private static isOpen = new InlineMetaTagger(
{
keys: ["_isOpen"],
doc: "If 'opening_hours' is present, it will add the current state of the feature (being 'yes' or 'no')",
includesDates: true,
isLazy: true,
},
(feature) => {
if (Utils.runningFromConsole) {
// We are running from console, thus probably creating a cache
// isOpen is irrelevant
return false
}
if (feature.properties.opening_hours === "24/7") {
feature.properties._isOpen = "yes"
return true
}
// _isOpen is calculated dynamically on every call
Object.defineProperty(feature.properties, "_isOpen", {
enumerable: false,
configurable: true,
get: () => {
const tags = feature.properties
if (tags.opening_hours === undefined) {
return
}
if (tags._country === undefined) {
return
}
try {
const [lon, lat] = GeoOperations.centerpointCoordinates(feature)
const oh = new opening_hours(
tags["opening_hours"],
{
lat: lat,
lon: lon,
address: {
country_code: tags._country.toLowerCase(),
state: undefined,
},
},
<any>{ tag_key: "opening_hours" }
)
// Recalculate!
return oh.getState() ? "yes" : "no"
} catch (e) {
console.warn("Error while parsing opening hours of ", tags.id, e)
delete tags._isOpen
tags["_isOpen"] = "parse_error"
}
},
})
}
)
private static directionSimplified = new InlineMetaTagger(
{
keys: ["_direction:numerical", "_direction:leftright"],
doc: "_direction:numerical is a normalized, numerical direction based on 'camera:direction' or on 'direction'; it is only present if a valid direction is found (e.g. 38.5 or NE). _direction:leftright is either 'left' or 'right', i.e. 'left-looking' or 'right-looking' on the map",
},
(feature) => {
const tags = feature.properties
const direction = tags["camera:direction"] ?? tags["direction"]
if (direction === undefined) {
return false
}
const n = SimpleMetaTaggers.cardinalDirections[direction] ?? Number(direction)
if (isNaN(n)) {
return false
}
// The % operator has range (-360, 360). We apply a trick to get [0, 360).
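// e.g. (illustrative): direction=NE gives n=45 -> 45; direction=-90 gives ((-90 % 360) + 360) % 360 = 270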
const normalized = ((n % 360) + 360) % 360
tags["_direction:numerical"] = normalized
tags["_direction:leftright"] = normalized <= 180 ? "right" : "left"
return true
}
)
private static directionCenterpoint = new InlineMetaTagger(
{
keys: ["_direction:centerpoint"],
isLazy: true,
doc: "_direction:centerpoint is the direction of the linestring (in degrees) if one were standing at the projected centerpoint.",
},
(feature: Feature) => {
if (feature.geometry.type !== "LineString") {
return false
}
const ls = <Feature<LineString>>feature
Object.defineProperty(feature.properties, "_direction:centerpoint", {
enumerable: false,
configurable: true,
get: () => {
const centroid = GeoOperations.centerpoint(feature)
const projected = GeoOperations.nearestPoint(
ls,
<[number, number]>centroid.geometry.coordinates
)
const nextPoint = ls.geometry.coordinates[projected.properties.index + 1]
const bearing = GeoOperations.bearing(projected.geometry.coordinates, nextPoint)
delete feature.properties["_direction:centerpoint"]
feature.properties["_direction:centerpoint"] = bearing
return bearing
},
})
return true
}
)
private static currentTime = new InlineMetaTagger(
{
keys: ["_now:date", "_now:datetime"],
doc: "Adds the time that the data got loaded - pretty much the time of downloading from overpass. The format is YYYY-MM-DD hh:mm, aka 'sortable' aka ISO-8601-but-not-entirely",
includesDates: true,
},
(feature) => {
const now = new Date()
function date(d: Date) {
return d.toISOString().slice(0, 10)
}
function datetime(d: Date) {
return d.toISOString().slice(0, -5).replace("T", " ")
}
feature.properties["_now:date"] = date(now)
feature.properties["_now:datetime"] = datetime(now)
return true
}
)
private static timeSinceLastEdit = new InlineMetaTagger(
{
keys: ["_last_edit:passed_time"],
doc: "Gives the number of seconds since the last edit. Note that this will _not_ update, but rather be the number of seconds elapsed at the moment this tag is read first",
isLazy: true,
includesDates: true,
},
(feature, layer, tagsStore) => {
Utils.AddLazyProperty(feature.properties, "_last_edit:passed_time", () => {
const lastEditTimestamp = new Date(
feature.properties["_last_edit:timestamp"]
).getTime()
const now: number = Date.now()
const millisElapsed = now - lastEditTimestamp
return "" + millisElapsed / 1000
})
return true
}
)
public static metatags: SimpleMetaTagger[] = [
SimpleMetaTaggers.latlon,
SimpleMetaTaggers.layerInfo,
SimpleMetaTaggers.surfaceArea,
SimpleMetaTaggers.surfaceAreaHa,
SimpleMetaTaggers.lngth,
SimpleMetaTaggers.canonicalize,
SimpleMetaTaggers.country,
SimpleMetaTaggers.isOpen,
SimpleMetaTaggers.directionSimplified,
SimpleMetaTaggers.directionCenterpoint,
SimpleMetaTaggers.currentTime,
SimpleMetaTaggers.objectMetaInfo,
SimpleMetaTaggers.noBothButLeftRight,
SimpleMetaTaggers.geometryType,
SimpleMetaTaggers.levels,
SimpleMetaTaggers.referencingWays,
SimpleMetaTaggers.timeSinceLastEdit,
]
/**
* Edits the given object to rewrite 'both'-tagging into a 'left-right' tagging scheme.
* These changes are performed in-place.
*
* Returns 'true' if at least one change has been made
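*
* // Illustrative sketch (hypothetical tags, not a doctest from the original source):
* // removeBothTagging({sidewalk: "both"}) rewrites the object in-place to
* // {"sidewalk:left": "yes", "sidewalk:right": "yes"} and returns true;
* // removeBothTagging({"cycleway:both": "no"}) yields {"cycleway:left": "no", "cycleway:right": "no"}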
* @param tags
*/
public static removeBothTagging(tags: any): boolean {
let somethingChanged = false
/**
* Sets the key onto the properties (but doesn't overwrite if already existing)
*/
function set(k, value) {
if (tags[k] === undefined || tags[k] === "") {
tags[k] = value
somethingChanged = true
}
}
if (tags["sidewalk"]) {
const v = tags["sidewalk"]
switch (v) {
case "none":
case "no":
set("sidewalk:left", "no")
set("sidewalk:right", "no")
break
case "both":
set("sidewalk:left", "yes")
set("sidewalk:right", "yes")
break
case "left":
set("sidewalk:left", "yes")
set("sidewalk:right", "no")
break
case "right":
set("sidewalk:left", "no")
set("sidewalk:right", "yes")
break
default:
set("sidewalk:left", v)
set("sidewalk:right", v)
break
}
delete tags["sidewalk"]
somethingChanged = true
}
const regex = /([^:]*):both:(.*)/
for (const key in tags) {
const v = tags[key]
if (key.endsWith(":both")) {
const strippedKey = key.substring(0, key.length - ":both".length)
set(strippedKey + ":left", v)
set(strippedKey + ":right", v)
delete tags[key]
continue
}
const match = key.match(regex)
if (match !== null) {
const strippedKey = match[1]
const property = match[2]
set(strippedKey + ":left:" + property, v)
set(strippedKey + ":right:" + property, v)
console.log("Left-right rewritten " + key)
delete tags[key]
}
}
return somethingChanged
}
public static HelpText(): BaseUIElement {
const subElements: (string | BaseUIElement)[] = [
new Combine([
"Metatags are extra tags available, in order to display more data or to give better questions.",
"They are calculated automatically on every feature when the data arrives in the web browser. This document gives an overview of the available metatags.",
"**Hint:** when using metatags, add the [query parameter](URL_Parameters.md) `debug=true` to the URL. This will include a box in the popup for features which shows all the properties of the object",
]).SetClass("flex-col"),
]
subElements.push(new Title("Metatags calculated by MapComplete", 2))
subElements.push(
new FixedUiElement(
"The following values are always calculated, by default, by MapComplete and are available automatically on all elements in every theme"
)
)
for (const metatag of SimpleMetaTaggers.metatags) {
subElements.push(
new Title(metatag.keys.join(", "), 3),
metatag.doc,
metatag.isLazy ? "This is a lazy metatag and is only calculated when needed" : ""
)
}
return new Combine(subElements).SetClass("flex-col")
}
}

View file

@ -0,0 +1,211 @@
/**
* The part of the global state which initializes the feature switches, based on default values and on the layoutToUse
*/
import LayoutConfig from "../../Models/ThemeConfig/LayoutConfig"
import { UIEventSource } from "../UIEventSource"
import { QueryParameters } from "../Web/QueryParameters"
import Constants from "../../Models/Constants"
import { Utils } from "../../Utils"
class FeatureSwitchUtils {
static initSwitch(key: string, deflt: boolean, documentation: string): UIEventSource<boolean> {
const defaultValue = deflt
const queryParam = QueryParameters.GetQueryParameter(key, "" + defaultValue, documentation)
// The default value for this query parameter is derived from the current layout; the query-parameter event source is then mapped onto a boolean (and back)
return queryParam.sync(
(str) => (str === undefined ? defaultValue : str !== "false"),
[],
(b) => (b == defaultValue ? undefined : "" + b)
)
}
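// Illustrative usage sketch (hypothetical switch, not part of the original source):
//   const fsExample = FeatureSwitchUtils.initSwitch("fs-example", true, "Enables the example feature")
//   // "?fs-example=false" in the URL yields 'false'; writing the default value back maps to 'undefined',
//   // so the query parameter is dropped from the URL again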
}
export class OsmConnectionFeatureSwitches {
public readonly featureSwitchFakeUser: UIEventSource<boolean>
public readonly featureSwitchApiURL: UIEventSource<string>
constructor() {
this.featureSwitchApiURL = QueryParameters.GetQueryParameter(
"backend",
"osm",
"The OSM backend to use - can be used to redirect mapcomplete to the testing backend when using 'osm-test'"
)
this.featureSwitchFakeUser = QueryParameters.GetBooleanQueryParameter(
"fake-user",
false,
"If true, 'dryrun' mode is activated and a fake user account is loaded"
)
}
}
export default class FeatureSwitchState extends OsmConnectionFeatureSwitches {
/**
* The layout that is being used in this run
*/
public readonly layoutToUse: LayoutConfig
public readonly featureSwitchUserbadge: UIEventSource<boolean>
public readonly featureSwitchSearch: UIEventSource<boolean>
public readonly featureSwitchBackgroundSelection: UIEventSource<boolean>
public readonly featureSwitchAddNew: UIEventSource<boolean>
public readonly featureSwitchWelcomeMessage: UIEventSource<boolean>
public readonly featureSwitchCommunityIndex: UIEventSource<boolean>
public readonly featureSwitchExtraLinkEnabled: UIEventSource<boolean>
public readonly featureSwitchMoreQuests: UIEventSource<boolean>
public readonly featureSwitchShareScreen: UIEventSource<boolean>
public readonly featureSwitchGeolocation: UIEventSource<boolean>
public readonly featureSwitchIsTesting: UIEventSource<boolean>
public readonly featureSwitchIsDebugging: UIEventSource<boolean>
public readonly featureSwitchShowAllQuestions: UIEventSource<boolean>
public readonly featureSwitchFilter: UIEventSource<boolean>
public readonly featureSwitchEnableExport: UIEventSource<boolean>
public readonly overpassUrl: UIEventSource<string[]>
public readonly overpassTimeout: UIEventSource<number>
public readonly overpassMaxZoom: UIEventSource<number>
public readonly osmApiTileSize: UIEventSource<number>
public readonly backgroundLayerId: UIEventSource<string>
public constructor(layoutToUse?: LayoutConfig) {
super()
this.layoutToUse = layoutToUse
// Helper function to initialize feature switches
this.featureSwitchUserbadge = FeatureSwitchUtils.initSwitch(
"fs-userbadge",
layoutToUse?.enableUserBadge ?? true,
"Disables/Enables the user information pill (userbadge) at the top left. Disabling this disables logging in and thus disables editing all together, effectively putting MapComplete into read-only mode."
)
this.featureSwitchSearch = FeatureSwitchUtils.initSwitch(
"fs-search",
layoutToUse?.enableSearch ?? true,
"Disables/Enables the search bar"
)
this.featureSwitchBackgroundSelection = FeatureSwitchUtils.initSwitch(
"fs-background",
layoutToUse?.enableBackgroundLayerSelection ?? true,
"Disables/Enables the background layer control"
)
this.featureSwitchFilter = FeatureSwitchUtils.initSwitch(
"fs-filter",
layoutToUse?.enableLayers ?? true,
"Disables/Enables the filter view"
)
this.featureSwitchAddNew = FeatureSwitchUtils.initSwitch(
"fs-add-new",
layoutToUse?.enableAddNewPoints ?? true,
"Disables/Enables the 'add new feature'-popup. (A theme without presets might not have it in the first place)"
)
this.featureSwitchWelcomeMessage = FeatureSwitchUtils.initSwitch(
"fs-welcome-message",
true,
"Disables/enables the help menu or welcome message"
)
this.featureSwitchCommunityIndex = FeatureSwitchUtils.initSwitch(
"fs-community-index",
true,
"Disables/enables the button to get in touch with the community"
)
this.featureSwitchExtraLinkEnabled = FeatureSwitchUtils.initSwitch(
"fs-iframe-popout",
true,
"Disables/Enables the extraLink button. By default, if in iframe mode and the welcome message is hidden, a popout button to the full mapcomplete instance is shown instead (unless disabled with this switch or another extraLink button is enabled)"
)
this.featureSwitchMoreQuests = FeatureSwitchUtils.initSwitch(
"fs-more-quests",
layoutToUse?.enableMoreQuests ?? true,
"Disables/Enables the 'More Quests'-tab in the welcome message"
)
this.featureSwitchShareScreen = FeatureSwitchUtils.initSwitch(
"fs-share-screen",
layoutToUse?.enableShareScreen ?? true,
"Disables/Enables the 'Share-screen'-tab in the welcome message"
)
this.featureSwitchGeolocation = FeatureSwitchUtils.initSwitch(
"fs-geolocation",
layoutToUse?.enableGeolocation ?? true,
"Disables/Enables the geolocation button"
)
this.featureSwitchShowAllQuestions = FeatureSwitchUtils.initSwitch(
"fs-all-questions",
layoutToUse?.enableShowAllQuestions ?? false,
"Always show all questions"
)
this.featureSwitchEnableExport = FeatureSwitchUtils.initSwitch(
"fs-export",
layoutToUse?.enableExportButton ?? true,
"Enable the export as GeoJSON and CSV button"
)
let testingDefaultValue = false
if (
this.featureSwitchApiURL.data !== "osm-test" &&
!Utils.runningFromConsole &&
(location.hostname === "localhost" || location.hostname === "127.0.0.1")
) {
testingDefaultValue = true
}
this.featureSwitchIsTesting = QueryParameters.GetBooleanQueryParameter(
"test",
testingDefaultValue,
"If true, 'dryrun' mode is activated. The app will behave as normal, except that changes to OSM will be printed onto the console instead of actually uploaded to osm.org"
)
this.featureSwitchIsDebugging = QueryParameters.GetBooleanQueryParameter(
"debug",
false,
"If true, shows some extra debugging help such as all the available tags on every object"
)
this.overpassUrl = QueryParameters.GetQueryParameter(
"overpassUrl",
(layoutToUse?.overpassUrl ?? Constants.defaultOverpassUrls).join(","),
"Point mapcomplete to a different overpass-instance. Example: https://overpass-api.de/api/interpreter"
).sync(
(param) => param?.split(","),
[],
(urls) => urls?.join(",")
)
this.overpassTimeout = UIEventSource.asFloat(
QueryParameters.GetQueryParameter(
"overpassTimeout",
"" + layoutToUse?.overpassTimeout,
"Set a different timeout (in seconds) for queries in overpass"
)
)
this.overpassMaxZoom = UIEventSource.asFloat(
QueryParameters.GetQueryParameter(
"overpassMaxZoom",
"" + layoutToUse?.overpassMaxZoom,
"The zoom level at which to switch between the OSM-api and overpass"
)
)
this.osmApiTileSize = UIEventSource.asFloat(
QueryParameters.GetQueryParameter(
"osmApiTileSize",
"" + layoutToUse?.osmApiTileSize,
"Tilesize when the OSM-API is used to fetch data within a BBOX"
)
)
this.featureSwitchUserbadge.addCallbackAndRun((userbadge) => {
if (!userbadge) {
this.featureSwitchAddNew.setData(false)
}
})
this.backgroundLayerId = QueryParameters.GetQueryParameter(
"background",
layoutToUse?.defaultBackgroundId ?? "osm",
"The id of the background layer to start with"
)
}
}

View file

@ -0,0 +1,153 @@
import { UIEventSource } from "../UIEventSource"
import { LocalStorageSource } from "../Web/LocalStorageSource"
import { QueryParameters } from "../Web/QueryParameters"
export type GeolocationPermissionState = "prompt" | "requested" | "granted" | "denied"
export interface GeoLocationPointProperties extends GeolocationCoordinates {
id: "gps"
"user:location": "yes"
date: string
}
/**
* An abstract representation of the current state of the geolocation.
*/
export class GeoLocationState {
/**
* What do we know about the current state of having access to the GPS?
* If 'prompt', then we just started and didn't request access yet
* 'requested' means the user tapped the 'locate me' button at least once
* 'granted' means that it is granted
* 'denied' means that we don't have access
*/
public readonly permission: UIEventSource<GeolocationPermissionState> = new UIEventSource(
"prompt"
)
/**
* Important to determine e.g. if we move automatically on fix or not
*/
public readonly requestMoment: UIEventSource<Date | undefined> = new UIEventSource(undefined)
/**
* If true: the map will center (and re-center) to this location
*/
public readonly allowMoving: UIEventSource<boolean> = new UIEventSource<boolean>(true)
/**
* The latest GeoLocationCoordinates, as given by the WebAPI
*/
public readonly currentGPSLocation: UIEventSource<GeolocationCoordinates | undefined> =
new UIEventSource<GeolocationCoordinates | undefined>(undefined)
/**
* A small flag on localstorage. If the user previously granted the geolocation, it will be set.
* On Firefox, the permissions api is broken (probably fingerprint resistance) and "granted + don't ask again" doesn't stick between sessions.
*
* Instead, we set this flag. If this flag is set upon loading the page, we start geolocating immediately.
* If the user denies the geolocation this time, we unset this flag
* @private
*/
private readonly _previousLocationGrant: UIEventSource<"true" | "false"> = <any>(
LocalStorageSource.Get("geolocation-permissions")
)
/**
* Used to detect a permission retraction
*/
private readonly _grantedThisSession: UIEventSource<boolean> = new UIEventSource<boolean>(false)
constructor() {
const self = this
this.permission.addCallbackAndRunD(async (state) => {
if (state === "granted") {
self._previousLocationGrant.setData("true")
self._grantedThisSession.setData(true)
}
if (state === "prompt" && self._grantedThisSession.data) {
// This is _really_ weird: we had a grant earlier, but it's 'prompt' now?
// This means that the rights have been revoked again!
// self.permission.setData("denied")
self._previousLocationGrant.setData("false")
self.permission.setData("denied")
self.currentGPSLocation.setData(undefined)
console.warn("Detected a downgrade in permissions!")
}
if (state === "denied") {
self._previousLocationGrant.setData("false")
}
})
console.log("Previous location grant:", this._previousLocationGrant.data)
if (this._previousLocationGrant.data === "true") {
// A previous visit successfully granted permission. Chance is high that we are allowed to use it again!
// We set the flag to false again. If the user only wanted to share their location once, we are not gonna keep bothering them
this._previousLocationGrant.setData("false")
console.log("Requesting access to GPS as this was previously granted")
const latLonGivenViaUrl =
QueryParameters.wasInitialized("lat") || QueryParameters.wasInitialized("lon")
if (!latLonGivenViaUrl) {
this.requestMoment.setData(new Date())
}
this.requestPermission()
}
}
/**
* Installs the listener for updates
* @private
*/
private async startWatching() {
const self = this
navigator.geolocation.watchPosition(
function (position) {
self.currentGPSLocation.setData(position.coords)
self._previousLocationGrant.setData("true")
},
function () {
console.warn("Could not get location with navigator.geolocation")
},
{
enableHighAccuracy: true,
}
)
}
/**
* Requests the user to allow access to their position.
* When granted, will be written to the 'geolocationState'.
* This class will then start watching the position.
*/
public requestPermission() {
if (typeof navigator === "undefined") {
// Not compatible with this browser
this.permission.setData("denied")
return
}
if (this.permission.data !== "prompt" && this.permission.data !== "requested") {
// If the user denies the first prompt, revokes the deny and then tries again, we have to run the flow as well
// Hence we continue the flow if it is "requested"
return
}
this.permission.setData("requested")
try {
navigator?.permissions
?.query({ name: "geolocation" })
.then((status) => {
console.log("Status update: received geolocation permission is ", status.state)
this.permission.setData(status.state)
const self = this
status.onchange = function () {
self.permission.setData(status.state)
}
this.permission.setData("requested")
// We _must_ call 'startWatching', as that is the actual trigger for the popup...
self.startWatching()
})
.catch((e) => console.error("Could not get geopermission", e))
} catch (e) {
console.error("Could not get permission:", e)
}
}
}

View file

@ -0,0 +1,119 @@
import { UIEventSource } from "../UIEventSource"
import { GlobalFilter } from "../../Models/GlobalFilter"
import FilteredLayer from "../../Models/FilteredLayer"
import LayerConfig from "../../Models/ThemeConfig/LayerConfig"
import { OsmConnection } from "../Osm/OsmConnection"
import { Tag } from "../Tags/Tag"
import Translations from "../../UI/i18n/Translations"
import { RegexTag } from "../Tags/RegexTag"
import { Or } from "../Tags/Or"
/**
* The layer state keeps track of:
* - Which layers are enabled
* - Which filters are used, including 'global' filters
*/
export default class LayerState {
/**
* Filters which apply onto all layers
*/
public readonly globalFilters: UIEventSource<GlobalFilter[]> = new UIEventSource(
[],
"globalFilters"
)
/**
* Which layers are enabled in the current theme and what filters are applied onto them
*/
public readonly filteredLayers: ReadonlyMap<string, FilteredLayer>
private readonly osmConnection: OsmConnection
/**
*
* @param osmConnection
* @param layers
* @param context: the context, probably the name of the theme. Used to disambiguate the upstream user preference
*/
constructor(osmConnection: OsmConnection, layers: LayerConfig[], context: string) {
this.osmConnection = osmConnection
const filteredLayers = new Map()
for (const layer of layers) {
filteredLayers.set(
layer.id,
FilteredLayer.initLinkedState(layer, context, this.osmConnection)
)
}
this.filteredLayers = filteredLayers
layers.forEach((l) => LayerState.linkFilterStates(l, filteredLayers))
}
/**
* Sets the global filter which looks to the 'level'-tag.
* Only features with the given 'level' will be shown.
*
* If undefined is passed, _all_ levels will be shown
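*
* // Illustrative sketch (hypothetical levels, not a doctest from the original source):
* // setLevelFilter("1") matches features tagged level=1 or e.g. level=0;1;2,
* // while setLevelFilter("0") additionally matches features without any level tag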
* @param level
*/
public setLevelFilter(level?: string) {
// Remove all previous
const l = this.globalFilters.data.length
this.globalFilters.data = this.globalFilters.data.filter((f) => f.id !== "level")
if (!level) {
if (l !== this.globalFilters.data.length) {
this.globalFilters.ping()
}
return
}
const t = Translations.t.general.levelSelection
const conditionsOrred = [
new Tag("level", "" + level),
new RegexTag("level", new RegExp("(.*;)?" + level + "(;.*)?")),
]
if (level === "0") {
conditionsOrred.push(new Tag("level", "")) // No level tag is the same as level '0'
}
console.log("Setting levels filter to", conditionsOrred)
this.globalFilters.data.push({
id: "level",
state: level,
osmTags: new Or(conditionsOrred),
onNewPoint: {
tags: [new Tag("level", level)],
icon: "./assets/svg/elevator.svg",
confirmAddNew: t.confirmLevel.PartialSubs({ level }),
safetyCheck: t.addNewOnLevel.Subs({ level }),
},
})
this.globalFilters.ping()
}
/**
* Some layers copy the filter state of another layer - this is quite often the case for 'sibling'-layers,
* (where two variations of the same layer are used, e.g. a specific type of shop on all zoom levels and all shops on high zoom).
*
* This method links those states for the given layer
*/
private static linkFilterStates(
layer: LayerConfig,
filteredLayers: Map<string, FilteredLayer>
) {
if (layer.filterIsSameAs === undefined) {
return
}
const toReuse = filteredLayers.get(layer.filterIsSameAs)
if (toReuse === undefined) {
throw (
"Error in layer " +
layer.id +
": it defines that it should use the filters of " +
layer.filterIsSameAs +
", but this layer was not loaded"
)
}
console.warn(
"Linking filter and isDisplayed-states of " + layer.id + " and " + layer.filterIsSameAs
)
const copy = new FilteredLayer(layer, toReuse.appliedFilters, toReuse.isDisplayed)
filteredLayers.set(layer.id, copy)
}
}

View file

@ -0,0 +1,386 @@
import LayoutConfig from "../../Models/ThemeConfig/LayoutConfig"
import { OsmConnection } from "../Osm/OsmConnection"
import { MangroveIdentity } from "../Web/MangroveReviews"
import { Store, Stores, UIEventSource } from "../UIEventSource"
import StaticFeatureSource from "../FeatureSource/Sources/StaticFeatureSource"
import { FeatureSource } from "../FeatureSource/FeatureSource"
import { Feature } from "geojson"
import { Utils } from "../../Utils"
import translators from "../../assets/translators.json"
import codeContributors from "../../assets/contributors.json"
import LayerConfig from "../../Models/ThemeConfig/LayerConfig"
import { LayerConfigJson } from "../../Models/ThemeConfig/Json/LayerConfigJson"
import usersettings from "../../../src/assets/generated/layers/usersettings.json"
import Locale from "../../UI/i18n/Locale"
import LinkToWeblate from "../../UI/Base/LinkToWeblate"
import FeatureSwitchState from "./FeatureSwitchState"
import Constants from "../../Models/Constants"
/**
* The part of the state which keeps track of user-related stuff, e.g. the OSM-connection,
* which layers they enabled, ...
*/
export default class UserRelatedState {
public static readonly usersettingsConfig = UserRelatedState.initUserRelatedState()
public static readonly availableUserSettingsIds: string[] =
UserRelatedState.usersettingsConfig?.tagRenderings?.map((tr) => tr.id) ?? []
public static readonly SHOW_TAGS_VALUES = ["always", "yes", "full"] as const
/**
The user credentials
*/
public osmConnection: OsmConnection
/**
* The key for mangrove
*/
public readonly mangroveIdentity: MangroveIdentity
public readonly installedUserThemes: Store<string[]>
public readonly showAllQuestionsAtOnce: UIEventSource<boolean>
public readonly showTags: UIEventSource<"no" | undefined | "always" | "yes" | "full">
public readonly homeLocation: FeatureSource
public readonly language: UIEventSource<string>
/**
* The number of seconds that the GPS-locations are stored in memory.
* Time in seconds
*/
public readonly gpsLocationHistoryRetentionTime = new UIEventSource(
7 * 24 * 60 * 60,
"gps_location_retention"
)
/**
* 'Preferences as tags' exposes many preferences and state properties as a record.
* This is used to bridge the internal state with the usersettings.json layerconfig file
*/
public readonly preferencesAsTags: UIEventSource<Record<string, string>>
constructor(
osmConnection: OsmConnection,
availableLanguages?: string[],
layout?: LayoutConfig,
featureSwitches?: FeatureSwitchState
) {
this.osmConnection = osmConnection
{
const translationMode: UIEventSource<undefined | "true" | "false" | "mobile" | string> =
this.osmConnection.GetPreference("translation-mode", "false")
translationMode.addCallbackAndRunD((mode) => {
mode = mode.toLowerCase()
if (mode === "true" || mode === "yes") {
Locale.showLinkOnMobile.setData(false)
Locale.showLinkToWeblate.setData(true)
} else if (mode === "false" || mode === "no") {
Locale.showLinkToWeblate.setData(false)
} else if (mode === "mobile") {
Locale.showLinkOnMobile.setData(true)
Locale.showLinkToWeblate.setData(true)
} else {
Locale.showLinkOnMobile.setData(false)
Locale.showLinkToWeblate.setData(false)
}
})
}
this.showAllQuestionsAtOnce = UIEventSource.asBoolean(
this.osmConnection.GetPreference("show-all-questions", "false", {
documentation:
"Either 'true' or 'false'. If set, all questions will be shown all at once",
})
)
this.language = this.osmConnection.GetPreference("language")
this.showTags = <UIEventSource<any>>this.osmConnection.GetPreference("show_tags")
this.mangroveIdentity = new MangroveIdentity(
this.osmConnection.GetLongPreference("identity", "mangrove")
)
this.language.addCallbackAndRunD((language) => Locale.language.setData(language))
this.installedUserThemes = this.InitInstalledUserThemes()
this.homeLocation = this.initHomeLocation()
this.preferencesAsTags = this.initAmendedPrefs(layout, featureSwitches)
}
private static initUserRelatedState(): LayerConfig {
try {
return new LayerConfig(<LayerConfigJson>usersettings, "userinformationpanel")
} catch (e) {
return undefined
}
}
public GetUnofficialTheme(id: string):
| {
id: string
icon: string
title: any
shortDescription: any
definition?: any
isOfficial: boolean
}
| undefined {
console.log("GETTING UNOFFICIAL THEME")
const pref = this.osmConnection.GetLongPreference("unofficial-theme-" + id)
const str = pref.data
if (str === undefined || str === "undefined" || str === "") {
pref.setData(null)
return undefined
}
try {
const value: {
id: string
icon: string
title: any
shortDescription: any
definition?: any
isOfficial: boolean
} = JSON.parse(str)
value.isOfficial = false
return value
} catch (e) {
console.warn(
"Removing theme " +
id +
" as it could not be parsed from the preferences; the content is:",
str
)
pref.setData(null)
return undefined
}
}
public markLayoutAsVisited(layout: LayoutConfig) {
if (!layout) {
console.error("Trying to mark a layout as visited, but ", layout, " got passed")
return
}
if (layout.hideFromOverview) {
this.osmConnection.isLoggedIn.addCallbackAndRunD((loggedIn) => {
if (loggedIn) {
this.osmConnection
.GetPreference("hidden-theme-" + layout?.id + "-enabled")
.setData("true")
return true
}
})
}
if (!layout.official) {
this.osmConnection.GetLongPreference("unofficial-theme-" + layout.id).setData(
JSON.stringify({
id: layout.id,
icon: layout.icon,
title: layout.title.translations,
shortDescription: layout.shortDescription.translations,
definition: layout["definition"],
})
)
}
}
private InitInstalledUserThemes(): Store<string[]> {
const prefix = "mapcomplete-unofficial-theme-"
const postfix = "-combined-length"
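// e.g. (illustrative key): "mapcomplete-unofficial-theme-my_theme-combined-length" -> "my_theme"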
return this.osmConnection.preferencesHandler.preferences.map((prefs) =>
Object.keys(prefs)
.filter((k) => k.startsWith(prefix) && k.endsWith(postfix))
.map((k) => k.substring(prefix.length, k.length - postfix.length))
)
}
private initHomeLocation(): FeatureSource {
const empty = []
const feature: Store<Feature[]> = Stores.ListStabilized(
this.osmConnection.userDetails.map((userDetails) => {
if (userDetails === undefined) {
return undefined
}
const home = userDetails.home
if (home === undefined) {
return undefined
}
return [home.lon, home.lat]
})
).map((homeLonLat) => {
if (homeLonLat === undefined) {
return empty
}
return [
<Feature>{
type: "Feature",
properties: {
id: "home",
"user:home": "yes",
_lon: homeLonLat[0],
_lat: homeLonLat[1],
},
geometry: {
type: "Point",
coordinates: homeLonLat,
},
},
]
})
return new StaticFeatureSource(feature)
}
/**
* Initialize the 'amended preferences'.
* This is inherently a dirty and chaotic method, as it shoves many properties into this EventSource
* */
private initAmendedPrefs(
layout?: LayoutConfig,
featureSwitches?: FeatureSwitchState
): UIEventSource<Record<string, string>> {
const amendedPrefs = new UIEventSource<Record<string, string>>({
_theme: layout?.id,
_backend: this.osmConnection.Backend(),
_applicationOpened: new Date().toISOString(),
_supports_sharing:
typeof window === "undefined" ? "no" : window.navigator.share ? "yes" : "no",
})
for (const key in Constants.userJourney) {
amendedPrefs.data["__userjourney_" + key] = Constants.userJourney[key]
}
const osmConnection = this.osmConnection
osmConnection.preferencesHandler.preferences.addCallback((newPrefs) => {
for (const k in newPrefs) {
const v = newPrefs[k]
if (k.endsWith("-combined-length")) {
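// OSM preference values are limited to 255 characters, so long values are stored in chunks.
// Illustrative key names (derived from the slicing below): "foo-combined-length" holds the number of chunks,
// while "foo-combined-0", "foo-combined-1", ... hold the pieces that are glued back together here.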
const l = Number(v)
const key = k.substring(0, k.length - "length".length)
let combined = ""
for (let i = 0; i < l; i++) {
combined += newPrefs[key + i]
}
amendedPrefs.data[key.substring(0, key.length - "-combined-".length)] = combined
} else {
amendedPrefs.data[k] = newPrefs[k]
}
}
amendedPrefs.ping()
console.log("Amended prefs are:", amendedPrefs.data)
})
const usersettingsConfig = UserRelatedState.usersettingsConfig
const translationMode = osmConnection.GetPreference("translation-mode")
Locale.language.mapD(
(language) => {
amendedPrefs.data["_language"] = language
const trmode = translationMode.data
if ((trmode === "true" || trmode === "mobile") && layout !== undefined) {
const missing = layout.missingTranslations()
const total = missing.total
const untranslated = missing.untranslated.get(language) ?? []
const hasMissingTheme = untranslated.some((k) => k.startsWith("themes:"))
const missingLayers = Utils.Dedup(
untranslated
.filter((k) => k.startsWith("layers:"))
.map((k) => k.slice("layers:".length).split(".")[0])
)
const zenLinks: { link: string; id: string }[] = Utils.NoNull([
hasMissingTheme
? {
id: "theme:" + layout.id,
link: LinkToWeblate.hrefToWeblateZen(
language,
"themes",
layout.id
),
}
: undefined,
...missingLayers.map((id) => ({
id: "layer:" + id,
link: LinkToWeblate.hrefToWeblateZen(language, "layers", id),
})),
])
const untranslated_count = untranslated.length
amendedPrefs.data["_translation_total"] = "" + total
amendedPrefs.data["_translation_translated_count"] =
"" + (total - untranslated_count)
amendedPrefs.data["_translation_percentage"] =
"" + Math.floor((100 * (total - untranslated_count)) / total)
amendedPrefs.data["_translation_links"] = JSON.stringify(zenLinks)
}
amendedPrefs.ping()
},
[translationMode]
)
osmConnection.userDetails.addCallback((userDetails) => {
for (const k in userDetails) {
amendedPrefs.data["_" + k] = "" + userDetails[k]
}
for (const [name, code, _] of usersettingsConfig.calculatedTags) {
try {
let result = new Function("feat", "return " + code + ";")({
properties: amendedPrefs.data,
})
if (result !== undefined && result !== "" && result !== null) {
if (typeof result !== "string") {
result = JSON.stringify(result)
}
amendedPrefs.data[name] = result
}
} catch (e) {
console.error(
"Calculating a tag for userprofile-settings failed for variable",
name,
e
)
}
}
const simplifiedName = userDetails.name.toLowerCase().replace(/\s+/g, "")
const isTranslator = translators.contributors.find(
(c: { contributor: string; commits: number }) => {
const replaced = c.contributor.toLowerCase().replace(/\s+/g, "")
return replaced === simplifiedName
}
)
if (isTranslator) {
amendedPrefs.data["_translation_contributions"] = "" + isTranslator.commits
}
const isCodeContributor = codeContributors.contributors.find(
(c: { contributor: string; commits: number }) => {
const replaced = c.contributor.toLowerCase().replace(/\s+/g, "")
return replaced === simplifiedName
}
)
if (isCodeContributor) {
amendedPrefs.data["_code_contributions"] = "" + isCodeContributor.commits
}
amendedPrefs.ping()
})
amendedPrefs.addCallbackD((tags) => {
for (const key in tags) {
if (key.startsWith("_") || key === "mapcomplete-language") {
// Language is managed separately
continue
}
this.osmConnection.GetPreference(key, undefined, { prefix: "" }).setData(tags[key])
}
})
for (const key in featureSwitches) {
if (featureSwitches[key].addCallbackAndRun) {
featureSwitches[key].addCallbackAndRun((v) => {
const oldV = amendedPrefs.data["__" + key]
if (oldV === v) {
return
}
amendedPrefs.data["__" + key] = "" + v
amendedPrefs.ping()
})
}
}
return amendedPrefs
}
}

401
src/Logic/Tags/And.ts Normal file
View file

@ -0,0 +1,401 @@
import { TagsFilter } from "./TagsFilter"
import { Or } from "./Or"
import { TagUtils } from "./TagUtils"
import { Tag } from "./Tag"
import { RegexTag } from "./RegexTag"
export class And extends TagsFilter {
public and: TagsFilter[]
constructor(and: TagsFilter[]) {
super()
this.and = and
}
public static construct(and: TagsFilter[]): TagsFilter {
if (and.length === 1) {
return and[0]
}
return new And(and)
}
private static combine(filter: string, choices: string[]): string[] {
const values = []
for (const or of choices) {
values.push(filter + or)
}
return values
}
normalize() {
const ands = []
for (const c of this.and) {
if (c instanceof And) {
ands.push(...c.and)
} else {
ands.push(c)
}
}
return new And(ands)
}
matchesProperties(tags: Record<string, string>): boolean {
for (const tagsFilter of this.and) {
if (!tagsFilter.matchesProperties(tags)) {
return false
}
}
return true
}
/**
*
* const and = new And([new Tag("boundary","protected_area"), new RegexTag("protect_class","98",true)])
* and.asOverpass() // => [ "[\"boundary\"=\"protected_area\"][\"protect_class\"!=\"98\"]" ]
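*
* // Illustrative sketch (hypothetical tags; assumes Or.asOverpass returns one filter string per disjunct):
* // new And([new Or([new Tag("amenity","cafe"), new Tag("amenity","bar")]), new Tag("indoor","yes")]).asOverpass()
* // would yield [ '["amenity"="cafe"]["indoor"="yes"]', '["amenity"="bar"]["indoor"="yes"]' ]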
*/
asOverpass(): string[] {
let allChoices: string[] = null
for (const andElement of this.and) {
const andElementFilter = andElement.asOverpass()
if (allChoices === null) {
allChoices = andElementFilter
continue
}
const newChoices: string[] = []
for (const choice of allChoices) {
newChoices.push(...And.combine(choice, andElementFilter))
}
allChoices = newChoices
}
return allChoices
}
asHumanString(linkToWiki: boolean, shorten: boolean, properties) {
return this.and
.map((t) => t.asHumanString(linkToWiki, shorten, properties))
.filter((x) => x !== "")
.join("&")
}
isUsableAsAnswer(): boolean {
for (const t of this.and) {
if (!t.isUsableAsAnswer()) {
return false
}
}
return true
}
/**
* const t0 = new And([
* new Tag("valves:special", "A"),
* new Tag("valves", "A")
* ])
* const t1 = new And([new Tag("valves", "A")])
* const t2 = new And([new Tag("valves", "B")])
* t0.shadows(t0) // => true
* t1.shadows(t1) // => true
* t2.shadows(t2) // => true
* t0.shadows(t1) // => false
* t0.shadows(t2) // => false
* t1.shadows(t0) // => false
* t1.shadows(t2) // => false
* t2.shadows(t0) // => false
* t2.shadows(t1) // => false
*/
shadows(other: TagsFilter): boolean {
if (!(other instanceof And)) {
return false
}
for (const selfTag of this.and) {
let matchFound = false
for (const otherTag of other.and) {
matchFound = selfTag.shadows(otherTag)
if (matchFound) {
break
}
}
if (!matchFound) {
return false
}
}
for (const otherTag of other.and) {
let matchFound = false
for (const selfTag of this.and) {
matchFound = selfTag.shadows(otherTag)
if (matchFound) {
break
}
}
if (!matchFound) {
return false
}
}
return true
}
usedKeys(): string[] {
return [].concat(...this.and.map((subkeys) => subkeys.usedKeys()))
}
usedTags(): { key: string; value: string }[] {
return [].concat(...this.and.map((subkeys) => subkeys.usedTags()))
}
asChange(properties: Record<string, string>): { k: string; v: string }[] {
const result = []
for (const tagsFilter of this.and) {
result.push(...tagsFilter.asChange(properties))
}
return result
}
/**
* IN some contexts, some expressions can be considered true, e.g.
* (X=Y | (A=B & X=Y))
* ^---------^
* When the evaluation hits (A=B & X=Y), we know _for sure_ that X=Y does _not_ match, as it would have matched the first clause otherwise.
* This means that the entire 'AND' is considered FALSE
*
* new And([ new Tag("key","value") ,new Tag("other_key","value")]).removePhraseConsideredKnown(new Tag("key","value"), true) // => new Tag("other_key","value")
* new And([ new Tag("key","value") ,new Tag("other_key","value")]).removePhraseConsideredKnown(new Tag("key","value"), false) // => false
* new And([ new RegexTag("key",/^..*$/) ,new Tag("other_key","value")]).removePhraseConsideredKnown(new Tag("key","value"), true) // => new Tag("other_key","value")
* new And([ new Tag("key","value") ]).removePhraseConsideredKnown(new Tag("key","value"), true) // => true
*
* // should remove 'club~*' if we know that 'club=climbing'
* const expr = <And> TagUtils.Tag({and: ["sport=climbing", {or:["club~*", "office~*"]}]} )
* expr.removePhraseConsideredKnown(new Tag("club","climbing"), true) // => new Tag("sport","climbing")
*
* const expr = <And> TagUtils.Tag({and: ["sport=climbing", {or:["club~*", "office~*"]}]} )
* expr.removePhraseConsideredKnown(new Tag("club","climbing"), false) // => expr
*/
removePhraseConsideredKnown(knownExpression: TagsFilter, value: boolean): TagsFilter | boolean {
const newAnds: TagsFilter[] = []
for (const tag of this.and) {
if (tag instanceof And) {
throw "Optimize expressions before using removePhraseConsideredKnown"
}
if (tag instanceof Or) {
const r = tag.removePhraseConsideredKnown(knownExpression, value)
if (r === true) {
continue
}
if (r === false) {
return false
}
newAnds.push(r)
continue
}
if (value && knownExpression.shadows(tag)) {
/**
* At this point, we do know that 'knownExpression' is true in every case
* As `shadows` does define that 'tag' MUST be true if 'knownExpression' is true,
* we can be sure that 'tag' is true as well.
*
* "True" is the neutral element in an AND, so we can skip the tag
*/
continue
}
if (!value && tag.shadows(knownExpression)) {
/**
* We know that knownExpression is unmet.
* if the tag shadows 'knownExpression' (which is the case when control flows gets here),
* then tag CANNOT be met too, as known expression is not met.
*
* This implies that 'tag' must be false too!
*/
// false is the element which absorbs all
return false
}
newAnds.push(tag)
}
if (newAnds.length === 0) {
return true
}
return And.construct(newAnds)
}
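/**
* Returns an optimized version of this And (or a boolean when the whole expression collapses).
* Illustrative examples of the behaviour implemented below:
*
* new And([]).optimize() // => true
* new And([new Tag("x", "a"), new Tag("x", "b")]).optimize() // => false
*/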
optimize(): TagsFilter | boolean {
if (this.and.length === 0) {
return true
}
const optimizedRaw = this.and
.map((t) => t.optimize())
.filter((t) => t !== true /* true is the neutral element in an AND, we drop them*/)
if (optimizedRaw.some((t) => t === false)) {
// We have an AND with a contained false: this is always 'false'
return false
}
const optimized = <TagsFilter[]>optimizedRaw
{
// Conflicting keys do return false
const properties: object = {}
for (const opt of optimized) {
if (opt instanceof Tag) {
properties[opt.key] = opt.value
}
}
for (let i = 0; i < optimized.length; i++) {
const opt = optimized[i]
if (opt instanceof Tag) {
const k = opt.key
const v = properties[k]
if (v === undefined) {
continue
}
if (v !== opt.value) {
// detected an internal conflict
return false
}
}
if (opt instanceof RegexTag) {
const k = opt.key
if (typeof k !== "string") {
continue
}
const v = properties[k]
if (v === undefined) {
continue
}
if (opt.invert) {
// We should _not_ match this value
// If 'v' is given, we already know what value it should be
// If 'v' is the not-expected value, we have a conflict and return false
// Otherwise, we can safely drop this value
const doesMatch =
(typeof opt.value === "string" && v === opt.value) ||
v.match(<RegExp>opt.value) !== null
if (doesMatch) {
// We have a conflict as 'opt' is inverted
return false
} else {
optimized.splice(i, 1)
i--
}
} else if (v !== opt.value) {
// detected an internal conflict
return false
}
}
}
}
const newAnds: TagsFilter[] = []
let containedOrs: Or[] = []
for (const tf of optimized) {
if (tf instanceof And) {
newAnds.push(...tf.and)
} else if (tf instanceof Or) {
containedOrs.push(tf)
} else {
newAnds.push(tf)
}
}
{
let dirty = false
do {
const cleanedContainedOrs: Or[] = []
outer: for (let containedOr of containedOrs) {
for (const known of newAnds) {
// input for optimization: (K=V & (X=Y | K=V))
// containedOr: (X=Y | K=V)
// newAnds (and thus known): (K=V) --> true
const cleaned = containedOr.removePhraseConsideredKnown(known, true)
if (cleaned === true) {
// The neutral element within an AND
continue outer // skip addition too
}
if (cleaned === false) {
// zero element
return false
}
if (cleaned instanceof Or) {
containedOr = cleaned
continue
}
// the 'or' dissolved into a normal tag -> it has to be added to the newAnds
newAnds.push(cleaned)
dirty = true // rerun this algo later on
continue outer
}
cleanedContainedOrs.push(containedOr)
}
containedOrs = cleanedContainedOrs
} while (dirty)
}
containedOrs = containedOrs.filter((ca) => {
const isShadowed = TagUtils.containsEquivalents(newAnds, ca.or)
// If 'isShadowed', then at least one part of the 'OR' is matched by the outer and, so this means that this OR isn't needed at all
// XY & (XY | AB) === XY
return !isShadowed
})
// Extract common keys from the OR
if (containedOrs.length === 1) {
newAnds.push(containedOrs[0])
} else if (containedOrs.length > 1) {
let commonValues: TagsFilter[] = containedOrs[0].or
for (let i = 1; i < containedOrs.length && commonValues.length > 0; i++) {
const containedOr = containedOrs[i]
commonValues = commonValues.filter((cv) =>
containedOr.or.some((candidate) => candidate.shadows(cv))
)
}
if (commonValues.length === 0) {
newAnds.push(...containedOrs)
} else {
const newOrs: TagsFilter[] = []
for (const containedOr of containedOrs) {
const elements = containedOr.or.filter(
(candidate) => !commonValues.some((cv) => cv.shadows(candidate))
)
newOrs.push(Or.construct(elements))
}
commonValues.push(And.construct(newOrs))
const result = new Or(commonValues).optimize()
if (result === false) {
return false
} else if (result === true) {
// neutral element: skip
} else {
newAnds.push(result)
}
}
}
if (newAnds.length === 0) {
return true
}
if (TagUtils.ContainsOppositeTags(newAnds)) {
return false
}
TagUtils.sortFilters(newAnds, true)
return And.construct(newAnds)
}
isNegative(): boolean {
return !this.and.some((t) => !t.isNegative())
}
visit(f: (tagsFilter: TagsFilter) => void) {
f(this)
this.and.forEach((sub) => sub.visit(f))
}
}

View file

@ -0,0 +1,70 @@
import { TagsFilter } from "./TagsFilter"
export default class ComparingTag implements TagsFilter {
private readonly _key: string
private readonly _predicate: (value: string) => boolean
private readonly _representation: string
constructor(
key: string,
predicate: (value: string | undefined) => boolean,
representation: string = ""
) {
this._key = key
this._predicate = predicate
this._representation = representation
}
asChange(properties: Record<string, string>): { k: string; v: string }[] {
throw "A comparable tag can not be used to be uploaded to OSM"
}
asHumanString(linkToWiki: boolean, shorten: boolean, properties: Record<string, string>) {
return this._key + this._representation
}
asOverpass(): string[] {
throw "A comparable tag can not be used as overpass filter"
}
shadows(other: TagsFilter): boolean {
return other === this
}
isUsableAsAnswer(): boolean {
return false
}
/**
* Checks if the properties match
*
* const t = new ComparingTag("key", (x => Number(x) < 42))
* t.matchesProperties({key: 42}) // => false
* t.matchesProperties({key: 41}) // => true
* t.matchesProperties({key: 0}) // => true
* t.matchesProperties({differentKey: 42}) // => false
*/
matchesProperties(properties: Record<string, string>): boolean {
return this._predicate(properties[this._key])
}
usedKeys(): string[] {
return [this._key]
}
usedTags(): { key: string; value: string }[] {
return []
}
optimize(): TagsFilter | boolean {
return this
}
isNegative(): boolean {
return true
}
visit(f: (TagsFilter) => void) {
f(this)
}
}

267
src/Logic/Tags/Or.ts Normal file
View file

@ -0,0 +1,267 @@
import { TagsFilter } from "./TagsFilter"
import { TagUtils } from "./TagUtils"
import { And } from "./And"
export class Or extends TagsFilter {
public or: TagsFilter[]
constructor(or: TagsFilter[]) {
super()
this.or = or
}
public static construct(or: TagsFilter[]): TagsFilter {
if (or.length === 1) {
return or[0]
}
return new Or(or)
}
matchesProperties(properties: Record<string, string>): boolean {
for (const tagsFilter of this.or) {
if (tagsFilter.matchesProperties(properties)) {
return true
}
}
return false
}
/**
*
* import {Tag} from "./Tag";
* import {RegexTag} from "./RegexTag";
*
* const and = new And([new Tag("boundary","protected_area"), new RegexTag("protect_class","98",true)])
* const or = new Or([and, new Tag("leisure", "nature_reserve")])
* or.asOverpass() // => [ "[\"boundary\"=\"protected_area\"][\"protect_class\"!=\"98\"]", "[\"leisure\"=\"nature_reserve\"]" ]
*
* // should fuse nested ors into a single list
* const or = new Or([new Tag("key","value"), new Or([new Tag("key1","value1"), new Tag("key2","value2")])])
* or.asOverpass() // => [ `["key"="value"]`, `["key1"="value1"]`, `["key2"="value2"]` ]
*/
asOverpass(): string[] {
const choices = []
for (const tagsFilter of this.or) {
const subChoices = tagsFilter.asOverpass()
choices.push(...subChoices)
}
return choices
}
asHumanString(linkToWiki: boolean, shorten: boolean, properties) {
return this.or.map((t) => t.asHumanString(linkToWiki, shorten, properties)).join("|")
}
isUsableAsAnswer(): boolean {
return false
}
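/**
* An 'or' shadows another 'or' if every clause of this 'or' shadows at least one clause of the other.
* Illustrative examples of the behaviour implemented below:
*
* import {Tag} from "./Tag";
*
* new Or([new Tag("a", "b")]).shadows(new Or([new Tag("a", "b"), new Tag("c", "d")])) // => true
* new Or([new Tag("a", "b"), new Tag("c", "d")]).shadows(new Or([new Tag("a", "b")])) // => false
*/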
shadows(other: TagsFilter): boolean {
if (other instanceof Or) {
for (const selfTag of this.or) {
let matchFound = false
for (let i = 0; i < other.or.length && !matchFound; i++) {
let otherTag = other.or[i]
matchFound = selfTag.shadows(otherTag)
}
if (!matchFound) {
return false
}
}
return true
}
return false
}
usedKeys(): string[] {
return [].concat(...this.or.map((subkeys) => subkeys.usedKeys()))
}
usedTags(): { key: string; value: string }[] {
return [].concat(...this.or.map((subkeys) => subkeys.usedTags()))
}
asChange(properties: Record<string, string>): { k: string; v: string }[] {
const result = []
for (const tagsFilter of this.or) {
result.push(...tagsFilter.asChange(properties))
}
return result
}
/**
* IN some contexts, some expressions can be considered true, e.g.
* (X=Y & (A=B | X=Y))
* ^---------^
* When the evaluation hits (A=B | X=Y), we know _for sure_ that X=Y _does_ match, as the expression would have failed on the first clause otherwise.
* This means we can safely ignore this in the OR
*
* new Or([ new Tag("key","value") ,new Tag("other_key","value")]).removePhraseConsideredKnown(new Tag("key","value"), true) // =>true
* new Or([ new Tag("key","value") ,new Tag("other_key","value")]).removePhraseConsideredKnown(new Tag("key","value"), false) // => new Tag("other_key","value")
* new Or([ new Tag("key","value") ]).removePhraseConsideredKnown(new Tag("key","value"), true) // => true
* new Or([ new Tag("key","value") ]).removePhraseConsideredKnown(new Tag("key","value"), false) // => false
* new Or([new RegexTag("x", "y", true),new RegexTag("c", "d")]).removePhraseConsideredKnown(new Tag("foo","bar"), false) // => new Or([new RegexTag("x", "y", true),new RegexTag("c", "d")])
*/
removePhraseConsideredKnown(knownExpression: TagsFilter, value: boolean): TagsFilter | boolean {
const newOrs: TagsFilter[] = []
for (const tag of this.or) {
if (tag instanceof Or) {
throw "Optimize expressions before using removePhraseConsideredKnown"
}
if (tag instanceof And) {
const r = tag.removePhraseConsideredKnown(knownExpression, value)
if (r === false) {
continue
}
if (r === true) {
return true
}
newOrs.push(r)
continue
}
if (value && knownExpression.shadows(tag)) {
/**
* At this point, we do know that 'knownExpression' is true in every case
* As `shadows` does define that 'tag' MUST be true if 'knownExpression' is true,
* we can be sure that 'tag' is true as well.
*
* "True" is the absorbing element in an OR, so we can return true
*/
return true
}
if (!value && tag.shadows(knownExpression)) {
/**
* We know that knownExpression is unmet.
* if the tag shadows 'knownExpression' (which is the case when control flows gets here),
* then tag CANNOT be met too, as known expression is not met.
*
* This implies that 'tag' must be false too!
* false is the neutral element in an OR
*/
continue
}
newOrs.push(tag)
}
if (newOrs.length === 0) {
return false
}
return Or.construct(newOrs)
}
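/**
* Returns an optimized version of this Or (or a boolean when the whole expression collapses).
* Illustrative examples of the behaviour implemented below:
*
* import {Tag} from "./Tag";
* import {RegexTag} from "./RegexTag";
*
* new Or([]).optimize() // => false
* new Or([new Tag("x", "y"), new RegexTag("x", "y", true)]).optimize() // => true
*/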
optimize(): TagsFilter | boolean {
if (this.or.length === 0) {
return false
}
const optimizedRaw = this.or
.map((t) => t.optimize())
.filter((t) => t !== false /* false is the neutral element in an OR, we drop them*/)
if (optimizedRaw.some((t) => t === true)) {
// We have an OR with a contained true: this is always 'true'
return true
}
const optimized = <TagsFilter[]>optimizedRaw
const newOrs: TagsFilter[] = []
let containedAnds: And[] = []
for (const tf of optimized) {
if (tf instanceof Or) {
// expand all the nested ors...
newOrs.push(...tf.or)
} else if (tf instanceof And) {
// partition of all the ands
containedAnds.push(tf)
} else {
newOrs.push(tf)
}
}
{
let dirty = false
do {
const cleanedContainedANds: And[] = []
outer: for (let containedAnd of containedAnds) {
for (const known of newOrs) {
// input for optimization: (K=V | (X=Y & K=V))
// containedAnd: (X=Y & K=V)
// newOrs (and thus known): (K=V) --> false
const cleaned = containedAnd.removePhraseConsideredKnown(known, false)
if (cleaned === false) {
// The neutral element within an OR
continue outer // skip addition too
}
if (cleaned === true) {
// zero element
return true
}
if (cleaned instanceof And) {
containedAnd = cleaned
continue // clean up with the other known values
}
// the 'and' dissolved into a normal tag -> it has to be added to the newOrs
newOrs.push(cleaned)
dirty = true // rerun this algo later on
continue outer
}
cleanedContainedANds.push(containedAnd)
}
containedAnds = cleanedContainedANds
} while (dirty)
}
// Extract common keys from the ANDS
if (containedAnds.length === 1) {
newOrs.push(containedAnds[0])
} else if (containedAnds.length > 1) {
let commonValues: TagsFilter[] = containedAnds[0].and
for (let i = 1; i < containedAnds.length && commonValues.length > 0; i++) {
const containedAnd = containedAnds[i]
commonValues = commonValues.filter((cv) =>
containedAnd.and.some((candidate) => candidate.shadows(cv))
)
}
if (commonValues.length === 0) {
newOrs.push(...containedAnds)
} else {
const newAnds: TagsFilter[] = []
for (const containedAnd of containedAnds) {
const elements = containedAnd.and.filter(
(candidate) => !commonValues.some((cv) => cv.shadows(candidate))
)
newAnds.push(And.construct(elements))
}
commonValues.push(Or.construct(newAnds))
const result = new And(commonValues).optimize()
if (result === true) {
return true
} else if (result === false) {
// neutral element: skip
} else {
newOrs.push(And.construct(commonValues))
}
}
}
if (newOrs.length === 0) {
return false
}
if (TagUtils.ContainsOppositeTags(newOrs)) {
return true
}
TagUtils.sortFilters(newOrs, false)
return Or.construct(newOrs)
}
isNegative(): boolean {
return this.or.some((t) => t.isNegative())
}
visit(f: (tagsFilter: TagsFilter) => void) {
f(this)
this.or.forEach((t) => t.visit(f))
}
}

331
src/Logic/Tags/RegexTag.ts Normal file
View file

@ -0,0 +1,331 @@
import { Tag } from "./Tag"
import { TagsFilter } from "./TagsFilter"
export class RegexTag extends TagsFilter {
public readonly key: RegExp | string
public readonly value: RegExp | string
public readonly invert: boolean
public readonly matchesEmpty: boolean
constructor(key: string | RegExp, value: RegExp | string, invert: boolean = false) {
super()
this.key = key
this.value = value
this.invert = invert
this.matchesEmpty = RegexTag.doesMatch("", this.value)
}
/**
*
* Checks that the value provided by the object properties (`fromTag`) matches the specified regex `possibleRegex`
*
* RegexTag.doesMatch("abc", /abc/) // => true
* RegexTag.doesMatch("ab", /abc/) // => false
* RegexTag.doesMatch("", /.+/) // => false
* RegexTag.doesMatch("", new RegExp(".*")) // => true
*
* @param fromTag
* @param possibleRegex
* @private
*/
private static doesMatch(fromTag: string | number, possibleRegex: string | RegExp): boolean {
if (fromTag === undefined) {
return false
}
if (typeof fromTag === "number") {
fromTag = "" + fromTag
}
if (typeof possibleRegex === "string") {
return fromTag === possibleRegex
}
return possibleRegex.test(fromTag)
}
private static source(r: string | RegExp) {
if (typeof r === "string") {
return r
}
return r.source
}
/**
* new RegexTag("a", /^[xyz]$/).asOverpass() // => [ `["a"~"^[xyz]$"]` ]
*
* // A wildcard regextag should only give the key
* new RegexTag("a", /^..*$/).asOverpass() // => [ `["a"]` ]
* new RegexTag("a", /.+/).asOverpass() // => [ `["a"]` ]
*
* // A regextag with a regex key should give correct output
* new RegexTag(/a.*x/, /^..*$/).asOverpass() // => [ `[~"a.*x"~\"^..*$\"]` ]
*
* // A regextag with a case invariant flag should signal this to overpass
* new RegexTag("key", /^.*value.*$/i).asOverpass() // => [ `["key"~\"^.*value.*$\",i]` ]
*/
asOverpass(): string[] {
const inv = this.invert ? "!" : ""
if (typeof this.key !== "string") {
// The key is a regex too
return [`[~"${this.key.source}"${inv}~"${RegexTag.source(this.value)}"]`]
}
if (this.value instanceof RegExp) {
const src = this.value.source
if (src === "^..*$" || src === ".+") {
// anything goes
return [`[${inv}"${this.key}"]`]
}
const modifier = this.value.ignoreCase ? ",i" : ""
return [`["${this.key}"${inv}~"${src}"${modifier}]`]
} else {
// Normal key and normal value
return [`["${this.key}"${inv}="${this.value}"]`]
}
}
isUsableAsAnswer(): boolean {
return false
}
/**
* Checks if this tag matches the given properties
*
* const isNotEmpty = new RegexTag("key",/^$/, true);
* isNotEmpty.matchesProperties({"key": "value"}) // => true
* isNotEmpty.matchesProperties({"key": "other_value"}) // => true
* isNotEmpty.matchesProperties({"key": ""}) // => false
* isNotEmpty.matchesProperties({"other_key": ""}) // => false
* isNotEmpty.matchesProperties({"other_key": "value"}) // => false
*
* const isNotEmpty = new RegexTag("key",/^..*$/, true);
* isNotEmpty.matchesProperties({"key": "value"}) // => false
* isNotEmpty.matchesProperties({"key": "other_value"}) // => false
* isNotEmpty.matchesProperties({"key": ""}) // => true
* isNotEmpty.matchesProperties({"other_key": ""}) // => true
* isNotEmpty.matchesProperties({"other_key": "value"}) // => true
*
* const notRegex = new RegexTag("x", /^y$/, true)
* notRegex.matchesProperties({"x": "y"}) // => false
* notRegex.matchesProperties({"x": "z"}) // => true
* notRegex.matchesProperties({"x": ""}) // => true
* notRegex.matchesProperties({}) // => true
*
* const bicycleTubeRegex = new RegexTag("vending", /^.*bicycle_tube.*$/)
* bicycleTubeRegex.matchesProperties({"vending": "bicycle_tube"}) // => true
* bicycleTubeRegex.matchesProperties({"vending": "something;bicycle_tube"}) // => true
* bicycleTubeRegex.matchesProperties({"vending": "bicycle_tube;something"}) // => true
* bicycleTubeRegex.matchesProperties({"vending": "xyz;bicycle_tube;something"}) // => true
*
* const nameStartsWith = new RegexTag("name", /^[sS]peelbos.*$/)
* nameStartsWith.matchesProperties({"name": "Speelbos Sint-Anna"}) // => true
* nameStartsWith.matchesProperties({"name": "speelbos Sint-Anna"}) // => true
* nameStartsWith.matchesProperties({"name": "Sint-Anna"}) // => false
* nameStartsWith.matchesProperties({"name": ""}) // => false
*
* const notEmptyList = new RegexTag("xyz", /^\[\]$/, true)
* notEmptyList.matchesProperties({"xyz": undefined}) // => true
* notEmptyList.matchesProperties({"xyz": "[]"}) // => false
* notEmptyList.matchesProperties({"xyz": "[\"abc\"]"}) // => true
*
* const importMatch = new RegexTag("tags", /(^|.*;)amenity=public_bookcase($|;.*)/)
* importMatch.matchesProperties({"tags": "amenity=public_bookcase;name=test"}) // =>true
* importMatch.matchesProperties({"tags": "amenity=public_bookcase"}) // =>true
* importMatch.matchesProperties({"tags": "name=test;amenity=public_bookcase"}) // =>true
* importMatch.matchesProperties({"tags": "amenity=bench"}) // =>false
*
* new RegexTag("key","value").matchesProperties({"otherkey":"value"}) // => false
* new RegexTag("key","value",true).matchesProperties({"otherkey":"something"}) // => true
*
* const v: string = <any> {someJson: ""}
* new RegexTag("key", new RegExp(".*")).matchesProperties({"key": v}) // => true
* new RegexTag("key", new RegExp(".*")).matchesProperties({"key": ""}) // => true
* new RegexTag("key", new RegExp(".*")).matchesProperties({"key": null}) // => true
* new RegexTag("key", new RegExp(".*")).matchesProperties({"key": undefined}) // => true
*
* const v: string = <any> {someJson: ""}
* new RegexTag("key", new RegExp(".+")).matchesProperties({"key": null}) // => false
* new RegexTag("key", new RegExp(".+")).matchesProperties({"key": undefined}) // => false
* new RegexTag("key", new RegExp(".+")).matchesProperties({"key": v}) // => true
* new RegexTag("key", new RegExp(".+")).matchesProperties({"key": ""}) // => false
*
* // Should work with non-string objects
* new RegexTag("key", "true").matchesProperties({"key": true}) // => true
* new RegexTag("key", "true", true).matchesProperties({"key": true}) // => false
*/
matchesProperties(tags: Record<string, string | number | boolean>): boolean {
if (typeof this.key === "string") {
let value = tags[this.key]
if (!value || value === "") {
// No tag is known, so we assume the empty string
// If this regexTag matches the empty string, we return true, otherwise false
// (Note: if inverted, we must reverse this)
return this.invert !== this.matchesEmpty
}
if (typeof value === "string") {
return RegexTag.doesMatch(value, this.value) != this.invert
}
// The value under test is _not_ a string; it can be a calculated tag, thus be e.g. a number or a boolean
// It might also be an object
if (typeof this.value !== "string") {
const regExp = this.value
if (regExp.source === ".*") {
// We match anything, and we do have some value
return !this.invert
}
}
if (typeof value !== "string") {
value = JSON.stringify(value)
}
return RegexTag.doesMatch(value, this.value) != this.invert
}
for (const key in tags) {
if (key === undefined) {
continue
}
if (RegexTag.doesMatch(key, this.key)) {
let value = tags[key] ?? ""
if (typeof value !== "string") {
value = JSON.stringify(value)
}
return RegexTag.doesMatch(value, this.value) != this.invert
}
}
if (this.matchesEmpty) {
// The value is 'empty'
return !this.invert
}
// The matching key was not found
return this.invert
}
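/**
* Renders this tag as a compact, human-readable string, e.g. for debugging.
* Illustrative examples of the behaviour implemented below:
*
* new RegexTag("key", /^..*$/).asHumanString() // => "key~^..*$"
* new RegexTag("key", "value", true).asHumanString() // => "key!=value"
*/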
asHumanString() {
if (typeof this.key === "string") {
const oper = typeof this.value === "string" ? "=" : "~"
return `${this.key}${this.invert ? "!" : ""}${oper}${RegexTag.source(this.value)}`
}
return `${this.key.source}${this.invert ? "!" : ""}~~${RegexTag.source(this.value)}`
}
/**
*
* new RegexTag("key","value").shadows(new Tag("key","value")) // => true
* new RegexTag("key",/value/).shadows(new RegexTag("key","value")) // => true
* new RegexTag("key",/^..*$/).shadows(new Tag("key","value")) // => false
* new RegexTag("key",/^..*$/).shadows(new Tag("other_key","value")) // => false
* new RegexTag("key", /^a+$/).shadows(new Tag("key", "a")) // => false
*
*
* // should not shadow too eagerly: the first tag might match 'key=abc', the second won't
* new RegexTag("key", /^..*$/).shadows(new Tag("key", "some_value")) // => false
*
* // should handle 'invert'
* new RegexTag("key",/^..*$/, true).shadows(new Tag("key","value")) // => false
* new RegexTag("key",/^..*$/, true).shadows(new Tag("key","")) // => true
* new RegexTag("key","value", true).shadows(new Tag("key","value")) // => false
* new RegexTag("key","value", true).shadows(new Tag("key","some_other_value")) // => false
*/
shadows(other: TagsFilter): boolean {
if (other instanceof RegexTag) {
if ((other.key["source"] ?? other.key) !== (this.key["source"] ?? this.key)) {
// Keys don't match, never shadowing
return false
}
if (
(other.value["source"] ?? other.key) === (this.value["source"] ?? this.key) &&
this.invert == other.invert
) {
// Values (and inverts) match
return true
}
if (typeof other.value === "string") {
const valuesMatch = RegexTag.doesMatch(other.value, this.value)
if (!this.invert && !other.invert) {
// this: key~value, other: key=value
return valuesMatch
}
if (this.invert && !other.invert) {
// this: key!~value, other: key=value
return !valuesMatch
}
if (!this.invert && other.invert) {
// this: key~value, other: key!=value
return !valuesMatch
}
if (this.invert && other.invert) {
// this: key!~value, other: key!=value
return valuesMatch
}
}
return false
}
if (other instanceof Tag) {
if (!RegexTag.doesMatch(other.key, this.key)) {
// Keys don't match
return false
}
if (this.value["source"] === "^..*$") {
if (this.invert) {
return other.value === ""
}
return false
}
if (this.invert) {
/*
* this: "a!=b"
* other: "a=c"
* actual property: a=x
* In other words: shadowing will never occur here
*/
return false
}
// Unless the values are the same, it is pretty hard to figure out if they are shadowing. This is future work
return (this.value["source"] ?? this.value) === other.value
}
return false
}
usedKeys(): string[] {
if (typeof this.key === "string") {
return [this.key]
}
throw "Key cannot be determined as it is a regex"
}
usedTags(): { key: string; value: string }[] {
return []
}
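/**
* Converts this tag into the key/value-pair to upload to OSM, when possible.
* Illustrative example of the behaviour implemented below:
*
* new RegexTag("key", "value").asChange() // => [{k: "key", v: "value"}]
*/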
asChange(): { k: string; v: string }[] {
if (this.invert) {
return []
}
if (typeof this.key === "string") {
if (typeof this.value === "string") {
return [{ k: this.key, v: this.value }]
}
if (this.value.toString() != "/^..*$/" && this.value.toString() != ".+") {
console.warn("Regex value in tag; using wildcard:", this.key, this.value)
}
return [{ k: this.key, v: undefined }]
}
console.error("Cannot export regex tag to asChange; ", this.key, this.value)
return []
}
optimize(): TagsFilter | boolean {
return this
}
isNegative(): boolean {
return this.invert
}
visit(f: (TagsFilter) => void) {
f(this)
}
}

View file

@ -0,0 +1,114 @@
import { TagsFilter } from "./TagsFilter"
import { Tag } from "./Tag"
import { Utils } from "../../Utils"
/**
* The substituting-tag uses the tags of a feature as variables and replaces them.
*
* e.g. key:={other_key}_{ref} will match an object that has at least 'key'.
* If {other_key} is _not_ defined, it will not be substituted.
*
* The 'key' is always fixed and should not contain substitutions.
* This cannot be used to query features
*/
export default class SubstitutingTag implements TagsFilter {
private readonly _key: string
private readonly _value: string
private readonly _invert: boolean
constructor(key: string, value: string, invert = false) {
this._key = key
this._value = value
this._invert = invert
}
private static substituteString(template: string, dict: Record<string, string>): string {
for (const k in dict) {
template = template.replace(new RegExp("\\{" + k + "\\}", "g"), dict[k])
}
return template.replace(/{.*}/g, "")
}
asTag(currentProperties: Record<string, string>) {
if (this._invert) {
throw "Cannot convert an inverted substituting tag"
}
return new Tag(this._key, Utils.SubstituteKeys(this._value, currentProperties))
}
asHumanString(linkToWiki: boolean, shorten: boolean, properties) {
return (
this._key +
(this._invert ? "!" : "") +
"=" +
SubstitutingTag.substituteString(this._value, properties)
)
}
asOverpass(): string[] {
throw "A variable with substitution can not be used to query overpass"
}
shadows(other: TagsFilter): boolean {
if (!(other instanceof SubstitutingTag)) {
return false
}
return (
other._key === this._key &&
other._value === this._value &&
other._invert === this._invert
)
}
isUsableAsAnswer(): boolean {
return !this._invert
}
/**
* const assign = new SubstitutingTag("survey:date", "{_date:now}")
* assign.matchesProperties({"survey:date": "2021-03-29", "_date:now": "2021-03-29"}) // => true
* assign.matchesProperties({"survey:date": "2021-03-29", "_date:now": "2021-01-01"}) // => false
* assign.matchesProperties({"survey:date": "2021-03-29"}) // => false
* assign.matchesProperties({"_date:now": "2021-03-29"}) // => false
* assign.matchesProperties({"some_key": "2021-03-29"}) // => false
*/
matchesProperties(properties: Record<string, string>): boolean {
const value = properties[this._key]
if (value === undefined || value === "") {
return false
}
const expectedValue = SubstitutingTag.substituteString(this._value, properties)
return (value === expectedValue) !== this._invert
}
usedKeys(): string[] {
return [this._key]
}
usedTags(): { key: string; value: string }[] {
return []
}
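/**
* Computes the key/value to upload, substituting the placeholders with the given properties.
* Illustrative example of the behaviour implemented below:
*
* new SubstitutingTag("survey:date", "{_date:now}").asChange({"_date:now": "2023-07-17"}) // => [{k: "survey:date", v: "2023-07-17"}]
*/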
asChange(properties: Record<string, string>): { k: string; v: string }[] {
if (this._invert) {
throw "An inverted substituting tag can not be used to create a change"
}
const v = SubstitutingTag.substituteString(this._value, properties)
if (v.match(/{.*}/) !== null) {
throw "Could not calculate all the substitutions: still have " + v
}
return [{ k: this._key, v: v }]
}
optimize(): TagsFilter | boolean {
return this
}
isNegative(): boolean {
return false
}
visit(f: (tagsFilter: TagsFilter) => void) {
f(this)
}
}

155
src/Logic/Tags/Tag.ts Normal file
View file

@ -0,0 +1,155 @@
import { Utils } from "../../Utils"
import { TagsFilter } from "./TagsFilter"
export class Tag extends TagsFilter {
public key: string
public value: string
constructor(key: string, value: string) {
super()
this.key = key
this.value = value
if (key === undefined || key === "") {
throw "Invalid key: undefined or empty"
}
if (value === undefined) {
throw `Invalid value while constructing a Tag with key '${key}': value is undefined`
}
if (value === "*") {
console.warn(`Got suspicious tag ${key}=* ; did you mean ${key}~* ?`)
}
}
/**
*
* const tag = new Tag("key","value")
* tag.matchesProperties({"key": "value"}) // => true
* tag.matchesProperties({"key": "z"}) // => false
* tag.matchesProperties({"key": ""}) // => false
* tag.matchesProperties({"other_key": ""}) // => false
* tag.matchesProperties({"other_key": "value"}) // => false
*
* const isEmpty = new Tag("key","")
* isEmpty.matchesProperties({"key": "value"}) // => false
* isEmpty.matchesProperties({"key": ""}) // => true
* isEmpty.matchesProperties({"other_key": ""}) // => true
* isEmpty.matchesProperties({"other_key": "value"}) // => true
* isEmpty.matchesProperties({"key": undefined}) // => true
*
* const isTrue = new Tag("key", "true")
* isTrue.matchesProperties({"key":"true"}) // => true
* isTrue.matchesProperties({"key": true}) // => true
*/
matchesProperties(properties: Record<string, string>): boolean {
let foundValue = properties[this.key]
if (foundValue === undefined && (this.value === "" || this.value === undefined)) {
// The tag was not found
// and it shouldn't be found!
return true
}
if (typeof foundValue !== "string") {
if (foundValue === true && (this.value === "true" || this.value === "yes")) {
return true
}
if (foundValue === false && (this.value === "false" || this.value === "no")) {
return true
}
foundValue = "" + foundValue
}
return foundValue === this.value
}
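/**
* Converts this tag into an overpass filter. Illustrative examples of the behaviour implemented below:
*
* new Tag("key", "value").asOverpass() // => [ `["key"="value"]` ]
* new Tag("key", "").asOverpass() // => [ `[!"key"]` ]
*/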
asOverpass(): string[] {
if (this.value === "") {
// NOT having this key
return ['[!"' + this.key + '"]']
}
return [`["${this.key}"="${this.value}"]`]
}
/**
const t = new Tag("key", "value")
t.asHumanString() // => "key=value"
t.asHumanString(true) // => "<a href='https://wiki.openstreetmap.org/wiki/Key:key' target='_blank'>key</a>=<a href='https://wiki.openstreetmap.org/wiki/Tag:key%3Dvalue' target='_blank'>value</a>"
*/
asHumanString(
linkToWiki?: boolean,
shorten?: boolean,
currentProperties?: Record<string, string>
) {
let v = this.value
if (shorten) {
v = Utils.EllipsesAfter(v, 25)
}
if ((v === "" || v === undefined) && currentProperties !== undefined) {
// This tag will be removed if in the properties, so we indicate this with special rendering
if ((currentProperties[this.key] ?? "") === "") {
// This tag is not present in the current properties, so this tag doesn't change anything
return ""
}
return "<span class='line-through'>" + this.key + "</span>"
}
if (linkToWiki) {
return (
`<a href='https://wiki.openstreetmap.org/wiki/Key:${this.key}' target='_blank'>${this.key}</a>` +
`=` +
`<a href='https://wiki.openstreetmap.org/wiki/Tag:${this.key}%3D${this.value}' target='_blank'>${v}</a>`
)
}
return this.key + "=" + v
}
isUsableAsAnswer(): boolean {
return true
}
/**
*
* import {RegexTag} from "./RegexTag";
*
* // should handle advanced regexes
* new Tag("key", "aaa").shadows(new RegexTag("key", /a+/)) // => true
* new Tag("key","value").shadows(new RegexTag("key", /^..*$/, true)) // => false
* new Tag("key","value").shadows(new Tag("key","value")) // => true
* new Tag("key","some_other_value").shadows(new RegexTag("key", "value", true)) // => true
* new Tag("key","value").shadows(new RegexTag("key", "value", true)) // => false
* new Tag("key","value").shadows(new RegexTag("otherkey", "value", true)) // => false
* new Tag("key","value").shadows(new RegexTag("otherkey", "value", false)) // => false
*/
shadows(other: TagsFilter): boolean {
if (other["key"] !== undefined) {
if (other["key"] !== this.key) {
return false
}
}
return other.matchesProperties({ [this.key]: this.value })
}
usedKeys(): string[] {
return [this.key]
}
usedTags(): { key: string; value: string }[] {
if (this.value == "") {
return []
}
return [this]
}
asChange(): { k: string; v: string }[] {
return [{ k: this.key, v: this.value }]
}
optimize(): TagsFilter | boolean {
return this
}
isNegative(): boolean {
return false
}
visit(f: (tagsFilter: TagsFilter) => void) {
f(this)
}
}

691
src/Logic/Tags/TagUtils.ts Normal file
View file

@ -0,0 +1,691 @@
import { Tag } from "./Tag"
import { TagsFilter } from "./TagsFilter"
import { And } from "./And"
import { Utils } from "../../Utils"
import ComparingTag from "./ComparingTag"
import { RegexTag } from "./RegexTag"
import SubstitutingTag from "./SubstitutingTag"
import { Or } from "./Or"
import { TagConfigJson } from "../../Models/ThemeConfig/Json/TagConfigJson"
import key_counts from "../../assets/key_totals.json"
type Tags = Record<string, string>
export type UploadableTag = Tag | SubstitutingTag | And
export class TagUtils {
private static keyCounts: { keys: any; tags: any } = key_counts
private static comparators: [string, (a: number, b: number) => boolean][] = [
["<=", (a, b) => a <= b],
[">=", (a, b) => a >= b],
["<", (a, b) => a < b],
[">", (a, b) => a > b],
]
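/**
* Converts a list of simple tags into a properties-object.
* Illustrative example of the behaviour implemented below:
*
* TagUtils.KVtoProperties([new Tag("a", "b"), new Tag("c", "d")]) // => {"a": "b", "c": "d"}
*/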
static KVtoProperties(tags: Tag[]): Record<string, string> {
const properties: Record<string, string> = {}
for (const tag of tags) {
properties[tag.key] = tag.value
}
return properties
}
static changeAsProperties(kvs: { k: string; v: string }[]): Record<string, string> {
const tags: Record<string, string> = {}
for (const kv of kvs) {
tags[kv.k] = kv.v
}
return tags
}
/**
* Given two hashes of {key --> values[]}, makes sure that every neededTag is present in availableTags
*/
static AllKeysAreContained(availableTags: any, neededTags: any) {
for (const neededKey in neededTags) {
const availableValues: string[] = availableTags[neededKey]
if (availableValues === undefined) {
return false
}
const neededValues: string[] = neededTags[neededKey]
for (const neededValue of neededValues) {
if (availableValues.indexOf(neededValue) < 0) {
return false
}
}
}
return true
}
static SplitKeys(tagsFilters: UploadableTag[]): Record<string, string[]> {
return <any>this.SplitKeysRegex(tagsFilters, false)
}
/***
* Creates a hash {key --> [values : string | RegexTag ]}, with all the values present in the tagsfilter
*
* TagUtils.SplitKeysRegex([new Tag("isced:level", "bachelor; master")], true) // => {"isced:level": ["bachelor","master"]}
*/
static SplitKeysRegex(
tagsFilters: UploadableTag[],
allowRegex: boolean
): Record<string, (string | RegexTag)[]> {
const keyValues: Record<string, (string | RegexTag)[]> = {}
tagsFilters = [...tagsFilters] // copy all, use as queue
while (tagsFilters.length > 0) {
const tagsFilter = tagsFilters.shift()
if (tagsFilter === undefined) {
continue
}
if (tagsFilter instanceof And) {
tagsFilters.push(...(<UploadableTag[]>tagsFilter.and))
continue
}
if (tagsFilter instanceof Tag) {
if (keyValues[tagsFilter.key] === undefined) {
keyValues[tagsFilter.key] = []
}
keyValues[tagsFilter.key].push(...tagsFilter.value.split(";").map((s) => s.trim()))
continue
}
if (allowRegex && tagsFilter instanceof RegexTag) {
const key = tagsFilter.key
if (typeof key !== "string") {
console.error(
"Invalid type to flatten the multiAnswer: key is a regex too",
tagsFilter
)
throw "Invalid type to FlattenMultiAnswer"
}
const keystr = <string>key
if (keyValues[keystr] === undefined) {
keyValues[keystr] = []
}
keyValues[keystr].push(tagsFilter)
continue
}
console.error("Invalid type to flatten the multiAnswer", tagsFilter)
throw "Invalid type to FlattenMultiAnswer"
}
return keyValues
}
/**
* Flattens an 'uploadableTag' and replaces all 'SubstitutingTags' into normal tags
*/
static FlattenAnd(tagFilters: UploadableTag, currentProperties: Record<string, string>): Tag[] {
const tags: Tag[] = []
tagFilters.visit((tf: UploadableTag) => {
if (tf instanceof Tag) {
tags.push(tf)
}
if (tf instanceof SubstitutingTag) {
tags.push(tf.asTag(currentProperties))
}
})
return tags
}
/**
* Given multiple tagsfilters which can be used as answer, will take the tags with the same keys together as set.
* E.g:
*
* const tag = TagUtils.ParseUploadableTag({"and": [
* {
* and: [ "x=a", "y=0;1"],
* },
* {
* and: ["x=", "y=3"]
* },
* {
* and: ["x=b", "y=2"]
* }
* ]})
* TagUtils.FlattenMultiAnswer([tag]) // => TagUtils.Tag({and:["x=a;b", "y=0;1;2;3"] })
*
* TagUtils.FlattenMultiAnswer(([new Tag("x","y"), new Tag("a","b")])) // => new And([new Tag("x","y"), new Tag("a","b")])
* TagUtils.FlattenMultiAnswer(([new Tag("x","")])) // => new And([new Tag("x","")])
*/
static FlattenMultiAnswer(tagsFilters: UploadableTag[]): And {
if (tagsFilters === undefined) {
return new And([])
}
let keyValues = TagUtils.SplitKeys(tagsFilters)
const and: UploadableTag[] = []
for (const key in keyValues) {
const values = Utils.Dedup(keyValues[key]).filter((v) => v !== "")
values.sort()
and.push(new Tag(key, values.join(";")))
}
return new And(and)
}
/**
* Returns true if the properties match the tagsFilter, interpreted as a multikey.
* Note that this might match a regex tag
*
* TagUtils.MatchesMultiAnswer(new Tag("isced:level","bachelor"), {"isced:level":"bachelor; master"}) // => true
* TagUtils.MatchesMultiAnswer(new Tag("isced:level","master"), {"isced:level":"bachelor;master"}) // => true
* TagUtils.MatchesMultiAnswer(new Tag("isced:level","doctorate"), {"isced:level":"bachelor; master"}) // => false
*
* // should match with a space too
* TagUtils.MatchesMultiAnswer(new Tag("isced:level","master"), {"isced:level":"bachelor; master"}) // => true
*/
static MatchesMultiAnswer(tag: UploadableTag, properties: Tags): boolean {
const splitted = TagUtils.SplitKeysRegex([tag], true)
for (const splitKey in splitted) {
const neededValues = splitted[splitKey]
if (properties[splitKey] === undefined) {
return false
}
const actualValue = properties[splitKey].split(";").map((s) => s.trim())
for (const neededValue of neededValues) {
if (neededValue instanceof RegexTag) {
if (!neededValue.matchesProperties(properties)) {
return false
}
continue
}
if (actualValue.indexOf(neededValue) < 0) {
return false
}
}
}
return true
}
public static SimpleTag(json: string, context?: string): Tag {
const tag = Utils.SplitFirst(json, "=")
if (tag.length !== 2) {
throw `Invalid tag: no (or too many) '=' found (in ${context ?? "unknown context"})`
}
return new Tag(tag[0], tag[1])
}
/**
* Returns whether or not a key is (probably) a valid key.
* See 'Tags_format.md' for an overview of what every tag does
*
* // should accept common keys
* TagUtils.isValidKey("name") // => true
* TagUtils.isValidKey("image:0") // => true
* TagUtils.isValidKey("alt_name") // => true
*
* // should refuse short keys
* TagUtils.isValidKey("x") // => false
* TagUtils.isValidKey("xy") // => false
*
* // should refuse a string with >255 characters
* let a255 = ""
* for(let i = 0; i < 255; i++) { a255 += "a"; }
* a255.length // => 255
* TagUtils.isValidKey(a255) // => true
* TagUtils.isValidKey("a"+a255) // => false
*
* // Should refuse unexpected characters
* TagUtils.isValidKey("with space") // => false
* TagUtils.isValidKey("some$type") // => false
* TagUtils.isValidKey("_name") // => false
*/
public static isValidKey(key: string): boolean {
return key.match(/^[a-z][a-z0-9:_]{2,253}[a-z0-9]$/) !== null
}
/**
* Parses a tag configuration (a json) into a TagsFilter.
*
* Note that regexes must match the entire value
*
* TagUtils.Tag("key=value") // => new Tag("key", "value")
* TagUtils.Tag("key=") // => new Tag("key", "")
* TagUtils.Tag("key!=") // => new RegexTag("key", /.+/si)
* TagUtils.Tag("key~*") // => new RegexTag("key", /.+/si)
* TagUtils.Tag("name~i~somename") // => new RegexTag("name", /^(somename)$/si)
* TagUtils.Tag("key!=value") // => new RegexTag("key", "value", true)
* TagUtils.Tag("vending~.*bicycle_tube.*") // => new RegexTag("vending", /^(.*bicycle_tube.*)$/s)
* TagUtils.Tag("x!~y") // => new RegexTag("x", /^(y)$/s, true)
* TagUtils.Tag({"and": ["key=value", "x=y"]}) // => new And([new Tag("key","value"), new Tag("x","y")])
* TagUtils.Tag("name~[sS]peelbos.*") // => new RegexTag("name", /^([sS]peelbos.*)$/s)
* TagUtils.Tag("survey:date:={_date:now}") // => new SubstitutingTag("survey:date", "{_date:now}")
* TagUtils.Tag("xyz!~\\[\\]") // => new RegexTag("xyz", /^(\[\])$/s, true)
* TagUtils.Tag("tags~(.*;)?amenity=public_bookcase(;.*)?") // => new RegexTag("tags", /^((.*;)?amenity=public_bookcase(;.*)?)$/s)
* TagUtils.Tag("service:bicycle:.*~~*") // => new RegexTag(/^(service:bicycle:.*)$/, /.+/si)
* TagUtils.Tag("_first_comment~.*{search}.*") // => new RegexTag('_first_comment', /^(.*{search}.*)$/s)
*
* TagUtils.Tag("xyz<5").matchesProperties({xyz: 4}) // => true
* TagUtils.Tag("xyz<5").matchesProperties({xyz: 5}) // => false
*
* // RegexTags must match values with newlines
* TagUtils.Tag("note~.*aed.*").matchesProperties({note: "Hier bevindt zich wss een defibrillator. \\n\\n De aed bevindt zich op de 5de verdieping"}) // => true
* TagUtils.Tag("note~i~.*aed.*").matchesProperties({note: "Hier bevindt zich wss een defibrillator. \\n\\n De AED bevindt zich op de 5de verdieping"}) // => true
*
* // Must match case insensitive
* TagUtils.Tag("name~i~somename").matchesProperties({name: "SoMeName"}) // => true
*
* // Must match the entire value
* TagUtils.Tag("key~value").matchesProperties({key: "valueandsome"}) // => false
* TagUtils.Tag("key~value").matchesProperties({key: "value"}) // => true
* TagUtils.Tag("key~x|y") // => new RegexTag("key", /^(x|y)$/s)
* TagUtils.Tag("maxspeed~[1-9]0|1[0-4]0").matchesProperties({maxspeed: "50 mph"}) // => false
*
* // Must match entire value: with mph
* const regex = TagUtils.Tag("maxspeed~([1-9]0|1[0-4]0) mph")
* regex // => new RegexTag("maxspeed", /^(([1-9]0|1[0-4]0) mph)$/s)
* regex.matchesProperties({maxspeed: "50 mph"}) // => true
*/
public static Tag(json: TagConfigJson, context: string = ""): TagsFilter {
try {
return this.ParseTagUnsafe(json, context)
} catch (e) {
console.error("Could not parse tag", json, "in context", context, "due to ", e)
throw e
}
}
public static ParseUploadableTag(json: TagConfigJson, context: string = ""): UploadableTag {
const t = this.Tag(json, context)
t.visit((t: TagsFilter) => {
if (t instanceof And) {
return
}
if (t instanceof Tag) {
return
}
if (t instanceof SubstitutingTag) {
return
}
throw `Error at ${context}: detected a non-uploadable tag at a location where this is not supported: ${t.asHumanString(
false,
false,
{}
)}`
})
return <any>t
}
/**
* Same as `.Tag`, except that this will return undefined if the json is undefined
* @param json
* @param context
* @constructor
*/
public static TagD(json?: TagConfigJson, context: string = ""): TagsFilter | undefined {
if (json === undefined) {
return undefined
}
return TagUtils.Tag(json, context)
}
/**
* INLINE sort of the given list
*/
public static sortFilters(filters: TagsFilter[], usePopularity: boolean): void {
filters.sort((a, b) => TagUtils.order(a, b, usePopularity))
}
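/**
* Converts a tagsFilter into a compact, human-readable string.
* Illustrative example of the behaviour implemented below:
*
* TagUtils.toString(new And([new Tag("a", "b"), new Tag("x", "y")])) // => "a=b&x=y"
*/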
public static toString(f: TagsFilter, toplevel = true): string {
let r: string
if (f instanceof Or) {
r = TagUtils.joinL(f.or, "|", toplevel)
} else if (f instanceof And) {
r = TagUtils.joinL(f.and, "&", toplevel)
} else {
r = f.asHumanString(false, false, {})
}
if (toplevel) {
r = r.trim()
}
return r
}
/**
* Parses the various parts of a regex tag
*
* TagUtils.parseRegexOperator("key~value") // => {invert: false, key: "key", value: "value", modifier: ""}
* TagUtils.parseRegexOperator("key!~value") // => {invert: true, key: "key", value: "value", modifier: ""}
* TagUtils.parseRegexOperator("key~i~value") // => {invert: false, key: "key", value: "value", modifier: "i"}
* TagUtils.parseRegexOperator("key!~i~someweirdvalue~qsdf") // => {invert: true, key: "key", value: "someweirdvalue~qsdf", modifier: "i"}
* TagUtils.parseRegexOperator("_image:0~value") // => {invert: false, key: "_image:0", value: "value", modifier: ""}
* TagUtils.parseRegexOperator("key~*") // => {invert: false, key: "key", value: "*", modifier: ""}
* TagUtils.parseRegexOperator("Brugs volgnummer~*") // => {invert: false, key: "Brugs volgnummer", value: "*", modifier: ""}
* TagUtils.parseRegexOperator("socket:USB-A~*") // => {invert: false, key: "socket:USB-A", value: "*", modifier: ""}
* TagUtils.parseRegexOperator("tileId~*") // => {invert: false, key: "tileId", value: "*", modifier: ""}
*/
public static parseRegexOperator(tag: string): {
invert: boolean
key: string
value: string
modifier: "i" | ""
} | null {
const match = tag.match(/^([_|a-zA-Z0-9: -]+)(!)?~([i]~)?(.*)$/)
if (match == null) {
return null
}
const [_, key, invert, modifier, value] = match
return { key, value, invert: invert == "!", modifier: modifier == "i~" ? "i" : "" }
}
/**
* Returns 'true' is opposite tags are detected.
* Note that this method will never work perfectly
*
* // should be false for some simple cases
* TagUtils.ContainsOppositeTags([new Tag("key", "value"), new Tag("key0", "value")]) // => false
* TagUtils.ContainsOppositeTags([new Tag("key", "value"), new Tag("key", "value0")]) // => false
*
* // should detect simple cases
* TagUtils.ContainsOppositeTags([new Tag("key", "value"), new RegexTag("key", "value", true)]) // => true
* TagUtils.ContainsOppositeTags([new Tag("key", "value"), new RegexTag("key", /value/, true)]) // => true
*/
public static ContainsOppositeTags(tags: TagsFilter[]): boolean {
for (let i = 0; i < tags.length; i++) {
const tag = tags[i]
if (!(tag instanceof Tag || tag instanceof RegexTag)) {
continue
}
for (let j = i + 1; j < tags.length; j++) {
const guard = tags[j]
if (!(guard instanceof Tag || guard instanceof RegexTag)) {
continue
}
if (guard.key !== tag.key) {
// Different keys: they can _never_ be opposites
continue
}
if ((guard.value["source"] ?? guard.value) !== (tag.value["source"] ?? tag.value)) {
// different values: they can _never_ be opposites
continue
}
if ((guard["invert"] ?? false) !== (tag["invert"] ?? false)) {
// The 'invert' flags are opposite, the key and value is the same for both
// This means we have found opposite tags!
return true
}
}
}
return false
}
/**
* Returns a filtered version of 'listToFilter'.
* For a list [t0, t1, t2], If `blackList` contains an equivalent (or broader) match of any `t`, this respective `t` is dropped from the returned list
* Ignores nested ORS and ANDS
*
* TagUtils.removeShadowedElementsFrom([new Tag("key","value")], [new Tag("key","value"), new Tag("other_key","value")]) // => [new Tag("other_key","value")]
*/
public static removeShadowedElementsFrom(
blacklist: TagsFilter[],
listToFilter: TagsFilter[]
): TagsFilter[] {
return listToFilter.filter((tf) => !blacklist.some((guard) => guard.shadows(tf)))
}
/**
* Returns a filtered version of 'listToFilter', where no duplicates and no equivalents exists.
*
* TagUtils.removeEquivalents([new RegexTag("key", /^..*$/), new Tag("key","value")]) // => [new Tag("key", "value")]
*/
public static removeEquivalents(listToFilter: (Tag | RegexTag)[]): TagsFilter[] {
const result: TagsFilter[] = []
outer: for (let i = 0; i < listToFilter.length; i++) {
const tag = listToFilter[i]
for (let j = 0; j < listToFilter.length; j++) {
if (i === j) {
continue
}
const guard = listToFilter[j]
if (guard.shadows(tag)) {
// the guard 'kills' the tag: we continue the outer loop without adding the tag
continue outer
}
}
result.push(tag)
}
return result
}
/**
* Returns `true` if at least one element of the 'guards' shadows one element of the 'listToFilter'.
*
* TagUtils.containsEquivalents([new Tag("key","value")], [new Tag("key","value"), new Tag("other_key","value")]) // => true
* TagUtils.containsEquivalents([new Tag("key","value")], [ new Tag("other_key","value")]) // => false
* TagUtils.containsEquivalents([new Tag("key","value")], [ new Tag("key","other_value")]) // => false
*/
public static containsEquivalents(guards: TagsFilter[], listToFilter: TagsFilter[]): boolean {
return listToFilter.some((tf) => guards.some((guard) => guard.shadows(tf)))
}
/**
* Parses a level specifier to the various available levels
*
* TagUtils.LevelsParser("0") // => ["0"]
* TagUtils.LevelsParser("1") // => ["1"]
* TagUtils.LevelsParser("0;2") // => ["0","2"]
* TagUtils.LevelsParser("0-5") // => ["0","1","2","3","4","5"]
* TagUtils.LevelsParser("0") // => ["0"]
* TagUtils.LevelsParser("-1") // => ["-1"]
* TagUtils.LevelsParser("0;-1") // => ["0", "-1"]
* TagUtils.LevelsParser(undefined) // => []
*/
public static LevelsParser(level: string): string[] {
let spec = Utils.NoNull([level])
spec = [].concat(...spec.map((s) => s?.split(";")))
spec = [].concat(
...spec.map((s) => {
s = s.trim()
if (s.indexOf("-") < 0 || s.startsWith("-")) {
return s
}
const [start, end] = s.split("-").map((s) => Number(s.trim()))
if (isNaN(start) || isNaN(end)) {
return undefined
}
const values = []
for (let i = start; i <= end; i++) {
values.push(i + "")
}
return values
})
)
return Utils.NoNull(spec)
}
private static ParseTagUnsafe(json: TagConfigJson, context: string = ""): TagsFilter {
if (json === undefined) {
throw new Error(
`Error while parsing a tag: 'json' is undefined in ${context}. Make sure all the tags are defined and at least one tag is present in a complex expression`
)
}
if (typeof json != "string") {
if (json["and"] !== undefined && json["or"] !== undefined) {
throw `Error while parsing a TagConfig: got an object where both 'and' and 'or' are defined. Did you override a value? Perhaps use \`"=parent": { ... }\` instead of \`"parent": {...}\` to trigger a replacement and not a fuse of values`
}
if (json["and"] !== undefined) {
return new And(json["and"].map((t) => TagUtils.Tag(t, context)))
}
if (json["or"] !== undefined) {
return new Or(json["or"].map((t) => TagUtils.Tag(t, context)))
}
throw `At ${context}: unrecognized tag: ${JSON.stringify(json)}`
}
const tag = json as string
for (const [operator, comparator] of TagUtils.comparators) {
if (tag.indexOf(operator) >= 0) {
const split = Utils.SplitFirst(tag, operator)
let val = Number(split[1].trim())
if (isNaN(val)) {
val = new Date(split[1].trim()).getTime()
}
const f = (value: string | number | undefined) => {
if (value === undefined) {
return false
}
let b: number
if (typeof value === "number") {
b = value
} else if (typeof value === "string") {
b = Number(value?.trim())
} else {
b = Number(value)
}
if (isNaN(b) && typeof value === "string") {
b = Utils.ParseDate(value).getTime()
if (isNaN(b)) {
return false
}
}
return comparator(b, val)
}
return new ComparingTag(split[0], f, operator + val)
}
}
if (tag.indexOf("~~") >= 0) {
const split = Utils.SplitFirst(tag, "~~")
let keyRegex: RegExp
if (split[0] === "*") {
keyRegex = new RegExp(".+", "i")
} else {
keyRegex = new RegExp("^(" + split[0] + ")$")
}
let valueRegex: RegExp
if (split[1] === "*") {
valueRegex = new RegExp(".+", "si")
} else {
valueRegex = new RegExp("^(" + split[1] + ")$", "s")
}
return new RegexTag(keyRegex, valueRegex)
}
const withRegex = TagUtils.parseRegexOperator(tag)
if (withRegex != null) {
if (withRegex.value === "*" && withRegex.invert) {
throw `Don't use 'key!~*' - use 'key=' instead (empty string as value) in the tag ${tag} while parsing ${context}`
}
if (withRegex.value === "") {
throw (
"Detected a regextag with an empty regex; this is not allowed. Use '" +
withRegex.key +
"='instead (at " +
context +
")"
)
}
let value: string | RegExp = withRegex.value
if (value === "*") {
return new RegexTag(
withRegex.key,
new RegExp(".+", "si" + withRegex.modifier),
withRegex.invert
)
}
return new RegexTag(
withRegex.key,
new RegExp("^(" + value + ")$", "s" + withRegex.modifier),
withRegex.invert
)
}
if (tag.indexOf("!:=") >= 0) {
const split = Utils.SplitFirst(tag, "!:=")
return new SubstitutingTag(split[0], split[1], true)
}
if (tag.indexOf(":=") >= 0) {
const split = Utils.SplitFirst(tag, ":=")
return new SubstitutingTag(split[0], split[1])
}
if (tag.indexOf("!=") >= 0) {
const split = Utils.SplitFirst(tag, "!=")
if (split[1] === "*") {
throw (
"At " +
context +
": invalid tag " +
tag +
". To indicate a missing tag, use '" +
split[0] +
"!=' instead"
)
}
if (split[1] === "") {
return new RegexTag(split[0], /.+/is)
}
return new RegexTag(split[0], split[1], true)
}
if (tag.indexOf("=") >= 0) {
const split = Utils.SplitFirst(tag, "=")
if (split[1] == "*") {
throw `Error while parsing tag '${tag}' in ${context}: detected a wildcard on a normal value. Use a regex pattern instead`
}
return new Tag(split[0], split[1])
}
throw `Error while parsing tag '${tag}' in ${context}: no key part and value part were found`
}
private static GetCount(key: string, value?: string) {
if (key === undefined) {
return undefined
}
const tag = TagUtils.keyCounts.tags[key]
if (tag !== undefined && tag[value] !== undefined) {
return tag[value]
}
return TagUtils.keyCounts.keys[key]
}
private static order(a: TagsFilter, b: TagsFilter, usePopularity: boolean): number {
const rta = a instanceof RegexTag
const rtb = b instanceof RegexTag
if (rta !== rtb) {
// Regex tags should always go at the end: these use a lot of computation at the overpass side, avoiding it is better
if (rta) {
return 1 // b < a
} else {
return -1
}
}
if (a["key"] !== undefined && b["key"] !== undefined) {
if (usePopularity) {
const countA = TagUtils.GetCount(a["key"], a["value"])
const countB = TagUtils.GetCount(b["key"], b["value"])
if (countA !== undefined && countB !== undefined) {
return countA - countB
}
}
if (a["key"] === b["key"]) {
return 0
}
if (a["key"] < b["key"]) {
return -1
}
return 1
}
return 0
}
private static joinL(tfs: TagsFilter[], separator: string, toplevel: boolean) {
const joined = tfs.map((e) => TagUtils.toString(e, false)).join(separator)
if (toplevel) {
return joined
}
return " (" + joined + ") "
}
}

View file

@ -0,0 +1,62 @@
export abstract class TagsFilter {
abstract asOverpass(): string[]
abstract isUsableAsAnswer(): boolean
/**
* Indicates some form of equivalency:
* if `this.shadows(t)`, then `this.matches(properties)` implies that `t.matches(properties)` for all possible properties
*/
abstract shadows(other: TagsFilter): boolean
abstract matchesProperties(properties: Record<string, string>): boolean
abstract asHumanString(
linkToWiki: boolean,
shorten: boolean,
properties: Record<string, string>
): string
abstract usedKeys(): string[]
/**
* Returns all normal key/value pairs
* Regex tags, substitutions, comparisons, ... are exempt
*/
abstract usedTags(): { key: string; value: string }[]
/**
* Converts the tagsFilter into a list of key-values that should be uploaded to OSM.
* Throws an error if not applicable.
*
* Note: properties are the already existing tags-object. It is only used in the substituting tag
*/
abstract asChange(properties: Record<string, string>): { k: string; v: string }[]
/**
* Returns an optimized version (or self) of this tagsFilter
*/
abstract optimize(): TagsFilter | boolean
/**
* Returns 'true' if the tagsfilter might select all features (i.e. the filter will return everything from OSM, except a few entries).
*
* A typical negative tagsfilter is 'key!=value'
*
* import {RegexTag} from "./RegexTag";
* import {Tag} from "./Tag";
* import {And} from "./And";
* import {Or} from "./Or";
*
* new Tag("key","value").isNegative() // => false
* new And([new RegexTag("key","value", true)]).isNegative() // => true
* new Or([new RegexTag("key","value", true), new Tag("x","y")]).isNegative() // => true
* new And([new RegexTag("key","value", true), new Tag("x","y")]).isNegative() // => false
*/
abstract isNegative(): boolean
/**
* Walks the entire tree, every tagsFilter will be passed into the function once
*/
abstract visit(f: (tagsFilter: TagsFilter) => void)
}

807
src/Logic/UIEventSource.ts Normal file
View file

@ -0,0 +1,807 @@
import { Utils } from "../Utils"
import { Readable, Subscriber, Unsubscriber, Updater, Writable } from "svelte/store"
/**
* Various static utils
*/
export class Stores {
public static Chronic(millis: number, asLong: () => boolean = undefined): Store<Date> {
const source = new UIEventSource<Date>(undefined)
function run() {
source.setData(new Date())
if (asLong === undefined || asLong()) {
window.setTimeout(run, millis)
}
}
run()
return source
}
public static FromPromiseWithErr<T>(
promise: Promise<T>
): Store<{ success: T } | { error: any }> {
return UIEventSource.FromPromiseWithErr(promise)
}
/**
* Converts a promise into a UIEventSource, which is set when the result is calculated.
* If the promise fails, the value will stay undefined
* @param promise
* @constructor
*/
public static FromPromise<T>(promise: Promise<T>): Store<T> {
const src = new UIEventSource<T>(undefined)
promise?.then((d) => src.setData(d))
promise?.catch((err) => console.warn("Promise failed:", err))
return src
}
public static flatten<X>(source: Store<Store<X>>, possibleSources?: Store<any>[]): Store<X> {
return UIEventSource.flatten(source, possibleSources)
}
/**
* Given a UIEVentSource with a list, returns a new UIEventSource which is only updated if the _contents_ of the list are different.
* E.g.
* const src = new UIEventSource([1,2,3])
* const stable = UIEventSource.ListStabilized(src)
* src.addCallback(_ => console.log("src pinged"))
* stable.addCallback(_ => console.log("stable pinged"))
* src.setData([...src.data])
*
* This will only trigger 'src pinged'
*
* @param src
* @constructor
*/
public static ListStabilized<T>(src: Store<T[]>): Store<T[]> {
const stable = new UIEventSource<T[]>(undefined)
src.addCallbackAndRun((list) => {
if (list === undefined) {
stable.setData(undefined)
return
}
if (Utils.sameList(stable.data, list)) {
return
}
stable.setData(list)
})
return stable
}
}
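/*
 * Editor's usage sketch (not part of the original file): how the helpers above are typically combined.
 * `Stores.Chronic` emits the current time every `millis` milliseconds; `ListStabilized` suppresses
 * updates whose list contents did not actually change.
 *
 * const clock = Stores.Chronic(60_000) // a Store<Date> that pings roughly once per minute
 * const ids = new UIEventSource<number[]>([1, 2, 3])
 * const stableIds = Stores.ListStabilized(ids)
 * ids.setData([1, 2, 3]) // same contents: stableIds does not ping its listeners
 */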
export abstract class Store<T> implements Readable<T> {
abstract readonly data: T
/**
* Optional value giving a title to the UIEventSource, mainly used for debugging
*/
public readonly tag: string | undefined
constructor(tag: string = undefined) {
this.tag = tag
if (tag === undefined || tag === "") {
let createStack = Utils.runningFromConsole
if (!Utils.runningFromConsole) {
createStack = window.location.hostname === "127.0.0.1"
}
if (createStack) {
const callstack = new Error().stack.split("\n")
this.tag = callstack[1]
}
}
}
abstract map<J>(f: (t: T) => J): Store<J>
abstract map<J>(f: (t: T) => J, extraStoresToWatch: Store<any>[]): Store<J>
public mapD<J>(f: (t: T) => J, extraStoresToWatch?: Store<any>[]): Store<J> {
return this.map((t) => {
if (t === undefined) {
return undefined
}
if (t === null) {
return null
}
return f(t)
}, extraStoresToWatch)
}
/**
* Add a callback function which will run on future data changes
*/
abstract addCallback(callback: (data: T) => void): () => void
/**
* Adds a callback function, which will be run immediately.
* Only triggers if the current data is defined
*/
abstract addCallbackAndRunD(callback: (data: T) => void): () => void
/**
* Add a callback function which will run on future data changes
* Only triggers if the data is defined
*/
abstract addCallbackD(callback: (data: T) => void): () => void
/**
* Adds a callback function, which will be run immediately.
* Only triggers if the current data is defined
*/
abstract addCallbackAndRun(callback: (data: T) => void): () => void
public withEqualityStabilized(
comparator: (t: T | undefined, t1: T | undefined) => boolean
): Store<T> {
let oldValue = undefined
return this.map((v) => {
if (v == oldValue) {
return oldValue
}
if (comparator(oldValue, v)) {
return oldValue
}
oldValue = v
return v
})
}
/**
* Monadic bind function
*
* // simple test with bound and immutablestores
* const src = new UIEventSource<number>(3)
* const bound = src.bind(i => new ImmutableStore(i * 2))
* let lastValue = undefined;
* bound.addCallbackAndRun(v => lastValue = v);
* lastValue // => 6
* src.setData(21)
* lastValue // => 42
*
* // simple test with bind over a mapped value
* const src = new UIEventSource<number>(0)
* const srcs : UIEventSource<string>[] = [new UIEventSource<string>("a"), new UIEventSource<string>("b")]
* const bound = src.map(i => -i).bind(i => srcs[i])
* let lastValue : string = undefined;
* bound.addCallbackAndRun(v => lastValue = v);
* lastValue // => "a"
* src.setData(-1)
* lastValue // => "b"
* srcs[1].setData("xyz")
* lastValue // => "xyz"
* srcs[0].setData("def")
* lastValue // => "xyz"
* src.setData(0)
* lastValue // => "def"
*
*
*
* // advanced test with bound
* const src = new UIEventSource<number>(0)
* const srcs : UIEventSource<string>[] = [new UIEventSource<string>("a"), new UIEventSource<string>("b")]
* const bound = src.bind(i => srcs[i])
* let lastValue : string = undefined;
* bound.addCallbackAndRun(v => lastValue = v);
* lastValue // => "a"
* src.setData(1)
* lastValue // => "b"
* srcs[1].setData("xyz")
* lastValue // => "xyz"
* srcs[0].setData("def")
* lastValue // => "xyz"
* src.setData(0)
* lastValue // => "def"
*/
public bind<X>(f: (t: T) => Store<X>): Store<X> {
const mapped = this.map(f)
const sink = new UIEventSource<X>(undefined)
const seenEventSources = new Set<Store<X>>()
mapped.addCallbackAndRun((newEventSource) => {
if (newEventSource === null) {
sink.setData(null)
} else if (newEventSource === undefined) {
sink.setData(undefined)
} else if (!seenEventSources.has(newEventSource)) {
seenEventSources.add(newEventSource)
newEventSource.addCallbackAndRun((resultData) => {
if (mapped.data === newEventSource) {
sink.setData(resultData)
}
})
} else {
// Already seen, so we don't have to add a callback, just update the value
sink.setData(newEventSource.data)
}
})
return sink
}
public stabilized(millisToStabilize: number): Store<T> {
if (Utils.runningFromConsole) {
return this
}
const newSource = new UIEventSource<T>(this.data)
const self = this
this.addCallback((latestData) => {
window.setTimeout(() => {
if (self.data == latestData) {
// compare by reference.
// Note that 'latestData' and 'self.data' are both from the same UIEventSource, but both are dereferenced at a different time
newSource.setData(latestData)
}
}, millisToStabilize)
})
return newSource
}
/**
* Converts the uiEventSource into a promise.
* The promise will return the value of the store if the given condition evaluates to true
* @param condition: an optional condition, defaults to 'store.value !== undefined'
* @constructor
*/
public AsPromise(condition?: (t: T) => boolean): Promise<T> {
const self = this
condition = condition ?? ((t) => t !== undefined)
return new Promise((resolve) => {
const data = self.data
if (condition(data)) {
resolve(data)
} else {
self.addCallbackD((data) => {
resolve(data)
return true // return true to unregister as we only need to be called once
})
}
})
}
/**
* Same as 'addCallbackAndRun', added to be compatible with Svelte
*/
public subscribe(run: Subscriber<T> & ((value: T) => void), _?): Unsubscriber {
// We don't need to do anything with 'invalidate', see
// https://github.com/sveltejs/svelte/issues/3859
// Note: 'run' is wrapped in an anonymous function: if its return value happened to be 'true', addCallbackAndRun would otherwise unregister it
return this.addCallbackAndRun((v) => {
run(v)
})
}
}
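/*
 * Editor's sketch (an assumption, not in the original source): because `Store` implements Svelte's
 * `Readable` contract via `subscribe`, it can be consumed by Svelte helpers such as `derived`,
 * or with the `$store` auto-subscription syntax inside a component.
 *
 * import { derived } from "svelte/store"
 * const count = new UIEventSource<number>(1)
 * const doubled = derived(count, (c) => c * 2) // a plain Svelte store fed by the UIEventSource
 */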
export class ImmutableStore<T> extends Store<T> {
public readonly data: T
private static readonly pass: () => void = () => {}
constructor(data: T) {
super()
this.data = data
}
addCallback(_: (data: T) => void): () => void {
// pass: data will never change
return ImmutableStore.pass
}
addCallbackAndRun(callback: (data: T) => void): () => void {
callback(this.data)
// no callback registry: data will never change
return ImmutableStore.pass
}
addCallbackAndRunD(callback: (data: T) => void): () => void {
if (this.data !== undefined) {
callback(this.data)
}
// no callback registry: data will never change
return ImmutableStore.pass
}
addCallbackD(_: (data: T) => void): () => void {
// pass: data will never change
return ImmutableStore.pass
}
map<J>(f: (t: T) => J, extraStores: Store<any>[] = undefined): ImmutableStore<J> {
if (extraStores?.length > 0) {
return new MappedStore(this, f, extraStores, undefined, f(this.data))
}
return new ImmutableStore<J>(f(this.data))
}
}
/**
* Keeps track of the callback functions
*/
class ListenerTracker<T> {
private readonly _callbacks: ((t: T) => boolean | void | any)[] = []
public pingCount = 0
/**
* Adds a callback which can be called; a function to unregister is returned
*/
public addCallback(callback: (t: T) => boolean | void | any): () => void {
if (callback === console.log) {
// This ^^^ actually works!
throw "Don't add console.log directly as a callback - you'll won't be able to find it afterwards. Wrap it in a lambda instead."
}
this._callbacks.push(callback)
// Give back an unregister-function!
return () => {
const index = this._callbacks.indexOf(callback)
if (index >= 0) {
this._callbacks.splice(index, 1)
}
}
}
/**
* Call all the callbacks.
* Returns the number of registered callbacks
*/
public ping(data: T): number {
this.pingCount++
let toDelete = undefined
let startTime = new Date().getTime()
for (const callback of this._callbacks) {
if (callback(data) === true) {
// This callback wants to be deleted
// Note: it has to return precisely true in order to avoid accidental deletions
if (toDelete === undefined) {
toDelete = [callback]
} else {
toDelete.push(callback)
}
}
}
let endTime = new Date().getTime()
if (endTime - startTime > 500) {
console.trace(
"Warning: a ping took more than 500ms; this is probably a performance issue"
)
)
}
if (toDelete !== undefined) {
for (const toDeleteElement of toDelete) {
this._callbacks.splice(this._callbacks.indexOf(toDeleteElement), 1)
}
}
return this._callbacks.length
}
length() {
return this._callbacks.length
}
}
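/*
 * Editor's note (sketch, not in the original source): `ping` removes every callback that returns
 * exactly `true`, which is what enables the "return true to unregister" convention used throughout
 * this file, e.g.:
 *
 * const src = new UIEventSource<number>(0)
 * src.addCallback((v) => {
 *     console.log("Got", v)
 *     return v > 10 // once a value above 10 has been seen, this callback is dropped
 * })
 */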
/**
* The mapped store is a helper type which does the mapping of a function.
*/
class MappedStore<TIn, T> extends Store<T> {
private readonly _upstream: Store<TIn>
private readonly _upstreamCallbackHandler: ListenerTracker<TIn> | undefined
private _upstreamPingCount: number = -1
private _unregisterFromUpstream: () => void
private readonly _f: (t: TIn) => T
private readonly _extraStores: Store<any>[] | undefined
private _unregisterFromExtraStores: (() => void)[] | undefined
private _callbacks: ListenerTracker<T> = new ListenerTracker<T>()
private static readonly pass: () => void = () => {}
constructor(
upstream: Store<TIn>,
f: (t: TIn) => T,
extraStores: Store<any>[],
upstreamListenerHandler: ListenerTracker<TIn> | undefined,
initialState: T,
onDestroy?: (f: () => void) => void
) {
super()
this._upstream = upstream
this._upstreamCallbackHandler = upstreamListenerHandler
this._f = f
this._data = initialState
this._upstreamPingCount = upstreamListenerHandler?.pingCount
this._extraStores = extraStores
this.registerCallbacksToUpstream()
if (onDestroy !== undefined) {
onDestroy(() => this.unregisterFromUpstream())
}
}
private _data: T
private _callbacksAreRegistered = false
/**
* Gets the current data from the store
*
* const src = new UIEventSource(21)
* const mapped = src.map(i => i * 2)
* src.setData(3)
* mapped.data // => 6
*
*/
get data(): T {
if (!this._callbacksAreRegistered) {
// Callbacks are not registered, so we haven't been listening for updates from the upstream which might have changed
if (this._upstreamCallbackHandler?.pingCount != this._upstreamPingCount) {
// Upstream has pinged - let's update our data first
this._data = this._f(this._upstream.data)
}
return this._data
}
return this._data
}
map<J>(f: (t: T) => J, extraStores: Store<any>[] = undefined): Store<J> {
let stores: Store<any>[] = undefined
if (extraStores?.length > 0 || this._extraStores?.length > 0) {
stores = []
}
if (extraStores?.length > 0) {
stores.push(...extraStores)
}
if (this._extraStores?.length > 0) {
this._extraStores?.forEach((store) => {
if (stores.indexOf(store) < 0) {
stores.push(store)
}
})
}
return new MappedStore(
this,
f, // we could fuse the functions here (e.g. data => f(this._f(data))), but this might result in _f being calculated multiple times, breaking things
stores,
this._callbacks,
f(this.data)
)
}
private unregisterFromUpstream() {
console.log("Unregistering callbacks for", this.tag)
this._callbacksAreRegistered = false
this._unregisterFromUpstream()
this._unregisterFromExtraStores?.forEach((unr) => unr())
}
private registerCallbacksToUpstream() {
const self = this
this._unregisterFromUpstream = this._upstream.addCallback((_) => self.update())
this._unregisterFromExtraStores = this._extraStores?.map((store) =>
store?.addCallback((_) => self.update())
)
this._callbacksAreRegistered = true
}
private update(): void {
const newData = this._f(this._upstream.data)
this._upstreamPingCount = this._upstreamCallbackHandler?.pingCount
if (this._data == newData) {
return
}
this._data = newData
this._callbacks.ping(this._data)
}
addCallback(callback: (data: T) => any | boolean | void): () => void {
if (!this._callbacksAreRegistered) {
// This is the first callback that is added
// We register this 'map' to the upstream object and all the streams
this.registerCallbacksToUpstream()
}
const unregister = this._callbacks.addCallback(callback)
return () => {
unregister()
if (this._callbacks.length() == 0) {
this.unregisterFromUpstream()
}
}
}
addCallbackAndRun(callback: (data: T) => any | boolean | void): () => void {
const unregister = this.addCallback(callback)
const doRemove = callback(this.data)
if (doRemove === true) {
unregister()
return MappedStore.pass
}
return unregister
}
addCallbackAndRunD(callback: (data: T) => any | boolean | void): () => void {
return this.addCallbackAndRun((data) => {
if (data !== undefined) {
return callback(data)
}
})
}
addCallbackD(callback: (data: T) => any | boolean | void): () => void {
return this.addCallback((data) => {
if (data !== undefined) {
return callback(data)
}
})
}
}
export class UIEventSource<T> extends Store<T> implements Writable<T> {
public data: T
_callbacks: ListenerTracker<T> = new ListenerTracker<T>()
private static readonly pass: () => void = () => {}
constructor(data: T, tag: string = "") {
super(tag)
this.data = data
}
public static flatten<X>(
source: Store<Store<X>>,
possibleSources?: Store<any>[]
): UIEventSource<X> {
const sink = new UIEventSource<X>(source.data?.data)
source.addCallback((latestData) => {
sink.setData(latestData?.data)
latestData.addCallback((data) => {
if (source.data !== latestData) {
return true
}
sink.setData(data)
})
})
for (const possibleSource of possibleSources ?? []) {
possibleSource?.addCallback(() => {
sink.setData(source.data?.data)
})
}
return sink
}
/**
* Converts a promise into a UIEventSource, which is set when the result is calculated.
* If the promise fails, the value will stay undefined, but 'onError' will be called
*/
public static FromPromise<T>(
promise: Promise<T>,
onError: (e: any) => void = undefined
): UIEventSource<T> {
const src = new UIEventSource<T>(undefined)
promise?.then((d) => src.setData(d))
promise?.catch((err) => {
if (onError !== undefined) {
onError(err)
} else {
console.warn("Promise failed:", err)
}
})
return src
}
/**
* Converts a promise into a UIEventSource, which is set when the result is calculated.
* If the promise fails, the value will stay undefined
* @param promise
* @constructor
*/
public static FromPromiseWithErr<T>(
promise: Promise<T>
): UIEventSource<{ success: T } | { error: any }> {
const src = new UIEventSource<{ success: T } | { error: any }>(undefined)
promise?.then((d) => src.setData({ success: d }))
promise?.catch((err) => src.setData({ error: err }))
return src
}
public static asFloat(source: UIEventSource<string>): UIEventSource<number> {
return source.sync(
(str) => {
let parsed = parseFloat(str)
return isNaN(parsed) ? undefined : parsed
},
[],
(fl) => {
if (fl === undefined || isNaN(fl)) {
return undefined
}
return ("" + fl).substr(0, 8)
}
)
}
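/*
 * Editor's sketch (not in the original source): `asFloat` builds a two-way bound store, so writes
 * propagate in both directions.
 *
 * const raw = new UIEventSource<string>("3.14")
 * const parsed = UIEventSource.asFloat(raw)
 * parsed.data // => 3.14
 * parsed.setData(2.5) // raw.data becomes "2.5"
 * raw.setData("not a number") // parsed.data becomes undefined
 */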
/**
* Adds a callback
*
* If the result of the callback is 'true', the callback is considered finished and will be removed again
* @param callback
*/
public addCallback(callback: (latestData: T) => boolean | void | any): () => void {
return this._callbacks.addCallback(callback)
}
public addCallbackAndRun(callback: (latestData: T) => boolean | void | any): () => void {
const doDeleteCallback = callback(this.data)
if (doDeleteCallback !== true) {
return this.addCallback(callback)
} else {
return UIEventSource.pass
}
}
public addCallbackAndRunD(callback: (data: T) => void): () => void {
return this.addCallbackAndRun((data) => {
if (data !== undefined && data !== null) {
return callback(data)
}
})
}
public addCallbackD(callback: (data: T) => void): () => void {
return this.addCallback((data) => {
if (data !== undefined && data !== null) {
return callback(data)
}
})
}
public setData(t: T): UIEventSource<T> {
if (this.data == t) {
// MUST COMPARE BY REFERENCE!
return
}
this.data = t
this._callbacks.ping(t)
return this
}
public ping(): void {
this._callbacks.ping(this.data)
}
/**
* Monoidal map which results in a read-only store
* Given a function 'f', will construct a new UIEventSource where the contents will always be 'f(this.data)'
* @param f: The transforming function
* @param extraSources: also trigger the update if one of these sources change
* @param onDestroy: a callback that can trigger the destroy function
*
* const src = new UIEventSource<number>(10)
* const store = src.map(i => i * 2)
* store.data // => 20
* let srcSeen = undefined;
* src.addCallback(v => {
* console.log("Triggered")
* srcSeen = v
* })
* let lastSeen = undefined
* store.addCallback(v => {
* console.log("Triggered!")
* lastSeen = v
* })
* src.setData(21)
* srcSeen // => 21
* lastSeen // => 42
*/
public map<J>(
f: (t: T) => J,
extraSources: Store<any>[] = [],
onDestroy?: (f: () => void) => void
): Store<J> {
return new MappedStore(this, f, extraSources, this._callbacks, f(this.data), onDestroy)
}
/**
* Monoidal map which results in a read-only store. 'undefined' is passed 'as is'
* Given a function 'f', will construct a new UIEventSource where the contents will always be 'f(this.data)'
*/
public mapD<J>(f: (t: T) => J, extraSources: Store<any>[] = []): Store<J | undefined> {
return new MappedStore(
this,
(t) => {
if (t === undefined) {
return undefined
}
return f(t)
},
extraSources,
this._callbacks,
this.data === undefined ? undefined : f(this.data)
)
}
/**
* Two way sync with functions in both directions
* Given a function 'f', will construct a new UIEventSource where the contents will always be 'f(this.data)'
* @param f: The transforming function
* @param extraSources: also trigger the update if one of these sources change
* @param g: a 'back'-function to let the sync run in two directions. (data of the new UIEventSource, currentData) => newData
* @param allowUnregister: if set, the update will be halted if no listeners are registered
*/
public sync<J>(
f: (t: T) => J,
extraSources: Store<any>[],
g: (j: J, t: T) => T,
allowUnregister = false
): UIEventSource<J> {
const self = this
const stack = new Error().stack.split("\n")
const callee = stack[1]
const newSource = new UIEventSource<J>(f(this.data), "map(" + this.tag + ")@" + callee)
const update = function () {
newSource.setData(f(self.data))
return allowUnregister && newSource._callbacks.length() === 0
}
this.addCallback(update)
for (const extraSource of extraSources) {
extraSource?.addCallback(update)
}
if (g !== undefined) {
newSource.addCallback((latest) => {
self.setData(g(latest, self.data))
})
}
return newSource
}
public syncWith(otherSource: UIEventSource<T>, reverseOverride = false): UIEventSource<T> {
this.addCallback((latest) => otherSource.setData(latest))
const self = this
otherSource.addCallback((latest) => self.setData(latest))
if (reverseOverride) {
if (otherSource.data !== undefined) {
this.setData(otherSource.data)
}
} else if (this.data === undefined) {
this.setData(otherSource.data)
} else {
otherSource.setData(this.data)
}
return this
}
static asBoolean(stringUIEventSource: UIEventSource<string>) {
return stringUIEventSource.sync(
(str) => str === "true",
[],
(b) => "" + b
)
}
set(value: T): void {
this.setData(value)
}
update(f: Updater<T> & ((value: T) => T)): void {
this.setData(f(this.data))
}
/**
* Create a new UIEventSource. Whenever 'store' changes, the returned UIEventSource will get this value as well.
* However, this value can be overridden without affecting the source store
*/
static feedFrom<T>(store: Store<T>): UIEventSource<T> {
const src = new UIEventSource(store.data)
store.addCallback((t) => src.setData(t))
return src
}
}

52
src/Logic/Web/Hash.ts Normal file
View file

@ -0,0 +1,52 @@
import { UIEventSource } from "../UIEventSource"
import { Utils } from "../../Utils"
/**
* Wrapper around the hash to create an UIEventSource from it
*/
export default class Hash {
public static hash: UIEventSource<string> = Hash.Get()
/**
* Gets the current string, including the pound sign if there is any
* @constructor
*/
public static Current(): string {
if (Hash.hash.data === undefined || Hash.hash.data === "") {
return ""
} else {
return "#" + Hash.hash.data
}
}
private static Get(): UIEventSource<string> {
if (Utils.runningFromConsole) {
return new UIEventSource<string>(undefined)
}
const hash = new UIEventSource<string>(window.location.hash.substr(1))
hash.addCallback((h) => {
if (h === "undefined") {
console.warn("Got a literal 'undefined' as hash, ignoring")
h = undefined
}
if (h === undefined || h === "") {
window.location.hash = ""
return
}
history.pushState({}, "")
window.location.hash = "#" + h
})
window.onhashchange = () => {
let newValue = window.location.hash.substr(1)
if (newValue === "") {
newValue = undefined
}
hash.setData(newValue)
}
return hash
}
}
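/*
 * Editor's usage sketch (not part of the original file): the hash is exposed as a plain
 * UIEventSource, so reading and writing `window.location.hash` goes through `Hash.hash`.
 *
 * Hash.hash.addCallbackAndRunD((h) => console.log("Hash is now", h))
 * Hash.hash.setData("node/42") // the URL now ends in "#node/42"
 * Hash.Current() // => "#node/42"
 */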

View file

@ -0,0 +1,49 @@
import { UIEventSource } from "../UIEventSource"
import * as idb from "idb-keyval"
import { Utils } from "../../Utils"
/**
* UIEventSource wrapper around the indexedDB key-value store
*/
export class IdbLocalStorage {
private static readonly _sourceCache: Record<string, UIEventSource<any>> = {}
public static Get<T>(
key: string,
options?: { defaultValue?: T; whenLoaded?: (t: T | null) => void }
): UIEventSource<T> {
if (IdbLocalStorage._sourceCache[key] !== undefined) {
return IdbLocalStorage._sourceCache[key]
}
const src = new UIEventSource<T>(options?.defaultValue, "idb-local-storage:" + key)
if (Utils.runningFromConsole) {
return src
}
src.addCallback((v) => idb.set(key, v))
idb.get(key)
.then((v) => {
src.setData(v ?? options?.defaultValue)
if (options?.whenLoaded !== undefined) {
options?.whenLoaded(v)
}
})
.catch((err) => {
console.warn("Loading from local storage failed due to", err)
if (options?.whenLoaded !== undefined) {
options?.whenLoaded(null)
}
})
IdbLocalStorage._sourceCache[key] = src
return src
}
public static SetDirectly(key: string, value: any): Promise<void> {
const copy = Utils.Clone(value)
return idb.set(key, copy)
}
static GetDirectly(key: string): Promise<any> {
return idb.get(key)
}
}
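/*
 * Editor's usage sketch (hypothetical key name, not in the original source): values are read from
 * IndexedDB once and written back on every change.
 *
 * const favourites = IdbLocalStorage.Get<string[]>("example-favourites", {
 *     defaultValue: [],
 *     whenLoaded: (loaded) => console.log("Loaded from IndexedDB:", loaded),
 * })
 * favourites.setData(["node/42"]) // persisted asynchronously via idb.set
 */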

View file

@ -0,0 +1,47 @@
import { UIEventSource } from "../UIEventSource"
import { Utils } from "../../Utils"
/**
* Fetches data from arbitrary third-party data sources; used in the metatagging
*/
export default class LiveQueryHandler {
private static neededShorthands = {} // url -> (shorthand:paths)[]
public static FetchLiveData(
url: string,
shorthands: string[]
): UIEventSource<any /* string -> string */> {
const shorthandsSet: string[] = LiveQueryHandler.neededShorthands[url] ?? []
for (const shorthand of shorthands) {
if (shorthandsSet.indexOf(shorthand) < 0) {
shorthandsSet.push(shorthand)
}
}
LiveQueryHandler.neededShorthands[url] = shorthandsSet
if (LiveQueryHandler[url] === undefined) {
const source = new UIEventSource({})
LiveQueryHandler[url] = source
console.log("Fetching live data from a third-party (unknown) API:", url)
Utils.downloadJson(url).then((data) => {
for (const shorthandDescription of shorthandsSet) {
const descr = shorthandDescription.trim().split(":")
const shorthand = descr[0]
const path = descr[1]
const parts = path.split(".")
let trail = data
for (const part of parts) {
if (trail !== undefined) {
trail = trail[part]
}
}
source.data[shorthand] = trail
}
source.ping()
})
}
return LiveQueryHandler[url]
}
}
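/*
 * Editor's sketch (hypothetical URL and paths, not in the original source): every shorthand is a
 * "name:path.in.response" pair; the value found at that path ends up under `name` in the store.
 *
 * const live = LiveQueryHandler.FetchLiveData("https://example.org/status.json", [
 *     "status:station.status",
 *     "free_places:station.capacity.free",
 * ])
 * live.addCallbackAndRunD((d) => console.log(d["status"], d["free_places"]))
 */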

View file

@ -0,0 +1,43 @@
import { UIEventSource } from "../UIEventSource"
/**
* UIEventSource wrapper around localStorage
*/
export class LocalStorageSource {
static GetParsed<T>(key: string, defaultValue: T): UIEventSource<T> {
return LocalStorageSource.Get(key).sync(
(str) => {
if (str === undefined) {
return defaultValue
}
try {
return JSON.parse(str)
} catch {
return defaultValue
}
},
[],
(value) => JSON.stringify(value)
)
}
static Get(key: string, defaultValue: string = undefined): UIEventSource<string> {
try {
const saved = localStorage.getItem(key)
const source = new UIEventSource<string>(saved ?? defaultValue, "localstorage:" + key)
source.addCallback((data) => {
try {
localStorage.setItem(key, data)
} catch (e) {
// Probably exceeded the quota with this item!
// Lets nuke everything
localStorage.clear()
}
})
return source
} catch (e) {
return new UIEventSource<string>(defaultValue)
}
}
}
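/*
 * Editor's usage sketch (hypothetical key, not in the original source): `GetParsed` keeps a JSON
 * value in sync with localStorage, falling back to the default when parsing fails.
 *
 * const settings = LocalStorageSource.GetParsed<{ theme: string }>("example-settings", {
 *     theme: "default",
 * })
 * settings.setData({ theme: "dark" }) // stored as the string '{"theme":"dark"}'
 */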

View file

@ -0,0 +1,236 @@
import { ImmutableStore, Store, UIEventSource } from "../UIEventSource"
import { MangroveReviews, Review } from "mangrove-reviews-typescript"
import { Utils } from "../../Utils"
import { Feature, Position } from "geojson"
import { GeoOperations } from "../GeoOperations"
export class MangroveIdentity {
public readonly keypair: Store<CryptoKeyPair>
public readonly key_id: Store<string>
constructor(mangroveIdentity: UIEventSource<string>) {
const key_id = new UIEventSource<string>(undefined)
this.key_id = key_id
const keypairEventSource = new UIEventSource<CryptoKeyPair>(undefined)
this.keypair = keypairEventSource
mangroveIdentity.addCallbackAndRunD(async (data) => {
if (data === "") {
return
}
const keypair = await MangroveReviews.jwkToKeypair(JSON.parse(data))
keypairEventSource.setData(keypair)
const pem = await MangroveReviews.publicToPem(keypair.publicKey)
key_id.setData(pem)
})
try {
if (!Utils.runningFromConsole && (mangroveIdentity.data ?? "") === "") {
MangroveIdentity.CreateIdentity(mangroveIdentity).then((_) => {})
}
} catch (e) {
console.error("Could not create identity: ", e)
}
}
/**
* Creates an identity if none exists already.
* Is written into the UIEventSource which was passed into the constructor
* @constructor
*/
private static async CreateIdentity(identity: UIEventSource<string>): Promise<void> {
const keypair = await MangroveReviews.generateKeypair()
const jwk = await MangroveReviews.keypairToJwk(keypair)
if ((identity.data ?? "") !== "") {
// Identity has been loaded via osmPreferences by now - we don't overwrite
return
}
identity.setData(JSON.stringify(jwk))
}
}
/**
* Tracks all reviews of a given feature, allows to create a new review
*/
export default class FeatureReviews {
private static readonly _featureReviewsCache: Record<string, FeatureReviews> = {}
public readonly subjectUri: Store<string>
private readonly _reviews: UIEventSource<(Review & { madeByLoggedInUser: Store<boolean> })[]> =
new UIEventSource([])
public readonly reviews: Store<(Review & { madeByLoggedInUser: Store<boolean> })[]> =
this._reviews
private readonly _lat: number
private readonly _lon: number
private readonly _uncertainty: number
private readonly _name: Store<string>
private readonly _identity: MangroveIdentity
private constructor(
feature: Feature,
tagsSource: UIEventSource<Record<string, string>>,
mangroveIdentity?: MangroveIdentity,
options?: {
nameKey?: "name" | string
fallbackName?: string
uncertaintyRadius?: number
}
) {
const centerLonLat = GeoOperations.centerpointCoordinates(feature)
;[this._lon, this._lat] = centerLonLat
this._identity =
mangroveIdentity ?? new MangroveIdentity(new UIEventSource<string>(undefined))
const nameKey = options?.nameKey ?? "name"
if (feature.geometry.type === "Point") {
this._uncertainty = options?.uncertaintyRadius ?? 10
} else {
let coordss: Position[][]
if (feature.geometry.type === "LineString") {
coordss = [feature.geometry.coordinates]
} else if (
feature.geometry.type === "MultiLineString" ||
feature.geometry.type === "Polygon"
) {
coordss = feature.geometry.coordinates
}
let maxDistance = 0
for (const coords of coordss) {
for (const coord of coords) {
maxDistance = Math.max(
maxDistance,
GeoOperations.distanceBetween(centerLonLat, coord)
)
}
}
this._uncertainty = options?.uncertaintyRadius ?? maxDistance
}
this._name = tagsSource.map((tags) => tags[nameKey] ?? options?.fallbackName)
this.subjectUri = this.ConstructSubjectUri()
const self = this
this.subjectUri.addCallbackAndRunD(async (sub) => {
const reviews = await MangroveReviews.getReviews({ sub })
self.addReviews(reviews.reviews)
})
/* We also construct all subject queries _without_ encoding the name to work around a previous bug
* See https://github.com/giggls/opencampsitemap/issues/30
*/
this.ConstructSubjectUri(true).addCallbackAndRunD(async (sub) => {
try {
const reviews = await MangroveReviews.getReviews({ sub })
self.addReviews(reviews.reviews)
} catch (e) {
console.log("Could not fetch reviews for partially incorrect query ", sub)
}
})
}
/**
* Construct a featureReviewsFor or fetches it from the cache
*/
public static construct(
feature: Feature,
tagsSource: UIEventSource<Record<string, string>>,
mangroveIdentity?: MangroveIdentity,
options?: {
nameKey?: "name" | string
fallbackName?: string
uncertaintyRadius?: number
}
) {
const key = feature.properties.id
const cached = FeatureReviews._featureReviewsCache[key]
if (cached !== undefined) {
return cached
}
const featureReviews = new FeatureReviews(feature, tagsSource, mangroveIdentity, options)
FeatureReviews._featureReviewsCache[key] = featureReviews
return featureReviews
}
/**
* The given review is uploaded to mangrove.reviews and added to the list of known reviews
*/
public async createReview(review: Omit<Review, "sub">): Promise<void> {
const r: Review = {
sub: this.subjectUri.data,
...review,
}
const keypair: CryptoKeyPair = this._identity.keypair.data
console.log(r)
const jwt = await MangroveReviews.signReview(keypair, r)
console.log("Signed:", jwt)
await MangroveReviews.submitReview(jwt)
this._reviews.data.push({ ...r, madeByLoggedInUser: new ImmutableStore(true) })
this._reviews.ping()
}
/**
* Adds given reviews to the 'reviews'-UI-eventsource
* @param reviews
* @private
*/
private addReviews(reviews: { payload: Review; kid: string }[]) {
const self = this
const alreadyKnown = new Set(self._reviews.data.map((r) => r.rating + " " + r.opinion))
let hasNew = false
for (const reviewData of reviews) {
const review = reviewData.payload
try {
const url = new URL(review.sub)
console.log("URL is", url)
if (url.protocol === "geo:") {
const coordinate = <[number, number]>(
url.pathname.split(",").map((n) => Number(n))
)
const distance = GeoOperations.distanceBetween(
[this._lat, this._lon],
coordinate
)
if (distance > this._uncertainty) {
continue
}
}
} catch (e) {
console.warn(e)
}
const key = review.rating + " " + review.opinion
if (alreadyKnown.has(key)) {
continue
}
self._reviews.data.push({
...review,
madeByLoggedInUser: this._identity.key_id.map((user_key_id) => {
return reviewData.kid === user_key_id
}),
})
hasNew = true
}
if (hasNew) {
self._reviews.ping()
}
}
/**
* Gets an URI which represents the item in a mangrove-compatible way
*
* See https://mangrove.reviews/standard#mangrove-core-uri-schemes
* @constructor
*/
private ConstructSubjectUri(dontEncodeName: boolean = false): Store<string> {
// https://www.rfc-editor.org/rfc/rfc5870#section-3.4.2
// `u` stands for `uncertainty`, https://www.rfc-editor.org/rfc/rfc5870#section-3.4.3
const self = this
return this._name.map(function (name) {
let uri = `geo:${self._lat},${self._lon}?u=${self._uncertainty}`
if (name) {
uri += "&q=" + (dontEncodeName ? name : encodeURIComponent(name))
}
return uri
})
}
}
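/*
 * Editor's note (sketch, not in the original source): the subject URI built above follows the
 * Mangrove `geo:` scheme, e.g. for a feature named "Example Café" at lat 51.2, lon 3.2 with an
 * uncertainty of 30 metres:
 *
 *   geo:51.2,3.2?u=30&q=Example%20Caf%C3%A9
 */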

1020
src/Logic/Web/PlantNet.ts Normal file

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,135 @@
/**
* Wraps the query parameters into UIEventSources
*/
import { UIEventSource } from "../UIEventSource"
import Hash from "./Hash"
import { Utils } from "../../Utils"
export class QueryParameters {
static defaults: Record<string, string> = {}
static documentation: Map<string, string> = new Map<string, string>()
private static order: string[] = ["layout", "test", "z", "lat", "lon"]
protected static readonly _wasInitialized: Set<string> = new Set()
protected static readonly knownSources: Record<string, UIEventSource<string>> = {}
private static initialized = false
public static GetQueryParameter(
key: string,
deflt: string,
documentation?: string
): UIEventSource<string> {
if (!this.initialized) {
this.init()
}
QueryParameters.documentation.set(key, documentation)
if (deflt !== undefined) {
QueryParameters.defaults[key] = deflt
}
if (QueryParameters.knownSources[key] !== undefined) {
return QueryParameters.knownSources[key]
}
QueryParameters.addOrder(key)
const source = new UIEventSource<string>(deflt, "&" + key)
QueryParameters.knownSources[key] = source
source.addCallback(() => QueryParameters.Serialize())
return source
}
public static SetDefaultFor(key: string, value: string) {
if (QueryParameters.defaults[key] === value) {
return
}
QueryParameters.defaults[key] = value
QueryParameters.Serialize()
}
public static GetBooleanQueryParameter(
key: string,
deflt: boolean,
documentation?: string
): UIEventSource<boolean> {
return UIEventSource.asBoolean(
QueryParameters.GetQueryParameter(key, "" + deflt, documentation)
)
}
public static wasInitialized(key: string): boolean {
this.init()
return QueryParameters._wasInitialized.has(key)
}
private static addOrder(key) {
if (this.order.indexOf(key) < 0) {
this.order.push(key)
}
}
private static init() {
if (this.initialized) {
return
}
this.initialized = true
if (Utils.runningFromConsole) {
return
}
if (window?.location?.search) {
const params = window.location.search.substr(1).split("&")
for (const param of params) {
const [key, value] = param.split("=")
QueryParameters.addOrder(key)
QueryParameters._wasInitialized.add(key)
const v = decodeURIComponent(value)
const source = new UIEventSource<string>(v)
source.addCallback(() => QueryParameters.Serialize())
QueryParameters.knownSources[key] = source
}
}
}
/**
* Set the query parameters of the page location
* @constructor
* @private
*/
private static Serialize() {
const parts = []
for (const key of QueryParameters.order) {
if (QueryParameters.knownSources[key]?.data === undefined) {
continue
}
if (QueryParameters.knownSources[key].data === "undefined") {
continue
}
if (QueryParameters.knownSources[key].data === QueryParameters.defaults[key]) {
continue
}
parts.push(
encodeURIComponent(key) +
"=" +
encodeURIComponent(QueryParameters.knownSources[key].data)
)
}
if (!Utils.runningFromConsole) {
// Don't pollute the history every time a parameter changes
try {
history.replaceState(null, "", "?" + parts.join("&") + Hash.Current())
} catch (e) {
console.error(e)
}
}
}
static ClearAll() {
for (const name in QueryParameters.knownSources) {
QueryParameters.knownSources[name].setData(undefined)
}
QueryParameters._wasInitialized.clear()
QueryParameters.order = []
}
}
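/*
 * Editor's usage sketch (hypothetical parameter, not in the original source): each query parameter
 * becomes a UIEventSource; changing it rewrites the URL via history.replaceState.
 *
 * const language = QueryParameters.GetQueryParameter(
 *     "example-language",
 *     "en",
 *     "The language to use in this example"
 * )
 * language.setData("nl") // the URL now contains "...?example-language=nl"
 */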

View file

@ -0,0 +1,173 @@
import ThemeViewState from "../../Models/ThemeViewState"
import Hash from "./Hash"
export default class ThemeViewStateHashActor {
private readonly _state: ThemeViewState
/**
* Converts the hash to the appropriate themeview state and, vice versa, sets the hash.
*
* As the navigator-back-button changes the hash first, this class thus also handles the 'back'-button events.
*
* Note that there is no "real" way to intercept the back button, we can only detect the removal of the hash.
* As such, we use a change in the hash to close the appropriate windows
*
* @param state
*/
constructor(state: ThemeViewState) {
this._state = state
// First of all, try to recover the selected element
if (Hash.hash.data) {
const hash = Hash.hash.data
this.loadStateFromHash(hash)
Hash.hash.setData(hash) // reapply the previous hash
state.indexedFeatures.featuresById.addCallbackAndRunD((_) => {
let unregister = this.loadSelectedElementFromHash(hash)
// once we have found a matching element, we can be sure the indexed feature source was populated and that the job is done
return unregister
})
}
// Register a hash change listener to correctly handle the back button
Hash.hash.addCallback((hash) => {
if (!!hash) {
// There is still a hash
// We _only_ have to (at most) close the overlays in this case
const parts = hash.split(";")
if (parts.indexOf("background") < 0) {
state.guistate.backgroundLayerSelectionIsOpened.setData(false)
}
this.loadSelectedElementFromHash(hash)
} else {
this.back()
}
})
// At last, register callbacks on the state to update the hash when they change.
// Note: these should use 'addCallback', not 'addCallbackAndRun'
state.selectedElement.addCallback((_) => this.setHash())
state.guistate.allToggles.forEach(({ toggle, submenu }) => {
submenu?.addCallback((_) => this.setHash())
toggle.addCallback((_) => this.setHash())
})
// When all is done, set the hash. This must happen last to give the code above correct info
this.setHash()
}
/**
* Selects the appropriate element
* Returns true if this method can be unregistered for the first run
* @param hash
* @private
*/
private loadSelectedElementFromHash(hash: string): boolean {
const state = this._state
const selectedElement = state.selectedElement
// state.indexedFeatures.featuresById.stabilized(250)
hash = hash.split(";")[0] // The 'selectedElement' is always the _first_ item in the hash (if any)
// Set the hash based on the selected element...
// ... search and select an element based on the hash
if (selectedElement.data?.properties?.id === hash) {
// We already have the correct hash
return true
}
const found = state.indexedFeatures.featuresById.data?.get(hash)
if (!found) {
return false
}
if (found.properties.id === "last_click") {
return true
}
const layer = this._state.layout.getMatchingLayer(found.properties)
console.log(
"Setting selected element based on hash",
hash,
"; found",
found,
"got matching layer",
layer.id,
""
)
selectedElement.setData(found)
state.selectedLayer.setData(layer)
return true
}
private loadStateFromHash(hash: string) {
const state = this._state
const parts = hash.split(";")
outer: for (const { toggle, name, showOverOthers, submenu } of state.guistate.allToggles) {
for (const part of parts) {
if (part === name) {
toggle.setData(true)
continue outer
}
if (part.indexOf(":") < 0) {
continue
}
const [main, submenuValue] = part.split(":")
if (main !== name) {
continue
}
toggle.setData(true)
submenu?.setData(submenuValue)
continue outer
}
// If we arrive here, the loop above has not found any match
toggle.setData(false)
}
}
private setHash() {
const s = this._state
let h = ""
for (const { toggle, showOverOthers, name, submenu } of s.guistate.allToggles) {
if (showOverOthers || !toggle.data) {
continue
}
h = name
if (submenu?.data) {
h += ":" + submenu.data
}
}
if (s.selectedElement.data !== undefined) {
h = s.selectedElement.data.properties.id
}
for (const { toggle, showOverOthers, name, submenu } of s.guistate.allToggles) {
if (!showOverOthers || !toggle.data) {
continue
}
if (h) {
h += ";" + name
} else {
h = name
}
if (submenu?.data) {
h += ":" + submenu.data
}
}
Hash.hash.setData(h)
}
private back() {
console.log("Got a back event")
const state = this._state
// history.pushState(null, null, window.location.pathname);
if (state.selectedElement.data) {
state.selectedElement.setData(undefined)
return
}
if (state.guistate.closeAll()) {
return
}
}
}

426
src/Logic/Web/Wikidata.ts Normal file
View file

@ -0,0 +1,426 @@
import { Utils } from "../../Utils"
import { Store, UIEventSource } from "../UIEventSource"
import * as wds from "wikidata-sdk"
export class WikidataResponse {
public readonly id: string
public readonly labels: Map<string, string>
public readonly descriptions: Map<string, string>
public readonly claims: Map<string, Set<string>>
public readonly wikisites: Map<string, string>
public readonly commons: string
constructor(
id: string,
labels: Map<string, string>,
descriptions: Map<string, string>,
claims: Map<string, Set<string>>,
wikisites: Map<string, string>,
commons: string
) {
this.id = id
this.labels = labels
this.descriptions = descriptions
this.claims = claims
this.wikisites = wikisites
this.commons = commons
}
public static fromJson(entity: any): WikidataResponse {
const labels = new Map<string, string>()
for (const labelName in entity.labels) {
// The labelname is the language code
labels.set(labelName, entity.labels[labelName].value)
}
const descr = new Map<string, string>()
for (const labelName in entity.descriptions) {
// The labelname is the language code
descr.set(labelName, entity.descriptions[labelName].value)
}
const sitelinks = new Map<string, string>()
for (const labelName in entity.sitelinks) {
// labelName is `${language}wiki`
const language = labelName.substring(0, labelName.length - 4)
const title = entity.sitelinks[labelName].title
sitelinks.set(language, title)
}
const commons = sitelinks.get("commons")
sitelinks.delete("commons")
const claims = WikidataResponse.extractClaims(entity.claims)
return new WikidataResponse(entity.id, labels, descr, claims, sitelinks, commons)
}
static extractClaims(claimsJson: any): Map<string, Set<string>> {
const simplified = wds.simplify.claims(claimsJson, {
timeConverter: "simple-day",
})
const claims = new Map<string, Set<string>>()
for (const claimId in simplified) {
const claimsList: any[] = simplified[claimId]
claims.set(claimId, new Set(claimsList))
}
return claims
}
}
export class WikidataLexeme {
id: string
lemma: Map<string, string>
senses: Map<string, string>
claims: Map<string, Set<string>>
constructor(json) {
this.id = json.id
this.claims = WikidataResponse.extractClaims(json.claims)
this.lemma = new Map<string, string>()
for (const language in json.lemmas) {
this.lemma.set(language, json.lemmas[language].value)
}
this.senses = new Map<string, string>()
for (const sense of json.senses) {
const glosses = sense.glosses
for (const language in glosses) {
let previousSenses = this.senses.get(language)
if (previousSenses === undefined) {
previousSenses = ""
} else {
previousSenses = previousSenses + "; "
}
this.senses.set(language, previousSenses + (glosses[language].value ?? ""))
}
}
}
asWikidataResponse() {
return new WikidataResponse(
this.id,
this.lemma,
this.senses,
this.claims,
new Map(),
undefined
)
}
}
export interface WikidataSearchoptions {
lang?: "en" | string
maxCount?: 20 | number
}
export interface WikidataAdvancedSearchoptions extends WikidataSearchoptions {
instanceOf?: number[]
notInstanceOf?: number[]
}
/**
* Utility functions around wikidata
*/
export default class Wikidata {
private static readonly _identifierPrefixes = ["Q", "L"].map((str) => str.toLowerCase())
private static readonly _prefixesToRemove = [
"https://www.wikidata.org/wiki/Lexeme:",
"https://www.wikidata.org/wiki/",
"http://www.wikidata.org/entity/",
"Lexeme:",
].map((str) => str.toLowerCase())
private static readonly _storeCache = new Map<
string,
Store<{ success: WikidataResponse } | { error: any }>
>()
/**
* Same as LoadWikidataEntry, but wrapped into a UIEventSource
* @param value
* @constructor
*/
public static LoadWikidataEntry(
value: string | number
): Store<{ success: WikidataResponse } | { error: any }> {
const key = this.ExtractKey(value)
const cached = Wikidata._storeCache.get(key)
if (cached) {
return cached
}
const src = UIEventSource.FromPromiseWithErr(Wikidata.LoadWikidataEntryAsync(key))
Wikidata._storeCache.set(key, src)
return src
}
/**
* Given a search text, searches for the relevant wikidata entries, excluding pages "outside of the main tree", e.g. disambiguation pages.
* Optionally, an 'instance of' can be given to limit the scope, e.g. instanceOf:5 (humans) will only search for humans
*/
public static async searchAdvanced(
text: string,
options: WikidataAdvancedSearchoptions
): Promise<
{
id: string
relevance?: number
label: string
description?: string
}[]
> {
let instanceOf = ""
if (options?.instanceOf !== undefined && options.instanceOf.length > 0) {
const phrases = options.instanceOf.map((q) => `{ ?item wdt:P31/wdt:P279* wd:Q${q}. }`)
instanceOf = "{" + phrases.join(" UNION ") + "}"
}
const forbidden = (options?.notInstanceOf ?? []).concat([17379835]) // blacklist 'wikimedia pages outside of the main knowledge tree', e.g. disambiguation pages
const minusPhrases = forbidden.map((q) => `MINUS {?item wdt:P31/wdt:P279* wd:Q${q} .}`)
const sparql = `SELECT * WHERE {
SERVICE wikibase:mwapi {
bd:serviceParam wikibase:api "EntitySearch" .
bd:serviceParam wikibase:endpoint "www.wikidata.org" .
bd:serviceParam mwapi:search "${text.replace(/\\/g, "\\\\").replace(/"/g, '\\"')}" .
bd:serviceParam mwapi:language "${options.lang}" .
?item wikibase:apiOutputItem mwapi:item .
?num wikibase:apiOrdinal true .
bd:serviceParam wikibase:limit ${
Math.round(
(options.maxCount ?? 20) * 1.5
) /*Some padding for disambiguation pages */
} .
?label wikibase:apiOutput mwapi:label .
?description wikibase:apiOutput "@description" .
}
${instanceOf}
${minusPhrases.join("\n ")}
} ORDER BY ASC(?num) LIMIT ${options.maxCount ?? 20}`
const url = wds.sparqlQuery(sparql)
const result = await Utils.downloadJson(url)
/*The full uri of the wikidata-item*/
return result.results.bindings.map(({ item, label, description, num }) => ({
relevance: num?.value,
id: item?.value,
label: label?.value,
description: description?.value,
}))
}
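/*
 * Editor's usage sketch (example arguments, not in the original source): restrict a search to
 * instances of a given Wikidata class by passing its numeric Q-id.
 *
 * const hits = await Wikidata.searchAdvanced("Brugge", {
 *     lang: "nl",
 *     maxCount: 5,
 *     instanceOf: [515], // Q515 = city; an assumption used purely for illustration
 * })
 * console.log(hits.map((h) => h.id + ": " + h.label))
 */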
public static async search(
search: string,
options?: WikidataSearchoptions,
page = 1
): Promise<
{
id: string
label: string
description: string
}[]
> {
const maxCount = options?.maxCount ?? 20
let pageCount = Math.min(maxCount, 50)
const start = page * pageCount - pageCount
const lang = options?.lang ?? "en"
const url =
"https://www.wikidata.org/w/api.php?action=wbsearchentities&search=" +
search +
"&language=" +
lang +
"&limit=" +
pageCount +
"&continue=" +
start +
"&format=json&uselang=" +
lang +
"&type=item&origin=*" +
"&props=" // props= removes some unused values in the result
const response = await Utils.downloadJsonCached(url, 10000)
const result: any[] = response.search
if (result.length < pageCount) {
// No next page
return result
}
if (result.length < maxCount) {
const newOptions = { ...options }
newOptions.maxCount = maxCount - result.length
result.push(...(await Wikidata.search(search, newOptions, page + 1)))
}
return result
}
public static async searchAndFetch(
search: string,
options?: WikidataAdvancedSearchoptions
): Promise<WikidataResponse[]> {
// We provide some padding to filter away invalid values
const searchResults = await Wikidata.searchAdvanced(search, options)
const maybeResponses = await Promise.all(
searchResults.map(async (r) => {
try {
console.log("Loading ", r.id)
return await Wikidata.LoadWikidataEntry(r.id).AsPromise()
} catch (e) {
console.error(e)
return undefined
}
})
)
return Utils.NoNull(maybeResponses.map((r) => <WikidataResponse>r["success"]))
}
/**
* Gets the 'key' segment from a URL
*
* Wikidata.ExtractKey("https://www.wikidata.org/wiki/Lexeme:L614072") // => "L614072"
* Wikidata.ExtractKey("http://www.wikidata.org/entity/Q55008046") // => "Q55008046"
* Wikidata.ExtractKey("Q55008046") // => "Q55008046"
* Wikidata.ExtractKey("A55008046") // => undefined
* Wikidata.ExtractKey("Q55008046X") // => undefined
*/
public static ExtractKey(value: string | number): string {
if (typeof value === "number") {
return "Q" + value
}
if (value === undefined) {
console.error("ExtractKey: value is undefined")
return undefined
}
value = value.trim().toLowerCase()
for (const prefix of Wikidata._prefixesToRemove) {
if (value.startsWith(prefix)) {
value = value.substring(prefix.length)
}
}
if (value.startsWith("http") && value === "") {
// Probably some random link in the image field - we skip it
return undefined
}
for (const identifierPrefix of Wikidata._identifierPrefixes) {
if (value.startsWith(identifierPrefix)) {
const trimmed = value.substring(identifierPrefix.length)
if (trimmed === "") {
return undefined
}
const n = Number(trimmed)
if (isNaN(n)) {
return undefined
}
return value.toUpperCase()
}
}
if (value !== "" && !isNaN(Number(value))) {
return "Q" + value
}
return undefined
}
/**
* Converts 'Q123' into 123, returns undefined if invalid
*
* Wikidata.QIdToNumber("Q123") // => 123
* Wikidata.QIdToNumber(" Q123 ") // => 123
* Wikidata.QIdToNumber(" X123 ") // => undefined
* Wikidata.QIdToNumber(" Q123X ") // => undefined
* Wikidata.QIdToNumber(undefined) // => undefined
* Wikidata.QIdToNumber(123) // => 123
*/
public static QIdToNumber(q: string | number): number | undefined {
if (q === undefined || q === null) {
return
}
if (typeof q === "number") {
return q
}
q = q.trim()
if (!q.startsWith("Q")) {
return
}
q = q.substr(1)
const n = Number(q)
if (isNaN(n)) {
return
}
return n
}
public static IdToArticle(id: string) {
if (id.startsWith("Q")) {
return "https://wikidata.org/wiki/" + id
}
if (id.startsWith("L")) {
return "https://wikidata.org/wiki/Lexeme:" + id
}
throw "Unknown id type: " + id
}
/**
* Build a SPARQL-query, return the result
*
* @param keys: how variables are named. Every key not ending with 'Label' should appear in at least one statement
* @param statements
* @constructor
*/
public static async Sparql<T>(
keys: string[],
statements: string[]
): Promise<(T & Record<string, { type: string; value: string }>)[]> {
const query =
"SELECT " +
keys.map((k) => (k.startsWith("?") ? k : "?" + k)).join(" ") +
"\n" +
"WHERE\n" +
"{\n" +
statements.map((stmt) => (stmt.endsWith(".") ? stmt : stmt + ".")).join("\n") +
' SERVICE wikibase:label { bd:serviceParam wikibase:language "[AUTO_LANGUAGE]". }\n' +
"}"
const url = wds.sparqlQuery(query)
const result = await Utils.downloadJsonCached(url, 24 * 60 * 60 * 1000)
return result.results.bindings
}
private static _cache = new Map<string, Promise<WikidataResponse>>()
public static async LoadWikidataEntryAsync(value: string | number): Promise<WikidataResponse> {
const key = "" + value
const cached = Wikidata._cache.get(key)
if (cached) {
return cached
}
const uncached = Wikidata.LoadWikidataEntryUncachedAsync(value)
Wikidata._cache.set(key, uncached)
return uncached
}
/**
* Loads a wikidata page
* @returns the entity of the given value
*/
private static async LoadWikidataEntryUncachedAsync(
value: string | number
): Promise<WikidataResponse> {
const id = Wikidata.ExtractKey(value)
if (id === undefined) {
console.warn("Could not extract a wikidata entry from", value)
return undefined
}
const url = "https://www.wikidata.org/wiki/Special:EntityData/" + id + ".json"
const entities = (await Utils.downloadJsonCached(url, 10000)).entities
const firstKey = <string>Array.from(Object.keys(entities))[0] // Roundabout way to fetch the entity; it might have been a redirect
const response = entities[firstKey]
if (id.startsWith("L")) {
// This is a lexeme:
return new WikidataLexeme(response).asWikidataResponse()
}
return WikidataResponse.fromJson(response)
}
}

View file

@ -0,0 +1,55 @@
import { Utils } from "../../Utils"
export default class Wikimedia {
/**
* Recursively walks a wikimedia commons category in order to search for entries, which can be File: or Category: entries
* Returns (a promise of) a list of URLS
* @param categoryName The name of the wikimedia category
* @param maxLoad: the maximum amount of images to return
* @param continueParameter: if the page indicates that more pages should be loaded, this uses a token to continue. Provided by wikimedia
*/
public static async GetCategoryContents(
categoryName: string,
maxLoad = 10,
continueParameter: string = undefined
): Promise<string[]> {
if (categoryName === undefined || categoryName === null || categoryName === "") {
return []
}
if (!categoryName.startsWith("Category:")) {
categoryName = "Category:" + categoryName
}
let url =
"https://commons.wikimedia.org/w/api.php?" +
"action=query&list=categorymembers&format=json&" +
"&origin=*" +
"&cmtitle=" +
encodeURIComponent(categoryName)
if (continueParameter !== undefined) {
url = `${url}&cmcontinue=${continueParameter}`
}
const response = await Utils.downloadJson(url)
const members = response.query?.categorymembers ?? []
const imageOverview: string[] = members.map((member) => member.title)
if (response.continue === undefined) {
// We are done crawling through the category - no continuation in sight
return imageOverview
}
if (maxLoad - imageOverview.length <= 0) {
console.debug(`Recursive wikimedia category load stopped for ${categoryName}`)
return imageOverview
}
// We do have a continue token - let's load the next page
const recursive = await Wikimedia.GetCategoryContents(
categoryName,
maxLoad - imageOverview.length,
response.continue.cmcontinue
)
imageOverview.push(...recursive)
return imageOverview
}
}
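/*
 * Editor's usage sketch (hypothetical category, not in the original source): the result is a flat
 * list of "File:..." and "Category:..." titles, loaded page by page until `maxLoad` is reached.
 *
 * const titles = await Wikimedia.GetCategoryContents("Category:Example images", 20)
 * console.log(titles.filter((t) => t.startsWith("File:")))
 */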

298
src/Logic/Web/Wikipedia.ts Normal file
View file

@ -0,0 +1,298 @@
import { Utils } from "../../Utils"
import Wikidata, { WikidataResponse } from "./Wikidata"
import { Store, UIEventSource } from "../UIEventSource"
export interface FullWikipediaDetails {
articleUrl?: string
language?: string
pagename?: string
fullArticle?: string
firstParagraph?: string
restOfArticle?: string
wikidata?: WikidataResponse
title?: string
}
export default class Wikipedia {
/**
* When getting a wikipedia page data result, some elements (e.g. navigation, infoboxes, ...) should be removed if 'removeInfoBoxes' is set.
* We do this based on the classes. This set contains a blacklist of the classes to remove
* @private
*/
private static readonly classesToRemove = [
"shortdescription",
"sidebar",
"infobox",
"infobox_v2",
"noprint",
"ambox",
"mw-editsection",
"mw-selflink",
"mw-empty-elt",
"hatnote", // Often redirects
]
private static readonly idsToRemove = ["sjabloon_zie"]
private static readonly _cache = new Map<string, Promise<string>>()
private static _fullDetailsCache = new Map<string, Store<FullWikipediaDetails>>()
public readonly backend: string
constructor(options?: { language?: "en" | string } | { backend?: string }) {
this.backend = Wikipedia.getBackendUrl(options ?? {})
}
/**
* Tries to extract the language and article name from the given string
*
* Wikipedia.extractLanguageAndName("qsdf") // => undefined
* Wikipedia.extractLanguageAndName("nl:Warandeputten") // => {language: "nl", pageName: "Warandeputten"}
*/
public static extractLanguageAndName(input: string): { language: string; pageName: string } {
const matched = input.match("([^:]+):(.*)")
if (matched === undefined || matched === null) {
return undefined
}
const [_, language, pageName] = matched
return {
language,
pageName,
}
}
/**
* Fetch all useful information for the given entity.
*
*/
public static fetchArticleAndWikidata(
wikidataOrPageId: string,
preferedLanguage: string
): Store<FullWikipediaDetails> {
const cachekey = preferedLanguage + wikidataOrPageId
const cached = Wikipedia._fullDetailsCache.get(cachekey)
if (cached) {
return cached
}
console.log("Constructing store for", cachekey)
const store = new UIEventSource<FullWikipediaDetails>({}, cachekey)
Wikipedia._fullDetailsCache.set(cachekey, store)
// Are we dealing with a wikidata item?
const wikidataId = Wikidata.ExtractKey(wikidataOrPageId)
if (!wikidataId) {
// We are dealing with a wikipedia identifier, e.g. 'NL:articlename', 'https://nl.wikipedia.org/wiki/article', ...
const { language, pageName } = Wikipedia.extractLanguageAndName(wikidataOrPageId)
store.data.articleUrl = new Wikipedia({ language }).getPageUrl(pageName)
store.data.language = language
store.data.pagename = pageName
store.data.title = pageName
} else {
// Yes, this is a wikidata item
// Let's fetch the wikidata
store.data.title = wikidataId
Wikidata.LoadWikidataEntryAsync(wikidataId).then((wikidata) => {
store.data.wikidata = wikidata
store.ping()
// With the wikidata, we can search for the appropriate wikipedia page
const preferredLanguage = [
preferedLanguage,
"en",
Array.from(wikidata.wikisites.keys())[0],
]
for (const language of preferredLanguage) {
const pagetitle = wikidata.wikisites.get(language)
if (pagetitle) {
store.data.articleUrl = new Wikipedia({ language }).getPageUrl(pagetitle)
store.data.pagename = pagetitle
store.data.language = language
store.data.title = pagetitle
store.ping()
break
}
}
})
}
        // Now that the pageURL has been set up, we can focus on downloading the actual article
        // We set up a listener: as soon as the article-URL is known, we'll fetch the actual page
// This url can either be set by the Wikidata-response or directly if we are dealing with a wikipedia-url
store.addCallbackAndRun((data) => {
if (data.language === undefined || data.pagename === undefined) {
return
}
const wikipedia = new Wikipedia({ language: data.language })
wikipedia.GetArticleHtml(data.pagename).then((article) => {
data.fullArticle = article
const content = document.createElement("div")
content.innerHTML = article
                const firstParagraph = content.getElementsByTagName("p").item(0)
                // Not every article necessarily contains a paragraph; guard against that
                if (firstParagraph !== null) {
                    data.firstParagraph = firstParagraph.innerHTML
                    firstParagraph.parentElement.removeChild(firstParagraph)
                }
                data.restOfArticle = content.innerHTML
store.ping()
})
return true // unregister
})
return store
}
private static getBackendUrl(
options: { language?: "en" | string } | { backend?: "en.wikipedia.org" | string }
): string {
let backend = "en.wikipedia.org"
if (options["backend"]) {
backend = options["backend"]
} else if (options["language"]) {
backend = `${options["language"] ?? "en"}.wikipedia.org`
}
if (!backend.startsWith("http")) {
backend = "https://" + backend
}
return backend
}
/**
* Extracts the actual pagename; returns undefined if this came from a different wikimedia entry
*
* new Wikipedia({backend: "https://wiki.openstreetmap.org"}).extractPageName("https://wiki.openstreetmap.org/wiki/NL:Speelbos") // => "NL:Speelbos"
* new Wikipedia().extractPageName("https://wiki.openstreetmap.org/wiki/NL:Speelbos") // => undefined
*/
public extractPageName(input: string): string | undefined {
if (!input.startsWith(this.backend)) {
return undefined
}
input = input.substring(this.backend.length)
const matched = input.match("/?wiki/(.+)")
if (matched === undefined || matched === null) {
return undefined
}
const [_, pageName] = matched
return pageName
}
public getDataUrl(pageName: string): string {
return (
`${this.backend}/w/api.php?action=parse&format=json&origin=*&prop=text&page=` + pageName
)
}
public getPageUrl(pageName: string): string {
return `${this.backend}/wiki/${pageName}`
}
/**
* Textual search of the specified wiki-instance. If searching Wikipedia, we recommend using wikidata.search instead
* @param searchTerm
*/
public async search(searchTerm: string): Promise<{ title: string; snippet: string }[]> {
const url =
this.backend +
"/w/api.php?action=query&format=json&list=search&srsearch=" +
encodeURIComponent(searchTerm)
return (await Utils.downloadJson(url))["query"]["search"]
}
/**
* Searches via 'index.php' and scrapes the result.
     * This gives better results than via the API
* @param searchTerm
*/
public async searchViaIndex(
searchTerm: string
): Promise<{ title: string; snippet: string; url: string }[]> {
const url = `${this.backend}/w/index.php?search=${encodeURIComponent(searchTerm)}&ns0=1`
const result = await Utils.downloadAdvanced(url)
if (result["redirect"]) {
const targetUrl = result["redirect"]
// This is an exact match
return [
{
title: this.extractPageName(targetUrl)?.trim(),
url: targetUrl,
snippet: "",
},
]
}
if (result["error"]) {
throw "Could not download: " + JSON.stringify(result)
}
const el = document.createElement("html")
el.innerHTML = result["content"].replace(/href="\//g, 'href="' + this.backend + "/")
const searchResults = el.getElementsByClassName("mw-search-results")
const individualResults = Array.from(
searchResults[0]?.getElementsByClassName("mw-search-result") ?? []
)
return individualResults.map((result) => {
const toRemove = Array.from(result.getElementsByClassName("searchalttitle"))
for (const toRm of toRemove) {
toRm.parentElement.removeChild(toRm)
}
return {
title: result
.getElementsByClassName("mw-search-result-heading")[0]
.textContent.trim(),
url: result.getElementsByTagName("a")[0].href,
snippet: result.getElementsByClassName("searchresult")[0].textContent,
}
})
}
/**
     * Returns the innerHTML for the given article as a string.
     * Some cleanup is applied to it.
*
* This method uses a static, local cache, so each article will be retrieved only once via the network
*/
public GetArticleHtml(pageName: string): Promise<string> {
const cacheKey = this.backend + "/" + pageName
if (Wikipedia._cache.has(cacheKey)) {
return Wikipedia._cache.get(cacheKey)
}
const promise = this.GetArticleUncachedAsync(pageName)
Wikipedia._cache.set(cacheKey, promise)
return promise
}
private async GetArticleUncachedAsync(pageName: string): Promise<string> {
const response = await Utils.downloadJson(this.getDataUrl(pageName))
if (response?.parse?.text === undefined) {
return undefined
}
const html = response["parse"]["text"]["*"]
if (html === undefined) {
return undefined
}
const div = document.createElement("div")
div.innerHTML = html
const content = Array.from(div.children)[0]
for (const forbiddenClass of Wikipedia.classesToRemove) {
const toRemove = content.getElementsByClassName(forbiddenClass)
for (const toRemoveElement of Array.from(toRemove)) {
toRemoveElement.parentElement?.removeChild(toRemoveElement)
}
}
for (const forbiddenId of Wikipedia.idsToRemove) {
const toRemove = content.querySelector("#" + forbiddenId)
toRemove?.parentElement?.removeChild(toRemove)
}
const links = Array.from(content.getElementsByTagName("a"))
// Rewrite relative links to absolute links + open them in a new tab
links
.filter((link) => link.getAttribute("href")?.startsWith("/") ?? false)
.forEach((link) => {
link.target = "_blank"
// note: link.getAttribute("href") gets the textual value, link.href is the rewritten version which'll contain the host for relative paths
link.href = `${this.backend}${link.getAttribute("href")}`
})
return content.innerHTML
}
}
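A short usage sketch tying these pieces together (illustrative only; "Q42" and the OSM-wiki backend are example values):

// Combined wikidata + wikipedia flow: subscribe to the store and react once the article arrives
const details = Wikipedia.fetchArticleAndWikidata("Q42", "nl")
details.addCallbackAndRun((data) => {
    if (data.firstParagraph !== undefined) {
        console.log(data.title, "-", data.firstParagraph.length, "characters in the first paragraph")
        return true // got what we needed, unregister this callback
    }
})

// The same class can be pointed at any MediaWiki instance, e.g. the OSM wiki
const osmWiki = new Wikipedia({ backend: "wiki.openstreetmap.org" })
osmWiki.GetArticleHtml("NL:Speelbos").then((html) => console.log(html?.length))
// A second call for the same page is served from the static cache and causes no extra network request
osmWiki.GetArticleHtml("NL:Speelbos").then((html) => console.log(html?.length))
osmWiki.search("speelbos").then((hits) => console.log(hits.map((h) => h.title)))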

154
src/Models/Constants.ts Normal file
View file

@ -0,0 +1,154 @@
import { Utils } from "../Utils"
import * as meta from "../../package.json"
export type PriviligedLayerType = (typeof Constants.priviliged_layers)[number]
export default class Constants {
public static vNumber = meta.version
public static ImgurApiKey = "7070e7167f0a25a"
public static readonly mapillary_client_token_v4 =
"MLY|4441509239301885|b40ad2d3ea105435bd40c7e76993ae85"
/**
* API key for Maproulette
*
* Currently there is no user-friendly way to get the user's API key.
* See https://github.com/maproulette/maproulette2/issues/476 for more information.
     * Using an empty string does work for most actions, but all those actions will then be attributed to the Superuser.
*/
public static readonly MaprouletteApiKey = ""
public static defaultOverpassUrls = [
// The official instance, 10000 queries per day per project allowed
"https://overpass-api.de/api/interpreter",
// 'Fair usage'
"https://overpass.kumi.systems/api/interpreter",
// Offline: "https://overpass.nchc.org.tw/api/interpreter",
"https://overpass.openstreetmap.ru/cgi/interpreter",
// Doesn't support nwr: "https://overpass.openstreetmap.fr/api/interpreter"
]
public static readonly added_by_default = [
"selected_element",
"gps_location",
"gps_location_history",
"home_location",
"gps_track",
"range",
"last_click",
] as const
/**
* Special layers which are not included in a theme by default
*/
public static readonly no_include = [
"conflation",
"split_point",
"split_road",
"current_view",
"matchpoint",
"import_candidate",
"usersettings",
] as const
/**
* Layer IDs of layers which have special properties through built-in hooks
*/
public static readonly priviliged_layers = [
...Constants.added_by_default,
...Constants.no_include,
] as const
    // The user journey: thresholds at which new features get unlocked
public static userJourney = {
moreScreenUnlock: 1,
personalLayoutUnlock: 5,
historyLinkVisible: 10,
deletePointsOfOthersUnlock: 20,
tagsVisibleAt: 25,
tagsVisibleAndWikiLinked: 30,
mapCompleteHelpUnlock: 50,
themeGeneratorReadOnlyUnlock: 50,
themeGeneratorFullUnlock: 500,
addNewPointWithUnreadMessagesUnlock: 500,
importHelperUnlock: 5000,
}
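    // Illustrative example (assumed usage; the actual checks live in the UI/state code):
    // a feature is unlocked by comparing the contributor's changeset count against one of
    // these thresholds, e.g. `changesetCount >= Constants.userJourney.themeGeneratorReadOnlyUnlock`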
static readonly minZoomLevelToAddNewPoint = Constants.isRetina() ? 18 : 19
/**
     * Used by 'PendingChangesUploader', which waits this number of seconds before uploading changes.
* (Note that pendingChanges might upload sooner if the popup is closed or similar)
*/
static updateTimeoutSec: number = 30
/**
* If the contributor has their GPS location enabled and makes a change,
     * the points visited less than `nearbyVisitTime` seconds ago will be inspected.
     * The point closest to the changed feature will be considered and this distance will be tracked.
     * All these distances are used to calculate a nearby-score
*/
static nearbyVisitTime: number = 30 * 60
/**
* If a user makes a change, the distance to the changed object is calculated.
     * If a user makes multiple changes, each of these distances is put into one of the bins below.
     * For every bin, the totals are uploaded as metadata (a small binning sketch follows after this class)
*/
static distanceToChangeObjectBins = [25, 50, 100, 500, 1000, 5000, Number.MAX_VALUE]
static themeOrder = [
"personal",
"cyclofix",
"waste",
"etymology",
"food",
"cafes_and_pubs",
"playgrounds",
"hailhydrant",
"toilets",
"aed",
"bookcases",
]
/**
     * Upon initialization, the GPS will search for the current location.
     * If the location is found within the given timeout, the map will automatically fly to it.
*
* In seconds
*/
static zoomToLocationTimeout = 15
static countryCoderEndpoint: string =
"https://raw.githubusercontent.com/pietervdvn/MapComplete-data/main/latlon2country"
public static readonly OsmPreferenceKeyPicturesLicense = "pictures-license"
/**
     * These are the values that are allowed to be used as 'backdrop' icon for a map pin
*/
private static readonly _defaultPinIcons = [
"square",
"circle",
"none",
"pin",
"person",
"plus",
"ring",
"star",
"teardrop",
"triangle",
"crosshair",
] as const
public static readonly defaultPinIcons: string[] = <any>Constants._defaultPinIcons
private static isRetina(): boolean {
if (Utils.runningFromConsole) {
return false
}
// The cause for this line of code: https://github.com/pietervdvn/MapComplete/issues/115
// See https://stackoverflow.com/questions/19689715/what-is-the-best-way-to-detect-retina-support-on-a-device-using-javascript
return (
(window.matchMedia &&
(window.matchMedia(
"only screen and (min-resolution: 192dpi), only screen and (min-resolution: 2dppx), only screen and (min-resolution: 75.6dpcm)"
).matches ||
window.matchMedia(
"only screen and (-webkit-min-device-pixel-ratio: 2), only screen and (-o-min-device-pixel-ratio: 2/1), only screen and (min--moz-device-pixel-ratio: 2), only screen and (min-device-pixel-ratio: 2)"
).matches)) ||
(window.devicePixelRatio && window.devicePixelRatio >= 2)
)
}
}
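A minimal sketch of how a distance could be mapped onto one of the bins above, assuming the bin values are upper bounds in meters (the actual aggregation happens in the changes/statistics logic, not in this file):

function binIndexForDistance(distanceInMeter: number): number {
    // Returns the index of the first bin whose upper bound is not exceeded,
    // e.g. a change made 70 m away from the edited object lands in the 100 m bin (index 2)
    return Constants.distanceToChangeObjectBins.findIndex((upperBound) => distanceInMeter <= upperBound)
}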

170
src/Models/Denomination.ts Normal file
View file

@ -0,0 +1,170 @@
import { Translation } from "../UI/i18n/Translation"
import { DenominationConfigJson } from "./ThemeConfig/Json/UnitConfigJson"
import Translations from "../UI/i18n/Translations"
import { Store } from "../Logic/UIEventSource"
import BaseUIElement from "../UI/BaseUIElement"
import Toggle from "../UI/Input/Toggle"
export class Denomination {
public readonly canonical: string
public readonly _canonicalSingular: string
public readonly useAsDefaultInput: boolean | string[]
public readonly useIfNoUnitGiven: boolean | string[]
public readonly prefix: boolean
public readonly alternativeDenominations: string[]
private readonly _human: Translation
private readonly _humanSingular?: Translation
constructor(json: DenominationConfigJson, useAsDefaultInput: boolean, context: string) {
context = `${context}.unit(${json.canonicalDenomination})`
        if (json.canonicalDenomination === undefined) {
            throw `${context}: this unit has no decent canonical value defined`
        }
        this.canonical = json.canonicalDenomination.trim()
this._canonicalSingular = json.canonicalDenominationSingular?.trim()
json.alternativeDenomination?.forEach((v, i) => {
if ((v?.trim() ?? "") === "") {
throw `${context}.alternativeDenomination.${i}: invalid alternative denomination: undefined, null or only whitespace`
}
})
this.alternativeDenominations = json.alternativeDenomination?.map((v) => v.trim()) ?? []
if (json["default" /* @code-quality: ignore*/] !== undefined) {
throw `${context} uses the old 'default'-key. Use "useIfNoUnitGiven" or "useAsDefaultInput" instead`
}
this.useIfNoUnitGiven = json.useIfNoUnitGiven
this.useAsDefaultInput = useAsDefaultInput ?? json.useIfNoUnitGiven
this._human = Translations.T(json.human, context + "human")
this._humanSingular = Translations.T(json.humanSingular, context + "humanSingular")
this.prefix = json.prefix ?? false
}
get human(): Translation {
return this._human.Clone()
}
get humanSingular(): Translation {
return (this._humanSingular ?? this._human).Clone()
}
/**
* Create a representation of the given value
     * @param value the value from OSM
     * @param actAsDefault if set and the value can be parsed as a number, it will be parsed and trimmed
*
* const unit = new Denomination({
* canonicalDenomination: "m",
* alternativeDenomination: ["meter"],
* human: {
* en: "meter"
* }
* }, false, "test")
* unit.canonicalValue("42m", true) // =>"42 m"
* unit.canonicalValue("42", true) // =>"42 m"
* unit.canonicalValue("42 m", true) // =>"42 m"
* unit.canonicalValue("42 meter", true) // =>"42 m"
* unit.canonicalValue("42m", true) // =>"42 m"
* unit.canonicalValue("42", true) // =>"42 m"
*
* // Should be trimmed if canonical is empty
* const unit = new Denomination({
* canonicalDenomination: "",
* alternativeDenomination: ["meter","m"],
* human: {
* en: "meter"
* }
* }, false, "test")
* unit.canonicalValue("42m", true) // =>"42"
* unit.canonicalValue("42", true) // =>"42"
* unit.canonicalValue("42 m", true) // =>"42"
* unit.canonicalValue("42 meter", true) // =>"42"
*/
public canonicalValue(value: string, actAsDefault: boolean): string {
if (value === undefined) {
return undefined
}
const stripped = this.StrippedValue(value, actAsDefault)
if (stripped === null) {
return null
}
if (stripped === "1" && this._canonicalSingular !== undefined) {
return ("1 " + this._canonicalSingular).trim()
}
return (stripped + " " + this.canonical).trim()
}
/**
* Returns the core value (without unit) if:
* - the value ends with the canonical or an alternative value (or begins with if prefix is set)
     * - the value is a Number (without unit) and actAsDefault is set
*
* Returns null if it doesn't match this unit
*/
public StrippedValue(value: string, actAsDefault: boolean): string {
if (value === undefined) {
return undefined
}
value = value.toLowerCase()
const self = this
function startsWith(key) {
if (self.prefix) {
return value.startsWith(key)
} else {
return value.endsWith(key)
}
}
function substr(key) {
if (self.prefix) {
return value.substr(key.length).trim()
} else {
return value.substring(0, value.length - key.length).trim()
}
}
if (this.canonical !== "" && startsWith(this.canonical.toLowerCase())) {
return substr(this.canonical)
}
if (
this._canonicalSingular !== undefined &&
this._canonicalSingular !== "" &&
startsWith(this._canonicalSingular)
) {
return substr(this._canonicalSingular)
}
for (const alternativeValue of this.alternativeDenominations) {
if (startsWith(alternativeValue)) {
return substr(alternativeValue)
}
}
if (!actAsDefault) {
return null
}
const parsed = Number(value.trim())
if (!isNaN(parsed)) {
return value.trim()
}
return null
}
isDefaultDenomination(country: () => string) {
if (this.useIfNoUnitGiven === true) {
return true
}
if (this.useIfNoUnitGiven === false) {
return false
}
return this.useIfNoUnitGiven.indexOf(country()) >= 0
}
}
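The doctests above only cover suffixed units; below is a quick sketch, in the same doctest style, of how a prefixed denomination (e.g. a currency symbol) behaves. The values are purely illustrative:

const euro = new Denomination(
    {
        canonicalDenomination: "€",
        prefix: true,
        human: { en: "euros" },
        useIfNoUnitGiven: ["FR", "BE"],
    },
    false,
    "test"
)
// The prefix is matched at the start of the value and stripped off:
// euro.StrippedValue("€42", false) // => "42"
// A bare number only matches when 'actAsDefault' is set:
// euro.StrippedValue("42", true) // => "42"
// euro.StrippedValue("42", false) // => null
// 'useIfNoUnitGiven' can be country-dependent:
// euro.isDefaultDenomination(() => "FR") // => true
// euro.isDefaultDenomination(() => "US") // => false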

Some files were not shown because too many files have changed in this diff