forked from MapComplete/MapComplete

commit 3fe1f39c46: Merge develop
457 changed files with 6866 additions and 218533 deletions

@@ -8,11 +8,13 @@ import { FeatureSource, WritableFeatureSource } from "../FeatureSource/FeatureSo
import { LocalStorageSource } from "../Web/LocalStorageSource"
import { GeoOperations } from "../GeoOperations"
import { OsmTags } from "../../Models/OsmFeature"
import StaticFeatureSource, { WritableStaticFeatureSource } from "../FeatureSource/Sources/StaticFeatureSource"
import StaticFeatureSource, {
    WritableStaticFeatureSource,
} from "../FeatureSource/Sources/StaticFeatureSource"
import { MapProperties } from "../../Models/MapProperties"
import { Orientation } from "../../Sensors/Orientation"

("use strict")
;("use strict")
/**
 * The geolocation-handler takes a map-location and a geolocation state.
 * It'll move the map as appropriate given the state of the geolocation-API

@@ -32,7 +32,6 @@ export default class NoElementsInViewDetector {
            return "zoom-to-low"
        }


        for (const [layerName, source] of themeViewState.perLayerFiltered) {
            if (priviliged.has(layerName)) {
                continue

@@ -34,11 +34,15 @@ export default class SelectedElementTagsUpdater {
        })
    }

    public static applyUpdate(latestTags: OsmTags, id: string, state: {
        theme: ThemeConfig,
        changes: Changes,
        featureProperties: FeaturePropertiesStore
    }) {
    public static applyUpdate(
        latestTags: OsmTags,
        id: string,
        state: {
            theme: ThemeConfig
            changes: Changes
            featureProperties: FeaturePropertiesStore
        }
    ) {
        try {
            const leftRightSensitive = state.theme.isLeftRightSensitive()


@@ -69,7 +69,7 @@ export default class DetermineTheme {
            tagRenderings: DetermineTheme.getSharedTagRenderings(),
            tagRenderingOrder: DetermineTheme.getSharedTagRenderingOrder(),
            sharedLayers: knownLayersDict,
            publicLayers: new Set<string>()
            publicLayers: new Set<string>(),
        }
        return convertState
    }


@@ -75,7 +75,7 @@ export default class SaveFeatureSourceToLocalStorage {
        this.storage = storage
        const singleTileSavers: Map<number, SingleTileSaver> = new Map<number, SingleTileSaver>()
        features.features.addCallbackAndRunD((features) => {
            const sliced = GeoOperations.slice(zoomlevel, features)
            const sliced = GeoOperations.spreadIntoBboxes(features, zoomlevel)

            sliced.forEach((features, tileIndex) => {
                let tileSaver = singleTileSavers.get(tileIndex)
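
This hunk swaps `GeoOperations.slice(zoomlevel, features)` for `GeoOperations.spreadIntoBboxes(features, zoomlevel)` (note the flipped argument order). A minimal sketch of the surrounding per-tile save loop, with a hypothetical `TileSaverSketch` standing in for MapComplete's `SingleTileSaver`:

```typescript
import { Feature } from "geojson"

// Hypothetical stand-in for MapComplete's SingleTileSaver, only to illustrate the flow.
class TileSaverSketch {
    constructor(private readonly tileIndex: number) {}
    saveFeatures(features: Feature[]): void {
        console.log(`tile ${this.tileIndex}: saving ${features.length} feature(s)`)
    }
}

// Assumed shape of the call in the diff: features are spread over slippy-map tiles at
// `zoomlevel`, then each bucket is handed to a saver that is created lazily per tile.
function saveAllTiles(
    spreadIntoBboxes: (features: Feature[], zoomlevel: number) => Map<number, Feature[]>,
    features: Feature[],
    zoomlevel: number
): void {
    const singleTileSavers = new Map<number, TileSaverSketch>()
    const sliced = spreadIntoBboxes(features, zoomlevel)
    sliced.forEach((tileFeatures, tileIndex) => {
        let tileSaver = singleTileSavers.get(tileIndex)
        if (tileSaver === undefined) {
            tileSaver = new TileSaverSketch(tileIndex)
            singleTileSavers.set(tileIndex, tileSaver)
        }
        tileSaver.saveFeatures(tileFeatures)
    })
}
```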

@@ -84,10 +84,10 @@ export default class FavouritesFeatureSource extends StaticFeatureSource {
    private async updateFeature(
        feature: Feature,
        state: {
            theme: ThemeConfig,
            changes: Changes,
            featureProperties: FeaturePropertiesStore,
            osmObjectDownloader: OsmObjectDownloader,
            theme: ThemeConfig
            changes: Changes
            featureProperties: FeaturePropertiesStore
            osmObjectDownloader: OsmObjectDownloader
        }
    ) {
        const id = feature.properties.id


@@ -32,7 +32,6 @@ export default class ThemeSource implements IndexedFeatureSource {
    public readonly featuresById: Store<Map<string, Feature>>
    private readonly core: Store<ThemeSourceCore>


    private readonly addedSources: FeatureSource[] = []
    private readonly addedItems: OsmFeature[] = []

@@ -48,14 +47,23 @@ export default class ThemeSource implements IndexedFeatureSource {
        const isLoading = new UIEventSource(true)
        this.isLoading = isLoading

        const features = this.features = new UIEventSource<Feature[]>([])
        const featuresById = this.featuresById = new UIEventSource(new Map())
        this.core = mvtAvailableLayers.mapD(mvtAvailableLayers => {
            const core = new ThemeSourceCore(layers, featureSwitches, mapProperties, backend, isDisplayed, mvtAvailableLayers, isLoading, fullNodeDatabaseSource)
            this.addedSources.forEach(src => core.addSource(src))
            this.addedItems.forEach(item => core.addItem(item))
            core.features.addCallbackAndRun(data => features.set(data))
            core.featuresById.addCallbackAndRun(data => featuresById.set(data))
        const features = (this.features = new UIEventSource<Feature[]>([]))
        const featuresById = (this.featuresById = new UIEventSource(new Map()))
        this.core = mvtAvailableLayers.mapD((mvtAvailableLayers) => {
            const core = new ThemeSourceCore(
                layers,
                featureSwitches,
                mapProperties,
                backend,
                isDisplayed,
                mvtAvailableLayers,
                isLoading,
                fullNodeDatabaseSource
            )
            this.addedSources.forEach((src) => core.addSource(src))
            this.addedItems.forEach((item) => core.addItem(item))
            core.features.addCallbackAndRun((data) => features.set(data))
            core.featuresById.addCallbackAndRun((data) => featuresById.set(data))
            return core
        })
    }

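
In the refactor above, the `ThemeSourceCore` is now built inside `mvtAvailableLayers.mapD(...)`, so it may not exist yet when `addSource`/`addItem` are called; added sources and items are therefore recorded in `addedSources`/`addedItems` and replayed onto each newly created core. A self-contained sketch of that buffer-and-replay idea (illustrative names, not MapComplete's API):

```typescript
// Illustrative sketch of the "buffer and replay" pattern used above: calls made before the
// real backend exists are recorded, then replayed onto every backend created later.
interface Sink<T> {
    add(item: T): void
}

class LazySink<T> implements Sink<T> {
    private inner?: Sink<T>
    private readonly buffered: T[] = []

    add(item: T): void {
        // Forward immediately if a sink is attached; always keep the item so that a sink
        // attached (or re-created) later still receives the full history.
        this.inner?.add(item)
        this.buffered.push(item)
    }

    attach(inner: Sink<T>): void {
        // Replay everything that arrived before this sink became available.
        this.inner = inner
        this.buffered.forEach((item) => inner.add(item))
    }
}
```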

@@ -69,7 +77,6 @@ export default class ThemeSource implements IndexedFeatureSource {
        this.addedSources.push(source)
    }


    public addItem(obj: OsmFeature) {
        this.core.data?.addItem(obj)
        this.addedItems.push(obj)

@@ -82,7 +89,6 @@ export default class ThemeSource implements IndexedFeatureSource {
 * Note that special layers (with `source=null` will be ignored)
 */
class ThemeSourceCore extends FeatureSourceMerger {

    /**
     * This source is _only_ triggered when the data is downloaded for CSV export
     * @private

@@ -116,7 +122,7 @@ class ThemeSourceCore extends FeatureSourceMerger {
                mapProperties,
                {
                    isActive: isDisplayed(layer.id),
                    maxAge: layer.maxAgeOfCache
                    maxAge: layer.maxAgeOfCache,
                }
            )
            fromCache.set(layer.id, src)

@@ -169,11 +175,11 @@ class ThemeSourceCore extends FeatureSourceMerger {
                overpassUrl: featureSwitches.overpassUrl,
                overpassTimeout: featureSwitches.overpassTimeout,
                overpassMaxZoom: new ImmutableStore(99),
                widenFactor: 0
                widenFactor: 0,
            },
            {
                ignoreZoom: true,
                isActive: new ImmutableStore(false)
                isActive: new ImmutableStore(false),
            }
        )

@@ -247,7 +253,7 @@ class ThemeSourceCore extends FeatureSourceMerger {
            backend,
            isActive,
            patchRelations: true,
            fullNodeDatabase
            fullNodeDatabase,
        })
    }

@@ -279,11 +285,11 @@ class ThemeSourceCore extends FeatureSourceMerger {
                widenFactor: 1.5,
                overpassUrl: featureSwitches.overpassUrl,
                overpassTimeout: featureSwitches.overpassTimeout,
                overpassMaxZoom: featureSwitches.overpassMaxZoom
                overpassMaxZoom: featureSwitches.overpassMaxZoom,
            },
            {
                padToTiles: zoom.map((zoom) => Math.min(15, zoom + 1)),
                isActive
                isActive,
            }
        )
    }


@@ -6,7 +6,7 @@ import { BBox } from "../../BBox"
import LayerConfig from "../../../Models/ThemeConfig/LayerConfig"

export default class DynamicGeoJsonTileSource extends UpdatableDynamicTileSource {
    private static whitelistCache = new Map<string, any>()
    private static whitelistCache = new Map<string, Map<number, Set<number>>>()

    constructor(
        layer: LayerConfig,

@@ -27,7 +27,7 @@ export default class DynamicGeoJsonTileSource extends UpdatableDynamicTileSource
        }
        console.log("Creating a dynamic geojson source for", layer.source.geojsonSource)

        let whitelist = undefined
        let whitelist: Map<number, Set<number>> = undefined
        if (source.geojsonSource.indexOf("{x}_{y}.geojson") > 0) {
            const whitelistUrl = source.geojsonSource
                .replace("{z}", "" + source.geojsonZoomLevel)

@@ -37,8 +37,8 @@ export default class DynamicGeoJsonTileSource extends UpdatableDynamicTileSource
            if (DynamicGeoJsonTileSource.whitelistCache.has(whitelistUrl)) {
                whitelist = DynamicGeoJsonTileSource.whitelistCache.get(whitelistUrl)
            } else {
                Utils.downloadJsonCached(whitelistUrl, 1000 * 60 * 60)
                    .then((json) => {
                Utils.downloadJsonCached<Record<string | number, number[]>>(whitelistUrl, 1000 * 60 * 60)
                    .then(json => {
                        const data = new Map<number, Set<number>>()
                        for (const x in json) {
                            if (x === "zoom") {
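
Judging by the `Record<string | number, number[]>` type and the `"zoom"` key that the loop appears to skip, the whitelist JSON maps tile x-coordinates to lists of y-coordinates. A small sketch of that parsing step, under that assumption:

```typescript
// Assumed whitelist format: { "zoom": 14, "8754": [5432, 5433], ... },
// i.e. x-coordinate -> list of y-coordinates, plus a "zoom" entry that is metadata only.
function parseWhitelist(json: Record<string | number, number[]>): Map<number, Set<number>> {
    const data = new Map<number, Set<number>>()
    for (const x in json) {
        if (x === "zoom") {
            // Not a tile column, just the zoom level the listing was generated for
            continue
        }
        data.set(Number(x), new Set(json[x]))
    }
    return data
}
```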

@@ -1,6 +1,6 @@
import { BBox } from "./BBox"
import * as turf from "@turf/turf"
import { AllGeoJSON, booleanWithin, Coord } from "@turf/turf"
import { AllGeoJSON, booleanWithin, Coord, Polygon } from "@turf/turf"
import {
    Feature,
    FeatureCollection,

@@ -9,13 +9,13 @@ import {
    MultiLineString,
    MultiPolygon,
    Point,
    Polygon,
    Position,
    Position
} from "geojson"
import { Tiles } from "../Models/TileRange"
import { Utils } from "../Utils"
import { NearestPointOnLine } from "@turf/nearest-point-on-line"
;("use strict")

("use strict")

export class GeoOperations {
    private static readonly _earthRadius = 6378137

@@ -29,7 +29,7 @@ export class GeoOperations {
        "behind",
        "sharp_left",
        "left",
        "slight_left",
        "slight_left"
    ] as const
    private static reverseBearing = {
        N: 0,

@@ -47,7 +47,7 @@ export class GeoOperations {
        W: 270,
        WNW: 292.5,
        NW: 315,
        NNW: 337.5,
        NNW: 337.5
    }

    /**

@@ -61,8 +61,8 @@ export class GeoOperations {
    }

    public static intersect(
        f0: Feature<Polygon | MultiPolygon>,
        f1: Feature<Polygon | MultiPolygon>
        f0: Readonly<Feature<Polygon | MultiPolygon>>,
        f1: Readonly<Feature<Polygon | MultiPolygon>>
    ): Feature<Polygon | MultiPolygon> | null {
        return turf.intersect(f0, f1)
    }

@@ -309,7 +309,7 @@ export class GeoOperations {
        bufferSizeInMeter: number
    ): Feature<Polygon | MultiPolygon> | FeatureCollection<Polygon | MultiPolygon> {
        return turf.buffer(feature, bufferSizeInMeter / 1000, {
            units: "kilometers",
            units: "kilometers"
        })
    }

@@ -325,9 +325,9 @@ export class GeoOperations {
                    [lon0, lat],
                    [lon0, lat0],
                    [lon, lat0],
                    [lon, lat],
                ],
            },
                    [lon, lat]
                ]
            }
        }
    }

@@ -368,9 +368,9 @@ export class GeoOperations {
                type: "Feature",
                geometry: {
                    type: "LineString",
                    coordinates: way.geometry.coordinates[0],
                    coordinates: way.geometry.coordinates[0]
                },
                properties: way.properties,
                properties: way.properties
            }
        }
        if (way.geometry.type === "MultiPolygon") {

@@ -378,9 +378,9 @@ export class GeoOperations {
                type: "Feature",
                geometry: {
                    type: "MultiLineString",
                    coordinates: way.geometry.coordinates[0],
                    coordinates: way.geometry.coordinates[0]
                },
                properties: way.properties,
                properties: way.properties
            }
        }
        if (way.geometry.type === "LineString") {

@@ -512,6 +512,8 @@ export class GeoOperations {
    /**
     * Given a list of features, will construct a map of slippy map tile-indices.
     * Features of which the BBOX overlaps with the corresponding slippy map tile are added to the corresponding array
     *
     * Also @see clipAllInBox
     * @param features
     * @param zoomlevel
     */

@@ -535,6 +537,33 @@ export class GeoOperations {
        return perBbox
    }

    /**
     * Given a list of features, returns a new list of features so that the features are clipped into the given tile-index.
     * Note: IDs are rewritten
     * Also @see spreadIntoBBoxes
     */
    public static clipAllInBox(features: ReadonlyArray<Readonly<Feature>>, tileIndex: number): Feature[] {
        const bbox = Tiles.asGeojson(tileIndex)
        const newFeatures: Feature[] = []
        for (const f of features) {
            const intersectionParts = GeoOperations.clipWith(f, bbox)
            for (let i = 0; i < intersectionParts.length; i++) {
                const intersectionPart = intersectionParts[i]
                let id = (f.properties?.id ?? "") + "_" + tileIndex
                if (i > 0) {
                    id += "_part_" + i
                }
                const properties = {
                    ...f.properties,
                    id
                }
                intersectionPart.properties = properties
                newFeatures.push(intersectionPart)
            }
        }
        return Utils.NoNull(newFeatures)
    }

    public static toGpx(
        locations:
            | Feature<LineString>
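
A hedged usage sketch combining the two helpers referenced above: `spreadIntoBboxes` buckets features per slippy-map tile by bounding-box overlap, and `clipAllInBox` then cuts each bucket down to its tile boundary. The import path and the return type of `spreadIntoBboxes` are assumptions based on how the other hunks in this commit use it:

```typescript
import { Feature } from "geojson"
import { GeoOperations } from "./GeoOperations"

// spreadIntoBboxes is assumed to return Map<tileIndex, Feature[]>, matching how
// SaveFeatureSourceToLocalStorage iterates the result earlier in this diff.
function tileAndClip(features: Feature[], zoomlevel: number): Map<number, Feature[]> {
    const perTile = GeoOperations.spreadIntoBboxes(features, zoomlevel)
    const clipped = new Map<number, Feature[]>()
    perTile.forEach((tileFeatures, tileIndex) => {
        // clipAllInBox rewrites IDs with a "_<tileIndex>" (and possibly "_part_<i>") suffix
        clipped.set(tileIndex, GeoOperations.clipAllInBox(tileFeatures, tileIndex))
    })
    return clipped
}
```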

@@ -558,8 +587,8 @@ export class GeoOperations {
                properties: {},
                geometry: {
                    type: "Point",
                    coordinates: p,
                },
                    coordinates: p
                }
            }
        )
    }

@@ -575,7 +604,7 @@ export class GeoOperations {
            trackPoints.push(trkpt)
        }
        const header =
            '<gpx version="1.1" creator="mapcomplete.org" xmlns="http://www.topografix.com/GPX/1/1" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.topografix.com/GPX/1/1 http://www.topografix.com/GPX/1/1/gpx.xsd">'
            "<gpx version=\"1.1\" creator=\"mapcomplete.org\" xmlns=\"http://www.topografix.com/GPX/1/1\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://www.topografix.com/GPX/1/1 http://www.topografix.com/GPX/1/1/gpx.xsd\">"
        return (
            header +
            "\n<name>" +

@@ -614,7 +643,7 @@ export class GeoOperations {
            trackPoints.push(trkpt)
        }
        const header =
            '<gpx version="1.1" creator="mapcomplete.org" xmlns="http://www.topografix.com/GPX/1/1" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.topografix.com/GPX/1/1 http://www.topografix.com/GPX/1/1/gpx.xsd">'
            "<gpx version=\"1.1\" creator=\"mapcomplete.org\" xmlns=\"http://www.topografix.com/GPX/1/1\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://www.topografix.com/GPX/1/1 http://www.topografix.com/GPX/1/1/gpx.xsd\">"
        return (
            header +
            "\n<name>" +

@@ -640,7 +669,7 @@ export class GeoOperations {

        const copy = {
            ...feature,
            geometry: { ...feature.geometry },
            geometry: { ...feature.geometry }
        }
        let coordinates: [number, number][]
        if (feature.geometry.type === "LineString") {

@@ -698,8 +727,8 @@ export class GeoOperations {
                type: "Feature",
                geometry: {
                    type: "LineString",
                    coordinates: [a, b],
                },
                    coordinates: [a, b]
                }
            },
            distanceMeter,
            { units: "meters" }

@@ -736,17 +765,26 @@ export class GeoOperations {
     * GeoOperations.completelyWithin(park, pond) // => false
     */
    static completelyWithin(
        feature: Feature,
        possiblyEnclosingFeature: Feature<Polygon | MultiPolygon>
        feature: Readonly<Feature>,
        possiblyEnclosingFeature: Readonly<Feature<Polygon | MultiPolygon>>
    ): boolean {
        if (feature.geometry.type === "MultiPolygon") {
            const polygons = feature.geometry.coordinates.map(coordinates =>
                <Feature<Polygon>>{
                    type: "Feature", geometry: {
                        type: "Polygon", coordinates
                    }
                })
            return !polygons.some(polygon => !booleanWithin(polygon, possiblyEnclosingFeature))
        }
        return booleanWithin(feature, possiblyEnclosingFeature)
    }

    /**
     * Create an intersection between two features.
     * One or multiple new feature is returned based on 'toSplit', which'll have a geometry that is completely withing boundary
     * One or multiple new feature are returned based on 'toSplit', which'll have a geometry that is completely withing boundary
     */
    public static clipWith(toSplit: Feature, boundary: Feature<Polygon>): Feature[] {
    public static clipWith(toSplit: Readonly<Feature>, boundary: Readonly<Feature<Polygon>>): Feature[] {
        if (toSplit.geometry.type === "Point") {
            const p = <Feature<Point>>toSplit
            if (GeoOperations.inside(<[number, number]>p.geometry.coordinates, boundary)) {
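
The new MultiPolygon branch decomposes the multipolygon into individual `Polygon` features and only reports `true` when every part is within the enclosing feature, presumably because turf's `booleanWithin` does not accept a MultiPolygon as its first argument. The double negation is just De Morgan's law; an equivalent standalone formulation would be:

```typescript
import { booleanWithin } from "@turf/turf"
import { Feature, MultiPolygon, Polygon } from "geojson"

// Equivalent reformulation of the branch above: a MultiPolygon is completely within
// `enclosing` iff every member polygon is.
function multiPolygonCompletelyWithin(
    multi: Feature<MultiPolygon>,
    enclosing: Feature<Polygon | MultiPolygon>
): boolean {
    const polygons = multi.geometry.coordinates.map(
        (coordinates): Feature<Polygon> => ({
            type: "Feature",
            properties: {},
            geometry: { type: "Polygon", coordinates },
        })
    )
    return polygons.every((polygon) => booleanWithin(polygon, enclosing))
}
```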

@@ -757,9 +795,9 @@ export class GeoOperations {
        }

        if (toSplit.geometry.type === "LineString") {
            const splitup = turf.lineSplit(<Feature<LineString>>toSplit, boundary)
            const kept = []
            for (const f of splitup.features) {
            const splitup: Feature<LineString>[] = turf.lineSplit(<Feature<LineString>>toSplit, boundary).features
            const kept: Feature[] = []
            for (const f of splitup) {
                if (!GeoOperations.inside(GeoOperations.centerpointCoordinates(f), boundary)) {
                    continue
                }

@@ -787,7 +825,24 @@ export class GeoOperations {
            return kept
        }
        if (toSplit.geometry.type === "Polygon" || toSplit.geometry.type == "MultiPolygon") {

            const splitup = turf.intersect(<Feature<Polygon>>toSplit, boundary)
            if (splitup === null) {
                // No intersection found.
                // Either: the boundary is contained fully in 'toSplit', 'toSplit' is contained fully in 'boundary' or they are unrelated at all
                if (GeoOperations.completelyWithin(toSplit, boundary)) {
                    return [toSplit]
                }
                if (GeoOperations.completelyWithin(boundary, <Feature<Polygon | MultiPolygon>>toSplit)) {
                    return [{
                        type: "Feature",
                        properties: { ...toSplit.properties },
                        geometry: boundary.geometry,
                        bbox: boundary.bbox
                    }]
                }
                return []
            }
            splitup.properties = { ...toSplit.properties }
            return [splitup]
        }
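
A hedged illustration of the polygon branch above: clipping a large polygon with a small boundary that lies fully inside it should yield one feature covering the boundary area while carrying a copy of the large polygon's properties, either via `turf.intersect` or, when that returns `null`, via the new `completelyWithin` fallback. The import path is assumed:

```typescript
import { Feature, Polygon } from "geojson"
import { GeoOperations } from "./GeoOperations"

// Helper to build a simple square polygon for the illustration.
const square = (size: number, properties = {}): Feature<Polygon> => ({
    type: "Feature",
    properties,
    geometry: {
        type: "Polygon",
        coordinates: [[[0, 0], [size, 0], [size, size], [0, size], [0, 0]]],
    },
})

const largeArea = square(10, { landuse: "forest" }) // hypothetical feature
const smallBoundary = square(1) // e.g. a tile boundary fully inside the large area

const parts = GeoOperations.clipWith(largeArea, smallBoundary)
// Expected: one feature covering the small boundary, with { landuse: "forest" } as properties.
console.log(parts.length, parts[0]?.properties)
```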

@@ -864,32 +919,6 @@ export class GeoOperations {
        }
    }

    /**
     * Constructs all tiles where features overlap with and puts those features in them.
     * Long features (e.g. lines or polygons) which overlap with multiple tiles are referenced in each tile they overlap with
     * @param zoomlevel
     * @param features
     */
    public static slice(zoomlevel: number, features: Feature[]): Map<number, Feature[]> {
        const tiles = new Map<number, Feature[]>()

        for (const feature of features) {
            const bbox = BBox.get(feature)
            Tiles.MapRange(Tiles.tileRangeFrom(bbox, zoomlevel), (x, y) => {
                const i = Tiles.tile_index(zoomlevel, x, y)

                let tiledata = tiles.get(i)
                if (tiledata === undefined) {
                    tiledata = []
                    tiles.set(i, tiledata)
                }
                tiledata.push(feature)
            })
        }

        return tiles
    }

    /**
     * Creates a linestring object based on the outer ring of the given polygon
     *

@@ -905,8 +934,8 @@ export class GeoOperations {
            properties: p.properties,
            geometry: {
                type: "LineString",
                coordinates: p.geometry.coordinates[0],
            },
                coordinates: p.geometry.coordinates[0]
            }
        }
    }

@@ -934,7 +963,7 @@ export class GeoOperations {
                console.debug("SPlitting way", feature.properties.id)
                result.push({
                    ...feature,
                    geometry: { ...feature.geometry, coordinates: coors.slice(i + 1) },
                    geometry: { ...feature.geometry, coordinates: coors.slice(i + 1) }
                })
                coors = coors.slice(0, i + 1)
                break

@@ -943,7 +972,7 @@ export class GeoOperations {
            }
            result.push({
                ...feature,
                geometry: { ...feature.geometry, coordinates: coors },
                geometry: { ...feature.geometry, coordinates: coors }
            })
        }
    }

@@ -1117,8 +1146,8 @@ export class GeoOperations {
                properties: multiLineStringFeature.properties,
                geometry: {
                    type: "LineString",
                    coordinates: coors[0],
                },
                    coordinates: coors[0]
                }
            }
        }
        return {

@@ -1126,8 +1155,8 @@ export class GeoOperations {
            properties: multiLineStringFeature.properties,
            geometry: {
                type: "MultiLineString",
                coordinates: coors,
            },
                coordinates: coors
            }
        }
    }


@@ -22,29 +22,29 @@ export default class AllImageProviders {
        ...WikimediaImageProvider.commonsPrefixes,
        ...Mapillary.valuePrefixes,
        ...AllImageProviders.dontLoadFromPrefixes,
        "Category:"
        "Category:",
    ])

    private static ImageAttributionSource: ImageProvider[] = [
    private static imageAttributionSources: ImageProvider[] = [
        Imgur.singleton,
        Mapillary.singleton,
        WikidataImageProvider.singleton,
        WikimediaImageProvider.singleton,
        Panoramax.singleton,
        AllImageProviders.genericImageProvider
        AllImageProviders.genericImageProvider,
    ]
    public static apiUrls: string[] = [].concat(
        ...AllImageProviders.ImageAttributionSource.map((src) => src.apiUrls())
        ...AllImageProviders.imageAttributionSources.map((src) => src.apiUrls())
    )
    public static defaultKeys = [].concat(
        AllImageProviders.ImageAttributionSource.map((provider) => provider.defaultKeyPrefixes)
        AllImageProviders.imageAttributionSources.map((provider) => provider.defaultKeyPrefixes)
    )
    private static providersByName = {
        imgur: Imgur.singleton,
        mapillary: Mapillary.singleton,
        wikidata: WikidataImageProvider.singleton,
        wikimedia: WikimediaImageProvider.singleton,
        panoramax: Panoramax.singleton
        panoramax: Panoramax.singleton,
    }

    public static byName(name: string) {

@@ -52,7 +52,7 @@ export default class AllImageProviders {
    }

    public static async selectBestProvider(key: string, value: string): Promise<ImageProvider> {
        for (const imageProvider of AllImageProviders.ImageAttributionSource) {
        for (const imageProvider of AllImageProviders.imageAttributionSources) {
            try {
                const extracted = await Promise.all(await imageProvider.ExtractUrls(key, value))
                if (extracted?.length > 0) {

@@ -73,12 +73,21 @@ export default class AllImageProviders {
     * Will simply count all image tags
     *
     * AllImageProviders.estimateNumberOfImages({image:"abc", "mapillary": "123", "panoramax:0": "xyz"}) // => 3
     * AllImageProviders.estimateNumberOfImages({wikidata:"Q123", "wikipedia": "nl:xyz"}) // => 0
     *
     *
     */
    public static estimateNumberOfImages(tags: Record<string, string>, prefixes: string[] = undefined): number {
    public static estimateNumberOfImages(
        tags: Record<string, string>,
        prefixes: string[] = undefined
    ): number {
        let count = 0

        const allPrefixes = Utils.Dedup(prefixes ?? [].concat(...AllImageProviders.ImageAttributionSource.map(s => s.defaultKeyPrefixes)))
        const sources = [Imgur.singleton,
            Mapillary.singleton,
            Panoramax.singleton,
            AllImageProviders.genericImageProvider]
        const allPrefixes = Utils.Dedup(prefixes ?? [].concat(...sources.map(s => s.defaultKeyPrefixes)))
        for (const prefix of allPrefixes) {
            for (const k in tags) {
                if (k === prefix || k.startsWith(prefix + ":")) {
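
The counting rule behind those doctests treats a key as an image tag when it equals a known prefix or starts with that prefix followed by a colon. A standalone sketch of just that rule, with a hard-coded illustrative prefix list (the real code derives the prefixes from the providers' `defaultKeyPrefixes`):

```typescript
// A key counts as an image tag when it equals a known prefix or starts with "<prefix>:".
// The prefix list here is illustrative only.
function countImageTags(
    tags: Record<string, string>,
    prefixes = ["image", "mapillary", "panoramax"]
): number {
    let count = 0
    for (const prefix of prefixes) {
        for (const k in tags) {
            if (k === prefix || k.startsWith(prefix + ":")) {
                count++
            }
        }
    }
    return count
}

console.log(countImageTags({ image: "abc", mapillary: "123", "panoramax:0": "xyz" })) // 3
console.log(countImageTags({ wikidata: "Q123", wikipedia: "nl:xyz" })) // 0
```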

@@ -108,7 +117,7 @@ export default class AllImageProviders {

        const source = new UIEventSource([])
        const allSources: Store<ProvidedImage[]>[] = []
        for (const imageProvider of AllImageProviders.ImageAttributionSource) {
        for (const imageProvider of AllImageProviders.imageAttributionSources) {
            /*
            By default, 'GetRelevantUrls' uses the defaultKeyPrefixes.
            However, we override them if a custom image tag is set, e.g. 'image:menu'

@@ -131,7 +140,7 @@ export default class AllImageProviders {
     */
    public static loadImagesFrom(urls: string[]): Store<ProvidedImage[]> {
        const tags = {
            id: urls.join(";")
            id: urls.join(";"),
        }
        for (let i = 0; i < urls.length; i++) {
            tags["image:" + i] = urls[i]

@@ -138,7 +138,12 @@ export class WikimediaImageProvider extends ImageProvider {
            query: { pages: { title: string; imageinfo: { extmetadata }[] }[] }
        }>(url, 365 * 24 * 60 * 60)
        const licenseInfo = new LicenseInfo()
        const pageInfo = data.query.pages.at(-1)
        const pages = data.query.pages
        /*jup, a literal "-1" in an object, not a list!*/
        let pageInfo = pages["-1"]
        if (Array.isArray(pages)) {
            pageInfo = pages.at(-1)
        }
        if (pageInfo === undefined) {
            return undefined
        }
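
The change above handles `query.pages` arriving either as an array or, as the diff's own comment notes, as an object with a literal `"-1"` key. A small hedged helper that normalises both shapes into one page object (the `WikiPage` type is a simplified assumption, not the provider's real type):

```typescript
// With the array shape the last entry is used (mirroring `.at(-1)` in the diff);
// with the object shape the literal "-1" key is read.
type WikiPage = { title: string; imageinfo: { extmetadata: Record<string, unknown> }[] }

function pickPageInfo(pages: WikiPage[] | Record<string, WikiPage>): WikiPage | undefined {
    return Array.isArray(pages) ? pages.at(-1) : pages["-1"]
}
```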

@@ -638,13 +638,14 @@ export class OsmConnection {
            return
        }
        this.fetchCapabilities().then(({ api, gpx }) => {
            this.apiIsOnline.setData(api)
            this.gpxServiceIsOnline.setData(gpx)
        }).catch(err => {
            console.log("Could not reach the api:", err)
            this.apiIsOnline.set("unreachable")
            this.gpxServiceIsOnline.set("unreachable")
        })
                this.apiIsOnline.setData(api)
                this.gpxServiceIsOnline.setData(gpx)
            })
            .catch((err) => {
                console.log("Could not reach the api:", err)
                this.apiIsOnline.set("unreachable")
                this.gpxServiceIsOnline.set("unreachable")
            })
    }

    private readonly _userInfoCache: Record<number, OsmUserInfo> = {}

@@ -210,7 +210,7 @@ export class OsmPreferences {
     * @private
     */
    private async getPreferencesDictDirectly(): Promise<Record<string, string>> {
        if(!this.osmConnection.isLoggedIn.data){
        if (!this.osmConnection.isLoggedIn.data) {
            return {}
        }
        return new Promise<Record<string, string>>((resolve, reject) => {

@@ -260,7 +260,7 @@ export class OsmPreferences {
     *
     */
    private async uploadKvSplit(k: string, v: string) {
        if(!this.osmConnection.isLoggedIn.data){
        if (!this.osmConnection.isLoggedIn.data) {
            return
        }
        if (v === null || v === undefined || v === "" || v === "undefined" || v === "null") {

@@ -15,7 +15,6 @@ export default class OpenStreetMapIdSearch implements GeocodingProvider {
    }
    private readonly _osmObjectDownloader: OsmObjectDownloader


    constructor(osmObjectDownloader: OsmObjectDownloader) {
        this._osmObjectDownloader = osmObjectDownloader
    }

@@ -41,7 +41,7 @@ export default class SearchState {
            new OpenLocationCodeSearch(),
            new OpenStreetMapIdSearch(state.osmObjectDownloader),
            new PhotonSearch(true, 2),
            new PhotonSearch()
            new PhotonSearch(),
            // new NominatimGeocoding(),
        ]


@@ -414,7 +414,7 @@ export default class UserRelatedState {
                typeof window === "undefined" ? "no" : window.navigator.share ? "yes" : "no",
            _iframe: Utils.isIframe ? "yes" : "no",
        })
        if(!Utils.runningFromConsole){
        if (!Utils.runningFromConsole) {
            amendedPrefs.data["_host"] = window.location.host
            amendedPrefs.data["_path"] = window.location.pathname
            amendedPrefs.data["_userAgent"] = navigator.userAgent

@@ -492,7 +492,7 @@ export default class UserRelatedState {
        })

        const usersettingMetaTagging = new ThemeMetaTagging()
        osmConnection.isLoggedIn.addCallbackAndRun(loggedIn => {
        osmConnection.isLoggedIn.addCallbackAndRun((loggedIn) => {
            amendedPrefs.data["_loggedIn"] = "" + loggedIn
            amendedPrefs.ping()
        })


src/Logic/Web/CommunityIndex.ts (new file, 33 lines)

@@ -0,0 +1,33 @@
/**
 * Various tools and types to work with the community index (https://openstreetmap.community/; https://github.com/osmlab/osm-community-index)
 */


export interface CommunityResource {
    /**
     * A unique identifier for the resource
     * "pattern": "^[-_.A-Za-z0-9]+$"
     */
    id: string,
    /**
     * Type of community resource (thus: platform)
     */
    type: string,
    /**
     * included and excluded locations for this item
     * See location-conflation documentation for compatible values: https://github.com/rapideditor/location-conflation#readme
     */
    locationSet?,

    /** Array of ISO-639-1 (2 letter) or ISO-639-3 (3 letter) codes in lowercase
     * */
    languageCodes?: string[]
    /**
     * Resource account string, required for some resource types
     */
    account?: string

    resolved?: { url: string, name: string, description: string } & Record<string, string>

}
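
A hedged example of a value matching this new interface, loosely modelled on osm-community-index entries; all concrete values below are made up for illustration, and the import path is assumed:

```typescript
import { CommunityResource } from "./CommunityIndex"

// Illustrative only: the values are invented to show the shape, not real index entries.
const exampleResource: CommunityResource = {
    id: "osm-be-mastodon",
    type: "mastodon",
    locationSet: { include: ["be"] },
    languageCodes: ["nl", "fr", "en"],
    account: "openstreetmap_be",
    resolved: {
        url: "https://example.social/@openstreetmap_be",
        name: "OpenStreetMap Belgium",
        description: "Community account for OpenStreetMap Belgium",
    },
}
```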

@@ -51,7 +51,7 @@ export interface NSIItem {
    }
    readonly tags: Readonly<Record<string, string>>
    fromTemplate?: boolean
    ext? : string
    ext?: string
}

export default class NameSuggestionIndex {

@@ -214,9 +214,7 @@ export default class NameSuggestionIndex {
        for (const nsiItem of actualBrands) {
            const tags = nsiItem.tags
            const frequency = frequencies[nsiItem.displayName]
            const iconUrl = this.getIconExternalUrl(nsiItem, type)
            const hasIcon = iconUrl !== undefined
            const icon = hasIcon ? this.getIconUrl(nsiItem, type) : undefined
            const icon = this.getIconUrl(nsiItem)
            mappings.push({
                if: new Tag(type, tags[type]),
                addExtraTags: Object.keys(tags)

@@ -274,7 +272,7 @@ export default class NameSuggestionIndex {
            const values = tags[osmKey]
            for (const osmValue of values) {
                const suggestions = this.getSuggestionsForKV(type, osmKey, osmValue)
                if(!suggestions){
                if (!suggestions) {
                    console.warn("No suggestions found for", type, osmKey, osmValue)
                    continue
                }

@@ -399,9 +397,14 @@ export default class NameSuggestionIndex {
        return logos?.facebook ?? logos?.wikidata
    }

    public getIconUrl(nsiItem: NSIItem, type: string) {
    public getIconUrl(nsiItem: NSIItem): string | undefined {
        if (!nsiItem.ext) {
            // No extension -> there is no logo
            return undefined
        }
        return "./assets/data/nsi/logos/" + nsiItem.id + "." + nsiItem.ext
    }

    private static readonly brandPrefix = ["name", "alt_name", "operator", "brand"] as const

    /**

@@ -7,8 +7,8 @@ import { Store, UIEventSource } from "../UIEventSource"

export default class ThemeViewStateHashActor {
    private readonly _state: {
        indexedFeatures: IndexedFeatureSource,
        selectedElement: UIEventSource<Feature>,
        indexedFeatures: IndexedFeatureSource
        selectedElement: UIEventSource<Feature>
        guistate: MenuState
    }
    private isUpdatingHash = false

@@ -23,7 +23,7 @@ export default class ThemeViewStateHashActor {
        "",
        "The possible hashes are:",
        "",
        MenuState.pageNames.map((tab) => "`" + tab + "`").join(",")
        MenuState.pageNames.map((tab) => "`" + tab + "`").join(","),
    ]

    /**