forked from MapComplete/MapComplete
Merge master
commit 1d87151228
185 changed files with 4687 additions and 2421 deletions
@@ -49,7 +49,7 @@ export default class OverpassFeatureSource implements UpdatableFeatureSource {
        },
        options?: {
            padToTiles?: Store<number>
-           isActive?: Store<boolean>,
+           isActive?: Store<boolean>
+           ignoreZoom?: boolean
        }
    ) {

@@ -57,7 +57,9 @@ export default class OverpassFeatureSource implements UpdatableFeatureSource {
        this._isActive = options?.isActive ?? new ImmutableStore(true)
        this.padToZoomLevel = options?.padToTiles
        const self = this
-       this._layersToDownload = options?.ignoreZoom? new ImmutableStore(state.layers) : state.zoom.map((zoom) => this.layersToDownload(zoom))
+       this._layersToDownload = options?.ignoreZoom
+           ? new ImmutableStore(state.layers)
+           : state.zoom.map((zoom) => this.layersToDownload(zoom))

        state.bounds.mapD(
            (_) => {

@@ -123,9 +125,11 @@ export default class OverpassFeatureSource implements UpdatableFeatureSource {
        let bounds: BBox
        do {
            try {
-               bounds = overrideBounds ?? this.state.bounds.data
-                   ?.pad(this.state.widenFactor)
-                   ?.expandToTileBounds(this.padToZoomLevel?.data)
+               bounds =
+                   overrideBounds ??
+                   this.state.bounds.data
+                       ?.pad(this.state.widenFactor)
+                       ?.expandToTileBounds(this.padToZoomLevel?.data)
                if (!bounds) {
                    return
                }

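The new `ignoreZoom` option makes this source download every configured layer regardless of the current zoom, instead of deriving the layer list from `state.zoom`. A minimal sketch of the new second constructor argument, with illustrative values (the actual call site is the `downloadAll` source in the ThemeSource hunk further down):

    // Sketch only: `layers`, `bounds`, `zoom`, `overpassUrl`, `overpassTimeout` stand in for the real state values
    const fullDownload = new OverpassFeatureSource(
        { layers, bounds, zoom, overpassUrl, overpassTimeout, overpassMaxZoom: new ImmutableStore(99), widenFactor: 0 },
        { ignoreZoom: true } // fetch all layers, not just those visible at the current zoom
    )
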
@@ -42,7 +42,7 @@ export default class ThemeSource extends FeatureSourceMerger {
        backend: string,
        isDisplayed: (id: string) => Store<boolean>,
        mvtAvailableLayers: Set<string>,
-       fullNodeDatabaseSource?: FullNodeDatabaseSource,
+       fullNodeDatabaseSource?: FullNodeDatabaseSource
    ) {
        const supportsForceDownload: UpdatableFeatureSource[] = []

@@ -63,7 +63,7 @@ export default class ThemeSource extends FeatureSourceMerger {
                {
                    isActive: isDisplayed(layer.id),
                    maxAge: layer.maxAgeOfCache,
-               },
+               }
            )
            fromCache.set(layer.id, src)
        }

@@ -82,7 +82,7 @@ export default class ThemeSource extends FeatureSourceMerger {
                zoom,
                backend,
                featureSwitches,
-               fullNodeDatabaseSource,
+               fullNodeDatabaseSource
            )
            nonMvtSources.push(osmApiSource)

@@ -91,14 +91,13 @@ export default class ThemeSource extends FeatureSourceMerger {
            console.log(
                "Layers ",
                nonMvtLayers.map((l) => l.id),
-               " cannot be fetched from the cache server, defaulting to overpass/OSM-api",
+               " cannot be fetched from the cache server, defaulting to overpass/OSM-api"
            )
            overpassSource = ThemeSource.setupOverpass(osmLayers, bounds, zoom, featureSwitches)
            nonMvtSources.push(overpassSource)
            supportsForceDownload.push(overpassSource)
        }

        function setIsLoading() {
            const loading = overpassSource?.runningQuery?.data || osmApiSource?.isRunning?.data
            isLoading.setData(loading)

@@ -108,40 +107,47 @@ export default class ThemeSource extends FeatureSourceMerger {
        osmApiSource?.isRunning?.addCallbackAndRun(() => setIsLoading())

        const geojsonSources: UpdatableFeatureSource[] = geojsonlayers.map((l) =>
-           ThemeSource.setupGeojsonSource(l, mapProperties, isDisplayed(l.id)),
+           ThemeSource.setupGeojsonSource(l, mapProperties, isDisplayed(l.id))
        )

        const downloadAllBounds: UIEventSource<BBox> = new UIEventSource<BBox>(undefined)
-       const downloadAll= new OverpassFeatureSource({
-           layers: layers.filter(l => l.isNormal()),
-           bounds: mapProperties.bounds,
-           zoom: mapProperties.zoom,
-           overpassUrl: featureSwitches.overpassUrl,
-           overpassTimeout: featureSwitches.overpassTimeout,
-           overpassMaxZoom: new ImmutableStore(99),
-           widenFactor: 0,
-       },{
-           ignoreZoom: true
-       })
+       const downloadAll = new OverpassFeatureSource(
+           {
+               layers: layers.filter((l) => l.isNormal()),
+               bounds: mapProperties.bounds,
+               zoom: mapProperties.zoom,
+               overpassUrl: featureSwitches.overpassUrl,
+               overpassTimeout: featureSwitches.overpassTimeout,
+               overpassMaxZoom: new ImmutableStore(99),
+               widenFactor: 0,
+           },
+           {
+               ignoreZoom: true,
+           }
+       )

-       super(...geojsonSources, ...Array.from(fromCache.values()), ...mvtSources, ...nonMvtSources, downloadAll)
+       super(
+           ...geojsonSources,
+           ...Array.from(fromCache.values()),
+           ...mvtSources,
+           ...nonMvtSources,
+           downloadAll
+       )

        this.isLoading = isLoading
        supportsForceDownload.push(...geojsonSources)
        supportsForceDownload.push(...mvtSources) // Non-mvt sources are handled by overpass

        this._mapBounds = mapProperties.bounds
        this._downloadAll = downloadAll

        this.supportsForceDownload = supportsForceDownload

    }

    private static setupMvtSource(
        layer: LayerConfig,
        mapProperties: { zoom: Store<number>; bounds: Store<BBox> },
-       isActive?: Store<boolean>,
+       isActive?: Store<boolean>
    ): UpdatableFeatureSource {
        return new DynamicMvtileSource(layer, mapProperties, { isActive })
    }

@@ -149,12 +155,12 @@ export default class ThemeSource extends FeatureSourceMerger {
    private static setupGeojsonSource(
        layer: LayerConfig,
        mapProperties: { zoom: Store<number>; bounds: Store<BBox> },
-       isActiveByFilter?: Store<boolean>,
+       isActiveByFilter?: Store<boolean>
    ): UpdatableFeatureSource {
        const source = layer.source
        const isActive = mapProperties.zoom.map(
            (z) => (isActiveByFilter?.data ?? true) && z >= layer.minzoom,
-           [isActiveByFilter],
+           [isActiveByFilter]
        )
        if (source.geojsonZoomLevel === undefined) {
            // This is a 'load everything at once' geojson layer

@@ -170,7 +176,7 @@ export default class ThemeSource extends FeatureSourceMerger {
        zoom: Store<number>,
        backend: string,
        featureSwitches: FeatureSwitchState,
-       fullNodeDatabase: FullNodeDatabaseSource,
+       fullNodeDatabase: FullNodeDatabaseSource
    ): OsmFeatureSource | undefined {
        if (osmLayers.length == 0) {
            return undefined

@@ -204,7 +210,7 @@ export default class ThemeSource extends FeatureSourceMerger {
        osmLayers: LayerConfig[],
        bounds: Store<BBox>,
        zoom: Store<number>,
-       featureSwitches: FeatureSwitchState,
+       featureSwitches: FeatureSwitchState
    ): OverpassFeatureSource | undefined {
        if (osmLayers.length == 0) {
            return undefined

@@ -233,14 +239,14 @@ export default class ThemeSource extends FeatureSourceMerger {
            {
                padToTiles: zoom.map((zoom) => Math.min(15, zoom + 1)),
                isActive,
-           },
+           }
        )
    }

    public async downloadAll() {
        console.log("Downloading all data:")
        await this._downloadAll.updateAsync(this._mapBounds.data)
-       // await Promise.all(this.supportsForceDownload.map((i) => i.updateAsync()))
+       // await Promise.all(this.supportsForceDownload.map((i) => i.updateAsync()))
        console.log("Done")
    }
}

@@ -68,7 +68,7 @@ export default class AllImageProviders {

    private static readonly _cachedImageStores: Record<string, Store<ProvidedImage[]>> = {}
    /**
-    * Tries to extract all image data for this image. Cachedon tags?.data?.id
+    * Tries to extract all image data for this image. Cached on tags?.data?.id
     */
    public static LoadImagesFor(
        tags: Store<Record<string, string>>,

@@ -78,8 +78,9 @@ export default class AllImageProviders {
            return undefined
        }
        const id = tags?.data?.id
-       if (this._cachedImageStores[id]) {
-           return this._cachedImageStores[id]
+       const cachekey = id + (tagKey?.join(";") ?? "")
+       if (this._cachedImageStores[cachekey]) {
+           return this._cachedImageStores[cachekey]
        }

        const source = new UIEventSource([])

@@ -90,6 +91,7 @@ export default class AllImageProviders {
            However, we override them if a custom image tag is set, e.g. 'image:menu'
            */
            const prefixes = tagKey ?? imageProvider.defaultKeyPrefixes
+           console.log("Prefixes are", tagKey, prefixes)
            const singleSource = tags.bindD((tags) => imageProvider.getRelevantUrls(tags, prefixes))
            allSources.push(singleSource)
            singleSource.addCallbackAndRunD((_) => {

@@ -98,21 +100,19 @@ export default class AllImageProviders {
                source.set(dedup)
            })
        }
-       this._cachedImageStores[id] = source
+       this._cachedImageStores[cachekey] = source
        return source
    }

    /**
     * Given a list of URLs, tries to detect the images. Used in e.g. the comments
-    * @param url
     */
    public static loadImagesFrom(urls: string[]): Store<ProvidedImage[]> {
        const tags = {
-           id: "na",
+           id: urls.join(";"),
        }
        for (let i = 0; i < urls.length; i++) {
-           const url = urls[i]
-           tags["image:" + i] = url
+           tags["image:" + i] = urls[i]
        }
        return this.LoadImagesFor(new ImmutableStore(tags))
    }

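The cache key for `LoadImagesFor` now includes the requested tag keys, so a lookup with a custom image key (e.g. `image:menu`) no longer collides with the default lookup for the same element, and `loadImagesFrom` caches per URL list instead of sharing a single "na" entry. A small sketch of the key construction shown above, with a hypothetical element id:

    // Sketch only: "node/42" and "image:menu" are illustrative values
    const id = "node/42"
    const tagKey: string[] | undefined = ["image:menu"]
    const cachekey = id + (tagKey?.join(";") ?? "") // "node/42image:menu"; plain "node/42" when no tagKey is passed
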
@@ -11,6 +11,7 @@ export class Imgur extends ImageProvider {
    public readonly defaultKeyPrefixes: string[] = ["image"]
    public static readonly apiUrl = "https://api.imgur.com/3/image"
    public static readonly supportingUrls = ["https://i.imgur.com"]

    private constructor() {
        super()
    }

@@ -37,6 +38,37 @@ export class Imgur extends ImageProvider {
        return undefined
    }

+   public static parseLicense(descr: string) {
+       const data: Record<string, string> = {}
+
+       if (!descr) {
+           return undefined
+       }
+       if (descr.toLowerCase() === "cc0") {
+           data.author = "Unknown"
+           data.license = "CC0"
+       } else {
+           for (const tag of descr.split("\n")) {
+               const kv = tag.split(":")
+               if (kv.length < 2) {
+                   continue
+               }
+               const k = kv[0]
+               data[k] = kv[1]?.replace(/\r/g, "")
+           }
+       }
+       if (Object.keys(data).length === 0) {
+           return undefined
+       }
+
+       const licenseInfo = new LicenseInfo()
+
+       licenseInfo.licenseShortName = data.license
+       licenseInfo.artist = data.author
+
+       return licenseInfo
+   }
+
    /**
     * Download the attribution and license info for the picture at the given URL
     *

@@ -56,9 +88,14 @@ export class Imgur extends ImageProvider {
     *
     *
     */
-   public async DownloadAttribution(providedImage: { url: string }): Promise<LicenseInfo> {
+   public async DownloadAttribution(
+       providedImage: {
+           url: string
+       },
+       withResponse?: (obj) => void
+   ): Promise<LicenseInfo> {
        const url = providedImage.url
-       const hash = url.substr("https://i.imgur.com/".length).split(/\.jpe?g/i)[0]
+       const hash = url.substr("https://i.imgur.com/".length).split(/(\.jpe?g)|(\.png)/i)[0]

        const apiUrl = "https://api.imgur.com/3/image/" + hash
        const response = await Utils.downloadJsonCached<{

@@ -66,24 +103,17 @@ export class Imgur extends ImageProvider {
        }>(apiUrl, 365 * 24 * 60 * 60, {
            Authorization: "Client-ID " + Constants.ImgurApiKey,
        })

-       const descr = response.data.description ?? ""
-       const data: any = {}
-       const imgurData = response.data
-
-       for (const tag of descr.split("\n")) {
-           const kv = tag.split(":")
-           const k = kv[0]
-           data[k] = kv[1]?.replace(/\r/g, "")
+       if (withResponse) {
+           withResponse(response)
        }

-       const licenseInfo = new LicenseInfo()
+       const imgurData = response.data
+       const license = Imgur.parseLicense(imgurData.description ?? "")
+       if (license) {
+           license.views = imgurData.views
+           license.date = new Date(Number(imgurData.datetime) * 1000)
+       }

-       licenseInfo.licenseShortName = data.license
-       licenseInfo.artist = data.author
-       licenseInfo.date = new Date(Number(imgurData.datetime) * 1000)
-       licenseInfo.views = imgurData.views
-
-       return licenseInfo
+       return license
    }
}

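`parseLicense` accepts the Imgur description either as the literal `cc0` or as newline-separated `key:value` pairs (typically `author:` and `license:`), the same format the removed inline parsing in `DownloadAttribution` handled. A short usage sketch with made-up values:

    // Illustrative inputs only; real descriptions come from the Imgur API response
    const info = Imgur.parseLicense("author:Some Author\nlicense:CC-BY 4.0")
    // info.artist === "Some Author", info.licenseShortName === "CC-BY 4.0"
    const none = Imgur.parseLicense("just a plain caption") // undefined: no key:value pairs found
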
@@ -145,12 +145,7 @@ export default class PanoramaxImageProvider extends ImageProvider {
            )
        }

-       Stores.Chronic(1500, () => hasLoading(source.data)).addCallback((_) => {
-           console.log(
-               "Testing panoramax URLS again as some were loading",
-               source.data,
-               hasLoading(source.data)
-           )
+       Stores.Chronic(1500, () => hasLoading(source.data)).addCallback(() => {
            super.getRelevantUrlsFor(tags, prefixes).then((data) => {
                source.set(data)
                return !hasLoading(data)

@@ -204,7 +199,8 @@ export class PanoramaxUploader implements ImageUploader {
        currentGps: [number, number],
        author: string,
        noblur: boolean = false,
-       sequenceId?: string
+       sequenceId?: string,
+       datetime?: string
    ): Promise<{
        key: string
        value: string

@@ -213,7 +209,7 @@ export class PanoramaxUploader implements ImageUploader {
        // https://panoramax.openstreetmap.fr/api/docs/swagger#/

        let [lon, lat] = currentGps ?? [undefined, undefined]
-       let datetime = new Date().toISOString()
+       datetime ??= new Date().toISOString()
        try {
            const tags = await ExifReader.load(blob)
            const [[latD], [latM], [latS, latSDenom]] = <

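The new optional `datetime` parameter is kept when the caller provides it; `??=` only assigns when the left-hand side is null or undefined, so the current time is merely a fallback. A tiny standalone sketch of that behaviour (not part of the uploader itself):

    let fallback: string | undefined = undefined
    fallback ??= new Date().toISOString() // assigned: falls back to "now"

    let provided: string | undefined = "2024-05-01T12:00:00Z" // illustrative timestamp
    provided ??= new Date().toISOString() // untouched: the provided value wins
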
@@ -874,7 +874,7 @@ export class TagUtils {
                    tag +
                    ". To indicate a missing tag, use '" +
                    split[0] +
-                   "!=' instead"
+                   "=' instead"
            )
        }
        if (split[1] === "") {

@@ -371,7 +371,7 @@ export default class LinkedDataLoader {
        const match = maxstay.match(/P([0-9]+)D/)
        if (match) {
            const days = Number(match[1])
-           if(days === 30){
+           if (days === 30) {
                // 30 is the default which is set if velopark didn't know the actual value
                return undefined
            }

@@ -17,7 +17,7 @@ export default class VeloparkLoader {

    private static readonly coder = new CountryCoder(
        Constants.countryCoderEndpoint,
-       Utils.downloadJson,
+       Utils.downloadJson
    )

    public static convert(veloparkData: VeloparkData): Feature {

@@ -46,14 +46,14 @@ export default class VeloparkLoader {

        if (veloparkData.contactPoint?.email) {
            properties["operator:email"] = VeloparkLoader.emailReformatting.reformat(
-               veloparkData.contactPoint?.email,
+               veloparkData.contactPoint?.email
            )
        }

        if (veloparkData.contactPoint?.telephone) {
            properties["operator:phone"] = VeloparkLoader.phoneValidator.reformat(
                veloparkData.contactPoint?.telephone,
-               () => "be",
+               () => "be"
            )
        }

@@ -78,7 +78,7 @@ export default class VeloparkLoader {
            ) {
                const duration = g.maximumParkingDuration.substring(
                    1,
-                   g.maximumParkingDuration.length - 1,
+                   g.maximumParkingDuration.length - 1
                )
                if (duration !== "30") {
                    // We don't set maxstay if it is 30, they are the default value that velopark chose for "unknown"

@@ -97,11 +97,11 @@ export default class VeloparkLoader {
                        const startHour = spec.opens
                        const endHour = spec.closes === "23:59" ? "24:00" : spec.closes
                        const merged = OH.MergeTimes(
-                           OH.ParseRule(dayOfWeek + " " + startHour + "-" + endHour),
+                           OH.ParseRule(dayOfWeek + " " + startHour + "-" + endHour)
                        )
                        return OH.ToString(merged)
                    })
-                   .join("; "),
+                   .join("; ")
            )
            properties.opening_hours = oh
        }