chore: automated housekeeping...

Pieter Vander Vennet 2024-10-19 14:44:55 +02:00
parent c9ce29f206
commit 40e894df8b
294 changed files with 14209 additions and 4192 deletions

View file

@@ -5,7 +5,6 @@ import { Utils } from "../../Utils"
* UIEventsource-wrapper around localStorage
*/
export class LocalStorageSource {
- private static readonly _cache: Record<string, UIEventSource<string>> = {}
static getParsed<T>(key: string, defaultValue: T): UIEventSource<T> {
@@ -21,7 +20,7 @@ export class LocalStorageSource {
}
},
[],
- (value) => JSON.stringify(value),
+ (value) => JSON.stringify(value)
)
}
@@ -32,7 +31,6 @@ export class LocalStorageSource {
}
let saved = defaultValue
if (!Utils.runningFromConsole) {
try {
saved = localStorage.getItem(key)
if (saved === "undefined") {
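
Aside (illustration, not part of this commit): a minimal sketch of how the getParsed() wrapper shown above is typically consumed. The key name is made up, the import path is assumed, and the UIEventSource members used here (data, setData, addCallbackAndRun) are assumed to match MapComplete's usual API.

import { LocalStorageSource } from "./LocalStorageSource"

// Read a JSON-serialised value from localStorage as a reactive source (defaults to [])
const favourites = LocalStorageSource.getParsed<string[]>("example-favourites", [])
// React to changes; the wrapper writes JSON.stringify(value) back to localStorage
favourites.addCallbackAndRun((items) => console.log("Favourites are now", items))
favourites.setData([...favourites.data, "some-id"])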

View file

@@ -22,7 +22,7 @@ export class MangroveIdentity {
this.mangroveIdentity = mangroveIdentity
this._mangroveIdentityCreationDate = mangroveIdentityCreationDate
mangroveIdentity.addCallbackAndRunD(async (data) => {
- if(data === ""){
+ if (data === "") {
return
}
await this.setKeypair(data)

View file

@@ -297,15 +297,13 @@ export default class NameSuggestionIndex {
return true
}
- if (
- i.locationSet.include.some((c) => countries.indexOf(c) >= 0)
- ) {
+ if (i.locationSet.include.some((c) => countries.indexOf(c) >= 0)) {
// We prefer the countries provided by lonlat2country, they are more precise and are loaded already anyway (cheap)
// Country might contain multiple countries, separated by ';'
return true
}
- if (i.locationSet.exclude?.some(c => countries.indexOf(c) >= 0)) {
+ if (i.locationSet.exclude?.some((c) => countries.indexOf(c) >= 0)) {
return false
}
@@ -313,18 +311,20 @@ export default class NameSuggestionIndex {
return true
}
- const hasSpecial = i.locationSet.include?.some(i => i.endsWith(".geojson") || Array.isArray(i)) || i.locationSet.exclude?.some(i => i.endsWith(".geojson") || Array.isArray(i))
+ const hasSpecial =
+ i.locationSet.include?.some((i) => i.endsWith(".geojson") || Array.isArray(i)) ||
+ i.locationSet.exclude?.some((i) => i.endsWith(".geojson") || Array.isArray(i))
if (!hasSpecial) {
return false
}
const key = i.locationSet.include?.join(";") + "-" + i.locationSet.exclude?.join(";")
const fromCache = NameSuggestionIndex.resolvedSets[key]
- const resolvedSet = fromCache ?? NameSuggestionIndex.loco.resolveLocationSet(i.locationSet)
+ const resolvedSet =
+ fromCache ?? NameSuggestionIndex.loco.resolveLocationSet(i.locationSet)
if (!fromCache) {
NameSuggestionIndex.resolvedSets[key] = resolvedSet
}
if (resolvedSet) {
// We actually have a location set, so we can check if the feature is in it, by determining if our point is inside the MultiPolygon using @turf/boolean-point-in-polygon
// This might occur for some extra boundaries, such as counties, ...
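
Aside (illustration, not part of this commit): the comment above mentions checking the point against the MultiPolygon with @turf/boolean-point-in-polygon. A rough sketch of that check, assuming the resolved location set can be reduced to a GeoJSON (Multi)Polygon feature (resolvedSetFeature below is a hypothetical parameter, not the actual shape returned by resolveLocationSet):

import booleanPointInPolygon from "@turf/boolean-point-in-polygon"
import { point } from "@turf/helpers"
import type { Feature, MultiPolygon, Polygon } from "geojson"

// Returns true when [lon, lat] falls inside the (multi)polygon of a resolved location set
function isInsideLocationSet(
    lon: number,
    lat: number,
    resolvedSetFeature: Feature<Polygon | MultiPolygon>
): boolean {
    return booleanPointInPolygon(point([lon, lat]), resolvedSetFeature)
}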

View file

@@ -58,7 +58,7 @@ export interface P4CPicture {
author?
license?
detailsUrl?: string
- direction?: number,
+ direction?: number
osmTags?: object /*To copy straight into OSM!*/
thumbUrl: string
details: {
@@ -103,7 +103,7 @@ class P4CImageFetcher implements ImageFetcher {
{
mindate: new Date().getTime() - maxAgeSeconds,
towardscenter: false,
- },
+ }
)
} catch (e) {
console.log("P4C image fetcher failed with", e)
@@ -172,16 +172,13 @@ class ImagesFromPanoramaxFetcher implements ImageFetcher {
constructor(url?: string, radius: number = 100) {
this._radius = radius
if (url) {
this._panoramax = new Panoramax(url)
} else {
this._panoramax = new PanoramaxXYZ()
}
}
public async fetchImages(lat: number, lon: number): Promise<P4CPicture[]> {
const bboxObj = new BBox([
GeoOperations.destination([lon, lat], this._radius * Math.sqrt(2), -45),
GeoOperations.destination([lon, lat], this._radius * Math.sqrt(2), 135),
@@ -189,16 +186,16 @@ class ImagesFromPanoramaxFetcher implements ImageFetcher {
const bbox: [number, number, number, number] = bboxObj.toLngLatFlat()
const images = await this._panoramax.search({ bbox, limit: 1000 })
- return images.map(i => {
+ return images.map((i) => {
const [lng, lat] = i.geometry.coordinates
- return ({
+ return {
pictureUrl: i.assets.sd.href,
coordinates: { lng, lat },
provider: "panoramax",
direction: i.properties["view:azimuth"],
osmTags: {
- "panoramax": i.id,
+ panoramax: i.id,
},
thumbUrl: i.assets.thumb.href,
date: new Date(i.properties.datetime).getTime(),
@@ -206,9 +203,10 @@ class ImagesFromPanoramaxFetcher implements ImageFetcher {
author: i.providers.at(-1).name,
detailsUrl: i.id,
details: {
- isSpherical: i.properties["exif"]["Xmp.GPano.ProjectionType"] === "equirectangular",
+ isSpherical:
+ i.properties["exif"]["Xmp.GPano.ProjectionType"] === "equirectangular",
},
- })
+ }
})
}
}
@@ -236,7 +234,7 @@ class ImagesFromCacheServerFetcher implements ImageFetcher {
async fetchImagesForType(
targetlat: number,
targetlon: number,
- type: "lines" | "pois" | "polygons",
+ type: "lines" | "pois" | "polygons"
): Promise<P4CPicture[]> {
const { x, y, z } = Tiles.embedded_tile(targetlat, targetlon, 14)
@@ -253,7 +251,7 @@ class ImagesFromCacheServerFetcher implements ImageFetcher {
}),
x,
y,
- z,
+ z
)
await src.updateAsync()
return src.features.data
@@ -427,7 +425,7 @@ export class CombinedFetcher {
lat: number,
lon: number,
state: UIEventSource<Record<string, "loading" | "done" | "error">>,
- sink: UIEventSource<P4CPicture[]>,
+ sink: UIEventSource<P4CPicture[]>
): Promise<void> {
try {
const pics = await source.fetchImages(lat, lon)
@@ -460,7 +458,7 @@ export class CombinedFetcher {
public getImagesAround(
lon: number,
- lat: number,
+ lat: number
): {
images: Store<P4CPicture[]>
state: Store<Record<string, "loading" | "done" | "error">>
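
Aside (illustration, not part of this commit): fetchImages() above builds its search window by stepping radius·√2 towards the north-west (-45°) and south-east (135°), which are the corners of a square with side 2·radius centred on the point. GeoOperations.destination is MapComplete's own helper; the sketch below shows the equivalent corner computation with @turf/destination, purely as an assumption-laden stand-in:

import destination from "@turf/destination"
import { point } from "@turf/helpers"

const radius = 100 // metres, as in the constructor default above
const centre = point([3.2247, 51.2093]) // example coordinates, made up
const northWest = destination(centre, radius * Math.sqrt(2), -45, { units: "meters" })
const southEast = destination(centre, radius * Math.sqrt(2), 135, { units: "meters" })
// Flatten to [minLon, minLat, maxLon, maxLat], the usual bbox shape for a search call
const bbox: [number, number, number, number] = [
    northWest.geometry.coordinates[0],
    southEast.geometry.coordinates[1],
    southEast.geometry.coordinates[0],
    northWest.geometry.coordinates[1],
]
console.log(bbox)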

View file

@@ -1,6 +1,6 @@
import { Utils } from "../../Utils"
import { Store, UIEventSource } from "../UIEventSource"
- import { WBK} from "wikibase-sdk"
+ import { WBK } from "wikibase-sdk"
export class WikidataResponse {
public readonly id: string
@@ -128,10 +128,9 @@ interface SparqlResult {
* Utility functions around wikidata
*/
export default class Wikidata {
public static wds = WBK({
instance: "https://wikidata.org",
- sparqlEndpoint: "https://query.wikidata.org/bigdata/namespace/wdq/sparql"
+ sparqlEndpoint: "https://query.wikidata.org/bigdata/namespace/wdq/sparql",
})
public static readonly neededUrls = [
@@ -211,7 +210,7 @@ export default class Wikidata {
${instanceOf}
${minusPhrases.join("\n ")}
} ORDER BY ASC(?num) LIMIT ${options?.maxCount ?? 20}`
- const url = Wikidata. wds.sparqlQuery(sparql)
+ const url = Wikidata.wds.sparqlQuery(sparql)
const result = await Utils.downloadJson<SparqlResult>(url)
/*The full uri of the wikidata-item*/
@@ -252,7 +251,7 @@ export default class Wikidata {
lang +
"&type=item&origin=*" +
"&props=" // props= removes some unused values in the result
- const response = await Utils.downloadJsonCached<{search: any[]}>(url, 10000)
+ const response = await Utils.downloadJsonCached<{ search: any[] }>(url, 10000)
const result = response.search
@@ -401,7 +400,7 @@ export default class Wikidata {
"}"
const url = Wikidata.wds.sparqlQuery(query)
const result = await Utils.downloadJsonCached<SparqlResult>(url, 24 * 60 * 60 * 1000)
- return <any> result.results.bindings
+ return <any>result.results.bindings
}
private static _cache = new Map<string, Promise<WikidataResponse>>()
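
Aside (illustration, not part of this commit): a minimal sketch of how the wds handle configured above is used. WBK() and sparqlQuery() appear in the hunks above; plain fetch() stands in for Utils.downloadJson, and the SPARQL query is just an example:

import { WBK } from "wikibase-sdk"

const wds = WBK({
    instance: "https://wikidata.org",
    sparqlEndpoint: "https://query.wikidata.org/bigdata/namespace/wdq/sparql",
})

async function demo(): Promise<void> {
    // sparqlQuery() only builds the request URL; the download happens separately
    const sparql = "SELECT ?item WHERE { ?item wdt:P31 wd:Q5 } LIMIT 5" // five items that are humans
    const url = wds.sparqlQuery(sparql)
    const result = await (await fetch(url)).json()
    // Standard SPARQL JSON results: bindings live under results.bindings
    console.log(result.results.bindings.map((b: any) => b.item.value))
}

demo()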