forked from MapComplete/MapComplete
Fix: add comment on duplicate nearby images, make sure they are always deduplicated
This commit is contained in:
parent 25880ed7d3
commit a3aba991c5
1 changed file with 30 additions and 33 deletions
@@ -101,7 +101,7 @@ class P4CImageFetcher implements ImageFetcher {
                 searchRadius,
                 {
                     mindate: new Date().getTime() - maxAgeSeconds,
-                    towardscenter: false,
+                    towardscenter: false
                 }
             )
         } catch (e) {
@@ -152,9 +152,9 @@ class ImagesInLoadedDataFetcher implements ImageFetcher {
                 coordinates: { lng: centerpoint[0], lat: centerpoint[1] },
                 provider: "OpenStreetMap",
                 details: {
-                    isSpherical: false,
+                    isSpherical: false
                 },
-                osmTags: { image },
+                osmTags: { image }
             })
         }
     })
@@ -170,7 +170,7 @@ class ImagesFromPanoramaxFetcher implements ImageFetcher {
     public static readonly apiUrls: ReadonlyArray<string> = [
         "https://panoramax.openstreetmap.fr",
         "https://api.panoramax.xyz",
-        "https://panoramax.mapcomplete.org",
+        "https://panoramax.mapcomplete.org"
     ]

     constructor(url?: string, radius: number = 50) {
@@ -191,7 +191,7 @@ class ImagesFromPanoramaxFetcher implements ImageFetcher {
             provider: "panoramax",
             direction: imageData.properties["view:azimuth"],
             osmTags: {
-                panoramax: imageData.id,
+                panoramax: imageData.id
             },
             thumbUrl: imageData.assets.thumb.href,
             date: new Date(imageData.properties.datetime).getTime(),
@@ -200,8 +200,8 @@ class ImagesFromPanoramaxFetcher implements ImageFetcher {
             detailsUrl: imageData.id,
             details: {
                 isSpherical:
-                    imageData.properties["exif"]["Xmp.GPano.ProjectionType"] === "equirectangular",
-            },
+                    imageData.properties["exif"]["Xmp.GPano.ProjectionType"] === "equirectangular"
+            }
         }
     }

@@ -209,16 +209,16 @@ class ImagesFromPanoramaxFetcher implements ImageFetcher {
         const radiusSettings = [
             {
                 place_fov_tolerance: 180,
-                radius: 15,
+                radius: 15
             },
             {
                 place_fov_tolerance: 180,
-                radius: 25,
+                radius: 25
             },
             {
                 place_fov_tolerance: 90,
-                radius: 50,
-            },
+                radius: 50
+            }
         ]
         const promises: Promise<ImageData[]>[] = []
         const maxRadius = this._radius
@@ -233,7 +233,7 @@ class ImagesFromPanoramaxFetcher implements ImageFetcher {
                 place: [lon, lat],
                 place_distance: [prevRadius, Math.min(maxRadius, radiusSetting.radius)],
                 place_fov_tolerance: radiusSetting.place_fov_tolerance,
-                limit: 50,
+                limit: 50
             })
             promises.push(promise)
             prevRadius = radiusSetting.radius
@@ -243,6 +243,7 @@ class ImagesFromPanoramaxFetcher implements ImageFetcher {
         }
         const images = await Promise.all(promises)

+        // This might give duplicates, but worry not: they are deduped by the 'combinedImageFetcher'
         return [].concat(...images).map((i) => ImagesFromPanoramaxFetcher.convert(i))
     }
 }
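
Note for readers skimming the diff: the Panoramax hunks above query the service in concentric distance rings and then flatten the per-ring results, which is why duplicates can appear before the CombinedFetcher dedupes them. The following standalone sketch only illustrates that pattern; searchPanoramax, RingSetting and fetchInRings are hypothetical stand-ins, not names from the codebase, and the real request parameters are only those visible in the diff.

// Sketch only: 'searchPanoramax' is a hypothetical placeholder for the real Panoramax search call
interface RingSetting {
    place_fov_tolerance: number
    radius: number
}

async function searchPanoramax(_params: {
    place: [number, number]
    place_distance: [number, number]
    place_fov_tolerance: number
    limit: number
}): Promise<unknown[]> {
    return [] // placeholder for the actual HTTP request
}

async function fetchInRings(lon: number, lat: number, maxRadius: number): Promise<unknown[]> {
    // Ring settings mirroring the diff: small, lenient rings first, a wider and stricter one later
    const radiusSettings: RingSetting[] = [
        { place_fov_tolerance: 180, radius: 15 },
        { place_fov_tolerance: 180, radius: 25 },
        { place_fov_tolerance: 90, radius: 50 }
    ]
    const promises: Promise<unknown[]>[] = []
    let prevRadius = 0
    for (const radiusSetting of radiusSettings) {
        // Each request only covers the ring between the previous radius and the current one,
        // so the queries together tile the search area without re-fetching the inner circle
        promises.push(
            searchPanoramax({
                place: [lon, lat],
                place_distance: [prevRadius, Math.min(maxRadius, radiusSetting.radius)],
                place_fov_tolerance: radiusSetting.place_fov_tolerance,
                limit: 50
            })
        )
        prevRadius = radiusSetting.radius
        if (prevRadius >= maxRadius) {
            // Stop once the configured maximum search radius is reached (assumed guard, not shown in the diff)
            break
        }
    }
    // Run the ring queries in parallel and flatten; any duplicates are removed downstream
    const images = await Promise.all(promises)
    return images.flat()
}
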
@@ -276,7 +277,7 @@ class MapillaryFetcher implements ImageFetcher {
                 boundingBox.getWest(),
                 boundingBox.getSouth(),
                 boundingBox.getEast(),
-                boundingBox.getNorth(),
+                boundingBox.getNorth()
             ].join(",") +
             "&access_token=" +
             encodeURIComponent(Constants.mapillary_client_token_v4) +
@@ -322,10 +323,10 @@ class MapillaryFetcher implements ImageFetcher {
                 coordinates: { lng: c[0], lat: c[1] },
                 thumbUrl: img.thumb_256_url,
                 osmTags: {
-                    mapillary: img.id,
+                    mapillary: img.id
                 },
                 details: {
-                    isSpherical: this._panoramas === "only",
+                    isSpherical: this._panoramas === "only"
                 },

                 detailsUrl: Mapillary.singleton.visitUrl(img, { lon, lat }),
@@ -348,7 +349,7 @@ export class CombinedFetcher {
         Imgur.apiUrl,
         ...Imgur.supportingUrls,
         ...MapillaryFetcher.apiUrls,
-        ...ImagesFromPanoramaxFetcher.apiUrls,
+        ...ImagesFromPanoramaxFetcher.apiUrls
     ]

     constructor(radius: number, maxage: Date, indexedFeatures: IndexedFeatureSource) {
@@ -360,15 +361,15 @@ export class CombinedFetcher {
             new MapillaryFetcher({
                 max_images: 25,
                 start_captured_at: maxage,
-                panoramas: "only",
+                panoramas: "only"
             }),
             new MapillaryFetcher({
                 max_images: 25,
                 start_captured_at: maxage,
-                panoramas: "no",
+                panoramas: "no"
             }),
             new P4CImageFetcher("mapillary"),
-            new P4CImageFetcher("wikicommons"),
+            new P4CImageFetcher("wikicommons")
         ].map((f) => new CachedFetcher(f))
     }

@@ -384,22 +385,18 @@ export class CombinedFetcher {
                     state.data[source.name] = "done"
                     state.ping()

-                    if (sink.data === undefined) {
-                        sink.setData(pics)
-                    } else {
-                        const newList = []
-                        const seenIds = new Set<string>()
-                        for (const p4CPicture of [...sink.data, ...pics]) {
-                            const id = p4CPicture.pictureUrl
-                            if (seenIds.has(id)) {
-                                continue
-                            }
-                            newList.push(p4CPicture)
-                            seenIds.add(id)
-                        }
-                        NearbyImageUtils.sortByDistance(newList, lon, lat)
-                        sink.setData(newList)
-                    }
+                    const newList = []
+                    const seenIds = new Set<string>()
+                    for (const p4CPicture of [...sink.data ?? [], ...pics]) {
+                        const id = p4CPicture.pictureUrl
+                        if (seenIds.has(id)) {
+                            continue
+                        }
+                        newList.push(p4CPicture)
+                        seenIds.add(id)
+                    }
+                    NearbyImageUtils.sortByDistance(newList, lon, lat)
+                    sink.setData(newList)
                 } catch (e) {
                     console.error("Could not load images from", source.name, "due to", e)
                     state.data[source.name] = "error"
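
The last hunk is the core of the commit: results from all fetchers now always pass through one deduplication step keyed on pictureUrl, rather than only when earlier results already exist. A minimal sketch of that merge as a standalone helper follows; Picture and mergeDeduplicated are illustrative names for this sketch, not part of MapComplete, and the real code additionally updates UI state and sorts via NearbyImageUtils.

// Sketch only: simplified picture type and helper, extracted for illustration
interface Picture {
    pictureUrl: string
    coordinates: { lng: number; lat: number }
}

// Merge already-shown pictures with a newly fetched batch, keeping the first occurrence
// of every pictureUrl so the same image is never listed twice
function mergeDeduplicated(existing: Picture[] | undefined, incoming: Picture[]): Picture[] {
    const newList: Picture[] = []
    const seenIds = new Set<string>()
    for (const picture of [...(existing ?? []), ...incoming]) {
        const id = picture.pictureUrl
        if (seenIds.has(id)) {
            continue
        }
        newList.push(picture)
        seenIds.add(id)
    }
    return newList
}

// Example: the duplicate URL arriving from a second source is dropped, so 'merged' holds two pictures
const merged = mergeDeduplicated(
    [{ pictureUrl: "https://example.org/a.jpg", coordinates: { lng: 3.22, lat: 51.21 } }],
    [
        { pictureUrl: "https://example.org/a.jpg", coordinates: { lng: 3.22, lat: 51.21 } },
        { pictureUrl: "https://example.org/b.jpg", coordinates: { lng: 3.23, lat: 51.2 } }
    ]
)
console.log(merged.map((p) => p.pictureUrl)) // ["https://example.org/a.jpg", "https://example.org/b.jpg"]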