forked from MapComplete/MapComplete
Fix: add comment on duplicate nearby images, make sure they are always deduplicated
parent 50a415f304
commit 493b563766
1 changed file with 30 additions and 33 deletions
@@ -101,7 +101,7 @@ class P4CImageFetcher implements ImageFetcher {
                 searchRadius,
                 {
                     mindate: new Date().getTime() - maxAgeSeconds,
-                    towardscenter: false,
+                    towardscenter: false
                 }
             )
         } catch (e) {
@@ -152,9 +152,9 @@ class ImagesInLoadedDataFetcher implements ImageFetcher {
                 coordinates: { lng: centerpoint[0], lat: centerpoint[1] },
                 provider: "OpenStreetMap",
                 details: {
-                    isSpherical: false,
+                    isSpherical: false
                 },
-                osmTags: { image },
+                osmTags: { image }
             })
         }
     })
@@ -170,7 +170,7 @@ class ImagesFromPanoramaxFetcher implements ImageFetcher {
     public static readonly apiUrls: ReadonlyArray<string> = [
         "https://panoramax.openstreetmap.fr",
         "https://api.panoramax.xyz",
-        "https://panoramax.mapcomplete.org",
+        "https://panoramax.mapcomplete.org"
     ]

     constructor(url?: string, radius: number = 50) {
@@ -191,7 +191,7 @@ class ImagesFromPanoramaxFetcher implements ImageFetcher {
             provider: "panoramax",
             direction: imageData.properties["view:azimuth"],
             osmTags: {
-                panoramax: imageData.id,
+                panoramax: imageData.id
             },
             thumbUrl: imageData.assets.thumb.href,
             date: new Date(imageData.properties.datetime).getTime(),
@@ -200,8 +200,8 @@ class ImagesFromPanoramaxFetcher implements ImageFetcher {
             detailsUrl: imageData.id,
             details: {
                 isSpherical:
-                    imageData.properties["exif"]["Xmp.GPano.ProjectionType"] === "equirectangular",
-            },
+                    imageData.properties["exif"]["Xmp.GPano.ProjectionType"] === "equirectangular"
+            }
         }
     }

@@ -209,16 +209,16 @@ class ImagesFromPanoramaxFetcher implements ImageFetcher {
         const radiusSettings = [
             {
                 place_fov_tolerance: 180,
-                radius: 15,
+                radius: 15
             },
             {
                 place_fov_tolerance: 180,
-                radius: 25,
+                radius: 25
             },
             {
                 place_fov_tolerance: 90,
-                radius: 50,
-            },
+                radius: 50
+            }
         ]
         const promises: Promise<ImageData[]>[] = []
         const maxRadius = this._radius
@@ -233,7 +233,7 @@ class ImagesFromPanoramaxFetcher implements ImageFetcher {
                 place: [lon, lat],
                 place_distance: [prevRadius, Math.min(maxRadius, radiusSetting.radius)],
                 place_fov_tolerance: radiusSetting.place_fov_tolerance,
-                limit: 50,
+                limit: 50
             })
             promises.push(promise)
             prevRadius = radiusSetting.radius
@@ -243,6 +243,7 @@ class ImagesFromPanoramaxFetcher implements ImageFetcher {
         }
         const images = await Promise.all(promises)

+        // This might give duplicates, but worry not: they are deduped by the 'combinedImageFetcher'
         return [].concat(...images).map((i) => ImagesFromPanoramaxFetcher.convert(i))
     }
 }
@@ -276,7 +277,7 @@ class MapillaryFetcher implements ImageFetcher {
                 boundingBox.getWest(),
                 boundingBox.getSouth(),
                 boundingBox.getEast(),
-                boundingBox.getNorth(),
+                boundingBox.getNorth()
             ].join(",") +
             "&access_token=" +
             encodeURIComponent(Constants.mapillary_client_token_v4) +
@@ -322,10 +323,10 @@ class MapillaryFetcher implements ImageFetcher {
                 coordinates: { lng: c[0], lat: c[1] },
                 thumbUrl: img.thumb_256_url,
                 osmTags: {
-                    mapillary: img.id,
+                    mapillary: img.id
                 },
                 details: {
-                    isSpherical: this._panoramas === "only",
+                    isSpherical: this._panoramas === "only"
                 },

                 detailsUrl: Mapillary.singleton.visitUrl(img, { lon, lat }),
@@ -348,7 +349,7 @@ export class CombinedFetcher {
         Imgur.apiUrl,
         ...Imgur.supportingUrls,
         ...MapillaryFetcher.apiUrls,
-        ...ImagesFromPanoramaxFetcher.apiUrls,
+        ...ImagesFromPanoramaxFetcher.apiUrls
     ]

     constructor(radius: number, maxage: Date, indexedFeatures: IndexedFeatureSource) {
@@ -360,15 +361,15 @@ export class CombinedFetcher {
             new MapillaryFetcher({
                 max_images: 25,
                 start_captured_at: maxage,
-                panoramas: "only",
+                panoramas: "only"
             }),
             new MapillaryFetcher({
                 max_images: 25,
                 start_captured_at: maxage,
-                panoramas: "no",
+                panoramas: "no"
             }),
             new P4CImageFetcher("mapillary"),
-            new P4CImageFetcher("wikicommons"),
+            new P4CImageFetcher("wikicommons")
         ].map((f) => new CachedFetcher(f))
     }

@@ -384,22 +385,18 @@ export class CombinedFetcher {
                     state.data[source.name] = "done"
                     state.ping()

-                    if (sink.data === undefined) {
-                        sink.setData(pics)
-                    } else {
-                        const newList = []
-                        const seenIds = new Set<string>()
-                        for (const p4CPicture of [...sink.data, ...pics]) {
-                            const id = p4CPicture.pictureUrl
-                            if (seenIds.has(id)) {
-                                continue
-                            }
-                            newList.push(p4CPicture)
-                            seenIds.add(id)
-                        }
-                        NearbyImageUtils.sortByDistance(newList, lon, lat)
-                        sink.setData(newList)
-                    }
+                    const newList = []
+                    const seenIds = new Set<string>()
+                    for (const p4CPicture of [...sink.data ?? [], ...pics]) {
+                        const id = p4CPicture.pictureUrl
+                        if (seenIds.has(id)) {
+                            continue
+                        }
+                        newList.push(p4CPicture)
+                        seenIds.add(id)
+                    }
+                    NearbyImageUtils.sortByDistance(newList, lon, lat)
+                    sink.setData(newList)
                 } catch (e) {
                     console.error("Could not load images from", source.name, "due to", e)
                     state.data[source.name] = "error"
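For context, the deduplication this commit makes unconditional boils down to keeping the first picture seen per pictureUrl. The sketch below is illustrative only: the helper name dedupeByPictureUrl, the PictureLike interface, and the sample URLs are invented for the example and are not part of the codebase; it simply mirrors the Set-based loop the patch now always runs in CombinedFetcher.

// Minimal sketch, assuming pictures expose a `pictureUrl` string (as P4CPicture does in the diff).
interface PictureLike {
    pictureUrl: string
}

// Keeps the first occurrence of every pictureUrl and drops later duplicates.
function dedupeByPictureUrl<T extends PictureLike>(pictures: ReadonlyArray<T>): T[] {
    const newList: T[] = []
    const seenIds = new Set<string>()
    for (const picture of pictures) {
        if (seenIds.has(picture.pictureUrl)) {
            continue
        }
        newList.push(picture)
        seenIds.add(picture.pictureUrl)
    }
    return newList
}

// Usage: merge what the sink already holds with a fresh batch, as the patched
// code does with `[...sink.data ?? [], ...pics]`.
const alreadyShown: PictureLike[] = [{ pictureUrl: "https://example.org/a.jpg" }]
const freshBatch: PictureLike[] = [
    { pictureUrl: "https://example.org/a.jpg" }, // duplicate, dropped
    { pictureUrl: "https://example.org/b.jpg" },
]
console.log(dedupeByPictureUrl([...alreadyShown, ...freshBatch]).length) // prints 2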