forked from MapComplete/MapComplete

Merge develop

commit 55c6442cac
388 changed files with 16178 additions and 17860 deletions

@@ -54,6 +54,7 @@ export class MangroveIdentity
 export default class FeatureReviews {
     private static readonly _featureReviewsCache: Record<string, FeatureReviews> = {}
     public readonly subjectUri: Store<string>
+    public readonly average: Store<number | null>
     private readonly _reviews: UIEventSource<(Review & { madeByLoggedInUser: Store<boolean> })[]> =
         new UIEventSource([])
     public readonly reviews: Store<(Review & { madeByLoggedInUser: Store<boolean> })[]> =

@@ -124,6 +125,23 @@ export default class FeatureReviews
                 console.log("Could not fetch reviews for partially incorrect query ", sub)
             }
         })
+        this.average = this._reviews.map((reviews) => {
+            if (!reviews) {
+                return null
+            }
+            if (reviews.length === 0) {
+                return null
+            }
+            let sum = 0
+            let count = 0
+            for (const review of reviews) {
+                if (review.rating !== undefined) {
+                    count++
+                    sum += review.rating
+                }
+            }
+            return Math.round(sum / count)
+        })
     }
 
     /**

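The new average store is derived from _reviews via Store.map, so it recomputes whenever the review list changes. One edge case the added block leaves open: if every fetched review lacks a rating, count stays 0 and sum / count evaluates to NaN. A minimal standalone sketch of the same aggregation that also guards that case; the RatedReview shape here is illustrative, not MapComplete's actual Review type:

// Sketch only: same aggregation as the hunk above, plus a guard for the
// all-unrated case (where 0 / 0 would otherwise produce NaN).
interface RatedReview {
    rating?: number
}

function averageRating(reviews: RatedReview[] | undefined): number | null {
    const ratings = (reviews ?? [])
        .map((r) => r.rating)
        .filter((r): r is number => r !== undefined)
    if (ratings.length === 0) {
        return null
    }
    const sum = ratings.reduce((acc, r) => acc + r, 0)
    return Math.round(sum / ratings.length)
}
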
@@ -211,6 +229,8 @@ export default class FeatureReviews
                 hasNew = true
             }
             if (hasNew) {
+                self._reviews.data.sort((a, b) => b.iat - a.iat) // Sort with most recent first
+
                 self._reviews.ping()
             }
         }

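The sort mutates self._reviews.data in place, which by itself does not notify subscribers; the explicit ping() afterwards is what pushes the re-ordered list out. A self-contained sketch of that mutate-then-ping pattern, using a minimal stand-in for MapComplete's UIEventSource:

// Minimal stand-in for UIEventSource, just enough to show why an
// in-place mutation of `data` must be followed by ping().
class MiniEventSource<T> {
    public data: T
    private readonly _callbacks: ((t: T) => void)[] = []

    constructor(data: T) {
        this.data = data
    }

    public addCallback(cb: (t: T) => void): void {
        this._callbacks.push(cb)
    }

    // Notifies listeners; required after mutating `data` in place.
    public ping(): void {
        for (const cb of this._callbacks) {
            cb(this.data)
        }
    }
}

const reviews = new MiniEventSource([{ iat: 1 }, { iat: 3 }, { iat: 2 }])
reviews.addCallback((r) => console.log("most recent first:", r.map((x) => x.iat)))
reviews.data.sort((a, b) => b.iat - a.iat) // in-place: listeners see nothing yet
reviews.ping() // now subscribers run and log [3, 2, 1]
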
@@ -985,6 +985,27 @@ export default class PlantNet
         }
     }
 
+export interface PlantNetSpeciesMatch {
+    score: number
+    gbif: { id: string /*Actually a number*/ }
+    species: {
+        scientificNameWithoutAuthor: string
+        scientificNameAuthorship: string
+        genus: {
+            scientificNameWithoutAuthor: string
+            scientificNameAuthorship: string
+            scientificName: string
+        }
+        family: {
+            scientificNameWithoutAuthor: string
+            scientificNameAuthorship: string
+            scientificName: string
+        }
+        commonNames: string[]
+        scientificName: string
+    }
+}
+
 export interface PlantNetResult {
     query: {
         project: string

@@ -995,26 +1016,7 @@ export interface PlantNetResult
     language: string
     preferedReferential: string
     bestMatch: string
-    results: {
-        score: number
-        gbif: { id: string /*Actually a number*/ }
-        species: {
-            scientificNameWithoutAuthor: string
-            scientificNameAuthorship: string
-            genus: {
-                scientificNameWithoutAuthor: string
-                scientificNameAuthorship: string
-                scientificName: string
-            }
-            family: {
-                scientificNameWithoutAuthor: string
-                scientificNameAuthorship: string
-                scientificName: string
-            }
-            commonNames: string[]
-            scientificName: string
-        }
-    }[]
+    results: PlantNetSpeciesMatch[]
     version: string
     remainingIdentificationRequests: number
 }

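These two hunks are one refactor: the element type of results is lifted out of PlantNetResult into the named PlantNetSpeciesMatch interface, so consumers can refer to a single match without repeating the structural type. A usage sketch with illustrative helper names:

// Sketch: selecting and labelling the most confident PlantNet match.
// Sorting defensively by score avoids relying on server-side ordering.
function bestMatch(result: PlantNetResult): PlantNetSpeciesMatch | undefined {
    return [...result.results].sort((a, b) => b.score - a.score)[0]
}

function describeMatch(match: PlantNetSpeciesMatch): string {
    const scientific = match.species.scientificNameWithoutAuthor
    const common = match.species.commonNames[0]
    return common ? `${common} (${scientific})` : scientific
}
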
@@ -159,7 +159,7 @@ export default class Wikidata
      */
     public static async searchAdvanced(
         text: string,
-        options: WikidataAdvancedSearchoptions
+        options?: WikidataAdvancedSearchoptions
     ): Promise<
         {
             id: string

@@ -185,7 +185,7 @@ export default class Wikidata
                 ?num wikibase:apiOrdinal true .
                 bd:serviceParam wikibase:limit ${
                     Math.round(
-                        (options.maxCount ?? 20) * 1.5
+                        (options?.maxCount ?? 20) * 1.5
                     ) /*Some padding for disambiguation pages */
                 } .
                 ?label wikibase:apiOutput mwapi:label .

@@ -193,7 +193,7 @@ export default class Wikidata
             }
             ${instanceOf}
             ${minusPhrases.join("\n ")}
-        } ORDER BY ASC(?num) LIMIT ${options.maxCount ?? 20}`
+        } ORDER BY ASC(?num) LIMIT ${options?.maxCount ?? 20}`
         const url = wds.sparqlQuery(sparql)
 
         const result = await Utils.downloadJson(url)

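The three Wikidata hunks are one logical change: options becomes optional in searchAdvanced, so every dereference switches to optional chaining with the existing ?? 20 fallback. Without that, calling searchAdvanced(text) with no second argument would throw a TypeError while the SPARQL string is being built. The pattern in isolation, as a sketch with illustrative names:

// Sketch of the optional-options pattern: every access to the possibly
// undefined options object goes through ?. and falls back to a default.
interface SearchOptions {
    maxCount?: number
}

function buildLimitClause(options?: SearchOptions): string {
    // The 1.5x factor mirrors the "padding for disambiguation pages" above.
    const padded = Math.round((options?.maxCount ?? 20) * 1.5)
    return `LIMIT ${padded}`
}

buildLimitClause() // "LIMIT 30" - safe with no options at all
buildLimitClause({ maxCount: 10 }) // "LIMIT 15"
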
@@ -73,7 +73,6 @@ export default class Wikipedia
         if (cached) {
             return cached
         }
-        console.log("Constructing store for", cachekey)
         const store = new UIEventSource<FullWikipediaDetails>({}, cachekey)
         Wikipedia._fullDetailsCache.set(cachekey, store)
 

@@ -123,12 +122,15 @@ export default class Wikipedia
         }
         const wikipedia = new Wikipedia({ language: data.language })
         wikipedia.GetArticleHtml(data.pagename).then((article) => {
+            article = Utils.purify(article)
             data.fullArticle = article
             const content = document.createElement("div")
             content.innerHTML = article
             const firstParagraph = content.getElementsByTagName("p").item(0)
-            data.firstParagraph = firstParagraph.innerHTML
-            content.removeChild(firstParagraph)
+            if (firstParagraph) {
+                data.firstParagraph = firstParagraph.innerHTML
+                content.removeChild(firstParagraph)
+            }
             data.restOfArticle = content.innerHTML
             store.ping()
         })

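getElementsByTagName("p").item(0) returns null when an article contains no paragraph element, so the previously unguarded firstParagraph.innerHTML could throw; this hunk wraps both accesses in a truthiness check and sanitizes the article before any of it. The same guard in isolation, as a sketch:

// Sketch: split already-sanitized article HTML into first paragraph and
// remainder, tolerating articles with no <p> element at all.
function splitArticle(html: string): { firstParagraph?: string; restOfArticle: string } {
    const content = document.createElement("div")
    content.innerHTML = html // assumed sanitized upstream, as in the hunk above
    const firstParagraph = content.getElementsByTagName("p").item(0)
    if (firstParagraph === null) {
        return { restOfArticle: content.innerHTML }
    }
    const first = firstParagraph.innerHTML
    firstParagraph.remove() // also works when the <p> is nested, unlike removeChild on `content`
    return { firstParagraph: first, restOfArticle: content.innerHTML }
}
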
@@ -194,53 +196,6 @@ export default class Wikipedia
             encodeURIComponent(searchTerm)
         return (await Utils.downloadJson(url))["query"]["search"]
     }
-
-    /**
-     * Searches via 'index.php' and scrapes the result.
-     * This gives better results than via the API
-     * @param searchTerm
-     */
-    public async searchViaIndex(
-        searchTerm: string
-    ): Promise<{ title: string; snippet: string; url: string }[]> {
-        const url = `${this.backend}/w/index.php?search=${encodeURIComponent(searchTerm)}&ns0=1`
-        const result = await Utils.downloadAdvanced(url)
-        if (result["redirect"]) {
-            const targetUrl = result["redirect"]
-            // This is an exact match
-            return [
-                {
-                    title: this.extractPageName(targetUrl)?.trim(),
-                    url: targetUrl,
-                    snippet: "",
-                },
-            ]
-        }
-        if (result["error"]) {
-            throw "Could not download: " + JSON.stringify(result)
-        }
-        const el = document.createElement("html")
-        el.innerHTML = result["content"].replace(/href="\//g, 'href="' + this.backend + "/")
-        const searchResults = el.getElementsByClassName("mw-search-results")
-        const individualResults = Array.from(
-            searchResults[0]?.getElementsByClassName("mw-search-result") ?? []
-        )
-        return individualResults.map((result) => {
-            const toRemove = Array.from(result.getElementsByClassName("searchalttitle"))
-            for (const toRm of toRemove) {
-                toRm.parentElement.removeChild(toRm)
-            }
-
-            return {
-                title: result
-                    .getElementsByClassName("mw-search-result-heading")[0]
-                    .textContent.trim(),
-                url: result.getElementsByTagName("a")[0].href,
-                snippet: result.getElementsByClassName("searchresult")[0].textContent,
-            }
-        })
-    }
-
     /**
      * Returns the innerHTML for the given article as string.
      * Some cleanup is applied to this.

@@ -262,7 +217,7 @@ export default class Wikipedia
         if (response?.parse?.text === undefined) {
             return undefined
         }
-        const html = response["parse"]["text"]["*"]
+        const html = Utils.purify(response["parse"]["text"]["*"])
         if (html === undefined) {
             return undefined
         }

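Both Wikipedia hunks funnel fetched article HTML through Utils.purify before it reaches innerHTML or is handed to callers, closing an XSS vector for hostile wiki markup. The diff does not say what Utils.purify wraps; as an assumption, here is the same sanitize-before-parse discipline sketched with the DOMPurify library:

import DOMPurify from "dompurify"

// Sketch: never assign remote HTML to innerHTML before sanitizing it.
// DOMPurify stands in for whatever Utils.purify actually delegates to.
function parseRemoteHtml(rawHtml: string): HTMLDivElement {
    const clean = DOMPurify.sanitize(rawHtml) // strips scripts, event handlers, etc.
    const container = document.createElement("div")
    container.innerHTML = clean
    return container
}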