forked from MapComplete/MapComplete
Add a wikidata search box
This commit is contained in:
parent
54bc4f24da
commit
b5a2ee1757
21 changed files with 4141 additions and 3590 deletions
|
@ -12,6 +12,11 @@ export interface WikidataResponse {
|
|||
commons: string
|
||||
}
|
||||
|
||||
export interface WikidataSearchoptions {
    // Language code used for labels/descriptions in the search.
    // The '"en" | string' union collapses to 'string' at type level; spelling out
    // "en" is an IDE-autocomplete hint for the default value.
    lang?: "en" | string,
    // Maximum number of search results to return.
    // Same trick: '20 | number' collapses to 'number'; the literal 20 hints the default.
    maxCount?: 20 | number
}
|
||||
|
||||
/**
|
||||
* Utility functions around wikidata
|
||||
*/
|
||||
|
@ -47,10 +52,14 @@ export default class Wikidata {
|
|||
const claimsList: any[] = entity.claims[claimId]
|
||||
const values = new Set<string>()
|
||||
for (const claim of claimsList) {
|
||||
const value = claim.mainsnak?.datavalue?.value;
|
||||
if(value !== undefined){
|
||||
values.add(value)
|
||||
let value = claim.mainsnak?.datavalue?.value;
|
||||
if (value === undefined) {
|
||||
continue;
|
||||
}
|
||||
if(value.id !== undefined){
|
||||
value = value.id
|
||||
}
|
||||
values.add(value)
|
||||
}
|
||||
claims.set(claimId, values);
|
||||
}
|
||||
|
@ -77,6 +86,82 @@ export default class Wikidata {
|
|||
return src;
|
||||
}
|
||||
|
||||
public static async search(
|
||||
search: string,
|
||||
options?:WikidataSearchoptions,
|
||||
page = 1
|
||||
): Promise<{
|
||||
id: string,
|
||||
label: string,
|
||||
description: string
|
||||
}[]> {
|
||||
const maxCount = options?.maxCount ?? 20
|
||||
let pageCount = Math.min(maxCount,50)
|
||||
const start = page * pageCount - pageCount;
|
||||
const lang = (options?.lang ?? "en")
|
||||
const url =
|
||||
"https://www.wikidata.org/w/api.php?action=wbsearchentities&search=" +
|
||||
search +
|
||||
"&language=" +
|
||||
lang +
|
||||
"&limit="+pageCount+"&continue=" +
|
||||
start +
|
||||
"&format=json&uselang=" +
|
||||
lang +
|
||||
"&type=item&origin=*"+
|
||||
"&props=" ;// props= removes some unused values in the result
|
||||
const response = await Utils.downloadJson(url)
|
||||
|
||||
const result : any[] = response.search
|
||||
|
||||
if(result.length < pageCount){
|
||||
// No next page
|
||||
return result;
|
||||
}
|
||||
if(result.length < maxCount){
|
||||
const newOptions = {...options}
|
||||
newOptions.maxCount = maxCount - result.length
|
||||
result.push(...await Wikidata.search(search,
|
||||
newOptions,
|
||||
page + 1
|
||||
))
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public static async searchAndFetch(
|
||||
search: string,
|
||||
options?:WikidataSearchoptions
|
||||
) : Promise<WikidataResponse[]>
|
||||
{
|
||||
const maxCount = options.maxCount
|
||||
// We provide some padding to filter away invalid values
|
||||
options.maxCount = Math.ceil((options.maxCount ?? 20) * 1.5)
|
||||
const searchResults = await Wikidata.search(search, options)
|
||||
const maybeResponses = await Promise.all(searchResults.map(async r => {
|
||||
try{
|
||||
return await Wikidata.LoadWikidataEntry(r.id).AsPromise()
|
||||
}catch(e){
|
||||
console.error(e)
|
||||
return undefined;
|
||||
}
|
||||
}))
|
||||
const responses = maybeResponses
|
||||
.map(r => <WikidataResponse> r["success"])
|
||||
.filter(wd => {
|
||||
if(wd === undefined){
|
||||
return false;
|
||||
}
|
||||
if(wd.claims.get("P31" /*Instance of*/)?.has("Q4167410"/* Wikimedia Disambiguation page*/)){
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
})
|
||||
responses.splice(maxCount, responses.length - maxCount)
|
||||
return responses
|
||||
}
|
||||
|
||||
private static ExtractKey(value: string | number) : number{
|
||||
if (typeof value === "number") {
|
||||
return value
|
||||
|
@ -99,6 +184,7 @@ export default class Wikidata {
|
|||
return n;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Loads a wikidata page
|
||||
* @returns the entity of the given value
|
||||
|
@ -109,7 +195,7 @@ export default class Wikidata {
|
|||
console.warn("Could not extract a wikidata entry from", value)
|
||||
return undefined;
|
||||
}
|
||||
console.log("Requesting wikidata with id", id)
|
||||
|
||||
const url = "https://www.wikidata.org/wiki/Special:EntityData/Q" + id + ".json";
|
||||
const response = await Utils.downloadJson(url)
|
||||
return Wikidata.ParseResponse(response.entities["Q" + id])
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue