Reformat all files with prettier

Pieter Vander Vennet 2022-09-08 21:40:48 +02:00
parent e22d189376
commit b541d3eab4
382 changed files with 50893 additions and 35566 deletions


@@ -1,5 +1,5 @@
import {Utils} from "../../Utils";
import {UIEventSource} from "../UIEventSource";
import { Utils } from "../../Utils"
import { UIEventSource } from "../UIEventSource"
import * as wds from "wikidata-sdk"
export class WikidataResponse {
@@ -18,14 +18,12 @@ export class WikidataResponse {
wikisites: Map<string, string>,
commons: string
) {
this.id = id
this.labels = labels
this.descriptions = descriptions
this.claims = claims
this.wikisites = wikisites
this.commons = commons
}
public static fromJson(entity: any): WikidataResponse {
@@ -41,7 +39,7 @@ export class WikidataResponse {
descr.set(labelName, entity.descriptions[labelName].value)
}
const sitelinks = new Map<string, string>();
const sitelinks = new Map<string, string>()
for (const labelName in entity.sitelinks) {
// labelName is `${language}wiki`
const language = labelName.substring(0, labelName.length - 4)
@@ -51,28 +49,19 @@ export class WikidataResponse {
const commons = sitelinks.get("commons")
sitelinks.delete("commons")
const claims = WikidataResponse.extractClaims(entity.claims);
return new WikidataResponse(
entity.id,
labels,
descr,
claims,
sitelinks,
commons
)
const claims = WikidataResponse.extractClaims(entity.claims)
return new WikidataResponse(entity.id, labels, descr, claims, sitelinks, commons)
}
static extractClaims(claimsJson: any): Map<string, Set<string>> {
const simplified = wds.simplify.claims(claimsJson, {
timeConverter: 'simple-day'
timeConverter: "simple-day",
})
const claims = new Map<string, Set<string>>();
const claims = new Map<string, Set<string>>()
for (const claimId in simplified) {
const claimsList: any[] = simplified[claimId]
claims.set(claimId, new Set(claimsList));
claims.set(claimId, new Set(claimsList))
}
return claims
}
@@ -84,7 +73,6 @@ export class WikidataLexeme {
senses: Map<string, string>
claims: Map<string, Set<string>>
constructor(json) {
this.id = json.id
this.claims = WikidataResponse.extractClaims(json.claims)
@@ -117,36 +105,40 @@ export class WikidataLexeme {
this.claims,
new Map(),
undefined
);
)
}
}
export interface WikidataSearchoptions {
lang?: "en" | string,
lang?: "en" | string
maxCount?: 20 | number
}
export interface WikidataAdvancedSearchoptions extends WikidataSearchoptions {
instanceOf?: number[];
instanceOf?: number[]
notInstanceOf?: number[]
}
/**
* Utility functions around wikidata
*/
export default class Wikidata {
private static readonly _identifierPrefixes = ["Q", "L"].map(str => str.toLowerCase())
private static readonly _prefixesToRemove = ["https://www.wikidata.org/wiki/Lexeme:",
private static readonly _identifierPrefixes = ["Q", "L"].map((str) => str.toLowerCase())
private static readonly _prefixesToRemove = [
"https://www.wikidata.org/wiki/Lexeme:",
"https://www.wikidata.org/wiki/",
"http://www.wikidata.org/entity/",
"Lexeme:"].map(str => str.toLowerCase())
"Lexeme:",
].map((str) => str.toLowerCase())
private static readonly _cache = new Map<
string,
UIEventSource<{ success: WikidataResponse } | { error: any }>
>()
private static readonly _cache = new Map<string, UIEventSource<{ success: WikidataResponse } | { error: any }>>()
public static LoadWikidataEntry(value: string | number): UIEventSource<{ success: WikidataResponse } | { error: any }> {
public static LoadWikidataEntry(
value: string | number
): UIEventSource<{ success: WikidataResponse } | { error: any }> {
const key = this.ExtractKey(value)
const cached = Wikidata._cache.get(key)
if (cached !== undefined) {
@@ -154,27 +146,31 @@ export default class Wikidata {
}
const src = UIEventSource.FromPromiseWithErr(Wikidata.LoadWikidataEntryAsync(key))
Wikidata._cache.set(key, src)
return src;
return src
}
/**
* Given a search text, searches for the relevant wikidata entries, excluding pages "outside of the main tree", e.g. disambiguation pages.
* Optionally, an 'instance of' can be given to limit the scope, e.g. instanceOf:5 (humans) will only search for humans
*/
public static async searchAdvanced(text: string, options: WikidataAdvancedSearchoptions): Promise<{
id: string,
relevance?: number,
label: string,
description?: string
}[]> {
public static async searchAdvanced(
text: string,
options: WikidataAdvancedSearchoptions
): Promise<
{
id: string
relevance?: number
label: string
description?: string
}[]
> {
let instanceOf = ""
if (options?.instanceOf !== undefined && options.instanceOf.length > 0) {
const phrases = options.instanceOf.map(q => `{ ?item wdt:P31/wdt:P279* wd:Q${q}. }`)
instanceOf = "{"+ phrases.join(" UNION ") + "}"
const phrases = options.instanceOf.map((q) => `{ ?item wdt:P31/wdt:P279* wd:Q${q}. }`)
instanceOf = "{" + phrases.join(" UNION ") + "}"
}
const forbidden = (options?.notInstanceOf ?? [])
.concat([17379835]) // blacklist 'wikimedia pages outside of the main knowledge tree', e.g. disambiguation pages
const minusPhrases = forbidden.map(q => `MINUS {?item wdt:P31/wdt:P279* wd:Q${q} .}`)
const forbidden = (options?.notInstanceOf ?? []).concat([17379835]) // blacklist 'wikimedia pages outside of the main knowledge tree', e.g. disambiguation pages
const minusPhrases = forbidden.map((q) => `MINUS {?item wdt:P31/wdt:P279* wd:Q${q} .}`)
const sparql = `SELECT * WHERE {
SERVICE wikibase:mwapi {
bd:serviceParam wikibase:api "EntitySearch" .
@@ -183,7 +179,11 @@ export default class Wikidata {
bd:serviceParam mwapi:language "${options.lang}" .
?item wikibase:apiOutputItem mwapi:item .
?num wikibase:apiOrdinal true .
bd:serviceParam wikibase:limit ${Math.round((options.maxCount ?? 20) * 1.5) /*Some padding for disambiguation pages */} .
bd:serviceParam wikibase:limit ${
Math.round(
(options.maxCount ?? 20) * 1.5
) /*Some padding for disambiguation pages */
} .
?label wikibase:apiOutput mwapi:label .
?description wikibase:apiOutput "@description" .
}
@@ -195,11 +195,11 @@ export default class Wikidata {
const result = await Utils.downloadJson(url)
/*The full uri of the wikidata-item*/
return result.results.bindings.map(({item, label, description, num}) => ({
return result.results.bindings.map(({ item, label, description, num }) => ({
relevance: num?.value,
id: item?.value,
label: label?.value,
description: description?.value
description: description?.value,
}))
}
@@ -207,47 +207,47 @@ export default class Wikidata {
search: string,
options?: WikidataSearchoptions,
page = 1
): Promise<{
id: string,
label: string,
description: string
}[]> {
): Promise<
{
id: string
label: string
description: string
}[]
> {
const maxCount = options?.maxCount ?? 20
let pageCount = Math.min(maxCount, 50)
const start = page * pageCount - pageCount;
const lang = (options?.lang ?? "en")
const start = page * pageCount - pageCount
const lang = options?.lang ?? "en"
const url =
"https://www.wikidata.org/w/api.php?action=wbsearchentities&search=" +
search +
"&language=" +
lang +
"&limit=" + pageCount + "&continue=" +
"&limit=" +
pageCount +
"&continue=" +
start +
"&format=json&uselang=" +
lang +
"&type=item&origin=*" +
"&props=";// props= removes some unused values in the result
"&props=" // props= removes some unused values in the result
const response = await Utils.downloadJsonCached(url, 10000)
const result: any[] = response.search
if (result.length < pageCount) {
// No next page
return result;
return result
}
if (result.length < maxCount) {
const newOptions = {...options}
const newOptions = { ...options }
newOptions.maxCount = maxCount - result.length
result.push(...await Wikidata.search(search,
newOptions,
page + 1
))
result.push(...(await Wikidata.search(search, newOptions, page + 1)))
}
return result;
return result
}
public static async searchAndFetch(
search: string,
options?: WikidataAdvancedSearchoptions
@@ -255,16 +255,17 @@ export default class Wikidata {
// We provide some padding to filter away invalid values
const searchResults = await Wikidata.searchAdvanced(search, options)
const maybeResponses = await Promise.all(
searchResults.map(async r => {
searchResults.map(async (r) => {
try {
console.log("Loading ", r.id)
return await Wikidata.LoadWikidataEntry(r.id).AsPromise()
} catch (e) {
console.error(e)
return undefined;
return undefined
}
}))
return Utils.NoNull(maybeResponses.map(r => <WikidataResponse>r["success"]))
})
)
return Utils.NoNull(maybeResponses.map((r) => <WikidataResponse>r["success"]))
}
/**
@@ -279,7 +280,7 @@ export default class Wikidata {
}
if (value === undefined) {
console.error("ExtractKey: value is undefined")
return undefined;
return undefined
}
value = value.trim().toLowerCase()
@@ -296,7 +297,7 @@ export default class Wikidata {
for (const identifierPrefix of Wikidata._identifierPrefixes) {
if (value.startsWith(identifierPrefix)) {
const trimmed = value.substring(identifierPrefix.length);
const trimmed = value.substring(identifierPrefix.length)
if (trimmed === "") {
return undefined
}
@@ -304,7 +305,7 @@ export default class Wikidata {
if (isNaN(n)) {
return undefined
}
return value.toUpperCase();
return value.toUpperCase()
}
}
@@ -312,7 +313,7 @@ export default class Wikidata {
return "Q" + value
}
return undefined;
return undefined
}
/**
@@ -326,10 +327,10 @@ export default class Wikidata {
* Wikidata.QIdToNumber(123) // => 123
*/
public static QIdToNumber(q: string | number): number | undefined {
if(q === undefined || q === null){
if (q === undefined || q === null) {
return
}
if(typeof q === "number"){
if (typeof q === "number") {
return q
}
q = q.trim()
@@ -356,17 +357,23 @@ export default class Wikidata {
/**
* Build a SPARQL-query, return the result
*
*
* @param keys: how variables are named. Every key not ending with 'Label' should appear in at least one statement
* @param statements
* @constructor
*/
public static async Sparql<T>(keys: string[], statements: string[]):Promise< (T & Record<string, {type: string, value: string}>) []> {
const query = "SELECT "+keys.map(k => k.startsWith("?") ? k : "?"+k).join(" ")+"\n" +
public static async Sparql<T>(
keys: string[],
statements: string[]
): Promise<(T & Record<string, { type: string; value: string }>)[]> {
const query =
"SELECT " +
keys.map((k) => (k.startsWith("?") ? k : "?" + k)).join(" ") +
"\n" +
"WHERE\n" +
"{\n" +
statements.map(stmt => stmt.endsWith(".") ? stmt : stmt+".").join("\n") +
" SERVICE wikibase:label { bd:serviceParam wikibase:language \"[AUTO_LANGUAGE]\". }\n" +
statements.map((stmt) => (stmt.endsWith(".") ? stmt : stmt + ".")).join("\n") +
' SERVICE wikibase:label { bd:serviceParam wikibase:language "[AUTO_LANGUAGE]". }\n' +
"}"
const url = wds.sparqlQuery(query)
const result = await Utils.downloadJsonCached(url, 24 * 60 * 60 * 1000)
@@ -384,7 +391,7 @@ export default class Wikidata {
return undefined
}
const url = "https://www.wikidata.org/wiki/Special:EntityData/" + id + ".json";
const url = "https://www.wikidata.org/wiki/Special:EntityData/" + id + ".json"
const entities = (await Utils.downloadJsonCached(url, 10000)).entities
const firstKey = <string>Array.from(Object.keys(entities))[0] // Roundabout way to fetch the entity; it might have been a redirect
const response = entities[firstKey]
@@ -396,5 +403,4 @@ export default class Wikidata {
return WikidataResponse.fromJson(response)
}
}
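The diff above only changes formatting (quote style, semicolons, indentation); prettier does not alter behaviour. For readers unfamiliar with this module, a minimal usage sketch of the public API visible in the diff follows. The import path, the concrete QIDs, and the example SPARQL statement are assumptions made for illustration and are not part of this commit:

import Wikidata from "./Logic/Web/Wikidata" // path assumed; adjust to the actual location in the repository

async function demo(): Promise<void> {
    // Search for entities named "Ada" that are instances of Q5 (human);
    // searchAdvanced itself already blacklists disambiguation-style pages (Q17379835)
    const hits = await Wikidata.searchAdvanced("Ada", {
        lang: "en",
        maxCount: 5,
        instanceOf: [5],
    })
    console.log(hits.map((h) => `${h.id}: ${h.label}`))

    // Load a single entity; LoadWikidataEntry caches and wraps the result in a UIEventSource
    const entry = await Wikidata.LoadWikidataEntry("Q42").AsPromise()
    if ("success" in entry) {
        console.log(entry.success.labels.get("en"))
    }

    // Raw SPARQL helper: keys become the SELECT variables, statements form the WHERE clause
    const rows = await Wikidata.Sparql<{ itemLabel: { value: string } }>(
        ["item", "itemLabel"],
        ["?item wdt:P31 wd:Q5"]
    )
    console.log(rows.length)
}

demo().catch(console.error)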