import { Utils } from "../../Utils"
import Wikidata, { WikidataResponse } from "./Wikidata"
import { Store, UIEventSource } from "../UIEventSource"

export interface FullWikipediaDetails {
    articleUrl?: string
    language?: string
    pagename?: string
    fullArticle?: string
    firstParagraph?: string
    restOfArticle?: string
    wikidata?: WikidataResponse
    title?: string
}
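
// Illustrative sketch (not part of the original source): the shape a fully
// resolved FullWikipediaDetails might take. All concrete values are made up.
// const example: FullWikipediaDetails = {
//     articleUrl: "https://nl.wikipedia.org/wiki/Warandeputten",
//     language: "nl",
//     pagename: "Warandeputten",
//     title: "Warandeputten",
//     fullArticle: "<p>…</p><h2>…</h2>",
//     firstParagraph: "<p>…</p>",
//     restOfArticle: "<h2>…</h2>",
// }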
					
						
export default class Wikipedia {
    /**
     * When getting a wikipedia page data result, some elements (e.g. navigation, infoboxes, ...) should be removed.
     * We do this based on the CSS classes. This list contains a blacklist of the classes to remove.
     * @private
     */
    private static readonly classesToRemove = [
        "shortdescription",
        "sidebar",
        "infobox",
        "infobox_v2",
        "noprint",
        "ambox",
        "mw-editsection",
        "mw-selflink",
        "mw-empty-elt",
        "hatnote", // Often a redirect notice
    ]

    private static readonly idsToRemove = ["sjabloon_zie"]

    private static readonly _cache = new Map<string, Promise<string>>()
    private static _fullDetailsCache = new Map<string, Store<FullWikipediaDetails>>()
    public readonly backend: string

    constructor(options?: { language?: "en" | string } | { backend?: string }) {
        this.backend = Wikipedia.getBackendUrl(options ?? {})
    }
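
    // Illustrative examples, derived from getBackendUrl below:
    // new Wikipedia().backend // => "https://en.wikipedia.org"
    // new Wikipedia({ language: "nl" }).backend // => "https://nl.wikipedia.org"
    // new Wikipedia({ backend: "wiki.openstreetmap.org" }).backend // => "https://wiki.openstreetmap.org"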
					
						
    /**
     * Tries to extract the language and article name from the given string
     *
     * Wikipedia.extractLanguageAndName("qsdf") // => undefined
     * Wikipedia.extractLanguageAndName("nl:Warandeputten") // => {language: "nl", pageName: "Warandeputten"}
     */
    public static extractLanguageAndName(
        input: string
    ): { language: string; pageName: string } | undefined {
        const matched = input.match("([^:]+):(.*)")
        if (matched === undefined || matched === null) {
            return undefined
        }
        const [_, language, pageName] = matched
        return {
            language,
            pageName,
        }
    }

    /**
     * Fetches all useful information for the given entity: the article URL, language, page name,
     * the wikidata response (if applicable) and eventually the article contents themselves.
     */
    public static fetchArticleAndWikidata(
        wikidataOrPageId: string,
        preferedLanguage: string
    ): Store<FullWikipediaDetails> {
        const cachekey = preferedLanguage + wikidataOrPageId
        const cached = Wikipedia._fullDetailsCache.get(cachekey)
        if (cached) {
            return cached
        }
        console.log("Constructing store for", cachekey)
        const store = new UIEventSource<FullWikipediaDetails>({}, cachekey)
        Wikipedia._fullDetailsCache.set(cachekey, store)

        // Are we dealing with a wikidata item?
        const wikidataId = Wikidata.ExtractKey(wikidataOrPageId)
        if (!wikidataId) {
            // We are dealing with a wikipedia identifier, e.g. 'NL:articlename', 'https://nl.wikipedia.org/wiki/article', ...
            const extracted = Wikipedia.extractLanguageAndName(wikidataOrPageId)
            if (extracted) {
                const { language, pageName } = extracted
                store.data.articleUrl = new Wikipedia({ language }).getPageUrl(pageName)
                store.data.language = language
                store.data.pagename = pageName
                store.data.title = pageName
            }
        } else {
            // Yup, this is a wikidata item. Let's fetch the wikidata entry first
            store.data.title = wikidataId
            Wikidata.LoadWikidataEntryAsync(wikidataId).then((wikidata) => {
                store.data.wikidata = wikidata
                store.ping()
                // With the wikidata entry, we can search for the appropriate wikipedia page
                const preferredLanguages = [
                    preferedLanguage,
                    "en",
                    Array.from(wikidata.wikisites.keys())[0],
                ]

                for (const language of preferredLanguages) {
                    const pagetitle = wikidata.wikisites.get(language)
                    if (pagetitle) {
                        store.data.articleUrl = new Wikipedia({ language }).getPageUrl(pagetitle)
                        store.data.pagename = pagetitle
                        store.data.language = language
                        store.data.title = pagetitle
                        store.ping()
                        break
                    }
                }
            })
        }

        // Now that the page URL has been set up, we can focus on downloading the actual article.
        // We set up a listener: as soon as the article URL is known, we fetch the actual page.
        // This URL can either be set by the wikidata response or directly, if we are dealing with a wikipedia URL.
        store.addCallbackAndRun((data) => {
            if (data.language === undefined || data.pagename === undefined) {
                return
            }
            const wikipedia = new Wikipedia({ language: data.language })
            wikipedia.GetArticleHtml(data.pagename).then((article) => {
                data.fullArticle = article
                const content = document.createElement("div")
                content.innerHTML = article
                const firstParagraph = content.getElementsByTagName("p").item(0)
                if (firstParagraph !== null) {
                    data.firstParagraph = firstParagraph.innerHTML
                    content.removeChild(firstParagraph)
                }
                data.restOfArticle = content.innerHTML
                store.ping()
            })
            return true // unregister this callback
        })

        return store
    }
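
    // Illustrative usage sketch (not from the original source; "Q42" is an arbitrary wikidata id):
    // Wikipedia.fetchArticleAndWikidata("Q42", "en").addCallbackAndRun((details) => {
    //     if (details.firstParagraph !== undefined) {
    //         console.log(details.title, details.firstParagraph)
    //     }
    // })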
					
						
    private static getBackendUrl(
        options: { language?: "en" | string } | { backend?: "en.wikipedia.org" | string }
    ): string {
        let backend = "en.wikipedia.org"
        if (options["backend"]) {
            backend = options["backend"]
        } else if (options["language"]) {
            backend = `${options["language"] ?? "en"}.wikipedia.org`
        }
        if (!backend.startsWith("http")) {
            backend = "https://" + backend
        }
        return backend
    }

    /**
     * Extracts the actual page name; returns undefined if the given URL belongs to a different wiki instance than this backend
     *
     * new Wikipedia({backend: "https://wiki.openstreetmap.org"}).extractPageName("https://wiki.openstreetmap.org/wiki/NL:Speelbos") // => "NL:Speelbos"
     * new Wikipedia().extractPageName("https://wiki.openstreetmap.org/wiki/NL:Speelbos") // => undefined
     */
    public extractPageName(input: string): string | undefined {
        if (!input.startsWith(this.backend)) {
            return undefined
        }
        input = input.substring(this.backend.length)

        const matched = input.match("/?wiki/(.+)")
        if (matched === undefined || matched === null) {
            return undefined
        }
        const [_, pageName] = matched
        return pageName
    }

    public getDataUrl(pageName: string): string {
        return (
            `${this.backend}/w/api.php?action=parse&format=json&origin=*&prop=text&page=` + pageName
        )
    }
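
    // Illustrative example:
    // new Wikipedia({ language: "nl" }).getDataUrl("Warandeputten")
    // // => "https://nl.wikipedia.org/w/api.php?action=parse&format=json&origin=*&prop=text&page=Warandeputten"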
					
						

    public getPageUrl(pageName: string): string {
        return `${this.backend}/wiki/${pageName}`
    }
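
    // Illustrative example:
    // new Wikipedia({ language: "nl" }).getPageUrl("Warandeputten")
    // // => "https://nl.wikipedia.org/wiki/Warandeputten"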
					
						
    /**
     * Textual search of the specified wiki instance. If searching Wikipedia, we recommend using wikidata.search instead
     * @param searchTerm
     */
    public async search(searchTerm: string): Promise<{ title: string; snippet: string }[]> {
        const url =
            this.backend +
            "/w/api.php?action=query&format=json&list=search&srsearch=" +
            encodeURIComponent(searchTerm)
        return (await Utils.downloadJson(url))["query"]["search"]
    }
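
    // Illustrative usage sketch (the search term is made up):
    // const hits = await new Wikipedia({ backend: "wiki.openstreetmap.org" }).search("bench")
    // hits.forEach((hit) => console.log(hit.title, hit.snippet))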
					
						
    /**
     * Searches via 'index.php' and scrapes the result.
     * This gives better results than via the API
     * @param searchTerm
     */
    public async searchViaIndex(
        searchTerm: string
    ): Promise<{ title: string; snippet: string; url: string }[]> {
        const url = `${this.backend}/w/index.php?search=${encodeURIComponent(searchTerm)}&ns0=1`
        const result = await Utils.downloadAdvanced(url)
        if (result["redirect"]) {
            const targetUrl = result["redirect"]
            // The server redirected us to a page directly: this is an exact match
            return [
                {
                    title: this.extractPageName(targetUrl)?.trim(),
                    url: targetUrl,
                    snippet: "",
                },
            ]
        }
        if (result["error"]) {
            throw "Could not download: " + JSON.stringify(result)
        }
        const el = document.createElement("html")
        el.innerHTML = result["content"].replace(/href="\//g, 'href="' + this.backend + "/")
        const searchResults = el.getElementsByClassName("mw-search-results")
        const individualResults = Array.from(
            searchResults[0]?.getElementsByClassName("mw-search-result") ?? []
        )
        return individualResults.map((result) => {
            const toRemove = Array.from(result.getElementsByClassName("searchalttitle"))
            for (const toRm of toRemove) {
                toRm.parentElement.removeChild(toRm)
            }

            return {
                title: result
                    .getElementsByClassName("mw-search-result-heading")[0]
                    .textContent.trim(),
                url: result.getElementsByTagName("a")[0].href,
                snippet: result.getElementsByClassName("searchresult")[0].textContent,
            }
        })
    }
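
    // Illustrative usage sketch (the search term is made up). An exact title match
    // triggers the redirect branch above and comes back as a single result with an empty snippet:
    // const results = await new Wikipedia({ backend: "wiki.openstreetmap.org" }).searchViaIndex("Speelbos")
    // console.log(results[0]?.title, results[0]?.url)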
					
						
    /**
     * Returns the innerHTML for the given article as a string.
     * Some cleanup is applied to it (see GetArticleUncachedAsync).
     *
     * This method uses a static, local cache, so each article will be retrieved only once over the network
     */
    public GetArticleHtml(pageName: string): Promise<string> {
        const cacheKey = this.backend + "/" + pageName
        if (Wikipedia._cache.has(cacheKey)) {
            return Wikipedia._cache.get(cacheKey)
        }
        // We cache the promise itself, so concurrent requests for the same article share a single download
        const promise = this.GetArticleUncachedAsync(pageName)
        Wikipedia._cache.set(cacheKey, promise)
        return promise
    }
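
    // Illustrative usage sketch: repeated calls for the same page hit the static cache.
    // const wp = new Wikipedia({ language: "nl" })
    // const html = await wp.GetArticleHtml("Warandeputten") // downloads and cleans the article
    // const again = await wp.GetArticleHtml("Warandeputten") // served from the cache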
					
						
    private async GetArticleUncachedAsync(pageName: string): Promise<string> {
        const response = await Utils.downloadJson(this.getDataUrl(pageName))
        if (response?.parse?.text === undefined) {
            return undefined
        }
        const html = response["parse"]["text"]["*"]
        if (html === undefined) {
            return undefined
        }
        const div = document.createElement("div")
        div.innerHTML = html
        const content = Array.from(div.children)[0]

        for (const forbiddenClass of Wikipedia.classesToRemove) {
            const toRemove = content.getElementsByClassName(forbiddenClass)
            for (const toRemoveElement of Array.from(toRemove)) {
                toRemoveElement.parentElement?.removeChild(toRemoveElement)
            }
        }

        for (const forbiddenId of Wikipedia.idsToRemove) {
            const toRemove = content.querySelector("#" + forbiddenId)
            toRemove?.parentElement?.removeChild(toRemove)
        }

        const links = Array.from(content.getElementsByTagName("a"))

        // Rewrite relative links to absolute links and open them in a new tab
        links
            .filter((link) => link.getAttribute("href")?.startsWith("/") ?? false)
            .forEach((link) => {
                link.target = "_blank"
                // Note: link.getAttribute("href") yields the raw, textual value; link.href would be the resolved version, which already contains the host for relative paths
                link.href = `${this.backend}${link.getAttribute("href")}`
            })

        return content.innerHTML
    }
}