forked from MapComplete/MapComplete
chore: automated housekeeping...
parent d7509c8d6f
commit 66c27cbad4
127 changed files with 6557 additions and 2698 deletions
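The hunks below are formatting-only changes. As a rough illustration of the conventions they apply, inferred from the removed/added line pairs in this diff rather than from any stated formatter configuration, here is a small self-contained TypeScript sketch (the names and data are made up, not code from the repository):

// Conventions visible in the hunks below:
//   item => ...         becomes   (item) => ...     (arrow parameters always get parentheses)
//   return ({ ... })    becomes   return { ... }    (object returns lose the wrapping parentheses)
//   "{\"error\""        becomes   '{"error"'        (single quotes avoid escaped double quotes)
//   foo(a, b,)          becomes   foo(a, b)         (no trailing comma in call arguments)
interface Item {
    displayName: string
}
const items: Item[] = [{ displayName: "Example shop" }]
const options = items.map((item) => {
    return { question: item.displayName }
})
console.log(options)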
@@ -47,7 +47,7 @@ class DownloadNsiLogos extends Script {
 await ScriptUtils.DownloadFileTo(logos.facebook, path)
 // Validate
 const content = readFileSync(path, "utf8")
-if (content.startsWith("{\"error\"")) {
+if (content.startsWith('{"error"')) {
 unlinkSync(path)
 console.error("Attempted to fetch", logos.facebook, " but this gave an error")
 } else {
@@ -90,7 +90,6 @@ class DownloadNsiLogos extends Script {
 return false
 }

-
 async downloadFor(type: string): Promise<void> {
 const nsi = await NameSuggestionIndex.getNsiIndex()
 const items = nsi.allPossible(type)
@@ -109,7 +108,7 @@ class DownloadNsiLogos extends Script {
 downloadCount++
 }
 return downloaded
-}),
+})
 )
 for (let j = 0; j < results.length; j++) {
 let didDownload = results[j]
@@ -128,14 +127,14 @@ class DownloadNsiLogos extends Script {
 private async generateRendering(type: string) {
 const nsi = await NameSuggestionIndex.getNsiIndex()
 const items = nsi.allPossible(type)
-const filterOptions: FilterConfigOptionJson[] = items.map(item => {
-return ({
+const filterOptions: FilterConfigOptionJson[] = items.map((item) => {
+return {
 question: item.displayName,
 icon: nsi.getIconUrl(item, type),
 osmTags: NameSuggestionIndex.asFilterTags(item),
-})
+}
 })
-const mappings = items.map(item => ({
+const mappings = items.map((item) => ({
 if: NameSuggestionIndex.asFilterTags(item),
 then: nsi.getIconUrl(item, type),
 }))
@@ -145,7 +144,6 @@ class DownloadNsiLogos extends Script {
 const condition = TagUtils.Tag(mappings[i].if)
 if (i % 100 === 0) {
 console.log("Checking for shadow-mappings...", i, "/", mappings.length)
-
 }
 const shadowsSomething = mappings.some((m, j) => {
 if (i === j) {
@@ -173,9 +171,7 @@ class DownloadNsiLogos extends Script {
 },
 source: "special:library",
 pointRendering: null,
-tagRenderings: [
-iconsTr,
-],
+tagRenderings: [iconsTr],
 filter: [
 <any>{
 "#": "ignore-possible-duplicate",
@@ -203,8 +199,6 @@ class DownloadNsiLogos extends Script {
 await this.generateRendering(type)
 }
 }
-
-
 }

 new DownloadNsiLogos().run()
@@ -497,8 +497,6 @@ class LayerOverviewUtils extends Script {
 priviliged.delete(key)
 })

-
-
 // These two get a free pass
 priviliged.delete("summary")
 priviliged.delete("last_click")
@@ -529,7 +527,9 @@ class LayerOverviewUtils extends Script {
 writeFileSync(
 "./src/assets/generated/known_layers.json",
 JSON.stringify({
-layers: Array.from(sharedLayers.values()).filter((l) => !(l["#no-index"] === "yes")),
+layers: Array.from(sharedLayers.values()).filter(
+(l) => !(l["#no-index"] === "yes")
+),
 })
 )
 }
@@ -839,8 +839,8 @@ class LayerOverviewUtils extends Script {
 const themeInfo = themeFiles[i]
 const themePath = themeInfo.path
 let themeFile = themeInfo.parsed
-if(!themeFile){
-throw "Got an empty file for"+themeInfo.path
+if (!themeFile) {
+throw "Got an empty file for" + themeInfo.path
 }
 if (whitelist.size > 0 && !whitelist.has(themeFile.id)) {
 continue
@@ -9,10 +9,7 @@ import ScriptUtils from "./ScriptUtils"
 import { Utils } from "../src/Utils"
 import SpecialVisualizations from "../src/UI/SpecialVisualizations"
 import Constants from "../src/Models/Constants"
-import {
-AvailableRasterLayers,
-RasterLayerPolygon,
-} from "../src/Models/RasterLayers"
+import { AvailableRasterLayers, RasterLayerPolygon } from "../src/Models/RasterLayers"
 import { ImmutableStore } from "../src/Logic/UIEventSource"
 import * as eli from "../public/assets/data/editor-layer-index.json"
 import * as layers_global from "../src/assets/global-raster-layers.json"
@@ -640,9 +637,9 @@ class GenerateLayouts extends Script {
 if (theme !== undefined) {
 console.warn("Only generating layout " + theme)
 }
-const paths = ScriptUtils.readDirRecSync("./public/assets/generated/themes/",1)
+const paths = ScriptUtils.readDirRecSync("./public/assets/generated/themes/", 1)
 for (const i in paths) {
-const layoutConfigJson = <ThemeConfigJson> JSON.parse(readFileSync(paths[i], "utf8"))
+const layoutConfigJson = <ThemeConfigJson>JSON.parse(readFileSync(paths[i], "utf8"))
 if (theme !== undefined && layoutConfigJson.id !== theme) {
 continue
 }
@@ -13,7 +13,7 @@ import TagInfo from "../src/Logic/Web/TagInfo"
 class Utilities {
 static mapValues<X extends string | number, T, TOut>(
 record: Record<X, T>,
-f: (t: T) => TOut,
+f: (t: T) => TOut
 ): Record<X, TOut> {
 const newR = <Record<X, TOut>>{}
 for (const x in record) {
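The hunk above only changes the signature of Utilities.mapValues, dropping the trailing comma after the callback parameter. For context, a self-contained sketch of what such a map-values helper typically looks like and how it is used; the loop body and the usage are assumptions, since the diff does not show them:

class Utilities {
    static mapValues<X extends string | number, T, TOut>(
        record: Record<X, T>,
        f: (t: T) => TOut
    ): Record<X, TOut> {
        const newR = <Record<X, TOut>>{}
        for (const x in record) {
            // Assumed body: apply f to every value while keeping the keys.
            newR[x] = f(record[x])
        }
        return newR
    }
}

// Hypothetical usage:
const scaled = Utilities.mapValues({ a: 1, b: 2 }, (n) => n * 10)
console.log(scaled) // { a: 10, b: 20 }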
@@ -77,10 +77,10 @@ class GenerateStats extends Script {
 const count = tagData.data.find((item) => item.type === "all").count
 tagTotal.get(key).set(value, count)
 console.log(key + "=" + value, "-->", count)
-}),
+})
 )
 }
-}),
+})
 )
 writeFileSync(
 "./src/assets/key_totals.json",
@@ -92,8 +92,8 @@ class GenerateStats extends Script {
 tags: Utils.MapToObj(tagTotal, (v) => Utils.MapToObj(v, (t) => t)),
 },
 null,
-" ",
-),
+" "
+)
 )
 }
@@ -147,21 +147,16 @@ class GenerateStats extends Script {
 Object.keys(allBrands).length,
 " previously loaded " + type,
 "from",
-path,
+path
 )
 }
 const nsi = await NameSuggestionIndex.getNsiIndex()
 const allBrandNames: string[] = Utils.Dedup(
-nsi.allPossible(<any>type).map((item) => item.tags[type]),
+nsi.allPossible(<any>type).map((item) => item.tags[type])
 )
 const batchSize = 50
 for (let i = 0; i < allBrandNames.length; i += batchSize) {
-console.warn(
-"Downloading",
-batchSize,
-"items: ",
-i + "/" + allBrandNames.length,
-)
+console.warn("Downloading", batchSize, "items: ", i + "/" + allBrandNames.length)
 let downloaded = 0
 await Promise.all(
 Utils.TimesT(batchSize, async (j) => {
@@ -171,10 +166,14 @@ class GenerateStats extends Script {
 }
 const writeInto = allBrands[brand]
 const dloaded = await TagInfo.getGlobalDistributionsFor(
-writeInto, (stats) => stats.data.find((t) => t.type === "all").count,
-type, brand)
+writeInto,
+(stats) => stats.data.find((t) => t.type === "all").count,
+type,
+brand
+)
 downloaded += dloaded
-}))
+})
+)
 console.log("Downloaded ", downloaded, " values this batch")
 writeFileSync(path, JSON.stringify(allBrands), "utf8")
 console.log("Checkpointed", path)
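The two hunks above reformat a batched download loop: brand names are fetched in batches of 50, each batch is awaited with Promise.all, and the accumulated results are written to disk as a checkpoint after every batch. A small self-contained sketch of that pattern, with placeholder names and a dummy per-item step instead of the script's real TagInfo call:

// Hypothetical batched-download loop mirroring the structure reformatted above.
async function downloadInBatches(names: string[], batchSize = 50): Promise<void> {
    const results: Record<string, number> = {}
    for (let i = 0; i < names.length; i += batchSize) {
        const batch = names.slice(i, i + batchSize)
        console.warn("Downloading", batch.length, "items: ", i + "/" + names.length)
        await Promise.all(
            batch.map(async (name) => {
                // Placeholder for the real per-item download (e.g. a TagInfo request).
                results[name] = name.length
            })
        )
        // Checkpoint after every batch so an interrupted run loses at most one batch.
        console.log("Checkpointed", Object.keys(results).length, "entries")
    }
}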
@@ -185,7 +184,7 @@ class GenerateStats extends Script {

 constructor() {
 super(
-"Downloads stats on osmSource-tags and keys from tagInfo. There are two usecases with separate outputs:\n 1. To optimize the query before sending it to overpass (generates ./src/assets/key_totals.json) \n 2. To amend the Name Suggestion Index ",
+"Downloads stats on osmSource-tags and keys from tagInfo. There are two usecases with separate outputs:\n 1. To optimize the query before sending it to overpass (generates ./src/assets/key_totals.json) \n 2. To amend the Name Suggestion Index "
 )
 }