forked from MapComplete/MapComplete
Merge branch 'develop' into feature/traffic-signs
commit d408858976
401 changed files with 11749 additions and 978064 deletions

322  scripts/GenerateSeries.ts  Normal file

@@ -0,0 +1,322 @@
import fs, { existsSync, readdirSync, readFileSync, unlinkSync, writeFileSync } from "fs"
import ScriptUtils from "./ScriptUtils"
import { Utils } from "../Utils"
import Script from "./Script"
import TiledFeatureSource from "../Logic/FeatureSource/TiledFeatureSource/TiledFeatureSource"
import StaticFeatureSource from "../Logic/FeatureSource/Sources/StaticFeatureSource"
import { GeoOperations } from "../Logic/GeoOperations"
import { Feature, Polygon } from "geojson"

class StatsDownloader {
    private readonly urlTemplate =
        "https://osmcha.org/api/v1/changesets/?date__gte={start_date}&date__lte={end_date}&page={page}&comment=%23mapcomplete&page_size=100"

    private readonly _targetDirectory: string

    constructor(targetDirectory = ".") {
        this._targetDirectory = targetDirectory
    }

    public async DownloadStats(startYear = 2020, startMonth = 5, startDay = 1): Promise<void> {
        const today = new Date()
        const currentYear = today.getFullYear()
        const currentMonth = today.getMonth() + 1
        for (let year = startYear; year <= currentYear; year++) {
            for (let month = 1; month <= 12; month++) {
                if (year === startYear && month < startMonth) {
                    continue
                }

                if (year === currentYear && month > currentMonth) {
                    break
                }

                const pathM = `${this._targetDirectory}/stats.${year}-${month}.json`
                if (existsSync(pathM)) {
                    continue
                }

                const features = []
                let monthIsFinished = true
                const writtenFiles = []
                for (let day = startDay; day <= 31; day++) {
                    if (year === currentYear && month === currentMonth && day === today.getDate()) {
                        monthIsFinished = false
                        break
                    }
                    {
                        const date = new Date(year, month - 1, day)
                        if (date.getMonth() != month - 1) {
                            // We did roll over
                            continue
                        }
                    }
                    const path = `${this._targetDirectory}/stats.${year}-${month}-${
                        (day < 10 ? "0" : "") + day
                    }.day.json`
                    writtenFiles.push(path)
                    if (existsSync(path)) {
                        // day-stats are generally a list already, but in some ad-hoc cases might be a geojson-collection too
                        let existing = JSON.parse(readFileSync(path, { encoding: "utf-8" }))
                        existing = existing?.features ?? existing
                        features.push(...existing)
                        console.log(
                            "Loaded ",
                            path,
                            "from disk, which has",
                            features.length,
                            "features now"
                        )
                        continue
                    }
                    let dayFeatures: any[] = undefined
                    try {
                        dayFeatures = await this.DownloadStatsForDay(year, month, day)
                    } catch (e) {
                        console.error(e)
                        console.error(
                            "Could not download " + year + "-" + month + "-" + day + "... Trying again"
                        )
                        dayFeatures = await this.DownloadStatsForDay(year, month, day)
                    }
                    writeFileSync(path, JSON.stringify(dayFeatures))
                    features.push(...dayFeatures)
                }
                if (monthIsFinished) {
                    writeFileSync(pathM, JSON.stringify({ features }))
                    for (const writtenFile of writtenFiles) {
                        unlinkSync(writtenFile)
                    }
                }
            }
            startDay = 1
        }
    }

    public async DownloadStatsForDay(
        year: number,
        month: number,
        day: number
    ): Promise<ChangeSetData[]> {
        let page = 1
        let allFeatures: ChangeSetData[] = []
        let endDay = new Date(year, month - 1 /* Zero-indexed: 0 = january*/, day + 1)
        let endDate = `${endDay.getFullYear()}-${Utils.TwoDigits(
            endDay.getMonth() + 1
        )}-${Utils.TwoDigits(endDay.getDate())}`
        let url = this.urlTemplate
            .replace(
                "{start_date}",
                year + "-" + Utils.TwoDigits(month) + "-" + Utils.TwoDigits(day)
            )
            .replace("{end_date}", endDate)
            .replace("{page}", "" + page)

        let headers = {
            "User-Agent":
                "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:86.0) Gecko/20100101 Firefox/86.0",
            "Accept-Language": "en-US,en;q=0.5",
            Referer:
                "https://osmcha.org/?filters=%7B%22date__gte%22%3A%5B%7B%22label%22%3A%222020-07-05%22%2C%22value%22%3A%222020-07-05%22%7D%5D%2C%22editor%22%3A%5B%7B%22label%22%3A%22mapcomplete%22%2C%22value%22%3A%22mapcomplete%22%7D%5D%7D",
            "Content-Type": "application/json",
            Authorization: "Token 6e422e2afedb79ef66573982012000281f03dc91",
            DNT: "1",
            Connection: "keep-alive",
            TE: "Trailers",
            Pragma: "no-cache",
            "Cache-Control": "no-cache",
        }

        while (url) {
            ScriptUtils.erasableLog(
                `Downloading stats for ${year}-${month}-${day}, page ${page} ${url}`
            )
            const result = await Utils.downloadJson(url, headers)
            page++
            if (result.features === undefined) {
                console.log("ERROR", result)
                return
            }
            allFeatures.push(...result.features)
            url = result.next
        }
        allFeatures = Utils.NoNull(allFeatures)
        allFeatures.forEach((f) => {
            f.properties = { ...f.properties, ...f.properties.metadata }
            delete f.properties.metadata
            f.properties["id"] = f.id
        })
        return allFeatures
    }
}

interface ChangeSetData extends Feature<Polygon> {
    id: number
    type: "Feature"
    geometry: {
        type: "Polygon"
        coordinates: [number, number][][]
    }
    properties: {
        check_user: null
        reasons: []
        tags: []
        features: []
        user: string
        uid: string
        editor: string
        comment: string
        comments_count: number
        source: string
        imagery_used: string
        date: string
        reviewed_features: []
        create: number
        modify: number
        delete: number
        area: number
        is_suspect: boolean
        harmful: any
        checked: boolean
        check_date: any
        metadata: {
            host: string
            theme: string
            imagery: string
            language: string
        }
    }
}

class GenerateSeries extends Script {
    constructor() {
        super("Downloads metadata about changesets made by MapComplete from OsmCha")
    }

    async main(args: string[]): Promise<void> {
        const targetDir = args[0] ?? "../MapComplete-data"

        await this.downloadStatistics(targetDir + "/changeset-metadata")
        await this.generateCenterPoints(
            targetDir + "/changeset-metadata",
            targetDir + "/mapcomplete-changes/",
            {
                zoomlevel: 8,
            }
        )
    }

    private async downloadStatistics(targetDir: string) {
        let year = 2020
        let month = 5
        let day = 1
        if (!isNaN(Number(process.argv[2]))) {
            year = Number(process.argv[2])
        }
        if (!isNaN(Number(process.argv[3]))) {
            month = Number(process.argv[3])
        }

        if (!isNaN(Number(process.argv[4]))) {
            day = Number(process.argv[4])
        }

        do {
            try {
                await new StatsDownloader(targetDir).DownloadStats(year, month, day)
                break
            } catch (e) {
                console.log(e)
            }
        } while (true)

        const allFiles = readdirSync(targetDir).filter((p) => p.endsWith(".json"))
        writeFileSync(targetDir + "/file-overview.json", JSON.stringify(allFiles))
    }

    private generateCenterPoints(
        sourceDir: string,
        targetDir: string,
        options: {
            zoomlevel: number
        }
    ) {
        const allPaths = readdirSync(sourceDir).filter(
            (p) => p.startsWith("stats.") && p.endsWith(".json")
        )
        let allFeatures: ChangeSetData[] = [].concat(
            ...allPaths.map(
                (path) => JSON.parse(readFileSync(sourceDir + "/" + path, "utf-8")).features
            )
        )
        // Keep only changesets that were made by MapComplete, have geometry and contain actual changes
        allFeatures = allFeatures.filter(
            (f) =>
                f?.properties !== undefined &&
                (f.properties.editor === null ||
                    f.properties.editor.toLowerCase().startsWith("mapcomplete"))
        )
        allFeatures = allFeatures.filter(
            (f) => f.geometry !== null && f.properties.metadata?.theme !== "EMPTY CS"
        )
        const centerpoints = allFeatures.map((f) => GeoOperations.centerpoint(f))
        console.log("Found", centerpoints.length, "changesets in total")
        const path = `${targetDir}/all_centerpoints.geojson`
        /*fs.writeFileSync(
            path,
            JSON.stringify(
                {
                    type: "FeatureCollection",
                    features: centerpoints,
                },
                null,
                " "
            )
        )//*/
        TiledFeatureSource.createHierarchy(StaticFeatureSource.fromGeojson(centerpoints), {
            minZoomLevel: options.zoomlevel,
            maxZoomLevel: options.zoomlevel,
            maxFeatureCount: Number.MAX_VALUE,
            registerTile: (tile) => {
                const path = `${targetDir}/tile_${tile.z}_${tile.x}_${tile.y}.geojson`
                const features = tile.features.data.map((ff) => ff.feature)
                features.forEach((f) => {
                    delete f.bbox
                })
                fs.writeFileSync(
                    path,
                    JSON.stringify(
                        {
                            type: "FeatureCollection",
                            features: features,
                        },
                        null,
                        " "
                    )
                )
                ScriptUtils.erasableLog(
                    "Written ",
                    path,
                    "which has ",
                    tile.features.data.length,
                    "features"
                )
            },
        })
    }
}

new GenerateSeries().run()
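The flattening in DownloadStatsForDay is what makes the per-changeset metadata directly queryable later on. A small sketch of that step with a made-up changeset (all values illustrative):

    // Hypothetical changeset, shaped like the OsmCha response handled above
    const f = {
        id: 123456789,
        properties: {
            editor: "MapComplete 0.22.0",
            metadata: { host: "mapcomplete.osm.be", theme: "bookcases", imagery: "osm", language: "en" },
        },
    }
    f.properties = { ...f.properties, ...f.properties.metadata }
    delete f.properties.metadata
    f.properties["id"] = f.id
    // f.properties is now flat: { editor, host, theme, imagery, language, id }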

18  scripts/Script.ts  Normal file

@@ -0,0 +1,18 @@
import ScriptUtils from "./ScriptUtils"

export default abstract class Script {
    private readonly _docs: string

    constructor(docs: string) {
        this._docs = docs
    }

    abstract main(args: string[]): Promise<void>

    public run(): void {
        ScriptUtils.fixUtils()
        const args = [...process.argv]
        args.splice(0, 2)
        this.main(args).then((_) => console.log("All done"))
    }
}
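All new scripts in this commit (GenerateSeries, GenerateImageAnalysis, GenerateReviewsAnalysis, Slice) follow the same pattern: subclass Script, implement main, instantiate and run(). A minimal sketch of such a script; the class name and output are purely illustrative:

    import Script from "./Script"

    class EchoArgs extends Script {
        constructor() {
            super("Prints the command-line arguments it was given")
        }

        async main(args: string[]): Promise<void> {
            // run() already stripped the node binary and the script path from process.argv
            console.log("Got", args.length, "arguments:", args.join(" "))
        }
    }

    new EchoArgs().run()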

scripts/ScriptUtils.ts

@@ -11,8 +11,15 @@ export default class ScriptUtils {
         Utils.externalDownloadFunction = ScriptUtils.Download
     }

+    /**
+     * Returns all files in a directory, recursively reads subdirectories.
+     * The returned paths include the path given and subdirectories.
+     *
+     * @param path
+     * @param maxDepth
+     */
     public static readDirRecSync(path, maxDepth = 999): string[] {
-        const result = []
+        const result: string[] = []
         if (maxDepth <= 0) {
             return []
         }
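A usage sketch for the newly documented helper; the directory name is illustrative (the same filter idiom appears elsewhere in this commit):

    // Recursively lists e.g. "./assets/layers/bench", "./assets/layers/bench/bench.json", ...
    const paths: string[] = ScriptUtils.readDirRecSync("./assets/layers")
    const jsonFiles = paths.filter((p) => p.endsWith(".json"))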
@@ -46,13 +53,13 @@
         process.stdout.write("\r " + text.join(" ") + "                \r")
     }

-    public static sleep(ms) {
+    public static sleep(ms: number, text?: string) {
         if (ms <= 0) {
             process.stdout.write("\r                                       \r")
             return
         }
         return new Promise((resolve) => {
-            process.stdout.write("\r Sleeping for " + ms / 1000 + "s \r")
+            process.stdout.write("\r" + (text ?? "") + " Sleeping for " + ms / 1000 + "s \r")
             setTimeout(resolve, 1000)
-        }).then(() => ScriptUtils.sleep(ms - 1000))
+        }).then(() => ScriptUtils.sleep(ms - 1000, text))
     }
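With the new signature a caller can label the countdown. A usage sketch, assuming it is awaited inside another script:

    // Rewrites the same terminal line each second:
    // "Rate limited Sleeping for 5s", "... 4s", and so on
    await ScriptUtils.sleep(5000, "Rate limited")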
@@ -71,7 +78,7 @@
             .filter((path) => path.indexOf("license_info.json") < 0)
             .map((path) => {
                 try {
-                    const contents = readFileSync(path, "UTF8")
+                    const contents = readFileSync(path, { encoding: "utf8" })
                     if (contents === "") {
                         throw "The file " + path + " is empty, did you properly save?"
                     }

@@ -94,7 +101,7 @@
     public static getThemeFiles(): { parsed: LayoutConfigJson; path: string }[] {
         return this.getThemePaths().map((path) => {
             try {
-                const contents = readFileSync(path, "UTF8")
+                const contents = readFileSync(path, { encoding: "utf8" })
                 if (contents === "") {
                     throw "The file " + path + " is empty, did you properly save?"
                 }

@@ -118,17 +125,21 @@
         if (!existsSync(path)) {
             throw "File not found: " + path
         }
-        const root = await xml2js.parseStringPromise(readFileSync(path, "UTF8"))
+        const root = await xml2js.parseStringPromise(readFileSync(path, { encoding: "utf8" }))
         return root.svg
     }

     public static ReadSvgSync(path: string, callback: (svg: any) => void): any {
-        xml2js.parseString(readFileSync(path, "UTF8"), { async: false }, (err, root) => {
-            if (err) {
-                throw err
-            }
-            callback(root["svg"])
-        })
+        xml2js.parseString(
+            readFileSync(path, { encoding: "utf8" }),
+            { async: false },
+            (err, root) => {
+                if (err) {
+                    throw err
+                }
+                callback(root["svg"])
+            }
+        )
     }

     private static async DownloadJSON(url: string, headers?: any): Promise<any> {

@@ -136,7 +147,7 @@
         return JSON.parse(data.content)
     }

-    private static Download(url, headers?: any): Promise<{ content: string }> {
+    private static Download(url: string, headers?: any): Promise<{ content: string }> {
         return new Promise((resolve, reject) => {
             try {
                 headers = headers ?? {}

@@ -1,18 +0,0 @@
#! /usr/bin/env bash

# Automerge translations automatically fetches the translations from weblate

git fetch weblate-github
git merge --no-commit weblate-github/weblate-mapcomplete-layers weblate-github/weblate-mapcomplete-layer-translations weblate-github/weblate-mapcomplete-core

npm run generate:translations
if [ "$?" = "0" ]; then
    # Translation generation went fine - commit
    git add langs/
    git add assets/
    git commit -m "Merge weblate translations and regenerate translations"
    git push
else
    echo "Generation of translations failed!"
    git merge --abort
fi

@@ -54,9 +54,9 @@ function main(args: string[]): void {
         )
         return
     }
-    const reference = JSON.parse(readFileSync("./langs/en.json", "UTF8"))
+    const reference = JSON.parse(readFileSync("./langs/en.json", { encoding: "utf8" }))
     const path = `./langs/${l}.json`
-    const file = JSON.parse(readFileSync(path, "UTF8"))
+    const file = JSON.parse(readFileSync(path, { encoding: "utf8" }))
     fixSection(file[sectionName], reference[sectionName], l)
     writeFileSync(path, JSON.stringify(file, null, " ") + "\n")
 }

@@ -22,6 +22,24 @@ if [ $? -ne 0 ]; then
     exit 1
 fi

+
+
+SRC_MAPS=""
+BRANCH=`git rev-parse --abbrev-ref HEAD`
+echo "The branch name is $BRANCH"
+if [ $BRANCH = "develop" ]
+then
+    SRC_MAPS="--sourcemap"
+    echo "Source maps are enabled"
+fi
+
+ASSET_URL="mc/$BRANCH"
+export ASSET_URL
+echo "$ASSET_URL"
+export NODE_OPTIONS=--max-old-space-size=6500
+vite build $SRC_MAPS
+
+
 # Copy the layer files, as these might contain assets (e.g. svgs)
 cp -r assets/layers/ dist/assets/layers/
 cp -r assets/themes/ dist/assets/themes/

@@ -30,36 +48,3 @@ cp -r assets/templates/ dist/assets/templates/
 cp -r assets/tagRenderings/ dist/assets/tagRenderings/
 cp assets/*.png dist/assets/
 cp assets/*.svg dist/assets/
-
-
-SRC_MAPS="--no-source-maps"
-BRANCH=`git rev-parse --abbrev-ref HEAD`
-echo "The branch name is $BRANCH"
-if [ $BRANCH = "develop" ]
-then
-    SRC_MAPS=""
-    echo "Source maps are enabled"
-fi
-
-echo -e "\n\n Building non-theme pages"
-echo -e " ==========================\n\n"
-parcel build --public-url "./" $SRC_MAPS "index.html" "404.html" "professional.html" "automaton.html" "import_helper.html" "import_viewer.html" "land.html" "customGenerator.html" "theme.html" vendor
-if [ $? -ne 0 ]; then
-    echo "ERROR - stopping the build"
-    exit 1
-fi
-echo -e "\n\n Building theme pages"
-echo -e " ======================\n\n"
-
-for file in index_*.ts
-do
-    theme=${file:6:-3}
-    echo -e "\n\n $theme"
-    echo -e " ------------ \n\n"
-    # Builds the necessary files for just one theme, e.g. 'bookcases.html' + 'index_bookcases.ts' + supporting file
-    parcel build --public-url './' $SRC_MAPS "$theme.html"
-    if [ $? -ne 0 ]; then
-        echo "ERROR - stopping the build"
-        exit 1
-    fi
-done

@@ -51,7 +51,7 @@ function rewrite(obj, key) {

 const stuff = fs.readFileSync(
     "/home/pietervdvn/Documents/Freelance/ToerismeVlaanderen 2021-09/TeImporteren/allchannels-bike_rental.json",
-    "UTF8"
+    { encoding: "utf8" }
 )
 const data: any[] = JSON.parse(stuff)
@@ -138,7 +138,7 @@ async function main(wipeCache = false) {
         console.log("Reusing the cached file")
     }

-    const data = JSON.parse(readFileSync(cacheFile, "UTF8"))
+    const data = JSON.parse(readFileSync(cacheFile, { encoding: "utf8" }))
     const perId = WikidataUtils.extractLanguageData(data, WikidataUtils.languageRemapping)
     const nativeList = getNativeList(perId)
     writeFileSync("./assets/language_native.json", JSON.stringify(nativeList, null, " "))

@@ -15,7 +15,7 @@ function main(args) {
     const spec = args[1]
     const output = args[2]

-    const data = JSON.parse(fs.readFileSync(path, "UTF8"))
+    const data = JSON.parse(fs.readFileSync(path, { encoding: "utf8" }))
     let filter: TagsFilter
     try {
         filter = TagUtils.Tag(JSON.parse(spec))

@@ -63,7 +63,7 @@ function main() {
     const path = args[0]
     const iconClass = args[1] ?? "small"
     const targetFile = args[2] ?? path + ".autoconverted.json"
-    const parsed = JSON.parse(readFileSync(path, "UTF8"))
+    const parsed = JSON.parse(readFileSync(path, { encoding: "utf8" }))
     const converted = new ConvertImagesToIcon(iconClass).convertStrict(
         parsed,
         "While running the fixImagesInTagRenderings-script"
@@ -99,7 +99,7 @@ function WalkScheme<T>(

 function extractMeta(typename: string, path: string) {
     const themeSchema = JSON.parse(
-        readFileSync("./Docs/Schemas/" + typename + ".schema.json", "UTF-8")
+        readFileSync("./Docs/Schemas/" + typename + ".schema.json", { encoding: "utf8" })
     )
     const withTypes = WalkScheme((schemePart) => {
         if (schemePart.description === undefined) {

@@ -126,7 +126,7 @@ function main() {
     for (const path of allSchemas) {
         const dir = path.substring(0, path.lastIndexOf("/"))
         const name = path.substring(path.lastIndexOf("/"), path.length - "JSC.ts".length)
-        let content = readFileSync(path, "UTF-8")
+        let content = readFileSync(path, { encoding: "utf8" })
         content = content.substring("export default ".length)
         let parsed = JSON.parse(content)
         parsed["additionalProperties"] = false

@@ -137,7 +137,7 @@ function main() {
             def["additionalProperties"] = false
         }
     }
-    writeFileSync(dir + "/" + name + ".schema.json", JSON.stringify(parsed, null, " "), "UTF8")
+    writeFileSync(dir + "/" + name + ".schema.json", JSON.stringify(parsed, null, " "), { encoding: "utf8" })
 }

 extractMeta("LayoutConfigJson", "layoutconfigmeta")
@@ -212,7 +212,7 @@ function loadAllTiles(
     }

     // We read the raw OSM-file and convert it to a geojson
-    const rawOsm = JSON.parse(readFileSync(filename, "UTF8"))
+    const rawOsm = JSON.parse(readFileSync(filename, { encoding: "utf8" }))

     // Create and save the geojson file - which is the main chunk of the data
     const geojson = OsmToGeoJson.default(rawOsm)
@@ -79,11 +79,9 @@ function generateWikipage() {
         return ""
     }

-    const languagesInDescr = []
-    for (const shortDescriptionKey in layout.shortDescription) {
-        languagesInDescr.push(shortDescriptionKey)
-    }
-
+    const languagesInDescr = Array.from(Object.keys(layout.shortDescription)).filter(
+        (k) => k !== "_context"
+    )
     const languages = languagesInDescr.map((ln) => `{{#language:${ln}|en}}`).join(", ")
     let auth = "Yes"
     return `{{service_item

@@ -122,6 +120,7 @@ function generateWikipage() {
 }

+console.log("Starting documentation generation...")
 ScriptUtils.fixUtils()
 generateWikipage()

 AllKnownLayouts.GenOverviewsForSingleLayer((layer, element, inlineSource) => {
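The rewritten expression also drops the "_context" key, which the translation tooling presumably injects into shortDescription as bookkeeping and which must not be counted as a language. A sketch with an illustrative object:

    const shortDescription = { en: "A map of benches", nl: "Een kaart met zitbanken", _context: "themes:benches.shortDescription" }
    const languagesInDescr = Array.from(Object.keys(shortDescription)).filter((k) => k !== "_context")
    // languagesInDescr === ["en", "nl"]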

238  scripts/generateImageAnalysis.ts  Normal file

@@ -0,0 +1,238 @@
import Script from "./Script"
import { Overpass } from "../Logic/Osm/Overpass"
import { RegexTag } from "../Logic/Tags/RegexTag"
import { ImmutableStore } from "../Logic/UIEventSource"
import { BBox } from "../Logic/BBox"
import * as fs from "fs"
import { Feature } from "geojson"
import ScriptUtils from "./ScriptUtils"
import { Imgur } from "../Logic/ImageProviders/Imgur"
import { LicenseInfo } from "../Logic/ImageProviders/LicenseInfo"
import { Utils } from "../Utils"

export default class GenerateImageAnalysis extends Script {
    constructor() {
        super(
            "Downloads (from overpass) all tags which have an imgur-image; then analyses the licenses"
        )
    }

    async fetchImages(key: string, datapath: string): Promise<void> {
        const targetPath = `${datapath}/features_with_${key.replace(/[:\/]/, "_")}.geojson`
        if (fs.existsSync(targetPath)) {
            console.log("Skipping", key)
            return
        }
        const tag = new RegexTag(key, /^https:\/\/i.imgur.com\/.*$/i)
        const overpass = new Overpass(
            tag,
            [],
            "https://overpass.kumi.systems/api/interpreter",
            new ImmutableStore(180),
            undefined,
            false
        )
        console.log("Starting query...")
        const data = await overpass.queryGeoJson(BBox.global)
        console.log("Got data: ", data[0].features.length)
        fs.writeFileSync(targetPath, JSON.stringify(data[0]), "utf8")
        console.log("Written", targetPath)
    }

    async downloadData(datapath: string): Promise<void> {
        if (!fs.existsSync(datapath)) {
            fs.mkdirSync(datapath)
        }

        await this.fetchImages("image", datapath)
        await this.fetchImages("image:streetsign", datapath)
        for (let i = 0; i < 5; i++) {
            await this.fetchImages("image:" + i, datapath)
        }
    }

    loadData(datapath: string): Feature[] {
        const allFeatures: Feature[] = []

        const files = ScriptUtils.readDirRecSync(datapath)
        for (const file of files) {
            if (!file.endsWith(".geojson")) {
                continue
            }
            const contents = JSON.parse(fs.readFileSync(file, "utf8"))
            allFeatures.push(...contents.features)
        }

        return allFeatures
    }

    async fetchImageMetadata(datapath: string, image: string): Promise<boolean> {
        if (image === undefined) {
            return false
        }
        if (!image.match(/https:\/\/i\.imgur\.com\/[a-zA-Z0-9]+\.jpg/)) {
            return false
        }
        const targetPath = datapath + "/" + image.replace(/[\/:.\-%]/g, "_") + ".json"
        if (fs.existsSync(targetPath)) {
            return false
        }
        const attribution = await Imgur.singleton.DownloadAttribution(image)
        fs.writeFileSync(targetPath, JSON.stringify(attribution, null, " "))
        return true
    }

    async downloadMetadata(datapath: string): Promise<void> {
        const features = this.loadData(datapath)
        let allImages = new Set<string>()

        for (const feature of features) {
            allImages.add(feature.properties["image"])
            for (let i = 0; i < 10; i++) {
                allImages.add(feature.properties["image:" + i])
            }
        }
        console.log("Detected", allImages.size, "images")
        let i = 0
        let d = 0
        let s = 0
        let f = 0
        let start = Date.now()
        for (const image of Array.from(allImages)) {
            i++
            try {
                const downloaded = await this.fetchImageMetadata(datapath, image)
                const runningSecs = (Date.now() - start) / 1000
                const left = allImages.size - i

                const estimatedActualSeconds = Math.floor((left * runningSecs) / (f + d))
                const estimatedActualMinutes = Math.floor(estimatedActualSeconds / 60)

                const msg = `${i}/${
                    allImages.size
                } downloaded: ${d}, skipped: ${s}, failed: ${f}, running: ${runningSecs}sec, ETA: ${estimatedActualMinutes}:${
                    estimatedActualSeconds % 60
                }`
                ScriptUtils.erasableLog(msg)
                if (downloaded) {
                    d++
                } else {
                    s++
                }
                if (d + f == 75000) {
                    console.log("Used 75000 API calls, leaving 5000 for the rest of the day...")
                }
            } catch (e) {
                console.log(e)
                f++
            }
        }
    }

    analyze(datapath: string) {
        const files = ScriptUtils.readDirRecSync(datapath)
        const byAuthor = new Map<string, string[]>()
        const byLicense = new Map<string, string[]>()
        const licenseByAuthor = new Map<string, Set<string>>()
        for (const file of files) {
            if (!file.endsWith(".json")) {
                continue
            }
            const attr = <LicenseInfo>JSON.parse(fs.readFileSync(file, { encoding: "utf8" }))
            const license = attr.licenseShortName

            if (license === undefined || attr.artist === undefined) {
                continue
            }
            if (byAuthor.get(attr.artist) === undefined) {
                byAuthor.set(attr.artist, [])
            }
            byAuthor.get(attr.artist).push(file)

            if (byLicense.get(license) === undefined) {
                byLicense.set(license, [])
            }
            byLicense.get(license).push(file)

            if (licenseByAuthor.get(license) === undefined) {
                licenseByAuthor.set(license, new Set<string>())
            }
            licenseByAuthor.get(license).add(attr.artist)
        }
        byAuthor.delete(undefined)
        byLicense.delete(undefined)
        licenseByAuthor.delete(undefined)

        const byLicenseCount = Utils.MapToObj(byLicense, (a) => a.length)
        const byAuthorCount = Utils.MapToObj(byAuthor, (a) => a.length)
        const licenseByAuthorCount = Utils.MapToObj(licenseByAuthor, (a) => a.size)

        const countsPerAuthor: number[] = Array.from(Object.keys(byAuthorCount)).map(
            (k) => byAuthorCount[k]
        )
        console.log(countsPerAuthor)
        countsPerAuthor.sort()
        const median = countsPerAuthor[Math.floor(countsPerAuthor.length / 2)]
        for (let i = 0; i < 100; i++) {
            let maxAuthor: string = undefined
            let maxCount = 0
            for (const author in byAuthorCount) {
                const count = byAuthorCount[author]
                if (maxAuthor === undefined || count > maxCount) {
                    maxAuthor = author
                    maxCount = count
                }
            }
            console.log(
                "|",
                i + 1,
                "|",
                `[${maxAuthor}](https://openstreetmap.org/user/${maxAuthor.replace(/ /g, "%20")})`,
                "|",
                maxCount,
                "|"
            )
            delete byAuthorCount[maxAuthor]
        }

        const totalAuthors = byAuthor.size
        let totalLicensedImages = 0
        for (const license in byLicenseCount) {
            totalLicensedImages += byLicenseCount[license]
        }
        for (const license in byLicenseCount) {
            const total = byLicenseCount[license]
            const authors = licenseByAuthorCount[license]
            console.log(
                `License ${license}: ${total} total pictures (${
                    Math.floor((1000 * total) / totalLicensedImages) / 10
                }%), ${authors} authors (${
                    Math.floor((1000 * authors) / totalAuthors) / 10
                }%), ${Math.floor(total / authors)} images/author`
            )
        }

        const nonDefaultAuthors = [
            ...Array.from(licenseByAuthor.get("CC-BY 4.0").values()),
            ...Array.from(licenseByAuthor.get("CC-BY-SA 4.0").values()),
        ]

        console.log("Total number of correctly licensed pictures: ", totalLicensedImages)
        console.log("Total number of authors:", byAuthor.size)
        console.log(
            "Total number of authors which used a valid, non CC0 license at one point in time",
            nonDefaultAuthors.length
        )
        console.log("Median contributions per author:", median)
    }

    async main(args: string[]): Promise<void> {
        const datapath = args[0] ?? "../MapComplete-data/ImageLicenseInfo"
        await this.downloadData(datapath)

        //await this.downloadMetadata(datapath)
        this.analyze(datapath)
    }
}

new GenerateImageAnalysis().run()
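The ETA in downloadMetadata extrapolates linearly from the work done so far. A worked example with illustrative numbers:

    // 400 images actually handled (downloaded + failed) in 200s, 1000 images left:
    const left = 1000
    const runningSecs = 200
    const handled = 400 // corresponds to f + d in the loop above
    const estimatedActualSeconds = Math.floor((left * runningSecs) / handled) // 500
    const estimatedActualMinutes = Math.floor(estimatedActualSeconds / 60) // 8, so the ETA prints as 8:20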

@@ -125,7 +125,7 @@ class LayerOverviewUtils {
         writeFileSync(
             "./assets/generated/theme_overview.json",
             JSON.stringify(sorted, null, " "),
-            "UTF8"
+            { encoding: "utf8" }
         )
     }

@@ -136,7 +136,7 @@ class LayerOverviewUtils {
         writeFileSync(
             `${LayerOverviewUtils.themePath}${theme.id}.json`,
             JSON.stringify(theme, null, " "),
-            "UTF8"
+            { encoding: "utf8" }
         )
     }

@@ -147,7 +147,7 @@ class LayerOverviewUtils {
         writeFileSync(
             `${LayerOverviewUtils.layerPath}${layer.id}.json`,
             JSON.stringify(layer, null, " "),
-            "UTF8"
+            { encoding: "utf8" }
         )
     }

@@ -219,7 +219,7 @@ class LayerOverviewUtils {
             continue
         }

-        const contents = readFileSync(path, "UTF8")
+        const contents = readFileSync(path, { encoding: "utf8" })
         if (contents.indexOf("data:image/png;") >= 0) {
             console.warn("The SVG at " + path + " is a fake SVG: it contains PNG data!")
             errCount++

@@ -281,10 +281,9 @@ class LayerOverviewUtils {
             then: th.icon,
         }))
         const proto: LayoutConfigJson = JSON.parse(
-            readFileSync(
-                "./assets/themes/mapcomplete-changes/mapcomplete-changes.proto.json",
-                "UTF8"
-            )
+            readFileSync("./assets/themes/mapcomplete-changes/mapcomplete-changes.proto.json", {
+                encoding: "utf8",
+            })
         )
         const protolayer = <LayerConfigJson>(
             proto.layers.filter((l) => l["id"] === "mapcomplete-changes")[0]
@@ -441,6 +440,35 @@ class LayerOverviewUtils {
             convertState.tagRenderings
         ).convertStrict(themeFile, themePath)

+        if (themeFile.icon.endsWith(".svg")) {
+            try {
+                ScriptUtils.ReadSvgSync(themeFile.icon, (svg) => {
+                    const width: string = svg.$.width
+                    const height: string = svg.$.height
+                    const err = themeFile.hideFromOverview ? console.warn : console.error
+                    if (width !== height) {
+                        const e =
+                            `the icon for theme ${themeFile.id} is not square. Please square the icon at ${themeFile.icon}.` +
+                            ` Width = ${width}, height = ${height}`
+                        err(e)
+                    }
+
+                    const w = parseInt(width)
+                    const h = parseInt(height)
+                    if (w < 370 || h < 370) {
+                        const e: string = [
+                            `the icon for theme ${themeFile.id} is too small. Please rescale the icon at ${themeFile.icon}.`,
+                            `Even though an SVG is 'infinitely scalable', the icon should be dimensioned bigger. One of the build steps of the theme does convert the image to a PNG (to serve as PWA-icon) and having a small dimension will cause blurry images.`,
+                            ` Width = ${width}, height = ${height}; we recommend a size of at least 500px * 500px and a square aspect ratio.`,
+                        ].join("\n")
+                        err(e)
+                    }
+                })
+            } catch (e) {
+                console.error("Could not read " + themeFile.icon + " due to " + e)
+            }
+        }
+
         this.writeTheme(themeFile)
         fixed.set(themeFile.id, themeFile)
     } catch (e) {
@@ -270,8 +270,8 @@ async function createLandingPage(layout: LayoutConfig, manifest, whiteIcons, alr
             layout.shortDescription.textFor(targetLanguage)
         )
         .replace(
-            '<script src="./index.ts"></script>',
-            `<script src='./index_${layout.id}.ts'></script>`
+            '<script type="module" src="./index.ts"></script>',
+            `<script type="module" src='./index_${layout.id}.ts'></script>`
         )

     try {
@@ -22,7 +22,7 @@ function generateLicenseInfos(paths: string[]): SmallLicense[] {
     const licenses = []
     for (const path of paths) {
         try {
-            const parsed = JSON.parse(readFileSync(path, "UTF-8"))
+            const parsed = JSON.parse(readFileSync(path, { encoding: "utf8" }))
             if (Array.isArray(parsed)) {
                 const l: SmallLicense[] = parsed
                 for (const smallLicens of l) {
@@ -242,7 +242,7 @@ function createFullLicenseOverview(licensePaths: string[]) {
         if (!existsSync(licensePath)) {
             continue
         }
-        const licenses = <SmallLicense[]>JSON.parse(readFileSync(licensePath, "UTF-8"))
+        const licenses = <SmallLicense[]>JSON.parse(readFileSync(licensePath, { encoding: "utf8" }))
         for (const license of licenses) {
             validateLicenseInfo(license)
             const dir = licensePath.substring(0, licensePath.length - "license_info.json".length)

176  scripts/generateReviewsAnalysis.ts  Normal file

@@ -0,0 +1,176 @@
import Script from "./Script"
import * as fs from "fs"
import { Review } from "mangrove-reviews-typescript"
import { parse } from "csv-parse"
import { Feature, FeatureCollection, Point } from "geojson"

export default class GenerateReviewsAnalysis extends Script {
    constructor() {
        super("Analyses a CSV-file with Mangrove reviews")
    }

    async analyze(datapath: string) {
        const reviews = await this.parseCsv(datapath)

        const clientWebsites: Record<string, number> = {}
        const themeHist: Record<string, number> = {}
        const languageHist: Record<string, number> = {}

        const geojsonFeatures: Feature<Point, Record<string, string>>[] = []

        for (const review of reviews) {
            try {
                const client = new URL(review.metadata.client_id)
                clientWebsites[client.host] = 1 + (clientWebsites[client.host] ?? 0)
                if (
                    client.host.indexOf("mapcomplete") >= 0 ||
                    client.host.indexOf("pietervdvn") >= 0
                ) {
                    let theme = client.pathname.substring(client.pathname.lastIndexOf("/") + 1)
                    if (theme.endsWith(".html")) {
                        theme = theme.substring(0, theme.length - 5)
                    }
                    if (theme === "theme") {
                        // This is a custom layout
                        theme =
                            client.searchParams.get("layout") ??
                            client.searchParams.get("userlayout")
                    }
                    theme = "https://mapcomplete.osm.be/" + theme
                    themeHist[theme] = (themeHist[theme] ?? 0) + 1

                    const language = client.searchParams.get("language")
                    languageHist[language] = (languageHist[language] ?? 0) + 1
                }
            } catch (e) {
                console.error("Not a url:", review.metadata.client_id)
            }

            try {
                const geo = new URL(review.sub)
                if (geo.protocol !== "geo:") {
                    continue
                }
                const [lat, lon] = geo.pathname.split(",").map(Number)
                console.log(lat, lon)
                geojsonFeatures.push({
                    geometry: {
                        type: "Point",
                        coordinates: [lon, lat],
                    },
                    type: "Feature",
                    properties: {
                        name: geo.searchParams.get("q"),
                        rating: "" + review.rating,
                        opinion: review.opinion,
                        client: review.metadata.client_id,
                        nickname: review.metadata.nickname,
                        affiliated: "" + review.metadata.is_affiliated,
                    },
                })
            } catch (e) {
                console.error(e)
            }
        }
        console.log("Total number of reviews", reviews.length)
        this.print("Website", clientWebsites)
        this.print("Theme", themeHist)
        this.print("language", languageHist)
        const fc: FeatureCollection = {
            type: "FeatureCollection",
            features: geojsonFeatures,
        }

        const fcmc: FeatureCollection = {
            type: "FeatureCollection",
            features: geojsonFeatures.filter(
                (f) =>
                    f.properties.client.indexOf("mapcomplete") >= 0 ||
                    f.properties.client.indexOf("pietervdvn.github.io") >= 0
            ),
        }
        fs.writeFileSync("../MapComplete-data/reviews.geojson", JSON.stringify(fc), {
            encoding: "utf-8",
        })
        fs.writeFileSync("../MapComplete-data/reviewsmc.geojson", JSON.stringify(fcmc), {
            encoding: "utf-8",
        })
    }

    async main(args: string[]): Promise<void> {
        const datapath = args[0] ?? "../MapComplete-data/mangrove.reviews_1674234503.csv"
        await this.analyze(datapath)
    }

    private sort(record: Record<string, number>): Record<string, number> {
        record = { ...record }
        const result: Record<string, number> = {}
        do {
            let maxKey: string = undefined
            let maxCount: number = -999

            for (const key in record) {
                const c = record[key]
                if (c > maxCount) {
                    maxCount = c
                    maxKey = key
                }
            }
            result[maxKey] = maxCount
            delete record[maxKey]
        } while (Object.keys(record).length > 0)

        return result
    }

    private print(type: string, histogram: Record<string, number>) {
        console.log(type, this.sort(histogram))
    }

    private parseCsv(datapath: string): Promise<Review[]> {
        const header: string[] = [
            "signature",
            "pem",
            "iat",
            "sub",
            "rating",
            "opinion",
            "images",
            "metadata",
        ]
        return new Promise<Review[]>((resolve) => {
            const parser = parse({ delimiter: "," }, function (err, data) {
                const asJson: Review[] = []
                for (let i = 1; i < data.length; i++) {
                    const line = data[i]
                    const entry: Review = { sub: undefined }
                    for (let c = 0; c < line.length; c++) {
                        const key: string = header[c]
                        let value = line[c]
                        if (value === "none") {
                            value = null
                        } else if (key === "images" || key === "metadata") {
                            try {
                                value = JSON.parse(value)
                            } catch (e) {
                                console.log("Could not parse", value, "\n", line)
                            }
                        }
                        entry[key] = value
                    }
                    asJson.push(entry)
                }
                resolve(asJson)
            })
            fs.createReadStream(datapath).pipe(parser)
        })
    }
}

new GenerateReviewsAnalysis().run()
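Mangrove stores the review subject as a geo-URI, which is why the code above can read the coordinate pair from URL.pathname. A quick sketch with a made-up subject:

    const geo = new URL("geo:51.05,3.72?q=Some%20bookshop")
    const [lat, lon] = geo.pathname.split(",").map(Number) // 51.05, 3.72
    const name = geo.searchParams.get("q") // "Some bookshop"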

@@ -176,7 +176,7 @@ function generateTagInfoEntry(layout: LayoutConfig): any {
 function generateProjectsOverview(files: string[]) {
     try {
         const tagInfoList = "../taginfo-projects/project_list.txt"
-        let projectList = readFileSync(tagInfoList, "UTF8")
+        let projectList = readFileSync(tagInfoList, { encoding: "utf8" })
             .split("\n")
             .filter((entry) => entry.indexOf("mapcomplete_") < 0)
             .concat(
@@ -12,7 +12,7 @@ class TranslationPart {
     const files = ScriptUtils.readDirRecSync(path, 1).filter((file) => file.endsWith(".json"))
     const rootTranslation = new TranslationPart()
     for (const file of files) {
-        const content = JSON.parse(readFileSync(file, "UTF8"))
+        const content = JSON.parse(readFileSync(file, { encoding: "utf8" }))
         rootTranslation.addTranslation(file.substr(0, file.length - ".json".length), content)
     }
     return rootTranslation

@@ -360,7 +360,9 @@ function transformTranslation(
         return `new Translation( ${JSON.stringify(obj)} )`
     }

-    let values = ""
+    let values: string[] = []
+    const spaces = Utils.Times((_) => " ", path.length + 1)

     for (const key in obj) {
         if (key === "#") {
             continue

@@ -405,18 +407,14 @@ function transformTranslation(
                 )}, "core:${path.join(".")}.${key}")`
             }

-            values += `${Utils.Times((_) => " ", path.length + 1)}get ${key}() { ${expr} },
-`
+            values.push(`${spaces}get ${key}() { ${expr} }`)
         } else {
-            values +=
-                Utils.Times((_) => " ", path.length + 1) +
-                key +
-                ": " +
-                transformTranslation(value, [...path, key], languageWhitelist) +
-                ",\n"
+            values.push(
+                spaces + key + ": " + transformTranslation(value, [...path, key], languageWhitelist)
+            )
         }
     }
-    return `{${values}}`
+    return `{${values.join(",\n")}}`
 }

 function sortKeys(o: object): object {
@@ -9,7 +9,7 @@ import ScriptUtils from "./ScriptUtils"

 function makeConvex(file) {
     ScriptUtils.erasableLog("Handling", file)
-    const geoJson = JSON.parse(fs.readFileSync(file, "UTF8"))
+    const geoJson = JSON.parse(fs.readFileSync(file, { encoding: "utf8" }))
     const convex = GeoOperations.convexHull(geoJson, { concavity: 2 })
     if (convex.properties === undefined) {
         convex.properties = {}
@@ -11,7 +11,7 @@ function main(args) {
     const path = args[0]
     const key = args[1]

-    const data = JSON.parse(fs.readFileSync(path, "UTF8"))
+    const data = JSON.parse(fs.readFileSync(path, { encoding: "utf8" }))
     const perProperty = new Map<string, any[]>()

     console.log("Partitioning", data.features.length, "features")
@@ -13,14 +13,14 @@ async function main(args: string[]) {

     const alreadyLoaded = new Set<number>()
     if (existsSync(output)) {
-        const lines = readFileSync(output, "UTF8").split("\n")
+        const lines = readFileSync(output, { encoding: "utf8" }).split("\n")
         lines.shift()
         lines.forEach((line) => {
             const postalCode = Number(line.split(",")[0])
             alreadyLoaded.add(postalCode)
         })
     } else {
-        writeFileSync(output, "postal_code,lon,lat\n", "UTF-8")
+        writeFileSync(output, "postal_code,lon,lat\n", { encoding: "utf8" })
     }

     for (const boundary of postcodes.features) {

@@ -104,7 +104,7 @@ async function main(args: string[]) {
             " are ",
             JSON.stringify(depPoints)
         )
-        appendFileSync(output, [postcode, ...depPoints[0]].join(", ") + "\n", "UTF-8")
+        appendFileSync(output, [postcode, ...depPoints[0]].join(", ") + "\n", { encoding: "utf8" })
     }
 }
@@ -3,7 +3,7 @@ import { writeFileSync } from "fs"
 import ScriptUtils from "../ScriptUtils"

 function handleFile(file: string, postalCode: number) {
-    const geojson = JSON.parse(fs.readFileSync(file, "UTF8"))
+    const geojson = JSON.parse(fs.readFileSync(file, { encoding: "utf8" }))
     geojson.properties = {
         type: "boundary",
         boundary: "postal_code",

@@ -14,7 +14,7 @@ function handleFile(file: string, postalCode: number) {

 function getKnownPostalCodes(): number[] {
     return fs
-        .readFileSync("./scripts/postal_code_tools/knownPostalCodes.csv", "UTF8")
+        .readFileSync("./scripts/postal_code_tools/knownPostalCodes.csv", { encoding: "utf8" })
         .split("\n")
         .map((line) => Number(line.split(",")[1]))
 }
@@ -4,7 +4,9 @@ import * as fs from "fs"

 async function main(args: string[]) {
     if (args.length !== 1) {
-        console.log("Usage: first argument is the fully qualified key of the string to remove")
+        console.log(
+            "Usage: first argument is the fully qualified key of the string to remove. Only removes translations in the core translations"
+        )
         return
     }
     const path = args[0].split(".")

286  scripts/slice.ts

@@ -4,159 +4,179 @@ import StaticFeatureSource from "../Logic/FeatureSource/Sources/StaticFeatureSou
import * as readline from "readline"
import ScriptUtils from "./ScriptUtils"
import { Utils } from "../Utils"
import Script from "./Script"
import { BBox } from "../Logic/BBox"
import { GeoOperations } from "../Logic/GeoOperations"

/**
 * This script slices a big newline-delimited geojson file into tiled geojson
 * It was used to convert the CRAB-data into geojson tiles
 */
class Slice extends Script {
    constructor() {
        super("Break data into tiles")
    }

    async readFeaturesFromLineDelimitedJsonFile(inputFile: string): Promise<any[]> {
        const fileStream = fs.createReadStream(inputFile)

        const rl = readline.createInterface({
            input: fileStream,
            crlfDelay: Infinity,
        })
        // Note: we use the crlfDelay option to recognize all instances of CR LF
        // ('\r\n') in input.txt as a single line break.

        const allFeatures: any[] = []
        // @ts-ignore
        for await (const line of rl) {
            try {
                allFeatures.push(JSON.parse(line))
            } catch (e) {
                console.error("Could not parse", line)
                break
            }
            if (allFeatures.length % 10000 === 0) {
                ScriptUtils.erasableLog("Loaded ", allFeatures.length, "features up till now")
            }
        }
        return allFeatures
    }

    async readGeojsonLineByLine(inputFile: string): Promise<any[]> {
        const fileStream = fs.createReadStream(inputFile)

        const rl = readline.createInterface({
            input: fileStream,
            crlfDelay: Infinity,
        })
        // Note: we use the crlfDelay option to recognize all instances of CR LF
        // ('\r\n') in input.txt as a single line break.

        const allFeatures: any[] = []
        let featuresSeen = false
        // @ts-ignore
        for await (let line of rl) {
            if (!featuresSeen && line.startsWith('"features":')) {
                featuresSeen = true
                continue
            }
            if (!featuresSeen) {
                continue
            }
            if (line.endsWith(",")) {
                line = line.substring(0, line.length - 1)
            }

            try {
                allFeatures.push(JSON.parse(line))
            } catch (e) {
                console.error("Could not parse", line)
                break
            }
            if (allFeatures.length % 10000 === 0) {
                ScriptUtils.erasableLog("Loaded ", allFeatures.length, "features up till now")
            }
        }
        return allFeatures
    }

    async readFeaturesFromGeoJson(inputFile: string): Promise<any[]> {
        try {
            return JSON.parse(fs.readFileSync(inputFile, { encoding: "utf-8" })).features
        } catch (e) {
            // We retry, but with a line-by-line approach
            return await this.readGeojsonLineByLine(inputFile)
        }
    }

    async main(args: string[]) {
        console.log("GeoJSON slicer")
        if (args.length < 3) {
            console.log(
                "USAGE: <input-file.geojson> <target-zoom-level> <output-directory> [--clip]"
            )
            return
        }

        const inputFile = args[0]
        const zoomlevel = Number(args[1])
        const outputDirectory = args[2]
        const doSlice = args[3] === "--clip"

        if (!fs.existsSync(outputDirectory)) {
            fs.mkdirSync(outputDirectory)
            console.log("Directory created")
        }
        console.log("Using directory ", outputDirectory)

        let allFeatures: any[]
        if (inputFile.endsWith(".geojson")) {
            console.log("Detected geojson")
            allFeatures = await this.readFeaturesFromGeoJson(inputFile)
        } else {
            console.log("Loading as newline-delimited features")
            allFeatures = await this.readFeaturesFromLineDelimitedJsonFile(inputFile)
        }
        allFeatures = Utils.NoNull(allFeatures)

        console.log("Loaded all", allFeatures.length, "points")

        const keysToRemove = ["STRAATNMID", "GEMEENTE", "POSTCODE"]
        for (const f of allFeatures) {
            if (f.properties === null) {
                console.log("Got a feature without properties!", f)
                continue
            }
            for (const keyToRm of keysToRemove) {
                delete f.properties[keyToRm]
            }
            delete f.bbox
        }

        TiledFeatureSource.createHierarchy(StaticFeatureSource.fromGeojson(allFeatures), {
            minZoomLevel: zoomlevel,
            maxZoomLevel: zoomlevel,
            maxFeatureCount: Number.MAX_VALUE,
            registerTile: (tile) => {
                const path = `${outputDirectory}/tile_${tile.z}_${tile.x}_${tile.y}.geojson`
                const box = BBox.fromTile(tile.z, tile.x, tile.y)
                let features = tile.features.data.map((ff) => ff.feature)
                if (doSlice) {
                    features = Utils.NoNull(
                        features.map((f) => {
                            const intersection = GeoOperations.intersect(f, box.asGeoJson({}))
                            if (intersection) {
                                intersection.properties = f.properties
                            }
                            return intersection
                        })
                    )
                }
                features.forEach((f) => {
                    delete f.bbox
                })
                fs.writeFileSync(
                    path,
                    JSON.stringify(
                        {
                            type: "FeatureCollection",
                            features: features,
                        },
                        null,
                        " "
                    )
                )
                ScriptUtils.erasableLog(
                    "Written ",
                    path,
                    "which has ",
                    tile.features.data.length,
                    "features"
                )
            },
        })
    }
}

new Slice().run()
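A usage sketch for the reworked script; the file names are illustrative and the invocation assumes the repository's usual ts-node setup. The fourth argument enables clipping each feature to its tile boundary:

    // Roughly equivalent to: ts-node scripts/slice.ts addresses.ndjson 14 ./tiles --clip
    // (calling main directly skips the ScriptUtils.fixUtils() initialization that run() performs)
    new Slice().main(["addresses.ndjson", "14", "./tiles", "--clip"])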