Scripts(community_index): create script to update community index files, create weekly data maintenance script

parent 36a9b49c66
commit 7bddaa7d4c

11 changed files with 412 additions and 141 deletions
31	.forgejo/workflows/update_community_index.yml	(new file)

@@ -0,0 +1,31 @@
name: Weekly data updates

on:
  schedule:
    - cron: "* * * * 1"

jobs:
  deploy_on_hetzner_single:
    runs-on: [ ubuntu-latest, hetzner-access ]
    steps:
      - uses: actions/checkout@v3

      - name: Set up Node.js
        uses: actions/setup-node@v3
        with:
          node-version: "20"
          cache: "npm"
          cache-dependency-path: package-lock.json

      - name: install deps
        run: npm ci
        shell: bash

      - name: update community index files
        shell: bash
        run: |
          mkdir community-index
          npm run download:community-index -- community-index/
          zip community-index.zip community-index/*
          scp community-index.zip hetzner:data/
@@ -119,6 +119,7 @@ (package.json)
     "download:editor-layer-index": "vite-node scripts/downloadEli.ts",
     "download:stats": "vite-node scripts/GenerateSeries.ts",
     "download:images": "vite-node scripts/generateImageAnalysis.ts -- ~/data/imgur-image-backup/",
+    "download:community-index": "vite-node scripts/downloadCommunityIndex.ts -- /tmp/test",
     "weblate:add-upstream": "git remote add weblate https://translate.mapcomplete.org/git/mapcomplete/core/ ; git remote update weblate",
     "weblate:fix": "npm run weblate:add-upstream && git merge weblate/master && git rebase origin/master && git push origin master",
     "lint": "npm run lint:prettier && npm run lint:eslint && npm run lint:themes",
116	scripts/downloadCommunityIndex.ts	(new file)

@@ -0,0 +1,116 @@
import Script from "./Script"
import { CommunityResource } from "../src/Logic/Web/CommunityIndex"
import { Utils } from "../src/Utils"
import { FeatureCollection, MultiPolygon, Polygon } from "geojson"
import { writeFileSync } from "fs"
import { GeoOperations } from "../src/Logic/GeoOperations"
import { Tiles } from "../src/Models/TileRange"
import ScriptUtils from "./ScriptUtils"

class DownloadCommunityIndex extends Script {
    constructor() {
        super("Updates the community index")
    }

    printHelp() {
        console.log("Arguments are:\noutputdirectory")
    }

    private static targetZoomlevel: number = 6
    private static upstreamUrl: string =
        "https://raw.githubusercontent.com/osmlab/osm-community-index/main/dist/"

    /**
     * Prunes away unnecessary fields from a CommunityResource
     * @private
     */
    private static stripResource(r: Readonly<CommunityResource>): CommunityResource {
        return {
            id: r.id,
            languageCodes: r.languageCodes,
            account: r.account,
            type: r.type,
            resolved: {
                name: r.resolved.name,
                description: r.resolved.description,
                url: r.resolved.url
            }
        }
    }

    private static stripResourcesObj(
        resources: Readonly<Record<string, Readonly<CommunityResource>>>
    ) {
        const stripped: Record<string, CommunityResource> = {}
        for (const k in resources) {
            stripped[k] = DownloadCommunityIndex.stripResource(resources[k])
        }
        return stripped
    }

    public static async update(targetDirectory: string) {
        const data = await Utils.downloadJson<
            FeatureCollection<
                Polygon | MultiPolygon,
                {
                    resources: Record<string, CommunityResource>
                    nameEn: string
                    id: string
                }
            >
        >(DownloadCommunityIndex.upstreamUrl + "completeFeatureCollection.json")
        const features = data.features
        // The feature with id "Q2" (Earth) carries the resources that apply worldwide
        const global = features.find((f) => f.id === "Q2")
        const globalProperties = DownloadCommunityIndex.stripResourcesObj(
            global.properties.resources
        )
        writeFileSync(targetDirectory + "/global.json", JSON.stringify(globalProperties), "utf8")
        console.log("Written global properties")

        // Download one icon per resource type (platform), e.g. "matrix.svg"
        const types = new Set<string>()
        for (const f of features) {
            const res = f.properties.resources
            for (const k in res) {
                types.add(res[k].type)
            }
        }
        for (const type of types) {
            const url = `${DownloadCommunityIndex.upstreamUrl}img/${type}.svg`
            await ScriptUtils.DownloadFileTo(url, `${targetDirectory}/${type}.svg`)
        }

        const local = features.filter((f) => f.id !== "Q2")
        const spread = GeoOperations.spreadIntoBboxes(local, DownloadCommunityIndex.targetZoomlevel)
        let written = 0
        let skipped = 0
        writeFileSync(
            targetDirectory + "/local.geojson",
            JSON.stringify({ type: "FeatureCollection", features: local })
        )
        for (const tileIndex of spread.keys()) {
            const features = spread.get(tileIndex)
            const clipped = GeoOperations.clipAllInBox(features, tileIndex)
            if (clipped.length === 0) {
                skipped++
                features.push(Tiles.asGeojson(tileIndex))
                writeFileSync(
                    `${targetDirectory}/${tileIndex}_skipped.geojson`,
                    JSON.stringify({ type: "FeatureCollection", features })
                )
                continue
            }
            const [z, x, y] = Tiles.tile_from_index(tileIndex)
            const path = `${targetDirectory}/tile_${z}_${x}_${y}.geojson`
            clipped.forEach((f) => {
                delete f.bbox
            })
            writeFileSync(path, JSON.stringify({ type: "FeatureCollection", features: clipped }), "utf8")
            written++
            console.log(`Written tile ${path}`)
        }
        console.log(`Created ${written} tiles, skipped ${skipped}`)
    }

    async main(args: string[]): Promise<void> {
        const path = args[0]
        if (!path) {
            this.printHelp()
            return
        }
        await DownloadCommunityIndex.update(path)
    }
}

new DownloadCommunityIndex().run()
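The tile files written above follow standard slippy-map numbering, so a consumer can work out which file to fetch for a coordinate without an index. A minimal sketch (not part of the diff; the function name and base URL are made up for illustration, and z is pinned to the script's targetZoomlevel of 6):

    // Compute which pregenerated community-index tile covers a given lon/lat.
    // Uses the standard slippy-map tile formula; the file name mirrors the
    // "tile_{z}_{x}_{y}.geojson" pattern written by DownloadCommunityIndex.update.
    function communityIndexTileUrl(lon: number, lat: number, baseUrl: string, z: number = 6): string {
        const n = Math.pow(2, z)
        const x = Math.floor(((lon + 180) / 360) * n)
        const latRad = (lat * Math.PI) / 180
        const y = Math.floor(
            ((1 - Math.log(Math.tan(latRad) + 1 / Math.cos(latRad)) / Math.PI) / 2) * n
        )
        return `${baseUrl}/tile_${z}_${x}_${y}.geojson`
    }

    // e.g. a point in Brussels:
    console.log(communityIndexTileUrl(4.35, 50.85, "https://example.org/community-index"))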
@@ -1,13 +1,11 @@
 import * as fs from "fs"
-import StaticFeatureSource from "../Logic/FeatureSource/Sources/StaticFeatureSource"
 import * as readline from "readline"
 import ScriptUtils from "./ScriptUtils"
 import { Utils } from "../Utils"
 import Script from "./Script"
-import { BBox } from "../Logic/BBox"
 import { GeoOperations } from "../Logic/GeoOperations"
 import { Tiles } from "../Models/TileRange"
 import { Feature } from "geojson"

 /**
  * This script slices a big newline-delimited geojson file into tiled geojson
@@ -96,34 +95,15 @@ class Slice extends Script
         features: Feature[],
         tileIndex: number,
         outputDirectory: string,
-        doSlice: boolean,
+        doClip: boolean,
         handled: number,
         maxNumberOfTiles: number
-    ) {
+    ): boolean {
+        if (doClip) {
+            features = GeoOperations.clipAllInBox(features, tileIndex)
+        }
         const [z, x, y] = Tiles.tile_from_index(tileIndex)
         const path = `${outputDirectory}/tile_${z}_${x}_${y}.geojson`
-        const box = BBox.fromTileIndex(tileIndex)
-        if (doSlice) {
-            features = Utils.NoNull(
-                features.map((f) => {
-                    const bbox = box.asGeoJson({})
-                    const properties = {
-                        ...f.properties,
-                        id: (f.properties?.id ?? "") + "_" + z + "_" + x + "_" + y,
-                    }
-
-                    if (GeoOperations.completelyWithin(bbox, <any>f)) {
-                        bbox.properties = properties
-                        return bbox
-                    }
-                    const intersection = GeoOperations.intersect(f, box.asGeoJson({}))
-                    if (intersection) {
-                        intersection.properties = properties
-                    }
-                    return intersection
-                })
-            )
-        }
         features.forEach((f) => {
             delete f.bbox
         })
@@ -177,7 +157,7 @@ class Slice extends Script
         }
         console.log("Using directory ", outputDirectory)

-        let allFeatures: any[]
+        let allFeatures: Feature[]
         if (inputFile.endsWith(".geojson")) {
             console.log("Detected geojson")
             allFeatures = await this.readFeaturesFromGeoJson(inputFile)
@@ -202,18 +182,16 @@ class Slice extends Script
         }
         const maxNumberOfTiles = Math.pow(2, zoomlevel) * Math.pow(2, zoomlevel)
         let handled = 0
-        StaticFeatureSource.fromGeojson(allFeatures).features.addCallbackAndRun((feats) => {
-            GeoOperations.slice(zoomlevel, feats).forEach((tileData, tileIndex) => {
-                handled = handled + 1
-                this.handleTileData(
-                    tileData,
-                    tileIndex,
-                    outputDirectory,
-                    doSlice,
-                    handled,
-                    maxNumberOfTiles
-                )
-            })
-        })
+        GeoOperations.spreadIntoBboxes(allFeatures, zoomlevel).forEach((tileData, tileIndex) => {
+            handled = handled + 1
+            this.handleTileData(
+                tileData,
+                tileIndex,
+                outputDirectory,
+                doSlice,
+                handled,
+                maxNumberOfTiles
+            )
+        })
     }
 }
@@ -75,7 +75,7 @@ export default class SaveFeatureSourceToLocalStorage
         this.storage = storage
         const singleTileSavers: Map<number, SingleTileSaver> = new Map<number, SingleTileSaver>()
         features.features.addCallbackAndRunD((features) => {
-            const sliced = GeoOperations.slice(zoomlevel, features)
+            const sliced = GeoOperations.spreadIntoBboxes(features, zoomlevel)

            sliced.forEach((features, tileIndex) => {
                let tileSaver = singleTileSavers.get(tileIndex)
@@ -1,6 +1,6 @@
 import { BBox } from "./BBox"
 import * as turf from "@turf/turf"
-import { AllGeoJSON, booleanWithin, Coord } from "@turf/turf"
+import { AllGeoJSON, booleanWithin, Coord, Polygon } from "@turf/turf"
 import {
     Feature,
     FeatureCollection,
@@ -9,13 +9,13 @@ import {
     MultiLineString,
     MultiPolygon,
     Point,
-    Polygon,
-    Position
+    Position,
 } from "geojson"
 import { Tiles } from "../Models/TileRange"
 import { Utils } from "../Utils"
 import { NearestPointOnLine } from "@turf/nearest-point-on-line"
-;("use strict")
+("use strict")

 export class GeoOperations {
     private static readonly _earthRadius = 6378137
@@ -29,7 +29,7 @@ export class GeoOperations {
         "behind",
         "sharp_left",
         "left",
-        "slight_left",
+        "slight_left"
     ] as const
     private static reverseBearing = {
         N: 0,
@@ -47,7 +47,7 @@ export class GeoOperations {
         W: 270,
         WNW: 292.5,
         NW: 315,
-        NNW: 337.5,
+        NNW: 337.5
     }

     /**
@@ -61,8 +61,8 @@ export class GeoOperations {
     }

     public static intersect(
-        f0: Feature<Polygon | MultiPolygon>,
-        f1: Feature<Polygon | MultiPolygon>
+        f0: Readonly<Feature<Polygon | MultiPolygon>>,
+        f1: Readonly<Feature<Polygon | MultiPolygon>>
     ): Feature<Polygon | MultiPolygon> | null {
         return turf.intersect(f0, f1)
     }
@@ -309,7 +309,7 @@ export class GeoOperations {
         bufferSizeInMeter: number
     ): Feature<Polygon | MultiPolygon> | FeatureCollection<Polygon | MultiPolygon> {
         return turf.buffer(feature, bufferSizeInMeter / 1000, {
-            units: "kilometers",
+            units: "kilometers"
         })
     }

@@ -325,9 +325,9 @@ export class GeoOperations {
                     [lon0, lat],
                     [lon0, lat0],
                     [lon, lat0],
-                    [lon, lat],
-                ],
-            },
+                    [lon, lat]
+                ]
+            }
         }
     }

@@ -368,9 +368,9 @@ export class GeoOperations {
                 type: "Feature",
                 geometry: {
                     type: "LineString",
-                    coordinates: way.geometry.coordinates[0],
+                    coordinates: way.geometry.coordinates[0]
                 },
-                properties: way.properties,
+                properties: way.properties
             }
         }
         if (way.geometry.type === "MultiPolygon") {
@@ -378,9 +378,9 @@ export class GeoOperations {
                 type: "Feature",
                 geometry: {
                     type: "MultiLineString",
-                    coordinates: way.geometry.coordinates[0],
+                    coordinates: way.geometry.coordinates[0]
                 },
-                properties: way.properties,
+                properties: way.properties
             }
         }
         if (way.geometry.type === "LineString") {
@@ -512,6 +512,8 @@ export class GeoOperations {
     /**
      * Given a list of features, will construct a map of slippy map tile-indices.
      * Features of which the BBOX overlaps with the corresponding slippy map tile are added to the corresponding array
+     *
+     * Also @see clipAllInBox
      * @param features
      * @param zoomlevel
      */
@@ -535,6 +537,33 @@ export class GeoOperations {
         return perBbox
     }

+    /**
+     * Given a list of features, returns a new list of features so that the features are clipped into the given tile-index.
+     * Note: IDs are rewritten
+     * Also @see spreadIntoBboxes
+     */
+    public static clipAllInBox(features: ReadonlyArray<Readonly<Feature>>, tileIndex: number): Feature[] {
+        const bbox = Tiles.asGeojson(tileIndex)
+        const newFeatures: Feature[] = []
+        for (const f of features) {
+            const intersectionParts = GeoOperations.clipWith(f, bbox)
+            for (let i = 0; i < intersectionParts.length; i++) {
+                const intersectionPart = intersectionParts[i]
+                let id = (f.properties?.id ?? "") + "_" + tileIndex
+                if (i > 0) {
+                    id += "_part_" + i
+                }
+                const properties = {
+                    ...f.properties,
+                    id
+                }
+                intersectionPart.properties = properties
+                newFeatures.push(intersectionPart)
+            }
+        }
+        return Utils.NoNull(newFeatures)
+    }
+
     public static toGpx(
         locations:
             | Feature<LineString>
@@ -558,8 +587,8 @@ export class GeoOperations {
                     properties: {},
                     geometry: {
                         type: "Point",
-                        coordinates: p,
-                    },
+                        coordinates: p
+                    }
                 }
             )
         }
@@ -575,7 +604,7 @@ export class GeoOperations {
             trackPoints.push(trkpt)
         }
         const header =
-            '<gpx version="1.1" creator="mapcomplete.org" xmlns="http://www.topografix.com/GPX/1/1" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.topografix.com/GPX/1/1 http://www.topografix.com/GPX/1/1/gpx.xsd">'
+            "<gpx version=\"1.1\" creator=\"mapcomplete.org\" xmlns=\"http://www.topografix.com/GPX/1/1\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://www.topografix.com/GPX/1/1 http://www.topografix.com/GPX/1/1/gpx.xsd\">"
         return (
             header +
             "\n<name>" +
@@ -614,7 +643,7 @@ export class GeoOperations {
             trackPoints.push(trkpt)
         }
         const header =
-            '<gpx version="1.1" creator="mapcomplete.org" xmlns="http://www.topografix.com/GPX/1/1" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.topografix.com/GPX/1/1 http://www.topografix.com/GPX/1/1/gpx.xsd">'
+            "<gpx version=\"1.1\" creator=\"mapcomplete.org\" xmlns=\"http://www.topografix.com/GPX/1/1\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://www.topografix.com/GPX/1/1 http://www.topografix.com/GPX/1/1/gpx.xsd\">"
         return (
             header +
             "\n<name>" +
@@ -640,7 +669,7 @@ export class GeoOperations {

         const copy = {
             ...feature,
-            geometry: { ...feature.geometry },
+            geometry: { ...feature.geometry }
         }
         let coordinates: [number, number][]
         if (feature.geometry.type === "LineString") {
@@ -698,8 +727,8 @@ export class GeoOperations {
                     type: "Feature",
                     geometry: {
                         type: "LineString",
-                        coordinates: [a, b],
-                    },
+                        coordinates: [a, b]
+                    }
                 },
                 distanceMeter,
                 { units: "meters" }
@@ -736,17 +765,26 @@ export class GeoOperations {
      * GeoOperations.completelyWithin(park, pond) // => false
      */
     static completelyWithin(
-        feature: Feature,
-        possiblyEnclosingFeature: Feature<Polygon | MultiPolygon>
+        feature: Readonly<Feature>,
+        possiblyEnclosingFeature: Readonly<Feature<Polygon | MultiPolygon>>
     ): boolean {
+        if (feature.geometry.type === "MultiPolygon") {
+            const polygons = feature.geometry.coordinates.map(coordinates =>
+                <Feature<Polygon>>{
+                    type: "Feature", geometry: {
+                        type: "Polygon", coordinates
+                    }
+                })
+            return !polygons.some(polygon => !booleanWithin(polygon, possiblyEnclosingFeature))
+        }
         return booleanWithin(feature, possiblyEnclosingFeature)
     }

     /**
      * Create an intersection between two features.
-     * One or multiple new feature is returned based on 'toSplit', which'll have a geometry that is completely withing boundary
+     * One or multiple new features are returned, based on 'toSplit', which will have a geometry that is completely within the boundary
      */
-    public static clipWith(toSplit: Feature, boundary: Feature<Polygon>): Feature[] {
+    public static clipWith(toSplit: Readonly<Feature>, boundary: Readonly<Feature<Polygon>>): Feature[] {
         if (toSplit.geometry.type === "Point") {
             const p = <Feature<Point>>toSplit
             if (GeoOperations.inside(<[number, number]>p.geometry.coordinates, boundary)) {
@@ -757,9 +795,9 @@ export class GeoOperations {
         }

         if (toSplit.geometry.type === "LineString") {
-            const splitup = turf.lineSplit(<Feature<LineString>>toSplit, boundary)
-            const kept = []
-            for (const f of splitup.features) {
+            const splitup: Feature<LineString>[] = turf.lineSplit(<Feature<LineString>>toSplit, boundary).features
+            const kept: Feature[] = []
+            for (const f of splitup) {
                 if (!GeoOperations.inside(GeoOperations.centerpointCoordinates(f), boundary)) {
                     continue
                 }
@@ -787,7 +825,24 @@ export class GeoOperations {
             return kept
         }
         if (toSplit.geometry.type === "Polygon" || toSplit.geometry.type == "MultiPolygon") {

             const splitup = turf.intersect(<Feature<Polygon>>toSplit, boundary)
+            if (splitup === null) {
+                // No intersection found.
+                // Either the boundary is contained fully in 'toSplit', 'toSplit' is contained fully in 'boundary', or they are not related at all
+                if (GeoOperations.completelyWithin(toSplit, boundary)) {
+                    return [toSplit]
+                }
+                if (GeoOperations.completelyWithin(boundary, <Feature<Polygon | MultiPolygon>>toSplit)) {
+                    return [{
+                        type: "Feature",
+                        properties: { ...toSplit.properties },
+                        geometry: boundary.geometry,
+                        bbox: boundary.bbox
+                    }]
+                }
+                return []
+            }
             splitup.properties = { ...toSplit.properties }
             return [splitup]
         }
@@ -864,32 +919,6 @@ export class GeoOperations {
         }
     }

-    /**
-     * Constructs all tiles where features overlap with and puts those features in them.
-     * Long features (e.g. lines or polygons) which overlap with multiple tiles are referenced in each tile they overlap with
-     * @param zoomlevel
-     * @param features
-     */
-    public static slice(zoomlevel: number, features: Feature[]): Map<number, Feature[]> {
-        const tiles = new Map<number, Feature[]>()
-
-        for (const feature of features) {
-            const bbox = BBox.get(feature)
-            Tiles.MapRange(Tiles.tileRangeFrom(bbox, zoomlevel), (x, y) => {
-                const i = Tiles.tile_index(zoomlevel, x, y)
-
-                let tiledata = tiles.get(i)
-                if (tiledata === undefined) {
-                    tiledata = []
-                    tiles.set(i, tiledata)
-                }
-                tiledata.push(feature)
-            })
-        }
-
-        return tiles
-    }
-
     /**
      * Creates a linestring object based on the outer ring of the given polygon
      *
@@ -905,8 +934,8 @@ export class GeoOperations {
             properties: p.properties,
             geometry: {
                 type: "LineString",
-                coordinates: p.geometry.coordinates[0],
-            },
+                coordinates: p.geometry.coordinates[0]
+            }
         }
     }

@@ -934,7 +963,7 @@ export class GeoOperations {
                     console.debug("Splitting way", feature.properties.id)
                     result.push({
                         ...feature,
-                        geometry: { ...feature.geometry, coordinates: coors.slice(i + 1) },
+                        geometry: { ...feature.geometry, coordinates: coors.slice(i + 1) }
                     })
                     coors = coors.slice(0, i + 1)
                     break
@@ -943,7 +972,7 @@ export class GeoOperations {
             }
             result.push({
                 ...feature,
-                geometry: { ...feature.geometry, coordinates: coors },
+                geometry: { ...feature.geometry, coordinates: coors }
             })
         }
     }
@@ -1117,8 +1146,8 @@ export class GeoOperations {
                 properties: multiLineStringFeature.properties,
                 geometry: {
                     type: "LineString",
-                    coordinates: coors[0],
-                },
+                    coordinates: coors[0]
+                }
             }
         }
         return {
@@ -1126,8 +1155,8 @@ export class GeoOperations {
             properties: multiLineStringFeature.properties,
             geometry: {
                 type: "MultiLineString",
-                coordinates: coors,
-            },
+                coordinates: coors
+            }
         }
     }
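The pair spreadIntoBboxes plus clipAllInBox is what the download script above relies on. A small sketch of how they combine (not part of the diff; the import paths, the placeholder countryFeatures and the tile coordinates are illustrative):

    import { GeoOperations } from "./src/Logic/GeoOperations"
    import { Tiles } from "./src/Models/TileRange"
    import { Feature } from "geojson"

    declare const countryFeatures: Feature[] // whatever features have been loaded

    // Group the features per overlapping zoom-6 tile...
    const perTile = GeoOperations.spreadIntoBboxes(countryFeatures, 6)
    // ...then clip one tile's worth of features to that tile's boundary.
    const someTileIndex = Tiles.tile_index(6, 32, 21)
    const clipped = GeoOperations.clipAllInBox(perTile.get(someTileIndex) ?? [], someTileIndex)
    // Each clipped feature gets an id of the form "<originalId>_<tileIndex>",
    // with "_part_<i>" appended when the clip produced several parts.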
33	src/Logic/Web/CommunityIndex.ts	(new file)

@@ -0,0 +1,33 @@
/**
 * Various tools and types to work with the community index (https://openstreetmap.community/; https://github.com/osmlab/osm-community-index)
 */

export interface CommunityResource {
    /**
     * A unique identifier for the resource
     * "pattern": "^[-_.A-Za-z0-9]+$"
     */
    id: string,
    /**
     * Type of community resource (thus: platform)
     */
    type: string,
    /**
     * Included and excluded locations for this item.
     * See the location-conflation documentation for compatible values: https://github.com/rapideditor/location-conflation#readme
     */
    locationSet?,

    /**
     * Array of ISO-639-1 (2 letter) or ISO-639-3 (3 letter) codes in lowercase
     */
    languageCodes?: string[]
    /**
     * Resource account string, required for some resource types
     */
    account?: string

    resolved?: { url: string, name: string, description: string } & Record<string, string>
}
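For reference, a value satisfying this interface looks roughly like the following. This is an illustrative literal only: the id, channel names and URL are invented and not taken from the osm-community-index dataset.

    import { CommunityResource } from "../src/Logic/Web/CommunityIndex" // path depends on where the snippet lives

    const exampleResource: CommunityResource = {
        id: "OSM-BE-matrix", // must match ^[-_.A-Za-z0-9]+$
        type: "matrix", // the platform; also the name of the icon downloaded by the script
        languageCodes: ["nl", "fr", "en"],
        account: "#osmbe:matrix.org",
        resolved: {
            name: "OpenStreetMap Belgium Matrix channel",
            description: "General chat for Belgian mappers",
            url: "https://matrix.to/#/#osmbe:matrix.org"
        }
    }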
@@ -1,4 +1,5 @@
 import { BBox } from "../Logic/BBox"
+import { Feature, Polygon } from "geojson"

 export interface TileRange {
     xstart: number
@@ -80,6 +81,17 @@ export class Tiles {
         return [z, x, index % factor]
     }

+    static asGeojson(index: number): Feature<Polygon>;
+    static asGeojson(z: number, x: number, y: number): Feature<Polygon>;
+    static asGeojson(zIndex: number, x?: number, y?: number): Feature<Polygon> {
+        let z = zIndex
+        if (x === undefined) {
+            [z, x, y] = Tiles.tile_from_index(zIndex)
+        }
+        const bounds = Tiles.tile_bounds_lon_lat(z, x, y)
+        return new BBox(bounds).asGeoJson()
+    }
+
     /**
      * Return x, y of the tile containing (lat, lon) on the given zoom level
      */
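A minimal usage sketch of the new overloads (not part of the diff; it assumes Tiles.tile_index(z, x, y) as used in the removed GeoOperations.slice helper, and the import path is illustrative):

    import { Tiles } from "./src/Models/TileRange"

    // Both call styles resolve to the same tile polygon
    const index = Tiles.tile_index(6, 32, 21)
    const viaIndex = Tiles.asGeojson(index)
    const viaZxy = Tiles.asGeojson(6, 32, 21)
    // viaIndex and viaZxy are Feature<Polygon> values covering the bounds of tile z=6, x=32, y=21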
@@ -6,8 +6,10 @@
   import ContactLink from "./ContactLink.svelte"
   import { GeoOperations } from "../../Logic/GeoOperations"
   import Translations from "../i18n/Translations"
-  import ToSvelte from "../Base/ToSvelte.svelte"
   import type { Feature, Geometry, GeometryCollection } from "@turf/turf"
+  import type { FeatureCollection, Polygon } from "geojson"
+  import type { CommunityResource } from "../../Logic/Web/CommunityIndex"
+  import Tr from "../Base/Tr.svelte"

   export let location: Store<{ lat: number; lon: number }>
   const tileToFetch: Store<string> = location.mapD((l) => {
@@ -20,7 +22,10 @@
   >([])

   tileToFetch.addCallbackAndRun(async (url) => {
-    const data = await Utils.downloadJsonCached(url, 24 * 60 * 60)
+    const data = await Utils.downloadJsonCached<FeatureCollection<Polygon, {
+      nameEn: string,
+      resources: Record<string, CommunityResource>
+    }>>(url, 24 * 60 * 60)
     if (data === undefined) {
       return
     }
@@ -29,15 +34,13 @@

   const filteredResources = resources.map(
     (features) =>
-      features.filter((f) => {
-        return GeoOperations.inside([location.data.lon, location.data.lat], f)
-      }),
+      features.filter((f) => GeoOperations.inside([location.data.lon, location.data.lat], f)),
     [location]
   )
 </script>

 <div>
-  <ToSvelte construct={t.intro} />
+  <Tr t={t.intro} />
   {#each $filteredResources as feature}
     <ContactLink country={feature.properties} />
   {/each}
@@ -3,23 +3,18 @@
   // The _properties_ of a community feature
   import Locale from "../i18n/Locale.js"
   import Translations from "../i18n/Translations"
-  import ToSvelte from "../Base/ToSvelte.svelte"
   import * as native from "../../assets/language_native.json"
   import { TypedTranslation } from "../i18n/Translation"
   import Tr from "../Base/Tr.svelte"
+  import type { CommunityResource } from "../../Logic/Web/CommunityIndex"

   const availableTranslationTyped: TypedTranslation<{ native: string }> =
     Translations.t.communityIndex.available
   const availableTranslation = availableTranslationTyped.OnEveryLanguage((s, ln) =>
     s.replace("{native}", native[ln] ?? ln)
   )
-  export let country: { resources; nameEn: string }
-  let resources: {
-    id: string
-    resolved: Record<string, string>
-    languageCodes: string[]
-    type: string
-  }[] = []
+  export let country: { resources: Record<string, CommunityResource>; nameEn: string }
+  let resources: CommunityResource[] = []
   $: resources = Array.from(Object.values(country?.resources ?? {}))

   const language = Locale.language
@@ -6,7 +6,7 @@ import { describe, expect, it } from "vitest"
 describe("GeoOperations", () => {
     describe("calculateOverlap", () => {
         it("should not give too much overlap (regression test)", () => {
-            const polyGrb = {
+            const polyGrb: Feature<Polygon> = <any>{
                 type: "Feature",
                 properties: {
                     osm_id: "25189153",
@@ -37,7 +37,7 @@ describe("GeoOperations", () => {
                     "_now:date": "2021-12-05",
                     "_now:datetime": "2021-12-05 21:51:40",
                     "_loaded:date": "2021-12-05",
-                    "_loaded:datetime": "2021-12-05 21:51:40",
+                    "_loaded:datetime": "2021-12-05 21:51:40"
                 },
                 geometry: {
                     type: "Polygon",
@@ -50,21 +50,21 @@ describe("GeoOperations", () => {
                         [3.24329779999996, 50.837435399999855],
                         [3.2431881000000504, 50.83740090000025],
                         [3.243152699999997, 50.83738980000017],
-                        [3.2431059999999974, 50.83730270000021],
-                    ],
-                ],
+                        [3.2431059999999974, 50.83730270000021]
+                    ]
+                ]
                 },
                 id: "https://betadata.grbosm.site/grb?bbox=360935.6475626023,6592540.815539878,361088.52161917265,6592693.689596449/37",
                 _lon: 3.2432137000000116,
                 _lat: 50.83736194999996,
                 bbox: {
+                    minLat: 50.83728850000007,
                     maxLat: 50.837435399999855,
                     maxLon: 3.2433214000000254,
-                    minLat: 50.83728850000007,
-                    minLon: 3.2431059999999974,
-                },
+                    minLon: 3.2431059999999974
+                }
             }
-            const polyHouse = {
+            const polyHouse: Feature<Polygon> = <any>{
                 type: "Feature",
                 id: "way/594963177",
                 properties: {
@@ -95,7 +95,7 @@ describe("GeoOperations", () => {
                     "_loaded:date": "2021-12-05",
                     "_loaded:datetime": "2021-12-05 21:51:39",
                     _surface: "93.32785810484549",
-                    "_surface:ha": "0",
+                    "_surface:ha": "0"
                 },
                 geometry: {
                     type: "Polygon",
@@ -108,9 +108,9 @@ describe("GeoOperations", () => {
                         [3.2431691, 50.8374252],
                         [3.2430936, 50.837401],
                         [3.243046, 50.8374112],
-                        [3.2429993, 50.8373243],
-                    ],
-                ],
+                        [3.2429993, 50.8373243]
+                    ]
+                ],
                 },
                 _lon: 3.2430937,
                 _lat: 50.83736395,
@@ -118,8 +118,8 @@ describe("GeoOperations", () => {
                     maxLat: 50.8374252,
                     maxLon: 3.2431881,
                     minLat: 50.8373027,
-                    minLon: 3.2429993,
-                },
+                    minLon: 3.2429993
+                }
             }

             const p0 = turf.polygon(polyGrb.geometry.coordinates)
@@ -145,11 +145,11 @@ describe("GeoOperations", () => {
                         [3.218560377159008, 51.21499687768525],
                         [3.2207456783268356, 51.21499687768525],
                         [3.2207456783268356, 51.21600586532159],
-                        [3.218560377159008, 51.21600586532159],
-                    ],
+                        [3.218560377159008, 51.21600586532159]
+                    ]
                     ],
-                    type: "Polygon",
-                },
+                    type: "Polygon"
+                }
             }
             const line: Feature<LineString> = {
                 type: "Feature",
@@ -157,10 +157,10 @@ describe("GeoOperations", () => {
                 geometry: {
                     coordinates: [
                         [3.218405371672816, 51.21499091846559],
-                        [3.2208408127450525, 51.21560173433727],
+                        [3.2208408127450525, 51.21560173433727]
                     ],
-                    type: "LineString",
-                },
+                    type: "LineString"
+                }
             }
             const result = GeoOperations.clipWith(line, bbox)
             expect(result.length).to.equal(1)
@@ -168,10 +168,83 @@ describe("GeoOperations", () => {
             const clippedLine = (<Feature<LineString>>result[0]).geometry.coordinates
             const expCoordinates = [
                 [3.2185604, 51.215029800031594],
-                [3.2207457, 51.21557787977764],
+                [3.2207457, 51.21557787977764]
             ]

             expect(clippedLine).to.deep.equal(expCoordinates)
         })
+        it("clipWith should contain the full feature if it is fully contained", () => {
+            const bbox: Feature<Polygon> = {
+                type: "Feature",
+                properties: {},
+                geometry: {
+                    coordinates: [
+                        [
+                            [2.1541744759711037, 51.73994420687188],
+                            [2.1541744759711037, 50.31129074222787],
+                            [4.53247037641421, 50.31129074222787],
+                            [4.53247037641421, 51.73994420687188],
+                            [2.1541744759711037, 51.73994420687188]
+                        ]
+                    ],
+                    type: "Polygon"
+                }
+            }
+            const content: Feature<Polygon> = {
+                type: "Feature",
+                properties: {},
+                geometry: {
+                    coordinates: [
+                        [
+                            [2.8900597545854225, 50.9035099487991],
+                            [3.4872999807053873, 50.74856284865993],
+                            [3.9512276563531543, 50.947206170675486],
+                            [3.897902636163167, 51.25526892606362],
+                            [3.188679867646016, 51.24525576870511],
+                            [2.8900597545854225, 50.9035099487991]
+                        ]
+                    ],
+                    type: "Polygon"
+                }
+            }
+            const clipped = GeoOperations.clipWith(content, bbox)
+            expect(clipped.length).to.equal(1)
+
+            const clippedReverse = GeoOperations.clipWith(bbox, content)
+            expect(clippedReverse.length).to.equal(1)
+        })
     })
 })