forked from MapComplete/MapComplete
Do not show out-of-range features on speelplekken layer, fix handling of multipolygons in 'inside', better tests
This commit is contained in:
parent 117b0bddb1
commit 6f457a6f0d
26 changed files with 1284 additions and 770 deletions
@ -87,7 +87,8 @@ export default class LayerConfig {
geojsonSource: json.source["geoJson"],
geojsonSourceLevel: json.source["geoJsonZoomLevel"],
overpassScript: json.source["overpassScript"],
});
isOsmCache: json.source["isOsmCache"]
}, this.id);
} else {
this.source = new SourceConfig({
osmTags: legacy
@ -30,20 +30,34 @@ export interface LayerConfigJson {
* This determines where the data for the layer is fetched.
* There are some options:
*
* source: {osmTags: "key=value"} will fetch all objects with given tags from OSM. Currently, this will create a query to overpass and fetch the data - in the future this might fetch from the OSM API
* source: {geoJson: "https://my.source.net/some-geo-data.geojson"} to fetch a geojson from a third party source
* source: {geoJson: "https://my.source.net/some-tile-geojson-{layer}-{z}-{x}-{y}.geojson", geoJsonZoomLevel: 14} to use a tiled geojson source. The web server must offer multiple geojsons. {z}, {x} and {y} are substituted by the location; {layer} is substituted with the id of the loaded layer
*
* # Query OSM directly
* source: {osmTags: "key=value"}
* will fetch all objects with given tags from OSM.
* Currently, this will create a query to overpass and fetch the data - in the future this might fetch from the OSM API
*
* # Query OSM Via the overpass API with a custom script
* source: {overpassScript: "<custom overpass tags>"} when you want to do special things. _This should be really rare_.
* This means that the data will be pulled from overpass with this script, and will ignore the osmTags for the query
* However, for the rest of the pipeline, the OsmTags will _still_ be used. This is important to enable layers etc...
*
*
* # A single geojson-file
* source: {geoJson: "https://my.source.net/some-geo-data.geojson"}
* fetches a geojson from a third party source
*
* # A tiled geojson source
* source: {geoJson: "https://my.source.net/some-tile-geojson-{layer}-{z}-{x}-{y}.geojson", geoJsonZoomLevel: 14}
* to use a tiled geojson source. The web server must offer multiple geojsons. {z}, {x} and {y} are substituted by the location; {layer} is substituted with the id of the loaded layer
*
*
* Note that both geojson-options might set a flag 'isOsmCache' indicating that the data originally comes from OSM too
*
*
* NOTE: the previous format was 'overpassTags: AndOrTagCOnfigJson | string', which is interpreted as a shorthand for source: {osmTags: "key=value"}
* While still supported, this is considered deprecated
*/
source: { osmTags: AndOrTagConfigJson | string } |
{ osmTags: AndOrTagConfigJson | string, geoJson: string, geoJsonZoomLevel?: number } |
{ osmTags: AndOrTagConfigJson | string, geoJson: string, geoJsonZoomLevel?: number, isOsmCache?: boolean } |
{ osmTags: AndOrTagConfigJson | string, overpassScript: string }

/**
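For reference, a minimal sketch of the three source variants documented above, written as TypeScript object literals; the layer ids and URLs below are made-up placeholders and not part of this commit:

// Query OSM directly via overpass
const fromOsm = {
    id: "benches",                       // hypothetical layer id
    source: { osmTags: "amenity=bench" }
};

// A tiled geojson source that is itself an OSM cache
const fromCache = {
    id: "benches_cached",                // hypothetical layer id
    source: {
        osmTags: "amenity=bench",        // still used by the rest of the pipeline
        geoJson: "https://example.org/cache/benches_{z}_{x}_{y}.geojson",
        geoJsonZoomLevel: 14,
        isOsmCache: true                 // flag introduced by this commit
    }
};

// A custom overpass script (should be really rare)
const fromScript = {
    id: "special",                       // hypothetical layer id
    source: { osmTags: "amenity=bench", overpassScript: "<custom overpass tags>" }
};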
@ -6,13 +6,15 @@ export default class SourceConfig {
overpassScript?: string;
geojsonSource?: string;
geojsonZoomLevel?: number;
isOsmCacheLayer: boolean;

constructor(params: {
osmTags?: TagsFilter,
overpassScript?: string,
geojsonSource?: string,
isOsmCache?: boolean,
geojsonSourceLevel?: number
}) {
}, context?: string) {

let defined = 0;
if (params.osmTags) {
@ -27,9 +29,14 @@ export default class SourceConfig {
if (defined == 0) {
throw "Source: nothing correct defined in the source"
}
if(params.isOsmCache && params.geojsonSource == undefined){
console.error(params)
throw `Source said it is a OSM-cached layer, but didn't define the actual source of the cache (in context ${context})`
}
this.osmTags = params.osmTags;
this.overpassScript = params.overpassScript;
this.geojsonSource = params.geojsonSource;
this.geojsonZoomLevel = params.geojsonSourceLevel;
this.isOsmCacheLayer = params.isOsmCache ?? false;
}
}
@ -2,7 +2,7 @@ import {FixedUiElement} from "./UI/Base/FixedUiElement";
import CheckBox from "./UI/Input/CheckBox";
import {Basemap} from "./UI/BigComponents/Basemap";
import State from "./State";
import LoadFromOverpass from "./Logic/Actors/UpdateFromOverpass";
import LoadFromOverpass from "./Logic/Actors/OverpassFeatureSource";
import {UIEventSource} from "./Logic/UIEventSource";
import {QueryParameters} from "./Logic/Web/QueryParameters";
import StrayClickHandler from "./Logic/Actors/StrayClickHandler";
@ -10,9 +10,9 @@ import {TagsFilter} from "../Tags/TagsFilter";
import SimpleMetaTagger from "../SimpleMetaTagger";

export default class UpdateFromOverpass implements FeatureSource {
export default class OverpassFeatureSource implements FeatureSource {

public readonly name = "UpdateFromOverpass"
public readonly name = "OverpassFeatureSource"

/**
* The last loaded features of the geojson
@ -84,7 +84,6 @@ export class ElementStorage {
}
}
if (somethingChanged) {
console.trace(`Merging multiple instances of ${elementId}: ` + debug_msg.join(", ")+" newProperties: ", newProperties)
es.ping();
}
return es;
@ -38,7 +38,7 @@ Some advanced functions are available on <b>feat</b> as well:
`
private static readonly OverlapFunc = new ExtraFunction(
"overlapWith",
"Gives a list of features from the specified layer which this feature overlaps with, the amount of overlap in m². The returned value is <b>{ feat: GeoJSONFeature, overlap: number}</b>",
"Gives a list of features from the specified layer which this feature (partly) overlaps with. If the current feature is a point, all features that embed the point are given. The returned value is <code>{ feat: GeoJSONFeature, overlap: number}[]</code> where <code>overlap</code> is the overlapping surface are (in m²) for areas, the overlapping length (in meter) if the current feature is a line or <code>undefined</code> if the current feature is a point",
["...layerIds - one or more layer ids of the layer from which every feature is checked for overlap)"],
(params, feat) => {
return (...layerIds: string[]) => {
@ -6,16 +6,15 @@ export class GeoOperations {
return turf.area(feature);
}

static centerpoint(feature: any)
{
const newFeature= turf.center(feature);
static centerpoint(feature: any) {
const newFeature = turf.center(feature);
newFeature.properties = feature.properties;
newFeature.id = feature.id;

return newFeature;
}

static centerpointCoordinates(feature: any): [number, number]{

static centerpointCoordinates(feature: any): [number, number] {
// @ts-ignore
return turf.center(feature).geometry.coordinates;
}
@ -25,36 +24,100 @@ export class GeoOperations {
* @param lonlat0
* @param lonlat1
*/
static distanceBetween(lonlat0: [number,number], lonlat1:[number, number]){
static distanceBetween(lonlat0: [number, number], lonlat1: [number, number]) {
return turf.distance(lonlat0, lonlat1)
}

/**
* Calculates the overlap of 'feature' with every other specified feature.
* The features with which 'feature' overlaps, are returned together with their overlap area in m²
*
*
* If 'feature' is a LineString, the features in which this feature is (partly) embedded is returned, the overlap length in meter is given
*
* If 'feature' is a point, it will return every feature the point is embedded in. Overlap will be undefined
*/
static calculateOverlap(feature: any,
otherFeatures: any[]): { feat: any, overlap: number }[] {
static calculateOverlap(feature: any, otherFeatures: any[]): { feat: any, overlap: number }[] {

const featureBBox = BBox.get(feature);
const result : { feat: any, overlap: number }[] = [];
const result: { feat: any, overlap: number }[] = [];
if (feature.geometry.type === "Point") {
const coor = feature.geometry.coordinates;
for (const otherFeature of otherFeatures) {

if (otherFeature.geometry === undefined) {
console.error("No geometry for feature ", feature)
throw "List of other features contains a feature without geometry an undefined"
}

let otherFeatureBBox = BBox.get(otherFeature);
if (!featureBBox.overlapsWith(otherFeatureBBox)) {
if (!featureBBox.overlapsWith(otherFeatureBBox)) {
continue;
}

if (this.inside(coor, otherFeatures)) {
result.push({ feat: otherFeatures, overlap: undefined })
if (this.inside(coor, otherFeature)) {
result.push({feat: otherFeature, overlap: undefined})
}
}
return result;
}

if (feature.geometry.type === "LineString") {

for (const otherFeature of otherFeatures) {
const otherFeatureBBox = BBox.get(otherFeature);
const overlaps = featureBBox.overlapsWith(otherFeatureBBox)
if (!overlaps) {
continue;
}

// Calculate the length of the intersection
try {

let intersectionPoints = turf.lineIntersect(feature, otherFeature);
if (intersectionPoints.features.length == 0) {
// No intersections.
// If one point is inside of the polygon, all points are

const coors = feature.geometry.coordinates;
const startCoor = coors[0]
if (this.inside(startCoor, otherFeature)) {
result.push({feat: otherFeature, overlap: this.lengthInMeters(feature)})
}

continue;
}
let intersectionPointsArray = intersectionPoints.features.map(d => {
return d.geometry.coordinates
});

if (intersectionPointsArray.length == 1) {
// We need to add the start- or endpoint of the current feature, depending on which one is embedded
const coors = feature.geometry.coordinates;
const startCoor = coors[0]
if (this.inside(startCoor, otherFeature)) {
// The startpoint is embedded
intersectionPointsArray.push(startCoor)
} else {
intersectionPointsArray.push(coors[coors.length - 1])
}
}

let intersection = turf.lineSlice(turf.point(intersectionPointsArray[0]), turf.point(intersectionPointsArray[1]), feature);

if (intersection == null) {
continue;
}
const intersectionSize = turf.length(intersection); // in km
result.push({feat: otherFeature, overlap: intersectionSize * 1000})
} catch (exception) {
console.warn("EXCEPTION CAUGHT WHILE INTERSECTING: ", exception);
}

}
return result;
}

if (feature.geometry.type === "Polygon" || feature.geometry.type === "MultiPolygon") {

for (const otherFeature of otherFeatures) {
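A short usage sketch of the semantics described in the calculateOverlap doc comment above; the feature data is invented for illustration, only the GeoOperations.calculateOverlap call itself comes from this codebase:

import {GeoOperations} from "./Logic/GeoOperations";

// A polygon and a point that lies inside it (illustrative coordinates only)
const park = { type: "Feature", properties: {}, geometry: { type: "Polygon", coordinates: [[[4.0, 51.0], [4.1, 51.0], [4.1, 51.1], [4.0, 51.1], [4.0, 51.0]]] } };
const bench = { type: "Feature", properties: {}, geometry: { type: "Point", coordinates: [4.05, 51.05] } };

const overlap = GeoOperations.calculateOverlap(bench, [park]);
// For a point, every embedding feature is returned and 'overlap' stays undefined
console.log(overlap.length === 1, overlap[0].overlap === undefined);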
@ -74,7 +137,7 @@ export class GeoOperations {
const intersectionSize = turf.area(intersection); // in m²
result.push({feat: otherFeature, overlap: intersectionSize})
} catch (exception) {
console.log("EXCEPTION CAUGHT WHILE INTERSECTING: ", exception);
console.warn("EXCEPTION CAUGHT WHILE INTERSECTING: ", exception);
}

}
@ -83,7 +146,7 @@ export class GeoOperations {
console.error("Could not correctly calculate the overlap of ", feature, ": unsupported type")
return result;
}

public static inside(pointCoordinate, feature): boolean {
// ray-casting algorithm based on
// http://www.ecse.rpi.edu/Homepages/wrf/Research/Short_Notes/pnpoly.html
@ -92,6 +155,32 @@ export class GeoOperations {
return false;
}

if (feature.geometry.type === "MultiPolygon") {
const coordinates = feature.geometry.coordinates[0];
const outerPolygon = coordinates[0];
const inside = GeoOperations.inside(pointCoordinate, {
geometry: {
type: 'Polygon',
coordinates: [outerPolygon]
}
})
if (!inside) {
return false;
}
for (let i = 1; i < coordinates.length; i++) {
const inHole = GeoOperations.inside(pointCoordinate, {
geometry: {
type: 'Polygon',
coordinates: [coordinates[i]]
}
})
if (inHole) {
return false;
}
}
return true;
}

const x: number = pointCoordinate[0];
const y: number = pointCoordinate[1];
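The new MultiPolygon branch above accepts a point only when it lies inside the outer ring and outside every hole. A minimal sketch with an assumed toy geometry (not data from this commit):

import {GeoOperations} from "./Logic/GeoOperations";

// A square outer ring with a smaller square hole in the middle (illustrative coordinates)
const donut = {
    geometry: {
        type: "MultiPolygon",
        coordinates: [[
            [[0, 0], [10, 0], [10, 10], [0, 10], [0, 0]],   // outer ring
            [[4, 4], [6, 4], [6, 6], [4, 6], [4, 4]]        // hole
        ]]
    }
};

console.log(GeoOperations.inside([2, 2], donut));  // true: inside the outer ring, outside the hole
console.log(GeoOperations.inside([5, 5], donut));  // false: inside the hole
console.log(GeoOperations.inside([12, 5], donut)); // false: outside the outer ring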
@ -125,7 +214,7 @@ export class GeoOperations {
}

class BBox{
class BBox {

readonly maxLat: number;
readonly maxLon: number;
@ -148,29 +237,6 @@ class BBox{
this.check();
}

private check() {
if (isNaN(this.maxLon) || isNaN(this.maxLat) || isNaN(this.minLon) || isNaN(this.minLat)) {
console.log(this);
throw "BBOX has NAN";
}
}

public overlapsWith(other: BBox) {
this.check();
other.check();
if (this.maxLon < other.minLon) {
return false;
}
if (this.maxLat < other.minLat) {
return false;
}
if (this.minLon > other.maxLon) {
return false;
}
return this.minLat <= other.maxLat;

}

static get(feature) {
if (feature.bbox?.overlapsWith === undefined) {
@ -195,4 +261,27 @@ class BBox{
return feature.bbox;
}

public overlapsWith(other: BBox) {
this.check();
other.check();
if (this.maxLon < other.minLon) {
return false;
}
if (this.maxLat < other.minLat) {
return false;
}
if (this.minLon > other.maxLon) {
return false;
}
return this.minLat <= other.maxLat;

}

private check() {
if (isNaN(this.maxLon) || isNaN(this.maxLat) || isNaN(this.minLon) || isNaN(this.minLat)) {
console.log(this);
throw "BBOX has NAN";
}
}

}
@ -26,7 +26,6 @@ export default class MetaTagging {
layers: LayerConfig[],
includeDates = true) {

console.debug("Adding meta tags to all features")
for (const metatag of SimpleMetaTagger.metatags) {
if (metatag.includesDates && !includeDates) {
// We do not add dated entries
@ -95,9 +94,17 @@ export default class MetaTagging {

const f = (featuresPerLayer, feature: any) => {
try {
feature.properties[key] =func(feature);
let result = func(feature);
if(result === undefined || result === ""){
return;
}
if(typeof result !== "string"){
// Make sure it is a string!
result = "" + result;
}
feature.properties[key] = result;
} catch (e) {
console.error("Could not calculate a metatag defined by " + code + " due to " + e + ". This is code defined in the theme. Are you the theme creator? Doublecheck your code. Note that the metatags might not be stable on new features")
console.error("Could not calculate a metatag defined by " + code + " due to " + e + ". This is code defined in the theme. Are you the theme creator? Doublecheck your code. Note that the metatags might not be stable on new features", e)
}

}
@ -49,7 +49,7 @@ export class RegexTag extends TagsFilter {
continue;
}
if (RegexTag.doesMatch(key, this.key)) {
const value = tags[key]
const value = tags[key] ?? "";
return RegexTag.doesMatch(value, this.value) != this.invert;
}
}
State.ts (4 changes)
@ -13,7 +13,7 @@ import BaseLayer from "./Models/BaseLayer";
import Loc from "./Models/Loc";
import Constants from "./Models/Constants";

import UpdateFromOverpass from "./Logic/Actors/UpdateFromOverpass";
import OverpassFeatureSource from "./Logic/Actors/OverpassFeatureSource";
import LayerConfig from "./Customizations/JSON/LayerConfig";
import TitleHandler from "./Logic/Actors/TitleHandler";
import PendingChangesUploader from "./Logic/Actors/PendingChangesUploader";
@ -57,7 +57,7 @@ export default class State {

public favouriteLayers: UIEventSource<string[]>;

public layerUpdater: UpdateFromOverpass;
public layerUpdater: OverpassFeatureSource;

public osmApiFeatureSource : OsmApiFeatureSource ;
@ -40,11 +40,15 @@
"override": {
"source": {
"geoJson": "https://pietervdvn.github.io/speelplekken_cache/speelplekken_{z}_{x}_{y}.geojson",
"geoJsonZoomLevel": 14
"geoJsonZoomLevel": 14,
"isOsmCache": true
},
"icon": "./assets/themes/speelplekken/speelbos.svg",
"minzoom": 12
}
},
"calculatedTags": [
"_is_shadowed=feat.overlapWith('shadow').length > 0 ? 'yes' : 'no'"
]
},
{
"builtin": "playground",
@ -54,8 +58,12 @@
"source": {
"geoJsonLocal": "http://127.0.0.1:8080/speelplekken_{layer}_{z}_{x}_{y}.geojson",
"geoJson": "https://pietervdvn.github.io/speelplekken_cache/speelplekken_{layer}_{z}_{x}_{y}.geojson",
"geoJsonZoomLevel": 14
}
"geoJsonZoomLevel": 14,
"isOsmCache": true
},
"calculatedTags": [
"_is_shadowed=feat.overlapWith('shadow').length > 0 ? 'yes' : 'no'"
]
}
},
{
@ -66,8 +74,12 @@
"source": {
"geoJsonLocal": "http://127.0.0.1:8080/speelplekken_{layer}_{z}_{x}_{y}.geojson",
"geoJson": "https://pietervdvn.github.io/speelplekken_cache/speelplekken_{layer}_{z}_{x}_{y}.geojson",
"geoJsonZoomLevel": 14
}
"geoJsonZoomLevel": 14,
"isOsmCache": true
},
"calculatedTags": [
"_is_shadowed=feat.overlapWith('shadow').length > 0 ? 'yes' : 'no'"
]
}
},
{
@ -78,8 +90,12 @@
"source": {
"geoJsonLocal": "http://127.0.0.1:8080/speelplekken_{layer}_{z}_{x}_{y}.geojson",
"geoJson": "https://pietervdvn.github.io/speelplekken_cache/speelplekken_{layer}_{z}_{x}_{y}.geojson",
"geoJsonZoomLevel": 14
}
"geoJsonZoomLevel": 14,
"isOsmCache": true
},
"calculatedTags": [
"_is_shadowed=feat.overlapWith('shadow').length > 0 ? 'yes' : 'no'"
]
}
},
{
@ -89,20 +105,26 @@
"source": {
"geoJsonLocal": "http://127.0.0.1:8080/speelplekken_{layer}_{z}_{x}_{y}.geojson",
"geoJson": "https://pietervdvn.github.io/speelplekken_cache/speelplekken_{layer}_{z}_{x}_{y}.geojson",
"geoJsonZoomLevel": 14
}
"geoJsonZoomLevel": 14,
"isOsmCache": true
},
"calculatedTags": [
"_is_shadowed=feat.overlapWith('shadow').length > 0 ? 'yes' : 'no'"
]
}
},
{
"builtin": "slow_roads",
"override": {
"calculatedTags": [
"_part_of_walking_routes=feat.memberships().map(r => \"<a href='#relation/\"+r.relation.id+\"'>\" + r.relation.tags.name + \"</a>\").join(', ')"
"_part_of_walking_routes=feat.memberships().map(r => \"<a href='#relation/\"+r.relation.id+\"'>\" + r.relation.tags.name + \"</a>\").join(', ')",
"_is_shadowed=feat.overlapWith('shadow').length > 0 ? 'yes' : 'no'"
],
"source": {
"geoJsonLocal": "http://127.0.0.1:8080/speelplekken_{layer}_{z}_{x}_{y}.geojson",
"geoJson": "https://pietervdvn.github.io/speelplekken_cache/speelplekken_{layer}_{z}_{x}_{y}.geojson",
"geoJsonZoomLevel": 14
"geoJson": "http://127.0.0.1:8080/speelplekken_{layer}_{z}_{x}_{y}.geojson",
"geoJsonWeb": "https://pietervdvn.github.io/speelplekken_cache/speelplekken_{layer}_{z}_{x}_{y}.geojson",
"geoJsonZoomLevel": 14,
"isOsmCache": true
}
}
},
@ -121,7 +143,8 @@
]
},
"geoJson": "https://pietervdvn.github.io/speelplekken_cache/speelplekken_{z}_{x}_{y}.geojson",
"geoJsonZoomLevel": 14
"geoJsonZoomLevel": 14,
"isOsmCache": true
},
"title": {
"render": "Wandeling <i>{name}</i>",
@ -227,5 +250,16 @@
"render": "Maakt deel uit van {_part_of_walking_routes}",
"condition": "_part_of_walking_routes~*"
}
]
],
"overrideAll": {
"isShown": {
"render": "yes",
"mappings": [
{
"if": "_is_shadowed=yes",
"then": "no"
}
]
}
}
}
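Throughout these theme changes, a calculated tag marks features that overlap the 'shadow' layer, and the overrideAll isShown mapping then hides them; the cache-generation script further down additionally drops features whose isShown renders to "no". A schematic sketch of that decision, as pseudo-logic rather than the actual MapComplete rendering pipeline:

// Sketch only: how a feature ends up hidden under the config above
function isFeatureShown(featureProperties: Record<string, string>): boolean {
    // calculatedTags set _is_shadowed to 'yes' when feat.overlapWith('shadow') is non-empty
    const isShadowed = featureProperties["_is_shadowed"] === "yes";
    // isShown renders "yes" by default and maps _is_shadowed=yes to "no"
    return !isShadowed;
}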
package-lock.json (generated, 173 changes)
@ -1183,7 +1183,8 @@
|
|||
"integrity": "sha512-oWb1Z6mkHIskLzEJ/XWX0srkpkTQ7vaopMQkyaEIoq0fmtFVxOthb8cCxeT+p3ynTdkk/RZwbgG4brR5BeWECw==",
|
||||
"optional": true,
|
||||
"requires": {
|
||||
"bindings": "^1.5.0"
|
||||
"bindings": "^1.5.0",
|
||||
"nan": "^2.12.1"
|
||||
}
|
||||
},
|
||||
"glob-parent": {
|
||||
|
@ -1744,6 +1745,21 @@
|
|||
"@turf/helpers": "^6.3.0",
|
||||
"@turf/invariant": "^6.3.0",
|
||||
"polygon-clipping": "^0.15.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"polygon-clipping": {
|
||||
"version": "0.15.3",
|
||||
"resolved": "https://registry.npmjs.org/polygon-clipping/-/polygon-clipping-0.15.3.tgz",
|
||||
"integrity": "sha512-ho0Xx5DLkgxRx/+n4O74XyJ67DcyN3Tu9bGYKsnTukGAW6ssnuak6Mwcyb1wHy9MZc9xsUWqIoiazkZB5weECg==",
|
||||
"requires": {
|
||||
"splaytree": "^3.1.0"
|
||||
}
|
||||
},
|
||||
"splaytree": {
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/splaytree/-/splaytree-3.1.0.tgz",
|
||||
"integrity": "sha512-gvUGR7xnOy0fLKTCxDeUZYgU/I1Tdf8M/lM1Qrf8L2TIOR5ipZjGk02uYcdv0o2x7WjVRgpm3iS2clLyuVAt0Q=="
|
||||
}
|
||||
}
|
||||
},
|
||||
"@turf/dissolve": {
|
||||
|
@ -1882,6 +1898,21 @@
|
|||
"@turf/helpers": "^6.3.0",
|
||||
"@turf/invariant": "^6.3.0",
|
||||
"polygon-clipping": "^0.15.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"polygon-clipping": {
|
||||
"version": "0.15.3",
|
||||
"resolved": "https://registry.npmjs.org/polygon-clipping/-/polygon-clipping-0.15.3.tgz",
|
||||
"integrity": "sha512-ho0Xx5DLkgxRx/+n4O74XyJ67DcyN3Tu9bGYKsnTukGAW6ssnuak6Mwcyb1wHy9MZc9xsUWqIoiazkZB5weECg==",
|
||||
"requires": {
|
||||
"splaytree": "^3.1.0"
|
||||
}
|
||||
},
|
||||
"splaytree": {
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/splaytree/-/splaytree-3.1.0.tgz",
|
||||
"integrity": "sha512-gvUGR7xnOy0fLKTCxDeUZYgU/I1Tdf8M/lM1Qrf8L2TIOR5ipZjGk02uYcdv0o2x7WjVRgpm3iS2clLyuVAt0Q=="
|
||||
}
|
||||
}
|
||||
},
|
||||
"@turf/invariant": {
|
||||
|
@ -2588,6 +2619,21 @@
|
|||
"@turf/helpers": "^6.3.0",
|
||||
"@turf/invariant": "^6.3.0",
|
||||
"polygon-clipping": "^0.15.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"polygon-clipping": {
|
||||
"version": "0.15.3",
|
||||
"resolved": "https://registry.npmjs.org/polygon-clipping/-/polygon-clipping-0.15.3.tgz",
|
||||
"integrity": "sha512-ho0Xx5DLkgxRx/+n4O74XyJ67DcyN3Tu9bGYKsnTukGAW6ssnuak6Mwcyb1wHy9MZc9xsUWqIoiazkZB5weECg==",
|
||||
"requires": {
|
||||
"splaytree": "^3.1.0"
|
||||
}
|
||||
},
|
||||
"splaytree": {
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/splaytree/-/splaytree-3.1.0.tgz",
|
||||
"integrity": "sha512-gvUGR7xnOy0fLKTCxDeUZYgU/I1Tdf8M/lM1Qrf8L2TIOR5ipZjGk02uYcdv0o2x7WjVRgpm3iS2clLyuVAt0Q=="
|
||||
}
|
||||
}
|
||||
},
|
||||
"@turf/unkink-polygon": {
|
||||
|
@ -7770,6 +7816,59 @@
|
|||
"requires": {
|
||||
"commander": "2"
|
||||
}
|
||||
},
|
||||
"turf": {
|
||||
"version": "3.0.14",
|
||||
"resolved": "https://registry.npmjs.org/turf/-/turf-3.0.14.tgz",
|
||||
"integrity": "sha1-6y9KgKLVg7jGSGvHtccZBGaGbCc=",
|
||||
"requires": {
|
||||
"turf-along": "^3.0.12",
|
||||
"turf-area": "^3.0.12",
|
||||
"turf-bbox": "^3.0.12",
|
||||
"turf-bbox-polygon": "^3.0.12",
|
||||
"turf-bearing": "^3.0.12",
|
||||
"turf-bezier": "^3.0.12",
|
||||
"turf-buffer": "^3.0.12",
|
||||
"turf-center": "^3.0.12",
|
||||
"turf-centroid": "^3.0.12",
|
||||
"turf-circle": "^3.0.12",
|
||||
"turf-collect": "^3.0.12",
|
||||
"turf-combine": "^3.0.12",
|
||||
"turf-concave": "^3.0.12",
|
||||
"turf-convex": "^3.0.12",
|
||||
"turf-destination": "^3.0.12",
|
||||
"turf-difference": "^3.0.12",
|
||||
"turf-distance": "^3.0.12",
|
||||
"turf-envelope": "^3.0.12",
|
||||
"turf-explode": "^3.0.12",
|
||||
"turf-flip": "^3.0.12",
|
||||
"turf-helpers": "^3.0.12",
|
||||
"turf-hex-grid": "^3.0.12",
|
||||
"turf-inside": "^3.0.12",
|
||||
"turf-intersect": "^3.0.12",
|
||||
"turf-isolines": "^3.0.12",
|
||||
"turf-kinks": "^3.0.12",
|
||||
"turf-line-distance": "^3.0.12",
|
||||
"turf-line-slice": "^3.0.12",
|
||||
"turf-meta": "^3.0.12",
|
||||
"turf-midpoint": "^3.0.12",
|
||||
"turf-nearest": "^3.0.12",
|
||||
"turf-planepoint": "^3.0.12",
|
||||
"turf-point-grid": "^3.0.12",
|
||||
"turf-point-on-line": "^3.0.12",
|
||||
"turf-point-on-surface": "^3.0.12",
|
||||
"turf-random": "^3.0.12",
|
||||
"turf-sample": "^3.0.12",
|
||||
"turf-simplify": "^3.0.12",
|
||||
"turf-square": "^3.0.12",
|
||||
"turf-square-grid": "^3.0.12",
|
||||
"turf-tag": "^3.0.12",
|
||||
"turf-tesselate": "^3.0.12",
|
||||
"turf-tin": "^3.0.12",
|
||||
"turf-triangle-grid": "^3.0.12",
|
||||
"turf-union": "^3.0.12",
|
||||
"turf-within": "^3.0.12"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@ -8238,6 +8337,12 @@
|
|||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
|
||||
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
|
||||
},
|
||||
"nan": {
|
||||
"version": "2.14.2",
|
||||
"resolved": "https://registry.npmjs.org/nan/-/nan-2.14.2.tgz",
|
||||
"integrity": "sha512-M2ufzIiINKCuDfBSAUr1vWQ+vuVcA9kqx8JJUsbQi6yf1uGRyb7HfpdfUr5qLXf3B/t8dPvcjhKMmlfnP47EzQ==",
|
||||
"optional": true
|
||||
},
|
||||
"nanoid": {
|
||||
"version": "3.1.22",
|
||||
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.1.22.tgz",
|
||||
|
@ -9084,14 +9189,6 @@
|
|||
"resolved": "https://registry.npmjs.org/point-in-polygon/-/point-in-polygon-1.1.0.tgz",
|
||||
"integrity": "sha512-3ojrFwjnnw8Q9242TzgXuTD+eKiutbzyslcq1ydfu82Db2y+Ogbmyrkpv0Hgj31qwT3lbS9+QAAO/pIQM35XRw=="
|
||||
},
|
||||
"polygon-clipping": {
|
||||
"version": "0.15.3",
|
||||
"resolved": "https://registry.npmjs.org/polygon-clipping/-/polygon-clipping-0.15.3.tgz",
|
||||
"integrity": "sha512-ho0Xx5DLkgxRx/+n4O74XyJ67DcyN3Tu9bGYKsnTukGAW6ssnuak6Mwcyb1wHy9MZc9xsUWqIoiazkZB5weECg==",
|
||||
"requires": {
|
||||
"splaytree": "^3.1.0"
|
||||
}
|
||||
},
|
||||
"posix-character-classes": {
|
||||
"version": "0.1.1",
|
||||
"resolved": "https://registry.npmjs.org/posix-character-classes/-/posix-character-classes-0.1.1.tgz",
|
||||
|
@ -10820,11 +10917,6 @@
|
|||
"integrity": "sha512-U+MTEOO0AiDzxwFvoa4JVnMV6mZlJKk2sBLt90s7G0Gd0Mlknc7kxEn3nuDPNZRta7O2uy8oLcZLVT+4sqNZHQ==",
|
||||
"dev": true
|
||||
},
|
||||
"splaytree": {
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/splaytree/-/splaytree-3.1.0.tgz",
|
||||
"integrity": "sha512-gvUGR7xnOy0fLKTCxDeUZYgU/I1Tdf8M/lM1Qrf8L2TIOR5ipZjGk02uYcdv0o2x7WjVRgpm3iS2clLyuVAt0Q=="
|
||||
},
|
||||
"split": {
|
||||
"version": "0.2.10",
|
||||
"resolved": "https://registry.npmjs.org/split/-/split-0.2.10.tgz",
|
||||
|
@ -11587,59 +11679,6 @@
|
|||
"safe-buffer": "^5.0.1"
|
||||
}
|
||||
},
|
||||
"turf": {
|
||||
"version": "3.0.14",
|
||||
"resolved": "https://registry.npmjs.org/turf/-/turf-3.0.14.tgz",
|
||||
"integrity": "sha1-6y9KgKLVg7jGSGvHtccZBGaGbCc=",
|
||||
"requires": {
|
||||
"turf-along": "^3.0.12",
|
||||
"turf-area": "^3.0.12",
|
||||
"turf-bbox": "^3.0.12",
|
||||
"turf-bbox-polygon": "^3.0.12",
|
||||
"turf-bearing": "^3.0.12",
|
||||
"turf-bezier": "^3.0.12",
|
||||
"turf-buffer": "^3.0.12",
|
||||
"turf-center": "^3.0.12",
|
||||
"turf-centroid": "^3.0.12",
|
||||
"turf-circle": "^3.0.12",
|
||||
"turf-collect": "^3.0.12",
|
||||
"turf-combine": "^3.0.12",
|
||||
"turf-concave": "^3.0.12",
|
||||
"turf-convex": "^3.0.12",
|
||||
"turf-destination": "^3.0.12",
|
||||
"turf-difference": "^3.0.12",
|
||||
"turf-distance": "^3.0.12",
|
||||
"turf-envelope": "^3.0.12",
|
||||
"turf-explode": "^3.0.12",
|
||||
"turf-flip": "^3.0.12",
|
||||
"turf-helpers": "^3.0.12",
|
||||
"turf-hex-grid": "^3.0.12",
|
||||
"turf-inside": "^3.0.12",
|
||||
"turf-intersect": "^3.0.12",
|
||||
"turf-isolines": "^3.0.12",
|
||||
"turf-kinks": "^3.0.12",
|
||||
"turf-line-distance": "^3.0.12",
|
||||
"turf-line-slice": "^3.0.12",
|
||||
"turf-meta": "^3.0.12",
|
||||
"turf-midpoint": "^3.0.12",
|
||||
"turf-nearest": "^3.0.12",
|
||||
"turf-planepoint": "^3.0.12",
|
||||
"turf-point-grid": "^3.0.12",
|
||||
"turf-point-on-line": "^3.0.12",
|
||||
"turf-point-on-surface": "^3.0.12",
|
||||
"turf-random": "^3.0.12",
|
||||
"turf-sample": "^3.0.12",
|
||||
"turf-simplify": "^3.0.12",
|
||||
"turf-square": "^3.0.12",
|
||||
"turf-square-grid": "^3.0.12",
|
||||
"turf-tag": "^3.0.12",
|
||||
"turf-tesselate": "^3.0.12",
|
||||
"turf-tin": "^3.0.12",
|
||||
"turf-triangle-grid": "^3.0.12",
|
||||
"turf-union": "^3.0.12",
|
||||
"turf-within": "^3.0.12"
|
||||
}
|
||||
},
|
||||
"turf-along": {
|
||||
"version": "3.0.12",
|
||||
"resolved": "https://registry.npmjs.org/turf-along/-/turf-along-3.0.12.tgz",
|
||||
|
|
|
@ -9,14 +9,14 @@
"scripts": {
"increase-memory": "export NODE_OPTIONS=--max_old_space_size=4096",
"start": "ts-node scripts/generateLayerOverview.ts --no-fail && npm run increase-memory && parcel *.html UI/** Logic/** assets/** assets/**/** assets/**/**/** vendor/* vendor/*/*",
"test": "ts-node test/Tag.spec.ts && ts-node test/TagQuestion.spec.ts && ts-node test/ImageSearcher.spec.ts && ts-node test/ImageAttribution.spec.ts && ts-node test/Theme.spec.ts",
"test": "ts-node test/TestAll.ts",
"init": "npm run generate && npm run generate:editor-layer-index && npm run generate:layouts && npm run clean",
"generate:editor-layer-index": "cd assets/ && wget https://osmlab.github.io/editor-layer-index/imagery.geojson --output-document=editor-layer-index.json",
"generate:images": "ts-node scripts/generateIncludedImages.ts",
"generate:translations": "ts-node scripts/generateTranslations.ts",
"generate:layouts": "ts-node scripts/generateLayouts.ts",
"generate:docs": "ts-node scripts/generateDocs.ts && ts-node scripts/generateTaginfoProjectFiles.ts",
"generate:cache:speelplekken": "ts-node scripts/generateCache.ts speelplekken 14 ../pietervdvn.github.io/speelplekken_cache/ 51.22 4.30 51.08 4.55",
"generate:cache:speelplekken": "npm run generate:layeroverview && ts-node scripts/generateCache.ts speelplekken 14 ../pietervdvn.github.io/speelplekken_cache/ 51.20 4.35 51.09 4.56",
"generate:layeroverview": "ts-node scripts/generateLayerOverview.ts --no-fail",
"generate:licenses": "ts-node scripts/generateLicenseInfo.ts --no-fail",
"generate:report": "cd Docs/Tools && ./compileStats.sh && git commit . -m 'New statistics ands graphs' && git push",
@ -79,8 +79,7 @@
"sharp": "^0.27.0",
"slick-carousel": "^1.8.1",
"tailwindcss": "npm:@tailwindcss/postcss7-compat@^2.0.2",
"tslint": "^6.1.3",
"turf": "^3.0.14"
"tslint": "^6.1.3"
},
"devDependencies": {
"@babel/polyfill": "^7.10.4",
@ -18,28 +18,38 @@ export default class ScriptUtils {
return result;
}

public static DownloadJSON(url, continuation : (parts : string []) => void){
https.get(url, (res) => {
console.log("Got response!")
const parts : string[] = []
res.setEncoding('utf8');
res.on('data', function (chunk) {
// @ts-ignore
parts.push(chunk)
});
public static DownloadJSON(url) : Promise<any>{
return new Promise((resolve, reject) => {
https.get(url, (res) => {
const parts : string[] = []
res.setEncoding('utf8');
res.on('data', function (chunk) {
// @ts-ignore
parts.push(chunk)
});

res.addListener('end', function () {
continuation(parts)
});
res.addListener('end', function () {
const result = parts.join("")
try{
resolve(JSON.parse(result))
}catch (e){
reject(e)
}
});
})
})

}

public static sleep(ms) {
if(ms <= 0){
process.stdout.write("\r                                       \r")
return;
}
return new Promise((resolve) => {
console.debug("Sleeping for", ms)
setTimeout(resolve, ms);

});
process.stdout.write("\r Sleeping for "+(ms/1000)+"s \r")
setTimeout(resolve, 1000);
}).then(() => ScriptUtils.sleep(ms - 1000));
}
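With the callback parameter gone, DownloadJSON now returns a Promise, which is how the cache script below consumes it. A minimal usage sketch; the URL is a placeholder and the import path is an assumption:

import ScriptUtils from "./scripts/ScriptUtils"; // path assumed for illustration

async function fetchExample() {
    try {
        // Resolves with the parsed JSON body, rejects if the response is not valid JSON
        const json = await ScriptUtils.DownloadJSON("https://example.org/some.geojson");
        console.log("Downloaded", json.features?.length, "features");
    } catch (e) {
        console.error("Download or parsing failed", e);
        await ScriptUtils.sleep(60000); // back off, mirroring the retry pattern used in generateCache.ts
    }
}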
@ -16,6 +16,7 @@ import * as OsmToGeoJson from "osmtogeojson";
import MetaTagging from "../Logic/MetaTagging";
import LayerConfig from "../Customizations/JSON/LayerConfig";
import {GeoOperations} from "../Logic/GeoOperations";
import {fail} from "assert";

function createOverpassObject(theme: LayoutConfig) {
@ -29,8 +30,11 @@ function createOverpassObject(theme: LayoutConfig) {
continue;
}
if (layer.source.geojsonSource !== undefined) {
// We download these anyway - we are building the cache after all!
//continue;
// This layer defines a geoJson-source
// SHould it be cached?
if (!layer.source.isOsmCacheLayer) {
continue;
}
}

@ -49,17 +53,6 @@ function createOverpassObject(theme: LayoutConfig) {
return new Overpass(new Or(filters), extraScripts);
}

function saveResponse(chunks: string[], targetDir: string): boolean {
const contents = chunks.join("")
if (contents.startsWith("<?xml")) {
// THis is an error message
console.error("Failed to create ", targetDir, "probably over quota: ", contents)
return false;
}
writeFileSync(targetDir, contents)
return true
}

function rawJsonName(targetDir: string, x: number, y: number, z: number): string {
return targetDir + "_" + z + "_" + x + "_" + y + ".json"
}
@ -68,6 +61,7 @@ function geoJsonName(targetDir: string, x: number, y: number, z: number): string
return targetDir + "_" + z + "_" + x + "_" + y + ".geojson"
}

/// Downloads the given feature and saves them to disk
async function downloadRaw(targetdir: string, r: TileRange, overpass: Overpass)/* : {failed: number, skipped :number} */ {
let downloaded = 0
let failed = 0
@ -92,37 +86,48 @@ async function downloadRaw(targetdir: string, r: TileRange, overpass: Overpass)/
}
const url = overpass.buildQuery("[bbox:" + bounds.south + "," + bounds.west + "," + bounds.north + "," + bounds.east + "]")

let gotResponse = false
let success = false;
ScriptUtils.DownloadJSON(url,
chunks => {
gotResponse = true;
success = saveResponse(chunks, filename)
await ScriptUtils.DownloadJSON(url)
.then(json => {
console.log("Got the response - writing to ", filename)
writeFileSync(filename, JSON.stringify(json, null, "  "));
}
)
.catch(err => {
console.log("Could not download - probably hit the rate limit; waiting a bit")
failed++;
return ScriptUtils.sleep(60000).then(() => console.log("Waiting is done"))
})

while (!gotResponse) {
await ScriptUtils.sleep(10000)
console.debug("Waking up")
if (!gotResponse) {
console.log("Didn't get an answer yet - waiting more")
}
}

if (!success) {
failed++;
console.log("Hit the rate limit - waiting 90s")
for (let i = 0; i < 90; i++) {
console.log(90 - i)
await ScriptUtils.sleep(1000)
}
}
// Cooldown
console.debug("Cooling down 10s")
await ScriptUtils.sleep(10000)
}
}

return {failed: failed, skipped: skipped}
}

async function postProcess(targetdir: string, r: TileRange, theme: LayoutConfig) {
/*
 * Downloads extra geojson sources and returns the features.
 * Extra geojson layers should not be tiled
 */
async function downloadExtraData(theme: LayoutConfig)/* : any[] */ {
const allFeatures: any[] = []
for (const layer of theme.layers) {
const source = layer.source.geojsonSource;
if (source === undefined) {
continue;
}
if (layer.source.isOsmCacheLayer) {
// Cached layers are not considered here
continue;
}
console.log("Downloading extra data: ", source)
await ScriptUtils.DownloadJSON(source).then(json => allFeatures.push(...json.features))
}
return allFeatures;
}

async function postProcess(targetdir: string, r: TileRange, theme: LayoutConfig, extraFeatures: any[]) {
let processed = 0;
const layerIndex = theme.LayerIndex();
for (let x = r.xstart; x <= r.xend; x++) {
@ -131,7 +136,8 @@ async function postProcess(targetdir: string, r: TileRange, theme: LayoutConfig)
const filename = rawJsonName(targetdir, x, y, r.zoomlevel)
console.log(" Post processing", processed, "/", r.total, filename)
if (!existsSync(filename)) {
throw "Not found - and not downloaded. Run this script again!: " + filename
console.error("Not found - and not downloaded. Run this script again!: " + filename)
continue;
}

// We read the raw OSM-file and convert it to a geojson
@ -140,6 +146,8 @@ async function postProcess(targetdir: string, r: TileRange, theme: LayoutConfig)
// Create and save the geojson file - which is the main chunk of the data
const geojson = OsmToGeoJson.default(rawOsm);
const osmTime = new Date(rawOsm.osm3s.timestamp_osm_base);
// And merge in the extra features - needed for the metatagging
geojson.features.push(...extraFeatures);

for (const feature of geojson.features) {

@ -149,9 +157,6 @@ async function postProcess(targetdir: string, r: TileRange, theme: LayoutConfig)
break;
}
}
if(feature["_matching_layer_id"] === undefined){
console.log("No matching layer found for ", feature.properties.id)
}
}
const featuresFreshness = geojson.features.map(feature => {
return ({
@ -161,6 +166,7 @@ async function postProcess(targetdir: string, r: TileRange, theme: LayoutConfig)
});
// Extract the relationship information
const relations = ExtractRelations.BuildMembershipTable(ExtractRelations.GetRelationElements(rawOsm))

MetaTagging.addMetatags(featuresFreshness, relations, theme.layers, false);

@ -180,9 +186,14 @@ async function postProcess(targetdir: string, r: TileRange, theme: LayoutConfig)

}
}
for (const feature of geojson.features) {
// Some cleanup
delete feature["bbox"]
}

writeFileSync(geoJsonName(targetdir, x, y, r.zoomlevel), JSON.stringify(geojson, null, " "))
const targetPath = geoJsonName(targetdir+".unfiltered", x, y, r.zoomlevel)
// This is the geojson file containing all features
writeFileSync(targetPath, JSON.stringify(geojson, null, " "))

}
}
@ -192,15 +203,30 @@ async function splitPerLayer(targetdir: string, r: TileRange, theme: LayoutConfi
const z = r.zoomlevel;
for (let x = r.xstart; x <= r.xend; x++) {
for (let y = r.ystart; y <= r.yend; y++) {
const file = readFileSync(geoJsonName(targetdir, x, y, z), "UTF8")
const file = readFileSync(geoJsonName(targetdir+".unfiltered", x, y, z), "UTF8")

for (const layer of theme.layers) {
const geojson = JSON.parse(file)
geojson.features = geojson.features.filter(f => f._matching_layer_id === layer.id)
if (geojson.features.length == 0) {
if (!layer.source.isOsmCacheLayer) {
continue;
}
const geojson = JSON.parse(file)
const oldLength = geojson.features.length;
geojson.features = geojson.features
.filter(f => f._matching_layer_id === layer.id)
.filter(f => {
const isShown = layer.isShown.GetRenderValue(f.properties).txt
if (isShown === "no") {
console.log("Dropping feature ", f.id)
return false;
}
return true;
})
const new_path = geoJsonName(targetdir + "_" + layer.id, x, y, z);
console.log(new_path, " has ", geojson.features.length, " features after filtering (dropped ", oldLength - geojson.features.length,")" )
if (geojson.features.length == 0) {
console.log("Not writing geojson file as it is empty", new_path)
continue;
}
writeFileSync(new_path, JSON.stringify(geojson, null, " "))
}
@ -243,11 +269,12 @@ async function main(args: string[]) {
const cachingResult = await downloadRaw(targetdir, tileRange, overpass)
failed = cachingResult.failed
if (failed > 0) {
ScriptUtils.sleep(30000)
await ScriptUtils.sleep(30000)
}
} while (failed > 0)

await postProcess(targetdir, tileRange, theme)
const extraFeatures = await downloadExtraData(theme);
await postProcess(targetdir, tileRange, theme, extraFeatures)
await splitPerLayer(targetdir, tileRange, theme)
}
@ -32,8 +32,13 @@ const themeFiles: any[] = ScriptUtils.readDirRecSync("./assets/themes")
.filter(path => path.endsWith(".json"))
.filter(path => path.indexOf("license_info.json") < 0)
.map(path => {
return JSON.parse(readFileSync(path, "UTF8"));
})
try{
return JSON.parse(readFileSync(path, "UTF8"));
}catch(e){
console.error("Could not read file ", path, "due to ", e)
throw e
}
});

console.log("Discovered", layerFiles.length, "layers and", themeFiles.length, "themes\n")
return {
test/GeoOperations.spec.ts (new file, 197 lines)
@ -0,0 +1,197 @@
import {Utils} from "../Utils";

Utils.runningFromConsole = true;
import {equal} from "assert";
import T from "./TestHelper";
import {FromJSON} from "../Customizations/JSON/FromJSON";
import Locale from "../UI/i18n/Locale";
import Translations from "../UI/i18n/Translations";
import {UIEventSource} from "../Logic/UIEventSource";
import TagRenderingConfig from "../Customizations/JSON/TagRenderingConfig";
import EditableTagRendering from "../UI/Popup/EditableTagRendering";
import {Translation} from "../UI/i18n/Translation";
import {OH, OpeningHour} from "../UI/OpeningHours/OpeningHours";
import PublicHolidayInput from "../UI/OpeningHours/PublicHolidayInput";
import {SubstitutedTranslation} from "../UI/SubstitutedTranslation";
import {Tag} from "../Logic/Tags/Tag";
import {And} from "../Logic/Tags/And";
import * as Assert from "assert";
import {GeoOperations} from "../Logic/GeoOperations";

export default class GeoOperationsSpec extends T {

    private static polygon = {
        "type": "Feature",
        "properties": {},
        "geometry": {
            "type": "Polygon",
            "coordinates": [[
                [1.8017578124999998, 50.401515322782366],
                [-3.1640625, 46.255846818480315],
                [5.185546875, 44.74673324024678],
                [1.8017578124999998, 50.401515322782366]
            ]]
        }
    };
    private static multiPolygon = {
        "type": "Feature",
        "properties": {},
        "geometry": {
            "type": "MultiPolygon",
            "coordinates": [[
                [
                    [1.8017578124999998, 50.401515322782366],
                    [-3.1640625, 46.255846818480315],
                    [5.185546875, 44.74673324024678],
                    [1.8017578124999998, 50.401515322782366]
                ],
                [
                    [1.0107421875, 48.821332549646634],
                    [1.329345703125, 48.25394114463431],
                    [1.988525390625, 48.71271258145237],
                    [0.999755859375, 48.86471476180277],
                    [1.0107421875, 48.821332549646634]
                ]
            ]]
        }
    };

    private static inHole = [1.42822265625, 48.61838518688487]
    private static inMultiPolygon = [2.515869140625, 47.37603463349758]
    private static outsidePolygon = [4.02099609375, 47.81315451752768]

    constructor() {
        super(
            "GeoOperationsSpec", [
                ["Point out of polygon", () => {
                    GeoOperationsSpec.isFalse(GeoOperations.inside([3.779296875, 48.777912755501845], GeoOperationsSpec.polygon), "Point is outside of the polygon");
                }
                ],
                ["Point inside of polygon", () => {
                    GeoOperationsSpec.isTrue(GeoOperations.inside([1.23046875, 47.60616304386874], GeoOperationsSpec.polygon), "Point is inside of the polygon");
                }
                ],
                ["MultiPolygonTest", () => {
                    const isTrue = GeoOperationsSpec.isTrue;
                    const isFalse = GeoOperationsSpec.isFalse;

                    isFalse(GeoOperations.inside(GeoOperationsSpec.inHole, GeoOperationsSpec.multiPolygon), "InHole was detected as true");
                    isTrue(GeoOperations.inside(GeoOperationsSpec.inMultiPolygon, GeoOperationsSpec.multiPolygon), "InMultiPolgyon was not detected as true");
                    isFalse(GeoOperations.inside(GeoOperationsSpec.outsidePolygon, GeoOperationsSpec.multiPolygon), "OutsideOfPolygon was detected as true");

                }],
                ["Intersection between a line and a polygon", () => {
                    const line = {
                        "type": "Feature",
                        "properties": {},
                        "geometry": {
                            "type": "LineString",
                            "coordinates": [
                                [3.779296875, 48.777912755501845],
                                [1.23046875, 47.60616304386874]
                            ]
                        }
                    };

                    const overlap = GeoOperations.calculateOverlap(line, [GeoOperationsSpec.polygon]);
                    Assert.equal(1, overlap.length)
                }],
                ["Fully enclosed", () => {
                    const line = {
                        "type": "Feature",
                        "properties": {},
                        "geometry": {
                            "type": "LineString",
                            "coordinates": [
                                [0.0439453125, 47.31648293428332],
                                [0.6591796875, 46.77749276376827]
                            ]
                        }
                    };

                    const overlap = GeoOperations.calculateOverlap(line, [GeoOperationsSpec.polygon]);
                    Assert.equal(1, overlap.length)
                }],
                ["overlapWith matches points too", () => {
                    const point = {
                        "type": "Feature",
                        "properties": {},
                        "geometry": {
                            "type": "Point",
                            "coordinates": [2.274169921875, 46.76244305208004]
                        }
                    };

                    const overlap = GeoOperations.calculateOverlap(point, [GeoOperationsSpec.polygon]);
                    Assert.equal(1, overlap.length)
                }]
            ]
        )

    }
}
@ -20,39 +20,43 @@ import {AllKnownLayouts} from "../Customizations/AllKnownLayouts";
|
|||
import AllKnownLayers from "../Customizations/AllKnownLayers";
|
||||
import LayerConfig from "../Customizations/JSON/LayerConfig";
|
||||
|
||||
export default class ImageAttributionSpec extends T {
|
||||
constructor() {
|
||||
super(
|
||||
"ImageAttribution Tests", [
|
||||
[
|
||||
"Should find all the images",
|
||||
() => {
|
||||
const pumps: LayerConfig = AllKnownLayers.sharedLayers["bike_repair_station"]
|
||||
const images = pumps.ExtractImages();
|
||||
const expectedValues = ['./assets/layers/bike_repair_station/repair_station.svg',
|
||||
'./assets/layers/bike_repair_station/repair_station_pump.svg',
|
||||
'./assets/layers/bike_repair_station/broken_pump_2.svg',
|
||||
'./assets/layers/bike_repair_station/pump.svg',
|
||||
'./assets/themes/cyclofix/fietsambassade_gent_logo_small.svg',
|
||||
'./assets/layers/bike_repair_station/pump_example_manual.jpg',
|
||||
'./assets/layers/bike_repair_station/pump_example.png',
|
||||
'./assets/layers/bike_repair_station/pump_example_round.jpg',
|
||||
'./assets/layers/bike_repair_station/repair_station_example.jpg']
|
||||
for (const expected of expectedValues) {
|
||||
T.isTrue(images.has(expected), expected + " not found")
|
||||
}
|
||||
}
|
||||
],
|
||||
[
|
||||
"Test image discovery regex",
|
||||
() => {
|
||||
const tr = new Translation({en: "XYZ <img src='a.svg'/> XYZ <img src=\"some image.svg\"></img> XYZ <img src=b.svg/>"})
|
||||
const images = new Set<string>(tr.ExtractImages(false));
|
||||
equal(3, images.size)
|
||||
T.isTrue(images.has("a.svg"), "a.svg not found")
|
||||
T.isTrue(images.has("b.svg"), "b.svg not found")
|
||||
T.isTrue(images.has("some image.svg"), "some image.svg not found")
|
||||
|
||||
new T("ImageAttribution Tests", [
|
||||
[
|
||||
"Should find all the images",
|
||||
() => {
|
||||
const pumps: LayerConfig = AllKnownLayers.sharedLayers["bike_repair_station"]
|
||||
const images = pumps.ExtractImages();
|
||||
const expectedValues = ['./assets/layers/bike_repair_station/repair_station.svg',
|
||||
'./assets/layers/bike_repair_station/repair_station_pump.svg',
|
||||
'./assets/layers/bike_repair_station/broken_pump_2.svg',
|
||||
'./assets/layers/bike_repair_station/pump.svg',
|
||||
'./assets/themes/cyclofix/fietsambassade_gent_logo_small.svg',
|
||||
'./assets/layers/bike_repair_station/pump_example_manual.jpg',
|
||||
'./assets/layers/bike_repair_station/pump_example.png',
|
||||
'./assets/layers/bike_repair_station/pump_example_round.jpg',
|
||||
'./assets/layers/bike_repair_station/repair_station_example.jpg']
|
||||
for (const expected of expectedValues) {
|
||||
T.isTrue(images.has(expected), expected + " not found")
|
||||
}
|
||||
}
|
||||
],
|
||||
[
|
||||
"Test image discovery regex",
|
||||
() => {
|
||||
const tr = new Translation({en: "XYZ <img src='a.svg'/> XYZ <img src=\"some image.svg\"></img> XYZ <img src=b.svg/>"})
|
||||
const images = new Set<string>(tr.ExtractImages(false));
|
||||
equal(3, images.size)
|
||||
T.isTrue(images.has("a.svg"), "a.svg not found")
|
||||
T.isTrue(images.has("b.svg"), "b.svg not found")
|
||||
T.isTrue(images.has("some image.svg"), "some image.svg not found")
|
||||
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
|
||||
|
||||
])
|
||||
]);
|
||||
}
|
||||
}
|
|
@ -16,20 +16,26 @@ import {SubstitutedTranslation} from "../UI/SubstitutedTranslation";
|
|||
import {Tag} from "../Logic/Tags/Tag";
|
||||
import {And} from "../Logic/Tags/And";
|
||||
import {ImageSearcher} from "../Logic/Actors/ImageSearcher";
|
||||
export default class ImageSearcherSpec extends T {
|
||||
|
||||
constructor() {
|
||||
super("ImageSearcher", [
|
||||
[
|
||||
"Should find images",
|
||||
() => {
|
||||
const tags = new UIEventSource({
|
||||
"mapillary": "https://www.mapillary.com/app/?pKey=bYH6FFl8LXAPapz4PNSh3Q"
|
||||
});
|
||||
const searcher = ImageSearcher.construct(tags)
|
||||
const result = searcher.data[0];
|
||||
equal(result.url, "https://www.mapillary.com/map/im/bYH6FFl8LXAPapz4PNSh3Q");
|
||||
}
|
||||
]
|
||||
|
||||
|
||||
new T("ImageSearcher", [
|
||||
[
|
||||
"Should find images",
|
||||
() => {
|
||||
const tags = new UIEventSource({
|
||||
"mapillary": "https://www.mapillary.com/app/?pKey=bYH6FFl8LXAPapz4PNSh3Q"
|
||||
});
|
||||
const searcher = ImageSearcher.construct(tags)
|
||||
const result = searcher.data[0];
|
||||
equal(result.url, "https://www.mapillary.com/map/im/bYH6FFl8LXAPapz4PNSh3Q");
|
||||
}
|
||||
]
|
||||
]);
|
||||
|
||||
}
|
||||
|
||||
|
||||
])
|
||||
}
|
||||
|
|
test/Tag.spec.ts (752 changes)
@ -1,6 +1,4 @@
|
|||
import {Utils} from "../Utils";
|
||||
|
||||
Utils.runningFromConsole = true;
|
||||
import {equal} from "assert";
|
||||
import T from "./TestHelper";
|
||||
import {FromJSON} from "../Customizations/JSON/FromJSON";
|
||||
|
@ -15,414 +13,420 @@ import PublicHolidayInput from "../UI/OpeningHours/PublicHolidayInput";
|
|||
import {SubstitutedTranslation} from "../UI/SubstitutedTranslation";
|
||||
import {Tag} from "../Logic/Tags/Tag";
|
||||
import {And} from "../Logic/Tags/And";
|
||||
import * as Assert from "assert";
|
||||
|
||||
Utils.runningFromConsole = true;
|
||||
|
||||
export default class TagSpec extends T{
|
||||
|
||||
constructor() {
|
||||
super("Tags", [
|
||||
["Tag replacement works in translation", () => {
|
||||
const tr = new Translation({
|
||||
"en": "Test {key} abc"
|
||||
}).replace("{key}", "value");
|
||||
equal(tr.txt, "Test value abc");
|
||||
|
||||
}],
|
||||
["Parse tag config", (() => {
|
||||
const tag = FromJSON.Tag("key=value") as Tag;
|
||||
equal(tag.key, "key");
|
||||
equal(tag.value, "value");
|
||||
equal(tag.matchesProperties({"key": "value"}), true)
|
||||
equal(tag.matchesProperties({"key": "z"}), false)
|
||||
equal(tag.matchesProperties({"key": ""}), false)
|
||||
equal(tag.matchesProperties({"other_key": ""}), false)
|
||||
equal(tag.matchesProperties({"other_key": "value"}), false)
|
||||
|
||||
const isEmpty = FromJSON.Tag("key=") as Tag;
|
||||
equal(isEmpty.matchesProperties({"key": "value"}), false)
|
||||
equal(isEmpty.matchesProperties({"key": ""}), true)
|
||||
equal(isEmpty.matchesProperties({"other_key": ""}), true)
|
||||
equal(isEmpty.matchesProperties({"other_key": "value"}), true)
|
||||
|
||||
const isNotEmpty = FromJSON.Tag("key!=");
|
||||
equal(isNotEmpty.matchesProperties({"key": "value"}), true)
|
||||
equal(isNotEmpty.matchesProperties({"key": "other_value"}), true)
|
||||
equal(isNotEmpty.matchesProperties({"key": ""}), false)
|
||||
equal(isNotEmpty.matchesProperties({"other_key": ""}), false)
|
||||
equal(isNotEmpty.matchesProperties({"other_key": "value"}), false)
|
||||
|
||||
|
||||
new T("Tags", [
|
||||
["Tag replacement works in translation", () => {
|
||||
const tr = new Translation({
|
||||
"en": "Test {key} abc"
|
||||
}).replace("{key}", "value");
|
||||
equal(tr.txt, "Test value abc");
|
||||
|
||||
}],
|
||||
["Parse tag config", (() => {
|
||||
const tag = FromJSON.Tag("key=value") as Tag;
|
||||
equal(tag.key, "key");
|
||||
equal(tag.value, "value");
|
||||
equal(tag.matchesProperties({"key": "value"}), true)
|
||||
equal(tag.matchesProperties({"key": "z"}), false)
|
||||
equal(tag.matchesProperties({"key": ""}), false)
|
||||
equal(tag.matchesProperties({"other_key": ""}), false)
|
||||
equal(tag.matchesProperties({"other_key": "value"}), false)
|
||||
|
||||
const isEmpty = FromJSON.Tag("key=") as Tag;
|
||||
equal(isEmpty.matchesProperties({"key": "value"}), false)
|
||||
equal(isEmpty.matchesProperties({"key": ""}), true)
|
||||
equal(isEmpty.matchesProperties({"other_key": ""}), true)
|
||||
equal(isEmpty.matchesProperties({"other_key": "value"}), true)
|
||||
|
||||
const isNotEmpty = FromJSON.Tag("key!=");
|
||||
equal(isNotEmpty.matchesProperties({"key": "value"}), true)
|
||||
equal(isNotEmpty.matchesProperties({"key": "other_value"}), true)
|
||||
equal(isNotEmpty.matchesProperties({"key": ""}), false)
|
||||
equal(isNotEmpty.matchesProperties({"other_key": ""}), false)
|
||||
equal(isNotEmpty.matchesProperties({"other_key": "value"}), false)
|
||||
const and = FromJSON.Tag({"and": ["key=value", "x=y"]}) as And;
|
||||
equal((and.and[0] as Tag).key, "key");
|
||||
equal((and.and[1] as Tag).value, "y");
|
||||
|
||||
|
||||
const and = FromJSON.Tag({"and": ["key=value", "x=y"]}) as And;
|
||||
equal((and.and[0] as Tag).key, "key");
|
||||
equal((and.and[1] as Tag).value, "y");
|
||||
const notReg = FromJSON.Tag("x!~y") as And;
|
||||
equal(notReg.matchesProperties({"x": "y"}), false)
|
||||
equal(notReg.matchesProperties({"x": "z"}), true)
|
||||
equal(notReg.matchesProperties({"x": ""}), true)
|
||||
equal(notReg.matchesProperties({}), true)
|
||||
|
||||
const noMatch = FromJSON.Tag("key!=value") as Tag;
|
||||
equal(noMatch.matchesProperties({"key": "value"}), false)
|
||||
equal(noMatch.matchesProperties({"key": "otherValue"}), true)
|
||||
equal(noMatch.matchesProperties({"key": ""}), true)
|
||||
equal(noMatch.matchesProperties({"otherKey": ""}), true)
|
||||
|
||||
|
||||
const notReg = FromJSON.Tag("x!~y") as And;
|
||||
equal(notReg.matchesProperties({"x": "y"}), false)
|
||||
equal(notReg.matchesProperties({"x": "z"}), true)
|
||||
equal(notReg.matchesProperties({"x": ""}), true)
|
||||
equal(notReg.matchesProperties({}), true)
|
||||
const multiMatch = FromJSON.Tag("vending~.*bicycle_tube.*") as Tag;
|
||||
equal(multiMatch.matchesProperties({"vending": "bicycle_tube"}), true)
|
||||
equal(multiMatch.matchesProperties({"vending": "something;bicycle_tube"}), true)
|
||||
equal(multiMatch.matchesProperties({"vending": "bicycle_tube;something"}), true)
|
||||
equal(multiMatch.matchesProperties({"vending": "xyz;bicycle_tube;something"}), true)
|
||||
|
||||
const noMatch = FromJSON.Tag("key!=value") as Tag;
|
||||
equal(noMatch.matchesProperties({"key": "value"}), false)
|
||||
equal(noMatch.matchesProperties({"key": "otherValue"}), true)
|
||||
equal(noMatch.matchesProperties({"key": ""}), true)
|
||||
equal(noMatch.matchesProperties({"otherKey": ""}), true)
|
||||
const nameStartsWith = FromJSON.Tag("name~[sS]peelbos.*")
|
||||
equal(nameStartsWith.matchesProperties({"name": "Speelbos Sint-Anna"}), true)
|
||||
equal(nameStartsWith.matchesProperties({"name": "speelbos Sint-Anna"}), true)
|
||||
equal(nameStartsWith.matchesProperties({"name": "Sint-Anna"}), false)
|
||||
equal(nameStartsWith.matchesProperties({"name": ""}), false)
|
||||
|
||||
|
||||
const multiMatch = FromJSON.Tag("vending~.*bicycle_tube.*") as Tag;
|
||||
equal(multiMatch.matchesProperties({"vending": "bicycle_tube"}), true)
|
||||
equal(multiMatch.matchesProperties({"vending": "something;bicycle_tube"}), true)
|
||||
equal(multiMatch.matchesProperties({"vending": "bicycle_tube;something"}), true)
|
||||
equal(multiMatch.matchesProperties({"vending": "xyz;bicycle_tube;something"}), true)
|
||||
const assign = FromJSON.Tag("survey:date:={_date:now}")
|
||||
equal(assign.matchesProperties({"survey:date": "2021-03-29", "_date:now": "2021-03-29"}), true);
|
||||
equal(assign.matchesProperties({"survey:date": "2021-03-29", "_date:now": "2021-01-01"}), false);
|
||||
equal(assign.matchesProperties({"survey:date": "2021-03-29"}), false);
|
||||
equal(assign.matchesProperties({"_date:now": "2021-03-29"}), false);
|
||||
equal(assign.matchesProperties({"some_key": "2021-03-29"}), false);
|
||||
|
||||
const nameStartsWith = FromJSON.Tag("name~[sS]peelbos.*")
|
||||
equal(nameStartsWith.matchesProperties({"name": "Speelbos Sint-Anna"}), true)
|
||||
equal(nameStartsWith.matchesProperties({"name": "speelbos Sint-Anna"}), true)
|
||||
equal(nameStartsWith.matchesProperties({"name": "Sint-Anna"}), false)
|
||||
equal(nameStartsWith.matchesProperties({"name": ""}), false)
|
||||
})],
|
||||
["Is equivalent test", (() => {
|
||||
|
||||
const t0 = new And([
|
||||
new Tag("valves:special", "A"),
|
||||
new Tag("valves", "A")
|
||||
])
|
||||
const t1 = new And([
|
||||
new Tag("valves", "A")
|
||||
])
|
||||
const t2 = new And([
|
||||
new Tag("valves", "B")
|
||||
])
|
||||
equal(true, t0.isEquivalent(t0))
|
||||
equal(true, t1.isEquivalent(t1))
|
||||
equal(true, t2.isEquivalent(t2))
|
||||
|
||||
const assign = FromJSON.Tag("survey:date:={_date:now}")
|
||||
equal(assign.matchesProperties({"survey:date": "2021-03-29", "_date:now": "2021-03-29"}), true);
|
||||
equal(assign.matchesProperties({"survey:date": "2021-03-29", "_date:now": "2021-01-01"}), false);
|
||||
equal(assign.matchesProperties({"survey:date": "2021-03-29"}), false);
|
||||
equal(assign.matchesProperties({"_date:now": "2021-03-29"}), false);
|
||||
equal(assign.matchesProperties({"some_key": "2021-03-29"}), false);
|
||||
equal(false, t0.isEquivalent(t1))
|
||||
equal(false, t0.isEquivalent(t2))
|
||||
equal(false, t1.isEquivalent(t0))
|
||||
|
||||
})],
|
||||
["Is equivalent test", (() => {
|
||||
equal(false, t1.isEquivalent(t2))
|
||||
equal(false, t2.isEquivalent(t0))
|
||||
equal(false, t2.isEquivalent(t1))
|
||||
})],
|
||||
["Parse translation map", (() => {
|
||||
|
||||
const t0 = new And([
|
||||
new Tag("valves:special", "A"),
|
||||
new Tag("valves", "A")
|
||||
])
|
||||
const t1 = new And([
|
||||
new Tag("valves", "A")
|
||||
])
|
||||
const t2 = new And([
|
||||
new Tag("valves", "B")
|
||||
])
|
||||
equal(true, t0.isEquivalent(t0))
|
||||
equal(true, t1.isEquivalent(t1))
|
||||
equal(true, t2.isEquivalent(t2))
|
||||
const json: any = {"en": "English", "nl": "Nederlands"};
|
||||
const translation = Translations.WT(new Translation(json));
|
||||
Locale.language.setData("en");
|
||||
equal(translation.txt, "English");
|
||||
Locale.language.setData("nl");
|
||||
equal(translation.txt, "Nederlands");
|
||||
})],
|
||||
["Parse tag rendering", (() => {
|
||||
Locale.language.setData("nl");
|
||||
const tr = new TagRenderingConfig({
|
||||
render: ({"en": "Name is {name}", "nl": "Ook een {name}"} as any),
|
||||
question: "Wat is de naam van dit object?",
|
||||
freeform: {
|
||||
key: "name",
|
||||
},
|
||||
|
||||
equal(false, t0.isEquivalent(t1))
|
||||
equal(false, t0.isEquivalent(t2))
|
||||
equal(false, t1.isEquivalent(t0))
|
||||
mappings: [
|
||||
{
|
||||
if: "noname=yes",
|
||||
"then": "Has no name"
|
||||
}
|
||||
],
|
||||
condition: "x="
|
||||
}, undefined, "");
|
||||
|
||||
equal(false, t1.isEquivalent(t2))
|
||||
equal(false, t2.isEquivalent(t0))
|
||||
equal(false, t2.isEquivalent(t1))
|
||||
})],
|
||||
["Parse translation map", (() => {
|
||||
equal(undefined, tr.GetRenderValue({"foo": "bar"}));
|
||||
equal("Has no name", tr.GetRenderValue({"noname": "yes"})?.txt);
|
||||
equal("Ook een {name}", tr.GetRenderValue({"name": "xyz"})?.txt);
|
||||
equal("Ook een xyz", SubstitutedTranslation.construct(tr.GetRenderValue({"name": "xyz"}),
|
||||
new UIEventSource<any>({"name": "xyz"})).InnerRender());
|
||||
equal(undefined, tr.GetRenderValue({"foo": "bar"}));
|
||||
|
||||
const json: any = {"en": "English", "nl": "Nederlands"};
|
||||
const translation = Translations.WT(new Translation(json));
|
||||
Locale.language.setData("en");
|
||||
equal(translation.txt, "English");
|
||||
Locale.language.setData("nl");
|
||||
equal(translation.txt, "Nederlands");
|
||||
})],
|
||||
["Parse tag rendering", (() => {
|
||||
Locale.language.setData("nl");
|
||||
const tr = new TagRenderingConfig({
|
||||
render: ({"en": "Name is {name}", "nl": "Ook een {name}"} as any),
|
||||
question: "Wat is de naam van dit object?",
|
||||
freeform: {
|
||||
key: "name",
|
||||
},
|
||||
})],
|
||||
|
||||
mappings: [
|
||||
{
|
||||
if: "noname=yes",
|
||||
"then": "Has no name"
|
||||
[
|
||||
"Empty match test",
|
||||
() => {
|
||||
const t = new Tag("key", "");
|
||||
equal(false, t.matchesProperties({"key": "somevalue"}))
|
||||
}
|
||||
],
|
||||
condition: "x="
|
||||
}, undefined, "");
|
||||
[
|
||||
"Tagrendering test",
|
||||
() => {
|
||||
|
||||
equal(undefined, tr.GetRenderValue({"foo": "bar"}));
|
||||
equal("Has no name", tr.GetRenderValue({"noname": "yes"})?.txt);
|
||||
equal("Ook een {name}", tr.GetRenderValue({"name": "xyz"})?.txt);
|
||||
equal("Ook een xyz", SubstitutedTranslation.construct(tr.GetRenderValue({"name": "xyz"}),
|
||||
new UIEventSource<any>({"name": "xyz"})).InnerRender());
|
||||
equal(undefined, tr.GetRenderValue({"foo": "bar"}));
|
||||
const def = {
|
||||
"render": {
|
||||
"nl": "De toegankelijkheid van dit gebied is: {access:description}"
|
||||
},
|
||||
"question": {
|
||||
"nl": "Is dit gebied toegankelijk?"
|
||||
},
|
||||
"freeform": {
|
||||
"key": "access:description"
|
||||
},
|
||||
"mappings": [
|
||||
{
|
||||
"if": {
|
||||
"and": [
|
||||
"access:description=",
|
||||
"access=",
|
||||
"leisure=park"
|
||||
]
|
||||
},
|
||||
"then": {
|
||||
"nl": "Dit gebied is vrij toegankelijk"
|
||||
},
|
||||
"hideInAnswer": true
|
||||
},
|
||||
{
|
||||
"if": "access=no",
|
||||
"then": "Niet toegankelijk"
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
})],
|
||||
const constr = new TagRenderingConfig(def, undefined, "test");
|
||||
const uiEl = new EditableTagRendering(new UIEventSource<any>(
|
||||
{leisure: "park", "access": "no"}), constr
|
||||
);
|
||||
const rendered = uiEl.InnerRender();
|
||||
equal(true, rendered.indexOf("Niet toegankelijk") > 0)
|
||||
|
||||
[
|
||||
"Empty match test",
|
||||
() => {
|
||||
const t = new Tag("key", "");
|
||||
equal(false, t.matchesProperties({"key": "somevalue"}))
|
||||
}
|
||||
],
|
||||
[
|
||||
"Tagrendering test",
|
||||
() => {
|
||||
}
|
||||
], [
|
||||
"Merge touching opening hours",
|
||||
() => {
|
||||
const oh1: OpeningHour = {
|
||||
weekday: 0,
|
||||
startHour: 10,
|
||||
startMinutes: 0,
|
||||
endHour: 11,
|
||||
endMinutes: 0
|
||||
};
|
||||
const oh0: OpeningHour = {
|
||||
weekday: 0,
|
||||
startHour: 11,
|
||||
startMinutes: 0,
|
||||
endHour: 12,
|
||||
endMinutes: 0
|
||||
};
|
||||
|
||||
const def = {
|
||||
"render": {
|
||||
"nl": "De toegankelijkheid van dit gebied is: {access:description}"
|
||||
},
|
||||
"question": {
|
||||
"nl": "Is dit gebied toegankelijk?"
|
||||
},
|
||||
"freeform": {
|
||||
"key": "access:description"
|
||||
},
|
||||
"mappings": [
|
||||
const merged = OH.MergeTimes([oh0, oh1]);
|
||||
const r = merged[0];
|
||||
equal(merged.length, 1);
|
||||
equal(r.startHour, 10);
|
||||
equal(r.endHour, 12)
|
||||
|
||||
}
|
||||
], [
|
||||
"Merge overlapping opening hours",
|
||||
() => {
|
||||
const oh1: OpeningHour = {
|
||||
weekday: 0,
|
||||
startHour: 10,
|
||||
startMinutes: 0,
|
||||
endHour: 11,
|
||||
endMinutes: 0
|
||||
};
|
||||
const oh0: OpeningHour = {
|
||||
weekday: 0,
|
||||
startHour: 10,
|
||||
startMinutes: 30,
|
||||
endHour: 12,
|
||||
endMinutes: 0
|
||||
};
|
||||
|
||||
const merged = OH.MergeTimes([oh0, oh1]);
|
||||
const r = merged[0];
|
||||
equal(merged.length, 1);
|
||||
equal(r.startHour, 10);
|
||||
equal(r.endHour, 12)
|
||||
|
||||
}],
|
||||
["Parse OH 1", () => {
|
||||
const rules = OH.ParseRule("11:00-19:00");
|
||||
equal(rules.length, 7);
|
||||
equal(rules[0].weekday, 0);
|
||||
equal(rules[0].startHour, 11);
|
||||
equal(rules[3].endHour, 19);
|
||||
|
||||
}],
|
||||
["Parse OH 2", () => {
|
||||
const rules = OH.ParseRule("Mo-Th 11:00-19:00");
|
||||
equal(rules.length, 4);
|
||||
equal(rules[0].weekday, 0);
|
||||
equal(rules[0].startHour, 11);
|
||||
equal(rules[3].endHour, 19);
|
||||
}],
|
||||
["JOIN OH 1", () => {
|
||||
const rules = OH.ToString([
|
||||
{
|
||||
"if": {
|
||||
"and": [
|
||||
"access:description=",
|
||||
"access=",
|
||||
"leisure=park"
|
||||
]
|
||||
},
|
||||
"then": {
|
||||
"nl": "Dit gebied is vrij toegankelijk"
|
||||
},
|
||||
"hideInAnswer": true
|
||||
weekday: 0,
|
||||
endHour: 12,
|
||||
endMinutes: 0,
|
||||
startHour: 10,
|
||||
startMinutes: 0
|
||||
},
|
||||
{
|
||||
"if": "access=no",
|
||||
"then": "Niet toegankelijk"
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
const constr = new TagRenderingConfig(def, undefined, "test");
|
||||
const uiEl = new EditableTagRendering(new UIEventSource<any>(
|
||||
{leisure: "park", "access": "no"}), constr
|
||||
);
|
||||
const rendered = uiEl.InnerRender();
|
||||
equal(true, rendered.indexOf("Niet toegankelijk") > 0)
|
||||
|
||||
}
|
||||
], [
|
||||
"Merge touching opening hours",
|
||||
() => {
|
||||
const oh1: OpeningHour = {
|
||||
weekday: 0,
|
||||
startHour: 10,
|
||||
startMinutes: 0,
|
||||
endHour: 11,
|
||||
endMinutes: 0
|
||||
};
|
||||
const oh0: OpeningHour = {
|
||||
weekday: 0,
|
||||
startHour: 11,
|
||||
startMinutes: 0,
|
||||
endHour: 12,
|
||||
endMinutes: 0
|
||||
};
|
||||
|
||||
const merged = OH.MergeTimes([oh0, oh1]);
|
||||
const r = merged[0];
|
||||
equal(merged.length, 1);
|
||||
equal(r.startHour, 10);
|
||||
equal(r.endHour, 12)
|
||||
|
||||
}
|
||||
], [
|
||||
"Merge overlapping opening hours",
|
||||
() => {
|
||||
const oh1: OpeningHour = {
|
||||
weekday: 0,
|
||||
startHour: 10,
|
||||
startMinutes: 0,
|
||||
endHour: 11,
|
||||
endMinutes: 0
|
||||
};
|
||||
const oh0: OpeningHour = {
|
||||
weekday: 0,
|
||||
startHour: 10,
|
||||
startMinutes: 30,
|
||||
endHour: 12,
|
||||
endMinutes: 0
|
||||
};
|
||||
|
||||
const merged = OH.MergeTimes([oh0, oh1]);
|
||||
const r = merged[0];
|
||||
equal(merged.length, 1);
|
||||
equal(r.startHour, 10);
|
||||
equal(r.endHour, 12)
|
||||
|
||||
}],
|
||||
["Parse OH 1", () => {
|
||||
const rules = OH.ParseRule("11:00-19:00");
|
||||
equal(rules.length, 7);
|
||||
equal(rules[0].weekday, 0);
|
||||
equal(rules[0].startHour, 11);
|
||||
equal(rules[3].endHour, 19);
|
||||
|
||||
}],
|
||||
["Parse OH 2", () => {
|
||||
const rules = OH.ParseRule("Mo-Th 11:00-19:00");
|
||||
equal(rules.length, 4);
|
||||
equal(rules[0].weekday, 0);
|
||||
equal(rules[0].startHour, 11);
|
||||
equal(rules[3].endHour, 19);
|
||||
}],
|
||||
["JOIN OH 1", () => {
|
||||
const rules = OH.ToString([
|
||||
{
|
||||
weekday: 0,
|
||||
endHour: 12,
|
||||
endMinutes: 0,
|
||||
startHour: 10,
|
||||
startMinutes: 0
|
||||
},
|
||||
{
|
||||
weekday: 0,
|
||||
endHour: 17,
|
||||
endMinutes: 0,
|
||||
startHour: 13,
|
||||
startMinutes: 0
|
||||
},
|
||||
weekday: 0,
|
||||
endHour: 17,
|
||||
endMinutes: 0,
|
||||
startHour: 13,
|
||||
startMinutes: 0
|
||||
},
|
||||
|
||||
|
||||
{
|
||||
weekday: 1,
|
||||
endHour: 17,
|
||||
endMinutes: 0,
|
||||
startHour: 13,
|
||||
startMinutes: 0
|
||||
}, {
|
||||
weekday: 1,
|
||||
endHour: 12,
|
||||
endMinutes: 0,
|
||||
startHour: 10,
|
||||
startMinutes: 0
|
||||
},
|
||||
{
|
||||
weekday: 1,
|
||||
endHour: 17,
|
||||
endMinutes: 0,
|
||||
startHour: 13,
|
||||
startMinutes: 0
|
||||
}, {
|
||||
weekday: 1,
|
||||
endHour: 12,
|
||||
endMinutes: 0,
|
||||
startHour: 10,
|
||||
startMinutes: 0
|
||||
},
|
||||
|
||||
]);
|
||||
equal(rules, "Mo-Tu 10:00-12:00, 13:00-17:00");
|
||||
}],
|
||||
["JOIN OH 2", () => {
|
||||
const rules = OH.ToString([
|
||||
]);
|
||||
equal(rules, "Mo-Tu 10:00-12:00, 13:00-17:00");
|
||||
}],
|
||||
["JOIN OH 2", () => {
|
||||
const rules = OH.ToString([
|
||||
|
||||
{
|
||||
weekday: 1,
|
||||
endHour: 17,
|
||||
endMinutes: 0,
|
||||
startHour: 13,
|
||||
startMinutes: 0
|
||||
}, {
|
||||
weekday: 1,
|
||||
endHour: 12,
|
||||
endMinutes: 0,
|
||||
startHour: 10,
|
||||
startMinutes: 0
|
||||
},
|
||||
{
|
||||
weekday: 1,
|
||||
endHour: 17,
|
||||
endMinutes: 0,
|
||||
startHour: 13,
|
||||
startMinutes: 0
|
||||
}, {
|
||||
weekday: 1,
|
||||
endHour: 12,
|
||||
endMinutes: 0,
|
||||
startHour: 10,
|
||||
startMinutes: 0
|
||||
},
|
||||
|
||||
]);
|
||||
equal(rules, "Tu 10:00-12:00, 13:00-17:00");
|
||||
}],
|
||||
["JOIN OH 3", () => {
|
||||
const rules = OH.ToString([
|
||||
]);
|
||||
equal(rules, "Tu 10:00-12:00, 13:00-17:00");
|
||||
}],
|
||||
["JOIN OH 3", () => {
|
||||
const rules = OH.ToString([
|
||||
|
||||
{
|
||||
weekday: 3,
|
||||
endHour: 17,
|
||||
endMinutes: 0,
|
||||
startHour: 13,
|
||||
startMinutes: 0
|
||||
}, {
|
||||
weekday: 1,
|
||||
endHour: 12,
|
||||
endMinutes: 0,
|
||||
startHour: 10,
|
||||
startMinutes: 0
|
||||
},
|
||||
{
|
||||
weekday: 3,
|
||||
endHour: 17,
|
||||
endMinutes: 0,
|
||||
startHour: 13,
|
||||
startMinutes: 0
|
||||
}, {
|
||||
weekday: 1,
|
||||
endHour: 12,
|
||||
endMinutes: 0,
|
||||
startHour: 10,
|
||||
startMinutes: 0
|
||||
},
|
||||
|
||||
]);
|
||||
equal(rules, "Tu 10:00-12:00; Th 13:00-17:00");
|
||||
}],
|
||||
["JOIN OH 3", () => {
|
||||
const rules = OH.ToString([
|
||||
]);
|
||||
equal(rules, "Tu 10:00-12:00; Th 13:00-17:00");
|
||||
}],
|
||||
["JOIN OH 3", () => {
|
||||
const rules = OH.ToString([
|
||||
|
||||
{
|
||||
weekday: 6,
|
||||
endHour: 17,
|
||||
endMinutes: 0,
|
||||
startHour: 13,
|
||||
startMinutes: 0
|
||||
}, {
|
||||
weekday: 1,
|
||||
endHour: 12,
|
||||
endMinutes: 0,
|
||||
startHour: 10,
|
||||
startMinutes: 0
|
||||
},
|
||||
{
|
||||
weekday: 6,
|
||||
endHour: 17,
|
||||
endMinutes: 0,
|
||||
startHour: 13,
|
||||
startMinutes: 0
|
||||
}, {
|
||||
weekday: 1,
|
||||
endHour: 12,
|
||||
endMinutes: 0,
|
||||
startHour: 10,
|
||||
startMinutes: 0
|
||||
},
|
||||
|
||||
]);
|
||||
equal(rules, "Tu 10:00-12:00; Su 13:00-17:00");
|
||||
}],
|
||||
["OH 24/7", () => {
|
||||
const rules = OH.Parse("24/7");
|
||||
equal(rules.length, 7);
|
||||
equal(rules[0].startHour, 0);
|
||||
const asStr = OH.ToString(rules);
|
||||
equal(asStr, "24/7");
|
||||
}],
|
||||
["OH Th[-1] off", () => {
|
||||
const rules = OH.ParseRule("Th[-1] off");
|
||||
equal(rules, null);
|
||||
}],
|
||||
["OHNo parsePH 12:00-17:00", () => {
|
||||
const rules = OH.ParseRule("PH 12:00-17:00");
|
||||
equal(rules, null);
|
||||
}],
|
||||
["OH Parse PH 12:00-17:00", () => {
|
||||
const rules = PublicHolidayInput.LoadValue("PH 12:00-17:00");
|
||||
equal(rules.mode, " ");
|
||||
}],
|
||||
["Round", () => {
|
||||
equal(Utils.Round(15), "15.0")
|
||||
equal(Utils.Round(1), "1.0")
|
||||
equal(Utils.Round(1.5), "1.5")
|
||||
equal(Utils.Round(0.5), "0.5")
|
||||
equal(Utils.Round(1.6), "1.6")
|
||||
]);
|
||||
equal(rules, "Tu 10:00-12:00; Su 13:00-17:00");
|
||||
}],
|
||||
["OH 24/7", () => {
|
||||
const rules = OH.Parse("24/7");
|
||||
equal(rules.length, 7);
|
||||
equal(rules[0].startHour, 0);
|
||||
const asStr = OH.ToString(rules);
|
||||
equal(asStr, "24/7");
|
||||
}],
|
||||
["OH Th[-1] off", () => {
|
||||
const rules = OH.ParseRule("Th[-1] off");
|
||||
equal(rules, null);
|
||||
}],
|
||||
["OHNo parsePH 12:00-17:00", () => {
|
||||
const rules = OH.ParseRule("PH 12:00-17:00");
|
||||
equal(rules, null);
|
||||
}],
|
||||
["OH Parse PH 12:00-17:00", () => {
|
||||
const rules = PublicHolidayInput.LoadValue("PH 12:00-17:00");
|
||||
equal(rules.mode, " ");
|
||||
}],
|
||||
["Round", () => {
|
||||
equal(Utils.Round(15), "15.0")
|
||||
equal(Utils.Round(1), "1.0")
|
||||
equal(Utils.Round(1.5), "1.5")
|
||||
equal(Utils.Round(0.5), "0.5")
|
||||
equal(Utils.Round(1.6), "1.6")
|
||||
|
||||
equal(Utils.Round(-15), "-15.0")
|
||||
equal(Utils.Round(-1), "-1.0")
|
||||
equal(Utils.Round(-1.5), "-1.5")
|
||||
equal(Utils.Round(-0.5), "-0.5")
|
||||
equal(Utils.Round(-1.6), "-1.6")
|
||||
equal(Utils.Round(-15), "-15.0")
|
||||
equal(Utils.Round(-1), "-1.0")
|
||||
equal(Utils.Round(-1.5), "-1.5")
|
||||
equal(Utils.Round(-0.5), "-0.5")
|
||||
equal(Utils.Round(-1.6), "-1.6")
|
||||
|
||||
}
|
||||
],
|
||||
["Regression", () => {
|
||||
|
||||
const config = {
|
||||
"#": "Bottle refill",
|
||||
"question": {
|
||||
"en": "How easy is it to fill water bottles?",
|
||||
"nl": "Hoe gemakkelijk is het om drinkbussen bij te vullen?",
|
||||
"de": "Wie einfach ist es, Wasserflaschen zu füllen?"
|
||||
},
|
||||
"mappings": [
|
||||
{
|
||||
"if": "bottle=yes",
|
||||
"then": {
|
||||
"en": "It is easy to refill water bottles",
|
||||
"nl": "Een drinkbus bijvullen gaat makkelijk",
|
||||
"de": "Es ist einfach, Wasserflaschen nachzufüllen"
|
||||
}
|
||||
},
|
||||
{
|
||||
"if": "bottle=no",
|
||||
"then": {
|
||||
"en": "Water bottles may not fit",
|
||||
"nl": "Een drinkbus past moeilijk",
|
||||
"de": "Wasserflaschen passen möglicherweise nicht"
|
||||
}
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
const tagRendering = new TagRenderingConfig(config, null, "test");
|
||||
equal(true, tagRendering.IsKnown({bottle: "yes"}))
|
||||
equal(false, tagRendering.IsKnown({}))
|
||||
}]]);
|
||||
}
|
||||
],
|
||||
["Regression", () => {
|
||||
|
||||
const config = {
|
||||
"#": "Bottle refill",
|
||||
"question": {
|
||||
"en": "How easy is it to fill water bottles?",
|
||||
"nl": "Hoe gemakkelijk is het om drinkbussen bij te vullen?",
|
||||
"de": "Wie einfach ist es, Wasserflaschen zu füllen?"
|
||||
},
|
||||
"mappings": [
|
||||
{
|
||||
"if": "bottle=yes",
|
||||
"then": {
|
||||
"en": "It is easy to refill water bottles",
|
||||
"nl": "Een drinkbus bijvullen gaat makkelijk",
|
||||
"de": "Es ist einfach, Wasserflaschen nachzufüllen"
|
||||
}
|
||||
},
|
||||
{
|
||||
"if": "bottle=no",
|
||||
"then": {
|
||||
"en": "Water bottles may not fit",
|
||||
"nl": "Een drinkbus past moeilijk",
|
||||
"de": "Wasserflaschen passen möglicherweise nicht"
|
||||
}
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
const tagRendering = new TagRenderingConfig(config, null, "test");
|
||||
equal(true, tagRendering.IsKnown({bottle: "yes"}))
|
||||
equal(false, tagRendering.IsKnown({}))
|
||||
}]]);
|
||||
|
||||
}
|
||||
|
|
|
test/TagQuestion.spec.ts

@ -7,53 +7,57 @@ import {UIEventSource} from "../Logic/UIEventSource";
import TagRenderingConfig from "../Customizations/JSON/TagRenderingConfig";
import EditableTagRendering from "../UI/Popup/EditableTagRendering";

new T("TagQuestionElement",
    [
        ["Freeform has textfield", () => {
            const tags = new UIEventSource({
                id: "way/123",
                amenity: 'public_bookcases'
            });
            const config = new TagRenderingConfig(
                {
                    render: "The name is {name}",
                    question: "What is the name of this bookcase?",
                    freeform: {
                        key: "name",
                        type: "string"
                    }
                }, undefined, "Testing tag"
            );
            const questionElement = new TagRenderingQuestion(tags, config);
            const html = questionElement.InnerRender();
            T.assertContains("What is the name of this bookcase?", html);
            T.assertContains("<input type='text'", html);
        }],
        ["TagsQuestion with Freeform and mappings has textfield", () => {
            const tags = new UIEventSource({
                id: "way/123",
                amenity: 'public_bookcases'
            });
            const config = new TagRenderingConfig(
                {
                    render: "The name is {name}",
                    question: "What is the name of this bookcase?",
                    freeform: {
                        key: "name",
                        type: "string"
                    },
                    mappings: [
                        {"if": "noname=yes",
                            "then": "This bookcase has no name"}
                    ]
                }, undefined, "Testing tag"
            );
            const questionElement = new TagRenderingQuestion(tags, config);
            const html = questionElement.InnerRender();
            T.assertContains("What is the name of this bookcase?", html);
            T.assertContains("This bookcase has no name", html);
            T.assertContains("<input type='text'", html);
        }]
    ]
);

export default class TagQuestionSpec extends T {
    constructor() {
        super("TagQuestionElement",
            [
                ["Freeform has textfield", () => {
                    const tags = new UIEventSource({
                        id: "way/123",
                        amenity: 'public_bookcases'
                    });
                    const config = new TagRenderingConfig(
                        {
                            render: "The name is {name}",
                            question: "What is the name of this bookcase?",
                            freeform: {
                                key: "name",
                                type: "string"
                            }
                        }, undefined, "Testing tag"
                    );
                    const questionElement = new TagRenderingQuestion(tags, config);
                    const html = questionElement.InnerRender();
                    T.assertContains("What is the name of this bookcase?", html);
                    T.assertContains("<input type='text'", html);
                }],
                ["TagsQuestion with Freeform and mappings has textfield", () => {
                    const tags = new UIEventSource({
                        id: "way/123",
                        amenity: 'public_bookcases'
                    });
                    const config = new TagRenderingConfig(
                        {
                            render: "The name is {name}",
                            question: "What is the name of this bookcase?",
                            freeform: {
                                key: "name",
                                type: "string"
                            },
                            mappings: [
                                {
                                    "if": "noname=yes",
                                    "then": "This bookcase has no name"
                                }
                            ]
                        }, undefined, "Testing tag"
                    );
                    const questionElement = new TagRenderingQuestion(tags, config);
                    const html = questionElement.InnerRender();
                    T.assertContains("What is the name of this bookcase?", html);
                    T.assertContains("This bookcase has no name", html);
                    T.assertContains("<input type='text'", html);
                }]
            ]);
    }
}
27 test/TestAll.ts Normal file
@ -0,0 +1,27 @@
import {Utils} from "../Utils";

Utils.runningFromConsole = true;

import TagSpec from "./Tag.spec";
import ImageAttributionSpec from "./ImageAttribution.spec";
import GeoOperationsSpec from "./GeoOperations.spec";
import TagQuestionSpec from "./TagQuestion.spec";
import ImageSearcherSpec from "./ImageSearcher.spec";
import ThemeSpec from "./Theme.spec";
import UtilsSpec from "./Utils.spec";


const allTests = [
    new TagSpec(),
    new ImageAttributionSpec(),
    new GeoOperationsSpec(),
    new TagQuestionSpec(),
    new ImageSearcherSpec(),
    new ThemeSpec(),
    new UtilsSpec()]

for (const test of allTests) {
    if(test.failures.length > 0){
        throw "Some test failed"
    }
}
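TestAll.ts only aggregates: constructing a spec runs its tests immediately (see TestHelper.ts below), and any recorded failure aborts the run by throwing. As a sketch of how a further suite would plug in — ExampleSpec and test/Example.spec.ts are hypothetical names, only the T base class and the allTests pattern come from this commit:

// test/Example.spec.ts (hypothetical)
import T from "./TestHelper";
import {equal} from "assert";

export default class ExampleSpec extends T {
    constructor() {
        super("Example", [
            ["addition works", () => {
                // a test is just a named callback; throwing marks it as failed
                equal(1 + 1, 2)
            }]
        ]);
    }
}

// TestAll.ts would then import ExampleSpec and append `new ExampleSpec()` to allTests.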
test/TestHelper.ts

@ -1,33 +1,38 @@

export default class T {

    constructor(testsuite: string, tests: [string, () => void ][]) {
        let failures : string []= [];

    public readonly failures = []

    constructor(testsuite: string, tests: [string, () => void][]) {
        for (const [name, test] of tests) {
            try {
                test();
            } catch (e) {
                failures.push(name);
                this.failures.push(name);
                console.warn("Failed test: ", name, "because", e);
            }
        }
        if (failures.length == 0) {
        if (this.failures.length == 0) {
            console.log(`All tests of ${testsuite} done!`)
        } else {
            console.warn(failures.length, `tests of ${testsuite} failed :(`)
            console.log("Failed tests: ", failures.join(","))
            console.warn(this.failures.length, `tests of ${testsuite} failed :(`)
            console.log("Failed tests: ", this.failures.join(","))
        }
    }

    static assertContains(needle: string, actual: string){
        if(actual.indexOf(needle) < 0){
    static assertContains(needle: string, actual: string) {
        if (actual.indexOf(needle) < 0) {
            throw `The substring ${needle} was not found`
        }
    }

    static isTrue(b: boolean, msg: string) {
        if(!b){
            throw "Expected true, but got false: "+msg
        if (!b) {
            throw "Expected true, but got false: " + msg
        }
    }
    static isFalse(b: boolean, msg: string) {
        if (b) {
            throw "Expected false, but got true: " + msg
        }
    }
}
test/Theme.spec.ts

@ -9,40 +9,42 @@ import LayoutConfig from "../Customizations/JSON/LayoutConfig";
import {LayoutConfigJson} from "../Customizations/JSON/LayoutConfigJson";
import * as assert from "assert";

export default class ThemeSpec extends T{
    constructor() {
        super("Theme tests",
            [
                ["Nested overrides work", () => {

new T("Theme tests",
    [
        ["Nested overrides work", () => {

            const themeConfigJson : LayoutConfigJson = {
                description: "Descr",
                icon: "",
                language: ["en"],
                layers: [
                    {
                        builtin: "public_bookcase",
                        override: {
                            source:{
                                geoJson: "xyz"
            const themeConfigJson : LayoutConfigJson = {
                description: "Descr",
                icon: "",
                language: ["en"],
                layers: [
                    {
                        builtin: "public_bookcase",
                        override: {
                            source:{
                                geoJson: "xyz"
                            }
                        }
                    }
                }
                ],
                maintainer: "",
                startLat: 0,
                startLon: 0,
                startZoom: 0,
                title: {
                    en: "Title"
                },
                version: "",
                id: "test"
            }
                ],
                maintainer: "",
                startLat: 0,
                startLon: 0,
                startZoom: 0,
                title: {
                    en: "Title"
                },
                version: "",
                id: "test"
            }

            const themeConfig = new LayoutConfig(themeConfigJson);
            assert.equal("xyz", themeConfig.layers[0].source.geojsonSource)

        }]
    ]
);

            const themeConfig = new LayoutConfig(themeConfigJson);
            assert.equal("xyz", themeConfig.layers[0].source.geojsonSource)

        }]
    ]);
    }
}
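The test above only asserts the observable behaviour of `override`: a value nested inside the override object ends up on the corresponding nested field of the built-in layer. A minimal sketch of a recursive merge with that behaviour — illustrative only, not MapComplete's actual implementation; publicBookcaseLayer stands in for the built-in layer's JSON:

function applyOverride(base: any, override: any): any {
    // copy the base object, then overlay the override field by field
    const result = {...base};
    for (const key of Object.keys(override)) {
        const value = override[key];
        if (value !== null && typeof value === "object" && !Array.isArray(value)) {
            // plain nested objects are merged recursively ...
            result[key] = applyOverride(base[key] ?? {}, value);
        } else {
            // ... while primitives, arrays and null simply replace the base value
            result[key] = value;
        }
    }
    return result;
}

// e.g. applyOverride(publicBookcaseLayer, {source: {geoJson: "xyz"}}).source.geoJson === "xyz",
// while the other fields of publicBookcaseLayer.source are kept.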
test/Utils.spec.ts

@ -1,50 +1,84 @@
import T from "./TestHelper";
import {Utils} from "../Utils";
import {equal} from "assert";
import {existsSync, mkdirSync, readFileSync, writeFile, writeFileSync} from "fs";
import LZString from "lz-string";
new T("Utils",[
    ["Minify-json",() => {
        const str = JSON.stringify({title: "abc", "and":"xyz", "render":"somevalue"}, null, 0);
        const minified = Utils.MinifyJSON(str);
        console.log(minified)
        console.log("Minified version has ", minified.length, "chars")
        const restored = Utils.UnMinify(minified)
        console.log(restored)
        console.log("Restored version has ", restored.length, "chars")
        equal(str, restored)

    }],
    ["Minify-json of the bookcases",() => {
        let str = readFileSync("/home/pietervdvn/git/MapComplete/assets/layers/public_bookcases/public_bookcases.json", "UTF8")
        str = JSON.stringify(JSON.parse(str), null, 0)
        const minified = Utils.MinifyJSON(str);
        console.log("Minified version has ", minified.length, "chars")
        const restored = Utils.UnMinify(minified)
        console.log("Restored version has ", restored.length, "chars")
        equal(str, restored)
export default class UtilsSpec extends T {
    private static readonly example = {
        "id": "bookcases",
        "maintainer": "MapComplete",
        "version": "2020-08-29",
        "language": [
            "en",
            "nl",
            "de",
            "fr"
        ],
        "title": {
            "en": "Open Bookcase Map",
            "nl": "Open Boekenruilkastenkaart",
            "de": "Öffentliche Bücherschränke Karte",
            "fr": "Carte des microbibliothèques"
        },
        "description": {
            "en": "A public bookcase is a small streetside cabinet, box, old phone boot or some other objects where books are stored. Everyone can place or take a book. This map aims to collect all these bookcases. You can discover new bookcases nearby and, with a free OpenStreetMap account, quickly add your favourite bookcases.",
            "nl": "Een boekenruilkast is een kastje waar iedereen een boek kan nemen of achterlaten. Op deze kaart kan je deze boekenruilkasten terugvinden en met een gratis OpenStreetMap-account, ook boekenruilkasten toevoegen of informatie verbeteren",
            "de": "Ein öffentlicher Bücherschrank ist ein kleiner Bücherschrank am Straßenrand, ein Kasten, eine alte Telefonzelle oder andere Gegenstände, in denen Bücher aufbewahrt werden. Jeder kann ein Buch hinstellen oder mitnehmen. Diese Karte zielt darauf ab, all diese Bücherschränke zu sammeln. Sie können neue Bücherschränke in der Nähe entdecken und mit einem kostenlosen OpenStreetMap-Account schnell Ihre Lieblingsbücherschränke hinzufügen.",
            "fr": "Une microbibliothèques, également appelée boite à livre, est un élément de mobilier urbain (étagère, armoire, etc) dans lequel sont stockés des livres et autres objets en accès libre. Découvrez les boites à livres prêt de chez vous, ou ajouter en une nouvelle à l'aide de votre compte OpenStreetMap."
        },
        "icon": "./assets/themes/bookcases/bookcase.svg",
        "socialImage": null,
        "startLat": 0,
        "startLon": 0,
        "startZoom": 1,
        "widenFactor": 0.05,
        "roamingRenderings": [],
        "layers": [
            "public_bookcase"
        ]
    }

    }],
    ["Minify-json with LZ-string of the bookcases",() => {
        let str = readFileSync("/home/pietervdvn/git/MapComplete/assets/layers/public_bookcases/public_bookcases.json", "UTF8")
        str = JSON.stringify(JSON.parse(str), null, 0)
        const minified =LZString.compressToBase64(Utils.MinifyJSON(str));
        console.log("Minified version has ", minified.length, "chars")
        const restored = Utils.UnMinify(LZString.decompressFromBase64(minified))
        console.log("Restored version has ", restored.length, "chars")
        equal(str, restored)
    constructor() {
        super("Utils", [
            ["Minify-json", () => {
                const str = JSON.stringify({title: "abc", "and": "xyz", "render": "somevalue"}, null, 0);
                const minified = Utils.MinifyJSON(str);
                console.log(minified)
                console.log("Minified version has ", minified.length, "chars")
                const restored = Utils.UnMinify(minified)
                console.log(restored)
                console.log("Restored version has ", restored.length, "chars")
                equal(str, restored)

            }],
    ["Minify-json with only LZ-string of the bookcases",() => {
        let str = readFileSync("/home/pietervdvn/git/MapComplete/assets/layers/public_bookcases/public_bookcases.json", "UTF8")
        str = JSON.stringify(JSON.parse(str), null, 0)
        const minified =LZString.compressToBase64(str);
        console.log("Minified version has ", minified.length, "chars")
        const restored = LZString.decompressFromBase64(minified)
        console.log("Restored version has ", restored.length, "chars")
        equal(str, restored)
    }],
            ["Minify-json of the bookcases", () => {
                const str = JSON.stringify(UtilsSpec.example, null, 0)
                const minified = Utils.MinifyJSON(str);
                console.log("Minified version has ", minified.length, "chars")
                const restored = Utils.UnMinify(minified)
                console.log("Restored version has ", restored.length, "chars")
                equal(str, restored)

    }]

])
            }],
            ["Minify-json with LZ-string of the bookcases", () => {
                const str = JSON.stringify(UtilsSpec.example, null, 0)
                const minified = LZString.compressToBase64(Utils.MinifyJSON(str));
                console.log("Minified version has ", minified.length, "chars")
                const restored = Utils.UnMinify(LZString.decompressFromBase64(minified))
                console.log("Restored version has ", restored.length, "chars")
                equal(str, restored)

            }],
            ["Minify-json with only LZ-string of the bookcases", () => {
                const str = JSON.stringify(UtilsSpec.example, null, 0)
                const minified = LZString.compressToBase64(str);
                console.log("Minified version has ", minified.length, "chars")
                const restored = LZString.decompressFromBase64(minified)
                console.log("Restored version has ", restored.length, "chars")
                equal(str, restored)

            }]
        ]);
    }

}
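All of the minification tests assert the same round-trip property: whatever transformation is applied to the theme JSON must restore the exact original string. A minimal sketch of that property using only the lz-string dependency (Utils.MinifyJSON/Utils.UnMinify are project-specific and left out here; the sample object is made up):

import {equal} from "assert";
import LZString from "lz-string";

// stringify without whitespace, exactly as the tests above do
const original = JSON.stringify({title: "abc", layers: ["public_bookcase"]}, null, 0);
const compressed = LZString.compressToBase64(original);
const restored = LZString.decompressFromBase64(compressed);

// the base64 form is compact enough to embed elsewhere (e.g. in a URL fragment);
// decompressing it must yield the identical JSON string
equal(original, restored);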