WIP Modernize

- switch to type: module
- replace all CJS require/module.exports with ES6 import/export
This commit is contained in:
Bryan Housel 2021-06-22 00:04:52 -04:00
parent a0bfde221e
commit 1c4fc70e3b
20 changed files with 214 additions and 181 deletions

View file

@ -1,3 +1,3 @@
export { default as matcher } from './lib/matcher.js';
export { default as simplify } from './lib/simplify.js';
export { default as stemmer } from './lib/stemmer.js';
export { Matcher } from './lib/matcher.js';
export { simplify } from './lib/simplify.js';
export { stemmer } from './lib/stemmer.js';

View file

@ -1,18 +1,21 @@
const colors = require('colors/safe');
const fs = require('fs-extra');
const glob = require('glob');
const idgen = require('./idgen.js');
const JSON5 = require('json5');
const sortObject = require('./sort_object.js');
const stringify = require('@aitodotai/json-stringify-pretty-compact');
const withLocale = require('locale-compare')('en-US');
import colors from 'colors/safe.js';
import fs from 'fs-extra';
import glob from 'glob';
import JSON5 from 'json5';
import localeCompare from 'locale-compare';
import stringify from '@aitodotai/json-stringify-pretty-compact';
import { idgen } from './idgen.js';
import { sortObject } from './sort_object.js';
import { validate } from './validate.js';
const withLocale = localeCompare('en-US');
// metadata about the trees
const trees = require('../config/trees.json').trees;
const trees = JSON5.parse(fs.readFileSync('./config/trees.json', 'utf8')).trees;
// validate the files as we read them
const validate = require('./validate.js');
const categoriesSchema = require('../schema/categories.json');
const categoriesSchema = JSON5.parse(fs.readFileSync('./schema/categories.json', 'utf8'));
// The code in here
// - validates data on read, generating any missing data
@ -32,7 +35,9 @@ const categoriesSchema = require('../schema/categories.json');
// },
exports.read = (cache, loco) => {
export let fileTree = {
read: (cache, loco) => {
cache = cache || {};
cache.id = cache.id || new Map();
cache.path = cache.path || {};
@ -156,10 +161,10 @@ exports.read = (cache, loco) => {
});
return cache;
};
},
exports.write = (cache) => {
write: (cache) => {
cache = cache || {};
cache.path = cache.path || {};
@ -305,10 +310,10 @@ exports.write = (cache) => {
if (typeof val !== 'string') return val;
return val.trim().toLowerCase();
}
};
},
exports.expandTemplates = (cache, loco) => {
expandTemplates: (cache, loco) => {
cache = cache || {};
cache.id = cache.id || new Map();
cache.path = cache.path || {};
@ -406,4 +411,6 @@ exports.expandTemplates = (cache, loco) => {
});
return cache;
}
};

View file

@ -1,10 +1,11 @@
const crypto = require('crypto');
const simplify = require('./simplify.js');
import crypto from 'node:crypto';
import { simplify } from './simplify.js';
// We want the identifiers to be useable in url strings and other places,
// and avoid any unicode or right-to-left surprises,
// so limit them to /^\w+$/ (only [A-Za-z0-9_] characters)
module.exports = (item, tkv, locationID) => {
export function idgen(item, tkv, locationID) {
let name;
const parts = tkv.split('/', 3); // tkv = "tree/key/value"
@ -53,4 +54,4 @@ module.exports = (item, tkv, locationID) => {
} else {
return null;
}
};
}

View file

@ -1,11 +1,17 @@
const simplify = require('./simplify.js');
const matchGroups = require('../config/matchGroups.json').matchGroups;
const genericWords = require('../config/genericWords.json').genericWords;
const trees = require('../config/trees.json').trees;
const whichPolygon = require('which-polygon');
import whichPolygon from 'which-polygon';
import { simplify } from './simplify.js';
// This will not work in the browser :(
// We may be able to switch to `import`, but:
// - for node, run node with --experimental-json-modules https://stackoverflow.com/a/59758333/7620
// - for browser, both esbuild and rollup should be able to bundle a .json import
import fs from 'fs';
const matchGroups = JSON.parse(fs.readFileSync('./config/matchGroups.json', 'utf8')).matchGroups;
const genericWords = JSON.parse(fs.readFileSync('./config/genericWords.json', 'utf8')).genericWords;
const trees = JSON.parse(fs.readFileSync('./config/trees.json', 'utf8')).trees;
module.exports = () => {
export function Matcher() {
// The `_matchIndex` is a specialized structure that allows us to quickly answer
// _"Given a [key/value tagpair, name, location], what canonical items (brands etc) can match it?"_
@ -521,4 +527,4 @@ module.exports = () => {
return matcher;
};
}

View file

@ -1,8 +1,8 @@
const diacritics = require('diacritics');
import diacritics from 'diacritics';
// remove spaces, punctuation, diacritics
// for punctuation see https://stackoverflow.com/a/21224179
module.exports = (str) => {
export function simplify(str) {
if (typeof str !== 'string') return '';
return diacritics.remove(
@ -12,4 +12,4 @@ module.exports = (str) => {
.replace(/[\s\-=_!"#%'*{},.\/:;?\(\)\[\]@\\$\^*+<>«»~`\u00a1\u00a7\u00b6\u00b7\u00bf\u037e\u0387\u055a-\u055f\u0589\u05c0\u05c3\u05c6\u05f3\u05f4\u0609\u060a\u060c\u060d\u061b\u061e\u061f\u066a-\u066d\u06d4\u0700-\u070d\u07f7-\u07f9\u0830-\u083e\u085e\u0964\u0965\u0970\u0af0\u0df4\u0e4f\u0e5a\u0e5b\u0f04-\u0f12\u0f14\u0f85\u0fd0-\u0fd4\u0fd9\u0fda\u104a-\u104f\u10fb\u1360-\u1368\u166d\u166e\u16eb-\u16ed\u1735\u1736\u17d4-\u17d6\u17d8-\u17da\u1800-\u1805\u1807-\u180a\u1944\u1945\u1a1e\u1a1f\u1aa0-\u1aa6\u1aa8-\u1aad\u1b5a-\u1b60\u1bfc-\u1bff\u1c3b-\u1c3f\u1c7e\u1c7f\u1cc0-\u1cc7\u1cd3\u2000-\u206f\u2cf9-\u2cfc\u2cfe\u2cff\u2d70\u2e00-\u2e7f\u3001-\u3003\u303d\u30fb\ua4fe\ua4ff\ua60d-\ua60f\ua673\ua67e\ua6f2-\ua6f7\ua874-\ua877\ua8ce\ua8cf\ua8f8-\ua8fa\ua92e\ua92f\ua95f\ua9c1-\ua9cd\ua9de\ua9df\uaa5c-\uaa5f\uaade\uaadf\uaaf0\uaaf1\uabeb\ufe10-\ufe16\ufe19\ufe30\ufe45\ufe46\ufe49-\ufe4c\ufe50-\ufe52\ufe54-\ufe57\ufe5f-\ufe61\ufe68\ufe6a\ufe6b\ufeff\uff01-\uff03\uff05-\uff07\uff0a\uff0c\uff0e\uff0f\uff1a\uff1b\uff1f\uff20\uff3c\uff61\uff64\uff65]+/g,'')
.toLowerCase()
);
};
}

View file

@ -1,8 +1,9 @@
const withLocale = require('locale-compare')('en-US');
import localeCompare from 'locale-compare';
const withLocale = localeCompare('en-US');
// Returns an object with sorted keys and sorted values.
// (This is useful for file diffing)
module.exports = (obj) => {
export function sortObject(obj) {
if (!obj) return null;
let sorted = {};
@ -21,4 +22,4 @@ module.exports = (obj) => {
return withLocale(a, b);
}
}
};
}

View file

@ -1,8 +1,8 @@
const simplify = require('./simplify.js');
import { simplify } from './simplify.js';
// Removes noise from the name so that we can compare
// similar names for catching duplicates.
module.exports = (str) => {
export function stemmer(str) {
if (typeof str !== 'string') return '';
const noise = [
@ -17,4 +17,4 @@ module.exports = (str) => {
str = noise.reduce((acc, regex) => acc.replace(regex, ''), str);
return simplify(str);
};
}

View file

@ -1,8 +1,9 @@
const colors = require('colors/safe');
const Validator = require('jsonschema').Validator;
import colors from 'colors/safe.js';
import jsonschema from 'jsonschema';
// Perform JSON Schema validation
module.exports = (fileName, object, schema) => {
export function validate(fileName, object, schema) {
const Validator = jsonschema.Validator;
const v = new Validator();
const validationErrors = v.validate(object, schema, { nestedErrors: true }).errors;
if (validationErrors.length) {
@ -25,4 +26,4 @@ module.exports = (fileName, object, schema) => {
console.error();
process.exit(1);
}
};
}

View file

@ -1,8 +1,8 @@
const crypto = require('crypto');
const fs = require('fs');
const JSON5 = require('json5');
const packageJSON = require('../package.json');
import crypto from 'node:crypto';
import fs from 'node:fs';
import JSON5 from 'json5';
const packageJSON = JSON.parse(fs.readFileSync('./package.json', 'utf8'));
const URLRoot = 'https://raw.githubusercontent.com/osmlab/name-suggestion-index/main';
//
@ -13,7 +13,7 @@ const URLRoot = 'https://raw.githubusercontent.com/osmlab/name-suggestion-index/
// `file` = the path to the file
// `contents` = should be stringified json containing an object {}
//
module.exports = (file, contents) => {
export function writeFileWithMeta(file, contents) {
// Load the previous file
let previous = { _meta: { } };
try {
@ -39,4 +39,4 @@ module.exports = (file, contents) => {
// Stick metadata at the beginning of the file in the most hacky way possible
fs.writeFileSync(file, contents.replace(/^\{/, '{' + meta));
}
};
}

View file

@ -52,8 +52,13 @@
"transit",
"wikidata"
],
"type": "module",
"main": "dist/index.js",
"module": "index.mjs",
"module": "./index.mjs",
"exports": {
"import": "./index.mjs",
"require": "./dist/index.js"
},
"nsiguide": "docs/index.html",
"targets": {
"nsiguide": {
@ -78,7 +83,7 @@
"build": "run-s build:features build:index",
"build:features": "node scripts/build_features.js",
"build:index": "node scripts/build_index.js",
"jest": "jest --coverage",
"jest": "node --experimental-vm-modules node_modules/.bin/jest --no-cache --coverage",
"lint": "eslint scripts/*.js lib/*.js",
"test": "run-s lint build jest",
"validate": "node scripts/validate.js",
@ -125,20 +130,23 @@
"rollup": "^2.36.2",
"safe-regex": "^2.1.1",
"shelljs": "^0.8.0",
"twitter": "^1.7.1",
"whatwg-fetch": "^3.5.0",
"wikibase-edit": "^4.7.3",
"wikibase-sdk": "^7.7.1",
"xmlbuilder2": "^2.1.2"
},
"optionalDependencies": {
"twitter": "^1.7.1",
"wikibase-edit": "^4.7.3",
"wikibase-sdk": "^7.7.1"
},
"engines": {
"node": ">=10"
"node": "^12.20.0 || ^14.13.1 || >=16.0.0"
},
"jest": {
"moduleFileExtensions": ["ts", "js", "json"],
"transform": {},
"verbose": true
},
"babel": {
"presets": [
"@babel/preset-react"
]
}
}
}

View file

@ -1,19 +1,21 @@
const colors = require('colors/safe');
const fs = require('fs');
const glob = require('glob');
const JSON5 = require('json5');
const path = require('path');
const geojsonArea = require('@mapbox/geojson-area');
const geojsonBounds = require('geojson-bounds');
const geojsonPrecision = require('geojson-precision');
const geojsonRewind = require('@mapbox/geojson-rewind');
const stringify = require('@aitodotai/json-stringify-pretty-compact');
const writeFileWithMeta = require('../lib/write_file_with_meta.js');
const Validator = require('jsonschema').Validator;
import colors from 'colors/safe.js';
import fs from 'node:fs';
import geojsonArea from '@mapbox/geojson-area';
import geojsonBounds from 'geojson-bounds';
import geojsonPrecision from 'geojson-precision';
import geojsonRewind from '@mapbox/geojson-rewind';
import glob from 'glob';
import JSON5 from 'json5';
import jsonschema from 'jsonschema';
import path from 'node:path';
import stringify from '@aitodotai/json-stringify-pretty-compact';
const geojsonSchema = require('../schema/geojson.json');
const featureSchema = require('../schema/feature.json');
import { writeFileWithMeta } from '../lib/write_file_with_meta.js';
const geojsonSchema = JSON.parse(fs.readFileSync('./schema/geojson.json', 'utf8'));
const featureSchema = JSON.parse(fs.readFileSync('./schema/feature.json', 'utf8'));
const Validator = jsonschema.Validator;
let v = new Validator();
v.addSchema(geojsonSchema, 'http://json.schemastore.org/geojson.json');

View file

@ -1,24 +1,27 @@
const colors = require('colors/safe');
const fs = require('fs');
const JSON5 = require('json5');
const safeRegex = require('safe-regex');
const shell = require('shelljs');
const stringify = require('@aitodotai/json-stringify-pretty-compact');
const withLocale = require('locale-compare')('en-US');
import colors from 'colors/safe.js';
import fs from 'node:fs';
import JSON5 from 'json5';
import localeCompare from 'locale-compare';
import LocationConflation from '@ideditor/location-conflation';
import safeRegex from 'safe-regex';
import shell from 'shelljs';
import stringify from '@aitodotai/json-stringify-pretty-compact';
const fileTree = require('../lib/file_tree.js');
const idgen = require('../lib/idgen.js');
const matcher = require('../lib/matcher.js')();
const sortObject = require('../lib/sort_object.js');
const stemmer = require('../lib/stemmer.js');
const validate = require('../lib/validate.js');
import { fileTree } from '../lib/file_tree.js';
import { idgen } from '../lib/idgen.js';
import { Matcher } from '../lib/matcher.js';
const matcher = Matcher();
import { sortObject } from '../lib/sort_object.js';
import { stemmer } from '../lib/stemmer.js';
import { validate } from '../lib/validate.js';
const withLocale = localeCompare('en-US');
// metadata about the trees
const trees = require('../config/trees.json').trees;
const trees = JSON5.parse(fs.readFileSync('./config/trees.json', 'utf8')).trees;
// We use LocationConflation for validating and processing the locationSets
const featureCollection = require('../dist/featureCollection.json');
const LocationConflation = require('@ideditor/location-conflation').default;
const featureCollection = JSON5.parse(fs.readFileSync('./dist/featureCollection.json', 'utf8'));
const loco = new LocationConflation(featureCollection);
console.log(colors.blue('-'.repeat(70)));
@ -54,7 +57,7 @@ console.log('');
//
function loadConfig() {
['trees', 'replacements', 'genericWords'].forEach(which => {
const schema = require(`../schema/${which}.json`);
const schema = JSON5.parse(fs.readFileSync(`./schema/${which}.json`, 'utf8'));
const file = `config/${which}.json`;
const contents = fs.readFileSync(file, 'utf8');
let data;

View file

@ -1,25 +1,31 @@
const colors = require('colors/safe');
const crypto = require('crypto');
const fetch = require('node-fetch');
const fileTree = require('../lib/file_tree.js');
const http = require('http');
const https = require('https');
const iso1A2Code = require('@ideditor/country-coder').iso1A2Code;
const project = require('../package.json');
const sortObject = require('../lib/sort_object.js');
const stringify = require('@aitodotai/json-stringify-pretty-compact');
const withLocale = require('locale-compare')('en-US');
const writeFileWithMeta = require('../lib/write_file_with_meta.js');
import colors from 'colors/safe.js';
import fs from 'node:fs';
import crypto from 'node:crypto';
import fetch from 'node-fetch';
import http from 'node:http';
import https from 'node:https';
import { iso1A2Code } from '@ideditor/country-coder';
import JSON5 from 'json5';
import localeCompare from 'locale-compare';
import LocationConflation from '@ideditor/location-conflation';
import stringify from '@aitodotai/json-stringify-pretty-compact';
import Twitter from 'Twitter';
import wikibase from 'wikibase-sdk';
import wikibaseEdit from 'wikibase-edit';
// metadata about the trees
const trees = require('../config/trees.json').trees;
import { sortObject } from '../lib/sort_object.js';
import { fileTree } from '../lib/file_tree.js';
import { writeFileWithMeta } from '../lib/write_file_with_meta.js';
const withLocale = localeCompare('en-US');
const project = JSON5.parse(fs.readFileSync('./package.json', 'utf8'));
const trees = JSON5.parse(fs.readFileSync('./config/trees.json', 'utf8')).trees;
// We use LocationConflation for validating and processing the locationSets
const featureCollection = require('../dist/featureCollection.json');
const LocationConflation = require('@ideditor/location-conflation').default;
const featureCollection = JSON.parse(fs.readFileSync('./dist/featureCollection.json', 'utf8'));
const loco = new LocationConflation(featureCollection);
const wbk = require('wikibase-sdk')({
const wbk = wikibase({
instance: 'https://www.wikidata.org',
sparqlEndpoint: 'https://query.wikidata.org/sparql'
});
@ -64,7 +70,7 @@ const DRYRUN = false;
let _secrets;
try {
_secrets = require('../config/secrets.json');
_secrets = JSON5.parse(fs.readFileSync('./config/secrets.json', 'utf8'));
} catch (err) { /* ignore */ }
if (_secrets && !_secrets.twitter && !_secrets.wikibase) {
@ -78,43 +84,30 @@ if (_secrets && !_secrets.twitter && !_secrets.wikibase) {
// To fetch Twitter logos, sign up for API credentials at https://apps.twitter.com/
// and put them into `config/secrets.json`
let Twitter;
let _twitterAPIs = [];
let _twitterAPIIndex = 0;
if (_secrets && _secrets.twitter) {
try {
Twitter = require('twitter');
} catch (err) {
console.warn(colors.yellow('Looks like you don\'t have the optional Twitter package installed...'));
console.warn(colors.yellow('Try `npm install twitter` to install it.'));
}
if (Twitter) {
_twitterAPIs = _secrets.twitter.map(s => {
return new Twitter({
consumer_key: s.twitter_consumer_key,
consumer_secret: s.twitter_consumer_secret,
access_token_key: s.twitter_access_token_key,
access_token_secret: s.twitter_access_token_secret
});
_twitterAPIs = _secrets.twitter.map(s => {
return new Twitter({
consumer_key: s.twitter_consumer_key,
consumer_secret: s.twitter_consumer_secret,
access_token_key: s.twitter_access_token_key,
access_token_secret: s.twitter_access_token_secret
});
}
});
}
// To update wikidata
// add your username/password into `config/secrets.json`
let _wbEdit;
if (_secrets && _secrets.wikibase) {
try {
_wbEdit = require('wikibase-edit')({
instance: 'https://www.wikidata.org',
credentials: _secrets.wikibase,
summary: 'Updated name-suggestion-index related claims, see https://nsi.guide for project details.',
userAgent: `${project.name}/${project.version} (${project.homepage})`,
});
} catch (err) {
console.warn(colors.yellow('Looks like you don\'t have the optional wikibase-edit package installed...'));
console.warn(colors.yellow('Try `npm install wikibase-edit` to install it.'));
}
_wbEdit = wikibaseEdit({
instance: 'https://www.wikidata.org',
credentials: _secrets.wikibase,
summary: 'Updated name-suggestion-index related claims, see https://nsi.guide for project details.',
userAgent: `${project.name}/${project.version} (${project.homepage})`,
});
}
@ -531,7 +524,7 @@ function finish() {
let origWikidata;
let dissolved = {};
try {
origWikidata = require('../dist/wikidata.json').wikidata;
origWikidata = JSON5.parse(fs.readFileSync('./dist/wikidata.json', 'utf8')).wikidata;
} catch (err) {
origWikidata = {};
}

View file

@ -1,15 +1,19 @@
const clearConsole = require('clear');
const colors = require('colors/safe');
const fetch = require('node-fetch');
const fileTree = require('../lib/file_tree.js');
import clearConsole from 'clear';
import colors from 'colors/safe.js';
import fetch from 'node-fetch';
import fs from 'node:fs';
import LocationConflation from '@ideditor/location-conflation';
import wikibase from 'wikibase-sdk';
const wbk = require('wikibase-sdk')({
import { fileTree } from '../lib/file_tree.js';
const wbk = wikibase({
instance: 'https://www.wikidata.org',
sparqlEndpoint: 'https://query.wikidata.org/sparql'
});
const featureCollection = require('../dist/featureCollection.json');
const LocationConflation = require('@ideditor/location-conflation').default;
// We use LocationConflation for validating and processing the locationSets
const featureCollection = JSON.parse(fs.readFileSync('./dist/featureCollection.json', 'utf8'));
const loco = new LocationConflation(featureCollection);
let _cache = {};

View file

@ -9,12 +9,13 @@
//
// Please see README.md for more info
const colors = require('colors/safe');
const fs = require('fs');
const osmium = require('osmium');
const shell = require('shelljs');
const sortObject = require('../lib/sort_object.js');
const stringify = require('@aitodotai/json-stringify-pretty-compact');
import colors from 'colors/safe.js';
import fs from 'node:fs';
import osmium from 'osmium';
import shell from 'shelljs';
import stringify from '@aitodotai/json-stringify-pretty-compact';
import { sortObject } from '../lib/sort_object.js';
if (process.argv.length < 3) {
console.log('');

View file

@ -1,30 +1,32 @@
const colors = require('colors/safe');
const fs = require('fs');
const glob = require('glob');
const dissolved = require('../dist/dissolved.json').dissolved;
const fileTree = require('../lib/file_tree.js');
const JSON5 = require('json5');
const packageJSON = require('../package.json');
const shell = require('shelljs');
const sortObject = require('../lib/sort_object.js');
const stringify = require('@aitodotai/json-stringify-pretty-compact');
const wikidata = require('../dist/wikidata.json').wikidata;
const withLocale = require('locale-compare')('en-US');
const writeFileWithMeta = require('../lib/write_file_with_meta.js');
const xmlbuilder2 = require('xmlbuilder2');
import colors from 'colors/safe.js';
import fs from 'node:fs';
import glob from 'glob';
import JSON5 from 'json5';
import localeCompare from 'locale-compare';
import LocationConflation from '@ideditor/location-conflation';
import shell from 'shelljs';
import stringify from '@aitodotai/json-stringify-pretty-compact';
import xmlbuilder2 from 'xmlbuilder2';
import { fileTree } from '../lib/file_tree.js';
import { sortObject } from '../lib/sort_object.js';
import { writeFileWithMeta } from '../lib/write_file_with_meta.js';
const withLocale = localeCompare('en-US');
// JSON imports
const dissolved = JSON5.parse(fs.readFileSync('./dist/dissolved.json', 'utf8')).dissolved;
const packageJSON = JSON5.parse(fs.readFileSync('./package.json', 'utf8'));
const trees = JSON5.parse(fs.readFileSync('./config/trees.json', 'utf8')).trees;
const wikidata = JSON5.parse(fs.readFileSync('./dist/wikidata.json', 'utf8')).wikidata;
// iD's presets which we will build on
const sourcePresets = require('@openstreetmap/id-tagging-schema/dist/presets.json');
// metadata about the trees
const trees = require('../config/trees.json').trees;
const sourcePresets = JSON5.parse(fs.readFileSync('./node_modules/@openstreetmap/id-tagging-schema/dist/presets.json', 'utf8'));
// We use LocationConflation for validating and processing the locationSets
const featureCollection = require('../dist/featureCollection.json');
const LocationConflation = require('@ideditor/location-conflation').default;
const featureCollection = JSON.parse(fs.readFileSync('./dist/featureCollection.json', 'utf8'));
const loco = new LocationConflation(featureCollection);
let _cache = {};
fileTree.read(_cache, loco);
fileTree.expandTemplates(_cache, loco);

View file

@ -1,6 +1,7 @@
const colors = require('colors/safe');
const fs = require('fs');
const packageJSON = require('../package.json');
import colors from 'colors/safe.js';
import fs from 'node:fs';
const packageJSON = JSON.parse(fs.readFileSync('./package.json', 'utf8'));
// YYYYMMDD
const now = new Date();

View file

@ -1,10 +1,13 @@
const Matcher = require('../lib/matcher.js');
const data = require('./matcher.data.json');
import fs from 'fs';
import { jest } from '@jest/globals';
import LocationConflation from '@ideditor/location-conflation';
import { Matcher } from '../index.mjs';
const data = JSON.parse(fs.readFileSync('./tests/matcher.data.json', 'utf8'));
// We use LocationConflation for validating and processing the locationSets
const featureCollection = require('../dist/featureCollection.json');
const LocationConflation = require('@ideditor/location-conflation').default;
const loco = new LocationConflation(featureCollection);
const featureCollection = JSON.parse(fs.readFileSync('./dist/featureCollection.json', 'utf8'));
const loco = new LocationConflation.default(featureCollection);
let _matcher;
@ -14,8 +17,8 @@ const HONGKONG = [114.19, 22.33];
describe('index building', () => {
beforeEach(() => _matcher = Matcher() );
afterEach(() => _matcher = null );
beforeEach(() => _matcher = Matcher());
afterEach(() => _matcher = null);
test('buildMatchIndex does not throw', () => {
expect(() => _matcher.buildMatchIndex(data)).not.toThrow();

View file

@ -1,4 +1,4 @@
const simplify = require('../lib/simplify.js');
import { simplify } from '../index.mjs';
describe('simplify', () => {

View file

@ -1,4 +1,4 @@
const stemmer = require('../lib/stemmer.js');
import { stemmer } from '../index.mjs';
describe('stemmer', () => {