mirror of
https://github.com/Combodo/iTop.git
synced 2026-04-23 18:48:51 +02:00
N°7063 - Forms SDK - Add Symfony forms component
error forms issue
This commit is contained in:
182
node_modules/@orchidjs/sifter/README.md
generated
vendored
Normal file
182
node_modules/@orchidjs/sifter/README.md
generated
vendored
Normal file
@@ -0,0 +1,182 @@
|
||||
# sifter.js
|
||||
[](https://travis-ci.com/github/orchidjs/sifter.js)
|
||||
[](https://coveralls.io/r/orchidjs/sifter.js)
|
||||
<a href="https://www.npmjs.com/package/@orchidjs/sifter" class="m-1 d-inline-block"><img alt="npm (scoped)" src="https://img.shields.io/npm/v/@orchidjs/sifter?color=007ec6"></a>
|
||||
|
||||
Sifter is a fast and small (<6kb) client and server-side library (coded in TypeScript and available in [CJS, UMD, and ESM](https://irian.to/blogs/what-are-cjs-amd-umd-and-esm-in-javascript/)) for textually searching arrays and hashes of objects by property – or multiple properties. It's designed specifically for autocomplete. The process is three-step: *score*, *filter*, *sort*.
|
||||
|
||||
* **Supports díåcritîçs.**<br>For example, if searching for "montana" and an item in the set has a value of "montaña", it will still be matched. Sorting will also play nicely with diacritics. (using [unicode-variants](https://github.com/orchidjs/unicode-variants))
|
||||
* **Smart scoring.**<br>Items are scored / sorted intelligently depending on where a match is found in the string (how close to the beginning) and what percentage of the string matches.
|
||||
* **Multi-field sorting.**<br>When scores aren't enough to go by – like when getting results for an empty query – it can sort by one or more fields. For example, sort by a person's first name and last name without actually merging the properties to a single string.
|
||||
* **Nested properties.**<br>Allows to search and sort on nested properties so you can perform search on complex objects without flattening them simply by using dot-notation to reference fields (ie. `nested.property`).
|
||||
* **Weighted fields.**<br>Assign weights to multi-field configurations for more control of search results
|
||||
* **Field searching**<br>Search for values in one field with "field-name:query"
|
||||
|
||||
|
||||
```sh
|
||||
$ npm install @orchidjs/sifter # node.js
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
import {Sifter} from '@orchidjs/sifter';
|
||||
|
||||
var sifter = new Sifter([
|
||||
{title: 'Annapurna I', location: 'Nepal', continent: 'Asia'},
|
||||
{title: 'Annapurna II', location: 'Nepal', continent: 'Asia'},
|
||||
{title: 'Annapurna III', location: 'Nepal', continent: 'Asia'},
|
||||
{title: 'Eiger', location: 'Switzerland', continent: 'Europe'},
|
||||
{title: 'Everest', location: 'Nepal', continent: 'Asia'},
|
||||
{title: 'Gannett', location: 'Wyoming', continent: 'North America'},
|
||||
{title: 'Denali', location: 'Alaska', continent: 'North America'}
|
||||
]);
|
||||
|
||||
var result = sifter.search('anna', {
|
||||
fields: [{field:'title',weight:2}, {field:'location'}, {field:'continent',weight:0.5}],
|
||||
sort: [{field: 'title', direction: 'asc'}],
|
||||
limit: 3
|
||||
});
|
||||
```
|
||||
|
||||
Seaching will provide back meta information and an "items" array that contains objects with the index (or key, if searching a hash) and a score that represents how good of a match the item was. Items that did not match will not be returned.
|
||||
|
||||
```js
|
||||
{ score: 0.5757575757575758, id: 0 },
|
||||
{ score: 0.5555555555555555, id: 1 },
|
||||
{ score: 0.5384615384615384, id: 2 }
|
||||
```
|
||||
|
||||
Items are sorted by best-match, primarily. If two or more items have the same score (which will be the case when searching with an empty string), it will resort to the fields listed in the "sort" option.
|
||||
|
||||
The full result comes back in the format of:
|
||||
|
||||
```js
|
||||
{
|
||||
options: {
|
||||
fields: [{field:"title",weight:2},{field:"location",weight:1}, {field:"continent",weight:0.5}],
|
||||
sort: [
|
||||
{field: "title", direction: "asc"}
|
||||
],
|
||||
limit: 3
|
||||
},
|
||||
query: "anna",
|
||||
tokens: [{
|
||||
string: "anna",
|
||||
regex: /[aÀÁÂÃÄÅàáâãäå][nÑñ][nÑñ][aÀÁÂÃÄÅàáâãäå]/
|
||||
}],
|
||||
total: 3,
|
||||
items: [
|
||||
{ score: 0.5757575757575758, id: 0 },
|
||||
{ score: 0.5555555555555555, id: 1 },
|
||||
{ score: 0.5384615384615384, id: 2 }
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### API
|
||||
|
||||
#### #.search(query, options)
|
||||
|
||||
Performs a search for `query` with the provided `options`.
|
||||
|
||||
<table width="100%">
|
||||
<tr>
|
||||
<th align="left">Option</th>
|
||||
<th align="left">Type</th>
|
||||
<th align="left" width="100%">Description</th>
|
||||
</tr>
|
||||
<tr>
|
||||
<td valign="top"><code>fields</code></td>
|
||||
<td valign="top">array</td>
|
||||
<td valign="top">An array of property names and optional weights to be searched.
|
||||
|
||||
```js
|
||||
fields: [
|
||||
{field:"title",weight:2},
|
||||
{field:"location",weight:1},
|
||||
{field:"continent",weight:0.5}
|
||||
],
|
||||
```
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td valign="top"><code>limit</code></td>
|
||||
<td valign="top">integer</td>
|
||||
<td valign="top">The maximum number of results to return.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td valign="top"><code>sort</code></td>
|
||||
<td valign="top">array|function</td>
|
||||
<td valign="top">
|
||||
An array of fields to sort by.
|
||||
Each item should be an object containing at least a <code>"field"</code> property. Optionally, <code>direction</code> can be set to <code>"asc"</code> or <code>"desc"</code>.
|
||||
The order of the array defines the sort precedence.
|
||||
<br/><br/>
|
||||
Unless present, a special <code>"$score"</code> property will be automatically added to the beginning of the sort list.
|
||||
This will make results sorted primarily by match quality (descending).
|
||||
<br/><br/>
|
||||
Alternatively, you can define a callback function to handle sorting. For example:
|
||||
|
||||
```js
|
||||
sort: function(a,b){
|
||||
var item_a = this.items[a.id];
|
||||
var item_b = this.items[b.id];
|
||||
return item_a.fielda.localeCompare(item_b.fielda);
|
||||
},
|
||||
```
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td valign="top"><code>sort_empty</code></td>
|
||||
<td valign="top">array</td>
|
||||
<td valign="top">Optional. Defaults to "sort" setting. If provided, these sort settings are used when no query is present.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td valign="top"><code>filter</code></td>
|
||||
<td valign="top">boolean</td>
|
||||
<td valign="top">If <code>false</code>, items with a score of zero will <em>not</em> be filtered out of the result-set.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td valign="top"><code>conjunction</code></td>
|
||||
<td valign="top">string</td>
|
||||
<td valign="top">Determines how multiple search terms are joined (<code>"and"</code> or <code>"or"</code>, defaults to <code>"or"</code>).</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td valign="top"><code>nesting</code></td>
|
||||
<td valign="top">boolean</td>
|
||||
<td valign="top">If <code>true</code>, nested fields will be available for search and sort using dot-notation to reference them (e.g. <code>nested.property</code>)<br><em>Warning: can reduce performance</em></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td valign="top"><code>respect_word_boundaries</code></td>
|
||||
<td valign="top">boolean</td>
|
||||
<td valign="top">If <code>true</code>, matches only at start of word boundaries (e.g. the beginning of words, instead of matching the middle of words)</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
|
||||
## Contributing
|
||||
|
||||
Install the dependencies that are required to build and test:
|
||||
|
||||
```sh
|
||||
$ npm install
|
||||
```
|
||||
|
||||
Build from typescript
|
||||
```sh
|
||||
$ npm run build
|
||||
```
|
||||
|
||||
Run tests
|
||||
```sh
|
||||
$ npm test
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
Copyright © 2013–2021 [Contributors](https://github.com/orchidjs/sifter.js/graphs/contributors)
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at: http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
|
||||
1139
node_modules/@orchidjs/sifter/dist/cjs/sifter.js
generated
vendored
Normal file
1139
node_modules/@orchidjs/sifter/dist/cjs/sifter.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
1
node_modules/@orchidjs/sifter/dist/cjs/sifter.js.map
generated
vendored
Normal file
1
node_modules/@orchidjs/sifter/dist/cjs/sifter.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
368
node_modules/@orchidjs/sifter/dist/esm/lib/sifter.js
generated
vendored
Normal file
368
node_modules/@orchidjs/sifter/dist/esm/lib/sifter.js
generated
vendored
Normal file
@@ -0,0 +1,368 @@
|
||||
/*! sifter.js | https://github.com/orchidjs/sifter.js | Apache License (v2) */
|
||||
import { iterate, cmp, propToArray, getAttrNesting, getAttr, scoreValue } from './utils.js';
|
||||
export { cmp, getAttr, getAttrNesting, iterate, propToArray, scoreValue } from './utils.js';
|
||||
import { getPattern } from '../node_modules/@orchidjs/unicode-variants/dist/esm/index.js';
|
||||
export { getPattern } from '../node_modules/@orchidjs/unicode-variants/dist/esm/index.js';
|
||||
import { escape_regex } from '../node_modules/@orchidjs/unicode-variants/dist/esm/regex.js';
|
||||
|
||||
/**
|
||||
* sifter.js
|
||||
* Copyright (c) 2013–2020 Brian Reavis & contributors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this
|
||||
* file except in compliance with the License. You may obtain a copy of the License at:
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software distributed under
|
||||
* the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
* ANY KIND, either express or implied. See the License for the specific language
|
||||
* governing permissions and limitations under the License.
|
||||
*
|
||||
* @author Brian Reavis <brian@thirdroute.com>
|
||||
*/
|
||||
|
||||
class Sifter {
|
||||
// []|{};
|
||||
|
||||
/**
|
||||
* Textually searches arrays and hashes of objects
|
||||
* by property (or multiple properties). Designed
|
||||
* specifically for autocomplete.
|
||||
*
|
||||
*/
|
||||
constructor(items, settings) {
|
||||
this.items = void 0;
|
||||
this.settings = void 0;
|
||||
this.items = items;
|
||||
this.settings = settings || {
|
||||
diacritics: true
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Splits a search string into an array of individual
|
||||
* regexps to be used to match results.
|
||||
*
|
||||
*/
|
||||
tokenize(query, respect_word_boundaries, weights) {
|
||||
if (!query || !query.length) return [];
|
||||
const tokens = [];
|
||||
const words = query.split(/\s+/);
|
||||
var field_regex;
|
||||
|
||||
if (weights) {
|
||||
field_regex = new RegExp('^(' + Object.keys(weights).map(escape_regex).join('|') + ')\:(.*)$');
|
||||
}
|
||||
|
||||
words.forEach(word => {
|
||||
let field_match;
|
||||
let field = null;
|
||||
let regex = null; // look for "field:query" tokens
|
||||
|
||||
if (field_regex && (field_match = word.match(field_regex))) {
|
||||
field = field_match[1];
|
||||
word = field_match[2];
|
||||
}
|
||||
|
||||
if (word.length > 0) {
|
||||
if (this.settings.diacritics) {
|
||||
regex = getPattern(word) || null;
|
||||
} else {
|
||||
regex = escape_regex(word);
|
||||
}
|
||||
|
||||
if (regex && respect_word_boundaries) regex = "\\b" + regex;
|
||||
}
|
||||
|
||||
tokens.push({
|
||||
string: word,
|
||||
regex: regex ? new RegExp(regex, 'iu') : null,
|
||||
field: field
|
||||
});
|
||||
});
|
||||
return tokens;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a function to be used to score individual results.
|
||||
*
|
||||
* Good matches will have a higher score than poor matches.
|
||||
* If an item is not a match, 0 will be returned by the function.
|
||||
*
|
||||
* @returns {T.ScoreFn}
|
||||
*/
|
||||
getScoreFunction(query, options) {
|
||||
var search = this.prepareSearch(query, options);
|
||||
return this._getScoreFunction(search);
|
||||
}
|
||||
/**
|
||||
* @returns {T.ScoreFn}
|
||||
*
|
||||
*/
|
||||
|
||||
|
||||
_getScoreFunction(search) {
|
||||
const tokens = search.tokens,
|
||||
token_count = tokens.length;
|
||||
|
||||
if (!token_count) {
|
||||
return function () {
|
||||
return 0;
|
||||
};
|
||||
}
|
||||
|
||||
const fields = search.options.fields,
|
||||
weights = search.weights,
|
||||
field_count = fields.length,
|
||||
getAttrFn = search.getAttrFn;
|
||||
|
||||
if (!field_count) {
|
||||
return function () {
|
||||
return 1;
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Calculates the score of an object
|
||||
* against the search query.
|
||||
*
|
||||
*/
|
||||
|
||||
|
||||
const scoreObject = function () {
|
||||
if (field_count === 1) {
|
||||
return function (token, data) {
|
||||
const field = fields[0].field;
|
||||
return scoreValue(getAttrFn(data, field), token, weights[field] || 1);
|
||||
};
|
||||
}
|
||||
|
||||
return function (token, data) {
|
||||
var sum = 0; // is the token specific to a field?
|
||||
|
||||
if (token.field) {
|
||||
const value = getAttrFn(data, token.field);
|
||||
|
||||
if (!token.regex && value) {
|
||||
sum += 1 / field_count;
|
||||
} else {
|
||||
sum += scoreValue(value, token, 1);
|
||||
}
|
||||
} else {
|
||||
iterate(weights, (weight, field) => {
|
||||
sum += scoreValue(getAttrFn(data, field), token, weight);
|
||||
});
|
||||
}
|
||||
|
||||
return sum / field_count;
|
||||
};
|
||||
}();
|
||||
|
||||
if (token_count === 1) {
|
||||
return function (data) {
|
||||
return scoreObject(tokens[0], data);
|
||||
};
|
||||
}
|
||||
|
||||
if (search.options.conjunction === 'and') {
|
||||
return function (data) {
|
||||
var score,
|
||||
sum = 0;
|
||||
|
||||
for (let token of tokens) {
|
||||
score = scoreObject(token, data);
|
||||
if (score <= 0) return 0;
|
||||
sum += score;
|
||||
}
|
||||
|
||||
return sum / token_count;
|
||||
};
|
||||
} else {
|
||||
return function (data) {
|
||||
var sum = 0;
|
||||
iterate(tokens, token => {
|
||||
sum += scoreObject(token, data);
|
||||
});
|
||||
return sum / token_count;
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a function that can be used to compare two
|
||||
* results, for sorting purposes. If no sorting should
|
||||
* be performed, `null` will be returned.
|
||||
*
|
||||
* @return function(a,b)
|
||||
*/
|
||||
getSortFunction(query, options) {
|
||||
var search = this.prepareSearch(query, options);
|
||||
return this._getSortFunction(search);
|
||||
}
|
||||
|
||||
_getSortFunction(search) {
|
||||
var implicit_score,
|
||||
sort_flds = [];
|
||||
const self = this,
|
||||
options = search.options,
|
||||
sort = !search.query && options.sort_empty ? options.sort_empty : options.sort;
|
||||
|
||||
if (typeof sort == 'function') {
|
||||
return sort.bind(this);
|
||||
}
|
||||
/**
|
||||
* Fetches the specified sort field value
|
||||
* from a search result item.
|
||||
*
|
||||
*/
|
||||
|
||||
|
||||
const get_field = function get_field(name, result) {
|
||||
if (name === '$score') return result.score;
|
||||
return search.getAttrFn(self.items[result.id], name);
|
||||
}; // parse options
|
||||
|
||||
|
||||
if (sort) {
|
||||
for (let s of sort) {
|
||||
if (search.query || s.field !== '$score') {
|
||||
sort_flds.push(s);
|
||||
}
|
||||
}
|
||||
} // the "$score" field is implied to be the primary
|
||||
// sort field, unless it's manually specified
|
||||
|
||||
|
||||
if (search.query) {
|
||||
implicit_score = true;
|
||||
|
||||
for (let fld of sort_flds) {
|
||||
if (fld.field === '$score') {
|
||||
implicit_score = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (implicit_score) {
|
||||
sort_flds.unshift({
|
||||
field: '$score',
|
||||
direction: 'desc'
|
||||
});
|
||||
} // without a search.query, all items will have the same score
|
||||
|
||||
} else {
|
||||
sort_flds = sort_flds.filter(fld => fld.field !== '$score');
|
||||
} // build function
|
||||
|
||||
|
||||
const sort_flds_count = sort_flds.length;
|
||||
|
||||
if (!sort_flds_count) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return function (a, b) {
|
||||
var result, field;
|
||||
|
||||
for (let sort_fld of sort_flds) {
|
||||
field = sort_fld.field;
|
||||
let multiplier = sort_fld.direction === 'desc' ? -1 : 1;
|
||||
result = multiplier * cmp(get_field(field, a), get_field(field, b));
|
||||
if (result) return result;
|
||||
}
|
||||
|
||||
return 0;
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses a search query and returns an object
|
||||
* with tokens and fields ready to be populated
|
||||
* with results.
|
||||
*
|
||||
*/
|
||||
prepareSearch(query, optsUser) {
|
||||
const weights = {};
|
||||
var options = Object.assign({}, optsUser);
|
||||
propToArray(options, 'sort');
|
||||
propToArray(options, 'sort_empty'); // convert fields to new format
|
||||
|
||||
if (options.fields) {
|
||||
propToArray(options, 'fields');
|
||||
const fields = [];
|
||||
options.fields.forEach(field => {
|
||||
if (typeof field == 'string') {
|
||||
field = {
|
||||
field: field,
|
||||
weight: 1
|
||||
};
|
||||
}
|
||||
|
||||
fields.push(field);
|
||||
weights[field.field] = 'weight' in field ? field.weight : 1;
|
||||
});
|
||||
options.fields = fields;
|
||||
}
|
||||
|
||||
return {
|
||||
options: options,
|
||||
query: query.toLowerCase().trim(),
|
||||
tokens: this.tokenize(query, options.respect_word_boundaries, weights),
|
||||
total: 0,
|
||||
items: [],
|
||||
weights: weights,
|
||||
getAttrFn: options.nesting ? getAttrNesting : getAttr
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Searches through all items and returns a sorted array of matches.
|
||||
*
|
||||
*/
|
||||
search(query, options) {
|
||||
var self = this,
|
||||
score,
|
||||
search;
|
||||
search = this.prepareSearch(query, options);
|
||||
options = search.options;
|
||||
query = search.query; // generate result scoring function
|
||||
|
||||
const fn_score = options.score || self._getScoreFunction(search); // perform search and sort
|
||||
|
||||
|
||||
if (query.length) {
|
||||
iterate(self.items, (item, id) => {
|
||||
score = fn_score(item);
|
||||
|
||||
if (options.filter === false || score > 0) {
|
||||
search.items.push({
|
||||
'score': score,
|
||||
'id': id
|
||||
});
|
||||
}
|
||||
});
|
||||
} else {
|
||||
iterate(self.items, (_, id) => {
|
||||
search.items.push({
|
||||
'score': 1,
|
||||
'id': id
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
const fn_sort = self._getSortFunction(search);
|
||||
|
||||
if (fn_sort) search.items.sort(fn_sort); // apply limits
|
||||
|
||||
search.total = search.items.length;
|
||||
|
||||
if (typeof options.limit === 'number') {
|
||||
search.items = search.items.slice(0, options.limit);
|
||||
}
|
||||
|
||||
return search;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export { Sifter };
|
||||
//# sourceMappingURL=sifter.js.map
|
||||
1
node_modules/@orchidjs/sifter/dist/esm/lib/sifter.js.map
generated
vendored
Normal file
1
node_modules/@orchidjs/sifter/dist/esm/lib/sifter.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
95
node_modules/@orchidjs/sifter/dist/esm/lib/utils.js
generated
vendored
Normal file
95
node_modules/@orchidjs/sifter/dist/esm/lib/utils.js
generated
vendored
Normal file
@@ -0,0 +1,95 @@
|
||||
/*! sifter.js | https://github.com/orchidjs/sifter.js | Apache License (v2) */
|
||||
import { asciifold } from '../node_modules/@orchidjs/unicode-variants/dist/esm/index.js';
|
||||
|
||||
/**
|
||||
* A property getter resolving dot-notation
|
||||
* @param {Object} obj The root object to fetch property on
|
||||
* @param {String} name The optionally dotted property name to fetch
|
||||
* @return {Object} The resolved property value
|
||||
*/
|
||||
const getAttr = (obj, name) => {
|
||||
if (!obj) return;
|
||||
return obj[name];
|
||||
};
|
||||
/**
|
||||
* A property getter resolving dot-notation
|
||||
* @param {Object} obj The root object to fetch property on
|
||||
* @param {String} name The optionally dotted property name to fetch
|
||||
* @return {Object} The resolved property value
|
||||
*/
|
||||
|
||||
const getAttrNesting = (obj, name) => {
|
||||
if (!obj) return;
|
||||
var part,
|
||||
names = name.split(".");
|
||||
|
||||
while ((part = names.shift()) && (obj = obj[part]));
|
||||
|
||||
return obj;
|
||||
};
|
||||
/**
|
||||
* Calculates how close of a match the
|
||||
* given value is against a search token.
|
||||
*
|
||||
*/
|
||||
|
||||
const scoreValue = (value, token, weight) => {
|
||||
var score, pos;
|
||||
if (!value) return 0;
|
||||
value = value + '';
|
||||
if (token.regex == null) return 0;
|
||||
pos = value.search(token.regex);
|
||||
if (pos === -1) return 0;
|
||||
score = token.string.length / value.length;
|
||||
if (pos === 0) score += 0.5;
|
||||
return score * weight;
|
||||
};
|
||||
/**
|
||||
* Cast object property to an array if it exists and has a value
|
||||
*
|
||||
*/
|
||||
|
||||
const propToArray = (obj, key) => {
|
||||
var value = obj[key];
|
||||
if (typeof value == 'function') return value;
|
||||
|
||||
if (value && !Array.isArray(value)) {
|
||||
obj[key] = [value];
|
||||
}
|
||||
};
|
||||
/**
|
||||
* Iterates over arrays and hashes.
|
||||
*
|
||||
* ```
|
||||
* iterate(this.items, function(item, id) {
|
||||
* // invoked for each item
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
*/
|
||||
|
||||
const iterate = (object, callback) => {
|
||||
if (Array.isArray(object)) {
|
||||
object.forEach(callback);
|
||||
} else {
|
||||
for (var key in object) {
|
||||
if (object.hasOwnProperty(key)) {
|
||||
callback(object[key], key);
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
const cmp = (a, b) => {
|
||||
if (typeof a === 'number' && typeof b === 'number') {
|
||||
return a > b ? 1 : a < b ? -1 : 0;
|
||||
}
|
||||
|
||||
a = asciifold(a + '').toLowerCase();
|
||||
b = asciifold(b + '').toLowerCase();
|
||||
if (a > b) return 1;
|
||||
if (b > a) return -1;
|
||||
return 0;
|
||||
};
|
||||
|
||||
export { cmp, getAttr, getAttrNesting, iterate, propToArray, scoreValue };
|
||||
//# sourceMappingURL=utils.js.map
|
||||
1
node_modules/@orchidjs/sifter/dist/esm/lib/utils.js.map
generated
vendored
Normal file
1
node_modules/@orchidjs/sifter/dist/esm/lib/utils.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"utils.js","sources":["../../../lib/utils.ts"],"sourcesContent":["\nimport { asciifold } from '@orchidjs/unicode-variants';\nimport * as T from './types';\n\n\n/**\n * A property getter resolving dot-notation\n * @param {Object} obj The root object to fetch property on\n * @param {String} name The optionally dotted property name to fetch\n * @return {Object} The resolved property value\n */\nexport const getAttr = (obj:{[key:string]:any}, name:string ) => {\n if (!obj ) return;\n return obj[name];\n};\n\n/**\n * A property getter resolving dot-notation\n * @param {Object} obj The root object to fetch property on\n * @param {String} name The optionally dotted property name to fetch\n * @return {Object} The resolved property value\n */\nexport const getAttrNesting = (obj:{[key:string]:any}, name:string ) => {\n if (!obj ) return;\n var part, names = name.split(\".\");\n\twhile( (part = names.shift()) && (obj = obj[part]));\n return obj;\n};\n\n/**\n * Calculates how close of a match the\n * given value is against a search token.\n *\n */\nexport const scoreValue = (value:string, token:T.Token, weight:number ):number => {\n\tvar score, pos;\n\n\tif (!value) return 0;\n\n\tvalue = value + '';\n\tif( token.regex == null ) return 0;\n\tpos = value.search(token.regex);\n\tif (pos === -1) return 0;\n\n\tscore = token.string.length / value.length;\n\tif (pos === 0) score += 0.5;\n\n\treturn score * weight;\n};\n\n\n/**\n * Cast object property to an array if it exists and has a value\n *\n */\nexport const propToArray = (obj:{[key:string]:any}, key:string) => {\n\tvar value = obj[key];\n\n\tif( typeof value == 'function' ) return value;\n\n\tif( value && !Array.isArray(value) ){\n\t\tobj[key] = [value];\n\t}\n}\n\n\n/**\n * Iterates over arrays and hashes.\n *\n * ```\n * iterate(this.items, function(item, id) {\n * // invoked for each item\n * });\n * ```\n *\n */\nexport const iterate = (object:[]|{[key:string]:any}, callback:(value:any,key:any)=>any) 
=> {\n\n\tif ( Array.isArray(object)) {\n\t\tobject.forEach(callback);\n\n\t}else{\n\n\t\tfor (var key in object) {\n\t\t\tif (object.hasOwnProperty(key)) {\n\t\t\t\tcallback(object[key], key);\n\t\t\t}\n\t\t}\n\t}\n};\n\n\n\nexport const cmp = (a:number|string, b:number|string) => {\n\tif (typeof a === 'number' && typeof b === 'number') {\n\t\treturn a > b ? 1 : (a < b ? -1 : 0);\n\t}\n\ta = asciifold(a + '').toLowerCase();\n\tb = asciifold(b + '').toLowerCase();\n\tif (a > b) return 1;\n\tif (b > a) return -1;\n\treturn 0;\n};\n"],"names":["getAttr","obj","name","getAttrNesting","part","names","split","shift","scoreValue","value","token","weight","score","pos","regex","search","string","length","propToArray","key","Array","isArray","iterate","object","callback","forEach","hasOwnProperty","cmp","a","b","asciifold","toLowerCase"],"mappings":";;;AAKA;AACA;AACA;AACA;AACA;AACA;MACaA,OAAO,GAAG,CAACC,GAAD,EAAyBC,IAAzB,KAA0C;AAC7D,MAAI,CAACD,GAAL,EAAW;AACX,SAAOA,GAAG,CAACC,IAAD,CAAV;AACH;AAED;AACA;AACA;AACA;AACA;AACA;;MACaC,cAAc,GAAG,CAACF,GAAD,EAAyBC,IAAzB,KAA0C;AACpE,MAAI,CAACD,GAAL,EAAW;AACX,MAAIG,IAAJ;AAAA,MAAUC,KAAK,GAAGH,IAAI,CAACI,KAAL,CAAW,GAAX,CAAlB;;AACH,SAAO,CAACF,IAAI,GAAGC,KAAK,CAACE,KAAN,EAAR,MAA2BN,GAAG,GAAGA,GAAG,CAACG,IAAD,CAApC,CAAP,CAAmD;;AAChD,SAAOH,GAAP;AACH;AAED;AACA;AACA;AACA;AACA;;MACaO,UAAU,GAAG,CAACC,KAAD,EAAeC,KAAf,EAA8BC,MAA9B,KAAwD;AACjF,MAAIC,KAAJ,EAAWC,GAAX;AAEA,MAAI,CAACJ,KAAL,EAAY,OAAO,CAAP;AAEZA,EAAAA,KAAK,GAAGA,KAAK,GAAG,EAAhB;AACA,MAAIC,KAAK,CAACI,KAAN,IAAe,IAAnB,EAA0B,OAAO,CAAP;AAC1BD,EAAAA,GAAG,GAAGJ,KAAK,CAACM,MAAN,CAAaL,KAAK,CAACI,KAAnB,CAAN;AACA,MAAID,GAAG,KAAK,CAAC,CAAb,EAAgB,OAAO,CAAP;AAEhBD,EAAAA,KAAK,GAAGF,KAAK,CAACM,MAAN,CAAaC,MAAb,GAAsBR,KAAK,CAACQ,MAApC;AACA,MAAIJ,GAAG,KAAK,CAAZ,EAAeD,KAAK,IAAI,GAAT;AAEf,SAAOA,KAAK,GAAGD,MAAf;AACA;AAGD;AACA;AACA;AACA;;MACaO,WAAW,GAAG,CAACjB,GAAD,EAAyBkB,GAAzB,KAAwC;AAClE,MAAIV,KAAK,GAAGR,GAAG,CAACkB,GAAD,CAAf;AAEA,MAAI,OAAOV,KAAP,IAAgB,UAApB,EAAiC,OAAOA,KAAP;;AAEjC,MAAIA,KAAK,IAAI,CAACW,KAAK
,CAACC,OAAN,CAAcZ,KAAd,CAAd,EAAoC;AACnCR,IAAAA,GAAG,CAACkB,GAAD,CAAH,GAAW,CAACV,KAAD,CAAX;AACA;AACD;AAGD;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;MACaa,OAAO,GAAG,CAACC,MAAD,EAA+BC,QAA/B,KAAqE;AAE3F,MAAKJ,KAAK,CAACC,OAAN,CAAcE,MAAd,CAAL,EAA4B;AAC3BA,IAAAA,MAAM,CAACE,OAAP,CAAeD,QAAf;AAEA,GAHD,MAGK;AAEJ,SAAK,IAAIL,GAAT,IAAgBI,MAAhB,EAAwB;AACvB,UAAIA,MAAM,CAACG,cAAP,CAAsBP,GAAtB,CAAJ,EAAgC;AAC/BK,QAAAA,QAAQ,CAACD,MAAM,CAACJ,GAAD,CAAP,EAAcA,GAAd,CAAR;AACA;AACD;AACD;AACD;MAIYQ,GAAG,GAAG,CAACC,CAAD,EAAkBC,CAAlB,KAAsC;AACxD,MAAI,OAAOD,CAAP,KAAa,QAAb,IAAyB,OAAOC,CAAP,KAAa,QAA1C,EAAoD;AACnD,WAAOD,CAAC,GAAGC,CAAJ,GAAQ,CAAR,GAAaD,CAAC,GAAGC,CAAJ,GAAQ,CAAC,CAAT,GAAa,CAAjC;AACA;;AACDD,EAAAA,CAAC,GAAGE,SAAS,CAACF,CAAC,GAAG,EAAL,CAAT,CAAkBG,WAAlB,EAAJ;AACAF,EAAAA,CAAC,GAAGC,SAAS,CAACD,CAAC,GAAG,EAAL,CAAT,CAAkBE,WAAlB,EAAJ;AACA,MAAIH,CAAC,GAAGC,CAAR,EAAW,OAAO,CAAP;AACX,MAAIA,CAAC,GAAGD,CAAR,EAAW,OAAO,CAAC,CAAR;AACX,SAAO,CAAP;AACA;;;;"}
|
||||
547
node_modules/@orchidjs/sifter/dist/esm/node_modules/@orchidjs/unicode-variants/dist/esm/index.js
generated
vendored
Normal file
547
node_modules/@orchidjs/sifter/dist/esm/node_modules/@orchidjs/unicode-variants/dist/esm/index.js
generated
vendored
Normal file
@@ -0,0 +1,547 @@
|
||||
/*! sifter.js | https://github.com/orchidjs/sifter.js | Apache License (v2) */
|
||||
import { toArray, arrayToPattern, sequencePattern, setToPattern, escape_regex } from './regex.js';
|
||||
export { escape_regex } from './regex.js';
|
||||
import { allSubstrings } from './strings.js';
|
||||
|
||||
/*! @orchidjs/unicode-variants | https://github.com/orchidjs/unicode-variants | Apache License (v2) */
|
||||
/**
|
||||
* @typedef {{[key:string]:string}} TUnicodeMap
|
||||
* @typedef {{[key:string]:Set<string>}} TUnicodeSets
|
||||
* @typedef {[[number,number]]} TCodePoints
|
||||
* @typedef {{folded:string,composed:string,code_point:number}} TCodePointObj
|
||||
* @typedef {{start:number,end:number,length:number,substr:string}} TSequencePart
|
||||
*/
|
||||
|
||||
/** @type {TCodePoints} */
|
||||
|
||||
const code_points = [[0, 65535]];
|
||||
const accent_pat = '[\u0300-\u036F\u{b7}\u{2be}\u{2bc}]';
|
||||
/** @type {TUnicodeMap} */
|
||||
|
||||
let unicode_map;
|
||||
/** @type {RegExp} */
|
||||
|
||||
let multi_char_reg;
|
||||
const max_char_length = 3;
|
||||
/** @type {TUnicodeMap} */
|
||||
|
||||
const latin_convert = {};
|
||||
/** @type {TUnicodeMap} */
|
||||
|
||||
const latin_condensed = {
|
||||
'/': '⁄∕',
|
||||
'0': '߀',
|
||||
"a": "ⱥɐɑ",
|
||||
"aa": "ꜳ",
|
||||
"ae": "æǽǣ",
|
||||
"ao": "ꜵ",
|
||||
"au": "ꜷ",
|
||||
"av": "ꜹꜻ",
|
||||
"ay": "ꜽ",
|
||||
"b": "ƀɓƃ",
|
||||
"c": "ꜿƈȼↄ",
|
||||
"d": "đɗɖᴅƌꮷԁɦ",
|
||||
"e": "ɛǝᴇɇ",
|
||||
"f": "ꝼƒ",
|
||||
"g": "ǥɠꞡᵹꝿɢ",
|
||||
"h": "ħⱨⱶɥ",
|
||||
"i": "ɨı",
|
||||
"j": "ɉȷ",
|
||||
"k": "ƙⱪꝁꝃꝅꞣ",
|
||||
"l": "łƚɫⱡꝉꝇꞁɭ",
|
||||
"m": "ɱɯϻ",
|
||||
"n": "ꞥƞɲꞑᴎлԉ",
|
||||
"o": "øǿɔɵꝋꝍᴑ",
|
||||
"oe": "œ",
|
||||
"oi": "ƣ",
|
||||
"oo": "ꝏ",
|
||||
"ou": "ȣ",
|
||||
"p": "ƥᵽꝑꝓꝕρ",
|
||||
"q": "ꝗꝙɋ",
|
||||
"r": "ɍɽꝛꞧꞃ",
|
||||
"s": "ßȿꞩꞅʂ",
|
||||
"t": "ŧƭʈⱦꞇ",
|
||||
"th": "þ",
|
||||
"tz": "ꜩ",
|
||||
"u": "ʉ",
|
||||
"v": "ʋꝟʌ",
|
||||
"vy": "ꝡ",
|
||||
"w": "ⱳ",
|
||||
"y": "ƴɏỿ",
|
||||
"z": "ƶȥɀⱬꝣ",
|
||||
"hv": "ƕ"
|
||||
};
|
||||
|
||||
for (let latin in latin_condensed) {
|
||||
let unicode = latin_condensed[latin] || '';
|
||||
|
||||
for (let i = 0; i < unicode.length; i++) {
|
||||
let char = unicode.substring(i, i + 1);
|
||||
latin_convert[char] = latin;
|
||||
}
|
||||
}
|
||||
|
||||
const convert_pat = new RegExp(Object.keys(latin_convert).join('|') + '|' + accent_pat, 'gu');
|
||||
/**
|
||||
* Initialize the unicode_map from the give code point ranges
|
||||
*
|
||||
* @param {TCodePoints=} _code_points
|
||||
*/
|
||||
|
||||
const initialize = _code_points => {
|
||||
if (unicode_map !== undefined) return;
|
||||
unicode_map = generateMap(_code_points || code_points);
|
||||
};
|
||||
/**
|
||||
* Helper method for normalize a string
|
||||
* https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/normalize
|
||||
* @param {string} str
|
||||
* @param {string} form
|
||||
*/
|
||||
|
||||
|
||||
const normalize = (str, form = 'NFKD') => str.normalize(form);
|
||||
/**
|
||||
* Remove accents without reordering string
|
||||
* calling str.normalize('NFKD') on \u{594}\u{595}\u{596} becomes \u{596}\u{594}\u{595}
|
||||
* via https://github.com/krisk/Fuse/issues/133#issuecomment-318692703
|
||||
* @param {string} str
|
||||
* @return {string}
|
||||
*/
|
||||
|
||||
|
||||
const asciifold = str => {
|
||||
return toArray(str).reduce(
|
||||
/**
|
||||
* @param {string} result
|
||||
* @param {string} char
|
||||
*/
|
||||
(result, char) => {
|
||||
return result + _asciifold(char);
|
||||
}, '');
|
||||
};
|
||||
/**
 * Fold a single character (or short string): decompose, lower-case,
 * strip accents / apply latin condensations, then recompose.
 * @param {string} str
 * @return {string}
 */
const _asciifold = str => {
  const lowered = normalize(str).toLowerCase();
  const converted = lowered.replace(convert_pat, char => latin_convert[char] || '');
  return normalize(converted, 'NFC');
};
|
||||
/**
 * Generate a list of unicode variants from the list of code points
 * @param {TCodePoints} code_points
 * @yield {TCodePointObj}
 */
function* generator(code_points) {
  for (const [code_point_min, code_point_max] of code_points) {
    for (let code_point = code_point_min; code_point <= code_point_max; code_point++) {
      const composed = String.fromCharCode(code_point);
      const folded = asciifold(composed);

      // no value in a variant whose folding is just the lower-cased character
      if (folded == composed.toLowerCase()) {
        continue;
      }

      // skip when folded is longer than max_char_length
      // bc the resulting regex patterns will be long
      // eg:
      // folded صلى الله عليه وسلم length 18 code point 65018
      // folded جل جلاله length 8 code point 65019
      if (folded.length > max_char_length) {
        continue;
      }

      // characters that fold away entirely are useless as variants
      if (folded.length == 0) {
        continue;
      }

      yield {
        folded,
        composed,
        code_point
      };
    }
  }
}
|
||||
/**
 * Generate a map of folded string => set of escaped unicode variants
 * from the list of code points
 * @param {TCodePoints} code_points
 * @return {TUnicodeSets}
 */
const generateSets = code_points => {
  /** @type {{[key:string]:Set<string>}} */
  const unicode_sets = {};

  /**
   * Add to_add to the variant set for folded, unless the set's current
   * pattern already matches it (case/unicode-insensitive).
   * @param {string} folded
   * @param {string} to_add
   */
  const addMatching = (folded, to_add) => {
    /** @type {Set<string>} */
    const folded_set = unicode_sets[folded] || new Set();
    const patt = new RegExp('^' + setToPattern(folded_set) + '$', 'iu');

    if (!to_add.match(patt)) {
      folded_set.add(escape_regex(to_add));
      unicode_sets[folded] = folded_set;
    }
  };

  for (const value of generator(code_points)) {
    addMatching(value.folded, value.folded);
    addMatching(value.folded, value.composed);
  }

  return unicode_sets;
};
|
||||
/**
 * Generate a unicode map from the list of code points
 * ae => (?:(?:ae|Æ|Ǽ|Ǣ)|(?:A|Ⓐ|A...)(?:E|ɛ|Ⓔ...))
 *
 * Also rebuilds the module-level multi_char_reg used by getPattern to
 * detect multi-character foldings at the start of a substring.
 * @param {TCodePoints} code_points
 * @return {TUnicodeMap}
 */
const generateMap = code_points => {
  /** @type {TUnicodeSets} */
  const unicode_sets = generateSets(code_points);
  /** @type {TUnicodeMap} */
  const unicode_map = {};
  /** @type {string[]} */
  const multi_char = [];

  for (const folded in unicode_sets) {
    const set = unicode_sets[folded];

    if (set) {
      unicode_map[folded] = setToPattern(set);
    }

    // multi-character foldings (eg "ae") need their own lookahead regex
    if (folded.length > 1) {
      multi_char.push(escape_regex(folded));
    }
  }

  // longest alternatives first so the regex prefers the longest match
  multi_char.sort((a, b) => b.length - a.length);
  const multi_char_patt = arrayToPattern(multi_char);
  multi_char_reg = new RegExp('^' + multi_char_patt, 'u');
  return unicode_map;
};
|
||||
/**
 * Map each element of an array from its folded value to all possible
 * unicode matches; returns '' unless at least min_replacement characters
 * were replaced.
 * @param {string[]} strings
 * @param {number} min_replacement
 * @return {string}
 */
const mapSequence = (strings, min_replacement = 1) => {
  let chars_replaced = 0;

  const patterns = strings.map(str => {
    const mapped = unicode_map[str];

    if (mapped) {
      chars_replaced += str.length;
      return mapped;
    }

    return str;
  });

  return chars_replaced >= min_replacement ? sequencePattern(patterns) : '';
};
|
||||
/**
 * Convert a short string and split it into all possible patterns
 * Keep a pattern only if min_replacement is met
 *
 * 'abc'
 *		=> [['abc'],['ab','c'],['a','bc'],['a','b','c']]
 *		=> ['abc-pattern','ab-c-pattern'...]
 *
 * @param {string} str
 * @param {number} min_replacement
 * @return {string}
 */
const substringsToPattern = (str, min_replacement = 1) => {
  // require nearly every character to be replaced, otherwise the pattern
  // adds nothing over a literal match
  min_replacement = Math.max(min_replacement, str.length - 1);
  const patterns = allSubstrings(str).map(sub_pat => mapSequence(sub_pat, min_replacement));
  return arrayToPattern(patterns);
};
|
||||
/**
 * Convert an array of sequences into a pattern
 * [{start:0,end:3,length:3,substr:'iii'}...] => (?:iii...)
 *
 * @param {Sequence[]} sequences
 * @param {boolean} all - when false, the final (possibly incomplete) part
 *                        of each sequence is left out
 */
const sequencesToPattern = (sequences, all = true) => {
  const min_replacement = sequences.length > 1 ? 1 : 0;

  const patterns = sequences.map(sequence => {
    const pieces = [];
    const len = all ? sequence.length() : sequence.length() - 1;

    for (let j = 0; j < len; j++) {
      pieces.push(substringsToPattern(sequence.substrs[j] || '', min_replacement));
    }

    return sequencePattern(pieces);
  });

  return arrayToPattern(patterns);
};
|
||||
/**
 * Return true if the sequence is already in the sequences
 * (i.e. an existing sequence covers the same span, folds to the same
 * string, and every part of it either duplicates or overlaps a part of
 * the needle).
 * @param {Sequence} needle_seq
 * @param {Sequence[]} sequences
 */
const inSequences = (needle_seq, sequences) => {
  for (const seq of sequences) {
    // candidate must cover exactly the same character span ...
    if (seq.start != needle_seq.start || seq.end != needle_seq.end) {
      continue;
    }

    // ... and fold to exactly the same combined string
    if (seq.substrs.join('') !== needle_seq.substrs.join('')) {
      continue;
    }

    let needle_parts = needle_seq.parts;
    /**
     * Keep parts of seq that are neither an exact duplicate of a needle
     * part nor overlapping one; any survivor means seq is NOT equivalent.
     * @param {TSequencePart} part
     */

    const filter = part => {
      for (const needle_part of needle_parts) {
        // exact duplicate part => not a distinguishing part
        if (needle_part.start === part.start && needle_part.substr === part.substr) {
          return false;
        }

        // single characters cannot partially overlap anything
        if (part.length == 1 || needle_part.length == 1) {
          continue;
        } // check for overlapping parts
        // a = ['::=','==']
        // b = ['::','===']
        // a = ['r','sm']
        // b = ['rs','m']


        if (part.start < needle_part.start && part.end > needle_part.start) {
          return true;
        }

        if (needle_part.start < part.start && needle_part.end > part.start) {
          return true;
        }
      }

      return false;
    };

    let filtered = seq.parts.filter(filter);

    // some part of seq is genuinely different from the needle's parts
    if (filtered.length > 0) {
      continue;
    }

    return true;
  }

  return false;
};
|
||||
|
||||
/**
 * An ordered run of parts, each covering [start, end) of the source
 * string, together with the overall covered range.
 */
class Sequence {
  constructor() {
    /** @type {TSequencePart[]} */
    this.parts = [];
    /** @type {string[]} */
    this.substrs = [];
    this.start = 0;
    this.end = 0;
  }

  /**
   * Append a part (ignored when undefined) and grow the covered range.
   * @param {TSequencePart|undefined} part
   */
  add(part) {
    if (!part) {
      return;
    }

    this.parts.push(part);
    this.substrs.push(part.substr);
    this.start = Math.min(part.start, this.start);
    this.end = Math.max(part.end, this.end);
  }

  /** The most recently added part (undefined when empty). */
  last() {
    return this.parts[this.parts.length - 1];
  }

  /** Number of parts in this sequence. */
  length() {
    return this.parts.length;
  }

  /**
   * Copy this sequence, truncating its final part at the given position.
   * @param {number} position - absolute index where the last part is cut
   * @param {TSequencePart} last_piece - the part being truncated
   */
  clone(position, last_piece) {
    const copy = new Sequence();
    // deep-copy parts so the clone's parts can be mutated independently
    const parts = JSON.parse(JSON.stringify(this.parts));
    const last_part = parts.pop();

    for (const part of parts) {
      copy.add(part);
    }

    const last_substr = last_piece.substr.substring(0, position - last_part.start);
    const clone_last_len = last_substr.length;
    copy.add({
      start: last_part.start,
      end: last_part.start + clone_last_len,
      length: clone_last_len,
      substr: last_substr
    });
    return copy;
  }

}
|
||||
/**
 * Expand a regular expression pattern to include unicode variants
 * eg /a/ becomes /aⓐaẚàáâầấẫẩãāăằắẵẳȧǡäǟảåǻǎȁȃạậặḁąⱥɐɑAⒶAÀÁÂẦẤẪẨÃĀĂẰẮẴẲȦǠÄǞẢÅǺǍȀȂẠẬẶḀĄȺⱯ/
 *
 * Issue:
 *  ﺊﺋ [ 'ﺊ = \\u{fe8a}', 'ﺋ = \\u{fe8b}' ]
 *	becomes:	ئئ [ 'ي = \\u{64a}', 'ٔ = \\u{654}', 'ي = \\u{64a}', 'ٔ = \\u{654}' ]
 *
 * İIJ = IIJ = ⅡJ
 *
 * 1/2/4
 *
 * Works by maintaining a set of candidate Sequence partitions of the
 * folded string; multi-character foldings spawn overlapping candidates.
 * @param {string} str
 * @return {string|undefined}
 */
const getPattern = str => {
  initialize();
  str = asciifold(str);
  let pattern = '';
  let sequences = [new Sequence()];

  for (let i = 0; i < str.length; i++) {
    // does a multi-character folding (eg "ae") start at position i?
    let substr = str.substring(i);
    let match = substr.match(multi_char_reg);
    const char = str.substring(i, i + 1);
    const match_str = match ? match[0] : null; // loop through sequences
    // add either the char or multi_match

    let overlapping = [];
    let added_types = new Set();

    for (const sequence of sequences) {
      const last_piece = sequence.last();

      // sequence is "open" at i when empty, its last part is a single
      // char, or its last part ends at/before i
      if (!last_piece || last_piece.length == 1 || last_piece.end <= i) {
        // if we have a multi match
        if (match_str) {
          const len = match_str.length;
          sequence.add({
            start: i,
            end: i + len,
            length: len,
            substr: match_str
          });
          added_types.add('1');
        } else {
          sequence.add({
            start: i,
            end: i + 1,
            length: 1,
            substr: char
          });
          added_types.add('2');
        }
      } else if (match_str) {
        // a multi match starts inside the sequence's last part:
        // fork a truncated clone that takes the multi match instead
        let clone = sequence.clone(i, last_piece);
        const len = match_str.length;
        clone.add({
          start: i,
          end: i + len,
          length: len,
          substr: match_str
        });
        overlapping.push(clone);
      } else {
        // don't add char
        // adding would create invalid patterns: 234 => [2,34,4]
        added_types.add('3');
      }
    } // if we have overlapping


    if (overlapping.length > 0) {
      // ['ii','iii'] before ['i','i','iii']
      overlapping = overlapping.sort((a, b) => {
        return a.length() - b.length();
      });

      for (let clone of overlapping) {
        // don't add if we already have an equivalent sequence
        if (inSequences(clone, sequences)) {
          continue;
        }

        sequences.push(clone);
      }

      continue;
    } // if we haven't done anything unique
    // clean up the patterns
    // helps keep patterns smaller
    // if str = 'r₨㎧aarss', pattern will be 446 instead of 655


    if (i > 0 && added_types.size == 1 && !added_types.has('3')) {
      // flush completed parts and restart from the single trailing part
      pattern += sequencesToPattern(sequences, false);
      let new_seq = new Sequence();
      const old_seq = sequences[0];

      if (old_seq) {
        new_seq.add(old_seq.last());
      }

      sequences = [new_seq];
    }
  }

  pattern += sequencesToPattern(sequences, true);
  return pattern;
};
|
||||
|
||||
export { _asciifold, asciifold, code_points, generateMap, generateSets, generator, getPattern, initialize, mapSequence, normalize, substringsToPattern, unicode_map };
|
||||
//# sourceMappingURL=index.js.map
|
||||
1
node_modules/@orchidjs/sifter/dist/esm/node_modules/@orchidjs/unicode-variants/dist/esm/index.js.map
generated
vendored
Normal file
1
node_modules/@orchidjs/sifter/dist/esm/node_modules/@orchidjs/unicode-variants/dist/esm/index.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
113
node_modules/@orchidjs/sifter/dist/esm/node_modules/@orchidjs/unicode-variants/dist/esm/regex.js
generated
vendored
Normal file
113
node_modules/@orchidjs/sifter/dist/esm/node_modules/@orchidjs/unicode-variants/dist/esm/regex.js
generated
vendored
Normal file
@@ -0,0 +1,113 @@
|
||||
/*! sifter.js | https://github.com/orchidjs/sifter.js | Apache License (v2) */
|
||||
/*! @orchidjs/unicode-variants | https://github.com/orchidjs/unicode-variants | Apache License (v2) */
|
||||
|
||||
/**
 * Convert array of strings to a regular expression
 *	ex ['ab','a'] => (?:ab|a)
 *	ex ['a','b'] => [ab]
 * Empty/falsy entries are dropped first.
 * @param {string[]} chars
 * @return {string}
 */
const arrayToPattern = chars => {
  chars = chars.filter(Boolean);

  if (chars.length === 0) {
    return '';
  }

  if (chars.length === 1) {
    return chars[0];
  }

  // all single characters => character class, otherwise an alternation
  if (maxValueLength(chars) == 1) {
    return '[' + chars.join('') + ']';
  }

  return '(?:' + chars.join('|') + ')';
};
|
||||
/**
 * Join pattern pieces, collapsing consecutive duplicates into a
 * {n} repetition quantifier (eg ['a','a','b'] => 'a{2}b').
 * @param {string[]} array
 * @return {string}
 */
const sequencePattern = array => {
  // fast path: nothing to collapse
  if (!hasDuplicates(array)) {
    return array.join('');
  }

  let pattern = '';
  let run_length = 0;

  // emit a quantifier for the run that just ended (runs of 1 need none)
  const flush_run = () => {
    if (run_length > 1) {
      pattern += '{' + run_length + '}';
    }
  };

  for (let index = 0; index < array.length; index++) {
    const piece = array[index];

    if (piece === array[index - 1]) {
      run_length++;
      continue;
    }

    flush_run();
    pattern += piece;
    run_length = 1;
  }

  flush_run();
  return pattern;
};
|
||||
/**
 * Convert a set of strings to a regular expression
 *	ex ['ab','a'] => (?:ab|a)
 *	ex ['a','b'] => [ab]
 * @param {Set<string>} chars
 * @return {string}
 */
const setToPattern = chars => arrayToPattern(toArray(chars));
|
||||
/**
 * Return true when the array contains at least one repeated value.
 * https://stackoverflow.com/questions/7376598/in-javascript-how-do-i-check-if-an-array-has-duplicate-values
 * @param {any[]} array
 */
const hasDuplicates = array => {
  const seen = new Set();

  for (const item of array) {
    if (seen.has(item)) {
      return true;
    }

    seen.add(item);
  }

  return false;
};
|
||||
/**
 * Backslash-escape regular-expression metacharacters so the string can
 * be embedded in a pattern literally.
 * https://stackoverflow.com/questions/63006601/why-does-u-throw-an-invalid-escape-error
 * @param {string} str
 * @return {string}
 */
const escape_regex = str => (str + '').replace(/([\$\(-\+\.\?\[-\^\{-\}])/g, '\\$1');
|
||||
/**
 * Return the max unicode length (in code points) of the array values.
 * @param {string[]} array
 * @return {number}
 */
const maxValueLength = array => {
  let longest = 0;

  for (const value of array) {
    longest = Math.max(longest, unicodeLength(value));
  }

  return longest;
};
|
||||
/**
 * Length of a string in unicode code points (str.length would count
 * UTF-16 units, splitting astral characters in two).
 * @param {string} str
 * @return {number}
 */
const unicodeLength = str => toArray(str).length;
|
||||
/**
 * Materialize any iterable or array-like into a real array
 * (for strings this yields one entry per code point).
 * @param {any} iterable
 * @return {any[]}
 */
const toArray = iterable => Array.from(iterable);
|
||||
|
||||
export { arrayToPattern, escape_regex, hasDuplicates, maxValueLength, sequencePattern, setToPattern, toArray, unicodeLength };
|
||||
//# sourceMappingURL=regex.js.map
|
||||
1
node_modules/@orchidjs/sifter/dist/esm/node_modules/@orchidjs/unicode-variants/dist/esm/regex.js.map
generated
vendored
Normal file
1
node_modules/@orchidjs/sifter/dist/esm/node_modules/@orchidjs/unicode-variants/dist/esm/regex.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"regex.js","sources":["../../../../../../../node_modules/@orchidjs/unicode-variants/dist/esm/regex.js"],"sourcesContent":["/*! @orchidjs/unicode-variants | https://github.com/orchidjs/unicode-variants | Apache License (v2) */\n/**\n * Convert array of strings to a regular expression\n *\tex ['ab','a'] => (?:ab|a)\n * \tex ['a','b'] => [ab]\n * @param {string[]} chars\n * @return {string}\n */\nconst arrayToPattern = chars => {\n chars = chars.filter(Boolean);\n\n if (chars.length < 2) {\n return chars[0] || '';\n }\n\n return maxValueLength(chars) == 1 ? '[' + chars.join('') + ']' : '(?:' + chars.join('|') + ')';\n};\n/**\n * @param {string[]} array\n * @return {string}\n */\n\nconst sequencePattern = array => {\n if (!hasDuplicates(array)) {\n return array.join('');\n }\n\n let pattern = '';\n let prev_char_count = 0;\n\n const prev_pattern = () => {\n if (prev_char_count > 1) {\n pattern += '{' + prev_char_count + '}';\n }\n };\n\n array.forEach((char, i) => {\n if (char === array[i - 1]) {\n prev_char_count++;\n return;\n }\n\n prev_pattern();\n pattern += char;\n prev_char_count = 1;\n });\n prev_pattern();\n return pattern;\n};\n/**\n * Convert array of strings to a regular expression\n *\tex ['ab','a'] => (?:ab|a)\n * \tex ['a','b'] => [ab]\n * @param {Set<string>} chars\n * @return {string}\n */\n\nconst setToPattern = chars => {\n let array = toArray(chars);\n return arrayToPattern(array);\n};\n/**\n *\n * https://stackoverflow.com/questions/7376598/in-javascript-how-do-i-check-if-an-array-has-duplicate-values\n * @param {any[]} array\n */\n\nconst hasDuplicates = array => {\n return new Set(array).size !== array.length;\n};\n/**\n * https://stackoverflow.com/questions/63006601/why-does-u-throw-an-invalid-escape-error\n * @param {string} str\n * @return {string}\n */\n\nconst escape_regex = str => {\n return (str + '').replace(/([\\$\\(\\)\\*\\+\\.\\?\\[\\]\\^\\{\\|\\}\\\\])/gu, '\\\\$1');\n};\n/**\n * Return the max length of array 
values\n * @param {string[]} array\n *\n */\n\nconst maxValueLength = array => {\n return array.reduce((longest, value) => Math.max(longest, unicodeLength(value)), 0);\n};\n/**\n * @param {string} str\n */\n\nconst unicodeLength = str => {\n return toArray(str).length;\n};\n/**\n * @param {any} p\n * @return {any[]}\n */\n\nconst toArray = p => Array.from(p);\n\nexport { arrayToPattern, escape_regex, hasDuplicates, maxValueLength, sequencePattern, setToPattern, toArray, unicodeLength };\n//# sourceMappingURL=regex.js.map\n"],"names":["arrayToPattern","chars","maxValueLength","sequencePattern","array","hasDuplicates","pattern","prev_char_count","prev_pattern","char","i","setToPattern","toArray","escape_regex","str","Math","unicodeLength","p","Array"],"mappings":";;;AACA;AACA;AACA;AACA;AACA;AACA;AACA;MACaA,cAAc,GAAIC,KAAD,IAAU;AAEvCA,EAAAA,KAAK,GAAGA,KAAK,CAALA,MAAAA,CAARA,OAAQA,CAARA;;AAEA,MAAIA,KAAK,CAALA,MAAAA,GAAJ,CAAA,EAAsB;AACrB,WAAOA,KAAK,CAALA,CAAK,CAALA,IAAP,EAAA;AACA;;AAED,SAAQC,cAAc,CAAdA,KAAc,CAAdA,IAAD,CAACA,GAA8B,MAAID,KAAK,CAALA,IAAAA,CAAJ,EAAIA,CAAJ,GAA/B,GAACC,GAAuD,QAAMD,KAAK,CAALA,IAAAA,CAAN,GAAMA,CAAN,GAA/D,GAAA;AARM;AAWP;AACA;AACA;AACA;;;MACaE,eAAe,GAAIC,KAAD,IAAS;AAEvC,MAAI,CAACC,aAAa,CAAlB,KAAkB,CAAlB,EAA2B;AAC1B,WAAOD,KAAK,CAALA,IAAAA,CAAP,EAAOA,CAAP;AACA;;AAED,MAAIE,OAAO,GAAX,EAAA;AACA,MAAIC,eAAe,GAAnB,CAAA;;AAEA,QAAMC,YAAY,GAAG,MAAI;AACxB,QAAID,eAAe,GAAnB,CAAA,EAAyB;AACxBD,MAAAA,OAAO,IAAI,MAAA,eAAA,GAAXA,GAAAA;AACA;AAHF,GAAA;;AAMAF,EAAAA,KAAK,CAALA,OAAAA,CAAc,CAAA,IAAA,EAAA,CAAA,KAAU;AAEvB,QAAIK,IAAI,KAAKL,KAAK,CAACM,CAAC,GAApB,CAAkB,CAAlB,EAAyB;AACxBH,MAAAA,eAAe;AACf;AACA;;AAEDC,IAAAA,YAAY;AAEZF,IAAAA,OAAO,IAAPA,IAAAA;AACAC,IAAAA,eAAe,GAAfA,CAAAA;AAVDH,GAAAA;AAaAI,EAAAA,YAAY;AAEZ,SAAA,OAAA;AA9BM;AAoCP;AACA;AACA;AACA;AACA;AACA;AACA;;;MACaG,YAAY,GAAIV,KAAD,IAAS;AACpC,MAAIG,KAAK,GAAGQ,OAAO,CAAnB,KAAmB,CAAnB;AACA,SAAOZ,cAAc,CAArB,KAAqB,CAArB;AAFM;AAOP;AACA;AACA;AACA;AACA;;;MACaK,aAAa,GAAID,KAAD,IAAW;AACpC,SAAQ,IAAA,GAAA,CAAD,KAAC,EAAD,IAAC,KAAyB
A,KAAK,CAAtC,MAAA;AADG;AAKP;AACA;AACA;AACA;AACA;;;MACaS,YAAY,GAAIC,GAAD,IAAS;AACpC,SAAO,CAACA,GAAG,GAAJ,EAAA,EAAA,OAAA,CAAA,4BAAA,EAAP,MAAO,CAAP;AADM;AAIP;AACA;AACA;AACA;AACA;;;MACaZ,cAAc,GAAIE,KAAD,IAAW;AACxC,SAAOA,KAAK,CAALA,MAAAA,CAAc,CAAA,OAAA,EAAA,KAAA,KAAoBW,IAAI,CAAJA,GAAAA,CAAAA,OAAAA,EAAiBC,aAAa,CAAhEZ,KAAgE,CAA9BW,CAAlCX,EAAP,CAAOA,CAAP;AADM;AAKP;AACA;AACA;;;MACaY,aAAa,GAAIF,GAAD,IAAS;AACrC,SAAOF,OAAO,CAAPA,GAAO,CAAPA,CAAP,MAAA;AADM;AAIP;AACA;AACA;AACA;;;MACaA,OAAO,GAAIK,CAAD,IAAOC,KAAK,CAALA,IAAAA,CAAAA,CAAAA;;;;"}
|
||||
29
node_modules/@orchidjs/sifter/dist/esm/node_modules/@orchidjs/unicode-variants/dist/esm/strings.js
generated
vendored
Normal file
29
node_modules/@orchidjs/sifter/dist/esm/node_modules/@orchidjs/unicode-variants/dist/esm/strings.js
generated
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
/*! sifter.js | https://github.com/orchidjs/sifter.js | Apache License (v2) */
|
||||
/*! @orchidjs/unicode-variants | https://github.com/orchidjs/unicode-variants | Apache License (v2) */
|
||||
|
||||
/**
 * Get all possible combinations of substrings that add up to the given string
 * https://stackoverflow.com/questions/30169587/find-all-the-combination-of-substrings-that-add-up-to-the-given-string
 * @param {string} input
 * @return {string[][]}
 */
const allSubstrings = input => {
  if (input.length === 1) return [[input]];

  const head = input.charAt(0);
  /** @type {string[][]} */
  const result = [];

  for (const sub of allSubstrings(input.substring(1))) {
    // merge the leading character into the first piece ...
    result.push([head + sub[0], ...sub.slice(1)]);
    // ... or keep it as its own piece
    result.push([head, ...sub]);
  }

  return result;
};
|
||||
|
||||
export { allSubstrings };
|
||||
//# sourceMappingURL=strings.js.map
|
||||
1
node_modules/@orchidjs/sifter/dist/esm/node_modules/@orchidjs/unicode-variants/dist/esm/strings.js.map
generated
vendored
Normal file
1
node_modules/@orchidjs/sifter/dist/esm/node_modules/@orchidjs/unicode-variants/dist/esm/strings.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"strings.js","sources":["../../../../../../../node_modules/@orchidjs/unicode-variants/dist/esm/strings.js"],"sourcesContent":["/*! @orchidjs/unicode-variants | https://github.com/orchidjs/unicode-variants | Apache License (v2) */\n/**\n * Get all possible combinations of substrings that add up to the given string\n * https://stackoverflow.com/questions/30169587/find-all-the-combination-of-substrings-that-add-up-to-the-given-string\n * @param {string} input\n * @return {string[][]}\n */\nconst allSubstrings = input => {\n if (input.length === 1) return [[input]];\n /** @type {string[][]} */\n\n let result = [];\n const start = input.substring(1);\n const suba = allSubstrings(start);\n suba.forEach(function (subresult) {\n let tmp = subresult.slice(0);\n tmp[0] = input.charAt(0) + tmp[0];\n result.push(tmp);\n tmp = subresult.slice(0);\n tmp.unshift(input.charAt(0));\n result.push(tmp);\n });\n return result;\n};\n\nexport { allSubstrings };\n//# sourceMappingURL=strings.js.map\n"],"names":["allSubstrings","input","result","start","suba","tmp","subresult"],"mappings":";;;AAGA;AACA;AACA;AACA;AACA;AACA;MACaA,aAAa,GAAIC,KAAD,IAAW;AAEpC,MAAIA,KAAK,CAALA,MAAAA,KAAJ,CAAA,EAAwB,OAAO,CAAC,CAAR,KAAQ,CAAD,CAAP;AAE3B;;AACG,MAAIC,MAAM,GAAV,EAAA;AAEH,QAAMC,KAAK,GAAGF,KAAK,CAALA,SAAAA,CAAd,CAAcA,CAAd;AACG,QAAMG,IAAI,GAAGJ,aAAa,CAA1B,KAA0B,CAA1B;AAEHI,EAAAA,IAAI,CAAJA,OAAAA,CAAa,UAAA,SAAA,EAAoB;AAC1B,QAAIC,GAAG,GAAGC,SAAS,CAATA,KAAAA,CAAV,CAAUA,CAAV;AACAD,IAAAA,GAAG,CAAHA,CAAG,CAAHA,GAASJ,KAAK,CAALA,MAAAA,CAAAA,CAAAA,IAAkBI,GAAG,CAA9BA,CAA8B,CAA9BA;AACAH,IAAAA,MAAM,CAANA,IAAAA,CAAAA,GAAAA;AAEAG,IAAAA,GAAG,GAAGC,SAAS,CAATA,KAAAA,CAAND,CAAMC,CAAND;AACAA,IAAAA,GAAG,CAAHA,OAAAA,CAAYJ,KAAK,CAALA,MAAAA,CAAZI,CAAYJ,CAAZI;AACAH,IAAAA,MAAM,CAANA,IAAAA,CAAAA,GAAAA;AAPPE,GAAAA;AAUG,SAAA,MAAA;AACH;;;;"}
|
||||
367
node_modules/@orchidjs/sifter/dist/esm/sifter.js
generated
vendored
Normal file
367
node_modules/@orchidjs/sifter/dist/esm/sifter.js
generated
vendored
Normal file
@@ -0,0 +1,367 @@
|
||||
/*! sifter.js | https://github.com/orchidjs/sifter.js | Apache License (v2) */
|
||||
import { iterate, cmp, propToArray, getAttrNesting, getAttr, scoreValue } from './utils.js';
|
||||
export { cmp, getAttr, getAttrNesting, iterate, propToArray, scoreValue } from './utils.js';
|
||||
import { escape_regex, getPattern } from '@orchidjs/unicode-variants';
|
||||
export { getPattern } from '@orchidjs/unicode-variants';
|
||||
|
||||
/**
|
||||
* sifter.js
|
||||
* Copyright (c) 2013–2020 Brian Reavis & contributors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this
|
||||
* file except in compliance with the License. You may obtain a copy of the License at:
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software distributed under
|
||||
* the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
* ANY KIND, either express or implied. See the License for the specific language
|
||||
* governing permissions and limitations under the License.
|
||||
*
|
||||
* @author Brian Reavis <brian@thirdroute.com>
|
||||
*/
|
||||
|
||||
class Sifter {
  // items: []|{};

  /**
   * Textually searches arrays and hashes of objects
   * by property (or multiple properties). Designed
   * specifically for autocomplete.
   *
   * @param items - array or hash of candidate objects to search
   * @param settings - optional; defaults to {diacritics: true}
   */
  constructor(items, settings) {
    this.items = void 0;
    this.settings = void 0;
    this.items = items;
    this.settings = settings || {
      diacritics: true
    };
  }

  /**
   * Splits a search string into an array of individual
   * regexps to be used to match results.
   *
   * Each token is {string, regex, field}; "field:query" syntax is
   * honored only for fields present in weights.
   */
  tokenize(query, respect_word_boundaries, weights) {
    if (!query || !query.length) return [];
    const tokens = [];
    const words = query.split(/\s+/);
    var field_regex;

    if (weights) {
      field_regex = new RegExp('^(' + Object.keys(weights).map(escape_regex).join('|') + ')\:(.*)$');
    }

    words.forEach(word => {
      let field_match;
      let field = null;
      let regex = null; // look for "field:query" tokens

      if (field_regex && (field_match = word.match(field_regex))) {
        field = field_match[1];
        word = field_match[2];
      }

      if (word.length > 0) {
        // diacritics mode expands the word into a unicode-variant pattern
        if (this.settings.diacritics) {
          regex = getPattern(word) || null;
        } else {
          regex = escape_regex(word);
        }

        if (regex && respect_word_boundaries) regex = "\\b" + regex;
      }

      tokens.push({
        string: word,
        regex: regex ? new RegExp(regex, 'iu') : null,
        field: field
      });
    });
    return tokens;
  }

  /**
   * Returns a function to be used to score individual results.
   *
   * Good matches will have a higher score than poor matches.
   * If an item is not a match, 0 will be returned by the function.
   *
   * @returns {T.ScoreFn}
   */
  getScoreFunction(query, options) {
    var search = this.prepareSearch(query, options);
    return this._getScoreFunction(search);
  }
  /**
   * Build the score function for an already-prepared search object.
   * @returns {T.ScoreFn}
   */


  _getScoreFunction(search) {
    const tokens = search.tokens,
          token_count = tokens.length;

    // no tokens => nothing can match
    if (!token_count) {
      return function () {
        return 0;
      };
    }

    const fields = search.options.fields,
          weights = search.weights,
          field_count = fields.length,
          getAttrFn = search.getAttrFn;

    // no fields configured => every item scores equally
    if (!field_count) {
      return function () {
        return 1;
      };
    }
    /**
     * Calculates the score of an object
     * against the search query.
     * (IIFE picks the single-field fast path when possible.)
     */


    const scoreObject = function () {
      if (field_count === 1) {
        return function (token, data) {
          const field = fields[0].field;
          return scoreValue(getAttrFn(data, field), token, weights[field] || 1);
        };
      }

      return function (token, data) {
        var sum = 0; // is the token specific to a field?

        if (token.field) {
          const value = getAttrFn(data, token.field);

          // field-scoped token with no regex: bare existence check
          if (!token.regex && value) {
            sum += 1 / field_count;
          } else {
            sum += scoreValue(value, token, 1);
          }
        } else {
          iterate(weights, (weight, field) => {
            sum += scoreValue(getAttrFn(data, field), token, weight);
          });
        }

        return sum / field_count;
      };
    }();

    if (token_count === 1) {
      return function (data) {
        return scoreObject(tokens[0], data);
      };
    }

    if (search.options.conjunction === 'and') {
      // 'and': every token must match, otherwise the item scores 0
      return function (data) {
        var score,
            sum = 0;

        for (let token of tokens) {
          score = scoreObject(token, data);
          if (score <= 0) return 0;
          sum += score;
        }

        return sum / token_count;
      };
    } else {
      // 'or' (default): average the per-token scores
      return function (data) {
        var sum = 0;
        iterate(tokens, token => {
          sum += scoreObject(token, data);
        });
        return sum / token_count;
      };
    }
  }

  /**
   * Returns a function that can be used to compare two
   * results, for sorting purposes. If no sorting should
   * be performed, `null` will be returned.
   *
   * @return function(a,b)
   */
  getSortFunction(query, options) {
    var search = this.prepareSearch(query, options);
    return this._getSortFunction(search);
  }

  /**
   * Build the comparator for an already-prepared search object.
   * Returns null when there is nothing to sort by.
   */
  _getSortFunction(search) {
    var implicit_score,
        sort_flds = [];
    const self = this,
          options = search.options,
          sort = !search.query && options.sort_empty ? options.sort_empty : options.sort;

    // a user-supplied comparator wins outright
    if (typeof sort == 'function') {
      return sort.bind(this);
    }
    /**
     * Fetches the specified sort field value
     * from a search result item.
     *
     */


    const get_field = function get_field(name, result) {
      if (name === '$score') return result.score;
      return search.getAttrFn(self.items[result.id], name);
    }; // parse options


    if (sort) {
      for (let s of sort) {
        // '$score' entries only make sense when there is a query
        if (search.query || s.field !== '$score') {
          sort_flds.push(s);
        }
      }
    } // the "$score" field is implied to be the primary
    // sort field, unless it's manually specified


    if (search.query) {
      implicit_score = true;

      for (let fld of sort_flds) {
        if (fld.field === '$score') {
          implicit_score = false;
          break;
        }
      }

      if (implicit_score) {
        sort_flds.unshift({
          field: '$score',
          direction: 'desc'
        });
      } // without a search.query, all items will have the same score

    } else {
      sort_flds = sort_flds.filter(fld => fld.field !== '$score');
    } // build function


    const sort_flds_count = sort_flds.length;

    if (!sort_flds_count) {
      return null;
    }

    return function (a, b) {
      var result, field;

      // first non-equal field decides the ordering
      for (let sort_fld of sort_flds) {
        field = sort_fld.field;
        let multiplier = sort_fld.direction === 'desc' ? -1 : 1;
        result = multiplier * cmp(get_field(field, a), get_field(field, b));
        if (result) return result;
      }

      return 0;
    };
  }

  /**
   * Parses a search query and returns an object
   * with tokens and fields ready to be populated
   * with results.
   *
   * Normalizes options: sort/sort_empty/fields become arrays, string
   * fields become {field, weight} objects, and per-field weights are
   * collected into `weights`.
   */
  prepareSearch(query, optsUser) {
    const weights = {};
    var options = Object.assign({}, optsUser);
    propToArray(options, 'sort');
    propToArray(options, 'sort_empty'); // convert fields to new format

    if (options.fields) {
      propToArray(options, 'fields');
      const fields = [];
      options.fields.forEach(field => {
        if (typeof field == 'string') {
          field = {
            field: field,
            weight: 1
          };
        }

        fields.push(field);
        weights[field.field] = 'weight' in field ? field.weight : 1;
      });
      options.fields = fields;
    }

    return {
      options: options,
      query: query.toLowerCase().trim(),
      tokens: this.tokenize(query, options.respect_word_boundaries, weights),
      total: 0,
      items: [],
      weights: weights,
      getAttrFn: options.nesting ? getAttrNesting : getAttr
    };
  }

  /**
   * Searches through all items and returns a sorted array of matches.
   *
   * Result shape: the prepared-search object with `items` filled with
   * {score, id} entries, `total` set before the limit is applied.
   */
  search(query, options) {
    var self = this,
        score,
        search;
    search = this.prepareSearch(query, options);
    options = search.options;
    query = search.query; // generate result scoring function

    const fn_score = options.score || self._getScoreFunction(search); // perform search and sort


    if (query.length) {
      iterate(self.items, (item, id) => {
        score = fn_score(item);

        // filter === false keeps non-matching items (score 0) in results
        if (options.filter === false || score > 0) {
          search.items.push({
            'score': score,
            'id': id
          });
        }
      });
    } else {
      // empty query: include everything with a uniform score
      iterate(self.items, (_, id) => {
        search.items.push({
          'score': 1,
          'id': id
        });
      });
    }

    const fn_sort = self._getSortFunction(search);

    if (fn_sort) search.items.sort(fn_sort); // apply limits

    search.total = search.items.length;

    if (typeof options.limit === 'number') {
      search.items = search.items.slice(0, options.limit);
    }

    return search;
  }

}
|
||||
|
||||
export { Sifter };
|
||||
//# sourceMappingURL=sifter.js.map
|
||||
1
node_modules/@orchidjs/sifter/dist/esm/sifter.js.map
generated
vendored
Normal file
1
node_modules/@orchidjs/sifter/dist/esm/sifter.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
95
node_modules/@orchidjs/sifter/dist/esm/utils.js
generated
vendored
Normal file
95
node_modules/@orchidjs/sifter/dist/esm/utils.js
generated
vendored
Normal file
@@ -0,0 +1,95 @@
|
||||
/*! sifter.js | https://github.com/orchidjs/sifter.js | Apache License (v2) */
|
||||
import { asciifold } from '@orchidjs/unicode-variants';
|
||||
|
||||
/**
|
||||
* A property getter resolving dot-notation
|
||||
* @param {Object} obj The root object to fetch property on
|
||||
* @param {String} name The optionally dotted property name to fetch
|
||||
* @return {Object} The resolved property value
|
||||
*/
|
||||
const getAttr = (obj, name) => {
|
||||
if (!obj) return;
|
||||
return obj[name];
|
||||
};
|
||||
/**
|
||||
* A property getter resolving dot-notation
|
||||
* @param {Object} obj The root object to fetch property on
|
||||
* @param {String} name The optionally dotted property name to fetch
|
||||
* @return {Object} The resolved property value
|
||||
*/
|
||||
|
||||
const getAttrNesting = (obj, name) => {
|
||||
if (!obj) return;
|
||||
var part,
|
||||
names = name.split(".");
|
||||
|
||||
while ((part = names.shift()) && (obj = obj[part]));
|
||||
|
||||
return obj;
|
||||
};
|
||||
/**
|
||||
* Calculates how close of a match the
|
||||
* given value is against a search token.
|
||||
*
|
||||
*/
|
||||
|
||||
const scoreValue = (value, token, weight) => {
|
||||
var score, pos;
|
||||
if (!value) return 0;
|
||||
value = value + '';
|
||||
if (token.regex == null) return 0;
|
||||
pos = value.search(token.regex);
|
||||
if (pos === -1) return 0;
|
||||
score = token.string.length / value.length;
|
||||
if (pos === 0) score += 0.5;
|
||||
return score * weight;
|
||||
};
|
||||
/**
|
||||
* Cast object property to an array if it exists and has a value
|
||||
*
|
||||
*/
|
||||
|
||||
const propToArray = (obj, key) => {
|
||||
var value = obj[key];
|
||||
if (typeof value == 'function') return value;
|
||||
|
||||
if (value && !Array.isArray(value)) {
|
||||
obj[key] = [value];
|
||||
}
|
||||
};
|
||||
/**
|
||||
* Iterates over arrays and hashes.
|
||||
*
|
||||
* ```
|
||||
* iterate(this.items, function(item, id) {
|
||||
* // invoked for each item
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
*/
|
||||
|
||||
const iterate = (object, callback) => {
|
||||
if (Array.isArray(object)) {
|
||||
object.forEach(callback);
|
||||
} else {
|
||||
for (var key in object) {
|
||||
if (object.hasOwnProperty(key)) {
|
||||
callback(object[key], key);
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
const cmp = (a, b) => {
|
||||
if (typeof a === 'number' && typeof b === 'number') {
|
||||
return a > b ? 1 : a < b ? -1 : 0;
|
||||
}
|
||||
|
||||
a = asciifold(a + '').toLowerCase();
|
||||
b = asciifold(b + '').toLowerCase();
|
||||
if (a > b) return 1;
|
||||
if (b > a) return -1;
|
||||
return 0;
|
||||
};
|
||||
|
||||
export { cmp, getAttr, getAttrNesting, iterate, propToArray, scoreValue };
|
||||
//# sourceMappingURL=utils.js.map
|
||||
1
node_modules/@orchidjs/sifter/dist/esm/utils.js.map
generated
vendored
Normal file
1
node_modules/@orchidjs/sifter/dist/esm/utils.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"utils.js","sources":["../../lib/utils.ts"],"sourcesContent":["\nimport { asciifold } from '@orchidjs/unicode-variants';\nimport * as T from './types';\n\n\n/**\n * A property getter resolving dot-notation\n * @param {Object} obj The root object to fetch property on\n * @param {String} name The optionally dotted property name to fetch\n * @return {Object} The resolved property value\n */\nexport const getAttr = (obj:{[key:string]:any}, name:string ) => {\n if (!obj ) return;\n return obj[name];\n};\n\n/**\n * A property getter resolving dot-notation\n * @param {Object} obj The root object to fetch property on\n * @param {String} name The optionally dotted property name to fetch\n * @return {Object} The resolved property value\n */\nexport const getAttrNesting = (obj:{[key:string]:any}, name:string ) => {\n if (!obj ) return;\n var part, names = name.split(\".\");\n\twhile( (part = names.shift()) && (obj = obj[part]));\n return obj;\n};\n\n/**\n * Calculates how close of a match the\n * given value is against a search token.\n *\n */\nexport const scoreValue = (value:string, token:T.Token, weight:number ):number => {\n\tvar score, pos;\n\n\tif (!value) return 0;\n\n\tvalue = value + '';\n\tif( token.regex == null ) return 0;\n\tpos = value.search(token.regex);\n\tif (pos === -1) return 0;\n\n\tscore = token.string.length / value.length;\n\tif (pos === 0) score += 0.5;\n\n\treturn score * weight;\n};\n\n\n/**\n * Cast object property to an array if it exists and has a value\n *\n */\nexport const propToArray = (obj:{[key:string]:any}, key:string) => {\n\tvar value = obj[key];\n\n\tif( typeof value == 'function' ) return value;\n\n\tif( value && !Array.isArray(value) ){\n\t\tobj[key] = [value];\n\t}\n}\n\n\n/**\n * Iterates over arrays and hashes.\n *\n * ```\n * iterate(this.items, function(item, id) {\n * // invoked for each item\n * });\n * ```\n *\n */\nexport const iterate = (object:[]|{[key:string]:any}, callback:(value:any,key:any)=>any) => 
{\n\n\tif ( Array.isArray(object)) {\n\t\tobject.forEach(callback);\n\n\t}else{\n\n\t\tfor (var key in object) {\n\t\t\tif (object.hasOwnProperty(key)) {\n\t\t\t\tcallback(object[key], key);\n\t\t\t}\n\t\t}\n\t}\n};\n\n\n\nexport const cmp = (a:number|string, b:number|string) => {\n\tif (typeof a === 'number' && typeof b === 'number') {\n\t\treturn a > b ? 1 : (a < b ? -1 : 0);\n\t}\n\ta = asciifold(a + '').toLowerCase();\n\tb = asciifold(b + '').toLowerCase();\n\tif (a > b) return 1;\n\tif (b > a) return -1;\n\treturn 0;\n};\n"],"names":["getAttr","obj","name","getAttrNesting","part","names","split","shift","scoreValue","value","token","weight","score","pos","regex","search","string","length","propToArray","key","Array","isArray","iterate","object","callback","forEach","hasOwnProperty","cmp","a","b","asciifold","toLowerCase"],"mappings":";;;AAKA;AACA;AACA;AACA;AACA;AACA;MACaA,OAAO,GAAG,CAACC,GAAD,EAAyBC,IAAzB,KAA0C;AAC7D,MAAI,CAACD,GAAL,EAAW;AACX,SAAOA,GAAG,CAACC,IAAD,CAAV;AACH;AAED;AACA;AACA;AACA;AACA;AACA;;MACaC,cAAc,GAAG,CAACF,GAAD,EAAyBC,IAAzB,KAA0C;AACpE,MAAI,CAACD,GAAL,EAAW;AACX,MAAIG,IAAJ;AAAA,MAAUC,KAAK,GAAGH,IAAI,CAACI,KAAL,CAAW,GAAX,CAAlB;;AACH,SAAO,CAACF,IAAI,GAAGC,KAAK,CAACE,KAAN,EAAR,MAA2BN,GAAG,GAAGA,GAAG,CAACG,IAAD,CAApC,CAAP,CAAmD;;AAChD,SAAOH,GAAP;AACH;AAED;AACA;AACA;AACA;AACA;;MACaO,UAAU,GAAG,CAACC,KAAD,EAAeC,KAAf,EAA8BC,MAA9B,KAAwD;AACjF,MAAIC,KAAJ,EAAWC,GAAX;AAEA,MAAI,CAACJ,KAAL,EAAY,OAAO,CAAP;AAEZA,EAAAA,KAAK,GAAGA,KAAK,GAAG,EAAhB;AACA,MAAIC,KAAK,CAACI,KAAN,IAAe,IAAnB,EAA0B,OAAO,CAAP;AAC1BD,EAAAA,GAAG,GAAGJ,KAAK,CAACM,MAAN,CAAaL,KAAK,CAACI,KAAnB,CAAN;AACA,MAAID,GAAG,KAAK,CAAC,CAAb,EAAgB,OAAO,CAAP;AAEhBD,EAAAA,KAAK,GAAGF,KAAK,CAACM,MAAN,CAAaC,MAAb,GAAsBR,KAAK,CAACQ,MAApC;AACA,MAAIJ,GAAG,KAAK,CAAZ,EAAeD,KAAK,IAAI,GAAT;AAEf,SAAOA,KAAK,GAAGD,MAAf;AACA;AAGD;AACA;AACA;AACA;;MACaO,WAAW,GAAG,CAACjB,GAAD,EAAyBkB,GAAzB,KAAwC;AAClE,MAAIV,KAAK,GAAGR,GAAG,CAACkB,GAAD,CAAf;AAEA,MAAI,OAAOV,KAAP,IAAgB,UAApB,EAAiC,OAAOA,KAAP;;AAEjC,MAAIA,KAAK,IAAI,CAACW,KAAK,CA
ACC,OAAN,CAAcZ,KAAd,CAAd,EAAoC;AACnCR,IAAAA,GAAG,CAACkB,GAAD,CAAH,GAAW,CAACV,KAAD,CAAX;AACA;AACD;AAGD;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;MACaa,OAAO,GAAG,CAACC,MAAD,EAA+BC,QAA/B,KAAqE;AAE3F,MAAKJ,KAAK,CAACC,OAAN,CAAcE,MAAd,CAAL,EAA4B;AAC3BA,IAAAA,MAAM,CAACE,OAAP,CAAeD,QAAf;AAEA,GAHD,MAGK;AAEJ,SAAK,IAAIL,GAAT,IAAgBI,MAAhB,EAAwB;AACvB,UAAIA,MAAM,CAACG,cAAP,CAAsBP,GAAtB,CAAJ,EAAgC;AAC/BK,QAAAA,QAAQ,CAACD,MAAM,CAACJ,GAAD,CAAP,EAAcA,GAAd,CAAR;AACA;AACD;AACD;AACD;MAIYQ,GAAG,GAAG,CAACC,CAAD,EAAkBC,CAAlB,KAAsC;AACxD,MAAI,OAAOD,CAAP,KAAa,QAAb,IAAyB,OAAOC,CAAP,KAAa,QAA1C,EAAoD;AACnD,WAAOD,CAAC,GAAGC,CAAJ,GAAQ,CAAR,GAAaD,CAAC,GAAGC,CAAJ,GAAQ,CAAC,CAAT,GAAa,CAAjC;AACA;;AACDD,EAAAA,CAAC,GAAGE,SAAS,CAACF,CAAC,GAAG,EAAL,CAAT,CAAkBG,WAAlB,EAAJ;AACAF,EAAAA,CAAC,GAAGC,SAAS,CAACD,CAAC,GAAG,EAAL,CAAT,CAAkBE,WAAlB,EAAJ;AACA,MAAIH,CAAC,GAAGC,CAAR,EAAW,OAAO,CAAP;AACX,MAAIA,CAAC,GAAGD,CAAR,EAAW,OAAO,CAAC,CAAR;AACX,SAAO,CAAP;AACA;;;;"}
|
||||
71
node_modules/@orchidjs/sifter/dist/types/sifter.d.ts
generated
vendored
Normal file
71
node_modules/@orchidjs/sifter/dist/types/sifter.d.ts
generated
vendored
Normal file
@@ -0,0 +1,71 @@
|
||||
/**
|
||||
* sifter.js
|
||||
* Copyright (c) 2013–2020 Brian Reavis & contributors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this
|
||||
* file except in compliance with the License. You may obtain a copy of the License at:
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software distributed under
|
||||
* the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
* ANY KIND, either express or implied. See the License for the specific language
|
||||
* governing permissions and limitations under the License.
|
||||
*
|
||||
* @author Brian Reavis <brian@thirdroute.com>
|
||||
*/
|
||||
import { scoreValue, getAttr, getAttrNesting, propToArray, iterate, cmp } from './utils';
|
||||
import { getPattern } from '@orchidjs/unicode-variants';
|
||||
import * as T from './types';
|
||||
declare class Sifter {
|
||||
items: any;
|
||||
settings: T.Settings;
|
||||
/**
|
||||
* Textually searches arrays and hashes of objects
|
||||
* by property (or multiple properties). Designed
|
||||
* specifically for autocomplete.
|
||||
*
|
||||
*/
|
||||
constructor(items: any, settings: T.Settings);
|
||||
/**
|
||||
* Splits a search string into an array of individual
|
||||
* regexps to be used to match results.
|
||||
*
|
||||
*/
|
||||
tokenize(query: string, respect_word_boundaries?: boolean, weights?: T.Weights): T.Token[];
|
||||
/**
|
||||
* Returns a function to be used to score individual results.
|
||||
*
|
||||
* Good matches will have a higher score than poor matches.
|
||||
* If an item is not a match, 0 will be returned by the function.
|
||||
*
|
||||
* @returns {T.ScoreFn}
|
||||
*/
|
||||
getScoreFunction(query: string, options: T.UserOptions): (data: {}) => number;
|
||||
/**
|
||||
* @returns {T.ScoreFn}
|
||||
*
|
||||
*/
|
||||
_getScoreFunction(search: T.PrepareObj): (data: {}) => number;
|
||||
/**
|
||||
* Returns a function that can be used to compare two
|
||||
* results, for sorting purposes. If no sorting should
|
||||
* be performed, `null` will be returned.
|
||||
*
|
||||
* @return function(a,b)
|
||||
*/
|
||||
getSortFunction(query: string, options: T.UserOptions): ((a: T.ResultItem, b: T.ResultItem) => number) | null;
|
||||
_getSortFunction(search: T.PrepareObj): ((a: T.ResultItem, b: T.ResultItem) => number) | null;
|
||||
/**
|
||||
* Parses a search query and returns an object
|
||||
* with tokens and fields ready to be populated
|
||||
* with results.
|
||||
*
|
||||
*/
|
||||
prepareSearch(query: string, optsUser: T.UserOptions): T.PrepareObj;
|
||||
/**
|
||||
* Searches through all items and returns a sorted array of matches.
|
||||
*
|
||||
*/
|
||||
search(query: string, options: T.UserOptions): T.PrepareObj;
|
||||
}
|
||||
export { Sifter, scoreValue, getAttr, getAttrNesting, propToArray, iterate, cmp, getPattern };
|
||||
57
node_modules/@orchidjs/sifter/dist/types/types.d.ts
generated
vendored
Normal file
57
node_modules/@orchidjs/sifter/dist/types/types.d.ts
generated
vendored
Normal file
@@ -0,0 +1,57 @@
|
||||
import { Sifter } from './sifter';
|
||||
export declare type Field = {
|
||||
field: string;
|
||||
weight: number;
|
||||
};
|
||||
export declare type Sort = {
|
||||
field: string;
|
||||
direction?: string;
|
||||
};
|
||||
export declare type SortFn = (this: Sifter, a: ResultItem, b: ResultItem) => number;
|
||||
export declare type UserOptions = {
|
||||
fields: string[] | Field[];
|
||||
conjunction: string;
|
||||
sort: string | SortFn | Sort[];
|
||||
nesting?: boolean;
|
||||
score?: ScoreFn;
|
||||
filter?: boolean;
|
||||
sort_empty?: SortFn | Sort[];
|
||||
respect_word_boundaries?: boolean;
|
||||
limit?: number;
|
||||
};
|
||||
export declare type Options = {
|
||||
fields: Field[];
|
||||
conjunction: string;
|
||||
sort: SortFn | Sort[];
|
||||
nesting?: boolean;
|
||||
score?: ScoreFn;
|
||||
filter?: boolean;
|
||||
sort_empty?: SortFn | Sort[];
|
||||
respect_word_boundaries?: boolean;
|
||||
limit?: number;
|
||||
};
|
||||
export declare type Token = {
|
||||
string: string;
|
||||
regex: RegExp | null;
|
||||
field: string | null;
|
||||
};
|
||||
export declare type Weights = {
|
||||
[key: string]: number;
|
||||
};
|
||||
export declare type PrepareObj = {
|
||||
options: Options;
|
||||
query: string;
|
||||
tokens: Token[];
|
||||
total: number;
|
||||
items: ResultItem[];
|
||||
weights: Weights;
|
||||
getAttrFn: (data: any, field: string) => any;
|
||||
};
|
||||
export declare type Settings = {
|
||||
diacritics: boolean;
|
||||
};
|
||||
export declare type ResultItem = {
|
||||
score: number;
|
||||
id: number | string;
|
||||
};
|
||||
export declare type ScoreFn = (item: ResultItem) => number;
|
||||
48
node_modules/@orchidjs/sifter/dist/types/utils.d.ts
generated
vendored
Normal file
48
node_modules/@orchidjs/sifter/dist/types/utils.d.ts
generated
vendored
Normal file
@@ -0,0 +1,48 @@
|
||||
import * as T from './types';
|
||||
/**
|
||||
* A property getter resolving dot-notation
|
||||
* @param {Object} obj The root object to fetch property on
|
||||
* @param {String} name The optionally dotted property name to fetch
|
||||
* @return {Object} The resolved property value
|
||||
*/
|
||||
export declare const getAttr: (obj: {
|
||||
[key: string]: any;
|
||||
}, name: string) => any;
|
||||
/**
|
||||
* A property getter resolving dot-notation
|
||||
* @param {Object} obj The root object to fetch property on
|
||||
* @param {String} name The optionally dotted property name to fetch
|
||||
* @return {Object} The resolved property value
|
||||
*/
|
||||
export declare const getAttrNesting: (obj: {
|
||||
[key: string]: any;
|
||||
}, name: string) => {
|
||||
[key: string]: any;
|
||||
} | undefined;
|
||||
/**
|
||||
* Calculates how close of a match the
|
||||
* given value is against a search token.
|
||||
*
|
||||
*/
|
||||
export declare const scoreValue: (value: string, token: T.Token, weight: number) => number;
|
||||
/**
|
||||
* Cast object property to an array if it exists and has a value
|
||||
*
|
||||
*/
|
||||
export declare const propToArray: (obj: {
|
||||
[key: string]: any;
|
||||
}, key: string) => any;
|
||||
/**
|
||||
* Iterates over arrays and hashes.
|
||||
*
|
||||
* ```
|
||||
* iterate(this.items, function(item, id) {
|
||||
* // invoked for each item
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
*/
|
||||
export declare const iterate: (object: [] | {
|
||||
[key: string]: any;
|
||||
}, callback: (value: any, key: any) => any) => void;
|
||||
export declare const cmp: (a: number | string, b: number | string) => 1 | -1 | 0;
|
||||
1145
node_modules/@orchidjs/sifter/dist/umd/sifter.js
generated
vendored
Normal file
1145
node_modules/@orchidjs/sifter/dist/umd/sifter.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
1
node_modules/@orchidjs/sifter/dist/umd/sifter.js.map
generated
vendored
Normal file
1
node_modules/@orchidjs/sifter/dist/umd/sifter.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
113
node_modules/@orchidjs/sifter/dist/umd/sifter.min.js
generated
vendored
Normal file
113
node_modules/@orchidjs/sifter/dist/umd/sifter.min.js
generated
vendored
Normal file
@@ -0,0 +1,113 @@
|
||||
!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?e(exports):"function"==typeof define&&define.amd?define(["exports"],e):e((t="undefined"!=typeof globalThis?globalThis:t||self).sifter={})}(this,(function(t){"use strict"
|
||||
const e=t=>(t=t.filter(Boolean)).length<2?t[0]||"":1==i(t)?"["+t.join("")+"]":"(?:"+t.join("|")+")",r=t=>{if(!s(t))return t.join("")
|
||||
let e="",r=0
|
||||
const n=()=>{r>1&&(e+="{"+r+"}")}
|
||||
return t.forEach(((s,o)=>{s!==t[o-1]?(n(),e+=s,r=1):r++})),n(),e},n=t=>{let r=u(t)
|
||||
return e(r)},s=t=>new Set(t).size!==t.length,o=t=>(t+"").replace(/([\$\(-\+\.\?\[-\^\{-\}])/g,"\\$1"),i=t=>t.reduce(((t,e)=>Math.max(t,l(e))),0),l=t=>u(t).length,u=t=>Array.from(t),a=t=>{if(1===t.length)return[[t]]
|
||||
let e=[]
|
||||
const r=t.substring(1)
|
||||
return a(r).forEach((function(r){let n=r.slice(0)
|
||||
n[0]=t.charAt(0)+n[0],e.push(n),n=r.slice(0),n.unshift(t.charAt(0)),e.push(n)})),e},f=[[0,65535]]
|
||||
let c,h
|
||||
const d={},g={"/":"⁄∕",0:"߀",a:"ⱥɐɑ",aa:"ꜳ",ae:"æǽǣ",ao:"ꜵ",au:"ꜷ",av:"ꜹꜻ",ay:"ꜽ",b:"ƀɓƃ",c:"ꜿƈȼↄ",d:"đɗɖᴅƌꮷԁɦ",e:"ɛǝᴇɇ",f:"ꝼƒ",g:"ǥɠꞡᵹꝿɢ",h:"ħⱨⱶɥ",i:"ɨı",j:"ɉȷ",k:"ƙⱪꝁꝃꝅꞣ",l:"łƚɫⱡꝉꝇꞁɭ",m:"ɱɯϻ",n:"ꞥƞɲꞑᴎлԉ",o:"øǿɔɵꝋꝍᴑ",oe:"œ",oi:"ƣ",oo:"ꝏ",ou:"ȣ",p:"ƥᵽꝑꝓꝕρ",q:"ꝗꝙɋ",r:"ɍɽꝛꞧꞃ",s:"ßȿꞩꞅʂ",t:"ŧƭʈⱦꞇ",th:"þ",tz:"ꜩ",u:"ʉ",v:"ʋꝟʌ",vy:"ꝡ",w:"ⱳ",y:"ƴɏỿ",z:"ƶȥɀⱬꝣ",hv:"ƕ"}
|
||||
for(let t in g){let e=g[t]||""
|
||||
for(let r=0;r<e.length;r++){let n=e.substring(r,r+1)
|
||||
d[n]=t}}const p=new RegExp(Object.keys(d).join("|")+"|[̀-ͯ·ʾʼ]","gu"),m=(t,e="NFKD")=>t.normalize(e),b=t=>u(t).reduce(((t,e)=>t+y(e)),""),y=t=>(t=m(t).toLowerCase().replace(p,(t=>d[t]||"")),m(t,"NFC"))
|
||||
const w=t=>{const e={},r=(t,r)=>{const s=e[t]||new Set,i=new RegExp("^"+n(s)+"$","iu")
|
||||
r.match(i)||(s.add(o(r)),e[t]=s)}
|
||||
for(let e of function*(t){for(const[e,r]of t)for(let t=e;t<=r;t++){let e=String.fromCharCode(t),r=b(e)
|
||||
r!=e.toLowerCase()&&(r.length>3||0!=r.length&&(yield{folded:r,composed:e,code_point:t}))}}(t))r(e.folded,e.folded),r(e.folded,e.composed)
|
||||
return e},v=t=>{const r=w(t),s={}
|
||||
let i=[]
|
||||
for(let t in r){let e=r[t]
|
||||
e&&(s[t]=n(e)),t.length>1&&i.push(o(t))}i.sort(((t,e)=>e.length-t.length))
|
||||
const l=e(i)
|
||||
return h=new RegExp("^"+l,"u"),s},S=(t,n=1)=>(n=Math.max(n,t.length-1),e(a(t).map((t=>((t,e=1)=>{let n=0
|
||||
return t=t.map((t=>(c[t]&&(n+=t.length),c[t]||t))),n>=e?r(t):""})(t,n))))),x=(t,n=!0)=>{let s=t.length>1?1:0
|
||||
return e(t.map((t=>{let e=[]
|
||||
const o=n?t.length():t.length()-1
|
||||
for(let r=0;r<o;r++)e.push(S(t.substrs[r]||"",s))
|
||||
return r(e)})))},j=(t,e)=>{for(const r of e){if(r.start!=t.start||r.end!=t.end)continue
|
||||
if(r.substrs.join("")!==t.substrs.join(""))continue
|
||||
let e=t.parts
|
||||
const n=t=>{for(const r of e){if(r.start===t.start&&r.substr===t.substr)return!1
|
||||
if(1!=t.length&&1!=r.length){if(t.start<r.start&&t.end>r.start)return!0
|
||||
if(r.start<t.start&&r.end>t.start)return!0}}return!1}
|
||||
if(!(r.parts.filter(n).length>0))return!0}return!1}
|
||||
class _{constructor(){this.parts=[],this.substrs=[],this.start=0,this.end=0}add(t){t&&(this.parts.push(t),this.substrs.push(t.substr),this.start=Math.min(t.start,this.start),this.end=Math.max(t.end,this.end))}last(){return this.parts[this.parts.length-1]}length(){return this.parts.length}clone(t,e){let r=new _,n=JSON.parse(JSON.stringify(this.parts)),s=n.pop()
|
||||
for(const t of n)r.add(t)
|
||||
let o=e.substr.substring(0,t-s.start),i=o.length
|
||||
return r.add({start:s.start,end:s.start+i,length:i,substr:o}),r}}const A=t=>{var e
|
||||
void 0===c&&(c=v(e||f)),t=b(t)
|
||||
let r="",n=[new _]
|
||||
for(let e=0;e<t.length;e++){let s=t.substring(e).match(h)
|
||||
const o=t.substring(e,e+1),i=s?s[0]:null
|
||||
let l=[],u=new Set
|
||||
for(const t of n){const r=t.last()
|
||||
if(!r||1==r.length||r.end<=e)if(i){const r=i.length
|
||||
t.add({start:e,end:e+r,length:r,substr:i}),u.add("1")}else t.add({start:e,end:e+1,length:1,substr:o}),u.add("2")
|
||||
else if(i){let n=t.clone(e,r)
|
||||
const s=i.length
|
||||
n.add({start:e,end:e+s,length:s,substr:i}),l.push(n)}else u.add("3")}if(l.length>0){l=l.sort(((t,e)=>t.length()-e.length()))
|
||||
for(let t of l)j(t,n)||n.push(t)}else if(e>0&&1==u.size&&!u.has("3")){r+=x(n,!1)
|
||||
let t=new _
|
||||
const e=n[0]
|
||||
e&&t.add(e.last()),n=[t]}}return r+=x(n,!0),r},F=(t,e)=>{if(t)return t[e]},E=(t,e)=>{if(t){for(var r,n=e.split(".");(r=n.shift())&&(t=t[r]););return t}},$=(t,e,r)=>{var n,s
|
||||
return t?(t+="",null==e.regex||-1===(s=t.search(e.regex))?0:(n=e.string.length/t.length,0===s&&(n+=.5),n*r)):0},k=(t,e)=>{var r=t[e]
|
||||
if("function"==typeof r)return r
|
||||
r&&!Array.isArray(r)&&(t[e]=[r])},C=(t,e)=>{if(Array.isArray(t))t.forEach(e)
|
||||
else for(var r in t)t.hasOwnProperty(r)&&e(t[r],r)},z=(t,e)=>"number"==typeof t&&"number"==typeof e?t>e?1:t<e?-1:0:(t=b(t+"").toLowerCase())>(e=b(e+"").toLowerCase())?1:e>t?-1:0
|
||||
t.Sifter=
|
||||
/**
|
||||
* sifter.js
|
||||
* Copyright (c) 2013–2020 Brian Reavis & contributors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this
|
||||
* file except in compliance with the License. You may obtain a copy of the License at:
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software distributed under
|
||||
* the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
* ANY KIND, either express or implied. See the License for the specific language
|
||||
* governing permissions and limitations under the License.
|
||||
*
|
||||
* @author Brian Reavis <brian@thirdroute.com>
|
||||
*/
|
||||
class{constructor(t,e){this.items=void 0,this.settings=void 0,this.items=t,this.settings=e||{diacritics:!0}}tokenize(t,e,r){if(!t||!t.length)return[]
|
||||
const n=[],s=t.split(/\s+/)
|
||||
var i
|
||||
return r&&(i=new RegExp("^("+Object.keys(r).map(o).join("|")+"):(.*)$")),s.forEach((t=>{let r,s=null,l=null
|
||||
i&&(r=t.match(i))&&(s=r[1],t=r[2]),t.length>0&&(l=this.settings.diacritics?A(t)||null:o(t),l&&e&&(l="\\b"+l)),n.push({string:t,regex:l?new RegExp(l,"iu"):null,field:s})})),n}getScoreFunction(t,e){var r=this.prepareSearch(t,e)
|
||||
return this._getScoreFunction(r)}_getScoreFunction(t){const e=t.tokens,r=e.length
|
||||
if(!r)return function(){return 0}
|
||||
const n=t.options.fields,s=t.weights,o=n.length,i=t.getAttrFn
|
||||
if(!o)return function(){return 1}
|
||||
const l=1===o?function(t,e){const r=n[0].field
|
||||
return $(i(e,r),t,s[r]||1)}:function(t,e){var r=0
|
||||
if(t.field){const n=i(e,t.field)
|
||||
!t.regex&&n?r+=1/o:r+=$(n,t,1)}else C(s,((n,s)=>{r+=$(i(e,s),t,n)}))
|
||||
return r/o}
|
||||
return 1===r?function(t){return l(e[0],t)}:"and"===t.options.conjunction?function(t){var n,s=0
|
||||
for(let r of e){if((n=l(r,t))<=0)return 0
|
||||
s+=n}return s/r}:function(t){var n=0
|
||||
return C(e,(e=>{n+=l(e,t)})),n/r}}getSortFunction(t,e){var r=this.prepareSearch(t,e)
|
||||
return this._getSortFunction(r)}_getSortFunction(t){var e,r=[]
|
||||
const n=this,s=t.options,o=!t.query&&s.sort_empty?s.sort_empty:s.sort
|
||||
if("function"==typeof o)return o.bind(this)
|
||||
const i=function(e,r){return"$score"===e?r.score:t.getAttrFn(n.items[r.id],e)}
|
||||
if(o)for(let e of o)(t.query||"$score"!==e.field)&&r.push(e)
|
||||
if(t.query){e=!0
|
||||
for(let t of r)if("$score"===t.field){e=!1
|
||||
break}e&&r.unshift({field:"$score",direction:"desc"})}else r=r.filter((t=>"$score"!==t.field))
|
||||
return r.length?function(t,e){var n,s
|
||||
for(let o of r){if(s=o.field,n=("desc"===o.direction?-1:1)*z(i(s,t),i(s,e)))return n}return 0}:null}prepareSearch(t,e){const r={}
|
||||
var n=Object.assign({},e)
|
||||
if(k(n,"sort"),k(n,"sort_empty"),n.fields){k(n,"fields")
|
||||
const t=[]
|
||||
n.fields.forEach((e=>{"string"==typeof e&&(e={field:e,weight:1}),t.push(e),r[e.field]="weight"in e?e.weight:1})),n.fields=t}return{options:n,query:t.toLowerCase().trim(),tokens:this.tokenize(t,n.respect_word_boundaries,r),total:0,items:[],weights:r,getAttrFn:n.nesting?E:F}}search(t,e){var r,n,s=this
|
||||
n=this.prepareSearch(t,e),e=n.options,t=n.query
|
||||
const o=e.score||s._getScoreFunction(n)
|
||||
t.length?C(s.items,((t,s)=>{r=o(t),(!1===e.filter||r>0)&&n.items.push({score:r,id:s})})):C(s.items,((t,e)=>{n.items.push({score:1,id:e})}))
|
||||
const i=s._getSortFunction(n)
|
||||
return i&&n.items.sort(i),n.total=n.items.length,"number"==typeof e.limit&&(n.items=n.items.slice(0,e.limit)),n}},t.cmp=z,t.getAttr=F,t.getAttrNesting=E,t.getPattern=A,t.iterate=C,t.propToArray=k,t.scoreValue=$,Object.defineProperty(t,"__esModule",{value:!0})}))
|
||||
//# sourceMappingURL=sifter.min.js.map
|
||||
1
node_modules/@orchidjs/sifter/dist/umd/sifter.min.js.map
generated
vendored
Normal file
1
node_modules/@orchidjs/sifter/dist/umd/sifter.min.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
355
node_modules/@orchidjs/sifter/lib/sifter.ts
generated
vendored
Normal file
355
node_modules/@orchidjs/sifter/lib/sifter.ts
generated
vendored
Normal file
@@ -0,0 +1,355 @@
|
||||
/**
|
||||
* sifter.js
|
||||
* Copyright (c) 2013–2020 Brian Reavis & contributors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this
|
||||
* file except in compliance with the License. You may obtain a copy of the License at:
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software distributed under
|
||||
* the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
* ANY KIND, either express or implied. See the License for the specific language
|
||||
* governing permissions and limitations under the License.
|
||||
*
|
||||
* @author Brian Reavis <brian@thirdroute.com>
|
||||
*/
|
||||
|
||||
import { scoreValue, getAttr, getAttrNesting, propToArray, iterate, cmp } from './utils';
|
||||
import { getPattern, escape_regex } from '@orchidjs/unicode-variants';
|
||||
import * as T from './types';
|
||||
|
||||
class Sifter{
|
||||
|
||||
public items: any; // []|{};
|
||||
public settings: T.Settings;
|
||||
|
||||
/**
|
||||
* Textually searches arrays and hashes of objects
|
||||
* by property (or multiple properties). Designed
|
||||
* specifically for autocomplete.
|
||||
*
|
||||
*/
|
||||
constructor(items:any, settings:T.Settings) {
|
||||
this.items = items;
|
||||
this.settings = settings || {diacritics: true};
|
||||
};
|
||||
|
||||
/**
|
||||
* Splits a search string into an array of individual
|
||||
* regexps to be used to match results.
|
||||
*
|
||||
*/
|
||||
tokenize(query:string, respect_word_boundaries?:boolean, weights?:T.Weights ):T.Token[] {
|
||||
if (!query || !query.length) return [];
|
||||
|
||||
const tokens:T.Token[] = [];
|
||||
const words = query.split(/\s+/);
|
||||
var field_regex:RegExp;
|
||||
|
||||
if( weights ){
|
||||
field_regex = new RegExp( '^('+ Object.keys(weights).map(escape_regex).join('|')+')\:(.*)$');
|
||||
}
|
||||
|
||||
words.forEach((word:string) => {
|
||||
let field_match;
|
||||
let field:null|string = null;
|
||||
let regex:null|string = null;
|
||||
|
||||
// look for "field:query" tokens
|
||||
if( field_regex && (field_match = word.match(field_regex)) ){
|
||||
field = field_match[1]!;
|
||||
word = field_match[2]!;
|
||||
}
|
||||
|
||||
if( word.length > 0 ){
|
||||
if( this.settings.diacritics ){
|
||||
regex = getPattern(word) || null;
|
||||
}else{
|
||||
regex = escape_regex(word);
|
||||
}
|
||||
if( regex && respect_word_boundaries ) regex = "\\b"+regex;
|
||||
}
|
||||
|
||||
tokens.push({
|
||||
string : word,
|
||||
regex : regex ? new RegExp(regex,'iu') : null,
|
||||
field : field,
|
||||
});
|
||||
});
|
||||
|
||||
return tokens;
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Returns a function to be used to score individual results.
|
||||
*
|
||||
* Good matches will have a higher score than poor matches.
|
||||
* If an item is not a match, 0 will be returned by the function.
|
||||
*
|
||||
* @returns {T.ScoreFn}
|
||||
*/
|
||||
getScoreFunction(query:string, options:T.UserOptions ){
|
||||
var search = this.prepareSearch(query, options);
|
||||
return this._getScoreFunction(search);
|
||||
}
|
||||
|
||||
/**
 * Builds the scoring function for an already-prepared search.
 * The returned function maps an item to a numeric score (0 = no match).
 *
 * @returns {T.ScoreFn}
 */
_getScoreFunction(search:T.PrepareObj ){
	const tokens = search.tokens,
		token_count = tokens.length;

	// no tokens: nothing can match, every item scores 0
	if (!token_count) {
		return function() { return 0; };
	}

	const fields = search.options.fields,
		weights = search.weights,
		field_count = fields.length,
		getAttrFn = search.getAttrFn;

	// no fields configured: every item is an equal match
	if (!field_count) {
		return function() { return 1; };
	}

	/**
	 * Calculates the score of an object
	 * against the search query (one token at a time).
	 */
	const scoreObject = (function() {

		// single-field fast path: no per-field loop needed
		if (field_count === 1) {
			return function(token:T.Token, data:{}) {
				const field = fields[0]!.field;
				return scoreValue(getAttrFn(data, field), token, weights[field]||1);
			};
		}

		return function(token:T.Token, data:{}) {
			var sum = 0;

			// is the token specific to a field?
			if( token.field ){

				const value = getAttrFn(data, token.field);

				// no regex but a value exists: count as a partial field hit;
				// otherwise score the value against the token's regex
				if( !token.regex && value ){
					sum += (1/field_count);
				}else{
					sum += scoreValue(value, token, 1);
				}

			}else{
				// unscoped token: score against every weighted field
				iterate(weights, (weight:number, field:string) => {
					sum += scoreValue(getAttrFn(data, field), token, weight);
				});
			}

			// normalize by the number of fields
			return sum / field_count;
		};
	})();

	// single-token fast path
	if (token_count === 1) {
		return function(data:{}) {
			return scoreObject(tokens[0]!, data);
		};
	}

	if (search.options.conjunction === 'and') {
		// "and": every token must match; any zero short-circuits to 0
		return function(data:{}) {
			var score, sum = 0;
			for( let token of tokens){
				score = scoreObject(token, data);
				if (score <= 0) return 0;
				sum += score;
			}
			return sum / token_count;
		};
	} else {
		// "or": average the token scores
		return function(data:{}) {
			var sum = 0;
			iterate(tokens,(token:T.Token)=>{
				sum += scoreObject(token, data);
			});
			return sum / token_count;
		};
	}
};
|
||||
|
||||
/**
 * Returns a comparator that can be used to compare two
 * results, for sorting purposes. If no sorting should
 * be performed, `null` will be returned.
 *
 * @return function(a,b)
 */
getSortFunction(query:string, options:T.UserOptions) {
	const search = this.prepareSearch(query, options);
	return this._getSortFunction(search);
}
|
||||
|
||||
/**
 * Builds the comparator used to order search results.
 * Returns `null` when no sorting should be performed.
 */
_getSortFunction(search:T.PrepareObj){
	var implicit_score,
		sort_flds:T.Sort[] = [];

	const self = this,
		options = search.options,
		// with an empty query, prefer sort_empty when it is provided
		sort = (!search.query && options.sort_empty) ? options.sort_empty : options.sort;

	// a user-supplied comparator wins outright
	if( typeof sort == 'function' ){
		return sort.bind(this);
	}

	/**
	 * Fetches the specified sort field value
	 * from a search result item ("$score" is virtual).
	 */
	const get_field = function(name:string, result:T.ResultItem):string|number {
		if (name === '$score') return result.score;
		return search.getAttrFn(self.items[result.id], name);
	};

	// parse options; "$score" is only meaningful when there is a query
	if (sort) {
		for( let s of sort ){
			if (search.query || s.field !== '$score') {
				sort_flds.push(s);
			}
		}
	}

	// the "$score" field is implied to be the primary
	// sort field, unless it's manually specified
	if (search.query) {
		implicit_score = true;
		for( let fld of sort_flds ){
			if( fld.field === '$score' ){
				implicit_score = false;
				break;
			}
		}
		if (implicit_score) {
			sort_flds.unshift({field: '$score', direction: 'desc'});
		}

	// without a search.query, all items will have the same score
	} else {
		sort_flds = sort_flds.filter((fld) => fld.field !== '$score' );
	}

	// build function; nothing left to sort by -> no comparator
	const sort_flds_count = sort_flds.length;
	if (!sort_flds_count) {
		return null;
	}

	return function(a:T.ResultItem, b:T.ResultItem) {
		var result, field;
		for( let sort_fld of sort_flds ){
			field = sort_fld.field;

			let multiplier = sort_fld.direction === 'desc' ? -1 : 1;

			result = multiplier * cmp(
				get_field(field, a),
				get_field(field, b)
			);
			// first field that differs decides the order
			if (result) return result;
		}
		return 0;
	};
};
|
||||
|
||||
/**
 * Parses a search query and returns an object
 * with tokens and fields ready to be populated
 * with results.
 */
prepareSearch(query:string, optsUser:T.UserOptions):T.PrepareObj {
	const weights:T.Weights = {};
	const options = Object.assign({}, optsUser) as T.Options;

	propToArray(options, 'sort');
	propToArray(options, 'sort_empty');

	// normalize `fields` entries to {field, weight} objects
	if( options.fields ){
		propToArray(options, 'fields');

		const normalized:T.Field[] = [];
		for( let entry of options.fields as (string|T.Field)[] ){
			if( typeof entry == 'string' ){
				entry = {field: entry, weight: 1};
			}
			normalized.push(entry);
			weights[entry.field] = ('weight' in entry) ? entry.weight : 1;
		}
		options.fields = normalized;
	}

	return {
		options   : options as T.Options,
		query     : query.toLowerCase().trim(),
		tokens    : this.tokenize(query, options.respect_word_boundaries, weights),
		total     : 0,
		items     : [],
		weights   : weights,
		// nested ("a.b.c") lookups only when explicitly enabled
		getAttrFn : (options.nesting) ? getAttrNesting : getAttr,
	};
};
|
||||
|
||||
/**
 * Searches through all items and returns a sorted array of matches.
 */
search(query:string, options:T.UserOptions) : T.PrepareObj {
	const search = this.prepareSearch(query, options);
	options = search.options;
	query = search.query;

	// generate result scoring function
	const fn_score:T.ScoreFn = options.score || this._getScoreFunction(search);

	// score each item; with an empty query every item matches with score 1
	if (query.length) {
		iterate(this.items, (item:T.ResultItem, id:string|number) => {
			const score = fn_score(item);
			// filter === false keeps non-matching items too
			if (options.filter === false || score > 0) {
				search.items.push({'score': score, 'id': id});
			}
		});
	} else {
		iterate(this.items, (_:T.ResultItem, id:string|number) => {
			search.items.push({'score': 1, 'id': id});
		});
	}

	const fn_sort = this._getSortFunction(search);
	if (fn_sort) search.items.sort(fn_sort);

	// apply limits (total reflects the pre-limit match count)
	search.total = search.items.length;
	if (typeof options.limit === 'number') {
		search.items = search.items.slice(0, options.limit);
	}

	return search;
};
|
||||
}
|
||||
|
||||
export { Sifter, scoreValue, getAttr, getAttrNesting, propToArray, iterate, cmp, getPattern }
|
||||
72
node_modules/@orchidjs/sifter/lib/types.ts
generated
vendored
Normal file
72
node_modules/@orchidjs/sifter/lib/types.ts
generated
vendored
Normal file
@@ -0,0 +1,72 @@
|
||||
|
||||
import {Sifter} from './sifter';
|
||||
|
||||
// A searchable field together with its relative scoring weight.
export type Field = {
	field: string,
	weight: number,
}

// A sort directive; direction is "asc" (default) or "desc".
export type Sort = {
	field: string,
	direction?: string,
}

// User-supplied comparator, invoked with the Sifter instance as `this`.
export type SortFn = (this:Sifter, a:ResultItem, b:ResultItem)=>number;

// Options as passed in by the caller, before normalization by prepareSearch().
export type UserOptions = {
	fields: string[]|Field[],
	conjunction: string,
	sort: string|SortFn|Sort[],

	nesting?: boolean,
	score?: ScoreFn,
	filter?: boolean,
	sort_empty?: SortFn|Sort[],
	respect_word_boundaries?: boolean,
	limit?: number,
}

// Options after normalization: fields/sort are in canonical array form.
export type Options = {
	fields: Field[],
	conjunction: string,
	sort: SortFn|Sort[],

	nesting?: boolean,
	score?: ScoreFn,
	filter?: boolean,
	sort_empty?: SortFn|Sort[],
	respect_word_boundaries?: boolean,
	limit?: number,
}

// A parsed query token; `field` is set when the token targets one field.
export type Token = {
	string:string,
	regex:RegExp|null,
	field:string|null,
}

// Map of field name -> scoring weight.
export type Weights = {[key:string]:number}

// Search state produced by prepareSearch() and filled in by search().
export type PrepareObj = {
	options: Options,
	query: string,
	tokens: Token[],
	total: number,
	items: ResultItem[],
	weights: Weights,
	getAttrFn: (data:any,field:string)=>any,

}

export type Settings = {
	diacritics:boolean
}

// A single scored result, referencing the matched item by id.
export type ResultItem = {
	score: number,
	id: number|string,
}

// Scores an item against the query; 0 means "no match".
export type ScoreFn = (item:ResultItem) => number;
|
||||
103
node_modules/@orchidjs/sifter/lib/utils.ts
generated
vendored
Normal file
103
node_modules/@orchidjs/sifter/lib/utils.ts
generated
vendored
Normal file
@@ -0,0 +1,103 @@
|
||||
|
||||
import { asciifold } from '@orchidjs/unicode-variants';
|
||||
import * as T from './types';
|
||||
|
||||
|
||||
/**
|
||||
* A property getter resolving dot-notation
|
||||
* @param {Object} obj The root object to fetch property on
|
||||
* @param {String} name The optionally dotted property name to fetch
|
||||
* @return {Object} The resolved property value
|
||||
*/
|
||||
export const getAttr = (obj:{[key:string]:any}, name:string ) => {
|
||||
if (!obj ) return;
|
||||
return obj[name];
|
||||
};
|
||||
|
||||
/**
|
||||
* A property getter resolving dot-notation
|
||||
* @param {Object} obj The root object to fetch property on
|
||||
* @param {String} name The optionally dotted property name to fetch
|
||||
* @return {Object} The resolved property value
|
||||
*/
|
||||
export const getAttrNesting = (obj:{[key:string]:any}, name:string ) => {
|
||||
if (!obj ) return;
|
||||
var part, names = name.split(".");
|
||||
while( (part = names.shift()) && (obj = obj[part]));
|
||||
return obj;
|
||||
};
|
||||
|
||||
/**
|
||||
* Calculates how close of a match the
|
||||
* given value is against a search token.
|
||||
*
|
||||
*/
|
||||
export const scoreValue = (value:string, token:T.Token, weight:number ):number => {
|
||||
var score, pos;
|
||||
|
||||
if (!value) return 0;
|
||||
|
||||
value = value + '';
|
||||
if( token.regex == null ) return 0;
|
||||
pos = value.search(token.regex);
|
||||
if (pos === -1) return 0;
|
||||
|
||||
score = token.string.length / value.length;
|
||||
if (pos === 0) score += 0.5;
|
||||
|
||||
return score * weight;
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Cast object property to an array if it exists and has a value
|
||||
*
|
||||
*/
|
||||
export const propToArray = (obj:{[key:string]:any}, key:string) => {
|
||||
var value = obj[key];
|
||||
|
||||
if( typeof value == 'function' ) return value;
|
||||
|
||||
if( value && !Array.isArray(value) ){
|
||||
obj[key] = [value];
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Iterates over arrays and hashes.
|
||||
*
|
||||
* ```
|
||||
* iterate(this.items, function(item, id) {
|
||||
* // invoked for each item
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
*/
|
||||
export const iterate = (object:[]|{[key:string]:any}, callback:(value:any,key:any)=>any) => {
|
||||
|
||||
if ( Array.isArray(object)) {
|
||||
object.forEach(callback);
|
||||
|
||||
}else{
|
||||
|
||||
for (var key in object) {
|
||||
if (object.hasOwnProperty(key)) {
|
||||
callback(object[key], key);
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
|
||||
export const cmp = (a:number|string, b:number|string) => {
|
||||
if (typeof a === 'number' && typeof b === 'number') {
|
||||
return a > b ? 1 : (a < b ? -1 : 0);
|
||||
}
|
||||
a = asciifold(a + '').toLowerCase();
|
||||
b = asciifold(b + '').toLowerCase();
|
||||
if (a > b) return 1;
|
||||
if (b > a) return -1;
|
||||
return 0;
|
||||
};
|
||||
71
node_modules/@orchidjs/sifter/package.json
generated
vendored
Normal file
71
node_modules/@orchidjs/sifter/package.json
generated
vendored
Normal file
@@ -0,0 +1,71 @@
|
||||
{
|
||||
"name": "@orchidjs/sifter",
|
||||
"keywords": [
|
||||
"search",
|
||||
"filter",
|
||||
"sift",
|
||||
"data",
|
||||
"results",
|
||||
"match",
|
||||
"sort",
|
||||
"autocomplete"
|
||||
],
|
||||
"description": "A library for textually searching arrays and hashes of objects by property (or multiple properties). Designed specifically for autocomplete.",
|
||||
"version": "1.0.3",
|
||||
"license": "Apache-2.0",
|
||||
"author": "Brian Reavis <brian@thirdroute.com>",
|
||||
"main": "dist/umd/sifter.js",
|
||||
"browser": "dist/umd/sifter.js",
|
||||
"module": "dist/esm/sifter.js",
|
||||
"types": "dist/types/sifter.d.ts",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/orchidjs/sifter.js.git"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "jest --coverage",
|
||||
"test:typescript": "tsc -p .config --noemit",
|
||||
"test:coveralls": "npm run build && jest --coverage && cat ./coverage/lcov.info | coveralls",
|
||||
"pretest": "npm run build",
|
||||
"benchmark": "npm run build && node --expose-gc benchmark/index.js",
|
||||
"build": "npx rollup -c .config/rollup.config.js",
|
||||
"build:types": "tsc -p .config --emitDeclarationOnly"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/core": "^7.13.16",
|
||||
"@babel/plugin-proposal-class-properties": "^7.13.0",
|
||||
"@babel/preset-env": "^7.13.15",
|
||||
"@babel/preset-typescript": "^7.13.0",
|
||||
"@rollup/plugin-babel": "^5.3.0",
|
||||
"@rollup/plugin-node-resolve": "^13.0.0",
|
||||
"coveralls": "^3.1.0",
|
||||
"humanize": "0.0.9",
|
||||
"jest": "^28.1.2",
|
||||
"rollup": "^2.45.2",
|
||||
"rollup-plugin-terser": "^7.0.2",
|
||||
"typescript": "^4.8.3"
|
||||
},
|
||||
"browserslist": [
|
||||
">= 0.5%",
|
||||
"not dead",
|
||||
"Chrome >= 60",
|
||||
"Firefox >= 60",
|
||||
"Edge >= 17",
|
||||
"iOS >= 10",
|
||||
"Safari >= 10",
|
||||
"not Explorer <= 11"
|
||||
],
|
||||
"jest": {
|
||||
"verbose": true,
|
||||
"testMatch": [
|
||||
"**/test/**/*.js"
|
||||
]
|
||||
},
|
||||
"files": [
|
||||
"/dist",
|
||||
"/lib"
|
||||
],
|
||||
"dependencies": {
|
||||
"@orchidjs/unicode-variants": "^1.0.4"
|
||||
}
|
||||
}
|
||||
201
node_modules/@orchidjs/unicode-variants/LICENSE
generated
vendored
Normal file
201
node_modules/@orchidjs/unicode-variants/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,201 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
66
node_modules/@orchidjs/unicode-variants/README.md
generated
vendored
Normal file
66
node_modules/@orchidjs/unicode-variants/README.md
generated
vendored
Normal file
@@ -0,0 +1,66 @@
|
||||
# Unicode Variants
|
||||
[](https://travis-ci.com/github/orchidjs/unicode-variants)
|
||||
[](https://coveralls.io/r/orchidjs/unicode-variants)
|
||||
<a href="https://www.npmjs.com/package/@orchidjs/unicode-variants" class="m-1 d-inline-block"><img alt="npm (scoped)" src="https://img.shields.io/npm/v/@orchidjs/unicode-variants?color=007ec6"></a>
|
||||
|
||||
A small utility for comparing strings with unicode variants
|
||||
|
||||
Supported comparisons:
|
||||
* 1/4 and ¼
|
||||
* TM and ™
|
||||
* À, Á, Â, Ã, Ä, Å, Ⓐ and A
|
||||
* キロ and ㌔
|
||||
* and thousands more
|
||||
|
||||
## Example
|
||||
|
||||
```js
|
||||
|
||||
const stringa = '1/4';
|
||||
const stringb = '¼';
|
||||
|
||||
// without @orchidjs/unicode-variants
|
||||
let regex = new RegExp(stringa,'ui');
|
||||
console.log(regex.test(stringa)); // true
|
||||
console.log(regex.test(stringb)); // false
|
||||
|
||||
// with @orchidjs/unicode-variants
|
||||
import {getPattern} from '@orchidjs/unicode-variants';
|
||||
let pattern = getPattern(stringa);
|
||||
regex = new RegExp(pattern,'ui');
|
||||
console.log(regex.test(stringa)); // true
|
||||
console.log(regex.test(stringb)); // true
|
||||
|
||||
```
|
||||
|
||||
## Installation
|
||||
|
||||
```sh
|
||||
$ npm install @orchidjs/unicode-variants
|
||||
```
|
||||
|
||||
## Contributing
|
||||
|
||||
Install the dependencies that are required to build and test:
|
||||
|
||||
```sh
|
||||
$ npm install
|
||||
```
|
||||
|
||||
Build from typescript
|
||||
```sh
|
||||
$ npm run build
|
||||
```
|
||||
|
||||
Run tests
|
||||
```sh
|
||||
$ npm test
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
Copyright © 2013–2021 [Contributors](https://github.com/orchidjs/unicode-variants/graphs/contributors)
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at: http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
|
||||
672
node_modules/@orchidjs/unicode-variants/dist/cjs/index.js
generated
vendored
Normal file
672
node_modules/@orchidjs/unicode-variants/dist/cjs/index.js
generated
vendored
Normal file
@@ -0,0 +1,672 @@
|
||||
/*! @orchidjs/unicode-variants | https://github.com/orchidjs/unicode-variants | Apache License (v2) */
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
|
||||
/**
 * Convert array of strings to a regular expression
 * ex ['ab','a'] => (?:ab|a)
 * ex ['a','b'] => [ab]
 * @param {string[]} chars
 * @return {string}
 */
const arrayToPattern = chars => {
  const parts = chars.filter(Boolean);

  if (parts.length < 2) {
    return parts[0] || '';
  }

  // all single chars -> character class; otherwise non-capturing alternation
  return maxValueLength(parts) == 1 ? `[${parts.join('')}]` : `(?:${parts.join('|')})`;
};
|
||||
/**
 * Collapse a sequence of pattern pieces into one pattern string,
 * compressing runs of identical pieces with {n} quantifiers.
 * @param {string[]} array
 * @return {string}
 */
const sequencePattern = array => {
  // no repeats: plain concatenation is already minimal
  if (!hasDuplicates(array)) {
    return array.join('');
  }

  let pattern = '';
  let run_length = 0;
  let prev;

  const flush_run = () => {
    if (run_length > 1) {
      pattern += '{' + run_length + '}';
    }
  };

  for (const char of array) {
    if (char === prev) {
      run_length++;
      continue;
    }
    flush_run();
    pattern += char;
    run_length = 1;
    prev = char;
  }

  flush_run();
  return pattern;
};
|
||||
/**
 * Convert a set of strings to a regular expression pattern
 * ex {'ab','a'} => (?:ab|a)
 * ex {'a','b'} => [ab]
 * @param {Set<string>} chars
 * @return {string}
 */
const setToPattern = chars => arrayToPattern(toArray(chars));
|
||||
/**
 * Whether the array contains any repeated value.
 * https://stackoverflow.com/questions/7376598/in-javascript-how-do-i-check-if-an-array-has-duplicate-values
 * @param {any[]} array
 */
const hasDuplicates = array => {
  const unique = new Set(array);
  return unique.size !== array.length;
};
|
||||
/**
 * Escape regex metacharacters so the string matches literally.
 * https://stackoverflow.com/questions/63006601/why-does-u-throw-an-invalid-escape-error
 * @param {string} str
 * @return {string}
 */
const escape_regex = str => (str + '').replace(/([\$\(\)\*\+\.\?\[\]\^\{\|\}\\])/gu, '\\$1');
|
||||
/**
 * Return the max unicode length of the array's values.
 * @param {string[]} array
 */
const maxValueLength = array => {
  let longest = 0;
  for (const value of array) {
    longest = Math.max(longest, unicodeLength(value));
  }
  return longest;
};
|
||||
/**
 * Length of a string in code points (not UTF-16 units).
 * @param {string} str
 */
const unicodeLength = str => {
  return toArray(str).length;
};
|
||||
/**
 * Materialize any iterable or array-like into a real array.
 * @param {any} p
 * @return {any[]}
 */
const toArray = (p) => {
  return Array.from(p);
};
|
||||
|
||||
/**
 * Get all possible combinations of substrings that add up to the given string
 * https://stackoverflow.com/questions/30169587/find-all-the-combination-of-substrings-that-add-up-to-the-given-string
 * @param {string} input
 * @return {string[][]}
 */
const allSubstrings = input => {
  if (input.length === 1) return [[input]];

  /** @type {string[][]} */
  const result = [];
  const head = input.charAt(0);
  const tail_splits = allSubstrings(input.substring(1));

  for (const split of tail_splits) {
    // option 1: glue the head onto the first piece of the tail split
    const merged = split.slice(0);
    merged[0] = head + merged[0];
    result.push(merged);

    // option 2: keep the head as its own piece
    const separate = split.slice(0);
    separate.unshift(head);
    result.push(separate);
  }
  return result;
};
|
||||
|
||||
/**
 * @typedef {{[key:string]:string}} TUnicodeMap
 * @typedef {{[key:string]:Set<string>}} TUnicodeSets
 * @typedef {[[number,number]]} TCodePoints
 * @typedef {{folded:string,composed:string,code_point:number}} TCodePointObj
 * @typedef {{start:number,end:number,length:number,substr:string}} TSequencePart
 */

// Default code point range used to build the unicode map (0..0xFFFF).
/** @type {TCodePoints} */
const code_points = [[0, 65535]];

// Combining diacritical marks plus middle dot / modifier letters.
const accent_pat = '[\u0300-\u036F\u{b7}\u{2be}\u{2bc}]';

// Lazily built by initialize(); undefined until then.
/** @type {TUnicodeMap} */
exports.unicode_map = void 0;

/** @type {RegExp} */
let multi_char_reg;

// Longest latin replacement a single char can expand to (see latin_condensed keys).
const max_char_length = 3;

// Reverse lookup (unicode char -> latin replacement), built from latin_condensed below.
/** @type {TUnicodeMap} */
const latin_convert = {};

// latin replacement -> string of unicode characters that fold to it.
/** @type {TUnicodeMap} */
const latin_condensed = {
  '/': '⁄∕',
  '0': '߀',
  "a": "ⱥɐɑ",
  "aa": "ꜳ",
  "ae": "æǽǣ",
  "ao": "ꜵ",
  "au": "ꜷ",
  "av": "ꜹꜻ",
  "ay": "ꜽ",
  "b": "ƀɓƃ",
  "c": "ꜿƈȼↄ",
  "d": "đɗɖᴅƌꮷԁɦ",
  "e": "ɛǝᴇɇ",
  "f": "ꝼƒ",
  "g": "ǥɠꞡᵹꝿɢ",
  "h": "ħⱨⱶɥ",
  "i": "ɨı",
  "j": "ɉȷ",
  "k": "ƙⱪꝁꝃꝅꞣ",
  "l": "łƚɫⱡꝉꝇꞁɭ",
  "m": "ɱɯϻ",
  "n": "ꞥƞɲꞑᴎлԉ",
  "o": "øǿɔɵꝋꝍᴑ",
  "oe": "œ",
  "oi": "ƣ",
  "oo": "ꝏ",
  "ou": "ȣ",
  "p": "ƥᵽꝑꝓꝕρ",
  "q": "ꝗꝙɋ",
  "r": "ɍɽꝛꞧꞃ",
  "s": "ßȿꞩꞅʂ",
  "t": "ŧƭʈⱦꞇ",
  "th": "þ",
  "tz": "ꜩ",
  "u": "ʉ",
  "v": "ʋꝟʌ",
  "vy": "ꝡ",
  "w": "ⱳ",
  "y": "ƴɏỿ",
  "z": "ƶȥɀⱬꝣ",
  "hv": "ƕ"
};

// Invert latin_condensed into latin_convert: each unicode char maps
// back to its latin replacement.
for (let latin in latin_condensed) {
  let unicode = latin_condensed[latin] || '';

  for (let i = 0; i < unicode.length; i++) {
    let char = unicode.substring(i, i + 1);
    latin_convert[char] = latin;
  }
}

// Matches any single convertible character or accent mark.
const convert_pat = new RegExp(Object.keys(latin_convert).join('|') + '|' + accent_pat, 'gu');
|
||||
/**
|
||||
* Initialize the unicode_map from the give code point ranges
|
||||
*
|
||||
* @param {TCodePoints=} _code_points
|
||||
*/
|
||||
|
||||
const initialize = _code_points => {
|
||||
if (exports.unicode_map !== undefined) return;
|
||||
exports.unicode_map = generateMap(_code_points || code_points);
|
||||
};
|
||||
/**
|
||||
* Helper method for normalize a string
|
||||
* https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/normalize
|
||||
* @param {string} str
|
||||
* @param {string} form
|
||||
*/
|
||||
|
||||
const normalize = (str, form = 'NFKD') => str.normalize(form);
|
||||
/**
|
||||
* Remove accents without reordering string
|
||||
* calling str.normalize('NFKD') on \u{594}\u{595}\u{596} becomes \u{596}\u{594}\u{595}
|
||||
* via https://github.com/krisk/Fuse/issues/133#issuecomment-318692703
|
||||
* @param {string} str
|
||||
* @return {string}
|
||||
*/
|
||||
|
||||
const asciifold = str => {
|
||||
return toArray(str).reduce(
|
||||
/**
|
||||
* @param {string} result
|
||||
* @param {string} char
|
||||
*/
|
||||
(result, char) => {
|
||||
return result + _asciifold(char);
|
||||
}, '');
|
||||
};
|
||||
/**
|
||||
* @param {string} str
|
||||
* @return {string}
|
||||
*/
|
||||
|
||||
const _asciifold = str => {
|
||||
str = normalize(str).toLowerCase().replace(convert_pat, (
|
||||
/** @type {string} */
|
||||
char) => {
|
||||
return latin_convert[char] || '';
|
||||
}); //return str;
|
||||
|
||||
return normalize(str, 'NFC');
|
||||
};
|
||||
/**
|
||||
* Generate a list of unicode variants from the list of code points
|
||||
* @param {TCodePoints} code_points
|
||||
* @yield {TCodePointObj}
|
||||
*/
|
||||
|
||||
function* generator(code_points) {
|
||||
for (const [code_point_min, code_point_max] of code_points) {
|
||||
for (let i = code_point_min; i <= code_point_max; i++) {
|
||||
let composed = String.fromCharCode(i);
|
||||
let folded = asciifold(composed);
|
||||
|
||||
if (folded == composed.toLowerCase()) {
|
||||
continue;
|
||||
} // skip when folded is a string longer than 3 characters long
|
||||
// bc the resulting regex patterns will be long
|
||||
// eg:
|
||||
// folded صلى الله عليه وسلم length 18 code point 65018
|
||||
// folded جل جلاله length 8 code point 65019
|
||||
|
||||
|
||||
if (folded.length > max_char_length) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (folded.length == 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
yield {
|
||||
folded: folded,
|
||||
composed: composed,
|
||||
code_point: i
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Generate a unicode map from the list of code points
|
||||
* @param {TCodePoints} code_points
|
||||
* @return {TUnicodeSets}
|
||||
*/
|
||||
|
||||
const generateSets = code_points => {
|
||||
/** @type {{[key:string]:Set<string>}} */
|
||||
const unicode_sets = {};
|
||||
/**
|
||||
* @param {string} folded
|
||||
* @param {string} to_add
|
||||
*/
|
||||
|
||||
const addMatching = (folded, to_add) => {
|
||||
/** @type {Set<string>} */
|
||||
const folded_set = unicode_sets[folded] || new Set();
|
||||
const patt = new RegExp('^' + setToPattern(folded_set) + '$', 'iu');
|
||||
|
||||
if (to_add.match(patt)) {
|
||||
return;
|
||||
}
|
||||
|
||||
folded_set.add(escape_regex(to_add));
|
||||
unicode_sets[folded] = folded_set;
|
||||
};
|
||||
|
||||
for (let value of generator(code_points)) {
|
||||
addMatching(value.folded, value.folded);
|
||||
addMatching(value.folded, value.composed);
|
||||
}
|
||||
|
||||
return unicode_sets;
|
||||
};
|
||||
/**
|
||||
* Generate a unicode map from the list of code points
|
||||
* ae => (?:(?:ae|Æ|Ǽ|Ǣ)|(?:A|Ⓐ|A...)(?:E|ɛ|Ⓔ...))
|
||||
*
|
||||
* @param {TCodePoints} code_points
|
||||
* @return {TUnicodeMap}
|
||||
*/
|
||||
|
||||
const generateMap = code_points => {
|
||||
/** @type {TUnicodeSets} */
|
||||
const unicode_sets = generateSets(code_points);
|
||||
/** @type {TUnicodeMap} */
|
||||
|
||||
const unicode_map = {};
|
||||
/** @type {string[]} */
|
||||
|
||||
let multi_char = [];
|
||||
|
||||
for (let folded in unicode_sets) {
|
||||
let set = unicode_sets[folded];
|
||||
|
||||
if (set) {
|
||||
unicode_map[folded] = setToPattern(set);
|
||||
}
|
||||
|
||||
if (folded.length > 1) {
|
||||
multi_char.push(escape_regex(folded));
|
||||
}
|
||||
}
|
||||
|
||||
multi_char.sort((a, b) => b.length - a.length);
|
||||
const multi_char_patt = arrayToPattern(multi_char);
|
||||
multi_char_reg = new RegExp('^' + multi_char_patt, 'u');
|
||||
return unicode_map;
|
||||
};
|
||||
/**
|
||||
* Map each element of an array from it's folded value to all possible unicode matches
|
||||
* @param {string[]} strings
|
||||
* @param {number} min_replacement
|
||||
* @return {string}
|
||||
*/
|
||||
|
||||
const mapSequence = (strings, min_replacement = 1) => {
|
||||
let chars_replaced = 0;
|
||||
strings = strings.map(str => {
|
||||
if (exports.unicode_map[str]) {
|
||||
chars_replaced += str.length;
|
||||
}
|
||||
|
||||
return exports.unicode_map[str] || str;
|
||||
});
|
||||
|
||||
if (chars_replaced >= min_replacement) {
|
||||
return sequencePattern(strings);
|
||||
}
|
||||
|
||||
return '';
|
||||
};
|
||||
/**
|
||||
* Convert a short string and split it into all possible patterns
|
||||
* Keep a pattern only if min_replacement is met
|
||||
*
|
||||
* 'abc'
|
||||
* => [['abc'],['ab','c'],['a','bc'],['a','b','c']]
|
||||
* => ['abc-pattern','ab-c-pattern'...]
|
||||
*
|
||||
*
|
||||
* @param {string} str
|
||||
* @param {number} min_replacement
|
||||
* @return {string}
|
||||
*/
|
||||
|
||||
const substringsToPattern = (str, min_replacement = 1) => {
|
||||
min_replacement = Math.max(min_replacement, str.length - 1);
|
||||
return arrayToPattern(allSubstrings(str).map(sub_pat => {
|
||||
return mapSequence(sub_pat, min_replacement);
|
||||
}));
|
||||
};
|
||||
/**
|
||||
* Convert an array of sequences into a pattern
|
||||
* [{start:0,end:3,length:3,substr:'iii'}...] => (?:iii...)
|
||||
*
|
||||
* @param {Sequence[]} sequences
|
||||
* @param {boolean} all
|
||||
*/
|
||||
|
||||
const sequencesToPattern = (sequences, all = true) => {
|
||||
let min_replacement = sequences.length > 1 ? 1 : 0;
|
||||
return arrayToPattern(sequences.map(sequence => {
|
||||
let seq = [];
|
||||
const len = all ? sequence.length() : sequence.length() - 1;
|
||||
|
||||
for (let j = 0; j < len; j++) {
|
||||
seq.push(substringsToPattern(sequence.substrs[j] || '', min_replacement));
|
||||
}
|
||||
|
||||
return sequencePattern(seq);
|
||||
}));
|
||||
};
|
||||
/**
|
||||
* Return true if the sequence is already in the sequences
|
||||
* @param {Sequence} needle_seq
|
||||
* @param {Sequence[]} sequences
|
||||
*/
|
||||
|
||||
|
||||
const inSequences = (needle_seq, sequences) => {
|
||||
for (const seq of sequences) {
|
||||
if (seq.start != needle_seq.start || seq.end != needle_seq.end) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (seq.substrs.join('') !== needle_seq.substrs.join('')) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let needle_parts = needle_seq.parts;
|
||||
/**
|
||||
* @param {TSequencePart} part
|
||||
*/
|
||||
|
||||
const filter = part => {
|
||||
for (const needle_part of needle_parts) {
|
||||
if (needle_part.start === part.start && needle_part.substr === part.substr) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (part.length == 1 || needle_part.length == 1) {
|
||||
continue;
|
||||
} // check for overlapping parts
|
||||
// a = ['::=','==']
|
||||
// b = ['::','===']
|
||||
// a = ['r','sm']
|
||||
// b = ['rs','m']
|
||||
|
||||
|
||||
if (part.start < needle_part.start && part.end > needle_part.start) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (needle_part.start < part.start && needle_part.end > part.start) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
};
|
||||
|
||||
let filtered = seq.parts.filter(filter);
|
||||
|
||||
if (filtered.length > 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
};
|
||||
|
||||
class Sequence {
|
||||
constructor() {
|
||||
/** @type {TSequencePart[]} */
|
||||
this.parts = [];
|
||||
/** @type {string[]} */
|
||||
|
||||
this.substrs = [];
|
||||
this.start = 0;
|
||||
this.end = 0;
|
||||
}
|
||||
/**
|
||||
* @param {TSequencePart|undefined} part
|
||||
*/
|
||||
|
||||
|
||||
add(part) {
|
||||
if (part) {
|
||||
this.parts.push(part);
|
||||
this.substrs.push(part.substr);
|
||||
this.start = Math.min(part.start, this.start);
|
||||
this.end = Math.max(part.end, this.end);
|
||||
}
|
||||
}
|
||||
|
||||
last() {
|
||||
return this.parts[this.parts.length - 1];
|
||||
}
|
||||
|
||||
length() {
|
||||
return this.parts.length;
|
||||
}
|
||||
/**
|
||||
* @param {number} position
|
||||
* @param {TSequencePart} last_piece
|
||||
*/
|
||||
|
||||
|
||||
clone(position, last_piece) {
|
||||
let clone = new Sequence();
|
||||
let parts = JSON.parse(JSON.stringify(this.parts));
|
||||
let last_part = parts.pop();
|
||||
|
||||
for (const part of parts) {
|
||||
clone.add(part);
|
||||
}
|
||||
|
||||
let last_substr = last_piece.substr.substring(0, position - last_part.start);
|
||||
let clone_last_len = last_substr.length;
|
||||
clone.add({
|
||||
start: last_part.start,
|
||||
end: last_part.start + clone_last_len,
|
||||
length: clone_last_len,
|
||||
substr: last_substr
|
||||
});
|
||||
return clone;
|
||||
}
|
||||
|
||||
}
|
||||
/**
|
||||
* Expand a regular expression pattern to include unicode variants
|
||||
* eg /a/ becomes /aⓐaẚàáâầấẫẩãāăằắẵẳȧǡäǟảåǻǎȁȃạậặḁąⱥɐɑAⒶAÀÁÂẦẤẪẨÃĀĂẰẮẴẲȦǠÄǞẢÅǺǍȀȂẠẬẶḀĄȺⱯ/
|
||||
*
|
||||
* Issue:
|
||||
* ﺊﺋ [ 'ﺊ = \\u{fe8a}', 'ﺋ = \\u{fe8b}' ]
|
||||
* becomes: ئئ [ 'ي = \\u{64a}', 'ٔ = \\u{654}', 'ي = \\u{64a}', 'ٔ = \\u{654}' ]
|
||||
*
|
||||
* İIJ = IIJ = ⅡJ
|
||||
*
|
||||
* 1/2/4
|
||||
*
|
||||
* @param {string} str
|
||||
* @return {string|undefined}
|
||||
*/
|
||||
|
||||
|
||||
const getPattern = str => {
|
||||
initialize();
|
||||
str = asciifold(str);
|
||||
let pattern = '';
|
||||
let sequences = [new Sequence()];
|
||||
|
||||
for (let i = 0; i < str.length; i++) {
|
||||
let substr = str.substring(i);
|
||||
let match = substr.match(multi_char_reg);
|
||||
const char = str.substring(i, i + 1);
|
||||
const match_str = match ? match[0] : null; // loop through sequences
|
||||
// add either the char or multi_match
|
||||
|
||||
let overlapping = [];
|
||||
let added_types = new Set();
|
||||
|
||||
for (const sequence of sequences) {
|
||||
const last_piece = sequence.last();
|
||||
|
||||
if (!last_piece || last_piece.length == 1 || last_piece.end <= i) {
|
||||
// if we have a multi match
|
||||
if (match_str) {
|
||||
const len = match_str.length;
|
||||
sequence.add({
|
||||
start: i,
|
||||
end: i + len,
|
||||
length: len,
|
||||
substr: match_str
|
||||
});
|
||||
added_types.add('1');
|
||||
} else {
|
||||
sequence.add({
|
||||
start: i,
|
||||
end: i + 1,
|
||||
length: 1,
|
||||
substr: char
|
||||
});
|
||||
added_types.add('2');
|
||||
}
|
||||
} else if (match_str) {
|
||||
let clone = sequence.clone(i, last_piece);
|
||||
const len = match_str.length;
|
||||
clone.add({
|
||||
start: i,
|
||||
end: i + len,
|
||||
length: len,
|
||||
substr: match_str
|
||||
});
|
||||
overlapping.push(clone);
|
||||
} else {
|
||||
// don't add char
|
||||
// adding would create invalid patterns: 234 => [2,34,4]
|
||||
added_types.add('3');
|
||||
}
|
||||
} // if we have overlapping
|
||||
|
||||
|
||||
if (overlapping.length > 0) {
|
||||
// ['ii','iii'] before ['i','i','iii']
|
||||
overlapping = overlapping.sort((a, b) => {
|
||||
return a.length() - b.length();
|
||||
});
|
||||
|
||||
for (let clone of overlapping) {
|
||||
// don't add if we already have an equivalent sequence
|
||||
if (inSequences(clone, sequences)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
sequences.push(clone);
|
||||
}
|
||||
|
||||
continue;
|
||||
} // if we haven't done anything unique
|
||||
// clean up the patterns
|
||||
// helps keep patterns smaller
|
||||
// if str = 'r₨㎧aarss', pattern will be 446 instead of 655
|
||||
|
||||
|
||||
if (i > 0 && added_types.size == 1 && !added_types.has('3')) {
|
||||
pattern += sequencesToPattern(sequences, false);
|
||||
let new_seq = new Sequence();
|
||||
const old_seq = sequences[0];
|
||||
|
||||
if (old_seq) {
|
||||
new_seq.add(old_seq.last());
|
||||
}
|
||||
|
||||
sequences = [new_seq];
|
||||
}
|
||||
}
|
||||
|
||||
pattern += sequencesToPattern(sequences, true);
|
||||
return pattern;
|
||||
};
|
||||
|
||||
exports._asciifold = _asciifold;
|
||||
exports.asciifold = asciifold;
|
||||
exports.code_points = code_points;
|
||||
exports.escape_regex = escape_regex;
|
||||
exports.generateMap = generateMap;
|
||||
exports.generateSets = generateSets;
|
||||
exports.generator = generator;
|
||||
exports.getPattern = getPattern;
|
||||
exports.initialize = initialize;
|
||||
exports.mapSequence = mapSequence;
|
||||
exports.normalize = normalize;
|
||||
exports.substringsToPattern = substringsToPattern;
|
||||
//# sourceMappingURL=index.js.map
|
||||
1
node_modules/@orchidjs/unicode-variants/dist/cjs/index.js.map
generated
vendored
Normal file
1
node_modules/@orchidjs/unicode-variants/dist/cjs/index.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
536
node_modules/@orchidjs/unicode-variants/dist/esm/index.js
generated
vendored
Normal file
536
node_modules/@orchidjs/unicode-variants/dist/esm/index.js
generated
vendored
Normal file
@@ -0,0 +1,536 @@
|
||||
/*! @orchidjs/unicode-variants | https://github.com/orchidjs/unicode-variants | Apache License (v2) */
|
||||
import { toArray, setToPattern, escape_regex, arrayToPattern, sequencePattern } from './regex.js';
|
||||
export { escape_regex } from './regex.js';
|
||||
import { allSubstrings } from './strings.js';
|
||||
|
||||
/**
|
||||
* @typedef {{[key:string]:string}} TUnicodeMap
|
||||
* @typedef {{[key:string]:Set<string>}} TUnicodeSets
|
||||
* @typedef {[[number,number]]} TCodePoints
|
||||
* @typedef {{folded:string,composed:string,code_point:number}} TCodePointObj
|
||||
* @typedef {{start:number,end:number,length:number,substr:string}} TSequencePart
|
||||
*/
|
||||
/** @type {TCodePoints} */
|
||||
|
||||
const code_points = [[0, 65535]];
|
||||
const accent_pat = '[\u0300-\u036F\u{b7}\u{2be}\u{2bc}]';
|
||||
/** @type {TUnicodeMap} */
|
||||
|
||||
let unicode_map;
|
||||
/** @type {RegExp} */
|
||||
|
||||
let multi_char_reg;
|
||||
const max_char_length = 3;
|
||||
/** @type {TUnicodeMap} */
|
||||
|
||||
const latin_convert = {};
|
||||
/** @type {TUnicodeMap} */
|
||||
|
||||
const latin_condensed = {
|
||||
'/': '⁄∕',
|
||||
'0': '߀',
|
||||
"a": "ⱥɐɑ",
|
||||
"aa": "ꜳ",
|
||||
"ae": "æǽǣ",
|
||||
"ao": "ꜵ",
|
||||
"au": "ꜷ",
|
||||
"av": "ꜹꜻ",
|
||||
"ay": "ꜽ",
|
||||
"b": "ƀɓƃ",
|
||||
"c": "ꜿƈȼↄ",
|
||||
"d": "đɗɖᴅƌꮷԁɦ",
|
||||
"e": "ɛǝᴇɇ",
|
||||
"f": "ꝼƒ",
|
||||
"g": "ǥɠꞡᵹꝿɢ",
|
||||
"h": "ħⱨⱶɥ",
|
||||
"i": "ɨı",
|
||||
"j": "ɉȷ",
|
||||
"k": "ƙⱪꝁꝃꝅꞣ",
|
||||
"l": "łƚɫⱡꝉꝇꞁɭ",
|
||||
"m": "ɱɯϻ",
|
||||
"n": "ꞥƞɲꞑᴎлԉ",
|
||||
"o": "øǿɔɵꝋꝍᴑ",
|
||||
"oe": "œ",
|
||||
"oi": "ƣ",
|
||||
"oo": "ꝏ",
|
||||
"ou": "ȣ",
|
||||
"p": "ƥᵽꝑꝓꝕρ",
|
||||
"q": "ꝗꝙɋ",
|
||||
"r": "ɍɽꝛꞧꞃ",
|
||||
"s": "ßȿꞩꞅʂ",
|
||||
"t": "ŧƭʈⱦꞇ",
|
||||
"th": "þ",
|
||||
"tz": "ꜩ",
|
||||
"u": "ʉ",
|
||||
"v": "ʋꝟʌ",
|
||||
"vy": "ꝡ",
|
||||
"w": "ⱳ",
|
||||
"y": "ƴɏỿ",
|
||||
"z": "ƶȥɀⱬꝣ",
|
||||
"hv": "ƕ"
|
||||
};
|
||||
|
||||
for (let latin in latin_condensed) {
|
||||
let unicode = latin_condensed[latin] || '';
|
||||
|
||||
for (let i = 0; i < unicode.length; i++) {
|
||||
let char = unicode.substring(i, i + 1);
|
||||
latin_convert[char] = latin;
|
||||
}
|
||||
}
|
||||
|
||||
const convert_pat = new RegExp(Object.keys(latin_convert).join('|') + '|' + accent_pat, 'gu');
|
||||
/**
|
||||
* Initialize the unicode_map from the give code point ranges
|
||||
*
|
||||
* @param {TCodePoints=} _code_points
|
||||
*/
|
||||
|
||||
const initialize = _code_points => {
|
||||
if (unicode_map !== undefined) return;
|
||||
unicode_map = generateMap(_code_points || code_points);
|
||||
};
|
||||
/**
|
||||
* Helper method for normalize a string
|
||||
* https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/normalize
|
||||
* @param {string} str
|
||||
* @param {string} form
|
||||
*/
|
||||
|
||||
const normalize = (str, form = 'NFKD') => str.normalize(form);
|
||||
/**
|
||||
* Remove accents without reordering string
|
||||
* calling str.normalize('NFKD') on \u{594}\u{595}\u{596} becomes \u{596}\u{594}\u{595}
|
||||
* via https://github.com/krisk/Fuse/issues/133#issuecomment-318692703
|
||||
* @param {string} str
|
||||
* @return {string}
|
||||
*/
|
||||
|
||||
const asciifold = str => {
|
||||
return toArray(str).reduce(
|
||||
/**
|
||||
* @param {string} result
|
||||
* @param {string} char
|
||||
*/
|
||||
(result, char) => {
|
||||
return result + _asciifold(char);
|
||||
}, '');
|
||||
};
|
||||
/**
|
||||
* @param {string} str
|
||||
* @return {string}
|
||||
*/
|
||||
|
||||
const _asciifold = str => {
|
||||
str = normalize(str).toLowerCase().replace(convert_pat, (
|
||||
/** @type {string} */
|
||||
char) => {
|
||||
return latin_convert[char] || '';
|
||||
}); //return str;
|
||||
|
||||
return normalize(str, 'NFC');
|
||||
};
|
||||
/**
|
||||
* Generate a list of unicode variants from the list of code points
|
||||
* @param {TCodePoints} code_points
|
||||
* @yield {TCodePointObj}
|
||||
*/
|
||||
|
||||
function* generator(code_points) {
|
||||
for (const [code_point_min, code_point_max] of code_points) {
|
||||
for (let i = code_point_min; i <= code_point_max; i++) {
|
||||
let composed = String.fromCharCode(i);
|
||||
let folded = asciifold(composed);
|
||||
|
||||
if (folded == composed.toLowerCase()) {
|
||||
continue;
|
||||
} // skip when folded is a string longer than 3 characters long
|
||||
// bc the resulting regex patterns will be long
|
||||
// eg:
|
||||
// folded صلى الله عليه وسلم length 18 code point 65018
|
||||
// folded جل جلاله length 8 code point 65019
|
||||
|
||||
|
||||
if (folded.length > max_char_length) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (folded.length == 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
yield {
|
||||
folded: folded,
|
||||
composed: composed,
|
||||
code_point: i
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Generate a unicode map from the list of code points
|
||||
* @param {TCodePoints} code_points
|
||||
* @return {TUnicodeSets}
|
||||
*/
|
||||
|
||||
const generateSets = code_points => {
|
||||
/** @type {{[key:string]:Set<string>}} */
|
||||
const unicode_sets = {};
|
||||
/**
|
||||
* @param {string} folded
|
||||
* @param {string} to_add
|
||||
*/
|
||||
|
||||
const addMatching = (folded, to_add) => {
|
||||
/** @type {Set<string>} */
|
||||
const folded_set = unicode_sets[folded] || new Set();
|
||||
const patt = new RegExp('^' + setToPattern(folded_set) + '$', 'iu');
|
||||
|
||||
if (to_add.match(patt)) {
|
||||
return;
|
||||
}
|
||||
|
||||
folded_set.add(escape_regex(to_add));
|
||||
unicode_sets[folded] = folded_set;
|
||||
};
|
||||
|
||||
for (let value of generator(code_points)) {
|
||||
addMatching(value.folded, value.folded);
|
||||
addMatching(value.folded, value.composed);
|
||||
}
|
||||
|
||||
return unicode_sets;
|
||||
};
|
||||
/**
|
||||
* Generate a unicode map from the list of code points
|
||||
* ae => (?:(?:ae|Æ|Ǽ|Ǣ)|(?:A|Ⓐ|A...)(?:E|ɛ|Ⓔ...))
|
||||
*
|
||||
* @param {TCodePoints} code_points
|
||||
* @return {TUnicodeMap}
|
||||
*/
|
||||
|
||||
const generateMap = code_points => {
|
||||
/** @type {TUnicodeSets} */
|
||||
const unicode_sets = generateSets(code_points);
|
||||
/** @type {TUnicodeMap} */
|
||||
|
||||
const unicode_map = {};
|
||||
/** @type {string[]} */
|
||||
|
||||
let multi_char = [];
|
||||
|
||||
for (let folded in unicode_sets) {
|
||||
let set = unicode_sets[folded];
|
||||
|
||||
if (set) {
|
||||
unicode_map[folded] = setToPattern(set);
|
||||
}
|
||||
|
||||
if (folded.length > 1) {
|
||||
multi_char.push(escape_regex(folded));
|
||||
}
|
||||
}
|
||||
|
||||
multi_char.sort((a, b) => b.length - a.length);
|
||||
const multi_char_patt = arrayToPattern(multi_char);
|
||||
multi_char_reg = new RegExp('^' + multi_char_patt, 'u');
|
||||
return unicode_map;
|
||||
};
|
||||
/**
|
||||
* Map each element of an array from it's folded value to all possible unicode matches
|
||||
* @param {string[]} strings
|
||||
* @param {number} min_replacement
|
||||
* @return {string}
|
||||
*/
|
||||
|
||||
const mapSequence = (strings, min_replacement = 1) => {
|
||||
let chars_replaced = 0;
|
||||
strings = strings.map(str => {
|
||||
if (unicode_map[str]) {
|
||||
chars_replaced += str.length;
|
||||
}
|
||||
|
||||
return unicode_map[str] || str;
|
||||
});
|
||||
|
||||
if (chars_replaced >= min_replacement) {
|
||||
return sequencePattern(strings);
|
||||
}
|
||||
|
||||
return '';
|
||||
};
|
||||
/**
|
||||
* Convert a short string and split it into all possible patterns
|
||||
* Keep a pattern only if min_replacement is met
|
||||
*
|
||||
* 'abc'
|
||||
* => [['abc'],['ab','c'],['a','bc'],['a','b','c']]
|
||||
* => ['abc-pattern','ab-c-pattern'...]
|
||||
*
|
||||
*
|
||||
* @param {string} str
|
||||
* @param {number} min_replacement
|
||||
* @return {string}
|
||||
*/
|
||||
|
||||
const substringsToPattern = (str, min_replacement = 1) => {
|
||||
min_replacement = Math.max(min_replacement, str.length - 1);
|
||||
return arrayToPattern(allSubstrings(str).map(sub_pat => {
|
||||
return mapSequence(sub_pat, min_replacement);
|
||||
}));
|
||||
};
|
||||
/**
|
||||
* Convert an array of sequences into a pattern
|
||||
* [{start:0,end:3,length:3,substr:'iii'}...] => (?:iii...)
|
||||
*
|
||||
* @param {Sequence[]} sequences
|
||||
* @param {boolean} all
|
||||
*/
|
||||
|
||||
const sequencesToPattern = (sequences, all = true) => {
|
||||
let min_replacement = sequences.length > 1 ? 1 : 0;
|
||||
return arrayToPattern(sequences.map(sequence => {
|
||||
let seq = [];
|
||||
const len = all ? sequence.length() : sequence.length() - 1;
|
||||
|
||||
for (let j = 0; j < len; j++) {
|
||||
seq.push(substringsToPattern(sequence.substrs[j] || '', min_replacement));
|
||||
}
|
||||
|
||||
return sequencePattern(seq);
|
||||
}));
|
||||
};
|
||||
/**
|
||||
* Return true if the sequence is already in the sequences
|
||||
* @param {Sequence} needle_seq
|
||||
* @param {Sequence[]} sequences
|
||||
*/
|
||||
|
||||
|
||||
const inSequences = (needle_seq, sequences) => {
|
||||
for (const seq of sequences) {
|
||||
if (seq.start != needle_seq.start || seq.end != needle_seq.end) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (seq.substrs.join('') !== needle_seq.substrs.join('')) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let needle_parts = needle_seq.parts;
|
||||
/**
|
||||
* @param {TSequencePart} part
|
||||
*/
|
||||
|
||||
const filter = part => {
|
||||
for (const needle_part of needle_parts) {
|
||||
if (needle_part.start === part.start && needle_part.substr === part.substr) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (part.length == 1 || needle_part.length == 1) {
|
||||
continue;
|
||||
} // check for overlapping parts
|
||||
// a = ['::=','==']
|
||||
// b = ['::','===']
|
||||
// a = ['r','sm']
|
||||
// b = ['rs','m']
|
||||
|
||||
|
||||
if (part.start < needle_part.start && part.end > needle_part.start) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (needle_part.start < part.start && needle_part.end > part.start) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
};
|
||||
|
||||
let filtered = seq.parts.filter(filter);
|
||||
|
||||
if (filtered.length > 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
};
|
||||
|
||||
class Sequence {
|
||||
constructor() {
|
||||
/** @type {TSequencePart[]} */
|
||||
this.parts = [];
|
||||
/** @type {string[]} */
|
||||
|
||||
this.substrs = [];
|
||||
this.start = 0;
|
||||
this.end = 0;
|
||||
}
|
||||
/**
|
||||
* @param {TSequencePart|undefined} part
|
||||
*/
|
||||
|
||||
|
||||
add(part) {
|
||||
if (part) {
|
||||
this.parts.push(part);
|
||||
this.substrs.push(part.substr);
|
||||
this.start = Math.min(part.start, this.start);
|
||||
this.end = Math.max(part.end, this.end);
|
||||
}
|
||||
}
|
||||
|
||||
last() {
|
||||
return this.parts[this.parts.length - 1];
|
||||
}
|
||||
|
||||
length() {
|
||||
return this.parts.length;
|
||||
}
|
||||
/**
|
||||
* @param {number} position
|
||||
* @param {TSequencePart} last_piece
|
||||
*/
|
||||
|
||||
|
||||
clone(position, last_piece) {
|
||||
let clone = new Sequence();
|
||||
let parts = JSON.parse(JSON.stringify(this.parts));
|
||||
let last_part = parts.pop();
|
||||
|
||||
for (const part of parts) {
|
||||
clone.add(part);
|
||||
}
|
||||
|
||||
let last_substr = last_piece.substr.substring(0, position - last_part.start);
|
||||
let clone_last_len = last_substr.length;
|
||||
clone.add({
|
||||
start: last_part.start,
|
||||
end: last_part.start + clone_last_len,
|
||||
length: clone_last_len,
|
||||
substr: last_substr
|
||||
});
|
||||
return clone;
|
||||
}
|
||||
|
||||
}
|
||||
/**
|
||||
* Expand a regular expression pattern to include unicode variants
|
||||
* eg /a/ becomes /aⓐaẚàáâầấẫẩãāăằắẵẳȧǡäǟảåǻǎȁȃạậặḁąⱥɐɑAⒶAÀÁÂẦẤẪẨÃĀĂẰẮẴẲȦǠÄǞẢÅǺǍȀȂẠẬẶḀĄȺⱯ/
|
||||
*
|
||||
* Issue:
|
||||
* ﺊﺋ [ 'ﺊ = \\u{fe8a}', 'ﺋ = \\u{fe8b}' ]
|
||||
* becomes: ئئ [ 'ي = \\u{64a}', 'ٔ = \\u{654}', 'ي = \\u{64a}', 'ٔ = \\u{654}' ]
|
||||
*
|
||||
* İIJ = IIJ = ⅡJ
|
||||
*
|
||||
* 1/2/4
|
||||
*
|
||||
* @param {string} str
|
||||
* @return {string|undefined}
|
||||
*/
|
||||
|
||||
|
||||
const getPattern = str => {
|
||||
initialize();
|
||||
str = asciifold(str);
|
||||
let pattern = '';
|
||||
let sequences = [new Sequence()];
|
||||
|
||||
for (let i = 0; i < str.length; i++) {
|
||||
let substr = str.substring(i);
|
||||
let match = substr.match(multi_char_reg);
|
||||
const char = str.substring(i, i + 1);
|
||||
const match_str = match ? match[0] : null; // loop through sequences
|
||||
// add either the char or multi_match
|
||||
|
||||
let overlapping = [];
|
||||
let added_types = new Set();
|
||||
|
||||
for (const sequence of sequences) {
|
||||
const last_piece = sequence.last();
|
||||
|
||||
if (!last_piece || last_piece.length == 1 || last_piece.end <= i) {
|
||||
// if we have a multi match
|
||||
if (match_str) {
|
||||
const len = match_str.length;
|
||||
sequence.add({
|
||||
start: i,
|
||||
end: i + len,
|
||||
length: len,
|
||||
substr: match_str
|
||||
});
|
||||
added_types.add('1');
|
||||
} else {
|
||||
sequence.add({
|
||||
start: i,
|
||||
end: i + 1,
|
||||
length: 1,
|
||||
substr: char
|
||||
});
|
||||
added_types.add('2');
|
||||
}
|
||||
} else if (match_str) {
|
||||
let clone = sequence.clone(i, last_piece);
|
||||
const len = match_str.length;
|
||||
clone.add({
|
||||
start: i,
|
||||
end: i + len,
|
||||
length: len,
|
||||
substr: match_str
|
||||
});
|
||||
overlapping.push(clone);
|
||||
} else {
|
||||
// don't add char
|
||||
// adding would create invalid patterns: 234 => [2,34,4]
|
||||
added_types.add('3');
|
||||
}
|
||||
} // if we have overlapping
|
||||
|
||||
|
||||
if (overlapping.length > 0) {
|
||||
// ['ii','iii'] before ['i','i','iii']
|
||||
overlapping = overlapping.sort((a, b) => {
|
||||
return a.length() - b.length();
|
||||
});
|
||||
|
||||
for (let clone of overlapping) {
|
||||
// don't add if we already have an equivalent sequence
|
||||
if (inSequences(clone, sequences)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
sequences.push(clone);
|
||||
}
|
||||
|
||||
continue;
|
||||
} // if we haven't done anything unique
|
||||
// clean up the patterns
|
||||
// helps keep patterns smaller
|
||||
// if str = 'r₨㎧aarss', pattern will be 446 instead of 655
|
||||
|
||||
|
||||
if (i > 0 && added_types.size == 1 && !added_types.has('3')) {
|
||||
pattern += sequencesToPattern(sequences, false);
|
||||
let new_seq = new Sequence();
|
||||
const old_seq = sequences[0];
|
||||
|
||||
if (old_seq) {
|
||||
new_seq.add(old_seq.last());
|
||||
}
|
||||
|
||||
sequences = [new_seq];
|
||||
}
|
||||
}
|
||||
|
||||
pattern += sequencesToPattern(sequences, true);
|
||||
return pattern;
|
||||
};
|
||||
|
||||
export { _asciifold, asciifold, code_points, generateMap, generateSets, generator, getPattern, initialize, mapSequence, normalize, substringsToPattern, unicode_map };
|
||||
//# sourceMappingURL=index.js.map
|
||||
1
node_modules/@orchidjs/unicode-variants/dist/esm/index.js.map
generated
vendored
Normal file
1
node_modules/@orchidjs/unicode-variants/dist/esm/index.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
104
node_modules/@orchidjs/unicode-variants/dist/esm/regex.js
generated
vendored
Normal file
104
node_modules/@orchidjs/unicode-variants/dist/esm/regex.js
generated
vendored
Normal file
@@ -0,0 +1,104 @@
|
||||
/*! @orchidjs/unicode-variants | https://github.com/orchidjs/unicode-variants | Apache License (v2) */
|
||||
/**
|
||||
* Convert array of strings to a regular expression
|
||||
* ex ['ab','a'] => (?:ab|a)
|
||||
* ex ['a','b'] => [ab]
|
||||
* @param {string[]} chars
|
||||
* @return {string}
|
||||
*/
|
||||
const arrayToPattern = chars => {
|
||||
chars = chars.filter(Boolean);
|
||||
|
||||
if (chars.length < 2) {
|
||||
return chars[0] || '';
|
||||
}
|
||||
|
||||
return maxValueLength(chars) == 1 ? '[' + chars.join('') + ']' : '(?:' + chars.join('|') + ')';
|
||||
};
|
||||
/**
|
||||
* @param {string[]} array
|
||||
* @return {string}
|
||||
*/
|
||||
|
||||
const sequencePattern = array => {
|
||||
if (!hasDuplicates(array)) {
|
||||
return array.join('');
|
||||
}
|
||||
|
||||
let pattern = '';
|
||||
let prev_char_count = 0;
|
||||
|
||||
const prev_pattern = () => {
|
||||
if (prev_char_count > 1) {
|
||||
pattern += '{' + prev_char_count + '}';
|
||||
}
|
||||
};
|
||||
|
||||
array.forEach((char, i) => {
|
||||
if (char === array[i - 1]) {
|
||||
prev_char_count++;
|
||||
return;
|
||||
}
|
||||
|
||||
prev_pattern();
|
||||
pattern += char;
|
||||
prev_char_count = 1;
|
||||
});
|
||||
prev_pattern();
|
||||
return pattern;
|
||||
};
|
||||
/**
|
||||
* Convert array of strings to a regular expression
|
||||
* ex ['ab','a'] => (?:ab|a)
|
||||
* ex ['a','b'] => [ab]
|
||||
* @param {Set<string>} chars
|
||||
* @return {string}
|
||||
*/
|
||||
|
||||
const setToPattern = chars => {
|
||||
let array = toArray(chars);
|
||||
return arrayToPattern(array);
|
||||
};
|
||||
/**
|
||||
*
|
||||
* https://stackoverflow.com/questions/7376598/in-javascript-how-do-i-check-if-an-array-has-duplicate-values
|
||||
* @param {any[]} array
|
||||
*/
|
||||
|
||||
const hasDuplicates = array => {
|
||||
return new Set(array).size !== array.length;
|
||||
};
|
||||
/**
|
||||
* https://stackoverflow.com/questions/63006601/why-does-u-throw-an-invalid-escape-error
|
||||
* @param {string} str
|
||||
* @return {string}
|
||||
*/
|
||||
|
||||
const escape_regex = str => {
|
||||
return (str + '').replace(/([\$\(\)\*\+\.\?\[\]\^\{\|\}\\])/gu, '\\$1');
|
||||
};
|
||||
/**
|
||||
* Return the max length of array values
|
||||
* @param {string[]} array
|
||||
*
|
||||
*/
|
||||
|
||||
const maxValueLength = array => {
|
||||
return array.reduce((longest, value) => Math.max(longest, unicodeLength(value)), 0);
|
||||
};
|
||||
/**
|
||||
* @param {string} str
|
||||
*/
|
||||
|
||||
const unicodeLength = str => {
|
||||
return toArray(str).length;
|
||||
};
|
||||
/**
|
||||
* @param {any} p
|
||||
* @return {any[]}
|
||||
*/
|
||||
|
||||
const toArray = p => Array.from(p);
|
||||
|
||||
export { arrayToPattern, escape_regex, hasDuplicates, maxValueLength, sequencePattern, setToPattern, toArray, unicodeLength };
|
||||
//# sourceMappingURL=regex.js.map
|
||||
1
node_modules/@orchidjs/unicode-variants/dist/esm/regex.js.map
generated
vendored
Normal file
1
node_modules/@orchidjs/unicode-variants/dist/esm/regex.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"regex.js","sources":["../../lib/regex.mjs"],"sourcesContent":["\n/**\n * Convert array of strings to a regular expression\n *\tex ['ab','a'] => (?:ab|a)\n * \tex ['a','b'] => [ab]\n * @param {string[]} chars\n * @return {string}\n */\nexport const arrayToPattern = (chars) =>{\n\n\tchars = chars.filter( Boolean );\n\n\tif( chars.length < 2 ){\n\t\treturn chars[0] || '';\n\t}\n\n\treturn (maxValueLength(chars) == 1) ? '['+chars.join('')+']' : '(?:'+chars.join('|')+')';\n};\n\n/**\n * @param {string[]} array\n * @return {string}\n */\nexport const sequencePattern = (array)=>{\n\n\tif( !hasDuplicates(array) ){\n\t\treturn array.join('');\n\t}\n\n\tlet pattern = '';\n\tlet prev_char_count = 0;\n\n\tconst prev_pattern = ()=>{\n\t\tif( prev_char_count > 1 ){\n\t\t\tpattern += '{'+prev_char_count+'}';\n\t\t}\n\t}\n\n\tarray.forEach((char,i)=>{\n\n\t\tif( char === array[i-1] ){\n\t\t\tprev_char_count++;\n\t\t\treturn;\n\t\t}\n\n\t\tprev_pattern();\n\n\t\tpattern += char;\n\t\tprev_char_count = 1;\n\t});\n\n\tprev_pattern();\n\n\treturn pattern;\n\n}\n\n\n\n/**\n * Convert array of strings to a regular expression\n *\tex ['ab','a'] => (?:ab|a)\n * \tex ['a','b'] => [ab]\n * @param {Set<string>} chars\n * @return {string}\n */\nexport const setToPattern = (chars)=>{\n\tlet array = toArray(chars);\n\treturn arrayToPattern(array);\n}\n\n\n\n/**\n *\n * https://stackoverflow.com/questions/7376598/in-javascript-how-do-i-check-if-an-array-has-duplicate-values\n * @param {any[]} array\n */\nexport const hasDuplicates = (array) => {\n return (new Set(array)).size !== array.length;\n}\n\n\n/**\n * https://stackoverflow.com/questions/63006601/why-does-u-throw-an-invalid-escape-error\n * @param {string} str\n * @return {string}\n */\nexport const escape_regex = (str) => {\n\treturn (str + '').replace(/([\\$\\(\\)\\*\\+\\.\\?\\[\\]\\^\\{\\|\\}\\\\])/gu, '\\\\$1');\n};\n\n/**\n * Return the max length of array values\n * @param {string[]} array\n *\n */\nexport const 
maxValueLength = (array) => {\n\treturn array.reduce( (longest, value) => Math.max(longest,unicodeLength(value)),0);\n}\n\n\n/**\n * @param {string} str\n */\nexport const unicodeLength = (str) => {\n\treturn toArray(str).length;\n}\n\n/**\n * @param {any} p\n * @return {any[]}\n */\nexport const toArray = (p) => Array.from(p);\n"],"names":["arrayToPattern","chars","filter","Boolean","length","maxValueLength","join","sequencePattern","array","hasDuplicates","pattern","prev_char_count","prev_pattern","forEach","char","i","setToPattern","toArray","Set","size","escape_regex","str","replace","reduce","longest","value","Math","max","unicodeLength","p","Array","from"],"mappings":";AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACaA,MAAAA,cAAc,GAAIC,KAAD,IAAU;AAEvCA,EAAAA,KAAK,GAAGA,KAAK,CAACC,MAAN,CAAcC,OAAd,CAAR,CAAA;;AAEA,EAAA,IAAIF,KAAK,CAACG,MAAN,GAAe,CAAnB,EAAsB;AACrB,IAAA,OAAOH,KAAK,CAAC,CAAD,CAAL,IAAY,EAAnB,CAAA;AACA,GAAA;;EAED,OAAQI,cAAc,CAACJ,KAAD,CAAd,IAAyB,CAA1B,GAA+B,GAAA,GAAIA,KAAK,CAACK,IAAN,CAAW,EAAX,CAAJ,GAAmB,GAAlD,GAAwD,KAAML,GAAAA,KAAK,CAACK,IAAN,CAAW,GAAX,CAAN,GAAsB,GAArF,CAAA;AACA,EATM;AAWP;AACA;AACA;AACA;;AACaC,MAAAA,eAAe,GAAIC,KAAD,IAAS;AAEvC,EAAA,IAAI,CAACC,aAAa,CAACD,KAAD,CAAlB,EAA2B;AAC1B,IAAA,OAAOA,KAAK,CAACF,IAAN,CAAW,EAAX,CAAP,CAAA;AACA,GAAA;;EAED,IAAII,OAAO,GAAG,EAAd,CAAA;EACA,IAAIC,eAAe,GAAG,CAAtB,CAAA;;EAEA,MAAMC,YAAY,GAAG,MAAI;IACxB,IAAID,eAAe,GAAG,CAAtB,EAAyB;AACxBD,MAAAA,OAAO,IAAI,GAAA,GAAIC,eAAJ,GAAoB,GAA/B,CAAA;AACA,KAAA;GAHF,CAAA;;AAMAH,EAAAA,KAAK,CAACK,OAAN,CAAc,CAACC,IAAD,EAAMC,CAAN,KAAU;IAEvB,IAAID,IAAI,KAAKN,KAAK,CAACO,CAAC,GAAC,CAAH,CAAlB,EAAyB;MACxBJ,eAAe,EAAA,CAAA;AACf,MAAA,OAAA;AACA,KAAA;;IAEDC,YAAY,EAAA,CAAA;AAEZF,IAAAA,OAAO,IAAII,IAAX,CAAA;AACAH,IAAAA,eAAe,GAAG,CAAlB,CAAA;GAVD,CAAA,CAAA;EAaAC,YAAY,EAAA,CAAA;AAEZ,EAAA,OAAOF,OAAP,CAAA;AAEA,EAhCM;AAoCP;AACA;AACA;AACA;AACA;AACA;AACA;;AACaM,MAAAA,YAAY,GAAIf,KAAD,IAAS;AACpC,EAAA,IAAIO,KAAK,GAAGS,OAAO,CAAChB,KAAD,CAAnB,CAAA;EACA,OAAOD,cAAc,CAACQ,KAAD,CAArB,CAAA;AACA,EAHM;AAOP;AACA;AACA;AACA;A
ACA;;AACaC,MAAAA,aAAa,GAAID,KAAD,IAAW;EACpC,OAAQ,IAAIU,GAAJ,CAAQV,KAAR,CAAD,CAAiBW,IAAjB,KAA0BX,KAAK,CAACJ,MAAvC,CAAA;AACH,EAFM;AAKP;AACA;AACA;AACA;AACA;;AACagB,MAAAA,YAAY,GAAIC,GAAD,IAAS;EACpC,OAAO,CAACA,GAAG,GAAG,EAAP,EAAWC,OAAX,CAAmB,oCAAnB,EAAyD,MAAzD,CAAP,CAAA;AACA,EAFM;AAIP;AACA;AACA;AACA;AACA;;AACajB,MAAAA,cAAc,GAAIG,KAAD,IAAW;EACxC,OAAOA,KAAK,CAACe,MAAN,CAAc,CAACC,OAAD,EAAUC,KAAV,KAAoBC,IAAI,CAACC,GAAL,CAASH,OAAT,EAAiBI,aAAa,CAACH,KAAD,CAA9B,CAAlC,EAAyE,CAAzE,CAAP,CAAA;AACA,EAFM;AAKP;AACA;AACA;;AACaG,MAAAA,aAAa,GAAIP,GAAD,IAAS;AACrC,EAAA,OAAOJ,OAAO,CAACI,GAAD,CAAP,CAAajB,MAApB,CAAA;AACA,EAFM;AAIP;AACA;AACA;AACA;;AACO,MAAMa,OAAO,GAAIY,CAAD,IAAOC,KAAK,CAACC,IAAN,CAAWF,CAAX;;;;"}
|
||||
27
node_modules/@orchidjs/unicode-variants/dist/esm/strings.js
generated
vendored
Normal file
27
node_modules/@orchidjs/unicode-variants/dist/esm/strings.js
generated
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
/*! @orchidjs/unicode-variants | https://github.com/orchidjs/unicode-variants | Apache License (v2) */
|
||||
/**
|
||||
* Get all possible combinations of substrings that add up to the given string
|
||||
* https://stackoverflow.com/questions/30169587/find-all-the-combination-of-substrings-that-add-up-to-the-given-string
|
||||
* @param {string} input
|
||||
* @return {string[][]}
|
||||
*/
|
||||
const allSubstrings = input => {
|
||||
if (input.length === 1) return [[input]];
|
||||
/** @type {string[][]} */
|
||||
|
||||
let result = [];
|
||||
const start = input.substring(1);
|
||||
const suba = allSubstrings(start);
|
||||
suba.forEach(function (subresult) {
|
||||
let tmp = subresult.slice(0);
|
||||
tmp[0] = input.charAt(0) + tmp[0];
|
||||
result.push(tmp);
|
||||
tmp = subresult.slice(0);
|
||||
tmp.unshift(input.charAt(0));
|
||||
result.push(tmp);
|
||||
});
|
||||
return result;
|
||||
};
|
||||
|
||||
export { allSubstrings };
|
||||
//# sourceMappingURL=strings.js.map
|
||||
1
node_modules/@orchidjs/unicode-variants/dist/esm/strings.js.map
generated
vendored
Normal file
1
node_modules/@orchidjs/unicode-variants/dist/esm/strings.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"strings.js","sources":["../../lib/strings.mjs"],"sourcesContent":["\n\n\n/**\n * Get all possible combinations of substrings that add up to the given string\n * https://stackoverflow.com/questions/30169587/find-all-the-combination-of-substrings-that-add-up-to-the-given-string\n * @param {string} input\n * @return {string[][]}\n */\nexport const allSubstrings = (input) => {\n\n if( input.length === 1) return [[input]];\n\n\t/** @type {string[][]} */\n let result = [];\n\n\tconst start = input.substring(1);\n const suba = allSubstrings(start);\n\n\tsuba.forEach(function(subresult) {\n let tmp = subresult.slice(0);\n tmp[0] = input.charAt(0) + tmp[0];\n result.push(tmp);\n\n tmp = subresult.slice(0);\n tmp.unshift(input.charAt(0));\n result.push(tmp);\n });\n\n return result;\n}\n"],"names":["allSubstrings","input","length","result","start","substring","suba","forEach","subresult","tmp","slice","charAt","push","unshift"],"mappings":";AAGA;AACA;AACA;AACA;AACA;AACA;AACaA,MAAAA,aAAa,GAAIC,KAAD,IAAW;EAEpC,IAAIA,KAAK,CAACC,MAAN,KAAiB,CAArB,EAAwB,OAAO,CAAC,CAACD,KAAD,CAAD,CAAP,CAAA;AAE3B;;EACG,IAAIE,MAAM,GAAG,EAAb,CAAA;AAEH,EAAA,MAAMC,KAAK,GAAGH,KAAK,CAACI,SAAN,CAAgB,CAAhB,CAAd,CAAA;AACG,EAAA,MAAMC,IAAI,GAAGN,aAAa,CAACI,KAAD,CAA1B,CAAA;AAEHE,EAAAA,IAAI,CAACC,OAAL,CAAa,UAASC,SAAT,EAAoB;AAC1B,IAAA,IAAIC,GAAG,GAAGD,SAAS,CAACE,KAAV,CAAgB,CAAhB,CAAV,CAAA;AACAD,IAAAA,GAAG,CAAC,CAAD,CAAH,GAASR,KAAK,CAACU,MAAN,CAAa,CAAb,CAAA,GAAkBF,GAAG,CAAC,CAAD,CAA9B,CAAA;IACAN,MAAM,CAACS,IAAP,CAAYH,GAAZ,CAAA,CAAA;AAEAA,IAAAA,GAAG,GAAGD,SAAS,CAACE,KAAV,CAAgB,CAAhB,CAAN,CAAA;IACAD,GAAG,CAACI,OAAJ,CAAYZ,KAAK,CAACU,MAAN,CAAa,CAAb,CAAZ,CAAA,CAAA;IACAR,MAAM,CAACS,IAAP,CAAYH,GAAZ,CAAA,CAAA;GAPP,CAAA,CAAA;AAUG,EAAA,OAAON,MAAP,CAAA;AACH;;;;"}
|
||||
43
node_modules/@orchidjs/unicode-variants/dist/types/index.d.mts
generated
vendored
Normal file
43
node_modules/@orchidjs/unicode-variants/dist/types/index.d.mts
generated
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
/**
|
||||
* Generate a list of unicode variants from the list of code points
|
||||
* @param {TCodePoints} code_points
|
||||
* @yield {TCodePointObj}
|
||||
*/
|
||||
export function generator(code_points: TCodePoints): Generator<{
|
||||
folded: string;
|
||||
composed: string;
|
||||
code_point: number;
|
||||
}, void, unknown>;
|
||||
/** @type {TCodePoints} */
|
||||
export const code_points: TCodePoints;
|
||||
/** @type {TUnicodeMap} */
|
||||
export let unicode_map: TUnicodeMap;
|
||||
export function initialize(_code_points?: TCodePoints | undefined): void;
|
||||
export function normalize(str: string, form?: string): string;
|
||||
export function asciifold(str: string): string;
|
||||
export function _asciifold(str: string): string;
|
||||
export function generateSets(code_points: TCodePoints): TUnicodeSets;
|
||||
export function generateMap(code_points: TCodePoints): TUnicodeMap;
|
||||
export function mapSequence(strings: string[], min_replacement?: number): string;
|
||||
export function substringsToPattern(str: string, min_replacement?: number): string;
|
||||
export function getPattern(str: string): string | undefined;
|
||||
export { escape_regex };
|
||||
export type TUnicodeMap = {
|
||||
[key: string]: string;
|
||||
};
|
||||
export type TUnicodeSets = {
|
||||
[key: string]: Set<string>;
|
||||
};
|
||||
export type TCodePoints = [[number, number]];
|
||||
export type TCodePointObj = {
|
||||
folded: string;
|
||||
composed: string;
|
||||
code_point: number;
|
||||
};
|
||||
export type TSequencePart = {
|
||||
start: number;
|
||||
end: number;
|
||||
length: number;
|
||||
substr: string;
|
||||
};
|
||||
import { escape_regex } from "./regex.mjs";
|
||||
8
node_modules/@orchidjs/unicode-variants/dist/types/regex.d.mts
generated
vendored
Normal file
8
node_modules/@orchidjs/unicode-variants/dist/types/regex.d.mts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
export function arrayToPattern(chars: string[]): string;
|
||||
export function sequencePattern(array: string[]): string;
|
||||
export function setToPattern(chars: Set<string>): string;
|
||||
export function hasDuplicates(array: any[]): boolean;
|
||||
export function escape_regex(str: string): string;
|
||||
export function maxValueLength(array: string[]): number;
|
||||
export function unicodeLength(str: string): number;
|
||||
export function toArray(p: any): any[];
|
||||
1
node_modules/@orchidjs/unicode-variants/dist/types/strings.d.mts
generated
vendored
Normal file
1
node_modules/@orchidjs/unicode-variants/dist/types/strings.d.mts
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
export function allSubstrings(input: string): string[][];
|
||||
678
node_modules/@orchidjs/unicode-variants/dist/umd/index.js
generated
vendored
Normal file
678
node_modules/@orchidjs/unicode-variants/dist/umd/index.js
generated
vendored
Normal file
@@ -0,0 +1,678 @@
|
||||
/*! @orchidjs/unicode-variants | https://github.com/orchidjs/unicode-variants | Apache License (v2) */
|
||||
(function (global, factory) {
|
||||
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
|
||||
typeof define === 'function' && define.amd ? define(['exports'], factory) :
|
||||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.diacritics = {}));
|
||||
})(this, (function (exports) { 'use strict';
|
||||
|
||||
/**
|
||||
* Convert array of strings to a regular expression
|
||||
* ex ['ab','a'] => (?:ab|a)
|
||||
* ex ['a','b'] => [ab]
|
||||
* @param {string[]} chars
|
||||
* @return {string}
|
||||
*/
|
||||
const arrayToPattern = chars => {
|
||||
chars = chars.filter(Boolean);
|
||||
|
||||
if (chars.length < 2) {
|
||||
return chars[0] || '';
|
||||
}
|
||||
|
||||
return maxValueLength(chars) == 1 ? '[' + chars.join('') + ']' : '(?:' + chars.join('|') + ')';
|
||||
};
|
||||
/**
|
||||
* @param {string[]} array
|
||||
* @return {string}
|
||||
*/
|
||||
|
||||
const sequencePattern = array => {
|
||||
if (!hasDuplicates(array)) {
|
||||
return array.join('');
|
||||
}
|
||||
|
||||
let pattern = '';
|
||||
let prev_char_count = 0;
|
||||
|
||||
const prev_pattern = () => {
|
||||
if (prev_char_count > 1) {
|
||||
pattern += '{' + prev_char_count + '}';
|
||||
}
|
||||
};
|
||||
|
||||
array.forEach((char, i) => {
|
||||
if (char === array[i - 1]) {
|
||||
prev_char_count++;
|
||||
return;
|
||||
}
|
||||
|
||||
prev_pattern();
|
||||
pattern += char;
|
||||
prev_char_count = 1;
|
||||
});
|
||||
prev_pattern();
|
||||
return pattern;
|
||||
};
|
||||
/**
|
||||
* Convert array of strings to a regular expression
|
||||
* ex ['ab','a'] => (?:ab|a)
|
||||
* ex ['a','b'] => [ab]
|
||||
* @param {Set<string>} chars
|
||||
* @return {string}
|
||||
*/
|
||||
|
||||
const setToPattern = chars => {
|
||||
let array = toArray(chars);
|
||||
return arrayToPattern(array);
|
||||
};
|
||||
/**
|
||||
*
|
||||
* https://stackoverflow.com/questions/7376598/in-javascript-how-do-i-check-if-an-array-has-duplicate-values
|
||||
* @param {any[]} array
|
||||
*/
|
||||
|
||||
const hasDuplicates = array => {
|
||||
return new Set(array).size !== array.length;
|
||||
};
|
||||
/**
|
||||
* https://stackoverflow.com/questions/63006601/why-does-u-throw-an-invalid-escape-error
|
||||
* @param {string} str
|
||||
* @return {string}
|
||||
*/
|
||||
|
||||
const escape_regex = str => {
|
||||
return (str + '').replace(/([\$\(\)\*\+\.\?\[\]\^\{\|\}\\])/gu, '\\$1');
|
||||
};
|
||||
/**
|
||||
* Return the max length of array values
|
||||
* @param {string[]} array
|
||||
*
|
||||
*/
|
||||
|
||||
const maxValueLength = array => {
|
||||
return array.reduce((longest, value) => Math.max(longest, unicodeLength(value)), 0);
|
||||
};
|
||||
/**
|
||||
* @param {string} str
|
||||
*/
|
||||
|
||||
const unicodeLength = str => {
|
||||
return toArray(str).length;
|
||||
};
|
||||
/**
|
||||
* @param {any} p
|
||||
* @return {any[]}
|
||||
*/
|
||||
|
||||
const toArray = p => Array.from(p);
|
||||
|
||||
/**
|
||||
* Get all possible combinations of substrings that add up to the given string
|
||||
* https://stackoverflow.com/questions/30169587/find-all-the-combination-of-substrings-that-add-up-to-the-given-string
|
||||
* @param {string} input
|
||||
* @return {string[][]}
|
||||
*/
|
||||
const allSubstrings = input => {
|
||||
if (input.length === 1) return [[input]];
|
||||
/** @type {string[][]} */
|
||||
|
||||
let result = [];
|
||||
const start = input.substring(1);
|
||||
const suba = allSubstrings(start);
|
||||
suba.forEach(function (subresult) {
|
||||
let tmp = subresult.slice(0);
|
||||
tmp[0] = input.charAt(0) + tmp[0];
|
||||
result.push(tmp);
|
||||
tmp = subresult.slice(0);
|
||||
tmp.unshift(input.charAt(0));
|
||||
result.push(tmp);
|
||||
});
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* @typedef {{[key:string]:string}} TUnicodeMap
|
||||
* @typedef {{[key:string]:Set<string>}} TUnicodeSets
|
||||
* @typedef {[[number,number]]} TCodePoints
|
||||
* @typedef {{folded:string,composed:string,code_point:number}} TCodePointObj
|
||||
* @typedef {{start:number,end:number,length:number,substr:string}} TSequencePart
|
||||
*/
|
||||
/** @type {TCodePoints} */
|
||||
|
||||
const code_points = [[0, 65535]];
|
||||
const accent_pat = '[\u0300-\u036F\u{b7}\u{2be}\u{2bc}]';
|
||||
/** @type {TUnicodeMap} */
|
||||
|
||||
exports.unicode_map = void 0;
|
||||
/** @type {RegExp} */
|
||||
|
||||
let multi_char_reg;
|
||||
const max_char_length = 3;
|
||||
/** @type {TUnicodeMap} */
|
||||
|
||||
const latin_convert = {};
|
||||
/** @type {TUnicodeMap} */
|
||||
|
||||
const latin_condensed = {
|
||||
'/': '⁄∕',
|
||||
'0': '߀',
|
||||
"a": "ⱥɐɑ",
|
||||
"aa": "ꜳ",
|
||||
"ae": "æǽǣ",
|
||||
"ao": "ꜵ",
|
||||
"au": "ꜷ",
|
||||
"av": "ꜹꜻ",
|
||||
"ay": "ꜽ",
|
||||
"b": "ƀɓƃ",
|
||||
"c": "ꜿƈȼↄ",
|
||||
"d": "đɗɖᴅƌꮷԁɦ",
|
||||
"e": "ɛǝᴇɇ",
|
||||
"f": "ꝼƒ",
|
||||
"g": "ǥɠꞡᵹꝿɢ",
|
||||
"h": "ħⱨⱶɥ",
|
||||
"i": "ɨı",
|
||||
"j": "ɉȷ",
|
||||
"k": "ƙⱪꝁꝃꝅꞣ",
|
||||
"l": "łƚɫⱡꝉꝇꞁɭ",
|
||||
"m": "ɱɯϻ",
|
||||
"n": "ꞥƞɲꞑᴎлԉ",
|
||||
"o": "øǿɔɵꝋꝍᴑ",
|
||||
"oe": "œ",
|
||||
"oi": "ƣ",
|
||||
"oo": "ꝏ",
|
||||
"ou": "ȣ",
|
||||
"p": "ƥᵽꝑꝓꝕρ",
|
||||
"q": "ꝗꝙɋ",
|
||||
"r": "ɍɽꝛꞧꞃ",
|
||||
"s": "ßȿꞩꞅʂ",
|
||||
"t": "ŧƭʈⱦꞇ",
|
||||
"th": "þ",
|
||||
"tz": "ꜩ",
|
||||
"u": "ʉ",
|
||||
"v": "ʋꝟʌ",
|
||||
"vy": "ꝡ",
|
||||
"w": "ⱳ",
|
||||
"y": "ƴɏỿ",
|
||||
"z": "ƶȥɀⱬꝣ",
|
||||
"hv": "ƕ"
|
||||
};
|
||||
|
||||
for (let latin in latin_condensed) {
|
||||
let unicode = latin_condensed[latin] || '';
|
||||
|
||||
for (let i = 0; i < unicode.length; i++) {
|
||||
let char = unicode.substring(i, i + 1);
|
||||
latin_convert[char] = latin;
|
||||
}
|
||||
}
|
||||
|
||||
const convert_pat = new RegExp(Object.keys(latin_convert).join('|') + '|' + accent_pat, 'gu');
|
||||
/**
|
||||
* Initialize the unicode_map from the give code point ranges
|
||||
*
|
||||
* @param {TCodePoints=} _code_points
|
||||
*/
|
||||
|
||||
const initialize = _code_points => {
|
||||
if (exports.unicode_map !== undefined) return;
|
||||
exports.unicode_map = generateMap(_code_points || code_points);
|
||||
};
|
||||
/**
|
||||
* Helper method for normalize a string
|
||||
* https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/normalize
|
||||
* @param {string} str
|
||||
* @param {string} form
|
||||
*/
|
||||
|
||||
const normalize = (str, form = 'NFKD') => str.normalize(form);
|
||||
/**
|
||||
* Remove accents without reordering string
|
||||
* calling str.normalize('NFKD') on \u{594}\u{595}\u{596} becomes \u{596}\u{594}\u{595}
|
||||
* via https://github.com/krisk/Fuse/issues/133#issuecomment-318692703
|
||||
* @param {string} str
|
||||
* @return {string}
|
||||
*/
|
||||
|
||||
const asciifold = str => {
|
||||
return toArray(str).reduce(
|
||||
/**
|
||||
* @param {string} result
|
||||
* @param {string} char
|
||||
*/
|
||||
(result, char) => {
|
||||
return result + _asciifold(char);
|
||||
}, '');
|
||||
};
|
||||
/**
|
||||
* @param {string} str
|
||||
* @return {string}
|
||||
*/
|
||||
|
||||
const _asciifold = str => {
|
||||
str = normalize(str).toLowerCase().replace(convert_pat, (
|
||||
/** @type {string} */
|
||||
char) => {
|
||||
return latin_convert[char] || '';
|
||||
}); //return str;
|
||||
|
||||
return normalize(str, 'NFC');
|
||||
};
|
||||
/**
|
||||
* Generate a list of unicode variants from the list of code points
|
||||
* @param {TCodePoints} code_points
|
||||
* @yield {TCodePointObj}
|
||||
*/
|
||||
|
||||
function* generator(code_points) {
|
||||
for (const [code_point_min, code_point_max] of code_points) {
|
||||
for (let i = code_point_min; i <= code_point_max; i++) {
|
||||
let composed = String.fromCharCode(i);
|
||||
let folded = asciifold(composed);
|
||||
|
||||
if (folded == composed.toLowerCase()) {
|
||||
continue;
|
||||
} // skip when folded is a string longer than 3 characters long
|
||||
// bc the resulting regex patterns will be long
|
||||
// eg:
|
||||
// folded صلى الله عليه وسلم length 18 code point 65018
|
||||
// folded جل جلاله length 8 code point 65019
|
||||
|
||||
|
||||
if (folded.length > max_char_length) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (folded.length == 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
yield {
|
||||
folded: folded,
|
||||
composed: composed,
|
||||
code_point: i
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Generate a unicode map from the list of code points
|
||||
* @param {TCodePoints} code_points
|
||||
* @return {TUnicodeSets}
|
||||
*/
|
||||
|
||||
const generateSets = code_points => {
|
||||
/** @type {{[key:string]:Set<string>}} */
|
||||
const unicode_sets = {};
|
||||
/**
|
||||
* @param {string} folded
|
||||
* @param {string} to_add
|
||||
*/
|
||||
|
||||
const addMatching = (folded, to_add) => {
|
||||
/** @type {Set<string>} */
|
||||
const folded_set = unicode_sets[folded] || new Set();
|
||||
const patt = new RegExp('^' + setToPattern(folded_set) + '$', 'iu');
|
||||
|
||||
if (to_add.match(patt)) {
|
||||
return;
|
||||
}
|
||||
|
||||
folded_set.add(escape_regex(to_add));
|
||||
unicode_sets[folded] = folded_set;
|
||||
};
|
||||
|
||||
for (let value of generator(code_points)) {
|
||||
addMatching(value.folded, value.folded);
|
||||
addMatching(value.folded, value.composed);
|
||||
}
|
||||
|
||||
return unicode_sets;
|
||||
};
|
||||
/**
|
||||
* Generate a unicode map from the list of code points
|
||||
* ae => (?:(?:ae|Æ|Ǽ|Ǣ)|(?:A|Ⓐ|A...)(?:E|ɛ|Ⓔ...))
|
||||
*
|
||||
* @param {TCodePoints} code_points
|
||||
* @return {TUnicodeMap}
|
||||
*/
|
||||
|
||||
const generateMap = code_points => {
|
||||
/** @type {TUnicodeSets} */
|
||||
const unicode_sets = generateSets(code_points);
|
||||
/** @type {TUnicodeMap} */
|
||||
|
||||
const unicode_map = {};
|
||||
/** @type {string[]} */
|
||||
|
||||
let multi_char = [];
|
||||
|
||||
for (let folded in unicode_sets) {
|
||||
let set = unicode_sets[folded];
|
||||
|
||||
if (set) {
|
||||
unicode_map[folded] = setToPattern(set);
|
||||
}
|
||||
|
||||
if (folded.length > 1) {
|
||||
multi_char.push(escape_regex(folded));
|
||||
}
|
||||
}
|
||||
|
||||
multi_char.sort((a, b) => b.length - a.length);
|
||||
const multi_char_patt = arrayToPattern(multi_char);
|
||||
multi_char_reg = new RegExp('^' + multi_char_patt, 'u');
|
||||
return unicode_map;
|
||||
};
|
||||
/**
|
||||
* Map each element of an array from it's folded value to all possible unicode matches
|
||||
* @param {string[]} strings
|
||||
* @param {number} min_replacement
|
||||
* @return {string}
|
||||
*/
|
||||
|
||||
const mapSequence = (strings, min_replacement = 1) => {
|
||||
let chars_replaced = 0;
|
||||
strings = strings.map(str => {
|
||||
if (exports.unicode_map[str]) {
|
||||
chars_replaced += str.length;
|
||||
}
|
||||
|
||||
return exports.unicode_map[str] || str;
|
||||
});
|
||||
|
||||
if (chars_replaced >= min_replacement) {
|
||||
return sequencePattern(strings);
|
||||
}
|
||||
|
||||
return '';
|
||||
};
|
||||
/**
|
||||
* Convert a short string and split it into all possible patterns
|
||||
* Keep a pattern only if min_replacement is met
|
||||
*
|
||||
* 'abc'
|
||||
* => [['abc'],['ab','c'],['a','bc'],['a','b','c']]
|
||||
* => ['abc-pattern','ab-c-pattern'...]
|
||||
*
|
||||
*
|
||||
* @param {string} str
|
||||
* @param {number} min_replacement
|
||||
* @return {string}
|
||||
*/
|
||||
|
||||
const substringsToPattern = (str, min_replacement = 1) => {
|
||||
min_replacement = Math.max(min_replacement, str.length - 1);
|
||||
return arrayToPattern(allSubstrings(str).map(sub_pat => {
|
||||
return mapSequence(sub_pat, min_replacement);
|
||||
}));
|
||||
};
|
||||
/**
|
||||
* Convert an array of sequences into a pattern
|
||||
* [{start:0,end:3,length:3,substr:'iii'}...] => (?:iii...)
|
||||
*
|
||||
* @param {Sequence[]} sequences
|
||||
* @param {boolean} all
|
||||
*/
|
||||
|
||||
const sequencesToPattern = (sequences, all = true) => {
|
||||
let min_replacement = sequences.length > 1 ? 1 : 0;
|
||||
return arrayToPattern(sequences.map(sequence => {
|
||||
let seq = [];
|
||||
const len = all ? sequence.length() : sequence.length() - 1;
|
||||
|
||||
for (let j = 0; j < len; j++) {
|
||||
seq.push(substringsToPattern(sequence.substrs[j] || '', min_replacement));
|
||||
}
|
||||
|
||||
return sequencePattern(seq);
|
||||
}));
|
||||
};
|
||||
/**
|
||||
* Return true if the sequence is already in the sequences
|
||||
* @param {Sequence} needle_seq
|
||||
* @param {Sequence[]} sequences
|
||||
*/
|
||||
|
||||
|
||||
const inSequences = (needle_seq, sequences) => {
|
||||
for (const seq of sequences) {
|
||||
if (seq.start != needle_seq.start || seq.end != needle_seq.end) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (seq.substrs.join('') !== needle_seq.substrs.join('')) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let needle_parts = needle_seq.parts;
|
||||
/**
|
||||
* @param {TSequencePart} part
|
||||
*/
|
||||
|
||||
const filter = part => {
|
||||
for (const needle_part of needle_parts) {
|
||||
if (needle_part.start === part.start && needle_part.substr === part.substr) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (part.length == 1 || needle_part.length == 1) {
|
||||
continue;
|
||||
} // check for overlapping parts
|
||||
// a = ['::=','==']
|
||||
// b = ['::','===']
|
||||
// a = ['r','sm']
|
||||
// b = ['rs','m']
|
||||
|
||||
|
||||
if (part.start < needle_part.start && part.end > needle_part.start) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (needle_part.start < part.start && needle_part.end > part.start) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
};
|
||||
|
||||
let filtered = seq.parts.filter(filter);
|
||||
|
||||
if (filtered.length > 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
};
|
||||
|
||||
class Sequence {
|
||||
constructor() {
|
||||
/** @type {TSequencePart[]} */
|
||||
this.parts = [];
|
||||
/** @type {string[]} */
|
||||
|
||||
this.substrs = [];
|
||||
this.start = 0;
|
||||
this.end = 0;
|
||||
}
|
||||
/**
|
||||
* @param {TSequencePart|undefined} part
|
||||
*/
|
||||
|
||||
|
||||
add(part) {
|
||||
if (part) {
|
||||
this.parts.push(part);
|
||||
this.substrs.push(part.substr);
|
||||
this.start = Math.min(part.start, this.start);
|
||||
this.end = Math.max(part.end, this.end);
|
||||
}
|
||||
}
|
||||
|
||||
last() {
|
||||
return this.parts[this.parts.length - 1];
|
||||
}
|
||||
|
||||
length() {
|
||||
return this.parts.length;
|
||||
}
|
||||
/**
|
||||
* @param {number} position
|
||||
* @param {TSequencePart} last_piece
|
||||
*/
|
||||
|
||||
|
||||
clone(position, last_piece) {
|
||||
let clone = new Sequence();
|
||||
let parts = JSON.parse(JSON.stringify(this.parts));
|
||||
let last_part = parts.pop();
|
||||
|
||||
for (const part of parts) {
|
||||
clone.add(part);
|
||||
}
|
||||
|
||||
let last_substr = last_piece.substr.substring(0, position - last_part.start);
|
||||
let clone_last_len = last_substr.length;
|
||||
clone.add({
|
||||
start: last_part.start,
|
||||
end: last_part.start + clone_last_len,
|
||||
length: clone_last_len,
|
||||
substr: last_substr
|
||||
});
|
||||
return clone;
|
||||
}
|
||||
|
||||
}
|
||||
/**
|
||||
* Expand a regular expression pattern to include unicode variants
|
||||
* eg /a/ becomes /aⓐaẚàáâầấẫẩãāăằắẵẳȧǡäǟảåǻǎȁȃạậặḁąⱥɐɑAⒶAÀÁÂẦẤẪẨÃĀĂẰẮẴẲȦǠÄǞẢÅǺǍȀȂẠẬẶḀĄȺⱯ/
|
||||
*
|
||||
* Issue:
|
||||
* ﺊﺋ [ 'ﺊ = \\u{fe8a}', 'ﺋ = \\u{fe8b}' ]
|
||||
* becomes: ئئ [ 'ي = \\u{64a}', 'ٔ = \\u{654}', 'ي = \\u{64a}', 'ٔ = \\u{654}' ]
|
||||
*
|
||||
* İIJ = IIJ = ⅡJ
|
||||
*
|
||||
* 1/2/4
|
||||
*
|
||||
* @param {string} str
|
||||
* @return {string|undefined}
|
||||
*/
|
||||
|
||||
|
||||
const getPattern = str => {
|
||||
initialize();
|
||||
str = asciifold(str);
|
||||
let pattern = '';
|
||||
let sequences = [new Sequence()];
|
||||
|
||||
for (let i = 0; i < str.length; i++) {
|
||||
let substr = str.substring(i);
|
||||
let match = substr.match(multi_char_reg);
|
||||
const char = str.substring(i, i + 1);
|
||||
const match_str = match ? match[0] : null; // loop through sequences
|
||||
// add either the char or multi_match
|
||||
|
||||
let overlapping = [];
|
||||
let added_types = new Set();
|
||||
|
||||
for (const sequence of sequences) {
|
||||
const last_piece = sequence.last();
|
||||
|
||||
if (!last_piece || last_piece.length == 1 || last_piece.end <= i) {
|
||||
// if we have a multi match
|
||||
if (match_str) {
|
||||
const len = match_str.length;
|
||||
sequence.add({
|
||||
start: i,
|
||||
end: i + len,
|
||||
length: len,
|
||||
substr: match_str
|
||||
});
|
||||
added_types.add('1');
|
||||
} else {
|
||||
sequence.add({
|
||||
start: i,
|
||||
end: i + 1,
|
||||
length: 1,
|
||||
substr: char
|
||||
});
|
||||
added_types.add('2');
|
||||
}
|
||||
} else if (match_str) {
|
||||
let clone = sequence.clone(i, last_piece);
|
||||
const len = match_str.length;
|
||||
clone.add({
|
||||
start: i,
|
||||
end: i + len,
|
||||
length: len,
|
||||
substr: match_str
|
||||
});
|
||||
overlapping.push(clone);
|
||||
} else {
|
||||
// don't add char
|
||||
// adding would create invalid patterns: 234 => [2,34,4]
|
||||
added_types.add('3');
|
||||
}
|
||||
} // if we have overlapping
|
||||
|
||||
|
||||
if (overlapping.length > 0) {
|
||||
// ['ii','iii'] before ['i','i','iii']
|
||||
overlapping = overlapping.sort((a, b) => {
|
||||
return a.length() - b.length();
|
||||
});
|
||||
|
||||
for (let clone of overlapping) {
|
||||
// don't add if we already have an equivalent sequence
|
||||
if (inSequences(clone, sequences)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
sequences.push(clone);
|
||||
}
|
||||
|
||||
continue;
|
||||
} // if we haven't done anything unique
|
||||
// clean up the patterns
|
||||
// helps keep patterns smaller
|
||||
// if str = 'r₨㎧aarss', pattern will be 446 instead of 655
|
||||
|
||||
|
||||
if (i > 0 && added_types.size == 1 && !added_types.has('3')) {
|
||||
pattern += sequencesToPattern(sequences, false);
|
||||
let new_seq = new Sequence();
|
||||
const old_seq = sequences[0];
|
||||
|
||||
if (old_seq) {
|
||||
new_seq.add(old_seq.last());
|
||||
}
|
||||
|
||||
sequences = [new_seq];
|
||||
}
|
||||
}
|
||||
|
||||
pattern += sequencesToPattern(sequences, true);
|
||||
return pattern;
|
||||
};
|
||||
|
||||
exports._asciifold = _asciifold;
|
||||
exports.asciifold = asciifold;
|
||||
exports.code_points = code_points;
|
||||
exports.escape_regex = escape_regex;
|
||||
exports.generateMap = generateMap;
|
||||
exports.generateSets = generateSets;
|
||||
exports.generator = generator;
|
||||
exports.getPattern = getPattern;
|
||||
exports.initialize = initialize;
|
||||
exports.mapSequence = mapSequence;
|
||||
exports.normalize = normalize;
|
||||
exports.substringsToPattern = substringsToPattern;
|
||||
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
|
||||
}));
|
||||
//# sourceMappingURL=index.js.map
|
||||
1
node_modules/@orchidjs/unicode-variants/dist/umd/index.js.map
generated
vendored
Normal file
1
node_modules/@orchidjs/unicode-variants/dist/umd/index.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
57
node_modules/@orchidjs/unicode-variants/dist/umd/index.min.js
generated
vendored
Normal file
57
node_modules/@orchidjs/unicode-variants/dist/umd/index.min.js
generated
vendored
Normal file
@@ -0,0 +1,57 @@
|
||||
/*! @orchidjs/unicode-variants | https://github.com/orchidjs/unicode-variants | Apache License (v2) */
|
||||
!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?e(exports):"function"==typeof define&&define.amd?define(["exports"],e):e((t="undefined"!=typeof globalThis?globalThis:t||self).diacritics={})}(this,(function(t){"use strict"
|
||||
const e=t=>(t=t.filter(Boolean)).length<2?t[0]||"":1==a(t)?"["+t.join("")+"]":"(?:"+t.join("|")+")",n=t=>{if(!r(t))return t.join("")
|
||||
let e="",n=0
|
||||
const s=()=>{n>1&&(e+="{"+n+"}")}
|
||||
return t.forEach(((r,o)=>{r!==t[o-1]?(s(),e+=r,n=1):n++})),s(),e},s=t=>{let n=i(t)
|
||||
return e(n)},r=t=>new Set(t).size!==t.length,o=t=>(t+"").replace(/([\$\(\)\*\+\.\?\[\]\^\{\|\}\\])/gu,"\\$1"),a=t=>t.reduce(((t,e)=>Math.max(t,l(e))),0),l=t=>i(t).length,i=t=>Array.from(t),u=t=>{if(1===t.length)return[[t]]
|
||||
let e=[]
|
||||
const n=t.substring(1)
|
||||
return u(n).forEach((function(n){let s=n.slice(0)
|
||||
s[0]=t.charAt(0)+s[0],e.push(s),s=n.slice(0),s.unshift(t.charAt(0)),e.push(s)})),e},h=[[0,65535]]
|
||||
let d
|
||||
t.unicode_map=void 0
|
||||
const c={},f={"/":"⁄∕",0:"߀",a:"ⱥɐɑ",aa:"ꜳ",ae:"æǽǣ",ao:"ꜵ",au:"ꜷ",av:"ꜹꜻ",ay:"ꜽ",b:"ƀɓƃ",c:"ꜿƈȼↄ",d:"đɗɖᴅƌꮷԁɦ",e:"ɛǝᴇɇ",f:"ꝼƒ",g:"ǥɠꞡᵹꝿɢ",h:"ħⱨⱶɥ",i:"ɨı",j:"ɉȷ",k:"ƙⱪꝁꝃꝅꞣ",l:"łƚɫⱡꝉꝇꞁɭ",m:"ɱɯϻ",n:"ꞥƞɲꞑᴎлԉ",o:"øǿɔɵꝋꝍᴑ",oe:"œ",oi:"ƣ",oo:"ꝏ",ou:"ȣ",p:"ƥᵽꝑꝓꝕρ",q:"ꝗꝙɋ",r:"ɍɽꝛꞧꞃ",s:"ßȿꞩꞅʂ",t:"ŧƭʈⱦꞇ",th:"þ",tz:"ꜩ",u:"ʉ",v:"ʋꝟʌ",vy:"ꝡ",w:"ⱳ",y:"ƴɏỿ",z:"ƶȥɀⱬꝣ",hv:"ƕ"}
|
||||
for(let t in f){let e=f[t]||""
|
||||
for(let n=0;n<e.length;n++){let s=e.substring(n,n+1)
|
||||
c[s]=t}}const g=new RegExp(Object.keys(c).join("|")+"|[̀-ͯ·ʾʼ]","gu"),p=e=>{void 0===t.unicode_map&&(t.unicode_map=j(e||h))},b=(t,e="NFKD")=>t.normalize(e),m=t=>i(t).reduce(((t,e)=>t+w(e)),""),w=t=>(t=b(t).toLowerCase().replace(g,(t=>c[t]||"")),b(t,"NFC"))
|
||||
function*y(t){for(const[e,n]of t)for(let t=e;t<=n;t++){let e=String.fromCharCode(t),n=m(e)
|
||||
n!=e.toLowerCase()&&(n.length>3||0!=n.length&&(yield{folded:n,composed:e,code_point:t}))}}const _=t=>{const e={},n=(t,n)=>{const r=e[t]||new Set,a=new RegExp("^"+s(r)+"$","iu")
|
||||
n.match(a)||(r.add(o(n)),e[t]=r)}
|
||||
for(let e of y(t))n(e.folded,e.folded),n(e.folded,e.composed)
|
||||
return e},j=t=>{const n=_(t),r={}
|
||||
let a=[]
|
||||
for(let t in n){let e=n[t]
|
||||
e&&(r[t]=s(e)),t.length>1&&a.push(o(t))}a.sort(((t,e)=>e.length-t.length))
|
||||
const l=e(a)
|
||||
return d=new RegExp("^"+l,"u"),r},x=(e,s=1)=>{let r=0
|
||||
return e=e.map((e=>(t.unicode_map[e]&&(r+=e.length),t.unicode_map[e]||e))),r>=s?n(e):""},S=(t,n=1)=>(n=Math.max(n,t.length-1),e(u(t).map((t=>x(t,n))))),v=(t,s=!0)=>{let r=t.length>1?1:0
|
||||
return e(t.map((t=>{let e=[]
|
||||
const o=s?t.length():t.length()-1
|
||||
for(let n=0;n<o;n++)e.push(S(t.substrs[n]||"",r))
|
||||
return n(e)})))},z=(t,e)=>{for(const n of e){if(n.start!=t.start||n.end!=t.end)continue
|
||||
if(n.substrs.join("")!==t.substrs.join(""))continue
|
||||
let e=t.parts
|
||||
const s=t=>{for(const n of e){if(n.start===t.start&&n.substr===t.substr)return!1
|
||||
if(1!=t.length&&1!=n.length){if(t.start<n.start&&t.end>n.start)return!0
|
||||
if(n.start<t.start&&n.end>t.start)return!0}}return!1}
|
||||
if(!(n.parts.filter(s).length>0))return!0}return!1}
|
||||
class M{constructor(){this.parts=[],this.substrs=[],this.start=0,this.end=0}add(t){t&&(this.parts.push(t),this.substrs.push(t.substr),this.start=Math.min(t.start,this.start),this.end=Math.max(t.end,this.end))}last(){return this.parts[this.parts.length-1]}length(){return this.parts.length}clone(t,e){let n=new M,s=JSON.parse(JSON.stringify(this.parts)),r=s.pop()
|
||||
for(const t of s)n.add(t)
|
||||
let o=e.substr.substring(0,t-r.start),a=o.length
|
||||
return n.add({start:r.start,end:r.start+a,length:a,substr:o}),n}}t._asciifold=w,t.asciifold=m,t.code_points=h,t.escape_regex=o,t.generateMap=j,t.generateSets=_,t.generator=y,t.getPattern=t=>{p(),t=m(t)
|
||||
let e="",n=[new M]
|
||||
for(let s=0;s<t.length;s++){let r=t.substring(s).match(d)
|
||||
const o=t.substring(s,s+1),a=r?r[0]:null
|
||||
let l=[],i=new Set
|
||||
for(const t of n){const e=t.last()
|
||||
if(!e||1==e.length||e.end<=s)if(a){const e=a.length
|
||||
t.add({start:s,end:s+e,length:e,substr:a}),i.add("1")}else t.add({start:s,end:s+1,length:1,substr:o}),i.add("2")
|
||||
else if(a){let n=t.clone(s,e)
|
||||
const r=a.length
|
||||
n.add({start:s,end:s+r,length:r,substr:a}),l.push(n)}else i.add("3")}if(l.length>0){l=l.sort(((t,e)=>t.length()-e.length()))
|
||||
for(let t of l)z(t,n)||n.push(t)}else if(s>0&&1==i.size&&!i.has("3")){e+=v(n,!1)
|
||||
let t=new M
|
||||
const s=n[0]
|
||||
s&&t.add(s.last()),n=[t]}}return e+=v(n,!0),e},t.initialize=p,t.mapSequence=x,t.normalize=b,t.substringsToPattern=S,Object.defineProperty(t,"__esModule",{value:!0})}))
|
||||
//# sourceMappingURL=index.min.js.map
|
||||
1
node_modules/@orchidjs/unicode-variants/dist/umd/index.min.js.map
generated
vendored
Normal file
1
node_modules/@orchidjs/unicode-variants/dist/umd/index.min.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
97
node_modules/@orchidjs/unicode-variants/lib/diacritics.json
generated
vendored
Normal file
97
node_modules/@orchidjs/unicode-variants/lib/diacritics.json
generated
vendored
Normal file
@@ -0,0 +1,97 @@
|
||||
{
|
||||
" ":" ",
|
||||
"0":"߀",
|
||||
"A":"ⒶAÀÁÂẦẤẪẨÃĀĂẰẮẴẲÄǞẢÅǺǍȀȂẠẬẶḀĄȺⱯ",
|
||||
"AA":"Ꜳ",
|
||||
"AE":"ÆǼǢ",
|
||||
"AO":"Ꜵ",
|
||||
"AU":"Ꜷ",
|
||||
"AV":"ꜸꜺ",
|
||||
"AY":"Ꜽ",
|
||||
"B":"ⒷBḄḆɃƁ",
|
||||
"C":"ⒸCꜾḈĆCĈČÇƇȻ",
|
||||
"D":"ⒹDĎḌḐḒḎĐƊƉᴅ",
|
||||
"DZ":"DZDŽ",
|
||||
"Dz":"DzDž",
|
||||
"E":"ɛⒺEÈÉÊỀẾỄỂẼĒḔḖĔËẺĚȄȆẸỆȨḜĘḘḚƐƎᴇ",
|
||||
"F":"ꝼⒻFƑꝻ",
|
||||
"G":"ⒼGǴĜḠĞǦĢǤƓꞠꝽꝾɢ",
|
||||
"H":"ⒽHĤḦȞḤḨḪĦⱧⱵꞍ",
|
||||
"I":"ⒾIÌÍÎĨĪĬÏḮỈǏȈȊỊĮḬƗ",
|
||||
"J":"ⒿJĴɈȷ",
|
||||
"K":"ⓀKḰǨḲĶḴƘⱩꝀꝂꝄꞢ",
|
||||
"L":"ⓁLĿĹĽḶḸĻḼḺŁȽⱢⱠꝈꝆꞀ",
|
||||
"LJ":"LJ",
|
||||
"Lj":"Lj",
|
||||
"M":"ⓂMḾṂⱮƜϻ",
|
||||
"N":"ꞤȠⓃNǸŃÑŇṆŅṊṈƝꞐᴎ",
|
||||
"NJ":"NJ",
|
||||
"Nj":"Nj",
|
||||
"O":"ⓄOÒÓÔỒỐỖỔÕṌȬṎŌṐṒŎÖȪỎŐǑȌȎƠỜỚỠỞỢỌỘǪǬØǾƆƟꝊꝌ",
|
||||
"OE":"Œ",
|
||||
"OI":"Ƣ",
|
||||
"OO":"Ꝏ",
|
||||
"OU":"Ȣ",
|
||||
"P":"ⓅPṔƤⱣꝐꝒꝔ",
|
||||
"Q":"ⓆQꝖꝘɊ",
|
||||
"R":"ⓇRŔŘȐȒṚṜŖṞɌⱤꝚꞦꞂ",
|
||||
"S":"ⓈSẞŚŜŠṢȘŞⱾꞨꞄ",
|
||||
"T":"ⓉTŤṬȚŢṰṮŦƬƮȾꞆ",
|
||||
"Th":"Þ",
|
||||
"TZ":"Ꜩ",
|
||||
"U":"ⓊUÙÚÛŨṸŪṺŬÜǛǗǕǙỦŮŰǓȔȖƯỪỨỮỬỰỤṲŲṶṴɄ",
|
||||
"V":"ⓋVṼṾƲꝞɅ",
|
||||
"VY":"Ꝡ",
|
||||
"W":"ⓌWẀẂŴẄẈⱲ",
|
||||
"X":"ⓍXẌ",
|
||||
"Y":"ⓎYỲÝŶỸȲŸỶỴƳɎỾ",
|
||||
"Z":"ⓏZŹẐŽẒẔƵȤⱿⱫꝢ",
|
||||
"a":"ⓐaẚàáâầấẫẩãāăằắẵẳäǟảåǻǎȁȃạậặḁąⱥɑ",
|
||||
"aa":"ꜳ",
|
||||
"ae":"æǽǣ",
|
||||
"ao":"ꜵ",
|
||||
"au":"ꜷ",
|
||||
"av":"ꜹꜻ",
|
||||
"ay":"ꜽ",
|
||||
"b":"ⓑbḅḇƀƃɓƂ",
|
||||
"c":"cⓒćĉčçḉƈȼꜿↄ",
|
||||
"d":"ⓓdďḍḑḓḏđƌɖɗƋᏧԁꞪ",
|
||||
"dz":"dzdž",
|
||||
"e":"ⓔeèéêềếễểẽēḕḗĕëẻěȅȇẹệȩḝęḙḛɇǝ",
|
||||
"f":"ⓕfƒ",
|
||||
"ff":"ff",
|
||||
"fi":"fi",
|
||||
"fl":"fl",
|
||||
"ffi":"ffi",
|
||||
"ffl":"ffl",
|
||||
"g":"ⓖgǵĝḡğǧģǥɠꞡꝿᵹ",
|
||||
"h":"ⓗhĥḧȟḥḩḫẖħⱨⱶɥ",
|
||||
"hv":"ƕ",
|
||||
"i":"ⓘiìíîĩīĭïḯỉǐȉȋịįḭɨı",
|
||||
"j":"ⓙjĵǰɉ",
|
||||
"k":"ⓚkḱǩḳķḵƙⱪꝁꝃꝅꞣ",
|
||||
"l":"ⓛlŀĺľḷḹļḽḻłƚɫⱡꝉꞁꝇɭ",
|
||||
"lj":"lj",
|
||||
"m":"ⓜmḿṃɱɯ",
|
||||
"n":"ⓝnǹńñňṇņṋṉƞɲʼnꞑꞥлԉ",
|
||||
"nj":"nj",
|
||||
"o":"ⓞoòóôồốỗổõṍȭṏōṑṓŏöȫỏőǒȍȏơờớỡởợọộǫǭøǿꝋꝍɵɔᴑ",
|
||||
"oe":"œ",
|
||||
"oi":"ƣ",
|
||||
"oo":"ꝏ",
|
||||
"ou":"ȣ",
|
||||
"p":"ⓟpṕƥᵽꝑꝓꝕρ",
|
||||
"q":"ⓠqɋꝗꝙ",
|
||||
"r":"ⓡrŕřȑȓṛṝŗṟɍɽꝛꞧꞃ",
|
||||
"s":"ⓢsśŝšṣșşȿꞩꞅʂ",
|
||||
"t":"ⓣtẗťṭțţṱṯŧƭʈⱦꞇ",
|
||||
"th":"þ",
|
||||
"tz":"ꜩ",
|
||||
"u":"ⓤuùúûũṹūṻŭüǜǘǖǚủůűǔȕȗưừứữửựụṳųṷṵʉ",
|
||||
"v":"ⓥvṽṿʋꝟʌ",
|
||||
"vy":"ꝡ",
|
||||
"w":"ⓦwẁẃŵẅẘẉⱳ",
|
||||
"x":"ⓧxẍ",
|
||||
"y":"ⓨyỳýŷỹȳÿỷẙỵƴɏỿ",
|
||||
"z":"ⓩzźẑžẓẕƶȥɀⱬꝣ"
|
||||
}
|
||||
11
node_modules/@orchidjs/unicode-variants/lib/index.d.ts
generated
vendored
Normal file
11
node_modules/@orchidjs/unicode-variants/lib/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
|
||||
|
||||
type TUnicodeMap = {[key:string]:string};
|
||||
|
||||
type TUnicodeSets = {[key:string]:Set<string>};
|
||||
|
||||
type TCodePoints = [[number,number]];
|
||||
|
||||
type TCodePointObj = {folded:string,composed:string,code_point:number}
|
||||
|
||||
type TSequencePart = {start:number,end:number,length:number,substr:string}
|
||||
572
node_modules/@orchidjs/unicode-variants/lib/index.mjs
generated
vendored
Normal file
572
node_modules/@orchidjs/unicode-variants/lib/index.mjs
generated
vendored
Normal file
@@ -0,0 +1,572 @@
|
||||
|
||||
/**
|
||||
* @typedef {{[key:string]:string}} TUnicodeMap
|
||||
* @typedef {{[key:string]:Set<string>}} TUnicodeSets
|
||||
* @typedef {[[number,number]]} TCodePoints
|
||||
* @typedef {{folded:string,composed:string,code_point:number}} TCodePointObj
|
||||
* @typedef {{start:number,end:number,length:number,substr:string}} TSequencePart
|
||||
*/
|
||||
|
||||
|
||||
import { setToPattern, arrayToPattern, escape_regex, sequencePattern, toArray } from './regex.mjs';
|
||||
import { allSubstrings } from './strings.mjs';
|
||||
|
||||
|
||||
/** @type {TCodePoints} */
|
||||
export const code_points = [[ 0, 65535 ]];
|
||||
|
||||
const accent_pat = '[\u0300-\u036F\u{b7}\u{2be}\u{2bc}]';
|
||||
|
||||
/** @type {TUnicodeMap} */
|
||||
export let unicode_map;
|
||||
|
||||
/** @type {RegExp} */
|
||||
let multi_char_reg;
|
||||
|
||||
const max_char_length = 3;
|
||||
|
||||
/** @type {TUnicodeMap} */
|
||||
const latin_convert = {}
|
||||
|
||||
/** @type {TUnicodeMap} */
|
||||
const latin_condensed = {
|
||||
'/': '⁄∕',
|
||||
'0': '߀',
|
||||
"a": "ⱥɐɑ",
|
||||
"aa": "ꜳ",
|
||||
"ae": "æǽǣ",
|
||||
"ao": "ꜵ",
|
||||
"au": "ꜷ",
|
||||
"av": "ꜹꜻ",
|
||||
"ay": "ꜽ",
|
||||
"b": "ƀɓƃ",
|
||||
"c": "ꜿƈȼↄ",
|
||||
"d": "đɗɖᴅƌꮷԁɦ",
|
||||
"e": "ɛǝᴇɇ",
|
||||
"f": "ꝼƒ",
|
||||
"g": "ǥɠꞡᵹꝿɢ",
|
||||
"h": "ħⱨⱶɥ",
|
||||
"i": "ɨı",
|
||||
"j": "ɉȷ",
|
||||
"k": "ƙⱪꝁꝃꝅꞣ",
|
||||
"l": "łƚɫⱡꝉꝇꞁɭ",
|
||||
"m": "ɱɯϻ",
|
||||
"n": "ꞥƞɲꞑᴎлԉ",
|
||||
"o": "øǿɔɵꝋꝍᴑ",
|
||||
"oe": "œ",
|
||||
"oi": "ƣ",
|
||||
"oo": "ꝏ",
|
||||
"ou": "ȣ",
|
||||
"p": "ƥᵽꝑꝓꝕρ",
|
||||
"q": "ꝗꝙɋ",
|
||||
"r": "ɍɽꝛꞧꞃ",
|
||||
"s": "ßȿꞩꞅʂ",
|
||||
"t": "ŧƭʈⱦꞇ",
|
||||
"th": "þ",
|
||||
"tz": "ꜩ",
|
||||
"u": "ʉ",
|
||||
"v": "ʋꝟʌ",
|
||||
"vy": "ꝡ",
|
||||
"w": "ⱳ",
|
||||
"y": "ƴɏỿ",
|
||||
"z": "ƶȥɀⱬꝣ",
|
||||
"hv": "ƕ"
|
||||
}
|
||||
|
||||
|
||||
for( let latin in latin_condensed ){
|
||||
let unicode = latin_condensed[latin] || '';
|
||||
for( let i = 0; i < unicode.length; i++){
|
||||
let char = unicode.substring(i,i+1);
|
||||
latin_convert[char] = latin;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
const convert_pat = new RegExp(Object.keys(latin_convert).join('|')+'|'+accent_pat,'gu');
|
||||
|
||||
|
||||
|
||||
/**
|
||||
 * Initialize the unicode_map from the given code point ranges
|
||||
*
|
||||
* @param {TCodePoints=} _code_points
|
||||
*/
|
||||
export const initialize = (_code_points) => {
|
||||
if( unicode_map !== undefined ) return;
|
||||
unicode_map = generateMap(_code_points || code_points );
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Helper method for normalize a string
|
||||
* https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/normalize
|
||||
* @param {string} str
|
||||
* @param {string} form
|
||||
*/
|
||||
export const normalize = (str,form='NFKD') => str.normalize(form);
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Remove accents without reordering string
|
||||
* calling str.normalize('NFKD') on \u{594}\u{595}\u{596} becomes \u{596}\u{594}\u{595}
|
||||
* via https://github.com/krisk/Fuse/issues/133#issuecomment-318692703
|
||||
* @param {string} str
|
||||
* @return {string}
|
||||
*/
|
||||
export const asciifold = (str) => {
|
||||
|
||||
return toArray(str).reduce(
|
||||
/**
|
||||
* @param {string} result
|
||||
* @param {string} char
|
||||
*/
|
||||
(result, char) =>{
|
||||
return result + _asciifold(char)
|
||||
},
|
||||
''
|
||||
);
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {string} str
|
||||
* @return {string}
|
||||
*/
|
||||
export const _asciifold = (str) => {
|
||||
str = normalize(str)
|
||||
.toLowerCase()
|
||||
.replace(convert_pat,(/** @type {string} */ char) => {
|
||||
return latin_convert[char] || '';
|
||||
})
|
||||
|
||||
//return str;
|
||||
return normalize(str,'NFC')
|
||||
};
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Generate a list of unicode variants from the list of code points
|
||||
* @param {TCodePoints} code_points
|
||||
* @yield {TCodePointObj}
|
||||
*/
|
||||
export function* generator(code_points){
|
||||
|
||||
for(const [code_point_min, code_point_max] of code_points){
|
||||
for(let i = code_point_min; i <= code_point_max; i++){
|
||||
|
||||
let composed = String.fromCharCode(i);
|
||||
let folded = asciifold(composed);
|
||||
|
||||
|
||||
if( folded == composed.toLowerCase() ){
|
||||
continue;
|
||||
}
|
||||
|
||||
// skip when folded is a string longer than 3 characters long
|
||||
// bc the resulting regex patterns will be long
|
||||
// eg:
|
||||
// folded صلى الله عليه وسلم length 18 code point 65018
|
||||
// folded جل جلاله length 8 code point 65019
|
||||
if( folded.length > max_char_length ){
|
||||
continue;
|
||||
}
|
||||
|
||||
if( folded.length == 0 ){
|
||||
continue
|
||||
}
|
||||
|
||||
|
||||
yield {folded:folded,composed:composed,code_point:i};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Generate a unicode map from the list of code points
|
||||
* @param {TCodePoints} code_points
|
||||
* @return {TUnicodeSets}
|
||||
*/
|
||||
export const generateSets = (code_points) => {
|
||||
|
||||
/** @type {{[key:string]:Set<string>}} */
|
||||
const unicode_sets = {};
|
||||
|
||||
|
||||
/**
|
||||
* @param {string} folded
|
||||
* @param {string} to_add
|
||||
*/
|
||||
const addMatching = (folded,to_add) => {
|
||||
|
||||
/** @type {Set<string>} */
|
||||
const folded_set = unicode_sets[folded] || new Set();
|
||||
|
||||
const patt = new RegExp( '^'+setToPattern(folded_set)+'$','iu');
|
||||
if( to_add.match(patt) ){
|
||||
return;
|
||||
}
|
||||
|
||||
folded_set.add(escape_regex(to_add));
|
||||
unicode_sets[folded] = folded_set;
|
||||
}
|
||||
|
||||
|
||||
for( let value of generator(code_points) ){
|
||||
addMatching(value.folded,value.folded);
|
||||
addMatching(value.folded,value.composed);
|
||||
}
|
||||
|
||||
return unicode_sets;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a unicode map from the list of code points
|
||||
* ae => (?:(?:ae|Æ|Ǽ|Ǣ)|(?:A|Ⓐ|A...)(?:E|ɛ|Ⓔ...))
|
||||
*
|
||||
* @param {TCodePoints} code_points
|
||||
* @return {TUnicodeMap}
|
||||
*/
|
||||
export const generateMap = (code_points) => {
|
||||
|
||||
/** @type {TUnicodeSets} */
|
||||
const unicode_sets = generateSets(code_points);
|
||||
|
||||
/** @type {TUnicodeMap} */
|
||||
const unicode_map = {};
|
||||
|
||||
/** @type {string[]} */
|
||||
let multi_char = [];
|
||||
|
||||
for( let folded in unicode_sets ){
|
||||
|
||||
let set = unicode_sets[folded];
|
||||
if( set ){
|
||||
unicode_map[folded] = setToPattern(set);
|
||||
}
|
||||
|
||||
if( folded.length > 1 ){
|
||||
multi_char.push(escape_regex(folded));
|
||||
}
|
||||
}
|
||||
|
||||
multi_char.sort((a, b) => b.length - a.length );
|
||||
const multi_char_patt = arrayToPattern(multi_char);
|
||||
multi_char_reg = new RegExp('^'+multi_char_patt,'u');
|
||||
|
||||
return unicode_map;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Map each element of an array from it's folded value to all possible unicode matches
|
||||
* @param {string[]} strings
|
||||
* @param {number} min_replacement
|
||||
* @return {string}
|
||||
*/
|
||||
export const mapSequence = (strings,min_replacement=1) =>{
|
||||
let chars_replaced = 0;
|
||||
|
||||
|
||||
strings = strings.map((str)=>{
|
||||
if( unicode_map[str] ){
|
||||
chars_replaced += str.length;
|
||||
}
|
||||
return unicode_map[str] || str;
|
||||
});
|
||||
|
||||
if( chars_replaced >= min_replacement ){
|
||||
return sequencePattern(strings);
|
||||
}
|
||||
|
||||
return '';
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert a short string and split it into all possible patterns
|
||||
* Keep a pattern only if min_replacement is met
|
||||
*
|
||||
* 'abc'
|
||||
* => [['abc'],['ab','c'],['a','bc'],['a','b','c']]
|
||||
* => ['abc-pattern','ab-c-pattern'...]
|
||||
*
|
||||
*
|
||||
* @param {string} str
|
||||
* @param {number} min_replacement
|
||||
* @return {string}
|
||||
*/
|
||||
export const substringsToPattern = (str,min_replacement=1) => {
|
||||
|
||||
min_replacement = Math.max(min_replacement,str.length-1);
|
||||
|
||||
return arrayToPattern(
|
||||
allSubstrings(str).map( (sub_pat) =>{
|
||||
return mapSequence(sub_pat,min_replacement)
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert an array of sequences into a pattern
|
||||
* [{start:0,end:3,length:3,substr:'iii'}...] => (?:iii...)
|
||||
*
|
||||
* @param {Sequence[]} sequences
|
||||
* @param {boolean} all
|
||||
*/
|
||||
const sequencesToPattern = (sequences,all=true) => {
|
||||
|
||||
let min_replacement = sequences.length > 1 ? 1 : 0;
|
||||
return arrayToPattern(
|
||||
sequences.map( (sequence) =>{
|
||||
let seq = [];
|
||||
const len = all ? sequence.length() : sequence.length() - 1;
|
||||
for( let j = 0; j < len; j++){
|
||||
seq.push(substringsToPattern(sequence.substrs[j]||'',min_replacement));
|
||||
}
|
||||
|
||||
return sequencePattern(seq);
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Return true if the sequence is already in the sequences
|
||||
* @param {Sequence} needle_seq
|
||||
* @param {Sequence[]} sequences
|
||||
*/
|
||||
const inSequences = (needle_seq, sequences) => {
|
||||
|
||||
for(const seq of sequences){
|
||||
|
||||
if( seq.start != needle_seq.start || seq.end != needle_seq.end ){
|
||||
continue;
|
||||
}
|
||||
|
||||
if( seq.substrs.join('') !== needle_seq.substrs.join('') ){
|
||||
continue;
|
||||
}
|
||||
|
||||
|
||||
let needle_parts = needle_seq.parts;
|
||||
|
||||
/**
|
||||
* @param {TSequencePart} part
|
||||
*/
|
||||
const filter = (part) =>{
|
||||
|
||||
for(const needle_part of needle_parts){
|
||||
|
||||
if( needle_part.start === part.start && needle_part.substr === part.substr ){
|
||||
return false;
|
||||
}
|
||||
|
||||
if( part.length == 1 || needle_part.length == 1 ){
|
||||
continue;
|
||||
}
|
||||
|
||||
|
||||
// check for overlapping parts
|
||||
// a = ['::=','==']
|
||||
// b = ['::','===']
|
||||
// a = ['r','sm']
|
||||
// b = ['rs','m']
|
||||
if( part.start < needle_part.start && part.end > needle_part.start ){
|
||||
return true;
|
||||
}
|
||||
|
||||
if( needle_part.start < part.start && needle_part.end > part.start ){
|
||||
return true;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return false;
|
||||
};
|
||||
|
||||
let filtered = seq.parts.filter(filter);
|
||||
|
||||
if( filtered.length > 0 ){
|
||||
continue;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
class Sequence{
|
||||
|
||||
constructor(){
|
||||
|
||||
/** @type {TSequencePart[]} */
|
||||
this.parts = [];
|
||||
|
||||
/** @type {string[]} */
|
||||
this.substrs = [];
|
||||
this.start = 0;
|
||||
this.end = 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {TSequencePart|undefined} part
|
||||
*/
|
||||
add(part){
|
||||
if( part ){
|
||||
this.parts.push(part);
|
||||
this.substrs.push(part.substr);
|
||||
this.start = Math.min(part.start,this.start);
|
||||
this.end = Math.max(part.end,this.end);
|
||||
}
|
||||
}
|
||||
|
||||
last(){
|
||||
return this.parts[this.parts.length-1];
|
||||
}
|
||||
|
||||
length(){
|
||||
return this.parts.length;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {number} position
|
||||
* @param {TSequencePart} last_piece
|
||||
*/
|
||||
clone(position, last_piece){
|
||||
let clone = new Sequence();
|
||||
|
||||
let parts = JSON.parse(JSON.stringify(this.parts));
|
||||
let last_part = parts.pop();
|
||||
for( const part of parts ){
|
||||
clone.add(part);
|
||||
}
|
||||
|
||||
let last_substr = last_piece.substr.substring(0,position-last_part.start);
|
||||
let clone_last_len = last_substr.length;
|
||||
clone.add({start:last_part.start,end:last_part.start+clone_last_len,length:clone_last_len,substr:last_substr});
|
||||
|
||||
return clone;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Expand a regular expression pattern to include unicode variants
|
||||
* eg /a/ becomes /aⓐaẚàáâầấẫẩãāăằắẵẳȧǡäǟảåǻǎȁȃạậặḁąⱥɐɑAⒶAÀÁÂẦẤẪẨÃĀĂẰẮẴẲȦǠÄǞẢÅǺǍȀȂẠẬẶḀĄȺⱯ/
|
||||
*
|
||||
* Issue:
|
||||
* ﺊﺋ [ 'ﺊ = \\u{fe8a}', 'ﺋ = \\u{fe8b}' ]
|
||||
* becomes: ئئ [ 'ي = \\u{64a}', 'ٔ = \\u{654}', 'ي = \\u{64a}', 'ٔ = \\u{654}' ]
|
||||
*
|
||||
* İIJ = IIJ = ⅡJ
|
||||
*
|
||||
* 1/2/4
|
||||
*
|
||||
* @param {string} str
|
||||
* @return {string|undefined}
|
||||
*/
|
||||
export const getPattern = (str) => {
|
||||
initialize();
|
||||
|
||||
str = asciifold(str);
|
||||
|
||||
let pattern = '';
|
||||
let sequences = [new Sequence()];
|
||||
|
||||
for( let i = 0; i < str.length; i++ ){
|
||||
|
||||
let substr = str.substring(i);
|
||||
let match = substr.match(multi_char_reg);
|
||||
const char = str.substring(i,i+1);
|
||||
const match_str = match ? match[0] : null;
|
||||
|
||||
|
||||
// loop through sequences
|
||||
// add either the char or multi_match
|
||||
let overlapping = [];
|
||||
let added_types = new Set();
|
||||
for(const sequence of sequences){
|
||||
|
||||
const last_piece = sequence.last();
|
||||
|
||||
|
||||
if( !last_piece || last_piece.length == 1 || last_piece.end <= i ){
|
||||
|
||||
// if we have a multi match
|
||||
if( match_str ){
|
||||
const len = match_str.length;
|
||||
sequence.add({start:i,end:i+len,length:len,substr:match_str});
|
||||
added_types.add('1');
|
||||
}else{
|
||||
sequence.add({start:i,end:i+1,length:1,substr:char});
|
||||
added_types.add('2');
|
||||
}
|
||||
|
||||
}else if( match_str ){
|
||||
|
||||
let clone = sequence.clone(i,last_piece);
|
||||
|
||||
const len = match_str.length;
|
||||
clone.add({start:i,end:i+len,length:len,substr:match_str});
|
||||
|
||||
overlapping.push(clone);
|
||||
|
||||
}else{
|
||||
// don't add char
|
||||
// adding would create invalid patterns: 234 => [2,34,4]
|
||||
added_types.add('3');
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
// if we have overlapping
|
||||
if( overlapping.length > 0 ){
|
||||
|
||||
// ['ii','iii'] before ['i','i','iii']
|
||||
overlapping = overlapping.sort((a,b)=>{
|
||||
return a.length() - b.length();
|
||||
});
|
||||
|
||||
for( let clone of overlapping){
|
||||
|
||||
// don't add if we already have an equivalent sequence
|
||||
if( inSequences(clone, sequences) ){
|
||||
continue;
|
||||
}
|
||||
|
||||
sequences.push(clone);
|
||||
}
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
|
||||
// if we haven't done anything unique
|
||||
// clean up the patterns
|
||||
// helps keep patterns smaller
|
||||
// if str = 'r₨㎧aarss', pattern will be 446 instead of 655
|
||||
if( i > 0 && added_types.size == 1 && !added_types.has('3') ){
|
||||
pattern += sequencesToPattern(sequences,false);
|
||||
let new_seq = new Sequence();
|
||||
const old_seq = sequences[0];
|
||||
if( old_seq ){
|
||||
new_seq.add(old_seq.last());
|
||||
}
|
||||
sequences = [new_seq];
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
pattern += sequencesToPattern(sequences,true);
|
||||
|
||||
return pattern;
|
||||
}
|
||||
|
||||
|
||||
export { escape_regex };
|
||||
114
node_modules/@orchidjs/unicode-variants/lib/regex.mjs
generated
vendored
Normal file
114
node_modules/@orchidjs/unicode-variants/lib/regex.mjs
generated
vendored
Normal file
@@ -0,0 +1,114 @@
|
||||
|
||||
/**
|
||||
* Convert array of strings to a regular expression
|
||||
* ex ['ab','a'] => (?:ab|a)
|
||||
* ex ['a','b'] => [ab]
|
||||
* @param {string[]} chars
|
||||
* @return {string}
|
||||
*/
|
||||
export const arrayToPattern = (chars) =>{
|
||||
|
||||
chars = chars.filter( Boolean );
|
||||
|
||||
if( chars.length < 2 ){
|
||||
return chars[0] || '';
|
||||
}
|
||||
|
||||
return (maxValueLength(chars) == 1) ? '['+chars.join('')+']' : '(?:'+chars.join('|')+')';
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {string[]} array
|
||||
* @return {string}
|
||||
*/
|
||||
export const sequencePattern = (array)=>{
|
||||
|
||||
if( !hasDuplicates(array) ){
|
||||
return array.join('');
|
||||
}
|
||||
|
||||
let pattern = '';
|
||||
let prev_char_count = 0;
|
||||
|
||||
const prev_pattern = ()=>{
|
||||
if( prev_char_count > 1 ){
|
||||
pattern += '{'+prev_char_count+'}';
|
||||
}
|
||||
}
|
||||
|
||||
array.forEach((char,i)=>{
|
||||
|
||||
if( char === array[i-1] ){
|
||||
prev_char_count++;
|
||||
return;
|
||||
}
|
||||
|
||||
prev_pattern();
|
||||
|
||||
pattern += char;
|
||||
prev_char_count = 1;
|
||||
});
|
||||
|
||||
prev_pattern();
|
||||
|
||||
return pattern;
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
/**
|
||||
 * Convert a Set of strings to a regular expression
|
||||
* ex ['ab','a'] => (?:ab|a)
|
||||
* ex ['a','b'] => [ab]
|
||||
* @param {Set<string>} chars
|
||||
* @return {string}
|
||||
*/
|
||||
export const setToPattern = (chars)=>{
|
||||
let array = toArray(chars);
|
||||
return arrayToPattern(array);
|
||||
}
|
||||
|
||||
|
||||
|
||||
/**
|
||||
*
|
||||
* https://stackoverflow.com/questions/7376598/in-javascript-how-do-i-check-if-an-array-has-duplicate-values
|
||||
* @param {any[]} array
|
||||
*/
|
||||
export const hasDuplicates = (array) => {
|
||||
return (new Set(array)).size !== array.length;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* https://stackoverflow.com/questions/63006601/why-does-u-throw-an-invalid-escape-error
|
||||
* @param {string} str
|
||||
* @return {string}
|
||||
*/
|
||||
export const escape_regex = (str) => {
|
||||
return (str + '').replace(/([\$\(\)\*\+\.\?\[\]\^\{\|\}\\])/gu, '\\$1');
|
||||
};
|
||||
|
||||
/**
|
||||
* Return the max length of array values
|
||||
* @param {string[]} array
|
||||
*
|
||||
*/
|
||||
export const maxValueLength = (array) => {
|
||||
return array.reduce( (longest, value) => Math.max(longest,unicodeLength(value)),0);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @param {string} str
|
||||
*/
|
||||
export const unicodeLength = (str) => {
|
||||
return toArray(str).length;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {any} p
|
||||
* @return {any[]}
|
||||
*/
|
||||
export const toArray = (p) => Array.from(p);
|
||||
31
node_modules/@orchidjs/unicode-variants/lib/strings.mjs
generated
vendored
Normal file
31
node_modules/@orchidjs/unicode-variants/lib/strings.mjs
generated
vendored
Normal file
@@ -0,0 +1,31 @@
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Get all possible combinations of substrings that add up to the given string
|
||||
* https://stackoverflow.com/questions/30169587/find-all-the-combination-of-substrings-that-add-up-to-the-given-string
|
||||
* @param {string} input
|
||||
* @return {string[][]}
|
||||
*/
|
||||
export const allSubstrings = (input) => {
|
||||
|
||||
if( input.length === 1) return [[input]];
|
||||
|
||||
/** @type {string[][]} */
|
||||
let result = [];
|
||||
|
||||
const start = input.substring(1);
|
||||
const suba = allSubstrings(start);
|
||||
|
||||
suba.forEach(function(subresult) {
|
||||
let tmp = subresult.slice(0);
|
||||
tmp[0] = input.charAt(0) + tmp[0];
|
||||
result.push(tmp);
|
||||
|
||||
tmp = subresult.slice(0);
|
||||
tmp.unshift(input.charAt(0));
|
||||
result.push(tmp);
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
72
node_modules/@orchidjs/unicode-variants/package.json
generated
vendored
Normal file
72
node_modules/@orchidjs/unicode-variants/package.json
generated
vendored
Normal file
@@ -0,0 +1,72 @@
|
||||
{
|
||||
"name": "@orchidjs/unicode-variants",
|
||||
"version": "1.0.4",
|
||||
"description": "Unicode variant string matching",
|
||||
"main": "dist/umd/index.js",
|
||||
"browser": "dist/umd/index.js",
|
||||
"module": "dist/esm/index.js",
|
||||
"types": "dist/types/index.d.mts",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/orchidjs/unicode-variants.git"
|
||||
},
|
||||
"scripts": {
|
||||
"pretest": "npm run build",
|
||||
"test": "jest",
|
||||
"test:coveralls": "npm run build && jest --coverage && cat ./coverage/lcov.info | coveralls",
|
||||
"test:typescript": "tsc -p .config --noemit",
|
||||
"bench": "npx 0x -- node --zero-fill-buffers bench.mjs",
|
||||
"build": "npx rollup -c .config/rollup.config.js",
|
||||
"build:types": "tsc -p .config --emitDeclarationOnly"
|
||||
},
|
||||
"keywords": [
|
||||
"unicode",
|
||||
"diacritic",
|
||||
"diacritics",
|
||||
"matching",
|
||||
"search",
|
||||
"find",
|
||||
"string",
|
||||
"text"
|
||||
],
|
||||
"author": "",
|
||||
"license": "Apache-2.0",
|
||||
"files": [
|
||||
"/dist",
|
||||
"/lib"
|
||||
],
|
||||
"dependencies": {},
|
||||
"browserslist": [
|
||||
">= 0.5%",
|
||||
"not dead",
|
||||
"Chrome >= 60",
|
||||
"Firefox >= 60",
|
||||
"Firefox ESR",
|
||||
"Edge >= 17",
|
||||
"iOS >= 12",
|
||||
"Safari >= 12",
|
||||
"not Explorer <= 11"
|
||||
],
|
||||
"devDependencies": {
|
||||
"0x": "^5.4.1",
|
||||
"@babel/core": "^7.18.10",
|
||||
"@babel/preset-env": "^7.18.10",
|
||||
"@babel/preset-typescript": "^7.18.6",
|
||||
"@rollup/plugin-babel": "^5.3.1",
|
||||
"coveralls": "^3.1.1",
|
||||
"jest": "^28.1.3",
|
||||
"rollup": "^2.77.2",
|
||||
"rollup-plugin-terser": "^7.0.2",
|
||||
"typescript": "^4.7.4"
|
||||
},
|
||||
"jest": {
|
||||
"verbose": true,
|
||||
"testMatch": [
|
||||
"**/test/**/*.js"
|
||||
]
|
||||
},
|
||||
"publishConfig": {
|
||||
"access": "public",
|
||||
"registry": "https://registry.npmjs.org/"
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user