Mirror of https://github.com/philomena-dev/philomena.git, synced 2024-11-23 20:18:00 +01:00

match_query: unit test and rewrite for TypeScript (#208)

* match_query: unit test and rewrite for TypeScript
* match_query: use new type for parse errors
* match_query: avoid exceptional control flow in date parsing

This commit is contained in:
parent 07d326b5bd
commit 3590be1429

25 changed files with 1412 additions and 902 deletions
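
For orientation, here is a minimal usage sketch of the parser this commit rewrites. It is based on the new assets/js/match_query.ts and the parse.spec.ts tests below; the element and attribute values are illustrative:

import parseSearch from './match_query';

// parseSearch compiles a query string into a matcher over elements
// carrying the image data attributes (see termSpaceToImageField below).
const matcher = parseSearch('safe AND solo');

const image = document.createElement('div');
image.setAttribute('data-image-tag-aliases', 'safe, solo, fluttershy');

matcher(image); // true: both terms appear in the tag list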

@@ -1,877 +0,0 @@
/**
 * booru.match_query: A port and modification of the search_parser library for
 * performing client-side filtering.
 */

const tokenList = [
        ['fuzz', /^~(?:\d+(\.\d+)?|\.\d+)/],
        ['boost', /^\^[-+]?\d+(\.\d+)?/],
        ['quoted_lit', /^\s*"(?:[^"]|\\")+"/],
        ['lparen', /^\s*\(\s*/],
        ['rparen', /^\s*\)\s*/],
        ['and_op', /^\s*(?:&&|AND)\s+/],
        ['and_op', /^\s*,\s*/],
        ['or_op', /^\s*(?:\|\||OR)\s+/],
        ['not_op', /^\s*NOT(?:\s+|(?=\())/],
        ['not_op', /^\s*[!-]\s*/],
        ['space', /^\s+/],
        ['word', /^(?:\\[\s,()^~]|[^\s,()^~])+/],
        ['word', /^(?:\\[\s,()]|[^\s,()])+/]
      ],
      numberFields = ['id', 'width', 'height', 'aspect_ratio',
                      'comment_count', 'score', 'upvotes', 'downvotes',
                      'faves', 'tag_count'],
      dateFields = ['created_at'],
      literalFields = ['tags', 'orig_sha512_hash', 'sha512_hash',
                       'score', 'uploader', 'source_url', 'description'],
      termSpaceToImageField = {
        tags: 'data-image-tag-aliases',
        score: 'data-score',
        upvotes: 'data-upvotes',
        downvotes: 'data-downvotes',
        uploader: 'data-uploader',
        // Yeah, I don't think this is reasonably supportable.
        // faved_by: 'data-faved-by',
        id: 'data-image-id',
        width: 'data-width',
        height: 'data-height',
        /* eslint-disable camelcase */
        aspect_ratio: 'data-aspect-ratio',
        comment_count: 'data-comment-count',
        tag_count: 'data-tag-count',
        source_url: 'data-source-url',
        faves: 'data-faves',
        sha512_hash: 'data-sha512',
        orig_sha512_hash: 'data-orig-sha512',
        created_at: 'data-created-at'
        /* eslint-enable camelcase */
      };

function SearchTerm(termStr) {
  this.term = termStr.trim();
  this.parsed = false;
}

SearchTerm.prototype.append = function(substr) {
  this.term += substr;
  this.parsed = false;
};

SearchTerm.prototype.parseRangeField = function(field) {
  if (numberFields.indexOf(field) !== -1) {
    return [field, 'eq', 'number'];
  }

  if (dateFields.indexOf(field) !== -1) {
    return [field, 'eq', 'date'];
  }

  const qual = /^(\w+)\.([lg]te?|eq)$/.exec(field);

  if (qual) {
    if (numberFields.indexOf(qual[1]) !== -1) {
      return [qual[1], qual[2], 'number'];
    }

    if (dateFields.indexOf(qual[1]) !== -1) {
      return [qual[1], qual[2], 'date'];
    }
  }

  return null;
};

SearchTerm.prototype.parseRelativeDate = function(dateVal, qual) {
  const match = /(\d+) (second|minute|hour|day|week|month|year)s? ago/.exec(dateVal);
  const bounds = {
    second: 1000,
    minute: 60000,
    hour: 3600000,
    day: 86400000,
    week: 604800000,
    month: 2592000000,
    year: 31536000000
  };

  if (match) {
    const amount = parseInt(match[1], 10);
    const scale = bounds[match[2]];

    const now = new Date().getTime();
    const bottomDate = new Date(now - (amount * scale));
    const topDate = new Date(now - ((amount - 1) * scale));

    switch (qual) {
      case 'lte':
        return [bottomDate, 'lt'];
      case 'gte':
        return [bottomDate, 'gte'];
      case 'lt':
        return [bottomDate, 'lt'];
      case 'gt':
        return [bottomDate, 'gte'];
      default:
        return [[bottomDate, topDate], 'eq'];
    }
  }
  else {
    throw new Error(`Cannot parse date string: ${dateVal}`);
  }
};

SearchTerm.prototype.parseAbsoluteDate = function(dateVal, qual) {
  const parseRes = [
          /^(\d{4})/,
          /^-(\d{2})/,
          /^-(\d{2})/,
          /^(?:\s+|T|t)(\d{2})/,
          /^:(\d{2})/,
          /^:(\d{2})/
        ],
        timeZoneOffset = [0, 0],
        timeData = [0, 0, 1, 0, 0, 0],
        origDateVal = dateVal;
  let topDate = null,
      i,
      match,
      bottomDate = null,
      localDateVal = origDateVal;

  match = /([+-])(\d{2}):(\d{2})$/.exec(localDateVal);
  if (match) {
    timeZoneOffset[0] = parseInt(match[2], 10);
    timeZoneOffset[1] = parseInt(match[3], 10);
    if (match[1] === '-') {
      timeZoneOffset[0] *= -1;
      timeZoneOffset[1] *= -1;
    }
    localDateVal = localDateVal.substr(0, localDateVal.length - 6);
  }
  else {
    localDateVal = localDateVal.replace(/[Zz]$/, '');
  }

  for (i = 0; i < parseRes.length; i += 1) {
    if (localDateVal.length === 0) {
      break;
    }

    match = parseRes[i].exec(localDateVal);
    if (match) {
      if (i === 1) {
        timeData[i] = parseInt(match[1], 10) - 1;
      }
      else {
        timeData[i] = parseInt(match[1], 10);
      }
      localDateVal = localDateVal.substr(
        match[0].length, localDateVal.length - match[0].length
      );
    }
    else {
      throw new Error(`Cannot parse date string: ${origDateVal}`);
    }
  }

  if (localDateVal.length > 0) {
    throw new Error(`Cannot parse date string: ${origDateVal}`);
  }

  // Apply the user-specified time zone offset. The JS Date constructor
  // is very flexible here.
  timeData[3] -= timeZoneOffset[0];
  timeData[4] -= timeZoneOffset[1];

  switch (qual) {
    case 'lte':
      timeData[i - 1] += 1;
      return [Date.UTC.apply(Date, timeData), 'lt'];
    case 'gte':
      return [Date.UTC.apply(Date, timeData), 'gte'];
    case 'lt':
      return [Date.UTC.apply(Date, timeData), 'lt'];
    case 'gt':
      timeData[i - 1] += 1;
      return [Date.UTC.apply(Date, timeData), 'gte'];
    default:
      bottomDate = Date.UTC.apply(Date, timeData);
      timeData[i - 1] += 1;
      topDate = Date.UTC.apply(Date, timeData);
      return [[bottomDate, topDate], 'eq'];
  }
};

SearchTerm.prototype.parseDate = function(dateVal, qual) {
  try {
    return this.parseAbsoluteDate(dateVal, qual);
  }
  catch (_) {
    return this.parseRelativeDate(dateVal, qual);
  }
};

SearchTerm.prototype.parse = function() {
  let rangeParsing,
      candidateTermSpace,
      termCandidate;

  this.wildcardable = !this.fuzz && !/^"([^"]|\\")+"$/.test(this.term);

  if (!this.wildcardable && !this.fuzz) {
    this.term = this.term.substr(1, this.term.length - 2);
  }

  this.term = this._normalizeTerm();

  // N.B.: For the purposes of this parser, boosting effects are ignored.

  // Default.
  this.termSpace = 'tags';
  this.termType = 'literal';

  const matchArr = this.term.split(':');

  if (matchArr.length > 1) {
    candidateTermSpace = matchArr[0];
    termCandidate = matchArr.slice(1).join(':');
    rangeParsing = this.parseRangeField(candidateTermSpace);

    if (rangeParsing) {
      this.termSpace = rangeParsing[0];
      this.termType = rangeParsing[2];

      if (this.termType === 'date') {
        rangeParsing = this.parseDate(termCandidate, rangeParsing[1]);
        this.term = rangeParsing[0];
        this.compare = rangeParsing[1];
      }
      else {
        this.term = parseFloat(termCandidate);
        this.compare = rangeParsing[1];
      }

      this.wildcardable = false;
    }
    else if (literalFields.indexOf(candidateTermSpace) !== -1) {
      this.termType = 'literal';
      this.term = termCandidate;
      this.termSpace = candidateTermSpace;
    }
    else if (candidateTermSpace === 'my') {
      this.termType = 'my';
      this.termSpace = termCandidate;
    }
  }

  if (this.wildcardable) {
    // Transforms wildcard match into regular expression.
    // A custom NFA with caching may be more sophisticated but not
    // likely to be faster.
    this.term = new RegExp(
      `^${
        this.term.replace(/([.+^$[\]\\(){}|-])/g, '\\$1')
          .replace(/([^\\]|[^\\](?:\\\\)+)\*/g, '$1.*')
          .replace(/^(?:\\\\)*\*/g, '.*')
          .replace(/([^\\]|[^\\](?:\\\\)+)\?/g, '$1.?')
          .replace(/^(?:\\\\)*\?/g, '.?')
      }$`, 'i'
    );
  }

  // Update parse status flag to indicate the new properties are ready.
  this.parsed = true;
};

SearchTerm.prototype._normalizeTerm = function() {
  if (!this.wildcardable) {
    return this.term.replace('\\"', '"');
  }
  return this.term.replace(/\\([^*?])/g, '$1');
};

SearchTerm.prototype.fuzzyMatch = function(targetStr) {
  let targetDistance,
      i,
      j,
      // Work vectors, representing the last three populated
      // rows of the dynamic programming matrix of the iterative
      // optimal string alignment calculation.
      v0 = [],
      v1 = [],
      v2 = [],
      temp;

  if (this.fuzz < 1.0) {
    targetDistance = targetStr.length * (1.0 - this.fuzz);
  }
  else {
    targetDistance = this.fuzz;
  }

  const targetStrLower = targetStr.toLowerCase();

  for (i = 0; i <= targetStrLower.length; i += 1) {
    v1.push(i);
  }

  for (i = 0; i < this.term.length; i += 1) {
    v2[0] = i;
    for (j = 0; j < targetStrLower.length; j += 1) {
      const cost = this.term[i] === targetStrLower[j] ? 0 : 1;
      v2[j + 1] = Math.min(
        // Deletion.
        v1[j + 1] + 1,
        // Insertion.
        v2[j] + 1,
        // Substitution or No Change.
        v1[j] + cost
      );
      if (i > 1 && j > 1 && this.term[i] === targetStrLower[j - 1] &&
          targetStrLower[i - 1] === targetStrLower[j]) {
        v2[j + 1] = Math.min(v2[j], v0[j - 1] + cost);
      }
    }
    // Rotate the work vector references.
    temp = v0;
    v0 = v1;
    v1 = v2;
    v2 = temp;
  }

  return v1[targetStrLower.length] <= targetDistance;
};

SearchTerm.prototype.exactMatch = function(targetStr) {
  return this.term.toLowerCase() === targetStr.toLowerCase();
};

SearchTerm.prototype.wildcardMatch = function(targetStr) {
  return this.term.test(targetStr);
};

SearchTerm.prototype.interactionMatch = function(imageID, type, interaction, interactions) {
  let ret = false;

  interactions.forEach(v => {
    if (v.image_id === imageID && v.interaction_type === type && (interaction === null || v.value === interaction)) {
      ret = true;
    }
  });

  return ret;
};

SearchTerm.prototype.match = function(target) {
  // eslint-disable-next-line @typescript-eslint/no-this-alias,consistent-this
  const ohffs = this;
  let ret = false,
      compFunc,
      numbuh,
      date;

  if (!this.parsed) {
    this.parse();
  }

  if (this.termType === 'literal') {
    // Literal matching.
    if (this.fuzz) {
      compFunc = this.fuzzyMatch;
    }
    else if (this.wildcardable) {
      compFunc = this.wildcardMatch;
    }
    else {
      compFunc = this.exactMatch;
    }

    if (this.termSpace === 'tags') {
      target.getAttribute('data-image-tag-aliases').split(', ').every(
        str => {
          if (compFunc.call(ohffs, str)) {
            ret = true;
            return false;
          }
          return true;
        }
      );
    }
    else {
      ret = compFunc.call(
        this, target.getAttribute(termSpaceToImageField[this.termSpace])
      );
    }
  }
  else if (this.termType === 'my' && window.booru.interactions.length > 0) {
    // Should work with most my:conditions except watched.
    switch (this.termSpace) {
      case 'faves':
        ret = this.interactionMatch(Number(target.getAttribute('data-image-id')), 'faved', null, window.booru.interactions);
        break;
      case 'upvotes':
        ret = this.interactionMatch(Number(target.getAttribute('data-image-id')), 'voted', 'up', window.booru.interactions);
        break;
      case 'downvotes':
        ret = this.interactionMatch(Number(target.getAttribute('data-image-id')), 'voted', 'down', window.booru.interactions);
        break;
      default:
        // Other my: interactions aren't supported; return false so they
        // never trigger the spoiler.
        ret = false;
        break;
    }
  }
  else if (this.termType === 'date') {
    // Date matching.
    date = new Date(
      target.getAttribute(termSpaceToImageField[this.termSpace])
    ).getTime();

    switch (this.compare) {
      // The open-left, closed-right date range specified by the
      // date/time format limits the types of comparisons that are
      // done compared to numeric ranges.
      case 'lt':
        ret = this.term > date;
        break;
      case 'gte':
        ret = this.term <= date;
        break;
      default:
        ret = this.term[0] <= date && this.term[1] > date;
    }
  }
  else {
    // Range matching.
    numbuh = parseFloat(
      target.getAttribute(termSpaceToImageField[this.termSpace])
    );

    if (isNaN(this.term)) {
      ret = false;
    }
    else if (this.fuzz) {
      ret = this.term <= numbuh + this.fuzz &&
            this.term + this.fuzz >= numbuh;
    }
    else {
      switch (this.compare) {
        case 'lt':
          ret = this.term > numbuh;
          break;
        case 'gt':
          ret = this.term < numbuh;
          break;
        case 'lte':
          ret = this.term >= numbuh;
          break;
        case 'gte':
          ret = this.term <= numbuh;
          break;
        default:
          ret = this.term === numbuh;
      }
    }
  }

  return ret;
};

function generateLexArray(searchStr) {
  const opQueue = [],
        groupNegate = [],
        tokenStack = [];
  let searchTerm = null,
      boost = null,
      fuzz = null,
      lparenCtr = 0,
      negate = false,
      boostFuzzStr = '',
      localSearchStr = searchStr;

  while (localSearchStr.length > 0) {
    // eslint-disable-next-line no-loop-func
    tokenList.every(tokenArr => {
      const tokenName = tokenArr[0],
            tokenRE = tokenArr[1];
      let match = tokenRE.exec(localSearchStr),
          op;

      if (match) {
        match = match[0];

        if (Boolean(searchTerm) && (
          ['and_op', 'or_op'].indexOf(tokenName) !== -1 ||
          tokenName === 'rparen' && lparenCtr === 0)) {
          // Set options.
          searchTerm.boost = boost;
          searchTerm.fuzz = fuzz;
          // Push to stack.
          tokenStack.push(searchTerm);
          // Reset term and options data.
          searchTerm = fuzz = boost = null;
          boostFuzzStr = '';
          lparenCtr = 0;

          if (negate) {
            tokenStack.push('not_op');
            negate = false;
          }
        }

        switch (tokenName) {
          case 'and_op':
            while (opQueue[0] === 'and_op') {
              tokenStack.push(opQueue.shift());
            }
            opQueue.unshift('and_op');
            break;
          case 'or_op':
            while (opQueue[0] === 'and_op' || opQueue[0] === 'or_op') {
              tokenStack.push(opQueue.shift());
            }
            opQueue.unshift('or_op');
            break;
          case 'not_op':
            if (searchTerm) {
              // We're already inside a search term, so it does
              // not apply, obv.
              searchTerm.append(match);
            }
            else {
              negate = !negate;
            }
            break;
          case 'lparen':
            if (searchTerm) {
              // If we are inside the search term, do not error
              // out just yet; instead, consider it as part of
              // the search term, as a user convenience.
              searchTerm.append(match);
              lparenCtr += 1;
            }
            else {
              opQueue.unshift('lparen');
              groupNegate.push(negate);
              negate = false;
            }
            break;
          case 'rparen':
            if (lparenCtr > 0) {
              if (searchTerm) {
                searchTerm.append(match);
              }
              else {
                searchTerm = new SearchTerm(match);
              }
              lparenCtr -= 1;
            }
            else {
              while (opQueue.length) {
                op = opQueue.shift();
                if (op === 'lparen') {
                  break;
                }
                tokenStack.push(op);
              }
              if (groupNegate.length > 0 && groupNegate.pop()) {
                tokenStack.push('not_op');
              }
            }
            break;
          case 'fuzz':
            if (searchTerm) {
              // For this and boost operations, we store the
              // current match so far to a temporary string in
              // case this is actually inside the term.
              fuzz = parseFloat(match.substr(1));
              boostFuzzStr += match;
            }
            else {
              searchTerm = new SearchTerm(match);
            }
            break;
          case 'boost':
            if (searchTerm) {
              boost = match.substr(1);
              boostFuzzStr += match;
            }
            else {
              searchTerm = new SearchTerm(match);
            }
            break;
          case 'quoted_lit':
            if (searchTerm) {
              searchTerm.append(match);
            }
            else {
              searchTerm = new SearchTerm(match);
            }
            break;
          case 'word':
            if (searchTerm) {
              if (fuzz || boost) {
                boost = fuzz = null;
                searchTerm.append(boostFuzzStr);
                boostFuzzStr = '';
              }
              searchTerm.append(match);
            }
            else {
              searchTerm = new SearchTerm(match);
            }
            break;
          default:
            // Append extra spaces within search terms.
            if (searchTerm) {
              searchTerm.append(match);
            }
        }

        // Truncate string and restart the token tests.
        localSearchStr = localSearchStr.substr(
          match.length, localSearchStr.length - match.length
        );

        // Break since we have found a match.
        return false;
      }

      return true;
    });
  }

  // Append final tokens to the stack, starting with the search term.
  if (searchTerm) {
    searchTerm.boost = boost;
    searchTerm.fuzz = fuzz;
    tokenStack.push(searchTerm);
  }
  if (negate) {
    tokenStack.push('not_op');
  }

  if (opQueue.indexOf('rparen') !== -1 ||
      opQueue.indexOf('lparen') !== -1) {
    throw new Error('Mismatched parentheses.');
  }

  // Memory-efficient concatenation of remaining operators queue to the
  // token stack.
  tokenStack.push.apply(tokenStack, opQueue);

  return tokenStack;
}

function parseTokens(lexicalArray) {
  const operandStack = [];
  let negate, op1, op2;
  lexicalArray.forEach((token, i) => {
    if (token !== 'not_op') {
      negate = lexicalArray[i + 1] === 'not_op';

      if (typeof token === 'string') {
        op2 = operandStack.pop();
        op1 = operandStack.pop();

        if (typeof op1 === 'undefined' || typeof op2 === 'undefined') {
          throw new Error('Missing operand.');
        }

        operandStack.push(new SearchAST(token, negate, op1, op2));
      }
      else {
        if (negate) {
          operandStack.push(new SearchAST(null, true, token));
        }
        else {
          operandStack.push(token);
        }
      }
    }
  });

  if (operandStack.length > 1) {
    throw new Error('Missing operator.');
  }

  op1 = operandStack.pop();

  if (typeof op1 === 'undefined') {
    return new SearchAST();
  }

  if (isTerminal(op1)) {
    return new SearchAST(null, false, op1);
  }

  return op1;
}

function parseSearch(searchStr) {
  return parseTokens(generateLexArray(searchStr));
}

function isTerminal(operand) {
  // Whether operand is a terminal SearchTerm.
  return typeof operand.term !== 'undefined';
}

function SearchAST(op, negate, leftOperand, rightOperand) {
  this.negate = Boolean(negate);
  this.leftOperand = leftOperand || null;
  this.op = op || null;
  this.rightOperand = rightOperand || null;
}

function combineOperands(ast1, ast2, parentAST) {
  let localAst1;
  if (parentAST.op === 'and_op') {
    localAst1 = ast1 && ast2;
  }
  else {
    localAst1 = ast1 || ast2;
  }

  if (parentAST.negate) {
    return !localAst1;
  }

  return localAst1;
}

// Evaluation of the AST in regard to a target image
SearchAST.prototype.hitsImage = function(image) {
  const treeStack = [];
  // Left side node.
  // eslint-disable-next-line @typescript-eslint/no-this-alias,consistent-this
  let ast1 = this,
      // Right side node.
      ast2,
      // Parent node of the current subtree.
      parentAST;

  // Build the initial tree node traversal stack, of the "far left" side.
  // The general idea is to accumulate from the bottom and make stacks
  // of right-hand subtrees that themselves accumulate upward. The left
  // side node, ast1, will always be a Boolean representing the left-side
  // evaluated value, up to the current subtree (parentAST).
  while (!isTerminal(ast1)) {
    treeStack.push(ast1);
    ast1 = ast1.leftOperand;

    if (!ast1) {
      // Empty tree.
      return false;
    }
  }

  ast1 = ast1.match(image);
  treeStack.push(ast1);

  while (treeStack.length > 0) {
    parentAST = treeStack.pop();

    if (parentAST === null) {
      // We are at the end of a virtual stack for a right node
      // subtree. We switch the result of this stack from left
      // (ast1) to right (ast2), pop the original left node,
      // and finally pop the parent subtree itself. See near the
      // end of this function to view how this is populated.
      ast2 = ast1;
      ast1 = treeStack.pop();
      parentAST = treeStack.pop();
    }
    else {
      // First, check to see if we can do a short-circuit
      // evaluation to skip evaluating the right side entirely.
      if (!ast1 && parentAST.op === 'and_op') {
        ast1 = parentAST.negate;
        continue;
      }

      if (ast1 && parentAST.op === 'or_op') {
        ast1 = !parentAST.negate;
        continue;
      }

      // If we are not at the end of a stack, grab the right
      // node. The left node (ast1) is currently a terminal Boolean.
      ast2 = parentAST.rightOperand;
    }

    if (typeof ast2 === 'boolean') {
      ast1 = combineOperands(ast1, ast2, parentAST);
    }
    else if (!ast2) {
      // A subtree with a single node. This is generally the case
      // for negated tokens.
      if (parentAST.negate) {
        ast1 = !ast1;
      }
    }
    else if (isTerminal(ast2)) {
      // We are finally at a leaf and can evaluate.
      ast2 = ast2.match(image);
      ast1 = combineOperands(ast1, ast2, parentAST);
    }
    else {
      // We are at a node whose right side is a new subtree.
      // We will build a new "virtual" stack, but instead of
      // building a new Array, we can insert a null object as a
      // marker.
      treeStack.push(parentAST, ast1, null);

      do {
        treeStack.push(ast2);
        ast2 = ast2.leftOperand;
      } while (!isTerminal(ast2));

      ast1 = ast2.match(image);
    }
  }

  return ast1;
};

SearchAST.prototype.dumpTree = function() {
  // Dumps to string a simple diagram of the syntax tree structure
  // (starting with this object as the root) for debugging purposes.
  const retStrArr = [],
        treeQueue = [['', this]];
  let treeArr,
      prefix,
      tree;

  while (treeQueue.length > 0) {
    treeArr = treeQueue.shift();
    prefix = treeArr[0];
    tree = treeArr[1];

    if (isTerminal(tree)) {
      retStrArr.push(`${prefix}-> ${tree.term}`);
    }
    else {
      if (tree.negate) {
        retStrArr.push(`${prefix}+ NOT_OP`);
        prefix += '\t';
      }
      if (tree.op) {
        retStrArr.push(`${prefix}+ ${tree.op.toUpperCase()}`);
        prefix += '\t';
        treeQueue.unshift([prefix, tree.rightOperand]);
        treeQueue.unshift([prefix, tree.leftOperand]);
      }
      else {
        treeQueue.unshift([prefix, tree.leftOperand]);
      }
    }
  }

  return retStrArr.join('\n');
};

export default parseSearch;
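
Note that the TypeScript rewrite below keeps this file's overall pipeline: generateLexArray runs a shunting-yard pass that emits parsed terms and operators in postfix order, and parseTokens folds that stream with an operand stack, RPN-style. A small sketch of the intermediate form, taken from the lex.spec.ts expectations further down (matcher functions stand in for parsed terms):

// generateLexArray('safe OR solo AND fluttershy', parseTerm)
// yields the postfix stream:
//   [safeMatcher, soloMatcher, fluttershyMatcher, 'and_op', 'or_op']
// parseTokens then pops two operands per operator and pushes the
// combined matcher, so AND binds tighter than OR.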

assets/js/match_query.ts (new file, 15 lines)
@@ -0,0 +1,15 @@
import { defaultMatcher } from './query/matcher';
import { generateLexArray } from './query/lex';
import { parseTokens } from './query/parse';
import { getAstMatcherForTerm } from './query/term';

function parseWithDefaultMatcher(term: string, fuzz: number) {
  return getAstMatcherForTerm(term, fuzz, defaultMatcher);
}

function parseSearch(query: string) {
  const tokens = generateLexArray(query, parseWithDefaultMatcher);
  return parseTokens(tokens);
}

export default parseSearch;

assets/js/query/__tests__/date.spec.ts (new file, 106 lines)
@@ -0,0 +1,106 @@
import { makeDateMatcher } from '../date';

function daysAgo(days: number) {
  return new Date(Date.now() - days * 86400000).toISOString();
}

describe('Date parsing', () => {
  it('should match relative dates (upper bound)', () => {
    const matcher = makeDateMatcher('3 days ago', 'lte');

    expect(matcher(daysAgo(4), 'created_at', 0)).toBe(true);
    expect(matcher(daysAgo(2), 'created_at', 0)).toBe(false);
  });

  it('should match relative dates (lower bound)', () => {
    const matcher = makeDateMatcher('3 days ago', 'gte');

    expect(matcher(daysAgo(4), 'created_at', 0)).toBe(false);
    expect(matcher(daysAgo(2), 'created_at', 0)).toBe(true);
  });

  it('should match absolute date ranges', () => {
    const ltMatcher = makeDateMatcher('2025', 'lt');
    const gtMatcher = makeDateMatcher('2023', 'gt');

    expect(ltMatcher(new Date(Date.UTC(2025, 5, 21)).toISOString(), 'created_at', 0)).toBe(false);
    expect(ltMatcher(new Date(Date.UTC(2024, 5, 21)).toISOString(), 'created_at', 0)).toBe(true);
    expect(ltMatcher(new Date(Date.UTC(2023, 5, 21)).toISOString(), 'created_at', 0)).toBe(true);

    expect(gtMatcher(new Date(Date.UTC(2025, 5, 21)).toISOString(), 'created_at', 0)).toBe(true);
    expect(gtMatcher(new Date(Date.UTC(2024, 5, 21)).toISOString(), 'created_at', 0)).toBe(true);
    expect(gtMatcher(new Date(Date.UTC(2023, 5, 21)).toISOString(), 'created_at', 0)).toBe(false);
  });

  it('should match absolute dates through years', () => {
    const matcher = makeDateMatcher('2024', 'eq');

    expect(matcher(new Date(Date.UTC(2025, 5, 21)).toISOString(), 'created_at', 0)).toBe(false);
    expect(matcher(new Date(Date.UTC(2024, 5, 21)).toISOString(), 'created_at', 0)).toBe(true);
    expect(matcher(new Date(Date.UTC(2023, 5, 21)).toISOString(), 'created_at', 0)).toBe(false);
  });

  it('should match absolute dates through months', () => {
    const matcher = makeDateMatcher('2024-06', 'eq');

    expect(matcher(new Date(Date.UTC(2024, 6, 21)).toISOString(), 'created_at', 0)).toBe(false);
    expect(matcher(new Date(Date.UTC(2024, 5, 21)).toISOString(), 'created_at', 0)).toBe(true);
    expect(matcher(new Date(Date.UTC(2024, 4, 21)).toISOString(), 'created_at', 0)).toBe(false);
  });

  it('should match absolute dates through days', () => {
    const matcher = makeDateMatcher('2024-06-21', 'eq');

    expect(matcher(new Date(Date.UTC(2024, 5, 22)).toISOString(), 'created_at', 0)).toBe(false);
    expect(matcher(new Date(Date.UTC(2024, 5, 21)).toISOString(), 'created_at', 0)).toBe(true);
    expect(matcher(new Date(Date.UTC(2024, 5, 20)).toISOString(), 'created_at', 0)).toBe(false);
  });

  it('should match absolute dates through hours', () => {
    const matcher = makeDateMatcher('2024-06-21T06', 'eq');

    expect(matcher(new Date(Date.UTC(2024, 5, 21, 7)).toISOString(), 'created_at', 0)).toBe(false);
    expect(matcher(new Date(Date.UTC(2024, 5, 21, 6)).toISOString(), 'created_at', 0)).toBe(true);
    expect(matcher(new Date(Date.UTC(2024, 5, 21, 5)).toISOString(), 'created_at', 0)).toBe(false);
  });

  it('should match absolute dates through minutes', () => {
    const matcher = makeDateMatcher('2024-06-21T06:21', 'eq');

    expect(matcher(new Date(Date.UTC(2024, 5, 21, 6, 22)).toISOString(), 'created_at', 0)).toBe(false);
    expect(matcher(new Date(Date.UTC(2024, 5, 21, 6, 21)).toISOString(), 'created_at', 0)).toBe(true);
    expect(matcher(new Date(Date.UTC(2024, 5, 21, 6, 20)).toISOString(), 'created_at', 0)).toBe(false);
  });

  it('should match absolute dates through seconds', () => {
    const matcher = makeDateMatcher('2024-06-21T06:21:30Z', 'eq');

    expect(matcher(new Date(Date.UTC(2024, 5, 21, 6, 21, 31)).toISOString(), 'created_at', 0)).toBe(false);
    expect(matcher(new Date(Date.UTC(2024, 5, 21, 6, 21, 30)).toISOString(), 'created_at', 0)).toBe(true);
    expect(matcher(new Date(Date.UTC(2024, 5, 21, 6, 21, 29)).toISOString(), 'created_at', 0)).toBe(false);
  });

  it('should match absolute dates through seconds with positive timezone offset', () => {
    const matcher = makeDateMatcher('2024-06-21T06:21:30+01:30', 'eq');

    expect(matcher(new Date(Date.UTC(2024, 5, 21, 4, 51, 31)).toISOString(), 'created_at', 0)).toBe(false);
    expect(matcher(new Date(Date.UTC(2024, 5, 21, 4, 51, 30)).toISOString(), 'created_at', 0)).toBe(true);
    expect(matcher(new Date(Date.UTC(2024, 5, 21, 4, 51, 29)).toISOString(), 'created_at', 0)).toBe(false);
  });

  it('should match absolute dates through seconds with negative timezone offset', () => {
    const matcher = makeDateMatcher('2024-06-21T06:21:30-01:30', 'eq');

    expect(matcher(new Date(Date.UTC(2024, 5, 21, 7, 51, 31)).toISOString(), 'created_at', 0)).toBe(false);
    expect(matcher(new Date(Date.UTC(2024, 5, 21, 7, 51, 30)).toISOString(), 'created_at', 0)).toBe(true);
    expect(matcher(new Date(Date.UTC(2024, 5, 21, 7, 51, 29)).toISOString(), 'created_at', 0)).toBe(false);
  });

  it('should not match malformed absolute date expressions', () => {
    expect(() => makeDateMatcher('2024-06-21T06:21:30+01:3020', 'eq')).toThrow('Cannot parse date string: 2024-06-21T06:21:30+01:3020');
  });

  it('should not match malformed relative date expressions', () => {
    expect(() => makeDateMatcher('3 test failures ago', 'eq')).toThrow('Cannot parse date string: 3 test failures ago');
  });
});
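
All of the 'eq' expectations above follow one rule carried over from the old parser: a date term denotes the half-open range [bottom, top) at whatever precision the user wrote, with top obtained by bumping the last parsed field. A sketch of the bucket math (variable names are illustrative):

// '2024-06' parsed at month precision:
const bottom = Date.UTC(2024, 5); // 2024-06-01T00:00:00Z
const top = Date.UTC(2024, 6);    // 2024-07-01T00:00:00Z (last field + 1)

// 'eq'  matches bottom <= t && t < top
// 'lt'  matches t < bottom; 'gte' matches t >= bottom
// 'lte' matches t < top;    'gt'  matches t >= top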

assets/js/query/__tests__/lex.spec.ts (new file, 177 lines)
@@ -0,0 +1,177 @@
import { generateLexArray } from '../lex';
import { AstMatcher } from '../types';

describe('Lexical analysis', () => {
  let terms: string[];
  let fuzzes: number[];
  let boosts: number[];

  function noMatch() {
    return false;
  }

  function parseTerm(term: string, fuzz: number, boost: number): AstMatcher {
    terms.push(term);
    fuzzes.push(fuzz);
    boosts.push(boost);

    return noMatch;
  }

  beforeEach(() => {
    terms = [];
    fuzzes = [];
    boosts = [];
  });

  it('should lex single terms', () => {
    const array = generateLexArray('safe', parseTerm);
    expect(terms).toEqual(['safe']);
    expect(fuzzes).toEqual([0]);
    expect(boosts).toEqual([1]);
    expect(array).toEqual([noMatch]);
  });

  it('should lex single terms with fuzzing', () => {
    const array = generateLexArray('safe~4', parseTerm);
    expect(terms).toEqual(['safe']);
    expect(fuzzes).toEqual([4]);
    expect(boosts).toEqual([1]);
    expect(array).toEqual([noMatch]);
  });

  it('should lex single terms with boosting', () => {
    const array = generateLexArray('safe^2', parseTerm);
    expect(terms).toEqual(['safe']);
    expect(fuzzes).toEqual([0]);
    expect(boosts).toEqual([2]);
    expect(array).toEqual([noMatch]);
  });

  it('should lex quoted single terms', () => {
    const array = generateLexArray('"safe"', parseTerm);
    expect(terms).toEqual(['"safe"']);
    expect(fuzzes).toEqual([0]);
    expect(boosts).toEqual([1]);
    expect(array).toEqual([noMatch]);
  });

  it('should lex multiple terms connected by AND', () => {
    const array = generateLexArray('safe AND solo', parseTerm);
    expect(terms).toEqual(['safe', 'solo']);
    expect(fuzzes).toEqual([0, 0]);
    expect(boosts).toEqual([1, 1]);
    expect(array).toEqual([noMatch, noMatch, 'and_op']);
  });

  it('should lex multiple terms connected by OR', () => {
    const array = generateLexArray('safe OR solo', parseTerm);
    expect(terms).toEqual(['safe', 'solo']);
    expect(fuzzes).toEqual([0, 0]);
    expect(boosts).toEqual([1, 1]);
    expect(array).toEqual([noMatch, noMatch, 'or_op']);
  });

  it('should prioritize AND over OR', () => {
    const array = generateLexArray('safe OR solo AND fluttershy', parseTerm);
    expect(terms).toEqual(['safe', 'solo', 'fluttershy']);
    expect(array).toEqual([noMatch, noMatch, noMatch, 'and_op', 'or_op']);
  });

  it('should override ordering when using parenthetical expressions', () => {
    const array = generateLexArray('(safe OR solo) AND fluttershy', parseTerm);
    expect(terms).toEqual(['safe', 'solo', 'fluttershy']);
    expect(fuzzes).toEqual([0, 0, 0]);
    expect(boosts).toEqual([1, 1, 1]);
    expect(array).toEqual([noMatch, noMatch, 'or_op', noMatch, 'and_op']);
  });

  it('should lex unary NOT', () => {
    const array = generateLexArray('NOT safe', parseTerm);
    expect(terms).toEqual(['safe']);
    expect(array).toEqual([noMatch, 'not_op']);
  });

  it('should prioritize NOT over AND', () => {
    const array = generateLexArray('NOT safe AND solo', parseTerm);
    expect(terms).toEqual(['safe', 'solo']);
    expect(array).toEqual([noMatch, 'not_op', noMatch, 'and_op']);
  });

  it('should prioritize NOT over OR', () => {
    const array = generateLexArray('NOT safe OR solo', parseTerm);
    expect(terms).toEqual(['safe', 'solo']);
    expect(array).toEqual([noMatch, 'not_op', noMatch, 'or_op']);
  });

  it('should allow group negation', () => {
    const array = generateLexArray('NOT (safe OR solo)', parseTerm);
    expect(terms).toEqual(['safe', 'solo']);
    expect(array).toEqual([noMatch, noMatch, 'or_op', 'not_op']);
  });

  it('should allow NOT expressions inside terms', () => {
    const array = generateLexArray('this NOT that', parseTerm);
    expect(terms).toEqual(['this NOT that']);
    expect(array).toEqual([noMatch]);
  });

  it('should allow parenthetical expressions inside terms', () => {
    const array = generateLexArray('rose (flower)', parseTerm);
    expect(terms).toEqual(['rose (flower)']);
    expect(array).toEqual([noMatch]);
  });

  it('should handle fuzz expressions in place of terms', () => {
    const array = generateLexArray('~2', parseTerm);
    expect(terms).toEqual(['~2']);
    expect(array).toEqual([noMatch]);
  });

  it('should handle boost expressions in place of terms', () => {
    const array = generateLexArray('^2', parseTerm);
    expect(terms).toEqual(['^2']);
    expect(array).toEqual([noMatch]);
  });

  it('should handle fuzz expressions in terms', () => {
    const array = generateLexArray('two~2~two', parseTerm);
    expect(terms).toEqual(['two~2~two']);
    expect(array).toEqual([noMatch]);
  });

  it('should handle boost expressions in terms', () => {
    const array = generateLexArray('two^2^two', parseTerm);
    expect(terms).toEqual(['two^2^two']);
    expect(array).toEqual([noMatch]);
  });

  it('should handle quotes in terms', () => {
    const array = generateLexArray('a "quoted" expression', parseTerm);
    expect(terms).toEqual(['a "quoted" expression']);
    expect(array).toEqual([noMatch]);
  });

  it('should allow extra spaces in terms', () => {
    const array = generateLexArray('twilight sparkle', parseTerm);
    expect(terms).toEqual(['twilight sparkle']);
    expect(array).toEqual([noMatch]);
  });

  it('should collapse consecutive AND expressions', () => {
    const array = generateLexArray('safe AND solo AND fluttershy AND applejack', parseTerm);
    expect(terms).toEqual(['safe', 'solo', 'fluttershy', 'applejack']);
    expect(array).toEqual([noMatch, noMatch, 'and_op', noMatch, 'and_op', noMatch, 'and_op']);
  });

  it('should collapse consecutive OR expressions', () => {
    const array = generateLexArray('safe OR solo OR fluttershy OR applejack', parseTerm);
    expect(terms).toEqual(['safe', 'solo', 'fluttershy', 'applejack']);
    expect(array).toEqual([noMatch, noMatch, 'or_op', noMatch, 'or_op', noMatch, 'or_op']);
  });

  it('should throw exception on mismatched parentheses', () => {
    expect(() => generateLexArray('(safe OR solo AND fluttershy', parseTerm)).toThrow('Mismatched parentheses.');
    // expect(() => generateLexArray(')bad', parseTerm)).toThrow('Mismatched parentheses.');
  });
});

assets/js/query/__tests__/literal.spec.ts (new file, 36 lines)
@@ -0,0 +1,36 @@
import { makeLiteralMatcher } from '../literal';

describe('Literal field parsing', () => {
  it('should handle exact matching in arrayed fields', () => {
    const matcher = makeLiteralMatcher('safe', 0, false);
    expect(matcher('safe, solo', 'tags', 0)).toBe(true);
    expect(matcher('solo', 'tags', 0)).toBe(false);
  });

  it('should handle exact matching in non-arrayed fields', () => {
    const matcher = makeLiteralMatcher('safe', 0, false);
    expect(matcher('safe, solo', 'description', 0)).toBe(false);
    expect(matcher('safe', 'description', 0)).toBe(true);
    expect(matcher('solo', 'description', 0)).toBe(false);
  });

  it('should handle fuzzy matching based on normalized edit distance', () => {
    const matcher = makeLiteralMatcher('fluttersho', 0.8, false);
    expect(matcher('fluttershy', 'tags', 0)).toBe(true);
    expect(matcher('rarity', 'tags', 0)).toBe(false);
  });

  it('should handle fuzzy matching based on raw edit distance', () => {
    const matcher = makeLiteralMatcher('fluttersho', 1, false);
    expect(matcher('fluttershy', 'tags', 0)).toBe(true);
    expect(matcher('rarity', 'tags', 0)).toBe(false);
  });

  it('should handle wildcard matching', () => {
    const matcher = makeLiteralMatcher('fl?tter*', 0, true);
    expect(matcher('fluttershy', 'tags', 0)).toBe(true);
    expect(matcher('flitter', 'tags', 0)).toBe(true);
    expect(matcher('rainbow dash', 'tags', 0)).toBe(false);
    expect(matcher('gentle flutter', 'tags', 0)).toBe(false);
  });
});
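
The two fuzzy cases above exercise the threshold rule from the old fuzzyMatch, which the tests show the rewrite preserves: a fuzz value below 1 is treated as a normalized similarity, anything else as a raw edit-distance budget. A sketch of the allowed-distance computation (a standalone helper for illustration; the matcher itself compares optimal string alignment distance against it):

const allowedDistance = (fuzz: number, target: string) =>
  fuzz < 1.0 ? target.length * (1.0 - fuzz) : fuzz;

// 'fluttersho' vs. 'fluttershy' has edit distance 1:
// fuzz 0.8 -> allowedDistance = 10 * 0.2 = 2 -> match
// fuzz 1   -> allowedDistance = 1            -> match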

assets/js/query/__tests__/number.spec.ts (new file, 53 lines)
@@ -0,0 +1,53 @@
import { makeNumberMatcher } from '../number';

describe('Number parsing', () => {
  it('should match numbers directly', () => {
    const intMatch = makeNumberMatcher(2067, 0, 'eq');

    expect(intMatch('2066', 'value', 0)).toBe(false);
    expect(intMatch('2067', 'value', 0)).toBe(true);
    expect(intMatch('2068', 'value', 0)).toBe(false);
    expect(intMatch('20677', 'value', 0)).toBe(false);
  });

  it('should match number ranges', () => {
    const ltMatch = makeNumberMatcher(2067, 0, 'lt');
    const lteMatch = makeNumberMatcher(2067, 0, 'lte');
    const gtMatch = makeNumberMatcher(2067, 0, 'gt');
    const gteMatch = makeNumberMatcher(2067, 0, 'gte');

    expect(ltMatch('2066', 'value', 0)).toBe(true);
    expect(ltMatch('2067', 'value', 0)).toBe(false);
    expect(ltMatch('2068', 'value', 0)).toBe(false);
    expect(lteMatch('2066', 'value', 0)).toBe(true);
    expect(lteMatch('2067', 'value', 0)).toBe(true);
    expect(lteMatch('2068', 'value', 0)).toBe(false);
    expect(gtMatch('2066', 'value', 0)).toBe(false);
    expect(gtMatch('2067', 'value', 0)).toBe(false);
    expect(gtMatch('2068', 'value', 0)).toBe(true);
    expect(gteMatch('2066', 'value', 0)).toBe(false);
    expect(gteMatch('2067', 'value', 0)).toBe(true);
    expect(gteMatch('2068', 'value', 0)).toBe(true);
  });

  it('should not match unparsed values', () => {
    const matcher = makeNumberMatcher(2067, 0, 'eq');

    expect(matcher('NaN', 'value', 0)).toBe(false);
    expect(matcher('test', 'value', 0)).toBe(false);
  });

  it('should interpret fuzz as an inclusive range around the value', () => {
    const matcher = makeNumberMatcher(2067, 3, 'eq');

    expect(matcher('2063', 'value', 0)).toBe(false);
    expect(matcher('2064', 'value', 0)).toBe(true);
    expect(matcher('2065', 'value', 0)).toBe(true);
    expect(matcher('2066', 'value', 0)).toBe(true);
    expect(matcher('2067', 'value', 0)).toBe(true);
    expect(matcher('2068', 'value', 0)).toBe(true);
    expect(matcher('2069', 'value', 0)).toBe(true);
    expect(matcher('2070', 'value', 0)).toBe(true);
    expect(matcher('2071', 'value', 0)).toBe(false);
  });
});

assets/js/query/__tests__/parse.spec.ts (new file, 84 lines)
@@ -0,0 +1,84 @@
import { defaultMatcher } from '../matcher';
import { termSpaceToImageField } from '../fields';
import { generateLexArray } from '../lex';
import { getAstMatcherForTerm } from '../term';
import { parseTokens } from '../parse';

function parseWithDefaultMatcher(term: string, fuzz: number) {
  return getAstMatcherForTerm(term, fuzz, defaultMatcher);
}

describe('Semantic analysis', () => {
  let documents: HTMLElement[];

  beforeAll(() => {
    const e0 = document.createElement('div');
    e0.setAttribute(termSpaceToImageField.id, '0');
    e0.setAttribute(termSpaceToImageField.tags, 'safe, solo, fluttershy');

    const e1 = document.createElement('div');
    e1.setAttribute(termSpaceToImageField.id, '1');
    e1.setAttribute(termSpaceToImageField.tags, 'suggestive, solo, fluttershy');

    const e2 = document.createElement('div');
    e2.setAttribute(termSpaceToImageField.id, '2');
    e2.setAttribute(termSpaceToImageField.tags, 'suggestive, fluttershy, twilight sparkle');

    documents = [e0, e1, e2];
  });

  it('should match single term expressions', () => {
    const tokens = generateLexArray('fluttershy', parseWithDefaultMatcher);
    const matcher = parseTokens(tokens);

    expect(matcher(documents[0])).toBe(true);
    expect(matcher(documents[1])).toBe(true);
    expect(matcher(documents[2])).toBe(true);
  });

  it('should match AND expressions', () => {
    const tokens = generateLexArray('fluttershy,solo', parseWithDefaultMatcher);
    const matcher = parseTokens(tokens);

    expect(matcher(documents[0])).toBe(true);
    expect(matcher(documents[1])).toBe(true);
    expect(matcher(documents[2])).toBe(false);
  });

  it('should match OR expressions', () => {
    const tokens = generateLexArray('suggestive || twilight sparkle', parseWithDefaultMatcher);
    const matcher = parseTokens(tokens);

    expect(matcher(documents[0])).toBe(false);
    expect(matcher(documents[1])).toBe(true);
    expect(matcher(documents[2])).toBe(true);
  });

  it('should match NOT expressions', () => {
    const tokens = generateLexArray('NOT twilight sparkle', parseWithDefaultMatcher);
    const matcher = parseTokens(tokens);

    expect(matcher(documents[0])).toBe(true);
    expect(matcher(documents[1])).toBe(true);
    expect(matcher(documents[2])).toBe(false);
  });

  it('should allow empty expressions', () => {
    const tokens = generateLexArray('', parseWithDefaultMatcher);
    const matcher = parseTokens(tokens);

    expect(matcher(documents[0])).toBe(false);
    expect(matcher(documents[1])).toBe(false);
    expect(matcher(documents[2])).toBe(false);
  });

  it('should throw on unpaired AND', () => {
    const tokens = generateLexArray(' AND ', parseWithDefaultMatcher);
    expect(() => parseTokens(tokens)).toThrow('Missing operand.');
  });

  it('should throw on unjoined parenthetical', () => {
    const tokens = generateLexArray('(safe) solo', parseWithDefaultMatcher);
    expect(() => parseTokens(tokens)).toThrow('Missing operator.');
  });
});

assets/js/query/__tests__/term.spec.ts (new file, 131 lines)
@@ -0,0 +1,131 @@
import { getAstMatcherForTerm } from '../term';
import { MatcherFactory, defaultMatcher } from '../matcher';
import { termSpaceToImageField } from '../fields';

function noMatch() {
  return false;
}

class TestMatcherFactory implements MatcherFactory {
  public dateVals: string[];
  public literalVals: string[];
  public numberVals: number[];
  public userVals: string[];

  constructor() {
    this.dateVals = [];
    this.literalVals = [];
    this.numberVals = [];
    this.userVals = [];
  }

  makeDateMatcher(term: string) {
    this.dateVals.push(term);
    return noMatch;
  }

  makeLiteralMatcher(term: string) {
    this.literalVals.push(term);
    return noMatch;
  }

  makeNumberMatcher(term: number) {
    this.numberVals.push(term);
    return noMatch;
  }

  makeUserMatcher(term: string) {
    this.userVals.push(term);
    return noMatch;
  }
}

describe('Search terms', () => {
  let factory: TestMatcherFactory;

  beforeEach(() => {
    factory = new TestMatcherFactory();
  });

  it('should parse the default field', () => {
    getAstMatcherForTerm('default', 0, factory);
    expect(factory.literalVals).toEqual(['default']);
  });

  it('should parse the default field with wildcarding', () => {
    getAstMatcherForTerm('def?ul*', 0, factory);
    expect(factory.literalVals).toEqual(['def?ul*']);
  });

  it('should parse the default field with fuzzing', () => {
    getAstMatcherForTerm('default', 1, factory);
    expect(factory.literalVals).toEqual(['default']);
  });

  it('should parse the default field within quotes', () => {
    getAstMatcherForTerm('"default"', 0, factory);
    expect(factory.literalVals).toEqual(['default']);
  });

  it('should parse exact date field values', () => {
    getAstMatcherForTerm('created_at:2024', 0, factory);
    expect(factory.dateVals).toEqual(['2024']);
  });

  it('should parse ranged date field values', () => {
    getAstMatcherForTerm('created_at.lte:2024', 0, factory);
    getAstMatcherForTerm('created_at.lt:2024', 0, factory);
    getAstMatcherForTerm('created_at.gte:2024', 0, factory);
    getAstMatcherForTerm('created_at.gt:2024', 0, factory);
    expect(factory.dateVals).toEqual(['2024', '2024', '2024', '2024']);
  });

  it('should parse exact number field values', () => {
    getAstMatcherForTerm('width:1920', 0, factory);
    expect(factory.numberVals).toEqual([1920]);
  });

  it('should parse ranged number field values', () => {
    getAstMatcherForTerm('width.lte:1920', 0, factory);
    getAstMatcherForTerm('width.lt:1920', 0, factory);
    getAstMatcherForTerm('width.gte:1920', 0, factory);
    getAstMatcherForTerm('width.gt:1920', 0, factory);
    expect(factory.numberVals).toEqual([1920, 1920, 1920, 1920]);
  });

  it('should parse literal field values', () => {
    getAstMatcherForTerm('source_url:*twitter*', 0, factory);
    expect(factory.literalVals).toEqual(['*twitter*']);
  });

  it('should parse user field values', () => {
    getAstMatcherForTerm('my:upvotes', 0, factory);
    getAstMatcherForTerm('my:downvotes', 0, factory);
    getAstMatcherForTerm('my:faves', 0, factory);
    expect(factory.userVals).toEqual(['upvotes', 'downvotes', 'faves']);
  });

  it('should match document with proper field values', () => {
    const idMatcher = getAstMatcherForTerm('id.lt:1', 0, defaultMatcher);
    const sourceMatcher = getAstMatcherForTerm('source_url:twitter.com', 0, defaultMatcher);

    const idAttribute = termSpaceToImageField.id;
    const sourceUrlAttribute = termSpaceToImageField.source_url;

    const properElement = document.createElement('div');
    properElement.setAttribute(idAttribute, '0');
    properElement.setAttribute(sourceUrlAttribute, 'twitter.com');

    expect(idMatcher(properElement)).toBe(true);
    expect(sourceMatcher(properElement)).toBe(true);
  });

  it('should not match document without field values', () => {
    const idMatcher = getAstMatcherForTerm('id.lt:1', 0, defaultMatcher);
    const sourceMatcher = getAstMatcherForTerm('source_url:twitter.com', 0, defaultMatcher);
    const improperElement = document.createElement('div');

    expect(idMatcher(improperElement)).toBe(false);
    expect(sourceMatcher(improperElement)).toBe(false);
  });
});
assets/js/query/__tests__/user.spec.ts (new file, 50 lines)
@@ -0,0 +1,50 @@
import { makeUserMatcher } from '../user';

describe('User field parsing', () => {
  beforeEach(() => {
    /* eslint-disable camelcase */
    window.booru.interactions = [
      {image_id: 0, user_id: 0, interaction_type: 'faved', value: null},
      {image_id: 0, user_id: 0, interaction_type: 'voted', value: 'up'},
      {image_id: 1, user_id: 0, interaction_type: 'voted', value: 'down'},
      {image_id: 2, user_id: 0, interaction_type: 'hidden', value: null},
    ];
    /* eslint-enable camelcase */
  });

  it('should parse my:faves', () => {
    const matcher = makeUserMatcher('faves');

    expect(matcher('', 'my', 0)).toBe(true);
    expect(matcher('', 'my', 1)).toBe(false);
    expect(matcher('', 'my', 2)).toBe(false);
  });

  it('should parse my:upvotes', () => {
    const matcher = makeUserMatcher('upvotes');

    expect(matcher('', 'my', 0)).toBe(true);
    expect(matcher('', 'my', 1)).toBe(false);
    expect(matcher('', 'my', 2)).toBe(false);
  });

  it('should parse my:downvotes', () => {
    const matcher = makeUserMatcher('downvotes');

    expect(matcher('', 'my', 0)).toBe(false);
    expect(matcher('', 'my', 1)).toBe(true);
    expect(matcher('', 'my', 2)).toBe(false);
  });

  it('should not parse other my: fields', () => {
    const hiddenMatcher = makeUserMatcher('hidden');
    const watchedMatcher = makeUserMatcher('watched');

    expect(hiddenMatcher('', 'my', 0)).toBe(false);
    expect(hiddenMatcher('', 'my', 1)).toBe(false);
    expect(hiddenMatcher('', 'my', 2)).toBe(false);
    expect(watchedMatcher('', 'my', 0)).toBe(false);
    expect(watchedMatcher('', 'my', 1)).toBe(false);
    expect(watchedMatcher('', 'my', 2)).toBe(false);
  });
});
assets/js/query/boolean.ts (new file, 17 lines)
@@ -0,0 +1,17 @@
import { AstMatcher } from './types';

export function matchAny(...matchers: AstMatcher[]): AstMatcher {
  return (e: HTMLElement) => matchers.some(matcher => matcher(e));
}

export function matchAll(...matchers: AstMatcher[]): AstMatcher {
  return (e: HTMLElement) => matchers.every(matcher => matcher(e));
}

export function matchNot(matcher: AstMatcher): AstMatcher {
  return (e: HTMLElement) => !matcher(e);
}

export function matchNone(): AstMatcher {
  return () => false;
}
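For orientation, a minimal sketch of how these combinators compose into a single matcher; the isWide and isTall leaf matchers below are made up for illustration and are not part of the commit:

import { matchAll, matchNot } from './boolean';
import { AstMatcher } from './types';

// Hypothetical leaf matchers reading the data attributes used elsewhere in this commit.
const isWide: AstMatcher = e => Number(e.getAttribute('data-width')) >= 1920;
const isTall: AstMatcher = e => Number(e.getAttribute('data-height')) >= 1080;

// Roughly equivalent to the query "width.gte:1920 AND NOT height.gte:1080".
const combined: AstMatcher = matchAll(isWide, matchNot(isTall));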
assets/js/query/date.ts (new file, 140 lines)
@@ -0,0 +1,140 @@
import { assertNotNull } from '../utils/assert';
import { FieldMatcher, ParseError, RangeEqualQualifier } from './types';

type Year = number;
type Month = number;
type Day = number;
type Hours = number;
type Minutes = number;
type Seconds = number;
type AbsoluteDate = [Year, Month, Day, Hours, Minutes, Seconds];
type TimeZoneOffset = [Hours, Minutes];
type PosixTimeMs = number;

function makeMatcher(bottomDate: PosixTimeMs, topDate: PosixTimeMs, qual: RangeEqualQualifier): FieldMatcher {
  // The closed-left, open-right date range specified by the
  // date/time format limits the types of comparisons that are
  // done compared to numeric ranges.
  switch (qual) {
    case 'lte':
      return v => new Date(v).getTime() < topDate;
    case 'gte':
      return v => new Date(v).getTime() >= bottomDate;
    case 'lt':
      return v => new Date(v).getTime() < bottomDate;
    case 'gt':
      return v => new Date(v).getTime() >= topDate;
    case 'eq':
    default:
      return v => {
        const t = new Date(v).getTime();
        return t >= bottomDate && t < topDate;
      };
  }
}

const relativeDateMatch = /(\d+) (second|minute|hour|day|week|month|year)s? ago/;

function makeRelativeDateMatcher(dateVal: string, qual: RangeEqualQualifier): FieldMatcher {
  const match = assertNotNull(relativeDateMatch.exec(dateVal));
  const bounds: Record<string, number> = {
    second: 1000,
    minute: 60000,
    hour: 3600000,
    day: 86400000,
    week: 604800000,
    month: 2592000000,
    year: 31536000000
  };

  const amount = parseInt(match[1], 10);
  const scale = bounds[match[2]];

  const now = new Date().getTime();
  const bottomDate = new Date(now - amount * scale).getTime();
  const topDate = new Date(now - (amount - 1) * scale).getTime();

  return makeMatcher(bottomDate, topDate, qual);
}

function makeAbsoluteDateMatcher(dateVal: string, qual: RangeEqualQualifier): FieldMatcher {
  const parseRes: RegExp[] = [
    /^(\d{4})/,
    /^-(\d{2})/,
    /^-(\d{2})/,
    /^(?:\s+|T|t)(\d{2})/,
    /^:(\d{2})/,
    /^:(\d{2})/
  ];
  const timeZoneOffset: TimeZoneOffset = [0, 0];
  const timeData: AbsoluteDate = [0, 0, 1, 0, 0, 0];

  const origDateVal: string = dateVal;
  let localDateVal = origDateVal;

  const offsetMatch = /([+-])(\d{2}):(\d{2})$/.exec(localDateVal);
  if (offsetMatch) {
    timeZoneOffset[0] = parseInt(offsetMatch[2], 10);
    timeZoneOffset[1] = parseInt(offsetMatch[3], 10);
    if (offsetMatch[1] === '-') {
      timeZoneOffset[0] *= -1;
      timeZoneOffset[1] *= -1;
    }
    localDateVal = localDateVal.substring(0, localDateVal.length - 6);
  }
  else {
    localDateVal = localDateVal.replace(/[Zz]$/, '');
  }

  let matchIndex = 0;
  for (; matchIndex < parseRes.length; matchIndex += 1) {
    if (localDateVal.length === 0) {
      break;
    }

    const componentMatch = parseRes[matchIndex].exec(localDateVal);
    if (componentMatch) {
      if (matchIndex === 1) {
        // Months are offset by 1.
        timeData[matchIndex] = parseInt(componentMatch[1], 10) - 1;
      }
      else {
        // All other components are not offset.
        timeData[matchIndex] = parseInt(componentMatch[1], 10);
      }

      // Truncate string.
      localDateVal = localDateVal.substring(componentMatch[0].length);
    }
    else {
      throw new ParseError(`Cannot parse date string: ${origDateVal}`);
    }
  }

  if (localDateVal.length > 0) {
    throw new ParseError(`Cannot parse date string: ${origDateVal}`);
  }

  // Apply the user-specified time zone offset. The JS Date constructor
  // is very flexible here.
  timeData[3] -= timeZoneOffset[0];
  timeData[4] -= timeZoneOffset[1];

  const asPosix = (data: AbsoluteDate) => {
    return new Date(Date.UTC.apply(Date, data)).getTime();
  };

  const bottomDate = asPosix(timeData);
  timeData[matchIndex - 1] += 1;
  const topDate = asPosix(timeData);

  return makeMatcher(bottomDate, topDate, qual);
}

export function makeDateMatcher(dateVal: string, qual: RangeEqualQualifier): FieldMatcher {
  if (relativeDateMatch.test(dateVal)) {
    return makeRelativeDateMatcher(dateVal, qual);
  }

  return makeAbsoluteDateMatcher(dateVal, qual);
}
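As a worked example of the half-open interval semantics above; expected behavior sketched from the code, not part of the commit:

import { makeDateMatcher } from './date';

// 'created_at:2024' expands to [2024-01-01T00:00Z, 2025-01-01T00:00Z).
const in2024 = makeDateMatcher('2024', 'eq');
in2024('2024-06-15T12:00:00Z', 'created_at', 0); // true
in2024('2025-01-01T00:00:00Z', 'created_at', 0); // false, top bound is exclusive

// 'created_at.lt:2024' matches strictly before the interval begins.
const before2024 = makeDateMatcher('2024', 'lt');
before2024('2023-12-31T23:59:59Z', 'created_at', 0); // true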
assets/js/query/fields.ts (new file, 39 lines)
@@ -0,0 +1,39 @@
import { FieldName } from './types';

type AttributeName = string;

export const numberFields: FieldName[] =
  ['id', 'width', 'height', 'aspect_ratio',
   'comment_count', 'score', 'upvotes', 'downvotes',
   'faves', 'tag_count'];

export const dateFields: FieldName[] = ['created_at'];

export const literalFields =
  ['tags', 'orig_sha512_hash', 'sha512_hash',
   'uploader', 'source_url', 'description'];

export const termSpaceToImageField: Record<FieldName, AttributeName> = {
  tags: 'data-image-tag-aliases',
  score: 'data-score',
  upvotes: 'data-upvotes',
  downvotes: 'data-downvotes',
  uploader: 'data-uploader',
  // Yeah, I don't think this is reasonably supportable.
  // faved_by: 'data-faved-by',
  id: 'data-image-id',
  width: 'data-width',
  height: 'data-height',
  /* eslint-disable camelcase */
  aspect_ratio: 'data-aspect-ratio',
  comment_count: 'data-comment-count',
  tag_count: 'data-tag-count',
  source_url: 'data-source-url',
  faves: 'data-faves',
  sha512_hash: 'data-sha512',
  orig_sha512_hash: 'data-orig-sha512',
  created_at: 'data-created-at'
  /* eslint-enable camelcase */
};

export const defaultField = 'tags';
assets/js/query/lex.ts (new file, 191 lines)
@@ -0,0 +1,191 @@
import { assertNotNull, assertNotUndefined } from '../utils/assert';
import { AstMatcher, ParseError, TokenList } from './types';

type TokenName = string;
type Token = [TokenName, RegExp];

const tokenList: Token[] = [
  ['fuzz', /^~(?:\d+(\.\d+)?|\.\d+)/],
  ['boost', /^\^[-+]?\d+(\.\d+)?/],
  ['quoted_lit', /^\s*"(?:[^"]|\\")+"/],
  ['lparen', /^\s*\(\s*/],
  ['rparen', /^\s*\)\s*/],
  ['and_op', /^\s*(?:&&|AND)\s+/],
  ['and_op', /^\s*,\s*/],
  ['or_op', /^\s*(?:\|\||OR)\s+/],
  ['not_op', /^\s*NOT(?:\s+|(?=\())/],
  ['not_op', /^\s*[!-]\s*/],
  ['space', /^\s+/],
  ['word', /^(?:\\[\s,()^~]|[^\s,()^~])+/],
  ['word', /^(?:\\[\s,()]|[^\s,()])+/]
];

export type ParseTerm = (term: string, fuzz: number, boost: number) => AstMatcher;

export function generateLexArray(searchStr: string, parseTerm: ParseTerm): TokenList {
  const opQueue: string[] = [],
        groupNegate: boolean[] = [],
        tokenStack: TokenList = [];

  let searchTerm: string | null = null;
  let boostFuzzStr = '';
  let localSearchStr: string = searchStr;
  let negate = false;
  let boost = 1;
  let fuzz = 0;
  let lparenCtr = 0;

  const pushTerm = () => {
    if (searchTerm !== null) {
      // Push to stack.
      tokenStack.push(parseTerm(searchTerm, fuzz, boost));
      // Reset term and options data.
      boost = 1;
      fuzz = 0;
      searchTerm = null;
      boostFuzzStr = '';
      lparenCtr = 0;
    }

    if (negate) {
      tokenStack.push('not_op');
      negate = false;
    }
  };

  while (localSearchStr.length > 0) {
    for (const [tokenName, tokenRe] of tokenList) {
      const match = tokenRe.exec(localSearchStr);

      if (!match) {
        continue;
      }

      const token = match[0];

      if (searchTerm !== null && (['and_op', 'or_op'].indexOf(tokenName) !== -1 || tokenName === 'rparen' && lparenCtr === 0)) {
        pushTerm();
      }

      switch (tokenName) {
        case 'and_op':
          while (opQueue[0] === 'and_op') {
            tokenStack.push(assertNotUndefined(opQueue.shift()));
          }
          opQueue.unshift('and_op');
          break;
        case 'or_op':
          while (opQueue[0] === 'and_op' || opQueue[0] === 'or_op') {
            tokenStack.push(assertNotUndefined(opQueue.shift()));
          }
          opQueue.unshift('or_op');
          break;
        case 'not_op':
          if (searchTerm) {
            // We're already inside a search term, so it does not apply.
            searchTerm += token;
          }
          else {
            negate = !negate;
          }
          break;
        case 'lparen':
          if (searchTerm) {
            // If we are inside the search term, do not error out just yet;
            // instead, consider it as part of the search term, as a user convenience.
            searchTerm += token;
            lparenCtr += 1;
          }
          else {
            opQueue.unshift('lparen');
            groupNegate.push(negate);
            negate = false;
          }
          break;
        case 'rparen':
          if (lparenCtr > 0) {
            searchTerm = assertNotNull(searchTerm) + token;
            lparenCtr -= 1;
          }
          else {
            while (opQueue.length > 0) {
              const op = assertNotUndefined(opQueue.shift());
              if (op === 'lparen') {
                break;
              }
              tokenStack.push(op);
            }
            if (groupNegate.length > 0 && groupNegate.pop()) {
              tokenStack.push('not_op');
            }
          }
          break;
        case 'fuzz':
          if (searchTerm) {
            // For this and boost operations, we store the current match so far
            // to a temporary string in case this is actually inside the term.
            fuzz = parseFloat(token.substring(1));
            boostFuzzStr += token;
          }
          else {
            searchTerm = token;
          }
          break;
        case 'boost':
          if (searchTerm) {
            boost = parseFloat(token.substring(1));
            boostFuzzStr += token;
          }
          else {
            searchTerm = token;
          }
          break;
        case 'quoted_lit':
          if (searchTerm) {
            searchTerm += token;
          }
          else {
            searchTerm = token;
          }
          break;
        case 'word':
          if (searchTerm) {
            if (fuzz !== 0 || boost !== 1) {
              boost = 1;
              fuzz = 0;
              searchTerm += boostFuzzStr;
              boostFuzzStr = '';
            }
            searchTerm += token;
          }
          else {
            searchTerm = token;
          }
          break;
        default:
          // Append extra spaces within search terms.
          if (searchTerm) {
            searchTerm += token;
          }
      }

      // Truncate string and restart the token tests.
      localSearchStr = localSearchStr.substring(token.length);

      // Break since we have found a match.
      break;
    }
  }

  // Append final tokens to the stack.
  pushTerm();

  if (opQueue.indexOf('rparen') !== -1 || opQueue.indexOf('lparen') !== -1) {
    throw new ParseError('Mismatched parentheses.');
  }

  // Concatenate remaining operators to the token stack.
  tokenStack.push(...opQueue);

  return tokenStack;
}
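To make the shunting-yard behavior concrete, a sketch of the postfix token list the lexer emits; the stub parseTerm below is illustrative only, not part of the commit:

import { generateLexArray } from './lex';
import { AstMatcher } from './types';

// Stub parseTerm that returns a dummy matcher for every term.
const stubTerm = (term: string, fuzz: number, boost: number): AstMatcher => () => false;

// 'a AND (b OR NOT c)' lexes to postfix order:
// [matcher(a), matcher(b), matcher(c), 'not_op', 'or_op', 'and_op']
const tokens = generateLexArray('a AND (b OR NOT c)', stubTerm);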
assets/js/query/literal.ts (new file, 113 lines)
@@ -0,0 +1,113 @@
import { FieldMatcher } from './types';

function extractValues(v: string, name: string) {
  return name === 'tags' ? v.split(', ') : [v];
}

function makeExactMatcher(term: string): FieldMatcher {
  return (v, name) => {
    const values = extractValues(v, name);

    for (const val of values) {
      if (val.toLowerCase() === term.toLowerCase()) {
        return true;
      }
    }

    return false;
  };
}

function makeWildcardMatcher(term: string): FieldMatcher {
  // Transforms the wildcard match into a regular expression.
  // A custom NFA with caching may be more sophisticated, but it is
  // not likely to be faster.
  const wildcard = new RegExp(
    `^${term.replace(/([.+^$[\]\\(){}|-])/g, '\\$1')
      .replace(/([^\\]|[^\\](?:\\\\)+)\*/g, '$1.*')
      .replace(/^(?:\\\\)*\*/g, '.*')
      .replace(/([^\\]|[^\\](?:\\\\)+)\?/g, '$1.?')
      .replace(/^(?:\\\\)*\?/g, '.?')}$`, 'i'
  );

  return (v, name) => {
    const values = extractValues(v, name);

    for (const val of values) {
      if (wildcard.test(val)) {
        return true;
      }
    }

    return false;
  };
}

function fuzzyMatch(term: string, targetStr: string, fuzz: number): boolean {
  const targetDistance = fuzz < 1.0 ? targetStr.length * (1.0 - fuzz) : fuzz;
  const targetStrLower = targetStr.toLowerCase();

  // Work vectors, representing the last three populated
  // rows of the dynamic programming matrix of the iterative
  // optimal string alignment calculation.
  let v0: number[] = [];
  let v1: number[] = [];
  let v2: number[] = [];
  let temp: number[];

  for (let i = 0; i <= targetStrLower.length; i += 1) {
    v1.push(i);
  }

  for (let i = 0; i < term.length; i += 1) {
    v2[0] = i;
    for (let j = 0; j < targetStrLower.length; j += 1) {
      const cost = term[i] === targetStrLower[j] ? 0 : 1;
      v2[j + 1] = Math.min(
        // Deletion.
        v1[j + 1] + 1,
        // Insertion.
        v2[j] + 1,
        // Substitution or no change.
        v1[j] + cost
      );
      if (i > 1 && j > 1 && term[i] === targetStrLower[j - 1] &&
          targetStrLower[i - 1] === targetStrLower[j]) {
        v2[j + 1] = Math.min(v2[j], v0[j - 1] + cost);
      }
    }
    // Rotate the working vectors.
    temp = v0;
    v0 = v1;
    v1 = v2;
    v2 = temp;
  }

  return v1[targetStrLower.length] <= targetDistance;
}

function makeFuzzyMatcher(term: string, fuzz: number): FieldMatcher {
  return (v, name) => {
    const values = extractValues(v, name);

    for (const val of values) {
      if (fuzzyMatch(term, val, fuzz)) {
        return true;
      }
    }

    return false;
  };
}

export function makeLiteralMatcher(term: string, fuzz: number, wildcardable: boolean): FieldMatcher {
  if (fuzz === 0 && !wildcardable) {
    return makeExactMatcher(term);
  }

  if (!wildcardable) {
    return makeFuzzyMatcher(term, fuzz);
  }

  return makeWildcardMatcher(term);
}
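A sketch of the matcher modes makeLiteralMatcher selects, with behavior as implied by the code above; not part of the commit:

import { makeLiteralMatcher } from './literal';

// Wildcardable: '*' compiles to '.*' and '?' to '.?' in a case-insensitive regex.
const wild = makeLiteralMatcher('tw?tter*', 0, true);
wild('twitter.com', 'source_url', 0); // true

// Fuzzy: fuzz below 1.0 is a fraction of the target length;
// fuzz of 1 or more is an absolute edit-distance budget.
const fuzzy = makeLiteralMatcher('gray', 1, false);
fuzzy('grey', 'tags', 0); // true, one substitution away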
assets/js/query/matcher.ts (new file, 20 lines)
@@ -0,0 +1,20 @@
import { makeDateMatcher } from './date';
import { makeLiteralMatcher } from './literal';
import { makeNumberMatcher } from './number';
import { makeUserMatcher } from './user';

import { FieldMatcher, RangeEqualQualifier } from './types';

export interface MatcherFactory {
  makeDateMatcher: (dateVal: string, qual: RangeEqualQualifier) => FieldMatcher,
  makeLiteralMatcher: (term: string, fuzz: number, wildcardable: boolean) => FieldMatcher,
  makeNumberMatcher: (term: number, fuzz: number, qual: RangeEqualQualifier) => FieldMatcher,
  makeUserMatcher: (term: string) => FieldMatcher
}

export const defaultMatcher: MatcherFactory = {
  makeDateMatcher,
  makeLiteralMatcher,
  makeNumberMatcher,
  makeUserMatcher,
};
assets/js/query/number.ts (new file, 30 lines)
@@ -0,0 +1,30 @@
import { FieldMatcher, RangeEqualQualifier } from './types';

export function makeNumberMatcher(term: number, fuzz: number, qual: RangeEqualQualifier): FieldMatcher {
  // Range matching.
  return v => {
    const attrVal = parseFloat(v);

    if (isNaN(attrVal)) {
      return false;
    }

    if (fuzz !== 0) {
      return term - fuzz <= attrVal && term + fuzz >= attrVal;
    }

    switch (qual) {
      case 'lt':
        return attrVal < term;
      case 'gt':
        return attrVal > term;
      case 'lte':
        return attrVal <= term;
      case 'gte':
        return attrVal >= term;
      case 'eq':
      default:
        return attrVal === term;
    }
  };
}
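Expected behavior, sketched from the code above; not part of the commit:

import { makeNumberMatcher } from './number';

// 'width.gte:1920' yields a matcher over the raw attribute string.
const wide = makeNumberMatcher(1920, 0, 'gte');
wide('2560', 'width', 0); // true
wide('1280', 'width', 0); // false

// A nonzero fuzz turns the comparison into the band [term - fuzz, term + fuzz].
const near = makeNumberMatcher(100, 5, 'eq');
near('97', 'score', 0); // true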
assets/js/query/parse.ts (new file, 52 lines)
@@ -0,0 +1,52 @@
import { matchAll, matchAny, matchNone, matchNot } from './boolean';
import { AstMatcher, ParseError, TokenList } from './types';

export function parseTokens(lexicalArray: TokenList): AstMatcher {
  const operandStack: AstMatcher[] = [];

  lexicalArray.forEach((token, i) => {
    if (token === 'not_op') {
      return;
    }

    let intermediate: AstMatcher;

    if (typeof token === 'string') {
      const op2 = operandStack.pop();
      const op1 = operandStack.pop();

      if (typeof op1 === 'undefined' || typeof op2 === 'undefined') {
        throw new ParseError('Missing operand.');
      }

      if (token === 'and_op') {
        intermediate = matchAll(op1, op2);
      }
      else {
        intermediate = matchAny(op1, op2);
      }
    }
    else {
      intermediate = token;
    }

    if (lexicalArray[i + 1] === 'not_op') {
      operandStack.push(matchNot(intermediate));
    }
    else {
      operandStack.push(intermediate);
    }
  });

  if (operandStack.length > 1) {
    throw new ParseError('Missing operator.');
  }

  const op1 = operandStack.pop();

  if (typeof op1 === 'undefined') {
    return matchNone();
  }

  return op1;
}
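Putting the lexer and the postfix folder together, a minimal end-to-end sketch; the leaf parseTerm here is illustrative only, not part of the commit:

import { generateLexArray } from './lex';
import { parseTokens } from './parse';
import { AstMatcher } from './types';

// Illustrative leaf parser: match elements whose tag list contains the term.
const parseTerm = (term: string): AstMatcher =>
  e => (e.getAttribute('data-image-tag-aliases') || '').split(', ').indexOf(term) !== -1;

// Lex to postfix, then fold the postfix list into one matcher.
const matcher = parseTokens(generateLexArray('cat, !dog', parseTerm));

const el = document.createElement('div');
el.setAttribute('data-image-tag-aliases', 'cat, bird');
matcher(el); // true: has 'cat' and does not have 'dog'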
assets/js/query/term.ts (new file, 90 lines)
@@ -0,0 +1,90 @@
import { MatcherFactory } from './matcher';

import { numberFields, dateFields, literalFields, termSpaceToImageField, defaultField } from './fields';
import { FieldName, FieldMatcher, RangeEqualQualifier, TermType, AstMatcher } from './types';

type RangeInfo = [FieldName, RangeEqualQualifier, TermType];

function normalizeTerm(term: string, wildcardable: boolean) {
  if (!wildcardable) {
    return term.replace('\\"', '"');
  }
  return term.replace(/\\([^*?])/g, '$1');
}

function parseRangeField(field: string): RangeInfo | null {
  if (numberFields.indexOf(field) !== -1) {
    return [field, 'eq', 'number'];
  }

  if (dateFields.indexOf(field) !== -1) {
    return [field, 'eq', 'date'];
  }

  const qual = /^(\w+)\.([lg]te?|eq)$/.exec(field);

  if (qual) {
    const fieldName: FieldName = qual[1];
    const rangeQual = qual[2] as RangeEqualQualifier;

    if (numberFields.indexOf(fieldName) !== -1) {
      return [fieldName, rangeQual, 'number'];
    }

    if (dateFields.indexOf(fieldName) !== -1) {
      return [fieldName, rangeQual, 'date'];
    }
  }

  return null;
}

function makeTermMatcher(term: string, fuzz: number, factory: MatcherFactory): [FieldName, FieldMatcher] {
  let rangeParsing, candidateTermSpace, termCandidate;
  let localTerm = term;
  const wildcardable = fuzz === 0 && !/^"([^"]|\\")+"$/.test(localTerm);

  if (!wildcardable && !fuzz) {
    // Remove quotes around quoted literal term
    localTerm = localTerm.substring(1, localTerm.length - 1);
  }

  localTerm = normalizeTerm(localTerm, wildcardable);

  // N.B.: For the purposes of this parser, boosting effects are ignored.
  const matchArr = localTerm.split(':');

  if (matchArr.length > 1) {
    candidateTermSpace = matchArr[0];
    termCandidate = matchArr.slice(1).join(':');
    rangeParsing = parseRangeField(candidateTermSpace);

    if (rangeParsing) {
      const [fieldName, rangeType, fieldType] = rangeParsing;

      if (fieldType === 'date') {
        return [fieldName, factory.makeDateMatcher(termCandidate, rangeType)];
      }

      return [fieldName, factory.makeNumberMatcher(parseFloat(termCandidate), fuzz, rangeType)];
    }
    else if (literalFields.indexOf(candidateTermSpace) !== -1) {
      return [candidateTermSpace, factory.makeLiteralMatcher(termCandidate, fuzz, wildcardable)];
    }
    else if (candidateTermSpace === 'my') {
      return [candidateTermSpace, factory.makeUserMatcher(termCandidate)];
    }
  }

  return [defaultField, factory.makeLiteralMatcher(localTerm, fuzz, wildcardable)];
}

export function getAstMatcherForTerm(term: string, fuzz: number, factory: MatcherFactory): AstMatcher {
  const [fieldName, matcher] = makeTermMatcher(term, fuzz, factory);

  return (e: HTMLElement) => {
    const value = e.getAttribute(termSpaceToImageField[fieldName]) || '';
    const documentId = parseInt(e.getAttribute(termSpaceToImageField.id) || '0', 10);
    return matcher(value, fieldName, documentId);
  };
}
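End to end, a term string resolves to a field-specific matcher over the element's data attributes; a small sketch, not part of the commit:

import { getAstMatcherForTerm } from './term';
import { defaultMatcher } from './matcher';

// 'width.gte:1920' dispatches to makeNumberMatcher and reads 'data-width'.
const wideEnough = getAstMatcherForTerm('width.gte:1920', 0, defaultMatcher);

const el = document.createElement('div');
el.setAttribute('data-width', '2560');
el.setAttribute('data-image-id', '42');
wideEnough(el); // true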
assets/js/query/types.ts (new file, 12 lines)
@@ -0,0 +1,12 @@
export type TermType = 'number' | 'date' | 'literal' | 'my';
export type RangeQualifier = 'gt' | 'gte' | 'lt' | 'lte';
export type RangeEqualQualifier = RangeQualifier | 'eq';

export type FieldValue = string;
export type FieldName = string;
export type FieldMatcher = (value: FieldValue, name: FieldName, documentId: number) => boolean;

export type AstMatcher = (e: HTMLElement) => boolean;
export type TokenList = (string | AstMatcher)[];

export class ParseError extends Error {}
assets/js/query/user.ts (new file, 25 lines)
@@ -0,0 +1,25 @@
import { Interaction, InteractionType, InteractionValue } from '../../types/booru-object';
import { FieldMatcher } from './types';

function interactionMatch(imageId: number, type: InteractionType, value: InteractionValue, interactions: Interaction[]): boolean {
  return interactions.some(v => v.image_id === imageId && v.interaction_type === type && (value === null || v.value === value));
}

export function makeUserMatcher(term: string): FieldMatcher {
  // Should work with most my: conditions, except watched.
  return (value, field, documentId) => {
    switch (term) {
      case 'faves':
        return interactionMatch(documentId, 'faved', null, window.booru.interactions);
      case 'upvotes':
        return interactionMatch(documentId, 'voted', 'up', window.booru.interactions);
      case 'downvotes':
        return interactionMatch(documentId, 'voted', 'down', window.booru.interactions);
      case 'watched':
      case 'hidden':
      default:
        // Other my: interactions aren't supported; return false to prevent
        // them from triggering the spoiler.
        return false;
    }
  };
}
@@ -3,6 +3,7 @@ import { getRandomArrayItem } from '../../../test/randomness';
 import { mockStorage } from '../../../test/mock-storage';
 import { createEvent, fireEvent } from '@testing-library/dom';
 import { EventType } from '@testing-library/dom/types/events';
+import { SpoilerType } from '../../../types/booru-object';

 describe('Image utils', () => {
   const hiddenClass = 'hidden';
@@ -2,6 +2,7 @@ import { displayTags, getHiddenTags, getSpoileredTags, imageHitsComplex, imageHi
 import { mockStorage } from '../../../test/mock-storage';
 import { getRandomArrayItem } from '../../../test/randomness';
 import parseSearch from '../../match_query';
+import { SpoilerType } from '../../../types/booru-object';

 describe('Tag utilities', () => {
   const tagStorageKeyPrefix = 'bor_tags_';
@@ -1,5 +1,6 @@
 import { escapeHtml } from './dom';
 import { getTag } from '../booru';
+import { AstMatcher } from '../query/types';

 export interface TagData {
   id: number;
@@ -42,7 +43,7 @@ export function getSpoileredTags() {
     .sort(sortTags.bind(null, false));
 }

-export function imageHitsTags(img: HTMLImageElement, matchTags: TagData[]): TagData[] {
+export function imageHitsTags(img: HTMLElement, matchTags: TagData[]): TagData[] {
   const imageTagsString = img.dataset.imageTags;
   if (typeof imageTagsString === 'undefined') {
     return [];
@@ -51,8 +52,8 @@ export function imageHitsTags(img: HTMLImageElement, matchTags: TagData[]): TagD
   return matchTags.filter(t => imageTags.indexOf(t.id) !== -1);
 }

-export function imageHitsComplex(img: HTMLImageElement, matchComplex: { hitsImage: (img: HTMLImageElement) => boolean }) {
-  return matchComplex.hitsImage(img);
+export function imageHitsComplex(img: HTMLElement, matchComplex: AstMatcher) {
+  return matchComplex(img);
 }

 export function displayTags(tags: TagData[]): string {
@@ -1,11 +1,5 @@
 import '@testing-library/jest-dom';
+import { matchNone } from '../js/query/boolean';
-const blankFilter = {
-  leftOperand: null,
-  negate: false,
-  op: null,
-  rightOperand: null,
-};

 window.booru = {
   csrfToken: 'mockCsrfToken',
@@ -18,7 +12,8 @@ window.booru = {
   userCanEditFilter: false,
   userIsSignedIn: false,
   watchedTagList: [],
-  hiddenFilter: blankFilter,
+  hiddenFilter: matchNone(),
-  spoileredFilter: blankFilter,
+  spoileredFilter: matchNone(),
+  interactions: [],
   tagsVersion: 5
 };
assets/types/booru-object.d.ts (vendored, 34 lines)
@@ -1,5 +1,17 @@
+import { AstMatcher } from 'query/types';
+
 type SpoilerType = 'click' | 'hover' | 'static' | 'off';

+type InteractionType = 'voted' | 'faved' | 'hidden';
+type InteractionValue = 'up' | 'down' | null;
+
+interface Interaction {
+  image_id: number;
+  user_id: number;
+  interaction_type: InteractionType;
+  value: 'up' | 'down' | null;
+}
+
 interface BooruObject {
   csrfToken: string;
   /**
@@ -36,24 +48,20 @@ interface BooruObject {
    */
   userCanEditFilter: boolean;
   /**
-   * SearchAST instance for hidden tags, converted from raw AST data in {@see import('../js/booru.js')}
+   * AST matcher instance for the hidden filter query
    *
-   * TODO Properly type after TypeScript migration
-   *
-   * @type {import('../js/match_query.js').SearchAST}
    */
-  hiddenFilter: unknown;
+  hiddenFilter: AstMatcher;
   /**
-   * SearchAST instance for spoilered tags, converted from raw AST data in {@see import('../js/booru.js')}
+   * AST matcher instance for the spoilered filter query
    *
-   * TODO Properly type after TypeScript migration
-   *
-   * @type {import('../js/match_query.js').SearchAST}
    */
-  spoileredFilter: unknown;
+  spoileredFilter: AstMatcher;
   tagsVersion: number;
+  interactions: Interaction[];
 }

-interface Window {
-  booru: BooruObject;
+declare global {
+  interface Window {
+    booru: BooruObject;
+  }
 }