mirror of
https://github.com/philomena-dev/philomena.git
synced 2025-03-28 06:17:46 +01:00
Merge remote-tracking branch 'origin/master' into opensearch
This commit is contained in:
commit
da7dae20f9
170 changed files with 5957 additions and 9457 deletions
22
.github/workflows/elixir.yml
vendored
22
.github/workflows/elixir.yml
vendored
|
@ -7,40 +7,40 @@ jobs:
|
|||
name: 'Build Elixir app'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Cache mix deps
|
||||
uses: actions/cache@v2
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
_build
|
||||
deps
|
||||
key: ${{ runner.os }}-build-deps-${{ hashFiles('mix.lock') }}
|
||||
|
||||
- run: docker-compose pull
|
||||
- run: docker-compose build
|
||||
- run: docker compose pull
|
||||
- run: docker compose build
|
||||
|
||||
- name: Build and test
|
||||
run: docker-compose run app run-test
|
||||
run: docker compose run app run-test
|
||||
|
||||
- name: Security lint
|
||||
run: |
|
||||
docker-compose run app mix sobelow --config
|
||||
docker-compose run app mix deps.audit
|
||||
docker compose run app mix sobelow --config
|
||||
docker compose run app mix deps.audit
|
||||
lint-and-test:
|
||||
name: 'JavaScript Linting and Unit Tests'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v2
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '16'
|
||||
node-version: '20'
|
||||
|
||||
- name: Cache node_modules
|
||||
id: cache-node-modules
|
||||
uses: actions/cache@v2
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ./assets/node_modules
|
||||
key: node_modules-${{ hashFiles('./assets/package-lock.json') }}
|
||||
|
|
2
.gitignore
vendored
2
.gitignore
vendored
|
@ -59,5 +59,5 @@ npm-debug.log
|
|||
/native/**/target
|
||||
/.cargo
|
||||
|
||||
# Jest coverage
|
||||
# Vitest coverage
|
||||
/assets/coverage
|
||||
|
|
|
@ -2,11 +2,11 @@
|
|||

|
||||
|
||||
## Getting started
|
||||
On systems with `docker` and `docker-compose` installed, the process should be as simple as:
|
||||
On systems with `docker` and `docker compose` installed, the process should be as simple as:
|
||||
|
||||
```
|
||||
docker-compose build
|
||||
docker-compose up
|
||||
docker compose build
|
||||
docker compose up
|
||||
```
|
||||
|
||||
If you use `podman` and `podman-compose` instead, the process for constructing a rootless container is nearly identical:
|
||||
|
|
|
@ -1,3 +1,2 @@
|
|||
js/vendor/*
|
||||
webpack.config.js
|
||||
jest.config.js
|
||||
vite.config.ts
|
||||
|
|
|
@ -10,7 +10,7 @@ parserOptions:
|
|||
|
||||
plugins:
|
||||
- '@typescript-eslint'
|
||||
- jest
|
||||
- vitest
|
||||
|
||||
globals:
|
||||
ga: false
|
||||
|
@ -276,12 +276,14 @@ overrides:
|
|||
'@typescript-eslint/no-extra-parens': 2
|
||||
no-shadow: 0
|
||||
'@typescript-eslint/no-shadow': 2
|
||||
# Jest Tests (also written in TypeScript)
|
||||
# Unit Tests (also written in TypeScript)
|
||||
# Disable rules that do not make sense in test files (e.g. testing for undefined input values should be allowed)
|
||||
- files:
|
||||
- '*.spec.ts'
|
||||
- 'test/*.ts'
|
||||
extends:
|
||||
- 'plugin:jest/recommended'
|
||||
- 'plugin:vitest/legacy-recommended'
|
||||
rules:
|
||||
no-undefined: 0
|
||||
no-unused-expressions: 0
|
||||
vitest/valid-expect: 0
|
||||
|
|
|
@ -9,13 +9,13 @@
|
|||
@import "global";
|
||||
|
||||
// Because FA is a SPECIAL SNOWFLAKE.
|
||||
$fa-font-path: '~@fortawesome/fontawesome-free/webfonts';
|
||||
$fa-font-path: '@fortawesome/fontawesome-free/webfonts';
|
||||
|
||||
@import "~@fortawesome/fontawesome-free/scss/fontawesome.scss";
|
||||
@import "~@fortawesome/fontawesome-free/scss/solid.scss";
|
||||
@import "~@fortawesome/fontawesome-free/scss/regular.scss";
|
||||
@import "~@fortawesome/fontawesome-free/scss/brands.scss";
|
||||
@import "~normalize-scss/sass/normalize/import-now";
|
||||
@import "@fortawesome/fontawesome-free/scss/fontawesome.scss";
|
||||
@import "@fortawesome/fontawesome-free/scss/solid.scss";
|
||||
@import "@fortawesome/fontawesome-free/scss/regular.scss";
|
||||
@import "@fortawesome/fontawesome-free/scss/brands.scss";
|
||||
@import "normalize-scss/sass/normalize/import-now";
|
||||
|
||||
body {
|
||||
background-color: $background_color;
|
||||
|
@ -469,26 +469,26 @@ span.stat {
|
|||
@import "shame";
|
||||
@import "text";
|
||||
|
||||
@import "~views/adverts";
|
||||
@import "~views/approval";
|
||||
@import "~views/badges";
|
||||
@import "~views/channels";
|
||||
@import "~views/comments";
|
||||
@import "~views/commissions";
|
||||
@import "~views/communications";
|
||||
@import "~views/duplicate_reports";
|
||||
@import "~views/filters";
|
||||
@import "~views/galleries";
|
||||
@import "~views/images";
|
||||
@import "~views/pages";
|
||||
@import "~views/polls";
|
||||
@import "~views/posts";
|
||||
@import "~views/profiles";
|
||||
@import "~views/pagination";
|
||||
@import "~views/search";
|
||||
@import "~views/staff";
|
||||
@import "~views/stats";
|
||||
@import "~views/tags";
|
||||
@import "views/adverts";
|
||||
@import "views/approval";
|
||||
@import "views/badges";
|
||||
@import "views/channels";
|
||||
@import "views/comments";
|
||||
@import "views/commissions";
|
||||
@import "views/communications";
|
||||
@import "views/duplicate_reports";
|
||||
@import "views/filters";
|
||||
@import "views/galleries";
|
||||
@import "views/images";
|
||||
@import "views/pages";
|
||||
@import "views/polls";
|
||||
@import "views/posts";
|
||||
@import "views/profiles";
|
||||
@import "views/pagination";
|
||||
@import "views/search";
|
||||
@import "views/staff";
|
||||
@import "views/stats";
|
||||
@import "views/tags";
|
||||
|
||||
.no-overflow {
|
||||
overflow: hidden;
|
||||
|
|
|
@ -124,6 +124,8 @@ a.block__header--single-item, .block__header a {
|
|||
.block__header--js-tabbed {
|
||||
@extend .block__header--light;
|
||||
background: transparent;
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
|
||||
border-bottom: $border;
|
||||
a {
|
||||
|
|
|
@ -23,6 +23,11 @@
|
|||
padding-left: 6px;
|
||||
}
|
||||
|
||||
.header__navigation {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
}
|
||||
|
||||
a.header__link {
|
||||
display: inline-block;
|
||||
padding: 0 $header_spacing;
|
||||
|
|
|
@ -190,4 +190,4 @@ $dnp_warning_hover_color: lighten($vote_down_color, 10%);
|
|||
$poll_form_label_background: lighten($border_color, 8);
|
||||
$tag_dropdown_hover_background: darken($meta_color, 4%);
|
||||
|
||||
@import "~common/base";
|
||||
@import "common/base";
|
||||
|
|
|
@ -180,4 +180,4 @@ $dnp_warning_hover_color: lighten($vote_down_color, 10%);
|
|||
$poll_form_label_background: lighten($border_color, 8);
|
||||
$tag_dropdown_hover_background: darken($meta_color, 4%);
|
||||
|
||||
@import "~common/base";
|
||||
@import "common/base";
|
||||
|
|
|
@ -192,4 +192,4 @@ $dnp_warning_hover_color: lighten($vote_down_color, 10%);
|
|||
$poll_form_label_background: lighten($border_color, 8);
|
||||
$tag_dropdown_hover_background: darken($meta_color, 4%);
|
||||
|
||||
@import "~common/base";
|
||||
@import "common/base";
|
||||
|
|
|
@ -92,12 +92,6 @@ div.image-container {
|
|||
overflow: hidden;
|
||||
/* prevent .media-box__overlay from overflowing the container */
|
||||
text-align: center;
|
||||
a::before {
|
||||
content: "";
|
||||
display: inline-block;
|
||||
height: 100%;
|
||||
vertical-align: middle;
|
||||
}
|
||||
img,
|
||||
video {
|
||||
vertical-align: middle;
|
||||
|
@ -105,12 +99,12 @@ div.image-container {
|
|||
max-height: 100%;
|
||||
}
|
||||
/* Make the link cover the whole container if the image is oblong */
|
||||
a {
|
||||
a, picture, video {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
display: inline-block;
|
||||
text-align: center;
|
||||
vertical-align: middle;
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -70,7 +70,11 @@
|
|||
.tag > span {
|
||||
padding: 5px;
|
||||
display: table-cell;
|
||||
white-space: pre;
|
||||
}
|
||||
|
||||
.tag-list {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
}
|
||||
|
||||
.tag a {
|
||||
|
|
|
@ -1,41 +0,0 @@
|
|||
export default {
|
||||
collectCoverage: true,
|
||||
collectCoverageFrom: [
|
||||
'js/**/*.{js,ts}',
|
||||
],
|
||||
coveragePathIgnorePatterns: [
|
||||
'/node_modules/',
|
||||
'/.*\\.test\\.ts$',
|
||||
'.*\\.d\\.ts$',
|
||||
],
|
||||
coverageDirectory: '<rootDir>/coverage/',
|
||||
coverageThreshold: {
|
||||
global: {
|
||||
statements: 0,
|
||||
branches: 0,
|
||||
functions: 0,
|
||||
lines: 0,
|
||||
},
|
||||
'./js/utils/**/*.ts': {
|
||||
statements: 100,
|
||||
branches: 100,
|
||||
functions: 100,
|
||||
lines: 100,
|
||||
},
|
||||
},
|
||||
preset: 'ts-jest/presets/js-with-ts-esm',
|
||||
setupFilesAfterEnv: ['<rootDir>/test/jest-setup.ts'],
|
||||
testEnvironment: 'jsdom',
|
||||
testPathIgnorePatterns: ['/node_modules/', '/dist/'],
|
||||
moduleNameMapper: {
|
||||
'./js/(.*)': '<rootDir>/js/$1',
|
||||
},
|
||||
transform: {},
|
||||
globals: {
|
||||
extensionsToTreatAsEsm: ['.ts', '.js'],
|
||||
'ts-jest': {
|
||||
tsconfig: '<rootDir>/tsconfig.json',
|
||||
useESM: true,
|
||||
},
|
||||
},
|
||||
};
|
92
assets/js/__tests__/input-duplicator.spec.ts
Normal file
92
assets/js/__tests__/input-duplicator.spec.ts
Normal file
|
@ -0,0 +1,92 @@
|
|||
import { inputDuplicatorCreator } from '../input-duplicator';
|
||||
import { assertNotNull } from '../utils/assert';
|
||||
import { $, $$, removeEl } from '../utils/dom';
|
||||
import { fireEvent } from '@testing-library/dom';
|
||||
|
||||
describe('Input duplicator functionality', () => {
|
||||
beforeEach(() => {
|
||||
document.documentElement.insertAdjacentHTML('beforeend', `<form action="/">
|
||||
<div class="js-max-input-count">3</div>
|
||||
<div class="js-input-source">
|
||||
<input id="0" name="0" class="js-input" type="text"/>
|
||||
<label>
|
||||
<a href="#" class="js-remove-input">Delete</a>
|
||||
</label>
|
||||
</div>
|
||||
<div class="js-button-container">
|
||||
<button type="button" class="js-add-input">Add input</button>
|
||||
</div>
|
||||
</form>`);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
removeEl($$<HTMLFormElement>('form'));
|
||||
});
|
||||
|
||||
function runCreator() {
|
||||
inputDuplicatorCreator({
|
||||
addButtonSelector: '.js-add-input',
|
||||
fieldSelector: '.js-input-source',
|
||||
maxInputCountSelector: '.js-max-input-count',
|
||||
removeButtonSelector: '.js-remove-input',
|
||||
});
|
||||
}
|
||||
|
||||
it('should ignore forms without a duplicator button', () => {
|
||||
removeEl($$<HTMLButtonElement>('button'));
|
||||
expect(runCreator()).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should duplicate the input elements', () => {
|
||||
runCreator();
|
||||
|
||||
expect($$('input')).toHaveLength(1);
|
||||
|
||||
fireEvent.click(assertNotNull($<HTMLButtonElement>('.js-add-input')));
|
||||
|
||||
expect($$('input')).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('should duplicate the input elements when the button is before the inputs', () => {
|
||||
const form = assertNotNull($<HTMLFormElement>('form'));
|
||||
const buttonDiv = assertNotNull($<HTMLDivElement>('.js-button-container'));
|
||||
removeEl(buttonDiv);
|
||||
form.insertAdjacentElement('afterbegin', buttonDiv);
|
||||
runCreator();
|
||||
|
||||
fireEvent.click(assertNotNull($<HTMLButtonElement>('.js-add-input')));
|
||||
|
||||
expect($$('input')).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('should not create more input elements than the limit', () => {
|
||||
runCreator();
|
||||
|
||||
for (let i = 0; i < 5; i += 1) {
|
||||
fireEvent.click(assertNotNull($<HTMLButtonElement>('.js-add-input')));
|
||||
}
|
||||
|
||||
expect($$('input')).toHaveLength(3);
|
||||
});
|
||||
|
||||
it('should remove duplicated input elements', () => {
|
||||
runCreator();
|
||||
|
||||
fireEvent.click(assertNotNull($<HTMLButtonElement>('.js-add-input')));
|
||||
fireEvent.click(assertNotNull($<HTMLAnchorElement>('.js-remove-input')));
|
||||
|
||||
expect($$('input')).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('should not remove the last input element', () => {
|
||||
runCreator();
|
||||
|
||||
fireEvent.click(assertNotNull($<HTMLAnchorElement>('.js-remove-input')));
|
||||
fireEvent.click(assertNotNull($<HTMLAnchorElement>('.js-remove-input')));
|
||||
for (let i = 0; i < 5; i += 1) {
|
||||
fireEvent.click(assertNotNull($<HTMLAnchorElement>('.js-remove-input')));
|
||||
}
|
||||
|
||||
expect($$('input')).toHaveLength(1);
|
||||
});
|
||||
});
|
114
assets/js/__tests__/timeago.spec.ts
Normal file
114
assets/js/__tests__/timeago.spec.ts
Normal file
|
@ -0,0 +1,114 @@
|
|||
import { timeAgo, setupTimestamps } from '../timeago';
|
||||
|
||||
const epochRfc3339 = '1970-01-01T00:00:00.000Z';
|
||||
|
||||
describe('Timeago functionality', () => {
|
||||
// TODO: is this robust? do we need e.g. timekeeper to freeze the time?
|
||||
function timeAgoWithSecondOffset(offset: number) {
|
||||
const utc = new Date(new Date().getTime() + offset * 1000).toISOString();
|
||||
|
||||
const timeEl = document.createElement('time');
|
||||
timeEl.setAttribute('datetime', utc);
|
||||
timeEl.textContent = utc;
|
||||
|
||||
timeAgo([timeEl]);
|
||||
return timeEl.textContent;
|
||||
}
|
||||
|
||||
/* eslint-disable no-implicit-coercion */
|
||||
it('should parse a time as less than a minute', () => {
|
||||
expect(timeAgoWithSecondOffset(-15)).toEqual('less than a minute ago');
|
||||
expect(timeAgoWithSecondOffset(+15)).toEqual('less than a minute from now');
|
||||
});
|
||||
|
||||
it('should parse a time as about a minute', () => {
|
||||
expect(timeAgoWithSecondOffset(-75)).toEqual('about a minute ago');
|
||||
expect(timeAgoWithSecondOffset(+75)).toEqual('about a minute from now');
|
||||
});
|
||||
|
||||
it('should parse a time as 30 minutes', () => {
|
||||
expect(timeAgoWithSecondOffset(-(60 * 30))).toEqual('30 minutes ago');
|
||||
expect(timeAgoWithSecondOffset(+(60 * 30))).toEqual('30 minutes from now');
|
||||
});
|
||||
|
||||
it('should parse a time as about an hour', () => {
|
||||
expect(timeAgoWithSecondOffset(-(60 * 60))).toEqual('about an hour ago');
|
||||
expect(timeAgoWithSecondOffset(+(60 * 60))).toEqual('about an hour from now');
|
||||
});
|
||||
|
||||
it('should parse a time as about 6 hours', () => {
|
||||
expect(timeAgoWithSecondOffset(-(60 * 60 * 6))).toEqual('about 6 hours ago');
|
||||
expect(timeAgoWithSecondOffset(+(60 * 60 * 6))).toEqual('about 6 hours from now');
|
||||
});
|
||||
|
||||
it('should parse a time as a day', () => {
|
||||
expect(timeAgoWithSecondOffset(-(60 * 60 * 36))).toEqual('a day ago');
|
||||
expect(timeAgoWithSecondOffset(+(60 * 60 * 36))).toEqual('a day from now');
|
||||
});
|
||||
|
||||
it('should parse a time as 25 days', () => {
|
||||
expect(timeAgoWithSecondOffset(-(60 * 60 * 24 * 25))).toEqual('25 days ago');
|
||||
expect(timeAgoWithSecondOffset(+(60 * 60 * 24 * 25))).toEqual('25 days from now');
|
||||
});
|
||||
|
||||
it('should parse a time as about a month', () => {
|
||||
expect(timeAgoWithSecondOffset(-(60 * 60 * 24 * 35))).toEqual('about a month ago');
|
||||
expect(timeAgoWithSecondOffset(+(60 * 60 * 24 * 35))).toEqual('about a month from now');
|
||||
});
|
||||
|
||||
it('should parse a time as 3 months', () => {
|
||||
expect(timeAgoWithSecondOffset(-(60 * 60 * 24 * 30 * 3))).toEqual('3 months ago');
|
||||
expect(timeAgoWithSecondOffset(+(60 * 60 * 24 * 30 * 3))).toEqual('3 months from now');
|
||||
});
|
||||
|
||||
it('should parse a time as about a year', () => {
|
||||
expect(timeAgoWithSecondOffset(-(60 * 60 * 24 * 30 * 13))).toEqual('about a year ago');
|
||||
expect(timeAgoWithSecondOffset(+(60 * 60 * 24 * 30 * 13))).toEqual('about a year from now');
|
||||
});
|
||||
|
||||
it('should parse a time as 5 years', () => {
|
||||
expect(timeAgoWithSecondOffset(-(60 * 60 * 24 * 30 * 12 * 5))).toEqual('5 years ago');
|
||||
expect(timeAgoWithSecondOffset(+(60 * 60 * 24 * 30 * 12 * 5))).toEqual('5 years from now');
|
||||
});
|
||||
/* eslint-enable no-implicit-coercion */
|
||||
|
||||
it('should ignore time elements without a datetime attribute', () => {
|
||||
const timeEl = document.createElement('time');
|
||||
const value = Math.random().toString();
|
||||
|
||||
timeEl.textContent = value;
|
||||
timeAgo([timeEl]);
|
||||
|
||||
expect(timeEl.textContent).toEqual(value);
|
||||
});
|
||||
|
||||
it('should not reset title attribute if it already exists', () => {
|
||||
const timeEl = document.createElement('time');
|
||||
const value = Math.random().toString();
|
||||
|
||||
timeEl.setAttribute('datetime', epochRfc3339);
|
||||
timeEl.setAttribute('title', value);
|
||||
timeAgo([timeEl]);
|
||||
|
||||
expect(timeEl.getAttribute('title')).toEqual(value);
|
||||
expect(timeEl.textContent).not.toEqual(epochRfc3339);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Automatic timestamps', () => {
|
||||
it('should process all timestamps in the document', () => {
|
||||
for (let i = 0; i < 5; i += 1) {
|
||||
const timeEl = document.createElement('time');
|
||||
timeEl.setAttribute('datetime', epochRfc3339);
|
||||
timeEl.textContent = epochRfc3339;
|
||||
|
||||
document.documentElement.insertAdjacentElement('beforeend', timeEl);
|
||||
}
|
||||
|
||||
setupTimestamps();
|
||||
|
||||
for (const timeEl of document.getElementsByTagName('time')) {
|
||||
expect(timeEl.textContent).not.toEqual(epochRfc3339);
|
||||
}
|
||||
});
|
||||
});
|
330
assets/js/__tests__/ujs.spec.ts
Normal file
330
assets/js/__tests__/ujs.spec.ts
Normal file
|
@ -0,0 +1,330 @@
|
|||
import { fireEvent, waitFor } from '@testing-library/dom';
|
||||
import { assertType } from '../utils/assert';
|
||||
import '../ujs';
|
||||
import { fetchMock } from '../../test/fetch-mock';
|
||||
|
||||
const mockEndpoint = 'http://localhost/endpoint';
|
||||
const mockVerb = 'POST';
|
||||
|
||||
describe('Remote utilities', () => {
|
||||
beforeAll(() => {
|
||||
fetchMock.enableMocks();
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
fetchMock.disableMocks();
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
window.booru.csrfToken = Math.random().toString();
|
||||
fetchMock.resetMocks();
|
||||
});
|
||||
|
||||
function addOneShotEventListener(name: string, cb: (e: Event) => void) {
|
||||
const handler = (event: Event) => {
|
||||
cb(event);
|
||||
document.removeEventListener(name, handler);
|
||||
};
|
||||
document.addEventListener(name, handler);
|
||||
}
|
||||
|
||||
describe('a[data-remote]', () => {
|
||||
const submitA = ({ setMethod }: { setMethod: boolean; }) => {
|
||||
const a = document.createElement('a');
|
||||
a.href = mockEndpoint;
|
||||
a.dataset.remote = 'remote';
|
||||
if (setMethod) {
|
||||
a.dataset.method = mockVerb;
|
||||
}
|
||||
|
||||
document.documentElement.insertAdjacentElement('beforeend', a);
|
||||
fireEvent.click(a, { button: 0 });
|
||||
|
||||
return a;
|
||||
};
|
||||
|
||||
it('should call native fetch with the correct parameters (without body)', () => {
|
||||
submitA({ setMethod: true });
|
||||
expect(fetch).toHaveBeenCalledTimes(1);
|
||||
expect(fetch).toHaveBeenNthCalledWith(1, mockEndpoint, {
|
||||
method: mockVerb,
|
||||
credentials: 'same-origin',
|
||||
headers: {
|
||||
'x-csrf-token': window.booru.csrfToken,
|
||||
'x-requested-with': 'XMLHttpRequest'
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
it('should call native fetch for a get request without explicit method', () => {
|
||||
submitA({ setMethod: false });
|
||||
expect(fetch).toHaveBeenCalledTimes(1);
|
||||
expect(fetch).toHaveBeenNthCalledWith(1, mockEndpoint, {
|
||||
method: 'GET',
|
||||
credentials: 'same-origin',
|
||||
headers: {
|
||||
'x-csrf-token': window.booru.csrfToken,
|
||||
'x-requested-with': 'XMLHttpRequest'
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
it('should emit fetchcomplete event', () => new Promise<void>(resolve => {
|
||||
let a: HTMLAnchorElement | null = null;
|
||||
|
||||
addOneShotEventListener('fetchcomplete', event => {
|
||||
expect(event.target).toBe(a);
|
||||
resolve();
|
||||
});
|
||||
|
||||
a = submitA({ setMethod: true });
|
||||
}));
|
||||
});
|
||||
|
||||
describe('a[data-method]', () => {
|
||||
const submitA = () => {
|
||||
const a = document.createElement('a');
|
||||
a.href = mockEndpoint;
|
||||
a.dataset.method = mockVerb;
|
||||
|
||||
document.documentElement.insertAdjacentElement('beforeend', a);
|
||||
fireEvent.click(a);
|
||||
|
||||
return a;
|
||||
};
|
||||
|
||||
it('should submit a form with the given action', () => new Promise<void>(resolve => {
|
||||
addOneShotEventListener('submit', event => {
|
||||
event.preventDefault();
|
||||
|
||||
const target = assertType(event.target, HTMLFormElement);
|
||||
const [ csrf, method ] = target.querySelectorAll('input');
|
||||
|
||||
expect(csrf.name).toBe('_csrf_token');
|
||||
expect(csrf.value).toBe(window.booru.csrfToken);
|
||||
|
||||
expect(method.name).toBe('_method');
|
||||
expect(method.value).toBe(mockVerb);
|
||||
|
||||
resolve();
|
||||
});
|
||||
|
||||
submitA();
|
||||
}));
|
||||
});
|
||||
|
||||
describe('form[data-remote]', () => {
|
||||
// https://www.benmvp.com/blog/mocking-window-location-methods-jest-jsdom/
|
||||
let oldWindowLocation: Location;
|
||||
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
beforeAll(() => {
|
||||
oldWindowLocation = window.location;
|
||||
delete (window as any).location;
|
||||
|
||||
(window as any).location = Object.defineProperties(
|
||||
{},
|
||||
{
|
||||
...Object.getOwnPropertyDescriptors(oldWindowLocation),
|
||||
reload: {
|
||||
configurable: true,
|
||||
value: vi.fn(),
|
||||
},
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
(window.location.reload as any).mockReset();
|
||||
});
|
||||
/* eslint-enable @typescript-eslint/no-explicit-any */
|
||||
|
||||
afterAll(() => {
|
||||
// restore window.location to the jsdom Location object
|
||||
window.location = oldWindowLocation;
|
||||
});
|
||||
|
||||
const configureForm = () => {
|
||||
const form = document.createElement('form');
|
||||
form.action = mockEndpoint;
|
||||
form.dataset.remote = 'remote';
|
||||
document.documentElement.insertAdjacentElement('beforeend', form);
|
||||
return form;
|
||||
};
|
||||
|
||||
const submitForm = () => {
|
||||
const form = configureForm();
|
||||
form.method = mockVerb;
|
||||
fireEvent.submit(form);
|
||||
return form;
|
||||
};
|
||||
|
||||
it('should call native fetch with the correct parameters (with body)', () => {
|
||||
submitForm();
|
||||
expect(fetch).toHaveBeenCalledTimes(1);
|
||||
expect(fetch).toHaveBeenNthCalledWith(1, mockEndpoint, {
|
||||
method: mockVerb,
|
||||
credentials: 'same-origin',
|
||||
headers: {
|
||||
'x-csrf-token': window.booru.csrfToken,
|
||||
'x-requested-with': 'XMLHttpRequest'
|
||||
},
|
||||
body: new FormData(),
|
||||
});
|
||||
});
|
||||
|
||||
it('should submit a PUT request with put data-method specified', () => {
|
||||
const form = configureForm();
|
||||
form.dataset.method = 'put';
|
||||
fireEvent.submit(form);
|
||||
expect(fetch).toHaveBeenCalledTimes(1);
|
||||
expect(fetch).toHaveBeenNthCalledWith(1, mockEndpoint, {
|
||||
method: 'PUT',
|
||||
credentials: 'same-origin',
|
||||
headers: {
|
||||
'x-csrf-token': window.booru.csrfToken,
|
||||
'x-requested-with': 'XMLHttpRequest'
|
||||
},
|
||||
body: new FormData(),
|
||||
});
|
||||
});
|
||||
|
||||
it('should emit fetchcomplete event', () => new Promise<void>(resolve => {
|
||||
let form: HTMLFormElement | null = null;
|
||||
|
||||
addOneShotEventListener('fetchcomplete', event => {
|
||||
expect(event.target).toBe(form);
|
||||
resolve();
|
||||
});
|
||||
|
||||
form = submitForm();
|
||||
}));
|
||||
|
||||
it('should reload the page on 300 multiple choices response', () => {
|
||||
vi.spyOn(global, 'fetch').mockResolvedValue(new Response('', { status: 300}));
|
||||
|
||||
submitForm();
|
||||
return waitFor(() => expect(window.location.reload).toHaveBeenCalledTimes(1));
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Form utilities', () => {
|
||||
beforeEach(() => {
|
||||
vi.spyOn(window, 'requestAnimationFrame').mockImplementation(cb => {
|
||||
cb(1);
|
||||
return 1;
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
describe('[data-confirm]', () => {
|
||||
const createA = () => {
|
||||
const a = document.createElement('a');
|
||||
a.dataset.confirm = 'confirm';
|
||||
// We cannot use mockEndpoint here since anything except a hash change will log an error in the test output
|
||||
a.href = '#hash';
|
||||
document.documentElement.insertAdjacentElement('beforeend', a);
|
||||
return a;
|
||||
};
|
||||
|
||||
it('should cancel the event on failed confirm', () => {
|
||||
const a = createA();
|
||||
const confirm = vi.spyOn(window, 'confirm').mockImplementationOnce(() => false);
|
||||
const event = new MouseEvent('click', { bubbles: true, cancelable: true });
|
||||
|
||||
expect(fireEvent(a, event)).toBe(false);
|
||||
expect(confirm).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should allow the event on confirm', () => {
|
||||
const a = createA();
|
||||
const confirm = vi.spyOn(window, 'confirm').mockImplementationOnce(() => true);
|
||||
const event = new MouseEvent('click', { bubbles: true, cancelable: true });
|
||||
|
||||
expect(fireEvent(a, event)).toBe(true);
|
||||
expect(confirm).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('[data-disable-with][data-enable-with]', () => {
|
||||
const createFormAndButton = (innerHTML: string, disableWith: string) => {
|
||||
const form = document.createElement('form');
|
||||
form.action = mockEndpoint;
|
||||
|
||||
// jsdom has no implementation for HTMLFormElement.prototype.submit
|
||||
// and will return an error if the event's default isn't prevented
|
||||
form.addEventListener('submit', event => event.preventDefault());
|
||||
|
||||
const button = document.createElement('button');
|
||||
button.type = 'submit';
|
||||
button.innerHTML = innerHTML;
|
||||
button.dataset.disableWith = disableWith;
|
||||
|
||||
form.insertAdjacentElement('beforeend', button);
|
||||
document.documentElement.insertAdjacentElement('beforeend', form);
|
||||
|
||||
return [ form, button ];
|
||||
};
|
||||
|
||||
const submitText = 'Submit';
|
||||
const loadingText = 'Loading...';
|
||||
const submitMarkup = '<em>Submit</em>';
|
||||
const loadingMarkup = '<em>Loading...</em>';
|
||||
|
||||
it('should disable submit button containing a text child on click', () => {
|
||||
const [ , button ] = createFormAndButton(submitText, loadingText);
|
||||
fireEvent.click(button);
|
||||
|
||||
expect(button.textContent).toEqual(' Loading...');
|
||||
expect(button.dataset.enableWith).toEqual(submitText);
|
||||
});
|
||||
|
||||
it('should disable submit button containing element children on click', () => {
|
||||
const [ , button ] = createFormAndButton(submitMarkup, loadingMarkup);
|
||||
fireEvent.click(button);
|
||||
|
||||
expect(button.innerHTML).toEqual(loadingMarkup);
|
||||
expect(button.dataset.enableWith).toEqual(submitMarkup);
|
||||
});
|
||||
|
||||
it('should not disable anything when the form is invalid', () => {
|
||||
const [ form, button ] = createFormAndButton(submitText, loadingText);
|
||||
form.insertAdjacentHTML('afterbegin', '<input type="text" name="valid" required="true" />');
|
||||
fireEvent.click(button);
|
||||
|
||||
expect(button.textContent).toEqual(submitText);
|
||||
expect(button.dataset.enableWith).not.toBeDefined();
|
||||
});
|
||||
|
||||
it('should reset submit button containing a text child on completion', () => {
|
||||
const [ form, button ] = createFormAndButton(submitText, loadingText);
|
||||
fireEvent.click(button);
|
||||
fireEvent(form, new CustomEvent('reset', { bubbles: true }));
|
||||
|
||||
expect(button.textContent?.trim()).toEqual(submitText);
|
||||
expect(button.dataset.enableWith).not.toBeDefined();
|
||||
});
|
||||
|
||||
it('should reset submit button containing element children on completion', () => {
|
||||
const [ form, button ] = createFormAndButton(submitMarkup, loadingMarkup);
|
||||
fireEvent.click(button);
|
||||
fireEvent(form, new CustomEvent('reset', { bubbles: true }));
|
||||
|
||||
expect(button.innerHTML).toEqual(submitMarkup);
|
||||
expect(button.dataset.enableWith).not.toBeDefined();
|
||||
});
|
||||
|
||||
it('should reset disabled form elements on pageshow', () => {
|
||||
const [ , button ] = createFormAndButton(submitText, loadingText);
|
||||
fireEvent.click(button);
|
||||
fireEvent(window, new CustomEvent('pageshow'));
|
||||
|
||||
expect(button.textContent?.trim()).toEqual(submitText);
|
||||
expect(button.dataset.enableWith).not.toBeDefined();
|
||||
});
|
||||
});
|
||||
});
|
BIN
assets/js/__tests__/upload-test.png
Normal file
BIN
assets/js/__tests__/upload-test.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 527 B |
BIN
assets/js/__tests__/upload-test.webm
Normal file
BIN
assets/js/__tests__/upload-test.webm
Normal file
Binary file not shown.
192
assets/js/__tests__/upload.spec.ts
Normal file
192
assets/js/__tests__/upload.spec.ts
Normal file
|
@ -0,0 +1,192 @@
|
|||
import { $, $$, removeEl } from '../utils/dom';
|
||||
import { assertNotNull, assertNotUndefined } from '../utils/assert';
|
||||
|
||||
import { fetchMock } from '../../test/fetch-mock';
|
||||
import { fixEventListeners } from '../../test/fix-event-listeners';
|
||||
import { fireEvent, waitFor } from '@testing-library/dom';
|
||||
import { promises } from 'fs';
|
||||
import { join } from 'path';
|
||||
|
||||
import { setupImageUpload } from '../upload';
|
||||
|
||||
/* eslint-disable camelcase */
|
||||
const scrapeResponse = {
|
||||
description: 'test',
|
||||
images: [
|
||||
{ url: 'http://localhost/images/1', camo_url: 'http://localhost/images/1' },
|
||||
{ url: 'http://localhost/images/2', camo_url: 'http://localhost/images/2' },
|
||||
],
|
||||
source_url: 'http://localhost/images',
|
||||
author_name: 'test',
|
||||
};
|
||||
const nullResponse = null;
|
||||
const errorResponse = {
|
||||
errors: ['Error 1', 'Error 2'],
|
||||
};
|
||||
/* eslint-enable camelcase */
|
||||
|
||||
describe('Image upload form', () => {
|
||||
let mockPng: File;
|
||||
let mockWebm: File;
|
||||
|
||||
beforeAll(async() => {
|
||||
const mockPngPath = join(__dirname, 'upload-test.png');
|
||||
const mockWebmPath = join(__dirname, 'upload-test.webm');
|
||||
|
||||
mockPng = new File([(await promises.readFile(mockPngPath, { encoding: null })).buffer], 'upload-test.png', { type: 'image/png' });
|
||||
mockWebm = new File([(await promises.readFile(mockWebmPath, { encoding: null })).buffer], 'upload-test.webm', { type: 'video/webm' });
|
||||
});
|
||||
|
||||
beforeAll(() => {
|
||||
fetchMock.enableMocks();
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
fetchMock.disableMocks();
|
||||
});
|
||||
|
||||
fixEventListeners(window);
|
||||
|
||||
|
||||
let form: HTMLFormElement;
|
||||
let imgPreviews: HTMLDivElement;
|
||||
let fileField: HTMLInputElement;
|
||||
let remoteUrl: HTMLInputElement;
|
||||
let scraperError: HTMLDivElement;
|
||||
let fetchButton: HTMLButtonElement;
|
||||
let tagsEl: HTMLTextAreaElement;
|
||||
let sourceEl: HTMLInputElement;
|
||||
let descrEl: HTMLTextAreaElement;
|
||||
|
||||
const assertFetchButtonIsDisabled = () => {
|
||||
if (!fetchButton.hasAttribute('disabled')) throw new Error('fetchButton is not disabled');
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
document.documentElement.insertAdjacentHTML('beforeend', `
|
||||
<form action="/images">
|
||||
<div id="js-image-upload-previews"></div>
|
||||
<input id="image_image" name="image[image]" type="file" class="js-scraper" />
|
||||
<input id="image_scraper_url" name="image[scraper_url]" type="url" class="js-scraper" />
|
||||
<button id="js-scraper-preview" type="button">Fetch</button>
|
||||
<div class="field-error-js hidden js-scraper"></div>
|
||||
|
||||
<input id="image_sources_0_source" name="image[sources][0][source]" type="text" class="js-source-url" />
|
||||
<textarea id="image_tag_input" name="image[tag_input]" class="js-image-tags-input"></textarea>
|
||||
<textarea id="image_description" name="image[description]" class="js-image-descr-input"></textarea>
|
||||
</form>
|
||||
`);
|
||||
|
||||
form = assertNotNull($<HTMLFormElement>('form'));
|
||||
imgPreviews = assertNotNull($<HTMLDivElement>('#js-image-upload-previews'));
|
||||
fileField = assertNotUndefined($$<HTMLInputElement>('.js-scraper')[0]);
|
||||
remoteUrl = assertNotUndefined($$<HTMLInputElement>('.js-scraper')[1]);
|
||||
scraperError = assertNotUndefined($$<HTMLInputElement>('.js-scraper')[2]);
|
||||
tagsEl = assertNotNull($<HTMLTextAreaElement>('.js-image-tags-input'));
|
||||
sourceEl = assertNotNull($<HTMLInputElement>('.js-source-url'));
|
||||
descrEl = assertNotNull($<HTMLTextAreaElement>('.js-image-descr-input'));
|
||||
fetchButton = assertNotNull($<HTMLButtonElement>('#js-scraper-preview'));
|
||||
|
||||
setupImageUpload();
|
||||
fetchMock.resetMocks();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
removeEl(form);
|
||||
});
|
||||
|
||||
it('should disable fetch button on empty source', () => {
|
||||
fireEvent.input(remoteUrl, { target: { value: '' } });
|
||||
expect(fetchButton.disabled).toBe(true);
|
||||
});
|
||||
|
||||
it('should enable fetch button on non-empty source', () => {
|
||||
fireEvent.input(remoteUrl, { target: { value: 'http://localhost/images/1' } });
|
||||
expect(fetchButton.disabled).toBe(false);
|
||||
});
|
||||
|
||||
it('should create a preview element when an image file is uploaded', () => {
|
||||
fireEvent.change(fileField, { target: { files: [mockPng] } });
|
||||
return waitFor(() => {
|
||||
assertFetchButtonIsDisabled();
|
||||
expect(imgPreviews.querySelectorAll('img')).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
|
||||
it('should create a preview element when a Matroska video file is uploaded', () => {
|
||||
fireEvent.change(fileField, { target: { files: [mockWebm] } });
|
||||
return waitFor(() => {
|
||||
assertFetchButtonIsDisabled();
|
||||
expect(imgPreviews.querySelectorAll('video')).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
|
||||
it('should block navigation away after an image file is attached, but not after form submission', async() => {
|
||||
fireEvent.change(fileField, { target: { files: [mockPng] } });
|
||||
await waitFor(() => {
|
||||
assertFetchButtonIsDisabled();
|
||||
expect(imgPreviews.querySelectorAll('img')).toHaveLength(1);
|
||||
});
|
||||
|
||||
const failedUnloadEvent = new Event('beforeunload', { cancelable: true });
|
||||
expect(fireEvent(window, failedUnloadEvent)).toBe(false);
|
||||
|
||||
await new Promise<void>(resolve => {
|
||||
form.addEventListener('submit', event => {
|
||||
event.preventDefault();
|
||||
resolve();
|
||||
});
|
||||
fireEvent.submit(form);
|
||||
});
|
||||
|
||||
const succeededUnloadEvent = new Event('beforeunload', { cancelable: true });
|
||||
expect(fireEvent(window, succeededUnloadEvent)).toBe(true);
|
||||
});
|
||||
|
||||
it('should scrape images when the fetch button is clicked', async() => {
|
||||
fetchMock.mockResolvedValue(new Response(JSON.stringify(scrapeResponse), { status: 200 }));
|
||||
fireEvent.input(remoteUrl, { target: { value: 'http://localhost/images/1' } });
|
||||
|
||||
await new Promise<void>(resolve => {
|
||||
tagsEl.addEventListener('addtag', (event: Event) => {
|
||||
expect((event as CustomEvent).detail).toEqual({ name: 'artist:test' });
|
||||
resolve();
|
||||
});
|
||||
|
||||
fireEvent.keyDown(remoteUrl, { keyCode: 13 });
|
||||
});
|
||||
|
||||
await waitFor(() => expect(fetch).toHaveBeenCalledTimes(1));
|
||||
await waitFor(() => expect(imgPreviews.querySelectorAll('img')).toHaveLength(2));
|
||||
|
||||
expect(scraperError.innerHTML).toEqual('');
|
||||
expect(sourceEl.value).toEqual('http://localhost/images');
|
||||
expect(descrEl.value).toEqual('test');
|
||||
});
|
||||
|
||||
it('should show null scrape result', () => {
|
||||
fetchMock.mockResolvedValue(new Response(JSON.stringify(nullResponse), { status: 200 }));
|
||||
|
||||
fireEvent.input(remoteUrl, { target: { value: 'http://localhost/images/1' } });
|
||||
fireEvent.click(fetchButton);
|
||||
|
||||
return waitFor(() => {
|
||||
expect(fetch).toHaveBeenCalledTimes(1);
|
||||
expect(imgPreviews.querySelectorAll('img')).toHaveLength(0);
|
||||
expect(scraperError.innerText).toEqual('No image found at that address.');
|
||||
});
|
||||
});
|
||||
|
||||
it('should show error scrape result', () => {
|
||||
fetchMock.mockResolvedValue(new Response(JSON.stringify(errorResponse), { status: 200 }));
|
||||
|
||||
fireEvent.input(remoteUrl, { target: { value: 'http://localhost/images/1' } });
|
||||
fireEvent.click(fetchButton);
|
||||
|
||||
return waitFor(() => {
|
||||
expect(fetch).toHaveBeenCalledTimes(1);
|
||||
expect(imgPreviews.querySelectorAll('img')).toHaveLength(0);
|
||||
expect(scraperError.innerText).toEqual('Error 1 Error 2');
|
||||
});
|
||||
});
|
||||
});
|
|
@ -5,14 +5,15 @@
|
|||
// the compiled file.
|
||||
//
|
||||
|
||||
// Third-party code, polyfills
|
||||
import './vendor/promise.polyfill';
|
||||
import './vendor/fetch.polyfill';
|
||||
import './vendor/closest.polyfill';
|
||||
import './vendor/customevent.polyfill';
|
||||
import './vendor/es6.polyfill';
|
||||
import './vendor/values-entries.polyfill';
|
||||
|
||||
// Our code
|
||||
import './ujs';
|
||||
import './when-ready';
|
||||
|
||||
// When developing CSS, include the relevant CSS you're working on here
|
||||
// in order to enable HMR (live reload) on it.
|
||||
// Would typically be either the theme file, or any additional file
|
||||
// you later intend to put in the <link> tag.
|
||||
|
||||
// import '../css/themes/default.scss';
|
||||
// import '../css/themes/dark.scss';
|
||||
// import '../css/themes/red.scss';
|
||||
|
|
|
@ -2,8 +2,8 @@
|
|||
* Autocomplete.
|
||||
*/
|
||||
|
||||
import { LocalAutocompleter } from 'utils/local-autocompleter';
|
||||
import { handleError } from 'utils/requests';
|
||||
import { LocalAutocompleter } from './utils/local-autocompleter';
|
||||
import { handleError } from './utils/requests';
|
||||
|
||||
const cache = {};
|
||||
let inputField, originalTerm;
|
||||
|
@ -134,16 +134,19 @@ function listenAutocomplete() {
|
|||
document.addEventListener('input', event => {
|
||||
removeParent();
|
||||
fetchLocalAutocomplete(event);
|
||||
window.clearTimeout(timeout);
|
||||
|
||||
if (localAc !== null && 'ac' in event.target.dataset) {
|
||||
inputField = event.target;
|
||||
originalTerm = `${inputField.value}`.toLowerCase();
|
||||
|
||||
const suggestions = localAc.topK(originalTerm, 5).map(({ name, imageCount }) => ({ label: `${name} (${imageCount})`, value: name }));
|
||||
return showAutocomplete(suggestions, originalTerm, event.target);
|
||||
|
||||
if (suggestions.length) {
|
||||
return showAutocomplete(suggestions, originalTerm, event.target);
|
||||
}
|
||||
}
|
||||
|
||||
window.clearTimeout(timeout);
|
||||
// Use a timeout to delay requests until the user has stopped typing
|
||||
timeout = window.setTimeout(() => {
|
||||
inputField = event.target;
|
||||
|
@ -158,7 +161,11 @@ function listenAutocomplete() {
|
|||
}
|
||||
else {
|
||||
// inputField could get overwritten while the suggestions are being fetched - use event.target
|
||||
getSuggestions(fetchedTerm).then(suggestions => showAutocomplete(suggestions, fetchedTerm, event.target));
|
||||
getSuggestions(fetchedTerm).then(suggestions => {
|
||||
if (fetchedTerm === event.target.value) {
|
||||
showAutocomplete(suggestions, fetchedTerm, event.target);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}, 300);
|
||||
|
|
|
@ -1,11 +0,0 @@
|
|||
// Action Cable provides the framework to deal with WebSockets in Rails.
|
||||
// You can generate new channels where WebSocket features live using the rails generate channel command.
|
||||
let cable;
|
||||
|
||||
function setupCable() {
|
||||
if (window.booru.userIsSignedIn) {
|
||||
cable = ActionCable.createConsumer();
|
||||
}
|
||||
}
|
||||
|
||||
export { cable, setupCable };
|
|
@ -6,6 +6,7 @@ import { $ } from './utils/dom';
|
|||
import { showOwnedComments } from './communications/comment';
|
||||
import { filterNode } from './imagesclientside';
|
||||
import { fetchHtml } from './utils/requests';
|
||||
import { timeAgo } from './timeago';
|
||||
|
||||
function handleError(response) {
|
||||
|
||||
|
@ -91,7 +92,7 @@ function insertParentPost(data, clickedLink, fullComment) {
|
|||
fullComment.previousSibling.classList.add('fetched-comment');
|
||||
|
||||
// Execute timeago on the new comment - it was not present when first run
|
||||
window.booru.timeAgo(fullComment.previousSibling.getElementsByTagName('time'));
|
||||
timeAgo(fullComment.previousSibling.getElementsByTagName('time'));
|
||||
|
||||
// Add class active_reply_link to the clicked link
|
||||
clickedLink.classList.add('active_reply_link');
|
||||
|
@ -125,7 +126,7 @@ function displayComments(container, commentsHtml) {
|
|||
container.innerHTML = commentsHtml;
|
||||
|
||||
// Execute timeago on comments
|
||||
window.booru.timeAgo(document.getElementsByTagName('time'));
|
||||
timeAgo(document.getElementsByTagName('time'));
|
||||
|
||||
// Filter images in the comments
|
||||
filterNode(container);
|
||||
|
|
|
@ -86,10 +86,13 @@ function pickAndResize(elem) {
|
|||
clearEl(elem);
|
||||
}
|
||||
|
||||
const muted = store.get('unmute_videos') ? '' : 'muted';
|
||||
const autoplay = elem.classList.contains('hidden') ? '' : 'autoplay'; // Fix for spoilered image pages
|
||||
|
||||
if (imageFormat === 'mp4') {
|
||||
elem.classList.add('full-height');
|
||||
elem.insertAdjacentHTML('afterbegin',
|
||||
`<video controls autoplay loop muted playsinline preload="auto" id="image-display"
|
||||
`<video controls ${autoplay} loop ${muted} playsinline preload="auto" id="image-display"
|
||||
width="${imageWidth}" height="${imageHeight}">
|
||||
<source src="${uris.webm}" type="video/webm">
|
||||
<source src="${uris.mp4}" type="video/mp4">
|
||||
|
@ -102,7 +105,7 @@ function pickAndResize(elem) {
|
|||
}
|
||||
else if (imageFormat === 'webm') {
|
||||
elem.insertAdjacentHTML('afterbegin',
|
||||
`<video controls autoplay loop muted playsinline id="image-display">
|
||||
`<video controls ${autoplay} loop ${muted} playsinline id="image-display">
|
||||
<source src="${uri}" type="video/webm">
|
||||
<source src="${uri.replace(/webm$/, 'mp4')}" type="video/mp4">
|
||||
<p class="block block--fixed block--warning">
|
||||
|
|
|
@ -1,83 +0,0 @@
|
|||
import { $, $$, disableEl, enableEl, removeEl } from './utils/dom';
|
||||
import { delegate, leftClick } from './utils/events';
|
||||
|
||||
/**
|
||||
* @typedef InputDuplicatorOptions
|
||||
* @property {string} addButtonSelector
|
||||
* @property {string} fieldSelector
|
||||
* @property {string} maxInputCountSelector
|
||||
* @property {string} removeButtonSelector
|
||||
*/
|
||||
|
||||
/**
|
||||
* @param {InputDuplicatorOptions} options
|
||||
*/
|
||||
function inputDuplicatorCreator({
|
||||
addButtonSelector,
|
||||
fieldSelector,
|
||||
maxInputCountSelector,
|
||||
removeButtonSelector
|
||||
}) {
|
||||
const addButton = $(addButtonSelector);
|
||||
if (!addButton) {
|
||||
return;
|
||||
}
|
||||
|
||||
const form = addButton.closest('form');
|
||||
const fieldRemover = (event, target) => {
|
||||
event.preventDefault();
|
||||
|
||||
// Prevent removing the final field element to not "brick" the form
|
||||
const existingFields = $$(fieldSelector, form);
|
||||
if (existingFields.length <= 1) {
|
||||
return;
|
||||
}
|
||||
|
||||
removeEl(target.closest(fieldSelector));
|
||||
enableEl(addButton);
|
||||
};
|
||||
|
||||
delegate(document, 'click', {
|
||||
[removeButtonSelector]: leftClick(fieldRemover)
|
||||
});
|
||||
|
||||
|
||||
const maxOptionCount = parseInt($(maxInputCountSelector, form).innerHTML, 10);
|
||||
addButton.addEventListener('click', e => {
|
||||
e.preventDefault();
|
||||
|
||||
const existingFields = $$(fieldSelector, form);
|
||||
let existingFieldsLength = existingFields.length;
|
||||
if (existingFieldsLength < maxOptionCount) {
|
||||
// The last element matched by the `fieldSelector` will be the last field, make a copy
|
||||
const prevField = existingFields[existingFieldsLength - 1];
|
||||
const prevFieldCopy = prevField.cloneNode(true);
|
||||
const prevFieldCopyInputs = $$('input', prevFieldCopy);
|
||||
prevFieldCopyInputs.forEach(prevFieldCopyInput => {
|
||||
// Reset new input's value
|
||||
prevFieldCopyInput.value = '';
|
||||
prevFieldCopyInput.removeAttribute('value');
|
||||
// Increment sequential attributes of the input
|
||||
['name', 'id'].forEach(attr => {
|
||||
prevFieldCopyInput.setAttribute(attr, prevFieldCopyInput[attr].replace(/\d+/g, `${existingFieldsLength}`));
|
||||
});
|
||||
});
|
||||
|
||||
// Insert copy before the last field's next sibling, or if none, at the end of its parent
|
||||
if (prevField.nextElementSibling) {
|
||||
prevField.parentNode.insertBefore(prevFieldCopy, prevField.nextElementSibling);
|
||||
}
|
||||
else {
|
||||
prevField.parentNode.appendChild(prevFieldCopy);
|
||||
}
|
||||
existingFieldsLength++;
|
||||
}
|
||||
|
||||
// Remove the button if we reached the max number of options
|
||||
if (existingFieldsLength >= maxOptionCount) {
|
||||
disableEl(addButton);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export { inputDuplicatorCreator };
|
76
assets/js/input-duplicator.ts
Normal file
76
assets/js/input-duplicator.ts
Normal file
|
@ -0,0 +1,76 @@
|
|||
import { assertNotNull } from './utils/assert';
|
||||
import { $, $$, disableEl, enableEl, removeEl } from './utils/dom';
|
||||
import { delegate, leftClick } from './utils/events';
|
||||
|
||||
export interface InputDuplicatorOptions {
|
||||
addButtonSelector: string;
|
||||
fieldSelector: string;
|
||||
maxInputCountSelector: string;
|
||||
removeButtonSelector: string;
|
||||
}
|
||||
|
||||
export function inputDuplicatorCreator({
|
||||
addButtonSelector,
|
||||
fieldSelector,
|
||||
maxInputCountSelector,
|
||||
removeButtonSelector
|
||||
}: InputDuplicatorOptions) {
|
||||
const addButton = $<HTMLButtonElement>(addButtonSelector);
|
||||
if (!addButton) {
|
||||
return;
|
||||
}
|
||||
|
||||
const form = assertNotNull(addButton.closest('form'));
|
||||
const fieldRemover = (event: MouseEvent, target: HTMLElement) => {
|
||||
event.preventDefault();
|
||||
|
||||
// Prevent removing the final field element to not "brick" the form
|
||||
const existingFields = $$(fieldSelector, form);
|
||||
if (existingFields.length <= 1) {
|
||||
return;
|
||||
}
|
||||
|
||||
removeEl(assertNotNull(target.closest<HTMLElement>(fieldSelector)));
|
||||
enableEl(addButton);
|
||||
};
|
||||
|
||||
delegate(form, 'click', {
|
||||
[removeButtonSelector]: leftClick(fieldRemover)
|
||||
});
|
||||
|
||||
|
||||
const maxOptionCountElement = assertNotNull($(maxInputCountSelector, form));
|
||||
const maxOptionCount = parseInt(maxOptionCountElement.innerHTML, 10);
|
||||
|
||||
addButton.addEventListener('click', e => {
|
||||
e.preventDefault();
|
||||
|
||||
const existingFields = $$<HTMLElement>(fieldSelector, form);
|
||||
let existingFieldsLength = existingFields.length;
|
||||
|
||||
if (existingFieldsLength < maxOptionCount) {
|
||||
// The last element matched by the `fieldSelector` will be the last field, make a copy
|
||||
const prevField = existingFields[existingFieldsLength - 1];
|
||||
const prevFieldCopy = prevField.cloneNode(true) as HTMLElement;
|
||||
|
||||
$$<HTMLInputElement>('input', prevFieldCopy).forEach(prevFieldCopyInput => {
|
||||
// Reset new input's value
|
||||
prevFieldCopyInput.value = '';
|
||||
prevFieldCopyInput.removeAttribute('value');
|
||||
|
||||
// Increment sequential attributes of the input
|
||||
prevFieldCopyInput.setAttribute('name', prevFieldCopyInput.name.replace(/\d+/g, `${existingFieldsLength}`));
|
||||
prevFieldCopyInput.setAttribute('id', prevFieldCopyInput.id.replace(/\d+/g, `${existingFieldsLength}`));
|
||||
});
|
||||
|
||||
prevField.insertAdjacentElement('afterend', prevFieldCopy);
|
||||
|
||||
existingFieldsLength++;
|
||||
}
|
||||
|
||||
// Remove the button if we reached the max number of options
|
||||
if (existingFieldsLength >= maxOptionCount) {
|
||||
disableEl(addButton);
|
||||
}
|
||||
});
|
||||
}
|
|
@ -1,877 +0,0 @@
|
|||
/**
|
||||
* booru.match_query: A port and modification of the search_parser library for
|
||||
* performing client-side filtering.
|
||||
*/
|
||||
|
||||
const tokenList = [
|
||||
['fuzz', /^~(?:\d+(\.\d+)?|\.\d+)/],
|
||||
['boost', /^\^[-+]?\d+(\.\d+)?/],
|
||||
['quoted_lit', /^\s*"(?:[^"]|\\")+"/],
|
||||
['lparen', /^\s*\(\s*/],
|
||||
['rparen', /^\s*\)\s*/],
|
||||
['and_op', /^\s*(?:&&|AND)\s+/],
|
||||
['and_op', /^\s*,\s*/],
|
||||
['or_op', /^\s*(?:\|\||OR)\s+/],
|
||||
['not_op', /^\s*NOT(?:\s+|(?=\())/],
|
||||
['not_op', /^\s*[!-]\s*/],
|
||||
['space', /^\s+/],
|
||||
['word', /^(?:\\[\s,()^~]|[^\s,()^~])+/],
|
||||
['word', /^(?:\\[\s,()]|[^\s,()])+/]
|
||||
],
|
||||
numberFields = ['id', 'width', 'height', 'aspect_ratio',
|
||||
'comment_count', 'score', 'upvotes', 'downvotes',
|
||||
'faves', 'tag_count'],
|
||||
dateFields = ['created_at'],
|
||||
literalFields = ['tags', 'orig_sha512_hash', 'sha512_hash',
|
||||
'score', 'uploader', 'source_url', 'description'],
|
||||
termSpaceToImageField = {
|
||||
tags: 'data-image-tag-aliases',
|
||||
score: 'data-score',
|
||||
upvotes: 'data-upvotes',
|
||||
downvotes: 'data-downvotes',
|
||||
uploader: 'data-uploader',
|
||||
// Yeah, I don't think this is reasonably supportable.
|
||||
// faved_by: 'data-faved-by',
|
||||
id: 'data-image-id',
|
||||
width: 'data-width',
|
||||
height: 'data-height',
|
||||
/* eslint-disable camelcase */
|
||||
aspect_ratio: 'data-aspect-ratio',
|
||||
comment_count: 'data-comment-count',
|
||||
tag_count: 'data-tag-count',
|
||||
source_url: 'data-source-url',
|
||||
faves: 'data-faves',
|
||||
sha512_hash: 'data-sha512',
|
||||
orig_sha512_hash: 'data-orig-sha512',
|
||||
created_at: 'data-created-at'
|
||||
/* eslint-enable camelcase */
|
||||
};
|
||||
|
||||
|
||||
function SearchTerm(termStr) {
|
||||
this.term = termStr.trim();
|
||||
this.parsed = false;
|
||||
}
|
||||
|
||||
SearchTerm.prototype.append = function(substr) {
|
||||
this.term += substr;
|
||||
this.parsed = false;
|
||||
};
|
||||
|
||||
SearchTerm.prototype.parseRangeField = function(field) {
|
||||
if (numberFields.indexOf(field) !== -1) {
|
||||
return [field, 'eq', 'number'];
|
||||
}
|
||||
|
||||
if (dateFields.indexOf(field) !== -1) {
|
||||
return [field, 'eq', 'date'];
|
||||
}
|
||||
|
||||
const qual = /^(\w+)\.([lg]te?|eq)$/.exec(field);
|
||||
|
||||
if (qual) {
|
||||
if (numberFields.indexOf(qual[1]) !== -1) {
|
||||
return [qual[1], qual[2], 'number'];
|
||||
}
|
||||
|
||||
if (dateFields.indexOf(qual[1]) !== -1) {
|
||||
return [qual[1], qual[2], 'date'];
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
};
|
||||
|
||||
SearchTerm.prototype.parseRelativeDate = function(dateVal, qual) {
|
||||
const match = /(\d+) (second|minute|hour|day|week|month|year)s? ago/.exec(dateVal);
|
||||
const bounds = {
|
||||
second: 1000,
|
||||
minute: 60000,
|
||||
hour: 3600000,
|
||||
day: 86400000,
|
||||
week: 604800000,
|
||||
month: 2592000000,
|
||||
year: 31536000000
|
||||
};
|
||||
|
||||
if (match) {
|
||||
const amount = parseInt(match[1], 10);
|
||||
const scale = bounds[match[2]];
|
||||
|
||||
const now = new Date().getTime();
|
||||
const bottomDate = new Date(now - (amount * scale));
|
||||
const topDate = new Date(now - ((amount - 1) * scale));
|
||||
|
||||
switch (qual) {
|
||||
case 'lte':
|
||||
return [bottomDate, 'lt'];
|
||||
case 'gte':
|
||||
return [bottomDate, 'gte'];
|
||||
case 'lt':
|
||||
return [bottomDate, 'lt'];
|
||||
case 'gt':
|
||||
return [bottomDate, 'gte'];
|
||||
default:
|
||||
return [[bottomDate, topDate], 'eq'];
|
||||
}
|
||||
}
|
||||
else {
|
||||
throw new Error(`Cannot parse date string: ${dateVal}`);
|
||||
}
|
||||
};
|
||||
|
||||
SearchTerm.prototype.parseAbsoluteDate = function(dateVal, qual) {
|
||||
const parseRes = [
|
||||
/^(\d{4})/,
|
||||
/^-(\d{2})/,
|
||||
/^-(\d{2})/,
|
||||
/^(?:\s+|T|t)(\d{2})/,
|
||||
/^:(\d{2})/,
|
||||
/^:(\d{2})/
|
||||
],
|
||||
timeZoneOffset = [0, 0],
|
||||
timeData = [0, 0, 1, 0, 0, 0],
|
||||
origDateVal = dateVal;
|
||||
let topDate = null,
|
||||
i,
|
||||
match,
|
||||
bottomDate = null,
|
||||
localDateVal = origDateVal;
|
||||
|
||||
match = /([+-])(\d{2}):(\d{2})$/.exec(localDateVal);
|
||||
if (match) {
|
||||
timeZoneOffset[0] = parseInt(match[2], 10);
|
||||
timeZoneOffset[1] = parseInt(match[3], 10);
|
||||
if (match[1] === '-') {
|
||||
timeZoneOffset[0] *= -1;
|
||||
timeZoneOffset[1] *= -1;
|
||||
}
|
||||
localDateVal = localDateVal.substr(0, localDateVal.length - 6);
|
||||
}
|
||||
else {
|
||||
localDateVal = localDateVal.replace(/[Zz]$/, '');
|
||||
}
|
||||
|
||||
for (i = 0; i < parseRes.length; i += 1) {
|
||||
if (localDateVal.length === 0) {
|
||||
break;
|
||||
}
|
||||
|
||||
match = parseRes[i].exec(localDateVal);
|
||||
if (match) {
|
||||
if (i === 1) {
|
||||
timeData[i] = parseInt(match[1], 10) - 1;
|
||||
}
|
||||
else {
|
||||
timeData[i] = parseInt(match[1], 10);
|
||||
}
|
||||
localDateVal = localDateVal.substr(
|
||||
match[0].length, localDateVal.length - match[0].length
|
||||
);
|
||||
}
|
||||
else {
|
||||
throw new Error(`Cannot parse date string: ${origDateVal}`);
|
||||
}
|
||||
}
|
||||
|
||||
if (localDateVal.length > 0) {
|
||||
throw new Error(`Cannot parse date string: ${origDateVal}`);
|
||||
}
|
||||
|
||||
// Apply the user-specified time zone offset. The JS Date constructor
|
||||
// is very flexible here.
|
||||
timeData[3] -= timeZoneOffset[0];
|
||||
timeData[4] -= timeZoneOffset[1];
|
||||
|
||||
switch (qual) {
|
||||
case 'lte':
|
||||
timeData[i - 1] += 1;
|
||||
return [Date.UTC.apply(Date, timeData), 'lt'];
|
||||
case 'gte':
|
||||
return [Date.UTC.apply(Date, timeData), 'gte'];
|
||||
case 'lt':
|
||||
return [Date.UTC.apply(Date, timeData), 'lt'];
|
||||
case 'gt':
|
||||
timeData[i - 1] += 1;
|
||||
return [Date.UTC.apply(Date, timeData), 'gte'];
|
||||
default:
|
||||
bottomDate = Date.UTC.apply(Date, timeData);
|
||||
timeData[i - 1] += 1;
|
||||
topDate = Date.UTC.apply(Date, timeData);
|
||||
return [[bottomDate, topDate], 'eq'];
|
||||
}
|
||||
};
|
||||
|
||||
SearchTerm.prototype.parseDate = function(dateVal, qual) {
|
||||
try {
|
||||
return this.parseAbsoluteDate(dateVal, qual);
|
||||
}
|
||||
catch (_) {
|
||||
return this.parseRelativeDate(dateVal, qual);
|
||||
}
|
||||
};
|
||||
|
||||
SearchTerm.prototype.parse = function() {
|
||||
let rangeParsing,
|
||||
candidateTermSpace,
|
||||
termCandidate;
|
||||
|
||||
this.wildcardable = !this.fuzz && !/^"([^"]|\\")+"$/.test(this.term);
|
||||
|
||||
if (!this.wildcardable && !this.fuzz) {
|
||||
this.term = this.term.substr(1, this.term.length - 2);
|
||||
}
|
||||
|
||||
this.term = this._normalizeTerm();
|
||||
|
||||
// N.B.: For the purposes of this parser, boosting effects are ignored.
|
||||
|
||||
// Default.
|
||||
this.termSpace = 'tags';
|
||||
this.termType = 'literal';
|
||||
|
||||
const matchArr = this.term.split(':');
|
||||
|
||||
if (matchArr.length > 1) {
|
||||
candidateTermSpace = matchArr[0];
|
||||
termCandidate = matchArr.slice(1).join(':');
|
||||
rangeParsing = this.parseRangeField(candidateTermSpace);
|
||||
|
||||
if (rangeParsing) {
|
||||
this.termSpace = rangeParsing[0];
|
||||
this.termType = rangeParsing[2];
|
||||
|
||||
if (this.termType === 'date') {
|
||||
rangeParsing = this.parseDate(termCandidate, rangeParsing[1]);
|
||||
this.term = rangeParsing[0];
|
||||
this.compare = rangeParsing[1];
|
||||
}
|
||||
else {
|
||||
this.term = parseFloat(termCandidate);
|
||||
this.compare = rangeParsing[1];
|
||||
}
|
||||
|
||||
this.wildcardable = false;
|
||||
}
|
||||
else if (literalFields.indexOf(candidateTermSpace) !== -1) {
|
||||
this.termType = 'literal';
|
||||
this.term = termCandidate;
|
||||
this.termSpace = candidateTermSpace;
|
||||
}
|
||||
else if (candidateTermSpace === 'my') {
|
||||
this.termType = 'my';
|
||||
this.termSpace = termCandidate;
|
||||
}
|
||||
}
|
||||
|
||||
if (this.wildcardable) {
|
||||
// Transforms wildcard match into regular expression.
|
||||
// A custom NFA with caching may be more sophisticated but not
|
||||
// likely to be faster.
|
||||
this.term = new RegExp(
|
||||
`^${
|
||||
this.term.replace(/([.+^$[\]\\(){}|-])/g, '\\$1')
|
||||
.replace(/([^\\]|[^\\](?:\\\\)+)\*/g, '$1.*')
|
||||
.replace(/^(?:\\\\)*\*/g, '.*')
|
||||
.replace(/([^\\]|[^\\](?:\\\\)+)\?/g, '$1.?')
|
||||
.replace(/^(?:\\\\)*\?/g, '.?')
|
||||
}$`, 'i'
|
||||
);
|
||||
}
|
||||
|
||||
// Update parse status flag to indicate the new properties are ready.
|
||||
this.parsed = true;
|
||||
};
|
||||
|
||||
SearchTerm.prototype._normalizeTerm = function() {
|
||||
if (!this.wildcardable) {
|
||||
return this.term.replace('"', '"');
|
||||
}
|
||||
return this.term.replace(/\\([^*?])/g, '$1');
|
||||
};
|
||||
|
||||
SearchTerm.prototype.fuzzyMatch = function(targetStr) {
|
||||
let targetDistance,
|
||||
i,
|
||||
j,
|
||||
// Work vectors, representing the last three populated
|
||||
// rows of the dynamic programming matrix of the iterative
|
||||
// optimal string alignment calculation.
|
||||
v0 = [],
|
||||
v1 = [],
|
||||
v2 = [],
|
||||
temp;
|
||||
|
||||
if (this.fuzz < 1.0) {
|
||||
targetDistance = targetStr.length * (1.0 - this.fuzz);
|
||||
}
|
||||
else {
|
||||
targetDistance = this.fuzz;
|
||||
}
|
||||
|
||||
const targetStrLower = targetStr.toLowerCase();
|
||||
|
||||
for (i = 0; i <= targetStrLower.length; i += 1) {
|
||||
v1.push(i);
|
||||
}
|
||||
|
||||
for (i = 0; i < this.term.length; i += 1) {
|
||||
v2[0] = i;
|
||||
for (j = 0; j < targetStrLower.length; j += 1) {
|
||||
const cost = this.term[i] === targetStrLower[j] ? 0 : 1;
|
||||
v2[j + 1] = Math.min(
|
||||
// Deletion.
|
||||
v1[j + 1] + 1,
|
||||
// Insertion.
|
||||
v2[j] + 1,
|
||||
// Substitution or No Change.
|
||||
v1[j] + cost
|
||||
);
|
||||
if (i > 1 && j > 1 && this.term[i] === targetStrLower[j - 1] &&
|
||||
targetStrLower[i - 1] === targetStrLower[j]) {
|
||||
v2[j + 1] = Math.min(v2[j], v0[j - 1] + cost);
|
||||
}
|
||||
}
|
||||
// Rotate dem vec pointers bra.
|
||||
temp = v0;
|
||||
v0 = v1;
|
||||
v1 = v2;
|
||||
v2 = temp;
|
||||
}
|
||||
|
||||
return v1[targetStrLower.length] <= targetDistance;
|
||||
};
|
||||
|
||||
SearchTerm.prototype.exactMatch = function(targetStr) {
|
||||
return this.term.toLowerCase() === targetStr.toLowerCase();
|
||||
};
|
||||
|
||||
SearchTerm.prototype.wildcardMatch = function(targetStr) {
|
||||
return this.term.test(targetStr);
|
||||
};
|
||||
|
||||
SearchTerm.prototype.interactionMatch = function(imageID, type, interaction, interactions) {
|
||||
let ret = false;
|
||||
|
||||
interactions.forEach(v => {
|
||||
if (v.image_id === imageID && v.interaction_type === type && (interaction === null || v.value === interaction)) {
|
||||
ret = true;
|
||||
}
|
||||
});
|
||||
|
||||
return ret;
|
||||
};
|
||||
|
||||
SearchTerm.prototype.match = function(target) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-this-alias,consistent-this
|
||||
const ohffs = this;
|
||||
let ret = false,
|
||||
compFunc,
|
||||
numbuh,
|
||||
date;
|
||||
|
||||
if (!this.parsed) {
|
||||
this.parse();
|
||||
}
|
||||
|
||||
if (this.termType === 'literal') {
|
||||
// Literal matching.
|
||||
if (this.fuzz) {
|
||||
compFunc = this.fuzzyMatch;
|
||||
}
|
||||
else if (this.wildcardable) {
|
||||
compFunc = this.wildcardMatch;
|
||||
}
|
||||
else {
|
||||
compFunc = this.exactMatch;
|
||||
}
|
||||
|
||||
if (this.termSpace === 'tags') {
|
||||
target.getAttribute('data-image-tag-aliases').split(', ').every(
|
||||
str => {
|
||||
if (compFunc.call(ohffs, str)) {
|
||||
ret = true;
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
);
|
||||
}
|
||||
else {
|
||||
ret = compFunc.call(
|
||||
this, target.getAttribute(termSpaceToImageField[this.termSpace])
|
||||
);
|
||||
}
|
||||
}
|
||||
else if (this.termType === 'my' && window.booru.interactions.length > 0) {
|
||||
// Should work with most my:conditions except watched.
|
||||
switch (this.termSpace) {
|
||||
case 'faves':
|
||||
ret = this.interactionMatch(target.getAttribute('data-image-id'), 'faved', null, window.booru.interactions);
|
||||
|
||||
break;
|
||||
case 'upvotes':
|
||||
ret = this.interactionMatch(target.getAttribute('data-image-id'), 'voted', 'up', window.booru.interactions);
|
||||
|
||||
break;
|
||||
case 'downvotes':
|
||||
ret = this.interactionMatch(target.getAttribute('data-image-id'), 'voted', 'down', window.booru.interactions);
|
||||
|
||||
break;
|
||||
default:
|
||||
ret = false; // Other my: interactions aren't supported, return false to prevent them from triggering spoiler.
|
||||
|
||||
break;
|
||||
}
|
||||
}
|
||||
else if (this.termType === 'date') {
|
||||
// Date matching.
|
||||
date = new Date(
|
||||
target.getAttribute(termSpaceToImageField[this.termSpace])
|
||||
).getTime();
|
||||
|
||||
switch (this.compare) {
|
||||
// The open-left, closed-right date range specified by the
|
||||
// date/time format limits the types of comparisons that are
|
||||
// done compared to numeric ranges.
|
||||
case 'lt':
|
||||
ret = this.term > date;
|
||||
break;
|
||||
case 'gte':
|
||||
ret = this.term <= date;
|
||||
break;
|
||||
default:
|
||||
ret = this.term[0] <= date && this.term[1] > date;
|
||||
}
|
||||
}
|
||||
else {
|
||||
// Range matching.
|
||||
numbuh = parseFloat(
|
||||
target.getAttribute(termSpaceToImageField[this.termSpace])
|
||||
);
|
||||
|
||||
if (isNaN(this.term)) {
|
||||
ret = false;
|
||||
}
|
||||
else if (this.fuzz) {
|
||||
ret = this.term <= numbuh + this.fuzz &&
|
||||
this.term + this.fuzz >= numbuh;
|
||||
}
|
||||
else {
|
||||
switch (this.compare) {
|
||||
case 'lt':
|
||||
ret = this.term > numbuh;
|
||||
break;
|
||||
case 'gt':
|
||||
ret = this.term < numbuh;
|
||||
break;
|
||||
case 'lte':
|
||||
ret = this.term >= numbuh;
|
||||
break;
|
||||
case 'gte':
|
||||
ret = this.term <= numbuh;
|
||||
break;
|
||||
default:
|
||||
ret = this.term === numbuh;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return ret;
|
||||
};
|
||||
|
||||
function generateLexArray(searchStr) {
|
||||
const opQueue = [],
|
||||
groupNegate = [],
|
||||
tokenStack = [];
|
||||
let searchTerm = null,
|
||||
boost = null,
|
||||
fuzz = null,
|
||||
lparenCtr = 0,
|
||||
negate = false,
|
||||
boostFuzzStr = '',
|
||||
localSearchStr = searchStr;
|
||||
|
||||
while (localSearchStr.length > 0) {
|
||||
// eslint-disable-next-line no-loop-func
|
||||
tokenList.every(tokenArr => {
|
||||
const tokenName = tokenArr[0],
|
||||
tokenRE = tokenArr[1];
|
||||
let match = tokenRE.exec(localSearchStr),
|
||||
op;
|
||||
|
||||
if (match) {
|
||||
match = match[0];
|
||||
|
||||
if (Boolean(searchTerm) && (
|
||||
['and_op', 'or_op'].indexOf(tokenName) !== -1 ||
|
||||
tokenName === 'rparen' && lparenCtr === 0)) {
|
||||
// Set options.
|
||||
searchTerm.boost = boost;
|
||||
searchTerm.fuzz = fuzz;
|
||||
// Push to stack.
|
||||
tokenStack.push(searchTerm);
|
||||
// Reset term and options data.
|
||||
searchTerm = fuzz = boost = null;
|
||||
boostFuzzStr = '';
|
||||
lparenCtr = 0;
|
||||
|
||||
if (negate) {
|
||||
tokenStack.push('not_op');
|
||||
negate = false;
|
||||
}
|
||||
}
|
||||
|
||||
switch (tokenName) {
|
||||
case 'and_op':
|
||||
while (opQueue[0] === 'and_op') {
|
||||
tokenStack.push(opQueue.shift());
|
||||
}
|
||||
opQueue.unshift('and_op');
|
||||
break;
|
||||
case 'or_op':
|
||||
while (opQueue[0] === 'and_op' || opQueue[0] === 'or_op') {
|
||||
tokenStack.push(opQueue.shift());
|
||||
}
|
||||
opQueue.unshift('or_op');
|
||||
break;
|
||||
case 'not_op':
|
||||
if (searchTerm) {
|
||||
// We're already inside a search term, so it does
|
||||
// not apply, obv.
|
||||
searchTerm.append(match);
|
||||
}
|
||||
else {
|
||||
negate = !negate;
|
||||
}
|
||||
break;
|
||||
case 'lparen':
|
||||
if (searchTerm) {
|
||||
// If we are inside the search term, do not error
|
||||
// out just yet; instead, consider it as part of
|
||||
// the search term, as a user convenience.
|
||||
searchTerm.append(match);
|
||||
lparenCtr += 1;
|
||||
}
|
||||
else {
|
||||
opQueue.unshift('lparen');
|
||||
groupNegate.push(negate);
|
||||
negate = false;
|
||||
}
|
||||
break;
|
||||
case 'rparen':
|
||||
if (lparenCtr > 0) {
|
||||
if (searchTerm) {
|
||||
searchTerm.append(match);
|
||||
}
|
||||
else {
|
||||
searchTerm = new SearchTerm(match);
|
||||
}
|
||||
lparenCtr -= 1;
|
||||
}
|
||||
else {
|
||||
while (opQueue.length) {
|
||||
op = opQueue.shift();
|
||||
if (op === 'lparen') {
|
||||
break;
|
||||
}
|
||||
tokenStack.push(op);
|
||||
}
|
||||
if (groupNegate.length > 0 && groupNegate.pop()) {
|
||||
tokenStack.push('not_op');
|
||||
}
|
||||
}
|
||||
break;
|
||||
case 'fuzz':
|
||||
if (searchTerm) {
|
||||
// For this and boost operations, we store the
|
||||
// current match so far to a temporary string in
|
||||
// case this is actually inside the term.
|
||||
fuzz = parseFloat(match.substr(1));
|
||||
boostFuzzStr += match;
|
||||
}
|
||||
else {
|
||||
searchTerm = new SearchTerm(match);
|
||||
}
|
||||
break;
|
||||
case 'boost':
|
||||
if (searchTerm) {
|
||||
boost = match.substr(1);
|
||||
boostFuzzStr += match;
|
||||
}
|
||||
else {
|
||||
searchTerm = new SearchTerm(match);
|
||||
}
|
||||
break;
|
||||
case 'quoted_lit':
|
||||
if (searchTerm) {
|
||||
searchTerm.append(match);
|
||||
}
|
||||
else {
|
||||
searchTerm = new SearchTerm(match);
|
||||
}
|
||||
break;
|
||||
case 'word':
|
||||
if (searchTerm) {
|
||||
if (fuzz || boost) {
|
||||
boost = fuzz = null;
|
||||
searchTerm.append(boostFuzzStr);
|
||||
boostFuzzStr = '';
|
||||
}
|
||||
searchTerm.append(match);
|
||||
}
|
||||
else {
|
||||
searchTerm = new SearchTerm(match);
|
||||
}
|
||||
break;
|
||||
default:
|
||||
// Append extra spaces within search terms.
|
||||
if (searchTerm) {
|
||||
searchTerm.append(match);
|
||||
}
|
||||
}
|
||||
|
||||
// Truncate string and restart the token tests.
|
||||
localSearchStr = localSearchStr.substr(
|
||||
match.length, localSearchStr.length - match.length
|
||||
);
|
||||
|
||||
// Break since we have found a match.
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
// Append final tokens to the stack, starting with the search term.
|
||||
if (searchTerm) {
|
||||
searchTerm.boost = boost;
|
||||
searchTerm.fuzz = fuzz;
|
||||
tokenStack.push(searchTerm);
|
||||
}
|
||||
if (negate) {
|
||||
tokenStack.push('not_op');
|
||||
}
|
||||
|
||||
if (opQueue.indexOf('rparen') !== -1 ||
|
||||
opQueue.indexOf('lparen') !== -1) {
|
||||
throw new Error('Mismatched parentheses.');
|
||||
}
|
||||
|
||||
// Memory-efficient concatenation of remaining operators queue to the
|
||||
// token stack.
|
||||
tokenStack.push.apply(tokenStack, opQueue);
|
||||
|
||||
return tokenStack;
|
||||
}
|
||||
|
||||
function parseTokens(lexicalArray) {
|
||||
const operandStack = [];
|
||||
let negate, op1, op2;
|
||||
lexicalArray.forEach((token, i) => {
|
||||
if (token !== 'not_op') {
|
||||
negate = lexicalArray[i + 1] === 'not_op';
|
||||
|
||||
if (typeof token === 'string') {
|
||||
op2 = operandStack.pop();
|
||||
op1 = operandStack.pop();
|
||||
|
||||
if (typeof op1 === 'undefined' || typeof op2 === 'undefined') {
|
||||
throw new Error('Missing operand.');
|
||||
}
|
||||
|
||||
operandStack.push(new SearchAST(token, negate, op1, op2));
|
||||
}
|
||||
else {
|
||||
if (negate) {
|
||||
operandStack.push(new SearchAST(null, true, token));
|
||||
}
|
||||
else {
|
||||
operandStack.push(token);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (operandStack.length > 1) {
|
||||
throw new Error('Missing operator.');
|
||||
}
|
||||
|
||||
op1 = operandStack.pop();
|
||||
|
||||
if (typeof op1 === 'undefined') {
|
||||
return new SearchAST();
|
||||
}
|
||||
|
||||
if (isTerminal(op1)) {
|
||||
return new SearchAST(null, false, op1);
|
||||
}
|
||||
|
||||
return op1;
|
||||
}
|
||||
|
||||
function parseSearch(searchStr) {
|
||||
return parseTokens(generateLexArray(searchStr));
|
||||
}
|
||||
|
||||
function isTerminal(operand) {
|
||||
// Whether operand is a terminal SearchTerm.
|
||||
return typeof operand.term !== 'undefined';
|
||||
}
|
||||
|
||||
function SearchAST(op, negate, leftOperand, rightOperand) {
|
||||
this.negate = Boolean(negate);
|
||||
this.leftOperand = leftOperand || null;
|
||||
this.op = op || null;
|
||||
this.rightOperand = rightOperand || null;
|
||||
}
|
||||
|
||||
function combineOperands(ast1, ast2, parentAST) {
|
||||
let localAst1;
|
||||
if (parentAST.op === 'and_op') {
|
||||
localAst1 = ast1 && ast2;
|
||||
}
|
||||
else {
|
||||
localAst1 = ast1 || ast2;
|
||||
}
|
||||
|
||||
if (parentAST.negate) {
|
||||
return !localAst1;
|
||||
}
|
||||
|
||||
return localAst1;
|
||||
}
|
||||
|
||||
// Evaluation of the AST in regard to a target image
|
||||
SearchAST.prototype.hitsImage = function(image) {
|
||||
const treeStack = [];
|
||||
// Left side node.
|
||||
// eslint-disable-next-line @typescript-eslint/no-this-alias,consistent-this
|
||||
let ast1 = this,
|
||||
// Right side node.
|
||||
ast2,
|
||||
// Parent node of the current subtree.
|
||||
parentAST;
|
||||
|
||||
// Build the initial tree node traversal stack, of the "far left" side.
|
||||
// The general idea is to accumulate from the bottom and make stacks
|
||||
// of right-hand subtrees that themselves accumulate upward. The left
|
||||
// side node, ast1, will always be a Boolean representing the left-side
|
||||
// evaluated value, up to the current subtree (parentAST).
|
||||
while (!isTerminal(ast1)) {
|
||||
treeStack.push(ast1);
|
||||
ast1 = ast1.leftOperand;
|
||||
|
||||
if (!ast1) {
|
||||
// Empty tree.
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
ast1 = ast1.match(image);
|
||||
treeStack.push(ast1);
|
||||
|
||||
while (treeStack.length > 0) {
|
||||
parentAST = treeStack.pop();
|
||||
|
||||
if (parentAST === null) {
|
||||
// We are at the end of a virtual stack for a right node
|
||||
// subtree. We switch the result of this stack from left
|
||||
// (ast1) to right (ast2), pop the original left node,
|
||||
// and finally pop the parent subtree itself. See near the
|
||||
// end of this function to view how this is populated.
|
||||
ast2 = ast1;
|
||||
ast1 = treeStack.pop();
|
||||
parentAST = treeStack.pop();
|
||||
}
|
||||
else {
|
||||
// First, check to see if we can do a short-circuit
|
||||
// evaluation to skip evaluating the right side entirely.
|
||||
if (!ast1 && parentAST.op === 'and_op') {
|
||||
ast1 = parentAST.negate;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (ast1 && parentAST.op === 'or_op') {
|
||||
ast1 = !parentAST.negate;
|
||||
continue;
|
||||
}
|
||||
|
||||
// If we are not at the end of a stack, grab the right
|
||||
// node. The left node (ast1) is currently a terminal Boolean.
|
||||
ast2 = parentAST.rightOperand;
|
||||
}
|
||||
|
||||
if (typeof ast2 === 'boolean') {
|
||||
ast1 = combineOperands(ast1, ast2, parentAST);
|
||||
}
|
||||
else if (!ast2) {
|
||||
// A subtree with a single node. This is generally the case
|
||||
// for negated tokens.
|
||||
if (parentAST.negate) {
|
||||
ast1 = !ast1;
|
||||
}
|
||||
}
|
||||
else if (isTerminal(ast2)) {
|
||||
// We are finally at a leaf and can evaluate.
|
||||
ast2 = ast2.match(image);
|
||||
ast1 = combineOperands(ast1, ast2, parentAST);
|
||||
}
|
||||
else {
|
||||
// We are at a node whose right side is a new subtree.
|
||||
// We will build a new "virtual" stack, but instead of
|
||||
// building a new Array, we can insert a null object as a
|
||||
// marker.
|
||||
treeStack.push(parentAST, ast1, null);
|
||||
|
||||
do {
|
||||
treeStack.push(ast2);
|
||||
ast2 = ast2.leftOperand;
|
||||
} while (!isTerminal(ast2));
|
||||
|
||||
ast1 = ast2.match(image);
|
||||
}
|
||||
}
|
||||
|
||||
return ast1;
|
||||
};
|
||||
|
||||
SearchAST.prototype.dumpTree = function() {
|
||||
// Dumps to string a simple diagram of the syntax tree structure
|
||||
// (starting with this object as the root) for debugging purposes.
|
||||
const retStrArr = [],
|
||||
treeQueue = [['', this]];
|
||||
let treeArr,
|
||||
prefix,
|
||||
tree;
|
||||
|
||||
while (treeQueue.length > 0) {
|
||||
treeArr = treeQueue.shift();
|
||||
prefix = treeArr[0];
|
||||
tree = treeArr[1];
|
||||
|
||||
if (isTerminal(tree)) {
|
||||
retStrArr.push(`${prefix}-> ${tree.term}`);
|
||||
}
|
||||
else {
|
||||
if (tree.negate) {
|
||||
retStrArr.push(`${prefix}+ NOT_OP`);
|
||||
prefix += '\t';
|
||||
}
|
||||
if (tree.op) {
|
||||
retStrArr.push(`${prefix}+ ${tree.op.toUpperCase()}`);
|
||||
prefix += '\t';
|
||||
treeQueue.unshift([prefix, tree.rightOperand]);
|
||||
treeQueue.unshift([prefix, tree.leftOperand]);
|
||||
}
|
||||
else {
|
||||
treeQueue.unshift([prefix, tree.leftOperand]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return retStrArr.join('\n');
|
||||
};
|
||||
|
||||
export default parseSearch;
|
15
assets/js/match_query.ts
Normal file
15
assets/js/match_query.ts
Normal file
|
@ -0,0 +1,15 @@
|
|||
import { defaultMatcher } from './query/matcher';
|
||||
import { generateLexArray } from './query/lex';
|
||||
import { parseTokens } from './query/parse';
|
||||
import { getAstMatcherForTerm } from './query/term';
|
||||
|
||||
function parseWithDefaultMatcher(term: string, fuzz: number) {
|
||||
return getAstMatcherForTerm(term, fuzz, defaultMatcher);
|
||||
}
|
||||
|
||||
function parseSearch(query: string) {
|
||||
const tokens = generateLexArray(query, parseWithDefaultMatcher);
|
||||
return parseTokens(tokens);
|
||||
}
|
||||
|
||||
export default parseSearch;
|
|
@ -11,8 +11,7 @@ function formResult({target, detail}) {
|
|||
|
||||
const elements = {
|
||||
'#description-form': '.image-description',
|
||||
'#uploader-form': '.image_uploader',
|
||||
'#source-form': '#image-source'
|
||||
'#uploader-form': '.image_uploader'
|
||||
};
|
||||
|
||||
function showResult(resultEl, formEl, response) {
|
||||
|
|
|
@ -17,7 +17,7 @@ function makeRequest(verb) {
|
|||
function bindSubscriptionLinks() {
|
||||
delegate(document, 'fetchcomplete', {
|
||||
'.js-subscription-link': event => {
|
||||
const target = $('#js-subscription-target');
|
||||
const target = event.target.closest('.js-subscription-target');
|
||||
event.detail.text().then(text => {
|
||||
target.outerHTML = text;
|
||||
});
|
||||
|
|
106
assets/js/query/__tests__/date.spec.ts
Normal file
106
assets/js/query/__tests__/date.spec.ts
Normal file
|
@ -0,0 +1,106 @@
|
|||
import { makeDateMatcher } from '../date';
|
||||
|
||||
function daysAgo(days: number) {
|
||||
return new Date(Date.now() - days * 86400000).toISOString();
|
||||
}
|
||||
|
||||
describe('Date parsing', () => {
|
||||
it('should match relative dates (upper bound)', () => {
|
||||
const matcher = makeDateMatcher('3 days ago', 'lte');
|
||||
|
||||
expect(matcher(daysAgo(4), 'created_at', 0)).toBe(true);
|
||||
expect(matcher(daysAgo(2), 'created_at', 0)).toBe(false);
|
||||
});
|
||||
|
||||
it('should match relative dates (lower bound)', () => {
|
||||
const matcher = makeDateMatcher('3 days ago', 'gte');
|
||||
|
||||
expect(matcher(daysAgo(4), 'created_at', 0)).toBe(false);
|
||||
expect(matcher(daysAgo(2), 'created_at', 0)).toBe(true);
|
||||
});
|
||||
|
||||
it('should match absolute date ranges', () => {
|
||||
const ltMatcher = makeDateMatcher('2025', 'lt');
|
||||
const gtMatcher = makeDateMatcher('2023', 'gt');
|
||||
|
||||
expect(ltMatcher(new Date(Date.UTC(2025, 5, 21)).toISOString(), 'created_at', 0)).toBe(false);
|
||||
expect(ltMatcher(new Date(Date.UTC(2024, 5, 21)).toISOString(), 'created_at', 0)).toBe(true);
|
||||
expect(ltMatcher(new Date(Date.UTC(2023, 5, 21)).toISOString(), 'created_at', 0)).toBe(true);
|
||||
|
||||
expect(gtMatcher(new Date(Date.UTC(2025, 5, 21)).toISOString(), 'created_at', 0)).toBe(true);
|
||||
expect(gtMatcher(new Date(Date.UTC(2024, 5, 21)).toISOString(), 'created_at', 0)).toBe(true);
|
||||
expect(gtMatcher(new Date(Date.UTC(2023, 5, 21)).toISOString(), 'created_at', 0)).toBe(false);
|
||||
});
|
||||
|
||||
it('should match absolute dates through years', () => {
|
||||
const matcher = makeDateMatcher('2024', 'eq');
|
||||
|
||||
expect(matcher(new Date(Date.UTC(2025, 5, 21)).toISOString(), 'created_at', 0)).toBe(false);
|
||||
expect(matcher(new Date(Date.UTC(2024, 5, 21)).toISOString(), 'created_at', 0)).toBe(true);
|
||||
expect(matcher(new Date(Date.UTC(2023, 5, 21)).toISOString(), 'created_at', 0)).toBe(false);
|
||||
});
|
||||
|
||||
it('should match absolute dates through months', () => {
|
||||
const matcher = makeDateMatcher('2024-06', 'eq');
|
||||
|
||||
expect(matcher(new Date(Date.UTC(2024, 6, 21)).toISOString(), 'created_at', 0)).toBe(false);
|
||||
expect(matcher(new Date(Date.UTC(2024, 5, 21)).toISOString(), 'created_at', 0)).toBe(true);
|
||||
expect(matcher(new Date(Date.UTC(2024, 4, 21)).toISOString(), 'created_at', 0)).toBe(false);
|
||||
});
|
||||
|
||||
it('should match absolute dates through days', () => {
|
||||
const matcher = makeDateMatcher('2024-06-21', 'eq');
|
||||
|
||||
expect(matcher(new Date(Date.UTC(2024, 5, 22)).toISOString(), 'created_at', 0)).toBe(false);
|
||||
expect(matcher(new Date(Date.UTC(2024, 5, 21)).toISOString(), 'created_at', 0)).toBe(true);
|
||||
expect(matcher(new Date(Date.UTC(2024, 5, 20)).toISOString(), 'created_at', 0)).toBe(false);
|
||||
});
|
||||
|
||||
it('should match absolute dates through hours', () => {
|
||||
const matcher = makeDateMatcher('2024-06-21T06', 'eq');
|
||||
|
||||
expect(matcher(new Date(Date.UTC(2024, 5, 21, 7)).toISOString(), 'created_at', 0)).toBe(false);
|
||||
expect(matcher(new Date(Date.UTC(2024, 5, 21, 6)).toISOString(), 'created_at', 0)).toBe(true);
|
||||
expect(matcher(new Date(Date.UTC(2024, 5, 21, 5)).toISOString(), 'created_at', 0)).toBe(false);
|
||||
});
|
||||
|
||||
it('should match absolute dates through minutes', () => {
|
||||
const matcher = makeDateMatcher('2024-06-21T06:21', 'eq');
|
||||
|
||||
expect(matcher(new Date(Date.UTC(2024, 5, 21, 6, 22)).toISOString(), 'created_at', 0)).toBe(false);
|
||||
expect(matcher(new Date(Date.UTC(2024, 5, 21, 6, 21)).toISOString(), 'created_at', 0)).toBe(true);
|
||||
expect(matcher(new Date(Date.UTC(2024, 5, 21, 6, 20)).toISOString(), 'created_at', 0)).toBe(false);
|
||||
});
|
||||
|
||||
it('should match absolute dates through seconds', () => {
|
||||
const matcher = makeDateMatcher('2024-06-21T06:21:30Z', 'eq');
|
||||
|
||||
expect(matcher(new Date(Date.UTC(2024, 5, 21, 6, 21, 31)).toISOString(), 'created_at', 0)).toBe(false);
|
||||
expect(matcher(new Date(Date.UTC(2024, 5, 21, 6, 21, 30)).toISOString(), 'created_at', 0)).toBe(true);
|
||||
expect(matcher(new Date(Date.UTC(2024, 5, 21, 6, 21, 29)).toISOString(), 'created_at', 0)).toBe(false);
|
||||
});
|
||||
|
||||
it('should match absolute dates through seconds with positive timezone offset', () => {
|
||||
const matcher = makeDateMatcher('2024-06-21T06:21:30+01:30', 'eq');
|
||||
|
||||
expect(matcher(new Date(Date.UTC(2024, 5, 21, 4, 51, 31)).toISOString(), 'created_at', 0)).toBe(false);
|
||||
expect(matcher(new Date(Date.UTC(2024, 5, 21, 4, 51, 30)).toISOString(), 'created_at', 0)).toBe(true);
|
||||
expect(matcher(new Date(Date.UTC(2024, 5, 21, 4, 51, 29)).toISOString(), 'created_at', 0)).toBe(false);
|
||||
});
|
||||
|
||||
it('should match absolute dates through seconds with negative timezone offset', () => {
|
||||
const matcher = makeDateMatcher('2024-06-21T06:21:30-01:30', 'eq');
|
||||
|
||||
expect(matcher(new Date(Date.UTC(2024, 5, 21, 7, 51, 31)).toISOString(), 'created_at', 0)).toBe(false);
|
||||
expect(matcher(new Date(Date.UTC(2024, 5, 21, 7, 51, 30)).toISOString(), 'created_at', 0)).toBe(true);
|
||||
expect(matcher(new Date(Date.UTC(2024, 5, 21, 7, 51, 29)).toISOString(), 'created_at', 0)).toBe(false);
|
||||
});
|
||||
|
||||
it('should not match malformed absolute date expressions', () => {
|
||||
expect(() => makeDateMatcher('2024-06-21T06:21:30+01:3020', 'eq')).toThrow('Cannot parse date string: 2024-06-21T06:21:30+01:3020');
|
||||
});
|
||||
|
||||
it('should not match malformed relative date expressions', () => {
|
||||
expect(() => makeDateMatcher('3 test failures ago', 'eq')).toThrow('Cannot parse date string: 3 test failures ago');
|
||||
});
|
||||
});
|
177
assets/js/query/__tests__/lex.spec.ts
Normal file
177
assets/js/query/__tests__/lex.spec.ts
Normal file
|
@ -0,0 +1,177 @@
|
|||
import { generateLexArray } from '../lex';
|
||||
import { AstMatcher } from '../types';
|
||||
|
||||
describe('Lexical analysis', () => {
|
||||
let terms: string[];
|
||||
let fuzzes: number[];
|
||||
let boosts: number[];
|
||||
|
||||
function noMatch() {
|
||||
return false;
|
||||
}
|
||||
|
||||
function parseTerm(term: string, fuzz: number, boost: number): AstMatcher {
|
||||
terms.push(term);
|
||||
fuzzes.push(fuzz);
|
||||
boosts.push(boost);
|
||||
|
||||
return noMatch;
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
terms = [];
|
||||
fuzzes = [];
|
||||
boosts = [];
|
||||
});
|
||||
|
||||
it('should lex single terms', () => {
|
||||
const array = generateLexArray('safe', parseTerm);
|
||||
expect(terms).toEqual(['safe']);
|
||||
expect(fuzzes).toEqual([0]);
|
||||
expect(boosts).toEqual([1]);
|
||||
expect(array).toEqual([noMatch]);
|
||||
});
|
||||
|
||||
it('should lex single terms with fuzzing', () => {
|
||||
const array = generateLexArray('safe~4', parseTerm);
|
||||
expect(terms).toEqual(['safe']);
|
||||
expect(fuzzes).toEqual([4]);
|
||||
expect(boosts).toEqual([1]);
|
||||
expect(array).toEqual([noMatch]);
|
||||
});
|
||||
|
||||
it('should lex single terms with boosting', () => {
|
||||
const array = generateLexArray('safe^2', parseTerm);
|
||||
expect(terms).toEqual(['safe']);
|
||||
expect(fuzzes).toEqual([0]);
|
||||
expect(boosts).toEqual([2]);
|
||||
expect(array).toEqual([noMatch]);
|
||||
});
|
||||
|
||||
it('should lex quoted single terms', () => {
|
||||
const array = generateLexArray('"safe"', parseTerm);
|
||||
expect(terms).toEqual(['"safe"']);
|
||||
expect(fuzzes).toEqual([0]);
|
||||
expect(boosts).toEqual([1]);
|
||||
expect(array).toEqual([noMatch]);
|
||||
});
|
||||
|
||||
it('should lex multiple terms connected by AND', () => {
|
||||
const array = generateLexArray('safe AND solo', parseTerm);
|
||||
expect(terms).toEqual(['safe', 'solo']);
|
||||
expect(fuzzes).toEqual([0, 0]);
|
||||
expect(boosts).toEqual([1, 1]);
|
||||
expect(array).toEqual([noMatch, noMatch, 'and_op']);
|
||||
});
|
||||
|
||||
it('should lex multiple terms connected by OR', () => {
|
||||
const array = generateLexArray('safe OR solo', parseTerm);
|
||||
expect(terms).toEqual(['safe', 'solo']);
|
||||
expect(fuzzes).toEqual([0, 0]);
|
||||
expect(boosts).toEqual([1, 1]);
|
||||
expect(array).toEqual([noMatch, noMatch, 'or_op']);
|
||||
});
|
||||
|
||||
it('should prioritize AND over OR', () => {
|
||||
const array = generateLexArray('safe OR solo AND fluttershy', parseTerm);
|
||||
expect(terms).toEqual(['safe', 'solo', 'fluttershy']);
|
||||
expect(array).toEqual([noMatch, noMatch, noMatch, 'and_op', 'or_op']);
|
||||
});
|
||||
|
||||
it('should override ordering when using parenthetical expressions', () => {
|
||||
const array = generateLexArray('(safe OR solo) AND fluttershy', parseTerm);
|
||||
expect(terms).toEqual(['safe', 'solo', 'fluttershy']);
|
||||
expect(fuzzes).toEqual([0, 0, 0]);
|
||||
expect(boosts).toEqual([1, 1, 1]);
|
||||
expect(array).toEqual([noMatch, noMatch, 'or_op', noMatch, 'and_op']);
|
||||
});
|
||||
|
||||
it('should lex unary NOT', () => {
|
||||
const array = generateLexArray('NOT safe', parseTerm);
|
||||
expect(terms).toEqual(['safe']);
|
||||
expect(array).toEqual([noMatch, 'not_op']);
|
||||
});
|
||||
|
||||
it('should prioritize NOT over AND', () => {
|
||||
const array = generateLexArray('NOT safe AND solo', parseTerm);
|
||||
expect(terms).toEqual(['safe', 'solo']);
|
||||
expect(array).toEqual([noMatch, 'not_op', noMatch, 'and_op']);
|
||||
});
|
||||
|
||||
it('should prioritize NOT over OR', () => {
|
||||
const array = generateLexArray('NOT safe OR solo', parseTerm);
|
||||
expect(terms).toEqual(['safe', 'solo']);
|
||||
expect(array).toEqual([noMatch, 'not_op', noMatch, 'or_op']);
|
||||
});
|
||||
|
||||
it('should allow group negation', () => {
|
||||
const array = generateLexArray('NOT (safe OR solo)', parseTerm);
|
||||
expect(terms).toEqual(['safe', 'solo']);
|
||||
expect(array).toEqual([noMatch, noMatch, 'or_op', 'not_op']);
|
||||
});
|
||||
|
||||
it('should allow NOT expressions inside terms', () => {
|
||||
const array = generateLexArray('this NOT that', parseTerm);
|
||||
expect(terms).toEqual(['this NOT that']);
|
||||
expect(array).toEqual([noMatch]);
|
||||
});
|
||||
|
||||
it('should allow parenthetical expressions inside terms', () => {
|
||||
const array = generateLexArray('rose (flower)', parseTerm);
|
||||
expect(terms).toEqual(['rose (flower)']);
|
||||
expect(array).toEqual([noMatch]);
|
||||
});
|
||||
|
||||
it('should handle fuzz expressions in place of terms', () => {
|
||||
const array = generateLexArray('~2', parseTerm);
|
||||
expect(terms).toEqual(['~2']);
|
||||
expect(array).toEqual([noMatch]);
|
||||
});
|
||||
|
||||
it('should handle boost expressions in place of terms', () => {
|
||||
const array = generateLexArray('^2', parseTerm);
|
||||
expect(terms).toEqual(['^2']);
|
||||
expect(array).toEqual([noMatch]);
|
||||
});
|
||||
|
||||
it('should handle fuzz expressions in terms', () => {
|
||||
const array = generateLexArray('two~2~two', parseTerm);
|
||||
expect(terms).toEqual(['two~2~two']);
|
||||
expect(array).toEqual([noMatch]);
|
||||
});
|
||||
|
||||
it('should handle boost expressions in terms', () => {
|
||||
const array = generateLexArray('two^2^two', parseTerm);
|
||||
expect(terms).toEqual(['two^2^two']);
|
||||
expect(array).toEqual([noMatch]);
|
||||
});
|
||||
|
||||
it('should handle quotes in terms', () => {
|
||||
const array = generateLexArray('a "quoted" expression', parseTerm);
|
||||
expect(terms).toEqual(['a "quoted" expression']);
|
||||
expect(array).toEqual([noMatch]);
|
||||
});
|
||||
|
||||
it('should allow extra spaces in terms', () => {
|
||||
const array = generateLexArray('twilight sparkle', parseTerm);
|
||||
expect(terms).toEqual(['twilight sparkle']);
|
||||
expect(array).toEqual([noMatch]);
|
||||
});
|
||||
|
||||
it('should collapse consecutive AND expressions', () => {
|
||||
const array = generateLexArray('safe AND solo AND fluttershy AND applejack', parseTerm);
|
||||
expect(terms).toEqual(['safe', 'solo', 'fluttershy', 'applejack']);
|
||||
expect(array).toEqual([noMatch, noMatch, 'and_op', noMatch, 'and_op', noMatch, 'and_op']);
|
||||
});
|
||||
|
||||
it('should collapse consecutive OR expressions', () => {
|
||||
const array = generateLexArray('safe OR solo OR fluttershy OR applejack', parseTerm);
|
||||
expect(terms).toEqual(['safe', 'solo', 'fluttershy', 'applejack']);
|
||||
expect(array).toEqual([noMatch, noMatch, 'or_op', noMatch, 'or_op', noMatch, 'or_op']);
|
||||
});
|
||||
|
||||
it('should throw exception on mismatched parentheses', () => {
|
||||
expect(() => generateLexArray('(safe OR solo AND fluttershy', parseTerm)).toThrow('Mismatched parentheses.');
|
||||
// expect(() => generateLexArray(')bad', parseTerm)).toThrow('Mismatched parentheses.');
|
||||
});
|
||||
});
|
36
assets/js/query/__tests__/literal.spec.ts
Normal file
36
assets/js/query/__tests__/literal.spec.ts
Normal file
|
@ -0,0 +1,36 @@
|
|||
import { makeLiteralMatcher } from '../literal';
|
||||
|
||||
describe('Literal field parsing', () => {
|
||||
it('should handle exact matching in arrayed fields', () => {
|
||||
const matcher = makeLiteralMatcher('safe', 0, false);
|
||||
expect(matcher('safe, solo', 'tags', 0)).toBe(true);
|
||||
expect(matcher('solo', 'tags', 0)).toBe(false);
|
||||
});
|
||||
|
||||
it('should handle exact matching in non-arrayed fields', () => {
|
||||
const matcher = makeLiteralMatcher('safe', 0, false);
|
||||
expect(matcher('safe, solo', 'description', 0)).toBe(false);
|
||||
expect(matcher('safe', 'description', 0)).toBe(true);
|
||||
expect(matcher('solo', 'description', 0)).toBe(false);
|
||||
});
|
||||
|
||||
it('should handle fuzzy matching based on normalized edit distance', () => {
|
||||
const matcher = makeLiteralMatcher('fluttersho', 0.8, false);
|
||||
expect(matcher('fluttershy', 'tags', 0)).toBe(true);
|
||||
expect(matcher('rarity', 'tags', 0)).toBe(false);
|
||||
});
|
||||
|
||||
it('should handle fuzzy matching based on raw edit distance', () => {
|
||||
const matcher = makeLiteralMatcher('fluttersho', 1, false);
|
||||
expect(matcher('fluttershy', 'tags', 0)).toBe(true);
|
||||
expect(matcher('rarity', 'tags', 0)).toBe(false);
|
||||
});
|
||||
|
||||
it('should handle wildcard matching', () => {
|
||||
const matcher = makeLiteralMatcher('fl?tter*', 0, true);
|
||||
expect(matcher('fluttershy', 'tags', 0)).toBe(true);
|
||||
expect(matcher('flitter', 'tags', 0)).toBe(true);
|
||||
expect(matcher('rainbow dash', 'tags', 0)).toBe(false);
|
||||
expect(matcher('gentle flutter', 'tags', 0)).toBe(false);
|
||||
});
|
||||
});
|
53
assets/js/query/__tests__/number.spec.ts
Normal file
53
assets/js/query/__tests__/number.spec.ts
Normal file
|
@ -0,0 +1,53 @@
|
|||
import { makeNumberMatcher } from '../number';
|
||||
|
||||
describe('Number parsing', () => {
|
||||
it('should match numbers directly', () => {
|
||||
const intMatch = makeNumberMatcher(2067, 0, 'eq');
|
||||
|
||||
expect(intMatch('2066', 'value', 0)).toBe(false);
|
||||
expect(intMatch('2067', 'value', 0)).toBe(true);
|
||||
expect(intMatch('2068', 'value', 0)).toBe(false);
|
||||
expect(intMatch('20677', 'value', 0)).toBe(false);
|
||||
});
|
||||
|
||||
it('should match number ranges', () => {
|
||||
const ltMatch = makeNumberMatcher(2067, 0, 'lt');
|
||||
const lteMatch = makeNumberMatcher(2067, 0, 'lte');
|
||||
const gtMatch = makeNumberMatcher(2067, 0, 'gt');
|
||||
const gteMatch = makeNumberMatcher(2067, 0, 'gte');
|
||||
|
||||
expect(ltMatch('2066', 'value', 0)).toBe(true);
|
||||
expect(ltMatch('2067', 'value', 0)).toBe(false);
|
||||
expect(ltMatch('2068', 'value', 0)).toBe(false);
|
||||
expect(lteMatch('2066', 'value', 0)).toBe(true);
|
||||
expect(lteMatch('2067', 'value', 0)).toBe(true);
|
||||
expect(lteMatch('2068', 'value', 0)).toBe(false);
|
||||
expect(gtMatch('2066', 'value', 0)).toBe(false);
|
||||
expect(gtMatch('2067', 'value', 0)).toBe(false);
|
||||
expect(gtMatch('2068', 'value', 0)).toBe(true);
|
||||
expect(gteMatch('2066', 'value', 0)).toBe(false);
|
||||
expect(gteMatch('2067', 'value', 0)).toBe(true);
|
||||
expect(gteMatch('2068', 'value', 0)).toBe(true);
|
||||
});
|
||||
|
||||
it('should not match unparsed values', () => {
|
||||
const matcher = makeNumberMatcher(2067, 0, 'eq');
|
||||
|
||||
expect(matcher('NaN', 'value', 0)).toBe(false);
|
||||
expect(matcher('test', 'value', 0)).toBe(false);
|
||||
});
|
||||
|
||||
it('should interpret fuzz as an inclusive range around the value', () => {
|
||||
const matcher = makeNumberMatcher(2067, 3, 'eq');
|
||||
|
||||
expect(matcher('2063', 'value', 0)).toBe(false);
|
||||
expect(matcher('2064', 'value', 0)).toBe(true);
|
||||
expect(matcher('2065', 'value', 0)).toBe(true);
|
||||
expect(matcher('2066', 'value', 0)).toBe(true);
|
||||
expect(matcher('2067', 'value', 0)).toBe(true);
|
||||
expect(matcher('2068', 'value', 0)).toBe(true);
|
||||
expect(matcher('2069', 'value', 0)).toBe(true);
|
||||
expect(matcher('2070', 'value', 0)).toBe(true);
|
||||
expect(matcher('2071', 'value', 0)).toBe(false);
|
||||
});
|
||||
});
|
84
assets/js/query/__tests__/parse.spec.ts
Normal file
84
assets/js/query/__tests__/parse.spec.ts
Normal file
|
@ -0,0 +1,84 @@
|
|||
import { defaultMatcher } from '../matcher';
|
||||
import { termSpaceToImageField } from '../fields';
|
||||
import { generateLexArray } from '../lex';
|
||||
import { getAstMatcherForTerm } from '../term';
|
||||
import { parseTokens } from '../parse';
|
||||
|
||||
function parseWithDefaultMatcher(term: string, fuzz: number) {
|
||||
return getAstMatcherForTerm(term, fuzz, defaultMatcher);
|
||||
}
|
||||
|
||||
describe('Semantic analysis', () => {
|
||||
let documents: HTMLElement[];
|
||||
|
||||
beforeAll(() => {
|
||||
const e0 = document.createElement('div');
|
||||
e0.setAttribute(termSpaceToImageField.id, '0');
|
||||
e0.setAttribute(termSpaceToImageField.tags, 'safe, solo, fluttershy');
|
||||
|
||||
const e1 = document.createElement('div');
|
||||
e1.setAttribute(termSpaceToImageField.id, '1');
|
||||
e1.setAttribute(termSpaceToImageField.tags, 'suggestive, solo, fluttershy');
|
||||
|
||||
const e2 = document.createElement('div');
|
||||
e2.setAttribute(termSpaceToImageField.id, '2');
|
||||
e2.setAttribute(termSpaceToImageField.tags, 'suggestive, fluttershy, twilight sparkle');
|
||||
|
||||
documents = [e0, e1, e2];
|
||||
});
|
||||
|
||||
it('should match single term expressions', () => {
|
||||
const tokens = generateLexArray('fluttershy', parseWithDefaultMatcher);
|
||||
const matcher = parseTokens(tokens);
|
||||
|
||||
expect(matcher(documents[0])).toBe(true);
|
||||
expect(matcher(documents[1])).toBe(true);
|
||||
expect(matcher(documents[2])).toBe(true);
|
||||
});
|
||||
|
||||
it('should match AND expressions', () => {
|
||||
const tokens = generateLexArray('fluttershy,solo', parseWithDefaultMatcher);
|
||||
const matcher = parseTokens(tokens);
|
||||
|
||||
expect(matcher(documents[0])).toBe(true);
|
||||
expect(matcher(documents[1])).toBe(true);
|
||||
expect(matcher(documents[2])).toBe(false);
|
||||
});
|
||||
|
||||
it('should match OR expressions', () => {
|
||||
const tokens = generateLexArray('suggestive || twilight sparkle', parseWithDefaultMatcher);
|
||||
const matcher = parseTokens(tokens);
|
||||
|
||||
expect(matcher(documents[0])).toBe(false);
|
||||
expect(matcher(documents[1])).toBe(true);
|
||||
expect(matcher(documents[2])).toBe(true);
|
||||
});
|
||||
|
||||
it('should match NOT expressions', () => {
|
||||
const tokens = generateLexArray('NOT twilight sparkle', parseWithDefaultMatcher);
|
||||
const matcher = parseTokens(tokens);
|
||||
|
||||
expect(matcher(documents[0])).toBe(true);
|
||||
expect(matcher(documents[1])).toBe(true);
|
||||
expect(matcher(documents[2])).toBe(false);
|
||||
});
|
||||
|
||||
it('should allow empty expressions', () => {
|
||||
const tokens = generateLexArray('', parseWithDefaultMatcher);
|
||||
const matcher = parseTokens(tokens);
|
||||
|
||||
expect(matcher(documents[0])).toBe(false);
|
||||
expect(matcher(documents[1])).toBe(false);
|
||||
expect(matcher(documents[2])).toBe(false);
|
||||
});
|
||||
|
||||
it('should throw on unpaired AND', () => {
|
||||
const tokens = generateLexArray(' AND ', parseWithDefaultMatcher);
|
||||
expect(() => parseTokens(tokens)).toThrow('Missing operand.');
|
||||
});
|
||||
|
||||
it('should throw on unjoined parenthetical', () => {
|
||||
const tokens = generateLexArray('(safe) solo', parseWithDefaultMatcher);
|
||||
expect(() => parseTokens(tokens)).toThrow('Missing operator.');
|
||||
});
|
||||
});
|
131
assets/js/query/__tests__/term.spec.ts
Normal file
131
assets/js/query/__tests__/term.spec.ts
Normal file
|
@ -0,0 +1,131 @@
|
|||
import { getAstMatcherForTerm } from '../term';
|
||||
import { MatcherFactory, defaultMatcher } from '../matcher';
|
||||
import { termSpaceToImageField } from '../fields';
|
||||
|
||||
function noMatch() {
|
||||
return false;
|
||||
}
|
||||
|
||||
class TestMatcherFactory implements MatcherFactory {
|
||||
public dateVals: string[];
|
||||
public literalVals: string[];
|
||||
public numberVals: number[];
|
||||
public userVals: string[];
|
||||
|
||||
constructor() {
|
||||
this.dateVals = [];
|
||||
this.literalVals = [];
|
||||
this.numberVals = [];
|
||||
this.userVals = [];
|
||||
}
|
||||
|
||||
makeDateMatcher(term: string) {
|
||||
this.dateVals.push(term);
|
||||
return noMatch;
|
||||
}
|
||||
|
||||
makeLiteralMatcher(term: string) {
|
||||
this.literalVals.push(term);
|
||||
return noMatch;
|
||||
}
|
||||
|
||||
makeNumberMatcher(term: number) {
|
||||
this.numberVals.push(term);
|
||||
return noMatch;
|
||||
}
|
||||
|
||||
makeUserMatcher(term: string) {
|
||||
this.userVals.push(term);
|
||||
return noMatch;
|
||||
}
|
||||
}
|
||||
|
||||
// Unit tests for getAstMatcherForTerm. Parsing tests route terms through a
// recording TestMatcherFactory and assert which factory method received the
// parsed value; the final two tests use the real defaultMatcher against
// DOM elements carrying the data-* attributes from termSpaceToImageField.
describe('Search terms', () => {
  let factory: TestMatcherFactory;

  beforeEach(() => {
    factory = new TestMatcherFactory();
  });

  it('should parse the default field', () => {
    getAstMatcherForTerm('default', 0, factory);
    expect(factory.literalVals).toEqual(['default']);
  });

  it('should parse the default field with wildcarding', () => {
    // Wildcard metacharacters are preserved in the literal term.
    getAstMatcherForTerm('def?ul*', 0, factory);
    expect(factory.literalVals).toEqual(['def?ul*']);
  });

  it('should parse the default field with fuzzing', () => {
    // Second argument is the fuzz level.
    getAstMatcherForTerm('default', 1, factory);
    expect(factory.literalVals).toEqual(['default']);
  });

  it('should parse the default field within quotes', () => {
    // Surrounding quotes are stripped before the term reaches the factory.
    getAstMatcherForTerm('"default"', 0, factory);
    expect(factory.literalVals).toEqual(['default']);
  });

  it('should parse exact date field values', () => {
    getAstMatcherForTerm('created_at:2024', 0, factory);
    expect(factory.dateVals).toEqual(['2024']);
  });

  it('should parse ranged date field values', () => {
    // All four range qualifiers forward the bare date value.
    getAstMatcherForTerm('created_at.lte:2024', 0, factory);
    getAstMatcherForTerm('created_at.lt:2024', 0, factory);
    getAstMatcherForTerm('created_at.gte:2024', 0, factory);
    getAstMatcherForTerm('created_at.gt:2024', 0, factory);
    expect(factory.dateVals).toEqual(['2024', '2024', '2024', '2024']);
  });

  it('should parse exact number field values', () => {
    // Numeric fields are converted to numbers before reaching the factory.
    getAstMatcherForTerm('width:1920', 0, factory);
    expect(factory.numberVals).toEqual([1920]);
  });

  it('should parse ranged number field values', () => {
    getAstMatcherForTerm('width.lte:1920', 0, factory);
    getAstMatcherForTerm('width.lt:1920', 0, factory);
    getAstMatcherForTerm('width.gte:1920', 0, factory);
    getAstMatcherForTerm('width.gt:1920', 0, factory);
    expect(factory.numberVals).toEqual([1920, 1920, 1920, 1920]);
  });

  it('should parse literal field values', () => {
    getAstMatcherForTerm('source_url:*twitter*', 0, factory);
    expect(factory.literalVals).toEqual(['*twitter*']);
  });

  it('should parse user field values', () => {
    // my:-prefixed terms route to makeUserMatcher with the prefix removed.
    getAstMatcherForTerm('my:upvotes', 0, factory);
    getAstMatcherForTerm('my:downvotes', 0, factory);
    getAstMatcherForTerm('my:faves', 0, factory);
    expect(factory.userVals).toEqual(['upvotes', 'downvotes', 'faves']);
  });

  it('should match document with proper field values', () => {
    const idMatcher = getAstMatcherForTerm('id.lt:1', 0, defaultMatcher);
    const sourceMatcher = getAstMatcherForTerm('source_url:twitter.com', 0, defaultMatcher);

    const idAttribute = termSpaceToImageField.id;
    const sourceUrlAttribute = termSpaceToImageField.source_url;

    const properElement = document.createElement('div');
    properElement.setAttribute(idAttribute, '0');
    properElement.setAttribute(sourceUrlAttribute, 'twitter.com');

    expect(idMatcher(properElement)).toBe(true);
    expect(sourceMatcher(properElement)).toBe(true);
  });

  it('should not match document without field values', () => {
    const idMatcher = getAstMatcherForTerm('id.lt:1', 0, defaultMatcher);
    const sourceMatcher = getAstMatcherForTerm('source_url:twitter.com', 0, defaultMatcher);
    // No data-* attributes set at all.
    const improperElement = document.createElement('div');

    expect(idMatcher(improperElement)).toBe(false);
    expect(sourceMatcher(improperElement)).toBe(false);
  });
});
|
50
assets/js/query/__tests__/user.spec.ts
Normal file
50
assets/js/query/__tests__/user.spec.ts
Normal file
|
@ -0,0 +1,50 @@
|
|||
import { makeUserMatcher } from '../user';
|
||||
|
||||
// Unit tests for makeUserMatcher, driven by a stubbed window.booru
// interaction list: image 0 is faved and upvoted, image 1 is downvoted,
// image 2 is hidden.
describe('User field parsing', () => {
  beforeEach(() => {
    /* eslint-disable camelcase */
    window.booru.interactions = [
      {image_id: 0, user_id: 0, interaction_type: 'faved', value: null},
      {image_id: 0, user_id: 0, interaction_type: 'voted', value: 'up'},
      {image_id: 1, user_id: 0, interaction_type: 'voted', value: 'down'},
      {image_id: 2, user_id: 0, interaction_type: 'hidden', value: null},
    ];
    /* eslint-enable camelcase */
  });

  it('should parse my:faves', () => {
    const matcher = makeUserMatcher('faves');

    // Matcher args are (value, field, documentId); only documentId matters here.
    expect(matcher('', 'my', 0)).toBe(true);
    expect(matcher('', 'my', 1)).toBe(false);
    expect(matcher('', 'my', 2)).toBe(false);
  });

  it('should parse my:upvotes', () => {
    const matcher = makeUserMatcher('upvotes');

    expect(matcher('', 'my', 0)).toBe(true);
    expect(matcher('', 'my', 1)).toBe(false);
    expect(matcher('', 'my', 2)).toBe(false);
  });

  it('should parse my:downvotes', () => {
    const matcher = makeUserMatcher('downvotes');

    expect(matcher('', 'my', 0)).toBe(false);
    expect(matcher('', 'my', 1)).toBe(true);
    expect(matcher('', 'my', 2)).toBe(false);
  });

  it('should not parse other my: fields', () => {
    // hidden/watched are deliberately unsupported and never match.
    const hiddenMatcher = makeUserMatcher('hidden');
    const watchedMatcher = makeUserMatcher('watched');

    expect(hiddenMatcher('', 'my', 0)).toBe(false);
    expect(hiddenMatcher('', 'my', 1)).toBe(false);
    expect(hiddenMatcher('', 'my', 2)).toBe(false);
    expect(watchedMatcher('', 'my', 0)).toBe(false);
    expect(watchedMatcher('', 'my', 1)).toBe(false);
    expect(watchedMatcher('', 'my', 2)).toBe(false);
  });
});
|
17
assets/js/query/boolean.ts
Normal file
17
assets/js/query/boolean.ts
Normal file
|
@ -0,0 +1,17 @@
|
|||
import { AstMatcher } from './types';
|
||||
|
||||
export function matchAny(...matchers: AstMatcher[]): AstMatcher {
|
||||
return (e: HTMLElement) => matchers.some(matcher => matcher(e));
|
||||
}
|
||||
|
||||
export function matchAll(...matchers: AstMatcher[]): AstMatcher {
|
||||
return (e: HTMLElement) => matchers.every(matcher => matcher(e));
|
||||
}
|
||||
|
||||
export function matchNot(matcher: AstMatcher): AstMatcher {
|
||||
return (e: HTMLElement) => !matcher(e);
|
||||
}
|
||||
|
||||
export function matchNone(): AstMatcher {
|
||||
return () => false;
|
||||
}
|
140
assets/js/query/date.ts
Normal file
140
assets/js/query/date.ts
Normal file
|
@ -0,0 +1,140 @@
|
|||
import { assertNotNull } from '../utils/assert';
|
||||
import { FieldMatcher, ParseError, RangeEqualQualifier } from './types';
|
||||
|
||||
// Component aliases documenting the tuple layouts below.
type Year = number;
type Month = number;
type Day = number;
type Hours = number;
type Minutes = number;
type Seconds = number;
// [year, month (0-based), day, hours, minutes, seconds] — the layout
// accepted positionally by Date.UTC.
type AbsoluteDate = [Year, Month, Day, Hours, Minutes, Seconds];
// [hours, minutes] offset from UTC; both components share the sign.
type TimeZoneOffset = [Hours, Minutes];
// Milliseconds since the Unix epoch (Date#getTime()).
type PosixTimeMs = number;
|
||||
|
||||
function makeMatcher(bottomDate: PosixTimeMs, topDate: PosixTimeMs, qual: RangeEqualQualifier): FieldMatcher {
|
||||
// The open-left, closed-right date range specified by the
|
||||
// date/time format limits the types of comparisons that are
|
||||
// done compared to numeric ranges.
|
||||
switch (qual) {
|
||||
case 'lte':
|
||||
return v => new Date(v).getTime() < topDate;
|
||||
case 'gte':
|
||||
return v => new Date(v).getTime() >= bottomDate;
|
||||
case 'lt':
|
||||
return v => new Date(v).getTime() < bottomDate;
|
||||
case 'gt':
|
||||
return v => new Date(v).getTime() >= topDate;
|
||||
case 'eq':
|
||||
default:
|
||||
return v => {
|
||||
const t = new Date(v).getTime();
|
||||
return t >= bottomDate && t < topDate;
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// Recognizes relative dates such as "3 days ago": capture 1 is the amount,
// capture 2 the singular unit name.
const relativeDateMatch = /(\d+) (second|minute|hour|day|week|month|year)s? ago/;
|
||||
|
||||
function makeRelativeDateMatcher(dateVal: string, qual: RangeEqualQualifier): FieldMatcher {
|
||||
const match = assertNotNull(relativeDateMatch.exec(dateVal));
|
||||
const bounds: Record<string, number> = {
|
||||
second: 1000,
|
||||
minute: 60000,
|
||||
hour: 3600000,
|
||||
day: 86400000,
|
||||
week: 604800000,
|
||||
month: 2592000000,
|
||||
year: 31536000000
|
||||
};
|
||||
|
||||
const amount = parseInt(match[1], 10);
|
||||
const scale = bounds[match[2]];
|
||||
|
||||
const now = new Date().getTime();
|
||||
const bottomDate = new Date(now - amount * scale).getTime();
|
||||
const topDate = new Date(now - (amount - 1) * scale).getTime();
|
||||
|
||||
return makeMatcher(bottomDate, topDate, qual);
|
||||
}
|
||||
|
||||
/**
 * Builds a matcher for absolute ISO-8601-style dates. The input may be
 * truncated after any component ("2024", "2024-06", "2024-06-01T12", ...);
 * the matched interval covers the least-significant component present.
 * An optional trailing "+hh:mm"/"-hh:mm" offset or "Z" suffix selects the
 * time zone; the default is UTC.
 *
 * Throws ParseError when the string does not fit the expected grammar.
 */
function makeAbsoluteDateMatcher(dateVal: string, qual: RangeEqualQualifier): FieldMatcher {
  // Anchored regexes for successive components:
  // year, -month, -day, (space|T)hour, :minute, :second.
  const parseRes: RegExp[] = [
    /^(\d{4})/,
    /^-(\d{2})/,
    /^-(\d{2})/,
    /^(?:\s+|T|t)(\d{2})/,
    /^:(\d{2})/,
    /^:(\d{2})/
  ];
  const timeZoneOffset: TimeZoneOffset = [0, 0];
  // Defaults give midnight on the 1st when lower components are absent.
  const timeData: AbsoluteDate = [0, 0, 1, 0, 0, 0];

  // Keep the original string for error messages.
  const origDateVal: string = dateVal;
  let localDateVal = origDateVal;

  // Strip and record an explicit "+hh:mm"/"-hh:mm" suffix, or a trailing
  // Z/z marker (equivalent to the UTC default).
  const offsetMatch = /([+-])(\d{2}):(\d{2})$/.exec(localDateVal);
  if (offsetMatch) {
    timeZoneOffset[0] = parseInt(offsetMatch[2], 10);
    timeZoneOffset[1] = parseInt(offsetMatch[3], 10);
    if (offsetMatch[1] === '-') {
      timeZoneOffset[0] *= -1;
      timeZoneOffset[1] *= -1;
    }
    localDateVal = localDateVal.substring(0, localDateVal.length - 6);
  }
  else {
    localDateVal = localDateVal.replace(/[Zz]$/, '');
  }

  // Consume components left to right; matchIndex ends up as the number of
  // components actually present in the input.
  let matchIndex = 0;
  for (; matchIndex < parseRes.length; matchIndex += 1) {
    if (localDateVal.length === 0) {
      break;
    }

    const componentMatch = parseRes[matchIndex].exec(localDateVal);
    if (componentMatch) {
      if (matchIndex === 1) {
        // Months are offset by 1.
        timeData[matchIndex] = parseInt(componentMatch[1], 10) - 1;
      }
      else {
        // All other components are not offset.
        timeData[matchIndex] = parseInt(componentMatch[1], 10);
      }

      // Truncate string.
      localDateVal = localDateVal.substring(componentMatch[0].length);
    }
    else {
      throw new ParseError(`Cannot parse date string: ${origDateVal}`);
    }
  }

  // Anything left over means the input had trailing garbage.
  if (localDateVal.length > 0) {
    throw new ParseError(`Cannot parse date string: ${origDateVal}`);
  }

  // Apply the user-specified time zone offset. The JS Date constructor
  // is very flexible here: out-of-range hour/minute values carry over.
  timeData[3] -= timeZoneOffset[0];
  timeData[4] -= timeZoneOffset[1];

  const asPosix = (data: AbsoluteDate) => {
    return new Date(Date.UTC.apply(Date, data)).getTime();
  };

  // Bump the least-significant supplied component by one to obtain the
  // exclusive upper bound of the interval.
  const bottomDate = asPosix(timeData);
  timeData[matchIndex - 1] += 1;
  const topDate = asPosix(timeData);

  return makeMatcher(bottomDate, topDate, qual);
}
|
||||
|
||||
export function makeDateMatcher(dateVal: string, qual: RangeEqualQualifier): FieldMatcher {
|
||||
if (relativeDateMatch.test(dateVal)) {
|
||||
return makeRelativeDateMatcher(dateVal, qual);
|
||||
}
|
||||
|
||||
return makeAbsoluteDateMatcher(dateVal, qual);
|
||||
}
|
39
assets/js/query/fields.ts
Normal file
39
assets/js/query/fields.ts
Normal file
|
@ -0,0 +1,39 @@
|
|||
import { FieldName } from './types';
|
||||
|
||||
type AttributeName = string;
|
||||
|
||||
export const numberFields: FieldName[] =
|
||||
['id', 'width', 'height', 'aspect_ratio',
|
||||
'comment_count', 'score', 'upvotes', 'downvotes',
|
||||
'faves', 'tag_count', 'score'];
|
||||
|
||||
export const dateFields: FieldName[] = ['created_at'];
|
||||
|
||||
export const literalFields =
|
||||
['tags', 'orig_sha512_hash', 'sha512_hash',
|
||||
'uploader', 'source_url', 'description'];
|
||||
|
||||
export const termSpaceToImageField: Record<FieldName, AttributeName> = {
|
||||
tags: 'data-image-tag-aliases',
|
||||
score: 'data-score',
|
||||
upvotes: 'data-upvotes',
|
||||
downvotes: 'data-downvotes',
|
||||
uploader: 'data-uploader',
|
||||
// Yeah, I don't think this is reasonably supportable.
|
||||
// faved_by: 'data-faved-by',
|
||||
id: 'data-image-id',
|
||||
width: 'data-width',
|
||||
height: 'data-height',
|
||||
/* eslint-disable camelcase */
|
||||
aspect_ratio: 'data-aspect-ratio',
|
||||
comment_count: 'data-comment-count',
|
||||
tag_count: 'data-tag-count',
|
||||
source_url: 'data-source-url',
|
||||
faves: 'data-faves',
|
||||
sha512_hash: 'data-sha512',
|
||||
orig_sha512_hash: 'data-orig-sha512',
|
||||
created_at: 'data-created-at'
|
||||
/* eslint-enable camelcase */
|
||||
};
|
||||
|
||||
export const defaultField = 'tags';
|
191
assets/js/query/lex.ts
Normal file
191
assets/js/query/lex.ts
Normal file
|
@ -0,0 +1,191 @@
|
|||
import { assertNotNull, assertNotUndefined } from '../utils/assert';
|
||||
import { AstMatcher, ParseError, TokenList } from './types';
|
||||
|
||||
type TokenName = string;
type Token = [TokenName, RegExp];

// Ordered token table for the lexer. The first regex that matches at the
// start of the remaining input wins, so order is significant: fuzz/boost
// must precede the word rules, and quoted literals precede bare words.
const tokenList: Token[] = [
  ['fuzz', /^~(?:\d+(\.\d+)?|\.\d+)/],
  ['boost', /^\^[-+]?\d+(\.\d+)?/],
  ['quoted_lit', /^\s*"(?:[^"]|\\")+"/],
  ['lparen', /^\s*\(\s*/],
  ['rparen', /^\s*\)\s*/],
  ['and_op', /^\s*(?:&&|AND)\s+/],
  ['and_op', /^\s*,\s*/],
  ['or_op', /^\s*(?:\|\||OR)\s+/],
  ['not_op', /^\s*NOT(?:\s+|(?=\())/],
  ['not_op', /^\s*[!-]\s*/],
  ['space', /^\s+/],
  // Two word rules: the first excludes ^ and ~ so fuzz/boost markers can
  // terminate a word; the second, more permissive rule picks up input the
  // first cannot start on (e.g. a leading ^ or ~ that is not a valid
  // fuzz/boost token) — NOTE(review): confirm that fallback is intentional.
  ['word', /^(?:\\[\s,()^~]|[^\s,()^~])+/],
  ['word', /^(?:\\[\s,()]|[^\s,()])+/]
];

// A ParseTerm turns one search term (plus its fuzz and boost modifiers)
// into an AST matcher; supplied by the caller so term parsing is pluggable.
export type ParseTerm = (term: string, fuzz: number, boost: number) => AstMatcher;
|
||||
|
||||
/**
 * Lexes a search string into a postfix TokenList for parseTokens: leaf
 * AstMatchers (built via parseTerm) interleaved with 'and_op' / 'or_op' /
 * 'not_op' operator names, shunting-yard style. Operators wait in opQueue
 * until precedence ('and_op' binds tighter than 'or_op') or a closing
 * parenthesis flushes them onto the output stack.
 *
 * Throws ParseError on mismatched parentheses.
 */
export function generateLexArray(searchStr: string, parseTerm: ParseTerm): TokenList {
  const opQueue: string[] = [],        // pending operators, head = most recent
        groupNegate: boolean[] = [],   // per open paren: was the group negated?
        tokenStack: TokenList = [];    // postfix output

  let searchTerm: string | null = null;
  let boostFuzzStr = '';               // raw fuzz/boost text, in case it turns out to be part of the term
  let localSearchStr: string = searchStr;
  let negate = false;
  let boost = 1;
  let fuzz = 0;
  let lparenCtr = 0;                   // parens opened *inside* the current term

  // Emit the accumulated term (and a pending negation) to the output.
  const pushTerm = () => {
    if (searchTerm !== null) {
      // Push to stack.
      tokenStack.push(parseTerm(searchTerm, fuzz, boost));
      // Reset term and options data.
      boost = 1;
      fuzz = 0;
      searchTerm = null;
      boostFuzzStr = '';
      lparenCtr = 0;
    }

    if (negate) {
      tokenStack.push('not_op');
      negate = false;
    }
  };

  while (localSearchStr.length > 0) {
    for (const [tokenName, tokenRe] of tokenList) {
      const match = tokenRe.exec(localSearchStr);

      if (!match) {
        continue;
      }

      const token = match[0];

      // A binary operator, or a ')' that does not close a paren inside the
      // term, terminates the current term.
      if (searchTerm !== null && (['and_op', 'or_op'].indexOf(tokenName) !== -1 || tokenName === 'rparen' && lparenCtr === 0)) {
        pushTerm();
      }

      switch (tokenName) {
        case 'and_op':
          // AND is left-associative: flush equal-precedence ANDs first.
          while (opQueue[0] === 'and_op') {
            tokenStack.push(assertNotUndefined(opQueue.shift()));
          }
          opQueue.unshift('and_op');
          break;
        case 'or_op':
          // OR has lower precedence: flush both ANDs and ORs first.
          while (opQueue[0] === 'and_op' || opQueue[0] === 'or_op') {
            tokenStack.push(assertNotUndefined(opQueue.shift()));
          }
          opQueue.unshift('or_op');
          break;
        case 'not_op':
          if (searchTerm) {
            // We're already inside a search term, so it does not apply, obv.
            searchTerm += token;
          }
          else {
            // Double negation cancels out.
            negate = !negate;
          }
          break;
        case 'lparen':
          if (searchTerm) {
            // If we are inside the search term, do not error out just yet;
            // instead, consider it as part of the search term, as a user convenience.
            searchTerm += token;
            lparenCtr += 1;
          }
          else {
            opQueue.unshift('lparen');
            groupNegate.push(negate);
            negate = false;
          }
          break;
        case 'rparen':
          if (lparenCtr > 0) {
            // Closes a paren that was absorbed into the term text.
            searchTerm = assertNotNull(searchTerm) + token;
            lparenCtr -= 1;
          }
          else {
            // Flush operators down to the matching '('.
            while (opQueue.length > 0) {
              const op = assertNotUndefined(opQueue.shift());
              if (op === 'lparen') {
                break;
              }
              tokenStack.push(op);
            }
            // Apply a negation that preceded the whole group.
            if (groupNegate.length > 0 && groupNegate.pop()) {
              tokenStack.push('not_op');
            }
          }
          break;
        case 'fuzz':
          if (searchTerm) {
            // For this and boost operations, we store the current match so far
            // to a temporary string in case this is actually inside the term.
            fuzz = parseFloat(token.substring(1));
            boostFuzzStr += token;
          }
          else {
            searchTerm = token;
          }
          break;
        case 'boost':
          if (searchTerm) {
            boost = parseFloat(token.substring(1));
            boostFuzzStr += token;
          }
          else {
            searchTerm = token;
          }
          break;
        case 'quoted_lit':
          if (searchTerm) {
            searchTerm += token;
          }
          else {
            searchTerm = token;
          }
          break;
        case 'word':
          if (searchTerm) {
            // A word after fuzz/boost means those markers were part of the
            // term text after all: fold the raw text back in and reset.
            if (fuzz !== 0 || boost !== 1) {
              boost = 1;
              fuzz = 0;
              searchTerm += boostFuzzStr;
              boostFuzzStr = '';
            }
            searchTerm += token;
          }
          else {
            searchTerm = token;
          }
          break;
        default:
          // Append extra spaces within search terms.
          if (searchTerm) {
            searchTerm += token;
          }
      }

      // Truncate string and restart the token tests.
      localSearchStr = localSearchStr.substring(token.length);

      // Break since we have found a match.
      break;
    }
  }

  // Append final tokens to the stack.
  pushTerm();

  if (opQueue.indexOf('rparen') !== -1 || opQueue.indexOf('lparen') !== -1) {
    throw new ParseError('Mismatched parentheses.');
  }

  // Concatenate remaining operators to the token stack.
  tokenStack.push(...opQueue);

  return tokenStack;
}
|
113
assets/js/query/literal.ts
Normal file
113
assets/js/query/literal.ts
Normal file
|
@ -0,0 +1,113 @@
|
|||
import { FieldMatcher } from './types';
|
||||
|
||||
function extractValues(v: string, name: string) {
|
||||
return name === 'tags' ? v.split(', ') : [v];
|
||||
}
|
||||
|
||||
function makeExactMatcher(term: string): FieldMatcher {
|
||||
return (v, name) => {
|
||||
const values = extractValues(v, name);
|
||||
|
||||
for (const val of values) {
|
||||
if (val.toLowerCase() === term.toLowerCase()) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
};
|
||||
}
|
||||
|
||||
// Case-insensitive wildcard matcher: `*` matches any run of characters,
// `?` at most one character; backslash-escaped metacharacters are literal.
function makeWildcardMatcher(term: string): FieldMatcher {
  // Transforms wildcard match into regular expression.
  // A custom NFA with caching may be more sophisticated but not
  // likely to be faster.
  // Steps: escape regex metacharacters, then rewrite unescaped `*`/`?`
  // (mid-string and string-initial forms separately) into `.*`/`.?`; the
  // `(?:\\\\)+` groups step over literal escaped backslashes so `\\*`
  // stays a literal asterisk.
  const wildcard = new RegExp(
    `^${term.replace(/([.+^$[\]\\(){}|-])/g, '\\$1')
      .replace(/([^\\]|[^\\](?:\\\\)+)\*/g, '$1.*')
      .replace(/^(?:\\\\)*\*/g, '.*')
      .replace(/([^\\]|[^\\](?:\\\\)+)\?/g, '$1.?')
      .replace(/^(?:\\\\)*\?/g, '.?')}$`, 'i'
  );

  return (v, name) => {
    const values = extractValues(v, name);

    for (const val of values) {
      if (wildcard.test(val)) {
        return true;
      }
    }

    return false;
  };
}
|
||||
|
||||
function fuzzyMatch(term: string, targetStr: string, fuzz: number): boolean {
|
||||
const targetDistance = fuzz < 1.0 ? targetStr.length * (1.0 - fuzz) : fuzz;
|
||||
const targetStrLower = targetStr.toLowerCase();
|
||||
|
||||
// Work vectors, representing the last three populated
|
||||
// rows of the dynamic programming matrix of the iterative
|
||||
// optimal string alignment calculation.
|
||||
let v0: number[] = [];
|
||||
let v1: number[] = [];
|
||||
let v2: number[] = [];
|
||||
let temp: number[];
|
||||
|
||||
for (let i = 0; i <= targetStrLower.length; i += 1) {
|
||||
v1.push(i);
|
||||
}
|
||||
|
||||
for (let i = 0; i < term.length; i += 1) {
|
||||
v2[0] = i;
|
||||
for (let j = 0; j < targetStrLower.length; j += 1) {
|
||||
const cost = term[i] === targetStrLower[j] ? 0 : 1;
|
||||
v2[j + 1] = Math.min(
|
||||
// Deletion.
|
||||
v1[j + 1] + 1,
|
||||
// Insertion.
|
||||
v2[j] + 1,
|
||||
// Substitution or No Change.
|
||||
v1[j] + cost
|
||||
);
|
||||
if (i > 1 && j > 1 && term[i] === targetStrLower[j - 1] &&
|
||||
targetStrLower[i - 1] === targetStrLower[j]) {
|
||||
v2[j + 1] = Math.min(v2[j], v0[j - 1] + cost);
|
||||
}
|
||||
}
|
||||
// Rotate dem vec pointers bra.
|
||||
temp = v0;
|
||||
v0 = v1;
|
||||
v1 = v2;
|
||||
v2 = temp;
|
||||
}
|
||||
|
||||
return v1[targetStrLower.length] <= targetDistance;
|
||||
}
|
||||
|
||||
function makeFuzzyMatcher(term: string, fuzz: number): FieldMatcher {
|
||||
return (v, name) => {
|
||||
const values = extractValues(v, name);
|
||||
|
||||
for (const val of values) {
|
||||
if (fuzzyMatch(term, val, fuzz)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
};
|
||||
}
|
||||
|
||||
export function makeLiteralMatcher(term: string, fuzz: number, wildcardable: boolean): FieldMatcher {
|
||||
if (fuzz === 0 && !wildcardable) {
|
||||
return makeExactMatcher(term);
|
||||
}
|
||||
|
||||
if (!wildcardable) {
|
||||
return makeFuzzyMatcher(term, fuzz);
|
||||
}
|
||||
|
||||
return makeWildcardMatcher(term);
|
||||
}
|
20
assets/js/query/matcher.ts
Normal file
20
assets/js/query/matcher.ts
Normal file
|
@ -0,0 +1,20 @@
|
|||
import { makeDateMatcher } from './date';
|
||||
import { makeLiteralMatcher } from './literal';
|
||||
import { makeNumberMatcher } from './number';
|
||||
import { makeUserMatcher } from './user';
|
||||
|
||||
import { FieldMatcher, RangeEqualQualifier } from './types';
|
||||
|
||||
// Factory contract for building per-field matchers. Production code uses
// defaultMatcher below; tests substitute recording fakes.
export interface MatcherFactory {
  makeDateMatcher: (dateVal: string, qual: RangeEqualQualifier) => FieldMatcher,
  makeLiteralMatcher: (term: string, fuzz: number, wildcardable: boolean) => FieldMatcher,
  makeNumberMatcher: (term: number, fuzz: number, qual: RangeEqualQualifier) => FieldMatcher,
  makeUserMatcher: (term: string) => FieldMatcher
}

// Production factory wired to the real matcher implementations.
export const defaultMatcher: MatcherFactory = {
  makeDateMatcher,
  makeLiteralMatcher,
  makeNumberMatcher,
  makeUserMatcher,
};
|
30
assets/js/query/number.ts
Normal file
30
assets/js/query/number.ts
Normal file
|
@ -0,0 +1,30 @@
|
|||
import { FieldMatcher, RangeEqualQualifier } from './types';
|
||||
|
||||
export function makeNumberMatcher(term: number, fuzz: number, qual: RangeEqualQualifier): FieldMatcher {
|
||||
// Range matching.
|
||||
return v => {
|
||||
const attrVal = parseFloat(v);
|
||||
|
||||
if (isNaN(attrVal)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (fuzz !== 0) {
|
||||
return term - fuzz <= attrVal && term + fuzz >= attrVal;
|
||||
}
|
||||
|
||||
switch (qual) {
|
||||
case 'lt':
|
||||
return attrVal < term;
|
||||
case 'gt':
|
||||
return attrVal > term;
|
||||
case 'lte':
|
||||
return attrVal <= term;
|
||||
case 'gte':
|
||||
return attrVal >= term;
|
||||
case 'eq':
|
||||
default:
|
||||
return attrVal === term;
|
||||
}
|
||||
};
|
||||
}
|
52
assets/js/query/parse.ts
Normal file
52
assets/js/query/parse.ts
Normal file
|
@ -0,0 +1,52 @@
|
|||
import { matchAll, matchAny, matchNone, matchNot } from './boolean';
|
||||
import { AstMatcher, ParseError, TokenList } from './types';
|
||||
|
||||
/**
 * Evaluates a postfix token stream (from generateLexArray) into one
 * AstMatcher. Leaf matchers go onto an operand stack; 'and_op'/'or_op'
 * combine the top two operands; a 'not_op' that FOLLOWS a token negates
 * the value that token just produced (and is skipped when visited itself).
 *
 * Throws ParseError when an operator lacks operands ("Missing operand.")
 * or more than one operand remains ("Missing operator."). An empty stream
 * yields a match-nothing matcher.
 */
export function parseTokens(lexicalArray: TokenList): AstMatcher {
  const operandStack: AstMatcher[] = [];

  lexicalArray.forEach((token, i) => {
    // Negations are applied via lookahead below, not when visited.
    if (token === 'not_op') {
      return;
    }

    let intermediate: AstMatcher;

    if (typeof token === 'string') {
      // Binary operator: pop its two operands.
      const op2 = operandStack.pop();
      const op1 = operandStack.pop();

      if (typeof op1 === 'undefined' || typeof op2 === 'undefined') {
        throw new ParseError('Missing operand.');
      }

      if (token === 'and_op') {
        intermediate = matchAll(op1, op2);
      }
      else {
        intermediate = matchAny(op1, op2);
      }
    }
    else {
      // Leaf matcher produced by the lexer.
      intermediate = token;
    }

    // A trailing 'not_op' negates the value just computed.
    if (lexicalArray[i + 1] === 'not_op') {
      operandStack.push(matchNot(intermediate));
    }
    else {
      operandStack.push(intermediate);
    }
  });

  if (operandStack.length > 1) {
    throw new ParseError('Missing operator.');
  }

  const op1 = operandStack.pop();

  // Empty input: match nothing.
  if (typeof op1 === 'undefined') {
    return matchNone();
  }

  return op1;
}
|
90
assets/js/query/term.ts
Normal file
90
assets/js/query/term.ts
Normal file
|
@ -0,0 +1,90 @@
|
|||
import { MatcherFactory } from './matcher';
|
||||
|
||||
import { numberFields, dateFields, literalFields, termSpaceToImageField, defaultField } from './fields';
|
||||
import { FieldName, FieldMatcher, RangeEqualQualifier, TermType, AstMatcher } from './types';
|
||||
|
||||
// [field name, range qualifier, value type] for a range-capable field.
type RangeInfo = [FieldName, RangeEqualQualifier, TermType];
|
||||
|
||||
function normalizeTerm(term: string, wildcardable: boolean) {
|
||||
if (!wildcardable) {
|
||||
return term.replace('\\"', '"');
|
||||
}
|
||||
return term.replace(/\\([^*?])/g, '$1');
|
||||
}
|
||||
|
||||
function parseRangeField(field: string): RangeInfo | null {
|
||||
if (numberFields.indexOf(field) !== -1) {
|
||||
return [field, 'eq', 'number'];
|
||||
}
|
||||
|
||||
if (dateFields.indexOf(field) !== -1) {
|
||||
return [field, 'eq', 'date'];
|
||||
}
|
||||
|
||||
const qual = /^(\w+)\.([lg]te?|eq)$/.exec(field);
|
||||
|
||||
if (qual) {
|
||||
const fieldName: FieldName = qual[1];
|
||||
const rangeQual = qual[2] as RangeEqualQualifier;
|
||||
|
||||
if (numberFields.indexOf(fieldName) !== -1) {
|
||||
return [fieldName, rangeQual, 'number'];
|
||||
}
|
||||
|
||||
if (dateFields.indexOf(fieldName) !== -1) {
|
||||
return [fieldName, rangeQual, 'date'];
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
 * Parses one search term into [field name, matcher]. Terms of the form
 * `space:value` dispatch on the term space: range-capable fields build
 * date/number matchers, literal fields build literal matchers, and `my:`
 * builds a user-interaction matcher. Anything else falls through to a
 * literal matcher on the default field.
 */
function makeTermMatcher(term: string, fuzz: number, factory: MatcherFactory): [FieldName, FieldMatcher] {
  let rangeParsing, candidateTermSpace, termCandidate;
  let localTerm = term;
  // A term is wildcardable unless it is fuzzed or fully quoted.
  const wildcardable = fuzz === 0 && !/^"([^"]|\\")+"$/.test(localTerm);

  // NOTE(review): a quoted term combined with a non-zero fuzz keeps its
  // surrounding quotes (only the fuzz === 0 path strips them) — confirm
  // that is intended.
  if (!wildcardable && !fuzz) {
    // Remove quotes around quoted literal term
    localTerm = localTerm.substring(1, localTerm.length - 1);
  }

  localTerm = normalizeTerm(localTerm, wildcardable);

  // N.B.: For the purposes of this parser, boosting effects are ignored.
  const matchArr = localTerm.split(':');

  if (matchArr.length > 1) {
    candidateTermSpace = matchArr[0];
    // Re-join the remainder so values containing ':' survive intact.
    termCandidate = matchArr.slice(1).join(':');
    rangeParsing = parseRangeField(candidateTermSpace);

    if (rangeParsing) {
      const [fieldName, rangeType, fieldType] = rangeParsing;

      if (fieldType === 'date') {
        return [fieldName, factory.makeDateMatcher(termCandidate, rangeType)];
      }

      return [fieldName, factory.makeNumberMatcher(parseFloat(termCandidate), fuzz, rangeType)];
    }
    else if (literalFields.indexOf(candidateTermSpace) !== -1) {
      return [candidateTermSpace, factory.makeLiteralMatcher(termCandidate, fuzz, wildcardable)];
    }
    else if (candidateTermSpace === 'my') {
      return [candidateTermSpace, factory.makeUserMatcher(termCandidate)];
    }
  }

  // Unrecognized term space (or no ':' at all): match against the default
  // field using the whole normalized term.
  return [defaultField, factory.makeLiteralMatcher(localTerm, fuzz, wildcardable)];
}
|
||||
|
||||
export function getAstMatcherForTerm(term: string, fuzz: number, factory: MatcherFactory): AstMatcher {
|
||||
const [fieldName, matcher] = makeTermMatcher(term, fuzz, factory);
|
||||
|
||||
return (e: HTMLElement) => {
|
||||
const value = e.getAttribute(termSpaceToImageField[fieldName]) || '';
|
||||
const documentId = parseInt(e.getAttribute(termSpaceToImageField.id) || '0', 10);
|
||||
return matcher(value, fieldName, documentId);
|
||||
};
|
||||
}
|
12
assets/js/query/types.ts
Normal file
12
assets/js/query/types.ts
Normal file
|
@ -0,0 +1,12 @@
|
|||
// Value categories a term space can resolve to.
export type TermType = 'number' | 'date' | 'literal' | 'my';
// Range qualifiers as written in queries (`field.gt:`, ...).
export type RangeQualifier = 'gt' | 'gte' | 'lt' | 'lte';
export type RangeEqualQualifier = RangeQualifier | 'eq';

export type FieldValue = string;
export type FieldName = string;
// Tests one extracted field value; documentId is consumed by my:-matchers.
export type FieldMatcher = (value: FieldValue, name: FieldName, documentId: number) => boolean;

// Matches a rendered image element against the parsed query.
export type AstMatcher = (e: HTMLElement) => boolean;
// Postfix stream: operator names interleaved with leaf matchers.
export type TokenList = (string | AstMatcher)[];

// Raised for malformed queries (mismatched parens, missing operands, bad dates).
export class ParseError extends Error {}
|
25
assets/js/query/user.ts
Normal file
25
assets/js/query/user.ts
Normal file
|
@ -0,0 +1,25 @@
|
|||
import { Interaction, InteractionType, InteractionValue } from '../../types/booru-object';
|
||||
import { FieldMatcher } from './types';
|
||||
|
||||
function interactionMatch(imageId: number, type: InteractionType, value: InteractionValue, interactions: Interaction[]): boolean {
|
||||
return interactions.some(v => v.image_id === imageId && v.interaction_type === type && (value === null || v.value === value));
|
||||
}
|
||||
|
||||
export function makeUserMatcher(term: string): FieldMatcher {
|
||||
// Should work with most my:conditions except watched.
|
||||
return (value, field, documentId) => {
|
||||
switch (term) {
|
||||
case 'faves':
|
||||
return interactionMatch(documentId, 'faved', null, window.booru.interactions);
|
||||
case 'upvotes':
|
||||
return interactionMatch(documentId, 'voted', 'up', window.booru.interactions);
|
||||
case 'downvotes':
|
||||
return interactionMatch(documentId, 'voted', 'down', window.booru.interactions);
|
||||
case 'watched':
|
||||
case 'hidden':
|
||||
default:
|
||||
// Other my: interactions aren't supported, return false to prevent them from triggering spoiler.
|
||||
return false;
|
||||
}
|
||||
};
|
||||
}
|
|
@ -1,6 +1,6 @@
|
|||
import { inputDuplicatorCreator } from './input-duplicator';
|
||||
|
||||
function imageSourcesCreator() {
|
||||
function setupInputs() {
|
||||
inputDuplicatorCreator({
|
||||
addButtonSelector: '.js-image-add-source',
|
||||
fieldSelector: '.js-image-source',
|
||||
|
@ -9,4 +9,18 @@ function imageSourcesCreator() {
|
|||
});
|
||||
}
|
||||
|
||||
function imageSourcesCreator() {
|
||||
setupInputs();
|
||||
document.addEventListener('fetchcomplete', ({ target, detail }) => {
|
||||
const sourceSauce = document.querySelector('.js-sourcesauce');
|
||||
|
||||
if (target.matches('#source-form')) {
|
||||
detail.text().then(text => {
|
||||
sourceSauce.outerHTML = text;
|
||||
setupInputs();
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export { imageSourcesCreator };
|
||||
|
|
|
@ -2,7 +2,9 @@
|
|||
* Frontend timestamps.
|
||||
*/
|
||||
|
||||
const strings = {
|
||||
import { assertNotNull } from './utils/assert';
|
||||
|
||||
const strings: Record<string, string> = {
|
||||
seconds: 'less than a minute',
|
||||
minute: 'about a minute',
|
||||
minutes: '%d minutes',
|
||||
|
@ -16,16 +18,21 @@ const strings = {
|
|||
years: '%d years',
|
||||
};
|
||||
|
||||
function distance(time) {
|
||||
return new Date() - time;
|
||||
function distance(time: Date) {
|
||||
return new Date().getTime() - time.getTime();
|
||||
}
|
||||
|
||||
function substitute(key, amount) {
|
||||
return strings[key].replace('%d', Math.round(amount));
|
||||
function substitute(key: string, amount: number) {
|
||||
return strings[key].replace('%d', Math.round(amount).toString());
|
||||
}
|
||||
|
||||
function setTimeAgo(el) {
|
||||
const date = new Date(el.getAttribute('datetime'));
|
||||
function setTimeAgo(el: HTMLTimeElement) {
|
||||
const datetime = el.getAttribute('datetime');
|
||||
if (!datetime) {
|
||||
return;
|
||||
}
|
||||
|
||||
const date = new Date(datetime);
|
||||
const distMillis = distance(date);
|
||||
|
||||
const seconds = Math.abs(distMillis) / 1000,
|
||||
|
@ -49,20 +56,20 @@ function setTimeAgo(el) {
|
|||
substitute('years', years);
|
||||
|
||||
if (!el.getAttribute('title')) {
|
||||
el.setAttribute('title', el.textContent);
|
||||
el.setAttribute('title', assertNotNull(el.textContent));
|
||||
}
|
||||
el.textContent = words + (distMillis < 0 ? ' from now' : ' ago');
|
||||
}
|
||||
|
||||
function timeAgo(args) {
|
||||
[].forEach.call(args, el => setTimeAgo(el));
|
||||
export function timeAgo(args: HTMLTimeElement[] | HTMLCollectionOf<HTMLTimeElement>) {
|
||||
for (const el of args) {
|
||||
setTimeAgo(el);
|
||||
}
|
||||
}
|
||||
|
||||
function setupTimestamps() {
|
||||
export function setupTimestamps() {
|
||||
timeAgo(document.getElementsByTagName('time'));
|
||||
window.setTimeout(setupTimestamps, 60000);
|
||||
}
|
||||
|
||||
export { setupTimestamps };
|
||||
|
||||
window.booru.timeAgo = timeAgo;
|
|
@ -1,3 +1,4 @@
|
|||
import { assertNotNull, assertNotUndefined } from './utils/assert';
|
||||
import { $$, makeEl, findFirstTextNode } from './utils/dom';
|
||||
import { fire, delegate, leftClick } from './utils/events';
|
||||
|
||||
|
@ -6,7 +7,7 @@ const headers = () => ({
|
|||
'x-requested-with': 'XMLHttpRequest'
|
||||
});
|
||||
|
||||
function confirm(event, target) {
|
||||
function confirm(event: Event, target: HTMLElement) {
|
||||
if (!window.confirm(target.dataset.confirm)) {
|
||||
event.preventDefault();
|
||||
event.stopImmediatePropagation();
|
||||
|
@ -14,28 +15,28 @@ function confirm(event, target) {
|
|||
}
|
||||
}
|
||||
|
||||
function disable(event, target) {
|
||||
function disable(event: Event, target: HTMLAnchorElement | HTMLButtonElement | HTMLInputElement) {
|
||||
// failed validations prevent the form from being submitted;
|
||||
// stop here or the form will be permanently locked
|
||||
if (target.type === 'submit' && target.closest(':invalid') !== null) return;
|
||||
|
||||
// Store what's already there so we don't lose it
|
||||
const label = findFirstTextNode(target);
|
||||
const label = findFirstTextNode<Text>(target);
|
||||
if (label) {
|
||||
target.dataset.enableWith = label.nodeValue;
|
||||
target.dataset.enableWith = assertNotNull(label.nodeValue);
|
||||
label.nodeValue = ` ${target.dataset.disableWith}`;
|
||||
}
|
||||
else {
|
||||
target.dataset.enableWith = target.innerHTML;
|
||||
target.innerHTML = target.dataset.disableWith;
|
||||
target.innerHTML = assertNotUndefined(target.dataset.disableWith);
|
||||
}
|
||||
|
||||
// delay is needed because Safari stops the submit if the button is immediately disabled
|
||||
requestAnimationFrame(() => target.disabled = 'disabled');
|
||||
requestAnimationFrame(() => target.setAttribute('disabled', 'disabled'));
|
||||
}
|
||||
|
||||
// you should use button_to instead of link_to[method]!
|
||||
function linkMethod(event, target) {
|
||||
function linkMethod(event: Event, target: HTMLAnchorElement) {
|
||||
event.preventDefault();
|
||||
|
||||
const form = makeEl('form', { action: target.href, method: 'POST' });
|
||||
|
@ -49,41 +50,42 @@ function linkMethod(event, target) {
|
|||
form.submit();
|
||||
}
|
||||
|
||||
function formRemote(event, target) {
|
||||
function formRemote(event: Event, target: HTMLFormElement) {
|
||||
event.preventDefault();
|
||||
|
||||
fetch(target.action, {
|
||||
credentials: 'same-origin',
|
||||
method: (target.dataset.method || target.method || 'POST').toUpperCase(),
|
||||
method: (target.dataset.method || target.method).toUpperCase(),
|
||||
headers: headers(),
|
||||
body: new FormData(target)
|
||||
}).then(response => {
|
||||
if (response && response.status === 300) {
|
||||
window.location.reload(true);
|
||||
return;
|
||||
}
|
||||
fire(target, 'fetchcomplete', response);
|
||||
if (response && response.status === 300) {
|
||||
window.location.reload();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function formReset(event, target) {
|
||||
$$('[disabled][data-disable-with][data-enable-with]', target).forEach(input => {
|
||||
function formReset(_event: Event | null, target: HTMLElement) {
|
||||
$$<HTMLElement>('[disabled][data-disable-with][data-enable-with]', target).forEach(input => {
|
||||
const label = findFirstTextNode(input);
|
||||
if (label) {
|
||||
label.nodeValue = ` ${input.dataset.enableWith}`;
|
||||
}
|
||||
else { input.innerHTML = target.dataset.enableWith; }
|
||||
else {
|
||||
input.innerHTML = assertNotUndefined(input.dataset.enableWith);
|
||||
}
|
||||
delete input.dataset.enableWith;
|
||||
input.removeAttribute('disabled');
|
||||
});
|
||||
}
|
||||
|
||||
function linkRemote(event, target) {
|
||||
function linkRemote(event: Event, target: HTMLAnchorElement) {
|
||||
event.preventDefault();
|
||||
|
||||
fetch(target.href, {
|
||||
credentials: 'same-origin',
|
||||
method: target.dataset.method.toUpperCase(),
|
||||
method: (target.dataset.method || 'get').toUpperCase(),
|
||||
headers: headers()
|
||||
}).then(response =>
|
||||
fire(target, 'fetchcomplete', response)
|
||||
|
@ -106,5 +108,7 @@ delegate(document, 'reset', {
|
|||
});
|
||||
|
||||
window.addEventListener('pageshow', () => {
|
||||
[].forEach.call(document.forms, form => formReset(null, form));
|
||||
for (const form of document.forms) {
|
||||
formReset(null, form);
|
||||
}
|
||||
});
|
|
@ -34,7 +34,7 @@ function setupImageUpload() {
|
|||
const [fileField, remoteUrl, scraperError] = $$('.js-scraper', form);
|
||||
const descrEl = $('.js-image-descr-input', form);
|
||||
const tagsEl = $('.js-image-tags-input', form);
|
||||
const sourceEl = $$('.js-image-source', form).find(input => input.value === '');
|
||||
const sourceEl = $$('.js-source-url', form).find(input => input.value === '');
|
||||
const fetchButton = $('#js-scraper-preview');
|
||||
if (!fetchButton) return;
|
||||
|
||||
|
@ -132,21 +132,17 @@ function setupImageUpload() {
|
|||
});
|
||||
|
||||
// Enable/disable the fetch button based on content in the image scraper. Fetching with no URL makes no sense.
|
||||
remoteUrl.addEventListener('input', () => {
|
||||
function setFetchEnabled() {
|
||||
if (remoteUrl.value.length > 0) {
|
||||
enableFetch();
|
||||
}
|
||||
else {
|
||||
disableFetch();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (remoteUrl.value.length > 0) {
|
||||
enableFetch();
|
||||
}
|
||||
else {
|
||||
disableFetch();
|
||||
}
|
||||
remoteUrl.addEventListener('input', () => setFetchEnabled());
|
||||
setFetchEnabled();
|
||||
|
||||
// Catch unintentional navigation away from the page
|
||||
|
||||
|
|
35
assets/js/utils/__tests__/assert.spec.ts
Normal file
35
assets/js/utils/__tests__/assert.spec.ts
Normal file
|
@ -0,0 +1,35 @@
|
|||
import { assertNotNull, assertNotUndefined, assertType } from '../assert';
|
||||
|
||||
describe('Assertion utilities', () => {
|
||||
describe('assertNotNull', () => {
|
||||
it('should return non-null values', () => {
|
||||
expect(assertNotNull(1)).toEqual(1);
|
||||
expect(assertNotNull('anything')).toEqual('anything');
|
||||
});
|
||||
|
||||
it('should throw when passed a null value', () => {
|
||||
expect(() => assertNotNull(null)).toThrow('Expected non-null value');
|
||||
});
|
||||
});
|
||||
|
||||
describe('assertNotUndefined', () => {
|
||||
it('should return non-undefined values', () => {
|
||||
expect(assertNotUndefined(1)).toEqual(1);
|
||||
expect(assertNotUndefined('anything')).toEqual('anything');
|
||||
});
|
||||
|
||||
it('should throw when passed an undefined value', () => {
|
||||
expect(() => assertNotUndefined(undefined)).toThrow('Expected non-undefined value');
|
||||
});
|
||||
});
|
||||
|
||||
describe('assertType', () => {
|
||||
it('should return values of the generic type', () => {
|
||||
expect(assertType({}, Object)).toEqual({});
|
||||
});
|
||||
|
||||
it('should throw when passed a value of the wrong type', () => {
|
||||
expect(() => assertType('anything', Number)).toThrow('Expected value of type');
|
||||
});
|
||||
});
|
||||
});
|
|
@ -29,11 +29,11 @@ describe('DOM Utilities', () => {
|
|||
|
||||
describe('$', () => {
|
||||
afterEach(() => {
|
||||
jest.restoreAllMocks();
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
it('should call the native querySelector method on document by default', () => {
|
||||
const spy = jest.spyOn(document, 'querySelector');
|
||||
const spy = vi.spyOn(document, 'querySelector');
|
||||
|
||||
mockSelectors.forEach((selector, nthCall) => {
|
||||
$(selector);
|
||||
|
@ -43,7 +43,7 @@ describe('DOM Utilities', () => {
|
|||
|
||||
it('should call the native querySelector method on the passed element', () => {
|
||||
const mockElement = document.createElement('br');
|
||||
const spy = jest.spyOn(mockElement, 'querySelector');
|
||||
const spy = vi.spyOn(mockElement, 'querySelector');
|
||||
|
||||
mockSelectors.forEach((selector, nthCall) => {
|
||||
// FIXME This will not be necessary once the file is properly typed
|
||||
|
@ -55,11 +55,11 @@ describe('DOM Utilities', () => {
|
|||
|
||||
describe('$$', () => {
|
||||
afterEach(() => {
|
||||
jest.restoreAllMocks();
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
it('should call the native querySelectorAll method on document by default', () => {
|
||||
const spy = jest.spyOn(document, 'querySelectorAll');
|
||||
const spy = vi.spyOn(document, 'querySelectorAll');
|
||||
|
||||
mockSelectors.forEach((selector, nthCall) => {
|
||||
$$(selector);
|
||||
|
@ -69,7 +69,7 @@ describe('DOM Utilities', () => {
|
|||
|
||||
it('should call the native querySelectorAll method on the passed element', () => {
|
||||
const mockElement = document.createElement('br');
|
||||
const spy = jest.spyOn(mockElement, 'querySelectorAll');
|
||||
const spy = vi.spyOn(mockElement, 'querySelectorAll');
|
||||
|
||||
mockSelectors.forEach((selector, nthCall) => {
|
||||
// FIXME This will not be necessary once the file is properly typed
|
||||
|
@ -311,7 +311,7 @@ describe('DOM Utilities', () => {
|
|||
|
||||
describe('removeEl', () => {
|
||||
afterEach(() => {
|
||||
jest.restoreAllMocks();
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
it('should NOT throw error if element has no parent', () => {
|
||||
|
@ -324,7 +324,7 @@ describe('DOM Utilities', () => {
|
|||
const childNode = document.createElement('p');
|
||||
parentNode.appendChild(childNode);
|
||||
|
||||
const spy = jest.spyOn(parentNode, 'removeChild');
|
||||
const spy = vi.spyOn(parentNode, 'removeChild');
|
||||
|
||||
removeEl(childNode);
|
||||
expect(spy).toHaveBeenCalledTimes(1);
|
||||
|
@ -374,7 +374,7 @@ describe('DOM Utilities', () => {
|
|||
});
|
||||
|
||||
it('should call callback on left click', () => {
|
||||
const mockCallback = jest.fn();
|
||||
const mockCallback = vi.fn();
|
||||
const element = document.createElement('div');
|
||||
cleanup = onLeftClick(mockCallback, element as unknown as Document);
|
||||
|
||||
|
@ -384,7 +384,7 @@ describe('DOM Utilities', () => {
|
|||
});
|
||||
|
||||
it('should NOT call callback on non-left click', () => {
|
||||
const mockCallback = jest.fn();
|
||||
const mockCallback = vi.fn();
|
||||
const element = document.createElement('div');
|
||||
cleanup = onLeftClick(mockCallback, element as unknown as Document);
|
||||
|
||||
|
@ -395,7 +395,7 @@ describe('DOM Utilities', () => {
|
|||
});
|
||||
|
||||
it('should add click event listener to the document by default', () => {
|
||||
const mockCallback = jest.fn();
|
||||
const mockCallback = vi.fn();
|
||||
cleanup = onLeftClick(mockCallback);
|
||||
|
||||
fireEvent.click(document.body);
|
||||
|
@ -404,7 +404,7 @@ describe('DOM Utilities', () => {
|
|||
});
|
||||
|
||||
it('should return a cleanup function that removes the listener', () => {
|
||||
const mockCallback = jest.fn();
|
||||
const mockCallback = vi.fn();
|
||||
const element = document.createElement('div');
|
||||
const localCleanup = onLeftClick(mockCallback, element as unknown as Document);
|
||||
|
||||
|
@ -424,8 +424,8 @@ describe('DOM Utilities', () => {
|
|||
describe('whenReady', () => {
|
||||
it('should call callback immediately if document ready state is not loading', () => {
|
||||
const mockReadyStateValue = getRandomArrayItem<DocumentReadyState>(['complete', 'interactive']);
|
||||
const readyStateSpy = jest.spyOn(document, 'readyState', 'get').mockReturnValue(mockReadyStateValue);
|
||||
const mockCallback = jest.fn();
|
||||
const readyStateSpy = vi.spyOn(document, 'readyState', 'get').mockReturnValue(mockReadyStateValue);
|
||||
const mockCallback = vi.fn();
|
||||
|
||||
try {
|
||||
whenReady(mockCallback);
|
||||
|
@ -437,9 +437,9 @@ describe('DOM Utilities', () => {
|
|||
});
|
||||
|
||||
it('should add event listener with callback if document ready state is loading', () => {
|
||||
const readyStateSpy = jest.spyOn(document, 'readyState', 'get').mockReturnValue('loading');
|
||||
const addEventListenerSpy = jest.spyOn(document, 'addEventListener');
|
||||
const mockCallback = jest.fn();
|
||||
const readyStateSpy = vi.spyOn(document, 'readyState', 'get').mockReturnValue('loading');
|
||||
const addEventListenerSpy = vi.spyOn(document, 'addEventListener');
|
||||
const mockCallback = vi.fn();
|
||||
|
||||
try {
|
||||
whenReady(mockCallback);
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
import { clearDragSource, initDraggables } from '../draggable';
|
||||
import { fireEvent } from '@testing-library/dom';
|
||||
import { getRandomArrayItem } from '../../../test/randomness';
|
||||
import { MockInstance } from 'vitest';
|
||||
|
||||
describe('Draggable Utilities', () => {
|
||||
// jsdom lacks proper support for window.DragEvent so this is an attempt at a minimal recreation
|
||||
|
@ -30,7 +31,7 @@ describe('Draggable Utilities', () => {
|
|||
const draggingClass = 'dragging';
|
||||
const dragContainerClass = 'drag-container';
|
||||
const dragOverClass = 'over';
|
||||
let documentEventListenerSpy: jest.SpyInstance;
|
||||
let documentEventListenerSpy: MockInstance;
|
||||
|
||||
let mockDragContainer: HTMLDivElement;
|
||||
let mockDraggable: HTMLDivElement;
|
||||
|
@ -45,7 +46,7 @@ describe('Draggable Utilities', () => {
|
|||
|
||||
|
||||
// Redirect all document event listeners to this element for easier cleanup
|
||||
documentEventListenerSpy = jest.spyOn(document, 'addEventListener').mockImplementation((...params) => {
|
||||
documentEventListenerSpy = vi.spyOn(document, 'addEventListener').mockImplementation((...params) => {
|
||||
mockDragContainer.addEventListener(...params);
|
||||
});
|
||||
});
|
||||
|
@ -192,7 +193,7 @@ describe('Draggable Utilities', () => {
|
|||
|
||||
const mockDropEvent = createDragEvent('drop');
|
||||
Object.assign(mockDropEvent, { clientX: 124 });
|
||||
const boundingBoxSpy = jest.spyOn(mockDraggable, 'getBoundingClientRect').mockReturnValue({
|
||||
const boundingBoxSpy = vi.spyOn(mockDraggable, 'getBoundingClientRect').mockReturnValue({
|
||||
left: 100,
|
||||
width: 50,
|
||||
} as unknown as DOMRect);
|
||||
|
@ -221,7 +222,7 @@ describe('Draggable Utilities', () => {
|
|||
|
||||
const mockDropEvent = createDragEvent('drop');
|
||||
Object.assign(mockDropEvent, { clientX: 125 });
|
||||
const boundingBoxSpy = jest.spyOn(mockDraggable, 'getBoundingClientRect').mockReturnValue({
|
||||
const boundingBoxSpy = vi.spyOn(mockDraggable, 'getBoundingClientRect').mockReturnValue({
|
||||
left: 100,
|
||||
width: 50,
|
||||
} as unknown as DOMRect);
|
||||
|
@ -291,7 +292,7 @@ describe('Draggable Utilities', () => {
|
|||
initDraggables();
|
||||
|
||||
const mockEvent = createDragEvent('dragstart');
|
||||
const draggableClosestSpy = jest.spyOn(mockDraggable, 'closest').mockReturnValue(null);
|
||||
const draggableClosestSpy = vi.spyOn(mockDraggable, 'closest').mockReturnValue(null);
|
||||
|
||||
try {
|
||||
fireEvent(mockDraggable, mockEvent);
|
||||
|
|
|
@ -8,7 +8,7 @@ describe('Event utils', () => {
|
|||
describe('fire', () => {
|
||||
it('should call the native dispatchEvent method on the element', () => {
|
||||
const mockElement = document.createElement('div');
|
||||
const dispatchEventSpy = jest.spyOn(mockElement, 'dispatchEvent');
|
||||
const dispatchEventSpy = vi.spyOn(mockElement, 'dispatchEvent');
|
||||
const mockDetail = getRandomArrayItem([0, 'test', null]);
|
||||
|
||||
fire(mockElement, mockEvent, mockDetail);
|
||||
|
@ -42,7 +42,7 @@ describe('Event utils', () => {
|
|||
mockButton.classList.add('mock-button');
|
||||
mockInnerElement.appendChild(mockButton);
|
||||
|
||||
const mockHandler = jest.fn();
|
||||
const mockHandler = vi.fn();
|
||||
on(mockElement, 'click', `.${innerClass}`, mockHandler);
|
||||
|
||||
fireEvent(mockButton, new Event('click', { bubbles: true }));
|
||||
|
@ -58,7 +58,7 @@ describe('Event utils', () => {
|
|||
describe('leftClick', () => {
|
||||
it('should fire on left click', () => {
|
||||
const mockButton = document.createElement('button');
|
||||
const mockHandler = jest.fn();
|
||||
const mockHandler = vi.fn();
|
||||
|
||||
mockButton.addEventListener('click', e => leftClick(mockHandler)(e, mockButton));
|
||||
|
||||
|
@ -69,7 +69,7 @@ describe('Event utils', () => {
|
|||
|
||||
it('should NOT fire on any other click', () => {
|
||||
const mockButton = document.createElement('button');
|
||||
const mockHandler = jest.fn();
|
||||
const mockHandler = vi.fn();
|
||||
const mockButtonNumber = getRandomArrayItem([1, 2, 3, 4, 5]);
|
||||
|
||||
mockButton.addEventListener('click', e => leftClick(mockHandler)(e, mockButton));
|
||||
|
@ -83,7 +83,7 @@ describe('Event utils', () => {
|
|||
describe('delegate', () => {
|
||||
it('should call the native addEventListener method on the element', () => {
|
||||
const mockElement = document.createElement('div');
|
||||
const addEventListenerSpy = jest.spyOn(mockElement, 'addEventListener');
|
||||
const addEventListenerSpy = vi.spyOn(mockElement, 'addEventListener');
|
||||
|
||||
delegate(mockElement, mockEvent, {});
|
||||
|
||||
|
@ -102,7 +102,7 @@ describe('Event utils', () => {
|
|||
const mockButton = document.createElement('button');
|
||||
mockElement.appendChild(mockButton);
|
||||
|
||||
const mockHandler = jest.fn();
|
||||
const mockHandler = vi.fn();
|
||||
delegate(mockElement, 'click', { [`.${parentClass}`]: mockHandler });
|
||||
|
||||
fireEvent(mockButton, new Event('click', { bubbles: true }));
|
||||
|
@ -127,8 +127,8 @@ describe('Event utils', () => {
|
|||
const mockButton = document.createElement('button');
|
||||
mockWrapperElement.appendChild(mockButton);
|
||||
|
||||
const mockParentHandler = jest.fn();
|
||||
const mockWrapperHandler = jest.fn().mockReturnValue(false);
|
||||
const mockParentHandler = vi.fn();
|
||||
const mockWrapperHandler = vi.fn().mockReturnValue(false);
|
||||
delegate(mockElement, 'click', {
|
||||
[`.${wrapperClass}`]: mockWrapperHandler,
|
||||
[`.${parentClass}`]: mockParentHandler,
|
||||
|
|
|
@ -3,6 +3,8 @@ import { getRandomArrayItem } from '../../../test/randomness';
|
|||
import { mockStorage } from '../../../test/mock-storage';
|
||||
import { createEvent, fireEvent } from '@testing-library/dom';
|
||||
import { EventType } from '@testing-library/dom/types/events';
|
||||
import { SpoilerType } from '../../../types/booru-object';
|
||||
import { beforeEach } from 'vitest';
|
||||
|
||||
describe('Image utils', () => {
|
||||
const hiddenClass = 'hidden';
|
||||
|
@ -81,6 +83,10 @@ describe('Image utils', () => {
|
|||
},
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
mockServeHidpiValue = null;
|
||||
});
|
||||
|
||||
describe('video thumbnail', () => {
|
||||
type CreateMockElementsOptions = {
|
||||
extension: string;
|
||||
|
@ -108,7 +114,7 @@ describe('Image utils', () => {
|
|||
});
|
||||
}
|
||||
mockElement.appendChild(mockVideo);
|
||||
const playSpy = jest.spyOn(mockVideo, 'play').mockReturnValue(Promise.resolve());
|
||||
const playSpy = vi.spyOn(mockVideo, 'play').mockReturnValue(Promise.resolve());
|
||||
|
||||
const mockSpoilerOverlay = createMockSpoilerOverlay();
|
||||
mockElement.appendChild(mockSpoilerOverlay);
|
||||
|
@ -167,7 +173,7 @@ describe('Image utils', () => {
|
|||
const { mockElement } = createMockElements({
|
||||
extension: 'webm',
|
||||
});
|
||||
const jsonParseSpy = jest.spyOn(JSON, 'parse');
|
||||
const jsonParseSpy = vi.spyOn(JSON, 'parse');
|
||||
|
||||
mockElement.removeAttribute(missingAttributeName);
|
||||
|
||||
|
@ -363,6 +369,19 @@ describe('Image utils', () => {
|
|||
expect(mockShowElement).toHaveClass(spoilerPendingClass);
|
||||
});
|
||||
|
||||
it('should play the video if it is present', () => {
|
||||
const mockElement = document.createElement('div');
|
||||
const { mockShowElement } = createImageShowElement(mockElement);
|
||||
const mockVideo = document.createElement('video');
|
||||
mockShowElement.appendChild(mockVideo);
|
||||
|
||||
const playSpy = vi.spyOn(mockVideo, 'play').mockReturnValue(Promise.resolve());
|
||||
|
||||
showBlock(mockElement);
|
||||
|
||||
expect(playSpy).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should not throw if image-filtered element is missing', () => {
|
||||
const mockElement = document.createElement('div');
|
||||
createImageShowElement(mockElement);
|
||||
|
@ -381,7 +400,7 @@ describe('Image utils', () => {
|
|||
it('should return early if picture AND video elements are missing', () => {
|
||||
const mockElement = document.createElement('div');
|
||||
|
||||
const querySelectorSpy = jest.spyOn(mockElement, 'querySelector');
|
||||
const querySelectorSpy = vi.spyOn(mockElement, 'querySelector');
|
||||
|
||||
hideThumb(mockElement, mockSpoilerUri, mockSpoilerReason);
|
||||
|
||||
|
@ -398,9 +417,9 @@ describe('Image utils', () => {
|
|||
const mockElement = document.createElement('div');
|
||||
const mockVideo = document.createElement('video');
|
||||
mockElement.appendChild(mockVideo);
|
||||
const pauseSpy = jest.spyOn(mockVideo, 'pause').mockReturnValue(undefined);
|
||||
const pauseSpy = vi.spyOn(mockVideo, 'pause').mockReturnValue(undefined);
|
||||
|
||||
const querySelectorSpy = jest.spyOn(mockElement, 'querySelector');
|
||||
const querySelectorSpy = vi.spyOn(mockElement, 'querySelector');
|
||||
|
||||
hideThumb(mockElement, mockSpoilerUri, mockSpoilerReason);
|
||||
|
||||
|
@ -422,7 +441,7 @@ describe('Image utils', () => {
|
|||
const mockElement = document.createElement('div');
|
||||
const mockVideo = document.createElement('video');
|
||||
mockElement.appendChild(mockVideo);
|
||||
const pauseSpy = jest.spyOn(mockVideo, 'pause').mockReturnValue(undefined);
|
||||
const pauseSpy = vi.spyOn(mockVideo, 'pause').mockReturnValue(undefined);
|
||||
const mockImage = document.createElement('img');
|
||||
mockImage.classList.add(hiddenClass);
|
||||
mockElement.appendChild(mockImage);
|
||||
|
@ -451,8 +470,8 @@ describe('Image utils', () => {
|
|||
const mockPicture = document.createElement('picture');
|
||||
mockElement.appendChild(mockPicture);
|
||||
|
||||
const imgQuerySelectorSpy = jest.spyOn(mockElement, 'querySelector');
|
||||
const pictureQuerySelectorSpy = jest.spyOn(mockPicture, 'querySelector');
|
||||
const imgQuerySelectorSpy = vi.spyOn(mockElement, 'querySelector');
|
||||
const pictureQuerySelectorSpy = vi.spyOn(mockPicture, 'querySelector');
|
||||
|
||||
hideThumb(mockElement, mockSpoilerUri, mockSpoilerReason);
|
||||
|
||||
|
@ -492,7 +511,7 @@ describe('Image utils', () => {
|
|||
describe('spoilerThumb', () => {
|
||||
const testSpoilerThumb = (handlers?: [EventType, EventType]) => {
|
||||
const { mockElement, mockSpoilerOverlay, mockSizeImage } = createMockElementWithPicture('jpg');
|
||||
const addEventListenerSpy = jest.spyOn(mockElement, 'addEventListener');
|
||||
const addEventListenerSpy = vi.spyOn(mockElement, 'addEventListener');
|
||||
|
||||
spoilerThumb(mockElement, mockSpoilerUri, mockSpoilerReason);
|
||||
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import { fetchHtml, fetchJson, handleError } from '../requests';
|
||||
import fetchMock from 'jest-fetch-mock';
|
||||
import { fetchMock } from '../../../test/fetch-mock.ts';
|
||||
|
||||
describe('Request utils', () => {
|
||||
const mockEndpoint = '/endpoint';
|
||||
|
|
|
@ -117,11 +117,11 @@ describe('Store utilities', () => {
|
|||
it('should attach a storage event listener and fire when the provide key changes', () => {
|
||||
const mockKey = `mock-watch-key-${getRandomIntBetween(1, 10)}`;
|
||||
const mockValue = Math.random();
|
||||
const mockCallback = jest.fn();
|
||||
const mockCallback = vi.fn();
|
||||
setStorageValue({
|
||||
[mockKey]: JSON.stringify(mockValue),
|
||||
});
|
||||
const addEventListenerSpy = jest.spyOn(window, 'addEventListener');
|
||||
const addEventListenerSpy = vi.spyOn(window, 'addEventListener');
|
||||
|
||||
const cleanup = store.watch(mockKey, mockCallback);
|
||||
|
||||
|
|
|
@ -2,6 +2,7 @@ import { displayTags, getHiddenTags, getSpoileredTags, imageHitsComplex, imageHi
|
|||
import { mockStorage } from '../../../test/mock-storage';
|
||||
import { getRandomArrayItem } from '../../../test/randomness';
|
||||
import parseSearch from '../../match_query';
|
||||
import { SpoilerType } from '../../../types/booru-object';
|
||||
|
||||
describe('Tag utilities', () => {
|
||||
const tagStorageKeyPrefix = 'bor_tags_';
|
||||
|
|
28
assets/js/utils/assert.ts
Normal file
28
assets/js/utils/assert.ts
Normal file
|
@ -0,0 +1,28 @@
|
|||
export function assertNotNull<T>(value: T | null): T {
|
||||
if (value === null) {
|
||||
throw new Error('Expected non-null value');
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
export function assertNotUndefined<T>(value: T | undefined): T {
|
||||
// eslint-disable-next-line no-undefined
|
||||
if (value === undefined) {
|
||||
throw new Error('Expected non-undefined value');
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
type Constructor<T> = { new (...args: any[]): T };
|
||||
|
||||
export function assertType<T>(value: any, c: Constructor<T>): T {
|
||||
if (value instanceof c) {
|
||||
return value;
|
||||
}
|
||||
|
||||
throw new Error('Expected value of type');
|
||||
}
|
||||
/* eslint-enable @typescript-eslint/no-explicit-any */
|
|
@ -57,8 +57,8 @@ export function makeEl<Tag extends keyof HTMLElementTagNameMap>(tag: Tag, attr?:
|
|||
if (attr) {
|
||||
for (const prop in attr) {
|
||||
const newValue = attr[prop];
|
||||
if (typeof newValue !== 'undefined') {
|
||||
el[prop] = newValue as Exclude<typeof newValue, undefined>;
|
||||
if (newValue) {
|
||||
el[prop] = newValue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -64,9 +64,15 @@ export function showThumb(img: HTMLDivElement) {
|
|||
export function showBlock(img: HTMLDivElement) {
|
||||
img.querySelector('.image-filtered')?.classList.add('hidden');
|
||||
const imageShowClasses = img.querySelector('.image-show')?.classList;
|
||||
|
||||
if (imageShowClasses) {
|
||||
imageShowClasses.remove('hidden');
|
||||
imageShowClasses.add('spoiler-pending');
|
||||
|
||||
const vidEl = img.querySelector('video');
|
||||
if (vidEl) {
|
||||
vidEl.play();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -63,8 +63,7 @@ export class LocalAutocompleter {
|
|||
const nameLength = this.view.getUint8(location);
|
||||
const assnLength = this.view.getUint8(location + 1 + nameLength);
|
||||
|
||||
/** @type {number[]} */
|
||||
const associations = [];
|
||||
const associations: number[] = [];
|
||||
const name = this.decoder.decode(this.data.slice(location + 1, location + nameLength + 1));
|
||||
|
||||
for (let i = 0; i < assnLength; i++) {
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
import { escapeHtml } from './dom';
|
||||
import { getTag } from '../booru';
|
||||
import { AstMatcher } from '../query/types';
|
||||
|
||||
export interface TagData {
|
||||
id: number;
|
||||
|
@ -42,7 +43,7 @@ export function getSpoileredTags() {
|
|||
.sort(sortTags.bind(null, false));
|
||||
}
|
||||
|
||||
export function imageHitsTags(img: HTMLImageElement, matchTags: TagData[]): TagData[] {
|
||||
export function imageHitsTags(img: HTMLElement, matchTags: TagData[]): TagData[] {
|
||||
const imageTagsString = img.dataset.imageTags;
|
||||
if (typeof imageTagsString === 'undefined') {
|
||||
return [];
|
||||
|
@ -51,8 +52,8 @@ export function imageHitsTags(img: HTMLImageElement, matchTags: TagData[]): TagD
|
|||
return matchTags.filter(t => imageTags.indexOf(t.id) !== -1);
|
||||
}
|
||||
|
||||
export function imageHitsComplex(img: HTMLImageElement, matchComplex: { hitsImage: (img: HTMLImageElement) => boolean }) {
|
||||
return matchComplex.hitsImage(img);
|
||||
export function imageHitsComplex(img: HTMLElement, matchComplex: AstMatcher) {
|
||||
return matchComplex(img);
|
||||
}
|
||||
|
||||
export function displayTags(tags: TagData[]): string {
|
||||
|
|
31
assets/js/vendor/closest.polyfill.js
vendored
31
assets/js/vendor/closest.polyfill.js
vendored
|
@ -1,31 +0,0 @@
|
|||
// element-closest | CC0-1.0 | github.com/jonathantneal/closest
|
||||
|
||||
if (typeof Element.prototype.matches !== 'function') {
|
||||
Element.prototype.matches = Element.prototype.msMatchesSelector || Element.prototype.mozMatchesSelector || Element.prototype.webkitMatchesSelector || function matches(selector) {
|
||||
var element = this;
|
||||
var elements = (element.document || element.ownerDocument).querySelectorAll(selector);
|
||||
var index = 0;
|
||||
|
||||
while (elements[index] && elements[index] !== element) {
|
||||
++index;
|
||||
}
|
||||
|
||||
return Boolean(elements[index]);
|
||||
};
|
||||
}
|
||||
|
||||
if (typeof Element.prototype.closest !== 'function') {
|
||||
Element.prototype.closest = function closest(selector) {
|
||||
var element = this;
|
||||
|
||||
while (element && element.nodeType === 1) {
|
||||
if (element.matches(selector)) {
|
||||
return element;
|
||||
}
|
||||
|
||||
element = element.parentNode;
|
||||
}
|
||||
|
||||
return null;
|
||||
};
|
||||
}
|
17
assets/js/vendor/customevent.polyfill.js
vendored
17
assets/js/vendor/customevent.polyfill.js
vendored
|
@ -1,17 +0,0 @@
|
|||
// https://developer.mozilla.org/en-US/docs/Web/API/CustomEvent/CustomEvent
|
||||
|
||||
(function () {
|
||||
|
||||
if ( typeof window.CustomEvent === "function" ) return false;
|
||||
|
||||
function CustomEvent ( event, params ) {
|
||||
params = params || { bubbles: false, cancelable: false, detail: undefined };
|
||||
var evt = document.createEvent( 'CustomEvent' );
|
||||
evt.initCustomEvent( event, params.bubbles, params.cancelable, params.detail );
|
||||
return evt;
|
||||
}
|
||||
|
||||
CustomEvent.prototype = window.Event.prototype;
|
||||
|
||||
window.CustomEvent = CustomEvent;
|
||||
})();
|
104
assets/js/vendor/es6.polyfill.js
vendored
104
assets/js/vendor/es6.polyfill.js
vendored
|
@ -1,104 +0,0 @@
|
|||
/**
|
||||
* ES6 methods polyfill
|
||||
* Sourced from their respective articles on MDN
|
||||
*/
|
||||
|
||||
if (!Array.prototype.find) {
|
||||
Array.prototype.find = function(predicate) {
|
||||
'use strict';
|
||||
if (this == null) {
|
||||
throw new TypeError('Array.prototype.find called on null or undefined');
|
||||
}
|
||||
if (typeof predicate !== 'function') {
|
||||
throw new TypeError('predicate must be a function');
|
||||
}
|
||||
var list = Object(this);
|
||||
var length = list.length >>> 0;
|
||||
var thisArg = arguments[1];
|
||||
var value;
|
||||
|
||||
for (var i = 0; i < length; i++) {
|
||||
value = list[i];
|
||||
if (predicate.call(thisArg, value, i, list)) {
|
||||
return value;
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
};
|
||||
}
|
||||
|
||||
if (!Array.prototype.findIndex) {
|
||||
Array.prototype.findIndex = function(predicate) {
|
||||
'use strict';
|
||||
if (this == null) {
|
||||
throw new TypeError('Array.prototype.findIndex called on null or undefined');
|
||||
}
|
||||
if (typeof predicate !== 'function') {
|
||||
throw new TypeError('predicate must be a function');
|
||||
}
|
||||
var list = Object(this);
|
||||
var length = list.length >>> 0;
|
||||
var thisArg = arguments[1];
|
||||
var value;
|
||||
|
||||
for (var i = 0; i < length; i++) {
|
||||
value = list[i];
|
||||
if (predicate.call(thisArg, value, i, list)) {
|
||||
return i;
|
||||
}
|
||||
}
|
||||
return -1;
|
||||
};
|
||||
}
|
||||
|
||||
if (!Array.prototype.includes) {
|
||||
Array.prototype.includes = function(searchElement /*, fromIndex*/) {
|
||||
'use strict';
|
||||
if (this == null) {
|
||||
throw new TypeError('Array.prototype.includes called on null or undefined');
|
||||
}
|
||||
|
||||
var O = Object(this);
|
||||
var len = parseInt(O.length, 10) || 0;
|
||||
if (len === 0) {
|
||||
return false;
|
||||
}
|
||||
var n = parseInt(arguments[1], 10) || 0;
|
||||
var k;
|
||||
if (n >= 0) {
|
||||
k = n;
|
||||
} else {
|
||||
k = len + n;
|
||||
if (k < 0) {k = 0;}
|
||||
}
|
||||
var currentElement;
|
||||
while (k < len) {
|
||||
currentElement = O[k];
|
||||
if (searchElement === currentElement ||
|
||||
(searchElement !== searchElement && currentElement !== currentElement)) { // NaN !== NaN
|
||||
return true;
|
||||
}
|
||||
k++;
|
||||
}
|
||||
return false;
|
||||
};
|
||||
}
|
||||
|
||||
if (!String.prototype.startsWith) {
|
||||
String.prototype.startsWith = function(searchString, position){
|
||||
position = position || 0;
|
||||
return this.substr(position, searchString.length) === searchString;
|
||||
};
|
||||
}
|
||||
|
||||
if (!String.prototype.endsWith) {
|
||||
String.prototype.endsWith = function(searchString, position) {
|
||||
var subjectString = this.toString();
|
||||
if (typeof position !== 'number' || !isFinite(position) || Math.floor(position) !== position || position > subjectString.length) {
|
||||
position = subjectString.length;
|
||||
}
|
||||
position -= searchString.length;
|
||||
var lastIndex = subjectString.lastIndexOf(searchString, position);
|
||||
return lastIndex !== -1 && lastIndex === position;
|
||||
};
|
||||
}
|
53
assets/js/vendor/fetch.polyfill.js
vendored
53
assets/js/vendor/fetch.polyfill.js
vendored
|
@ -1,53 +0,0 @@
|
|||
if (typeof window.fetch !== 'function') {
|
||||
window.fetch = function fetch(url, options) {
|
||||
return new Promise((resolve, reject) => {
|
||||
let request = new XMLHttpRequest();
|
||||
|
||||
options = options || {};
|
||||
request.open(options.method || 'GET', url);
|
||||
|
||||
for (const i in options.headers) {
|
||||
request.setRequestHeader(i, options.headers[i]);
|
||||
}
|
||||
|
||||
request.withCredentials = options.credentials === 'include' || options.credentials === 'same-origin';
|
||||
request.onload = () => resolve(response());
|
||||
request.onerror = reject;
|
||||
|
||||
// IE11 hack: don't send null/undefined
|
||||
if (options.body != null)
|
||||
request.send(options.body);
|
||||
else
|
||||
request.send();
|
||||
|
||||
function response() {
|
||||
const keys = [], all = [], headers = {};
|
||||
let header;
|
||||
|
||||
request.getAllResponseHeaders().replace(/^(.*?):\s*([\s\S]*?)$/gm, (m, key, value) => {
|
||||
keys.push(key = key.toLowerCase());
|
||||
all.push([key, value]);
|
||||
header = headers[key];
|
||||
headers[key] = header ? `${header},${value}` : value;
|
||||
});
|
||||
|
||||
return {
|
||||
ok: (request.status/200|0) === 1,
|
||||
status: request.status,
|
||||
statusText: request.statusText,
|
||||
url: request.responseURL,
|
||||
clone: response,
|
||||
text: () => Promise.resolve(request.responseText),
|
||||
json: () => Promise.resolve(request.responseText).then(JSON.parse),
|
||||
blob: () => Promise.resolve(new Blob([request.response])),
|
||||
headers: {
|
||||
keys: () => keys,
|
||||
entries: () => all,
|
||||
get: n => headers[n.toLowerCase()],
|
||||
has: n => n.toLowerCase() in headers
|
||||
}
|
||||
};
|
||||
}
|
||||
});
|
||||
};
|
||||
}
|
225
assets/js/vendor/promise.polyfill.js
vendored
225
assets/js/vendor/promise.polyfill.js
vendored
|
@ -1,225 +0,0 @@
|
|||
(function (root) {
|
||||
if (root.Promise) return;
|
||||
|
||||
// Store setTimeout reference so promise-polyfill will be unaffected by
|
||||
// other code modifying setTimeout (like sinon.useFakeTimers())
|
||||
var setTimeoutFunc = setTimeout;
|
||||
|
||||
function noop() {
|
||||
}
|
||||
|
||||
// Use polyfill for setImmediate for performance gains
|
||||
function asap(fn) {
|
||||
setTimeoutFunc(fn, 1);
|
||||
}
|
||||
|
||||
var onUnhandledRejection = function onUnhandledRejection(err) {
|
||||
console.warn('Possible Unhandled Promise Rejection:', err); // eslint-disable-line no-console
|
||||
};
|
||||
|
||||
// Polyfill for Function.prototype.bind
|
||||
function bind(fn, thisArg) {
|
||||
return function () {
|
||||
fn.apply(thisArg, arguments);
|
||||
};
|
||||
}
|
||||
|
||||
var isArray = Array.isArray || function (value) {
|
||||
return Object.prototype.toString.call(value) === '[object Array]';
|
||||
};
|
||||
|
||||
function Promise(fn) {
|
||||
if (typeof this !== 'object') throw new TypeError('Promises must be constructed via new');
|
||||
if (typeof fn !== 'function') throw new TypeError('not a function');
|
||||
this._state = 0;
|
||||
this._handled = false;
|
||||
this._value = undefined;
|
||||
this._deferreds = [];
|
||||
|
||||
doResolve(fn, this);
|
||||
}
|
||||
|
||||
function handle(self, deferred) {
|
||||
while (self._state === 3) {
|
||||
self = self._value;
|
||||
}
|
||||
if (self._state === 0) {
|
||||
self._deferreds.push(deferred);
|
||||
return;
|
||||
}
|
||||
self._handled = true;
|
||||
asap(function () {
|
||||
var cb = self._state === 1 ? deferred.onFulfilled : deferred.onRejected;
|
||||
if (cb === null) {
|
||||
(self._state === 1 ? resolve : reject)(deferred.promise, self._value);
|
||||
return;
|
||||
}
|
||||
var ret;
|
||||
try {
|
||||
ret = cb(self._value);
|
||||
} catch (e) {
|
||||
reject(deferred.promise, e);
|
||||
return;
|
||||
}
|
||||
resolve(deferred.promise, ret);
|
||||
});
|
||||
}
|
||||
|
||||
function resolve(self, newValue) {
|
||||
try {
|
||||
// Promise Resolution Procedure: https://github.com/promises-aplus/promises-spec#the-promise-resolution-procedure
|
||||
if (newValue === self) throw new TypeError('A promise cannot be resolved with itself.');
|
||||
if (newValue && (typeof newValue === 'object' || typeof newValue === 'function')) {
|
||||
var then = newValue.then;
|
||||
if (newValue instanceof Promise) {
|
||||
self._state = 3;
|
||||
self._value = newValue;
|
||||
finale(self);
|
||||
return;
|
||||
} else if (typeof then === 'function') {
|
||||
doResolve(bind(then, newValue), self);
|
||||
return;
|
||||
}
|
||||
}
|
||||
self._state = 1;
|
||||
self._value = newValue;
|
||||
finale(self);
|
||||
} catch (e) {
|
||||
reject(self, e);
|
||||
}
|
||||
}
|
||||
|
||||
function reject(self, newValue) {
|
||||
self._state = 2;
|
||||
self._value = newValue;
|
||||
finale(self);
|
||||
}
|
||||
|
||||
function finale(self) {
|
||||
if (self._state === 2 && self._deferreds.length === 0) {
|
||||
setTimeout(function() {
|
||||
if (!self._handled) {
|
||||
onUnhandledRejection(self._value);
|
||||
}
|
||||
}, 1);
|
||||
}
|
||||
|
||||
for (var i = 0, len = self._deferreds.length; i < len; i++) {
|
||||
handle(self, self._deferreds[i]);
|
||||
}
|
||||
self._deferreds = null;
|
||||
}
|
||||
|
||||
function Handler(onFulfilled, onRejected, promise) {
|
||||
this.onFulfilled = typeof onFulfilled === 'function' ? onFulfilled : null;
|
||||
this.onRejected = typeof onRejected === 'function' ? onRejected : null;
|
||||
this.promise = promise;
|
||||
}
|
||||
|
||||
/**
|
||||
* Take a potentially misbehaving resolver function and make sure
|
||||
* onFulfilled and onRejected are only called once.
|
||||
*
|
||||
* Makes no guarantees about asynchrony.
|
||||
*/
|
||||
function doResolve(fn, self) {
|
||||
var done = false;
|
||||
try {
|
||||
fn(function (value) {
|
||||
if (done) return;
|
||||
done = true;
|
||||
resolve(self, value);
|
||||
}, function (reason) {
|
||||
if (done) return;
|
||||
done = true;
|
||||
reject(self, reason);
|
||||
});
|
||||
} catch (ex) {
|
||||
if (done) return;
|
||||
done = true;
|
||||
reject(self, ex);
|
||||
}
|
||||
}
|
||||
|
||||
Promise.prototype['catch'] = function (onRejected) {
|
||||
return this.then(null, onRejected);
|
||||
};
|
||||
|
||||
Promise.prototype.then = function (onFulfilled, onRejected) {
|
||||
var prom = new Promise(noop);
|
||||
handle(this, new Handler(onFulfilled, onRejected, prom));
|
||||
return prom;
|
||||
};
|
||||
|
||||
Promise.all = function () {
|
||||
var args = Array.prototype.slice.call(arguments.length === 1 && isArray(arguments[0]) ? arguments[0] : arguments);
|
||||
|
||||
return new Promise(function (resolve, reject) {
|
||||
if (args.length === 0) return resolve([]);
|
||||
var remaining = args.length;
|
||||
|
||||
function res(i, val) {
|
||||
try {
|
||||
if (val && (typeof val === 'object' || typeof val === 'function')) {
|
||||
var then = val.then;
|
||||
if (typeof then === 'function') {
|
||||
then.call(val, function (val) {
|
||||
res(i, val);
|
||||
}, reject);
|
||||
return;
|
||||
}
|
||||
}
|
||||
args[i] = val;
|
||||
if (--remaining === 0) {
|
||||
resolve(args);
|
||||
}
|
||||
} catch (ex) {
|
||||
reject(ex);
|
||||
}
|
||||
}
|
||||
|
||||
for (var i = 0; i < args.length; i++) {
|
||||
res(i, args[i]);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
Promise.resolve = function (value) {
|
||||
if (value && typeof value === 'object' && value.constructor === Promise) {
|
||||
return value;
|
||||
}
|
||||
|
||||
return new Promise(function (resolve) {
|
||||
resolve(value);
|
||||
});
|
||||
};
|
||||
|
||||
Promise.reject = function (value) {
|
||||
return new Promise(function (resolve, reject) {
|
||||
reject(value);
|
||||
});
|
||||
};
|
||||
|
||||
Promise.race = function (values) {
|
||||
return new Promise(function (resolve, reject) {
|
||||
for (var i = 0, len = values.length; i < len; i++) {
|
||||
values[i].then(resolve, reject);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Set the immediate function to execute callbacks
|
||||
* @param fn {function} Function to execute
|
||||
* @private
|
||||
*/
|
||||
Promise._setImmediateFn = function _setImmediateFn(fn) {
|
||||
asap = fn;
|
||||
};
|
||||
|
||||
Promise._setUnhandledRejectionFn = function _setUnhandledRejectionFn(fn) {
|
||||
onUnhandledRejection = fn;
|
||||
};
|
||||
|
||||
root.Promise = Promise;
|
||||
})(window);
|
17
assets/js/vendor/values-entries.polyfill.js
vendored
17
assets/js/vendor/values-entries.polyfill.js
vendored
|
@ -1,17 +0,0 @@
|
|||
// object-values | MIT | github.com/tc39/proposal-object-values-entries
|
||||
|
||||
const reduce = Function.bind.call(Function.call, Array.prototype.reduce);
|
||||
const isEnumerable = Function.bind.call(Function.call, Object.prototype.propertyIsEnumerable);
|
||||
const concat = Function.bind.call(Function.call, Array.prototype.concat);
|
||||
|
||||
if (!Object.values) {
|
||||
Object.values = function values(O) {
|
||||
return reduce(Object.keys(O), (v, k) => concat(v, typeof k === 'string' && isEnumerable(O, k) ? [O[k]] : []), []);
|
||||
};
|
||||
}
|
||||
|
||||
if (!Object.entries) {
|
||||
Object.entries = function entries(O) {
|
||||
return reduce(Object.keys(O), (e, k) => concat(e, typeof k === 'string' && isEnumerable(O, k) ? [[k, O[k]]] : []), []);
|
||||
};
|
||||
}
|
9019
assets/package-lock.json
generated
9019
assets/package-lock.json
generated
File diff suppressed because it is too large
Load diff
|
@ -1,60 +1,37 @@
|
|||
{
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"deploy": "cross-env NODE_ENV=production webpack",
|
||||
"deploy": "cross-env NODE_ENV=production tsc && cross-env NODE_ENV=production vite build",
|
||||
"lint": "eslint . --ext .js,.ts",
|
||||
"test": "jest --ci",
|
||||
"test:watch": "jest --watch",
|
||||
"watch": "webpack --watch"
|
||||
"test": "vitest run --coverage",
|
||||
"test:watch": "vitest watch --coverage",
|
||||
"dev": "vite",
|
||||
"build": "tsc && vite build",
|
||||
"preview": "vite preview"
|
||||
},
|
||||
"dependencies": {
|
||||
"@fortawesome/fontawesome-free": "^6.3.0",
|
||||
"@rollup/plugin-multi-entry": "^6.0.0",
|
||||
"@rollup/plugin-typescript": "^11.0.0",
|
||||
"@rollup/plugin-virtual": "^3.0.1",
|
||||
"@types/web": "^0.0.91",
|
||||
"@typescript-eslint/eslint-plugin": "^5.52.0",
|
||||
"@typescript-eslint/parser": "^5.52.0",
|
||||
"acorn": "^8.8.2",
|
||||
"autoprefixer": "^10.4.13",
|
||||
"brunch": "^4.0.2",
|
||||
"copy-webpack-plugin": "^11.0.0",
|
||||
"copycat-brunch": "^1.1.1",
|
||||
"@fortawesome/fontawesome-free": "^6.5.2",
|
||||
"@types/web": "^0.0.143",
|
||||
"@typescript-eslint/eslint-plugin": "^7.8.0",
|
||||
"@typescript-eslint/parser": "^7.8.0",
|
||||
"autoprefixer": "^10.4.19",
|
||||
"cross-env": "^7.0.3",
|
||||
"css-loader": "^6.7.3",
|
||||
"css-minimizer-webpack-plugin": "^5.0.0",
|
||||
"eslint": "^8.34.0",
|
||||
"eslint-webpack-plugin": "^4.0.0",
|
||||
"file-loader": "^6.2.0",
|
||||
"ignore-emit-webpack-plugin": "^2.0.6",
|
||||
"jest-environment-jsdom": "^29.4.3",
|
||||
"mini-css-extract-plugin": "^2.7.2",
|
||||
"normalize-scss": "^7.0.1",
|
||||
"postcss": "^8.4.21",
|
||||
"postcss-loader": "^7.2.4",
|
||||
"postcss-scss": "^4.0.6",
|
||||
"postcss-url": "^10.1.3",
|
||||
"rollup": "^2.57.0",
|
||||
"rollup-plugin-includepaths": "^0.2.4",
|
||||
"sass": "^1.58.3",
|
||||
"sass-loader": "^13.2.0",
|
||||
"source-map-support": "^0.5.21",
|
||||
"style-loader": "^3.3.1",
|
||||
"terser-webpack-plugin": "^5.3.6",
|
||||
"tslib": "^2.5.0",
|
||||
"typescript": "^4.9",
|
||||
"webpack": "^5.76.0",
|
||||
"webpack-cli": "^5.0.1",
|
||||
"webpack-rollup-loader": "^0.8.1"
|
||||
"jest-environment-jsdom": "^29.7.0",
|
||||
"normalize-scss": "^8.0.0",
|
||||
"sass": "^1.75.0",
|
||||
"typescript": "^5.4",
|
||||
"vite": "^5.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@testing-library/dom": "^9.0.0",
|
||||
"@testing-library/jest-dom": "^5.16.5",
|
||||
"@types/jest": "^29.4.0",
|
||||
"eslint-plugin-jest": "^27.2.1",
|
||||
"eslint-plugin-jest-dom": "^4.0.3",
|
||||
"jest": "^29.4.3",
|
||||
"jest-fetch-mock": "^3.0.3",
|
||||
"ts-jest": "^29.1.0"
|
||||
"@testing-library/dom": "^10.1.0",
|
||||
"@testing-library/jest-dom": "^6.4.2",
|
||||
"@types/chai-dom": "^1.11.3",
|
||||
"@vitest/coverage-v8": "^1.5.3",
|
||||
"chai": "^5",
|
||||
"eslint-plugin-vitest": "^0.5.4",
|
||||
"jsdom": "^24.0.0",
|
||||
"vitest": "^1.5.3",
|
||||
"vitest-fetch-mock": "^0.2.2"
|
||||
}
|
||||
}
|
||||
|
|
4
assets/test/fetch-mock.ts
Normal file
4
assets/test/fetch-mock.ts
Normal file
|
@ -0,0 +1,4 @@
|
|||
import createFetchMock from 'vitest-fetch-mock';
|
||||
import { vi } from 'vitest';
|
||||
|
||||
export const fetchMock = createFetchMock(vi);
|
26
assets/test/fix-event-listeners.ts
Normal file
26
assets/test/fix-event-listeners.ts
Normal file
|
@ -0,0 +1,26 @@
|
|||
// Add helper to fix event listeners on a given target
|
||||
|
||||
export function fixEventListeners(t: EventTarget) {
|
||||
let eventListeners: Record<string, unknown[]>;
|
||||
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
beforeAll(() => {
|
||||
eventListeners = {};
|
||||
const oldAddEventListener = t.addEventListener;
|
||||
|
||||
t.addEventListener = function(type: string, listener: any, options: any): void {
|
||||
eventListeners[type] = eventListeners[type] || [];
|
||||
eventListeners[type].push(listener);
|
||||
return oldAddEventListener(type, listener, options);
|
||||
};
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
for (const key in eventListeners) {
|
||||
for (const listener of eventListeners[key]) {
|
||||
(t.removeEventListener as any)(key, listener);
|
||||
}
|
||||
}
|
||||
eventListeners = {};
|
||||
});
|
||||
}
|
|
@ -1,24 +0,0 @@
|
|||
import '@testing-library/jest-dom';
|
||||
|
||||
const blankFilter = {
|
||||
leftOperand: null,
|
||||
negate: false,
|
||||
op: null,
|
||||
rightOperand: null,
|
||||
};
|
||||
|
||||
window.booru = {
|
||||
csrfToken: 'mockCsrfToken',
|
||||
hiddenTag: '/mock-tagblocked.svg',
|
||||
hiddenTagList: [],
|
||||
ignoredTagList: [],
|
||||
imagesWithDownvotingDisabled: [],
|
||||
spoilerType: 'off',
|
||||
spoileredTagList: [],
|
||||
userCanEditFilter: false,
|
||||
userIsSignedIn: false,
|
||||
watchedTagList: [],
|
||||
hiddenFilter: blankFilter,
|
||||
spoileredFilter: blankFilter,
|
||||
tagsVersion: 5
|
||||
};
|
|
@ -1,9 +1,9 @@
|
|||
export function mockDateNow(initialDateNow: number): void {
|
||||
beforeAll(() => {
|
||||
jest.useFakeTimers().setSystemTime(initialDateNow);
|
||||
vi.useFakeTimers().setSystemTime(initialDateNow);
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
jest.useRealTimers();
|
||||
vi.useRealTimers();
|
||||
});
|
||||
}
|
||||
|
|
|
@ -1,9 +1,11 @@
|
|||
import { MockInstance } from 'vitest';
|
||||
|
||||
type MockStorageKeys = 'getItem' | 'setItem' | 'removeItem';
|
||||
|
||||
export function mockStorage<Keys extends MockStorageKeys>(options: Pick<Storage, Keys>): { [k in `${Keys}Spy`]: jest.SpyInstance } {
|
||||
const getItemSpy = 'getItem' in options ? jest.spyOn(Storage.prototype, 'getItem') : undefined;
|
||||
const setItemSpy = 'setItem' in options ? jest.spyOn(Storage.prototype, 'setItem') : undefined;
|
||||
const removeItemSpy = 'removeItem' in options ? jest.spyOn(Storage.prototype, 'removeItem') : undefined;
|
||||
export function mockStorage<Keys extends MockStorageKeys>(options: Pick<Storage, Keys>): { [k in `${Keys}Spy`]: MockInstance } {
|
||||
const getItemSpy = 'getItem' in options ? vi.spyOn(Storage.prototype, 'getItem') : undefined;
|
||||
const setItemSpy = 'setItem' in options ? vi.spyOn(Storage.prototype, 'setItem') : undefined;
|
||||
const removeItemSpy = 'removeItem' in options ? vi.spyOn(Storage.prototype, 'removeItem') : undefined;
|
||||
|
||||
beforeAll(() => {
|
||||
getItemSpy && getItemSpy.mockImplementation((options as Storage).getItem);
|
||||
|
@ -26,7 +28,7 @@ export function mockStorage<Keys extends MockStorageKeys>(options: Pick<Storage,
|
|||
return { getItemSpy, setItemSpy, removeItemSpy } as ReturnType<typeof mockStorage>;
|
||||
}
|
||||
|
||||
type MockStorageImplApi = { [k in `${MockStorageKeys}Spy`]: jest.SpyInstance } & {
|
||||
type MockStorageImplApi = { [k in `${MockStorageKeys}Spy`]: MockInstance } & {
|
||||
/**
|
||||
* Forces the mock storage back to its default (empty) state
|
||||
* @param value
|
||||
|
|
35
assets/test/vitest-setup.ts
Normal file
35
assets/test/vitest-setup.ts
Normal file
|
@ -0,0 +1,35 @@
|
|||
import { matchNone } from '../js/query/boolean';
|
||||
import '@testing-library/jest-dom/vitest';
|
||||
import { URL } from 'node:url';
|
||||
import { Blob } from 'node:buffer';
|
||||
import { fireEvent } from '@testing-library/dom';
|
||||
|
||||
window.booru = {
|
||||
// eslint-disable-next-line @typescript-eslint/no-empty-function
|
||||
timeAgo: () => {},
|
||||
csrfToken: 'mockCsrfToken',
|
||||
hiddenTag: '/mock-tagblocked.svg',
|
||||
hiddenTagList: [],
|
||||
ignoredTagList: [],
|
||||
imagesWithDownvotingDisabled: [],
|
||||
spoilerType: 'off',
|
||||
spoileredTagList: [],
|
||||
userCanEditFilter: false,
|
||||
userIsSignedIn: false,
|
||||
watchedTagList: [],
|
||||
hiddenFilter: matchNone(),
|
||||
spoileredFilter: matchNone(),
|
||||
interactions: [],
|
||||
tagsVersion: 5
|
||||
};
|
||||
|
||||
// https://github.com/jsdom/jsdom/issues/1721#issuecomment-1484202038
|
||||
// jsdom URL and Blob are missing most of the implementation
|
||||
// Use the node version of these types instead
|
||||
Object.assign(globalThis, { URL, Blob });
|
||||
|
||||
// Prevents an error when calling `form.submit()` directly in
|
||||
// the code that is being tested
|
||||
HTMLFormElement.prototype.submit = function() {
|
||||
fireEvent.submit(this);
|
||||
};
|
|
@ -1,16 +1,22 @@
|
|||
{
|
||||
"compilerOptions": {
|
||||
"noEmit": true,
|
||||
"baseUrl": "./js",
|
||||
"target": "ES2018",
|
||||
"target": "ES2016",
|
||||
"useDefineForClassFields": true,
|
||||
"esModuleInterop": true,
|
||||
"moduleResolution": "Node",
|
||||
"allowJs": true,
|
||||
"skipLibCheck": true,
|
||||
"lib": [
|
||||
"ES2018",
|
||||
"DOM"
|
||||
"ES2016",
|
||||
"DOM",
|
||||
"DOM.Iterable"
|
||||
],
|
||||
"strict": true
|
||||
"moduleResolution": "bundler",
|
||||
"allowImportingTsExtensions": true,
|
||||
"resolveJsonModule": true,
|
||||
"isolatedModules": true,
|
||||
"noEmit": true,
|
||||
"strict": true,
|
||||
"types": ["vitest/globals"]
|
||||
}
|
||||
}
|
||||
|
|
41
assets/types/booru-object.d.ts
vendored
41
assets/types/booru-object.d.ts
vendored
|
@ -1,6 +1,25 @@
|
|||
import { AstMatcher } from 'query/types';
|
||||
|
||||
type SpoilerType = 'click' | 'hover' | 'static' | 'off';
|
||||
|
||||
type InteractionType = 'voted' | 'faved' | 'hidden';
|
||||
type InteractionValue = 'up' | 'down' | null;
|
||||
|
||||
interface Interaction {
|
||||
image_id: number;
|
||||
user_id: number;
|
||||
interaction_type: InteractionType;
|
||||
value: 'up' | 'down' | null;
|
||||
}
|
||||
|
||||
interface BooruObject {
|
||||
/**
|
||||
* Automatic timestamp recalculation function for userscript use
|
||||
*/
|
||||
timeAgo: (args: HTMLTimeElement[]) => void;
|
||||
/**
|
||||
* Anti-forgery token sent by the server
|
||||
*/
|
||||
csrfToken: string;
|
||||
/**
|
||||
* One of the specified values, based on user setting
|
||||
|
@ -36,24 +55,20 @@ interface BooruObject {
|
|||
*/
|
||||
userCanEditFilter: boolean;
|
||||
/**
|
||||
* SearchAST instance for hidden tags, converted from raw AST data in {@see import('../js/booru.js')}
|
||||
* AST matcher instance for filter hidden query
|
||||
*
|
||||
* TODO Properly type after TypeScript migration
|
||||
*
|
||||
* @type {import('../js/match_query.js').SearchAST}
|
||||
*/
|
||||
hiddenFilter: unknown;
|
||||
hiddenFilter: AstMatcher;
|
||||
/**
|
||||
* SearchAST instance for spoilered tags, converted from raw AST data in {@see import('../js/booru.js')}
|
||||
*
|
||||
* TODO Properly type after TypeScript migration
|
||||
*
|
||||
* @type {import('../js/match_query.js').SearchAST}
|
||||
* AST matcher instance for filter spoilered query
|
||||
*/
|
||||
spoileredFilter: unknown;
|
||||
spoileredFilter: AstMatcher;
|
||||
tagsVersion: number;
|
||||
interactions: Interaction[];
|
||||
}
|
||||
|
||||
interface Window {
|
||||
booru: BooruObject;
|
||||
declare global {
|
||||
interface Window {
|
||||
booru: BooruObject;
|
||||
}
|
||||
}
|
||||
|
|
86
assets/vite.config.ts
Normal file
86
assets/vite.config.ts
Normal file
|
@ -0,0 +1,86 @@
|
|||
/// <reference types="vitest" />
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import autoprefixer from 'autoprefixer';
|
||||
import { defineConfig, UserConfig, ConfigEnv } from 'vite';
|
||||
|
||||
export default defineConfig(({ command, mode }: ConfigEnv): UserConfig => {
|
||||
const isDev = command !== 'build' && mode !== 'test';
|
||||
|
||||
const themeNames =
|
||||
fs.readdirSync(path.resolve(__dirname, 'css/themes/')).map(name => {
|
||||
const m = name.match(/([-a-z]+).scss/);
|
||||
|
||||
if (m) { return m[1]; }
|
||||
return null;
|
||||
});
|
||||
|
||||
const themes = new Map();
|
||||
|
||||
for (const name of themeNames) {
|
||||
themes.set(`css/${name}`, `./css/themes/${name}.scss`);
|
||||
}
|
||||
|
||||
return {
|
||||
publicDir: 'static',
|
||||
plugins: [],
|
||||
resolve: {
|
||||
alias: {
|
||||
common: path.resolve(__dirname, 'css/common/'),
|
||||
views: path.resolve(__dirname, 'css/views/')
|
||||
}
|
||||
},
|
||||
build: {
|
||||
target: ['es2016', 'chrome67', 'firefox62', 'edge18', 'safari12'],
|
||||
outDir: path.resolve(__dirname, '../priv/static'),
|
||||
emptyOutDir: false,
|
||||
sourcemap: isDev,
|
||||
manifest: false,
|
||||
cssCodeSplit: true,
|
||||
rollupOptions: {
|
||||
input: {
|
||||
'js/app': './js/app.js',
|
||||
...Object.fromEntries(themes)
|
||||
},
|
||||
output: {
|
||||
entryFileNames: '[name].js',
|
||||
chunkFileNames: '[name].js',
|
||||
assetFileNames: '[name][extname]'
|
||||
}
|
||||
}
|
||||
},
|
||||
css: {
|
||||
postcss: {
|
||||
plugins: [autoprefixer]
|
||||
}
|
||||
},
|
||||
test: {
|
||||
globals: true,
|
||||
environment: 'jsdom',
|
||||
// TODO Jest --randomize CLI flag equivalent, consider enabling in the future
|
||||
// sequence: { shuffle: true },
|
||||
setupFiles: './test/vitest-setup.ts',
|
||||
coverage: {
|
||||
reporter: ['text', 'html'],
|
||||
include: ['js/**/*.{js,ts}'],
|
||||
exclude: [
|
||||
'node_modules/',
|
||||
'.*\\.test\\.ts$',
|
||||
'.*\\.d\\.ts$',
|
||||
],
|
||||
thresholds: {
|
||||
statements: 0,
|
||||
branches: 0,
|
||||
functions: 0,
|
||||
lines: 0,
|
||||
'**/utils/**/*.ts': {
|
||||
statements: 100,
|
||||
branches: 100,
|
||||
functions: 100,
|
||||
lines: 100,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
});
|
|
@ -1,156 +0,0 @@
|
|||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import url from 'url';
|
||||
import TerserPlugin from 'terser-webpack-plugin';
|
||||
import CssMinimizerPlugin from 'css-minimizer-webpack-plugin';
|
||||
import CopyPlugin from 'copy-webpack-plugin';
|
||||
import MiniCssExtractPlugin from "mini-css-extract-plugin";
|
||||
import IgnoreEmitPlugin from 'ignore-emit-webpack-plugin';
|
||||
import ESLintPlugin from 'eslint-webpack-plugin';
|
||||
import autoprefixer from 'autoprefixer';
|
||||
import rollupPluginIncludepaths from 'rollup-plugin-includepaths';
|
||||
import rollupPluginMultiEntry from '@rollup/plugin-multi-entry';
|
||||
import rollupPluginTypescript from '@rollup/plugin-typescript';
|
||||
|
||||
const isDevelopment = process.env.NODE_ENV !== 'production';
|
||||
const __dirname = path.dirname(url.fileURLToPath(import.meta.url));
|
||||
|
||||
const includePaths = rollupPluginIncludepaths();
|
||||
const multiEntry = rollupPluginMultiEntry();
|
||||
const typescript = rollupPluginTypescript();
|
||||
|
||||
let plugins = [
|
||||
new IgnoreEmitPlugin(/css\/.*(?<!css)$/),
|
||||
new MiniCssExtractPlugin({
|
||||
filename: '[name].css',
|
||||
chunkFilename: '[id].css'
|
||||
}),
|
||||
new CopyPlugin({
|
||||
patterns: [
|
||||
{ from: path.resolve(__dirname, 'static') },
|
||||
],
|
||||
}),
|
||||
];
|
||||
|
||||
if (isDevelopment) {
|
||||
plugins = plugins.concat([
|
||||
new ESLintPlugin({
|
||||
extensions: ['js', 'ts'],
|
||||
failOnError: true,
|
||||
failOnWarning: isDevelopment
|
||||
})
|
||||
]);
|
||||
}
|
||||
else {
|
||||
plugins = plugins.concat([
|
||||
new TerserPlugin({
|
||||
parallel: true,
|
||||
}),
|
||||
new CssMinimizerPlugin(),
|
||||
]);
|
||||
}
|
||||
|
||||
const themeNames =
|
||||
fs.readdirSync(path.resolve(__dirname, 'css/themes')).map(name =>
|
||||
name.match(/([-a-z]+).scss/)[1]
|
||||
);
|
||||
|
||||
const themes = {};
|
||||
for (const name of themeNames) {
|
||||
themes[`css/${name}`] = `./css/themes/${name}.scss`;
|
||||
}
|
||||
|
||||
export default {
|
||||
mode: isDevelopment ? 'development' : 'production',
|
||||
entry: {
|
||||
'js/app.js': './js/app.js',
|
||||
...themes
|
||||
},
|
||||
output: {
|
||||
filename: '[name]',
|
||||
path: path.resolve(__dirname, '../priv/static'),
|
||||
},
|
||||
optimization: {
|
||||
minimize: !isDevelopment,
|
||||
providedExports: true,
|
||||
usedExports: true,
|
||||
concatenateModules: true,
|
||||
},
|
||||
devtool: isDevelopment ? 'inline-source-map' : undefined,
|
||||
performance: { hints: false },
|
||||
resolve: {
|
||||
alias: {
|
||||
common: path.resolve(__dirname, 'css/common/'),
|
||||
views: path.resolve(__dirname, 'css/views/')
|
||||
}
|
||||
},
|
||||
module: {
|
||||
rules: [
|
||||
{
|
||||
test: /\.(ttf|eot|svg|woff2?)$/,
|
||||
loader: 'file-loader',
|
||||
options: {
|
||||
name: '[name].[ext]',
|
||||
outputPath: './fonts',
|
||||
publicPath: '../fonts',
|
||||
},
|
||||
dependency: { not: ['url'] },
|
||||
},
|
||||
{
|
||||
test: /app\.js/,
|
||||
use: [
|
||||
{
|
||||
loader: 'webpack-rollup-loader',
|
||||
options: {
|
||||
plugins: [
|
||||
includePaths,
|
||||
multiEntry,
|
||||
typescript,
|
||||
]
|
||||
}
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
test: /\.scss$/,
|
||||
use: [
|
||||
MiniCssExtractPlugin.loader,
|
||||
{
|
||||
loader: 'css-loader',
|
||||
options: {
|
||||
sourceMap: isDevelopment,
|
||||
url: {
|
||||
filter: (url, _resourcePath) => {
|
||||
return !url.startsWith('/');
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
{
|
||||
loader: 'postcss-loader',
|
||||
options: {
|
||||
postcssOptions: {
|
||||
sourceMaps: isDevelopment,
|
||||
ident: 'postcss',
|
||||
syntax: 'postcss-scss',
|
||||
plugins: [
|
||||
autoprefixer(),
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
loader: 'sass-loader',
|
||||
options: {
|
||||
sourceMap: isDevelopment,
|
||||
sassOptions: {
|
||||
quietDeps: true
|
||||
}
|
||||
}
|
||||
},
|
||||
]
|
||||
},
|
||||
],
|
||||
},
|
||||
plugins,
|
||||
};
|
|
@ -8,7 +8,7 @@ config :philomena, Philomena.Repo, show_sensitive_data_on_connection_error: true
|
|||
#
|
||||
# The watchers configuration can be used to run external
|
||||
# watchers to your application. For example, we use it
|
||||
# with webpack to recompile .js and .css sources.
|
||||
# with vite to recompile .js and .css sources.
|
||||
config :philomena, PhilomenaWeb.Endpoint,
|
||||
http: [port: 4000],
|
||||
debug_errors: true,
|
||||
|
@ -16,11 +16,23 @@ config :philomena, PhilomenaWeb.Endpoint,
|
|||
check_origin: false,
|
||||
watchers: [
|
||||
node: [
|
||||
"node_modules/webpack/bin/webpack.js",
|
||||
"node_modules/vite/bin/vite.js",
|
||||
"--mode",
|
||||
"development",
|
||||
"--host",
|
||||
"0.0.0.0",
|
||||
"--config",
|
||||
"vite.config.ts",
|
||||
cd: Path.expand("../assets", __DIR__)
|
||||
],
|
||||
node: [
|
||||
"node_modules/vite/bin/vite.js",
|
||||
"build",
|
||||
"--mode",
|
||||
"development",
|
||||
"--watch",
|
||||
"--watch-options-stdin",
|
||||
"--config",
|
||||
"vite.config.ts",
|
||||
cd: Path.expand("../assets", __DIR__)
|
||||
]
|
||||
]
|
||||
|
@ -60,6 +72,12 @@ config :philomena, PhilomenaWeb.Endpoint,
|
|||
]
|
||||
]
|
||||
|
||||
# Relax CSP rules in development
|
||||
config :philomena, csp_relaxed: true
|
||||
|
||||
# Enable Vite HMR
|
||||
config :philomena, vite_reload: true
|
||||
|
||||
# Do not include metadata nor timestamps in development logs
|
||||
config :logger, :console, format: "[$level] $message\n"
|
||||
|
||||
|
|
|
@ -12,7 +12,7 @@ import Config
|
|||
config :philomena, PhilomenaWeb.Endpoint, cache_static_manifest: "priv/static/cache_manifest.json"
|
||||
|
||||
# Do not print debug messages in production
|
||||
config :logger, level: :warn
|
||||
config :logger, level: :warning
|
||||
|
||||
# ## SSL Support
|
||||
#
|
||||
|
|
|
@ -76,6 +76,7 @@
|
|||
"image macro",
|
||||
"monochrome",
|
||||
"oc",
|
||||
"oc only",
|
||||
"photo",
|
||||
"Tag original characters oc:name"
|
||||
]
|
||||
|
|
|
@ -6,6 +6,7 @@ import Config
|
|||
# by calling `mix release`.
|
||||
#
|
||||
# See `mix help release` for more information.
|
||||
{:ok, _} = Application.ensure_all_started(:tls_certificate_check)
|
||||
|
||||
config :bcrypt_elixir,
|
||||
log_rounds: String.to_integer(System.get_env("BCRYPT_ROUNDS", "12"))
|
||||
|
@ -124,13 +125,16 @@ if config_env() == :prod do
|
|||
username: System.fetch_env!("SMTP_USERNAME"),
|
||||
password: System.fetch_env!("SMTP_PASSWORD"),
|
||||
tls: :always,
|
||||
auth: :always
|
||||
auth: :always,
|
||||
tls_options:
|
||||
[middlebox_comp_mode: false] ++
|
||||
:tls_certificate_check.options(System.fetch_env!("SMTP_RELAY"))
|
||||
|
||||
# Production endpoint config
|
||||
{:ok, ip} = :inet.parse_address(System.get_env("APP_IP", "127.0.0.1") |> String.to_charlist())
|
||||
|
||||
config :philomena, PhilomenaWeb.Endpoint,
|
||||
http: [ip: ip, port: {:system, "PORT"}],
|
||||
http: [ip: ip, port: System.fetch_env!("PORT")],
|
||||
url: [host: System.fetch_env!("APP_HOSTNAME"), scheme: "https", port: 443],
|
||||
secret_key_base: System.fetch_env!("SECRET_KEY_BASE"),
|
||||
server: not is_nil(System.get_env("START_ENDPOINT"))
|
||||
|
|
|
@ -19,4 +19,4 @@ config :philomena, PhilomenaWeb.Endpoint,
|
|||
server: false
|
||||
|
||||
# Print only warnings and errors during test
|
||||
config :logger, level: :warn
|
||||
config :logger, level: :warning
|
||||
|
|
|
@ -2,6 +2,10 @@ version: '3'
|
|||
volumes:
|
||||
postgres_data: {}
|
||||
opensearch_data: {}
|
||||
app_cargo_data: {}
|
||||
app_build_data: {}
|
||||
app_deps_data: {}
|
||||
app_native_data: {}
|
||||
|
||||
services:
|
||||
app:
|
||||
|
@ -28,7 +32,7 @@ services:
|
|||
- BADGE_URL_ROOT=/badge-img
|
||||
- TAG_URL_ROOT=/tag-img
|
||||
- OPENSEARCH_URL=https://admin:admin@opensearch:9200
|
||||
- REDIS_HOST=redis
|
||||
- REDIS_HOST=valkey
|
||||
- DATABASE_URL=ecto://postgres:postgres@postgres/philomena_dev
|
||||
- CDN_HOST=localhost
|
||||
- MAILER_ADDRESS=noreply@philomena.local
|
||||
|
@ -44,13 +48,19 @@ services:
|
|||
tty: true
|
||||
volumes:
|
||||
- .:/srv/philomena
|
||||
- app_cargo_data:/srv/philomena/.cargo
|
||||
- app_build_data:/srv/philomena/_build
|
||||
- app_deps_data:/srv/philomena/deps
|
||||
- app_native_data:/srv/philomena/priv/native
|
||||
depends_on:
|
||||
- postgres
|
||||
- opensearch
|
||||
- redis
|
||||
- valkey
|
||||
ports:
|
||||
- '5173:5173'
|
||||
|
||||
postgres:
|
||||
image: postgres:15.3-alpine
|
||||
image: postgres:16.2-alpine
|
||||
environment:
|
||||
- POSTGRES_PASSWORD=postgres
|
||||
volumes:
|
||||
|
@ -59,7 +69,7 @@ services:
|
|||
driver: "none"
|
||||
|
||||
opensearch:
|
||||
image: opensearchproject/opensearch:2.6.0
|
||||
image: opensearchproject/opensearch:2.14.0
|
||||
volumes:
|
||||
- opensearch_data:/usr/share/opensearch/data
|
||||
logging:
|
||||
|
@ -71,13 +81,13 @@ services:
|
|||
soft: 65536
|
||||
hard: 65536
|
||||
|
||||
redis:
|
||||
image: redis:7.0.11-alpine
|
||||
valkey:
|
||||
image: valkey/valkey:7.2.5-alpine
|
||||
logging:
|
||||
driver: "none"
|
||||
|
||||
files:
|
||||
image: andrewgaul/s3proxy:sha-ba0fd6d
|
||||
image: andrewgaul/s3proxy:sha-ec12ae0
|
||||
environment:
|
||||
- JCLOUDS_FILESYSTEM_BASEDIR=/srv/philomena/priv/s3
|
||||
volumes:
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
FROM elixir:1.14.4-alpine
|
||||
FROM elixir:1.16.2-alpine
|
||||
|
||||
ADD https://api.github.com/repos/philomena-dev/FFmpeg/git/refs/heads/release/5.1 /tmp/ffmpeg_version.json
|
||||
ADD https://api.github.com/repos/philomena-dev/FFmpeg/git/refs/heads/release/6.1 /tmp/ffmpeg_version.json
|
||||
RUN (echo "https://github.com/philomena-dev/prebuilt-ffmpeg/raw/master"; cat /etc/apk/repositories) > /tmp/repositories \
|
||||
&& cp /tmp/repositories /etc/apk/repositories \
|
||||
&& apk update --allow-untrusted \
|
||||
&& apk add inotify-tools build-base git ffmpeg ffmpeg-dev npm nodejs file-dev libpng-dev gifsicle optipng libjpeg-turbo-utils librsvg rsvg-convert imagemagick postgresql15-client wget rust cargo --allow-untrusted \
|
||||
&& apk add inotify-tools build-base git ffmpeg ffmpeg-dev npm nodejs file-dev libpng-dev gifsicle optipng libjpeg-turbo-utils librsvg rsvg-convert imagemagick postgresql16-client wget rust cargo --allow-untrusted \
|
||||
&& mix local.hex --force \
|
||||
&& mix local.rebar --force
|
||||
|
||||
|
@ -24,4 +24,5 @@ COPY docker/app/run-test /usr/local/bin/run-test
|
|||
COPY docker/app/safe-rsvg-convert /usr/local/bin/safe-rsvg-convert
|
||||
COPY docker/app/purge-cache /usr/local/bin/purge-cache
|
||||
ENV PATH=$PATH:/root/.cargo/bin
|
||||
EXPOSE 5173
|
||||
CMD run-development
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
FROM openresty/openresty:1.21.4.1-7-alpine
|
||||
FROM openresty/openresty:1.25.3.1-2-alpine
|
||||
ARG APP_DIR
|
||||
ARG S3_SCHEME
|
||||
ARG S3_HOST
|
||||
|
|
|
@ -7,7 +7,7 @@ all: import_es
|
|||
import_es: dump_jsonl
|
||||
$(ELASTICDUMP) --input=images.jsonl --output=http://localhost:9200/ --output-index=images --limit 10000 --retryAttempts=5 --type=data --transform="doc._source = Object.assign({},doc); doc._id = doc.id"
|
||||
|
||||
dump_jsonl: metadata true_uploaders uploaders deleters galleries tags hides upvotes downvotes faves tag_names
|
||||
dump_jsonl: metadata true_uploaders uploaders deleters galleries tags sources hides upvotes downvotes faves tag_names
|
||||
psql $(DATABASE) -v ON_ERROR_STOP=1 <<< 'copy (select temp_images.jsonb_object_agg(object) from temp_images.image_search_json group by image_id) to stdout;' > images.jsonl
|
||||
psql $(DATABASE) -v ON_ERROR_STOP=1 <<< 'drop schema temp_images cascade;'
|
||||
sed -i images.jsonl -e 's/\\\\/\\/g'
|
||||
|
@ -15,6 +15,8 @@ dump_jsonl: metadata true_uploaders uploaders deleters galleries tags hides upvo
|
|||
metadata: image_search_json
|
||||
psql $(DATABASE) -v ON_ERROR_STOP=1 <<-SQL
|
||||
insert into temp_images.image_search_json (image_id, object) select id, jsonb_build_object(
|
||||
'approved', approved,
|
||||
'animated', is_animated,
|
||||
'anonymous', anonymous,
|
||||
'aspect_ratio', nullif(image_aspect_ratio, 'NaN'::float8),
|
||||
'comment_count', comments_count,
|
||||
|
@ -23,6 +25,7 @@ metadata: image_search_json
|
|||
'description', description,
|
||||
'downvotes', downvotes_count,
|
||||
'duplicate_id', duplicate_id,
|
||||
'duration', (case when is_animated then image_duration else 0::float end),
|
||||
'faves', faves_count,
|
||||
'file_name', image_name,
|
||||
'fingerprint', fingerprint,
|
||||
|
@ -35,10 +38,11 @@ metadata: image_search_json
|
|||
'orig_sha512_hash', image_orig_sha512_hash,
|
||||
'original_format', image_format,
|
||||
'pixels', cast(image_width as bigint)*cast(image_height as bigint),
|
||||
'processed', processed,
|
||||
'score', score,
|
||||
'size', image_size,
|
||||
'sha512_hash', image_sha512_hash,
|
||||
'source_url', lower(source_url),
|
||||
'thumbnails_generated', thumbnails_generated,
|
||||
'updated_at', updated_at,
|
||||
'upvotes', upvotes_count,
|
||||
'width', image_width,
|
||||
|
@ -64,33 +68,49 @@ deleters: image_search_json
|
|||
galleries: image_search_json
|
||||
psql $(DATABASE) -v ON_ERROR_STOP=1 <<-SQL
|
||||
insert into temp_images.image_search_json (image_id, object) select gi.image_id, jsonb_build_object('gallery_interactions', jsonb_agg(jsonb_build_object('id', gi.gallery_id, 'position', gi.position))) from gallery_interactions gi group by image_id;
|
||||
insert into temp_images.image_search_json (image_id, object) select gi.image_id, jsonb_build_object('gallery_id', jsonb_agg(gi.gallery_id)) from gallery_interactions gi group by image_id;
|
||||
insert into temp_images.image_search_json (image_id, object) select gi.image_id, jsonb_build_object('gallery_position', jsonb_object_agg(gi.gallery_id, gi.position)) from gallery_interactions gi group by image_id;
|
||||
SQL
|
||||
|
||||
tags: image_search_json
|
||||
psql $(DATABASE) -v ON_ERROR_STOP=1 <<-SQL
|
||||
insert into temp_images.image_search_json (image_id, object) select it.image_id, jsonb_build_object('tag_ids', jsonb_agg(it.tag_id), 'tag_count', count(*)) from image_taggings it group by image_id;
|
||||
insert into temp_images.image_search_json (image_id, object) select it.image_id, jsonb_build_object(
|
||||
'tag_ids', jsonb_agg(it.tag_id),
|
||||
'tag_count', count(*),
|
||||
'error_tag_count', count(case when t.category = 'error' then t.category else null end),
|
||||
'rating_tag_count', count(case when t.category = 'rating' then t.category else null end),
|
||||
'origin_tag_count', count(case when t.category = 'origin' then t.category else null end),
|
||||
'character_tag_count', count(case when t.category = 'character' then t.category else null end),
|
||||
'oc_tag_count', count(case when t.category = 'oc' then t.category else null end),
|
||||
'species_tag_count', count(case when t.category = 'species' then t.category else null end),
|
||||
'body_type_tag_count', count(case when t.category = 'body-type' then t.category else null end),
|
||||
'content_fanmade_tag_count', count(case when t.category = 'content-fanmade' then t.category else null end),
|
||||
'content_official_tag_count', count(case when t.category = 'content-official' then t.category else null end),
|
||||
'spoiler_tag_count', count(case when t.category = 'spoiler' then t.category else null end),
|
||||
) from image_taggings it inner join tags t on t.id = it.tag_id group by image_id;
|
||||
SQL
|
||||
|
||||
sources: image_search_json
|
||||
psql $(DATABASE) -v ON_ERROR_STOP=1 <<-SQL
|
||||
insert into temp_images.image_search_json (image_id, object) select s.image_id, jsonb_build_object('source_url', jsonb_agg(lower(s.source)), 'source_count', count(*)) from image_sources s group by image_id;
|
||||
SQL
|
||||
|
||||
hides: image_search_json
|
||||
psql $(DATABASE) -v ON_ERROR_STOP=1 <<-SQL
|
||||
insert into temp_images.image_search_json (image_id, object) select ih.image_id, jsonb_build_object('hidden_by_ids', jsonb_agg(ih.user_id), 'hidden_by', jsonb_agg(lower(u.name))) from image_hides ih inner join users u on u.id = ih.user_id group by image_id;
|
||||
insert into temp_images.image_search_json (image_id, object) select ih.image_id, jsonb_build_object('hidden_by_user_ids', jsonb_agg(ih.user_id), 'hidden_by_users', jsonb_agg(lower(u.name))) from image_hides ih inner join users u on u.id = ih.user_id group by image_id;
|
||||
SQL
|
||||
|
||||
downvotes: image_search_json
|
||||
psql $(DATABASE) -v ON_ERROR_STOP=1 <<-SQL
|
||||
insert into temp_images.image_search_json (image_id, object) select iv.image_id, jsonb_build_object('downvoted_by_ids', jsonb_agg(iv.user_id), 'downvoted_by', jsonb_agg(lower(u.name))) from image_votes iv inner join users u on u.id = iv.user_id where iv.up = false group by image_id;
|
||||
insert into temp_images.image_search_json (image_id, object) select iv.image_id, jsonb_build_object('downvoter_ids', jsonb_agg(iv.user_id), 'downvoters', jsonb_agg(lower(u.name))) from image_votes iv inner join users u on u.id = iv.user_id where iv.up = false group by image_id;
|
||||
SQL
|
||||
|
||||
upvotes: image_search_json
|
||||
psql $(DATABASE) -v ON_ERROR_STOP=1 <<-SQL
|
||||
insert into temp_images.image_search_json (image_id, object) select iv.image_id, jsonb_build_object('upvoted_by_ids', jsonb_agg(iv.user_id), 'upvoted_by', jsonb_agg(lower(u.name))) from image_votes iv inner join users u on u.id = iv.user_id where iv.up = true group by image_id;
|
||||
insert into temp_images.image_search_json (image_id, object) select iv.image_id, jsonb_build_object('upvoter_ids', jsonb_agg(iv.user_id), 'upvoters', jsonb_agg(lower(u.name))) from image_votes iv inner join users u on u.id = iv.user_id where iv.up = true group by image_id;
|
||||
SQL
|
||||
|
||||
faves: image_search_json
|
||||
psql $(DATABASE) -v ON_ERROR_STOP=1 <<-SQL
|
||||
insert into temp_images.image_search_json (image_id, object) select if.image_id, jsonb_build_object('faved_by_ids', jsonb_agg(if.user_id), 'faved_by', jsonb_agg(lower(u.name))) from image_faves if inner join users u on u.id = if.user_id group by image_id;
|
||||
insert into temp_images.image_search_json (image_id, object) select if.image_id, jsonb_build_object('favourited_by_user_ids', jsonb_agg(if.user_id), 'favourited_by_users', jsonb_agg(lower(u.name))) from image_faves if inner join users u on u.id = if.user_id group by image_id;
|
||||
SQL
|
||||
|
||||
tag_names: tags_with_aliases
|
||||
|
|
|
@ -203,7 +203,7 @@ defmodule Philomena.Galleries do
|
|||
|> case do
|
||||
{:ok, result} ->
|
||||
Images.reindex_image(image)
|
||||
notify_gallery(gallery)
|
||||
notify_gallery(gallery, image)
|
||||
reindex_gallery(gallery)
|
||||
|
||||
{:ok, result}
|
||||
|
@ -261,11 +261,11 @@ defmodule Philomena.Galleries do
|
|||
|> Repo.aggregate(:max, :position)
|
||||
end
|
||||
|
||||
def notify_gallery(gallery) do
|
||||
Exq.enqueue(Exq, "notifications", NotificationWorker, ["Galleries", gallery.id])
|
||||
def notify_gallery(gallery, image) do
|
||||
Exq.enqueue(Exq, "notifications", NotificationWorker, ["Galleries", [gallery.id, image.id]])
|
||||
end
|
||||
|
||||
def perform_notify(gallery_id) do
|
||||
def perform_notify([gallery_id, image_id]) do
|
||||
gallery = get_gallery!(gallery_id)
|
||||
|
||||
subscriptions =
|
||||
|
@ -279,8 +279,8 @@ defmodule Philomena.Galleries do
|
|||
%{
|
||||
actor_id: gallery.id,
|
||||
actor_type: "Gallery",
|
||||
actor_child_id: nil,
|
||||
actor_child_type: nil,
|
||||
actor_child_id: image_id,
|
||||
actor_child_type: "Image",
|
||||
action: "added images to"
|
||||
}
|
||||
)
|
||||
|
|
|
@ -210,11 +210,15 @@ defmodule Philomena.Images do
|
|||
|
||||
defp maybe_suggest_user_verification(_user), do: false
|
||||
|
||||
def count_pending_approvals() do
|
||||
Image
|
||||
|> where(hidden_from_users: false)
|
||||
|> where(approved: false)
|
||||
|> Repo.aggregate(:count)
|
||||
def count_pending_approvals(user) do
|
||||
if Canada.Can.can?(user, :approve, %Image{}) do
|
||||
Image
|
||||
|> where(hidden_from_users: false)
|
||||
|> where(approved: false)
|
||||
|> Repo.aggregate(:count)
|
||||
else
|
||||
nil
|
||||
end
|
||||
end
|
||||
|
||||
def feature_image(featurer, %Image{} = image) do
|
||||
|
|
|
@ -56,6 +56,7 @@ defmodule Philomena.Images.ElasticsearchIndex do
|
|||
size: %{type: "integer"},
|
||||
sha512_hash: %{type: "keyword"},
|
||||
source_url: %{type: "keyword"},
|
||||
source_count: %{type: "integer"},
|
||||
tag_count: %{type: "integer"},
|
||||
tag_ids: %{type: "keyword"},
|
||||
tags: %{type: "text", analyzer: "keyword"},
|
||||
|
@ -87,7 +88,17 @@ defmodule Philomena.Images.ElasticsearchIndex do
|
|||
namespace: %{type: "keyword"}
|
||||
}
|
||||
},
|
||||
approved: %{type: "boolean"}
|
||||
approved: %{type: "boolean"},
|
||||
error_tag_count: %{type: "integer"},
|
||||
rating_tag_count: %{type: "integer"},
|
||||
origin_tag_count: %{type: "integer"},
|
||||
character_tag_count: %{type: "integer"},
|
||||
oc_tag_count: %{type: "integer"},
|
||||
species_tag_count: %{type: "integer"},
|
||||
body_type_tag_count: %{type: "integer"},
|
||||
content_fanmade_tag_count: %{type: "integer"},
|
||||
content_official_tag_count: %{type: "integer"},
|
||||
spoiler_tag_count: %{type: "integer"}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -120,6 +131,7 @@ defmodule Philomena.Images.ElasticsearchIndex do
|
|||
uploader: if(!!image.user and !image.anonymous, do: String.downcase(image.user.name)),
|
||||
true_uploader: if(!!image.user, do: String.downcase(image.user.name)),
|
||||
source_url: image.sources |> Enum.map(&String.downcase(&1.source)),
|
||||
source_count: length(image.sources),
|
||||
file_name: image.image_name,
|
||||
original_format: image.image_format,
|
||||
processed: image.processed,
|
||||
|
@ -151,7 +163,17 @@ defmodule Philomena.Images.ElasticsearchIndex do
|
|||
upvoters: image.upvoters |> Enum.map(&String.downcase(&1.name)),
|
||||
downvoters: image.downvoters |> Enum.map(&String.downcase(&1.name)),
|
||||
deleted_by_user: if(!!image.deleter, do: image.deleter.name),
|
||||
approved: image.approved
|
||||
approved: image.approved,
|
||||
error_tag_count: Enum.count(image.tags, &(&1.category == "error")),
|
||||
rating_tag_count: Enum.count(image.tags, &(&1.category == "rating")),
|
||||
origin_tag_count: Enum.count(image.tags, &(&1.category == "origin")),
|
||||
character_tag_count: Enum.count(image.tags, &(&1.category == "character")),
|
||||
oc_tag_count: Enum.count(image.tags, &(&1.category == "oc")),
|
||||
species_tag_count: Enum.count(image.tags, &(&1.category == "species")),
|
||||
body_type_tag_count: Enum.count(image.tags, &(&1.category == "body-type")),
|
||||
content_fanmade_tag_count: Enum.count(image.tags, &(&1.category == "content-fanmade")),
|
||||
content_official_tag_count: Enum.count(image.tags, &(&1.category == "content-official")),
|
||||
spoiler_tag_count: Enum.count(image.tags, &(&1.category == "spoiler"))
|
||||
}
|
||||
end
|
||||
|
||||
|
|
|
@ -187,7 +187,7 @@ defmodule Philomena.Images.Image do
|
|||
height = fetch_field!(changeset, :image_height)
|
||||
|
||||
cond do
|
||||
width <= 0 or height <= 0 ->
|
||||
is_nil(width) or is_nil(height) or width <= 0 or height <= 0 ->
|
||||
add_error(
|
||||
changeset,
|
||||
:image,
|
||||
|
@ -379,7 +379,7 @@ defmodule Philomena.Images.Image do
|
|||
tags
|
||||
|> Enum.map_join("_", & &1.slug)
|
||||
|> String.to_charlist()
|
||||
|> Enum.filter(&(&1 in ?a..?z or &1 in '0123456789_-'))
|
||||
|> Enum.filter(&(&1 in ?a..?z or &1 in ~c"0123456789_-"))
|
||||
|> List.to_string()
|
||||
|> String.slice(0..150)
|
||||
|
||||
|
|
|
@ -66,14 +66,30 @@ defmodule Philomena.Images.Query do
|
|||
end
|
||||
end
|
||||
|
||||
defp tag_count_fields do
|
||||
[
|
||||
"body_type_tag_count",
|
||||
"error_tag_count",
|
||||
"character_tag_count",
|
||||
"content_fanmade_tag_count",
|
||||
"content_official_tag_count",
|
||||
"oc_tag_count",
|
||||
"origin_tag_count",
|
||||
"rating_tag_count",
|
||||
"species_tag_count",
|
||||
"spoiler_tag_count"
|
||||
]
|
||||
end
|
||||
|
||||
defp anonymous_fields do
|
||||
[
|
||||
int_fields:
|
||||
~W(id width height comment_count score upvotes downvotes faves uploader_id faved_by_id tag_count pixels size),
|
||||
~W(id width height score upvotes downvotes faves uploader_id faved_by_id pixels size comment_count source_count tag_count) ++
|
||||
tag_count_fields(),
|
||||
float_fields: ~W(aspect_ratio wilson_score duration),
|
||||
date_fields: ~W(created_at updated_at first_seen_at),
|
||||
literal_fields:
|
||||
~W(faved_by orig_sha512_hash sha512_hash uploader source_url original_format mime_type),
|
||||
~W(faved_by orig_sha512_hash sha512_hash uploader source_url original_format mime_type file_name),
|
||||
bool_fields: ~W(animated processed thumbnails_generated),
|
||||
ngram_fields: ~W(description),
|
||||
custom_fields: ~W(gallery_id),
|
||||
|
@ -82,7 +98,8 @@ defmodule Philomena.Images.Query do
|
|||
aliases: %{
|
||||
"faved_by" => "favourited_by_users",
|
||||
"faved_by_id" => "favourited_by_user_ids"
|
||||
}
|
||||
},
|
||||
no_downcase_fields: ~W(file_name)
|
||||
]
|
||||
end
|
||||
|
||||
|
|
|
@ -5,6 +5,7 @@ defmodule Philomena.Images.Thumbnailer do
|
|||
|
||||
alias Philomena.DuplicateReports
|
||||
alias Philomena.ImageIntensities
|
||||
alias Philomena.ImagePurgeWorker
|
||||
alias Philomena.Images.Image
|
||||
alias Philomena.Processors
|
||||
alias Philomena.Analyzers
|
||||
|
@ -76,22 +77,41 @@ defmodule Philomena.Images.Thumbnailer do
|
|||
file = download_image_file(image)
|
||||
{:ok, analysis} = Analyzers.analyze(file)
|
||||
|
||||
apply_edit_script(image, Processors.process(analysis, file, generated_sizes(image)))
|
||||
file =
|
||||
apply_edit_script(image, file, Processors.process(analysis, file, generated_sizes(image)))
|
||||
|
||||
generate_dupe_reports(image)
|
||||
recompute_meta(image, file, &Image.thumbnail_changeset/2)
|
||||
|
||||
apply_edit_script(image, Processors.post_process(analysis, file))
|
||||
file = apply_edit_script(image, file, Processors.post_process(analysis, file))
|
||||
recompute_meta(image, file, &Image.process_changeset/2)
|
||||
end
|
||||
|
||||
defp apply_edit_script(image, changes),
|
||||
do: Enum.map(changes, &apply_change(image, &1))
|
||||
defp apply_edit_script(image, file, changes) do
|
||||
Enum.reduce(changes, file, fn change, existing_file ->
|
||||
apply_change(image, change)
|
||||
|
||||
case change do
|
||||
{:replace_original, new_file} ->
|
||||
new_file
|
||||
|
||||
_ ->
|
||||
existing_file
|
||||
end
|
||||
end)
|
||||
end
|
||||
|
||||
defp apply_change(image, {:intensities, intensities}),
|
||||
do: ImageIntensities.create_image_intensity(image, intensities)
|
||||
|
||||
defp apply_change(image, {:replace_original, new_file}),
|
||||
do: upload_file(image, new_file, "full.#{image.image_format}")
|
||||
defp apply_change(image, {:replace_original, new_file}) do
|
||||
full = "full.#{image.image_format}"
|
||||
upload_file(image, new_file, full)
|
||||
|
||||
Exq.enqueue(Exq, "indexing", ImagePurgeWorker, [
|
||||
Path.join(image_url_base(image, nil), full)
|
||||
])
|
||||
end
|
||||
|
||||
defp apply_change(image, {:thumbnails, thumbnails}),
|
||||
do: Enum.map(thumbnails, &apply_thumbnail(image, &1))
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
defmodule Philomena.Native do
|
||||
@moduledoc false
|
||||
|
||||
use Rustler, otp_app: :philomena
|
||||
use Rustler, otp_app: :philomena, crate: "philomena"
|
||||
|
||||
@spec markdown_to_html(String.t(), %{String.t() => String.t()}) :: String.t()
|
||||
def markdown_to_html(_text, _replacements), do: :erlang.nif_error(:nif_not_loaded)
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Reference in a new issue