mirror of
https://github.com/philomena-dev/philomena.git
synced 2025-01-20 06:37:59 +01:00
Merge branch 'master' into redesign
This commit is contained in:
commit
bec260fffb
85 changed files with 4043 additions and 3216 deletions
22
.github/workflows/elixir.yml
vendored
22
.github/workflows/elixir.yml
vendored
|
@ -7,40 +7,40 @@ jobs:
|
||||||
name: 'Build Elixir app'
|
name: 'Build Elixir app'
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Cache mix deps
|
- name: Cache mix deps
|
||||||
uses: actions/cache@v2
|
uses: actions/cache@v4
|
||||||
with:
|
with:
|
||||||
path: |
|
path: |
|
||||||
_build
|
_build
|
||||||
deps
|
deps
|
||||||
key: ${{ runner.os }}-build-deps-${{ hashFiles('mix.lock') }}
|
key: ${{ runner.os }}-build-deps-${{ hashFiles('mix.lock') }}
|
||||||
|
|
||||||
- run: docker-compose pull
|
- run: docker compose pull
|
||||||
- run: docker-compose build
|
- run: docker compose build
|
||||||
|
|
||||||
- name: Build and test
|
- name: Build and test
|
||||||
run: docker-compose run app run-test
|
run: docker compose run app run-test
|
||||||
|
|
||||||
- name: Security lint
|
- name: Security lint
|
||||||
run: |
|
run: |
|
||||||
docker-compose run app mix sobelow --config
|
docker compose run app mix sobelow --config
|
||||||
docker-compose run app mix deps.audit
|
docker compose run app mix deps.audit
|
||||||
lint-and-test:
|
lint-and-test:
|
||||||
name: 'JavaScript Linting and Unit Tests'
|
name: 'JavaScript Linting and Unit Tests'
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Setup Node.js
|
- name: Setup Node.js
|
||||||
uses: actions/setup-node@v2
|
uses: actions/setup-node@v4
|
||||||
with:
|
with:
|
||||||
node-version: '16'
|
node-version: '20'
|
||||||
|
|
||||||
- name: Cache node_modules
|
- name: Cache node_modules
|
||||||
id: cache-node-modules
|
id: cache-node-modules
|
||||||
uses: actions/cache@v2
|
uses: actions/cache@v4
|
||||||
with:
|
with:
|
||||||
path: ./assets/node_modules
|
path: ./assets/node_modules
|
||||||
key: node_modules-${{ hashFiles('./assets/package-lock.json') }}
|
key: node_modules-${{ hashFiles('./assets/package-lock.json') }}
|
||||||
|
|
|
@ -2,11 +2,11 @@
|
||||||
![Philomena](/assets/static/images/phoenix.svg)
|
![Philomena](/assets/static/images/phoenix.svg)
|
||||||
|
|
||||||
## Getting started
|
## Getting started
|
||||||
On systems with `docker` and `docker-compose` installed, the process should be as simple as:
|
On systems with `docker` and `docker compose` installed, the process should be as simple as:
|
||||||
|
|
||||||
```
|
```
|
||||||
docker-compose build
|
docker compose build
|
||||||
docker-compose up
|
docker compose up
|
||||||
```
|
```
|
||||||
|
|
||||||
If you use `podman` and `podman-compose` instead, the process for constructing a rootless container is nearly identical:
|
If you use `podman` and `podman-compose` instead, the process for constructing a rootless container is nearly identical:
|
||||||
|
|
|
@ -30,12 +30,13 @@ export default {
|
||||||
moduleNameMapper: {
|
moduleNameMapper: {
|
||||||
'./js/(.*)': '<rootDir>/js/$1',
|
'./js/(.*)': '<rootDir>/js/$1',
|
||||||
},
|
},
|
||||||
transform: {},
|
transform: {
|
||||||
globals: {
|
'^.+\\.tsx?$': ['ts-jest', {
|
||||||
extensionsToTreatAsEsm: ['.ts', '.js'],
|
|
||||||
'ts-jest': {
|
|
||||||
tsconfig: '<rootDir>/tsconfig.json',
|
tsconfig: '<rootDir>/tsconfig.json',
|
||||||
useESM: true,
|
useESM: true,
|
||||||
|
}]
|
||||||
},
|
},
|
||||||
},
|
globals: {
|
||||||
|
extensionsToTreatAsEsm: ['.ts', '.js'],
|
||||||
|
}
|
||||||
};
|
};
|
||||||
|
|
114
assets/js/__tests__/timeago.spec.ts
Normal file
114
assets/js/__tests__/timeago.spec.ts
Normal file
|
@ -0,0 +1,114 @@
|
||||||
|
import { timeAgo, setupTimestamps } from '../timeago';
|
||||||
|
|
||||||
|
const epochRfc3339 = '1970-01-01T00:00:00.000Z';
|
||||||
|
|
||||||
|
describe('Timeago functionality', () => {
|
||||||
|
// TODO: is this robust? do we need e.g. timekeeper to freeze the time?
|
||||||
|
function timeAgoWithSecondOffset(offset: number) {
|
||||||
|
const utc = new Date(new Date().getTime() + offset * 1000).toISOString();
|
||||||
|
|
||||||
|
const timeEl = document.createElement('time');
|
||||||
|
timeEl.setAttribute('datetime', utc);
|
||||||
|
timeEl.textContent = utc;
|
||||||
|
|
||||||
|
timeAgo([timeEl]);
|
||||||
|
return timeEl.textContent;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* eslint-disable no-implicit-coercion */
|
||||||
|
it('should parse a time as less than a minute', () => {
|
||||||
|
expect(timeAgoWithSecondOffset(-15)).toEqual('less than a minute ago');
|
||||||
|
expect(timeAgoWithSecondOffset(+15)).toEqual('less than a minute from now');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should parse a time as about a minute', () => {
|
||||||
|
expect(timeAgoWithSecondOffset(-75)).toEqual('about a minute ago');
|
||||||
|
expect(timeAgoWithSecondOffset(+75)).toEqual('about a minute from now');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should parse a time as 30 minutes', () => {
|
||||||
|
expect(timeAgoWithSecondOffset(-(60 * 30))).toEqual('30 minutes ago');
|
||||||
|
expect(timeAgoWithSecondOffset(+(60 * 30))).toEqual('30 minutes from now');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should parse a time as about an hour', () => {
|
||||||
|
expect(timeAgoWithSecondOffset(-(60 * 60))).toEqual('about an hour ago');
|
||||||
|
expect(timeAgoWithSecondOffset(+(60 * 60))).toEqual('about an hour from now');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should parse a time as about 6 hours', () => {
|
||||||
|
expect(timeAgoWithSecondOffset(-(60 * 60 * 6))).toEqual('about 6 hours ago');
|
||||||
|
expect(timeAgoWithSecondOffset(+(60 * 60 * 6))).toEqual('about 6 hours from now');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should parse a time as a day', () => {
|
||||||
|
expect(timeAgoWithSecondOffset(-(60 * 60 * 36))).toEqual('a day ago');
|
||||||
|
expect(timeAgoWithSecondOffset(+(60 * 60 * 36))).toEqual('a day from now');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should parse a time as 25 days', () => {
|
||||||
|
expect(timeAgoWithSecondOffset(-(60 * 60 * 24 * 25))).toEqual('25 days ago');
|
||||||
|
expect(timeAgoWithSecondOffset(+(60 * 60 * 24 * 25))).toEqual('25 days from now');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should parse a time as about a month', () => {
|
||||||
|
expect(timeAgoWithSecondOffset(-(60 * 60 * 24 * 35))).toEqual('about a month ago');
|
||||||
|
expect(timeAgoWithSecondOffset(+(60 * 60 * 24 * 35))).toEqual('about a month from now');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should parse a time as 3 months', () => {
|
||||||
|
expect(timeAgoWithSecondOffset(-(60 * 60 * 24 * 30 * 3))).toEqual('3 months ago');
|
||||||
|
expect(timeAgoWithSecondOffset(+(60 * 60 * 24 * 30 * 3))).toEqual('3 months from now');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should parse a time as about a year', () => {
|
||||||
|
expect(timeAgoWithSecondOffset(-(60 * 60 * 24 * 30 * 13))).toEqual('about a year ago');
|
||||||
|
expect(timeAgoWithSecondOffset(+(60 * 60 * 24 * 30 * 13))).toEqual('about a year from now');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should parse a time as 5 years', () => {
|
||||||
|
expect(timeAgoWithSecondOffset(-(60 * 60 * 24 * 30 * 12 * 5))).toEqual('5 years ago');
|
||||||
|
expect(timeAgoWithSecondOffset(+(60 * 60 * 24 * 30 * 12 * 5))).toEqual('5 years from now');
|
||||||
|
});
|
||||||
|
/* eslint-enable no-implicit-coercion */
|
||||||
|
|
||||||
|
it('should ignore time elements without a datetime attribute', () => {
|
||||||
|
const timeEl = document.createElement('time');
|
||||||
|
const value = Math.random().toString();
|
||||||
|
|
||||||
|
timeEl.textContent = value;
|
||||||
|
timeAgo([timeEl]);
|
||||||
|
|
||||||
|
expect(timeEl.textContent).toEqual(value);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should not reset title attribute if it already exists', () => {
|
||||||
|
const timeEl = document.createElement('time');
|
||||||
|
const value = Math.random().toString();
|
||||||
|
|
||||||
|
timeEl.setAttribute('datetime', epochRfc3339);
|
||||||
|
timeEl.setAttribute('title', value);
|
||||||
|
timeAgo([timeEl]);
|
||||||
|
|
||||||
|
expect(timeEl.getAttribute('title')).toEqual(value);
|
||||||
|
expect(timeEl.textContent).not.toEqual(epochRfc3339);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('Automatic timestamps', () => {
|
||||||
|
it('should process all timestamps in the document', () => {
|
||||||
|
for (let i = 0; i < 5; i += 1) {
|
||||||
|
const timeEl = document.createElement('time');
|
||||||
|
timeEl.setAttribute('datetime', epochRfc3339);
|
||||||
|
timeEl.textContent = epochRfc3339;
|
||||||
|
|
||||||
|
document.documentElement.insertAdjacentElement('beforeend', timeEl);
|
||||||
|
}
|
||||||
|
|
||||||
|
setupTimestamps();
|
||||||
|
|
||||||
|
for (const timeEl of document.getElementsByTagName('time')) {
|
||||||
|
expect(timeEl.textContent).not.toEqual(epochRfc3339);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
335
assets/js/__tests__/ujs.spec.ts
Normal file
335
assets/js/__tests__/ujs.spec.ts
Normal file
|
@ -0,0 +1,335 @@
|
||||||
|
import fetchMock from 'jest-fetch-mock';
|
||||||
|
import { fireEvent } from '@testing-library/dom';
|
||||||
|
import { assertType } from '../utils/assert';
|
||||||
|
import '../ujs';
|
||||||
|
|
||||||
|
const mockEndpoint = 'http://localhost/endpoint';
|
||||||
|
const mockVerb = 'POST';
|
||||||
|
|
||||||
|
describe('Remote utilities', () => {
|
||||||
|
beforeAll(() => {
|
||||||
|
fetchMock.enableMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
afterAll(() => {
|
||||||
|
fetchMock.disableMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
window.booru.csrfToken = Math.random().toString();
|
||||||
|
fetchMock.resetMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
function addOneShotEventListener(name: string, cb: (e: Event) => void) {
|
||||||
|
const handler = (event: Event) => {
|
||||||
|
cb(event);
|
||||||
|
document.removeEventListener(name, handler);
|
||||||
|
};
|
||||||
|
document.addEventListener(name, handler);
|
||||||
|
}
|
||||||
|
|
||||||
|
describe('a[data-remote]', () => {
|
||||||
|
const submitA = ({ setMethod }: { setMethod: boolean; }) => {
|
||||||
|
const a = document.createElement('a');
|
||||||
|
a.href = mockEndpoint;
|
||||||
|
a.dataset.remote = 'remote';
|
||||||
|
if (setMethod) {
|
||||||
|
a.dataset.method = mockVerb;
|
||||||
|
}
|
||||||
|
|
||||||
|
document.documentElement.insertAdjacentElement('beforeend', a);
|
||||||
|
a.click();
|
||||||
|
|
||||||
|
return a;
|
||||||
|
};
|
||||||
|
|
||||||
|
it('should call native fetch with the correct parameters (without body)', () => {
|
||||||
|
submitA({ setMethod: true });
|
||||||
|
expect(fetch).toHaveBeenCalledTimes(1);
|
||||||
|
expect(fetch).toHaveBeenNthCalledWith(1, mockEndpoint, {
|
||||||
|
method: mockVerb,
|
||||||
|
credentials: 'same-origin',
|
||||||
|
headers: {
|
||||||
|
'x-csrf-token': window.booru.csrfToken,
|
||||||
|
'x-requested-with': 'XMLHttpRequest'
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should call native fetch for a get request without explicit method', () => {
|
||||||
|
submitA({ setMethod: false });
|
||||||
|
expect(fetch).toHaveBeenCalledTimes(1);
|
||||||
|
expect(fetch).toHaveBeenNthCalledWith(1, mockEndpoint, {
|
||||||
|
method: 'GET',
|
||||||
|
credentials: 'same-origin',
|
||||||
|
headers: {
|
||||||
|
'x-csrf-token': window.booru.csrfToken,
|
||||||
|
'x-requested-with': 'XMLHttpRequest'
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should emit fetchcomplete event', () => new Promise<void>(resolve => {
|
||||||
|
let a: HTMLAnchorElement | null = null;
|
||||||
|
|
||||||
|
addOneShotEventListener('fetchcomplete', event => {
|
||||||
|
expect(event.target).toBe(a);
|
||||||
|
resolve();
|
||||||
|
});
|
||||||
|
|
||||||
|
a = submitA({ setMethod: true });
|
||||||
|
}));
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('a[data-method]', () => {
|
||||||
|
const submitA = () => {
|
||||||
|
const a = document.createElement('a');
|
||||||
|
a.href = mockEndpoint;
|
||||||
|
a.dataset.method = mockVerb;
|
||||||
|
|
||||||
|
document.documentElement.insertAdjacentElement('beforeend', a);
|
||||||
|
a.click();
|
||||||
|
|
||||||
|
return a;
|
||||||
|
};
|
||||||
|
|
||||||
|
it('should submit a form with the given action', () => new Promise<void>(resolve => {
|
||||||
|
addOneShotEventListener('submit', event => {
|
||||||
|
event.preventDefault();
|
||||||
|
|
||||||
|
const target = assertType(event.target, HTMLFormElement);
|
||||||
|
const [ csrf, method ] = target.querySelectorAll('input');
|
||||||
|
|
||||||
|
expect(csrf.name).toBe('_csrf_token');
|
||||||
|
expect(csrf.value).toBe(window.booru.csrfToken);
|
||||||
|
|
||||||
|
expect(method.name).toBe('_method');
|
||||||
|
expect(method.value).toBe(mockVerb);
|
||||||
|
|
||||||
|
resolve();
|
||||||
|
});
|
||||||
|
|
||||||
|
submitA();
|
||||||
|
}));
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('form[data-remote]', () => {
|
||||||
|
// https://www.benmvp.com/blog/mocking-window-location-methods-jest-jsdom/
|
||||||
|
let oldWindowLocation: Location;
|
||||||
|
|
||||||
|
beforeAll(() => {
|
||||||
|
oldWindowLocation = window.location;
|
||||||
|
delete (window as any).location;
|
||||||
|
|
||||||
|
(window as any).location = Object.defineProperties(
|
||||||
|
{},
|
||||||
|
{
|
||||||
|
...Object.getOwnPropertyDescriptors(oldWindowLocation),
|
||||||
|
reload: {
|
||||||
|
configurable: true,
|
||||||
|
value: jest.fn(),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
(window.location.reload as any).mockReset();
|
||||||
|
});
|
||||||
|
|
||||||
|
afterAll(() => {
|
||||||
|
// restore window.location to the jsdom Location object
|
||||||
|
window.location = oldWindowLocation;
|
||||||
|
});
|
||||||
|
|
||||||
|
const configureForm = () => {
|
||||||
|
const form = document.createElement('form');
|
||||||
|
form.action = mockEndpoint;
|
||||||
|
form.dataset.remote = 'remote';
|
||||||
|
document.documentElement.insertAdjacentElement('beforeend', form);
|
||||||
|
return form;
|
||||||
|
};
|
||||||
|
|
||||||
|
const submitForm = () => {
|
||||||
|
const form = configureForm();
|
||||||
|
form.method = mockVerb;
|
||||||
|
form.submit();
|
||||||
|
return form;
|
||||||
|
};
|
||||||
|
|
||||||
|
it('should call native fetch with the correct parameters (with body)', () => {
|
||||||
|
submitForm();
|
||||||
|
expect(fetch).toHaveBeenCalledTimes(1);
|
||||||
|
expect(fetch).toHaveBeenNthCalledWith(1, mockEndpoint, {
|
||||||
|
method: mockVerb,
|
||||||
|
credentials: 'same-origin',
|
||||||
|
headers: {
|
||||||
|
'x-csrf-token': window.booru.csrfToken,
|
||||||
|
'x-requested-with': 'XMLHttpRequest'
|
||||||
|
},
|
||||||
|
body: new FormData(),
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should submit a PUT request with put data-method specified', () => {
|
||||||
|
const form = configureForm();
|
||||||
|
form.dataset.method = 'put';
|
||||||
|
form.submit();
|
||||||
|
expect(fetch).toHaveBeenCalledTimes(1);
|
||||||
|
expect(fetch).toHaveBeenNthCalledWith(1, mockEndpoint, {
|
||||||
|
method: 'PUT',
|
||||||
|
credentials: 'same-origin',
|
||||||
|
headers: {
|
||||||
|
'x-csrf-token': window.booru.csrfToken,
|
||||||
|
'x-requested-with': 'XMLHttpRequest'
|
||||||
|
},
|
||||||
|
body: new FormData(),
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should emit fetchcomplete event', () => new Promise<void>(resolve => {
|
||||||
|
let form: HTMLFormElement | null = null;
|
||||||
|
|
||||||
|
addOneShotEventListener('fetchcomplete', event => {
|
||||||
|
expect(event.target).toBe(form);
|
||||||
|
resolve();
|
||||||
|
});
|
||||||
|
|
||||||
|
form = submitForm();
|
||||||
|
}));
|
||||||
|
|
||||||
|
it('should reload the page on 300 multiple choices response', () => {
|
||||||
|
const promiseLike = {
|
||||||
|
then(cb: (r: Response) => void) {
|
||||||
|
if (cb) {
|
||||||
|
cb(new Response('', { status: 300 }));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
jest.spyOn(global, 'fetch').mockReturnValue(promiseLike as any);
|
||||||
|
|
||||||
|
submitForm();
|
||||||
|
expect(window.location.reload).toHaveBeenCalledTimes(1);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('Form utilities', () => {
|
||||||
|
beforeEach(() => {
|
||||||
|
jest.spyOn(window, 'requestAnimationFrame').mockImplementation(cb => {
|
||||||
|
cb(1);
|
||||||
|
return 1;
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
jest.clearAllMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('[data-confirm]', () => {
|
||||||
|
const createA = () => {
|
||||||
|
const a = document.createElement('a');
|
||||||
|
a.dataset.confirm = 'confirm';
|
||||||
|
a.href = mockEndpoint;
|
||||||
|
document.documentElement.insertAdjacentElement('beforeend', a);
|
||||||
|
return a;
|
||||||
|
};
|
||||||
|
|
||||||
|
it('should cancel the event on failed confirm', () => {
|
||||||
|
const a = createA();
|
||||||
|
const confirm = jest.spyOn(window, 'confirm').mockImplementationOnce(() => false);
|
||||||
|
const event = new MouseEvent('click', { bubbles: true, cancelable: true });
|
||||||
|
|
||||||
|
expect(fireEvent(a, event)).toBe(false);
|
||||||
|
expect(confirm).toHaveBeenCalledTimes(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should allow the event on confirm', () => {
|
||||||
|
const a = createA();
|
||||||
|
const confirm = jest.spyOn(window, 'confirm').mockImplementationOnce(() => true);
|
||||||
|
const event = new MouseEvent('click', { bubbles: true, cancelable: true });
|
||||||
|
|
||||||
|
expect(fireEvent(a, event)).toBe(true);
|
||||||
|
expect(confirm).toHaveBeenCalledTimes(1);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('[data-disable-with][data-enable-with]', () => {
|
||||||
|
const createFormAndButton = (innerHTML: string, disableWith: string) => {
|
||||||
|
const form = document.createElement('form');
|
||||||
|
form.action = mockEndpoint;
|
||||||
|
|
||||||
|
// jsdom has no implementation for HTMLFormElement.prototype.submit
|
||||||
|
// and will return an error if the event's default isn't prevented
|
||||||
|
form.addEventListener('submit', event => event.preventDefault());
|
||||||
|
|
||||||
|
const button = document.createElement('button');
|
||||||
|
button.type = 'submit';
|
||||||
|
button.innerHTML = innerHTML;
|
||||||
|
button.dataset.disableWith = disableWith;
|
||||||
|
|
||||||
|
form.insertAdjacentElement('beforeend', button);
|
||||||
|
document.documentElement.insertAdjacentElement('beforeend', form);
|
||||||
|
|
||||||
|
return [ form, button ];
|
||||||
|
};
|
||||||
|
|
||||||
|
const submitText = 'Submit';
|
||||||
|
const loadingText = 'Loading...';
|
||||||
|
const submitMarkup = '<em>Submit</em>';
|
||||||
|
const loadingMarkup = '<em>Loading...</em>';
|
||||||
|
|
||||||
|
it('should disable submit button containing a text child on click', () => {
|
||||||
|
const [ , button ] = createFormAndButton(submitText, loadingText);
|
||||||
|
button.click();
|
||||||
|
|
||||||
|
expect(button.textContent).toEqual(' Loading...');
|
||||||
|
expect(button.dataset.enableWith).toEqual(submitText);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should disable submit button containing element children on click', () => {
|
||||||
|
const [ , button ] = createFormAndButton(submitMarkup, loadingMarkup);
|
||||||
|
button.click();
|
||||||
|
|
||||||
|
expect(button.innerHTML).toEqual(loadingMarkup);
|
||||||
|
expect(button.dataset.enableWith).toEqual(submitMarkup);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should not disable anything when the form is invalid', () => {
|
||||||
|
const [ form, button ] = createFormAndButton(submitText, loadingText);
|
||||||
|
form.insertAdjacentHTML('afterbegin', '<input type="text" name="valid" required="true" />');
|
||||||
|
button.click();
|
||||||
|
|
||||||
|
expect(button.textContent).toEqual(submitText);
|
||||||
|
expect(button.dataset.enableWith).not.toBeDefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should reset submit button containing a text child on completion', () => {
|
||||||
|
const [ form, button ] = createFormAndButton(submitText, loadingText);
|
||||||
|
button.click();
|
||||||
|
fireEvent(form, new CustomEvent('reset', { bubbles: true }));
|
||||||
|
|
||||||
|
expect(button.textContent?.trim()).toEqual(submitText);
|
||||||
|
expect(button.dataset.enableWith).not.toBeDefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should reset submit button containing element children on completion', () => {
|
||||||
|
const [ form, button ] = createFormAndButton(submitMarkup, loadingMarkup);
|
||||||
|
button.click();
|
||||||
|
fireEvent(form, new CustomEvent('reset', { bubbles: true }));
|
||||||
|
|
||||||
|
expect(button.innerHTML).toEqual(submitMarkup);
|
||||||
|
expect(button.dataset.enableWith).not.toBeDefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should reset disabled form elements on pageshow', () => {
|
||||||
|
const [ , button ] = createFormAndButton(submitText, loadingText);
|
||||||
|
button.click();
|
||||||
|
fireEvent(window, new CustomEvent('pageshow'));
|
||||||
|
|
||||||
|
expect(button.textContent?.trim()).toEqual(submitText);
|
||||||
|
expect(button.dataset.enableWith).not.toBeDefined();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
|
@ -134,16 +134,19 @@ function listenAutocomplete() {
|
||||||
document.addEventListener('input', event => {
|
document.addEventListener('input', event => {
|
||||||
removeParent();
|
removeParent();
|
||||||
fetchLocalAutocomplete(event);
|
fetchLocalAutocomplete(event);
|
||||||
|
window.clearTimeout(timeout);
|
||||||
|
|
||||||
if (localAc !== null && 'ac' in event.target.dataset) {
|
if (localAc !== null && 'ac' in event.target.dataset) {
|
||||||
inputField = event.target;
|
inputField = event.target;
|
||||||
originalTerm = `${inputField.value}`.toLowerCase();
|
originalTerm = `${inputField.value}`.toLowerCase();
|
||||||
|
|
||||||
const suggestions = localAc.topK(originalTerm, 5).map(({ name, imageCount }) => ({ label: `${name} (${imageCount})`, value: name }));
|
const suggestions = localAc.topK(originalTerm, 5).map(({ name, imageCount }) => ({ label: `${name} (${imageCount})`, value: name }));
|
||||||
|
|
||||||
|
if (suggestions.length) {
|
||||||
return showAutocomplete(suggestions, originalTerm, event.target);
|
return showAutocomplete(suggestions, originalTerm, event.target);
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
window.clearTimeout(timeout);
|
|
||||||
// Use a timeout to delay requests until the user has stopped typing
|
// Use a timeout to delay requests until the user has stopped typing
|
||||||
timeout = window.setTimeout(() => {
|
timeout = window.setTimeout(() => {
|
||||||
inputField = event.target;
|
inputField = event.target;
|
||||||
|
@ -158,7 +161,11 @@ function listenAutocomplete() {
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
// inputField could get overwritten while the suggestions are being fetched - use event.target
|
// inputField could get overwritten while the suggestions are being fetched - use event.target
|
||||||
getSuggestions(fetchedTerm).then(suggestions => showAutocomplete(suggestions, fetchedTerm, event.target));
|
getSuggestions(fetchedTerm).then(suggestions => {
|
||||||
|
if (fetchedTerm === event.target.value) {
|
||||||
|
showAutocomplete(suggestions, fetchedTerm, event.target);
|
||||||
|
}
|
||||||
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}, 300);
|
}, 300);
|
||||||
|
|
|
@ -1,11 +0,0 @@
|
||||||
// Action Cable provides the framework to deal with WebSockets in Rails.
|
|
||||||
// You can generate new channels where WebSocket features live using the rails generate channel command.
|
|
||||||
let cable;
|
|
||||||
|
|
||||||
function setupCable() {
|
|
||||||
if (window.booru.userIsSignedIn) {
|
|
||||||
cable = ActionCable.createConsumer();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export { cable, setupCable };
|
|
|
@ -6,6 +6,7 @@ import { $ } from './utils/dom';
|
||||||
import { showOwnedComments } from './communications/comment';
|
import { showOwnedComments } from './communications/comment';
|
||||||
import { filterNode } from './imagesclientside';
|
import { filterNode } from './imagesclientside';
|
||||||
import { fetchHtml } from './utils/requests';
|
import { fetchHtml } from './utils/requests';
|
||||||
|
import { timeAgo } from './timeago';
|
||||||
|
|
||||||
function handleError(response) {
|
function handleError(response) {
|
||||||
|
|
||||||
|
@ -91,7 +92,7 @@ function insertParentPost(data, clickedLink, fullComment) {
|
||||||
fullComment.previousSibling.classList.add('fetched-comment');
|
fullComment.previousSibling.classList.add('fetched-comment');
|
||||||
|
|
||||||
// Execute timeago on the new comment - it was not present when first run
|
// Execute timeago on the new comment - it was not present when first run
|
||||||
window.booru.timeAgo(fullComment.previousSibling.getElementsByTagName('time'));
|
timeAgo(fullComment.previousSibling.getElementsByTagName('time'));
|
||||||
|
|
||||||
// Add class active_reply_link to the clicked link
|
// Add class active_reply_link to the clicked link
|
||||||
clickedLink.classList.add('active_reply_link');
|
clickedLink.classList.add('active_reply_link');
|
||||||
|
@ -125,7 +126,7 @@ function displayComments(container, commentsHtml) {
|
||||||
container.innerHTML = commentsHtml;
|
container.innerHTML = commentsHtml;
|
||||||
|
|
||||||
// Execute timeago on comments
|
// Execute timeago on comments
|
||||||
window.booru.timeAgo(document.getElementsByTagName('time'));
|
timeAgo(document.getElementsByTagName('time'));
|
||||||
|
|
||||||
// Filter images in the comments
|
// Filter images in the comments
|
||||||
filterNode(container);
|
filterNode(container);
|
||||||
|
|
|
@ -1,877 +0,0 @@
|
||||||
/**
|
|
||||||
* booru.match_query: A port and modification of the search_parser library for
|
|
||||||
* performing client-side filtering.
|
|
||||||
*/
|
|
||||||
|
|
||||||
const tokenList = [
|
|
||||||
['fuzz', /^~(?:\d+(\.\d+)?|\.\d+)/],
|
|
||||||
['boost', /^\^[-+]?\d+(\.\d+)?/],
|
|
||||||
['quoted_lit', /^\s*"(?:[^"]|\\")+"/],
|
|
||||||
['lparen', /^\s*\(\s*/],
|
|
||||||
['rparen', /^\s*\)\s*/],
|
|
||||||
['and_op', /^\s*(?:&&|AND)\s+/],
|
|
||||||
['and_op', /^\s*,\s*/],
|
|
||||||
['or_op', /^\s*(?:\|\||OR)\s+/],
|
|
||||||
['not_op', /^\s*NOT(?:\s+|(?=\())/],
|
|
||||||
['not_op', /^\s*[!-]\s*/],
|
|
||||||
['space', /^\s+/],
|
|
||||||
['word', /^(?:\\[\s,()^~]|[^\s,()^~])+/],
|
|
||||||
['word', /^(?:\\[\s,()]|[^\s,()])+/]
|
|
||||||
],
|
|
||||||
numberFields = ['id', 'width', 'height', 'aspect_ratio',
|
|
||||||
'comment_count', 'score', 'upvotes', 'downvotes',
|
|
||||||
'faves', 'tag_count'],
|
|
||||||
dateFields = ['created_at'],
|
|
||||||
literalFields = ['tags', 'orig_sha512_hash', 'sha512_hash',
|
|
||||||
'score', 'uploader', 'source_url', 'description'],
|
|
||||||
termSpaceToImageField = {
|
|
||||||
tags: 'data-image-tag-aliases',
|
|
||||||
score: 'data-score',
|
|
||||||
upvotes: 'data-upvotes',
|
|
||||||
downvotes: 'data-downvotes',
|
|
||||||
uploader: 'data-uploader',
|
|
||||||
// Yeah, I don't think this is reasonably supportable.
|
|
||||||
// faved_by: 'data-faved-by',
|
|
||||||
id: 'data-image-id',
|
|
||||||
width: 'data-width',
|
|
||||||
height: 'data-height',
|
|
||||||
/* eslint-disable camelcase */
|
|
||||||
aspect_ratio: 'data-aspect-ratio',
|
|
||||||
comment_count: 'data-comment-count',
|
|
||||||
tag_count: 'data-tag-count',
|
|
||||||
source_url: 'data-source-url',
|
|
||||||
faves: 'data-faves',
|
|
||||||
sha512_hash: 'data-sha512',
|
|
||||||
orig_sha512_hash: 'data-orig-sha512',
|
|
||||||
created_at: 'data-created-at'
|
|
||||||
/* eslint-enable camelcase */
|
|
||||||
};
|
|
||||||
|
|
||||||
|
|
||||||
function SearchTerm(termStr) {
|
|
||||||
this.term = termStr.trim();
|
|
||||||
this.parsed = false;
|
|
||||||
}
|
|
||||||
|
|
||||||
SearchTerm.prototype.append = function(substr) {
|
|
||||||
this.term += substr;
|
|
||||||
this.parsed = false;
|
|
||||||
};
|
|
||||||
|
|
||||||
SearchTerm.prototype.parseRangeField = function(field) {
|
|
||||||
if (numberFields.indexOf(field) !== -1) {
|
|
||||||
return [field, 'eq', 'number'];
|
|
||||||
}
|
|
||||||
|
|
||||||
if (dateFields.indexOf(field) !== -1) {
|
|
||||||
return [field, 'eq', 'date'];
|
|
||||||
}
|
|
||||||
|
|
||||||
const qual = /^(\w+)\.([lg]te?|eq)$/.exec(field);
|
|
||||||
|
|
||||||
if (qual) {
|
|
||||||
if (numberFields.indexOf(qual[1]) !== -1) {
|
|
||||||
return [qual[1], qual[2], 'number'];
|
|
||||||
}
|
|
||||||
|
|
||||||
if (dateFields.indexOf(qual[1]) !== -1) {
|
|
||||||
return [qual[1], qual[2], 'date'];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return null;
|
|
||||||
};
|
|
||||||
|
|
||||||
SearchTerm.prototype.parseRelativeDate = function(dateVal, qual) {
|
|
||||||
const match = /(\d+) (second|minute|hour|day|week|month|year)s? ago/.exec(dateVal);
|
|
||||||
const bounds = {
|
|
||||||
second: 1000,
|
|
||||||
minute: 60000,
|
|
||||||
hour: 3600000,
|
|
||||||
day: 86400000,
|
|
||||||
week: 604800000,
|
|
||||||
month: 2592000000,
|
|
||||||
year: 31536000000
|
|
||||||
};
|
|
||||||
|
|
||||||
if (match) {
|
|
||||||
const amount = parseInt(match[1], 10);
|
|
||||||
const scale = bounds[match[2]];
|
|
||||||
|
|
||||||
const now = new Date().getTime();
|
|
||||||
const bottomDate = new Date(now - (amount * scale));
|
|
||||||
const topDate = new Date(now - ((amount - 1) * scale));
|
|
||||||
|
|
||||||
switch (qual) {
|
|
||||||
case 'lte':
|
|
||||||
return [bottomDate, 'lt'];
|
|
||||||
case 'gte':
|
|
||||||
return [bottomDate, 'gte'];
|
|
||||||
case 'lt':
|
|
||||||
return [bottomDate, 'lt'];
|
|
||||||
case 'gt':
|
|
||||||
return [bottomDate, 'gte'];
|
|
||||||
default:
|
|
||||||
return [[bottomDate, topDate], 'eq'];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
throw new Error(`Cannot parse date string: ${dateVal}`);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// Parses an absolute date expression such as "2024-06-21T06:21:30+01:30"
// into [timestamp, comparator] (or [[bottom, top], 'eq'] for equality
// ranges). Throws when the string is not a valid absolute date.
SearchTerm.prototype.parseAbsoluteDate = function(dateVal, qual) {
  // Component regexes, consumed left to right:
  // year, month, day, hour, minute, second.
  const componentRes = [
    /^(\d{4})/,
    /^-(\d{2})/,
    /^-(\d{2})/,
    /^(?:\s+|T|t)(\d{2})/,
    /^:(\d{2})/,
    /^:(\d{2})/
  ];
  const tzOffset = [0, 0];
  // [year, month (0-based), day, hour, minute, second]
  const timeData = [0, 0, 1, 0, 0, 0];
  let remaining = dateVal;

  // Pull off an explicit "+hh:mm"/"-hh:mm" zone suffix, or strip a
  // trailing "Z"/"z".
  const tzMatch = /([+-])(\d{2}):(\d{2})$/.exec(remaining);
  if (tzMatch) {
    const sign = tzMatch[1] === '-' ? -1 : 1;
    tzOffset[0] = sign * parseInt(tzMatch[2], 10);
    tzOffset[1] = sign * parseInt(tzMatch[3], 10);
    remaining = remaining.substr(0, remaining.length - 6);
  }
  else {
    remaining = remaining.replace(/[Zz]$/, '');
  }

  // Consume components in order; `fields` counts how many were parsed,
  // which determines the granularity of the implied range below.
  let fields = 0;
  while (fields < componentRes.length && remaining.length > 0) {
    const match = componentRes[fields].exec(remaining);
    if (!match) {
      throw new Error(`Cannot parse date string: ${dateVal}`);
    }
    // Months are zero-based in the Date API.
    timeData[fields] = parseInt(match[1], 10) - (fields === 1 ? 1 : 0);
    remaining = remaining.substr(
      match[0].length, remaining.length - match[0].length
    );
    fields += 1;
  }

  if (remaining.length > 0) {
    throw new Error(`Cannot parse date string: ${dateVal}`);
  }

  // Apply the user-specified time zone offset. The JS Date constructor
  // is very flexible here.
  timeData[3] -= tzOffset[0];
  timeData[4] -= tzOffset[1];

  // Incrementing the least significant parsed field by one yields the
  // exclusive upper bound of the range the string denotes.
  switch (qual) {
    case 'lte':
      timeData[fields - 1] += 1;
      return [Date.UTC.apply(Date, timeData), 'lt'];
    case 'gte':
      return [Date.UTC.apply(Date, timeData), 'gte'];
    case 'lt':
      return [Date.UTC.apply(Date, timeData), 'lt'];
    case 'gt':
      timeData[fields - 1] += 1;
      return [Date.UTC.apply(Date, timeData), 'gte'];
    default: {
      const bottomDate = Date.UTC.apply(Date, timeData);
      timeData[fields - 1] += 1;
      const topDate = Date.UTC.apply(Date, timeData);
      return [[bottomDate, topDate], 'eq'];
    }
  }
};
|
|
||||||
|
|
||||||
// Parses a date expression, trying absolute syntax first and falling
// back to relative ("N units ago") syntax when that throws.
SearchTerm.prototype.parseDate = function(dateVal, qual) {
  try {
    return this.parseAbsoluteDate(dateVal, qual);
  }
  catch (_) {
    // Not an absolute date; let relative parsing raise its own error.
    return this.parseRelativeDate(dateVal, qual);
  }
};
|
|
||||||
|
|
||||||
// Parses this.term in place: decides the term space (field), the term
// type (literal, numeric/date range, or my:interaction) and compiles
// wildcard patterns into a case-insensitive RegExp.
SearchTerm.prototype.parse = function() {
  // Fuzzed and exact-quoted terms are never wildcarded.
  this.wildcardable = !this.fuzz && !/^"([^"]|\\")+"$/.test(this.term);

  // Strip the surrounding quotes from an exact-quoted term.
  if (!this.wildcardable && !this.fuzz) {
    this.term = this.term.substr(1, this.term.length - 2);
  }

  this.term = this._normalizeTerm();

  // N.B.: For the purposes of this parser, boosting effects are ignored.

  // Defaults.
  this.termSpace = 'tags';
  this.termType = 'literal';

  const matchArr = this.term.split(':');

  if (matchArr.length > 1) {
    const candidateTermSpace = matchArr[0],
      termCandidate = matchArr.slice(1).join(':');
    let rangeParsing = this.parseRangeField(candidateTermSpace);

    if (rangeParsing) {
      // Recognized range field (numeric or date).
      this.termSpace = rangeParsing[0];
      this.termType = rangeParsing[2];

      if (this.termType === 'date') {
        rangeParsing = this.parseDate(termCandidate, rangeParsing[1]);
        this.term = rangeParsing[0];
      }
      else {
        this.term = parseFloat(termCandidate);
      }
      this.compare = rangeParsing[1];

      this.wildcardable = false;
    }
    else if (literalFields.indexOf(candidateTermSpace) !== -1) {
      // Recognized literal (string) field.
      this.termType = 'literal';
      this.term = termCandidate;
      this.termSpace = candidateTermSpace;
    }
    else if (candidateTermSpace === 'my') {
      // my:faves / my:upvotes / my:downvotes and friends.
      this.termType = 'my';
      this.termSpace = termCandidate;
    }
  }

  if (this.wildcardable) {
    // Transforms wildcard match into regular expression.
    // A custom NFA with caching may be more sophisticated but not
    // likely to be faster.
    this.term = new RegExp(
      `^${
        this.term.replace(/([.+^$[\]\\(){}|-])/g, '\\$1')
          .replace(/([^\\]|[^\\](?:\\\\)+)\*/g, '$1.*')
          .replace(/^(?:\\\\)*\*/g, '.*')
          .replace(/([^\\]|[^\\](?:\\\\)+)\?/g, '$1.?')
          .replace(/^(?:\\\\)*\?/g, '.?')
      }$`, 'i'
    );
  }

  // Signal to match() that the derived properties are ready.
  this.parsed = true;
};
|
|
||||||
|
|
||||||
// Normalizes the raw term text after quote stripping.
//
// For exact-quoted (non-wildcardable) terms, unescapes `\"` sequences —
// the quoted-term regex in parse() (`/^"([^"]|\\")+"$/`) admits escaped
// quotes, so they must be reduced to plain quotes here. The previous
// code called `replace('"', '"')`, a no-op that left escapes in place.
// For wildcardable terms, strips backslash escapes from everything
// except the wildcard metacharacters `*` and `?`.
SearchTerm.prototype._normalizeTerm = function() {
  if (!this.wildcardable) {
    // Unescape every escaped quote, not just the first occurrence.
    return this.term.replace(/\\"/g, '"');
  }
  return this.term.replace(/\\([^*?])/g, '$1');
};
|
|
||||||
|
|
||||||
// Fuzzy string comparison via optimal string alignment (restricted
// Damerau-Levenshtein) distance. A fuzz below 1.0 is interpreted as a
// fraction of the target length; otherwise it is an absolute distance.
SearchTerm.prototype.fuzzyMatch = function(targetStr) {
  // Maximum edit distance that still counts as a match.
  const threshold = this.fuzz < 1.0
    ? targetStr.length * (1.0 - this.fuzz)
    : this.fuzz;

  const haystack = targetStr.toLowerCase();

  // Work vectors: the last three populated rows of the dynamic
  // programming matrix of the iterative alignment calculation.
  let prevPrev = [],
    prev = [],
    curr = [];

  // First row: distance from the empty prefix is just the length.
  for (let j = 0; j <= haystack.length; j += 1) {
    prev.push(j);
  }

  for (let i = 0; i < this.term.length; i += 1) {
    curr[0] = i;
    for (let j = 0; j < haystack.length; j += 1) {
      const cost = this.term[i] === haystack[j] ? 0 : 1;
      curr[j + 1] = Math.min(
        // Deletion.
        prev[j + 1] + 1,
        // Insertion.
        curr[j] + 1,
        // Substitution or no change.
        prev[j] + cost
      );
      // Transposition check (as in the original implementation).
      if (i > 1 && j > 1 && this.term[i] === haystack[j - 1] &&
          haystack[i - 1] === haystack[j]) {
        curr[j + 1] = Math.min(curr[j], prevPrev[j - 1] + cost);
      }
    }
    // Rotate the row buffers instead of reallocating.
    const recycled = prevPrev;
    prevPrev = prev;
    prev = curr;
    curr = recycled;
  }

  return prev[haystack.length] <= threshold;
};
|
|
||||||
|
|
||||||
// Case-insensitive exact string comparison against the target.
SearchTerm.prototype.exactMatch = function(targetStr) {
  return targetStr.toLowerCase() === this.term.toLowerCase();
};
|
|
||||||
|
|
||||||
// Wildcard comparison; parse() has already compiled this.term into a
// case-insensitive RegExp for wildcardable terms.
SearchTerm.prototype.wildcardMatch = function(targetStr) {
  return this.term.test(targetStr);
};
|
|
||||||
|
|
||||||
// Returns true when any recorded interaction matches the given image,
// interaction type and (optionally) interaction value. A null
// `interaction` matches any value.
//
// Replaces the previous forEach-with-mutable-flag loop with
// Array.prototype.some, which short-circuits on the first hit and
// yields the same boolean result.
SearchTerm.prototype.interactionMatch = function(imageID, type, interaction, interactions) {
  return interactions.some(v =>
    v.image_id === imageID &&
    v.interaction_type === type &&
    (interaction === null || v.value === interaction)
  );
};
|
|
||||||
|
|
||||||
// Tests this term against a target image element, dispatching on the
// term type determined during parse().
SearchTerm.prototype.match = function(target) {
  if (!this.parsed) {
    this.parse();
  }

  let ret = false;

  if (this.termType === 'literal') {
    // Literal matching: pick the comparison strategy.
    let compFunc = this.exactMatch;
    if (this.fuzz) {
      compFunc = this.fuzzyMatch;
    }
    else if (this.wildcardable) {
      compFunc = this.wildcardMatch;
    }

    if (this.termSpace === 'tags') {
      // Scan the tag aliases, stopping as soon as one matches.
      target.getAttribute('data-image-tag-aliases').split(', ').every(
        str => {
          if (compFunc.call(this, str)) {
            ret = true;
            return false;
          }
          return true;
        }
      );
    }
    else {
      ret = compFunc.call(
        this, target.getAttribute(termSpaceToImageField[this.termSpace])
      );
    }
  }
  else if (this.termType === 'my' && window.booru.interactions.length > 0) {
    // Should work with most my:conditions except watched.
    const imageId = Number(target.getAttribute('data-image-id'));

    switch (this.termSpace) {
      case 'faves':
        ret = this.interactionMatch(imageId, 'faved', null, window.booru.interactions);
        break;
      case 'upvotes':
        ret = this.interactionMatch(imageId, 'voted', 'up', window.booru.interactions);
        break;
      case 'downvotes':
        ret = this.interactionMatch(imageId, 'voted', 'down', window.booru.interactions);
        break;
      default:
        // Other my: interactions aren't supported; return false to
        // prevent them from triggering a spoiler.
        ret = false;
        break;
    }
  }
  else if (this.termType === 'date') {
    // Date matching against the image attribute's timestamp.
    const date = new Date(
      target.getAttribute(termSpaceToImageField[this.termSpace])
    ).getTime();

    switch (this.compare) {
      // The open-left, closed-right date range specified by the
      // date/time format limits the types of comparisons that are
      // done compared to numeric ranges.
      case 'lt':
        ret = this.term > date;
        break;
      case 'gte':
        ret = this.term <= date;
        break;
      default:
        ret = this.term[0] <= date && this.term[1] > date;
    }
  }
  else {
    // Numeric range matching.
    const value = parseFloat(
      target.getAttribute(termSpaceToImageField[this.termSpace])
    );

    if (isNaN(this.term)) {
      ret = false;
    }
    else if (this.fuzz) {
      // Fuzz widens the acceptance window on both sides.
      ret = this.term <= value + this.fuzz &&
            this.term + this.fuzz >= value;
    }
    else {
      switch (this.compare) {
        case 'lt':
          ret = this.term > value;
          break;
        case 'gt':
          ret = this.term < value;
          break;
        case 'lte':
          ret = this.term >= value;
          break;
        case 'gte':
          ret = this.term <= value;
          break;
        default:
          ret = this.term === value;
      }
    }
  }

  return ret;
};
|
|
||||||
|
|
||||||
// Lexes a search string into a postfix token stack of SearchTerms and
// operator names ('and_op', 'or_op', 'not_op') via shunting-yard.
function generateLexArray(searchStr) {
  const opQueue = [],    // pending operators (shunting-yard queue)
    groupNegate = [],    // saved negation state per parenthesis group
    tokenStack = [];     // output token stack
  let searchTerm = null,
    boost = null,
    fuzz = null,
    lparenCtr = 0,       // parens opened *inside* the current term
    negate = false,
    boostFuzzStr = '',   // raw boost/fuzz text, kept in case it turns
                         // out to be part of the term itself
    remaining = searchStr;

  while (remaining.length > 0) {
    // eslint-disable-next-line no-loop-func
    tokenList.every(tokenArr => {
      const [tokenName, tokenRE] = tokenArr;
      let match = tokenRE.exec(remaining),
        op;

      if (!match) {
        // No match for this token type; try the next one.
        return true;
      }
      match = match[0];

      // A connective (or a closing paren outside the term) terminates
      // the current search term: finalize and push it.
      if (Boolean(searchTerm) && (
        ['and_op', 'or_op'].indexOf(tokenName) !== -1 ||
        tokenName === 'rparen' && lparenCtr === 0)) {
        // Set options.
        searchTerm.boost = boost;
        searchTerm.fuzz = fuzz;
        // Push to stack.
        tokenStack.push(searchTerm);
        // Reset term and options data.
        searchTerm = fuzz = boost = null;
        boostFuzzStr = '';
        lparenCtr = 0;

        if (negate) {
          tokenStack.push('not_op');
          negate = false;
        }
      }

      switch (tokenName) {
        case 'and_op':
          // AND binds tighter than OR: flush pending ANDs only.
          while (opQueue[0] === 'and_op') {
            tokenStack.push(opQueue.shift());
          }
          opQueue.unshift('and_op');
          break;
        case 'or_op':
          // OR flushes both pending ANDs and ORs.
          while (opQueue[0] === 'and_op' || opQueue[0] === 'or_op') {
            tokenStack.push(opQueue.shift());
          }
          opQueue.unshift('or_op');
          break;
        case 'not_op':
          if (searchTerm) {
            // We're already inside a search term, so it does not
            // apply; treat it as term text.
            searchTerm.append(match);
          }
          else {
            negate = !negate;
          }
          break;
        case 'lparen':
          if (searchTerm) {
            // If we are inside the search term, do not error out just
            // yet; instead, consider it as part of the search term, as
            // a user convenience.
            searchTerm.append(match);
            lparenCtr += 1;
          }
          else {
            opQueue.unshift('lparen');
            groupNegate.push(negate);
            negate = false;
          }
          break;
        case 'rparen':
          if (lparenCtr > 0) {
            // Closing a paren that was opened inside the term text.
            if (searchTerm) {
              searchTerm.append(match);
            }
            else {
              searchTerm = new SearchTerm(match);
            }
            lparenCtr -= 1;
          }
          else {
            // Pop operators until the matching lparen.
            while (opQueue.length) {
              op = opQueue.shift();
              if (op === 'lparen') {
                break;
              }
              tokenStack.push(op);
            }
            if (groupNegate.length > 0 && groupNegate.pop()) {
              tokenStack.push('not_op');
            }
          }
          break;
        case 'fuzz':
          if (searchTerm) {
            // For this and boost operations, we store the current
            // match so far to a temporary string in case this is
            // actually inside the term.
            fuzz = parseFloat(match.substr(1));
            boostFuzzStr += match;
          }
          else {
            searchTerm = new SearchTerm(match);
          }
          break;
        case 'boost':
          if (searchTerm) {
            boost = match.substr(1);
            boostFuzzStr += match;
          }
          else {
            searchTerm = new SearchTerm(match);
          }
          break;
        case 'quoted_lit':
          if (searchTerm) {
            searchTerm.append(match);
          }
          else {
            searchTerm = new SearchTerm(match);
          }
          break;
        case 'word':
          if (searchTerm) {
            if (fuzz || boost) {
              // The boost/fuzz text was actually part of the term.
              boost = fuzz = null;
              searchTerm.append(boostFuzzStr);
              boostFuzzStr = '';
            }
            searchTerm.append(match);
          }
          else {
            searchTerm = new SearchTerm(match);
          }
          break;
        default:
          // Append extra spaces within search terms.
          if (searchTerm) {
            searchTerm.append(match);
          }
      }

      // Truncate string and restart the token tests.
      remaining = remaining.substr(
        match.length, remaining.length - match.length
      );

      // Break since we have found a match.
      return false;
    });
  }

  // Append final tokens to the stack, starting with the search term.
  if (searchTerm) {
    searchTerm.boost = boost;
    searchTerm.fuzz = fuzz;
    tokenStack.push(searchTerm);
  }
  if (negate) {
    tokenStack.push('not_op');
  }

  if (opQueue.indexOf('rparen') !== -1 ||
      opQueue.indexOf('lparen') !== -1) {
    throw new Error('Mismatched parentheses.');
  }

  // Memory-efficient concatenation of remaining operators queue to the
  // token stack.
  tokenStack.push.apply(tokenStack, opQueue);

  return tokenStack;
}
|
|
||||||
|
|
||||||
// Builds a SearchAST out of the postfix token stack produced by
// generateLexArray, evaluating operators against an operand stack.
function parseTokens(lexicalArray) {
  const operandStack = [];

  lexicalArray.forEach((token, i) => {
    if (token === 'not_op') {
      // Negations are consumed via lookahead below.
      return;
    }

    // A trailing 'not_op' negates whatever this token produces.
    const negate = lexicalArray[i + 1] === 'not_op';

    if (typeof token === 'string') {
      // Binary operator: pop its two operands.
      const op2 = operandStack.pop(),
        op1 = operandStack.pop();

      if (typeof op1 === 'undefined' || typeof op2 === 'undefined') {
        throw new Error('Missing operand.');
      }

      operandStack.push(new SearchAST(token, negate, op1, op2));
    }
    else if (negate) {
      // A negated bare term gets wrapped in a single-child node.
      operandStack.push(new SearchAST(null, true, token));
    }
    else {
      operandStack.push(token);
    }
  });

  if (operandStack.length > 1) {
    throw new Error('Missing operator.');
  }

  const root = operandStack.pop();

  if (typeof root === 'undefined') {
    // Empty query.
    return new SearchAST();
  }

  if (isTerminal(root)) {
    // Wrap a bare term so the result is always a SearchAST.
    return new SearchAST(null, false, root);
  }

  return root;
}
|
|
||||||
|
|
||||||
// Top-level entry point: lexes the query string and parses the
// resulting token stack into a SearchAST.
function parseSearch(searchStr) {
  const tokens = generateLexArray(searchStr);
  return parseTokens(tokens);
}
|
|
||||||
|
|
||||||
// Whether the operand is a leaf SearchTerm (has a .term) rather than a
// SearchAST interior node.
function isTerminal(operand) {
  return operand.term !== undefined;
}
|
|
||||||
|
|
||||||
// Interior node of the parsed search syntax tree. A null op with only
// a leftOperand represents a wrapped (possibly negated) single term.
function SearchAST(op, negate, leftOperand, rightOperand) {
  this.op = op || null;
  this.negate = Boolean(negate);
  this.leftOperand = leftOperand || null;
  this.rightOperand = rightOperand || null;
}
|
|
||||||
|
|
||||||
// Combines two evaluated boolean operands under the parent node's
// operator, then applies the parent's negation flag.
function combineOperands(ast1, ast2, parentAST) {
  const combined = parentAST.op === 'and_op' ? ast1 && ast2 : ast1 || ast2;
  return parentAST.negate ? !combined : combined;
}
|
|
||||||
|
|
||||||
// Evaluation of the AST in regard to a target image
|
|
||||||
// Evaluation of the AST in regard to a target image, using an explicit
// traversal stack instead of recursion.
SearchAST.prototype.hitsImage = function(image) {
  const treeStack = [];
  // eslint-disable-next-line @typescript-eslint/no-this-alias,consistent-this
  let ast1 = this,  // Left side node / accumulated Boolean.
    ast2,           // Right side node.
    parentAST;      // Parent node of the current subtree.

  // Build the initial tree node traversal stack, of the "far left" side.
  // The general idea is to accumulate from the bottom and make stacks
  // of right-hand subtrees that themselves accumulate upward. The left
  // side node, ast1, will always be a Boolean representing the left-side
  // evaluated value, up to the current subtree (parentAST).
  while (!isTerminal(ast1)) {
    treeStack.push(ast1);
    ast1 = ast1.leftOperand;

    if (!ast1) {
      // Empty tree.
      return false;
    }
  }

  ast1 = ast1.match(image);
  treeStack.push(ast1);

  while (treeStack.length > 0) {
    parentAST = treeStack.pop();

    if (parentAST === null) {
      // We are at the end of a virtual stack for a right node subtree.
      // Switch the result of this stack from left (ast1) to right
      // (ast2), pop the original left node, then pop the parent
      // subtree itself. See the final else branch below for how this
      // marker gets pushed.
      ast2 = ast1;
      ast1 = treeStack.pop();
      parentAST = treeStack.pop();
    }
    else {
      // First, check whether short-circuit evaluation lets us skip the
      // right side entirely.
      if (!ast1 && parentAST.op === 'and_op') {
        ast1 = parentAST.negate;
        continue;
      }

      if (ast1 && parentAST.op === 'or_op') {
        ast1 = !parentAST.negate;
        continue;
      }

      // Not at the end of a stack: grab the right node. The left node
      // (ast1) is currently a terminal Boolean.
      ast2 = parentAST.rightOperand;
    }

    if (typeof ast2 === 'boolean') {
      ast1 = combineOperands(ast1, ast2, parentAST);
    }
    else if (!ast2) {
      // A subtree with a single node, generally the case for negated
      // tokens.
      if (parentAST.negate) {
        ast1 = !ast1;
      }
    }
    else if (isTerminal(ast2)) {
      // Finally at a leaf: evaluate and combine.
      ast2 = ast2.match(image);
      ast1 = combineOperands(ast1, ast2, parentAST);
    }
    else {
      // The right side is a new subtree. Instead of building a new
      // Array for a fresh stack, insert a null marker and descend the
      // subtree's left spine.
      treeStack.push(parentAST, ast1, null);

      do {
        treeStack.push(ast2);
        ast2 = ast2.leftOperand;
      } while (!isTerminal(ast2));

      ast1 = ast2.match(image);
    }
  }

  return ast1;
};
|
|
||||||
|
|
||||||
// Dumps to string a simple indented diagram of the syntax tree
// structure (with this object as the root) for debugging purposes.
SearchAST.prototype.dumpTree = function() {
  const lines = [],
    queue = [['', this]];

  while (queue.length > 0) {
    // Each entry pairs the accumulated indent prefix with a node.
    let [prefix, tree] = queue.shift();

    if (isTerminal(tree)) {
      lines.push(`${prefix}-> ${tree.term}`);
      continue;
    }

    if (tree.negate) {
      lines.push(`${prefix}+ NOT_OP`);
      prefix += '\t';
    }
    if (tree.op) {
      lines.push(`${prefix}+ ${tree.op.toUpperCase()}`);
      prefix += '\t';
      // Unshift right then left so the left child is visited first.
      queue.unshift([prefix, tree.rightOperand]);
      queue.unshift([prefix, tree.leftOperand]);
    }
    else {
      queue.unshift([prefix, tree.leftOperand]);
    }
  }

  return lines.join('\n');
};
|
|
||||||
|
|
||||||
export default parseSearch;
|
|
15
assets/js/match_query.ts
Normal file
15
assets/js/match_query.ts
Normal file
|
@ -0,0 +1,15 @@
|
||||||
|
import { defaultMatcher } from './query/matcher';
|
||||||
|
import { generateLexArray } from './query/lex';
|
||||||
|
import { parseTokens } from './query/parse';
|
||||||
|
import { getAstMatcherForTerm } from './query/term';
|
||||||
|
|
||||||
|
function parseWithDefaultMatcher(term: string, fuzz: number) {
|
||||||
|
return getAstMatcherForTerm(term, fuzz, defaultMatcher);
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseSearch(query: string) {
|
||||||
|
const tokens = generateLexArray(query, parseWithDefaultMatcher);
|
||||||
|
return parseTokens(tokens);
|
||||||
|
}
|
||||||
|
|
||||||
|
export default parseSearch;
|
|
@ -11,8 +11,7 @@ function formResult({target, detail}) {
|
||||||
|
|
||||||
const elements = {
|
const elements = {
|
||||||
'#description-form': '.image-description',
|
'#description-form': '.image-description',
|
||||||
'#uploader-form': '.image_uploader',
|
'#uploader-form': '.image_uploader'
|
||||||
'#source-form': '#image-source'
|
|
||||||
};
|
};
|
||||||
|
|
||||||
function showResult(resultEl, formEl, response) {
|
function showResult(resultEl, formEl, response) {
|
||||||
|
|
|
@ -17,7 +17,7 @@ function makeRequest(verb) {
|
||||||
function bindSubscriptionLinks() {
|
function bindSubscriptionLinks() {
|
||||||
delegate(document, 'fetchcomplete', {
|
delegate(document, 'fetchcomplete', {
|
||||||
'.js-subscription-link': event => {
|
'.js-subscription-link': event => {
|
||||||
const target = $('#js-subscription-target');
|
const target = event.target.closest('.js-subscription-target');
|
||||||
event.detail.text().then(text => {
|
event.detail.text().then(text => {
|
||||||
target.outerHTML = text;
|
target.outerHTML = text;
|
||||||
});
|
});
|
||||||
|
|
106
assets/js/query/__tests__/date.spec.ts
Normal file
106
assets/js/query/__tests__/date.spec.ts
Normal file
|
@ -0,0 +1,106 @@
|
||||||
|
import { makeDateMatcher } from '../date';
|
||||||
|
|
||||||
|
function daysAgo(days: number) {
|
||||||
|
return new Date(Date.now() - days * 86400000).toISOString();
|
||||||
|
}
|
||||||
|
|
||||||
|
describe('Date parsing', () => {
|
||||||
|
it('should match relative dates (upper bound)', () => {
|
||||||
|
const matcher = makeDateMatcher('3 days ago', 'lte');
|
||||||
|
|
||||||
|
expect(matcher(daysAgo(4), 'created_at', 0)).toBe(true);
|
||||||
|
expect(matcher(daysAgo(2), 'created_at', 0)).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should match relative dates (lower bound)', () => {
|
||||||
|
const matcher = makeDateMatcher('3 days ago', 'gte');
|
||||||
|
|
||||||
|
expect(matcher(daysAgo(4), 'created_at', 0)).toBe(false);
|
||||||
|
expect(matcher(daysAgo(2), 'created_at', 0)).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should match absolute date ranges', () => {
|
||||||
|
const ltMatcher = makeDateMatcher('2025', 'lt');
|
||||||
|
const gtMatcher = makeDateMatcher('2023', 'gt');
|
||||||
|
|
||||||
|
expect(ltMatcher(new Date(Date.UTC(2025, 5, 21)).toISOString(), 'created_at', 0)).toBe(false);
|
||||||
|
expect(ltMatcher(new Date(Date.UTC(2024, 5, 21)).toISOString(), 'created_at', 0)).toBe(true);
|
||||||
|
expect(ltMatcher(new Date(Date.UTC(2023, 5, 21)).toISOString(), 'created_at', 0)).toBe(true);
|
||||||
|
|
||||||
|
expect(gtMatcher(new Date(Date.UTC(2025, 5, 21)).toISOString(), 'created_at', 0)).toBe(true);
|
||||||
|
expect(gtMatcher(new Date(Date.UTC(2024, 5, 21)).toISOString(), 'created_at', 0)).toBe(true);
|
||||||
|
expect(gtMatcher(new Date(Date.UTC(2023, 5, 21)).toISOString(), 'created_at', 0)).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should match absolute dates through years', () => {
|
||||||
|
const matcher = makeDateMatcher('2024', 'eq');
|
||||||
|
|
||||||
|
expect(matcher(new Date(Date.UTC(2025, 5, 21)).toISOString(), 'created_at', 0)).toBe(false);
|
||||||
|
expect(matcher(new Date(Date.UTC(2024, 5, 21)).toISOString(), 'created_at', 0)).toBe(true);
|
||||||
|
expect(matcher(new Date(Date.UTC(2023, 5, 21)).toISOString(), 'created_at', 0)).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should match absolute dates through months', () => {
|
||||||
|
const matcher = makeDateMatcher('2024-06', 'eq');
|
||||||
|
|
||||||
|
expect(matcher(new Date(Date.UTC(2024, 6, 21)).toISOString(), 'created_at', 0)).toBe(false);
|
||||||
|
expect(matcher(new Date(Date.UTC(2024, 5, 21)).toISOString(), 'created_at', 0)).toBe(true);
|
||||||
|
expect(matcher(new Date(Date.UTC(2024, 4, 21)).toISOString(), 'created_at', 0)).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should match absolute dates through days', () => {
|
||||||
|
const matcher = makeDateMatcher('2024-06-21', 'eq');
|
||||||
|
|
||||||
|
expect(matcher(new Date(Date.UTC(2024, 5, 22)).toISOString(), 'created_at', 0)).toBe(false);
|
||||||
|
expect(matcher(new Date(Date.UTC(2024, 5, 21)).toISOString(), 'created_at', 0)).toBe(true);
|
||||||
|
expect(matcher(new Date(Date.UTC(2024, 5, 20)).toISOString(), 'created_at', 0)).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should match absolute dates through hours', () => {
|
||||||
|
const matcher = makeDateMatcher('2024-06-21T06', 'eq');
|
||||||
|
|
||||||
|
expect(matcher(new Date(Date.UTC(2024, 5, 21, 7)).toISOString(), 'created_at', 0)).toBe(false);
|
||||||
|
expect(matcher(new Date(Date.UTC(2024, 5, 21, 6)).toISOString(), 'created_at', 0)).toBe(true);
|
||||||
|
expect(matcher(new Date(Date.UTC(2024, 5, 21, 5)).toISOString(), 'created_at', 0)).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should match absolute dates through minutes', () => {
|
||||||
|
const matcher = makeDateMatcher('2024-06-21T06:21', 'eq');
|
||||||
|
|
||||||
|
expect(matcher(new Date(Date.UTC(2024, 5, 21, 6, 22)).toISOString(), 'created_at', 0)).toBe(false);
|
||||||
|
expect(matcher(new Date(Date.UTC(2024, 5, 21, 6, 21)).toISOString(), 'created_at', 0)).toBe(true);
|
||||||
|
expect(matcher(new Date(Date.UTC(2024, 5, 21, 6, 20)).toISOString(), 'created_at', 0)).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should match absolute dates through seconds', () => {
|
||||||
|
const matcher = makeDateMatcher('2024-06-21T06:21:30Z', 'eq');
|
||||||
|
|
||||||
|
expect(matcher(new Date(Date.UTC(2024, 5, 21, 6, 21, 31)).toISOString(), 'created_at', 0)).toBe(false);
|
||||||
|
expect(matcher(new Date(Date.UTC(2024, 5, 21, 6, 21, 30)).toISOString(), 'created_at', 0)).toBe(true);
|
||||||
|
expect(matcher(new Date(Date.UTC(2024, 5, 21, 6, 21, 29)).toISOString(), 'created_at', 0)).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should match absolute dates through seconds with positive timezone offset', () => {
|
||||||
|
const matcher = makeDateMatcher('2024-06-21T06:21:30+01:30', 'eq');
|
||||||
|
|
||||||
|
expect(matcher(new Date(Date.UTC(2024, 5, 21, 4, 51, 31)).toISOString(), 'created_at', 0)).toBe(false);
|
||||||
|
expect(matcher(new Date(Date.UTC(2024, 5, 21, 4, 51, 30)).toISOString(), 'created_at', 0)).toBe(true);
|
||||||
|
expect(matcher(new Date(Date.UTC(2024, 5, 21, 4, 51, 29)).toISOString(), 'created_at', 0)).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should match absolute dates through seconds with negative timezone offset', () => {
|
||||||
|
const matcher = makeDateMatcher('2024-06-21T06:21:30-01:30', 'eq');
|
||||||
|
|
||||||
|
expect(matcher(new Date(Date.UTC(2024, 5, 21, 7, 51, 31)).toISOString(), 'created_at', 0)).toBe(false);
|
||||||
|
expect(matcher(new Date(Date.UTC(2024, 5, 21, 7, 51, 30)).toISOString(), 'created_at', 0)).toBe(true);
|
||||||
|
expect(matcher(new Date(Date.UTC(2024, 5, 21, 7, 51, 29)).toISOString(), 'created_at', 0)).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should not match malformed absolute date expressions', () => {
|
||||||
|
expect(() => makeDateMatcher('2024-06-21T06:21:30+01:3020', 'eq')).toThrow('Cannot parse date string: 2024-06-21T06:21:30+01:3020');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should not match malformed relative date expressions', () => {
|
||||||
|
expect(() => makeDateMatcher('3 test failures ago', 'eq')).toThrow('Cannot parse date string: 3 test failures ago');
|
||||||
|
});
|
||||||
|
});
|
177
assets/js/query/__tests__/lex.spec.ts
Normal file
177
assets/js/query/__tests__/lex.spec.ts
Normal file
|
@ -0,0 +1,177 @@
|
||||||
|
import { generateLexArray } from '../lex';
|
||||||
|
import { AstMatcher } from '../types';
|
||||||
|
|
||||||
|
// Tests for the search-query lexer: single terms, fuzz (~) and boost (^)
// modifiers, boolean operators, parentheses, and operator precedence
// (NOT binds tighter than AND, which binds tighter than OR).
describe('Lexical analysis', () => {
  // Records of every term handed to the parser callback, in lex order.
  let terms: string[];
  let fuzzes: number[];
  let boosts: number[];

  // Sentinel matcher: identity-compared in the expected token arrays below.
  function noMatch() {
    return false;
  }

  // Stub ParseTerm that records its arguments and returns the sentinel.
  function parseTerm(term: string, fuzz: number, boost: number): AstMatcher {
    terms.push(term);
    fuzzes.push(fuzz);
    boosts.push(boost);

    return noMatch;
  }

  beforeEach(() => {
    terms = [];
    fuzzes = [];
    boosts = [];
  });

  it('should lex single terms', () => {
    const array = generateLexArray('safe', parseTerm);
    expect(terms).toEqual(['safe']);
    expect(fuzzes).toEqual([0]);
    expect(boosts).toEqual([1]);
    expect(array).toEqual([noMatch]);
  });

  it('should lex single terms with fuzzing', () => {
    const array = generateLexArray('safe~4', parseTerm);
    expect(terms).toEqual(['safe']);
    expect(fuzzes).toEqual([4]);
    expect(boosts).toEqual([1]);
    expect(array).toEqual([noMatch]);
  });

  it('should lex single terms with boosting', () => {
    const array = generateLexArray('safe^2', parseTerm);
    expect(terms).toEqual(['safe']);
    expect(fuzzes).toEqual([0]);
    expect(boosts).toEqual([2]);
    expect(array).toEqual([noMatch]);
  });

  it('should lex quoted single terms', () => {
    // Note: the quotes are preserved in the term passed to parseTerm.
    const array = generateLexArray('"safe"', parseTerm);
    expect(terms).toEqual(['"safe"']);
    expect(fuzzes).toEqual([0]);
    expect(boosts).toEqual([1]);
    expect(array).toEqual([noMatch]);
  });

  it('should lex multiple terms connected by AND', () => {
    // Output is postfix (RPN): operands first, then the operator.
    const array = generateLexArray('safe AND solo', parseTerm);
    expect(terms).toEqual(['safe', 'solo']);
    expect(fuzzes).toEqual([0, 0]);
    expect(boosts).toEqual([1, 1]);
    expect(array).toEqual([noMatch, noMatch, 'and_op']);
  });

  it('should lex multiple terms connected by OR', () => {
    const array = generateLexArray('safe OR solo', parseTerm);
    expect(terms).toEqual(['safe', 'solo']);
    expect(fuzzes).toEqual([0, 0]);
    expect(boosts).toEqual([1, 1]);
    expect(array).toEqual([noMatch, noMatch, 'or_op']);
  });

  it('should prioritize AND over OR', () => {
    const array = generateLexArray('safe OR solo AND fluttershy', parseTerm);
    expect(terms).toEqual(['safe', 'solo', 'fluttershy']);
    expect(array).toEqual([noMatch, noMatch, noMatch, 'and_op', 'or_op']);
  });

  it('should override ordering when using parenthetical expressions', () => {
    const array = generateLexArray('(safe OR solo) AND fluttershy', parseTerm);
    expect(terms).toEqual(['safe', 'solo', 'fluttershy']);
    expect(fuzzes).toEqual([0, 0, 0]);
    expect(boosts).toEqual([1, 1, 1]);
    expect(array).toEqual([noMatch, noMatch, 'or_op', noMatch, 'and_op']);
  });

  it('should lex unary NOT', () => {
    const array = generateLexArray('NOT safe', parseTerm);
    expect(terms).toEqual(['safe']);
    expect(array).toEqual([noMatch, 'not_op']);
  });

  it('should prioritize NOT over AND', () => {
    const array = generateLexArray('NOT safe AND solo', parseTerm);
    expect(terms).toEqual(['safe', 'solo']);
    expect(array).toEqual([noMatch, 'not_op', noMatch, 'and_op']);
  });

  it('should prioritize NOT over OR', () => {
    const array = generateLexArray('NOT safe OR solo', parseTerm);
    expect(terms).toEqual(['safe', 'solo']);
    expect(array).toEqual([noMatch, 'not_op', noMatch, 'or_op']);
  });

  it('should allow group negation', () => {
    const array = generateLexArray('NOT (safe OR solo)', parseTerm);
    expect(terms).toEqual(['safe', 'solo']);
    expect(array).toEqual([noMatch, noMatch, 'or_op', 'not_op']);
  });

  it('should allow NOT expressions inside terms', () => {
    // NOT mid-term is plain text, not an operator.
    const array = generateLexArray('this NOT that', parseTerm);
    expect(terms).toEqual(['this NOT that']);
    expect(array).toEqual([noMatch]);
  });

  it('should allow parenthetical expressions inside terms', () => {
    const array = generateLexArray('rose (flower)', parseTerm);
    expect(terms).toEqual(['rose (flower)']);
    expect(array).toEqual([noMatch]);
  });

  it('should handle fuzz expressions in place of terms', () => {
    const array = generateLexArray('~2', parseTerm);
    expect(terms).toEqual(['~2']);
    expect(array).toEqual([noMatch]);
  });

  it('should handle boost expressions in place of terms', () => {
    const array = generateLexArray('^2', parseTerm);
    expect(terms).toEqual(['^2']);
    expect(array).toEqual([noMatch]);
  });

  it('should handle fuzz expressions in terms', () => {
    const array = generateLexArray('two~2~two', parseTerm);
    expect(terms).toEqual(['two~2~two']);
    expect(array).toEqual([noMatch]);
  });

  it('should handle boost expressions in terms', () => {
    const array = generateLexArray('two^2^two', parseTerm);
    expect(terms).toEqual(['two^2^two']);
    expect(array).toEqual([noMatch]);
  });

  it('should handle quotes in terms', () => {
    const array = generateLexArray('a "quoted" expression', parseTerm);
    expect(terms).toEqual(['a "quoted" expression']);
    expect(array).toEqual([noMatch]);
  });

  it('should allow extra spaces in terms', () => {
    const array = generateLexArray('twilight sparkle', parseTerm);
    expect(terms).toEqual(['twilight sparkle']);
    expect(array).toEqual([noMatch]);
  });

  it('should collapse consecutive AND expressions', () => {
    const array = generateLexArray('safe AND solo AND fluttershy AND applejack', parseTerm);
    expect(terms).toEqual(['safe', 'solo', 'fluttershy', 'applejack']);
    expect(array).toEqual([noMatch, noMatch, 'and_op', noMatch, 'and_op', noMatch, 'and_op']);
  });

  it('should collapse consecutive OR expressions', () => {
    const array = generateLexArray('safe OR solo OR fluttershy OR applejack', parseTerm);
    expect(terms).toEqual(['safe', 'solo', 'fluttershy', 'applejack']);
    expect(array).toEqual([noMatch, noMatch, 'or_op', noMatch, 'or_op', noMatch, 'or_op']);
  });

  it('should throw exception on mismatched parentheses', () => {
    expect(() => generateLexArray('(safe OR solo AND fluttershy', parseTerm)).toThrow('Mismatched parentheses.');
    // expect(() => generateLexArray(')bad', parseTerm)).toThrow('Mismatched parentheses.');
  });
});
|
36
assets/js/query/__tests__/literal.spec.ts
Normal file
36
assets/js/query/__tests__/literal.spec.ts
Normal file
|
@ -0,0 +1,36 @@
|
||||||
|
import { makeLiteralMatcher } from '../literal';
|
||||||
|
|
||||||
|
// Tests for literal field matchers: exact matching (arrayed vs. scalar
// fields), fuzzy matching (normalized and raw edit distance), wildcards.
describe('Literal field parsing', () => {
  it('should handle exact matching in arrayed fields', () => {
    // 'tags' is comma-separated; any element may match.
    const matcher = makeLiteralMatcher('safe', 0, false);
    expect(matcher('safe, solo', 'tags', 0)).toBe(true);
    expect(matcher('solo', 'tags', 0)).toBe(false);
  });

  it('should handle exact matching in non-arrayed fields', () => {
    // 'description' is a scalar; the whole value must match.
    const matcher = makeLiteralMatcher('safe', 0, false);
    expect(matcher('safe, solo', 'description', 0)).toBe(false);
    expect(matcher('safe', 'description', 0)).toBe(true);
    expect(matcher('solo', 'description', 0)).toBe(false);
  });

  it('should handle fuzzy matching based on normalized edit distance', () => {
    // Fuzz in (0, 1) is interpreted as a similarity ratio.
    const matcher = makeLiteralMatcher('fluttersho', 0.8, false);
    expect(matcher('fluttershy', 'tags', 0)).toBe(true);
    expect(matcher('rarity', 'tags', 0)).toBe(false);
  });

  it('should handle fuzzy matching based on raw edit distance', () => {
    // Fuzz >= 1 is interpreted as an absolute edit-distance budget.
    const matcher = makeLiteralMatcher('fluttersho', 1, false);
    expect(matcher('fluttershy', 'tags', 0)).toBe(true);
    expect(matcher('rarity', 'tags', 0)).toBe(false);
  });

  it('should handle wildcard matching', () => {
    // '?' matches one character, '*' matches any run; anchored to the value.
    const matcher = makeLiteralMatcher('fl?tter*', 0, true);
    expect(matcher('fluttershy', 'tags', 0)).toBe(true);
    expect(matcher('flitter', 'tags', 0)).toBe(true);
    expect(matcher('rainbow dash', 'tags', 0)).toBe(false);
    expect(matcher('gentle flutter', 'tags', 0)).toBe(false);
  });
});
|
53
assets/js/query/__tests__/number.spec.ts
Normal file
53
assets/js/query/__tests__/number.spec.ts
Normal file
|
@ -0,0 +1,53 @@
|
||||||
|
import { makeNumberMatcher } from '../number';
|
||||||
|
|
||||||
|
// Tests for numeric field matchers: equality, range qualifiers, rejection
// of unparseable values, and fuzz interpreted as an inclusive window.
describe('Number parsing', () => {
  it('should match numbers directly', () => {
    const intMatch = makeNumberMatcher(2067, 0, 'eq');

    expect(intMatch('2066', 'value', 0)).toBe(false);
    expect(intMatch('2067', 'value', 0)).toBe(true);
    expect(intMatch('2068', 'value', 0)).toBe(false);
    // Prefix match is not enough; the whole number must be equal.
    expect(intMatch('20677', 'value', 0)).toBe(false);
  });

  it('should match number ranges', () => {
    const ltMatch = makeNumberMatcher(2067, 0, 'lt');
    const lteMatch = makeNumberMatcher(2067, 0, 'lte');
    const gtMatch = makeNumberMatcher(2067, 0, 'gt');
    const gteMatch = makeNumberMatcher(2067, 0, 'gte');

    expect(ltMatch('2066', 'value', 0)).toBe(true);
    expect(ltMatch('2067', 'value', 0)).toBe(false);
    expect(ltMatch('2068', 'value', 0)).toBe(false);
    expect(lteMatch('2066', 'value', 0)).toBe(true);
    expect(lteMatch('2067', 'value', 0)).toBe(true);
    expect(lteMatch('2068', 'value', 0)).toBe(false);
    expect(gtMatch('2066', 'value', 0)).toBe(false);
    expect(gtMatch('2067', 'value', 0)).toBe(false);
    expect(gtMatch('2068', 'value', 0)).toBe(true);
    expect(gteMatch('2066', 'value', 0)).toBe(false);
    expect(gteMatch('2067', 'value', 0)).toBe(true);
    expect(gteMatch('2068', 'value', 0)).toBe(true);
  });

  it('should not match unparsed values', () => {
    const matcher = makeNumberMatcher(2067, 0, 'eq');

    expect(matcher('NaN', 'value', 0)).toBe(false);
    expect(matcher('test', 'value', 0)).toBe(false);
  });

  it('should interpret fuzz as an inclusive range around the value', () => {
    // fuzz=3 accepts [2064, 2070], i.e. value +/- fuzz inclusive.
    const matcher = makeNumberMatcher(2067, 3, 'eq');

    expect(matcher('2063', 'value', 0)).toBe(false);
    expect(matcher('2064', 'value', 0)).toBe(true);
    expect(matcher('2065', 'value', 0)).toBe(true);
    expect(matcher('2066', 'value', 0)).toBe(true);
    expect(matcher('2067', 'value', 0)).toBe(true);
    expect(matcher('2068', 'value', 0)).toBe(true);
    expect(matcher('2069', 'value', 0)).toBe(true);
    expect(matcher('2070', 'value', 0)).toBe(true);
    expect(matcher('2071', 'value', 0)).toBe(false);
  });
});
|
84
assets/js/query/__tests__/parse.spec.ts
Normal file
84
assets/js/query/__tests__/parse.spec.ts
Normal file
|
@ -0,0 +1,84 @@
|
||||||
|
import { defaultMatcher } from '../matcher';
|
||||||
|
import { termSpaceToImageField } from '../fields';
|
||||||
|
import { generateLexArray } from '../lex';
|
||||||
|
import { getAstMatcherForTerm } from '../term';
|
||||||
|
import { parseTokens } from '../parse';
|
||||||
|
|
||||||
|
// ParseTerm adapter that builds real matchers via the production factory,
// so these tests exercise the full lex -> term -> matcher pipeline.
function parseWithDefaultMatcher(term: string, fuzz: number) {
  return getAstMatcherForTerm(term, fuzz, defaultMatcher);
}
|
||||||
|
|
||||||
|
// End-to-end tests: lex a query string, parse the token list into an AST
// matcher, and evaluate it against fixture DOM elements carrying the
// data attributes defined in termSpaceToImageField.
describe('Semantic analysis', () => {
  let documents: HTMLElement[];

  beforeAll(() => {
    // Three fixture "images" with ids 0..2 and differing tag sets.
    const e0 = document.createElement('div');
    e0.setAttribute(termSpaceToImageField.id, '0');
    e0.setAttribute(termSpaceToImageField.tags, 'safe, solo, fluttershy');

    const e1 = document.createElement('div');
    e1.setAttribute(termSpaceToImageField.id, '1');
    e1.setAttribute(termSpaceToImageField.tags, 'suggestive, solo, fluttershy');

    const e2 = document.createElement('div');
    e2.setAttribute(termSpaceToImageField.id, '2');
    e2.setAttribute(termSpaceToImageField.tags, 'suggestive, fluttershy, twilight sparkle');

    documents = [e0, e1, e2];
  });

  it('should match single term expressions', () => {
    const tokens = generateLexArray('fluttershy', parseWithDefaultMatcher);
    const matcher = parseTokens(tokens);

    expect(matcher(documents[0])).toBe(true);
    expect(matcher(documents[1])).toBe(true);
    expect(matcher(documents[2])).toBe(true);
  });

  it('should match AND expressions', () => {
    // Comma is an AND operator in this query language.
    const tokens = generateLexArray('fluttershy,solo', parseWithDefaultMatcher);
    const matcher = parseTokens(tokens);

    expect(matcher(documents[0])).toBe(true);
    expect(matcher(documents[1])).toBe(true);
    expect(matcher(documents[2])).toBe(false);
  });

  it('should match OR expressions', () => {
    const tokens = generateLexArray('suggestive || twilight sparkle', parseWithDefaultMatcher);
    const matcher = parseTokens(tokens);

    expect(matcher(documents[0])).toBe(false);
    expect(matcher(documents[1])).toBe(true);
    expect(matcher(documents[2])).toBe(true);
  });

  it('should match NOT expressions', () => {
    const tokens = generateLexArray('NOT twilight sparkle', parseWithDefaultMatcher);
    const matcher = parseTokens(tokens);

    expect(matcher(documents[0])).toBe(true);
    expect(matcher(documents[1])).toBe(true);
    expect(matcher(documents[2])).toBe(false);
  });

  it('should allow empty expressions', () => {
    // An empty query parses successfully and matches nothing.
    const tokens = generateLexArray('', parseWithDefaultMatcher);
    const matcher = parseTokens(tokens);

    expect(matcher(documents[0])).toBe(false);
    expect(matcher(documents[1])).toBe(false);
    expect(matcher(documents[2])).toBe(false);
  });

  it('should throw on unpaired AND', () => {
    const tokens = generateLexArray(' AND ', parseWithDefaultMatcher);
    expect(() => parseTokens(tokens)).toThrow('Missing operand.');
  });

  it('should throw on unjoined parenthetical', () => {
    const tokens = generateLexArray('(safe) solo', parseWithDefaultMatcher);
    expect(() => parseTokens(tokens)).toThrow('Missing operator.');
  });
});
|
131
assets/js/query/__tests__/term.spec.ts
Normal file
131
assets/js/query/__tests__/term.spec.ts
Normal file
|
@ -0,0 +1,131 @@
|
||||||
|
import { getAstMatcherForTerm } from '../term';
|
||||||
|
import { MatcherFactory, defaultMatcher } from '../matcher';
|
||||||
|
import { termSpaceToImageField } from '../fields';
|
||||||
|
|
||||||
|
// Sentinel matcher returned by the test factory below: always fails.
function noMatch() {
  return false;
}
|
||||||
|
|
||||||
|
class TestMatcherFactory implements MatcherFactory {
|
||||||
|
public dateVals: string[];
|
||||||
|
public literalVals: string[];
|
||||||
|
public numberVals: number[];
|
||||||
|
public userVals: string[];
|
||||||
|
|
||||||
|
constructor() {
|
||||||
|
this.dateVals = [];
|
||||||
|
this.literalVals = [];
|
||||||
|
this.numberVals = [];
|
||||||
|
this.userVals = [];
|
||||||
|
}
|
||||||
|
|
||||||
|
makeDateMatcher(term: string) {
|
||||||
|
this.dateVals.push(term);
|
||||||
|
return noMatch;
|
||||||
|
}
|
||||||
|
|
||||||
|
makeLiteralMatcher(term: string) {
|
||||||
|
this.literalVals.push(term);
|
||||||
|
return noMatch;
|
||||||
|
}
|
||||||
|
|
||||||
|
makeNumberMatcher(term: number) {
|
||||||
|
this.numberVals.push(term);
|
||||||
|
return noMatch;
|
||||||
|
}
|
||||||
|
|
||||||
|
makeUserMatcher(term: string) {
|
||||||
|
this.userVals.push(term);
|
||||||
|
return noMatch;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Tests for term parsing: which factory method (literal/date/number/user)
// each field expression is dispatched to, plus end-to-end matching against
// DOM elements using the production defaultMatcher.
describe('Search terms', () => {
  let factory: TestMatcherFactory;

  beforeEach(() => {
    factory = new TestMatcherFactory();
  });

  it('should parse the default field', () => {
    getAstMatcherForTerm('default', 0, factory);
    expect(factory.literalVals).toEqual(['default']);
  });

  it('should parse the default field with wildcarding', () => {
    getAstMatcherForTerm('def?ul*', 0, factory);
    expect(factory.literalVals).toEqual(['def?ul*']);
  });

  it('should parse the default field with fuzzing', () => {
    getAstMatcherForTerm('default', 1, factory);
    expect(factory.literalVals).toEqual(['default']);
  });

  it('should parse the default field within quotes', () => {
    // Quotes are stripped before the term reaches the factory.
    getAstMatcherForTerm('"default"', 0, factory);
    expect(factory.literalVals).toEqual(['default']);
  });

  it('should parse exact date field values', () => {
    getAstMatcherForTerm('created_at:2024', 0, factory);
    expect(factory.dateVals).toEqual(['2024']);
  });

  it('should parse ranged date field values', () => {
    getAstMatcherForTerm('created_at.lte:2024', 0, factory);
    getAstMatcherForTerm('created_at.lt:2024', 0, factory);
    getAstMatcherForTerm('created_at.gte:2024', 0, factory);
    getAstMatcherForTerm('created_at.gt:2024', 0, factory);
    expect(factory.dateVals).toEqual(['2024', '2024', '2024', '2024']);
  });

  it('should parse exact number field values', () => {
    getAstMatcherForTerm('width:1920', 0, factory);
    expect(factory.numberVals).toEqual([1920]);
  });

  it('should parse ranged number field values', () => {
    getAstMatcherForTerm('width.lte:1920', 0, factory);
    getAstMatcherForTerm('width.lt:1920', 0, factory);
    getAstMatcherForTerm('width.gte:1920', 0, factory);
    getAstMatcherForTerm('width.gt:1920', 0, factory);
    expect(factory.numberVals).toEqual([1920, 1920, 1920, 1920]);
  });

  it('should parse literal field values', () => {
    getAstMatcherForTerm('source_url:*twitter*', 0, factory);
    expect(factory.literalVals).toEqual(['*twitter*']);
  });

  it('should parse user field values', () => {
    getAstMatcherForTerm('my:upvotes', 0, factory);
    getAstMatcherForTerm('my:downvotes', 0, factory);
    getAstMatcherForTerm('my:faves', 0, factory);
    expect(factory.userVals).toEqual(['upvotes', 'downvotes', 'faves']);
  });

  it('should match document with proper field values', () => {
    const idMatcher = getAstMatcherForTerm('id.lt:1', 0, defaultMatcher);
    const sourceMatcher = getAstMatcherForTerm('source_url:twitter.com', 0, defaultMatcher);

    const idAttribute = termSpaceToImageField.id;
    const sourceUrlAttribute = termSpaceToImageField.source_url;

    const properElement = document.createElement('div');
    properElement.setAttribute(idAttribute, '0');
    properElement.setAttribute(sourceUrlAttribute, 'twitter.com');

    expect(idMatcher(properElement)).toBe(true);
    expect(sourceMatcher(properElement)).toBe(true);
  });

  it('should not match document without field values', () => {
    // Elements missing the data attributes never match.
    const idMatcher = getAstMatcherForTerm('id.lt:1', 0, defaultMatcher);
    const sourceMatcher = getAstMatcherForTerm('source_url:twitter.com', 0, defaultMatcher);
    const improperElement = document.createElement('div');

    expect(idMatcher(improperElement)).toBe(false);
    expect(sourceMatcher(improperElement)).toBe(false);
  });
});
|
50
assets/js/query/__tests__/user.spec.ts
Normal file
50
assets/js/query/__tests__/user.spec.ts
Normal file
|
@ -0,0 +1,50 @@
|
||||||
|
import { makeUserMatcher } from '../user';
|
||||||
|
|
||||||
|
// Tests for my:-prefixed user matchers, driven by the window.booru
// interactions fixture installed in beforeEach. The third matcher argument
// is the document index, which is compared against image_id.
describe('User field parsing', () => {
  beforeEach(() => {
    /* eslint-disable camelcase */
    window.booru.interactions = [
      {image_id: 0, user_id: 0, interaction_type: 'faved', value: null},
      {image_id: 0, user_id: 0, interaction_type: 'voted', value: 'up'},
      {image_id: 1, user_id: 0, interaction_type: 'voted', value: 'down'},
      {image_id: 2, user_id: 0, interaction_type: 'hidden', value: null},
    ];
    /* eslint-enable camelcase */
  });

  it('should parse my:faves', () => {
    const matcher = makeUserMatcher('faves');

    expect(matcher('', 'my', 0)).toBe(true);
    expect(matcher('', 'my', 1)).toBe(false);
    expect(matcher('', 'my', 2)).toBe(false);
  });

  it('should parse my:upvotes', () => {
    const matcher = makeUserMatcher('upvotes');

    expect(matcher('', 'my', 0)).toBe(true);
    expect(matcher('', 'my', 1)).toBe(false);
    expect(matcher('', 'my', 2)).toBe(false);
  });

  it('should parse my:downvotes', () => {
    const matcher = makeUserMatcher('downvotes');

    expect(matcher('', 'my', 0)).toBe(false);
    expect(matcher('', 'my', 1)).toBe(true);
    expect(matcher('', 'my', 2)).toBe(false);
  });

  it('should not parse other my: fields', () => {
    // Only faves/upvotes/downvotes are supported; everything else,
    // including 'hidden' (present in the fixture), never matches.
    const hiddenMatcher = makeUserMatcher('hidden');
    const watchedMatcher = makeUserMatcher('watched');

    expect(hiddenMatcher('', 'my', 0)).toBe(false);
    expect(hiddenMatcher('', 'my', 1)).toBe(false);
    expect(hiddenMatcher('', 'my', 2)).toBe(false);
    expect(watchedMatcher('', 'my', 0)).toBe(false);
    expect(watchedMatcher('', 'my', 1)).toBe(false);
    expect(watchedMatcher('', 'my', 2)).toBe(false);
  });
});
|
17
assets/js/query/boolean.ts
Normal file
17
assets/js/query/boolean.ts
Normal file
|
@ -0,0 +1,17 @@
|
||||||
|
import { AstMatcher } from './types';
|
||||||
|
|
||||||
|
export function matchAny(...matchers: AstMatcher[]): AstMatcher {
|
||||||
|
return (e: HTMLElement) => matchers.some(matcher => matcher(e));
|
||||||
|
}
|
||||||
|
|
||||||
|
export function matchAll(...matchers: AstMatcher[]): AstMatcher {
|
||||||
|
return (e: HTMLElement) => matchers.every(matcher => matcher(e));
|
||||||
|
}
|
||||||
|
|
||||||
|
export function matchNot(matcher: AstMatcher): AstMatcher {
|
||||||
|
return (e: HTMLElement) => !matcher(e);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function matchNone(): AstMatcher {
|
||||||
|
return () => false;
|
||||||
|
}
|
140
assets/js/query/date.ts
Normal file
140
assets/js/query/date.ts
Normal file
|
@ -0,0 +1,140 @@
|
||||||
|
import { assertNotNull } from '../utils/assert';
|
||||||
|
import { FieldMatcher, ParseError, RangeEqualQualifier } from './types';
|
||||||
|
|
||||||
|
type Year = number;
|
||||||
|
type Month = number;
|
||||||
|
type Day = number;
|
||||||
|
type Hours = number;
|
||||||
|
type Minutes = number;
|
||||||
|
type Seconds = number;
|
||||||
|
type AbsoluteDate = [Year, Month, Day, Hours, Minutes, Seconds];
|
||||||
|
type TimeZoneOffset = [Hours, Minutes];
|
||||||
|
type PosixTimeMs = number;
|
||||||
|
|
||||||
|
function makeMatcher(bottomDate: PosixTimeMs, topDate: PosixTimeMs, qual: RangeEqualQualifier): FieldMatcher {
|
||||||
|
// The open-left, closed-right date range specified by the
|
||||||
|
// date/time format limits the types of comparisons that are
|
||||||
|
// done compared to numeric ranges.
|
||||||
|
switch (qual) {
|
||||||
|
case 'lte':
|
||||||
|
return v => new Date(v).getTime() < topDate;
|
||||||
|
case 'gte':
|
||||||
|
return v => new Date(v).getTime() >= bottomDate;
|
||||||
|
case 'lt':
|
||||||
|
return v => new Date(v).getTime() < bottomDate;
|
||||||
|
case 'gt':
|
||||||
|
return v => new Date(v).getTime() >= topDate;
|
||||||
|
case 'eq':
|
||||||
|
default:
|
||||||
|
return v => {
|
||||||
|
const t = new Date(v).getTime();
|
||||||
|
return t >= bottomDate && t < topDate;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const relativeDateMatch = /(\d+) (second|minute|hour|day|week|month|year)s? ago/;
|
||||||
|
|
||||||
|
function makeRelativeDateMatcher(dateVal: string, qual: RangeEqualQualifier): FieldMatcher {
|
||||||
|
const match = assertNotNull(relativeDateMatch.exec(dateVal));
|
||||||
|
const bounds: Record<string, number> = {
|
||||||
|
second: 1000,
|
||||||
|
minute: 60000,
|
||||||
|
hour: 3600000,
|
||||||
|
day: 86400000,
|
||||||
|
week: 604800000,
|
||||||
|
month: 2592000000,
|
||||||
|
year: 31536000000
|
||||||
|
};
|
||||||
|
|
||||||
|
const amount = parseInt(match[1], 10);
|
||||||
|
const scale = bounds[match[2]];
|
||||||
|
|
||||||
|
const now = new Date().getTime();
|
||||||
|
const bottomDate = new Date(now - amount * scale).getTime();
|
||||||
|
const topDate = new Date(now - (amount - 1) * scale).getTime();
|
||||||
|
|
||||||
|
return makeMatcher(bottomDate, topDate, qual);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Builds a matcher for an (ISO-8601-style) absolute date prefix such as
 * "2024", "2024-06-21", or "2024-06-21T06:21:30+01:30". A partial date
 * denotes the whole period it names: the range spans from the parsed
 * instant to one unit past the least-specific component supplied.
 * Throws ParseError when the string cannot be fully consumed.
 */
function makeAbsoluteDateMatcher(dateVal: string, qual: RangeEqualQualifier): FieldMatcher {
  // One regex per date component, each anchored to the head of the
  // remaining input: year, month, day, hours, minutes, seconds.
  const parseRes: RegExp[] = [
    /^(\d{4})/,
    /^-(\d{2})/,
    /^-(\d{2})/,
    /^(?:\s+|T|t)(\d{2})/,
    /^:(\d{2})/,
    /^:(\d{2})/
  ];
  const timeZoneOffset: TimeZoneOffset = [0, 0];
  // [year, month, day, hours, minutes, seconds]; day defaults to 1.
  const timeData: AbsoluteDate = [0, 0, 1, 0, 0, 0];

  // Keep the original string for error messages; mutate a local copy.
  const origDateVal: string = dateVal;
  let localDateVal = origDateVal;

  // Strip a trailing "+HH:MM"/"-HH:MM" zone offset (6 chars), or a
  // trailing Z/z (UTC). The offset is applied to timeData further below.
  const offsetMatch = /([+-])(\d{2}):(\d{2})$/.exec(localDateVal);
  if (offsetMatch) {
    timeZoneOffset[0] = parseInt(offsetMatch[2], 10);
    timeZoneOffset[1] = parseInt(offsetMatch[3], 10);
    if (offsetMatch[1] === '-') {
      timeZoneOffset[0] *= -1;
      timeZoneOffset[1] *= -1;
    }
    localDateVal = localDateVal.substring(0, localDateVal.length - 6);
  }
  else {
    localDateVal = localDateVal.replace(/[Zz]$/, '');
  }

  // Consume components in order until the input runs out. matchIndex is
  // used after the loop to know which component was the last one parsed.
  let matchIndex = 0;
  for (; matchIndex < parseRes.length; matchIndex += 1) {
    if (localDateVal.length === 0) {
      break;
    }

    const componentMatch = parseRes[matchIndex].exec(localDateVal);
    if (componentMatch) {
      if (matchIndex === 1) {
        // Months are offset by 1.
        timeData[matchIndex] = parseInt(componentMatch[1], 10) - 1;
      }
      else {
        // All other components are not offset.
        timeData[matchIndex] = parseInt(componentMatch[1], 10);
      }

      // Truncate string.
      localDateVal = localDateVal.substring(componentMatch[0].length);
    }
    else {
      throw new ParseError(`Cannot parse date string: ${origDateVal}`);
    }
  }

  // Leftover input after all six components means a malformed string.
  if (localDateVal.length > 0) {
    throw new ParseError(`Cannot parse date string: ${origDateVal}`);
  }

  // Apply the user-specified time zone offset. The JS Date constructor
  // is very flexible here.
  timeData[3] -= timeZoneOffset[0];
  timeData[4] -= timeZoneOffset[1];

  const asPosix = (data: AbsoluteDate) => {
    return new Date(Date.UTC.apply(Date, data)).getTime();
  };

  // The range is one unit wide in the least-specific parsed component:
  // bump that component by 1 to get the exclusive upper bound.
  const bottomDate = asPosix(timeData);
  timeData[matchIndex - 1] += 1;
  const topDate = asPosix(timeData);

  return makeMatcher(bottomDate, topDate, qual);
}
|
||||||
|
|
||||||
|
export function makeDateMatcher(dateVal: string, qual: RangeEqualQualifier): FieldMatcher {
|
||||||
|
if (relativeDateMatch.test(dateVal)) {
|
||||||
|
return makeRelativeDateMatcher(dateVal, qual);
|
||||||
|
}
|
||||||
|
|
||||||
|
return makeAbsoluteDateMatcher(dateVal, qual);
|
||||||
|
}
|
39
assets/js/query/fields.ts
Normal file
39
assets/js/query/fields.ts
Normal file
|
@ -0,0 +1,39 @@
|
||||||
|
import { FieldName } from './types';
|
||||||
|
|
||||||
|
type AttributeName = string;
|
||||||
|
|
||||||
|
export const numberFields: FieldName[] =
|
||||||
|
['id', 'width', 'height', 'aspect_ratio',
|
||||||
|
'comment_count', 'score', 'upvotes', 'downvotes',
|
||||||
|
'faves', 'tag_count', 'score'];
|
||||||
|
|
||||||
|
// Fields whose term values are parsed as dates (absolute or relative).
export const dateFields: FieldName[] = ['created_at'];
|
||||||
|
|
||||||
|
export const literalFields =
|
||||||
|
['tags', 'orig_sha512_hash', 'sha512_hash',
|
||||||
|
'uploader', 'source_url', 'description'];
|
||||||
|
|
||||||
|
// Maps each searchable field name to the data attribute carrying its
// value on image elements. Matchers read these attributes at query time.
export const termSpaceToImageField: Record<FieldName, AttributeName> = {
  tags: 'data-image-tag-aliases',
  score: 'data-score',
  upvotes: 'data-upvotes',
  downvotes: 'data-downvotes',
  uploader: 'data-uploader',
  // Yeah, I don't think this is reasonably supportable.
  // faved_by: 'data-faved-by',
  id: 'data-image-id',
  width: 'data-width',
  height: 'data-height',
  /* eslint-disable camelcase */
  aspect_ratio: 'data-aspect-ratio',
  comment_count: 'data-comment-count',
  tag_count: 'data-tag-count',
  source_url: 'data-source-url',
  faves: 'data-faves',
  sha512_hash: 'data-sha512',
  orig_sha512_hash: 'data-orig-sha512',
  created_at: 'data-created-at'
  /* eslint-enable camelcase */
};

// Field used when a term carries no explicit field prefix.
export const defaultField = 'tags';
|
191
assets/js/query/lex.ts
Normal file
191
assets/js/query/lex.ts
Normal file
|
@ -0,0 +1,191 @@
|
||||||
|
import { assertNotNull, assertNotUndefined } from '../utils/assert';
|
||||||
|
import { AstMatcher, ParseError, TokenList } from './types';
|
||||||
|
|
||||||
|
type TokenName = string;
type Token = [TokenName, RegExp];

// Token table, tried in order against the head of the remaining input.
// Order matters: fuzz/boost must precede the word rules, and the stricter
// word rule precedes the permissive one.
const tokenList: Token[] = [
  // ~N fuzz and ^N boost modifiers.
  ['fuzz', /^~(?:\d+(\.\d+)?|\.\d+)/],
  ['boost', /^\^[-+]?\d+(\.\d+)?/],
  // Double-quoted literal (escaped quotes allowed inside).
  ['quoted_lit', /^\s*"(?:[^"]|\\")+"/],
  ['lparen', /^\s*\(\s*/],
  ['rparen', /^\s*\)\s*/],
  // AND: '&&', 'AND', or a comma.
  ['and_op', /^\s*(?:&&|AND)\s+/],
  ['and_op', /^\s*,\s*/],
  // OR: '||' or 'OR'.
  ['or_op', /^\s*(?:\|\||OR)\s+/],
  // NOT: keyword (before whitespace or '('), or a leading '!'/'-'.
  ['not_op', /^\s*NOT(?:\s+|(?=\())/],
  ['not_op', /^\s*[!-]\s*/],
  ['space', /^\s+/],
  // Word characters, with backslash escapes for delimiters. The second,
  // looser rule also admits unescaped '^'/'~'; presumably a fallback for
  // carets/tildes inside terms — confirm against generateLexArray's use.
  ['word', /^(?:\\[\s,()^~]|[^\s,()^~])+/],
  ['word', /^(?:\\[\s,()]|[^\s,()])+/]
];

// Callback invoked for each completed term, producing its AST matcher.
export type ParseTerm = (term: string, fuzz: number, boost: number) => AstMatcher;
|
||||||
|
|
||||||
|
/**
 * Tokenize a search string into a postfix (RPN) token list using a
 * shunting-yard-style algorithm.
 *
 * Operands are converted to matchers via `parseTerm` as they are completed;
 * operator names ('and_op', 'or_op', 'not_op') are emitted as strings.
 * Throws ParseError on mismatched parentheses.
 */
export function generateLexArray(searchStr: string, parseTerm: ParseTerm): TokenList {
  const opQueue: string[] = [],
        groupNegate: boolean[] = [],
        tokenStack: TokenList = [];

  let searchTerm: string | null = null;
  let boostFuzzStr = '';
  let localSearchStr: string = searchStr;
  let negate = false;
  let boost = 1;
  let fuzz = 0;
  // Count of unmatched '(' consumed as part of the current term text.
  let lparenCtr = 0;

  // Finalize the in-progress term (if any) onto the output stack, then
  // apply any pending negation.
  const pushTerm = () => {
    if (searchTerm !== null) {
      // Push to stack.
      tokenStack.push(parseTerm(searchTerm, fuzz, boost));
      // Reset term and options data.
      boost = 1;
      fuzz = 0;
      searchTerm = null;
      boostFuzzStr = '';
      lparenCtr = 0;
    }

    if (negate) {
      tokenStack.push('not_op');
      negate = false;
    }
  };

  while (localSearchStr.length > 0) {
    for (const [tokenName, tokenRe] of tokenList) {
      const match = tokenRe.exec(localSearchStr);

      if (!match) {
        continue;
      }

      const token = match[0];

      // A binary operator (or a ')' that closes a group rather than term
      // text) terminates the current term.
      if (searchTerm !== null && (['and_op', 'or_op'].indexOf(tokenName) !== -1 || tokenName === 'rparen' && lparenCtr === 0)) {
        pushTerm();
      }

      switch (tokenName) {
        case 'and_op':
          // AND is left-associative and binds tighter than OR: flush
          // pending ANDs before queueing this one.
          while (opQueue[0] === 'and_op') {
            tokenStack.push(assertNotUndefined(opQueue.shift()));
          }
          opQueue.unshift('and_op');
          break;
        case 'or_op':
          // OR has the lowest precedence: flush both ANDs and ORs.
          while (opQueue[0] === 'and_op' || opQueue[0] === 'or_op') {
            tokenStack.push(assertNotUndefined(opQueue.shift()));
          }
          opQueue.unshift('or_op');
          break;
        case 'not_op':
          if (searchTerm) {
            // We're already inside a search term, so it does not apply, obv.
            searchTerm += token;
          }
          else {
            negate = !negate;
          }
          break;
        case 'lparen':
          if (searchTerm) {
            // If we are inside the search term, do not error out just yet;
            // instead, consider it as part of the search term, as a user convenience.
            searchTerm += token;
            lparenCtr += 1;
          }
          else {
            opQueue.unshift('lparen');
            // Remember whether this whole group is negated.
            groupNegate.push(negate);
            negate = false;
          }
          break;
        case 'rparen':
          if (lparenCtr > 0) {
            // Closes a '(' that was consumed as term text above.
            searchTerm = assertNotNull(searchTerm) + token;
            lparenCtr -= 1;
          }
          else {
            // Flush operators queued since the matching '('.
            while (opQueue.length > 0) {
              const op = assertNotUndefined(opQueue.shift());
              if (op === 'lparen') {
                break;
              }
              tokenStack.push(op);
            }
            if (groupNegate.length > 0 && groupNegate.pop()) {
              tokenStack.push('not_op');
            }
          }
          break;
        case 'fuzz':
          if (searchTerm) {
            // For this and boost operations, we store the current match so far
            // to a temporary string in case this is actually inside the term.
            fuzz = parseFloat(token.substring(1));
            boostFuzzStr += token;
          }
          else {
            searchTerm = token;
          }
          break;
        case 'boost':
          if (searchTerm) {
            boost = parseFloat(token.substring(1));
            boostFuzzStr += token;
          }
          else {
            searchTerm = token;
          }
          break;
        case 'quoted_lit':
          if (searchTerm) {
            searchTerm += token;
          }
          else {
            searchTerm = token;
          }
          break;
        case 'word':
          if (searchTerm) {
            // The earlier fuzz/boost turned out to be term text after all:
            // splice the buffered characters back in and reset the options.
            if (fuzz !== 0 || boost !== 1) {
              boost = 1;
              fuzz = 0;
              searchTerm += boostFuzzStr;
              boostFuzzStr = '';
            }
            searchTerm += token;
          }
          else {
            searchTerm = token;
          }
          break;
        default:
          // Append extra spaces within search terms.
          if (searchTerm) {
            searchTerm += token;
          }
      }

      // Truncate string and restart the token tests.
      localSearchStr = localSearchStr.substring(token.length);

      // Break since we have found a match.
      break;
    }
  }

  // Append final tokens to the stack.
  pushTerm();

  if (opQueue.indexOf('rparen') !== -1 || opQueue.indexOf('lparen') !== -1) {
    throw new ParseError('Mismatched parentheses.');
  }

  // Concatenate remaining operators to the token stack.
  tokenStack.push(...opQueue);

  return tokenStack;
}
|
113
assets/js/query/literal.ts
Normal file
113
assets/js/query/literal.ts
Normal file
|
@ -0,0 +1,113 @@
|
||||||
|
import { FieldMatcher } from './types';
|
||||||
|
|
||||||
|
function extractValues(v: string, name: string) {
|
||||||
|
return name === 'tags' ? v.split(', ') : [v];
|
||||||
|
}
|
||||||
|
|
||||||
|
function makeExactMatcher(term: string): FieldMatcher {
|
||||||
|
return (v, name) => {
|
||||||
|
const values = extractValues(v, name);
|
||||||
|
|
||||||
|
for (const val of values) {
|
||||||
|
if (val.toLowerCase() === term.toLowerCase()) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return false;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Build a matcher supporting '*' (any run of characters) and '?' (optional
 * single character) wildcards by compiling the term into a case-insensitive
 * anchored regular expression.
 */
function makeWildcardMatcher(term: string): FieldMatcher {
  // Transforms wildcard match into regular expression.
  // A custom NFA with caching may be more sophisticated but not
  // likely to be faster.
  const wildcard = new RegExp(
    // First escape regex metacharacters, then rewrite unescaped '*'/'?'
    // into '.*'/'.?' — the leading-character case is handled by the
    // separate '^(?:\\\\)*' variants.
    `^${term.replace(/([.+^$[\]\\(){}|-])/g, '\\$1')
      .replace(/([^\\]|[^\\](?:\\\\)+)\*/g, '$1.*')
      .replace(/^(?:\\\\)*\*/g, '.*')
      .replace(/([^\\]|[^\\](?:\\\\)+)\?/g, '$1.?')
      .replace(/^(?:\\\\)*\?/g, '.?')}$`, 'i'
  );

  return (v, name) => {
    const values = extractValues(v, name);

    for (const val of values) {
      if (wildcard.test(val)) {
        return true;
      }
    }

    return false;
  };
}
|
||||||
|
|
||||||
|
function fuzzyMatch(term: string, targetStr: string, fuzz: number): boolean {
|
||||||
|
const targetDistance = fuzz < 1.0 ? targetStr.length * (1.0 - fuzz) : fuzz;
|
||||||
|
const targetStrLower = targetStr.toLowerCase();
|
||||||
|
|
||||||
|
// Work vectors, representing the last three populated
|
||||||
|
// rows of the dynamic programming matrix of the iterative
|
||||||
|
// optimal string alignment calculation.
|
||||||
|
let v0: number[] = [];
|
||||||
|
let v1: number[] = [];
|
||||||
|
let v2: number[] = [];
|
||||||
|
let temp: number[];
|
||||||
|
|
||||||
|
for (let i = 0; i <= targetStrLower.length; i += 1) {
|
||||||
|
v1.push(i);
|
||||||
|
}
|
||||||
|
|
||||||
|
for (let i = 0; i < term.length; i += 1) {
|
||||||
|
v2[0] = i;
|
||||||
|
for (let j = 0; j < targetStrLower.length; j += 1) {
|
||||||
|
const cost = term[i] === targetStrLower[j] ? 0 : 1;
|
||||||
|
v2[j + 1] = Math.min(
|
||||||
|
// Deletion.
|
||||||
|
v1[j + 1] + 1,
|
||||||
|
// Insertion.
|
||||||
|
v2[j] + 1,
|
||||||
|
// Substitution or No Change.
|
||||||
|
v1[j] + cost
|
||||||
|
);
|
||||||
|
if (i > 1 && j > 1 && term[i] === targetStrLower[j - 1] &&
|
||||||
|
targetStrLower[i - 1] === targetStrLower[j]) {
|
||||||
|
v2[j + 1] = Math.min(v2[j], v0[j - 1] + cost);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Rotate dem vec pointers bra.
|
||||||
|
temp = v0;
|
||||||
|
v0 = v1;
|
||||||
|
v1 = v2;
|
||||||
|
v2 = temp;
|
||||||
|
}
|
||||||
|
|
||||||
|
return v1[targetStrLower.length] <= targetDistance;
|
||||||
|
}
|
||||||
|
|
||||||
|
function makeFuzzyMatcher(term: string, fuzz: number): FieldMatcher {
|
||||||
|
return (v, name) => {
|
||||||
|
const values = extractValues(v, name);
|
||||||
|
|
||||||
|
for (const val of values) {
|
||||||
|
if (fuzzyMatch(term, val, fuzz)) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return false;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export function makeLiteralMatcher(term: string, fuzz: number, wildcardable: boolean): FieldMatcher {
|
||||||
|
if (fuzz === 0 && !wildcardable) {
|
||||||
|
return makeExactMatcher(term);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!wildcardable) {
|
||||||
|
return makeFuzzyMatcher(term, fuzz);
|
||||||
|
}
|
||||||
|
|
||||||
|
return makeWildcardMatcher(term);
|
||||||
|
}
|
20
assets/js/query/matcher.ts
Normal file
20
assets/js/query/matcher.ts
Normal file
|
@ -0,0 +1,20 @@
|
||||||
|
import { makeDateMatcher } from './date';
|
||||||
|
import { makeLiteralMatcher } from './literal';
|
||||||
|
import { makeNumberMatcher } from './number';
|
||||||
|
import { makeUserMatcher } from './user';
|
||||||
|
|
||||||
|
import { FieldMatcher, RangeEqualQualifier } from './types';
|
||||||
|
|
||||||
|
// Factory bundle describing how the individual per-field matchers are
// constructed; lets callers (and tests) substitute alternative builders.
export interface MatcherFactory {
  makeDateMatcher: (dateVal: string, qual: RangeEqualQualifier) => FieldMatcher,
  makeLiteralMatcher: (term: string, fuzz: number, wildcardable: boolean) => FieldMatcher,
  makeNumberMatcher: (term: number, fuzz: number, qual: RangeEqualQualifier) => FieldMatcher,
  makeUserMatcher: (term: string) => FieldMatcher
}

// Default factory wired to the real matcher implementations imported above.
export const defaultMatcher: MatcherFactory = {
  makeDateMatcher,
  makeLiteralMatcher,
  makeNumberMatcher,
  makeUserMatcher,
};
|
30
assets/js/query/number.ts
Normal file
30
assets/js/query/number.ts
Normal file
|
@ -0,0 +1,30 @@
|
||||||
|
import { FieldMatcher, RangeEqualQualifier } from './types';
|
||||||
|
|
||||||
|
export function makeNumberMatcher(term: number, fuzz: number, qual: RangeEqualQualifier): FieldMatcher {
|
||||||
|
// Range matching.
|
||||||
|
return v => {
|
||||||
|
const attrVal = parseFloat(v);
|
||||||
|
|
||||||
|
if (isNaN(attrVal)) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (fuzz !== 0) {
|
||||||
|
return term - fuzz <= attrVal && term + fuzz >= attrVal;
|
||||||
|
}
|
||||||
|
|
||||||
|
switch (qual) {
|
||||||
|
case 'lt':
|
||||||
|
return attrVal < term;
|
||||||
|
case 'gt':
|
||||||
|
return attrVal > term;
|
||||||
|
case 'lte':
|
||||||
|
return attrVal <= term;
|
||||||
|
case 'gte':
|
||||||
|
return attrVal >= term;
|
||||||
|
case 'eq':
|
||||||
|
default:
|
||||||
|
return attrVal === term;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
52
assets/js/query/parse.ts
Normal file
52
assets/js/query/parse.ts
Normal file
|
@ -0,0 +1,52 @@
|
||||||
|
import { matchAll, matchAny, matchNone, matchNot } from './boolean';
|
||||||
|
import { AstMatcher, ParseError, TokenList } from './types';
|
||||||
|
|
||||||
|
/**
 * Evaluate a postfix (RPN) token list produced by generateLexArray into a
 * single AST matcher.
 *
 * Strings in the list are operator names ('and_op', 'or_op', 'not_op');
 * every other element is an already-built AstMatcher operand.
 * Throws ParseError on malformed input; an empty list matches nothing.
 */
export function parseTokens(lexicalArray: TokenList): AstMatcher {
  const operandStack: AstMatcher[] = [];

  lexicalArray.forEach((token, i) => {
    // 'not_op' is consumed via the lookahead below, when the expression it
    // negates has just been built.
    if (token === 'not_op') {
      return;
    }

    let intermediate: AstMatcher;

    if (typeof token === 'string') {
      // Binary operator: pop its two operands.
      const op2 = operandStack.pop();
      const op1 = operandStack.pop();

      if (typeof op1 === 'undefined' || typeof op2 === 'undefined') {
        throw new ParseError('Missing operand.');
      }

      if (token === 'and_op') {
        intermediate = matchAll(op1, op2);
      }
      else {
        intermediate = matchAny(op1, op2);
      }
    }
    else {
      // Operand: already a matcher.
      intermediate = token;
    }

    // A following 'not_op' negates the expression just built.
    if (lexicalArray[i + 1] === 'not_op') {
      operandStack.push(matchNot(intermediate));
    }
    else {
      operandStack.push(intermediate);
    }
  });

  if (operandStack.length > 1) {
    throw new ParseError('Missing operator.');
  }

  const op1 = operandStack.pop();

  // Empty query: match nothing.
  if (typeof op1 === 'undefined') {
    return matchNone();
  }

  return op1;
}
|
90
assets/js/query/term.ts
Normal file
90
assets/js/query/term.ts
Normal file
|
@ -0,0 +1,90 @@
|
||||||
|
import { MatcherFactory } from './matcher';
|
||||||
|
|
||||||
|
import { numberFields, dateFields, literalFields, termSpaceToImageField, defaultField } from './fields';
|
||||||
|
import { FieldName, FieldMatcher, RangeEqualQualifier, TermType, AstMatcher } from './types';
|
||||||
|
|
||||||
|
type RangeInfo = [FieldName, RangeEqualQualifier, TermType];
|
||||||
|
|
||||||
|
function normalizeTerm(term: string, wildcardable: boolean) {
|
||||||
|
if (!wildcardable) {
|
||||||
|
return term.replace('\\"', '"');
|
||||||
|
}
|
||||||
|
return term.replace(/\\([^*?])/g, '$1');
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseRangeField(field: string): RangeInfo | null {
|
||||||
|
if (numberFields.indexOf(field) !== -1) {
|
||||||
|
return [field, 'eq', 'number'];
|
||||||
|
}
|
||||||
|
|
||||||
|
if (dateFields.indexOf(field) !== -1) {
|
||||||
|
return [field, 'eq', 'date'];
|
||||||
|
}
|
||||||
|
|
||||||
|
const qual = /^(\w+)\.([lg]te?|eq)$/.exec(field);
|
||||||
|
|
||||||
|
if (qual) {
|
||||||
|
const fieldName: FieldName = qual[1];
|
||||||
|
const rangeQual = qual[2] as RangeEqualQualifier;
|
||||||
|
|
||||||
|
if (numberFields.indexOf(fieldName) !== -1) {
|
||||||
|
return [fieldName, rangeQual, 'number'];
|
||||||
|
}
|
||||||
|
|
||||||
|
if (dateFields.indexOf(fieldName) !== -1) {
|
||||||
|
return [fieldName, rangeQual, 'date'];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Resolve a single search term into a [fieldName, matcher] pair.
 *
 * Handles "field:value" qualified terms — numeric/date ranges, known
 * literal fields, and "my:" interaction terms; anything else falls back to
 * a literal match against the default field.
 */
function makeTermMatcher(term: string, fuzz: number, factory: MatcherFactory): [FieldName, FieldMatcher] {
  let rangeParsing, candidateTermSpace, termCandidate;
  let localTerm = term;
  // Fuzzy terms and fully-quoted terms are never wildcardable.
  const wildcardable = fuzz === 0 && !/^"([^"]|\\")+"$/.test(localTerm);

  if (!wildcardable && !fuzz) {
    // Remove quotes around quoted literal term
    localTerm = localTerm.substring(1, localTerm.length - 1);
  }

  localTerm = normalizeTerm(localTerm, wildcardable);

  // N.B.: For the purposes of this parser, boosting effects are ignored.
  const matchArr = localTerm.split(':');

  if (matchArr.length > 1) {
    candidateTermSpace = matchArr[0];
    // Re-join the rest so values containing ':' (e.g. URLs) stay intact.
    termCandidate = matchArr.slice(1).join(':');
    rangeParsing = parseRangeField(candidateTermSpace);

    if (rangeParsing) {
      const [fieldName, rangeType, fieldType] = rangeParsing;

      if (fieldType === 'date') {
        return [fieldName, factory.makeDateMatcher(termCandidate, rangeType)];
      }

      return [fieldName, factory.makeNumberMatcher(parseFloat(termCandidate), fuzz, rangeType)];
    }
    else if (literalFields.indexOf(candidateTermSpace) !== -1) {
      return [candidateTermSpace, factory.makeLiteralMatcher(termCandidate, fuzz, wildcardable)];
    }
    else if (candidateTermSpace === 'my') {
      return [candidateTermSpace, factory.makeUserMatcher(termCandidate)];
    }
  }

  // Unqualified term: literal match against the default field.
  return [defaultField, factory.makeLiteralMatcher(localTerm, fuzz, wildcardable)];
}
|
||||||
|
|
||||||
|
export function getAstMatcherForTerm(term: string, fuzz: number, factory: MatcherFactory): AstMatcher {
|
||||||
|
const [fieldName, matcher] = makeTermMatcher(term, fuzz, factory);
|
||||||
|
|
||||||
|
return (e: HTMLElement) => {
|
||||||
|
const value = e.getAttribute(termSpaceToImageField[fieldName]) || '';
|
||||||
|
const documentId = parseInt(e.getAttribute(termSpaceToImageField.id) || '0', 10);
|
||||||
|
return matcher(value, fieldName, documentId);
|
||||||
|
};
|
||||||
|
}
|
12
assets/js/query/types.ts
Normal file
12
assets/js/query/types.ts
Normal file
|
@ -0,0 +1,12 @@
|
||||||
|
// Kind of value a search term compares against.
export type TermType = 'number' | 'date' | 'literal' | 'my';
// Range comparison qualifiers used in "field.qual:value" terms.
export type RangeQualifier = 'gt' | 'gte' | 'lt' | 'lte';
export type RangeEqualQualifier = RangeQualifier | 'eq';

export type FieldValue = string;
export type FieldName = string;
// Tests a single field value, given its field name and owning document id.
export type FieldMatcher = (value: FieldValue, name: FieldName, documentId: number) => boolean;

// Tests a whole image element against a parsed query.
export type AstMatcher = (e: HTMLElement) => boolean;
// Postfix token list: operator names interleaved with operand matchers.
export type TokenList = (string | AstMatcher)[];

// Raised on malformed search queries.
export class ParseError extends Error {}
|
25
assets/js/query/user.ts
Normal file
25
assets/js/query/user.ts
Normal file
|
@ -0,0 +1,25 @@
|
||||||
|
import { Interaction, InteractionType, InteractionValue } from '../../types/booru-object';
|
||||||
|
import { FieldMatcher } from './types';
|
||||||
|
|
||||||
|
function interactionMatch(imageId: number, type: InteractionType, value: InteractionValue, interactions: Interaction[]): boolean {
|
||||||
|
return interactions.some(v => v.image_id === imageId && v.interaction_type === type && (value === null || v.value === value));
|
||||||
|
}
|
||||||
|
|
||||||
|
export function makeUserMatcher(term: string): FieldMatcher {
|
||||||
|
// Should work with most my:conditions except watched.
|
||||||
|
return (value, field, documentId) => {
|
||||||
|
switch (term) {
|
||||||
|
case 'faves':
|
||||||
|
return interactionMatch(documentId, 'faved', null, window.booru.interactions);
|
||||||
|
case 'upvotes':
|
||||||
|
return interactionMatch(documentId, 'voted', 'up', window.booru.interactions);
|
||||||
|
case 'downvotes':
|
||||||
|
return interactionMatch(documentId, 'voted', 'down', window.booru.interactions);
|
||||||
|
case 'watched':
|
||||||
|
case 'hidden':
|
||||||
|
default:
|
||||||
|
// Other my: interactions aren't supported, return false to prevent them from triggering spoiler.
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
|
@ -1,6 +1,6 @@
|
||||||
import { inputDuplicatorCreator } from './input-duplicator';
|
import { inputDuplicatorCreator } from './input-duplicator';
|
||||||
|
|
||||||
function imageSourcesCreator() {
|
function setupInputs() {
|
||||||
inputDuplicatorCreator({
|
inputDuplicatorCreator({
|
||||||
addButtonSelector: '.js-image-add-source',
|
addButtonSelector: '.js-image-add-source',
|
||||||
fieldSelector: '.js-image-source',
|
fieldSelector: '.js-image-source',
|
||||||
|
@ -9,4 +9,18 @@ function imageSourcesCreator() {
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function imageSourcesCreator() {
|
||||||
|
setupInputs();
|
||||||
|
document.addEventListener('fetchcomplete', ({ target, detail }) => {
|
||||||
|
const sourceSauce = document.querySelector('.js-sourcesauce');
|
||||||
|
|
||||||
|
if (target.matches('#source-form')) {
|
||||||
|
detail.text().then(text => {
|
||||||
|
sourceSauce.outerHTML = text;
|
||||||
|
setupInputs();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
export { imageSourcesCreator };
|
export { imageSourcesCreator };
|
||||||
|
|
|
@ -2,7 +2,9 @@
|
||||||
* Frontend timestamps.
|
* Frontend timestamps.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
const strings = {
|
import { assertNotNull } from './utils/assert';
|
||||||
|
|
||||||
|
const strings: Record<string, string> = {
|
||||||
seconds: 'less than a minute',
|
seconds: 'less than a minute',
|
||||||
minute: 'about a minute',
|
minute: 'about a minute',
|
||||||
minutes: '%d minutes',
|
minutes: '%d minutes',
|
||||||
|
@ -16,16 +18,21 @@ const strings = {
|
||||||
years: '%d years',
|
years: '%d years',
|
||||||
};
|
};
|
||||||
|
|
||||||
function distance(time) {
|
function distance(time: Date) {
|
||||||
return new Date() - time;
|
return new Date().getTime() - time.getTime();
|
||||||
}
|
}
|
||||||
|
|
||||||
function substitute(key, amount) {
|
function substitute(key: string, amount: number) {
|
||||||
return strings[key].replace('%d', Math.round(amount));
|
return strings[key].replace('%d', Math.round(amount).toString());
|
||||||
}
|
}
|
||||||
|
|
||||||
function setTimeAgo(el) {
|
function setTimeAgo(el: HTMLTimeElement) {
|
||||||
const date = new Date(el.getAttribute('datetime'));
|
const datetime = el.getAttribute('datetime');
|
||||||
|
if (!datetime) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const date = new Date(datetime);
|
||||||
const distMillis = distance(date);
|
const distMillis = distance(date);
|
||||||
|
|
||||||
const seconds = Math.abs(distMillis) / 1000,
|
const seconds = Math.abs(distMillis) / 1000,
|
||||||
|
@ -49,20 +56,20 @@ function setTimeAgo(el) {
|
||||||
substitute('years', years);
|
substitute('years', years);
|
||||||
|
|
||||||
if (!el.getAttribute('title')) {
|
if (!el.getAttribute('title')) {
|
||||||
el.setAttribute('title', el.textContent);
|
el.setAttribute('title', assertNotNull(el.textContent));
|
||||||
}
|
}
|
||||||
el.textContent = words + (distMillis < 0 ? ' from now' : ' ago');
|
el.textContent = words + (distMillis < 0 ? ' from now' : ' ago');
|
||||||
}
|
}
|
||||||
|
|
||||||
function timeAgo(args) {
|
export function timeAgo(args: HTMLTimeElement[] | HTMLCollectionOf<HTMLTimeElement>) {
|
||||||
[].forEach.call(args, el => setTimeAgo(el));
|
for (const el of args) {
|
||||||
|
setTimeAgo(el);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
function setupTimestamps() {
|
export function setupTimestamps() {
|
||||||
timeAgo(document.getElementsByTagName('time'));
|
timeAgo(document.getElementsByTagName('time'));
|
||||||
window.setTimeout(setupTimestamps, 60000);
|
window.setTimeout(setupTimestamps, 60000);
|
||||||
}
|
}
|
||||||
|
|
||||||
export { setupTimestamps };
|
|
||||||
|
|
||||||
window.booru.timeAgo = timeAgo;
|
window.booru.timeAgo = timeAgo;
|
|
@ -1,3 +1,4 @@
|
||||||
|
import { assertNotNull, assertNotUndefined } from './utils/assert';
|
||||||
import { $$, makeEl, findFirstTextNode } from './utils/dom';
|
import { $$, makeEl, findFirstTextNode } from './utils/dom';
|
||||||
import { fire, delegate, leftClick } from './utils/events';
|
import { fire, delegate, leftClick } from './utils/events';
|
||||||
|
|
||||||
|
@ -6,7 +7,7 @@ const headers = () => ({
|
||||||
'x-requested-with': 'XMLHttpRequest'
|
'x-requested-with': 'XMLHttpRequest'
|
||||||
});
|
});
|
||||||
|
|
||||||
function confirm(event, target) {
|
function confirm(event: Event, target: HTMLElement) {
|
||||||
if (!window.confirm(target.dataset.confirm)) {
|
if (!window.confirm(target.dataset.confirm)) {
|
||||||
event.preventDefault();
|
event.preventDefault();
|
||||||
event.stopImmediatePropagation();
|
event.stopImmediatePropagation();
|
||||||
|
@ -14,28 +15,28 @@ function confirm(event, target) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
function disable(event, target) {
|
function disable(event: Event, target: HTMLAnchorElement | HTMLButtonElement | HTMLInputElement) {
|
||||||
// failed validations prevent the form from being submitted;
|
// failed validations prevent the form from being submitted;
|
||||||
// stop here or the form will be permanently locked
|
// stop here or the form will be permanently locked
|
||||||
if (target.type === 'submit' && target.closest(':invalid') !== null) return;
|
if (target.type === 'submit' && target.closest(':invalid') !== null) return;
|
||||||
|
|
||||||
// Store what's already there so we don't lose it
|
// Store what's already there so we don't lose it
|
||||||
const label = findFirstTextNode(target);
|
const label = findFirstTextNode<Text>(target);
|
||||||
if (label) {
|
if (label) {
|
||||||
target.dataset.enableWith = label.nodeValue;
|
target.dataset.enableWith = assertNotNull(label.nodeValue);
|
||||||
label.nodeValue = ` ${target.dataset.disableWith}`;
|
label.nodeValue = ` ${target.dataset.disableWith}`;
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
target.dataset.enableWith = target.innerHTML;
|
target.dataset.enableWith = target.innerHTML;
|
||||||
target.innerHTML = target.dataset.disableWith;
|
target.innerHTML = assertNotUndefined(target.dataset.disableWith);
|
||||||
}
|
}
|
||||||
|
|
||||||
// delay is needed because Safari stops the submit if the button is immediately disabled
|
// delay is needed because Safari stops the submit if the button is immediately disabled
|
||||||
requestAnimationFrame(() => target.disabled = 'disabled');
|
requestAnimationFrame(() => target.setAttribute('disabled', 'disabled'));
|
||||||
}
|
}
|
||||||
|
|
||||||
// you should use button_to instead of link_to[method]!
|
// you should use button_to instead of link_to[method]!
|
||||||
function linkMethod(event, target) {
|
function linkMethod(event: Event, target: HTMLAnchorElement) {
|
||||||
event.preventDefault();
|
event.preventDefault();
|
||||||
|
|
||||||
const form = makeEl('form', { action: target.href, method: 'POST' });
|
const form = makeEl('form', { action: target.href, method: 'POST' });
|
||||||
|
@ -49,41 +50,42 @@ function linkMethod(event, target) {
|
||||||
form.submit();
|
form.submit();
|
||||||
}
|
}
|
||||||
|
|
||||||
function formRemote(event, target) {
|
function formRemote(event: Event, target: HTMLFormElement) {
|
||||||
event.preventDefault();
|
event.preventDefault();
|
||||||
|
|
||||||
fetch(target.action, {
|
fetch(target.action, {
|
||||||
credentials: 'same-origin',
|
credentials: 'same-origin',
|
||||||
method: (target.dataset.method || target.method || 'POST').toUpperCase(),
|
method: (target.dataset.method || target.method).toUpperCase(),
|
||||||
headers: headers(),
|
headers: headers(),
|
||||||
body: new FormData(target)
|
body: new FormData(target)
|
||||||
}).then(response => {
|
}).then(response => {
|
||||||
if (response && response.status === 300) {
|
|
||||||
window.location.reload(true);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
fire(target, 'fetchcomplete', response);
|
fire(target, 'fetchcomplete', response);
|
||||||
|
if (response && response.status === 300) {
|
||||||
|
window.location.reload();
|
||||||
|
}
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
function formReset(event, target) {
|
function formReset(_event: Event | null, target: HTMLElement) {
|
||||||
$$('[disabled][data-disable-with][data-enable-with]', target).forEach(input => {
|
$$<HTMLElement>('[disabled][data-disable-with][data-enable-with]', target).forEach(input => {
|
||||||
const label = findFirstTextNode(input);
|
const label = findFirstTextNode(input);
|
||||||
if (label) {
|
if (label) {
|
||||||
label.nodeValue = ` ${input.dataset.enableWith}`;
|
label.nodeValue = ` ${input.dataset.enableWith}`;
|
||||||
}
|
}
|
||||||
else { input.innerHTML = target.dataset.enableWith; }
|
else {
|
||||||
|
input.innerHTML = assertNotUndefined(input.dataset.enableWith);
|
||||||
|
}
|
||||||
delete input.dataset.enableWith;
|
delete input.dataset.enableWith;
|
||||||
input.removeAttribute('disabled');
|
input.removeAttribute('disabled');
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
function linkRemote(event, target) {
|
function linkRemote(event: Event, target: HTMLAnchorElement) {
|
||||||
event.preventDefault();
|
event.preventDefault();
|
||||||
|
|
||||||
fetch(target.href, {
|
fetch(target.href, {
|
||||||
credentials: 'same-origin',
|
credentials: 'same-origin',
|
||||||
method: target.dataset.method.toUpperCase(),
|
method: (target.dataset.method || 'get').toUpperCase(),
|
||||||
headers: headers()
|
headers: headers()
|
||||||
}).then(response =>
|
}).then(response =>
|
||||||
fire(target, 'fetchcomplete', response)
|
fire(target, 'fetchcomplete', response)
|
||||||
|
@ -106,5 +108,7 @@ delegate(document, 'reset', {
|
||||||
});
|
});
|
||||||
|
|
||||||
window.addEventListener('pageshow', () => {
|
window.addEventListener('pageshow', () => {
|
||||||
[].forEach.call(document.forms, form => formReset(null, form));
|
for (const form of document.forms) {
|
||||||
|
formReset(null, form);
|
||||||
|
}
|
||||||
});
|
});
|
35
assets/js/utils/__tests__/assert.spec.ts
Normal file
35
assets/js/utils/__tests__/assert.spec.ts
Normal file
|
@ -0,0 +1,35 @@
|
||||||
|
import { assertNotNull, assertNotUndefined, assertType } from '../assert';
|
||||||
|
|
||||||
|
// Unit tests for the assertion helpers in utils/assert.
describe('Assertion utilities', () => {
  describe('assertNotNull', () => {
    it('should return non-null values', () => {
      expect(assertNotNull(1)).toEqual(1);
      expect(assertNotNull('anything')).toEqual('anything');
    });

    it('should throw when passed a null value', () => {
      expect(() => assertNotNull(null)).toThrow('Expected non-null value');
    });
  });

  describe('assertNotUndefined', () => {
    it('should return non-undefined values', () => {
      expect(assertNotUndefined(1)).toEqual(1);
      expect(assertNotUndefined('anything')).toEqual('anything');
    });

    it('should throw when passed an undefined value', () => {
      expect(() => assertNotUndefined(undefined)).toThrow('Expected non-undefined value');
    });
  });

  describe('assertType', () => {
    it('should return values of the generic type', () => {
      expect(assertType({}, Object)).toEqual({});
    });

    it('should throw when passed a value of the wrong type', () => {
      expect(() => assertType('anything', Number)).toThrow('Expected value of type');
    });
  });
});
|
|
@ -3,6 +3,7 @@ import { getRandomArrayItem } from '../../../test/randomness';
|
||||||
import { mockStorage } from '../../../test/mock-storage';
|
import { mockStorage } from '../../../test/mock-storage';
|
||||||
import { createEvent, fireEvent } from '@testing-library/dom';
|
import { createEvent, fireEvent } from '@testing-library/dom';
|
||||||
import { EventType } from '@testing-library/dom/types/events';
|
import { EventType } from '@testing-library/dom/types/events';
|
||||||
|
import { SpoilerType } from '../../../types/booru-object';
|
||||||
|
|
||||||
describe('Image utils', () => {
|
describe('Image utils', () => {
|
||||||
const hiddenClass = 'hidden';
|
const hiddenClass = 'hidden';
|
||||||
|
|
|
@ -2,6 +2,7 @@ import { displayTags, getHiddenTags, getSpoileredTags, imageHitsComplex, imageHi
|
||||||
import { mockStorage } from '../../../test/mock-storage';
|
import { mockStorage } from '../../../test/mock-storage';
|
||||||
import { getRandomArrayItem } from '../../../test/randomness';
|
import { getRandomArrayItem } from '../../../test/randomness';
|
||||||
import parseSearch from '../../match_query';
|
import parseSearch from '../../match_query';
|
||||||
|
import { SpoilerType } from '../../../types/booru-object';
|
||||||
|
|
||||||
describe('Tag utilities', () => {
|
describe('Tag utilities', () => {
|
||||||
const tagStorageKeyPrefix = 'bor_tags_';
|
const tagStorageKeyPrefix = 'bor_tags_';
|
||||||
|
|
28
assets/js/utils/assert.ts
Normal file
28
assets/js/utils/assert.ts
Normal file
|
@ -0,0 +1,28 @@
|
||||||
|
export function assertNotNull<T>(value: T | null): T {
|
||||||
|
if (value === null) {
|
||||||
|
throw new Error('Expected non-null value');
|
||||||
|
}
|
||||||
|
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function assertNotUndefined<T>(value: T | undefined): T {
|
||||||
|
// eslint-disable-next-line no-undefined
|
||||||
|
if (value === undefined) {
|
||||||
|
throw new Error('Expected non-undefined value');
|
||||||
|
}
|
||||||
|
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||||
|
type Constructor<T> = { new (...args: any[]): T };
|
||||||
|
|
||||||
|
export function assertType<T>(value: any, c: Constructor<T>): T {
|
||||||
|
if (value instanceof c) {
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new Error('Expected value of type');
|
||||||
|
}
|
||||||
|
/* eslint-enable @typescript-eslint/no-explicit-any */
|
|
@ -63,8 +63,7 @@ export class LocalAutocompleter {
|
||||||
const nameLength = this.view.getUint8(location);
|
const nameLength = this.view.getUint8(location);
|
||||||
const assnLength = this.view.getUint8(location + 1 + nameLength);
|
const assnLength = this.view.getUint8(location + 1 + nameLength);
|
||||||
|
|
||||||
/** @type {number[]} */
|
const associations: number[] = [];
|
||||||
const associations = [];
|
|
||||||
const name = this.decoder.decode(this.data.slice(location + 1, location + nameLength + 1));
|
const name = this.decoder.decode(this.data.slice(location + 1, location + nameLength + 1));
|
||||||
|
|
||||||
for (let i = 0; i < assnLength; i++) {
|
for (let i = 0; i < assnLength; i++) {
|
||||||
|
|
|
@ -1,5 +1,6 @@
|
||||||
import { escapeHtml } from './dom';
|
import { escapeHtml } from './dom';
|
||||||
import { getTag } from '../booru';
|
import { getTag } from '../booru';
|
||||||
|
import { AstMatcher } from '../query/types';
|
||||||
|
|
||||||
export interface TagData {
|
export interface TagData {
|
||||||
id: number;
|
id: number;
|
||||||
|
@ -42,7 +43,7 @@ export function getSpoileredTags() {
|
||||||
.sort(sortTags.bind(null, false));
|
.sort(sortTags.bind(null, false));
|
||||||
}
|
}
|
||||||
|
|
||||||
export function imageHitsTags(img: HTMLImageElement, matchTags: TagData[]): TagData[] {
|
export function imageHitsTags(img: HTMLElement, matchTags: TagData[]): TagData[] {
|
||||||
const imageTagsString = img.dataset.imageTags;
|
const imageTagsString = img.dataset.imageTags;
|
||||||
if (typeof imageTagsString === 'undefined') {
|
if (typeof imageTagsString === 'undefined') {
|
||||||
return [];
|
return [];
|
||||||
|
@ -51,8 +52,8 @@ export function imageHitsTags(img: HTMLImageElement, matchTags: TagData[]): TagD
|
||||||
return matchTags.filter(t => imageTags.indexOf(t.id) !== -1);
|
return matchTags.filter(t => imageTags.indexOf(t.id) !== -1);
|
||||||
}
|
}
|
||||||
|
|
||||||
export function imageHitsComplex(img: HTMLImageElement, matchComplex: { hitsImage: (img: HTMLImageElement) => boolean }) {
|
export function imageHitsComplex(img: HTMLElement, matchComplex: AstMatcher) {
|
||||||
return matchComplex.hitsImage(img);
|
return matchComplex(img);
|
||||||
}
|
}
|
||||||
|
|
||||||
export function displayTags(tags: TagData[]): string {
|
export function displayTags(tags: TagData[]): string {
|
||||||
|
|
2967
assets/package-lock.json
generated
2967
assets/package-lock.json
generated
File diff suppressed because it is too large
Load diff
|
@ -1,13 +1,9 @@
|
||||||
import '@testing-library/jest-dom';
|
import '@testing-library/jest-dom';
|
||||||
|
import { matchNone } from '../js/query/boolean';
|
||||||
const blankFilter = {
|
|
||||||
leftOperand: null,
|
|
||||||
negate: false,
|
|
||||||
op: null,
|
|
||||||
rightOperand: null,
|
|
||||||
};
|
|
||||||
|
|
||||||
window.booru = {
|
window.booru = {
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-empty-function
|
||||||
|
timeAgo: () => {},
|
||||||
csrfToken: 'mockCsrfToken',
|
csrfToken: 'mockCsrfToken',
|
||||||
hiddenTag: '/mock-tagblocked.svg',
|
hiddenTag: '/mock-tagblocked.svg',
|
||||||
hiddenTagList: [],
|
hiddenTagList: [],
|
||||||
|
@ -18,7 +14,8 @@ window.booru = {
|
||||||
userCanEditFilter: false,
|
userCanEditFilter: false,
|
||||||
userIsSignedIn: false,
|
userIsSignedIn: false,
|
||||||
watchedTagList: [],
|
watchedTagList: [],
|
||||||
hiddenFilter: blankFilter,
|
hiddenFilter: matchNone(),
|
||||||
spoileredFilter: blankFilter,
|
spoileredFilter: matchNone(),
|
||||||
|
interactions: [],
|
||||||
tagsVersion: 5
|
tagsVersion: 5
|
||||||
};
|
};
|
||||||
|
|
37
assets/types/booru-object.d.ts
vendored
37
assets/types/booru-object.d.ts
vendored
|
@ -1,6 +1,25 @@
|
||||||
|
import { AstMatcher } from 'query/types';
|
||||||
|
|
||||||
type SpoilerType = 'click' | 'hover' | 'static' | 'off';
|
type SpoilerType = 'click' | 'hover' | 'static' | 'off';
|
||||||
|
|
||||||
|
type InteractionType = 'voted' | 'faved' | 'hidden';
|
||||||
|
type InteractionValue = 'up' | 'down' | null;
|
||||||
|
|
||||||
|
interface Interaction {
|
||||||
|
image_id: number;
|
||||||
|
user_id: number;
|
||||||
|
interaction_type: InteractionType;
|
||||||
|
value: 'up' | 'down' | null;
|
||||||
|
}
|
||||||
|
|
||||||
interface BooruObject {
|
interface BooruObject {
|
||||||
|
/**
|
||||||
|
* Automatic timestamp recalculation function for userscript use
|
||||||
|
*/
|
||||||
|
timeAgo: (args: HTMLTimeElement[]) => void;
|
||||||
|
/**
|
||||||
|
* Anti-forgery token sent by the server
|
||||||
|
*/
|
||||||
csrfToken: string;
|
csrfToken: string;
|
||||||
/**
|
/**
|
||||||
* One of the specified values, based on user setting
|
* One of the specified values, based on user setting
|
||||||
|
@ -36,24 +55,20 @@ interface BooruObject {
|
||||||
*/
|
*/
|
||||||
userCanEditFilter: boolean;
|
userCanEditFilter: boolean;
|
||||||
/**
|
/**
|
||||||
* SearchAST instance for hidden tags, converted from raw AST data in {@see import('../js/booru.js')}
|
* AST matcher instance for filter hidden query
|
||||||
*
|
*
|
||||||
* TODO Properly type after TypeScript migration
|
|
||||||
*
|
|
||||||
* @type {import('../js/match_query.js').SearchAST}
|
|
||||||
*/
|
*/
|
||||||
hiddenFilter: unknown;
|
hiddenFilter: AstMatcher;
|
||||||
/**
|
/**
|
||||||
* SearchAST instance for spoilered tags, converted from raw AST data in {@see import('../js/booru.js')}
|
* AST matcher instance for filter spoilered query
|
||||||
*
|
|
||||||
* TODO Properly type after TypeScript migration
|
|
||||||
*
|
|
||||||
* @type {import('../js/match_query.js').SearchAST}
|
|
||||||
*/
|
*/
|
||||||
spoileredFilter: unknown;
|
spoileredFilter: AstMatcher;
|
||||||
tagsVersion: number;
|
tagsVersion: number;
|
||||||
|
interactions: Interaction[];
|
||||||
}
|
}
|
||||||
|
|
||||||
|
declare global {
|
||||||
interface Window {
|
interface Window {
|
||||||
booru: BooruObject;
|
booru: BooruObject;
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
|
@ -12,7 +12,7 @@ import Config
|
||||||
config :philomena, PhilomenaWeb.Endpoint, cache_static_manifest: "priv/static/cache_manifest.json"
|
config :philomena, PhilomenaWeb.Endpoint, cache_static_manifest: "priv/static/cache_manifest.json"
|
||||||
|
|
||||||
# Do not print debug messages in production
|
# Do not print debug messages in production
|
||||||
config :logger, level: :warn
|
config :logger, level: :warning
|
||||||
|
|
||||||
# ## SSL Support
|
# ## SSL Support
|
||||||
#
|
#
|
||||||
|
|
|
@ -6,6 +6,7 @@ import Config
|
||||||
# by calling `mix release`.
|
# by calling `mix release`.
|
||||||
#
|
#
|
||||||
# See `mix help release` for more information.
|
# See `mix help release` for more information.
|
||||||
|
{:ok, _} = Application.ensure_all_started(:tls_certificate_check)
|
||||||
|
|
||||||
config :bcrypt_elixir,
|
config :bcrypt_elixir,
|
||||||
log_rounds: String.to_integer(System.get_env("BCRYPT_ROUNDS", "12"))
|
log_rounds: String.to_integer(System.get_env("BCRYPT_ROUNDS", "12"))
|
||||||
|
@ -120,13 +121,16 @@ if config_env() == :prod do
|
||||||
username: System.fetch_env!("SMTP_USERNAME"),
|
username: System.fetch_env!("SMTP_USERNAME"),
|
||||||
password: System.fetch_env!("SMTP_PASSWORD"),
|
password: System.fetch_env!("SMTP_PASSWORD"),
|
||||||
tls: :always,
|
tls: :always,
|
||||||
auth: :always
|
auth: :always,
|
||||||
|
tls_options:
|
||||||
|
[middlebox_comp_mode: false] ++
|
||||||
|
:tls_certificate_check.options(System.fetch_env!("SMTP_RELAY"))
|
||||||
|
|
||||||
# Production endpoint config
|
# Production endpoint config
|
||||||
{:ok, ip} = :inet.parse_address(System.get_env("APP_IP", "127.0.0.1") |> String.to_charlist())
|
{:ok, ip} = :inet.parse_address(System.get_env("APP_IP", "127.0.0.1") |> String.to_charlist())
|
||||||
|
|
||||||
config :philomena, PhilomenaWeb.Endpoint,
|
config :philomena, PhilomenaWeb.Endpoint,
|
||||||
http: [ip: ip, port: {:system, "PORT"}],
|
http: [ip: ip, port: System.fetch_env!("PORT")],
|
||||||
url: [host: System.fetch_env!("APP_HOSTNAME"), scheme: "https", port: 443],
|
url: [host: System.fetch_env!("APP_HOSTNAME"), scheme: "https", port: 443],
|
||||||
secret_key_base: System.fetch_env!("SECRET_KEY_BASE"),
|
secret_key_base: System.fetch_env!("SECRET_KEY_BASE"),
|
||||||
server: not is_nil(System.get_env("START_ENDPOINT"))
|
server: not is_nil(System.get_env("START_ENDPOINT"))
|
||||||
|
|
|
@ -19,4 +19,4 @@ config :philomena, PhilomenaWeb.Endpoint,
|
||||||
server: false
|
server: false
|
||||||
|
|
||||||
# Print only warnings and errors during test
|
# Print only warnings and errors during test
|
||||||
config :logger, level: :warn
|
config :logger, level: :warning
|
||||||
|
|
|
@ -50,7 +50,7 @@ services:
|
||||||
- redis
|
- redis
|
||||||
|
|
||||||
postgres:
|
postgres:
|
||||||
image: postgres:15.3-alpine
|
image: postgres:16.2-alpine
|
||||||
environment:
|
environment:
|
||||||
- POSTGRES_PASSWORD=postgres
|
- POSTGRES_PASSWORD=postgres
|
||||||
volumes:
|
volumes:
|
||||||
|
@ -72,12 +72,12 @@ services:
|
||||||
hard: 65536
|
hard: 65536
|
||||||
|
|
||||||
redis:
|
redis:
|
||||||
image: redis:7.0.11-alpine
|
image: redis:7.2.4-alpine
|
||||||
logging:
|
logging:
|
||||||
driver: "none"
|
driver: "none"
|
||||||
|
|
||||||
files:
|
files:
|
||||||
image: andrewgaul/s3proxy:sha-ba0fd6d
|
image: andrewgaul/s3proxy:sha-ec12ae0
|
||||||
environment:
|
environment:
|
||||||
- JCLOUDS_FILESYSTEM_BASEDIR=/srv/philomena/priv/s3
|
- JCLOUDS_FILESYSTEM_BASEDIR=/srv/philomena/priv/s3
|
||||||
volumes:
|
volumes:
|
||||||
|
|
|
@ -1,10 +1,10 @@
|
||||||
FROM elixir:1.15.4-alpine
|
FROM elixir:1.16.1-alpine
|
||||||
|
|
||||||
ADD https://api.github.com/repos/philomena-dev/FFmpeg/git/refs/heads/release/6.0 /tmp/ffmpeg_version.json
|
ADD https://api.github.com/repos/philomena-dev/FFmpeg/git/refs/heads/release/6.1 /tmp/ffmpeg_version.json
|
||||||
RUN (echo "https://github.com/philomena-dev/prebuilt-ffmpeg/raw/master"; cat /etc/apk/repositories) > /tmp/repositories \
|
RUN (echo "https://github.com/philomena-dev/prebuilt-ffmpeg/raw/master"; cat /etc/apk/repositories) > /tmp/repositories \
|
||||||
&& cp /tmp/repositories /etc/apk/repositories \
|
&& cp /tmp/repositories /etc/apk/repositories \
|
||||||
&& apk update --allow-untrusted \
|
&& apk update --allow-untrusted \
|
||||||
&& apk add inotify-tools build-base git ffmpeg ffmpeg-dev npm nodejs file-dev libpng-dev gifsicle optipng libjpeg-turbo-utils librsvg rsvg-convert imagemagick postgresql15-client wget rust cargo --allow-untrusted \
|
&& apk add inotify-tools build-base git ffmpeg ffmpeg-dev npm nodejs file-dev libpng-dev gifsicle optipng libjpeg-turbo-utils librsvg rsvg-convert imagemagick postgresql16-client wget rust cargo --allow-untrusted \
|
||||||
&& mix local.hex --force \
|
&& mix local.hex --force \
|
||||||
&& mix local.rebar --force
|
&& mix local.rebar --force
|
||||||
|
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
FROM openresty/openresty:1.21.4.1-7-alpine
|
FROM openresty/openresty:1.25.3.1-2-alpine
|
||||||
ARG APP_DIR
|
ARG APP_DIR
|
||||||
ARG S3_SCHEME
|
ARG S3_SCHEME
|
||||||
ARG S3_HOST
|
ARG S3_HOST
|
||||||
|
|
|
@ -203,7 +203,7 @@ defmodule Philomena.Galleries do
|
||||||
|> case do
|
|> case do
|
||||||
{:ok, result} ->
|
{:ok, result} ->
|
||||||
Images.reindex_image(image)
|
Images.reindex_image(image)
|
||||||
notify_gallery(gallery)
|
notify_gallery(gallery, image)
|
||||||
reindex_gallery(gallery)
|
reindex_gallery(gallery)
|
||||||
|
|
||||||
{:ok, result}
|
{:ok, result}
|
||||||
|
@ -261,11 +261,11 @@ defmodule Philomena.Galleries do
|
||||||
|> Repo.aggregate(:max, :position)
|
|> Repo.aggregate(:max, :position)
|
||||||
end
|
end
|
||||||
|
|
||||||
def notify_gallery(gallery) do
|
def notify_gallery(gallery, image) do
|
||||||
Exq.enqueue(Exq, "notifications", NotificationWorker, ["Galleries", gallery.id])
|
Exq.enqueue(Exq, "notifications", NotificationWorker, ["Galleries", [gallery.id, image.id]])
|
||||||
end
|
end
|
||||||
|
|
||||||
def perform_notify(gallery_id) do
|
def perform_notify([gallery_id, image_id]) do
|
||||||
gallery = get_gallery!(gallery_id)
|
gallery = get_gallery!(gallery_id)
|
||||||
|
|
||||||
subscriptions =
|
subscriptions =
|
||||||
|
@ -279,8 +279,8 @@ defmodule Philomena.Galleries do
|
||||||
%{
|
%{
|
||||||
actor_id: gallery.id,
|
actor_id: gallery.id,
|
||||||
actor_type: "Gallery",
|
actor_type: "Gallery",
|
||||||
actor_child_id: nil,
|
actor_child_id: image_id,
|
||||||
actor_child_type: nil,
|
actor_child_type: "Image",
|
||||||
action: "added images to"
|
action: "added images to"
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
|
@ -210,11 +210,15 @@ defmodule Philomena.Images do
|
||||||
|
|
||||||
defp maybe_suggest_user_verification(_user), do: false
|
defp maybe_suggest_user_verification(_user), do: false
|
||||||
|
|
||||||
def count_pending_approvals() do
|
def count_pending_approvals(user) do
|
||||||
|
if Canada.Can.can?(user, :approve, %Image{}) do
|
||||||
Image
|
Image
|
||||||
|> where(hidden_from_users: false)
|
|> where(hidden_from_users: false)
|
||||||
|> where(approved: false)
|
|> where(approved: false)
|
||||||
|> Repo.aggregate(:count)
|
|> Repo.aggregate(:count)
|
||||||
|
else
|
||||||
|
nil
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def feature_image(featurer, %Image{} = image) do
|
def feature_image(featurer, %Image{} = image) do
|
||||||
|
|
|
@ -187,7 +187,7 @@ defmodule Philomena.Images.Image do
|
||||||
height = fetch_field!(changeset, :image_height)
|
height = fetch_field!(changeset, :image_height)
|
||||||
|
|
||||||
cond do
|
cond do
|
||||||
width <= 0 or height <= 0 ->
|
is_nil(width) or is_nil(height) or width <= 0 or height <= 0 ->
|
||||||
add_error(
|
add_error(
|
||||||
changeset,
|
changeset,
|
||||||
:image,
|
:image,
|
||||||
|
|
|
@ -73,7 +73,7 @@ defmodule Philomena.Images.Query do
|
||||||
float_fields: ~W(aspect_ratio wilson_score duration),
|
float_fields: ~W(aspect_ratio wilson_score duration),
|
||||||
date_fields: ~W(created_at updated_at first_seen_at),
|
date_fields: ~W(created_at updated_at first_seen_at),
|
||||||
literal_fields:
|
literal_fields:
|
||||||
~W(faved_by orig_sha512_hash sha512_hash uploader source_url original_format mime_type),
|
~W(faved_by orig_sha512_hash sha512_hash uploader source_url original_format mime_type file_name),
|
||||||
bool_fields: ~W(animated processed thumbnails_generated),
|
bool_fields: ~W(animated processed thumbnails_generated),
|
||||||
ngram_fields: ~W(description),
|
ngram_fields: ~W(description),
|
||||||
custom_fields: ~W(gallery_id),
|
custom_fields: ~W(gallery_id),
|
||||||
|
@ -82,7 +82,8 @@ defmodule Philomena.Images.Query do
|
||||||
aliases: %{
|
aliases: %{
|
||||||
"faved_by" => "favourited_by_users",
|
"faved_by" => "favourited_by_users",
|
||||||
"faved_by_id" => "favourited_by_user_ids"
|
"faved_by_id" => "favourited_by_user_ids"
|
||||||
}
|
},
|
||||||
|
no_downcase_fields: ~W(file_name)
|
||||||
]
|
]
|
||||||
end
|
end
|
||||||
|
|
||||||
|
|
|
@ -76,16 +76,29 @@ defmodule Philomena.Images.Thumbnailer do
|
||||||
file = download_image_file(image)
|
file = download_image_file(image)
|
||||||
{:ok, analysis} = Analyzers.analyze(file)
|
{:ok, analysis} = Analyzers.analyze(file)
|
||||||
|
|
||||||
apply_edit_script(image, Processors.process(analysis, file, generated_sizes(image)))
|
file =
|
||||||
|
apply_edit_script(image, file, Processors.process(analysis, file, generated_sizes(image)))
|
||||||
|
|
||||||
generate_dupe_reports(image)
|
generate_dupe_reports(image)
|
||||||
recompute_meta(image, file, &Image.thumbnail_changeset/2)
|
recompute_meta(image, file, &Image.thumbnail_changeset/2)
|
||||||
|
|
||||||
apply_edit_script(image, Processors.post_process(analysis, file))
|
file = apply_edit_script(image, file, Processors.post_process(analysis, file))
|
||||||
recompute_meta(image, file, &Image.process_changeset/2)
|
recompute_meta(image, file, &Image.process_changeset/2)
|
||||||
end
|
end
|
||||||
|
|
||||||
defp apply_edit_script(image, changes),
|
defp apply_edit_script(image, file, changes) do
|
||||||
do: Enum.map(changes, &apply_change(image, &1))
|
Enum.reduce(changes, file, fn change, existing_file ->
|
||||||
|
apply_change(image, change)
|
||||||
|
|
||||||
|
case change do
|
||||||
|
{:replace_original, new_file} ->
|
||||||
|
new_file
|
||||||
|
|
||||||
|
_ ->
|
||||||
|
existing_file
|
||||||
|
end
|
||||||
|
end)
|
||||||
|
end
|
||||||
|
|
||||||
defp apply_change(image, {:intensities, intensities}),
|
defp apply_change(image, {:intensities, intensities}),
|
||||||
do: ImageIntensities.create_image_intensity(image, intensities)
|
do: ImageIntensities.create_image_intensity(image, intensities)
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
defmodule Philomena.Native do
|
defmodule Philomena.Native do
|
||||||
@moduledoc false
|
@moduledoc false
|
||||||
|
|
||||||
use Rustler, otp_app: :philomena
|
use Rustler, otp_app: :philomena, crate: "philomena"
|
||||||
|
|
||||||
@spec markdown_to_html(String.t(), %{String.t() => String.t()}) :: String.t()
|
@spec markdown_to_html(String.t(), %{String.t() => String.t()}) :: String.t()
|
||||||
def markdown_to_html(_text, _replacements), do: :erlang.nif_error(:nif_not_loaded)
|
def markdown_to_html(_text, _replacements), do: :erlang.nif_error(:nif_not_loaded)
|
||||||
|
|
|
@ -4,13 +4,13 @@ defmodule Philomena.Scrapers.Tumblr do
|
||||||
@size_regex ~r|_(\d+)(\..+)\z|
|
@size_regex ~r|_(\d+)(\..+)\z|
|
||||||
@sizes [1280, 540, 500, 400, 250, 100, 75]
|
@sizes [1280, 540, 500, 400, 250, 100, 75]
|
||||||
@tumblr_ranges [
|
@tumblr_ranges [
|
||||||
InetCidr.parse("66.6.32.0/24"),
|
InetCidr.parse_cidr!("66.6.32.0/24"),
|
||||||
InetCidr.parse("66.6.33.0/24"),
|
InetCidr.parse_cidr!("66.6.33.0/24"),
|
||||||
InetCidr.parse("66.6.44.0/24"),
|
InetCidr.parse_cidr!("66.6.44.0/24"),
|
||||||
InetCidr.parse("74.114.152.0/24"),
|
InetCidr.parse_cidr!("74.114.152.0/24"),
|
||||||
InetCidr.parse("74.114.153.0/24"),
|
InetCidr.parse_cidr!("74.114.153.0/24"),
|
||||||
InetCidr.parse("74.114.154.0/24"),
|
InetCidr.parse_cidr!("74.114.154.0/24"),
|
||||||
InetCidr.parse("74.114.155.0/24")
|
InetCidr.parse_cidr!("74.114.155.0/24")
|
||||||
]
|
]
|
||||||
|
|
||||||
@spec can_handle?(URI.t(), String.t()) :: true | false
|
@spec can_handle?(URI.t(), String.t()) :: true | false
|
||||||
|
|
|
@ -1,8 +1,5 @@
|
||||||
defmodule Philomena.Scrapers.Twitter do
|
defmodule Philomena.Scrapers.Twitter do
|
||||||
@url_regex ~r|\Ahttps?://(?:mobile\.)?twitter.com/([A-Za-z\d_]+)/status/([\d]+)/?|
|
@url_regex ~r|\Ahttps?://(?:mobile\.)?(?:twitter\|x).com/([A-Za-z\d_]+)/status/([\d]+)/?|
|
||||||
@script_regex ~r|="(https://abs.twimg.com/responsive-web/client-web(?:-legacy)?/main\.[\da-z]+\.js)"|
|
|
||||||
@bearer_regex ~r|"(AAAAAAAAAAAAA[^"]*)"|
|
|
||||||
@activate_url "https://api.twitter.com/1.1/guest/activate.json"
|
|
||||||
|
|
||||||
@spec can_handle?(URI.t(), String.t()) :: true | false
|
@spec can_handle?(URI.t(), String.t()) :: true | false
|
||||||
def can_handle?(_uri, url) do
|
def can_handle?(_uri, url) do
|
||||||
|
@ -10,69 +7,27 @@ defmodule Philomena.Scrapers.Twitter do
|
||||||
end
|
end
|
||||||
|
|
||||||
def scrape(_uri, url) do
|
def scrape(_uri, url) do
|
||||||
api_response!(url)
|
[user, status_id] = Regex.run(@url_regex, url, capture: :all_but_first)
|
||||||
|> extract_data()
|
|
||||||
end
|
api_url = "https://api.fxtwitter.com/#{user}/status/#{status_id}"
|
||||||
|
{:ok, %Tesla.Env{status: 200, body: body}} = Philomena.Http.get(api_url)
|
||||||
|
|
||||||
|
json = Jason.decode!(body)
|
||||||
|
tweet = json["tweet"]
|
||||||
|
|
||||||
defp extract_data(tweet) do
|
|
||||||
images =
|
images =
|
||||||
tweet["entities"]["media"]
|
Enum.map(tweet["media"]["photos"], fn p ->
|
||||||
|> Enum.map(
|
%{
|
||||||
&%{
|
url: "#{p["url"]}:orig",
|
||||||
url: &1["media_url_https"] <> "?format=jpg&name=4096x4096",
|
camo_url: Camo.Image.image_url(p["url"])
|
||||||
camo_url: Camo.Image.image_url(&1["media_url_https"])
|
|
||||||
}
|
}
|
||||||
)
|
end)
|
||||||
|
|
||||||
%{
|
%{
|
||||||
source_url: tweet["url"],
|
source_url: tweet["url"],
|
||||||
author_name: tweet["user"],
|
author_name: tweet["author"]["screen_name"],
|
||||||
description: tweet["text"] || tweet["full_text"],
|
description: tweet["text"],
|
||||||
images: images
|
images: images
|
||||||
}
|
}
|
||||||
end
|
end
|
||||||
|
|
||||||
# We'd like to use the API anonymously. In order to do this, we need to
|
|
||||||
# extract the anonymous bearer token. Fortunately, this is pretty easy
|
|
||||||
# to identify in the minified mobile script source.
|
|
||||||
def api_response!(url) do
|
|
||||||
[user, status_id] = Regex.run(@url_regex, url, capture: :all_but_first)
|
|
||||||
|
|
||||||
page_url = "https://twitter.com/#{user}/status/#{status_id}"
|
|
||||||
|
|
||||||
api_url =
|
|
||||||
"https://api.twitter.com/2/timeline/conversation/#{status_id}.json?tweet_mode=extended"
|
|
||||||
|
|
||||||
url = "https://twitter.com/#{user}/status/#{status_id}"
|
|
||||||
|
|
||||||
{gt, bearer} =
|
|
||||||
Philomena.Http.get(page_url)
|
|
||||||
|> extract_guest_token_and_bearer()
|
|
||||||
|
|
||||||
{:ok, api_resp} =
|
|
||||||
Philomena.Http.get(api_url, [{"Authorization", "Bearer #{bearer}"}, {"x-guest-token", gt}])
|
|
||||||
|
|
||||||
api_resp
|
|
||||||
|> Map.get(:body)
|
|
||||||
|> Jason.decode!()
|
|
||||||
|> Map.get("globalObjects")
|
|
||||||
|> Map.get("tweets")
|
|
||||||
|> Map.get(status_id)
|
|
||||||
|> Map.put("user", user)
|
|
||||||
|> Map.put("url", url)
|
|
||||||
end
|
|
||||||
|
|
||||||
defp extract_guest_token_and_bearer({:ok, %Tesla.Env{body: page}}) do
|
|
||||||
[script | _] = Regex.run(@script_regex, page, capture: :all_but_first)
|
|
||||||
{:ok, %{body: body}} = Philomena.Http.get(script)
|
|
||||||
|
|
||||||
[bearer] = Regex.run(@bearer_regex, body, capture: :all_but_first)
|
|
||||||
|
|
||||||
{:ok, %{body: body}} =
|
|
||||||
Philomena.Http.post(@activate_url, nil, [{"Authorization", "Bearer #{bearer}"}])
|
|
||||||
|
|
||||||
gt = Map.fetch!(Jason.decode!(body), "guest_token")
|
|
||||||
|
|
||||||
{gt, bearer}
|
|
||||||
end
|
|
||||||
end
|
end
|
||||||
|
|
|
@ -24,6 +24,7 @@ defmodule Philomena.Search.Parser do
|
||||||
custom_fields: [],
|
custom_fields: [],
|
||||||
transforms: %{},
|
transforms: %{},
|
||||||
aliases: %{},
|
aliases: %{},
|
||||||
|
no_downcase_fields: [],
|
||||||
__fields__: %{},
|
__fields__: %{},
|
||||||
__data__: nil
|
__data__: nil
|
||||||
]
|
]
|
||||||
|
@ -193,14 +194,19 @@ defmodule Philomena.Search.Parser do
|
||||||
# Types which do not support ranges
|
# Types which do not support ranges
|
||||||
|
|
||||||
defp field_type(parser, [{LiteralParser, field_name}, range: :eq, literal: value]),
|
defp field_type(parser, [{LiteralParser, field_name}, range: :eq, literal: value]),
|
||||||
do: {:ok, {%{term: %{field(parser, field_name) => normalize_value(parser, value)}}, []}}
|
do:
|
||||||
|
{:ok,
|
||||||
|
{%{term: %{field(parser, field_name) => normalize_value(parser, field_name, value)}}, []}}
|
||||||
|
|
||||||
defp field_type(parser, [{LiteralParser, field_name}, range: :eq, literal: value, fuzz: fuzz]),
|
defp field_type(parser, [{LiteralParser, field_name}, range: :eq, literal: value, fuzz: fuzz]),
|
||||||
do:
|
do:
|
||||||
{:ok,
|
{:ok,
|
||||||
{%{
|
{%{
|
||||||
fuzzy: %{
|
fuzzy: %{
|
||||||
field(parser, field_name) => %{value: normalize_value(parser, value), fuzziness: fuzz}
|
field(parser, field_name) => %{
|
||||||
|
value: normalize_value(parser, field_name, value),
|
||||||
|
fuzziness: fuzz
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}, []}}
|
}, []}}
|
||||||
|
|
||||||
|
@ -208,21 +214,33 @@ defmodule Philomena.Search.Parser do
|
||||||
do: {:ok, {%{match_all: %{}}, []}}
|
do: {:ok, {%{match_all: %{}}, []}}
|
||||||
|
|
||||||
defp field_type(parser, [{LiteralParser, field_name}, range: :eq, wildcard: value]),
|
defp field_type(parser, [{LiteralParser, field_name}, range: :eq, wildcard: value]),
|
||||||
do: {:ok, {%{wildcard: %{field(parser, field_name) => normalize_value(parser, value)}}, []}}
|
do:
|
||||||
|
{:ok,
|
||||||
|
{%{wildcard: %{field(parser, field_name) => normalize_value(parser, field_name, value)}},
|
||||||
|
[]}}
|
||||||
|
|
||||||
defp field_type(parser, [{NgramParser, field_name}, range: :eq, literal: value]),
|
defp field_type(parser, [{NgramParser, field_name}, range: :eq, literal: value]),
|
||||||
do:
|
do:
|
||||||
{:ok, {%{match_phrase: %{field(parser, field_name) => normalize_value(parser, value)}}, []}}
|
{:ok,
|
||||||
|
{%{
|
||||||
|
match_phrase: %{field(parser, field_name) => normalize_value(parser, field_name, value)}
|
||||||
|
}, []}}
|
||||||
|
|
||||||
defp field_type(parser, [{NgramParser, field_name}, range: :eq, literal: value, fuzz: _fuzz]),
|
defp field_type(parser, [{NgramParser, field_name}, range: :eq, literal: value, fuzz: _fuzz]),
|
||||||
do:
|
do:
|
||||||
{:ok, {%{match_phrase: %{field(parser, field_name) => normalize_value(parser, value)}}, []}}
|
{:ok,
|
||||||
|
{%{
|
||||||
|
match_phrase: %{field(parser, field_name) => normalize_value(parser, field_name, value)}
|
||||||
|
}, []}}
|
||||||
|
|
||||||
defp field_type(_parser, [{NgramParser, _field_name}, range: :eq, wildcard: "*"]),
|
defp field_type(_parser, [{NgramParser, _field_name}, range: :eq, wildcard: "*"]),
|
||||||
do: {:ok, {%{match_all: %{}}, []}}
|
do: {:ok, {%{match_all: %{}}, []}}
|
||||||
|
|
||||||
defp field_type(parser, [{NgramParser, field_name}, range: :eq, wildcard: value]),
|
defp field_type(parser, [{NgramParser, field_name}, range: :eq, wildcard: value]),
|
||||||
do: {:ok, {%{wildcard: %{field(parser, field_name) => normalize_value(parser, value)}}, []}}
|
do:
|
||||||
|
{:ok,
|
||||||
|
{%{wildcard: %{field(parser, field_name) => normalize_value(parser, field_name, value)}},
|
||||||
|
[]}}
|
||||||
|
|
||||||
defp field_type(parser, [{BoolParser, field_name}, range: :eq, bool: value]),
|
defp field_type(parser, [{BoolParser, field_name}, range: :eq, bool: value]),
|
||||||
do: {:ok, {%{term: %{field(parser, field_name) => value}}, []}}
|
do: {:ok, {%{term: %{field(parser, field_name) => value}}, []}}
|
||||||
|
@ -271,10 +289,18 @@ defmodule Philomena.Search.Parser do
|
||||||
parser.aliases[field_name] || field_name
|
parser.aliases[field_name] || field_name
|
||||||
end
|
end
|
||||||
|
|
||||||
defp normalize_value(_parser, value) do
|
defp normalize_value(parser, field_name, value) do
|
||||||
value
|
value
|
||||||
|> String.trim()
|
|> String.trim()
|
||||||
|> String.downcase()
|
|> maybe_downcase(parser, field_name)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp maybe_downcase(value, parser, field_name) do
|
||||||
|
if Enum.member?(parser.no_downcase_fields, field_name) do
|
||||||
|
value
|
||||||
|
else
|
||||||
|
String.downcase(value)
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
# Flattens the child of a disjunction or conjunction to improve performance.
|
# Flattens the child of a disjunction or conjunction to improve performance.
|
||||||
|
|
|
@ -24,7 +24,10 @@ defmodule Philomena.TagChangeRevertWorker do
|
||||||
end
|
end
|
||||||
|
|
||||||
defp revert_all(queryable, attributes) do
|
defp revert_all(queryable, attributes) do
|
||||||
Batch.query_batches(queryable, [batch_size: 100], fn queryable ->
|
batch_size = attributes["batch_size"] || 100
|
||||||
|
attributes = Map.delete(attributes, "batch_size")
|
||||||
|
|
||||||
|
Batch.query_batches(queryable, [batch_size: batch_size], fn queryable ->
|
||||||
ids = Repo.all(select(queryable, [tc], tc.id))
|
ids = Repo.all(select(queryable, [tc], tc.id))
|
||||||
TagChanges.mass_revert(ids, cast_ip(atomify_keys(attributes)))
|
TagChanges.mass_revert(ids, cast_ip(atomify_keys(attributes)))
|
||||||
end)
|
end)
|
||||||
|
|
|
@ -30,7 +30,7 @@ defmodule PhilomenaWeb.Autocomplete.TagController do
|
||||||
|> Elasticsearch.search_records(preload(Tag, :aliased_tag))
|
|> Elasticsearch.search_records(preload(Tag, :aliased_tag))
|
||||||
|> Enum.map(&(&1.aliased_tag || &1))
|
|> Enum.map(&(&1.aliased_tag || &1))
|
||||||
|> Enum.uniq_by(& &1.id)
|
|> Enum.uniq_by(& &1.id)
|
||||||
|> Enum.filter(&(&1.images_count > 3))
|
|> Enum.filter(&(&1.images_count > 0))
|
||||||
|> Enum.sort_by(&(-&1.images_count))
|
|> Enum.sort_by(&(-&1.images_count))
|
||||||
|> Enum.take(5)
|
|> Enum.take(5)
|
||||||
|> Enum.map(&%{label: "#{&1.name} (#{&1.images_count})", value: &1.name})
|
|> Enum.map(&%{label: "#{&1.name} (#{&1.images_count})", value: &1.name})
|
||||||
|
|
|
@ -34,6 +34,7 @@ defmodule PhilomenaWeb.SearchController do
|
||||||
render(conn, "index.html",
|
render(conn, "index.html",
|
||||||
title: "Searching for #{params["q"]}",
|
title: "Searching for #{params["q"]}",
|
||||||
images: [],
|
images: [],
|
||||||
|
tags: [],
|
||||||
error: msg,
|
error: msg,
|
||||||
search_query: params["q"]
|
search_query: params["q"]
|
||||||
)
|
)
|
||||||
|
|
|
@ -7,15 +7,14 @@ defmodule PhilomenaWeb.TagChange.FullRevertController do
|
||||||
plug :verify_authorized
|
plug :verify_authorized
|
||||||
plug PhilomenaWeb.UserAttributionPlug
|
plug PhilomenaWeb.UserAttributionPlug
|
||||||
|
|
||||||
def create(conn, params) do
|
def create(%{assigns: %{attributes: attributes}} = conn, params) do
|
||||||
attributes = conn.assigns.attributes
|
|
||||||
|
|
||||||
attributes = %{
|
attributes = %{
|
||||||
ip: to_string(attributes[:ip]),
|
ip: to_string(attributes[:ip]),
|
||||||
fingerprint: attributes[:fingerprint],
|
fingerprint: attributes[:fingerprint],
|
||||||
referrer: attributes[:referrer],
|
referrer: attributes[:referrer],
|
||||||
user_agent: attributes[:referrer],
|
user_agent: attributes[:referrer],
|
||||||
user_id: attributes[:user].id
|
user_id: attributes[:user].id,
|
||||||
|
batch_size: attributes[:batch_size] || 100
|
||||||
}
|
}
|
||||||
|
|
||||||
case params do
|
case params do
|
||||||
|
|
|
@ -2,12 +2,11 @@ defmodule PhilomenaWeb.MarkdownRenderer do
|
||||||
alias Philomena.Markdown
|
alias Philomena.Markdown
|
||||||
alias Philomena.Images.Image
|
alias Philomena.Images.Image
|
||||||
alias Philomena.Repo
|
alias Philomena.Repo
|
||||||
|
alias PhilomenaWeb.ImageView
|
||||||
import Phoenix.HTML
|
import Phoenix.HTML
|
||||||
import Phoenix.HTML.Link
|
import Phoenix.HTML.Link
|
||||||
import Ecto.Query
|
import Ecto.Query
|
||||||
|
|
||||||
@image_view Module.concat(["PhilomenaWeb.ImageView"])
|
|
||||||
|
|
||||||
def render_one(item, conn) do
|
def render_one(item, conn) do
|
||||||
hd(render_collection([item], conn))
|
hd(render_collection([item], conn))
|
||||||
end
|
end
|
||||||
|
@ -79,28 +78,28 @@ defmodule PhilomenaWeb.MarkdownRenderer do
|
||||||
img != nil ->
|
img != nil ->
|
||||||
case group do
|
case group do
|
||||||
[_id, "p"] when not img.hidden_from_users and img.approved ->
|
[_id, "p"] when not img.hidden_from_users and img.approved ->
|
||||||
Phoenix.View.render(@image_view, "_image_target.html",
|
Phoenix.View.render(ImageView, "_image_target.html",
|
||||||
embed_display: true,
|
embed_display: true,
|
||||||
image: img,
|
image: img,
|
||||||
size: @image_view.select_version(img, :medium),
|
size: ImageView.select_version(img, :medium),
|
||||||
conn: conn
|
conn: conn
|
||||||
)
|
)
|
||||||
|> safe_to_string()
|
|> safe_to_string()
|
||||||
|
|
||||||
[_id, "t"] when not img.hidden_from_users and img.approved ->
|
[_id, "t"] when not img.hidden_from_users and img.approved ->
|
||||||
Phoenix.View.render(@image_view, "_image_target.html",
|
Phoenix.View.render(ImageView, "_image_target.html",
|
||||||
embed_display: true,
|
embed_display: true,
|
||||||
image: img,
|
image: img,
|
||||||
size: @image_view.select_version(img, :small),
|
size: ImageView.select_version(img, :small),
|
||||||
conn: conn
|
conn: conn
|
||||||
)
|
)
|
||||||
|> safe_to_string()
|
|> safe_to_string()
|
||||||
|
|
||||||
[_id, "s"] when not img.hidden_from_users and img.approved ->
|
[_id, "s"] when not img.hidden_from_users and img.approved ->
|
||||||
Phoenix.View.render(@image_view, "_image_target.html",
|
Phoenix.View.render(ImageView, "_image_target.html",
|
||||||
embed_display: true,
|
embed_display: true,
|
||||||
image: img,
|
image: img,
|
||||||
size: @image_view.select_version(img, :thumb_small),
|
size: ImageView.select_version(img, :thumb_small),
|
||||||
conn: conn
|
conn: conn
|
||||||
)
|
)
|
||||||
|> safe_to_string()
|
|> safe_to_string()
|
||||||
|
|
|
@ -32,7 +32,7 @@ defmodule PhilomenaWeb.AdminCountersPlug do
|
||||||
defp maybe_assign_admin_metrics(conn, _user, false), do: conn
|
defp maybe_assign_admin_metrics(conn, _user, false), do: conn
|
||||||
|
|
||||||
defp maybe_assign_admin_metrics(conn, user, true) do
|
defp maybe_assign_admin_metrics(conn, user, true) do
|
||||||
pending_approvals = Images.count_pending_approvals()
|
pending_approvals = Images.count_pending_approvals(user)
|
||||||
duplicate_reports = DuplicateReports.count_duplicate_reports(user)
|
duplicate_reports = DuplicateReports.count_duplicate_reports(user)
|
||||||
reports = Reports.count_reports(user)
|
reports = Reports.count_reports(user)
|
||||||
artist_links = ArtistLinks.count_artist_links(user)
|
artist_links = ArtistLinks.count_artist_links(user)
|
||||||
|
|
|
@ -1,2 +1,2 @@
|
||||||
#js-subscription-target
|
.js-subscription-target
|
||||||
' Error!
|
' Error!
|
|
@ -6,7 +6,7 @@ elixir:
|
||||||
unwatch_class = if @watching, do: "", else: "hidden"
|
unwatch_class = if @watching, do: "", else: "hidden"
|
||||||
|
|
||||||
= if @conn.assigns.current_user do
|
= if @conn.assigns.current_user do
|
||||||
span#js-subscription-target
|
span.js-subscription-target
|
||||||
a.js-subscription-link.media-box__header.media-box__header--channel.media-box__header--link href=watch_path class=watch_class data-remote="true" data-method="post"
|
a.js-subscription-link.media-box__header.media-box__header--channel.media-box__header--link href=watch_path class=watch_class data-remote="true" data-method="post"
|
||||||
i.fa.fa-bell>
|
i.fa.fa-bell>
|
||||||
span.hidden--mobile
|
span.hidden--mobile
|
||||||
|
|
|
@ -20,6 +20,7 @@ ul
|
||||||
h2 Actions
|
h2 Actions
|
||||||
ul
|
ul
|
||||||
li = link "Revert all tag changes", to: Routes.tag_change_full_revert_path(@conn, :create, [fingerprint: @fingerprint]), data: [confirm: "Are you really, really sure?", method: "create"]
|
li = link "Revert all tag changes", to: Routes.tag_change_full_revert_path(@conn, :create, [fingerprint: @fingerprint]), data: [confirm: "Are you really, really sure?", method: "create"]
|
||||||
|
li = link "...the button above didn't work (use carefully, this is resource-intensive)", to: Routes.tag_change_full_revert_path(@conn, :create, [fingerprint: @fingerprint, batch_size: 1]), data: [confirm: "Please confirm that you're aware that this may crash the site and are ready to take on the full wrath of the admins if it does so after you press this button.", method: "create"]
|
||||||
|
|
||||||
h4 Observed users
|
h4 Observed users
|
||||||
table.table
|
table.table
|
||||||
|
|
|
@ -6,7 +6,7 @@ elixir:
|
||||||
unwatch_class = if @watching, do: "", else: "hidden"
|
unwatch_class = if @watching, do: "", else: "hidden"
|
||||||
|
|
||||||
= if @conn.assigns.current_user do
|
= if @conn.assigns.current_user do
|
||||||
span#js-subscription-target
|
span.js-subscription-target
|
||||||
a.js-subscription-link href=watch_path class=watch_class data-remote="true" data-method="post"
|
a.js-subscription-link href=watch_path class=watch_class data-remote="true" data-method="post"
|
||||||
i.fa.fa-bell>
|
i.fa.fa-bell>
|
||||||
span.hidden--mobile
|
span.hidden--mobile
|
||||||
|
|
|
@ -1,2 +1,2 @@
|
||||||
#js-subscription-target
|
.js-subscription-target
|
||||||
' Error!
|
' Error!
|
|
@ -6,7 +6,7 @@ elixir:
|
||||||
unwatch_class = if @watching, do: "", else: "hidden"
|
unwatch_class = if @watching, do: "", else: "hidden"
|
||||||
|
|
||||||
= if @conn.assigns.current_user do
|
= if @conn.assigns.current_user do
|
||||||
span#js-subscription-target
|
span.js-subscription-target
|
||||||
a.js-subscription-link href=watch_path class=watch_class data-remote="true" data-method="post"
|
a.js-subscription-link href=watch_path class=watch_class data-remote="true" data-method="post"
|
||||||
i.fa.fa-bell>
|
i.fa.fa-bell>
|
||||||
span.hidden--mobile
|
span.hidden--mobile
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
.block
|
.js-sourcesauce
|
||||||
- has_sources = Enum.any?(@image.sources)
|
- has_sources = Enum.any?(@image.sources)
|
||||||
= form_for @changeset, Routes.image_source_path(@conn, :update, @image), [method: "put", class: "hidden", id: "source-form", data: [remote: "true"]], fn f ->
|
= form_for @changeset, Routes.image_source_path(@conn, :update, @image), [method: "put", class: "hidden", id: "source-form", data: [remote: "true"]], fn f ->
|
||||||
= if can?(@conn, :edit_metadata, @image) and !@conn.assigns.current_ban do
|
= if can?(@conn, :edit_metadata, @image) and !@conn.assigns.current_ban do
|
||||||
|
|
|
@ -1,2 +1,2 @@
|
||||||
#js-subscription-target
|
.js-subscription-target
|
||||||
' Error!
|
' Error!
|
|
@ -6,7 +6,7 @@ elixir:
|
||||||
unwatch_class = if @watching, do: "", else: "hidden"
|
unwatch_class = if @watching, do: "", else: "hidden"
|
||||||
|
|
||||||
= if @conn.assigns.current_user do
|
= if @conn.assigns.current_user do
|
||||||
span#js-subscription-target
|
span.js-subscription-target
|
||||||
a.js-subscription-link href=watch_path class=watch_class data-remote="true" data-method="post"
|
a.js-subscription-link href=watch_path class=watch_class data-remote="true" data-method="post"
|
||||||
i.fa.fa-bell
|
i.fa.fa-bell
|
||||||
span.hidden--phone< Subscribe
|
span.hidden--phone< Subscribe
|
||||||
|
|
|
@ -20,6 +20,7 @@ ul
|
||||||
h2 Actions
|
h2 Actions
|
||||||
ul
|
ul
|
||||||
li = link "Revert all tag changes", to: Routes.tag_change_full_revert_path(@conn, :create, [ip: to_string(@ip)]), data: [confirm: "Are you really, really sure?", method: "create"]
|
li = link "Revert all tag changes", to: Routes.tag_change_full_revert_path(@conn, :create, [ip: to_string(@ip)]), data: [confirm: "Are you really, really sure?", method: "create"]
|
||||||
|
li = link "...the button above didn't work (use carefully, this is resource-intensive)", to: Routes.tag_change_full_revert_path(@conn, :create, [ip: to_string(@ip), batch_size: 1]), data: [confirm: "Please confirm that you're aware that this may crash the site and are ready to take on the full wrath of the admins if it does so after you press this button.", method: "create"]
|
||||||
|
|
||||||
h4 Observed users
|
h4 Observed users
|
||||||
table.table
|
table.table
|
||||||
|
|
|
@ -20,9 +20,13 @@ p
|
||||||
h3 API Key
|
h3 API Key
|
||||||
p
|
p
|
||||||
' Your API key is
|
' Your API key is
|
||||||
|
#api-key-button>
|
||||||
|
code>
|
||||||
|
= link("Click to show", to: "#", data: [click_show: "#api-key", click_hide: "#api-key-button"])
|
||||||
|
#api-key.hidden>
|
||||||
code>
|
code>
|
||||||
= @current_user.authentication_token
|
= @current_user.authentication_token
|
||||||
' - you can use this to allow API consumers to access your account.
|
p You can use this to allow API consumers to access your account.
|
||||||
p
|
p
|
||||||
' Avoid sharing this key with others, as it could be used to compromise
|
' Avoid sharing this key with others, as it could be used to compromise
|
||||||
' your account.
|
' your account.
|
||||||
|
|
|
@ -21,6 +21,7 @@ h1 Search
|
||||||
a data-search-add="comment_count.gt:20" data-search-select-last="2" data-search-show-help="numeric" Number of comments
|
a data-search-add="comment_count.gt:20" data-search-select-last="2" data-search-show-help="numeric" Number of comments
|
||||||
a data-search-add="uploader:k_a" data-search-select-last="3" data-search-show-help="literal" Uploader
|
a data-search-add="uploader:k_a" data-search-select-last="3" data-search-show-help="literal" Uploader
|
||||||
a data-search-add="original_format:gif" data-search-select-last="3" data-search-show-help="literal" File extension
|
a data-search-add="original_format:gif" data-search-select-last="3" data-search-show-help="literal" File extension
|
||||||
|
a data-search-add="file_name:tumblr_*" data-search-select-last="8" data-search-show-help="literal" Original file name
|
||||||
a data-search-add="mime_type:image/jpeg" data-search-select-last="10" data-search-show-help="literal" MIME type
|
a data-search-add="mime_type:image/jpeg" data-search-select-last="10" data-search-show-help="literal" MIME type
|
||||||
a data-search-add="source_url:*deviantart.com*" data-search-select-last="16" data-search-show-help="literal" Image source URL
|
a data-search-add="source_url:*deviantart.com*" data-search-select-last="16" data-search-show-help="literal" Image source URL
|
||||||
a data-search-add="width:1920" data-search-select-last="4" data-search-show-help="numeric" Image width
|
a data-search-add="width:1920" data-search-select-last="4" data-search-show-help="numeric" Image width
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
= cond do
|
= cond do
|
||||||
- Enum.any?(@images) ->
|
- Enum.any?(@images) or override_display(@tags) ->
|
||||||
= render PhilomenaWeb.ImageView, "index.html", conn: @conn, tags: @tags, images: @images, header: "Searching for #{@conn.params["q"]}", route: fn p -> Routes.search_path(@conn, :index, p) end, scope: scope(@conn)
|
= render PhilomenaWeb.ImageView, "index.html", conn: @conn, tags: @tags, images: @images, header: "Searching for #{@conn.params["q"]}", route: fn p -> Routes.search_path(@conn, :index, p) end, scope: scope(@conn)
|
||||||
|
|
||||||
- assigns[:error] ->
|
- assigns[:error] ->
|
||||||
|
|
|
@ -47,6 +47,10 @@ h1 Content Settings
|
||||||
p
|
p
|
||||||
' RSS feed link (for Newsblur, RSSOwl, Thunderbird, etc.):
|
' RSS feed link (for Newsblur, RSSOwl, Thunderbird, etc.):
|
||||||
br
|
br
|
||||||
|
#rss-feed-button>
|
||||||
|
code>
|
||||||
|
= link("Click to show", to: "#", data: [click_show: "#rss-link", click_hide: "#rss-feed-button"])
|
||||||
|
#rss-link.hidden
|
||||||
= url_input f, :subscribe_url, value: Routes.api_rss_watched_url(@conn, :index, key: @conn.assigns.current_user.authentication_token), class: "input input--wide"
|
= url_input f, :subscribe_url, value: Routes.api_rss_watched_url(@conn, :index, key: @conn.assigns.current_user.authentication_token), class: "input input--wide"
|
||||||
br
|
br
|
||||||
' Do not share this URL with anyone, it may allow an attacker to compromise your account.
|
' Do not share this URL with anyone, it may allow an attacker to compromise your account.
|
||||||
|
|
|
@ -6,7 +6,7 @@ elixir:
|
||||||
unwatch_class = if @watching, do: "", else: "hidden"
|
unwatch_class = if @watching, do: "", else: "hidden"
|
||||||
|
|
||||||
= if @conn.assigns.current_user do
|
= if @conn.assigns.current_user do
|
||||||
span#js-subscription-target
|
span.js-subscription-target
|
||||||
a.js-subscription-link href=watch_path class=watch_class data-remote="true" data-method="post"
|
a.js-subscription-link href=watch_path class=watch_class data-remote="true" data-method="post"
|
||||||
i.fa.fa-bell>
|
i.fa.fa-bell>
|
||||||
span.hidden--mobile
|
span.hidden--mobile
|
||||||
|
|
|
@ -149,7 +149,4 @@ defmodule PhilomenaWeb.DuplicateReportView do
|
||||||
|
|
||||||
defp proper_subset?(set1, set2),
|
defp proper_subset?(set1, set2),
|
||||||
do: MapSet.subset?(set1, set2) and not MapSet.equal?(set1, set2)
|
do: MapSet.subset?(set1, set2) and not MapSet.equal?(set1, set2)
|
||||||
|
|
||||||
defp uri_host(nil), do: nil
|
|
||||||
defp uri_host(str), do: URI.parse(str).host
|
|
||||||
end
|
end
|
||||||
|
|
|
@ -305,16 +305,40 @@ defmodule PhilomenaWeb.ImageView do
|
||||||
uri = URI.parse(source)
|
uri = URI.parse(source)
|
||||||
|
|
||||||
case uri.host do
|
case uri.host do
|
||||||
u when u in ["twitter.com", "www.twitter.com", "pbs.twimg.com", "twimg.com"] ->
|
u
|
||||||
|
when u in [
|
||||||
|
"twitter.com",
|
||||||
|
"www.twitter.com",
|
||||||
|
"mobile.twitter.com",
|
||||||
|
"x.com",
|
||||||
|
"mobile.x.com",
|
||||||
|
"pbs.twimg.com",
|
||||||
|
"twimg.com"
|
||||||
|
] ->
|
||||||
"fab fa-twitter"
|
"fab fa-twitter"
|
||||||
|
|
||||||
u when u in ["deviantart.com", "www.deviantart.com", "sta.sh", "www.sta.sh"] ->
|
u
|
||||||
|
when u in [
|
||||||
|
"deviantart.com",
|
||||||
|
"sta.sh",
|
||||||
|
"www.sta.sh",
|
||||||
|
"images-wixmp-ed30a86b8c4ca887773594c2.wixmp.com",
|
||||||
|
"wixmp-ed30a86b8c4ca887773594c2.wixmp.com",
|
||||||
|
"api-da.wixmp.com",
|
||||||
|
"fav.me"
|
||||||
|
] ->
|
||||||
"fab fa-deviantart"
|
"fab fa-deviantart"
|
||||||
|
|
||||||
u when u in ["cdn.discordapp.com", "discordapp.com", "discord.com"] ->
|
u
|
||||||
|
when u in [
|
||||||
|
"cdn.discordapp.com",
|
||||||
|
"discordapp.com",
|
||||||
|
"discord.com",
|
||||||
|
"discord.gg"
|
||||||
|
] ->
|
||||||
"fab fa-discord"
|
"fab fa-discord"
|
||||||
|
|
||||||
u when u in ["youtube.com", "www.youtube.com"] ->
|
u when u in ["youtube.com", "www.youtube.com", "youtu.be", "m.youtube.com"] ->
|
||||||
"fab fa-youtube"
|
"fab fa-youtube"
|
||||||
|
|
||||||
u when u in ["pillowfort.social", "www.pillowfort.social"] ->
|
u when u in ["pillowfort.social", "www.pillowfort.social"] ->
|
||||||
|
@ -323,25 +347,53 @@ defmodule PhilomenaWeb.ImageView do
|
||||||
u when u in ["vk.com", "vk.ru"] ->
|
u when u in ["vk.com", "vk.ru"] ->
|
||||||
"fab fa-vk"
|
"fab fa-vk"
|
||||||
|
|
||||||
u when u in ["pixiv.net", "www.pixiv.net", "artfight.net", "www.artfight.net"] ->
|
u
|
||||||
|
when u in ["artfight.net", "www.artfight.net", "newgrounds.com"] ->
|
||||||
"fa fa-paintbrush"
|
"fa fa-paintbrush"
|
||||||
|
|
||||||
|
u when u in ["pixiv.net", "www.pixiv.net", "pixiv.me"] ->
|
||||||
|
"fab fa-pixiv"
|
||||||
|
|
||||||
u when u in ["patreon.com", "www.patreon.com"] ->
|
u when u in ["patreon.com", "www.patreon.com"] ->
|
||||||
"fab fa-patreon"
|
"fab fa-patreon"
|
||||||
|
|
||||||
u when u in ["ych.art", "ych.commishes.com", "commishes.com"] ->
|
u
|
||||||
|
when u in [
|
||||||
|
"ych.art",
|
||||||
|
"cdn.ych.art",
|
||||||
|
"ych.commishes.com",
|
||||||
|
"commishes.com",
|
||||||
|
"portfolio.commishes.com",
|
||||||
|
"commishes.io"
|
||||||
|
] ->
|
||||||
"fa fa-palette"
|
"fa fa-palette"
|
||||||
|
|
||||||
|
u
|
||||||
|
when u in ["ko-fi.com", "storage.ko-fi.com", "buymeacoffee.com", "www.buymeacoffee.com"] ->
|
||||||
|
"fa fa-coffee"
|
||||||
|
|
||||||
u when u in ["artstation.com", "www.artstation.com"] ->
|
u when u in ["artstation.com", "www.artstation.com"] ->
|
||||||
"fab fa-artstation"
|
"fab fa-artstation"
|
||||||
|
|
||||||
u when u in ["instagram.com", "www.instagram.com"] ->
|
u when u in ["instagram.com", "www.instagram.com"] ->
|
||||||
"fab fa-instagram"
|
"fab fa-instagram"
|
||||||
|
|
||||||
u when u in ["reddit.com", "www.reddit.com"] ->
|
u when u in ["t.me"] ->
|
||||||
|
"fab fa-telegram"
|
||||||
|
|
||||||
|
u
|
||||||
|
when u in [
|
||||||
|
"reddit.com",
|
||||||
|
"www.reddit.com",
|
||||||
|
"old.reddit.com",
|
||||||
|
"redd.it",
|
||||||
|
"i.redd.it",
|
||||||
|
"v.redd.it",
|
||||||
|
"preview.redd.it"
|
||||||
|
] ->
|
||||||
"fab fa-reddit"
|
"fab fa-reddit"
|
||||||
|
|
||||||
u when u in ["facebook.com", "www.facebook.com", "fb.me", "www.fb.me"] ->
|
u when u in ["facebook.com", "www.facebook.com", "fb.me", "www.fb.me", "m.facebook.com"] ->
|
||||||
"fab fa-facebook"
|
"fab fa-facebook"
|
||||||
|
|
||||||
u when u in ["tiktok.com", "www.tiktok.com"] ->
|
u when u in ["tiktok.com", "www.tiktok.com"] ->
|
||||||
|
@ -350,11 +402,14 @@ defmodule PhilomenaWeb.ImageView do
|
||||||
u
|
u
|
||||||
when u in [
|
when u in [
|
||||||
"furaffinity.net",
|
"furaffinity.net",
|
||||||
"www.furaffinity.net",
|
|
||||||
"furbooru.org",
|
"furbooru.org",
|
||||||
"inkbunny.net",
|
"inkbunny.net",
|
||||||
"e621.net",
|
"e621.net",
|
||||||
"e926.net"
|
"e926.net",
|
||||||
|
"sofurry.com",
|
||||||
|
"weasyl.com",
|
||||||
|
"www.weasyl.com",
|
||||||
|
"cdn.weasyl.com"
|
||||||
] ->
|
] ->
|
||||||
"fa fa-paw"
|
"fa fa-paw"
|
||||||
|
|
||||||
|
@ -373,17 +428,67 @@ defmodule PhilomenaWeb.ImageView do
|
||||||
"vulpine.club",
|
"vulpine.club",
|
||||||
"yiff.life",
|
"yiff.life",
|
||||||
"socel.net",
|
"socel.net",
|
||||||
"octodon.social"
|
"octodon.social",
|
||||||
|
"filly.social",
|
||||||
|
"pone.social",
|
||||||
|
"hooves.social"
|
||||||
] ->
|
] ->
|
||||||
"fab fa-mastodon"
|
"fab fa-mastodon"
|
||||||
|
|
||||||
|
u
|
||||||
|
when u in ["tumbex.com", "www.tumbex.com", "tumblr.com", "tmblr.co"] ->
|
||||||
|
"fab fa-tumblr"
|
||||||
|
|
||||||
|
u when u in ["flickr.com", "www.flickr.com"] ->
|
||||||
|
"fab fa-flickr"
|
||||||
|
|
||||||
|
u when u in ["etsy.com", "www.etsy.com"] ->
|
||||||
|
"fab fa-etsy"
|
||||||
|
|
||||||
link ->
|
link ->
|
||||||
cond do
|
cond do
|
||||||
Enum.member?(site_domains, link) -> "favicon-home"
|
Enum.member?(site_domains, link) ->
|
||||||
String.contains?(link, "tumblr") -> "fab fa-tumblr"
|
"favicon-home"
|
||||||
String.contains?(link, "deviantart") -> "fab fa-deviantart"
|
|
||||||
String.contains?(link, "sofurry") -> "fa fa-paw"
|
String.ends_with?(link, ".tumblr.com") ->
|
||||||
true -> "fa fa-link"
|
"fab fa-tumblr"
|
||||||
|
|
||||||
|
String.ends_with?(link, ".deviantart.com") or String.ends_with?(link, ".deviantart.net") ->
|
||||||
|
"fab fa-deviantart"
|
||||||
|
|
||||||
|
String.ends_with?(link, ".furaffinity.net") or String.ends_with?(link, ".sofurry.com") or
|
||||||
|
String.ends_with?(link, ".facdn.net") ->
|
||||||
|
"fa fa-paw"
|
||||||
|
|
||||||
|
String.ends_with?(link, ".userapi.com") or String.ends_with?(link, ".vk.me") ->
|
||||||
|
"fab fa-vk"
|
||||||
|
|
||||||
|
String.ends_with?(link, ".patreonusercontent.com") ->
|
||||||
|
"fab fa-patreon"
|
||||||
|
|
||||||
|
String.ends_with?(link, ".discordapp.net") ->
|
||||||
|
"fab fa-discord"
|
||||||
|
|
||||||
|
String.ends_with?(link, ".ytimg.com") ->
|
||||||
|
"fab fa-youtube"
|
||||||
|
|
||||||
|
String.ends_with?(link, ".fbcdn.net") ->
|
||||||
|
"fab fa-facebook"
|
||||||
|
|
||||||
|
String.ends_with?(link, ".newgrounds.com") or String.ends_with?(link, ".ngfiles.com") ->
|
||||||
|
"fa fa-paintbrush"
|
||||||
|
|
||||||
|
String.ends_with?(link, ".apple.com") ->
|
||||||
|
"fab fa-apple"
|
||||||
|
|
||||||
|
String.ends_with?(link, ".staticflickr.com") ->
|
||||||
|
"fab fa-flickr"
|
||||||
|
|
||||||
|
String.ends_with?(link, ".etsystatic.com") ->
|
||||||
|
"fab fa-etsy"
|
||||||
|
|
||||||
|
true ->
|
||||||
|
"fa fa-link"
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -3,4 +3,10 @@ defmodule PhilomenaWeb.SearchView do
|
||||||
|
|
||||||
def scope(conn), do: PhilomenaWeb.ImageScope.scope(conn)
|
def scope(conn), do: PhilomenaWeb.ImageScope.scope(conn)
|
||||||
def hides_images?(conn), do: can?(conn, :hide, %Philomena.Images.Image{})
|
def hides_images?(conn), do: can?(conn, :hide, %Philomena.Images.Image{})
|
||||||
|
|
||||||
|
def override_display([{_tag, _description, dnp_entries}]) do
|
||||||
|
Enum.any?(dnp_entries)
|
||||||
|
end
|
||||||
|
|
||||||
|
def override_display(_), do: false
|
||||||
end
|
end
|
||||||
|
|
20
mix.exs
20
mix.exs
|
@ -11,8 +11,7 @@ defmodule Philomena.MixProject do
|
||||||
start_permanent: Mix.env() == :prod,
|
start_permanent: Mix.env() == :prod,
|
||||||
aliases: aliases(),
|
aliases: aliases(),
|
||||||
deps: deps(),
|
deps: deps(),
|
||||||
dialyzer: [plt_add_apps: [:mix]],
|
dialyzer: [plt_add_apps: [:mix]]
|
||||||
rustler_crates: [philomena: []]
|
|
||||||
]
|
]
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -48,7 +47,7 @@ defmodule Philomena.MixProject do
|
||||||
{:ranch, "~> 2.1", override: true},
|
{:ranch, "~> 2.1", override: true},
|
||||||
{:plug_cowboy, "~> 2.6"},
|
{:plug_cowboy, "~> 2.6"},
|
||||||
{:slime, "~> 1.3.0",
|
{:slime, "~> 1.3.0",
|
||||||
github: "liamwhite/slime", ref: "cd4ced179197daa596bbb9d313f3808103c9624e", override: true},
|
github: "liamwhite/slime", ref: "4c8ad4e9e9dcc792f4db769a9ef2ad7d6eba8f31", override: true},
|
||||||
{:phoenix_slime, "~> 0.13",
|
{:phoenix_slime, "~> 0.13",
|
||||||
github: "slime-lang/phoenix_slime", ref: "8944de91654d6fcf6bdcc0aed6b8647fe3398241"},
|
github: "slime-lang/phoenix_slime", ref: "8944de91654d6fcf6bdcc0aed6b8647fe3398241"},
|
||||||
{:phoenix_pubsub_redis, "~> 3.0"},
|
{:phoenix_pubsub_redis, "~> 3.0"},
|
||||||
|
@ -58,14 +57,12 @@ defmodule Philomena.MixProject do
|
||||||
{:secure_compare, "~> 0.1"},
|
{:secure_compare, "~> 0.1"},
|
||||||
{:elastix, "~> 0.10"},
|
{:elastix, "~> 0.10"},
|
||||||
{:nimble_parsec, "~> 1.2"},
|
{:nimble_parsec, "~> 1.2"},
|
||||||
{:canary, "~> 1.1"},
|
|
||||||
{:scrivener_ecto, "~> 2.7"},
|
{:scrivener_ecto, "~> 2.7"},
|
||||||
{:pbkdf2, ">= 0.0.0",
|
{:pbkdf2, ">= 0.0.0",
|
||||||
github: "basho/erlang-pbkdf2", ref: "7e9bd5fcd3cc3062159e4c9214bb628aa6feb5ca"},
|
github: "basho/erlang-pbkdf2", ref: "7e9bd5fcd3cc3062159e4c9214bb628aa6feb5ca"},
|
||||||
{:qrcode, "~> 0.1"},
|
{:qrcode, "~> 0.1"},
|
||||||
{:redix, "~> 1.2"},
|
{:redix, "~> 1.2"},
|
||||||
{:bamboo, "~> 2.2"},
|
{:bamboo, "~> 2.2"},
|
||||||
{:bamboo_smtp, "~> 4.2"},
|
|
||||||
{:remote_ip, "~> 1.1"},
|
{:remote_ip, "~> 1.1"},
|
||||||
{:briefly, "~> 0.4"},
|
{:briefly, "~> 0.4"},
|
||||||
{:tesla, "~> 1.5"},
|
{:tesla, "~> 1.5"},
|
||||||
|
@ -78,6 +75,13 @@ defmodule Philomena.MixProject do
|
||||||
{:sweet_xml, "~> 0.7"},
|
{:sweet_xml, "~> 0.7"},
|
||||||
{:inet_cidr, "~> 1.0"},
|
{:inet_cidr, "~> 1.0"},
|
||||||
|
|
||||||
|
# SMTP
|
||||||
|
{:tls_certificate_check, "~> 1.21"},
|
||||||
|
{:bamboo_smtp, "~> 4.2",
|
||||||
|
github: "botsquad/bamboo_smtp",
|
||||||
|
ref: "c630ccde40070deffc7d78ee6e4a08c9199f145b",
|
||||||
|
override: true},
|
||||||
|
|
||||||
# Markdown
|
# Markdown
|
||||||
{:rustler, "~> 0.27"},
|
{:rustler, "~> 0.27"},
|
||||||
|
|
||||||
|
@ -95,7 +99,11 @@ defmodule Philomena.MixProject do
|
||||||
|
|
||||||
# Fixes for OTP/25
|
# Fixes for OTP/25
|
||||||
{:neotoma, "~> 1.7.3", manager: :rebar3, override: true},
|
{:neotoma, "~> 1.7.3", manager: :rebar3, override: true},
|
||||||
{:hut, "~> 1.4.0", manager: :rebar3, override: true}
|
{:hut, "~> 1.4.0", manager: :rebar3, override: true},
|
||||||
|
|
||||||
|
# Fixes for Elixir v1.15+
|
||||||
|
{:canary, "~> 1.1",
|
||||||
|
github: "marcinkoziej/canary", ref: "704debde7a2c0600f78c687807884bf37c45bd79"}
|
||||||
]
|
]
|
||||||
end
|
end
|
||||||
|
|
||||||
|
|
77
mix.lock
77
mix.lock
|
@ -1,89 +1,90 @@
|
||||||
%{
|
%{
|
||||||
"bamboo": {:hex, :bamboo, "2.2.0", "f10a406d2b7f5123eb1f02edfa043c259db04b47ab956041f279eaac776ef5ce", [:mix], [{:hackney, ">= 1.15.2", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:mime, "~> 1.4", [hex: :mime, repo: "hexpm", optional: false]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "8c3b14ba7d2f40cb4be04128ed1e2aff06d91d9413d38bafb4afccffa3ade4fc"},
|
"bamboo": {:hex, :bamboo, "2.2.0", "f10a406d2b7f5123eb1f02edfa043c259db04b47ab956041f279eaac776ef5ce", [:mix], [{:hackney, ">= 1.15.2", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:mime, "~> 1.4", [hex: :mime, repo: "hexpm", optional: false]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "8c3b14ba7d2f40cb4be04128ed1e2aff06d91d9413d38bafb4afccffa3ade4fc"},
|
||||||
"bamboo_smtp": {:hex, :bamboo_smtp, "4.2.2", "e9f57a2300df9cb496c48751bd7668a86a2b89aa2e79ccaa34e0c46a5f64c3ae", [:mix], [{:bamboo, "~> 2.2.0", [hex: :bamboo, repo: "hexpm", optional: false]}, {:gen_smtp, "~> 1.2.0", [hex: :gen_smtp, repo: "hexpm", optional: false]}], "hexpm", "28cac2ec8adaae02aed663bf68163992891a3b44cfd7ada0bebe3e09bed7207f"},
|
"bamboo_smtp": {:git, "https://github.com/botsquad/bamboo_smtp.git", "c630ccde40070deffc7d78ee6e4a08c9199f145b", [ref: "c630ccde40070deffc7d78ee6e4a08c9199f145b"]},
|
||||||
"bcrypt_elixir": {:hex, :bcrypt_elixir, "3.1.0", "0b110a9a6c619b19a7f73fa3004aa11d6e719a67e672d1633dc36b6b2290a0f7", [:make, :mix], [{:comeonin, "~> 5.3", [hex: :comeonin, repo: "hexpm", optional: false]}, {:elixir_make, "~> 0.6", [hex: :elixir_make, repo: "hexpm", optional: false]}], "hexpm", "2ad2acb5a8bc049e8d5aa267802631912bb80d5f4110a178ae7999e69dca1bf7"},
|
"bcrypt_elixir": {:hex, :bcrypt_elixir, "3.1.0", "0b110a9a6c619b19a7f73fa3004aa11d6e719a67e672d1633dc36b6b2290a0f7", [:make, :mix], [{:comeonin, "~> 5.3", [hex: :comeonin, repo: "hexpm", optional: false]}, {:elixir_make, "~> 0.6", [hex: :elixir_make, repo: "hexpm", optional: false]}], "hexpm", "2ad2acb5a8bc049e8d5aa267802631912bb80d5f4110a178ae7999e69dca1bf7"},
|
||||||
"briefly": {:hex, :briefly, "0.4.1", "c90c0511e64bde1fe8da7e244e14acf5bc78c3f6d033db778205e1fa2feafa5c", [:mix], [], "hexpm", "fc0cafcd19c4ed0d0906ae5cf627cc6ce76b8652a160c6bde0ab9d77304ebb0a"},
|
"briefly": {:hex, :briefly, "0.5.1", "ee10d48da7f79ed2aebdc3e536d5f9a0c3e36ff76c0ad0d4254653a152b13a8a", [:mix], [], "hexpm", "bd684aa92ad8b7b4e0d92c31200993c4bc1469fc68cd6d5f15144041bd15cb57"},
|
||||||
"bunt": {:hex, :bunt, "0.2.1", "e2d4792f7bc0ced7583ab54922808919518d0e57ee162901a16a1b6664ef3b14", [:mix], [], "hexpm", "a330bfb4245239787b15005e66ae6845c9cd524a288f0d141c148b02603777a5"},
|
"bunt": {:hex, :bunt, "1.0.0", "081c2c665f086849e6d57900292b3a161727ab40431219529f13c4ddcf3e7a44", [:mix], [], "hexpm", "dc5f86aa08a5f6fa6b8096f0735c4e76d54ae5c9fa2c143e5a1fc7c1cd9bb6b5"},
|
||||||
"canada": {:hex, :canada, "1.0.2", "040e4c47609b0a67d5773ac1fbe5e99f840cef173d69b739beda7c98453e0770", [:mix], [], "hexpm", "4269f74153fe89583fe50bd4d5de57bfe01f31258a6b676d296f3681f1483c68"},
|
"canada": {:hex, :canada, "1.0.2", "040e4c47609b0a67d5773ac1fbe5e99f840cef173d69b739beda7c98453e0770", [:mix], [], "hexpm", "4269f74153fe89583fe50bd4d5de57bfe01f31258a6b676d296f3681f1483c68"},
|
||||||
"canary": {:hex, :canary, "1.1.1", "4138d5e05db8497c477e4af73902eb9ae06e49dceaa13c2dd9f0b55525ded48b", [:mix], [{:canada, "~> 1.0.1", [hex: :canada, repo: "hexpm", optional: false]}, {:ecto, ">= 1.1.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "f348d9848693c830a65b707bba9e4dfdd6434e8c356a8d4477e4535afb0d653b"},
|
"canary": {:git, "https://github.com/marcinkoziej/canary.git", "704debde7a2c0600f78c687807884bf37c45bd79", [ref: "704debde7a2c0600f78c687807884bf37c45bd79"]},
|
||||||
"castore": {:hex, :castore, "1.0.3", "7130ba6d24c8424014194676d608cb989f62ef8039efd50ff4b3f33286d06db8", [:mix], [], "hexpm", "680ab01ef5d15b161ed6a95449fac5c6b8f60055677a8e79acf01b27baa4390b"},
|
"castore": {:hex, :castore, "1.0.5", "9eeebb394cc9a0f3ae56b813459f990abb0a3dedee1be6b27fdb50301930502f", [:mix], [], "hexpm", "8d7c597c3e4a64c395980882d4bca3cebb8d74197c590dc272cfd3b6a6310578"},
|
||||||
"certifi": {:hex, :certifi, "2.12.0", "2d1cca2ec95f59643862af91f001478c9863c2ac9cb6e2f89780bfd8de987329", [:rebar3], [], "hexpm", "ee68d85df22e554040cdb4be100f33873ac6051387baf6a8f6ce82272340ff1c"},
|
"certifi": {:hex, :certifi, "2.12.0", "2d1cca2ec95f59643862af91f001478c9863c2ac9cb6e2f89780bfd8de987329", [:rebar3], [], "hexpm", "ee68d85df22e554040cdb4be100f33873ac6051387baf6a8f6ce82272340ff1c"},
|
||||||
"combine": {:hex, :combine, "0.10.0", "eff8224eeb56498a2af13011d142c5e7997a80c8f5b97c499f84c841032e429f", [:mix], [], "hexpm", "1b1dbc1790073076580d0d1d64e42eae2366583e7aecd455d1215b0d16f2451b"},
|
"combine": {:hex, :combine, "0.10.0", "eff8224eeb56498a2af13011d142c5e7997a80c8f5b97c499f84c841032e429f", [:mix], [], "hexpm", "1b1dbc1790073076580d0d1d64e42eae2366583e7aecd455d1215b0d16f2451b"},
|
||||||
"comeonin": {:hex, :comeonin, "5.4.0", "246a56ca3f41d404380fc6465650ddaa532c7f98be4bda1b4656b3a37cc13abe", [:mix], [], "hexpm", "796393a9e50d01999d56b7b8420ab0481a7538d0caf80919da493b4a6e51faf1"},
|
"comeonin": {:hex, :comeonin, "5.4.0", "246a56ca3f41d404380fc6465650ddaa532c7f98be4bda1b4656b3a37cc13abe", [:mix], [], "hexpm", "796393a9e50d01999d56b7b8420ab0481a7538d0caf80919da493b4a6e51faf1"},
|
||||||
"cowboy": {:hex, :cowboy, "2.10.0", "ff9ffeff91dae4ae270dd975642997afe2a1179d94b1887863e43f681a203e26", [:make, :rebar3], [{:cowlib, "2.12.1", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "1.8.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "3afdccb7183cc6f143cb14d3cf51fa00e53db9ec80cdcd525482f5e99bc41d6b"},
|
"cowboy": {:hex, :cowboy, "2.10.0", "ff9ffeff91dae4ae270dd975642997afe2a1179d94b1887863e43f681a203e26", [:make, :rebar3], [{:cowlib, "2.12.1", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "1.8.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "3afdccb7183cc6f143cb14d3cf51fa00e53db9ec80cdcd525482f5e99bc41d6b"},
|
||||||
"cowboy_telemetry": {:hex, :cowboy_telemetry, "0.4.0", "f239f68b588efa7707abce16a84d0d2acf3a0f50571f8bb7f56a15865aae820c", [:rebar3], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7d98bac1ee4565d31b62d59f8823dfd8356a169e7fcbb83831b8a5397404c9de"},
|
"cowboy_telemetry": {:hex, :cowboy_telemetry, "0.4.0", "f239f68b588efa7707abce16a84d0d2acf3a0f50571f8bb7f56a15865aae820c", [:rebar3], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7d98bac1ee4565d31b62d59f8823dfd8356a169e7fcbb83831b8a5397404c9de"},
|
||||||
"cowlib": {:hex, :cowlib, "2.12.1", "a9fa9a625f1d2025fe6b462cb865881329b5caff8f1854d1cbc9f9533f00e1e1", [:make, :rebar3], [], "hexpm", "163b73f6367a7341b33c794c4e88e7dbfe6498ac42dcd69ef44c5bc5507c8db0"},
|
"cowlib": {:hex, :cowlib, "2.12.1", "a9fa9a625f1d2025fe6b462cb865881329b5caff8f1854d1cbc9f9533f00e1e1", [:make, :rebar3], [], "hexpm", "163b73f6367a7341b33c794c4e88e7dbfe6498ac42dcd69ef44c5bc5507c8db0"},
|
||||||
"credo": {:hex, :credo, "1.7.0", "6119bee47272e85995598ee04f2ebbed3e947678dee048d10b5feca139435f75", [:mix], [{:bunt, "~> 0.2.1", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2.8", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "6839fcf63d1f0d1c0f450abc8564a57c43d644077ab96f2934563e68b8a769d7"},
|
"credo": {:hex, :credo, "1.7.5", "643213503b1c766ec0496d828c90c424471ea54da77c8a168c725686377b9545", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "f799e9b5cd1891577d8c773d245668aa74a2fcd15eb277f51a0131690ebfb3fd"},
|
||||||
"credo_envvar": {:hex, :credo_envvar, "0.1.4", "40817c10334e400f031012c0510bfa0d8725c19d867e4ae39cf14f2cbebc3b20", [:mix], [{:credo, "~> 1.0", [hex: :credo, repo: "hexpm", optional: false]}], "hexpm", "5055cdb4bcbaf7d423bc2bb3ac62b4e2d825e2b1e816884c468dee59d0363009"},
|
"credo_envvar": {:hex, :credo_envvar, "0.1.4", "40817c10334e400f031012c0510bfa0d8725c19d867e4ae39cf14f2cbebc3b20", [:mix], [{:credo, "~> 1.0", [hex: :credo, repo: "hexpm", optional: false]}], "hexpm", "5055cdb4bcbaf7d423bc2bb3ac62b4e2d825e2b1e816884c468dee59d0363009"},
|
||||||
"credo_naming": {:hex, :credo_naming, "2.0.1", "eedf2faa93b7ae0daac9aad9fb8e092dd6a52de202e377f746704402ea797e5e", [:make, :mix], [{:credo, "~> 1.6", [hex: :credo, repo: "hexpm", optional: false]}], "hexpm", "20cb8ed97aa27c4190c1841283071726366ddb260cdf59989318e78c5bf4d523"},
|
"credo_naming": {:hex, :credo_naming, "2.1.0", "d44ad58890d4db552e141ce64756a74ac1573665af766d1ac64931aa90d47744", [:make, :mix], [{:credo, "~> 1.6", [hex: :credo, repo: "hexpm", optional: false]}], "hexpm", "830e23b3fba972e2fccec49c0c089fe78c1e64bc16782a2682d78082351a2909"},
|
||||||
"db_connection": {:hex, :db_connection, "2.5.0", "bb6d4f30d35ded97b29fe80d8bd6f928a1912ca1ff110831edcd238a1973652c", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "c92d5ba26cd69ead1ff7582dbb860adeedfff39774105a4f1c92cbb654b55aa2"},
|
"db_connection": {:hex, :db_connection, "2.6.0", "77d835c472b5b67fc4f29556dee74bf511bbafecdcaf98c27d27fa5918152086", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "c2f992d15725e721ec7fbc1189d4ecdb8afef76648c746a8e1cad35e3b8a35f3"},
|
||||||
"decimal": {:hex, :decimal, "2.1.1", "5611dca5d4b2c3dd497dec8f68751f1f1a54755e8ed2a966c2633cf885973ad6", [:mix], [], "hexpm", "53cfe5f497ed0e7771ae1a475575603d77425099ba5faef9394932b35020ffcc"},
|
"decimal": {:hex, :decimal, "2.1.1", "5611dca5d4b2c3dd497dec8f68751f1f1a54755e8ed2a966c2633cf885973ad6", [:mix], [], "hexpm", "53cfe5f497ed0e7771ae1a475575603d77425099ba5faef9394932b35020ffcc"},
|
||||||
"dialyxir": {:hex, :dialyxir, "1.4.1", "a22ed1e7bd3a3e3f197b68d806ef66acb61ee8f57b3ac85fc5d57354c5482a93", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "84b795d6d7796297cca5a3118444b80c7d94f7ce247d49886e7c291e1ae49801"},
|
"dialyxir": {:hex, :dialyxir, "1.4.3", "edd0124f358f0b9e95bfe53a9fcf806d615d8f838e2202a9f430d59566b6b53b", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "bf2cfb75cd5c5006bec30141b131663299c661a864ec7fbbc72dfa557487a986"},
|
||||||
"ecto": {:hex, :ecto, "3.10.3", "eb2ae2eecd210b4eb8bece1217b297ad4ff824b4384c0e3fdd28aaf96edd6135", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "44bec74e2364d491d70f7e42cd0d690922659d329f6465e89feb8a34e8cd3433"},
|
"ecto": {:hex, :ecto, "3.11.1", "4b4972b717e7ca83d30121b12998f5fcdc62ba0ed4f20fd390f16f3270d85c3e", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "ebd3d3772cd0dfcd8d772659e41ed527c28b2a8bde4b00fe03e0463da0f1983b"},
|
||||||
"ecto_network": {:hex, :ecto_network, "1.3.0", "1e77fa37c20e0f6a426d3862732f3317b0fa4c18f123d325f81752a491d7304e", [:mix], [{:ecto_sql, ">= 3.0.0", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:phoenix_html, ">= 0.0.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:postgrex, ">= 0.14.0", [hex: :postgrex, repo: "hexpm", optional: false]}], "hexpm", "053a5e46ef2837e8ea5ea97c82fa0f5494699209eddd764e663c85f11b2865bd"},
|
"ecto_network": {:hex, :ecto_network, "1.5.0", "a930c910975e7a91237b858ebf0f4ad7b2aae32fa846275aa203cb858459ec73", [:mix], [{:ecto_sql, ">= 3.0.0", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:phoenix_html, ">= 0.0.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:postgrex, ">= 0.14.0", [hex: :postgrex, repo: "hexpm", optional: false]}], "hexpm", "4d614434ae3e6d373a2f693d56aafaa3f3349714668ffd6d24e760caf578aa2f"},
|
||||||
"ecto_sql": {:hex, :ecto_sql, "3.10.2", "6b98b46534b5c2f8b8b5f03f126e75e2a73c64f3c071149d32987a5378b0fdbd", [:mix], [{:db_connection, "~> 2.4.1 or ~> 2.5", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.10.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.6.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.16.0 or ~> 0.17.0 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "68c018debca57cb9235e3889affdaec7a10616a4e3a80c99fa1d01fdafaa9007"},
|
"ecto_sql": {:hex, :ecto_sql, "3.11.1", "e9abf28ae27ef3916b43545f9578b4750956ccea444853606472089e7d169470", [:mix], [{:db_connection, "~> 2.4.1 or ~> 2.5", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.11.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.6.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.16.0 or ~> 0.17.0 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "ce14063ab3514424276e7e360108ad6c2308f6d88164a076aac8a387e1fea634"},
|
||||||
"elastix": {:hex, :elastix, "0.10.0", "7567da885677ba9deffc20063db5f3ca8cd10f23cff1ab3ed9c52b7063b7e340", [:mix], [{:httpoison, "~> 1.4", [hex: :httpoison, repo: "hexpm", optional: false]}, {:poison, "~> 3.0 or ~> 4.0", [hex: :poison, repo: "hexpm", optional: true]}, {:retry, "~> 0.8", [hex: :retry, repo: "hexpm", optional: false]}], "hexpm", "5fb342ce068b20f7845f5dd198c2dc80d967deafaa940a6e51b846db82696d1d"},
|
"elastix": {:hex, :elastix, "0.10.0", "7567da885677ba9deffc20063db5f3ca8cd10f23cff1ab3ed9c52b7063b7e340", [:mix], [{:httpoison, "~> 1.4", [hex: :httpoison, repo: "hexpm", optional: false]}, {:poison, "~> 3.0 or ~> 4.0", [hex: :poison, repo: "hexpm", optional: true]}, {:retry, "~> 0.8", [hex: :retry, repo: "hexpm", optional: false]}], "hexpm", "5fb342ce068b20f7845f5dd198c2dc80d967deafaa940a6e51b846db82696d1d"},
|
||||||
"elixir_make": {:hex, :elixir_make, "0.7.7", "7128c60c2476019ed978210c245badf08b03dbec4f24d05790ef791da11aa17c", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}], "hexpm", "5bc19fff950fad52bbe5f211b12db9ec82c6b34a9647da0c2224b8b8464c7e6c"},
|
"elixir_make": {:hex, :elixir_make, "0.7.8", "505026f266552ee5aabca0b9f9c229cbb496c689537c9f922f3eb5431157efc7", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:certifi, "~> 2.0", [hex: :certifi, repo: "hexpm", optional: true]}], "hexpm", "7a71945b913d37ea89b06966e1342c85cfe549b15e6d6d081e8081c493062c07"},
|
||||||
"elixir_uuid": {:hex, :elixir_uuid, "1.2.1", "dce506597acb7e6b0daeaff52ff6a9043f5919a4c3315abb4143f0b00378c097", [:mix], [], "hexpm", "f7eba2ea6c3555cea09706492716b0d87397b88946e6380898c2889d68585752"},
|
"elixir_uuid": {:hex, :elixir_uuid, "1.2.1", "dce506597acb7e6b0daeaff52ff6a9043f5919a4c3315abb4143f0b00378c097", [:mix], [], "hexpm", "f7eba2ea6c3555cea09706492716b0d87397b88946e6380898c2889d68585752"},
|
||||||
"erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"},
|
"erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"},
|
||||||
"ex_aws": {:git, "https://github.com/liamwhite/ex_aws.git", "a340859dd8ac4d63bd7a3948f0994e493e49bda4", [ref: "a340859dd8ac4d63bd7a3948f0994e493e49bda4"]},
|
"ex_aws": {:git, "https://github.com/liamwhite/ex_aws.git", "a340859dd8ac4d63bd7a3948f0994e493e49bda4", [ref: "a340859dd8ac4d63bd7a3948f0994e493e49bda4"]},
|
||||||
"ex_aws_s3": {:hex, :ex_aws_s3, "2.4.0", "ce8decb6b523381812798396bc0e3aaa62282e1b40520125d1f4eff4abdff0f4", [:mix], [{:ex_aws, "~> 2.0", [hex: :ex_aws, repo: "hexpm", optional: false]}, {:sweet_xml, ">= 0.0.0", [hex: :sweet_xml, repo: "hexpm", optional: true]}], "hexpm", "85dda6e27754d94582869d39cba3241d9ea60b6aa4167f9c88e309dc687e56bb"},
|
"ex_aws_s3": {:hex, :ex_aws_s3, "2.5.3", "422468e5c3e1a4da5298e66c3468b465cfd354b842e512cb1f6fbbe4e2f5bdaf", [:mix], [{:ex_aws, "~> 2.0", [hex: :ex_aws, repo: "hexpm", optional: false]}, {:sweet_xml, ">= 0.0.0", [hex: :sweet_xml, repo: "hexpm", optional: true]}], "hexpm", "4f09dd372cc386550e484808c5ac5027766c8d0cd8271ccc578b82ee6ef4f3b8"},
|
||||||
"expo": {:hex, :expo, "0.4.1", "1c61d18a5df197dfda38861673d392e642649a9cef7694d2f97a587b2cfb319b", [:mix], [], "hexpm", "2ff7ba7a798c8c543c12550fa0e2cbc81b95d4974c65855d8d15ba7b37a1ce47"},
|
"expo": {:hex, :expo, "0.5.2", "beba786aab8e3c5431813d7a44b828e7b922bfa431d6bfbada0904535342efe2", [:mix], [], "hexpm", "8c9bfa06ca017c9cb4020fabe980bc7fdb1aaec059fd004c2ab3bff03b1c599c"},
|
||||||
"exq": {:hex, :exq, "0.19.0", "06eb92944dad39f0954dc8f63190d3e24d11734eef88cf5800883e57ebf74f3c", [:mix], [{:elixir_uuid, ">= 1.2.0", [hex: :elixir_uuid, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:poison, ">= 1.2.0 and < 6.0.0", [hex: :poison, repo: "hexpm", optional: true]}, {:redix, ">= 0.9.0", [hex: :redix, repo: "hexpm", optional: false]}], "hexpm", "24fc0ebdd87cc7406e1034fb46c2419f9c8a362f0ec634d23b6b819514d36390"},
|
"exq": {:hex, :exq, "0.19.0", "06eb92944dad39f0954dc8f63190d3e24d11734eef88cf5800883e57ebf74f3c", [:mix], [{:elixir_uuid, ">= 1.2.0", [hex: :elixir_uuid, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:poison, ">= 1.2.0 and < 6.0.0", [hex: :poison, repo: "hexpm", optional: true]}, {:redix, ">= 0.9.0", [hex: :redix, repo: "hexpm", optional: false]}], "hexpm", "24fc0ebdd87cc7406e1034fb46c2419f9c8a362f0ec634d23b6b819514d36390"},
|
||||||
"file_system": {:hex, :file_system, "0.2.10", "fb082005a9cd1711c05b5248710f8826b02d7d1784e7c3451f9c1231d4fc162d", [:mix], [], "hexpm", "41195edbfb562a593726eda3b3e8b103a309b733ad25f3d642ba49696bf715dc"},
|
"file_system": {:hex, :file_system, "1.0.0", "b689cc7dcee665f774de94b5a832e578bd7963c8e637ef940cd44327db7de2cd", [:mix], [], "hexpm", "6752092d66aec5a10e662aefeed8ddb9531d79db0bc145bb8c40325ca1d8536d"},
|
||||||
"gen_smtp": {:hex, :gen_smtp, "1.2.0", "9cfc75c72a8821588b9b9fe947ae5ab2aed95a052b81237e0928633a13276fd3", [:rebar3], [{:ranch, ">= 1.8.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "5ee0375680bca8f20c4d85f58c2894441443a743355430ff33a783fe03296779"},
|
"gen_smtp": {:hex, :gen_smtp, "1.2.0", "9cfc75c72a8821588b9b9fe947ae5ab2aed95a052b81237e0928633a13276fd3", [:rebar3], [{:ranch, ">= 1.8.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "5ee0375680bca8f20c4d85f58c2894441443a743355430ff33a783fe03296779"},
|
||||||
"gettext": {:hex, :gettext, "0.23.1", "821e619a240e6000db2fc16a574ef68b3bd7fe0167ccc264a81563cc93e67a31", [:mix], [{:expo, "~> 0.4.0", [hex: :expo, repo: "hexpm", optional: false]}], "hexpm", "19d744a36b809d810d610b57c27b934425859d158ebd56561bc41f7eeb8795db"},
|
"gettext": {:hex, :gettext, "0.24.0", "6f4d90ac5f3111673cbefc4ebee96fe5f37a114861ab8c7b7d5b30a1108ce6d8", [:mix], [{:expo, "~> 0.5.1", [hex: :expo, repo: "hexpm", optional: false]}], "hexpm", "bdf75cdfcbe9e4622dd18e034b227d77dd17f0f133853a1c73b97b3d6c770e8b"},
|
||||||
"hackney": {:hex, :hackney, "1.19.1", "59de4716e985dd2b5cbd4954fa1ae187e2b610a9c4520ffcb0b1653c3d6e5559", [:rebar3], [{:certifi, "~> 2.12.0", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "~> 6.1.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "~> 1.0.0", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~> 1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:parse_trans, "3.4.1", [hex: :parse_trans, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "~> 1.1.0", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}, {:unicode_util_compat, "~> 0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "8aa08234bdefc269995c63c2282cf3cd0e36febe3a6bfab11b610572fdd1cad0"},
|
"hackney": {:hex, :hackney, "1.20.1", "8d97aec62ddddd757d128bfd1df6c5861093419f8f7a4223823537bad5d064e2", [:rebar3], [{:certifi, "~> 2.12.0", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "~> 6.1.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "~> 1.0.0", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~> 1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:parse_trans, "3.4.1", [hex: :parse_trans, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "~> 1.1.0", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}, {:unicode_util_compat, "~> 0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "fe9094e5f1a2a2c0a7d10918fee36bfec0ec2a979994cff8cfe8058cd9af38e3"},
|
||||||
"hpax": {:hex, :hpax, "0.1.2", "09a75600d9d8bbd064cdd741f21fc06fc1f4cf3d0fcc335e5aa19be1a7235c84", [:mix], [], "hexpm", "2c87843d5a23f5f16748ebe77969880e29809580efdaccd615cd3bed628a8c13"},
|
"hpax": {:hex, :hpax, "0.1.2", "09a75600d9d8bbd064cdd741f21fc06fc1f4cf3d0fcc335e5aa19be1a7235c84", [:mix], [], "hexpm", "2c87843d5a23f5f16748ebe77969880e29809580efdaccd615cd3bed628a8c13"},
|
||||||
"httpoison": {:hex, :httpoison, "1.8.2", "9eb9c63ae289296a544842ef816a85d881d4a31f518a0fec089aaa744beae290", [:mix], [{:hackney, "~> 1.17", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "2bb350d26972e30c96e2ca74a1aaf8293d61d0742ff17f01e0279fef11599921"},
|
"httpoison": {:hex, :httpoison, "1.8.2", "9eb9c63ae289296a544842ef816a85d881d4a31f518a0fec089aaa744beae290", [:mix], [{:hackney, "~> 1.17", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "2bb350d26972e30c96e2ca74a1aaf8293d61d0742ff17f01e0279fef11599921"},
|
||||||
"hut": {:hex, :hut, "1.4.0", "7a1238ec00f95c9ec75412587ee11ac652eca308a7f4b8cc9629746d579d6cf0", [:"erlang.mk", :rebar3], [], "hexpm", "7af8704b9bae98a336f70d9560fc3c97f15665265fa603dbd05352e63d6ebb03"},
|
"hut": {:hex, :hut, "1.4.0", "7a1238ec00f95c9ec75412587ee11ac652eca308a7f4b8cc9629746d579d6cf0", [:"erlang.mk", :rebar3], [], "hexpm", "7af8704b9bae98a336f70d9560fc3c97f15665265fa603dbd05352e63d6ebb03"},
|
||||||
"idna": {:hex, :idna, "6.1.1", "8a63070e9f7d0c62eb9d9fcb360a7de382448200fbbd1b106cc96d3d8099df8d", [:rebar3], [{:unicode_util_compat, "~> 0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "92376eb7894412ed19ac475e4a86f7b413c1b9fbb5bd16dccd57934157944cea"},
|
"idna": {:hex, :idna, "6.1.1", "8a63070e9f7d0c62eb9d9fcb360a7de382448200fbbd1b106cc96d3d8099df8d", [:rebar3], [{:unicode_util_compat, "~> 0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "92376eb7894412ed19ac475e4a86f7b413c1b9fbb5bd16dccd57934157944cea"},
|
||||||
"inet_cidr": {:hex, :inet_cidr, "1.0.4", "a05744ab7c221ca8e395c926c3919a821eb512e8f36547c062f62c4ca0cf3d6e", [:mix], [], "hexpm", "64a2d30189704ae41ca7dbdd587f5291db5d1dda1414e0774c29ffc81088c1bc"},
|
"inet_cidr": {:hex, :inet_cidr, "1.0.8", "d26bb7bdbdf21ae401ead2092bf2bb4bf57fe44a62f5eaa5025280720ace8a40", [:mix], [], "hexpm", "d5b26da66603bb56c933c65214c72152f0de9a6ea53618b56d63302a68f6a90e"},
|
||||||
"jason": {:hex, :jason, "1.4.1", "af1504e35f629ddcdd6addb3513c3853991f694921b1b9368b0bd32beb9f1b63", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "fbb01ecdfd565b56261302f7e1fcc27c4fb8f32d56eab74db621fc154604a7a1"},
|
"jason": {:hex, :jason, "1.4.1", "af1504e35f629ddcdd6addb3513c3853991f694921b1b9368b0bd32beb9f1b63", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "fbb01ecdfd565b56261302f7e1fcc27c4fb8f32d56eab74db621fc154604a7a1"},
|
||||||
"metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], [], "hexpm", "69b09adddc4f74a40716ae54d140f93beb0fb8978d8636eaded0c31b6f099f16"},
|
"metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], [], "hexpm", "69b09adddc4f74a40716ae54d140f93beb0fb8978d8636eaded0c31b6f099f16"},
|
||||||
"mime": {:hex, :mime, "1.6.0", "dabde576a497cef4bbdd60aceee8160e02a6c89250d6c0b29e56c0dfb00db3d2", [:mix], [], "hexpm", "31a1a8613f8321143dde1dafc36006a17d28d02bdfecb9e95a880fa7aabd19a7"},
|
"mime": {:hex, :mime, "1.6.0", "dabde576a497cef4bbdd60aceee8160e02a6c89250d6c0b29e56c0dfb00db3d2", [:mix], [], "hexpm", "31a1a8613f8321143dde1dafc36006a17d28d02bdfecb9e95a880fa7aabd19a7"},
|
||||||
"mimerl": {:hex, :mimerl, "1.2.0", "67e2d3f571088d5cfd3e550c383094b47159f3eee8ffa08e64106cdf5e981be3", [:rebar3], [], "hexpm", "f278585650aa581986264638ebf698f8bb19df297f66ad91b18910dfc6e19323"},
|
"mimerl": {:hex, :mimerl, "1.2.0", "67e2d3f571088d5cfd3e550c383094b47159f3eee8ffa08e64106cdf5e981be3", [:rebar3], [], "hexpm", "f278585650aa581986264638ebf698f8bb19df297f66ad91b18910dfc6e19323"},
|
||||||
"mint": {:hex, :mint, "1.5.1", "8db5239e56738552d85af398798c80648db0e90f343c8469f6c6d8898944fb6f", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:hpax, "~> 0.1.1", [hex: :hpax, repo: "hexpm", optional: false]}], "hexpm", "4a63e1e76a7c3956abd2c72f370a0d0aecddc3976dea5c27eccbecfa5e7d5b1e"},
|
"mint": {:hex, :mint, "1.5.2", "4805e059f96028948870d23d7783613b7e6b0e2fb4e98d720383852a760067fd", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:hpax, "~> 0.1.1", [hex: :hpax, repo: "hexpm", optional: false]}], "hexpm", "d77d9e9ce4eb35941907f1d3df38d8f750c357865353e21d335bdcdf6d892a02"},
|
||||||
"mix_audit": {:hex, :mix_audit, "2.1.1", "653aa6d8f291fc4b017aa82bdb79a4017903902ebba57960ef199cbbc8c008a1", [:make, :mix], [{:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:yaml_elixir, "~> 2.9", [hex: :yaml_elixir, repo: "hexpm", optional: false]}], "hexpm", "541990c3ab3a7bb8c4aaa2ce2732a4ae160ad6237e5dcd5ad1564f4f85354db1"},
|
"mix_audit": {:hex, :mix_audit, "2.1.2", "6cd5c5e2edbc9298629c85347b39fb3210656e541153826efd0b2a63767f3395", [:make, :mix], [{:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:yaml_elixir, "~> 2.9", [hex: :yaml_elixir, repo: "hexpm", optional: false]}], "hexpm", "68d2f06f96b9c445a23434c9d5f09682866a5b4e90f631829db1c64f140e795b"},
|
||||||
"neotoma": {:hex, :neotoma, "1.7.3", "d8bd5404b73273989946e4f4f6d529e5c2088f5fa1ca790b4dbe81f4be408e61", [:rebar], [], "hexpm", "2da322b9b1567ffa0706a7f30f6bbbde70835ae44a1050615f4b4a3d436e0f28"},
|
"neotoma": {:hex, :neotoma, "1.7.3", "d8bd5404b73273989946e4f4f6d529e5c2088f5fa1ca790b4dbe81f4be408e61", [:rebar], [], "hexpm", "2da322b9b1567ffa0706a7f30f6bbbde70835ae44a1050615f4b4a3d436e0f28"},
|
||||||
"nimble_options": {:hex, :nimble_options, "1.0.2", "92098a74df0072ff37d0c12ace58574d26880e522c22801437151a159392270e", [:mix], [], "hexpm", "fd12a8db2021036ce12a309f26f564ec367373265b53e25403f0ee697380f1b8"},
|
"nimble_options": {:hex, :nimble_options, "1.1.0", "3b31a57ede9cb1502071fade751ab0c7b8dbe75a9a4c2b5bbb0943a690b63172", [:mix], [], "hexpm", "8bbbb3941af3ca9acc7835f5655ea062111c9c27bcac53e004460dfd19008a99"},
|
||||||
"nimble_parsec": {:hex, :nimble_parsec, "1.3.1", "2c54013ecf170e249e9291ed0a62e5832f70a476c61da16f6aac6dca0189f2af", [:mix], [], "hexpm", "2682e3c0b2eb58d90c6375fc0cc30bc7be06f365bf72608804fb9cffa5e1b167"},
|
"nimble_parsec": {:hex, :nimble_parsec, "1.4.0", "51f9b613ea62cfa97b25ccc2c1b4216e81df970acd8e16e8d1bdc58fef21370d", [:mix], [], "hexpm", "9c565862810fb383e9838c1dd2d7d2c437b3d13b267414ba6af33e50d2d1cf28"},
|
||||||
"parse_trans": {:hex, :parse_trans, "3.4.1", "6e6aa8167cb44cc8f39441d05193be6e6f4e7c2946cb2759f015f8c56b76e5ff", [:rebar3], [], "hexpm", "620a406ce75dada827b82e453c19cf06776be266f5a67cff34e1ef2cbb60e49a"},
|
"parse_trans": {:hex, :parse_trans, "3.4.1", "6e6aa8167cb44cc8f39441d05193be6e6f4e7c2946cb2759f015f8c56b76e5ff", [:rebar3], [], "hexpm", "620a406ce75dada827b82e453c19cf06776be266f5a67cff34e1ef2cbb60e49a"},
|
||||||
"pbkdf2": {:git, "https://github.com/basho/erlang-pbkdf2.git", "7e9bd5fcd3cc3062159e4c9214bb628aa6feb5ca", [ref: "7e9bd5fcd3cc3062159e4c9214bb628aa6feb5ca"]},
|
"pbkdf2": {:git, "https://github.com/basho/erlang-pbkdf2.git", "7e9bd5fcd3cc3062159e4c9214bb628aa6feb5ca", [ref: "7e9bd5fcd3cc3062159e4c9214bb628aa6feb5ca"]},
|
||||||
"phoenix": {:hex, :phoenix, "1.7.7", "4cc501d4d823015007ba3cdd9c41ecaaf2ffb619d6fb283199fa8ddba89191e0", [:mix], [{:castore, ">= 0.0.0", [hex: :castore, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.1", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.6", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:plug_crypto, "~> 1.2", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:websock_adapter, "~> 0.5.3", [hex: :websock_adapter, repo: "hexpm", optional: false]}], "hexpm", "8966e15c395e5e37591b6ed0bd2ae7f48e961f0f60ac4c733f9566b519453085"},
|
"phoenix": {:hex, :phoenix, "1.7.11", "1d88fc6b05ab0c735b250932c4e6e33bfa1c186f76dcf623d8dd52f07d6379c7", [:mix], [{:castore, ">= 0.0.0", [hex: :castore, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.1", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.7", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:plug_crypto, "~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:websock_adapter, "~> 0.5.3", [hex: :websock_adapter, repo: "hexpm", optional: false]}], "hexpm", "b1ec57f2e40316b306708fe59b92a16b9f6f4bf50ccfa41aa8c7feb79e0ec02a"},
|
||||||
"phoenix_ecto": {:hex, :phoenix_ecto, "4.4.2", "b21bd01fdeffcfe2fab49e4942aa938b6d3e89e93a480d4aee58085560a0bc0d", [:mix], [{:ecto, "~> 3.5", [hex: :ecto, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.14.2 or ~> 3.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:plug, "~> 1.9", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "70242edd4601d50b69273b057ecf7b684644c19ee750989fd555625ae4ce8f5d"},
|
"phoenix_ecto": {:hex, :phoenix_ecto, "4.5.0", "1a1f841ccda19b15f1d82968840a5b895c5f687b6734e430e4b2dbe035ca1837", [:mix], [{:ecto, "~> 3.5", [hex: :ecto, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.1", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:plug, "~> 1.9", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "13990570fde09e16959ef214501fe2813e1192d62ca753ec8798980580436f94"},
|
||||||
"phoenix_html": {:hex, :phoenix_html, "3.3.2", "d6ce982c6d8247d2fc0defe625255c721fb8d5f1942c5ac051f6177bffa5973f", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "44adaf8e667c1c20fb9d284b6b0fa8dc7946ce29e81ce621860aa7e96de9a11d"},
|
"phoenix_html": {:hex, :phoenix_html, "3.3.3", "380b8fb45912b5638d2f1d925a3771b4516b9a78587249cabe394e0a5d579dc9", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "923ebe6fec6e2e3b3e569dfbdc6560de932cd54b000ada0208b5f45024bdd76c"},
|
||||||
"phoenix_live_reload": {:hex, :phoenix_live_reload, "1.4.1", "2aff698f5e47369decde4357ba91fc9c37c6487a512b41732818f2204a8ef1d3", [:mix], [{:file_system, "~> 0.2.1 or ~> 0.3", [hex: :file_system, repo: "hexpm", optional: false]}, {:phoenix, "~> 1.4", [hex: :phoenix, repo: "hexpm", optional: false]}], "hexpm", "9bffb834e7ddf08467fe54ae58b5785507aaba6255568ae22b4d46e2bb3615ab"},
|
"phoenix_live_reload": {:hex, :phoenix_live_reload, "1.5.1", "6ab463cf43938ee9906067b33c8d66782343de4280a70084cd5617accc6345a8", [:mix], [{:file_system, "~> 0.3 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:phoenix, "~> 1.4", [hex: :phoenix, repo: "hexpm", optional: false]}], "hexpm", "e8467d308b61f294f68afe12c81bf585584c7ceed40ec8adde88ec176d480a78"},
|
||||||
"phoenix_pubsub": {:hex, :phoenix_pubsub, "2.1.3", "3168d78ba41835aecad272d5e8cd51aa87a7ac9eb836eabc42f6e57538e3731d", [:mix], [], "hexpm", "bba06bc1dcfd8cb086759f0edc94a8ba2bc8896d5331a1e2c2902bf8e36ee502"},
|
"phoenix_pubsub": {:hex, :phoenix_pubsub, "2.1.3", "3168d78ba41835aecad272d5e8cd51aa87a7ac9eb836eabc42f6e57538e3731d", [:mix], [], "hexpm", "bba06bc1dcfd8cb086759f0edc94a8ba2bc8896d5331a1e2c2902bf8e36ee502"},
|
||||||
"phoenix_pubsub_redis": {:hex, :phoenix_pubsub_redis, "3.0.1", "d4d856b1e57a21358e448543e1d091e07e83403dde4383b8be04ed9d2c201cbc", [:mix], [{:phoenix_pubsub, "~> 2.0", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:poolboy, "~> 1.5.1 or ~> 1.6", [hex: :poolboy, repo: "hexpm", optional: false]}, {:redix, "~> 0.10.0 or ~> 1.0", [hex: :redix, repo: "hexpm", optional: false]}], "hexpm", "0b36a17ff6e9a56159f8df8933d62b5c1f0695eae995a02e0c86c035ace6a309"},
|
"phoenix_pubsub_redis": {:hex, :phoenix_pubsub_redis, "3.0.1", "d4d856b1e57a21358e448543e1d091e07e83403dde4383b8be04ed9d2c201cbc", [:mix], [{:phoenix_pubsub, "~> 2.0", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:poolboy, "~> 1.5.1 or ~> 1.6", [hex: :poolboy, repo: "hexpm", optional: false]}, {:redix, "~> 0.10.0 or ~> 1.0", [hex: :redix, repo: "hexpm", optional: false]}], "hexpm", "0b36a17ff6e9a56159f8df8933d62b5c1f0695eae995a02e0c86c035ace6a309"},
|
||||||
"phoenix_slime": {:git, "https://github.com/slime-lang/phoenix_slime.git", "8944de91654d6fcf6bdcc0aed6b8647fe3398241", [ref: "8944de91654d6fcf6bdcc0aed6b8647fe3398241"]},
|
"phoenix_slime": {:git, "https://github.com/slime-lang/phoenix_slime.git", "8944de91654d6fcf6bdcc0aed6b8647fe3398241", [ref: "8944de91654d6fcf6bdcc0aed6b8647fe3398241"]},
|
||||||
"phoenix_template": {:hex, :phoenix_template, "1.0.3", "32de561eefcefa951aead30a1f94f1b5f0379bc9e340bb5c667f65f1edfa4326", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}], "hexpm", "16f4b6588a4152f3cc057b9d0c0ba7e82ee23afa65543da535313ad8d25d8e2c"},
|
"phoenix_template": {:hex, :phoenix_template, "1.0.4", "e2092c132f3b5e5b2d49c96695342eb36d0ed514c5b252a77048d5969330d639", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}], "hexpm", "2c0c81f0e5c6753faf5cca2f229c9709919aba34fab866d3bc05060c9c444206"},
|
||||||
"phoenix_view": {:hex, :phoenix_view, "2.0.2", "6bd4d2fd595ef80d33b439ede6a19326b78f0f1d8d62b9a318e3d9c1af351098", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}], "hexpm", "a929e7230ea5c7ee0e149ffcf44ce7cf7f4b6d2bfe1752dd7c084cdff152d36f"},
|
"phoenix_view": {:hex, :phoenix_view, "2.0.3", "4d32c4817fce933693741deeb99ef1392619f942633dde834a5163124813aad3", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}], "hexpm", "cd34049af41be2c627df99cd4eaa71fc52a328c0c3d8e7d4aa28f880c30e7f64"},
|
||||||
"plug": {:hex, :plug, "1.14.2", "cff7d4ec45b4ae176a227acd94a7ab536d9b37b942c8e8fa6dfc0fff98ff4d80", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "842fc50187e13cf4ac3b253d47d9474ed6c296a8732752835ce4a86acdf68d13"},
|
"plug": {:hex, :plug, "1.15.3", "712976f504418f6dff0a3e554c40d705a9bcf89a7ccef92fc6a5ef8f16a30a97", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "cc4365a3c010a56af402e0809208873d113e9c38c401cabd88027ef4f5c01fd2"},
|
||||||
"plug_cowboy": {:hex, :plug_cowboy, "2.6.1", "9a3bbfceeb65eff5f39dab529e5cd79137ac36e913c02067dba3963a26efe9b2", [:mix], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:cowboy_telemetry, "~> 0.3", [hex: :cowboy_telemetry, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "de36e1a21f451a18b790f37765db198075c25875c64834bcc82d90b309eb6613"},
|
"plug_cowboy": {:hex, :plug_cowboy, "2.7.0", "3ae9369c60641084363b08fe90267cbdd316df57e3557ea522114b30b63256ea", [:mix], [{:cowboy, "~> 2.7.0 or ~> 2.8.0 or ~> 2.9.0 or ~> 2.10.0", [hex: :cowboy, repo: "hexpm", optional: false]}, {:cowboy_telemetry, "~> 0.3", [hex: :cowboy_telemetry, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "d85444fb8aa1f2fc62eabe83bbe387d81510d773886774ebdcb429b3da3c1a4a"},
|
||||||
"plug_crypto": {:hex, :plug_crypto, "1.2.5", "918772575e48e81e455818229bf719d4ab4181fcbf7f85b68a35620f78d89ced", [:mix], [], "hexpm", "26549a1d6345e2172eb1c233866756ae44a9609bd33ee6f99147ab3fd87fd842"},
|
"plug_crypto": {:hex, :plug_crypto, "2.0.0", "77515cc10af06645abbfb5e6ad7a3e9714f805ae118fa1a70205f80d2d70fe73", [:mix], [], "hexpm", "53695bae57cc4e54566d993eb01074e4d894b65a3766f1c43e2c61a1b0f45ea9"},
|
||||||
"poolboy": {:hex, :poolboy, "1.5.2", "392b007a1693a64540cead79830443abf5762f5d30cf50bc95cb2c1aaafa006b", [:rebar3], [], "hexpm", "dad79704ce5440f3d5a3681c8590b9dc25d1a561e8f5a9c995281012860901e3"},
|
"poolboy": {:hex, :poolboy, "1.5.2", "392b007a1693a64540cead79830443abf5762f5d30cf50bc95cb2c1aaafa006b", [:rebar3], [], "hexpm", "dad79704ce5440f3d5a3681c8590b9dc25d1a561e8f5a9c995281012860901e3"},
|
||||||
"postgrex": {:hex, :postgrex, "0.17.3", "c92cda8de2033a7585dae8c61b1d420a1a1322421df84da9a82a6764580c503d", [:mix], [{:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "946cf46935a4fdca7a81448be76ba3503cff082df42c6ec1ff16a4bdfbfb098d"},
|
"postgrex": {:hex, :postgrex, "0.17.5", "0483d054938a8dc069b21bdd636bf56c487404c241ce6c319c1f43588246b281", [:mix], [{:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "50b8b11afbb2c4095a3ba675b4f055c416d0f3d7de6633a595fc131a828a67eb"},
|
||||||
"pot": {:hex, :pot, "1.0.2", "13abb849139fdc04ab8154986abbcb63bdee5de6ed2ba7e1713527e33df923dd", [:rebar3], [], "hexpm", "78fe127f5a4f5f919d6ea5a2a671827bd53eb9d37e5b4128c0ad3df99856c2e0"},
|
"pot": {:hex, :pot, "1.0.2", "13abb849139fdc04ab8154986abbcb63bdee5de6ed2ba7e1713527e33df923dd", [:rebar3], [], "hexpm", "78fe127f5a4f5f919d6ea5a2a671827bd53eb9d37e5b4128c0ad3df99856c2e0"},
|
||||||
"qrcode": {:hex, :qrcode, "0.1.5", "551271830515c150f34568345b060c625deb0e6691db2a01b0a6de3aafc93886", [:mix], [], "hexpm", "a266b7fb7be0d3b713912055dde3575927eca920e5d604ded45cd534f6b7a447"},
|
"qrcode": {:hex, :qrcode, "0.1.5", "551271830515c150f34568345b060c625deb0e6691db2a01b0a6de3aafc93886", [:mix], [], "hexpm", "a266b7fb7be0d3b713912055dde3575927eca920e5d604ded45cd534f6b7a447"},
|
||||||
"ranch": {:hex, :ranch, "2.1.0", "2261f9ed9574dcfcc444106b9f6da155e6e540b2f82ba3d42b339b93673b72a3", [:make, :rebar3], [], "hexpm", "244ee3fa2a6175270d8e1fc59024fd9dbc76294a321057de8f803b1479e76916"},
|
"ranch": {:hex, :ranch, "2.1.0", "2261f9ed9574dcfcc444106b9f6da155e6e540b2f82ba3d42b339b93673b72a3", [:make, :rebar3], [], "hexpm", "244ee3fa2a6175270d8e1fc59024fd9dbc76294a321057de8f803b1479e76916"},
|
||||||
"redix": {:hex, :redix, "1.2.3", "3036e7c6080c42e1bbaa9168d1e28e367b01e8960a640a899b8ef8067273cb5e", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:nimble_options, "~> 0.5.0 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "14e2bca8a03fad297a78a3d201032df260ee5f0e0ef9c173c0f9ca5b3e0331b7"},
|
"redix": {:hex, :redix, "1.3.0", "f4121163ff9d73bf72157539ff23b13e38422284520bb58c05e014b19d6f0577", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:nimble_options, "~> 0.5.0 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "60d483d320c77329c8cbd3df73007e51b23f3fae75b7693bc31120d83ab26131"},
|
||||||
"remote_ip": {:hex, :remote_ip, "1.1.0", "cb308841595d15df3f9073b7c39243a1dd6ca56e5020295cb012c76fbec50f2d", [:mix], [{:combine, "~> 0.10", [hex: :combine, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "616ffdf66aaad6a72fc546dabf42eed87e2a99e97b09cbd92b10cc180d02ed74"},
|
"remote_ip": {:hex, :remote_ip, "1.1.0", "cb308841595d15df3f9073b7c39243a1dd6ca56e5020295cb012c76fbec50f2d", [:mix], [{:combine, "~> 0.10", [hex: :combine, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "616ffdf66aaad6a72fc546dabf42eed87e2a99e97b09cbd92b10cc180d02ed74"},
|
||||||
"retry": {:hex, :retry, "0.18.0", "dc58ebe22c95aa00bc2459f9e0c5400e6005541cf8539925af0aa027dc860543", [:mix], [], "hexpm", "9483959cc7bf69c9e576d9dfb2b678b71c045d3e6f39ab7c9aa1489df4492d73"},
|
"retry": {:hex, :retry, "0.18.0", "dc58ebe22c95aa00bc2459f9e0c5400e6005541cf8539925af0aa027dc860543", [:mix], [], "hexpm", "9483959cc7bf69c9e576d9dfb2b678b71c045d3e6f39ab7c9aa1489df4492d73"},
|
||||||
"rustler": {:hex, :rustler, "0.29.1", "880f20ae3027bd7945def6cea767f5257bc926f33ff50c0d5d5a5315883c084d", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:toml, "~> 0.6", [hex: :toml, repo: "hexpm", optional: false]}], "hexpm", "109497d701861bfcd26eb8f5801fe327a8eef304f56a5b63ef61151ff44ac9b6"},
|
"rustler": {:hex, :rustler, "0.31.0", "7e5eefe61e6e6f8901e5aa3de60073d360c6320d9ec363027b0197297b80c46a", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:toml, "~> 0.6", [hex: :toml, repo: "hexpm", optional: false]}], "hexpm", "99e378459bfb9c3bda6d3548b2b3bc6f9ad97f728f76bdbae7bf5c770a4f8abd"},
|
||||||
"scrivener": {:hex, :scrivener, "2.7.2", "1d913c965ec352650a7f864ad7fd8d80462f76a32f33d57d1e48bc5e9d40aba2", [:mix], [], "hexpm", "7866a0ec4d40274efbee1db8bead13a995ea4926ecd8203345af8f90d2b620d9"},
|
"scrivener": {:hex, :scrivener, "2.7.2", "1d913c965ec352650a7f864ad7fd8d80462f76a32f33d57d1e48bc5e9d40aba2", [:mix], [], "hexpm", "7866a0ec4d40274efbee1db8bead13a995ea4926ecd8203345af8f90d2b620d9"},
|
||||||
"scrivener_ecto": {:hex, :scrivener_ecto, "2.7.0", "cf64b8cb8a96cd131cdbcecf64e7fd395e21aaa1cb0236c42a7c2e34b0dca580", [:mix], [{:ecto, "~> 3.3", [hex: :ecto, repo: "hexpm", optional: false]}, {:scrivener, "~> 2.4", [hex: :scrivener, repo: "hexpm", optional: false]}], "hexpm", "e809f171687806b0031129034352f5ae44849720c48dd839200adeaf0ac3e260"},
|
"scrivener_ecto": {:hex, :scrivener_ecto, "2.7.0", "cf64b8cb8a96cd131cdbcecf64e7fd395e21aaa1cb0236c42a7c2e34b0dca580", [:mix], [{:ecto, "~> 3.3", [hex: :ecto, repo: "hexpm", optional: false]}, {:scrivener, "~> 2.4", [hex: :scrivener, repo: "hexpm", optional: false]}], "hexpm", "e809f171687806b0031129034352f5ae44849720c48dd839200adeaf0ac3e260"},
|
||||||
"secure_compare": {:hex, :secure_compare, "0.1.0", "01b3c93c8edb696e8a5b38397ed48e10958c8a5ec740606656445bcbec0aadb8", [:mix], [], "hexpm", "6391a49eb4a6182f0d7425842fc774bbed715e78b2bfb0c83b99c94e02c78b5c"},
|
"secure_compare": {:hex, :secure_compare, "0.1.0", "01b3c93c8edb696e8a5b38397ed48e10958c8a5ec740606656445bcbec0aadb8", [:mix], [], "hexpm", "6391a49eb4a6182f0d7425842fc774bbed715e78b2bfb0c83b99c94e02c78b5c"},
|
||||||
"slime": {:git, "https://github.com/liamwhite/slime.git", "cd4ced179197daa596bbb9d313f3808103c9624e", [ref: "cd4ced179197daa596bbb9d313f3808103c9624e"]},
|
"slime": {:git, "https://github.com/liamwhite/slime.git", "4c8ad4e9e9dcc792f4db769a9ef2ad7d6eba8f31", [ref: "4c8ad4e9e9dcc792f4db769a9ef2ad7d6eba8f31"]},
|
||||||
"sobelow": {:hex, :sobelow, "0.13.0", "218afe9075904793f5c64b8837cc356e493d88fddde126a463839351870b8d1e", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "cd6e9026b85fc35d7529da14f95e85a078d9dd1907a9097b3ba6ac7ebbe34a0d"},
|
"sobelow": {:hex, :sobelow, "0.13.0", "218afe9075904793f5c64b8837cc356e493d88fddde126a463839351870b8d1e", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "cd6e9026b85fc35d7529da14f95e85a078d9dd1907a9097b3ba6ac7ebbe34a0d"},
|
||||||
"ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.7", "354c321cf377240c7b8716899e182ce4890c5938111a1296add3ec74cf1715df", [:make, :mix, :rebar3], [], "hexpm", "fe4c190e8f37401d30167c8c405eda19469f34577987c76dde613e838bbc67f8"},
|
"ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.7", "354c321cf377240c7b8716899e182ce4890c5938111a1296add3ec74cf1715df", [:make, :mix, :rebar3], [], "hexpm", "fe4c190e8f37401d30167c8c405eda19469f34577987c76dde613e838bbc67f8"},
|
||||||
"sweet_xml": {:hex, :sweet_xml, "0.7.4", "a8b7e1ce7ecd775c7e8a65d501bc2cd933bff3a9c41ab763f5105688ef485d08", [:mix], [], "hexpm", "e7c4b0bdbf460c928234951def54fe87edf1a170f6896675443279e2dbeba167"},
|
"sweet_xml": {:hex, :sweet_xml, "0.7.4", "a8b7e1ce7ecd775c7e8a65d501bc2cd933bff3a9c41ab763f5105688ef485d08", [:mix], [], "hexpm", "e7c4b0bdbf460c928234951def54fe87edf1a170f6896675443279e2dbeba167"},
|
||||||
"telemetry": {:hex, :telemetry, "1.2.1", "68fdfe8d8f05a8428483a97d7aab2f268aaff24b49e0f599faa091f1d4e7f61c", [:rebar3], [], "hexpm", "dad9ce9d8effc621708f99eac538ef1cbe05d6a874dd741de2e689c47feafed5"},
|
"telemetry": {:hex, :telemetry, "1.2.1", "68fdfe8d8f05a8428483a97d7aab2f268aaff24b49e0f599faa091f1d4e7f61c", [:rebar3], [], "hexpm", "dad9ce9d8effc621708f99eac538ef1cbe05d6a874dd741de2e689c47feafed5"},
|
||||||
"tesla": {:hex, :tesla, "1.7.0", "a62dda2f80d4f8a925eb7b8c5b78c461e0eb996672719fe1a63b26321a5f8b4e", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:exjsx, ">= 3.0.0", [hex: :exjsx, repo: "hexpm", optional: true]}, {:finch, "~> 0.13", [hex: :finch, repo: "hexpm", optional: true]}, {:fuse, "~> 2.4", [hex: :fuse, repo: "hexpm", optional: true]}, {:gun, "~> 1.3", [hex: :gun, repo: "hexpm", optional: true]}, {:hackney, "~> 1.6", [hex: :hackney, repo: "hexpm", optional: true]}, {:ibrowse, "4.4.0", [hex: :ibrowse, repo: "hexpm", optional: true]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: true]}, {:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.0", [hex: :mint, repo: "hexpm", optional: true]}, {:msgpax, "~> 2.3", [hex: :msgpax, repo: "hexpm", optional: true]}, {:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm", "2e64f01ebfdb026209b47bc651a0e65203fcff4ae79c11efb73c4852b00dc313"},
|
"tesla": {:hex, :tesla, "1.8.0", "d511a4f5c5e42538d97eef7c40ec4f3e44effdc5068206f42ed859e09e51d1fd", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:exjsx, ">= 3.0.0", [hex: :exjsx, repo: "hexpm", optional: true]}, {:finch, "~> 0.13", [hex: :finch, repo: "hexpm", optional: true]}, {:fuse, "~> 2.4", [hex: :fuse, repo: "hexpm", optional: true]}, {:gun, ">= 1.0.0", [hex: :gun, repo: "hexpm", optional: true]}, {:hackney, "~> 1.6", [hex: :hackney, repo: "hexpm", optional: true]}, {:ibrowse, "4.4.2", [hex: :ibrowse, repo: "hexpm", optional: true]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: true]}, {:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.0", [hex: :mint, repo: "hexpm", optional: true]}, {:msgpax, "~> 2.3", [hex: :msgpax, repo: "hexpm", optional: true]}, {:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm", "10501f360cd926a309501287470372af1a6e1cbed0f43949203a4c13300bc79f"},
|
||||||
|
"tls_certificate_check": {:hex, :tls_certificate_check, "1.21.0", "042ab2c0c860652bc5cf69c94e3a31f96676d14682e22ec7813bd173ceff1788", [:rebar3], [{:ssl_verify_fun, "~> 1.1", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}], "hexpm", "6cee6cffc35a390840d48d463541d50746a7b0e421acaadb833cfc7961e490e7"},
|
||||||
"toml": {:hex, :toml, "0.7.0", "fbcd773caa937d0c7a02c301a1feea25612720ac3fa1ccb8bfd9d30d822911de", [:mix], [], "hexpm", "0690246a2478c1defd100b0c9b89b4ea280a22be9a7b313a8a058a2408a2fa70"},
|
"toml": {:hex, :toml, "0.7.0", "fbcd773caa937d0c7a02c301a1feea25612720ac3fa1ccb8bfd9d30d822911de", [:mix], [], "hexpm", "0690246a2478c1defd100b0c9b89b4ea280a22be9a7b313a8a058a2408a2fa70"},
|
||||||
"unicode_util_compat": {:hex, :unicode_util_compat, "0.7.0", "bc84380c9ab48177092f43ac89e4dfa2c6d62b40b8bd132b1059ecc7232f9a78", [:rebar3], [], "hexpm", "25eee6d67df61960cf6a794239566599b09e17e668d3700247bc498638152521"},
|
"unicode_util_compat": {:hex, :unicode_util_compat, "0.7.0", "bc84380c9ab48177092f43ac89e4dfa2c6d62b40b8bd132b1059ecc7232f9a78", [:rebar3], [], "hexpm", "25eee6d67df61960cf6a794239566599b09e17e668d3700247bc498638152521"},
|
||||||
"websock": {:hex, :websock, "0.5.3", "2f69a6ebe810328555b6fe5c831a851f485e303a7c8ce6c5f675abeb20ebdadc", [:mix], [], "hexpm", "6105453d7fac22c712ad66fab1d45abdf049868f253cf719b625151460b8b453"},
|
"websock": {:hex, :websock, "0.5.3", "2f69a6ebe810328555b6fe5c831a851f485e303a7c8ce6c5f675abeb20ebdadc", [:mix], [], "hexpm", "6105453d7fac22c712ad66fab1d45abdf049868f253cf719b625151460b8b453"},
|
||||||
"websock_adapter": {:hex, :websock_adapter, "0.5.4", "7af8408e7ed9d56578539594d1ee7d8461e2dd5c3f57b0f2a5352d610ddde757", [:mix], [{:bandit, ">= 0.6.0", [hex: :bandit, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.6", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:websock, "~> 0.5", [hex: :websock, repo: "hexpm", optional: false]}], "hexpm", "d2c238c79c52cbe223fcdae22ca0bb5007a735b9e933870e241fce66afb4f4ab"},
|
"websock_adapter": {:hex, :websock_adapter, "0.5.5", "9dfeee8269b27e958a65b3e235b7e447769f66b5b5925385f5a569269164a210", [:mix], [{:bandit, ">= 0.6.0", [hex: :bandit, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.6", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:websock, "~> 0.5", [hex: :websock, repo: "hexpm", optional: false]}], "hexpm", "4b977ba4a01918acbf77045ff88de7f6972c2a009213c515a445c48f224ffce9"},
|
||||||
"yamerl": {:hex, :yamerl, "0.10.0", "4ff81fee2f1f6a46f1700c0d880b24d193ddb74bd14ef42cb0bcf46e81ef2f8e", [:rebar3], [], "hexpm", "346adb2963f1051dc837a2364e4acf6eb7d80097c0f53cbdc3046ec8ec4b4e6e"},
|
"yamerl": {:hex, :yamerl, "0.10.0", "4ff81fee2f1f6a46f1700c0d880b24d193ddb74bd14ef42cb0bcf46e81ef2f8e", [:rebar3], [], "hexpm", "346adb2963f1051dc837a2364e4acf6eb7d80097c0f53cbdc3046ec8ec4b4e6e"},
|
||||||
"yaml_elixir": {:hex, :yaml_elixir, "2.9.0", "9a256da867b37b8d2c1ffd5d9de373a4fda77a32a45b452f1708508ba7bbcb53", [:mix], [{:yamerl, "~> 0.10", [hex: :yamerl, repo: "hexpm", optional: false]}], "hexpm", "0cb0e7d4c56f5e99a6253ed1a670ed0e39c13fc45a6da054033928607ac08dfc"},
|
"yaml_elixir": {:hex, :yaml_elixir, "2.9.0", "9a256da867b37b8d2c1ffd5d9de373a4fda77a32a45b452f1708508ba7bbcb53", [:mix], [{:yamerl, "~> 0.10", [hex: :yamerl, repo: "hexpm", optional: false]}], "hexpm", "0cb0e7d4c56f5e99a6253ed1a670ed0e39c13fc45a6da054033928607ac08dfc"},
|
||||||
}
|
}
|
||||||
|
|
755
native/philomena/Cargo.lock
generated
755
native/philomena/Cargo.lock
generated
File diff suppressed because it is too large
Load diff
|
@ -10,7 +10,7 @@ path = "src/lib.rs"
|
||||||
crate-type = ["dylib"]
|
crate-type = ["dylib"]
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
comrak = { git = "https://github.com/philomena-dev/comrak", branch = "main" }
|
comrak = { git = "https://github.com/philomena-dev/comrak", branch = "main", default-features = false }
|
||||||
jemallocator = { version = "0.5.0", features = ["disable_initial_exec_tls"] }
|
jemallocator = { version = "0.5.0", features = ["disable_initial_exec_tls"] }
|
||||||
rustler = "0.28"
|
rustler = "0.28"
|
||||||
ring = "0.16"
|
ring = "0.16"
|
||||||
|
|
Loading…
Reference in a new issue