Merge remote-tracking branch 'origin/master' into redesign

commit db237a9853
Author: Luna D.
Date: 2024-04-28 13:11:39 +02:00
20 changed files with 509 additions and 132 deletions

assets/fix-jsdom.ts (new file)

@@ -0,0 +1,13 @@
import JSDOMEnvironment from 'jest-environment-jsdom';
export default class FixJSDOMEnvironment extends JSDOMEnvironment {
constructor(...args: ConstructorParameters<typeof JSDOMEnvironment>) {
super(...args);
// https://github.com/jsdom/jsdom/issues/1721#issuecomment-1484202038
// jsdom URL and Blob are missing most of the implementation
// Use the node version of these types instead
this.global.URL = URL;
this.global.Blob = Blob;
}
}
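
For context, a minimal sketch (not part of this commit; the test name and blob contents are made up) of the kind of spec code that fails under stock jsdom but passes once the Node implementations back the globals:

// Stock jsdom leaves URL.createObjectURL undefined, so this call would throw
// "URL.createObjectURL is not a function"; with FixJSDOMEnvironment the Node
// URL and Blob implementations are installed on the test globals instead.
it('can create an object URL for a generated blob', () => {
  const blob = new Blob(['fake image bytes'], { type: 'image/png' });
  expect(URL.createObjectURL(blob)).toMatch(/^blob:/);
});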


@@ -25,7 +25,7 @@ export default {
},
preset: 'ts-jest/presets/js-with-ts-esm',
setupFilesAfterEnv: ['<rootDir>/test/jest-setup.ts'],
testEnvironment: 'jsdom',
testEnvironment: './fix-jsdom.ts',
testPathIgnorePatterns: ['/node_modules/', '/dist/'],
moduleNameMapper: {
'./js/(.*)': '<rootDir>/js/$1',


@@ -0,0 +1,91 @@
import { inputDuplicatorCreator } from '../input-duplicator';
import { assertNotNull } from '../utils/assert';
import { $, $$, removeEl } from '../utils/dom';
describe('Input duplicator functionality', () => {
beforeEach(() => {
document.documentElement.insertAdjacentHTML('beforeend', `<form action="/">
<div class="js-max-input-count">3</div>
<div class="js-input-source">
<input id="0" name="0" class="js-input" type="text"/>
<label>
<a href="#" class="js-remove-input">Delete</a>
</label>
</div>
<div class="js-button-container">
<button type="button" class="js-add-input">Add input</button>
</div>
</form>`);
});
afterEach(() => {
removeEl($$<HTMLFormElement>('form'));
});
function runCreator() {
inputDuplicatorCreator({
addButtonSelector: '.js-add-input',
fieldSelector: '.js-input-source',
maxInputCountSelector: '.js-max-input-count',
removeButtonSelector: '.js-remove-input',
});
}
it('should ignore forms without a duplicator button', () => {
removeEl($$<HTMLButtonElement>('button'));
expect(runCreator()).toBeUndefined();
});
it('should duplicate the input elements', () => {
runCreator();
expect($$('input')).toHaveLength(1);
assertNotNull($<HTMLButtonElement>('.js-add-input')).click();
expect($$('input')).toHaveLength(2);
});
it('should duplicate the input elements when the button is before the inputs', () => {
const form = assertNotNull($<HTMLFormElement>('form'));
const buttonDiv = assertNotNull($<HTMLDivElement>('.js-button-container'));
removeEl(buttonDiv);
form.insertAdjacentElement('afterbegin', buttonDiv);
runCreator();
assertNotNull($<HTMLButtonElement>('.js-add-input')).click();
expect($$('input')).toHaveLength(2);
});
it('should not create more input elements than the limit', () => {
runCreator();
for (let i = 0; i < 5; i += 1) {
assertNotNull($<HTMLButtonElement>('.js-add-input')).click();
}
expect($$('input')).toHaveLength(3);
});
it('should remove duplicated input elements', () => {
runCreator();
assertNotNull($<HTMLButtonElement>('.js-add-input')).click();
assertNotNull($<HTMLAnchorElement>('.js-remove-input')).click();
expect($$('input')).toHaveLength(1);
});
it('should not remove the last input element', () => {
runCreator();
assertNotNull($<HTMLAnchorElement>('.js-remove-input')).click();
assertNotNull($<HTMLAnchorElement>('.js-remove-input')).click();
for (let i = 0; i < 5; i += 1) {
assertNotNull($<HTMLAnchorElement>('.js-remove-input')).click();
}
expect($$('input')).toHaveLength(1);
});
});


@@ -1,5 +1,5 @@
import fetchMock from 'jest-fetch-mock';
import { fireEvent } from '@testing-library/dom';
import { fireEvent, waitFor } from '@testing-library/dom';
import { assertType } from '../utils/assert';
import '../ujs';
@@ -117,6 +117,7 @@ describe('Remote utilities', () => {
// https://www.benmvp.com/blog/mocking-window-location-methods-jest-jsdom/
let oldWindowLocation: Location;
/* eslint-disable @typescript-eslint/no-explicit-any */
beforeAll(() => {
oldWindowLocation = window.location;
delete (window as any).location;
@@ -136,6 +137,7 @@ describe('Remote utilities', () => {
beforeEach(() => {
(window.location.reload as any).mockReset();
});
/* eslint-enable @typescript-eslint/no-explicit-any */
afterAll(() => {
// restore window.location to the jsdom Location object
@@ -199,18 +201,10 @@ describe('Remote utilities', () => {
}));
it('should reload the page on 300 multiple choices response', () => {
const promiseLike = {
then(cb: (r: Response) => void) {
if (cb) {
cb(new Response('', { status: 300 }));
}
}
};
jest.spyOn(global, 'fetch').mockReturnValue(promiseLike as any);
jest.spyOn(global, 'fetch').mockResolvedValue(new Response('', { status: 300 }));
submitForm();
expect(window.location.reload).toHaveBeenCalledTimes(1);
return waitFor(() => expect(window.location.reload).toHaveBeenCalledTimes(1));
});
});
});

Binary file not shown (new image, 527 B).

Binary file not shown.


@@ -0,0 +1,178 @@
import { $, $$, removeEl } from '../utils/dom';
import { assertNotNull, assertNotUndefined } from '../utils/assert';
import fetchMock from 'jest-fetch-mock';
import { fixEventListeners } from '../../test/fix-event-listeners';
import { fireEvent, waitFor } from '@testing-library/dom';
import { promises } from 'fs';
import { join } from 'path';
import { setupImageUpload } from '../upload';
/* eslint-disable camelcase */
const scrapeResponse = {
description: 'test',
images: [
{url: 'http://localhost/images/1', camo_url: 'http://localhost/images/1'},
{url: 'http://localhost/images/2', camo_url: 'http://localhost/images/2'},
],
source_url: 'http://localhost/images',
author_name: 'test',
};
const nullResponse = null;
const errorResponse = {
errors: ['Error 1', 'Error 2'],
};
/* eslint-enable camelcase */
describe('Image upload form', () => {
let mockPng: File;
let mockWebm: File;
beforeAll(async() => {
const mockPngPath = join(__dirname, 'upload-test.png');
const mockWebmPath = join(__dirname, 'upload-test.webm');
mockPng = new File([(await promises.readFile(mockPngPath, { encoding: null })).buffer], 'upload-test.png', { type: 'image/png' });
mockWebm = new File([(await promises.readFile(mockWebmPath, { encoding: null })).buffer], 'upload-test.webm', { type: 'video/webm' });
});
beforeAll(() => {
fetchMock.enableMocks();
});
afterAll(() => {
fetchMock.disableMocks();
});
fixEventListeners(window);
let form: HTMLFormElement;
let imgPreviews: HTMLDivElement;
let fileField: HTMLInputElement;
let remoteUrl: HTMLInputElement;
let scraperError: HTMLDivElement;
let fetchButton: HTMLButtonElement;
let tagsEl: HTMLTextAreaElement;
let sourceEl: HTMLInputElement;
let descrEl: HTMLTextAreaElement;
beforeEach(() => {
document.documentElement.insertAdjacentHTML('beforeend', `
<form action="/images">
<div id="js-image-upload-previews"></div>
<input id="image_image" name="image[image]" type="file" class="js-scraper" />
<input id="image_scraper_url" name="image[scraper_url]" type="url" class="js-scraper" />
<button id="js-scraper-preview" type="button">Fetch</button>
<div class="field-error-js hidden js-scraper"></div>
<input id="image_sources_0_source" name="image[sources][0][source]" type="text" class="js-source-url" />
<textarea id="image_tag_input" name="image[tag_input]" class="js-image-tags-input"></textarea>
<textarea id="image_description" name="image[description]" class="js-image-descr-input"></textarea>
</form>
`);
form = assertNotNull($<HTMLFormElement>('form'));
imgPreviews = assertNotNull($<HTMLDivElement>('#js-image-upload-previews'));
fileField = assertNotUndefined($$<HTMLInputElement>('.js-scraper')[0]);
remoteUrl = assertNotUndefined($$<HTMLInputElement>('.js-scraper')[1]);
scraperError = assertNotUndefined($$<HTMLInputElement>('.js-scraper')[2]);
tagsEl = assertNotNull($<HTMLTextAreaElement>('.js-image-tags-input'));
sourceEl = assertNotNull($<HTMLInputElement>('.js-source-url'));
descrEl = assertNotNull($<HTMLTextAreaElement>('.js-image-descr-input'));
fetchButton = assertNotNull($<HTMLButtonElement>('#js-scraper-preview'));
setupImageUpload();
fetchMock.resetMocks();
});
afterEach(() => {
removeEl(form);
});
it('should disable fetch button on empty source', () => {
fireEvent.input(remoteUrl, { target: { value: '' }});
expect(fetchButton.disabled).toBe(true);
});
it('should enable fetch button on non-empty source', () => {
fireEvent.input(remoteUrl, { target: { value: 'http://localhost/images/1' }});
expect(fetchButton.disabled).toBe(false);
});
it('should create a preview element when an image file is uploaded', () => {
fireEvent.change(fileField, { target: { files: [mockPng] }});
return waitFor(() => expect(imgPreviews.querySelectorAll('img')).toHaveLength(1));
});
it('should create a preview element when a Matroska video file is uploaded', () => {
fireEvent.change(fileField, { target: { files: [mockWebm] }});
return waitFor(() => expect(imgPreviews.querySelectorAll('video')).toHaveLength(1));
});
it('should block navigation away after an image file is attached, but not after form submission', async() => {
fireEvent.change(fileField, { target: { files: [mockPng] }});
await waitFor(() => { expect(imgPreviews.querySelectorAll('img')).toHaveLength(1); });
const failedUnloadEvent = new Event('beforeunload', { cancelable: true });
expect(fireEvent(window, failedUnloadEvent)).toBe(false);
await new Promise<void>(resolve => {
form.addEventListener('submit', event => {
event.preventDefault();
resolve();
});
form.submit();
});
const succeededUnloadEvent = new Event('beforeunload', { cancelable: true });
expect(fireEvent(window, succeededUnloadEvent)).toBe(true);
});
it('should scrape images when the fetch button is clicked', async() => {
fetchMock.mockResolvedValue(new Response(JSON.stringify(scrapeResponse), { status: 200 }));
fireEvent.input(remoteUrl, { target: { value: 'http://localhost/images/1' }});
await new Promise<void>(resolve => {
tagsEl.addEventListener('addtag', (event: Event) => {
expect((event as CustomEvent).detail).toEqual({name: 'artist:test'});
resolve();
});
fireEvent.keyDown(remoteUrl, { keyCode: 13 });
});
await waitFor(() => expect(fetch).toHaveBeenCalledTimes(1));
await waitFor(() => expect(imgPreviews.querySelectorAll('img')).toHaveLength(2));
expect(scraperError.innerHTML).toEqual('');
expect(sourceEl.value).toEqual('http://localhost/images');
expect(descrEl.value).toEqual('test');
});
it('should show null scrape result', () => {
fetchMock.mockResolvedValue(new Response(JSON.stringify(nullResponse), { status: 200 }));
fireEvent.input(remoteUrl, { target: { value: 'http://localhost/images/1' }});
fetchButton.click();
return waitFor(() => {
expect(fetch).toHaveBeenCalledTimes(1);
expect(imgPreviews.querySelectorAll('img')).toHaveLength(0);
expect(scraperError.innerText).toEqual('No image found at that address.');
});
});
it('should show error scrape result', () => {
fetchMock.mockResolvedValue(new Response(JSON.stringify(errorResponse), { status: 200 }));
fireEvent.input(remoteUrl, { target: { value: 'http://localhost/images/1' }});
fetchButton.click();
return waitFor(() => {
expect(fetch).toHaveBeenCalledTimes(1);
expect(imgPreviews.querySelectorAll('img')).toHaveLength(0);
expect(scraperError.innerText).toEqual('Error 1 Error 2');
});
});
});


@@ -1,83 +0,0 @@
import { $, $$, disableEl, enableEl, removeEl } from './utils/dom';
import { delegate, leftClick } from './utils/events';
/**
* @typedef InputDuplicatorOptions
* @property {string} addButtonSelector
* @property {string} fieldSelector
* @property {string} maxInputCountSelector
* @property {string} removeButtonSelector
*/
/**
* @param {InputDuplicatorOptions} options
*/
function inputDuplicatorCreator({
addButtonSelector,
fieldSelector,
maxInputCountSelector,
removeButtonSelector
}) {
const addButton = $(addButtonSelector);
if (!addButton) {
return;
}
const form = addButton.closest('form');
const fieldRemover = (event, target) => {
event.preventDefault();
// Prevent removing the final field element to not "brick" the form
const existingFields = $$(fieldSelector, form);
if (existingFields.length <= 1) {
return;
}
removeEl(target.closest(fieldSelector));
enableEl(addButton);
};
delegate(document, 'click', {
[removeButtonSelector]: leftClick(fieldRemover)
});
const maxOptionCount = parseInt($(maxInputCountSelector, form).innerHTML, 10);
addButton.addEventListener('click', e => {
e.preventDefault();
const existingFields = $$(fieldSelector, form);
let existingFieldsLength = existingFields.length;
if (existingFieldsLength < maxOptionCount) {
// The last element matched by the `fieldSelector` will be the last field, make a copy
const prevField = existingFields[existingFieldsLength - 1];
const prevFieldCopy = prevField.cloneNode(true);
const prevFieldCopyInputs = $$('input', prevFieldCopy);
prevFieldCopyInputs.forEach(prevFieldCopyInput => {
// Reset new input's value
prevFieldCopyInput.value = '';
prevFieldCopyInput.removeAttribute('value');
// Increment sequential attributes of the input
['name', 'id'].forEach(attr => {
prevFieldCopyInput.setAttribute(attr, prevFieldCopyInput[attr].replace(/\d+/g, `${existingFieldsLength}`));
});
});
// Insert copy before the last field's next sibling, or if none, at the end of its parent
if (prevField.nextElementSibling) {
prevField.parentNode.insertBefore(prevFieldCopy, prevField.nextElementSibling);
}
else {
prevField.parentNode.appendChild(prevFieldCopy);
}
existingFieldsLength++;
}
// Remove the button if we reached the max number of options
if (existingFieldsLength >= maxOptionCount) {
disableEl(addButton);
}
});
}
export { inputDuplicatorCreator };


@@ -0,0 +1,76 @@
import { assertNotNull } from './utils/assert';
import { $, $$, disableEl, enableEl, removeEl } from './utils/dom';
import { delegate, leftClick } from './utils/events';
export interface InputDuplicatorOptions {
addButtonSelector: string;
fieldSelector: string;
maxInputCountSelector: string;
removeButtonSelector: string;
}
export function inputDuplicatorCreator({
addButtonSelector,
fieldSelector,
maxInputCountSelector,
removeButtonSelector
}: InputDuplicatorOptions) {
const addButton = $<HTMLButtonElement>(addButtonSelector);
if (!addButton) {
return;
}
const form = assertNotNull(addButton.closest('form'));
const fieldRemover = (event: MouseEvent, target: HTMLElement) => {
event.preventDefault();
// Prevent removing the final field element to not "brick" the form
const existingFields = $$(fieldSelector, form);
if (existingFields.length <= 1) {
return;
}
removeEl(assertNotNull(target.closest<HTMLElement>(fieldSelector)));
enableEl(addButton);
};
delegate(form, 'click', {
[removeButtonSelector]: leftClick(fieldRemover)
});
const maxOptionCountElement = assertNotNull($(maxInputCountSelector, form));
const maxOptionCount = parseInt(maxOptionCountElement.innerHTML, 10);
addButton.addEventListener('click', e => {
e.preventDefault();
const existingFields = $$<HTMLElement>(fieldSelector, form);
let existingFieldsLength = existingFields.length;
if (existingFieldsLength < maxOptionCount) {
// The last element matched by the `fieldSelector` will be the last field, make a copy
const prevField = existingFields[existingFieldsLength - 1];
const prevFieldCopy = prevField.cloneNode(true) as HTMLElement;
$$<HTMLInputElement>('input', prevFieldCopy).forEach(prevFieldCopyInput => {
// Reset new input's value
prevFieldCopyInput.value = '';
prevFieldCopyInput.removeAttribute('value');
// Increment sequential attributes of the input
prevFieldCopyInput.setAttribute('name', prevFieldCopyInput.name.replace(/\d+/g, `${existingFieldsLength}`));
prevFieldCopyInput.setAttribute('id', prevFieldCopyInput.id.replace(/\d+/g, `${existingFieldsLength}`));
});
prevField.insertAdjacentElement('afterend', prevFieldCopy);
existingFieldsLength++;
}
// Remove the button if we reached the max number of options
if (existingFieldsLength >= maxOptionCount) {
disableEl(addButton);
}
});
}


@@ -132,21 +132,17 @@ function setupImageUpload() {
});
// Enable/disable the fetch button based on content in the image scraper. Fetching with no URL makes no sense.
remoteUrl.addEventListener('input', () => {
function setFetchEnabled() {
if (remoteUrl.value.length > 0) {
enableFetch();
}
else {
disableFetch();
}
});
}
if (remoteUrl.value.length > 0) {
enableFetch();
}
else {
disableFetch();
}
remoteUrl.addEventListener('input', () => setFetchEnabled());
setFetchEnabled();
// Catch unintentional navigation away from the page


@@ -0,0 +1,26 @@
// Add helper to fix event listeners on a given target
export function fixEventListeners(t: EventTarget) {
let eventListeners: Record<string, unknown[]>;
/* eslint-disable @typescript-eslint/no-explicit-any */
beforeAll(() => {
eventListeners = {};
const oldAddEventListener = t.addEventListener;
t.addEventListener = function(type: string, listener: any, options: any): void {
eventListeners[type] = eventListeners[type] || [];
eventListeners[type].push(listener);
return oldAddEventListener(type, listener, options);
};
});
afterEach(() => {
for (const key in eventListeners) {
for (const listener of eventListeners[key]) {
(t.removeEventListener as any)(key, listener);
}
}
eventListeners = {};
});
}
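
A minimal usage sketch (illustrative only; the spec name is made up, and the import path mirrors the one used by the upload spec above): the helper is called once at describe scope, so the beforeAll/afterEach hooks it registers apply to every test in the file.

import { fixEventListeners } from '../../test/fix-event-listeners';

describe('a DOM-heavy feature', () => {
  // Wraps window.addEventListener in beforeAll and strips every recorded
  // listener again in afterEach, so listeners cannot leak between tests.
  fixEventListeners(window);

  it('does not leak listeners into later tests', () => {
    window.addEventListener('beforeunload', event => event.preventDefault());
    // No manual cleanup needed here; the afterEach hook removes the listener.
  });
});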


@@ -7,7 +7,7 @@ all: import_es
import_es: dump_jsonl
$(ELASTICDUMP) --input=images.jsonl --output=http://localhost:9200/ --output-index=images --limit 10000 --retryAttempts=5 --type=data --transform="doc._source = Object.assign({},doc); doc._id = doc.id"
dump_jsonl: metadata true_uploaders uploaders deleters galleries tags hides upvotes downvotes faves tag_names
dump_jsonl: metadata true_uploaders uploaders deleters galleries tags sources hides upvotes downvotes faves tag_names
psql $(DATABASE) -v ON_ERROR_STOP=1 <<< 'copy (select temp_images.jsonb_object_agg(object) from temp_images.image_search_json group by image_id) to stdout;' > images.jsonl
psql $(DATABASE) -v ON_ERROR_STOP=1 <<< 'drop schema temp_images cascade;'
sed -i images.jsonl -e 's/\\\\/\\/g'
@@ -15,6 +15,8 @@ dump_jsonl: metadata true_uploaders uploaders deleters galleries tags hides upvo
metadata: image_search_json
psql $(DATABASE) -v ON_ERROR_STOP=1 <<-SQL
insert into temp_images.image_search_json (image_id, object) select id, jsonb_build_object(
'approved', approved,
'animated', is_animated,
'anonymous', anonymous,
'aspect_ratio', nullif(image_aspect_ratio, 'NaN'::float8),
'comment_count', comments_count,
@@ -23,6 +25,7 @@ metadata: image_search_json
'description', description,
'downvotes', downvotes_count,
'duplicate_id', duplicate_id,
'duration', (case when is_animated then image_duration else 0::float end),
'faves', faves_count,
'file_name', image_name,
'fingerprint', fingerprint,
@@ -35,10 +38,11 @@ metadata: image_search_json
'orig_sha512_hash', image_orig_sha512_hash,
'original_format', image_format,
'pixels', cast(image_width as bigint)*cast(image_height as bigint),
'processed', processed,
'score', score,
'size', image_size,
'sha512_hash', image_sha512_hash,
'source_url', lower(source_url),
'thumbnails_generated', thumbnails_generated,
'updated_at', updated_at,
'upvotes', upvotes_count,
'width', image_width,
@@ -64,33 +68,49 @@ deleters: image_search_json
galleries: image_search_json
psql $(DATABASE) -v ON_ERROR_STOP=1 <<-SQL
insert into temp_images.image_search_json (image_id, object) select gi.image_id, jsonb_build_object('gallery_interactions', jsonb_agg(jsonb_build_object('id', gi.gallery_id, 'position', gi.position))) from gallery_interactions gi group by image_id;
insert into temp_images.image_search_json (image_id, object) select gi.image_id, jsonb_build_object('gallery_id', jsonb_agg(gi.gallery_id)) from gallery_interactions gi group by image_id;
insert into temp_images.image_search_json (image_id, object) select gi.image_id, jsonb_build_object('gallery_position', jsonb_object_agg(gi.gallery_id, gi.position)) from gallery_interactions gi group by image_id;
SQL
tags: image_search_json
psql $(DATABASE) -v ON_ERROR_STOP=1 <<-SQL
insert into temp_images.image_search_json (image_id, object) select it.image_id, jsonb_build_object('tag_ids', jsonb_agg(it.tag_id), 'tag_count', count(*)) from image_taggings it group by image_id;
insert into temp_images.image_search_json (image_id, object) select it.image_id, jsonb_build_object(
'tag_ids', jsonb_agg(it.tag_id),
'tag_count', count(*),
'error_tag_count', count(case when t.category = 'error' then t.category else null end),
'rating_tag_count', count(case when t.category = 'rating' then t.category else null end),
'origin_tag_count', count(case when t.category = 'origin' then t.category else null end),
'character_tag_count', count(case when t.category = 'character' then t.category else null end),
'oc_tag_count', count(case when t.category = 'oc' then t.category else null end),
'species_tag_count', count(case when t.category = 'species' then t.category else null end),
'body_type_tag_count', count(case when t.category = 'body-type' then t.category else null end),
'content_fanmade_tag_count', count(case when t.category = 'content-fanmade' then t.category else null end),
'content_official_tag_count', count(case when t.category = 'content-official' then t.category else null end),
'spoiler_tag_count', count(case when t.category = 'spoiler' then t.category else null end)
) from image_taggings it inner join tags t on t.id = it.tag_id group by image_id;
SQL
sources: image_search_json
psql $(DATABASE) -v ON_ERROR_STOP=1 <<-SQL
insert into temp_images.image_search_json (image_id, object) select s.image_id, jsonb_build_object('source_url', jsonb_agg(lower(s.source)), 'source_count', count(*)) from image_sources s group by image_id;
SQL
hides: image_search_json
psql $(DATABASE) -v ON_ERROR_STOP=1 <<-SQL
insert into temp_images.image_search_json (image_id, object) select ih.image_id, jsonb_build_object('hidden_by_ids', jsonb_agg(ih.user_id), 'hidden_by', jsonb_agg(lower(u.name))) from image_hides ih inner join users u on u.id = ih.user_id group by image_id;
insert into temp_images.image_search_json (image_id, object) select ih.image_id, jsonb_build_object('hidden_by_user_ids', jsonb_agg(ih.user_id), 'hidden_by_users', jsonb_agg(lower(u.name))) from image_hides ih inner join users u on u.id = ih.user_id group by image_id;
SQL
downvotes: image_search_json
psql $(DATABASE) -v ON_ERROR_STOP=1 <<-SQL
insert into temp_images.image_search_json (image_id, object) select iv.image_id, jsonb_build_object('downvoted_by_ids', jsonb_agg(iv.user_id), 'downvoted_by', jsonb_agg(lower(u.name))) from image_votes iv inner join users u on u.id = iv.user_id where iv.up = false group by image_id;
insert into temp_images.image_search_json (image_id, object) select iv.image_id, jsonb_build_object('downvoter_ids', jsonb_agg(iv.user_id), 'downvoters', jsonb_agg(lower(u.name))) from image_votes iv inner join users u on u.id = iv.user_id where iv.up = false group by image_id;
SQL
upvotes: image_search_json
psql $(DATABASE) -v ON_ERROR_STOP=1 <<-SQL
insert into temp_images.image_search_json (image_id, object) select iv.image_id, jsonb_build_object('upvoted_by_ids', jsonb_agg(iv.user_id), 'upvoted_by', jsonb_agg(lower(u.name))) from image_votes iv inner join users u on u.id = iv.user_id where iv.up = true group by image_id;
insert into temp_images.image_search_json (image_id, object) select iv.image_id, jsonb_build_object('upvoter_ids', jsonb_agg(iv.user_id), 'upvoters', jsonb_agg(lower(u.name))) from image_votes iv inner join users u on u.id = iv.user_id where iv.up = true group by image_id;
SQL
faves: image_search_json
psql $(DATABASE) -v ON_ERROR_STOP=1 <<-SQL
insert into temp_images.image_search_json (image_id, object) select if.image_id, jsonb_build_object('faved_by_ids', jsonb_agg(if.user_id), 'faved_by', jsonb_agg(lower(u.name))) from image_faves if inner join users u on u.id = if.user_id group by image_id;
insert into temp_images.image_search_json (image_id, object) select if.image_id, jsonb_build_object('favourited_by_user_ids', jsonb_agg(if.user_id), 'favourited_by_users', jsonb_agg(lower(u.name))) from image_faves if inner join users u on u.id = if.user_id group by image_id;
SQL
tag_names: tags_with_aliases


@@ -56,6 +56,7 @@ defmodule Philomena.Images.ElasticsearchIndex do
size: %{type: "integer"},
sha512_hash: %{type: "keyword"},
source_url: %{type: "keyword"},
source_count: %{type: "integer"},
tag_count: %{type: "integer"},
tag_ids: %{type: "keyword"},
tags: %{type: "text", analyzer: "keyword"},
@@ -87,7 +88,17 @@ defmodule Philomena.Images.ElasticsearchIndex do
namespace: %{type: "keyword"}
}
},
approved: %{type: "boolean"}
approved: %{type: "boolean"},
error_tag_count: %{type: "integer"},
rating_tag_count: %{type: "integer"},
origin_tag_count: %{type: "integer"},
character_tag_count: %{type: "integer"},
oc_tag_count: %{type: "integer"},
species_tag_count: %{type: "integer"},
body_type_tag_count: %{type: "integer"},
content_fanmade_tag_count: %{type: "integer"},
content_official_tag_count: %{type: "integer"},
spoiler_tag_count: %{type: "integer"}
}
}
}
@@ -120,6 +131,7 @@ defmodule Philomena.Images.ElasticsearchIndex do
uploader: if(!!image.user and !image.anonymous, do: String.downcase(image.user.name)),
true_uploader: if(!!image.user, do: String.downcase(image.user.name)),
source_url: image.sources |> Enum.map(&String.downcase(&1.source)),
source_count: length(image.sources),
file_name: image.image_name,
original_format: image.image_format,
processed: image.processed,
@@ -151,7 +163,17 @@ defmodule Philomena.Images.ElasticsearchIndex do
upvoters: image.upvoters |> Enum.map(&String.downcase(&1.name)),
downvoters: image.downvoters |> Enum.map(&String.downcase(&1.name)),
deleted_by_user: if(!!image.deleter, do: image.deleter.name),
approved: image.approved
approved: image.approved,
error_tag_count: Enum.count(image.tags, &(&1.category == "error")),
rating_tag_count: Enum.count(image.tags, &(&1.category == "rating")),
origin_tag_count: Enum.count(image.tags, &(&1.category == "origin")),
character_tag_count: Enum.count(image.tags, &(&1.category == "character")),
oc_tag_count: Enum.count(image.tags, &(&1.category == "oc")),
species_tag_count: Enum.count(image.tags, &(&1.category == "species")),
body_type_tag_count: Enum.count(image.tags, &(&1.category == "body-type")),
content_fanmade_tag_count: Enum.count(image.tags, &(&1.category == "content-fanmade")),
content_official_tag_count: Enum.count(image.tags, &(&1.category == "content-official")),
spoiler_tag_count: Enum.count(image.tags, &(&1.category == "spoiler"))
}
end


@@ -66,10 +66,26 @@ defmodule Philomena.Images.Query do
end
end
defp tag_count_fields do
[
"body_type_tag_count",
"error_tag_count",
"character_tag_count",
"content_fanmade_tag_count",
"content_official_tag_count",
"oc_tag_count",
"origin_tag_count",
"rating_tag_count",
"species_tag_count",
"spoiler_tag_count"
]
end
defp anonymous_fields do
[
int_fields:
~W(id width height comment_count score upvotes downvotes faves uploader_id faved_by_id tag_count pixels size),
~W(id width height score upvotes downvotes faves uploader_id faved_by_id pixels size comment_count source_count tag_count) ++
tag_count_fields(),
float_fields: ~W(aspect_ratio wilson_score duration),
date_fields: ~W(created_at updated_at first_seen_at),
literal_fields:


@@ -3,6 +3,7 @@ defmodule PhilomenaWeb.Profile.TagChangeController do
alias Philomena.Users.User
alias Philomena.Images.Image
alias Philomena.Tags.Tag
alias Philomena.TagChanges.TagChange
alias Philomena.Repo
import Ecto.Query
@@ -16,19 +17,27 @@ defmodule PhilomenaWeb.Profile.TagChangeController do
tag_changes =
TagChange
|> join(:inner, [tc], i in Image, on: tc.image_id == i.id)
|> only_tag_join(params)
|> where(
[tc, i],
tc.user_id == ^user.id and not (i.user_id == ^user.id and i.anonymous == true)
)
|> added_filter(params)
|> only_tag_filter(params)
|> preload([:tag, :user, image: [:user, :sources, tags: :aliases]])
|> order_by(desc: :id)
|> Repo.paginate(conn.assigns.scrivener)
# params.permit(:added, :only_tag) ...
pagination_params =
[added: conn.params["added"], only_tag: conn.params["only_tag"]]
|> Keyword.filter(fn {_k, v} -> not is_nil(v) and v != "" end)
render(conn, "index.html",
title: "Tag Changes for User `#{user.name}'",
user: user,
tag_changes: tag_changes
tag_changes: tag_changes,
pagination_params: pagination_params
)
end
@@ -40,4 +49,18 @@ defmodule PhilomenaWeb.Profile.TagChangeController do
defp added_filter(query, _params),
do: query
defp only_tag_join(query, %{"only_tag" => only_tag})
when is_binary(only_tag) and only_tag != "",
do: join(query, :inner, [tc], t in Tag, on: tc.tag_id == t.id)
defp only_tag_join(query, _params),
do: query
defp only_tag_filter(query, %{"only_tag" => only_tag})
when is_binary(only_tag) and only_tag != "",
do: where(query, [_, _, t], t.name == ^only_tag)
defp only_tag_filter(query, _params),
do: query
end


@@ -3,7 +3,6 @@ defmodule PhilomenaWeb.MarkdownRenderer do
alias Philomena.Images.Image
alias Philomena.Repo
alias PhilomenaWeb.ImageView
import Phoenix.HTML
import Phoenix.HTML.Link
import Ecto.Query
@@ -84,7 +83,6 @@ defmodule PhilomenaWeb.MarkdownRenderer do
size: ImageView.select_version(img, :medium),
conn: conn
)
|> safe_to_string()
[_id, "t"] when not img.hidden_from_users and img.approved ->
Phoenix.View.render(ImageView, "_image_target.html",
@@ -93,7 +91,6 @@ defmodule PhilomenaWeb.MarkdownRenderer do
size: ImageView.select_version(img, :small),
conn: conn
)
|> safe_to_string()
[_id, "s"] when not img.hidden_from_users and img.approved ->
Phoenix.View.render(ImageView, "_image_target.html",
@@ -102,18 +99,15 @@ defmodule PhilomenaWeb.MarkdownRenderer do
size: ImageView.select_version(img, :thumb_small),
conn: conn
)
|> safe_to_string()
[_id, suffix] when not img.approved ->
">>#{img.id}#{suffix}#{link_suffix(img)}"
[_id, ""] ->
link(">>#{img.id}#{link_suffix(img)}", to: "/images/#{img.id}")
|> safe_to_string()
[_id, suffix] when suffix in ["t", "s", "p"] ->
link(">>#{img.id}#{suffix}#{link_suffix(img)}", to: "/images/#{img.id}")
|> safe_to_string()
# This condition should never trigger, but let's leave it here just in case.
[id, suffix] ->
@@ -124,7 +118,12 @@ defmodule PhilomenaWeb.MarkdownRenderer do
">>#{text}"
end
[text, rendered]
string_contents =
rendered
|> Phoenix.HTML.Safe.to_iodata()
|> IO.iodata_to_binary()
[text, string_contents]
end)
|> Map.new(fn [id, html] -> {id, html} end)
end


@@ -45,13 +45,15 @@ defmodule PhilomenaWeb.StatsUpdater do
distinct_creators: distinct_creators,
images_in_galleries: images_in_galleries
)
|> Phoenix.HTML.Safe.to_iodata()
|> IO.iodata_to_binary()
now = DateTime.utc_now() |> DateTime.truncate(:second)
static_page = %{
title: "Statistics",
slug: "stats",
body: Phoenix.HTML.safe_to_string(result),
body: result,
created_at: now,
updated_at: now
}


@@ -37,6 +37,6 @@ p
strong> Q: Do you host streams?
| A: No, we cheat and just link to streams on Picarto since that's where (almost) everyone is already. This is simply a nice way to track streaming artists.
p
strong> Q: How do I get my stream/a friend's stream/<artist>'s stream here?
strong> Q: How do I get my stream/a friend's stream/&lt;artist&gt;'s stream here?
' A: Send a private message to a site administrator
' with a link to the stream and the artist tag if applicable.


@@ -4,16 +4,16 @@ h1
= @user.name
- route = fn p -> Routes.profile_tag_change_path(@conn, :index, @user, p) end
- params = if @conn.params["added"], do: [added: @conn.params["added"]]
- pagination = render PhilomenaWeb.PaginationView, "_pagination.html", page: @tag_changes, route: route, conn: @conn, params: params
- pagination = render PhilomenaWeb.PaginationView, "_pagination.html", page: @tag_changes, route: route, conn: @conn, params: @pagination_params
.block
.block__header
span
| Display only:
= form_for @conn, Routes.profile_tag_change_path(@conn, :index, @user), [method: "get", enforce_utf8: false], fn f ->
= text_input f, :only_tag, class: "input", placeholder: "Tag", title: "Only show this tag", autocapitalize: "none"
= submit "Search", class: "button", title: "Search"
= link "Removed", to: Routes.profile_tag_change_path(@conn, :index, @user, added: 0)
= link "Added", to: Routes.profile_tag_change_path(@conn, :index, @user, added: 1)
= link "All", to: Routes.profile_tag_change_path(@conn, :index, @user)
= link "Removed", to: Routes.profile_tag_change_path(@conn, :index, @user, Keyword.merge(@pagination_params, added: 0))
= link "Added", to: Routes.profile_tag_change_path(@conn, :index, @user, Keyword.merge(@pagination_params, added: 1))
= link "All", to: Routes.profile_tag_change_path(@conn, :index, @user, Keyword.delete(@pagination_params, :added))
= render PhilomenaWeb.TagChangeView, "index.html", conn: @conn, tag_changes: @tag_changes, pagination: pagination


@@ -103,6 +103,8 @@ defmodule PhilomenaWeb.TagView do
{tags, shipping, data}
end
# This is a rendered template, so raw/1 has no effect on safety
# sobelow_skip ["XSS.Raw"]
defp render_quick_tags({tags, shipping, data}, conn) do
render(PhilomenaWeb.TagView, "_quick_tag_table.html",
tags: tags,
@@ -110,6 +112,8 @@ defmodule PhilomenaWeb.TagView do
data: data,
conn: conn
)
|> Phoenix.HTML.Safe.to_iodata()
|> Phoenix.HTML.raw()
end
defp names_in_tab("default", data) do