philomena (mirror of https://github.com/philomena-dev/philomena.git)

Commit 67a904cb98: Merge remote-tracking branch 'origin/master' into redesign
236 changed files with 5647 additions and 3637 deletions
.github/workflows/elixir.yml (vendored) | 3

@@ -78,3 +78,6 @@ jobs:
       - run: npm run test
         working-directory: ./assets
+
+      - run: npm run build
+        working-directory: ./assets
assets/css/views/_notifications.scss (new file) | 11

@@ -0,0 +1,11 @@
.notification-type-block:not(:last-child) {
  margin-bottom: 20px;
}

.notification {
  margin-bottom: 0;
}

.notification:not(:last-child) {
  border-bottom: 0;
}
@@ -125,7 +125,7 @@ export default tsEslint.config(
     'no-irregular-whitespace': 2,
     'no-iterator': 2,
     'no-label-var': 2,
-    'no-labels': 2,
+    'no-labels': [2, { allowSwitch: true, allowLoop: true }],
     'no-lone-blocks': 2,
     'no-lonely-if': 0,
     'no-loop-func': 2,
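For context on the relaxed rule above: with { allowSwitch: true, allowLoop: true }, ESLint's no-labels rule still forbids labels on arbitrary blocks but permits them on loops and switch statements. A minimal illustration, not taken from the repository:

// Allowed under the new configuration: a label used to continue an outer loop.
outer: for (const row of [[1, 2], [3, 4]]) {
  for (const cell of row) {
    if (cell === 3) continue outer;
  }
}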
@@ -8,17 +8,17 @@ describe('Input duplicator functionality', () => {
     document.documentElement.insertAdjacentHTML(
       'beforeend',
       `<form action="/">
        <div class="js-max-input-count">3</div>
        <div class="js-input-source">
          <input id="0" name="0" class="js-input" type="text"/>
          <label>
            <a href="#" class="js-remove-input">Delete</a>
          </label>
        </div>
        <div class="js-button-container">
          <button type="button" class="js-add-input">Add input</button>
        </div>
      </form>`,
    );
  });
@@ -25,6 +25,9 @@ const errorResponse = {
 };
 /* eslint-enable camelcase */
 
+const tagSets = ['', 'a tag', 'safe', 'one, two, three', 'safe, explicit', 'safe, explicit, three', 'safe, two, three'];
+const tagErrorCounts = [1, 2, 1, 1, 2, 1, 0];
+
 describe('Image upload form', () => {
   let mockPng: File;
   let mockWebm: File;

@@ -58,18 +61,27 @@ describe('Image upload form', () => {
   let scraperError: HTMLDivElement;
   let fetchButton: HTMLButtonElement;
   let tagsEl: HTMLTextAreaElement;
+  let taginputEl: HTMLDivElement;
   let sourceEl: HTMLInputElement;
   let descrEl: HTMLTextAreaElement;
+  let submitButton: HTMLButtonElement;
 
   const assertFetchButtonIsDisabled = () => {
     if (!fetchButton.hasAttribute('disabled')) throw new Error('fetchButton is not disabled');
   };
 
+  const assertSubmitButtonIsDisabled = () => {
+    if (!submitButton.hasAttribute('disabled')) throw new Error('submitButton is not disabled');
+  };
+
+  const assertSubmitButtonIsEnabled = () => {
+    if (submitButton.hasAttribute('disabled')) throw new Error('submitButton is disabled');
+  };
+
   beforeEach(() => {
     document.documentElement.insertAdjacentHTML(
       'beforeend',
-      `
-      <form action="/images">
+      `<form action="/images">
         <div id="js-image-upload-previews"></div>
         <input id="image_image" name="image[image]" type="file" class="js-scraper" />
         <input id="image_scraper_url" name="image[scraper_url]" type="url" class="js-scraper" />

@@ -78,9 +90,13 @@ describe('Image upload form', () => {
 
         <input id="image_sources_0_source" name="image[sources][0][source]" type="text" class="js-source-url" />
         <textarea id="image_tag_input" name="image[tag_input]" class="js-image-tags-input"></textarea>
+        <div class="js-taginput"></div>
+        <button id="tagsinput-save" type="button" class="button">Save</button>
         <textarea id="image_description" name="image[description]" class="js-image-descr-input"></textarea>
-      </form>
-      `,
+        <div class="actions">
+          <button class="button input--separate-top" type="submit">Upload</button>
+        </div>
+      </form>`,
     );
 
     form = assertNotNull($<HTMLFormElement>('form'));

@@ -89,9 +105,11 @@ describe('Image upload form', () => {
     remoteUrl = assertNotUndefined($$<HTMLInputElement>('.js-scraper')[1]);
     scraperError = assertNotUndefined($$<HTMLInputElement>('.js-scraper')[2]);
     tagsEl = assertNotNull($<HTMLTextAreaElement>('.js-image-tags-input'));
+    taginputEl = assertNotNull($<HTMLDivElement>('.js-taginput'));
     sourceEl = assertNotNull($<HTMLInputElement>('.js-source-url'));
     descrEl = assertNotNull($<HTMLTextAreaElement>('.js-image-descr-input'));
     fetchButton = assertNotNull($<HTMLButtonElement>('#js-scraper-preview'));
+    submitButton = assertNotNull($<HTMLButtonElement>('.actions > .button'));
 
     setupImageUpload();
     fetchMock.resetMocks();

@@ -195,4 +213,42 @@ describe('Image upload form', () => {
       expect(scraperError.innerText).toEqual('Error 1 Error 2');
     });
   });
+
+  async function submitForm(frm: HTMLFormElement): Promise<boolean> {
+    return new Promise(resolve => {
+      function onSubmit() {
+        frm.removeEventListener('submit', onSubmit);
+        resolve(true);
+      }
+
+      frm.addEventListener('submit', onSubmit);
+
+      if (!fireEvent.submit(frm)) {
+        frm.removeEventListener('submit', onSubmit);
+        resolve(false);
+      }
+    });
+  }
+
+  it('should prevent form submission if tag checks fail', async () => {
+    for (let i = 0; i < tagSets.length; i += 1) {
+      taginputEl.innerText = tagSets[i];
+
+      if (await submitForm(form)) {
+        // form submit succeeded
+        await waitFor(() => {
+          assertSubmitButtonIsDisabled();
+          const succeededUnloadEvent = new Event('beforeunload', { cancelable: true });
+          expect(fireEvent(window, succeededUnloadEvent)).toBe(true);
+        });
+      } else {
+        // form submit prevented
+        const frm = form;
+        await waitFor(() => {
+          assertSubmitButtonIsEnabled();
+          expect(frm.querySelectorAll('.help-block')).toHaveLength(tagErrorCounts[i]);
+        });
+      }
+    }
+  });
 });
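The submitForm helper added above relies on the return value of fireEvent: it forwards the result of dispatchEvent, which is false when some listener called preventDefault. Assuming fireEvent comes from @testing-library/dom (the import is not shown in this hunk), a tiny standalone illustration:

import { fireEvent } from '@testing-library/dom';

// false here because a listener cancelled the event; the test above treats that
// as "submission was prevented by the upload code".
const form = document.createElement('form');
form.addEventListener('submit', e => e.preventDefault());
console.log(fireEvent.submit(form)); // false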
assets/js/autocomplete.js (deleted) | 296

@@ -1,296 +0,0 @@
/**
 * Autocomplete.
 */

import { LocalAutocompleter } from './utils/local-autocompleter';
import { handleError } from './utils/requests';
import { getTermContexts } from './match_query';
import store from './utils/store';

const cache = {};
/** @type {HTMLInputElement} */
let inputField,
  /** @type {string} */
  originalTerm,
  /** @type {string} */
  originalQuery,
  /** @type {TermContext} */
  selectedTerm;

function removeParent() {
  const parent = document.querySelector('.autocomplete');
  if (parent) parent.parentNode.removeChild(parent);
}

function removeSelected() {
  const selected = document.querySelector('.autocomplete__item--selected');
  if (selected) selected.classList.remove('autocomplete__item--selected');
}

function isSearchField() {
  return inputField && inputField.dataset.acMode === 'search';
}

function restoreOriginalValue() {
  inputField.value = isSearchField() ? originalQuery : originalTerm;
}

function applySelectedValue(selection) {
  if (!isSearchField()) {
    inputField.value = selection;
    return;
  }

  if (!selectedTerm) {
    return;
  }

  const [startIndex, endIndex] = selectedTerm[0];
  inputField.value = originalQuery.slice(0, startIndex) + selection + originalQuery.slice(endIndex);
  inputField.setSelectionRange(startIndex + selection.length, startIndex + selection.length);
  inputField.focus();
}

function changeSelected(firstOrLast, current, sibling) {
  if (current && sibling) {
    // if the currently selected item has a sibling, move selection to it
    current.classList.remove('autocomplete__item--selected');
    sibling.classList.add('autocomplete__item--selected');
  } else if (current) {
    // if the next keypress will take the user outside the list, restore the unautocompleted term
    restoreOriginalValue();
    removeSelected();
  } else if (firstOrLast) {
    // if no item in the list is selected, select the first or last
    firstOrLast.classList.add('autocomplete__item--selected');
  }
}

function isSelectionOutsideCurrentTerm() {
  const selectionIndex = Math.min(inputField.selectionStart, inputField.selectionEnd);
  const [startIndex, endIndex] = selectedTerm[0];

  return startIndex > selectionIndex || endIndex < selectionIndex;
}

function keydownHandler(event) {
  const selected = document.querySelector('.autocomplete__item--selected'),
    firstItem = document.querySelector('.autocomplete__item:first-of-type'),
    lastItem = document.querySelector('.autocomplete__item:last-of-type');

  if (isSearchField()) {
    // Prevent submission of the search field when Enter was hit
    if (selected && event.keyCode === 13) event.preventDefault(); // Enter

    // Close autocompletion popup when text cursor is outside current tag
    if (selectedTerm && firstItem && (event.keyCode === 37 || event.keyCode === 39)) {
      // ArrowLeft || ArrowRight
      requestAnimationFrame(() => {
        if (isSelectionOutsideCurrentTerm()) removeParent();
      });
    }
  }

  if (event.keyCode === 38) changeSelected(lastItem, selected, selected && selected.previousSibling); // ArrowUp
  if (event.keyCode === 40) changeSelected(firstItem, selected, selected && selected.nextSibling); // ArrowDown
  if (event.keyCode === 13 || event.keyCode === 27 || event.keyCode === 188) removeParent(); // Enter || Esc || Comma
  if (event.keyCode === 38 || event.keyCode === 40) {
    // ArrowUp || ArrowDown
    const newSelected = document.querySelector('.autocomplete__item--selected');
    if (newSelected) applySelectedValue(newSelected.dataset.value);
    event.preventDefault();
  }
}

function createItem(list, suggestion) {
  const item = document.createElement('li');
  item.className = 'autocomplete__item';

  item.textContent = suggestion.label;
  item.dataset.value = suggestion.value;

  item.addEventListener('mouseover', () => {
    removeSelected();
    item.classList.add('autocomplete__item--selected');
  });

  item.addEventListener('mouseout', () => {
    removeSelected();
  });

  item.addEventListener('click', () => {
    applySelectedValue(item.dataset.value);
    inputField.dispatchEvent(
      new CustomEvent('autocomplete', {
        detail: {
          type: 'click',
          label: suggestion.label,
          value: suggestion.value,
        },
      }),
    );
  });

  list.appendChild(item);
}

function createList(suggestions) {
  const parent = document.querySelector('.autocomplete'),
    list = document.createElement('ul');
  list.className = 'autocomplete__list';

  suggestions.forEach(suggestion => createItem(list, suggestion));

  parent.appendChild(list);
}

function createParent() {
  const parent = document.createElement('div');
  parent.className = 'autocomplete';

  // Position the parent below the inputfield
  parent.style.position = 'absolute';
  parent.style.left = `${inputField.offsetLeft}px`;
  // Take the inputfield offset, add its height and subtract the amount by which the parent element has scrolled
  parent.style.top = `${inputField.offsetTop + inputField.offsetHeight - inputField.parentNode.scrollTop}px`;

  // We append the parent at the end of body
  document.body.appendChild(parent);
}

function showAutocomplete(suggestions, fetchedTerm, targetInput) {
  // Remove old autocomplete suggestions
  removeParent();

  // Save suggestions in cache
  cache[fetchedTerm] = suggestions;

  // If the input target is not empty, still visible, and suggestions were found
  if (targetInput.value && targetInput.style.display !== 'none' && suggestions.length) {
    createParent();
    createList(suggestions);
    inputField.addEventListener('keydown', keydownHandler);
  }
}

function getSuggestions(term) {
  // In case source URL was not given at all, do not try sending the request.
  if (!inputField.dataset.acSource) return [];
  return fetch(`${inputField.dataset.acSource}${term}`).then(response => response.json());
}

function getSelectedTerm() {
  if (!inputField || !originalQuery) {
    return null;
  }

  const selectionIndex = Math.min(inputField.selectionStart, inputField.selectionEnd);
  const terms = getTermContexts(originalQuery);

  return terms.find(([range]) => range[0] < selectionIndex && range[1] >= selectionIndex);
}

function toggleSearchAutocomplete() {
  const enable = store.get('enable_search_ac');

  for (const searchField of document.querySelectorAll('input[data-ac-mode=search]')) {
    if (enable) {
      searchField.autocomplete = 'off';
    } else {
      searchField.removeAttribute('data-ac');
      searchField.autocomplete = 'on';
    }
  }
}

function listenAutocomplete() {
  let timeout;

  /** @type {LocalAutocompleter} */
  let localAc = null;
  let localFetched = false;

  document.addEventListener('focusin', fetchLocalAutocomplete);

  document.addEventListener('input', event => {
    removeParent();
    fetchLocalAutocomplete(event);
    window.clearTimeout(timeout);

    if (localAc !== null && 'ac' in event.target.dataset) {
      inputField = event.target;
      let suggestionsCount = 5;

      if (isSearchField()) {
        originalQuery = inputField.value;
        selectedTerm = getSelectedTerm();
        suggestionsCount = 10;

        // We don't need to run auto-completion if user is not selecting tag at all
        if (!selectedTerm) {
          return;
        }

        originalTerm = selectedTerm[1].toLowerCase();
      } else {
        originalTerm = `${inputField.value}`.toLowerCase();
      }

      const suggestions = localAc
        .topK(originalTerm, suggestionsCount)
        .map(({ name, imageCount }) => ({ label: `${name} (${imageCount})`, value: name }));

      if (suggestions.length) {
        return showAutocomplete(suggestions, originalTerm, event.target);
      }
    }

    // Use a timeout to delay requests until the user has stopped typing
    timeout = window.setTimeout(() => {
      inputField = event.target;
      originalTerm = inputField.value;

      const fetchedTerm = inputField.value;
      const { ac, acMinLength, acSource } = inputField.dataset;

      if (ac && acSource && fetchedTerm.length >= acMinLength) {
        if (cache[fetchedTerm]) {
          showAutocomplete(cache[fetchedTerm], fetchedTerm, event.target);
        } else {
          // inputField could get overwritten while the suggestions are being fetched - use event.target
          getSuggestions(fetchedTerm).then(suggestions => {
            if (fetchedTerm === event.target.value) {
              showAutocomplete(suggestions, fetchedTerm, event.target);
            }
          });
        }
      }
    }, 300);
  });

  // If there's a click outside the inputField, remove autocomplete
  document.addEventListener('click', event => {
    if (event.target && event.target !== inputField) removeParent();
    if (event.target === inputField && isSearchField() && isSelectionOutsideCurrentTerm()) removeParent();
  });

  function fetchLocalAutocomplete(event) {
    if (!localFetched && event.target.dataset && 'ac' in event.target.dataset) {
      const now = new Date();
      const cacheKey = `${now.getUTCFullYear()}-${now.getUTCMonth()}-${now.getUTCDate()}`;

      localFetched = true;

      fetch(`/autocomplete/compiled?vsn=2&key=${cacheKey}`, { credentials: 'omit', cache: 'force-cache' })
        .then(handleError)
        .then(resp => resp.arrayBuffer())
        .then(buf => {
          localAc = new LocalAutocompleter(buf);
        });
    }
  }

  toggleSearchAutocomplete();
}

export { listenAutocomplete };
assets/js/autocomplete.ts (new file) | 230

@@ -0,0 +1,230 @@
/**
 * Autocomplete.
 */

import { LocalAutocompleter } from './utils/local-autocompleter';
import { getTermContexts } from './match_query';
import store from './utils/store';
import { TermContext } from './query/lex';
import { $$ } from './utils/dom';
import { fetchLocalAutocomplete, fetchSuggestions, SuggestionsPopup, TermSuggestion } from './utils/suggestions';

let inputField: HTMLInputElement | null = null,
  originalTerm: string | undefined,
  originalQuery: string | undefined,
  selectedTerm: TermContext | null = null;

const popup = new SuggestionsPopup();

function isSearchField(targetInput: HTMLElement): boolean {
  return targetInput && targetInput.dataset.acMode === 'search';
}

function restoreOriginalValue() {
  if (!inputField) return;

  if (isSearchField(inputField) && originalQuery) {
    inputField.value = originalQuery;
  }

  if (originalTerm) {
    inputField.value = originalTerm;
  }
}

function applySelectedValue(selection: string) {
  if (!inputField) return;

  if (!isSearchField(inputField)) {
    inputField.value = selection;
    return;
  }

  if (selectedTerm && originalQuery) {
    const [startIndex, endIndex] = selectedTerm[0];
    inputField.value = originalQuery.slice(0, startIndex) + selection + originalQuery.slice(endIndex);
    inputField.setSelectionRange(startIndex + selection.length, startIndex + selection.length);
    inputField.focus();
  }
}

function isSelectionOutsideCurrentTerm(): boolean {
  if (!inputField || !selectedTerm) return true;
  if (inputField.selectionStart === null || inputField.selectionEnd === null) return true;

  const selectionIndex = Math.min(inputField.selectionStart, inputField.selectionEnd);
  const [startIndex, endIndex] = selectedTerm[0];

  return startIndex > selectionIndex || endIndex < selectionIndex;
}

function keydownHandler(event: KeyboardEvent) {
  if (inputField !== event.currentTarget) return;

  if (inputField && isSearchField(inputField)) {
    // Prevent submission of the search field when Enter was hit
    if (popup.selectedTerm && event.keyCode === 13) event.preventDefault(); // Enter

    // Close autocompletion popup when text cursor is outside current tag
    if (selectedTerm && (event.keyCode === 37 || event.keyCode === 39)) {
      // ArrowLeft || ArrowRight
      requestAnimationFrame(() => {
        if (isSelectionOutsideCurrentTerm()) popup.hide();
      });
    }
  }

  if (!popup.isActive) return;

  if (event.keyCode === 38) popup.selectPrevious(); // ArrowUp
  if (event.keyCode === 40) popup.selectNext(); // ArrowDown
  if (event.keyCode === 13 || event.keyCode === 27 || event.keyCode === 188) popup.hide(); // Enter || Esc || Comma
  if (event.keyCode === 38 || event.keyCode === 40) {
    // ArrowUp || ArrowDown
    if (popup.selectedTerm) {
      applySelectedValue(popup.selectedTerm);
    } else {
      restoreOriginalValue();
    }

    event.preventDefault();
  }
}

function findSelectedTerm(targetInput: HTMLInputElement, searchQuery: string): TermContext | null {
  if (targetInput.selectionStart === null || targetInput.selectionEnd === null) return null;

  const selectionIndex = Math.min(targetInput.selectionStart, targetInput.selectionEnd);
  const terms = getTermContexts(searchQuery);

  return terms.find(([range]) => range[0] < selectionIndex && range[1] >= selectionIndex) ?? null;
}

function toggleSearchAutocomplete() {
  const enable = store.get('enable_search_ac');

  for (const searchField of $$<HTMLInputElement>('input[data-ac-mode=search]')) {
    if (enable) {
      searchField.autocomplete = 'off';
    } else {
      searchField.removeAttribute('data-ac');
      searchField.autocomplete = 'on';
    }
  }
}

function listenAutocomplete() {
  let serverSideSuggestionsTimeout: number | undefined;

  let localAc: LocalAutocompleter | null = null;
  let isLocalLoading = false;

  document.addEventListener('focusin', loadAutocompleteFromEvent);

  document.addEventListener('input', event => {
    popup.hide();
    loadAutocompleteFromEvent(event);
    window.clearTimeout(serverSideSuggestionsTimeout);

    if (!(event.target instanceof HTMLInputElement)) return;

    const targetedInput = event.target;

    if (!targetedInput.dataset.ac) return;

    targetedInput.addEventListener('keydown', keydownHandler);

    if (localAc !== null) {
      inputField = targetedInput;
      let suggestionsCount = 5;

      if (isSearchField(inputField)) {
        originalQuery = inputField.value;
        selectedTerm = findSelectedTerm(inputField, originalQuery);
        suggestionsCount = 10;

        // We don't need to run auto-completion if user is not selecting tag at all
        if (!selectedTerm) {
          return;
        }

        originalTerm = selectedTerm[1].toLowerCase();
      } else {
        originalTerm = `${inputField.value}`.toLowerCase();
      }

      const suggestions = localAc
        .matchPrefix(originalTerm)
        .topK(suggestionsCount)
        .map(({ name, imageCount }) => ({ label: `${name} (${imageCount})`, value: name }));

      if (suggestions.length) {
        popup.renderSuggestions(suggestions).showForField(targetedInput);
        return;
      }
    }

    const { acMinLength: minTermLength, acSource: endpointUrl } = targetedInput.dataset;

    if (!endpointUrl) return;

    // Use a timeout to delay requests until the user has stopped typing
    serverSideSuggestionsTimeout = window.setTimeout(() => {
      inputField = targetedInput;
      originalTerm = inputField.value;

      const fetchedTerm = inputField.value;

      if (minTermLength && fetchedTerm.length < parseInt(minTermLength, 10)) return;

      fetchSuggestions(endpointUrl, fetchedTerm).then(suggestions => {
        // inputField could get overwritten while the suggestions are being fetched - use previously targeted input
        if (fetchedTerm === targetedInput.value) {
          popup.renderSuggestions(suggestions).showForField(targetedInput);
        }
      });
    }, 300);
  });

  // If there's a click outside the inputField, remove autocomplete
  document.addEventListener('click', event => {
    if (event.target && event.target !== inputField) popup.hide();
    if (inputField && event.target === inputField && isSearchField(inputField) && isSelectionOutsideCurrentTerm()) {
      popup.hide();
    }
  });

  function loadAutocompleteFromEvent(event: Event) {
    if (!(event.target instanceof HTMLInputElement)) return;

    if (!isLocalLoading && event.target.dataset.ac) {
      isLocalLoading = true;

      fetchLocalAutocomplete().then(autocomplete => {
        localAc = autocomplete;
      });
    }
  }

  toggleSearchAutocomplete();

  popup.onItemSelected((event: CustomEvent<TermSuggestion>) => {
    if (!event.detail || !inputField) return;

    const originalSuggestion = event.detail;
    applySelectedValue(originalSuggestion.value);

    inputField.dispatchEvent(
      new CustomEvent('autocomplete', {
        detail: Object.assign(
          {
            type: 'click',
          },
          originalSuggestion,
        ),
      }),
    );
  });
}

export { listenAutocomplete };
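A minimal usage sketch for the rewritten module (illustrative only; it assumes the page already contains inputs carrying the data-ac, data-ac-min-length, data-ac-source, and data-ac-mode attributes that the code above reads from element.dataset):

import { listenAutocomplete } from './autocomplete';

// Installs the document-level focusin/input/click listeners once; individual
// inputs opt in through their data-ac* attributes.
listenAutocomplete();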
@@ -9,47 +9,25 @@ import { fetchHtml, handleError } from './utils/requests';
 import { showBlock } from './utils/image';
 import { addTag } from './tagsinput';
 
+/* eslint-disable prettier/prettier */
+
 // Event types and any qualifying conditions - return true to not run action
 const types = {
-  click(event) {
-    return event.button !== 0; /* Left-click only */
-  },
-
-  change() {
-    /* No qualifier */
-  },
-
-  fetchcomplete() {
-    /* No qualifier */
-  },
+  click(event) { return event.button !== 0; /* Left-click only */ },
+  change() { /* No qualifier */ },
+  fetchcomplete() { /* No qualifier */ },
 };
 
 const actions = {
-  hide(data) {
-    selectorCb(data.base, data.value, el => el.classList.add('hidden'));
-  },
-
-  tabHide(data) {
-    selectorCbChildren(data.base, data.value, el => el.classList.add('hidden'));
-  },
-
-  show(data) {
-    selectorCb(data.base, data.value, el => el.classList.remove('hidden'));
-  },
-
-  toggle(data) {
-    selectorCb(data.base, data.value, el => el.classList.toggle('hidden'));
-  },
-
-  submit(data) {
-    selectorCb(data.base, data.value, el => el.submit());
-  },
-
-  disable(data) {
-    selectorCb(data.base, data.value, el => {
-      el.disabled = true;
-    });
-  },
+  hide(data) { selectorCb(data.base, data.value, el => el.classList.add('hidden')); },
+  show(data) { selectorCb(data.base, data.value, el => el.classList.remove('hidden')); },
+  toggle(data) { selectorCb(data.base, data.value, el => el.classList.toggle('hidden')); },
+  submit(data) { selectorCb(data.base, data.value, el => el.submit()); },
+  disable(data) { selectorCb(data.base, data.value, el => el.disabled = true); },
+  focus(data) { document.querySelector(data.value).focus(); },
+  unfilter(data) { showBlock(data.el.closest('.image-show-container')); },
+  tabHide(data) { selectorCbChildren(data.base, data.value, el => el.classList.add('hidden')); },
+  preventdefault() { /* The existence of this entry is enough */ },
 
   copy(data) {
     document.querySelector(data.value).select();

@@ -70,18 +48,17 @@ const actions = {
     });
   },
 
-  focus(data) {
-    document.querySelector(data.value).focus();
-  },
-
-  preventdefault() {
-    /* The existence of this entry is enough */
-  },
-
   addtag(data) {
     addTag(document.querySelector(data.el.closest('[data-target]').dataset.target), data.el.dataset.tagName);
   },
 
+  hideParent(data) {
+    const base = data.el.closest(data.value);
+    if (base) {
+      base.classList.add('hidden');
+    }
+  },
+
   tab(data) {
     const block = data.el.parentNode.parentNode,
       newTab = $(`.block__tab[data-tab="${data.value}"]`),

@@ -114,12 +91,10 @@ const actions = {
       });
     }
   },
 
-  unfilter(data) {
-    showBlock(data.el.closest('.image-show-container'));
-  },
 };
 
+/* eslint-enable prettier/prettier */
+
 // Use this function to apply a callback to elements matching the selectors
 function selectorCb(base = document, selector, cb) {
   [].forEach.call(base.querySelectorAll(selector), cb);
@@ -22,9 +22,9 @@ export function setupGalleryEditing() {
 
   initDraggables();
 
-  $$<HTMLDivElement>('.media-box', containerEl).forEach(i => {
-    i.draggable = true;
-  });
+  for (const mediaBox of $$<HTMLDivElement>('.media-box', containerEl)) {
+    mediaBox.draggable = true;
+  }
 
   rearrangeEl.addEventListener('click', () => {
     sortableEl.classList.add('editing');

@@ -46,8 +46,8 @@ export function setupGalleryEditing() {
 
     fetchJson('PATCH', reorderPath, {
       image_ids: newImages,
-      // copy the array again so that we have the newly updated set
     }).then(() => {
+      // copy the array again so that we have the newly updated set
      oldImages = newImages.slice();
     });
   });
@@ -95,9 +95,7 @@ function showHidden(imageId) {
 
 function resetVoted(imageId) {
   uncacheStatus(imageId, 'voted');
-
   onImage(imageId, '.interaction--upvote', el => el.classList.remove('active'));
-
   onImage(imageId, '.interaction--downvote', el => el.classList.remove('active'));
 }
 
@@ -4,22 +4,40 @@
 
 import { $, $$ } from './utils/dom';
 
-const markdownSyntax = {
+// List of options provided to the syntax handler function.
+interface SyntaxHandlerOptions {
+  prefix: string;
+  shortcutKeyCode: number;
+  suffix: string;
+  prefixMultiline: string;
+  suffixMultiline: string;
+  singleWrap: boolean;
+  escapeChar: string;
+  image: boolean;
+  text: string;
+}
+
+interface SyntaxHandler {
+  action: (textarea: HTMLTextAreaElement, options: Partial<SyntaxHandlerOptions>) => void;
+  options: Partial<SyntaxHandlerOptions>;
+}
+
+const markdownSyntax: Record<string, SyntaxHandler> = {
   bold: {
     action: wrapSelection,
-    options: { prefix: '**', shortcutKey: 'b' },
+    options: { prefix: '**', shortcutKeyCode: 66 },
   },
   italics: {
     action: wrapSelection,
-    options: { prefix: '*', shortcutKey: 'i' },
+    options: { prefix: '*', shortcutKeyCode: 73 },
   },
   under: {
     action: wrapSelection,
-    options: { prefix: '__', shortcutKey: 'u' },
+    options: { prefix: '__', shortcutKeyCode: 85 },
   },
   spoiler: {
     action: wrapSelection,
-    options: { prefix: '||', shortcutKey: 's' },
+    options: { prefix: '||', shortcutKeyCode: 83 },
   },
   code: {
     action: wrapSelectionOrLines,

@@ -29,7 +47,7 @@ const markdownSyntax = {
       prefixMultiline: '```\n',
       suffixMultiline: '\n```',
       singleWrap: true,
-      shortcutKey: 'e',
+      shortcutKeyCode: 69,
     },
   },
   strike: {

@@ -50,11 +68,11 @@ const markdownSyntax = {
   },
   link: {
     action: insertLink,
-    options: { shortcutKey: 'l' },
+    options: { shortcutKeyCode: 76 },
   },
   image: {
     action: insertLink,
-    options: { image: true, shortcutKey: 'k' },
+    options: { image: true, shortcutKeyCode: 75 },
   },
   escape: {
     action: escapeSelection,

@@ -62,14 +80,22 @@ const markdownSyntax = {
   },
 };
 
-function getSelections(textarea, linesOnly = false) {
+interface SelectionResult {
+  processLinesOnly: boolean;
+  selectedText: string;
+  beforeSelection: string;
+  afterSelection: string;
+}
+
+function getSelections(textarea: HTMLTextAreaElement, linesOnly: RegExp | boolean = false): SelectionResult {
   let { selectionStart, selectionEnd } = textarea,
     selection = textarea.value.substring(selectionStart, selectionEnd),
     leadingSpace = '',
     trailingSpace = '',
-    caret;
+    caret: number;
 
   const processLinesOnly = linesOnly instanceof RegExp ? linesOnly.test(selection) : linesOnly;
 
   if (processLinesOnly) {
     const explorer = /\n/g;
     let startNewlineIndex = 0,

@@ -119,7 +145,18 @@ function getSelections(textarea, linesOnly = false) {
   };
 }
 
-function transformSelection(textarea, transformer, eachLine) {
+interface TransformResult {
+  newText: string;
+  caretOffset: number;
+}
+
+type TransformCallback = (selectedText: string, processLinesOnly: boolean) => TransformResult;
+
+function transformSelection(
+  textarea: HTMLTextAreaElement,
+  transformer: TransformCallback,
+  eachLine: RegExp | boolean = false,
+) {
   const { selectedText, beforeSelection, afterSelection, processLinesOnly } = getSelections(textarea, eachLine),
     // For long comments, record scrollbar position to restore it later
     { scrollTop } = textarea;

@@ -140,7 +177,7 @@ function transformSelection(textarea, transformer, eachLine) {
   textarea.dispatchEvent(new Event('change'));
 }
 
-function insertLink(textarea, options) {
+function insertLink(textarea: HTMLTextAreaElement, options: Partial<SyntaxHandlerOptions>) {
   let hyperlink = window.prompt(options.image ? 'Image link:' : 'Link:');
   if (!hyperlink || hyperlink === '') return;
 

@@ -155,10 +192,11 @@ function insertLink(textarea, options) {
   wrapSelection(textarea, { prefix, suffix });
 }
 
-function wrapSelection(textarea, options) {
-  transformSelection(textarea, selectedText => {
+function wrapSelection(textarea: HTMLTextAreaElement, options: Partial<SyntaxHandlerOptions>) {
+  transformSelection(textarea, (selectedText: string): TransformResult => {
     const { text = selectedText, prefix = '', suffix = options.prefix } = options,
       emptyText = text === '';
 
     let newText = text;
 
     if (!emptyText) {

@@ -176,10 +214,14 @@ function wrapSelection(textarea, options) {
   });
 }
 
-function wrapLines(textarea, options, eachLine = true) {
+function wrapLines(
+  textarea: HTMLTextAreaElement,
+  options: Partial<SyntaxHandlerOptions>,
+  eachLine: RegExp | boolean = true,
+) {
   transformSelection(
     textarea,
-    (selectedText, processLinesOnly) => {
+    (selectedText: string, processLinesOnly: boolean): TransformResult => {
       const { text = selectedText, singleWrap = false } = options,
         prefix = (processLinesOnly && options.prefixMultiline) || options.prefix || '',
         suffix = (processLinesOnly && options.suffixMultiline) || options.suffix || '',

@@ -200,16 +242,22 @@ function wrapLines(textarea, options, eachLine = true) {
   );
 }
 
-function wrapSelectionOrLines(textarea, options) {
+function wrapSelectionOrLines(textarea: HTMLTextAreaElement, options: Partial<SyntaxHandlerOptions>) {
   wrapLines(textarea, options, /\n/);
 }
 
-function escapeSelection(textarea, options) {
-  transformSelection(textarea, selectedText => {
+function escapeSelection(textarea: HTMLTextAreaElement, options: Partial<SyntaxHandlerOptions>) {
+  transformSelection(textarea, (selectedText: string): TransformResult => {
     const { text = selectedText } = options,
       emptyText = text === '';
 
-    if (emptyText) return;
+    // Nothing to escape, so do nothing
+    if (emptyText) {
+      return {
+        newText: text,
+        caretOffset: text.length,
+      };
+    }
 
     const newText = text.replace(/([*_[\]()^`%\\~<>#|])/g, '\\$1');
 

@@ -220,34 +268,55 @@ function escapeSelection(textarea, options) {
   });
 }
 
-function clickHandler(event) {
-  const button = event.target.closest('.communication__toolbar__button');
-  if (!button) return;
-  const toolbar = button.closest('.communication__toolbar'),
-    // There may be multiple toolbars present on the page,
-    // in the case of image pages with description edit active
-    // we target the textarea that shares the same parent as the toolbar
-    textarea = $('.js-toolbar-input', toolbar.parentNode),
+function clickHandler(event: MouseEvent) {
+  if (!(event.target instanceof HTMLElement)) return;
+
+  const button = event.target.closest<HTMLElement>('.communication__toolbar__button');
+  const toolbar = button?.closest<HTMLElement>('.communication__toolbar');
+
+  if (!button || !toolbar?.parentElement) return;
+
+  // There may be multiple toolbars present on the page,
+  // in the case of image pages with description edit active
+  // we target the textarea that shares the same parent as the toolbar
+  const textarea = $<HTMLTextAreaElement>('.js-toolbar-input', toolbar.parentElement),
     id = button.dataset.syntaxId;
 
+  if (!textarea || !id) return;
+
   markdownSyntax[id].action(textarea, markdownSyntax[id].options);
   textarea.focus();
 }
 
-function shortcutHandler(event) {
-  if (
-    !event.ctrlKey ||
-    (window.navigator.platform === 'MacIntel' && !event.metaKey) ||
-    event.shiftKey ||
-    event.altKey
-  ) {
+function canAcceptShortcut(event: KeyboardEvent): boolean {
+  let ctrl: boolean, otherModifier: boolean;
+
+  switch (window.navigator.platform) {
+    case 'MacIntel':
+      ctrl = event.metaKey;
+      otherModifier = event.ctrlKey || event.shiftKey || event.altKey;
+      break;
+    default:
+      ctrl = event.ctrlKey;
+      otherModifier = event.metaKey || event.shiftKey || event.altKey;
+      break;
+  }
+
+  return ctrl && !otherModifier;
+}
+
+function shortcutHandler(event: KeyboardEvent) {
+  if (!canAcceptShortcut(event)) {
     return;
   }
 
   const textarea = event.target,
-    key = event.key.toLowerCase();
+    keyCode = event.keyCode;
+
+  if (!(textarea instanceof HTMLTextAreaElement)) return;
 
   for (const id in markdownSyntax) {
-    if (key === markdownSyntax[id].options.shortcutKey) {
+    if (keyCode === markdownSyntax[id].options.shortcutKeyCode) {
       markdownSyntax[id].action(textarea, markdownSyntax[id].options);
       event.preventDefault();
     }

@@ -255,10 +324,10 @@ function shortcutHandler(event) {
   }
 }
 
 function setupToolbar() {
-  $$('.communication__toolbar').forEach(toolbar => {
+  $$<HTMLElement>('.communication__toolbar').forEach(toolbar => {
     toolbar.addEventListener('click', clickHandler);
   });
-  $$('.js-toolbar-input').forEach(textarea => {
+  $$<HTMLTextAreaElement>('.js-toolbar-input').forEach(textarea => {
     textarea.addEventListener('keydown', shortcutHandler);
   });
 }
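A small self-contained sketch of the lookup pattern introduced above, where toolbar shortcuts are now matched on KeyboardEvent.keyCode values rather than key names (the table below repeats a few entries from the hunk purely for illustration):

// Illustrative only: resolve a pressed keyCode to a syntax entry name.
const shortcutKeyCodes: Record<string, number> = { bold: 66, italics: 73, under: 85, spoiler: 83 };

function idForKeyCode(keyCode: number): string | undefined {
  return Object.keys(shortcutKeyCodes).find(id => shortcutKeyCodes[id] === keyCode);
}

console.log(idForKeyCode(66)); // 'bold', i.e. Ctrl+B (Cmd+B on macOS per canAcceptShortcut)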
@@ -8,8 +8,8 @@ import { delegate } from './utils/events';
 import { assertNotNull, assertNotUndefined } from './utils/assert';
 import store from './utils/store';
 
-const NOTIFICATION_INTERVAL = 600000,
-  NOTIFICATION_EXPIRES = 300000;
+const NOTIFICATION_INTERVAL = 600000;
+const NOTIFICATION_EXPIRES = 300000;
 
 function bindSubscriptionLinks() {
   delegate(document, 'fetchcomplete', {
@@ -18,7 +18,7 @@ export function warnAboutPMs() {
 
     if (value.match(imageEmbedRegex)) {
       showEl(warning);
-    } else if (!warning.classList.contains('hidden')) {
+    } else {
       hideEl(warning);
     }
   });
@@ -57,8 +57,22 @@ function makeRelativeDateMatcher(dateVal: string, qual: RangeEqualQualifier): FieldMatcher {
   return makeMatcher(bottomDate, topDate, qual);
 }
 
+const parseRes: RegExp[] = [
+  // year
+  /^(\d{4})/,
+  // month
+  /^-(\d{2})/,
+  // day
+  /^-(\d{2})/,
+  // hour
+  /^(?:\s+|T|t)(\d{2})/,
+  // minute
+  /^:(\d{2})/,
+  // second
+  /^:(\d{2})/,
+];
+
 function makeAbsoluteDateMatcher(dateVal: string, qual: RangeEqualQualifier): FieldMatcher {
-  const parseRes: RegExp[] = [/^(\d{4})/, /^-(\d{2})/, /^-(\d{2})/, /^(?:\s+|T|t)(\d{2})/, /^:(\d{2})/, /^:(\d{2})/];
   const timeZoneOffset: TimeZoneOffset = [0, 0];
   const timeData: AbsoluteDate = [0, 0, 1, 0, 0, 0];
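The hoisted parseRes list is consumed one anchored pattern at a time, each match chopping its prefix off the input; the driver loop is not part of this hunk, so the sketch below is only an illustration of that consumption order:

// Sketch: '2024-03-01T12:30' yields [2024, 3, 1, 12, 30] and stops when the
// next anchored pattern (seconds) no longer matches the remaining input.
function parseComponents(input: string, patterns: RegExp[]): number[] {
  const out: number[] = [];
  let rest = input;
  for (const re of patterns) {
    const match = re.exec(rest);
    if (!match) break;
    out.push(parseInt(match[1], 10));
    rest = rest.slice(match[0].length);
  }
  return out;
}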
@@ -32,8 +32,8 @@ export interface LexResult {
 }
 
 export function generateLexResult(searchStr: string, parseTerm: ParseTerm): LexResult {
-  const opQueue: string[] = [],
-    groupNegate: boolean[] = [];
+  const opQueue: string[] = [];
+  const groupNegate: boolean[] = [];
 
   let searchTerm: string | null = null;
   let boostFuzzStr = '';

@@ -85,11 +85,10 @@ export function generateLexResult(searchStr: string, parseTerm: ParseTerm): LexResult {
   }
 
   const token = match[0];
+  const tokenIsBinaryOp = ['and_op', 'or_op'].indexOf(tokenName) !== -1;
+  const tokenIsGroupStart = tokenName === 'rparen' && lparenCtr === 0;
 
-  if (
-    searchTerm !== null &&
-    (['and_op', 'or_op'].indexOf(tokenName) !== -1 || (tokenName === 'rparen' && lparenCtr === 0))
-  ) {
+  if (searchTerm !== null && (tokenIsBinaryOp || tokenIsGroupStart)) {
     endTerm();
   }
 
@@ -22,15 +22,15 @@ function makeWildcardMatcher(term: string): FieldMatcher {
   // Transforms wildcard match into regular expression.
   // A custom NFA with caching may be more sophisticated but not
   // likely to be faster.
-  const wildcard = new RegExp(
-    `^${term
-      .replace(/([.+^$[\]\\(){}|-])/g, '\\$1')
-      .replace(/([^\\]|[^\\](?:\\\\)+)\*/g, '$1.*')
-      .replace(/^(?:\\\\)*\*/g, '.*')
-      .replace(/([^\\]|[^\\](?:\\\\)+)\?/g, '$1.?')
-      .replace(/^(?:\\\\)*\?/g, '.?')}$`,
-    'i',
-  );
+  const regexpForm = term
+    .replace(/([.+^$[\]\\(){}|-])/g, '\\$1')
+    .replace(/([^\\]|[^\\](?:\\\\)+)\*/g, '$1.*')
+    .replace(/^(?:\\\\)*\*/g, '.*')
+    .replace(/([^\\]|[^\\](?:\\\\)+)\?/g, '$1.?')
+    .replace(/^(?:\\\\)*\?/g, '.?');
+  const wildcard = new RegExp(`^${regexpForm}$`, 'i');
 
   return (v, name) => {
     const values = extractValues(v, name);
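For a concrete sense of what the extracted regexpForm step produces, a worked example using hypothetical tag terms (not taken from the change itself):

// 'fluttersh*' -> 'fluttersh.*' -> /^fluttersh.*$/i,  'rain?bow' -> 'rain.?bow' -> /^rain.?bow$/i
const regexpForm = 'fluttersh*'
  .replace(/([.+^$[\]\\(){}|-])/g, '\\$1')
  .replace(/([^\\]|[^\\](?:\\\\)+)\*/g, '$1.*')
  .replace(/^(?:\\\\)*\*/g, '.*')
  .replace(/([^\\]|[^\\](?:\\\\)+)\?/g, '$1.?')
  .replace(/^(?:\\\\)*\?/g, '.?');
const wildcard = new RegExp(`^${regexpForm}$`, 'i');
console.log(wildcard.test('Fluttershy')); // true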
@@ -74,9 +74,9 @@ function submit() {
 
 function modifyImageQueue(mediaBox) {
   if (currentTags()) {
-    const imageId = mediaBox.dataset.imageId,
-      queue = currentQueue(),
-      isSelected = queue.includes(imageId);
+    const imageId = mediaBox.dataset.imageId;
+    const queue = currentQueue();
+    const isSelected = queue.includes(imageId);
 
     isSelected ? queue.splice(queue.indexOf(imageId), 1) : queue.push(imageId);
 
@@ -4,7 +4,7 @@
 
 import { $ } from './utils/dom';
 
-type ShortcutKeyMap = Record<string, () => void>;
+type ShortcutKeyMap = Record<number, () => void>;
 
 function getHover(): string | null {
   const thumbBoxHover = $<HTMLDivElement>('.media-box:hover');

@@ -48,30 +48,32 @@ function isOK(event: KeyboardEvent): boolean {
 }
 
 /* eslint-disable prettier/prettier */
 const keyCodes: ShortcutKeyMap = {
-  j() { click('.js-prev'); }, // J - go to previous image
-  i() { click('.js-up'); }, // I - go to index page
-  k() { click('.js-next'); }, // K - go to next image
-  r() { click('.js-rand'); }, // R - go to random image
-  s() { click('.js-source-link'); }, // S - go to image source
-  l() { click('.js-tag-sauce-toggle'); }, // L - edit tags
-  o() { openFullView(); }, // O - open original
-  v() { openFullViewNewTab(); }, // V - open original in a new tab
-  f() {
+  74() { click('.js-prev'); }, // J - go to previous image
+  73() { click('.js-up'); }, // I - go to index page
+  75() { click('.js-next'); }, // K - go to next image
+  82() { click('.js-rand'); }, // R - go to random image
+  83() { click('.js-source-link'); }, // S - go to image source
+  76() { click('.js-tag-sauce-toggle'); }, // L - edit tags
+  79() { openFullView(); }, // O - open original
+  86() { openFullViewNewTab(); }, // V - open original in a new tab
+  70() {
     // F - favourite image
     click(getHover() ? `a.interaction--fave[data-image-id="${getHover()}"]` : '.block__header a.interaction--fave');
   },
-  u() {
+  85() {
     // U - upvote image
     click(getHover() ? `a.interaction--upvote[data-image-id="${getHover()}"]` : '.block__header a.interaction--upvote');
   },
 };
 
 /* eslint-enable prettier/prettier */
 
 export function listenForKeys() {
   document.addEventListener('keydown', (event: KeyboardEvent) => {
-    if (isOK(event) && keyCodes[event.key]) {
-      keyCodes[event.key]();
+    if (isOK(event) && keyCodes[event.keyCode]) {
+      keyCodes[event.keyCode]();
       event.preventDefault();
     }
   });
@@ -35,12 +35,12 @@ function setTimeAgo(el: HTMLTimeElement) {
   const date = new Date(datetime);
   const distMillis = distance(date);
 
-  const seconds = Math.abs(distMillis) / 1000,
-    minutes = seconds / 60,
-    hours = minutes / 60,
-    days = hours / 24,
-    months = days / 30,
-    years = days / 365;
+  const seconds = Math.abs(distMillis) / 1000;
+  const minutes = seconds / 60;
+  const hours = minutes / 60;
+  const days = hours / 24;
+  const months = days / 30;
+  const years = days / 365;
 
   const words =
     (seconds < 45 && substitute('seconds', seconds)) ||
@ -2,6 +2,7 @@
|
||||||
* Fetch and display preview images for various image upload forms.
|
* Fetch and display preview images for various image upload forms.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
import { assertNotNull } from './utils/assert';
|
||||||
import { fetchJson, handleError } from './utils/requests';
|
import { fetchJson, handleError } from './utils/requests';
|
||||||
import { $, $$, clearEl, hideEl, makeEl, showEl } from './utils/dom';
|
import { $, $$, clearEl, hideEl, makeEl, showEl } from './utils/dom';
|
||||||
import { addTag } from './tagsinput';
|
import { addTag } from './tagsinput';
|
||||||
|
@ -171,9 +172,98 @@ function setupImageUpload() {
|
||||||
window.removeEventListener('beforeunload', beforeUnload);
|
window.removeEventListener('beforeunload', beforeUnload);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function createTagError(message) {
|
||||||
|
const buttonAfter = $('#tagsinput-save');
|
||||||
|
const errorElement = makeEl('span', { className: 'help-block tag-error', innerText: message });
|
||||||
|
|
||||||
|
buttonAfter.insertAdjacentElement('beforebegin', errorElement);
|
||||||
|
}
|
||||||
|
|
||||||
|
function clearTagErrors() {
|
||||||
|
$$('.tag-error').forEach(el => el.remove());
|
||||||
|
}
|
||||||
|
|
||||||
|
const ratingsTags = ['safe', 'suggestive', 'questionable', 'explicit', 'semi-grimdark', 'grimdark', 'grotesque'];
|
||||||
|
|
||||||
|
// populate tag error helper bars as necessary
|
||||||
|
// return true if all checks pass
|
||||||
|
// return false if any check fails
|
||||||
|
function validateTags() {
|
||||||
|
const tagInput = $('textarea.js-taginput');
|
||||||
|
|
||||||
|
if (!tagInput) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
const tagsArr = tagInput.value.split(',').map(t => t.trim());
|
||||||
|
|
||||||
|
const errors = [];
|
||||||
|
|
||||||
|
let hasRating = false;
|
||||||
|
let hasSafe = false;
|
||||||
|
let hasOtherRating = false;
|
||||||
|
|
||||||
|
tagsArr.forEach(tag => {
|
||||||
|
if (ratingsTags.includes(tag)) {
|
||||||
|
hasRating = true;
|
||||||
|
if (tag === 'safe') {
|
||||||
|
hasSafe = true;
|
||||||
|
} else {
|
||||||
|
hasOtherRating = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!hasRating) {
|
||||||
|
errors.push('Tag input must contain at least one rating tag');
|
||||||
|
} else if (hasSafe && hasOtherRating) {
|
||||||
|
errors.push('Tag input may not contain any other rating if safe');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (tagsArr.length < 3) {
|
||||||
|
errors.push('Tag input must contain at least 3 tags');
|
||||||
|
}
|
||||||
|
|
||||||
|
errors.forEach(msg => createTagError(msg));
|
||||||
|
|
||||||
|
return errors.length === 0; // true: valid if no errors
|
||||||
|
}
|
||||||
|
|
||||||
|
function disableUploadButton() {
|
||||||
|
const submitButton = $('.button.input--separate-top');
|
||||||
|
if (submitButton !== null) {
|
||||||
|
submitButton.disabled = true;
|
||||||
|
submitButton.innerText = 'Please wait...';
|
||||||
|
}
|
||||||
|
|
||||||
|
// delay is needed because Safari stops the submit if the button is immediately disabled
|
||||||
|
requestAnimationFrame(() => submitButton.setAttribute('disabled', 'disabled'));
|
||||||
|
}
|
||||||
|
|
||||||
|
function submitHandler(event) {
|
||||||
|
// Remove any existing tag error elements
|
||||||
|
clearTagErrors();
|
||||||
|
|
||||||
|
if (validateTags()) {
|
||||||
|
// Disable navigation check
|
||||||
|
unregisterBeforeUnload();
|
||||||
|
|
||||||
|
// Prevent duplicate attempts to submit the form
|
||||||
|
disableUploadButton();
|
||||||
|
|
||||||
|
// Let the form submission complete
|
||||||
|
} else {
|
||||||
|
// Scroll to view validation errors
|
||||||
|
assertNotNull($('.fancy-tag-upload')).scrollIntoView();
|
||||||
|
|
||||||
|
// Prevent the form from being submitted
|
||||||
|
event.preventDefault();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fileField.addEventListener('change', registerBeforeUnload);
|
fileField.addEventListener('change', registerBeforeUnload);
|
||||||
fetchButton.addEventListener('click', registerBeforeUnload);
|
fetchButton.addEventListener('click', registerBeforeUnload);
|
||||||
form.addEventListener('submit', unregisterBeforeUnload);
|
form.addEventListener('submit', submitHandler);
|
||||||
}
|
}
|
||||||
|
|
||||||
export { setupImageUpload };
|
export { setupImageUpload };
|
||||||
|
|
|
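As a quick mental model of the validation wired in above, the three tag checks can be restated as a standalone predicate. This is a sketch for illustration only, with invented names and shortened messages; the module itself keeps this logic private and reports failures through .tag-error help blocks:

const ratings = ['safe', 'suggestive', 'questionable', 'explicit', 'semi-grimdark', 'grimdark', 'grotesque'];

function tagProblems(value: string): string[] {
  const tags = value.split(',').map(t => t.trim());
  const picked = tags.filter(t => ratings.includes(t));
  const problems: string[] = [];

  // Rule 1: at least one rating tag must be present.
  if (picked.length === 0) problems.push('needs a rating tag');
  // Rule 2: safe may not be combined with any other rating.
  else if (picked.includes('safe') && picked.length > 1) problems.push('safe excludes other ratings');
  // Rule 3: at least three tags overall.
  if (tags.length < 3) problems.push('needs at least 3 tags');

  return problems;
}

tagProblems('safe');                     // ['needs at least 3 tags']
tagProblems('explicit, grimdark, pony'); // []
tagProblems('safe, explicit, pony');     // ['safe excludes other ratings']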
@ -1,4 +1,4 @@
|
||||||
import { delegate, fire, leftClick, on, PhilomenaAvailableEventsMap } from '../events';
|
import { delegate, fire, mouseMoveThenOver, leftClick, on, PhilomenaAvailableEventsMap } from '../events';
|
||||||
import { getRandomArrayItem } from '../../../test/randomness';
|
import { getRandomArrayItem } from '../../../test/randomness';
|
||||||
import { fireEvent } from '@testing-library/dom';
|
import { fireEvent } from '@testing-library/dom';
|
||||||
|
|
||||||
|
@ -80,6 +80,55 @@ describe('Event utils', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
describe('mouseMoveThenOver', () => {
|
||||||
|
it('should NOT fire on first mouseover', () => {
|
||||||
|
const mockButton = document.createElement('button');
|
||||||
|
const mockHandler = vi.fn();
|
||||||
|
|
||||||
|
mouseMoveThenOver(mockButton, mockHandler);
|
||||||
|
|
||||||
|
fireEvent.mouseOver(mockButton);
|
||||||
|
|
||||||
|
expect(mockHandler).toHaveBeenCalledTimes(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should fire on the first mousemove', () => {
|
||||||
|
const mockButton = document.createElement('button');
|
||||||
|
const mockHandler = vi.fn();
|
||||||
|
|
||||||
|
mouseMoveThenOver(mockButton, mockHandler);
|
||||||
|
|
||||||
|
fireEvent.mouseMove(mockButton);
|
||||||
|
|
||||||
|
expect(mockHandler).toHaveBeenCalledTimes(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should fire on subsequent mouseover', () => {
|
||||||
|
const mockButton = document.createElement('button');
|
||||||
|
const mockHandler = vi.fn();
|
||||||
|
|
||||||
|
mouseMoveThenOver(mockButton, mockHandler);
|
||||||
|
|
||||||
|
fireEvent.mouseMove(mockButton);
|
||||||
|
fireEvent.mouseOver(mockButton);
|
||||||
|
|
||||||
|
expect(mockHandler).toHaveBeenCalledTimes(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should NOT fire on subsequent mousemove', () => {
|
||||||
|
const mockButton = document.createElement('button');
|
||||||
|
const mockHandler = vi.fn();
|
||||||
|
|
||||||
|
mouseMoveThenOver(mockButton, mockHandler);
|
||||||
|
|
||||||
|
fireEvent.mouseMove(mockButton);
|
||||||
|
fireEvent.mouseOver(mockButton);
|
||||||
|
fireEvent.mouseMove(mockButton);
|
||||||
|
|
||||||
|
expect(mockHandler).toHaveBeenCalledTimes(2);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
describe('delegate', () => {
|
describe('delegate', () => {
|
||||||
it('should call the native addEventListener method on the element', () => {
|
it('should call the native addEventListener method on the element', () => {
|
||||||
const mockElement = document.createElement('div');
|
const mockElement = document.createElement('div');
|
||||||
|
|
|
@ -58,42 +58,44 @@ describe('Local Autocompleter', () => {
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return suggestions for exact tag name match', () => {
|
it('should return suggestions for exact tag name match', () => {
|
||||||
const result = localAc.topK('safe', defaultK);
|
const result = localAc.matchPrefix('safe').topK(defaultK);
|
||||||
expect(result).toEqual([expect.objectContaining({ name: 'safe', imageCount: 6 })]);
|
expect(result).toEqual([expect.objectContaining({ aliasName: 'safe', name: 'safe', imageCount: 6 })]);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return suggestion for original tag when passed an alias', () => {
|
it('should return suggestion for original tag when passed an alias', () => {
|
||||||
const result = localAc.topK('flowers', defaultK);
|
const result = localAc.matchPrefix('flowers').topK(defaultK);
|
||||||
expect(result).toEqual([expect.objectContaining({ name: 'flower', imageCount: 1 })]);
|
expect(result).toEqual([expect.objectContaining({ aliasName: 'flowers', name: 'flower', imageCount: 1 })]);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return suggestions sorted by image count', () => {
|
it('should return suggestions sorted by image count', () => {
|
||||||
const result = localAc.topK(termStem, defaultK);
|
const result = localAc.matchPrefix(termStem).topK(defaultK);
|
||||||
expect(result).toEqual([
|
expect(result).toEqual([
|
||||||
expect.objectContaining({ name: 'forest', imageCount: 3 }),
|
expect.objectContaining({ aliasName: 'forest', name: 'forest', imageCount: 3 }),
|
||||||
expect.objectContaining({ name: 'fog', imageCount: 1 }),
|
expect.objectContaining({ aliasName: 'fog', name: 'fog', imageCount: 1 }),
|
||||||
expect.objectContaining({ name: 'force field', imageCount: 1 }),
|
expect.objectContaining({ aliasName: 'force field', name: 'force field', imageCount: 1 }),
|
||||||
]);
|
]);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return namespaced suggestions without including namespace', () => {
|
it('should return namespaced suggestions without including namespace', () => {
|
||||||
const result = localAc.topK('test', defaultK);
|
const result = localAc.matchPrefix('test').topK(defaultK);
|
||||||
expect(result).toEqual([expect.objectContaining({ name: 'artist:test', imageCount: 1 })]);
|
expect(result).toEqual([
|
||||||
|
expect.objectContaining({ aliasName: 'artist:test', name: 'artist:test', imageCount: 1 }),
|
||||||
|
]);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return only the required number of suggestions', () => {
|
it('should return only the required number of suggestions', () => {
|
||||||
const result = localAc.topK(termStem, 1);
|
const result = localAc.matchPrefix(termStem).topK(1);
|
||||||
expect(result).toEqual([expect.objectContaining({ name: 'forest', imageCount: 3 })]);
|
expect(result).toEqual([expect.objectContaining({ aliasName: 'forest', name: 'forest', imageCount: 3 })]);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should NOT return suggestions associated with hidden tags', () => {
|
it('should NOT return suggestions associated with hidden tags', () => {
|
||||||
window.booru.hiddenTagList = [1];
|
window.booru.hiddenTagList = [1];
|
||||||
const result = localAc.topK(termStem, defaultK);
|
const result = localAc.matchPrefix(termStem).topK(defaultK);
|
||||||
expect(result).toEqual([]);
|
expect(result).toEqual([]);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return empty array for empty prefix', () => {
|
it('should return empty array for empty prefix', () => {
|
||||||
const result = localAc.topK('', defaultK);
|
const result = localAc.matchPrefix('').topK(defaultK);
|
||||||
expect(result).toEqual([]);
|
expect(result).toEqual([]);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|

334  assets/js/utils/__tests__/suggestions.spec.ts  (new file)
|
@ -0,0 +1,334 @@
|
||||||
|
import { fetchMock } from '../../../test/fetch-mock.ts';
|
||||||
|
import {
|
||||||
|
fetchLocalAutocomplete,
|
||||||
|
fetchSuggestions,
|
||||||
|
purgeSuggestionsCache,
|
||||||
|
SuggestionsPopup,
|
||||||
|
TermSuggestion,
|
||||||
|
} from '../suggestions.ts';
|
||||||
|
import fs from 'fs';
|
||||||
|
import path from 'path';
|
||||||
|
import { LocalAutocompleter } from '../local-autocompleter.ts';
|
||||||
|
import { afterEach } from 'vitest';
|
||||||
|
import { fireEvent } from '@testing-library/dom';
|
||||||
|
|
||||||
|
const mockedSuggestionsEndpoint = '/endpoint?term=';
|
||||||
|
const mockedSuggestionsResponse = [
|
||||||
|
{ label: 'artist:assasinmonkey (1)', value: 'artist:assasinmonkey' },
|
||||||
|
{ label: 'artist:hydrusbeta (1)', value: 'artist:hydrusbeta' },
|
||||||
|
{ label: 'artist:the sexy assistant (1)', value: 'artist:the sexy assistant' },
|
||||||
|
{ label: 'artist:devinian (1)', value: 'artist:devinian' },
|
||||||
|
{ label: 'artist:moe (1)', value: 'artist:moe' },
|
||||||
|
];
|
||||||
|
|
||||||
|
function mockBaseSuggestionsPopup(includeMockedSuggestions: boolean = false): [SuggestionsPopup, HTMLInputElement] {
|
||||||
|
const input = document.createElement('input');
|
||||||
|
const popup = new SuggestionsPopup();
|
||||||
|
|
||||||
|
document.body.append(input);
|
||||||
|
popup.showForField(input);
|
||||||
|
|
||||||
|
if (includeMockedSuggestions) {
|
||||||
|
popup.renderSuggestions(mockedSuggestionsResponse);
|
||||||
|
}
|
||||||
|
|
||||||
|
return [popup, input];
|
||||||
|
}
|
||||||
|
|
||||||
|
const selectedItemClassName = 'autocomplete__item--selected';
|
||||||
|
|
||||||
|
describe('Suggestions', () => {
|
||||||
|
let mockedAutocompleteBuffer: ArrayBuffer;
|
||||||
|
let popup: SuggestionsPopup | undefined;
|
||||||
|
let input: HTMLInputElement | undefined;
|
||||||
|
|
||||||
|
beforeAll(async () => {
|
||||||
|
fetchMock.enableMocks();
|
||||||
|
|
||||||
|
mockedAutocompleteBuffer = await fs.promises
|
||||||
|
.readFile(path.join(__dirname, 'autocomplete-compiled-v2.bin'))
|
||||||
|
.then(fileBuffer => fileBuffer.buffer);
|
||||||
|
});
|
||||||
|
|
||||||
|
afterAll(() => {
|
||||||
|
fetchMock.disableMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
purgeSuggestionsCache();
|
||||||
|
fetchMock.resetMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
if (input) {
|
||||||
|
input.remove();
|
||||||
|
input = undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (popup) {
|
||||||
|
popup.hide();
|
||||||
|
popup = undefined;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('SuggestionsPopup', () => {
|
||||||
|
it('should create the popup container', () => {
|
||||||
|
[popup, input] = mockBaseSuggestionsPopup();
|
||||||
|
|
||||||
|
expect(document.querySelector('.autocomplete')).toBeInstanceOf(HTMLElement);
|
||||||
|
expect(popup.isActive).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should be removed when hidden', () => {
|
||||||
|
[popup, input] = mockBaseSuggestionsPopup();
|
||||||
|
|
||||||
|
popup.hide();
|
||||||
|
|
||||||
|
expect(document.querySelector('.autocomplete')).not.toBeInstanceOf(HTMLElement);
|
||||||
|
expect(popup.isActive).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should render suggestions', () => {
|
||||||
|
[popup, input] = mockBaseSuggestionsPopup(true);
|
||||||
|
|
||||||
|
expect(document.querySelectorAll('.autocomplete__item').length).toBe(mockedSuggestionsResponse.length);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should initially select first element when selectNext called', () => {
|
||||||
|
[popup, input] = mockBaseSuggestionsPopup(true);
|
||||||
|
|
||||||
|
popup.selectNext();
|
||||||
|
|
||||||
|
expect(document.querySelector('.autocomplete__item:first-child')).toHaveClass(selectedItemClassName);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should initially select last element when selectPrevious called', () => {
|
||||||
|
[popup, input] = mockBaseSuggestionsPopup(true);
|
||||||
|
|
||||||
|
popup.selectPrevious();
|
||||||
|
|
||||||
|
expect(document.querySelector('.autocomplete__item:last-child')).toHaveClass(selectedItemClassName);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should select and de-select items when hovering items over', () => {
|
||||||
|
[popup, input] = mockBaseSuggestionsPopup(true);
|
||||||
|
|
||||||
|
const firstItem = document.querySelector('.autocomplete__item:first-child');
|
||||||
|
const lastItem = document.querySelector('.autocomplete__item:last-child');
|
||||||
|
|
||||||
|
if (firstItem) {
|
||||||
|
fireEvent.mouseOver(firstItem);
|
||||||
|
fireEvent.mouseMove(firstItem);
|
||||||
|
}
|
||||||
|
|
||||||
|
expect(firstItem).toHaveClass(selectedItemClassName);
|
||||||
|
|
||||||
|
if (lastItem) {
|
||||||
|
fireEvent.mouseOver(lastItem);
|
||||||
|
fireEvent.mouseMove(lastItem);
|
||||||
|
}
|
||||||
|
|
||||||
|
expect(firstItem).not.toHaveClass(selectedItemClassName);
|
||||||
|
expect(lastItem).toHaveClass(selectedItemClassName);
|
||||||
|
|
||||||
|
if (lastItem) {
|
||||||
|
fireEvent.mouseOut(lastItem);
|
||||||
|
}
|
||||||
|
|
||||||
|
expect(lastItem).not.toHaveClass(selectedItemClassName);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should allow switching between mouse and selection', () => {
|
||||||
|
[popup, input] = mockBaseSuggestionsPopup(true);
|
||||||
|
|
||||||
|
const secondItem = document.querySelector('.autocomplete__item:nth-child(2)');
|
||||||
|
const thirdItem = document.querySelector('.autocomplete__item:nth-child(3)');
|
||||||
|
|
||||||
|
if (secondItem) {
|
||||||
|
fireEvent.mouseOver(secondItem);
|
||||||
|
fireEvent.mouseMove(secondItem);
|
||||||
|
}
|
||||||
|
|
||||||
|
expect(secondItem).toHaveClass(selectedItemClassName);
|
||||||
|
|
||||||
|
popup.selectNext();
|
||||||
|
|
||||||
|
expect(secondItem).not.toHaveClass(selectedItemClassName);
|
||||||
|
expect(thirdItem).toHaveClass(selectedItemClassName);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should loop around when selecting next on last and previous on first', () => {
|
||||||
|
[popup, input] = mockBaseSuggestionsPopup(true);
|
||||||
|
|
||||||
|
const firstItem = document.querySelector('.autocomplete__item:first-child');
|
||||||
|
const lastItem = document.querySelector('.autocomplete__item:last-child');
|
||||||
|
|
||||||
|
if (lastItem) {
|
||||||
|
fireEvent.mouseOver(lastItem);
|
||||||
|
fireEvent.mouseMove(lastItem);
|
||||||
|
}
|
||||||
|
|
||||||
|
expect(lastItem).toHaveClass(selectedItemClassName);
|
||||||
|
|
||||||
|
popup.selectNext();
|
||||||
|
|
||||||
|
expect(document.querySelector(`.${selectedItemClassName}`)).toBeNull();
|
||||||
|
|
||||||
|
popup.selectNext();
|
||||||
|
|
||||||
|
expect(firstItem).toHaveClass(selectedItemClassName);
|
||||||
|
|
||||||
|
popup.selectPrevious();
|
||||||
|
|
||||||
|
expect(document.querySelector(`.${selectedItemClassName}`)).toBeNull();
|
||||||
|
|
||||||
|
popup.selectPrevious();
|
||||||
|
|
||||||
|
expect(lastItem).toHaveClass(selectedItemClassName);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return selected item value', () => {
|
||||||
|
[popup, input] = mockBaseSuggestionsPopup(true);
|
||||||
|
|
||||||
|
expect(popup.selectedTerm).toBe(null);
|
||||||
|
|
||||||
|
popup.selectNext();
|
||||||
|
|
||||||
|
expect(popup.selectedTerm).toBe(mockedSuggestionsResponse[0].value);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should emit an event when item was clicked with mouse', () => {
|
||||||
|
[popup, input] = mockBaseSuggestionsPopup(true);
|
||||||
|
|
||||||
|
let clickEvent: CustomEvent<TermSuggestion> | undefined;
|
||||||
|
|
||||||
|
const itemSelectedHandler = vi.fn((event: CustomEvent<TermSuggestion>) => {
|
||||||
|
clickEvent = event;
|
||||||
|
});
|
||||||
|
|
||||||
|
popup.onItemSelected(itemSelectedHandler);
|
||||||
|
|
||||||
|
const firstItem = document.querySelector('.autocomplete__item');
|
||||||
|
|
||||||
|
if (firstItem) {
|
||||||
|
fireEvent.click(firstItem);
|
||||||
|
}
|
||||||
|
|
||||||
|
expect(itemSelectedHandler).toBeCalledTimes(1);
|
||||||
|
expect(clickEvent?.detail).toEqual(mockedSuggestionsResponse[0]);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should not emit selection on items without value', () => {
|
||||||
|
[popup, input] = mockBaseSuggestionsPopup();
|
||||||
|
|
||||||
|
popup.renderSuggestions([{ label: 'Option without value', value: '' }]);
|
||||||
|
|
||||||
|
const itemSelectionHandler = vi.fn();
|
||||||
|
|
||||||
|
popup.onItemSelected(itemSelectionHandler);
|
||||||
|
|
||||||
|
const firstItem = document.querySelector('.autocomplete__item:first-child')!;
|
||||||
|
|
||||||
|
if (firstItem) {
|
||||||
|
fireEvent.click(firstItem);
|
||||||
|
}
|
||||||
|
|
||||||
|
expect(itemSelectionHandler).not.toBeCalled();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('fetchSuggestions', () => {
|
||||||
|
it('should only call fetch once per single term', () => {
|
||||||
|
fetchSuggestions(mockedSuggestionsEndpoint, 'art');
|
||||||
|
fetchSuggestions(mockedSuggestionsEndpoint, 'art');
|
||||||
|
|
||||||
|
expect(fetch).toHaveBeenCalledTimes(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should be case-insensitive to terms and trim spaces', () => {
|
||||||
|
fetchSuggestions(mockedSuggestionsEndpoint, 'art');
|
||||||
|
fetchSuggestions(mockedSuggestionsEndpoint, 'Art');
|
||||||
|
fetchSuggestions(mockedSuggestionsEndpoint, ' ART ');
|
||||||
|
|
||||||
|
expect(fetch).toHaveBeenCalledTimes(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return the same suggestions from cache', async () => {
|
||||||
|
fetchMock.mockResolvedValueOnce(new Response(JSON.stringify(mockedSuggestionsResponse), { status: 200 }));
|
||||||
|
|
||||||
|
const firstSuggestions = await fetchSuggestions(mockedSuggestionsEndpoint, 'art');
|
||||||
|
const secondSuggestions = await fetchSuggestions(mockedSuggestionsEndpoint, 'art');
|
||||||
|
|
||||||
|
expect(firstSuggestions).toBe(secondSuggestions);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should parse and return array of suggestions', async () => {
|
||||||
|
fetchMock.mockResolvedValueOnce(new Response(JSON.stringify(mockedSuggestionsResponse), { status: 200 }));
|
||||||
|
|
||||||
|
const resolvedSuggestions = await fetchSuggestions(mockedSuggestionsEndpoint, 'art');
|
||||||
|
|
||||||
|
expect(resolvedSuggestions).toBeInstanceOf(Array);
|
||||||
|
expect(resolvedSuggestions.length).toBe(mockedSuggestionsResponse.length);
|
||||||
|
expect(resolvedSuggestions).toEqual(mockedSuggestionsResponse);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return empty array on server error', async () => {
|
||||||
|
fetchMock.mockResolvedValueOnce(new Response('', { status: 500 }));
|
||||||
|
|
||||||
|
const resolvedSuggestions = await fetchSuggestions(mockedSuggestionsEndpoint, 'unknown tag');
|
||||||
|
|
||||||
|
expect(resolvedSuggestions).toBeInstanceOf(Array);
|
||||||
|
expect(resolvedSuggestions.length).toBe(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return empty array on invalid response format', async () => {
|
||||||
|
fetchMock.mockResolvedValueOnce(new Response('invalid non-JSON response', { status: 200 }));
|
||||||
|
|
||||||
|
const resolvedSuggestions = await fetchSuggestions(mockedSuggestionsEndpoint, 'invalid response');
|
||||||
|
|
||||||
|
expect(resolvedSuggestions).toBeInstanceOf(Array);
|
||||||
|
expect(resolvedSuggestions.length).toBe(0);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('purgeSuggestionsCache', () => {
|
||||||
|
it('should clear cached responses', async () => {
|
||||||
|
fetchMock.mockResolvedValueOnce(new Response(JSON.stringify(mockedSuggestionsResponse), { status: 200 }));
|
||||||
|
|
||||||
|
const firstResult = await fetchSuggestions(mockedSuggestionsEndpoint, 'art');
|
||||||
|
purgeSuggestionsCache();
|
||||||
|
const resultAfterPurge = await fetchSuggestions(mockedSuggestionsEndpoint, 'art');
|
||||||
|
|
||||||
|
expect(fetch).toBeCalledTimes(2);
|
||||||
|
expect(firstResult).not.toBe(resultAfterPurge);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('fetchLocalAutocomplete', () => {
|
||||||
|
it('should request binary with date-related cache key', () => {
|
||||||
|
fetchMock.mockResolvedValue(new Response(mockedAutocompleteBuffer, { status: 200 }));
|
||||||
|
|
||||||
|
const now = new Date();
|
||||||
|
const cacheKey = `${now.getUTCFullYear()}-${now.getUTCMonth()}-${now.getUTCDate()}`;
|
||||||
|
const expectedEndpoint = `/autocomplete/compiled?vsn=2&key=${cacheKey}`;
|
||||||
|
|
||||||
|
fetchLocalAutocomplete();
|
||||||
|
|
||||||
|
expect(fetch).toBeCalledWith(expectedEndpoint, { credentials: 'omit', cache: 'force-cache' });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return auto-completer instance', async () => {
|
||||||
|
fetchMock.mockResolvedValue(new Response(mockedAutocompleteBuffer, { status: 200 }));
|
||||||
|
|
||||||
|
const autocomplete = await fetchLocalAutocomplete();
|
||||||
|
|
||||||
|
expect(autocomplete).toBeInstanceOf(LocalAutocompleter);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should throw generic server error on failing response', async () => {
|
||||||
|
fetchMock.mockResolvedValue(new Response('error', { status: 500 }));
|
||||||
|
|
||||||
|
expect(() => fetchLocalAutocomplete()).rejects.toThrowError('Received error from server');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
70  assets/js/utils/__tests__/unique-heap.spec.ts  (new file)
|
@ -0,0 +1,70 @@
|
||||||
|
import { UniqueHeap } from '../unique-heap';
|
||||||
|
|
||||||
|
describe('Unique Heap', () => {
|
||||||
|
interface Result {
|
||||||
|
name: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
function compare(a: Result, b: Result): boolean {
|
||||||
|
return a.name < b.name;
|
||||||
|
}
|
||||||
|
|
||||||
|
test('it should return no results when empty', () => {
|
||||||
|
const heap = new UniqueHeap<Result>(compare, 'name');
|
||||||
|
expect(heap.topK(5)).toEqual([]);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("doesn't insert duplicate results", () => {
|
||||||
|
const heap = new UniqueHeap<Result>(compare, 'name');
|
||||||
|
|
||||||
|
heap.append({ name: 'name' });
|
||||||
|
heap.append({ name: 'name' });
|
||||||
|
|
||||||
|
expect(heap.topK(2)).toEqual([expect.objectContaining({ name: 'name' })]);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('it should return results in reverse sorted order', () => {
|
||||||
|
const heap = new UniqueHeap<Result>(compare, 'name');
|
||||||
|
|
||||||
|
const names = [
|
||||||
|
'alpha',
|
||||||
|
'beta',
|
||||||
|
'gamma',
|
||||||
|
'delta',
|
||||||
|
'epsilon',
|
||||||
|
'zeta',
|
||||||
|
'eta',
|
||||||
|
'theta',
|
||||||
|
'iota',
|
||||||
|
'kappa',
|
||||||
|
'lambda',
|
||||||
|
'mu',
|
||||||
|
'nu',
|
||||||
|
'xi',
|
||||||
|
'omicron',
|
||||||
|
'pi',
|
||||||
|
'rho',
|
||||||
|
'sigma',
|
||||||
|
'tau',
|
||||||
|
'upsilon',
|
||||||
|
'phi',
|
||||||
|
'chi',
|
||||||
|
'psi',
|
||||||
|
'omega',
|
||||||
|
];
|
||||||
|
|
||||||
|
for (const name of names) {
|
||||||
|
heap.append({ name });
|
||||||
|
}
|
||||||
|
|
||||||
|
const results = heap.topK(5);
|
||||||
|
|
||||||
|
expect(results).toEqual([
|
||||||
|
expect.objectContaining({ name: 'zeta' }),
|
||||||
|
expect.objectContaining({ name: 'xi' }),
|
||||||
|
expect.objectContaining({ name: 'upsilon' }),
|
||||||
|
expect.objectContaining({ name: 'theta' }),
|
||||||
|
expect.objectContaining({ name: 'tau' }),
|
||||||
|
]);
|
||||||
|
});
|
||||||
|
});
|
|
@ -43,6 +43,17 @@ export function leftClick<E extends MouseEvent, Target extends EventTarget>(func
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export function mouseMoveThenOver<El extends HTMLElement>(element: El, func: (e: MouseEvent) => void) {
|
||||||
|
element.addEventListener(
|
||||||
|
'mousemove',
|
||||||
|
(event: MouseEvent) => {
|
||||||
|
func(event);
|
||||||
|
element.addEventListener('mouseover', func);
|
||||||
|
},
|
||||||
|
{ once: true },
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
export function delegate<K extends keyof PhilomenaAvailableEventsMap, Target extends Element>(
|
export function delegate<K extends keyof PhilomenaAvailableEventsMap, Target extends Element>(
|
||||||
node: PhilomenaEventElement,
|
node: PhilomenaEventElement,
|
||||||
event: K,
|
event: K,
|
||||||
|
|
|
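A minimal usage sketch for the mouseMoveThenOver helper introduced above (the element, handler and import path are placeholders): the callback stays silent on a bare mouseover and only starts firing once the pointer has actually moved, which is what the suggestions popup relies on to ignore items that merely scroll under a stationary cursor.

import { mouseMoveThenOver } from './utils/events';

const item = document.createElement('li');

mouseMoveThenOver(item, () => {
  // Fires on the first real mousemove over the item and on every
  // mouseover after that; a mouseover before any movement is ignored.
  item.classList.add('autocomplete__item--selected');
});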
@ -1,12 +1,21 @@
|
||||||
// Client-side tag completion.
|
// Client-side tag completion.
|
||||||
|
import { UniqueHeap } from './unique-heap';
|
||||||
import store from './store';
|
import store from './store';
|
||||||
|
|
||||||
interface Result {
|
export interface Result {
|
||||||
|
aliasName: string;
|
||||||
name: string;
|
name: string;
|
||||||
imageCount: number;
|
imageCount: number;
|
||||||
associations: number[];
|
associations: number[];
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns whether Result a is considered less than Result b.
|
||||||
|
*/
|
||||||
|
function compareResult(a: Result, b: Result): boolean {
|
||||||
|
return a.imageCount === b.imageCount ? a.name > b.name : a.imageCount < b.imageCount;
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Compare two strings, C-style.
|
* Compare two strings, C-style.
|
||||||
*/
|
*/
|
||||||
|
@ -18,10 +27,13 @@ function strcmp(a: string, b: string): number {
|
||||||
* Returns the name of a tag without any namespace component.
|
* Returns the name of a tag without any namespace component.
|
||||||
*/
|
*/
|
||||||
function nameInNamespace(s: string): string {
|
function nameInNamespace(s: string): string {
|
||||||
const v = s.split(':', 2);
|
const first = s.indexOf(':');
|
||||||
|
|
||||||
if (v.length === 2) return v[1];
|
if (first !== -1) {
|
||||||
return v[0];
|
return s.slice(first + 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
return s;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -59,7 +71,7 @@ export class LocalAutocompleter {
|
||||||
/**
|
/**
|
||||||
* Get a tag's name and its associations given a byte location inside the file.
|
* Get a tag's name and its associations given a byte location inside the file.
|
||||||
*/
|
*/
|
||||||
getTagFromLocation(location: number): [string, number[]] {
|
private getTagFromLocation(location: number, imageCount: number, aliasName?: string): Result {
|
||||||
const nameLength = this.view.getUint8(location);
|
const nameLength = this.view.getUint8(location);
|
||||||
const assnLength = this.view.getUint8(location + 1 + nameLength);
|
const assnLength = this.view.getUint8(location + 1 + nameLength);
|
||||||
|
|
||||||
|
@ -70,29 +82,29 @@ export class LocalAutocompleter {
|
||||||
associations.push(this.view.getUint32(location + 1 + nameLength + 1 + i * 4, true));
|
associations.push(this.view.getUint32(location + 1 + nameLength + 1 + i * 4, true));
|
||||||
}
|
}
|
||||||
|
|
||||||
return [name, associations];
|
return { aliasName: aliasName || name, name, imageCount, associations };
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get a Result object as the ith tag inside the file.
|
* Get a Result object as the ith tag inside the file.
|
||||||
*/
|
*/
|
||||||
getResultAt(i: number): [string, Result] {
|
private getResultAt(i: number, aliasName?: string): Result {
|
||||||
const nameLocation = this.view.getUint32(this.referenceStart + i * 8, true);
|
const tagLocation = this.view.getUint32(this.referenceStart + i * 8, true);
|
||||||
const imageCount = this.view.getInt32(this.referenceStart + i * 8 + 4, true);
|
const imageCount = this.view.getInt32(this.referenceStart + i * 8 + 4, true);
|
||||||
const [name, associations] = this.getTagFromLocation(nameLocation);
|
const result = this.getTagFromLocation(tagLocation, imageCount, aliasName);
|
||||||
|
|
||||||
if (imageCount < 0) {
|
if (imageCount < 0) {
|
||||||
// This is actually an alias, so follow it
|
// This is actually an alias, so follow it
|
||||||
return [name, this.getResultAt(-imageCount - 1)[1]];
|
return this.getResultAt(-imageCount - 1, aliasName || result.name);
|
||||||
}
|
}
|
||||||
|
|
||||||
return [name, { name, imageCount, associations }];
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get a Result object as the ith tag inside the file, secondary ordering.
|
* Get a Result object as the ith tag inside the file, secondary ordering.
|
||||||
*/
|
*/
|
||||||
getSecondaryResultAt(i: number): [string, Result] {
|
private getSecondaryResultAt(i: number): Result {
|
||||||
const referenceIndex = this.view.getUint32(this.secondaryStart + i * 4, true);
|
const referenceIndex = this.view.getUint32(this.secondaryStart + i * 4, true);
|
||||||
return this.getResultAt(referenceIndex);
|
return this.getResultAt(referenceIndex);
|
||||||
}
|
}
|
||||||
|
@ -100,23 +112,22 @@ export class LocalAutocompleter {
|
||||||
/**
|
/**
|
||||||
* Perform a binary search to fetch all results matching a condition.
|
* Perform a binary search to fetch all results matching a condition.
|
||||||
*/
|
*/
|
||||||
scanResults(
|
private scanResults(
|
||||||
getResult: (i: number) => [string, Result],
|
getResult: (i: number) => Result,
|
||||||
compare: (name: string) => number,
|
compare: (name: string) => number,
|
||||||
results: Record<string, Result>,
|
results: UniqueHeap<Result>,
|
||||||
|
hiddenTags: Set<number>,
|
||||||
) {
|
) {
|
||||||
const unfilter = store.get('unfilter_tag_suggestions');
|
const filter = !store.get('unfilter_tag_suggestions');
|
||||||
|
|
||||||
let min = 0;
|
let min = 0;
|
||||||
let max = this.numTags;
|
let max = this.numTags;
|
||||||
|
|
||||||
const hiddenTags = window.booru.hiddenTagList;
|
|
||||||
|
|
||||||
while (min < max - 1) {
|
while (min < max - 1) {
|
||||||
const med = (min + (max - min) / 2) | 0;
|
const med = min + (((max - min) / 2) | 0);
|
||||||
const sortKey = getResult(med)[0];
|
const result = getResult(med);
|
||||||
|
|
||||||
if (compare(sortKey) >= 0) {
|
if (compare(result.aliasName) >= 0) {
|
||||||
// too large, go left
|
// too large, go left
|
||||||
max = med;
|
max = med;
|
||||||
} else {
|
} else {
|
||||||
|
@ -126,40 +137,47 @@ export class LocalAutocompleter {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Scan forward until no more matches occur
|
// Scan forward until no more matches occur
|
||||||
while (min < this.numTags - 1) {
|
outer: while (min < this.numTags - 1) {
|
||||||
const [sortKey, result] = getResult(++min);
|
const result = getResult(++min);
|
||||||
if (compare(sortKey) !== 0) {
|
|
||||||
|
if (compare(result.aliasName) !== 0) {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add if not filtering or no associations are filtered
|
// Check if any associations are filtered
|
||||||
if (unfilter || hiddenTags.findIndex(ht => result.associations.includes(ht)) === -1) {
|
if (filter) {
|
||||||
results[result.name] = result;
|
for (const association of result.associations) {
|
||||||
|
if (hiddenTags.has(association)) {
|
||||||
|
continue outer;
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Nothing was filtered, so add
|
||||||
|
results.append(result);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Find the top k results by image count which match the given string prefix.
|
* Find the top k results by image count which match the given string prefix.
|
||||||
*/
|
*/
|
||||||
topK(prefix: string, k: number): Result[] {
|
matchPrefix(prefix: string): UniqueHeap<Result> {
|
||||||
const results: Record<string, Result> = {};
|
const results = new UniqueHeap<Result>(compareResult, 'name');
|
||||||
|
|
||||||
if (prefix === '') {
|
if (prefix === '') {
|
||||||
return [];
|
return results;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const hiddenTags = new Set(window.booru.hiddenTagList);
|
||||||
|
|
||||||
// Find normally, in full name-sorted order
|
// Find normally, in full name-sorted order
|
||||||
const prefixMatch = (name: string) => strcmp(name.slice(0, prefix.length), prefix);
|
const prefixMatch = (name: string) => strcmp(name.slice(0, prefix.length), prefix);
|
||||||
this.scanResults(this.getResultAt.bind(this), prefixMatch, results);
|
this.scanResults(this.getResultAt.bind(this), prefixMatch, results, hiddenTags);
|
||||||
|
|
||||||
// Find in secondary order
|
// Find in secondary order
|
||||||
const namespaceMatch = (name: string) => strcmp(nameInNamespace(name).slice(0, prefix.length), prefix);
|
const namespaceMatch = (name: string) => strcmp(nameInNamespace(name).slice(0, prefix.length), prefix);
|
||||||
this.scanResults(this.getSecondaryResultAt.bind(this), namespaceMatch, results);
|
this.scanResults(this.getSecondaryResultAt.bind(this), namespaceMatch, results, hiddenTags);
|
||||||
|
|
||||||
// Sort results by image count
|
return results;
|
||||||
const sorted = Object.values(results).sort((a, b) => b.imageCount - a.imageCount);
|
|
||||||
|
|
||||||
return sorted.slice(0, k);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
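The reworked autocompleter splits matching from ranking: matchPrefix() now returns a UniqueHeap of Result objects and the caller asks that heap for the top k entries. A consumer-side sketch, with the buffer source and k left as placeholders (filtering against window.booru.hiddenTagList happens inside matchPrefix):

import { LocalAutocompleter } from './utils/local-autocompleter';

declare const buffer: ArrayBuffer; // e.g. the compiled autocomplete binary fetched elsewhere

const completer = new LocalAutocompleter(buffer);

// Top 5 suggestions for the prefix 'fo', ranked by image count.
const suggestions = completer.matchPrefix('fo').topK(5);

for (const s of suggestions) {
  console.log(s.aliasName, '->', s.name, s.imageCount);
}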
177  assets/js/utils/suggestions.ts  (new file)
|
@ -0,0 +1,177 @@
|
||||||
|
import { makeEl } from './dom.ts';
|
||||||
|
import { mouseMoveThenOver } from './events.ts';
|
||||||
|
import { handleError } from './requests.ts';
|
||||||
|
import { LocalAutocompleter } from './local-autocompleter.ts';
|
||||||
|
|
||||||
|
export interface TermSuggestion {
|
||||||
|
label: string;
|
||||||
|
value: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
const selectedSuggestionClassName = 'autocomplete__item--selected';
|
||||||
|
|
||||||
|
export class SuggestionsPopup {
|
||||||
|
private readonly container: HTMLElement;
|
||||||
|
private readonly listElement: HTMLUListElement;
|
||||||
|
private selectedElement: HTMLElement | null = null;
|
||||||
|
|
||||||
|
constructor() {
|
||||||
|
this.container = makeEl('div', {
|
||||||
|
className: 'autocomplete',
|
||||||
|
});
|
||||||
|
|
||||||
|
this.listElement = makeEl('ul', {
|
||||||
|
className: 'autocomplete__list',
|
||||||
|
});
|
||||||
|
|
||||||
|
this.container.appendChild(this.listElement);
|
||||||
|
}
|
||||||
|
|
||||||
|
get selectedTerm(): string | null {
|
||||||
|
return this.selectedElement?.dataset.value || null;
|
||||||
|
}
|
||||||
|
|
||||||
|
get isActive(): boolean {
|
||||||
|
return this.container.isConnected;
|
||||||
|
}
|
||||||
|
|
||||||
|
hide() {
|
||||||
|
this.clearSelection();
|
||||||
|
this.container.remove();
|
||||||
|
}
|
||||||
|
|
||||||
|
private clearSelection() {
|
||||||
|
if (!this.selectedElement) return;
|
||||||
|
|
||||||
|
this.selectedElement.classList.remove(selectedSuggestionClassName);
|
||||||
|
this.selectedElement = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
private updateSelection(targetItem: HTMLElement) {
|
||||||
|
this.clearSelection();
|
||||||
|
|
||||||
|
this.selectedElement = targetItem;
|
||||||
|
this.selectedElement.classList.add(selectedSuggestionClassName);
|
||||||
|
}
|
||||||
|
|
||||||
|
renderSuggestions(suggestions: TermSuggestion[]): SuggestionsPopup {
|
||||||
|
this.clearSelection();
|
||||||
|
|
||||||
|
this.listElement.innerHTML = '';
|
||||||
|
|
||||||
|
for (const suggestedTerm of suggestions) {
|
||||||
|
const listItem = makeEl('li', {
|
||||||
|
className: 'autocomplete__item',
|
||||||
|
innerText: suggestedTerm.label,
|
||||||
|
});
|
||||||
|
|
||||||
|
listItem.dataset.value = suggestedTerm.value;
|
||||||
|
|
||||||
|
this.watchItem(listItem, suggestedTerm);
|
||||||
|
this.listElement.appendChild(listItem);
|
||||||
|
}
|
||||||
|
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
private watchItem(listItem: HTMLElement, suggestion: TermSuggestion) {
|
||||||
|
mouseMoveThenOver(listItem, () => this.updateSelection(listItem));
|
||||||
|
|
||||||
|
listItem.addEventListener('mouseout', () => this.clearSelection());
|
||||||
|
|
||||||
|
listItem.addEventListener('click', () => {
|
||||||
|
if (!listItem.dataset.value) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
this.container.dispatchEvent(new CustomEvent('item_selected', { detail: suggestion }));
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
private changeSelection(direction: number) {
|
||||||
|
let nextTargetElement: Element | null;
|
||||||
|
|
||||||
|
if (!this.selectedElement) {
|
||||||
|
nextTargetElement = direction > 0 ? this.listElement.firstElementChild : this.listElement.lastElementChild;
|
||||||
|
} else {
|
||||||
|
nextTargetElement =
|
||||||
|
direction > 0 ? this.selectedElement.nextElementSibling : this.selectedElement.previousElementSibling;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!(nextTargetElement instanceof HTMLElement)) {
|
||||||
|
this.clearSelection();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
this.updateSelection(nextTargetElement);
|
||||||
|
}
|
||||||
|
|
||||||
|
selectNext() {
|
||||||
|
this.changeSelection(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
selectPrevious() {
|
||||||
|
this.changeSelection(-1);
|
||||||
|
}
|
||||||
|
|
||||||
|
showForField(targetElement: HTMLElement) {
|
||||||
|
this.container.style.position = 'absolute';
|
||||||
|
this.container.style.left = `${targetElement.offsetLeft}px`;
|
||||||
|
|
||||||
|
let topPosition = targetElement.offsetTop + targetElement.offsetHeight;
|
||||||
|
|
||||||
|
if (targetElement.parentElement) {
|
||||||
|
topPosition -= targetElement.parentElement.scrollTop;
|
||||||
|
}
|
||||||
|
|
||||||
|
this.container.style.top = `${topPosition}px`;
|
||||||
|
|
||||||
|
document.body.appendChild(this.container);
|
||||||
|
}
|
||||||
|
|
||||||
|
onItemSelected(callback: (event: CustomEvent<TermSuggestion>) => void) {
|
||||||
|
this.container.addEventListener('item_selected', callback as EventListener);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const cachedSuggestions = new Map<string, Promise<TermSuggestion[]>>();
|
||||||
|
|
||||||
|
export async function fetchSuggestions(endpoint: string, targetTerm: string): Promise<TermSuggestion[]> {
|
||||||
|
const normalizedTerm = targetTerm.trim().toLowerCase();
|
||||||
|
|
||||||
|
if (cachedSuggestions.has(normalizedTerm)) {
|
||||||
|
return cachedSuggestions.get(normalizedTerm)!;
|
||||||
|
}
|
||||||
|
|
||||||
|
const promisedSuggestions: Promise<TermSuggestion[]> = fetch(`${endpoint}${targetTerm}`)
|
||||||
|
.then(handleError)
|
||||||
|
.then(response => response.json())
|
||||||
|
.catch(() => {
|
||||||
|
// Deleting the promised result from cache to allow retrying
|
||||||
|
cachedSuggestions.delete(normalizedTerm);
|
||||||
|
|
||||||
|
// And resolve failed promise with empty array
|
||||||
|
return [];
|
||||||
|
});
|
||||||
|
|
||||||
|
cachedSuggestions.set(normalizedTerm, promisedSuggestions);
|
||||||
|
|
||||||
|
return promisedSuggestions;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function purgeSuggestionsCache() {
|
||||||
|
cachedSuggestions.clear();
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function fetchLocalAutocomplete(): Promise<LocalAutocompleter> {
|
||||||
|
const now = new Date();
|
||||||
|
const cacheKey = `${now.getUTCFullYear()}-${now.getUTCMonth()}-${now.getUTCDate()}`;
|
||||||
|
|
||||||
|
return await fetch(`/autocomplete/compiled?vsn=2&key=${cacheKey}`, {
|
||||||
|
credentials: 'omit',
|
||||||
|
cache: 'force-cache',
|
||||||
|
})
|
||||||
|
.then(handleError)
|
||||||
|
.then(resp => resp.arrayBuffer())
|
||||||
|
.then(buf => new LocalAutocompleter(buf));
|
||||||
|
}
|
|
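A minimal wiring sketch for the popup and fetch helpers defined above; the endpoint, the input selector and the event wiring are placeholders rather than the site's actual integration:

import { SuggestionsPopup, fetchSuggestions } from './utils/suggestions';

const input = document.querySelector<HTMLInputElement>('input.js-search-field')!;
const popup = new SuggestionsPopup();

input.addEventListener('input', async () => {
  const suggestions = await fetchSuggestions('/tags/autocomplete?term=', input.value);

  // renderSuggestions() returns the popup, so positioning can be chained.
  popup.renderSuggestions(suggestions).showForField(input);
});

popup.onItemSelected(event => {
  input.value = event.detail.value;
  popup.hide();
});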
@ -57,10 +57,10 @@ export function imageHitsComplex(img: HTMLElement, matchComplex: AstMatcher) {
|
||||||
}
|
}
|
||||||
|
|
||||||
export function displayTags(tags: TagData[]): string {
|
export function displayTags(tags: TagData[]): string {
|
||||||
const mainTag = tags[0],
|
const mainTag = tags[0];
|
||||||
otherTags = tags.slice(1);
|
const otherTags = tags.slice(1);
|
||||||
let list = escapeHtml(mainTag.name),
|
let list = escapeHtml(mainTag.name);
|
||||||
extras;
|
let extras;
|
||||||
|
|
||||||
if (otherTags.length > 0) {
|
if (otherTags.length > 0) {
|
||||||
extras = otherTags.map(tag => escapeHtml(tag.name)).join(', ');
|
extras = otherTags.map(tag => escapeHtml(tag.name)).join(', ');
|
||||||
|
|
96  assets/js/utils/unique-heap.ts  (new file)
|
@ -0,0 +1,96 @@
|
||||||
|
export type Compare<T> = (a: T, b: T) => boolean;
|
||||||
|
|
||||||
|
export class UniqueHeap<T extends object> {
|
||||||
|
private keys: Set<unknown>;
|
||||||
|
private values: T[];
|
||||||
|
private keyName: keyof T;
|
||||||
|
private compare: Compare<T>;
|
||||||
|
|
||||||
|
constructor(compare: Compare<T>, keyName: keyof T) {
|
||||||
|
this.keys = new Set();
|
||||||
|
this.values = [];
|
||||||
|
this.keyName = keyName;
|
||||||
|
this.compare = compare;
|
||||||
|
}
|
||||||
|
|
||||||
|
append(value: T) {
|
||||||
|
const key = value[this.keyName];
|
||||||
|
|
||||||
|
if (!this.keys.has(key)) {
|
||||||
|
this.keys.add(key);
|
||||||
|
this.values.push(value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
topK(k: number): T[] {
|
||||||
|
// Create the output array.
|
||||||
|
const output: T[] = [];
|
||||||
|
|
||||||
|
for (const result of this.results()) {
|
||||||
|
if (output.length >= k) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
output.push(result);
|
||||||
|
}
|
||||||
|
|
||||||
|
return output;
|
||||||
|
}
|
||||||
|
|
||||||
|
*results(): Generator<T, void, void> {
|
||||||
|
const { values } = this;
|
||||||
|
const length = values.length;
|
||||||
|
|
||||||
|
// Build the heap.
|
||||||
|
for (let i = (length >> 1) - 1; i >= 0; i--) {
|
||||||
|
this.heapify(length, i);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Begin extracting values.
|
||||||
|
for (let i = 0; i < length; i++) {
|
||||||
|
// Top value is the largest.
|
||||||
|
yield values[0];
|
||||||
|
|
||||||
|
// Swap with the element at the end.
|
||||||
|
const lastIndex = length - i - 1;
|
||||||
|
values[0] = values[lastIndex];
|
||||||
|
|
||||||
|
// Restore top value being the largest.
|
||||||
|
this.heapify(lastIndex, 0);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private heapify(length: number, initialIndex: number) {
|
||||||
|
const { compare, values } = this;
|
||||||
|
let i = initialIndex;
|
||||||
|
|
||||||
|
while (true) {
|
||||||
|
const left = 2 * i + 1;
|
||||||
|
const right = 2 * i + 2;
|
||||||
|
let largest = i;
|
||||||
|
|
||||||
|
if (left < length && compare(values[largest], values[left])) {
|
||||||
|
// Left child is in-bounds and larger than parent. Swap with left.
|
||||||
|
largest = left;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (right < length && compare(values[largest], values[right])) {
|
||||||
|
// Right child is in-bounds and larger than parent or left. Swap with right.
|
||||||
|
largest = right;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (largest === i) {
|
||||||
|
// Largest value was already the parent. Done.
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Swap.
|
||||||
|
const temp = values[i];
|
||||||
|
values[i] = values[largest];
|
||||||
|
values[largest] = temp;
|
||||||
|
|
||||||
|
// Repair the subtree previously containing the largest element.
|
||||||
|
i = largest;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
1641  assets/package-lock.json  (generated): file diff suppressed because it is too large
|
@ -20,25 +20,26 @@
|
||||||
"postcss-mixins": "^10.0.1",
|
"postcss-mixins": "^10.0.1",
|
||||||
"postcss-simple-vars": "^7.0.1",
|
"postcss-simple-vars": "^7.0.1",
|
||||||
"typescript": "^5.4",
|
"typescript": "^5.4",
|
||||||
"vite": "^5.2"
|
"vite": "^5.4"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@testing-library/dom": "^10.1.0",
|
"@testing-library/dom": "^10.1.0",
|
||||||
"@testing-library/jest-dom": "^6.4.6",
|
"@testing-library/jest-dom": "^6.4.6",
|
||||||
"@types/chai-dom": "^1.11.3",
|
"@types/chai-dom": "^1.11.3",
|
||||||
"@vitest/coverage-v8": "^1.6.0",
|
"@vitest/coverage-v8": "^2.1.0",
|
||||||
"chai": "^5",
|
"chai": "^5",
|
||||||
"eslint": "^9.4.0",
|
"eslint": "^9.11.0",
|
||||||
"eslint-plugin-prettier": "^5.1.3",
|
"eslint-config-prettier": "^9.1.0",
|
||||||
|
"eslint-plugin-prettier": "^5.2.1",
|
||||||
"eslint-plugin-vitest": "^0.5.4",
|
"eslint-plugin-vitest": "^0.5.4",
|
||||||
"jest-environment-jsdom": "^29.7.0",
|
"jest-environment-jsdom": "^29.7.0",
|
||||||
"jsdom": "^24.1.0",
|
"jsdom": "^24.1.0",
|
||||||
"prettier": "^3.3.2",
|
"prettier": "^3.3.3",
|
||||||
"stylelint": "^16.6.1",
|
"stylelint": "^16.9.0",
|
||||||
"stylelint-config-standard": "^36.0.0",
|
"stylelint-config-standard": "^36.0.0",
|
||||||
"stylelint-prettier": "^5.0.0",
|
"stylelint-prettier": "^5.0.0",
|
||||||
"typescript-eslint": "8.0.0-alpha.39",
|
"typescript-eslint": "8.8.0",
|
||||||
"vitest": "^1.6.0",
|
"vitest": "^2.1.0",
|
||||||
"vitest-fetch-mock": "^0.2.2"
|
"vitest-fetch-mock": "^0.3.0"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -8,7 +8,7 @@ export function fixEventListeners(t: EventTarget) {
|
||||||
eventListeners = {};
|
eventListeners = {};
|
||||||
const oldAddEventListener = t.addEventListener;
|
const oldAddEventListener = t.addEventListener;
|
||||||
|
|
||||||
t.addEventListener = (type: string, listener: any, options: any): void => {
|
t.addEventListener = function (type: string, listener: any, options: any): void {
|
||||||
eventListeners[type] = eventListeners[type] || [];
|
eventListeners[type] = eventListeners[type] || [];
|
||||||
eventListeners[type].push(listener);
|
eventListeners[type].push(listener);
|
||||||
return oldAddEventListener(type, listener, options);
|
return oldAddEventListener(type, listener, options);
|
||||||
|
|
|
@ -31,7 +31,6 @@ Object.assign(globalThis, { URL, Blob });
|
||||||
|
|
||||||
// Prevents an error when calling `form.submit()` directly in
|
// Prevents an error when calling `form.submit()` directly in
|
||||||
// the code that is being tested
|
// the code that is being tested
|
||||||
// eslint-disable-next-line prettier/prettier
|
HTMLFormElement.prototype.submit = function () {
|
||||||
HTMLFormElement.prototype.submit = function() {
|
|
||||||
fireEvent.submit(this);
|
fireEvent.submit(this);
|
||||||
};
|
};
|
||||||
|
|
|
@ -14,7 +14,9 @@ export default defineConfig(({ command, mode }: ConfigEnv): UserConfig => {
|
||||||
fs.readdirSync(path.resolve(__dirname, 'css/themes/')).forEach(name => {
|
fs.readdirSync(path.resolve(__dirname, 'css/themes/')).forEach(name => {
|
||||||
const m = name.match(/([-a-z]+).css/);
|
const m = name.match(/([-a-z]+).css/);
|
||||||
|
|
||||||
if (m) targets.set(`css/${m[1]}`, `./css/themes/${m[1]}.css`);
|
if (m) return targets.set(`css/${m[1]}`, `./css/themes/${m[1]}.css`);
|
||||||
|
|
||||||
|
return null;
|
||||||
});
|
});
|
||||||
|
|
||||||
fs.readdirSync(path.resolve(__dirname, 'css/options/')).forEach(name => {
|
fs.readdirSync(path.resolve(__dirname, 'css/options/')).forEach(name => {
|
||||||
|
@ -66,13 +68,13 @@ export default defineConfig(({ command, mode }: ConfigEnv): UserConfig => {
|
||||||
test: {
|
test: {
|
||||||
globals: true,
|
globals: true,
|
||||||
environment: 'jsdom',
|
environment: 'jsdom',
|
||||||
|
exclude: ['node_modules/', '.*\\.test\\.ts$', '.*\\.d\\.ts$', '.*\\.spec\\.ts$'],
|
||||||
// TODO Jest --randomize CLI flag equivalent, consider enabling in the future
|
// TODO Jest --randomize CLI flag equivalent, consider enabling in the future
|
||||||
// sequence: { shuffle: true },
|
// sequence: { shuffle: true },
|
||||||
setupFiles: './test/vitest-setup.ts',
|
setupFiles: './test/vitest-setup.ts',
|
||||||
coverage: {
|
coverage: {
|
||||||
reporter: ['text', 'html'],
|
reporter: ['text', 'html'],
|
||||||
include: ['js/**/*.{js,ts}'],
|
include: ['js/**/*.{js,ts}'],
|
||||||
exclude: ['node_modules/', '.*\\.test\\.ts$', '.*\\.d\\.ts$'],
|
|
||||||
thresholds: {
|
thresholds: {
|
||||||
statements: 0,
|
statements: 0,
|
||||||
branches: 0,
|
branches: 0,
|
||||||
|
|
|
@ -59,7 +59,7 @@ services:
|
||||||
- '5173:5173'
|
- '5173:5173'
|
||||||
|
|
||||||
postgres:
|
postgres:
|
||||||
image: postgres:16.3-alpine
|
image: postgres:16.4-alpine
|
||||||
environment:
|
environment:
|
||||||
- POSTGRES_PASSWORD=postgres
|
- POSTGRES_PASSWORD=postgres
|
||||||
volumes:
|
volumes:
|
||||||
|
@ -68,7 +68,7 @@ services:
|
||||||
driver: "none"
|
driver: "none"
|
||||||
|
|
||||||
opensearch:
|
opensearch:
|
||||||
image: opensearchproject/opensearch:2.15.0
|
image: opensearchproject/opensearch:2.16.0
|
||||||
volumes:
|
volumes:
|
||||||
- opensearch_data:/usr/share/opensearch/data
|
- opensearch_data:/usr/share/opensearch/data
|
||||||
- ./docker/opensearch/opensearch.yml:/usr/share/opensearch/config/opensearch.yml
|
- ./docker/opensearch/opensearch.yml:/usr/share/opensearch/config/opensearch.yml
|
||||||
|
@ -80,12 +80,12 @@ services:
|
||||||
hard: 65536
|
hard: 65536
|
||||||
|
|
||||||
valkey:
|
valkey:
|
||||||
image: valkey/valkey:7.2.5-alpine
|
image: valkey/valkey:8.0-alpine
|
||||||
logging:
|
logging:
|
||||||
driver: "none"
|
driver: "none"
|
||||||
|
|
||||||
files:
|
files:
|
||||||
image: andrewgaul/s3proxy:sha-4175022
|
image: andrewgaul/s3proxy:sha-4976e17
|
||||||
environment:
|
environment:
|
||||||
- JCLOUDS_FILESYSTEM_BASEDIR=/srv/philomena/priv/s3
|
- JCLOUDS_FILESYSTEM_BASEDIR=/srv/philomena/priv/s3
|
||||||
volumes:
|
volumes:
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
FROM elixir:1.17-alpine
|
FROM elixir:1.17.2-alpine
|
||||||
|
|
||||||
ADD https://api.github.com/repos/philomena-dev/FFmpeg/git/refs/heads/release/6.1 /tmp/ffmpeg_version.json
|
ADD https://api.github.com/repos/philomena-dev/FFmpeg/git/refs/heads/release/6.1 /tmp/ffmpeg_version.json
|
||||||
RUN (echo "https://github.com/philomena-dev/prebuilt-ffmpeg/raw/master"; cat /etc/apk/repositories) > /tmp/repositories \
|
RUN (echo "https://github.com/philomena-dev/prebuilt-ffmpeg/raw/master"; cat /etc/apk/repositories) > /tmp/repositories \
|
||||||
|
|
|
@ -76,7 +76,7 @@ end
|
||||||
|
|
||||||
local function get_hashed_canonical_request(timestamp, host, uri)
|
local function get_hashed_canonical_request(timestamp, host, uri)
|
||||||
local digest = get_sha256_digest(ngx.var.request_body)
|
local digest = get_sha256_digest(ngx.var.request_body)
|
||||||
local canonical_request = ngx.var.request_method .. '\n'
|
local canonical_request = 'GET' .. '\n'
|
||||||
.. uri .. '\n'
|
.. uri .. '\n'
|
||||||
.. '\n'
|
.. '\n'
|
||||||
.. 'host:' .. host .. '\n'
|
.. 'host:' .. host .. '\n'
|
||||||
|
|
|
@ -34,7 +34,7 @@ init_by_lua_block {
|
||||||
function sign_aws_request()
|
function sign_aws_request()
|
||||||
-- The API token used should not allow writing, but
|
-- The API token used should not allow writing, but
|
||||||
-- sanitize this anyway to stop an upstream error
|
-- sanitize this anyway to stop an upstream error
|
||||||
if ngx.req.get_method() ~= 'GET' then
|
if ngx.req.get_method() ~= 'GET' and ngx.req.get_method() ~= 'HEAD' then
|
||||||
ngx.status = ngx.HTTP_UNAUTHORIZED
|
ngx.status = ngx.HTTP_UNAUTHORIZED
|
||||||
ngx.say('Unauthorized')
|
ngx.say('Unauthorized')
|
||||||
return ngx.exit(ngx.HTTP_UNAUTHORIZED)
|
return ngx.exit(ngx.HTTP_UNAUTHORIZED)
|
||||||
|
|
|
@ -42,6 +42,7 @@ metadata: image_search_json
|
||||||
'processed', processed,
|
'processed', processed,
|
||||||
'score', score,
|
'score', score,
|
||||||
'size', image_size,
|
'size', image_size,
|
||||||
|
'orig_size', image_orig_size,
|
||||||
'sha512_hash', image_sha512_hash,
|
'sha512_hash', image_sha512_hash,
|
||||||
'thumbnails_generated', thumbnails_generated,
|
'thumbnails_generated', thumbnails_generated,
|
||||||
'updated_at', updated_at,
|
'updated_at', updated_at,
|
||||||
|
|
|
@ -21,8 +21,8 @@ metadata: post_search_json
|
||||||
'body', p.body,
|
'body', p.body,
|
||||||
'subject', t.title,
|
'subject', t.title,
|
||||||
'ip', p.ip,
|
'ip', p.ip,
|
||||||
'user_agent', p.user_agent,
|
'user_agent', '',
|
||||||
'referrer', p.referrer,
|
'referrer', '',
|
||||||
'fingerprint', p.fingerprint,
|
'fingerprint', p.fingerprint,
|
||||||
'topic_position', p.topic_position,
|
'topic_position', p.topic_position,
|
||||||
'forum', f.short_name,
|
'forum', f.short_name,
|
||||||
|
|
|
@ -121,7 +121,7 @@ defmodule Philomena.Adverts do
|
||||||
"""
|
"""
|
||||||
def create_advert(attrs \\ %{}) do
|
def create_advert(attrs \\ %{}) do
|
||||||
%Advert{}
|
%Advert{}
|
||||||
|> Advert.save_changeset(attrs)
|
|> Advert.changeset(attrs)
|
||||||
|> Uploader.analyze_upload(attrs)
|
|> Uploader.analyze_upload(attrs)
|
||||||
|> Repo.insert()
|
|> Repo.insert()
|
||||||
|> case do
|
|> case do
|
||||||
|
@ -150,7 +150,7 @@ defmodule Philomena.Adverts do
|
||||||
"""
|
"""
|
||||||
def update_advert(%Advert{} = advert, attrs) do
|
def update_advert(%Advert{} = advert, attrs) do
|
||||||
advert
|
advert
|
||||||
|> Advert.save_changeset(attrs)
|
|> Advert.changeset(attrs)
|
||||||
|> Repo.update()
|
|> Repo.update()
|
||||||
end
|
end
|
||||||
|
|
||||||
|
|
|
@ -2,8 +2,6 @@ defmodule Philomena.Adverts.Advert do
|
||||||
use Ecto.Schema
|
use Ecto.Schema
|
||||||
import Ecto.Changeset
|
import Ecto.Changeset
|
||||||
|
|
||||||
alias Philomena.Schema.Time
|
|
||||||
|
|
||||||
schema "adverts" do
|
schema "adverts" do
|
||||||
field :image, :string
|
field :image, :string
|
||||||
field :link, :string
|
field :link, :string
|
||||||
|
@ -11,8 +9,8 @@ defmodule Philomena.Adverts.Advert do
|
||||||
field :clicks, :integer, default: 0
|
field :clicks, :integer, default: 0
|
||||||
field :impressions, :integer, default: 0
|
field :impressions, :integer, default: 0
|
||||||
field :live, :boolean, default: false
|
field :live, :boolean, default: false
|
||||||
field :start_date, :utc_datetime
|
field :start_date, PhilomenaQuery.Ecto.RelativeDate
|
||||||
field :finish_date, :utc_datetime
|
field :finish_date, PhilomenaQuery.Ecto.RelativeDate
|
||||||
field :restrictions, :string
|
field :restrictions, :string
|
||||||
field :notes, :string
|
field :notes, :string
|
||||||
|
|
||||||
|
@ -24,29 +22,18 @@ defmodule Philomena.Adverts.Advert do
|
||||||
field :uploaded_image, :string, virtual: true
|
field :uploaded_image, :string, virtual: true
|
||||||
field :removed_image, :string, virtual: true
|
field :removed_image, :string, virtual: true
|
||||||
|
|
||||||
field :start_time, :string, virtual: true
|
|
||||||
field :finish_time, :string, virtual: true
|
|
||||||
|
|
||||||
timestamps(inserted_at: :created_at, type: :utc_datetime)
|
timestamps(inserted_at: :created_at, type: :utc_datetime)
|
||||||
end
|
end
|
||||||
|
|
||||||
@doc false
|
@doc false
|
||||||
def changeset(advert, attrs) do
|
def changeset(advert, attrs) do
|
||||||
advert
|
advert
|
||||||
|> cast(attrs, [])
|
|> cast(attrs, [:title, :link, :start_date, :finish_date, :live, :restrictions, :notes])
|
||||||
|> Time.propagate_time(:start_date, :start_time)
|
|
||||||
|> Time.propagate_time(:finish_date, :finish_time)
|
|
||||||
end
|
|
||||||
|
|
||||||
def save_changeset(advert, attrs) do
|
|
||||||
advert
|
|
||||||
|> cast(attrs, [:title, :link, :start_time, :finish_time, :live, :restrictions, :notes])
|
|
||||||
|> Time.assign_time(:start_time, :start_date)
|
|
||||||
|> Time.assign_time(:finish_time, :finish_date)
|
|
||||||
|> validate_required([:title, :link, :start_date, :finish_date])
|
|> validate_required([:title, :link, :start_date, :finish_date])
|
||||||
|> validate_inclusion(:restrictions, ["none", "nsfw", "sfw"])
|
|> validate_inclusion(:restrictions, ["none", "nsfw", "sfw"])
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@doc false
|
||||||
def image_changeset(advert, attrs) do
|
def image_changeset(advert, attrs) do
|
||||||
advert
|
advert
|
||||||
|> cast(attrs, [
|
|> cast(attrs, [
|
||||||
|
|
|
@ -4,7 +4,7 @@ defmodule Philomena.Adverts.Recorder do
|
||||||
import Ecto.Query
|
import Ecto.Query
|
||||||
|
|
||||||
def run(%{impressions: impressions, clicks: clicks}) do
|
def run(%{impressions: impressions, clicks: clicks}) do
|
||||||
now = DateTime.utc_now() |> DateTime.truncate(:second)
|
now = DateTime.utc_now(:second)
|
||||||
|
|
||||||
# Create insert statements for Ecto
|
# Create insert statements for Ecto
|
||||||
impressions = Enum.map(impressions, &impressions_insert_all(&1, now))
|
impressions = Enum.map(impressions, &impressions_insert_all(&1, now))
|
||||||
|
|
|
@ -93,7 +93,7 @@ defmodule Philomena.ArtistLinks do
|
||||||
|
|
||||||
Multi.new()
|
Multi.new()
|
||||||
|> Multi.update(:artist_link, artist_link_changeset)
|
|> Multi.update(:artist_link, artist_link_changeset)
|
||||||
|> Multi.run(:add_award, fn _repo, _changes -> BadgeAwarder.award_badge(artist_link) end)
|
|> Multi.run(:add_award, BadgeAwarder.award_callback(artist_link, verifying_user))
|
||||||
|> Repo.transaction()
|
|> Repo.transaction()
|
||||||
|> case do
|
|> case do
|
||||||
{:ok, %{artist_link: artist_link}} ->
|
{:ok, %{artist_link: artist_link}} ->
|
||||||
|
|
|
@ -15,8 +15,6 @@ defmodule Philomena.ArtistLinks.ArtistLink do
|
||||||
|
|
||||||
field :aasm_state, :string, default: "unverified"
|
field :aasm_state, :string, default: "unverified"
|
||||||
field :uri, :string
|
field :uri, :string
|
||||||
field :hostname, :string
|
|
||||||
field :path, :string
|
|
||||||
field :verification_code, :string
|
field :verification_code, :string
|
||||||
field :public, :boolean, default: true
|
field :public, :boolean, default: true
|
||||||
field :next_check_at, :utc_datetime
|
field :next_check_at, :utc_datetime
|
||||||
|
@ -37,7 +35,6 @@ defmodule Philomena.ArtistLinks.ArtistLink do
|
||||||
|> cast(attrs, [:uri, :public])
|
|> cast(attrs, [:uri, :public])
|
||||||
|> put_change(:tag_id, nil)
|
|> put_change(:tag_id, nil)
|
||||||
|> validate_required([:user, :uri, :public])
|
|> validate_required([:user, :uri, :public])
|
||||||
|> parse_uri()
|
|
||||||
end
|
end
|
||||||
|
|
||||||
def edit_changeset(artist_link, attrs, tag) do
|
def edit_changeset(artist_link, attrs, tag) do
|
||||||
|
@ -45,7 +42,6 @@ defmodule Philomena.ArtistLinks.ArtistLink do
|
||||||
|> cast(attrs, [:uri, :public])
|
|> cast(attrs, [:uri, :public])
|
||||||
|> put_change(:tag_id, tag.id)
|
|> put_change(:tag_id, tag.id)
|
||||||
|> validate_required([:user, :uri, :public])
|
|> validate_required([:user, :uri, :public])
|
||||||
|> parse_uri()
|
|
||||||
end
|
end
|
||||||
|
|
||||||
def creation_changeset(artist_link, attrs, user, tag) do
|
def creation_changeset(artist_link, attrs, user, tag) do
|
||||||
|
@ -57,7 +53,6 @@ defmodule Philomena.ArtistLinks.ArtistLink do
|
||||||
|> validate_required([:tag], message: "must exist")
|
|> validate_required([:tag], message: "must exist")
|
||||||
|> validate_format(:uri, ~r|\Ahttps?://|)
|
|> validate_format(:uri, ~r|\Ahttps?://|)
|
||||||
|> validate_category()
|
|> validate_category()
|
||||||
|> parse_uri()
|
|
||||||
|> put_verification_code()
|
|> put_verification_code()
|
||||||
|> put_next_check_at()
|
|> put_next_check_at()
|
||||||
|> unique_constraint([:uri, :tag_id, :user_id],
|
|> unique_constraint([:uri, :tag_id, :user_id],
|
||||||
|
@ -90,22 +85,13 @@ defmodule Philomena.ArtistLinks.ArtistLink do
|
||||||
end
|
end
|
||||||
|
|
||||||
def contact_changeset(artist_link, user) do
|
def contact_changeset(artist_link, user) do
|
||||||
now = DateTime.utc_now() |> DateTime.truncate(:second)
|
artist_link
|
||||||
|
|> change()
|
||||||
change(artist_link)
|
|
||||||
|> put_change(:contacted_by_user_id, user.id)
|
|> put_change(:contacted_by_user_id, user.id)
|
||||||
|> put_change(:contacted_at, now)
|
|> put_change(:contacted_at, DateTime.utc_now(:second))
|
||||||
|> put_change(:aasm_state, "contacted")
|
|> put_change(:aasm_state, "contacted")
|
||||||
end
|
end
|
||||||
|
|
||||||
defp parse_uri(changeset) do
|
|
||||||
string_uri = get_field(changeset, :uri) |> to_string()
|
|
||||||
uri = URI.parse(string_uri)
|
|
||||||
|
|
||||||
changeset
|
|
||||||
|> change(hostname: uri.host, path: uri.path)
|
|
||||||
end
|
|
||||||
|
|
||||||
defp put_verification_code(changeset) do
|
defp put_verification_code(changeset) do
|
||||||
code = :crypto.strong_rand_bytes(5) |> Base.encode16()
|
code = :crypto.strong_rand_bytes(5) |> Base.encode16()
|
||||||
change(changeset, verification_code: "#{gettext("PHILOMENA-LINKVALIDATION")}-#{code}")
|
change(changeset, verification_code: "#{gettext("PHILOMENA-LINKVALIDATION")}-#{code}")
|
||||||
|
@ -113,9 +99,9 @@ defmodule Philomena.ArtistLinks.ArtistLink do
|
||||||
|
|
||||||
defp put_next_check_at(changeset) do
|
defp put_next_check_at(changeset) do
|
||||||
time =
|
time =
|
||||||
DateTime.utc_now()
|
:second
|
||||||
|
|> DateTime.utc_now()
|
||||||
|> DateTime.add(60 * 2, :second)
|
|> DateTime.add(60 * 2, :second)
|
||||||
|> DateTime.truncate(:second)
|
|
||||||
|
|
||||||
change(changeset, next_check_at: time)
|
change(changeset, next_check_at: time)
|
||||||
end
|
end
|
||||||
|
|
|
@ -16,13 +16,22 @@ defmodule Philomena.ArtistLinks.BadgeAwarder do
|
||||||
Returns `{:ok, award}`, `{:ok, nil}`, or `{:error, changeset}`. The return value is
|
Returns `{:ok, award}`, `{:ok, nil}`, or `{:error, changeset}`. The return value is
|
||||||
suitable for use as the return value to an `Ecto.Multi.run/3` callback.
|
suitable for use as the return value to an `Ecto.Multi.run/3` callback.
|
||||||
"""
|
"""
|
||||||
def award_badge(artist_link) do
|
def award_badge(artist_link, verifying_user) do
|
||||||
with badge when not is_nil(badge) <- Badges.get_badge_by_title(@badge_title),
|
with badge when not is_nil(badge) <- Badges.get_badge_by_title(@badge_title),
|
||||||
award when is_nil(award) <- Badges.get_badge_award_for(badge, artist_link.user) do
|
award when is_nil(award) <- Badges.get_badge_award_for(badge, artist_link.user) do
|
||||||
Badges.create_badge_award(artist_link.user, artist_link.user, %{badge_id: badge.id})
|
Badges.create_badge_award(verifying_user, artist_link.user, %{badge_id: badge.id})
|
||||||
else
|
else
|
||||||
_ ->
|
_ ->
|
||||||
{:ok, nil}
|
{:ok, nil}
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@doc """
|
||||||
|
Get a callback for issuing a badge award from within an `m:Ecto.Multi`.
|
||||||
|
"""
|
||||||
|
def award_callback(artist_link, verifying_user) do
|
||||||
|
fn _repo, _changes ->
|
||||||
|
award_badge(artist_link, verifying_user)
|
||||||
|
end
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -26,9 +26,7 @@ defmodule Philomena.Badges.Award do
|
||||||
end
|
end
|
||||||
|
|
||||||
defp put_awarded_on(%{data: %{awarded_on: nil}} = changeset) do
|
defp put_awarded_on(%{data: %{awarded_on: nil}} = changeset) do
|
||||||
now = DateTime.utc_now() |> DateTime.truncate(:second)
|
put_change(changeset, :awarded_on, DateTime.utc_now(:second))
|
||||||
|
|
||||||
put_change(changeset, :awarded_on, now)
|
|
||||||
end
|
end
|
||||||
|
|
||||||
defp put_awarded_on(changeset), do: changeset
|
defp put_awarded_on(changeset), do: changeset
|
||||||
|
|
|
@ -56,7 +56,7 @@ defmodule Philomena.Bans do
|
||||||
"""
|
"""
|
||||||
def create_fingerprint(creator, attrs \\ %{}) do
|
def create_fingerprint(creator, attrs \\ %{}) do
|
||||||
%Fingerprint{banning_user_id: creator.id}
|
%Fingerprint{banning_user_id: creator.id}
|
||||||
|> Fingerprint.save_changeset(attrs)
|
|> Fingerprint.changeset(attrs)
|
||||||
|> Repo.insert()
|
|> Repo.insert()
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -74,7 +74,7 @@ defmodule Philomena.Bans do
|
||||||
"""
|
"""
|
||||||
def update_fingerprint(%Fingerprint{} = fingerprint, attrs) do
|
def update_fingerprint(%Fingerprint{} = fingerprint, attrs) do
|
||||||
fingerprint
|
fingerprint
|
||||||
|> Fingerprint.save_changeset(attrs)
|
|> Fingerprint.changeset(attrs)
|
||||||
|> Repo.update()
|
|> Repo.update()
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -150,7 +150,7 @@ defmodule Philomena.Bans do
|
||||||
"""
|
"""
|
||||||
def create_subnet(creator, attrs \\ %{}) do
|
def create_subnet(creator, attrs \\ %{}) do
|
||||||
%Subnet{banning_user_id: creator.id}
|
%Subnet{banning_user_id: creator.id}
|
||||||
|> Subnet.save_changeset(attrs)
|
|> Subnet.changeset(attrs)
|
||||||
|> Repo.insert()
|
|> Repo.insert()
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -168,7 +168,7 @@ defmodule Philomena.Bans do
|
||||||
"""
|
"""
|
||||||
def update_subnet(%Subnet{} = subnet, attrs) do
|
def update_subnet(%Subnet{} = subnet, attrs) do
|
||||||
subnet
|
subnet
|
||||||
|> Subnet.save_changeset(attrs)
|
|> Subnet.changeset(attrs)
|
||||||
|> Repo.update()
|
|> Repo.update()
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -245,7 +245,7 @@ defmodule Philomena.Bans do
|
||||||
def create_user(creator, attrs \\ %{}) do
|
def create_user(creator, attrs \\ %{}) do
|
||||||
changeset =
|
changeset =
|
||||||
%User{banning_user_id: creator.id}
|
%User{banning_user_id: creator.id}
|
||||||
|> User.save_changeset(attrs)
|
|> User.changeset(attrs)
|
||||||
|
|
||||||
Multi.new()
|
Multi.new()
|
||||||
|> Multi.insert(:user_ban, changeset)
|
|> Multi.insert(:user_ban, changeset)
|
||||||
|
@ -276,7 +276,7 @@ defmodule Philomena.Bans do
|
||||||
"""
|
"""
|
||||||
def update_user(%User{} = user, attrs) do
|
def update_user(%User{} = user, attrs) do
|
||||||
user
|
user
|
||||||
|> User.save_changeset(attrs)
|
|> User.changeset(attrs)
|
||||||
|> Repo.update()
|
|> Repo.update()
|
||||||
end
|
end
|
||||||
|
|
||||||
|
|
|
@ -1,10 +1,9 @@
|
||||||
defmodule Philomena.Bans.Fingerprint do
|
defmodule Philomena.Bans.Fingerprint do
|
||||||
use Ecto.Schema
|
use Ecto.Schema
|
||||||
import Ecto.Changeset
|
import Ecto.Changeset
|
||||||
|
import Philomena.Bans.IdGenerator
|
||||||
|
|
||||||
alias Philomena.Users.User
|
alias Philomena.Users.User
|
||||||
alias Philomena.Schema.Time
|
|
||||||
alias Philomena.Schema.BanId
|
|
||||||
|
|
||||||
schema "fingerprint_bans" do
|
schema "fingerprint_bans" do
|
||||||
belongs_to :banning_user, User
|
belongs_to :banning_user, User
|
||||||
|
@ -12,27 +11,18 @@ defmodule Philomena.Bans.Fingerprint do
|
||||||
field :reason, :string
|
field :reason, :string
|
||||||
field :note, :string
|
field :note, :string
|
||||||
field :enabled, :boolean, default: true
|
field :enabled, :boolean, default: true
|
||||||
field :valid_until, :utc_datetime
|
field :valid_until, PhilomenaQuery.Ecto.RelativeDate
|
||||||
field :fingerprint, :string
|
field :fingerprint, :string
|
||||||
field :generated_ban_id, :string
|
field :generated_ban_id, :string
|
||||||
|
|
||||||
field :until, :string, virtual: true
|
|
||||||
|
|
||||||
timestamps(inserted_at: :created_at, type: :utc_datetime)
|
timestamps(inserted_at: :created_at, type: :utc_datetime)
|
||||||
end
|
end
|
||||||
|
|
||||||
@doc false
|
@doc false
|
||||||
def changeset(fingerprint_ban, attrs) do
|
def changeset(fingerprint_ban, attrs) do
|
||||||
fingerprint_ban
|
fingerprint_ban
|
||||||
|> cast(attrs, [])
|
|> cast(attrs, [:reason, :note, :enabled, :fingerprint, :valid_until])
|
||||||
|> Time.propagate_time(:valid_until, :until)
|
|> put_ban_id("F")
|
||||||
end
|
|
||||||
|
|
||||||
def save_changeset(fingerprint_ban, attrs) do
|
|
||||||
fingerprint_ban
|
|
||||||
|> cast(attrs, [:reason, :note, :enabled, :fingerprint, :until])
|
|
||||||
|> Time.assign_time(:until, :valid_until)
|
|
||||||
|> BanId.put_ban_id("F")
|
|
||||||
|> validate_required([:reason, :enabled, :fingerprint, :valid_until])
|
|> validate_required([:reason, :enabled, :fingerprint, :valid_until])
|
||||||
|> check_constraint(:valid_until, name: :fingerprint_ban_duration_must_be_valid)
|
|> check_constraint(:valid_until, name: :fingerprint_ban_duration_must_be_valid)
|
||||||
end
|
end
|
||||||
|
|
|
@ -1,4 +1,6 @@
|
||||||
defmodule Philomena.Schema.BanId do
|
defmodule Philomena.Bans.IdGenerator do
|
||||||
|
@moduledoc false
|
||||||
|
|
||||||
import Ecto.Changeset
|
import Ecto.Changeset
|
||||||
|
|
||||||
def put_ban_id(%{data: %{generated_ban_id: nil}} = changeset, prefix) do
|
def put_ban_id(%{data: %{generated_ban_id: nil}} = changeset, prefix) do
|
|
@ -1,10 +1,9 @@
|
||||||
defmodule Philomena.Bans.Subnet do
|
defmodule Philomena.Bans.Subnet do
|
||||||
use Ecto.Schema
|
use Ecto.Schema
|
||||||
import Ecto.Changeset
|
import Ecto.Changeset
|
||||||
|
import Philomena.Bans.IdGenerator
|
||||||
|
|
||||||
alias Philomena.Users.User
|
alias Philomena.Users.User
|
||||||
alias Philomena.Schema.Time
|
|
||||||
alias Philomena.Schema.BanId
|
|
||||||
|
|
||||||
schema "subnet_bans" do
|
schema "subnet_bans" do
|
||||||
belongs_to :banning_user, User
|
belongs_to :banning_user, User
|
||||||
|
@ -12,27 +11,18 @@ defmodule Philomena.Bans.Subnet do
|
||||||
field :reason, :string
|
field :reason, :string
|
||||||
field :note, :string
|
field :note, :string
|
||||||
field :enabled, :boolean, default: true
|
field :enabled, :boolean, default: true
|
||||||
field :valid_until, :utc_datetime
|
field :valid_until, PhilomenaQuery.Ecto.RelativeDate
|
||||||
field :specification, EctoNetwork.INET
|
field :specification, EctoNetwork.INET
|
||||||
field :generated_ban_id, :string
|
field :generated_ban_id, :string
|
||||||
|
|
||||||
field :until, :string, virtual: true
|
|
||||||
|
|
||||||
timestamps(inserted_at: :created_at, type: :utc_datetime)
|
timestamps(inserted_at: :created_at, type: :utc_datetime)
|
||||||
end
|
end
|
||||||
|
|
||||||
@doc false
|
@doc false
|
||||||
def changeset(subnet_ban, attrs) do
|
def changeset(subnet_ban, attrs) do
|
||||||
subnet_ban
|
subnet_ban
|
||||||
|> cast(attrs, [])
|
|> cast(attrs, [:reason, :note, :enabled, :specification, :valid_until])
|
||||||
|> Time.propagate_time(:valid_until, :until)
|
|> put_ban_id("S")
|
||||||
end
|
|
||||||
|
|
||||||
def save_changeset(subnet_ban, attrs) do
|
|
||||||
subnet_ban
|
|
||||||
|> cast(attrs, [:reason, :note, :enabled, :specification, :until])
|
|
||||||
|> Time.assign_time(:until, :valid_until)
|
|
||||||
|> BanId.put_ban_id("S")
|
|
||||||
|> validate_required([:reason, :enabled, :specification, :valid_until])
|
|> validate_required([:reason, :enabled, :specification, :valid_until])
|
||||||
|> check_constraint(:valid_until, name: :subnet_ban_duration_must_be_valid)
|
|> check_constraint(:valid_until, name: :subnet_ban_duration_must_be_valid)
|
||||||
|> mask_specification()
|
|> mask_specification()
|
||||||
|
|
|
@ -1,11 +1,9 @@
|
||||||
defmodule Philomena.Bans.User do
|
defmodule Philomena.Bans.User do
|
||||||
use Ecto.Schema
|
use Ecto.Schema
|
||||||
import Ecto.Changeset
|
import Ecto.Changeset
|
||||||
|
import Philomena.Bans.IdGenerator
|
||||||
|
|
||||||
alias Philomena.Users.User
|
alias Philomena.Users.User
|
||||||
alias Philomena.Repo
|
|
||||||
alias Philomena.Schema.Time
|
|
||||||
alias Philomena.Schema.BanId
|
|
||||||
|
|
||||||
schema "user_bans" do
|
schema "user_bans" do
|
||||||
belongs_to :user, User
|
belongs_to :user, User
|
||||||
|
@ -14,48 +12,19 @@ defmodule Philomena.Bans.User do
|
||||||
field :reason, :string
|
field :reason, :string
|
||||||
field :note, :string
|
field :note, :string
|
||||||
field :enabled, :boolean, default: true
|
field :enabled, :boolean, default: true
|
||||||
field :valid_until, :utc_datetime
|
field :valid_until, PhilomenaQuery.Ecto.RelativeDate
|
||||||
field :generated_ban_id, :string
|
field :generated_ban_id, :string
|
||||||
field :override_ip_ban, :boolean, default: false
|
field :override_ip_ban, :boolean, default: false
|
||||||
|
|
||||||
field :username, :string, virtual: true
|
|
||||||
field :until, :string, virtual: true
|
|
||||||
|
|
||||||
timestamps(inserted_at: :created_at, type: :utc_datetime)
|
timestamps(inserted_at: :created_at, type: :utc_datetime)
|
||||||
end
|
end
|
||||||
|
|
||||||
@doc false
|
@doc false
|
||||||
def changeset(user_ban, attrs) do
|
def changeset(user_ban, attrs) do
|
||||||
user_ban
|
user_ban
|
||||||
|> cast(attrs, [])
|
|> cast(attrs, [:reason, :note, :enabled, :override_ip_ban, :user_id, :valid_until])
|
||||||
|> Time.propagate_time(:valid_until, :until)
|
|> put_ban_id("U")
|
||||||
|> populate_username()
|
|
||||||
end
|
|
||||||
|
|
||||||
def save_changeset(user_ban, attrs) do
|
|
||||||
user_ban
|
|
||||||
|> cast(attrs, [:reason, :note, :enabled, :override_ip_ban, :username, :until])
|
|
||||||
|> Time.assign_time(:until, :valid_until)
|
|
||||||
|> populate_user_id()
|
|
||||||
|> BanId.put_ban_id("U")
|
|
||||||
|> validate_required([:reason, :enabled, :user_id, :valid_until])
|
|> validate_required([:reason, :enabled, :user_id, :valid_until])
|
||||||
|> check_constraint(:valid_until, name: :user_ban_duration_must_be_valid)
|
|> check_constraint(:valid_until, name: :user_ban_duration_must_be_valid)
|
||||||
end
|
end
|
||||||
|
|
||||||
defp populate_username(changeset) do
|
|
||||||
case maybe_get_by(:id, get_field(changeset, :user_id)) do
|
|
||||||
nil -> changeset
|
|
||||||
user -> put_change(changeset, :username, user.name)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
defp populate_user_id(changeset) do
|
|
||||||
case maybe_get_by(:name, get_field(changeset, :username)) do
|
|
||||||
nil -> changeset
|
|
||||||
%{id: id} -> put_change(changeset, :user_id, id)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
defp maybe_get_by(_field, nil), do: nil
|
|
||||||
defp maybe_get_by(field, value), do: Repo.get_by(User, [{field, value}])
|
|
||||||
end
|
end
|
||||||
|
|
|
@ -9,6 +9,11 @@ defmodule Philomena.Channels do
|
||||||
alias Philomena.Channels.AutomaticUpdater
|
alias Philomena.Channels.AutomaticUpdater
|
||||||
alias Philomena.Channels.Channel
|
alias Philomena.Channels.Channel
|
||||||
alias Philomena.Notifications
|
alias Philomena.Notifications
|
||||||
|
alias Philomena.Tags
|
||||||
|
|
||||||
|
use Philomena.Subscriptions,
|
||||||
|
on_delete: :clear_channel_notification,
|
||||||
|
id_name: :channel_id
|
||||||
|
|
||||||
@doc """
|
@doc """
|
||||||
Updates all the tracked channels for which an update scheme is known.
|
Updates all the tracked channels for which an update scheme is known.
|
||||||
|
@ -47,6 +52,7 @@ defmodule Philomena.Channels do
|
||||||
"""
|
"""
|
||||||
def create_channel(attrs \\ %{}) do
|
def create_channel(attrs \\ %{}) do
|
||||||
%Channel{}
|
%Channel{}
|
||||||
|
|> update_artist_tag(attrs)
|
||||||
|> Channel.changeset(attrs)
|
|> Channel.changeset(attrs)
|
||||||
|> Repo.insert()
|
|> Repo.insert()
|
||||||
end
|
end
|
||||||
|
@ -65,10 +71,29 @@ defmodule Philomena.Channels do
|
||||||
"""
|
"""
|
||||||
def update_channel(%Channel{} = channel, attrs) do
|
def update_channel(%Channel{} = channel, attrs) do
|
||||||
channel
|
channel
|
||||||
|
|> update_artist_tag(attrs)
|
||||||
|> Channel.changeset(attrs)
|
|> Channel.changeset(attrs)
|
||||||
|> Repo.update()
|
|> Repo.update()
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@doc """
|
||||||
|
Adds the artist tag from the `"artist_tag"` tag name attribute.
|
||||||
|
|
||||||
|
## Examples
|
||||||
|
|
||||||
|
iex> update_artist_tag(%Channel{}, %{"artist_tag" => "artist:nighty"})
|
||||||
|
%Ecto.Changeset{}
|
||||||
|
|
||||||
|
"""
|
||||||
|
def update_artist_tag(%Channel{} = channel, attrs) do
|
||||||
|
tag =
|
||||||
|
attrs
|
||||||
|
|> Map.get("artist_tag", "")
|
||||||
|
|> Tags.get_tag_by_name()
|
||||||
|
|
||||||
|
Channel.artist_tag_changeset(channel, tag)
|
||||||
|
end
|
||||||
|
|
||||||
@doc """
|
@doc """
|
||||||
Updates a channel's state when it goes live.
|
Updates a channel's state when it goes live.
|
||||||
|
|
||||||
|
@ -116,68 +141,17 @@ defmodule Philomena.Channels do
|
||||||
Channel.changeset(channel, %{})
|
Channel.changeset(channel, %{})
|
||||||
end
|
end
|
||||||
|
|
||||||
alias Philomena.Channels.Subscription
|
|
||||||
|
|
||||||
@doc """
|
@doc """
|
||||||
Creates a subscription.
|
Removes all channel notifications for a given channel and user.
|
||||||
|
|
||||||
## Examples
|
## Examples
|
||||||
|
|
||||||
iex> create_subscription(%{field: value})
|
iex> clear_channel_notification(channel, user)
|
||||||
{:ok, %Subscription{}}
|
:ok
|
||||||
|
|
||||||
iex> create_subscription(%{field: bad_value})
|
|
||||||
{:error, %Ecto.Changeset{}}
|
|
||||||
|
|
||||||
"""
|
"""
|
||||||
def create_subscription(_channel, nil), do: {:ok, nil}
|
def clear_channel_notification(%Channel{} = channel, user) do
|
||||||
|
Notifications.clear_channel_live_notification(channel, user)
|
||||||
def create_subscription(channel, user) do
|
:ok
|
||||||
%Subscription{channel_id: channel.id, user_id: user.id}
|
|
||||||
|> Subscription.changeset(%{})
|
|
||||||
|> Repo.insert(on_conflict: :nothing)
|
|
||||||
end
|
|
||||||
|
|
||||||
@doc """
|
|
||||||
Deletes a Subscription.
|
|
||||||
|
|
||||||
## Examples
|
|
||||||
|
|
||||||
iex> delete_subscription(subscription)
|
|
||||||
{:ok, %Subscription{}}
|
|
||||||
|
|
||||||
iex> delete_subscription(subscription)
|
|
||||||
{:error, %Ecto.Changeset{}}
|
|
||||||
|
|
||||||
"""
|
|
||||||
def delete_subscription(channel, user) do
|
|
||||||
clear_notification(channel, user)
|
|
||||||
|
|
||||||
%Subscription{channel_id: channel.id, user_id: user.id}
|
|
||||||
|> Repo.delete()
|
|
||||||
end
|
|
||||||
|
|
||||||
def subscribed?(_channel, nil), do: false
|
|
||||||
|
|
||||||
def subscribed?(channel, user) do
|
|
||||||
Subscription
|
|
||||||
|> where(channel_id: ^channel.id, user_id: ^user.id)
|
|
||||||
|> Repo.exists?()
|
|
||||||
end
|
|
||||||
|
|
||||||
def subscriptions(_channels, nil), do: %{}
|
|
||||||
|
|
||||||
def subscriptions(channels, user) do
|
|
||||||
channel_ids = Enum.map(channels, & &1.id)
|
|
||||||
|
|
||||||
Subscription
|
|
||||||
|> where([s], s.channel_id in ^channel_ids and s.user_id == ^user.id)
|
|
||||||
|> Repo.all()
|
|
||||||
|> Map.new(&{&1.channel_id, true})
|
|
||||||
end
|
|
||||||
|
|
||||||
def clear_notification(channel, user) do
|
|
||||||
Notifications.delete_unread_notification("Channel", channel.id, user)
|
|
||||||
Notifications.delete_unread_notification("LivestreamChannel", channel.id, user)
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -3,7 +3,6 @@ defmodule Philomena.Channels.Channel do
|
||||||
import Ecto.Changeset
|
import Ecto.Changeset
|
||||||
|
|
||||||
alias Philomena.Tags.Tag
|
alias Philomena.Tags.Tag
|
||||||
alias Philomena.Repo
|
|
||||||
|
|
||||||
schema "channels" do
|
schema "channels" do
|
||||||
belongs_to :associated_artist_tag, Tag
|
belongs_to :associated_artist_tag, Tag
|
||||||
|
@ -13,22 +12,12 @@ defmodule Philomena.Channels.Channel do
|
||||||
|
|
||||||
field :short_name, :string
|
field :short_name, :string
|
||||||
field :title, :string, default: ""
|
field :title, :string, default: ""
|
||||||
field :tags, :string
|
|
||||||
field :viewers, :integer, default: 0
|
field :viewers, :integer, default: 0
|
||||||
field :nsfw, :boolean, default: false
|
field :nsfw, :boolean, default: false
|
||||||
field :is_live, :boolean, default: false
|
field :is_live, :boolean, default: false
|
||||||
field :last_fetched_at, :utc_datetime
|
field :last_fetched_at, :utc_datetime
|
||||||
field :next_check_at, :utc_datetime
|
field :next_check_at, :utc_datetime
|
||||||
field :last_live_at, :utc_datetime
|
field :last_live_at, :utc_datetime
|
||||||
|
|
||||||
field :viewer_minutes_today, :integer, default: 0
|
|
||||||
field :viewer_minutes_thisweek, :integer, default: 0
|
|
||||||
field :viewer_minutes_thismonth, :integer, default: 0
|
|
||||||
field :total_viewer_minutes, :integer, default: 0
|
|
||||||
|
|
||||||
field :banner_image, :string
|
|
||||||
field :channel_image, :string
|
|
||||||
field :remote_stream_id, :integer
|
|
||||||
field :thumbnail_url, :string, default: ""
|
field :thumbnail_url, :string, default: ""
|
||||||
|
|
||||||
timestamps(inserted_at: :created_at, type: :utc_datetime)
|
timestamps(inserted_at: :created_at, type: :utc_datetime)
|
||||||
|
@ -36,19 +25,13 @@ defmodule Philomena.Channels.Channel do
|
||||||
|
|
||||||
@doc false
|
@doc false
|
||||||
def changeset(channel, attrs) do
|
def changeset(channel, attrs) do
|
||||||
tag_id =
|
|
||||||
case Repo.get_by(Tag, name: attrs["artist_tag"] || "") do
|
|
||||||
%{id: id} -> id
|
|
||||||
_ -> nil
|
|
||||||
end
|
|
||||||
|
|
||||||
channel
|
channel
|
||||||
|> cast(attrs, [:type, :short_name])
|
|> cast(attrs, [:type, :short_name])
|
||||||
|> validate_required([:type, :short_name])
|
|> validate_required([:type, :short_name])
|
||||||
|> validate_inclusion(:type, ["PicartoChannel", "PiczelChannel"])
|
|> validate_inclusion(:type, ["PicartoChannel", "PiczelChannel"])
|
||||||
|> put_change(:associated_artist_tag_id, tag_id)
|
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@doc false
|
||||||
def update_changeset(channel, attrs) do
|
def update_changeset(channel, attrs) do
|
||||||
cast(channel, attrs, [
|
cast(channel, attrs, [
|
||||||
:title,
|
:title,
|
||||||
|
@ -60,4 +43,11 @@ defmodule Philomena.Channels.Channel do
|
||||||
:last_live_at
|
:last_live_at
|
||||||
])
|
])
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@doc false
|
||||||
|
def artist_tag_changeset(channel, tag) do
|
||||||
|
tag_id = Map.get(tag || %{}, :id)
|
||||||
|
|
||||||
|
change(channel, associated_artist_tag_id: tag_id)
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -8,7 +8,6 @@ defmodule Philomena.Comments do
|
||||||
alias Philomena.Repo
|
alias Philomena.Repo
|
||||||
|
|
||||||
alias PhilomenaQuery.Search
|
alias PhilomenaQuery.Search
|
||||||
alias Philomena.Reports.Report
|
|
||||||
alias Philomena.UserStatistics
|
alias Philomena.UserStatistics
|
||||||
alias Philomena.Comments.Comment
|
alias Philomena.Comments.Comment
|
||||||
alias Philomena.Comments.SearchIndex, as: CommentIndex
|
alias Philomena.Comments.SearchIndex, as: CommentIndex
|
||||||
|
@ -16,10 +15,8 @@ defmodule Philomena.Comments do
|
||||||
alias Philomena.Images.Image
|
alias Philomena.Images.Image
|
||||||
alias Philomena.Images
|
alias Philomena.Images
|
||||||
alias Philomena.Notifications
|
alias Philomena.Notifications
|
||||||
alias Philomena.NotificationWorker
|
|
||||||
alias Philomena.Versions
|
alias Philomena.Versions
|
||||||
alias Philomena.Reports
|
alias Philomena.Reports
|
||||||
alias Philomena.Users.User
|
|
||||||
|
|
||||||
@doc """
|
@doc """
|
||||||
Gets a single comment.
|
Gets a single comment.
|
||||||
|
@ -58,52 +55,20 @@ defmodule Philomena.Comments do
|
||||||
Image
|
Image
|
||||||
|> where(id: ^image.id)
|
|> where(id: ^image.id)
|
||||||
|
|
||||||
|
image_lock_query =
|
||||||
|
lock(image_query, "FOR UPDATE")
|
||||||
|
|
||||||
Multi.new()
|
Multi.new()
|
||||||
|
|> Multi.one(:image, image_lock_query)
|
||||||
|> Multi.insert(:comment, comment)
|
|> Multi.insert(:comment, comment)
|
||||||
|> Multi.update_all(:image, image_query, inc: [comments_count: 1])
|
|> Multi.update_all(:update_image, image_query, inc: [comments_count: 1])
|
||||||
|> maybe_create_subscription_on_reply(image, attribution[:user])
|
|> Multi.run(:notification, ¬ify_comment/2)
|
||||||
|
|> Images.maybe_subscribe_on(:image, attribution[:user], :watch_on_reply)
|
||||||
|> Repo.transaction()
|
|> Repo.transaction()
|
||||||
end
|
end
|
||||||
|
|
||||||
defp maybe_create_subscription_on_reply(multi, image, %User{watch_on_reply: true} = user) do
|
defp notify_comment(_repo, %{image: image, comment: comment}) do
|
||||||
multi
|
Notifications.create_image_comment_notification(comment.user, image, comment)
|
||||||
|> Multi.run(:subscribe, fn _repo, _changes ->
|
|
||||||
Images.create_subscription(image, user)
|
|
||||||
end)
|
|
||||||
end
|
|
||||||
|
|
||||||
defp maybe_create_subscription_on_reply(multi, _image, _user) do
|
|
||||||
multi
|
|
||||||
end
|
|
||||||
|
|
||||||
def notify_comment(comment) do
|
|
||||||
Exq.enqueue(Exq, "notifications", NotificationWorker, ["Comments", comment.id])
|
|
||||||
end
|
|
||||||
|
|
||||||
def perform_notify(comment_id) do
|
|
||||||
comment = get_comment!(comment_id)
|
|
||||||
|
|
||||||
image =
|
|
||||||
comment
|
|
||||||
|> Repo.preload(:image)
|
|
||||||
|> Map.fetch!(:image)
|
|
||||||
|
|
||||||
subscriptions =
|
|
||||||
image
|
|
||||||
|> Repo.preload(:subscriptions)
|
|
||||||
|> Map.fetch!(:subscriptions)
|
|
||||||
|
|
||||||
Notifications.notify(
|
|
||||||
comment,
|
|
||||||
subscriptions,
|
|
||||||
%{
|
|
||||||
actor_id: image.id,
|
|
||||||
actor_type: "Image",
|
|
||||||
actor_child_id: comment.id,
|
|
||||||
actor_child_type: "Comment",
|
|
||||||
action: "commented on"
|
|
||||||
}
|
|
||||||
)
|
|
||||||
end
|
end
|
||||||
|
|
||||||
@doc """
|
@doc """
|
||||||
|
@ -119,7 +84,7 @@ defmodule Philomena.Comments do
|
||||||
|
|
||||||
"""
|
"""
|
||||||
def update_comment(%Comment{} = comment, editor, attrs) do
|
def update_comment(%Comment{} = comment, editor, attrs) do
|
||||||
now = DateTime.utc_now() |> DateTime.truncate(:second)
|
now = DateTime.utc_now(:second)
|
||||||
current_body = comment.body
|
current_body = comment.body
|
||||||
current_reason = comment.edit_reason
|
current_reason = comment.edit_reason
|
||||||
|
|
||||||
|
@ -153,17 +118,12 @@ defmodule Philomena.Comments do
|
||||||
end
|
end
|
||||||
|
|
||||||
def hide_comment(%Comment{} = comment, attrs, user) do
|
def hide_comment(%Comment{} = comment, attrs, user) do
|
||||||
reports =
|
report_query = Reports.close_report_query({"Comment", comment.id}, user)
|
||||||
Report
|
|
||||||
|> where(reportable_type: "Comment", reportable_id: ^comment.id)
|
|
||||||
|> select([r], r.id)
|
|
||||||
|> update(set: [open: false, state: "closed", admin_id: ^user.id])
|
|
||||||
|
|
||||||
comment = Comment.hide_changeset(comment, attrs, user)
|
comment = Comment.hide_changeset(comment, attrs, user)
|
||||||
|
|
||||||
Multi.new()
|
Multi.new()
|
||||||
|> Multi.update(:comment, comment)
|
|> Multi.update(:comment, comment)
|
||||||
|> Multi.update_all(:reports, reports, [])
|
|> Multi.update_all(:reports, report_query, [])
|
||||||
|> Repo.transaction()
|
|> Repo.transaction()
|
||||||
|> case do
|
|> case do
|
||||||
{:ok, %{comment: comment, reports: {_count, reports}}} ->
|
{:ok, %{comment: comment, reports: {_count, reports}}} ->
|
||||||
|
@ -199,21 +159,15 @@ defmodule Philomena.Comments do
|
||||||
end
|
end
|
||||||
|
|
||||||
def approve_comment(%Comment{} = comment, user) do
|
def approve_comment(%Comment{} = comment, user) do
|
||||||
reports =
|
report_query = Reports.close_report_query({"Comment", comment.id}, user)
|
||||||
Report
|
|
||||||
|> where(reportable_type: "Comment", reportable_id: ^comment.id)
|
|
||||||
|> select([r], r.id)
|
|
||||||
|> update(set: [open: false, state: "closed", admin_id: ^user.id])
|
|
||||||
|
|
||||||
comment = Comment.approve_changeset(comment)
|
comment = Comment.approve_changeset(comment)
|
||||||
|
|
||||||
Multi.new()
|
Multi.new()
|
||||||
|> Multi.update(:comment, comment)
|
|> Multi.update(:comment, comment)
|
||||||
|> Multi.update_all(:reports, reports, [])
|
|> Multi.update_all(:reports, report_query, [])
|
||||||
|> Repo.transaction()
|
|> Repo.transaction()
|
||||||
|> case do
|
|> case do
|
||||||
{:ok, %{comment: comment, reports: {_count, reports}}} ->
|
{:ok, %{comment: comment, reports: {_count, reports}}} ->
|
||||||
notify_comment(comment)
|
|
||||||
UserStatistics.inc_stat(comment.user, :comments_posted)
|
UserStatistics.inc_stat(comment.user, :comments_posted)
|
||||||
Reports.reindex_reports(reports)
|
Reports.reindex_reports(reports)
|
||||||
reindex_comment(comment)
|
reindex_comment(comment)
|
||||||
|
@ -229,8 +183,7 @@ defmodule Philomena.Comments do
|
||||||
|
|
||||||
def report_non_approved(comment) do
|
def report_non_approved(comment) do
|
||||||
Reports.create_system_report(
|
Reports.create_system_report(
|
||||||
comment.id,
|
{"Comment", comment.id},
|
||||||
"Comment",
|
|
||||||
"Approval",
|
"Approval",
|
||||||
"Comment contains externally-embedded images and has been flagged for review."
|
"Comment contains externally-embedded images and has been flagged for review."
|
||||||
)
|
)
|
||||||
|
|
|
@ -14,15 +14,12 @@ defmodule Philomena.Comments.Comment do
|
||||||
field :body, :string
|
field :body, :string
|
||||||
field :ip, EctoNetwork.INET
|
field :ip, EctoNetwork.INET
|
||||||
field :fingerprint, :string
|
field :fingerprint, :string
|
||||||
field :user_agent, :string, default: ""
|
|
||||||
field :referrer, :string, default: ""
|
|
||||||
field :anonymous, :boolean, default: false
|
field :anonymous, :boolean, default: false
|
||||||
field :hidden_from_users, :boolean, default: false
|
field :hidden_from_users, :boolean, default: false
|
||||||
field :edit_reason, :string
|
field :edit_reason, :string
|
||||||
field :edited_at, :utc_datetime
|
field :edited_at, :utc_datetime
|
||||||
field :deletion_reason, :string, default: ""
|
field :deletion_reason, :string, default: ""
|
||||||
field :destroyed_content, :boolean, default: false
|
field :destroyed_content, :boolean, default: false
|
||||||
field :name_at_post_time, :string
|
|
||||||
field :approved, :boolean
|
field :approved, :boolean
|
||||||
|
|
||||||
timestamps(inserted_at: :created_at, type: :utc_datetime)
|
timestamps(inserted_at: :created_at, type: :utc_datetime)
|
||||||
|
@ -35,7 +32,6 @@ defmodule Philomena.Comments.Comment do
|
||||||
|> validate_required([:body])
|
|> validate_required([:body])
|
||||||
|> validate_length(:body, min: 1, max: 300_000, count: :bytes)
|
|> validate_length(:body, min: 1, max: 300_000, count: :bytes)
|
||||||
|> change(attribution)
|
|> change(attribution)
|
||||||
|> put_name_at_post_time(attribution[:user])
|
|
||||||
|> Approval.maybe_put_approval(attribution[:user])
|
|> Approval.maybe_put_approval(attribution[:user])
|
||||||
|> Approval.maybe_strip_images(attribution[:user])
|
|> Approval.maybe_strip_images(attribution[:user])
|
||||||
end
|
end
|
||||||
|
@ -74,7 +70,4 @@ defmodule Philomena.Comments.Comment do
|
||||||
change(comment)
|
change(comment)
|
||||||
|> put_change(:approved, true)
|
|> put_change(:approved, true)
|
||||||
end
|
end
|
||||||
|
|
||||||
defp put_name_at_post_time(changeset, nil), do: changeset
|
|
||||||
defp put_name_at_post_time(changeset, user), do: change(changeset, name_at_post_time: user.name)
|
|
||||||
end
|
end
|
||||||
|
|
|
@ -92,8 +92,8 @@ defmodule Philomena.Comments.Query do
|
||||||
|> Parser.parse(query_string, context)
|
|> Parser.parse(query_string, context)
|
||||||
end
|
end
|
||||||
|
|
||||||
def compile(user, query_string) do
|
def compile(query_string, opts \\ []) do
|
||||||
query_string = query_string || ""
|
user = Keyword.get(opts, :user)
|
||||||
|
|
||||||
case user do
|
case user do
|
||||||
nil ->
|
nil ->
|
||||||
|
|
|
@ -6,76 +6,112 @@ defmodule Philomena.Conversations do
|
||||||
import Ecto.Query, warn: false
|
import Ecto.Query, warn: false
|
||||||
alias Ecto.Multi
|
alias Ecto.Multi
|
||||||
alias Philomena.Repo
|
alias Philomena.Repo
|
||||||
alias Philomena.Reports
|
|
||||||
alias Philomena.Reports.Report
|
|
||||||
alias Philomena.Conversations.Conversation
|
alias Philomena.Conversations.Conversation
|
||||||
|
alias Philomena.Conversations.Message
|
||||||
|
alias Philomena.Reports
|
||||||
|
alias Philomena.Users
|
||||||
|
|
||||||
@doc """
|
@doc """
|
||||||
Gets a single conversation.
|
Returns the number of unread conversations for the given user.
|
||||||
|
|
||||||
Raises `Ecto.NoResultsError` if the Conversation does not exist.
|
Conversations hidden by the given user are not counted.
|
||||||
|
|
||||||
## Examples
|
## Examples
|
||||||
|
|
||||||
iex> get_conversation!(123)
|
iex> count_unread_conversations(user1)
|
||||||
%Conversation{}
|
0
|
||||||
|
|
||||||
iex> get_conversation!(456)
|
iex> count_unread_conversations(user2)
|
||||||
** (Ecto.NoResultsError)
|
7
|
||||||
|
|
||||||
"""
|
"""
|
||||||
def get_conversation!(id), do: Repo.get!(Conversation, id)
|
def count_unread_conversations(user) do
|
||||||
|
Conversation
|
||||||
|
|> where(
|
||||||
|
[c],
|
||||||
|
((c.to_id == ^user.id and c.to_read == false) or
|
||||||
|
(c.from_id == ^user.id and c.from_read == false)) and
|
||||||
|
not ((c.to_id == ^user.id and c.to_hidden == true) or
|
||||||
|
(c.from_id == ^user.id and c.from_hidden == true))
|
||||||
|
)
|
||||||
|
|> Repo.aggregate(:count)
|
||||||
|
end
|
||||||
|
|
||||||
|
@doc """
|
||||||
|
Returns a `m:Scrivener.Page` of conversations between the partner and the user.
|
||||||
|
|
||||||
|
## Examples
|
||||||
|
|
||||||
|
iex> list_conversations_with("123", %User{}, page_size: 10)
|
||||||
|
%Scrivener.Page{}
|
||||||
|
|
||||||
|
"""
|
||||||
|
def list_conversations_with(partner_id, user, pagination) do
|
||||||
|
query =
|
||||||
|
from c in Conversation,
|
||||||
|
where:
|
||||||
|
(c.from_id == ^partner_id and c.to_id == ^user.id) or
|
||||||
|
(c.to_id == ^partner_id and c.from_id == ^user.id)
|
||||||
|
|
||||||
|
list_conversations(query, user, pagination)
|
||||||
|
end
|
||||||
|
|
||||||
|
@doc """
|
||||||
|
Returns a `m:Scrivener.Page` of conversations sent by or received from the user.
|
||||||
|
|
||||||
|
## Examples
|
||||||
|
|
||||||
|
iex> list_conversations_with("123", %User{}, page_size: 10)
|
||||||
|
%Scrivener.Page{}
|
||||||
|
|
||||||
|
"""
|
||||||
|
def list_conversations(queryable \\ Conversation, user, pagination) do
|
||||||
|
query =
|
||||||
|
from c in queryable,
|
||||||
|
as: :conversations,
|
||||||
|
where:
|
||||||
|
(c.from_id == ^user.id and not c.from_hidden) or
|
||||||
|
(c.to_id == ^user.id and not c.to_hidden),
|
||||||
|
inner_lateral_join:
|
||||||
|
cnt in subquery(
|
||||||
|
from m in Message,
|
||||||
|
where: m.conversation_id == parent_as(:conversations).id,
|
||||||
|
select: %{count: count()}
|
||||||
|
),
|
||||||
|
on: true,
|
||||||
|
order_by: [desc: :last_message_at],
|
||||||
|
preload: [:to, :from],
|
||||||
|
select: %{c | message_count: cnt.count}
|
||||||
|
|
||||||
|
Repo.paginate(query, pagination)
|
||||||
|
end
|
||||||
|
|
||||||
@doc """
|
@doc """
|
||||||
Creates a conversation.
|
Creates a conversation.
|
||||||
|
|
||||||
## Examples
|
## Examples
|
||||||
|
|
||||||
iex> create_conversation(%{field: value})
|
iex> create_conversation(from, to, %{field: value})
|
||||||
{:ok, %Conversation{}}
|
{:ok, %Conversation{}}
|
||||||
|
|
||||||
iex> create_conversation(%{field: bad_value})
|
iex> create_conversation(from, to, %{field: bad_value})
|
||||||
{:error, %Ecto.Changeset{}}
|
{:error, %Ecto.Changeset{}}
|
||||||
|
|
||||||
"""
|
"""
|
||||||
def create_conversation(from, attrs \\ %{}) do
|
def create_conversation(from, attrs \\ %{}) do
|
||||||
|
to = Users.get_user_by_name(attrs["recipient"])
|
||||||
|
|
||||||
%Conversation{}
|
%Conversation{}
|
||||||
|> Conversation.creation_changeset(from, attrs)
|
|> Conversation.creation_changeset(from, to, attrs)
|
||||||
|> Repo.insert()
|
|> Repo.insert()
|
||||||
end
|
|> case do
|
||||||
|
{:ok, conversation} ->
|
||||||
|
report_non_approved_message(hd(conversation.messages))
|
||||||
|
{:ok, conversation}
|
||||||
|
|
||||||
@doc """
|
error ->
|
||||||
Updates a conversation.
|
error
|
||||||
|
end
|
||||||
## Examples
|
|
||||||
|
|
||||||
iex> update_conversation(conversation, %{field: new_value})
|
|
||||||
{:ok, %Conversation{}}
|
|
||||||
|
|
||||||
iex> update_conversation(conversation, %{field: bad_value})
|
|
||||||
{:error, %Ecto.Changeset{}}
|
|
||||||
|
|
||||||
"""
|
|
||||||
def update_conversation(%Conversation{} = conversation, attrs) do
|
|
||||||
conversation
|
|
||||||
|> Conversation.changeset(attrs)
|
|
||||||
|> Repo.update()
|
|
||||||
end
|
|
||||||
|
|
||||||
@doc """
|
|
||||||
Deletes a Conversation.
|
|
||||||
|
|
||||||
## Examples
|
|
||||||
|
|
||||||
iex> delete_conversation(conversation)
|
|
||||||
{:ok, %Conversation{}}
|
|
||||||
|
|
||||||
iex> delete_conversation(conversation)
|
|
||||||
{:error, %Ecto.Changeset{}}
|
|
||||||
|
|
||||||
"""
|
|
||||||
def delete_conversation(%Conversation{} = conversation) do
|
|
||||||
Repo.delete(conversation)
|
|
||||||
end
|
end
|
||||||
|
|
||||||
@doc """
|
@doc """
|
||||||
|
@ -91,201 +127,221 @@ defmodule Philomena.Conversations do
|
||||||
Conversation.changeset(conversation, %{})
|
Conversation.changeset(conversation, %{})
|
||||||
end
|
end
|
||||||
|
|
||||||
def count_unread_conversations(user) do
|
|
||||||
Conversation
|
|
||||||
|> where(
|
|
||||||
[c],
|
|
||||||
((c.to_id == ^user.id and c.to_read == false) or
|
|
||||||
(c.from_id == ^user.id and c.from_read == false)) and
|
|
||||||
not ((c.to_id == ^user.id and c.to_hidden == true) or
|
|
||||||
(c.from_id == ^user.id and c.from_hidden == true))
|
|
||||||
)
|
|
||||||
|> Repo.aggregate(:count, :id)
|
|
||||||
end
|
|
||||||
|
|
||||||
def mark_conversation_read(conversation, user, read \\ true)
|
|
||||||
|
|
||||||
def mark_conversation_read(
|
|
||||||
%Conversation{to_id: user_id, from_id: user_id} = conversation,
|
|
||||||
%{id: user_id},
|
|
||||||
read
|
|
||||||
) do
|
|
||||||
conversation
|
|
||||||
|> Conversation.read_changeset(%{to_read: read, from_read: read})
|
|
||||||
|> Repo.update()
|
|
||||||
end
|
|
||||||
|
|
||||||
def mark_conversation_read(%Conversation{to_id: user_id} = conversation, %{id: user_id}, read) do
|
|
||||||
conversation
|
|
||||||
|> Conversation.read_changeset(%{to_read: read})
|
|
||||||
|> Repo.update()
|
|
||||||
end
|
|
||||||
|
|
||||||
def mark_conversation_read(%Conversation{from_id: user_id} = conversation, %{id: user_id}, read) do
|
|
||||||
conversation
|
|
||||||
|> Conversation.read_changeset(%{from_read: read})
|
|
||||||
|> Repo.update()
|
|
||||||
end
|
|
||||||
|
|
||||||
def mark_conversation_read(_conversation, _user, _read), do: {:ok, nil}
|
|
||||||
|
|
||||||
def mark_conversation_hidden(conversation, user, hidden \\ true)
|
|
||||||
|
|
||||||
def mark_conversation_hidden(
|
|
||||||
%Conversation{to_id: user_id} = conversation,
|
|
||||||
%{id: user_id},
|
|
||||||
hidden
|
|
||||||
) do
|
|
||||||
conversation
|
|
||||||
|> Conversation.hidden_changeset(%{to_hidden: hidden})
|
|
||||||
|> Repo.update()
|
|
||||||
end
|
|
||||||
|
|
||||||
def mark_conversation_hidden(
|
|
||||||
%Conversation{from_id: user_id} = conversation,
|
|
||||||
%{id: user_id},
|
|
||||||
hidden
|
|
||||||
) do
|
|
||||||
conversation
|
|
||||||
|> Conversation.hidden_changeset(%{from_hidden: hidden})
|
|
||||||
|> Repo.update()
|
|
||||||
end
|
|
||||||
|
|
||||||
def mark_conversation_hidden(_conversation, _user, _read), do: {:ok, nil}
|
|
||||||
|
|
||||||
alias Philomena.Conversations.Message
|
|
||||||
|
|
||||||
@doc """
|
@doc """
|
||||||
Gets a single message.
|
Marks a conversation as read or unread from the perspective of the given user.
|
||||||
|
|
||||||
Raises `Ecto.NoResultsError` if the Message does not exist.
|
|
||||||
|
|
||||||
## Examples
|
## Examples
|
||||||
|
|
||||||
iex> get_message!(123)
|
iex> mark_conversation_read(conversation, user, true)
|
||||||
%Message{}
|
{:ok, %Conversation{}}
|
||||||
|
|
||||||
iex> get_message!(456)
|
iex> mark_conversation_read(conversation, user, false)
|
||||||
** (Ecto.NoResultsError)
|
{:ok, %Conversation{}}
|
||||||
|
|
||||||
|
iex> mark_conversation_read(conversation, %User{}, true)
|
||||||
|
{:error, %Ecto.Changeset{}}
|
||||||
|
|
||||||
"""
|
"""
|
||||||
def get_message!(id), do: Repo.get!(Message, id)
|
def mark_conversation_read(%Conversation{} = conversation, user, read \\ true) do
|
||||||
|
changes =
|
||||||
|
%{}
|
||||||
|
|> put_conditional(:to_read, read, conversation.to_id == user.id)
|
||||||
|
|> put_conditional(:from_read, read, conversation.from_id == user.id)
|
||||||
|
|
||||||
|
conversation
|
||||||
|
|> Conversation.read_changeset(changes)
|
||||||
|
|> Repo.update()
|
||||||
|
end
|
||||||
|
|
||||||
@doc """
|
@doc """
|
||||||
Creates a message.
|
Marks a conversation as hidden or visible from the perspective of the given user.
|
||||||
|
|
||||||
|
Hidden conversations are not shown in the list of conversations for the user, and
|
||||||
|
are not counted when retrieving the number of unread conversations.
|
||||||
|
|
||||||
## Examples
|
## Examples
|
||||||
|
|
||||||
iex> create_message(%{field: value})
|
iex> mark_conversation_hidden(conversation, user, true)
|
||||||
|
{:ok, %Conversation{}}
|
||||||
|
|
||||||
|
iex> mark_conversation_hidden(conversation, user, false)
|
||||||
|
{:ok, %Conversation{}}
|
||||||
|
|
||||||
|
iex> mark_conversation_hidden(conversation, %User{}, true)
|
||||||
|
{:error, %Ecto.Changeset{}}
|
||||||
|
|
||||||
|
"""
|
||||||
|
def mark_conversation_hidden(%Conversation{} = conversation, user, hidden \\ true) do
|
||||||
|
changes =
|
||||||
|
%{}
|
||||||
|
|> put_conditional(:to_hidden, hidden, conversation.to_id == user.id)
|
||||||
|
|> put_conditional(:from_hidden, hidden, conversation.from_id == user.id)
|
||||||
|
|
||||||
|
conversation
|
||||||
|
|> Conversation.hidden_changeset(changes)
|
||||||
|
|> Repo.update()
|
||||||
|
end
|
||||||
|
|
||||||
|
defp put_conditional(map, key, value, condition) do
|
||||||
|
if condition do
|
||||||
|
Map.put(map, key, value)
|
||||||
|
else
|
||||||
|
map
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
@doc """
|
||||||
|
Returns the number of messages in the given conversation.
|
||||||
|
|
||||||
|
## Example
|
||||||
|
|
||||||
|
iex> count_messages(%Conversation{})
|
||||||
|
3
|
||||||
|
|
||||||
|
"""
|
||||||
|
def count_messages(conversation) do
|
||||||
|
Message
|
||||||
|
|> where(conversation_id: ^conversation.id)
|
||||||
|
|> Repo.aggregate(:count)
|
||||||
|
end
|
||||||
|
|
||||||
|
@doc """
|
||||||
|
Returns a `m:Scrivener.Page` of 2-tuples of messages and rendered output
|
||||||
|
within a conversation.
|
||||||
|
|
||||||
|
Messages are ordered by user message preference (`messages_newest_first`).
|
||||||
|
|
||||||
|
When coerced to a list and rendered as Markdown, the result may look like:
|
||||||
|
|
||||||
|
[
|
||||||
|
{%Message{body: "hello *world*"}, "hello <strong>world</strong>"}
|
||||||
|
]
|
||||||
|
|
||||||
|
## Example
|
||||||
|
|
||||||
|
iex> list_messages(%Conversation{}, %User{}, & &1.body, page_size: 10)
|
||||||
|
%Scrivener.Page{}
|
||||||
|
|
||||||
|
"""
|
||||||
|
def list_messages(conversation, user, collection_renderer, pagination) do
|
||||||
|
direction =
|
||||||
|
if user.messages_newest_first do
|
||||||
|
:desc
|
||||||
|
else
|
||||||
|
:asc
|
||||||
|
end
|
||||||
|
|
||||||
|
query =
|
||||||
|
from m in Message,
|
||||||
|
where: m.conversation_id == ^conversation.id,
|
||||||
|
order_by: [{^direction, :created_at}],
|
||||||
|
preload: :from
|
||||||
|
|
||||||
|
messages = Repo.paginate(query, pagination)
|
||||||
|
rendered = collection_renderer.(messages)
|
||||||
|
|
||||||
|
put_in(messages.entries, Enum.zip(messages.entries, rendered))
|
||||||
|
end
|
||||||
|
|
||||||
|
@doc """
|
||||||
|
Creates a message within a conversation.
|
||||||
|
|
||||||
|
## Examples
|
||||||
|
|
||||||
|
iex> create_message(%Conversation{}, %User{}, %{field: value})
|
||||||
{:ok, %Message{}}
|
{:ok, %Message{}}
|
||||||
|
|
||||||
iex> create_message(%{field: bad_value})
|
iex> create_message(%Conversation{}, %User{}, %{field: bad_value})
|
||||||
{:error, %Ecto.Changeset{}}
|
{:error, %Ecto.Changeset{}}
|
||||||
|
|
||||||
"""
|
"""
|
||||||
def create_message(conversation, user, attrs \\ %{}) do
|
def create_message(conversation, user, attrs \\ %{}) do
|
||||||
message =
|
message_changeset =
|
||||||
Ecto.build_assoc(conversation, :messages)
|
conversation
|
||||||
|
|> Ecto.build_assoc(:messages)
|
||||||
|> Message.creation_changeset(attrs, user)
|
|> Message.creation_changeset(attrs, user)
|
||||||
|
|
||||||
show_as_read =
|
conversation_changeset =
|
||||||
case message do
|
Conversation.new_message_changeset(conversation)
|
||||||
%{changes: %{approved: true}} -> false
|
|
||||||
_ -> true
|
|
||||||
end
|
|
||||||
|
|
||||||
conversation_query =
|
|
||||||
Conversation
|
|
||||||
|> where(id: ^conversation.id)
|
|
||||||
|
|
||||||
now = DateTime.utc_now()
|
|
||||||
|
|
||||||
Multi.new()
|
Multi.new()
|
||||||
|> Multi.insert(:message, message)
|
|> Multi.insert(:message, message_changeset)
|
||||||
|> Multi.update_all(:conversation, conversation_query,
|
|> Multi.update(:conversation, conversation_changeset)
|
||||||
set: [from_read: show_as_read, to_read: show_as_read, last_message_at: now]
|
|
||||||
)
|
|
||||||
|> Repo.transaction()
|
|
||||||
end
|
|
||||||
|
|
||||||
def approve_conversation_message(message, user) do
|
|
||||||
reports_query =
|
|
||||||
Report
|
|
||||||
|> where(reportable_type: "Conversation", reportable_id: ^message.conversation_id)
|
|
||||||
|> select([r], r.id)
|
|
||||||
|> update(set: [open: false, state: "closed", admin_id: ^user.id])
|
|
||||||
|
|
||||||
message_query =
|
|
||||||
message
|
|
||||||
|> Message.approve_changeset()
|
|
||||||
|
|
||||||
conversation_query =
|
|
||||||
Conversation
|
|
||||||
|> where(id: ^message.conversation_id)
|
|
||||||
|
|
||||||
Multi.new()
|
|
||||||
|> Multi.update(:message, message_query)
|
|
||||||
|> Multi.update_all(:conversation, conversation_query, set: [to_read: false])
|
|
||||||
|> Multi.update_all(:reports, reports_query, [])
|
|
||||||
|> Repo.transaction()
|
|> Repo.transaction()
|
||||||
|> case do
|
|> case do
|
||||||
{:ok, %{reports: {_count, reports}} = result} ->
|
{:ok, %{message: message}} ->
|
||||||
Reports.reindex_reports(reports)
|
report_non_approved_message(message)
|
||||||
|
{:ok, message}
|
||||||
|
|
||||||
{:ok, result}
|
_error ->
|
||||||
|
{:error, message_changeset}
|
||||||
error ->
|
|
||||||
error
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def report_non_approved(id) do
|
|
||||||
Reports.create_system_report(
|
|
||||||
id,
|
|
||||||
"Conversation",
|
|
||||||
"Approval",
|
|
||||||
"PM contains externally-embedded images and has been flagged for review."
|
|
||||||
)
|
|
||||||
end
|
|
||||||
|
|
||||||
def set_as_read(conversation) do
|
|
||||||
conversation
|
|
||||||
|> Conversation.to_read_changeset()
|
|
||||||
|> Repo.update()
|
|
||||||
end
|
|
||||||
|
|
||||||
@doc """
|
@doc """
|
||||||
Updates a message.
|
Approves a previously-posted message which was not approved at post time.
|
||||||
|
|
||||||
## Examples
|
## Examples
|
||||||
|
|
||||||
iex> update_message(message, %{field: new_value})
|
iex> approve_message(%Message{}, %User{})
|
||||||
{:ok, %Message{}}
|
{:ok, %Message{}}
|
||||||
|
|
||||||
iex> update_message(message, %{field: bad_value})
|
iex> approve_message(%Message{}, %User{})
|
||||||
{:error, %Ecto.Changeset{}}
|
{:error, %Ecto.Changeset{}}
|
||||||
|
|
||||||
"""
|
"""
|
||||||
def update_message(%Message{} = message, attrs) do
|
def approve_message(message, approving_user) do
|
||||||
message
|
message_changeset = Message.approve_changeset(message)
|
||||||
|> Message.changeset(attrs)
|
|
||||||
|> Repo.update()
|
conversation_update_query =
|
||||||
|
from c in Conversation,
|
||||||
|
where: c.id == ^message.conversation_id,
|
||||||
|
update: [set: [from_read: false, to_read: false]]
|
||||||
|
|
||||||
|
reports_query =
|
||||||
|
Reports.close_report_query({"Conversation", message.conversation_id}, approving_user)
|
||||||
|
|
||||||
|
Multi.new()
|
||||||
|
|> Multi.update(:message, message_changeset)
|
||||||
|
|> Multi.update_all(:conversation, conversation_update_query, [])
|
||||||
|
|> Multi.update_all(:reports, reports_query, [])
|
||||||
|
|> Repo.transaction()
|
||||||
|
|> case do
|
||||||
|
{:ok, %{reports: {_count, reports}, message: message}} ->
|
||||||
|
Reports.reindex_reports(reports)
|
||||||
|
|
||||||
|
message
|
||||||
|
|
||||||
|
_error ->
|
||||||
|
{:error, message_changeset}
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
@doc """
|
@doc """
|
||||||
Deletes a Message.
|
Generates a system report for an unapproved message.
|
||||||
|
|
||||||
|
This is called by `create_conversation/2` and `create_message/3`, so it normally does not
|
||||||
|
need to be called explicitly.
|
||||||
|
|
||||||
## Examples
|
## Examples
|
||||||
|
|
||||||
iex> delete_message(message)
|
iex> report_non_approved_message(%Message{approved: false})
|
||||||
{:ok, %Message{}}
|
{:ok, %Report{}}
|
||||||
|
|
||||||
iex> delete_message(message)
|
iex> report_non_approved_message(%Message{approved: true})
|
||||||
{:error, %Ecto.Changeset{}}
|
{:ok, nil}
|
||||||
|
|
||||||
"""
|
"""
|
||||||
def delete_message(%Message{} = message) do
|
def report_non_approved_message(message) do
|
||||||
Repo.delete(message)
|
if message.approved do
|
||||||
|
{:ok, nil}
|
||||||
|
else
|
||||||
|
Reports.create_system_report(
|
||||||
|
{"Conversation", message.conversation_id},
|
||||||
|
"Approval",
|
||||||
|
"PM contains externally-embedded images and has been flagged for review."
|
||||||
|
)
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
@doc """
|
@doc """
|
||||||
|
|
|
@@ -4,7 +4,6 @@ defmodule Philomena.Conversations.Conversation do

   alias Philomena.Users.User
   alias Philomena.Conversations.Message
-  alias Philomena.Repo

   @derive {Phoenix.Param, key: :slug}

@@ -20,6 +19,8 @@ defmodule Philomena.Conversations.Conversation do
     field :from_hidden, :boolean, default: false
     field :slug, :string
     field :last_message_at, :utc_datetime

+    field :message_count, :integer, virtual: true
     field :recipient, :string, virtual: true

     timestamps(inserted_at: :created_at, type: :utc_datetime)
@@ -32,51 +33,39 @@ defmodule Philomena.Conversations.Conversation do
     |> validate_required([])
   end

+  @doc false
   def read_changeset(conversation, attrs) do
-    conversation
-    |> cast(attrs, [:from_read, :to_read])
-  end
-
-  def to_read_changeset(conversation) do
-    change(conversation)
-    |> put_change(:to_read, true)
-  end
-
-  def hidden_changeset(conversation, attrs) do
-    conversation
-    |> cast(attrs, [:from_hidden, :to_hidden])
+    cast(conversation, attrs, [:from_read, :to_read])
   end

   @doc false
-  def creation_changeset(conversation, from, attrs) do
-    conversation
-    |> cast(attrs, [:title, :recipient])
-    |> validate_required([:title, :recipient])
-    |> validate_length(:title, max: 300, count: :bytes)
-    |> put_assoc(:from, from)
-    |> put_recipient()
-    |> set_slug()
-    |> set_last_message()
-    |> cast_assoc(:messages, with: &Message.creation_changeset(&1, &2, from))
-    |> validate_length(:messages, is: 1)
+  def hidden_changeset(conversation, attrs) do
+    cast(conversation, attrs, [:from_hidden, :to_hidden])
   end

-  defp set_slug(changeset) do
-    changeset
-    |> change(slug: Ecto.UUID.generate())
+  @doc false
+  def creation_changeset(conversation, from, to, attrs) do
+    conversation
+    |> cast(attrs, [:title])
+    |> put_assoc(:from, from)
+    |> put_assoc(:to, to)
+    |> put_change(:slug, Ecto.UUID.generate())
+    |> cast_assoc(:messages, with: &Message.creation_changeset(&1, &2, from))
+    |> set_last_message()
+    |> validate_length(:messages, is: 1)
+    |> validate_length(:title, max: 300, count: :bytes)
+    |> validate_required([:title, :from, :to])
+  end
+
+  @doc false
+  def new_message_changeset(conversation) do
+    conversation
+    |> change(from_read: false)
+    |> change(to_read: false)
+    |> set_last_message()
   end

   defp set_last_message(changeset) do
-    changeset
-    |> change(last_message_at: DateTime.utc_now() |> DateTime.truncate(:second))
-  end
-
-  defp put_recipient(changeset) do
-    recipient = changeset |> get_field(:recipient)
-    user = Repo.get_by(User, name: recipient)
-
-    changeset
-    |> put_change(:to, user)
-    |> validate_required(:to)
+    change(changeset, last_message_at: DateTime.utc_now(:second))
   end
 end
|
@@ -33,6 +33,7 @@ defmodule Philomena.Conversations.Message do
     |> Approval.maybe_put_approval(user)
   end

+  @doc false
   def approve_changeset(message) do
     change(message, approved: true)
   end
|
@@ -3,11 +3,15 @@ defmodule Philomena.DuplicateReports do
   The DuplicateReports context.
   """

+  import Philomena.DuplicateReports.Power
   import Ecto.Query, warn: false

   alias Ecto.Multi
   alias Philomena.Repo

   alias Philomena.DuplicateReports.DuplicateReport
+  alias Philomena.DuplicateReports.SearchQuery
+  alias Philomena.DuplicateReports.Uploader
   alias Philomena.ImageIntensities.ImageIntensity
   alias Philomena.Images.Image
   alias Philomena.Images
@@ -15,7 +19,8 @@ defmodule Philomena.DuplicateReports do
   def generate_reports(source) do
     source = Repo.preload(source, :intensity)

-    duplicates_of(source.intensity, source.image_aspect_ratio, 0.2, 0.05)
+    {source.intensity, source.image_aspect_ratio}
+    |> find_duplicates(dist: 0.2)
     |> where([i, _it], i.id != ^source.id)
     |> Repo.all()
     |> Enum.map(fn target ->
@@ -25,7 +30,11 @@ defmodule Philomena.DuplicateReports do
     end)
   end

-  def duplicates_of(intensities, aspect_ratio, dist \\ 0.25, aspect_dist \\ 0.05) do
+  def find_duplicates({intensities, aspect_ratio}, opts \\ []) do
+    aspect_dist = Keyword.get(opts, :aspect_dist, 0.05)
+    limit = Keyword.get(opts, :limit, 10)
+    dist = Keyword.get(opts, :dist, 0.25)
+
     # for each color channel
     dist = dist * 3

@@ -39,7 +48,72 @@ defmodule Philomena.DuplicateReports do
       where:
         i.image_aspect_ratio >= ^(aspect_ratio - aspect_dist) and
           i.image_aspect_ratio <= ^(aspect_ratio + aspect_dist),
-      limit: 10
+      order_by: [
+        asc:
+          power(it.nw - ^intensities.nw, 2) +
+            power(it.ne - ^intensities.ne, 2) +
+            power(it.sw - ^intensities.sw, 2) +
+            power(it.se - ^intensities.se, 2) +
+            power(i.image_aspect_ratio - ^aspect_ratio, 2)
+      ],
+      limit: ^limit
+  end
+
+  @doc """
+  Executes the reverse image search query from parameters.
+
+  ## Examples
+
+      iex> execute_search_query(%{"image" => ..., "distance" => "0.25"})
+      {:ok, [%Image{...}, ....]}
+
+      iex> execute_search_query(%{"image" => ..., "distance" => "asdf"})
+      {:error, %Ecto.Changeset{}}
+
+  """
+  def execute_search_query(attrs \\ %{}) do
+    %SearchQuery{}
+    |> SearchQuery.changeset(attrs)
+    |> Uploader.analyze_upload(attrs)
+    |> Ecto.Changeset.apply_action(:create)
+    |> case do
+      {:ok, search_query} ->
+        intensities = generate_intensities(search_query)
+        aspect = search_query.image_aspect_ratio
+        limit = search_query.limit
+        dist = search_query.distance
+
+        images =
+          {intensities, aspect}
+          |> find_duplicates(dist: dist, aspect_dist: dist, limit: limit)
+          |> preload([:user, :intensity, [:sources, tags: :aliases]])
+          |> Repo.paginate(page_size: 50)
+
+        {:ok, images}
+
+      error ->
+        error
+    end
+  end
+
+  defp generate_intensities(search_query) do
+    analysis = SearchQuery.to_analysis(search_query)
+    file = search_query.uploaded_image
+
+    PhilomenaMedia.Processors.intensities(analysis, file)
+  end
+
+  @doc """
+  Returns an `%Ecto.Changeset{}` for tracking search query changes.
+
+  ## Examples
+
+      iex> change_search_query(search_query)
+      %Ecto.Changeset{source: %SearchQuery{}}
+
+  """
+  def change_search_query(%SearchQuery{} = search_query) do
+    SearchQuery.changeset(search_query)
   end

   @doc """
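A rough usage sketch of the reworked find_duplicates/2 options API, mirroring the call in generate_reports/1 above (the 0.2 distance and the trailing Repo.all/1 are illustrative, not part of the commit):

    {image.intensity, image.image_aspect_ratio}
    |> Philomena.DuplicateReports.find_duplicates(dist: 0.2, aspect_dist: 0.05, limit: 10)
    |> Philomena.Repo.all()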
9 lib/philomena/duplicate_reports/power.ex Normal file
@@ -0,0 +1,9 @@
+defmodule Philomena.DuplicateReports.Power do
+  @moduledoc false
+
+  defmacro power(left, right) do
+    quote do
+      fragment("power(?, ?)", unquote(left), unquote(right))
+    end
+  end
+end
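The macro only wraps Postgres power() in an Ecto fragment, so inside a query the two forms below should be equivalent (a sketch, not taken from the commit):

    # using the imported macro
    order_by: [asc: power(it.nw - ^intensities.nw, 2)]

    # what it expands to
    order_by: [asc: fragment("power(?, ?)", it.nw - ^intensities.nw, 2)]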
69 lib/philomena/duplicate_reports/search_query.ex Normal file
@@ -0,0 +1,69 @@
+defmodule Philomena.DuplicateReports.SearchQuery do
+  use Ecto.Schema
+  import Ecto.Changeset
+
+  embedded_schema do
+    field :distance, :float, default: 0.25
+    field :limit, :integer, default: 10
+
+    field :image_width, :integer
+    field :image_height, :integer
+    field :image_format, :string
+    field :image_duration, :float
+    field :image_mime_type, :string
+    field :image_is_animated, :boolean
+    field :image_aspect_ratio, :float
+    field :uploaded_image, :string, virtual: true
+  end
+
+  @doc false
+  def changeset(search_query, attrs \\ %{}) do
+    search_query
+    |> cast(attrs, [:distance, :limit])
+    |> validate_number(:distance, greater_than_or_equal_to: 0, less_than_or_equal_to: 1)
+    |> validate_number(:limit, greater_than_or_equal_to: 1, less_than_or_equal_to: 50)
+  end
+
+  @doc false
+  def image_changeset(search_query, attrs \\ %{}) do
+    search_query
+    |> cast(attrs, [
+      :image_width,
+      :image_height,
+      :image_format,
+      :image_duration,
+      :image_mime_type,
+      :image_is_animated,
+      :image_aspect_ratio,
+      :uploaded_image
+    ])
+    |> validate_required([
+      :image_width,
+      :image_height,
+      :image_format,
+      :image_duration,
+      :image_mime_type,
+      :image_is_animated,
+      :image_aspect_ratio,
+      :uploaded_image
+    ])
+    |> validate_number(:image_width, greater_than: 0)
+    |> validate_number(:image_height, greater_than: 0)
+    |> validate_inclusion(
+      :image_mime_type,
+      ~W(image/gif image/jpeg image/png image/svg+xml video/webm),
+      message: "(#{attrs["image_mime_type"]}) is invalid"
+    )
+  end
+
+  @doc false
+  def to_analysis(search_query) do
+    %PhilomenaMedia.Analyzers.Result{
+      animated?: search_query.image_is_animated,
+      dimensions: {search_query.image_width, search_query.image_height},
+      duration: search_query.image_duration,
+      extension: search_query.image_format,
+      mime_type: search_query.image_mime_type
+    }
+  end
+end
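A small sketch of how the embedded changeset is expected to behave, given the validations above (values illustrative):

    iex> SearchQuery.changeset(%SearchQuery{}, %{"distance" => "0.25", "limit" => "10"}).valid?
    true

    iex> SearchQuery.changeset(%SearchQuery{}, %{"distance" => "7"}).valid?
    false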
17 lib/philomena/duplicate_reports/uploader.ex Normal file
@@ -0,0 +1,17 @@
+defmodule Philomena.DuplicateReports.Uploader do
+  @moduledoc """
+  Upload and processing callback logic for SearchQuery images.
+  """
+
+  alias Philomena.DuplicateReports.SearchQuery
+  alias PhilomenaMedia.Uploader
+
+  def analyze_upload(search_query, params) do
+    Uploader.analyze_upload(
+      search_query,
+      "image",
+      params["image"],
+      &SearchQuery.image_changeset/2
+    )
+  end
+end
@@ -1,9 +1,10 @@
 defmodule Philomena.Filters.Filter do
   use Ecto.Schema
   import Ecto.Changeset
+  import PhilomenaQuery.Ecto.QueryValidator

   alias Philomena.Schema.TagList
-  alias Philomena.Schema.Search
+  alias Philomena.Images.Query
   alias Philomena.Users.User
   alias Philomena.Repo

@@ -48,8 +49,8 @@ defmodule Philomena.Filters.Filter do
     |> validate_required([:name])
     |> validate_my_downvotes(:spoilered_complex_str)
     |> validate_my_downvotes(:hidden_complex_str)
-    |> Search.validate_search(:spoilered_complex_str, user)
-    |> Search.validate_search(:hidden_complex_str, user)
+    |> validate_query(:spoilered_complex_str, &Query.compile(&1, user: user))
+    |> validate_query(:hidden_complex_str, &Query.compile(&1, user: user))
     |> unsafe_validate_unique([:user_id, :name], Repo)
   end

@@ -33,8 +33,8 @@ defmodule Philomena.Filters.Query do
     |> Parser.parse(query_string, context)
   end

-  def compile(user, query_string) do
-    query_string = query_string || ""
+  def compile(query_string, opts \\ []) do
+    user = Keyword.get(opts, :user)

     case user do
       nil ->
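Callers move from the positional compile(user, query_string) to the keyword form; a minimal sketch of the change at a call site:

    # before
    Philomena.Filters.Query.compile(user, "safe")

    # after
    Philomena.Filters.Query.compile("safe", user: user)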
@@ -7,8 +7,9 @@ defmodule Philomena.Forums do
   alias Philomena.Repo

   alias Philomena.Forums.Forum
-  alias Philomena.Forums.Subscription
-  alias Philomena.Notifications
+
+  use Philomena.Subscriptions,
+    id_name: :forum_id

   @doc """
   Returns the list of forums.
@@ -103,45 +104,4 @@ defmodule Philomena.Forums do
   def change_forum(%Forum{} = forum) do
     Forum.changeset(forum, %{})
   end
-
-  def subscribed?(_forum, nil), do: false
-
-  def subscribed?(forum, user) do
-    Subscription
-    |> where(forum_id: ^forum.id, user_id: ^user.id)
-    |> Repo.exists?()
-  end
-
-  def create_subscription(_forum, nil), do: {:ok, nil}
-
-  def create_subscription(forum, user) do
-    %Subscription{forum_id: forum.id, user_id: user.id}
-    |> Subscription.changeset(%{})
-    |> Repo.insert(on_conflict: :nothing)
-  end
-
-  @doc """
-  Deletes a Subscription.
-
-  ## Examples
-
-      iex> delete_subscription(subscription)
-      {:ok, %Subscription{}}
-
-      iex> delete_subscription(subscription)
-      {:error, %Ecto.Changeset{}}
-
-  """
-  def delete_subscription(forum, user) do
-    clear_notification(forum, user)
-
-    %Subscription{forum_id: forum.id, user_id: user.id}
-    |> Repo.delete()
-  end
-
-  def clear_notification(_forum, nil), do: nil
-
-  def clear_notification(forum, user) do
-    Notifications.delete_unread_notification("Forum", forum.id, user)
-  end
 end
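Presumably the `use Philomena.Subscriptions, id_name: :forum_id` macro now injects the subscription helpers that were hand-written here before; assuming that, call sites would keep the same shape (a sketch, not confirmed by this commit):

    Philomena.Forums.subscribed?(forum, user)
    Philomena.Forums.create_subscription(forum, user)
    Philomena.Forums.delete_subscription(forum, user)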
@@ -14,10 +14,12 @@ defmodule Philomena.Galleries do
   alias Philomena.IndexWorker
   alias Philomena.GalleryReorderWorker
   alias Philomena.Notifications
-  alias Philomena.NotificationWorker
-  alias Philomena.Notifications.{Notification, UnreadNotification}
   alias Philomena.Images

+  use Philomena.Subscriptions,
+    on_delete: :clear_gallery_notification,
+    id_name: :gallery_id
+
   @doc """
   Gets a single gallery.

@@ -91,21 +93,8 @@ defmodule Philomena.Galleries do
       |> select([i], i.image_id)
       |> Repo.all()

-    unread_notifications =
-      UnreadNotification
-      |> join(:inner, [un], _ in assoc(un, :notification))
-      |> where([_, n], n.actor_type == "Gallery")
-      |> where([_, n], n.actor_id == ^gallery.id)
-
-    notifications =
-      Notification
-      |> where(actor_type: "Gallery")
-      |> where(actor_id: ^gallery.id)
-
     Multi.new()
     |> Multi.delete(:gallery, gallery)
-    |> Multi.delete_all(:unread_notifications, unread_notifications)
-    |> Multi.delete_all(:notifications, notifications)
     |> Repo.transaction()
     |> case do
       {:ok, %{gallery: gallery}} ->
@@ -173,7 +162,7 @@ defmodule Philomena.Galleries do

   def add_image_to_gallery(gallery, image) do
     Multi.new()
-    |> Multi.run(:lock, fn repo, %{} ->
+    |> Multi.run(:gallery, fn repo, %{} ->
       gallery =
         Gallery
         |> where(id: ^gallery.id)
@@ -189,7 +178,7 @@ defmodule Philomena.Galleries do
       |> Interaction.changeset(%{"image_id" => image.id, "position" => position})
       |> repo.insert()
     end)
-    |> Multi.run(:gallery, fn repo, %{} ->
+    |> Multi.run(:image_count, fn repo, %{} ->
       now = DateTime.utc_now()

       {count, nil} =
@@ -199,11 +188,11 @@ defmodule Philomena.Galleries do

       {:ok, count}
     end)
+    |> Multi.run(:notification, &notify_gallery/2)
     |> Repo.transaction()
     |> case do
       {:ok, result} ->
         Images.reindex_image(image)
-        notify_gallery(gallery, image)
         reindex_gallery(gallery)

         {:ok, result}
@@ -215,7 +204,7 @@ defmodule Philomena.Galleries do

   def remove_image_from_gallery(gallery, image) do
     Multi.new()
-    |> Multi.run(:lock, fn repo, %{} ->
+    |> Multi.run(:gallery, fn repo, %{} ->
       gallery =
         Gallery
         |> where(id: ^gallery.id)
@@ -232,7 +221,7 @@ defmodule Philomena.Galleries do

       {:ok, count}
     end)
-    |> Multi.run(:gallery, fn repo, %{interaction: interaction_count} ->
+    |> Multi.run(:image_count, fn repo, %{interaction: interaction_count} ->
       now = DateTime.utc_now()

       {count, nil} =
@@ -255,37 +244,16 @@ defmodule Philomena.Galleries do
     end
   end

+  defp notify_gallery(_repo, %{gallery: gallery}) do
+    Notifications.create_gallery_image_notification(gallery)
+  end
+
   defp last_position(gallery_id) do
     Interaction
     |> where(gallery_id: ^gallery_id)
     |> Repo.aggregate(:max, :position)
   end

-  def notify_gallery(gallery, image) do
-    Exq.enqueue(Exq, "notifications", NotificationWorker, ["Galleries", [gallery.id, image.id]])
-  end
-
-  def perform_notify([gallery_id, image_id]) do
-    gallery = get_gallery!(gallery_id)
-
-    subscriptions =
-      gallery
-      |> Repo.preload(:subscriptions)
-      |> Map.fetch!(:subscriptions)
-
-    Notifications.notify(
-      gallery,
-      subscriptions,
-      %{
-        actor_id: gallery.id,
-        actor_type: "Gallery",
-        actor_child_id: image_id,
-        actor_child_type: "Image",
-        action: "added images to"
-      }
-    )
-  end
-
   def reorder_gallery(gallery, image_ids) do
     Exq.enqueue(Exq, "indexing", GalleryReorderWorker, [gallery.id, image_ids])
   end
@@ -357,54 +325,17 @@ defmodule Philomena.Galleries do
   defp position_order(%{order_position_asc: true}), do: [asc: :position]
   defp position_order(_gallery), do: [desc: :position]

-  alias Philomena.Galleries.Subscription
-
-  def subscribed?(_gallery, nil), do: false
-
-  def subscribed?(gallery, user) do
-    Subscription
-    |> where(gallery_id: ^gallery.id, user_id: ^user.id)
-    |> Repo.exists?()
-  end
-
   @doc """
-  Creates a subscription.
+  Removes all gallery notifications for a given gallery and user.

   ## Examples

-      iex> create_subscription(%{field: value})
-      {:ok, %Subscription{}}
-
-      iex> create_subscription(%{field: bad_value})
-      {:error, %Ecto.Changeset{}}
+      iex> clear_gallery_notification(gallery, user)
+      :ok

   """
-  def create_subscription(gallery, user) do
-    %Subscription{gallery_id: gallery.id, user_id: user.id}
-    |> Subscription.changeset(%{})
-    |> Repo.insert(on_conflict: :nothing)
-  end
-
-  @doc """
-  Deletes a Subscription.
-
-  ## Examples
-
-      iex> delete_subscription(subscription)
-      {:ok, %Subscription{}}
-
-      iex> delete_subscription(subscription)
-      {:error, %Ecto.Changeset{}}
-
-  """
-  def delete_subscription(gallery, user) do
-    %Subscription{gallery_id: gallery.id, user_id: user.id}
-    |> Repo.delete()
-  end
-
-  def clear_notification(_gallery, nil), do: nil
-
-  def clear_notification(gallery, user) do
-    Notifications.delete_unread_notification("Gallery", gallery.id, user)
+  def clear_gallery_notification(%Gallery{} = gallery, user) do
+    Notifications.clear_gallery_image_notification(gallery, user)
+    :ok
   end
 end
@@ -15,8 +15,6 @@ defmodule Philomena.Galleries.Query do
   end

   def compile(query_string) do
-    query_string = query_string || ""
-
     fields()
     |> Parser.new()
     |> Parser.parse(query_string)
@@ -22,8 +22,9 @@ defmodule Philomena.Images do
   alias Philomena.IndexWorker
   alias Philomena.ImageFeatures.ImageFeature
   alias Philomena.SourceChanges.SourceChange
-  alias Philomena.Notifications.Notification
-  alias Philomena.NotificationWorker
+  alias Philomena.Notifications.ImageCommentNotification
+  alias Philomena.Notifications.ImageMergeNotification
+  alias Philomena.TagChanges.Limits
   alias Philomena.TagChanges.TagChange
   alias Philomena.Tags
   alias Philomena.UserStatistics
@@ -31,12 +32,15 @@ defmodule Philomena.Images do
   alias Philomena.Notifications
   alias Philomena.Interactions
   alias Philomena.Reports
-  alias Philomena.Reports.Report
   alias Philomena.Comments
   alias Philomena.Galleries.Gallery
   alias Philomena.Galleries.Interaction
   alias Philomena.Users.User

+  use Philomena.Subscriptions,
+    on_delete: :clear_image_notification,
+    id_name: :image_id
+
   @doc """
   Gets a single image.

@@ -90,11 +94,6 @@ defmodule Philomena.Images do

     Multi.new()
     |> Multi.insert(:image, image)
-    |> Multi.run(:name_caches, fn repo, %{image: image} ->
-      image
-      |> Image.cache_changeset()
-      |> repo.update()
-    end)
     |> Multi.run(:added_tag_count, fn repo, %{image: image} ->
       tag_ids = image.added_tags |> Enum.map(& &1.id)
       tags = Tag |> where([t], t.id in ^tag_ids)
@@ -103,7 +102,7 @@ defmodule Philomena.Images do

       {:ok, count}
     end)
-    |> maybe_create_subscription_on_upload(attribution[:user])
+    |> maybe_subscribe_on(:image, attribution[:user], :watch_on_upload)
    |> Repo.transaction()
    |> case do
      {:ok, %{image: image}} = result ->
@@ -157,17 +156,6 @@ defmodule Philomena.Images do
       Logger.error("Aborting upload of #{image.id} after #{retry_count} retries")
   end

-  defp maybe_create_subscription_on_upload(multi, %User{watch_on_upload: true} = user) do
-    multi
-    |> Multi.run(:subscribe, fn _repo, %{image: image} ->
-      create_subscription(image, user)
-    end)
-  end
-
-  defp maybe_create_subscription_on_upload(multi, _user) do
-    multi
-  end
-
   def approve_image(image) do
     image
     |> Repo.preload(:user)
@@ -201,8 +189,7 @@ defmodule Philomena.Images do

   defp maybe_suggest_user_verification(%User{id: id, uploads_count: 5, verified: false}) do
     Reports.create_system_report(
-      id,
-      "User",
+      {"User", id},
       "Verification",
       "User has uploaded enough approved images to be considered for verification."
     )
@@ -376,7 +363,7 @@ defmodule Philomena.Images do
   end

   defp source_change_attributes(attribution, image, source, added, user) do
-    now = DateTime.utc_now() |> DateTime.truncate(:second)
+    now = DateTime.utc_now(:second)

     user_id =
       case user do
@@ -392,8 +379,6 @@ defmodule Philomena.Images do
       updated_at: now,
       ip: attribution[:ip],
       fingerprint: attribution[:fingerprint],
-      user_agent: attribution[:user_agent],
-      referrer: attribution[:referrer],
       added: added
     }
   end
@@ -426,6 +411,9 @@ defmodule Philomena.Images do
           error
       end
     end)
+    |> Multi.run(:check_limits, fn _repo, %{image: {image, _added, _removed}} ->
+      check_tag_change_limits_before_commit(image, attribution)
+    end)
     |> Multi.run(:added_tag_changes, fn repo, %{image: {image, added_tags, _removed}} ->
       tag_changes =
         added_tags
@@ -469,10 +457,47 @@ defmodule Philomena.Images do
       {:ok, count}
     end)
     |> Repo.transaction()
+    |> case do
+      {:ok, %{image: {image, _added, _removed}}} = res ->
+        update_tag_change_limits_after_commit(image, attribution)
+
+        res
+
+      err ->
+        err
+    end
+  end
+
+  defp check_tag_change_limits_before_commit(image, attribution) do
+    tag_changed_count = length(image.added_tags) + length(image.removed_tags)
+    rating_changed = image.ratings_changed
+    user = attribution[:user]
+    ip = attribution[:ip]
+
+    cond do
+      Limits.limited_for_tag_count?(user, ip, tag_changed_count) ->
+        {:error, :limit_exceeded}
+
+      rating_changed and Limits.limited_for_rating_count?(user, ip) ->
+        {:error, :limit_exceeded}
+
+      true ->
+        {:ok, 0}
+    end
+  end
+
+  def update_tag_change_limits_after_commit(image, attribution) do
+    rating_changed_count = if(image.ratings_changed, do: 1, else: 0)
+    tag_changed_count = length(image.added_tags) + length(image.removed_tags)
+    user = attribution[:user]
+    ip = attribution[:ip]
+
+    Limits.update_tag_count_after_update(user, ip, tag_changed_count)
+    Limits.update_rating_count_after_update(user, ip, rating_changed_count)
   end

   defp tag_change_attributes(attribution, image, tag, added, user) do
-    now = DateTime.utc_now() |> DateTime.truncate(:second)
+    now = DateTime.utc_now(:second)

     user_id =
       case user do
@@ -489,8 +514,6 @@ defmodule Philomena.Images do
       tag_name_cache: tag.name,
       ip: attribution[:ip],
       fingerprint: attribution[:fingerprint],
-      user_agent: attribution[:user_agent],
-      referrer: attribution[:referrer],
       added: added
     }
   end
@@ -569,13 +592,13 @@ defmodule Philomena.Images do
     |> Multi.run(:migrate_interactions, fn _, %{} ->
       {:ok, Interactions.migrate_interactions(image, duplicate_of_image)}
     end)
+    |> Multi.run(:notification, &notify_merge(&1, &2, image, duplicate_of_image))
     |> Repo.transaction()
     |> process_after_hide()
     |> case do
       {:ok, result} ->
         reindex_image(duplicate_of_image)
         Comments.reindex_comments(duplicate_of_image)
-        notify_merge(image, duplicate_of_image)

         {:ok, result}

@@ -585,11 +608,7 @@ defmodule Philomena.Images do
   end

   defp hide_image_multi(changeset, image, user, multi) do
-    reports =
-      Report
-      |> where(reportable_type: "Image", reportable_id: ^image.id)
-      |> select([r], r.id)
-      |> update(set: [open: false, state: "closed", admin_id: ^user.id])
+    report_query = Reports.close_report_query({"Image", image.id}, user)

     galleries =
       Gallery
@@ -600,7 +619,7 @@ defmodule Philomena.Images do

     multi
     |> Multi.update(:image, changeset)
-    |> Multi.update_all(:reports, reports, [])
+    |> Multi.update_all(:reports, report_query, [])
     |> Multi.update_all(:galleries, galleries, [])
     |> Multi.delete_all(:gallery_interactions, gallery_interactions, [])
     |> Multi.run(:tags, fn repo, %{image: image} ->
@@ -715,7 +734,7 @@ defmodule Philomena.Images do
       |> where([t], t.image_id in ^image_ids and t.tag_id in ^removed_tags)
       |> select([t], [t.image_id, t.tag_id])

-    now = DateTime.utc_now() |> DateTime.truncate(:second)
+    now = DateTime.utc_now(:second)
     tag_change_attributes = Map.merge(tag_change_attributes, %{created_at: now, updated_at: now})
     tag_attributes = %{name: "", slug: "", created_at: now, updated_at: now}

@@ -868,53 +887,6 @@ defmodule Philomena.Images do

   alias Philomena.Images.Subscription

-  def subscribed?(_image, nil), do: false
-
-  def subscribed?(image, user) do
-    Subscription
-    |> where(image_id: ^image.id, user_id: ^user.id)
-    |> Repo.exists?()
-  end
-
-  @doc """
-  Creates a subscription.
-
-  ## Examples
-
-      iex> create_subscription(%{field: value})
-      {:ok, %Subscription{}}
-
-      iex> create_subscription(%{field: bad_value})
-      {:error, %Ecto.Changeset{}}
-
-  """
-  def create_subscription(_image, nil), do: {:ok, nil}
-
-  def create_subscription(image, user) do
-    %Subscription{image_id: image.id, user_id: user.id}
-    |> Subscription.changeset(%{})
-    |> Repo.insert(on_conflict: :nothing)
-  end
-
-  @doc """
-  Deletes a subscription.
-
-  ## Examples
-
-      iex> delete_subscription(subscription)
-      {:ok, %Subscription{}}
-
-      iex> delete_subscription(subscription)
-      {:error, %Ecto.Changeset{}}
-
-  """
-  def delete_subscription(image, user) do
-    clear_notification(image, user)
-
-    %Subscription{image_id: image.id, user_id: user.id}
-    |> Repo.delete()
-  end
-
   def migrate_subscriptions(source, target) do
     subscriptions =
       Subscription
@@ -924,12 +896,40 @@ defmodule Philomena.Images do

     Repo.insert_all(Subscription, subscriptions, on_conflict: :nothing)

-    {count, nil} =
-      Notification
-      |> where(actor_type: "Image", actor_id: ^source.id)
-      |> Repo.delete_all()
+    comment_notifications =
+      from cn in ImageCommentNotification,
+        where: cn.image_id == ^source.id,
+        select: %{
+          user_id: cn.user_id,
+          image_id: ^target.id,
+          comment_id: cn.comment_id,
+          read: cn.read,
+          created_at: cn.created_at,
+          updated_at: cn.updated_at
+        }

-    {:ok, count}
+    merge_notifications =
+      from mn in ImageMergeNotification,
+        where: mn.target_id == ^source.id,
+        select: %{
+          user_id: mn.user_id,
+          target_id: ^target.id,
+          source_id: mn.source_id,
+          read: mn.read,
+          created_at: mn.created_at,
+          updated_at: mn.updated_at
+        }
+
+    {comment_notification_count, nil} =
+      Repo.insert_all(ImageCommentNotification, comment_notifications, on_conflict: :nothing)
+
+    {merge_notification_count, nil} =
+      Repo.insert_all(ImageMergeNotification, merge_notifications, on_conflict: :nothing)
+
+    Repo.delete_all(exclude(comment_notifications, :select))
+    Repo.delete_all(exclude(merge_notifications, :select))
+
+    {:ok, {comment_notification_count, merge_notification_count}}
   end

   def migrate_sources(source, target) do
@@ -944,34 +944,22 @@ defmodule Philomena.Images do
     |> Repo.update()
   end

-  def notify_merge(source, target) do
-    Exq.enqueue(Exq, "notifications", NotificationWorker, ["Images", [source.id, target.id]])
+  defp notify_merge(_repo, _changes, source, target) do
+    Notifications.create_image_merge_notification(target, source)
   end

-  def perform_notify([source_id, target_id]) do
-    target = get_image!(target_id)
-
-    subscriptions =
-      target
-      |> Repo.preload(:subscriptions)
-      |> Map.fetch!(:subscriptions)
-
-    Notifications.notify(
-      nil,
-      subscriptions,
-      %{
-        actor_id: target.id,
-        actor_type: "Image",
-        actor_child_id: nil,
-        actor_child_type: nil,
-        action: "merged ##{source_id} into"
-      }
-    )
-  end
-
-  def clear_notification(_image, nil), do: nil
-
-  def clear_notification(image, user) do
-    Notifications.delete_unread_notification("Image", image.id, user)
+  @doc """
+  Removes all image notifications for a given image and user.
+
+  ## Examples
+
+      iex> clear_image_notification(image, user)
+      :ok
+
+  """
+  def clear_image_notification(%Image{} = image, user) do
+    Notifications.clear_image_comment_notification(image, user)
+    Notifications.clear_image_merge_notification(image, user)
+    :ok
   end
 end
@@ -2,7 +2,6 @@ defmodule Philomena.Images.Image do
   use Ecto.Schema

   import Ecto.Changeset
-  import Ecto.Query

   alias Philomena.ImageIntensities.ImageIntensity
   alias Philomena.ImageVotes.ImageVote
@@ -51,6 +50,7 @@ defmodule Philomena.Images.Image do
     field :image_width, :integer
     field :image_height, :integer
     field :image_size, :integer
+    field :image_orig_size, :integer
     field :image_format, :string
     field :image_mime_type, :string
     field :image_aspect_ratio, :float
@@ -58,14 +58,11 @@ defmodule Philomena.Images.Image do
     field :image_is_animated, :boolean, source: :is_animated
     field :ip, EctoNetwork.INET
     field :fingerprint, :string
-    field :user_agent, :string, default: ""
-    field :referrer, :string, default: ""
     field :anonymous, :boolean, default: false
     field :score, :integer, default: 0
     field :faves_count, :integer, default: 0
     field :upvotes_count, :integer, default: 0
     field :downvotes_count, :integer, default: 0
-    field :votes_count, :integer, default: 0
     field :source_url, :string
     field :description, :string, default: ""
     field :image_sha512_hash, :string
@@ -87,15 +84,11 @@ defmodule Philomena.Images.Image do
     field :hides_count, :integer, default: 0
     field :approved, :boolean

-    # todo: can probably remove these now
-    field :tag_list_cache, :string
-    field :tag_list_plus_alias_cache, :string
-    field :file_name_cache, :string
-
     field :removed_tags, {:array, :any}, default: [], virtual: true
     field :added_tags, {:array, :any}, default: [], virtual: true
     field :removed_sources, {:array, :any}, default: [], virtual: true
     field :added_sources, {:array, :any}, default: [], virtual: true
+    field :ratings_changed, :boolean, default: false, virtual: true

     field :uploaded_image, :string, virtual: true
     field :removed_image, :string, virtual: true
@@ -120,11 +113,9 @@ defmodule Philomena.Images.Image do
   end

   def creation_changeset(image, attrs, attribution) do
-    now = DateTime.utc_now() |> DateTime.truncate(:second)
-
     image
     |> cast(attrs, [:anonymous, :source_url, :description])
-    |> change(first_seen_at: now)
+    |> change(first_seen_at: DateTime.utc_now(:second))
     |> change(attribution)
     |> validate_length(:description, max: 50_000, count: :bytes)
     |> validate_format(:source_url, ~r/\Ahttps?:\/\//)
@@ -138,6 +129,7 @@ defmodule Philomena.Images.Image do
       :image_width,
       :image_height,
       :image_size,
+      :image_orig_size,
       :image_format,
       :image_mime_type,
       :image_aspect_ratio,
@@ -153,6 +145,7 @@ defmodule Philomena.Images.Image do
       :image_width,
       :image_height,
       :image_size,
+      :image_orig_size,
       :image_format,
      :image_mime_type,
      :image_aspect_ratio,
@@ -226,7 +219,6 @@ defmodule Philomena.Images.Image do
     |> cast(attrs, [])
     |> TagDiffer.diff_input(old_tags, new_tags, excluded_tags)
     |> TagValidator.validate_tags()
-    |> cache_changeset()
   end

   def locked_tags_changeset(image, attrs, locked_tags) do
@@ -340,54 +332,7 @@ defmodule Philomena.Images.Image do
   def approve_changeset(image) do
     change(image)
     |> put_change(:approved, true)
-    |> put_change(:first_seen_at, DateTime.truncate(DateTime.utc_now(), :second))
-  end
-
-  def cache_changeset(image) do
-    changeset = change(image)
-    image = apply_changes(changeset)
-
-    {tag_list_cache, tag_list_plus_alias_cache, file_name_cache} =
-      create_caches(image.id, image.tags)
-
-    changeset
-    |> put_change(:tag_list_cache, tag_list_cache)
-    |> put_change(:tag_list_plus_alias_cache, tag_list_plus_alias_cache)
-    |> put_change(:file_name_cache, file_name_cache)
-  end
-
-  defp create_caches(image_id, tags) do
-    tags = Tag.display_order(tags)
-
-    tag_list_cache =
-      tags
-      |> Enum.map_join(", ", & &1.name)
-
-    tag_ids = tags |> Enum.map(& &1.id)
-
-    aliases =
-      Tag
-      |> where([t], t.aliased_tag_id in ^tag_ids)
-      |> Repo.all()
-
-    tag_list_plus_alias_cache =
-      (tags ++ aliases)
-      |> Tag.display_order()
-      |> Enum.map_join(", ", & &1.name)
-
-    # Truncate filename to 150 characters, making room for the path + filename on Windows
-    # https://stackoverflow.com/questions/265769/maximum-filename-length-in-ntfs-windows-xp-and-windows-vista
-    file_name_slug_fragment =
-      tags
-      |> Enum.map_join("_", & &1.slug)
-      |> String.to_charlist()
-      |> Enum.filter(&(&1 in ?a..?z or &1 in ~c"0123456789_-"))
-      |> List.to_string()
-      |> String.slice(0..150)
-
-    file_name_cache = "#{image_id}__#{file_name_slug_fragment}"
-
-    {tag_list_cache, tag_list_plus_alias_cache, file_name_cache}
+    |> put_change(:first_seen_at, DateTime.utc_now(:second))
   end

   defp create_key do
@@ -84,7 +84,7 @@ defmodule Philomena.Images.Query do
   defp anonymous_fields do
     [
       int_fields:
-        ~W(id width height score upvotes downvotes faves uploader_id faved_by_id pixels size comment_count source_count tag_count) ++
+        ~W(id width height score upvotes downvotes faves uploader_id faved_by_id pixels size orig_size comment_count source_count tag_count) ++
           tag_count_fields(),
       float_fields: ~W(aspect_ratio wilson_score duration),
       date_fields: ~W(created_at updated_at first_seen_at),
@@ -144,8 +144,9 @@ defmodule Philomena.Images.Query do
     |> Parser.parse(query_string, context)
   end

-  def compile(user, query_string, watch \\ false) do
-    query_string = query_string || ""
+  def compile(query_string, opts \\ []) do
+    user = Keyword.get(opts, :user)
+    watch = Keyword.get(opts, :watch, false)

     case user do
       nil ->
@@ -54,6 +54,7 @@ defmodule Philomena.Images.SearchIndex do
         processed: %{type: "boolean"},
         score: %{type: "integer"},
         size: %{type: "integer"},
+        orig_size: %{type: "integer"},
         sha512_hash: %{type: "keyword"},
         source_url: %{type: "keyword"},
         source_count: %{type: "integer"},
@@ -117,6 +118,7 @@ defmodule Philomena.Images.SearchIndex do
       height: image.image_height,
       pixels: image.image_width * image.image_height,
       size: image.image_size,
+      orig_size: image.image_orig_size,
       animated: image.image_is_animated,
       duration: if(image.image_is_animated, do: image.image_duration, else: 0),
       tag_count: length(image.tags),
@@ -13,7 +13,9 @@ defmodule Philomena.Images.Source do
   @doc false
   def changeset(source, attrs) do
     source
-    |> cast(attrs, [])
-    |> validate_required([])
+    |> cast(attrs, [:source])
+    |> validate_required([:source])
+    |> validate_format(:source, ~r/\Ahttps?:\/\//)
+    |> validate_length(:source, max: 255)
   end
 end
@@ -1,6 +1,5 @@
 defmodule Philomena.Images.SourceDiffer do
   import Ecto.Changeset
-  alias Philomena.Images.Source

   def diff_input(changeset, old_sources, new_sources) do
     old_set = MapSet.new(flatten_input(old_sources))
@@ -13,12 +12,11 @@ defmodule Philomena.Images.SourceDiffer do
     {sources, actually_added, actually_removed} =
       apply_changes(source_set, added_sources, removed_sources)

-    image_id = fetch_field!(changeset, :id)
-
     changeset
+    |> cast(source_params(sources), [])
     |> put_change(:added_sources, actually_added)
     |> put_change(:removed_sources, actually_removed)
-    |> put_assoc(:sources, source_structs(image_id, sources))
+    |> cast_assoc(:sources)
   end

   defp apply_changes(source_set, added_set, removed_set) do
@@ -44,8 +42,8 @@ defmodule Philomena.Images.SourceDiffer do
     {sources, actually_added, actually_removed}
   end

-  defp source_structs(image_id, sources) do
-    Enum.map(sources, &%Source{image_id: image_id, source: &1})
+  defp source_params(sources) do
+    %{sources: Enum.map(sources, &%{source: &1})}
   end

   defp flatten_input(input) when is_map(input) do
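The differ now hands nested params to cast_assoc/2 instead of building %Source{} structs directly; a sketch of the shape source_params/1 produces (URLs illustrative):

    iex> source_params(["https://a.example/1", "https://b.example/2"])
    %{sources: [%{source: "https://a.example/1"}, %{source: "https://b.example/2"}]}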
@@ -5,7 +5,20 @@ defmodule Philomena.Images.TagValidator do
   def validate_tags(changeset) do
     tags = changeset |> get_field(:tags)

-    validate_tag_input(changeset, tags)
+    changeset
+    |> validate_tag_input(tags)
+    |> set_rating_changed()
+  end
+
+  defp set_rating_changed(changeset) do
+    added_tags = changeset |> get_field(:added_tags) |> extract_names()
+    removed_tags = changeset |> get_field(:removed_tags) |> extract_names()
+    ratings = all_ratings()
+
+    added_ratings = MapSet.intersection(ratings, added_tags) |> MapSet.size()
+    removed_ratings = MapSet.intersection(ratings, removed_tags) |> MapSet.size()
+
+    put_change(changeset, :ratings_changed, added_ratings + removed_ratings > 0)
   end

   defp validate_tag_input(changeset, tags) do
@@ -108,6 +121,13 @@ defmodule Philomena.Images.TagValidator do
     |> MapSet.new()
   end

+  defp all_ratings do
+    safe_rating()
+    |> MapSet.union(sexual_ratings())
+    |> MapSet.union(horror_ratings())
+    |> MapSet.union(gross_rating())
+  end
+
   defp safe_rating, do: MapSet.new(["safe"])
   defp sexual_ratings, do: MapSet.new(["suggestive", "questionable", "explicit"])
   defp horror_ratings, do: MapSet.new(["semi-grimdark", "grimdark"])
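A small sketch of the rating-change detection above, using plain MapSets (tag names illustrative; gross_rating/0 omitted since its contents are not shown here):

    added = MapSet.new(["explicit", "cute"])
    ratings = MapSet.new(["safe", "suggestive", "questionable", "explicit"])

    MapSet.intersection(ratings, added) |> MapSet.size()
    #=> 1, so :ratings_changed is put as true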
@@ -76,7 +76,7 @@ defmodule Philomena.Images.Thumbnailer do
   def generate_thumbnails(image_id) do
     image = Repo.get!(Image, image_id)
     file = download_image_file(image)
-    {:ok, analysis} = Analyzers.analyze(file)
+    {:ok, analysis} = Analyzers.analyze_path(file)

     file =
       apply_edit_script(image, file, Processors.process(analysis, file, generated_sizes(image)))
@@ -127,7 +127,7 @@ defmodule Philomena.Images.Thumbnailer do
   end

   defp recompute_meta(image, file, changeset_fn) do
-    {:ok, %{dimensions: {width, height}}} = Analyzers.analyze(file)
+    {:ok, %{dimensions: {width, height}}} = Analyzers.analyze_path(file)

     image
     |> changeset_fn.(%{
@@ -72,7 +72,7 @@ defmodule Philomena.Interactions do
   end

   def migrate_interactions(source, target) do
-    now = DateTime.utc_now() |> DateTime.truncate(:second)
+    now = DateTime.utc_now(:second)
     source = Repo.preload(source, [:hiders, :favers, :upvoters, :downvoters])

     new_hides = Enum.map(source.hiders, &%{image_id: target.id, user_id: &1.id, created_at: now})
@@ -7,18 +7,82 @@ defmodule Philomena.ModNotes do
   alias Philomena.Repo

   alias Philomena.ModNotes.ModNote
+  alias Philomena.Polymorphic

   @doc """
-  Returns the list of mod_notes.
+  Returns a list of 2-tuples of mod notes and rendered output for the notable type and id.
+
+  See `list_mod_notes/3` for more information about collection rendering.

   ## Examples

-      iex> list_mod_notes()
-      [%ModNote{}, ...]
+      iex> list_all_mod_notes_by_type_and_id("User", "1", & &1.body)
+      [
+        {%ModNote{body: "hello *world*"}, "hello *world*"}
+      ]

   """
-  def list_mod_notes do
-    Repo.all(ModNote)
+  def list_all_mod_notes_by_type_and_id(notable_type, notable_id, collection_renderer) do
+    ModNote
+    |> where(notable_type: ^notable_type, notable_id: ^notable_id)
+    |> preload(:moderator)
+    |> order_by(desc: :id)
+    |> Repo.all()
+    |> preload_and_render(collection_renderer)
+  end
+
+  @doc """
+  Returns a `m:Scrivener.Page` of 2-tuples of mod notes and rendered output
+  for the query string and current pagination.
+
+  All mod notes containing the substring `query_string` are matched and returned
+  case-insensitively.
+
+  See `list_mod_notes/3` for more information.
+
+  ## Examples
+
+      iex> list_mod_notes_by_query_string("quack", & &1.body, page_size: 15)
+      %Scrivener.Page{}
+
+  """
+  def list_mod_notes_by_query_string(query_string, collection_renderer, pagination) do
+    ModNote
+    |> where([m], ilike(m.body, ^"%#{query_string}%"))
+    |> list_mod_notes(collection_renderer, pagination)
+  end
+
+  @doc """
+  Returns a `m:Scrivener.Page` of 2-tuples of mod notes and rendered output
+  for the current pagination.
+
+  When coerced to a list and rendered as Markdown, the result may look like:
+
+      [
+        {%ModNote{body: "hello *world*"}, "hello <em>world</em>"}
+      ]
+
+  ## Examples
+
+      iex> list_mod_notes(& &1.body, page_size: 15)
+      %Scrivener.Page{}
+
+  """
+  def list_mod_notes(queryable \\ ModNote, collection_renderer, pagination) do
+    mod_notes =
+      queryable
+      |> preload(:moderator)
+      |> order_by(desc: :id)
+      |> Repo.paginate(pagination)
+
+    put_in(mod_notes.entries, preload_and_render(mod_notes, collection_renderer))
+  end
+
+  defp preload_and_render(mod_notes, collection_renderer) do
+    bodies = collection_renderer.(mod_notes)
+    preloaded = Polymorphic.load_polymorphic(mod_notes, notable: [notable_id: :notable_type])
+
+    Enum.zip(preloaded, bodies)
   end

   @doc """
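The `collection_renderer` callback is handed the whole collection and must return the rendered bodies in the same order; `preload_and_render/2` then zips them back onto the preloaded notes. A hypothetical call with a pass-through renderer (in the application the renderer presumably runs the Markdown pipeline rather than returning raw bodies):

    iex> Philomena.ModNotes.list_all_mod_notes_by_type_and_id("User", "1", fn mod_notes ->
    ...>   Enum.map(mod_notes, & &1.body)
    ...> end)
    [{%ModNote{body: "hello *world*"}, "hello *world*"}]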
@@ -9,40 +9,24 @@ defmodule Philomena.ModerationLogs do
   alias Philomena.ModerationLogs.ModerationLog

   @doc """
-  Returns the list of moderation_logs.
+  Returns a paginated list of moderation logs as a `m:Scrivener.Page`.

   ## Examples

-      iex> list_moderation_logs()
+      iex> list_moderation_logs(page_size: 15)
       [%ModerationLog{}, ...]

   """
-  def list_moderation_logs(conn) do
+  def list_moderation_logs(pagination) do
     ModerationLog
-    |> where([ml], ml.created_at > ago(2, "week"))
+    |> where([ml], ml.created_at >= ago(2, "week"))
     |> preload(:user)
     |> order_by(desc: :created_at)
-    |> Repo.paginate(conn.assigns.scrivener)
+    |> Repo.paginate(pagination)
   end

   @doc """
-  Gets a single moderation_log.
+  Creates a moderation log.

-  Raises `Ecto.NoResultsError` if the Moderation log does not exist.
-
-  ## Examples
-
-      iex> get_moderation_log!(123)
-      %ModerationLog{}
-
-      iex> get_moderation_log!(456)
-      ** (Ecto.NoResultsError)
-
-  """
-  def get_moderation_log!(id), do: Repo.get!(ModerationLog, id)
-
-  @doc """
-  Creates a moderation_log.
-
   ## Examples

@@ -60,21 +44,14 @@ defmodule Philomena.ModerationLogs do
   end

   @doc """
-  Deletes a moderation_log.
+  Removes moderation logs created more than 2 weeks ago.

   ## Examples

-      iex> delete_moderation_log(moderation_log)
-      {:ok, %ModerationLog{}}
-
-      iex> delete_moderation_log(moderation_log)
-      {:error, %Ecto.Changeset{}}
+      iex> cleanup!()
+      {31, nil}

   """
-  def delete_moderation_log(%ModerationLog{} = moderation_log) do
-    Repo.delete(moderation_log)
-  end
-
   def cleanup! do
     ModerationLog
     |> where([ml], ml.created_at < ago(2, "week"))
@@ -6,214 +6,291 @@ defmodule Philomena.Notifications do
   import Ecto.Query, warn: false
   alias Philomena.Repo

-  alias Philomena.Notifications.Notification
+  alias Philomena.Channels.Subscription, as: ChannelSubscription
+  alias Philomena.Forums.Subscription, as: ForumSubscription
+  alias Philomena.Galleries.Subscription, as: GallerySubscription
+  alias Philomena.Images.Subscription, as: ImageSubscription
+  alias Philomena.Topics.Subscription, as: TopicSubscription
+
+  alias Philomena.Notifications.ChannelLiveNotification
+  alias Philomena.Notifications.ForumPostNotification
+  alias Philomena.Notifications.ForumTopicNotification
+  alias Philomena.Notifications.GalleryImageNotification
+  alias Philomena.Notifications.ImageCommentNotification
+  alias Philomena.Notifications.ImageMergeNotification
+
+  alias Philomena.Notifications.Category
+  alias Philomena.Notifications.Creator

   @doc """
-  Returns the list of notifications.
+  Return the count of all currently unread notifications for the user in all categories.

   ## Examples

-      iex> list_notifications()
-      [%Notification{}, ...]
+      iex> total_unread_notification_count(user)
+      15

   """
-  def list_notifications do
-    Repo.all(Notification)
+  def total_unread_notification_count(user) do
+    Category.total_unread_notification_count(user)
   end

   @doc """
-  Gets a single notification.
-
-  Raises `Ecto.NoResultsError` if the Notification does not exist.
+  Gather up and return the top N notifications for the user, for each category of
+  unread notification currently existing.

   ## Examples

-      iex> get_notification!(123)
-      %Notification{}
-
-      iex> get_notification!(456)
-      ** (Ecto.NoResultsError)
+      iex> unread_notifications_for_user(user, page_size: 10)
+      [
+        channel_live: [],
+        forum_post: [%ForumPostNotification{...}, ...],
+        forum_topic: [%ForumTopicNotification{...}, ...],
+        gallery_image: [],
+        image_comment: [%ImageCommentNotification{...}, ...],
+        image_merge: []
+      ]

   """
-  def get_notification!(id), do: Repo.get!(Notification, id)
-
-  @doc """
-  Creates a notification.
-
-  ## Examples
-
-      iex> create_notification(%{field: value})
-      {:ok, %Notification{}}
-
-      iex> create_notification(%{field: bad_value})
-      {:error, %Ecto.Changeset{}}
-
-  """
-  def create_notification(attrs \\ %{}) do
-    %Notification{}
-    |> Notification.changeset(attrs)
-    |> Repo.insert()
+  def unread_notifications_for_user(user, pagination) do
+    Category.unread_notifications_for_user(user, pagination)
   end

   @doc """
-  Updates a notification.
+  Returns paginated unread notifications for the user, given the category.

   ## Examples

-      iex> update_notification(notification, %{field: new_value})
-      {:ok, %Notification{}}
-
-      iex> update_notification(notification, %{field: bad_value})
-      {:error, %Ecto.Changeset{}}
+      iex> unread_notifications_for_user_and_category(user, :image_comment)
+      [%ImageCommentNotification{...}]

   """
-  def update_notification(%Notification{} = notification, attrs) do
-    notification
-    |> Notification.changeset(attrs)
-    |> Repo.insert_or_update()
+  def unread_notifications_for_user_and_category(user, category, pagination) do
+    Category.unread_notifications_for_user_and_category(user, category, pagination)
   end

   @doc """
-  Deletes a Notification.
+  Creates a channel live notification, returning the number of affected users.

   ## Examples

-      iex> delete_notification(notification)
-      {:ok, %Notification{}}
-
-      iex> delete_notification(notification)
-      {:error, %Ecto.Changeset{}}
+      iex> create_channel_live_notification(channel)
+      {:ok, 2}

   """
-  def delete_notification(%Notification{} = notification) do
-    Repo.delete(notification)
+  def create_channel_live_notification(channel) do
+    Creator.broadcast_notification(
+      from: {ChannelSubscription, channel_id: channel.id},
+      into: ChannelLiveNotification,
+      select: [channel_id: channel.id],
+      unique_key: :channel_id
+    )
   end

   @doc """
-  Returns an `%Ecto.Changeset{}` for tracking notification changes.
+  Creates a forum post notification, returning the number of affected users.

   ## Examples

-      iex> change_notification(notification)
-      %Ecto.Changeset{source: %Notification{}}
+      iex> create_forum_post_notification(user, topic, post)
+      {:ok, 2}

   """
-  def change_notification(%Notification{} = notification) do
-    Notification.changeset(notification, %{})
-  end
-
-  alias Philomena.Notifications.UnreadNotification
-
-  def count_unread_notifications(user) do
-    UnreadNotification
-    |> where(user_id: ^user.id)
-    |> Repo.aggregate(:count, :notification_id)
+  def create_forum_post_notification(user, topic, post) do
+    Creator.broadcast_notification(
+      notification_author: user,
+      from: {TopicSubscription, topic_id: topic.id},
+      into: ForumPostNotification,
+      select: [topic_id: topic.id, post_id: post.id],
+      unique_key: :topic_id
+    )
   end

   @doc """
-  Creates a unread_notification.
+  Creates a forum topic notification, returning the number of affected users.

   ## Examples

-      iex> create_unread_notification(%{field: value})
-      {:ok, %UnreadNotification{}}
-
-      iex> create_unread_notification(%{field: bad_value})
-      {:error, %Ecto.Changeset{}}
+      iex> create_forum_topic_notification(user, topic)
+      {:ok, 2}

   """
-  def create_unread_notification(attrs \\ %{}) do
-    %UnreadNotification{}
-    |> UnreadNotification.changeset(attrs)
-    |> Repo.insert()
+  def create_forum_topic_notification(user, topic) do
+    Creator.broadcast_notification(
+      notification_author: user,
+      from: {ForumSubscription, forum_id: topic.forum_id},
+      into: ForumTopicNotification,
+      select: [topic_id: topic.id],
+      unique_key: :topic_id
+    )
   end

   @doc """
-  Updates a unread_notification.
+  Creates a gallery image notification, returning the number of affected users.

   ## Examples

-      iex> update_unread_notification(unread_notification, %{field: new_value})
-      {:ok, %UnreadNotification{}}
-
-      iex> update_unread_notification(unread_notification, %{field: bad_value})
-      {:error, %Ecto.Changeset{}}
+      iex> create_gallery_image_notification(gallery)
+      {:ok, 2}

   """
-  def update_unread_notification(%UnreadNotification{} = unread_notification, attrs) do
-    unread_notification
-    |> UnreadNotification.changeset(attrs)
-    |> Repo.update()
+  def create_gallery_image_notification(gallery) do
+    Creator.broadcast_notification(
+      from: {GallerySubscription, gallery_id: gallery.id},
+      into: GalleryImageNotification,
+      select: [gallery_id: gallery.id],
+      unique_key: :gallery_id
+    )
   end

   @doc """
-  Deletes a UnreadNotification.
+  Creates an image comment notification, returning the number of affected users.

   ## Examples

-      iex> delete_unread_notification(unread_notification)
-      {:ok, %UnreadNotification{}}
-
-      iex> delete_unread_notification(unread_notification)
-      {:error, %Ecto.Changeset{}}
+      iex> create_image_comment_notification(user, image, comment)
+      {:ok, 2}

   """
-  def delete_unread_notification(actor_type, actor_id, user) do
-    notification =
-      Notification
-      |> where(actor_type: ^actor_type, actor_id: ^actor_id)
-      |> Repo.one()
-
-    if notification do
-      UnreadNotification
-      |> where(notification_id: ^notification.id, user_id: ^user.id)
-      |> Repo.delete_all()
-    end
-  end
-
-  @doc """
-  Returns an `%Ecto.Changeset{}` for tracking unread_notification changes.
-
-  ## Examples
-
-      iex> change_unread_notification(unread_notification)
-      %Ecto.Changeset{source: %UnreadNotification{}}
-
-  """
-  def change_unread_notification(%UnreadNotification{} = unread_notification) do
-    UnreadNotification.changeset(unread_notification, %{})
-  end
-
-  def notify(_actor_child, [], _params), do: nil
-
-  def notify(actor_child, subscriptions, params) do
-    # Don't push to the user that created the notification
-    subscriptions =
-      case actor_child do
-        %{user_id: id} ->
-          subscriptions
-          |> Enum.reject(&(&1.user_id == id))
-
-        _ ->
-          subscriptions
-      end
-
-    Repo.transaction(fn ->
-      notification =
-        Notification
-        |> Repo.get_by(actor_id: params.actor_id, actor_type: params.actor_type)
-
-      {:ok, notification} =
-        (notification || %Notification{})
-        |> update_notification(params)
-
-      # Insert the notification to any watchers who do not have it
-      unreads =
-        subscriptions
-        |> Enum.map(&%{user_id: &1.user_id, notification_id: notification.id})
-
-      UnreadNotification
-      |> Repo.insert_all(unreads, on_conflict: :nothing)
-    end)
-  end
+  def create_image_comment_notification(user, image, comment) do
+    Creator.broadcast_notification(
+      notification_author: user,
+      from: {ImageSubscription, image_id: image.id},
+      into: ImageCommentNotification,
+      select: [image_id: image.id, comment_id: comment.id],
+      unique_key: :image_id
+    )
+  end
+
+  @doc """
+  Creates an image merge notification, returning the number of affected users.
+
+  ## Examples
+
+      iex> create_image_merge_notification(target, source)
+      {:ok, 2}
+
+  """
+  def create_image_merge_notification(target, source) do
+    Creator.broadcast_notification(
+      from: {ImageSubscription, image_id: target.id},
+      into: ImageMergeNotification,
+      select: [target_id: target.id, source_id: source.id],
+      unique_key: :target_id
+    )
+  end
+
+  @doc """
+  Removes the channel live notification for a given channel and user, returning
+  the number of affected users.
+
+  ## Examples
+
+      iex> clear_channel_live_notification(channel, user)
+      {:ok, 2}
+
+  """
+  def clear_channel_live_notification(channel, user) do
+    ChannelLiveNotification
+    |> where(channel_id: ^channel.id)
+    |> delete_all_for_user(user)
+  end
+
+  @doc """
+  Removes the forum post notification for a given topic and user, returning
+  the number of affected notifications.
+
+  ## Examples
+
+      iex> clear_forum_post_notification(topic, user)
+      {:ok, 2}
+
+  """
+  def clear_forum_post_notification(topic, user) do
+    ForumPostNotification
+    |> where(topic_id: ^topic.id)
+    |> delete_all_for_user(user)
+  end
+
+  @doc """
+  Removes the forum topic notification for a given topic and user, returning
+  the number of affected notifications.
+
+  ## Examples
+
+      iex> clear_forum_topic_notification(topic, user)
+      {:ok, 2}
+
+  """
+  def clear_forum_topic_notification(topic, user) do
+    ForumTopicNotification
+    |> where(topic_id: ^topic.id)
+    |> delete_all_for_user(user)
+  end
+
+  @doc """
+  Removes the gallery image notification for a given gallery and user, returning
+  the number of affected notifications.
+
+  ## Examples
+
+      iex> clear_gallery_image_notification(topic, user)
+      {:ok, 2}
+
+  """
+  def clear_gallery_image_notification(gallery, user) do
+    GalleryImageNotification
+    |> where(gallery_id: ^gallery.id)
+    |> delete_all_for_user(user)
+  end
+
+  @doc """
+  Removes the image comment notification for a given image and user, returning
+  the number of affected notifications.
+
+  ## Examples
+
+      iex> clear_gallery_image_notification(topic, user)
+      {:ok, 2}
+
+  """
+  def clear_image_comment_notification(image, user) do
+    ImageCommentNotification
+    |> where(image_id: ^image.id)
+    |> delete_all_for_user(user)
+  end
+
+  @doc """
+  Removes the image merge notification for a given image and user, returning
+  the number of affected notifications.
+
+  ## Examples
+
+      iex> clear_image_merge_notification(topic, user)
+      {:ok, 2}
+
+  """
+  def clear_image_merge_notification(image, user) do
+    ImageMergeNotification
+    |> where(target_id: ^image.id)
+    |> delete_all_for_user(user)
+  end
+
+  #
+  # Clear all unread notifications using the given query.
+  #
+  # Returns `{:ok, count}`, where `count` is the number of affected rows.
+  #
+  defp delete_all_for_user(query, user) do
+    if user do
+      {count, nil} =
+        query
+        |> where(user_id: ^user.id)
+        |> Repo.delete_all()
+
+      {:ok, count}
+    else
+      {:ok, 0}
+    end
+  end
 end
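With this rewrite, reads go through `Category` and writes go through `Creator`, and clearing a notification is a plain delete on the per-type table. A hypothetical controller-side sequence built only from the functions above (the `image`, `user` variables and the resulting count are illustrative):

    # When a user opens an image page, drop their unread comment notification
    # for that image, then re-read the total unread count for the site header.
    {:ok, _cleared} = Philomena.Notifications.clear_image_comment_notification(image, user)
    unread_count = Philomena.Notifications.total_unread_notification_count(user)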
lib/philomena/notifications/category.ex (new file, 166 lines)

defmodule Philomena.Notifications.Category do
  @moduledoc """
  Notification category querying.
  """

  import Ecto.Query, warn: false
  alias Philomena.Repo

  alias Philomena.Notifications.ChannelLiveNotification
  alias Philomena.Notifications.ForumPostNotification
  alias Philomena.Notifications.ForumTopicNotification
  alias Philomena.Notifications.GalleryImageNotification
  alias Philomena.Notifications.ImageCommentNotification
  alias Philomena.Notifications.ImageMergeNotification

  @type t ::
          :channel_live
          | :forum_post
          | :forum_topic
          | :gallery_image
          | :image_comment
          | :image_merge

  @doc """
  Return a list of all supported categories.
  """
  def categories do
    [
      :channel_live,
      :forum_post,
      :forum_topic,
      :gallery_image,
      :image_comment,
      :image_merge
    ]
  end

  @doc """
  Return the count of all currently unread notifications for the user in all categories.

  ## Examples

      iex> total_unread_notification_count(user)
      15

  """
  def total_unread_notification_count(user) do
    categories()
    |> Enum.map(fn category ->
      category
      |> query_for_category_and_user(user)
      |> exclude(:preload)
      |> select([_], %{one: 1})
    end)
    |> union_all_queries()
    |> Repo.aggregate(:count)
  end

  defp union_all_queries([query | rest]) do
    Enum.reduce(rest, query, fn q, acc -> union_all(acc, ^q) end)
  end

  @doc """
  Gather up and return the top N notifications for the user, for each category of
  unread notification currently existing.

  ## Examples

      iex> unread_notifications_for_user(user, page_size: 10)
      [
        channel_live: [],
        forum_post: [%ForumPostNotification{...}, ...],
        forum_topic: [%ForumTopicNotification{...}, ...],
        gallery_image: [],
        image_comment: [%ImageCommentNotification{...}, ...],
        image_merge: []
      ]

  """
  def unread_notifications_for_user(user, pagination) do
    Enum.map(categories(), fn category ->
      results =
        category
        |> query_for_category_and_user(user)
        |> order_by(desc: :updated_at)
        |> Repo.paginate(pagination)

      {category, results}
    end)
  end

  @doc """
  Returns paginated unread notifications for the user, given the category.

  ## Examples

      iex> unread_notifications_for_user_and_category(user, :image_comment)
      [%ImageCommentNotification{...}]

  """
  def unread_notifications_for_user_and_category(user, category, pagination) do
    category
    |> query_for_category_and_user(user)
    |> order_by(desc: :updated_at)
    |> Repo.paginate(pagination)
  end

  @doc """
  Determine the category of a notification.

  ## Examples

      iex> notification_category(%ImageCommentNotification{})
      :image_comment

  """
  def notification_category(n) do
    case n.__struct__ do
      ChannelLiveNotification -> :channel_live
      GalleryImageNotification -> :gallery_image
      ImageCommentNotification -> :image_comment
      ImageMergeNotification -> :image_merge
      ForumPostNotification -> :forum_post
      ForumTopicNotification -> :forum_topic
    end
  end

  @doc """
  Returns an `m:Ecto.Query` that finds unread notifications for the given category,
  for the given user, with preloads applied.

  ## Examples

      iex> query_for_category_and_user(:channel_live, user)
      #Ecto.Query<from c0 in ChannelLiveNotification, where: c0.user_id == ^1, preload: [:channel]>

  """
  def query_for_category_and_user(category, user) do
    query =
      case category do
        :channel_live ->
          from(n in ChannelLiveNotification, preload: :channel)

        :gallery_image ->
          from(n in GalleryImageNotification, preload: [gallery: :creator])

        :image_comment ->
          from(n in ImageCommentNotification,
            preload: [image: [:sources, tags: :aliases], comment: :user]
          )

        :image_merge ->
          from(n in ImageMergeNotification,
            preload: [:source, target: [:sources, tags: :aliases]]
          )

        :forum_topic ->
          from(n in ForumTopicNotification, preload: [topic: [:forum, :user]])

        :forum_post ->
          from(n in ForumPostNotification, preload: [topic: :forum, post: :user])
      end

    where(query, user_id: ^user.id)
  end
end
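The total count is computed as a single query: each per-category query is reduced to a constant row via `select([_], %{one: 1})`, the queries are chained with `union_all/2`, and the union is counted once, rather than issuing one aggregate per table. A rough standalone sketch of the same pattern, assuming two arbitrary Ecto schemas `A` and `B` and a `Repo` module (names are illustrative):

    import Ecto.Query

    queries = [from(a in A, select: %{one: 1}), from(b in B, select: %{one: 1})]
    [first | rest] = queries

    # Chain the queries into one UNION ALL and count the combined rows once.
    union = Enum.reduce(rest, first, fn q, acc -> union_all(acc, ^q) end)
    Repo.aggregate(union, :count)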
lib/philomena/notifications/channel_live_notification.ex (new file, 17 lines)

defmodule Philomena.Notifications.ChannelLiveNotification do
  use Ecto.Schema

  alias Philomena.Users.User
  alias Philomena.Channels.Channel

  @primary_key false

  schema "channel_live_notifications" do
    belongs_to :user, User, primary_key: true
    belongs_to :channel, Channel, primary_key: true

    field :read, :boolean, default: false

    timestamps(inserted_at: :created_at, type: :utc_datetime)
  end
end
lib/philomena/notifications/creator.ex (new file, 92 lines)

defmodule Philomena.Notifications.Creator do
  @moduledoc """
  Internal notifications creation logic.
  """

  import Ecto.Query, warn: false
  alias Philomena.Repo

  @doc """
  Propagate notifications for a notification table type.

  Returns `{:ok, count}`, where `count` is the number of affected rows.

  ## Examples

      iex> broadcast_notification(
      ...>   from: {GallerySubscription, gallery_id: gallery.id},
      ...>   into: GalleryImageNotification,
      ...>   select: [gallery_id: gallery.id],
      ...>   unique_key: :gallery_id
      ...> )
      {:ok, 2}

      iex> broadcast_notification(
      ...>   notification_author: user,
      ...>   from: {ImageSubscription, image_id: image.id},
      ...>   into: ImageCommentNotification,
      ...>   select: [image_id: image.id, comment_id: comment.id],
      ...>   unique_key: :image_id
      ...> )
      {:ok, 2}

  """
  def broadcast_notification(opts) do
    opts = Keyword.validate!(opts, [:notification_author, :from, :into, :select, :unique_key])

    notification_author = Keyword.get(opts, :notification_author, nil)
    {subscription_schema, filters} = Keyword.fetch!(opts, :from)
    notification_schema = Keyword.fetch!(opts, :into)
    select_keywords = Keyword.fetch!(opts, :select)
    unique_key = Keyword.fetch!(opts, :unique_key)

    subscription_schema
    |> subscription_query(notification_author)
    |> where(^filters)
    |> convert_to_notification(select_keywords)
    |> insert_notifications(notification_schema, unique_key)
  end

  defp convert_to_notification(subscription, extra) do
    now = dynamic([_], type(^DateTime.utc_now(:second), :utc_datetime))

    base = %{
      user_id: dynamic([s], s.user_id),
      created_at: now,
      updated_at: now,
      read: false
    }

    extra =
      Map.new(extra, fn {field, value} ->
        {field, dynamic([_], type(^value, :integer))}
      end)

    from(subscription, select: ^Map.merge(base, extra))
  end

  defp subscription_query(subscription, notification_author) do
    case notification_author do
      %{id: user_id} ->
        # Avoid sending notifications to the user which performed the action.
        from s in subscription,
          where: s.user_id != ^user_id

      _ ->
        # When not created by a user, send notifications to all subscribers.
        subscription
    end
  end

  defp insert_notifications(query, notification, unique_key) do
    {count, nil} =
      Repo.insert_all(
        notification,
        query,
        on_conflict: {:replace_all_except, [:created_at]},
        conflict_target: [unique_key, :user_id]
      )

    {:ok, count}
  end
end
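`insert_notifications/3` is effectively an upsert keyed on `{unique_key, :user_id}`: a conflicting row is overwritten except for `created_at`, so an existing notification is refreshed (its `updated_at` moves forward and `read` resets to `false`) instead of being duplicated. A sketch of the equivalent standalone call, assuming a `rows` list shaped like the query's select (the schema and key names below are one of the cases above, chosen for illustration):

    Philomena.Repo.insert_all(
      Philomena.Notifications.GalleryImageNotification,
      rows,
      # Refresh existing rows rather than inserting duplicates.
      on_conflict: {:replace_all_except, [:created_at]},
      conflict_target: [:gallery_id, :user_id]
    )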
lib/philomena/notifications/forum_post_notification.ex (new file, 19 lines)

defmodule Philomena.Notifications.ForumPostNotification do
  use Ecto.Schema

  alias Philomena.Users.User
  alias Philomena.Topics.Topic
  alias Philomena.Posts.Post

  @primary_key false

  schema "forum_post_notifications" do
    belongs_to :user, User, primary_key: true
    belongs_to :topic, Topic, primary_key: true
    belongs_to :post, Post

    field :read, :boolean, default: false

    timestamps(inserted_at: :created_at, type: :utc_datetime)
  end
end
lib/philomena/notifications/forum_topic_notification.ex (new file, 17 lines)

defmodule Philomena.Notifications.ForumTopicNotification do
  use Ecto.Schema

  alias Philomena.Users.User
  alias Philomena.Topics.Topic

  @primary_key false

  schema "forum_topic_notifications" do
    belongs_to :user, User, primary_key: true
    belongs_to :topic, Topic, primary_key: true

    field :read, :boolean, default: false

    timestamps(inserted_at: :created_at, type: :utc_datetime)
  end
end
lib/philomena/notifications/gallery_image_notification.ex (new file, 17 lines)

defmodule Philomena.Notifications.GalleryImageNotification do
  use Ecto.Schema

  alias Philomena.Users.User
  alias Philomena.Galleries.Gallery

  @primary_key false

  schema "gallery_image_notifications" do
    belongs_to :user, User, primary_key: true
    belongs_to :gallery, Gallery, primary_key: true

    field :read, :boolean, default: false

    timestamps(inserted_at: :created_at, type: :utc_datetime)
  end
end
lib/philomena/notifications/image_comment_notification.ex (new file, 19 lines)

defmodule Philomena.Notifications.ImageCommentNotification do
  use Ecto.Schema

  alias Philomena.Users.User
  alias Philomena.Images.Image
  alias Philomena.Comments.Comment

  @primary_key false

  schema "image_comment_notifications" do
    belongs_to :user, User, primary_key: true
    belongs_to :image, Image, primary_key: true
    belongs_to :comment, Comment

    field :read, :boolean, default: false

    timestamps(inserted_at: :created_at, type: :utc_datetime)
  end
end
lib/philomena/notifications/image_merge_notification.ex (new file, 18 lines)

defmodule Philomena.Notifications.ImageMergeNotification do
  use Ecto.Schema

  alias Philomena.Users.User
  alias Philomena.Images.Image

  @primary_key false

  schema "image_merge_notifications" do
    belongs_to :user, User, primary_key: true
    belongs_to :target, Image, primary_key: true
    belongs_to :source, Image

    field :read, :boolean, default: false

    timestamps(inserted_at: :created_at, type: :utc_datetime)
  end
end
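Each of these per-type schemas assumes a backing table with a composite primary key on the user plus the subscribed object, which also serves as the conflict target used by `Creator.broadcast_notification/1`. A hypothetical migration for one of them, as a sketch only (the migration module, table and column names are assumptions inferred from the schema above, not shown in this diff):

    defmodule Philomena.Repo.Migrations.CreateGalleryImageNotifications do
      use Ecto.Migration

      def change do
        create table(:gallery_image_notifications, primary_key: false) do
          # Composite primary key: one row per (user, gallery) pair.
          add :user_id, references(:users), null: false, primary_key: true
          add :gallery_id, references(:galleries), null: false, primary_key: true
          add :read, :boolean, default: false, null: false

          timestamps(inserted_at: :created_at, type: :utc_datetime)
        end

        create index(:gallery_image_notifications, [:user_id])
      end
    end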
@@ -1,26 +0,0 @@
-defmodule Philomena.Notifications.Notification do
-  use Ecto.Schema
-  import Ecto.Changeset
-
-  schema "notifications" do
-    field :action, :string
-
-    # fixme: rails polymorphic relation
-    field :actor_id, :integer
-    field :actor_type, :string
-    field :actor_child_id, :integer
-    field :actor_child_type, :string
-
-    field :actor, :any, virtual: true
-    field :actor_child, :any, virtual: true
-
-    timestamps(inserted_at: :created_at, type: :utc_datetime)
-  end
-
-  @doc false
-  def changeset(notification, attrs) do
-    notification
-    |> cast(attrs, [:actor_id, :actor_type, :actor_child_id, :actor_child_type, :action])
-    |> validate_required([:actor_id, :actor_type, :action])
-  end
-end
@@ -1,21 +0,0 @@
-defmodule Philomena.Notifications.UnreadNotification do
-  use Ecto.Schema
-  import Ecto.Changeset
-
-  alias Philomena.Users.User
-  alias Philomena.Notifications.Notification
-
-  @primary_key false
-
-  schema "unread_notifications" do
-    belongs_to :user, User, primary_key: true
-    belongs_to :notification, Notification, primary_key: true
-  end
-
-  @doc false
-  def changeset(unread_notification, attrs) do
-    unread_notification
-    |> cast(attrs, [])
-    |> validate_required([])
-  end
-end
@@ -41,7 +41,7 @@ defmodule Philomena.PollVotes do

   """
   def create_poll_votes(user, poll, attrs) do
-    now = DateTime.utc_now() |> DateTime.truncate(:second)
+    now = DateTime.utc_now(:second)
     poll_votes = filter_options(user, poll, now, attrs)

     Multi.new()
@@ -51,7 +51,7 @@ defmodule Philomena.Polls do
   """
   def create_poll(attrs \\ %{}) do
     %Poll{}
-    |> Poll.update_changeset(attrs)
+    |> Poll.changeset(attrs)
     |> Repo.insert()
   end

@@ -69,7 +69,7 @@ defmodule Philomena.Polls do
   """
   def update_poll(%Poll{} = poll, attrs) do
     poll
-    |> Poll.update_changeset(attrs)
+    |> Poll.changeset(attrs)
     |> Repo.update()
   end

@@ -3,22 +3,16 @@ defmodule Philomena.Polls.Poll do
   import Ecto.Changeset

   alias Philomena.Topics.Topic
-  alias Philomena.Users.User
   alias Philomena.PollOptions.PollOption
-  alias Philomena.Schema.Time

   schema "polls" do
     belongs_to :topic, Topic
-    belongs_to :deleted_by, User
     has_many :options, PollOption

     field :title, :string
     field :vote_method, :string
-    field :active_until, :utc_datetime
+    field :active_until, PhilomenaQuery.Ecto.RelativeDate
     field :total_votes, :integer, default: 0
-    field :hidden_from_users, :boolean, default: false
-    field :deletion_reason, :string, default: ""
-    field :until, :string, virtual: true

     timestamps(inserted_at: :created_at, type: :utc_datetime)
   end
@@ -26,16 +20,7 @@ defmodule Philomena.Polls.Poll do
   @doc false
   def changeset(poll, attrs) do
     poll
-    |> cast(attrs, [])
-    |> validate_required([])
-    |> Time.propagate_time(:active_until, :until)
-  end
-
-  @doc false
-  def update_changeset(poll, attrs) do
-    poll
-    |> cast(attrs, [:title, :until, :vote_method])
-    |> Time.assign_time(:until, :active_until)
+    |> cast(attrs, [:title, :active_until, :vote_method])
     |> validate_required([:title, :active_until, :vote_method])
     |> validate_length(:title, max: 140, count: :bytes)
     |> validate_inclusion(:vote_method, ["single", "multiple"])
@@ -16,11 +16,8 @@ defmodule Philomena.Posts do
   alias Philomena.IndexWorker
   alias Philomena.Forums.Forum
   alias Philomena.Notifications
-  alias Philomena.NotificationWorker
   alias Philomena.Versions
   alias Philomena.Reports
-  alias Philomena.Reports.Report
-  alias Philomena.Users.User

   @doc """
   Gets a single post.
@@ -51,7 +48,7 @@ defmodule Philomena.Posts do

   """
   def create_post(topic, attributes, params \\ %{}) do
-    now = DateTime.utc_now()
+    now = DateTime.utc_now(:second)

     topic_query =
       Topic
@@ -66,7 +63,7 @@ defmodule Philomena.Posts do
       |> where(id: ^topic.forum_id)

     Multi.new()
-    |> Multi.all(:topic_lock, topic_lock_query)
+    |> Multi.one(:topic, topic_lock_query)
     |> Multi.run(:post, fn repo, _ ->
       last_position =
         Post
@@ -95,7 +92,8 @@ defmodule Philomena.Posts do

       {:ok, count}
     end)
-    |> maybe_create_subscription_on_reply(topic, attributes[:user])
+    |> Multi.run(:notification, &notify_post/2)
+    |> Topics.maybe_subscribe_on(:topic, attributes[:user], :watch_on_reply)
    |> Repo.transaction()
    |> case do
      {:ok, %{post: post}} = result ->
@@ -108,58 +106,20 @@ defmodule Philomena.Posts do
     end
   end

-  defp maybe_create_subscription_on_reply(multi, topic, %User{watch_on_reply: true} = user) do
-    multi
-    |> Multi.run(:subscribe, fn _repo, _changes ->
-      Topics.create_subscription(topic, user)
-    end)
-  end
-
-  defp maybe_create_subscription_on_reply(multi, _topic, _user) do
-    multi
-  end
-
-  def notify_post(post) do
-    Exq.enqueue(Exq, "notifications", NotificationWorker, ["Posts", post.id])
+  defp notify_post(_repo, %{post: post, topic: topic}) do
+    Notifications.create_forum_post_notification(post.user, topic, post)
   end

   def report_non_approved(%Post{approved: true}), do: false

   def report_non_approved(post) do
     Reports.create_system_report(
-      post.id,
-      "Post",
+      {"Post", post.id},
       "Approval",
       "Post contains externally-embedded images and has been flagged for review."
     )
   end

-  def perform_notify(post_id) do
-    post = get_post!(post_id)
-
-    topic =
-      post
-      |> Repo.preload(:topic)
-      |> Map.fetch!(:topic)
-
-    subscriptions =
-      topic
-      |> Repo.preload(:subscriptions)
-      |> Map.fetch!(:subscriptions)
-
-    Notifications.notify(
-      post,
-      subscriptions,
-      %{
-        actor_id: topic.id,
-        actor_type: "Topic",
-        actor_child_id: post.id,
-        actor_child_type: "Post",
-        action: "posted a new reply in"
-      }
-    )
-  end
-
   @doc """
   Updates a post.

@@ -173,7 +133,7 @@ defmodule Philomena.Posts do

   """
   def update_post(%Post{} = post, editor, attrs) do
-    now = DateTime.utc_now() |> DateTime.truncate(:second)
+    now = DateTime.utc_now(:second)
     current_body = post.body
     current_reason = post.edit_reason

@@ -216,11 +176,7 @@ defmodule Philomena.Posts do
   end

   def hide_post(%Post{} = post, attrs, user) do
-    reports =
-      Report
-      |> where(reportable_type: "Post", reportable_id: ^post.id)
-      |> select([r], r.id)
-      |> update(set: [open: false, state: "closed", admin_id: ^user.id])
+    report_query = Reports.close_report_query({"Post", post.id}, user)

     topics =
       Topic
@@ -236,7 +192,7 @@ defmodule Philomena.Posts do

     Multi.new()
     |> Multi.update(:post, post)
-    |> Multi.update_all(:reports, reports, [])
+    |> Multi.update_all(:reports, report_query, [])
     |> Multi.update_all(:topics, topics, [])
     |> Multi.update_all(:forums, forums, [])
     |> Repo.transaction()
@@ -267,21 +223,15 @@ defmodule Philomena.Posts do
   end

   def approve_post(%Post{} = post, user) do
-    reports =
-      Report
-      |> where(reportable_type: "Post", reportable_id: ^post.id)
-      |> select([r], r.id)
-      |> update(set: [open: false, state: "closed", admin_id: ^user.id])
+    report_query = Reports.close_report_query({"Post", post.id}, user)

     post = Post.approve_changeset(post)

     Multi.new()
     |> Multi.update(:post, post)
-    |> Multi.update_all(:reports, reports, [])
+    |> Multi.update_all(:reports, report_query, [])
     |> Repo.transaction()
     |> case do
       {:ok, %{post: post, reports: {_count, reports}}} ->
-        notify_post(post)
         UserStatistics.inc_stat(post.user, :forum_posts)
         Reports.reindex_reports(reports)
         reindex_post(post)
@@ -15,15 +15,12 @@ defmodule Philomena.Posts.Post do
     field :edit_reason, :string
     field :ip, EctoNetwork.INET
     field :fingerprint, :string
-    field :user_agent, :string, default: ""
-    field :referrer, :string, default: ""
     field :topic_position, :integer
     field :hidden_from_users, :boolean, default: false
     field :anonymous, :boolean, default: false
     field :edited_at, :utc_datetime
     field :deletion_reason, :string, default: ""
     field :destroyed_content, :boolean, default: false
-    field :name_at_post_time, :string
     field :approved, :boolean, default: false

     timestamps(inserted_at: :created_at, type: :utc_datetime)
@@ -47,7 +44,6 @@ defmodule Philomena.Posts.Post do
     |> validate_required([:body])
     |> validate_length(:body, min: 1, max: 300_000, count: :bytes)
     |> change(attribution)
-    |> put_name_at_post_time(attribution[:user])
     |> Approval.maybe_put_approval(attribution[:user])
     |> Approval.maybe_strip_images(attribution[:user])
   end
@@ -61,7 +57,6 @@ defmodule Philomena.Posts.Post do
     |> validate_length(:body, min: 1, max: 300_000, count: :bytes)
     |> change(attribution)
     |> change(topic_position: 0)
-    |> put_name_at_post_time(attribution[:user])
     |> Approval.maybe_put_approval(attribution[:user])
     |> Approval.maybe_strip_images(attribution[:user])
   end
@@ -90,7 +85,4 @@ defmodule Philomena.Posts.Post do
     change(post)
     |> put_change(:approved, true)
   end
-
-  defp put_name_at_post_time(changeset, nil), do: changeset
-  defp put_name_at_post_time(changeset, user), do: change(changeset, name_at_post_time: user.name)
 end
@@ -90,8 +90,8 @@ defmodule Philomena.Posts.Query do
     |> Parser.parse(query_string, context)
   end

-  def compile(user, query_string) do
-    query_string = query_string || ""
+  def compile(query_string, opts \\ []) do
+    user = Keyword.get(opts, :user)

     case user do
       nil ->
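The query compiler now takes the query string first and an options keyword list, with the user passed as an option rather than a leading positional argument. A hypothetical call site under that assumption (`current_user` is an illustrative variable, not part of this diff):

    # With a signed-in user, permissions-aware fields become available;
    # anonymous searches can omit the option entirely.
    Philomena.Posts.Query.compile("body:changelog", user: current_user)
    Philomena.Posts.Query.compile("body:changelog")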
@@ -52,8 +52,8 @@ defmodule Philomena.Posts.SearchIndex do
       author: if(!!post.user and !post.anonymous, do: String.downcase(post.user.name)),
       subject: post.topic.title,
       ip: post.ip |> to_string(),
-      user_agent: post.user_agent,
-      referrer: post.referrer,
+      user_agent: "",
+      referrer: "",
       fingerprint: post.fingerprint,
       topic_position: post.topic_position,
       forum: post.topic.forum.short_name,
@@ -12,6 +12,31 @@ defmodule Philomena.Reports do
   alias Philomena.IndexWorker
   alias Philomena.Polymorphic

+  @doc """
+  Returns the current number of open reports.
+
+  If the user is allowed to view reports, returns the current count.
+  If the user is not allowed to view reports, returns `nil`.
+
+  ## Examples
+
+      iex> count_reports(%User{})
+      nil
+
+      iex> count_reports(%User{role: "admin"})
+      4
+
+  """
+  def count_open_reports(user) do
+    if Canada.Can.can?(user, :index, Report) do
+      Report
+      |> where(open: true)
+      |> Repo.aggregate(:count)
+    else
+      nil
+    end
+  end
+
   @doc """
   Returns the list of reports.

@@ -53,14 +78,59 @@ defmodule Philomena.Reports do
       {:error, %Ecto.Changeset{}}

   """
-  def create_report(reportable_id, reportable_type, attribution, attrs \\ %{}) do
-    %Report{reportable_id: reportable_id, reportable_type: reportable_type}
+  def create_report({reportable_type, reportable_id} = _type_and_id, attribution, attrs \\ %{}) do
+    %Report{reportable_type: reportable_type, reportable_id: reportable_id}
     |> Report.creation_changeset(attrs, attribution)
     |> Repo.insert()
     |> reindex_after_update()
   end

-  def create_system_report(reportable_id, reportable_type, category, reason) do
+  @doc """
+  Returns an `m:Ecto.Query` which updates all reports for the given `reportable_type`
+  and `reportable_id` to close them.
+
+  Because this is only a query due to the limitations of `m:Ecto.Multi`, this must be
+  coupled with an associated call to `reindex_reports/1` to operate correctly, e.g.:
+
+      report_query = Reports.close_system_report_query({"Image", image.id}, user)
+
+      Multi.new()
+      |> Multi.update_all(:reports, report_query, [])
+      |> Repo.transaction()
+      |> case do
+        {:ok, %{reports: {_count, reports}} = result} ->
+          Reports.reindex_reports(reports)
+
+          {:ok, result}
+
+        error ->
+          error
+      end
+
+  ## Examples
+
+      iex> close_system_report_query("Image", 1, %User{})
+      #Ecto.Query<...>
+
+  """
+  def close_report_query({reportable_type, reportable_id} = _type_and_id, closing_user) do
+    from r in Report,
+      where: r.reportable_type == ^reportable_type and r.reportable_id == ^reportable_id,
+      select: r.id,
+      update: [set: [open: false, state: "closed", admin_id: ^closing_user.id]]
+  end
+
+  @doc """
+  Automatically create a report with the given category and reason on the given
+  `reportable_id` and `reportable_type`.
+
+  ## Examples
+
+      iex> create_system_report({"Comment", 1}, "Other", "Custom report reason")
+      {:ok, %Report{}}
+
+  """
+  def create_system_report({reportable_type, reportable_id} = _type_and_id, category, reason) do
     attrs = %{
       reason: reason,
       category: category
@@ -69,12 +139,10 @@ defmodule Philomena.Reports do
     attributes = %{
       system: true,
       ip: %Postgrex.INET{address: {127, 0, 0, 1}, netmask: 32},
-      fingerprint: "ffff",
-      user_agent:
-        "Mozilla/5.0 (X11; Philomena; Linux x86_64; rv:86.0) Gecko/20100101 Firefox/86.0"
+      fingerprint: "ffff"
     }

-    %Report{reportable_id: reportable_id, reportable_type: reportable_type}
+    %Report{reportable_type: reportable_type, reportable_id: reportable_id}
     |> Report.creation_changeset(attrs, attributes)
     |> Repo.insert()
     |> reindex_after_update()
@@ -128,6 +196,15 @@ defmodule Philomena.Reports do
     Report.changeset(report, %{})
   end

+  @doc """
+  Marks the report as claimed by the given user.
+
+  ## Example
+
+      iex> claim_report(%Report{}, %User{})
+      {:ok, %Report{}}
+
+  """
   def claim_report(%Report{} = report, user) do
     report
     |> Report.claim_changeset(user)
@@ -135,6 +212,15 @@ defmodule Philomena.Reports do
     |> reindex_after_update()
   end

+  @doc """
+  Marks the report as unclaimed.
+
+  ## Example
+
+      iex> unclaim_report(%Report{})
+      {:ok, %Report{}}
+
+  """
   def unclaim_report(%Report{} = report) do
     report
     |> Report.unclaim_changeset()
@@ -142,6 +228,15 @@ defmodule Philomena.Reports do
     |> reindex_after_update()
   end

+  @doc """
+  Marks the report as closed by the given user.
+
+  ## Example
+
+      iex> close_report(%Report{}, %User{})
+      {:ok, %Report{}}
+
+  """
   def close_report(%Report{} = report, user) do
     report
     |> Report.close_changeset(user)
@@ -149,6 +244,15 @@ defmodule Philomena.Reports do
     |> reindex_after_update()
   end

+  @doc """
+  Reindex all reports where the user or admin has `old_name`.
+
+  ## Example
+
+      iex> user_name_reindex("Administrator", "Administrator2")
+      {:ok, %Req.Response{}}
+
+  """
   def user_name_reindex(old_name, new_name) do
     data = ReportIndex.user_name_update_by_query(old_name, new_name)

@@ -165,18 +269,25 @@ defmodule Philomena.Reports do
     result
   end

+  @doc """
+  Callback for post-transaction update.
+
+  See `close_report_query/2` for more information and example.
+  """
   def reindex_reports(report_ids) do
     Exq.enqueue(Exq, "indexing", IndexWorker, ["Reports", "id", report_ids])

     report_ids
   end

+  @doc false
   def reindex_report(%Report{} = report) do
     Exq.enqueue(Exq, "indexing", IndexWorker, ["Reports", "id", [report.id]])

     report
   end

+  @doc false
   def perform_reindex(column, condition) do
     Report
     |> where([r], field(r, ^column) in ^condition)
@@ -185,14 +296,4 @@ defmodule Philomena.Reports do
     |> Polymorphic.load_polymorphic(reportable: [reportable_id: :reportable_type])
     |> Enum.map(&Search.index_document(&1, Report))
   end
-
-  def count_reports(user) do
-    if Canada.Can.can?(user, :index, Report) do
-      Report
-      |> where(open: true)
-      |> Repo.aggregate(:count, :id)
-    else
-      nil
-    end
-  end
 end
@@ -17,6 +17,6 @@ defmodule Philomena.Reports.Query do
   def compile(query_string) do
     fields()
     |> Parser.new()
-    |> Parser.parse(query_string || "", %{})
+    |> Parser.parse(query_string, %{})
   end
 end