Mirror of https://github.com/philomena-dev/philomena.git, synced 2024-11-27 05:37:59 +01:00

Merge remote-tracking branch 'origin/master' into redesign
This commit is contained in: commit 67a904cb98
236 changed files with 5647 additions and 3637 deletions
3  .github/workflows/elixir.yml  (vendored)
@@ -78,3 +78,6 @@ jobs:
      - run: npm run test
        working-directory: ./assets

      - run: npm run build
        working-directory: ./assets
11  assets/css/views/_notifications.scss  (new file)
@@ -0,0 +1,11 @@
.notification-type-block:not(:last-child) {
  margin-bottom: 20px;
}

.notification {
  margin-bottom: 0;
}

.notification:not(:last-child) {
  border-bottom: 0;
}
@@ -125,7 +125,7 @@ export default tsEslint.config(
    'no-irregular-whitespace': 2,
    'no-iterator': 2,
    'no-label-var': 2,
    'no-labels': 2,
    'no-labels': [2, { allowSwitch: true, allowLoop: true }],
    'no-lone-blocks': 2,
    'no-lonely-if': 0,
    'no-loop-func': 2,
@@ -8,17 +8,17 @@ describe('Input duplicator functionality', () => {
    document.documentElement.insertAdjacentHTML(
      'beforeend',
      `<form action="/">
      <div class="js-max-input-count">3</div>
      <div class="js-input-source">
        <input id="0" name="0" class="js-input" type="text"/>
        <label>
          <a href="#" class="js-remove-input">Delete</a>
        </label>
      </div>
      <div class="js-button-container">
        <button type="button" class="js-add-input">Add input</button>
      </div>
    </form>`,
        <div class="js-max-input-count">3</div>
        <div class="js-input-source">
          <input id="0" name="0" class="js-input" type="text"/>
          <label>
            <a href="#" class="js-remove-input">Delete</a>
          </label>
        </div>
        <div class="js-button-container">
          <button type="button" class="js-add-input">Add input</button>
        </div>
      </form>`,
    );
  });
@@ -25,6 +25,9 @@ const errorResponse = {
};
/* eslint-enable camelcase */

const tagSets = ['', 'a tag', 'safe', 'one, two, three', 'safe, explicit', 'safe, explicit, three', 'safe, two, three'];
const tagErrorCounts = [1, 2, 1, 1, 2, 1, 0];

describe('Image upload form', () => {
  let mockPng: File;
  let mockWebm: File;
@@ -58,18 +61,27 @@ describe('Image upload form', () => {
  let scraperError: HTMLDivElement;
  let fetchButton: HTMLButtonElement;
  let tagsEl: HTMLTextAreaElement;
  let taginputEl: HTMLDivElement;
  let sourceEl: HTMLInputElement;
  let descrEl: HTMLTextAreaElement;
  let submitButton: HTMLButtonElement;

  const assertFetchButtonIsDisabled = () => {
    if (!fetchButton.hasAttribute('disabled')) throw new Error('fetchButton is not disabled');
  };

  const assertSubmitButtonIsDisabled = () => {
    if (!submitButton.hasAttribute('disabled')) throw new Error('submitButton is not disabled');
  };

  const assertSubmitButtonIsEnabled = () => {
    if (submitButton.hasAttribute('disabled')) throw new Error('submitButton is disabled');
  };

  beforeEach(() => {
    document.documentElement.insertAdjacentHTML(
      'beforeend',
      `
      <form action="/images">
      `<form action="/images">
        <div id="js-image-upload-previews"></div>
        <input id="image_image" name="image[image]" type="file" class="js-scraper" />
        <input id="image_scraper_url" name="image[scraper_url]" type="url" class="js-scraper" />
@@ -78,9 +90,13 @@ describe('Image upload form', () => {

        <input id="image_sources_0_source" name="image[sources][0][source]" type="text" class="js-source-url" />
        <textarea id="image_tag_input" name="image[tag_input]" class="js-image-tags-input"></textarea>
        <div class="js-taginput"></div>
        <button id="tagsinput-save" type="button" class="button">Save</button>
        <textarea id="image_description" name="image[description]" class="js-image-descr-input"></textarea>
      </form>
      `,
        <div class="actions">
          <button class="button input--separate-top" type="submit">Upload</button>
        </div>
      </form>`,
    );

    form = assertNotNull($<HTMLFormElement>('form'));
@@ -89,9 +105,11 @@ describe('Image upload form', () => {
    remoteUrl = assertNotUndefined($$<HTMLInputElement>('.js-scraper')[1]);
    scraperError = assertNotUndefined($$<HTMLInputElement>('.js-scraper')[2]);
    tagsEl = assertNotNull($<HTMLTextAreaElement>('.js-image-tags-input'));
    taginputEl = assertNotNull($<HTMLDivElement>('.js-taginput'));
    sourceEl = assertNotNull($<HTMLInputElement>('.js-source-url'));
    descrEl = assertNotNull($<HTMLTextAreaElement>('.js-image-descr-input'));
    fetchButton = assertNotNull($<HTMLButtonElement>('#js-scraper-preview'));
    submitButton = assertNotNull($<HTMLButtonElement>('.actions > .button'));

    setupImageUpload();
    fetchMock.resetMocks();
@@ -195,4 +213,42 @@ describe('Image upload form', () => {
      expect(scraperError.innerText).toEqual('Error 1 Error 2');
    });
  });

  async function submitForm(frm: HTMLFormElement): Promise<boolean> {
    return new Promise(resolve => {
      function onSubmit() {
        frm.removeEventListener('submit', onSubmit);
        resolve(true);
      }

      frm.addEventListener('submit', onSubmit);

      if (!fireEvent.submit(frm)) {
        frm.removeEventListener('submit', onSubmit);
        resolve(false);
      }
    });
  }

  it('should prevent form submission if tag checks fail', async () => {
    for (let i = 0; i < tagSets.length; i += 1) {
      taginputEl.innerText = tagSets[i];

      if (await submitForm(form)) {
        // form submit succeeded
        await waitFor(() => {
          assertSubmitButtonIsDisabled();
          const succeededUnloadEvent = new Event('beforeunload', { cancelable: true });
          expect(fireEvent(window, succeededUnloadEvent)).toBe(true);
        });
      } else {
        // form submit prevented
        const frm = form;
        await waitFor(() => {
          assertSubmitButtonIsEnabled();
          expect(frm.querySelectorAll('.help-block')).toHaveLength(tagErrorCounts[i]);
        });
      }
    }
  });
});
@@ -1,296 +0,0 @@
/**
 * Autocomplete.
 */

import { LocalAutocompleter } from './utils/local-autocompleter';
import { handleError } from './utils/requests';
import { getTermContexts } from './match_query';
import store from './utils/store';

const cache = {};
/** @type {HTMLInputElement} */
let inputField,
  /** @type {string} */
  originalTerm,
  /** @type {string} */
  originalQuery,
  /** @type {TermContext} */
  selectedTerm;

function removeParent() {
  const parent = document.querySelector('.autocomplete');
  if (parent) parent.parentNode.removeChild(parent);
}

function removeSelected() {
  const selected = document.querySelector('.autocomplete__item--selected');
  if (selected) selected.classList.remove('autocomplete__item--selected');
}

function isSearchField() {
  return inputField && inputField.dataset.acMode === 'search';
}

function restoreOriginalValue() {
  inputField.value = isSearchField() ? originalQuery : originalTerm;
}

function applySelectedValue(selection) {
  if (!isSearchField()) {
    inputField.value = selection;
    return;
  }

  if (!selectedTerm) {
    return;
  }

  const [startIndex, endIndex] = selectedTerm[0];
  inputField.value = originalQuery.slice(0, startIndex) + selection + originalQuery.slice(endIndex);
  inputField.setSelectionRange(startIndex + selection.length, startIndex + selection.length);
  inputField.focus();
}

function changeSelected(firstOrLast, current, sibling) {
  if (current && sibling) {
    // if the currently selected item has a sibling, move selection to it
    current.classList.remove('autocomplete__item--selected');
    sibling.classList.add('autocomplete__item--selected');
  } else if (current) {
    // if the next keypress will take the user outside the list, restore the unautocompleted term
    restoreOriginalValue();
    removeSelected();
  } else if (firstOrLast) {
    // if no item in the list is selected, select the first or last
    firstOrLast.classList.add('autocomplete__item--selected');
  }
}

function isSelectionOutsideCurrentTerm() {
  const selectionIndex = Math.min(inputField.selectionStart, inputField.selectionEnd);
  const [startIndex, endIndex] = selectedTerm[0];

  return startIndex > selectionIndex || endIndex < selectionIndex;
}

function keydownHandler(event) {
  const selected = document.querySelector('.autocomplete__item--selected'),
    firstItem = document.querySelector('.autocomplete__item:first-of-type'),
    lastItem = document.querySelector('.autocomplete__item:last-of-type');

  if (isSearchField()) {
    // Prevent submission of the search field when Enter was hit
    if (selected && event.keyCode === 13) event.preventDefault(); // Enter

    // Close autocompletion popup when text cursor is outside current tag
    if (selectedTerm && firstItem && (event.keyCode === 37 || event.keyCode === 39)) {
      // ArrowLeft || ArrowRight
      requestAnimationFrame(() => {
        if (isSelectionOutsideCurrentTerm()) removeParent();
      });
    }
  }

  if (event.keyCode === 38) changeSelected(lastItem, selected, selected && selected.previousSibling); // ArrowUp
  if (event.keyCode === 40) changeSelected(firstItem, selected, selected && selected.nextSibling); // ArrowDown
  if (event.keyCode === 13 || event.keyCode === 27 || event.keyCode === 188) removeParent(); // Enter || Esc || Comma
  if (event.keyCode === 38 || event.keyCode === 40) {
    // ArrowUp || ArrowDown
    const newSelected = document.querySelector('.autocomplete__item--selected');
    if (newSelected) applySelectedValue(newSelected.dataset.value);
    event.preventDefault();
  }
}

function createItem(list, suggestion) {
  const item = document.createElement('li');
  item.className = 'autocomplete__item';

  item.textContent = suggestion.label;
  item.dataset.value = suggestion.value;

  item.addEventListener('mouseover', () => {
    removeSelected();
    item.classList.add('autocomplete__item--selected');
  });

  item.addEventListener('mouseout', () => {
    removeSelected();
  });

  item.addEventListener('click', () => {
    applySelectedValue(item.dataset.value);
    inputField.dispatchEvent(
      new CustomEvent('autocomplete', {
        detail: {
          type: 'click',
          label: suggestion.label,
          value: suggestion.value,
        },
      }),
    );
  });

  list.appendChild(item);
}

function createList(suggestions) {
  const parent = document.querySelector('.autocomplete'),
    list = document.createElement('ul');
  list.className = 'autocomplete__list';

  suggestions.forEach(suggestion => createItem(list, suggestion));

  parent.appendChild(list);
}

function createParent() {
  const parent = document.createElement('div');
  parent.className = 'autocomplete';

  // Position the parent below the inputfield
  parent.style.position = 'absolute';
  parent.style.left = `${inputField.offsetLeft}px`;
  // Take the inputfield offset, add its height and subtract the amount by which the parent element has scrolled
  parent.style.top = `${inputField.offsetTop + inputField.offsetHeight - inputField.parentNode.scrollTop}px`;

  // We append the parent at the end of body
  document.body.appendChild(parent);
}

function showAutocomplete(suggestions, fetchedTerm, targetInput) {
  // Remove old autocomplete suggestions
  removeParent();

  // Save suggestions in cache
  cache[fetchedTerm] = suggestions;

  // If the input target is not empty, still visible, and suggestions were found
  if (targetInput.value && targetInput.style.display !== 'none' && suggestions.length) {
    createParent();
    createList(suggestions);
    inputField.addEventListener('keydown', keydownHandler);
  }
}

function getSuggestions(term) {
  // In case source URL was not given at all, do not try sending the request.
  if (!inputField.dataset.acSource) return [];
  return fetch(`${inputField.dataset.acSource}${term}`).then(response => response.json());
}

function getSelectedTerm() {
  if (!inputField || !originalQuery) {
    return null;
  }

  const selectionIndex = Math.min(inputField.selectionStart, inputField.selectionEnd);
  const terms = getTermContexts(originalQuery);

  return terms.find(([range]) => range[0] < selectionIndex && range[1] >= selectionIndex);
}

function toggleSearchAutocomplete() {
  const enable = store.get('enable_search_ac');

  for (const searchField of document.querySelectorAll('input[data-ac-mode=search]')) {
    if (enable) {
      searchField.autocomplete = 'off';
    } else {
      searchField.removeAttribute('data-ac');
      searchField.autocomplete = 'on';
    }
  }
}

function listenAutocomplete() {
  let timeout;

  /** @type {LocalAutocompleter} */
  let localAc = null;
  let localFetched = false;

  document.addEventListener('focusin', fetchLocalAutocomplete);

  document.addEventListener('input', event => {
    removeParent();
    fetchLocalAutocomplete(event);
    window.clearTimeout(timeout);

    if (localAc !== null && 'ac' in event.target.dataset) {
      inputField = event.target;
      let suggestionsCount = 5;

      if (isSearchField()) {
        originalQuery = inputField.value;
        selectedTerm = getSelectedTerm();
        suggestionsCount = 10;

        // We don't need to run auto-completion if user is not selecting tag at all
        if (!selectedTerm) {
          return;
        }

        originalTerm = selectedTerm[1].toLowerCase();
      } else {
        originalTerm = `${inputField.value}`.toLowerCase();
      }

      const suggestions = localAc
        .topK(originalTerm, suggestionsCount)
        .map(({ name, imageCount }) => ({ label: `${name} (${imageCount})`, value: name }));

      if (suggestions.length) {
        return showAutocomplete(suggestions, originalTerm, event.target);
      }
    }

    // Use a timeout to delay requests until the user has stopped typing
    timeout = window.setTimeout(() => {
      inputField = event.target;
      originalTerm = inputField.value;

      const fetchedTerm = inputField.value;
      const { ac, acMinLength, acSource } = inputField.dataset;

      if (ac && acSource && fetchedTerm.length >= acMinLength) {
        if (cache[fetchedTerm]) {
          showAutocomplete(cache[fetchedTerm], fetchedTerm, event.target);
        } else {
          // inputField could get overwritten while the suggestions are being fetched - use event.target
          getSuggestions(fetchedTerm).then(suggestions => {
            if (fetchedTerm === event.target.value) {
              showAutocomplete(suggestions, fetchedTerm, event.target);
            }
          });
        }
      }
    }, 300);
  });

  // If there's a click outside the inputField, remove autocomplete
  document.addEventListener('click', event => {
    if (event.target && event.target !== inputField) removeParent();
    if (event.target === inputField && isSearchField() && isSelectionOutsideCurrentTerm()) removeParent();
  });

  function fetchLocalAutocomplete(event) {
    if (!localFetched && event.target.dataset && 'ac' in event.target.dataset) {
      const now = new Date();
      const cacheKey = `${now.getUTCFullYear()}-${now.getUTCMonth()}-${now.getUTCDate()}`;

      localFetched = true;

      fetch(`/autocomplete/compiled?vsn=2&key=${cacheKey}`, { credentials: 'omit', cache: 'force-cache' })
        .then(handleError)
        .then(resp => resp.arrayBuffer())
        .then(buf => {
          localAc = new LocalAutocompleter(buf);
        });
    }
  }

  toggleSearchAutocomplete();
}

export { listenAutocomplete };
230  assets/js/autocomplete.ts  (new file)
@@ -0,0 +1,230 @@
/**
 * Autocomplete.
 */

import { LocalAutocompleter } from './utils/local-autocompleter';
import { getTermContexts } from './match_query';
import store from './utils/store';
import { TermContext } from './query/lex';
import { $$ } from './utils/dom';
import { fetchLocalAutocomplete, fetchSuggestions, SuggestionsPopup, TermSuggestion } from './utils/suggestions';

let inputField: HTMLInputElement | null = null,
  originalTerm: string | undefined,
  originalQuery: string | undefined,
  selectedTerm: TermContext | null = null;

const popup = new SuggestionsPopup();

function isSearchField(targetInput: HTMLElement): boolean {
  return targetInput && targetInput.dataset.acMode === 'search';
}

function restoreOriginalValue() {
  if (!inputField) return;

  if (isSearchField(inputField) && originalQuery) {
    inputField.value = originalQuery;
  }

  if (originalTerm) {
    inputField.value = originalTerm;
  }
}

function applySelectedValue(selection: string) {
  if (!inputField) return;

  if (!isSearchField(inputField)) {
    inputField.value = selection;
    return;
  }

  if (selectedTerm && originalQuery) {
    const [startIndex, endIndex] = selectedTerm[0];
    inputField.value = originalQuery.slice(0, startIndex) + selection + originalQuery.slice(endIndex);
    inputField.setSelectionRange(startIndex + selection.length, startIndex + selection.length);
    inputField.focus();
  }
}

function isSelectionOutsideCurrentTerm(): boolean {
  if (!inputField || !selectedTerm) return true;
  if (inputField.selectionStart === null || inputField.selectionEnd === null) return true;

  const selectionIndex = Math.min(inputField.selectionStart, inputField.selectionEnd);
  const [startIndex, endIndex] = selectedTerm[0];

  return startIndex > selectionIndex || endIndex < selectionIndex;
}

function keydownHandler(event: KeyboardEvent) {
  if (inputField !== event.currentTarget) return;

  if (inputField && isSearchField(inputField)) {
    // Prevent submission of the search field when Enter was hit
    if (popup.selectedTerm && event.keyCode === 13) event.preventDefault(); // Enter

    // Close autocompletion popup when text cursor is outside current tag
    if (selectedTerm && (event.keyCode === 37 || event.keyCode === 39)) {
      // ArrowLeft || ArrowRight
      requestAnimationFrame(() => {
        if (isSelectionOutsideCurrentTerm()) popup.hide();
      });
    }
  }

  if (!popup.isActive) return;

  if (event.keyCode === 38) popup.selectPrevious(); // ArrowUp
  if (event.keyCode === 40) popup.selectNext(); // ArrowDown
  if (event.keyCode === 13 || event.keyCode === 27 || event.keyCode === 188) popup.hide(); // Enter || Esc || Comma
  if (event.keyCode === 38 || event.keyCode === 40) {
    // ArrowUp || ArrowDown
    if (popup.selectedTerm) {
      applySelectedValue(popup.selectedTerm);
    } else {
      restoreOriginalValue();
    }

    event.preventDefault();
  }
}

function findSelectedTerm(targetInput: HTMLInputElement, searchQuery: string): TermContext | null {
  if (targetInput.selectionStart === null || targetInput.selectionEnd === null) return null;

  const selectionIndex = Math.min(targetInput.selectionStart, targetInput.selectionEnd);
  const terms = getTermContexts(searchQuery);

  return terms.find(([range]) => range[0] < selectionIndex && range[1] >= selectionIndex) ?? null;
}

function toggleSearchAutocomplete() {
  const enable = store.get('enable_search_ac');

  for (const searchField of $$<HTMLInputElement>('input[data-ac-mode=search]')) {
    if (enable) {
      searchField.autocomplete = 'off';
    } else {
      searchField.removeAttribute('data-ac');
      searchField.autocomplete = 'on';
    }
  }
}

function listenAutocomplete() {
  let serverSideSuggestionsTimeout: number | undefined;

  let localAc: LocalAutocompleter | null = null;
  let isLocalLoading = false;

  document.addEventListener('focusin', loadAutocompleteFromEvent);

  document.addEventListener('input', event => {
    popup.hide();
    loadAutocompleteFromEvent(event);
    window.clearTimeout(serverSideSuggestionsTimeout);

    if (!(event.target instanceof HTMLInputElement)) return;

    const targetedInput = event.target;

    if (!targetedInput.dataset.ac) return;

    targetedInput.addEventListener('keydown', keydownHandler);

    if (localAc !== null) {
      inputField = targetedInput;
      let suggestionsCount = 5;

      if (isSearchField(inputField)) {
        originalQuery = inputField.value;
        selectedTerm = findSelectedTerm(inputField, originalQuery);
        suggestionsCount = 10;

        // We don't need to run auto-completion if user is not selecting tag at all
        if (!selectedTerm) {
          return;
        }

        originalTerm = selectedTerm[1].toLowerCase();
      } else {
        originalTerm = `${inputField.value}`.toLowerCase();
      }

      const suggestions = localAc
        .matchPrefix(originalTerm)
        .topK(suggestionsCount)
        .map(({ name, imageCount }) => ({ label: `${name} (${imageCount})`, value: name }));

      if (suggestions.length) {
        popup.renderSuggestions(suggestions).showForField(targetedInput);
        return;
      }
    }

    const { acMinLength: minTermLength, acSource: endpointUrl } = targetedInput.dataset;

    if (!endpointUrl) return;

    // Use a timeout to delay requests until the user has stopped typing
    serverSideSuggestionsTimeout = window.setTimeout(() => {
      inputField = targetedInput;
      originalTerm = inputField.value;

      const fetchedTerm = inputField.value;

      if (minTermLength && fetchedTerm.length < parseInt(minTermLength, 10)) return;

      fetchSuggestions(endpointUrl, fetchedTerm).then(suggestions => {
        // inputField could get overwritten while the suggestions are being fetched - use previously targeted input
        if (fetchedTerm === targetedInput.value) {
          popup.renderSuggestions(suggestions).showForField(targetedInput);
        }
      });
    }, 300);
  });

  // If there's a click outside the inputField, remove autocomplete
  document.addEventListener('click', event => {
    if (event.target && event.target !== inputField) popup.hide();
    if (inputField && event.target === inputField && isSearchField(inputField) && isSelectionOutsideCurrentTerm()) {
      popup.hide();
    }
  });

  function loadAutocompleteFromEvent(event: Event) {
    if (!(event.target instanceof HTMLInputElement)) return;

    if (!isLocalLoading && event.target.dataset.ac) {
      isLocalLoading = true;

      fetchLocalAutocomplete().then(autocomplete => {
        localAc = autocomplete;
      });
    }
  }

  toggleSearchAutocomplete();

  popup.onItemSelected((event: CustomEvent<TermSuggestion>) => {
    if (!event.detail || !inputField) return;

    const originalSuggestion = event.detail;
    applySelectedValue(originalSuggestion.value);

    inputField.dispatchEvent(
      new CustomEvent('autocomplete', {
        detail: Object.assign(
          {
            type: 'click',
          },
          originalSuggestion,
        ),
      }),
    );
  });
}

export { listenAutocomplete };
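Note on the rewrite above: the TypeScript module keeps the same public surface as the deleted autocomplete.js (a single listenAutocomplete export), while the DOM popup handling moves into SuggestionsPopup and the fetching into utils/suggestions. A minimal usage sketch, assuming a hypothetical entry point that wires up page scripts (the actual call site is not part of this diff):

// Hypothetical wiring; the import path and call site are illustrative only.
import { listenAutocomplete } from './autocomplete';

// Installs the document-level focusin/input/click listeners once at startup.
listenAutocomplete();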
@@ -9,47 +9,25 @@ import { fetchHtml, handleError } from './utils/requests';
import { showBlock } from './utils/image';
import { addTag } from './tagsinput';

/* eslint-disable prettier/prettier */

// Event types and any qualifying conditions - return true to not run action
const types = {
  click(event) {
    return event.button !== 0; /* Left-click only */
  },

  change() {
    /* No qualifier */
  },

  fetchcomplete() {
    /* No qualifier */
  },
  click(event) { return event.button !== 0; /* Left-click only */ },
  change() { /* No qualifier */ },
  fetchcomplete() { /* No qualifier */ },
};

const actions = {
  hide(data) {
    selectorCb(data.base, data.value, el => el.classList.add('hidden'));
  },

  tabHide(data) {
    selectorCbChildren(data.base, data.value, el => el.classList.add('hidden'));
  },

  show(data) {
    selectorCb(data.base, data.value, el => el.classList.remove('hidden'));
  },

  toggle(data) {
    selectorCb(data.base, data.value, el => el.classList.toggle('hidden'));
  },

  submit(data) {
    selectorCb(data.base, data.value, el => el.submit());
  },

  disable(data) {
    selectorCb(data.base, data.value, el => {
      el.disabled = true;
    });
  },
  hide(data) { selectorCb(data.base, data.value, el => el.classList.add('hidden')); },
  show(data) { selectorCb(data.base, data.value, el => el.classList.remove('hidden')); },
  toggle(data) { selectorCb(data.base, data.value, el => el.classList.toggle('hidden')); },
  submit(data) { selectorCb(data.base, data.value, el => el.submit()); },
  disable(data) { selectorCb(data.base, data.value, el => el.disabled = true); },
  focus(data) { document.querySelector(data.value).focus(); },
  unfilter(data) { showBlock(data.el.closest('.image-show-container')); },
  tabHide(data) { selectorCbChildren(data.base, data.value, el => el.classList.add('hidden')); },
  preventdefault() { /* The existence of this entry is enough */ },

  copy(data) {
    document.querySelector(data.value).select();
@@ -70,18 +48,17 @@ const actions = {
    });
  },

  focus(data) {
    document.querySelector(data.value).focus();
  },

  preventdefault() {
    /* The existence of this entry is enough */
  },

  addtag(data) {
    addTag(document.querySelector(data.el.closest('[data-target]').dataset.target), data.el.dataset.tagName);
  },

  hideParent(data) {
    const base = data.el.closest(data.value);
    if (base) {
      base.classList.add('hidden');
    }
  },

  tab(data) {
    const block = data.el.parentNode.parentNode,
      newTab = $(`.block__tab[data-tab="${data.value}"]`),
@@ -114,12 +91,10 @@ const actions = {
      });
    }
  },

  unfilter(data) {
    showBlock(data.el.closest('.image-show-container'));
  },
};

/* eslint-enable prettier/prettier */

// Use this function to apply a callback to elements matching the selectors
function selectorCb(base = document, selector, cb) {
  [].forEach.call(base.querySelectorAll(selector), cb);
@@ -22,9 +22,9 @@ export function setupGalleryEditing() {

  initDraggables();

  $$<HTMLDivElement>('.media-box', containerEl).forEach(i => {
    i.draggable = true;
  });
  for (const mediaBox of $$<HTMLDivElement>('.media-box', containerEl)) {
    mediaBox.draggable = true;
  }

  rearrangeEl.addEventListener('click', () => {
    sortableEl.classList.add('editing');
@@ -46,8 +46,8 @@ export function setupGalleryEditing() {

    fetchJson('PATCH', reorderPath, {
      image_ids: newImages,
      // copy the array again so that we have the newly updated set
    }).then(() => {
      // copy the array again so that we have the newly updated set
      oldImages = newImages.slice();
    });
  });
@@ -95,9 +95,7 @@ function showHidden(imageId) {

function resetVoted(imageId) {
  uncacheStatus(imageId, 'voted');

  onImage(imageId, '.interaction--upvote', el => el.classList.remove('active'));

  onImage(imageId, '.interaction--downvote', el => el.classList.remove('active'));
}
@@ -4,22 +4,40 @@

import { $, $$ } from './utils/dom';

const markdownSyntax = {
// List of options provided to the syntax handler function.
interface SyntaxHandlerOptions {
  prefix: string;
  shortcutKeyCode: number;
  suffix: string;
  prefixMultiline: string;
  suffixMultiline: string;
  singleWrap: boolean;
  escapeChar: string;
  image: boolean;
  text: string;
}

interface SyntaxHandler {
  action: (textarea: HTMLTextAreaElement, options: Partial<SyntaxHandlerOptions>) => void;
  options: Partial<SyntaxHandlerOptions>;
}

const markdownSyntax: Record<string, SyntaxHandler> = {
  bold: {
    action: wrapSelection,
    options: { prefix: '**', shortcutKey: 'b' },
    options: { prefix: '**', shortcutKeyCode: 66 },
  },
  italics: {
    action: wrapSelection,
    options: { prefix: '*', shortcutKey: 'i' },
    options: { prefix: '*', shortcutKeyCode: 73 },
  },
  under: {
    action: wrapSelection,
    options: { prefix: '__', shortcutKey: 'u' },
    options: { prefix: '__', shortcutKeyCode: 85 },
  },
  spoiler: {
    action: wrapSelection,
    options: { prefix: '||', shortcutKey: 's' },
    options: { prefix: '||', shortcutKeyCode: 83 },
  },
  code: {
    action: wrapSelectionOrLines,
@@ -29,7 +47,7 @@ const markdownSyntax = {
      prefixMultiline: '```\n',
      suffixMultiline: '\n```',
      singleWrap: true,
      shortcutKey: 'e',
      shortcutKeyCode: 69,
    },
  },
  strike: {
@@ -50,11 +68,11 @@ const markdownSyntax = {
  },
  link: {
    action: insertLink,
    options: { shortcutKey: 'l' },
    options: { shortcutKeyCode: 76 },
  },
  image: {
    action: insertLink,
    options: { image: true, shortcutKey: 'k' },
    options: { image: true, shortcutKeyCode: 75 },
  },
  escape: {
    action: escapeSelection,
@@ -62,14 +80,22 @@ const markdownSyntax = {
  },
};

function getSelections(textarea, linesOnly = false) {
interface SelectionResult {
  processLinesOnly: boolean;
  selectedText: string;
  beforeSelection: string;
  afterSelection: string;
}

function getSelections(textarea: HTMLTextAreaElement, linesOnly: RegExp | boolean = false): SelectionResult {
  let { selectionStart, selectionEnd } = textarea,
    selection = textarea.value.substring(selectionStart, selectionEnd),
    leadingSpace = '',
    trailingSpace = '',
    caret;
    caret: number;

  const processLinesOnly = linesOnly instanceof RegExp ? linesOnly.test(selection) : linesOnly;

  if (processLinesOnly) {
    const explorer = /\n/g;
    let startNewlineIndex = 0,
@@ -119,7 +145,18 @@ function getSelections(textarea, linesOnly = false) {
  };
}

function transformSelection(textarea, transformer, eachLine) {
interface TransformResult {
  newText: string;
  caretOffset: number;
}

type TransformCallback = (selectedText: string, processLinesOnly: boolean) => TransformResult;

function transformSelection(
  textarea: HTMLTextAreaElement,
  transformer: TransformCallback,
  eachLine: RegExp | boolean = false,
) {
  const { selectedText, beforeSelection, afterSelection, processLinesOnly } = getSelections(textarea, eachLine),
    // For long comments, record scrollbar position to restore it later
    { scrollTop } = textarea;
@@ -140,7 +177,7 @@ function transformSelection(textarea, transformer, eachLine) {
  textarea.dispatchEvent(new Event('change'));
}

function insertLink(textarea, options) {
function insertLink(textarea: HTMLTextAreaElement, options: Partial<SyntaxHandlerOptions>) {
  let hyperlink = window.prompt(options.image ? 'Image link:' : 'Link:');
  if (!hyperlink || hyperlink === '') return;

@@ -155,10 +192,11 @@ function insertLink(textarea, options) {
  wrapSelection(textarea, { prefix, suffix });
}

function wrapSelection(textarea, options) {
  transformSelection(textarea, selectedText => {
function wrapSelection(textarea: HTMLTextAreaElement, options: Partial<SyntaxHandlerOptions>) {
  transformSelection(textarea, (selectedText: string): TransformResult => {
    const { text = selectedText, prefix = '', suffix = options.prefix } = options,
      emptyText = text === '';

    let newText = text;

    if (!emptyText) {
@@ -176,10 +214,14 @@ function wrapSelection(textarea, options) {
  });
}

function wrapLines(textarea, options, eachLine = true) {
function wrapLines(
  textarea: HTMLTextAreaElement,
  options: Partial<SyntaxHandlerOptions>,
  eachLine: RegExp | boolean = true,
) {
  transformSelection(
    textarea,
    (selectedText, processLinesOnly) => {
    (selectedText: string, processLinesOnly: boolean): TransformResult => {
      const { text = selectedText, singleWrap = false } = options,
        prefix = (processLinesOnly && options.prefixMultiline) || options.prefix || '',
        suffix = (processLinesOnly && options.suffixMultiline) || options.suffix || '',
@@ -200,16 +242,22 @@ function wrapLines(textarea, options, eachLine = true) {
  );
}

function wrapSelectionOrLines(textarea, options) {
function wrapSelectionOrLines(textarea: HTMLTextAreaElement, options: Partial<SyntaxHandlerOptions>) {
  wrapLines(textarea, options, /\n/);
}

function escapeSelection(textarea, options) {
  transformSelection(textarea, selectedText => {
function escapeSelection(textarea: HTMLTextAreaElement, options: Partial<SyntaxHandlerOptions>) {
  transformSelection(textarea, (selectedText: string): TransformResult => {
    const { text = selectedText } = options,
      emptyText = text === '';

    if (emptyText) return;
    // Nothing to escape, so do nothing
    if (emptyText) {
      return {
        newText: text,
        caretOffset: text.length,
      };
    }

    const newText = text.replace(/([*_[\]()^`%\\~<>#|])/g, '\\$1');

@@ -220,34 +268,55 @@ function escapeSelection(textarea, options) {
  });
}

function clickHandler(event) {
  const button = event.target.closest('.communication__toolbar__button');
  if (!button) return;
  const toolbar = button.closest('.communication__toolbar'),
    // There may be multiple toolbars present on the page,
    // in the case of image pages with description edit active
    // we target the textarea that shares the same parent as the toolbar
    textarea = $('.js-toolbar-input', toolbar.parentNode),
function clickHandler(event: MouseEvent) {
  if (!(event.target instanceof HTMLElement)) return;

  const button = event.target.closest<HTMLElement>('.communication__toolbar__button');
  const toolbar = button?.closest<HTMLElement>('.communication__toolbar');

  if (!button || !toolbar?.parentElement) return;

  // There may be multiple toolbars present on the page,
  // in the case of image pages with description edit active
  // we target the textarea that shares the same parent as the toolbar
  const textarea = $<HTMLTextAreaElement>('.js-toolbar-input', toolbar.parentElement),
    id = button.dataset.syntaxId;

  if (!textarea || !id) return;

  markdownSyntax[id].action(textarea, markdownSyntax[id].options);
  textarea.focus();
}

function shortcutHandler(event) {
  if (
    !event.ctrlKey ||
    (window.navigator.platform === 'MacIntel' && !event.metaKey) ||
    event.shiftKey ||
    event.altKey
  ) {
function canAcceptShortcut(event: KeyboardEvent): boolean {
  let ctrl: boolean, otherModifier: boolean;

  switch (window.navigator.platform) {
    case 'MacIntel':
      ctrl = event.metaKey;
      otherModifier = event.ctrlKey || event.shiftKey || event.altKey;
      break;
    default:
      ctrl = event.ctrlKey;
      otherModifier = event.metaKey || event.shiftKey || event.altKey;
      break;
  }

  return ctrl && !otherModifier;
}

function shortcutHandler(event: KeyboardEvent) {
  if (!canAcceptShortcut(event)) {
    return;
  }

  const textarea = event.target,
    key = event.key.toLowerCase();
    keyCode = event.keyCode;

  if (!(textarea instanceof HTMLTextAreaElement)) return;

  for (const id in markdownSyntax) {
    if (key === markdownSyntax[id].options.shortcutKey) {
    if (keyCode === markdownSyntax[id].options.shortcutKeyCode) {
      markdownSyntax[id].action(textarea, markdownSyntax[id].options);
      event.preventDefault();
    }
@@ -255,10 +324,10 @@ function shortcutHandler(event) {
}

function setupToolbar() {
  $$('.communication__toolbar').forEach(toolbar => {
  $$<HTMLElement>('.communication__toolbar').forEach(toolbar => {
    toolbar.addEventListener('click', clickHandler);
  });
  $$('.js-toolbar-input').forEach(textarea => {
  $$<HTMLTextAreaElement>('.js-toolbar-input').forEach(textarea => {
    textarea.addEventListener('keydown', shortcutHandler);
  });
}
@@ -8,8 +8,8 @@ import { delegate } from './utils/events';
import { assertNotNull, assertNotUndefined } from './utils/assert';
import store from './utils/store';

const NOTIFICATION_INTERVAL = 600000,
  NOTIFICATION_EXPIRES = 300000;
const NOTIFICATION_INTERVAL = 600000;
const NOTIFICATION_EXPIRES = 300000;

function bindSubscriptionLinks() {
  delegate(document, 'fetchcomplete', {
@@ -18,7 +18,7 @@ export function warnAboutPMs() {

    if (value.match(imageEmbedRegex)) {
      showEl(warning);
    } else if (!warning.classList.contains('hidden')) {
    } else {
      hideEl(warning);
    }
  });
@@ -57,8 +57,22 @@ function makeRelativeDateMatcher(dateVal: string, qual: RangeEqualQualifier): FieldMatcher {
  return makeMatcher(bottomDate, topDate, qual);
}

const parseRes: RegExp[] = [
  // year
  /^(\d{4})/,
  // month
  /^-(\d{2})/,
  // day
  /^-(\d{2})/,
  // hour
  /^(?:\s+|T|t)(\d{2})/,
  // minute
  /^:(\d{2})/,
  // second
  /^:(\d{2})/,
];

function makeAbsoluteDateMatcher(dateVal: string, qual: RangeEqualQualifier): FieldMatcher {
  const parseRes: RegExp[] = [/^(\d{4})/, /^-(\d{2})/, /^-(\d{2})/, /^(?:\s+|T|t)(\d{2})/, /^:(\d{2})/, /^:(\d{2})/];
  const timeZoneOffset: TimeZoneOffset = [0, 0];
  const timeData: AbsoluteDate = [0, 0, 1, 0, 0, 0];

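For orientation, the parseRes regexes above are intended to be applied in sequence, each consuming the next date component from the front of the remaining input. The hunk does not show the matcher's parsing loop, so the following is only a rough sketch of that idea under that assumption:

// Illustrative sketch: walk the component regexes over an ISO-like date string.
const componentRes: RegExp[] = [/^(\d{4})/, /^-(\d{2})/, /^-(\d{2})/, /^(?:\s+|T|t)(\d{2})/, /^:(\d{2})/, /^:(\d{2})/];

function parseComponents(dateVal: string): number[] {
  const timeData = [0, 0, 1, 0, 0, 0]; // year, month, day, hour, minute, second defaults
  let rest = dateVal;

  for (let i = 0; i < componentRes.length; i += 1) {
    const match = componentRes[i].exec(rest);
    if (!match) break; // shorter inputs such as '2024-06' simply stop early

    timeData[i] = parseInt(match[1], 10);
    rest = rest.slice(match[0].length);
  }

  return timeData;
}

// parseComponents('2024-06-01T12:30') -> [2024, 6, 1, 12, 30, 0]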
@@ -32,8 +32,8 @@ export interface LexResult {
}

export function generateLexResult(searchStr: string, parseTerm: ParseTerm): LexResult {
  const opQueue: string[] = [],
    groupNegate: boolean[] = [];
  const opQueue: string[] = [];
  const groupNegate: boolean[] = [];

  let searchTerm: string | null = null;
  let boostFuzzStr = '';
@@ -85,11 +85,10 @@ export function generateLexResult(searchStr: string, parseTerm: ParseTerm): LexResult {
    }

    const token = match[0];
    const tokenIsBinaryOp = ['and_op', 'or_op'].indexOf(tokenName) !== -1;
    const tokenIsGroupStart = tokenName === 'rparen' && lparenCtr === 0;

    if (
      searchTerm !== null &&
      (['and_op', 'or_op'].indexOf(tokenName) !== -1 || (tokenName === 'rparen' && lparenCtr === 0))
    ) {
    if (searchTerm !== null && (tokenIsBinaryOp || tokenIsGroupStart)) {
      endTerm();
    }

@@ -22,15 +22,15 @@ function makeWildcardMatcher(term: string): FieldMatcher {
  // Transforms wildcard match into regular expression.
  // A custom NFA with caching may be more sophisticated but not
  // likely to be faster.
  const wildcard = new RegExp(
    `^${term
      .replace(/([.+^$[\]\\(){}|-])/g, '\\$1')
      .replace(/([^\\]|[^\\](?:\\\\)+)\*/g, '$1.*')
      .replace(/^(?:\\\\)*\*/g, '.*')
      .replace(/([^\\]|[^\\](?:\\\\)+)\?/g, '$1.?')
      .replace(/^(?:\\\\)*\?/g, '.?')}$`,
    'i',
  );

  const regexpForm = term
    .replace(/([.+^$[\]\\(){}|-])/g, '\\$1')
    .replace(/([^\\]|[^\\](?:\\\\)+)\*/g, '$1.*')
    .replace(/^(?:\\\\)*\*/g, '.*')
    .replace(/([^\\]|[^\\](?:\\\\)+)\?/g, '$1.?')
    .replace(/^(?:\\\\)*\?/g, '.?');

  const wildcard = new RegExp(`^${regexpForm}$`, 'i');

  return (v, name) => {
    const values = extractValues(v, name);
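The change above only names the intermediate regexpForm; the escaping and wildcard translation are identical to the old inline version. As a rough illustration (not taken from the source) of what that replace chain produces, '*' turns into '.*', '?' into '.?', and regex metacharacters in the term are escaped first, with the result anchored and case-insensitive:

// Illustrative only: the same replace chain as the diff, wrapped in a standalone helper.
function wildcardToRegExp(term: string): RegExp {
  const regexpForm = term
    .replace(/([.+^$[\]\\(){}|-])/g, '\\$1')
    .replace(/([^\\]|[^\\](?:\\\\)+)\*/g, '$1.*')
    .replace(/^(?:\\\\)*\*/g, '.*')
    .replace(/([^\\]|[^\\](?:\\\\)+)\?/g, '$1.?')
    .replace(/^(?:\\\\)*\?/g, '.?');

  return new RegExp(`^${regexpForm}$`, 'i');
}

wildcardToRegExp('fo*').test('forest'); // true: '*' becomes '.*'
wildcardToRegExp('fo*').test('safe'); // false: the pattern is anchored at both ends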
@@ -74,9 +74,9 @@ function submit() {

function modifyImageQueue(mediaBox) {
  if (currentTags()) {
    const imageId = mediaBox.dataset.imageId,
      queue = currentQueue(),
      isSelected = queue.includes(imageId);
    const imageId = mediaBox.dataset.imageId;
    const queue = currentQueue();
    const isSelected = queue.includes(imageId);

    isSelected ? queue.splice(queue.indexOf(imageId), 1) : queue.push(imageId);

@@ -4,7 +4,7 @@

import { $ } from './utils/dom';

type ShortcutKeyMap = Record<string, () => void>;
type ShortcutKeyMap = Record<number, () => void>;

function getHover(): string | null {
  const thumbBoxHover = $<HTMLDivElement>('.media-box:hover');
@@ -48,30 +48,32 @@ function isOK(event: KeyboardEvent): boolean {
}

/* eslint-disable prettier/prettier */

const keyCodes: ShortcutKeyMap = {
  j() { click('.js-prev'); }, // J - go to previous image
  i() { click('.js-up'); }, // I - go to index page
  k() { click('.js-next'); }, // K - go to next image
  r() { click('.js-rand'); }, // R - go to random image
  s() { click('.js-source-link'); }, // S - go to image source
  l() { click('.js-tag-sauce-toggle'); }, // L - edit tags
  o() { openFullView(); }, // O - open original
  v() { openFullViewNewTab(); }, // V - open original in a new tab
  f() {
  74() { click('.js-prev'); }, // J - go to previous image
  73() { click('.js-up'); }, // I - go to index page
  75() { click('.js-next'); }, // K - go to next image
  82() { click('.js-rand'); }, // R - go to random image
  83() { click('.js-source-link'); }, // S - go to image source
  76() { click('.js-tag-sauce-toggle'); }, // L - edit tags
  79() { openFullView(); }, // O - open original
  86() { openFullViewNewTab(); }, // V - open original in a new tab
  70() {
    // F - favourite image
    click(getHover() ? `a.interaction--fave[data-image-id="${getHover()}"]` : '.block__header a.interaction--fave');
  },
  u() {
  85() {
    // U - upvote image
    click(getHover() ? `a.interaction--upvote[data-image-id="${getHover()}"]` : '.block__header a.interaction--upvote');
  },
};

/* eslint-enable prettier/prettier */

export function listenForKeys() {
  document.addEventListener('keydown', (event: KeyboardEvent) => {
    if (isOK(event) && keyCodes[event.key]) {
      keyCodes[event.key]();
    if (isOK(event) && keyCodes[event.keyCode]) {
      keyCodes[event.keyCode]();
      event.preventDefault();
    }
  });
@@ -35,12 +35,12 @@ function setTimeAgo(el: HTMLTimeElement) {
  const date = new Date(datetime);
  const distMillis = distance(date);

  const seconds = Math.abs(distMillis) / 1000,
    minutes = seconds / 60,
    hours = minutes / 60,
    days = hours / 24,
    months = days / 30,
    years = days / 365;
  const seconds = Math.abs(distMillis) / 1000;
  const minutes = seconds / 60;
  const hours = minutes / 60;
  const days = hours / 24;
  const months = days / 30;
  const years = days / 365;

  const words =
    (seconds < 45 && substitute('seconds', seconds)) ||
@@ -2,6 +2,7 @@
 * Fetch and display preview images for various image upload forms.
 */

import { assertNotNull } from './utils/assert';
import { fetchJson, handleError } from './utils/requests';
import { $, $$, clearEl, hideEl, makeEl, showEl } from './utils/dom';
import { addTag } from './tagsinput';
@@ -171,9 +172,98 @@ function setupImageUpload() {
    window.removeEventListener('beforeunload', beforeUnload);
  }

  function createTagError(message) {
    const buttonAfter = $('#tagsinput-save');
    const errorElement = makeEl('span', { className: 'help-block tag-error', innerText: message });

    buttonAfter.insertAdjacentElement('beforebegin', errorElement);
  }

  function clearTagErrors() {
    $$('.tag-error').forEach(el => el.remove());
  }

  const ratingsTags = ['safe', 'suggestive', 'questionable', 'explicit', 'semi-grimdark', 'grimdark', 'grotesque'];

  // populate tag error helper bars as necessary
  // return true if all checks pass
  // return false if any check fails
  function validateTags() {
    const tagInput = $('textarea.js-taginput');

    if (!tagInput) {
      return true;
    }

    const tagsArr = tagInput.value.split(',').map(t => t.trim());

    const errors = [];

    let hasRating = false;
    let hasSafe = false;
    let hasOtherRating = false;

    tagsArr.forEach(tag => {
      if (ratingsTags.includes(tag)) {
        hasRating = true;
        if (tag === 'safe') {
          hasSafe = true;
        } else {
          hasOtherRating = true;
        }
      }
    });

    if (!hasRating) {
      errors.push('Tag input must contain at least one rating tag');
    } else if (hasSafe && hasOtherRating) {
      errors.push('Tag input may not contain any other rating if safe');
    }

    if (tagsArr.length < 3) {
      errors.push('Tag input must contain at least 3 tags');
    }

    errors.forEach(msg => createTagError(msg));

    return errors.length === 0; // true: valid if no errors
  }

  function disableUploadButton() {
    const submitButton = $('.button.input--separate-top');
    if (submitButton !== null) {
      submitButton.disabled = true;
      submitButton.innerText = 'Please wait...';
    }

    // delay is needed because Safari stops the submit if the button is immediately disabled
    requestAnimationFrame(() => submitButton.setAttribute('disabled', 'disabled'));
  }

  function submitHandler(event) {
    // Remove any existing tag error elements
    clearTagErrors();

    if (validateTags()) {
      // Disable navigation check
      unregisterBeforeUnload();

      // Prevent duplicate attempts to submit the form
      disableUploadButton();

      // Let the form submission complete
    } else {
      // Scroll to view validation errors
      assertNotNull($('.fancy-tag-upload')).scrollIntoView();

      // Prevent the form from being submitted
      event.preventDefault();
    }
  }

  fileField.addEventListener('change', registerBeforeUnload);
  fetchButton.addEventListener('click', registerBeforeUnload);
  form.addEventListener('submit', unregisterBeforeUnload);
  form.addEventListener('submit', submitHandler);
}

export { setupImageUpload };
@@ -1,4 +1,4 @@
import { delegate, fire, leftClick, on, PhilomenaAvailableEventsMap } from '../events';
import { delegate, fire, mouseMoveThenOver, leftClick, on, PhilomenaAvailableEventsMap } from '../events';
import { getRandomArrayItem } from '../../../test/randomness';
import { fireEvent } from '@testing-library/dom';

@@ -80,6 +80,55 @@ describe('Event utils', () => {
    });
  });

  describe('mouseMoveThenOver', () => {
    it('should NOT fire on first mouseover', () => {
      const mockButton = document.createElement('button');
      const mockHandler = vi.fn();

      mouseMoveThenOver(mockButton, mockHandler);

      fireEvent.mouseOver(mockButton);

      expect(mockHandler).toHaveBeenCalledTimes(0);
    });

    it('should fire on the first mousemove', () => {
      const mockButton = document.createElement('button');
      const mockHandler = vi.fn();

      mouseMoveThenOver(mockButton, mockHandler);

      fireEvent.mouseMove(mockButton);

      expect(mockHandler).toHaveBeenCalledTimes(1);
    });

    it('should fire on subsequent mouseover', () => {
      const mockButton = document.createElement('button');
      const mockHandler = vi.fn();

      mouseMoveThenOver(mockButton, mockHandler);

      fireEvent.mouseMove(mockButton);
      fireEvent.mouseOver(mockButton);

      expect(mockHandler).toHaveBeenCalledTimes(2);
    });

    it('should NOT fire on subsequent mousemove', () => {
      const mockButton = document.createElement('button');
      const mockHandler = vi.fn();

      mouseMoveThenOver(mockButton, mockHandler);

      fireEvent.mouseMove(mockButton);
      fireEvent.mouseOver(mockButton);
      fireEvent.mouseMove(mockButton);

      expect(mockHandler).toHaveBeenCalledTimes(2);
    });
  });

  describe('delegate', () => {
    it('should call the native addEventListener method on the element', () => {
      const mockElement = document.createElement('div');
@@ -58,42 +58,44 @@ describe('Local Autocompleter', () => {
  });

  it('should return suggestions for exact tag name match', () => {
    const result = localAc.topK('safe', defaultK);
    expect(result).toEqual([expect.objectContaining({ name: 'safe', imageCount: 6 })]);
    const result = localAc.matchPrefix('safe').topK(defaultK);
    expect(result).toEqual([expect.objectContaining({ aliasName: 'safe', name: 'safe', imageCount: 6 })]);
  });

  it('should return suggestion for original tag when passed an alias', () => {
    const result = localAc.topK('flowers', defaultK);
    expect(result).toEqual([expect.objectContaining({ name: 'flower', imageCount: 1 })]);
    const result = localAc.matchPrefix('flowers').topK(defaultK);
    expect(result).toEqual([expect.objectContaining({ aliasName: 'flowers', name: 'flower', imageCount: 1 })]);
  });

  it('should return suggestions sorted by image count', () => {
    const result = localAc.topK(termStem, defaultK);
    const result = localAc.matchPrefix(termStem).topK(defaultK);
    expect(result).toEqual([
      expect.objectContaining({ name: 'forest', imageCount: 3 }),
      expect.objectContaining({ name: 'fog', imageCount: 1 }),
      expect.objectContaining({ name: 'force field', imageCount: 1 }),
      expect.objectContaining({ aliasName: 'forest', name: 'forest', imageCount: 3 }),
      expect.objectContaining({ aliasName: 'fog', name: 'fog', imageCount: 1 }),
      expect.objectContaining({ aliasName: 'force field', name: 'force field', imageCount: 1 }),
    ]);
  });

  it('should return namespaced suggestions without including namespace', () => {
    const result = localAc.topK('test', defaultK);
    expect(result).toEqual([expect.objectContaining({ name: 'artist:test', imageCount: 1 })]);
    const result = localAc.matchPrefix('test').topK(defaultK);
    expect(result).toEqual([
      expect.objectContaining({ aliasName: 'artist:test', name: 'artist:test', imageCount: 1 }),
    ]);
  });

  it('should return only the required number of suggestions', () => {
    const result = localAc.topK(termStem, 1);
    expect(result).toEqual([expect.objectContaining({ name: 'forest', imageCount: 3 })]);
    const result = localAc.matchPrefix(termStem).topK(1);
    expect(result).toEqual([expect.objectContaining({ aliasName: 'forest', name: 'forest', imageCount: 3 })]);
  });

  it('should NOT return suggestions associated with hidden tags', () => {
    window.booru.hiddenTagList = [1];
    const result = localAc.topK(termStem, defaultK);
    const result = localAc.matchPrefix(termStem).topK(defaultK);
    expect(result).toEqual([]);
  });

  it('should return empty array for empty prefix', () => {
    const result = localAc.topK('', defaultK);
    const result = localAc.matchPrefix('').topK(defaultK);
    expect(result).toEqual([]);
  });
});
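The updated expectations above follow the new LocalAutocompleter interface: the old single call topK(term, k) is split into matchPrefix(term) followed by topK(k), and every suggestion carries the matched aliasName alongside the canonical name and imageCount. A minimal caller sketch mirroring the tests (loading of the compiled buffer is omitted and the variable names are illustrative):

// `buffer` is assumed to hold the compiled autocomplete binary, as in the test setup above.
const localAc = new LocalAutocompleter(buffer);

// Old shape: localAc.topK('fo', 5)
// New shape: prefix matching first, then ranking by image count.
const suggestions = localAc.matchPrefix('fo').topK(5);
// e.g. [{ aliasName: 'forest', name: 'forest', imageCount: 3 }, ...]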
334  assets/js/utils/__tests__/suggestions.spec.ts  (new file)
@@ -0,0 +1,334 @@
import { fetchMock } from '../../../test/fetch-mock.ts';
import {
  fetchLocalAutocomplete,
  fetchSuggestions,
  purgeSuggestionsCache,
  SuggestionsPopup,
  TermSuggestion,
} from '../suggestions.ts';
import fs from 'fs';
import path from 'path';
import { LocalAutocompleter } from '../local-autocompleter.ts';
import { afterEach } from 'vitest';
import { fireEvent } from '@testing-library/dom';

const mockedSuggestionsEndpoint = '/endpoint?term=';
const mockedSuggestionsResponse = [
  { label: 'artist:assasinmonkey (1)', value: 'artist:assasinmonkey' },
  { label: 'artist:hydrusbeta (1)', value: 'artist:hydrusbeta' },
  { label: 'artist:the sexy assistant (1)', value: 'artist:the sexy assistant' },
  { label: 'artist:devinian (1)', value: 'artist:devinian' },
  { label: 'artist:moe (1)', value: 'artist:moe' },
];

function mockBaseSuggestionsPopup(includeMockedSuggestions: boolean = false): [SuggestionsPopup, HTMLInputElement] {
  const input = document.createElement('input');
  const popup = new SuggestionsPopup();

  document.body.append(input);
  popup.showForField(input);

  if (includeMockedSuggestions) {
    popup.renderSuggestions(mockedSuggestionsResponse);
  }

  return [popup, input];
}

const selectedItemClassName = 'autocomplete__item--selected';

describe('Suggestions', () => {
  let mockedAutocompleteBuffer: ArrayBuffer;
  let popup: SuggestionsPopup | undefined;
  let input: HTMLInputElement | undefined;

  beforeAll(async () => {
    fetchMock.enableMocks();

    mockedAutocompleteBuffer = await fs.promises
      .readFile(path.join(__dirname, 'autocomplete-compiled-v2.bin'))
      .then(fileBuffer => fileBuffer.buffer);
  });

  afterAll(() => {
    fetchMock.disableMocks();
  });

  beforeEach(() => {
    purgeSuggestionsCache();
    fetchMock.resetMocks();
  });

  afterEach(() => {
    if (input) {
      input.remove();
      input = undefined;
    }

    if (popup) {
      popup.hide();
      popup = undefined;
    }
  });

  describe('SuggestionsPopup', () => {
    it('should create the popup container', () => {
      [popup, input] = mockBaseSuggestionsPopup();

      expect(document.querySelector('.autocomplete')).toBeInstanceOf(HTMLElement);
      expect(popup.isActive).toBe(true);
    });

    it('should be removed when hidden', () => {
      [popup, input] = mockBaseSuggestionsPopup();

      popup.hide();

      expect(document.querySelector('.autocomplete')).not.toBeInstanceOf(HTMLElement);
      expect(popup.isActive).toBe(false);
    });

    it('should render suggestions', () => {
      [popup, input] = mockBaseSuggestionsPopup(true);

      expect(document.querySelectorAll('.autocomplete__item').length).toBe(mockedSuggestionsResponse.length);
    });

    it('should initially select first element when selectNext called', () => {
      [popup, input] = mockBaseSuggestionsPopup(true);

      popup.selectNext();

      expect(document.querySelector('.autocomplete__item:first-child')).toHaveClass(selectedItemClassName);
    });

    it('should initially select last element when selectPrevious called', () => {
      [popup, input] = mockBaseSuggestionsPopup(true);

      popup.selectPrevious();

      expect(document.querySelector('.autocomplete__item:last-child')).toHaveClass(selectedItemClassName);
    });

    it('should select and de-select items when hovering items over', () => {
      [popup, input] = mockBaseSuggestionsPopup(true);

      const firstItem = document.querySelector('.autocomplete__item:first-child');
      const lastItem = document.querySelector('.autocomplete__item:last-child');

      if (firstItem) {
        fireEvent.mouseOver(firstItem);
        fireEvent.mouseMove(firstItem);
      }

      expect(firstItem).toHaveClass(selectedItemClassName);

      if (lastItem) {
        fireEvent.mouseOver(lastItem);
        fireEvent.mouseMove(lastItem);
      }

      expect(firstItem).not.toHaveClass(selectedItemClassName);
      expect(lastItem).toHaveClass(selectedItemClassName);

      if (lastItem) {
        fireEvent.mouseOut(lastItem);
      }

      expect(lastItem).not.toHaveClass(selectedItemClassName);
    });

    it('should allow switching between mouse and selection', () => {
      [popup, input] = mockBaseSuggestionsPopup(true);
|
||||
|
||||
const secondItem = document.querySelector('.autocomplete__item:nth-child(2)');
|
||||
const thirdItem = document.querySelector('.autocomplete__item:nth-child(3)');
|
||||
|
||||
if (secondItem) {
|
||||
fireEvent.mouseOver(secondItem);
|
||||
fireEvent.mouseMove(secondItem);
|
||||
}
|
||||
|
||||
expect(secondItem).toHaveClass(selectedItemClassName);
|
||||
|
||||
popup.selectNext();
|
||||
|
||||
expect(secondItem).not.toHaveClass(selectedItemClassName);
|
||||
expect(thirdItem).toHaveClass(selectedItemClassName);
|
||||
});
|
||||
|
||||
it('should loop around when selecting next on last and previous on first', () => {
|
||||
[popup, input] = mockBaseSuggestionsPopup(true);
|
||||
|
||||
const firstItem = document.querySelector('.autocomplete__item:first-child');
|
||||
const lastItem = document.querySelector('.autocomplete__item:last-child');
|
||||
|
||||
if (lastItem) {
|
||||
fireEvent.mouseOver(lastItem);
|
||||
fireEvent.mouseMove(lastItem);
|
||||
}
|
||||
|
||||
expect(lastItem).toHaveClass(selectedItemClassName);
|
||||
|
||||
popup.selectNext();
|
||||
|
||||
expect(document.querySelector(`.${selectedItemClassName}`)).toBeNull();
|
||||
|
||||
popup.selectNext();
|
||||
|
||||
expect(firstItem).toHaveClass(selectedItemClassName);
|
||||
|
||||
popup.selectPrevious();
|
||||
|
||||
expect(document.querySelector(`.${selectedItemClassName}`)).toBeNull();
|
||||
|
||||
popup.selectPrevious();
|
||||
|
||||
expect(lastItem).toHaveClass(selectedItemClassName);
|
||||
});
|
||||
|
||||
it('should return selected item value', () => {
|
||||
[popup, input] = mockBaseSuggestionsPopup(true);
|
||||
|
||||
expect(popup.selectedTerm).toBe(null);
|
||||
|
||||
popup.selectNext();
|
||||
|
||||
expect(popup.selectedTerm).toBe(mockedSuggestionsResponse[0].value);
|
||||
});
|
||||
|
||||
it('should emit an event when item was clicked with mouse', () => {
|
||||
[popup, input] = mockBaseSuggestionsPopup(true);
|
||||
|
||||
let clickEvent: CustomEvent<TermSuggestion> | undefined;
|
||||
|
||||
const itemSelectedHandler = vi.fn((event: CustomEvent<TermSuggestion>) => {
|
||||
clickEvent = event;
|
||||
});
|
||||
|
||||
popup.onItemSelected(itemSelectedHandler);
|
||||
|
||||
const firstItem = document.querySelector('.autocomplete__item');
|
||||
|
||||
if (firstItem) {
|
||||
fireEvent.click(firstItem);
|
||||
}
|
||||
|
||||
expect(itemSelectedHandler).toBeCalledTimes(1);
|
||||
expect(clickEvent?.detail).toEqual(mockedSuggestionsResponse[0]);
|
||||
});
|
||||
|
||||
it('should not emit selection on items without value', () => {
|
||||
[popup, input] = mockBaseSuggestionsPopup();
|
||||
|
||||
popup.renderSuggestions([{ label: 'Option without value', value: '' }]);
|
||||
|
||||
const itemSelectionHandler = vi.fn();
|
||||
|
||||
popup.onItemSelected(itemSelectionHandler);
|
||||
|
||||
const firstItem = document.querySelector('.autocomplete__item:first-child')!;
|
||||
|
||||
if (firstItem) {
|
||||
fireEvent.click(firstItem);
|
||||
}
|
||||
|
||||
expect(itemSelectionHandler).not.toBeCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('fetchSuggestions', () => {
|
||||
it('should only call fetch once per single term', () => {
|
||||
fetchSuggestions(mockedSuggestionsEndpoint, 'art');
|
||||
fetchSuggestions(mockedSuggestionsEndpoint, 'art');
|
||||
|
||||
expect(fetch).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should be case-insensitive to terms and trim spaces', () => {
|
||||
fetchSuggestions(mockedSuggestionsEndpoint, 'art');
|
||||
fetchSuggestions(mockedSuggestionsEndpoint, 'Art');
|
||||
fetchSuggestions(mockedSuggestionsEndpoint, ' ART ');
|
||||
|
||||
expect(fetch).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should return the same suggestions from cache', async () => {
|
||||
fetchMock.mockResolvedValueOnce(new Response(JSON.stringify(mockedSuggestionsResponse), { status: 200 }));
|
||||
|
||||
const firstSuggestions = await fetchSuggestions(mockedSuggestionsEndpoint, 'art');
|
||||
const secondSuggestions = await fetchSuggestions(mockedSuggestionsEndpoint, 'art');
|
||||
|
||||
expect(firstSuggestions).toBe(secondSuggestions);
|
||||
});
|
||||
|
||||
it('should parse and return array of suggestions', async () => {
|
||||
fetchMock.mockResolvedValueOnce(new Response(JSON.stringify(mockedSuggestionsResponse), { status: 200 }));
|
||||
|
||||
const resolvedSuggestions = await fetchSuggestions(mockedSuggestionsEndpoint, 'art');
|
||||
|
||||
expect(resolvedSuggestions).toBeInstanceOf(Array);
|
||||
expect(resolvedSuggestions.length).toBe(mockedSuggestionsResponse.length);
|
||||
expect(resolvedSuggestions).toEqual(mockedSuggestionsResponse);
|
||||
});
|
||||
|
||||
it('should return empty array on server error', async () => {
|
||||
fetchMock.mockResolvedValueOnce(new Response('', { status: 500 }));
|
||||
|
||||
const resolvedSuggestions = await fetchSuggestions(mockedSuggestionsEndpoint, 'unknown tag');
|
||||
|
||||
expect(resolvedSuggestions).toBeInstanceOf(Array);
|
||||
expect(resolvedSuggestions.length).toBe(0);
|
||||
});
|
||||
|
||||
it('should return empty array on invalid response format', async () => {
|
||||
fetchMock.mockResolvedValueOnce(new Response('invalid non-JSON response', { status: 200 }));
|
||||
|
||||
const resolvedSuggestions = await fetchSuggestions(mockedSuggestionsEndpoint, 'invalid response');
|
||||
|
||||
expect(resolvedSuggestions).toBeInstanceOf(Array);
|
||||
expect(resolvedSuggestions.length).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('purgeSuggestionsCache', () => {
|
||||
it('should clear cached responses', async () => {
|
||||
fetchMock.mockResolvedValueOnce(new Response(JSON.stringify(mockedSuggestionsResponse), { status: 200 }));
|
||||
|
||||
const firstResult = await fetchSuggestions(mockedSuggestionsEndpoint, 'art');
|
||||
purgeSuggestionsCache();
|
||||
const resultAfterPurge = await fetchSuggestions(mockedSuggestionsEndpoint, 'art');
|
||||
|
||||
expect(fetch).toBeCalledTimes(2);
|
||||
expect(firstResult).not.toBe(resultAfterPurge);
|
||||
});
|
||||
});
|
||||
|
||||
describe('fetchLocalAutocomplete', () => {
|
||||
it('should request binary with date-related cache key', () => {
|
||||
fetchMock.mockResolvedValue(new Response(mockedAutocompleteBuffer, { status: 200 }));
|
||||
|
||||
const now = new Date();
|
||||
const cacheKey = `${now.getUTCFullYear()}-${now.getUTCMonth()}-${now.getUTCDate()}`;
|
||||
const expectedEndpoint = `/autocomplete/compiled?vsn=2&key=${cacheKey}`;
|
||||
|
||||
fetchLocalAutocomplete();
|
||||
|
||||
expect(fetch).toBeCalledWith(expectedEndpoint, { credentials: 'omit', cache: 'force-cache' });
|
||||
});
|
||||
|
||||
it('should return auto-completer instance', async () => {
|
||||
fetchMock.mockResolvedValue(new Response(mockedAutocompleteBuffer, { status: 200 }));
|
||||
|
||||
const autocomplete = await fetchLocalAutocomplete();
|
||||
|
||||
expect(autocomplete).toBeInstanceOf(LocalAutocompleter);
|
||||
});
|
||||
|
||||
it('should throw generic server error on failing response', async () => {
|
||||
fetchMock.mockResolvedValue(new Response('error', { status: 500 }));
|
||||
|
||||
expect(() => fetchLocalAutocomplete()).rejects.toThrowError('Received error from server');
|
||||
});
|
||||
});
|
||||
});
|
70
assets/js/utils/__tests__/unique-heap.spec.ts
Normal file
@ -0,0 +1,70 @@
|
|||
import { UniqueHeap } from '../unique-heap';
|
||||
|
||||
describe('Unique Heap', () => {
|
||||
interface Result {
|
||||
name: string;
|
||||
}
|
||||
|
||||
function compare(a: Result, b: Result): boolean {
|
||||
return a.name < b.name;
|
||||
}
|
||||
|
||||
test('it should return no results when empty', () => {
|
||||
const heap = new UniqueHeap<Result>(compare, 'name');
|
||||
expect(heap.topK(5)).toEqual([]);
|
||||
});
|
||||
|
||||
test("doesn't insert duplicate results", () => {
|
||||
const heap = new UniqueHeap<Result>(compare, 'name');
|
||||
|
||||
heap.append({ name: 'name' });
|
||||
heap.append({ name: 'name' });
|
||||
|
||||
expect(heap.topK(2)).toEqual([expect.objectContaining({ name: 'name' })]);
|
||||
});
|
||||
|
||||
test('it should return results in reverse sorted order', () => {
|
||||
const heap = new UniqueHeap<Result>(compare, 'name');
|
||||
|
||||
const names = [
|
||||
'alpha',
|
||||
'beta',
|
||||
'gamma',
|
||||
'delta',
|
||||
'epsilon',
|
||||
'zeta',
|
||||
'eta',
|
||||
'theta',
|
||||
'iota',
|
||||
'kappa',
|
||||
'lambda',
|
||||
'mu',
|
||||
'nu',
|
||||
'xi',
|
||||
'omicron',
|
||||
'pi',
|
||||
'rho',
|
||||
'sigma',
|
||||
'tau',
|
||||
'upsilon',
|
||||
'phi',
|
||||
'chi',
|
||||
'psi',
|
||||
'omega',
|
||||
];
|
||||
|
||||
for (const name of names) {
|
||||
heap.append({ name });
|
||||
}
|
||||
|
||||
const results = heap.topK(5);
|
||||
|
||||
expect(results).toEqual([
|
||||
expect.objectContaining({ name: 'zeta' }),
|
||||
expect.objectContaining({ name: 'xi' }),
|
||||
expect.objectContaining({ name: 'upsilon' }),
|
||||
expect.objectContaining({ name: 'theta' }),
|
||||
expect.objectContaining({ name: 'tau' }),
|
||||
]);
|
||||
});
|
||||
});
@ -43,6 +43,17 @@ export function leftClick<E extends MouseEvent, Target extends EventTarget>(func
};
}
export function mouseMoveThenOver<El extends HTMLElement>(element: El, func: (e: MouseEvent) => void) {
element.addEventListener(
'mousemove',
(event: MouseEvent) => {
func(event);
element.addEventListener('mouseover', func);
},
{ once: true },
);
}
export function delegate<K extends keyof PhilomenaAvailableEventsMap, Target extends Element>(
node: PhilomenaEventElement,
event: K,
@ -1,12 +1,21 @@
|
|||
// Client-side tag completion.
|
||||
import { UniqueHeap } from './unique-heap';
|
||||
import store from './store';
|
||||
|
||||
interface Result {
|
||||
export interface Result {
|
||||
aliasName: string;
|
||||
name: string;
|
||||
imageCount: number;
|
||||
associations: number[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether Result a is considered less than Result b.
|
||||
*/
|
||||
function compareResult(a: Result, b: Result): boolean {
|
||||
return a.imageCount === b.imageCount ? a.name > b.name : a.imageCount < b.imageCount;
|
||||
}
|
||||
|
||||
/**
|
||||
* Compare two strings, C-style.
|
||||
*/
|
||||
|
@ -18,10 +27,13 @@ function strcmp(a: string, b: string): number {
|
|||
* Returns the name of a tag without any namespace component.
|
||||
*/
|
||||
function nameInNamespace(s: string): string {
|
||||
const v = s.split(':', 2);
|
||||
const first = s.indexOf(':');
|
||||
|
||||
if (v.length === 2) return v[1];
|
||||
return v[0];
|
||||
if (first !== -1) {
|
||||
return s.slice(first + 1);
|
||||
}
|
||||
|
||||
return s;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -59,7 +71,7 @@ export class LocalAutocompleter {
|
|||
/**
|
||||
* Get a tag's name and its associations given a byte location inside the file.
|
||||
*/
|
||||
getTagFromLocation(location: number): [string, number[]] {
|
||||
private getTagFromLocation(location: number, imageCount: number, aliasName?: string): Result {
|
||||
const nameLength = this.view.getUint8(location);
|
||||
const assnLength = this.view.getUint8(location + 1 + nameLength);
|
||||
|
||||
|
@ -70,29 +82,29 @@ export class LocalAutocompleter {
|
|||
associations.push(this.view.getUint32(location + 1 + nameLength + 1 + i * 4, true));
|
||||
}
|
||||
|
||||
return [name, associations];
|
||||
return { aliasName: aliasName || name, name, imageCount, associations };
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a Result object as the ith tag inside the file.
|
||||
*/
|
||||
getResultAt(i: number): [string, Result] {
|
||||
const nameLocation = this.view.getUint32(this.referenceStart + i * 8, true);
|
||||
private getResultAt(i: number, aliasName?: string): Result {
|
||||
const tagLocation = this.view.getUint32(this.referenceStart + i * 8, true);
|
||||
const imageCount = this.view.getInt32(this.referenceStart + i * 8 + 4, true);
|
||||
const [name, associations] = this.getTagFromLocation(nameLocation);
|
||||
const result = this.getTagFromLocation(tagLocation, imageCount, aliasName);
|
||||
|
||||
if (imageCount < 0) {
|
||||
// This is actually an alias, so follow it
|
||||
return [name, this.getResultAt(-imageCount - 1)[1]];
|
||||
return this.getResultAt(-imageCount - 1, aliasName || result.name);
|
||||
}
|
||||
|
||||
return [name, { name, imageCount, associations }];
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a Result object as the ith tag inside the file, secondary ordering.
|
||||
*/
|
||||
getSecondaryResultAt(i: number): [string, Result] {
|
||||
private getSecondaryResultAt(i: number): Result {
|
||||
const referenceIndex = this.view.getUint32(this.secondaryStart + i * 4, true);
|
||||
return this.getResultAt(referenceIndex);
|
||||
}
|
||||
|
@ -100,23 +112,22 @@ export class LocalAutocompleter {
|
|||
/**
|
||||
* Perform a binary search to fetch all results matching a condition.
|
||||
*/
|
||||
scanResults(
|
||||
getResult: (i: number) => [string, Result],
|
||||
private scanResults(
|
||||
getResult: (i: number) => Result,
|
||||
compare: (name: string) => number,
|
||||
results: Record<string, Result>,
|
||||
results: UniqueHeap<Result>,
|
||||
hiddenTags: Set<number>,
|
||||
) {
|
||||
const unfilter = store.get('unfilter_tag_suggestions');
|
||||
const filter = !store.get('unfilter_tag_suggestions');
|
||||
|
||||
let min = 0;
|
||||
let max = this.numTags;
|
||||
|
||||
const hiddenTags = window.booru.hiddenTagList;
|
||||
|
||||
while (min < max - 1) {
|
||||
const med = (min + (max - min) / 2) | 0;
|
||||
const sortKey = getResult(med)[0];
|
||||
const med = min + (((max - min) / 2) | 0);
|
||||
const result = getResult(med);
|
||||
|
||||
if (compare(sortKey) >= 0) {
|
||||
if (compare(result.aliasName) >= 0) {
|
||||
// too large, go left
|
||||
max = med;
|
||||
} else {
|
||||
|
@ -126,40 +137,47 @@ export class LocalAutocompleter {
|
|||
}
|
||||
|
||||
// Scan forward until no more matches occur
|
||||
while (min < this.numTags - 1) {
|
||||
const [sortKey, result] = getResult(++min);
|
||||
if (compare(sortKey) !== 0) {
|
||||
outer: while (min < this.numTags - 1) {
|
||||
const result = getResult(++min);
|
||||
|
||||
if (compare(result.aliasName) !== 0) {
|
||||
break;
|
||||
}
|
||||
|
||||
// Add if not filtering or no associations are filtered
|
||||
if (unfilter || hiddenTags.findIndex(ht => result.associations.includes(ht)) === -1) {
|
||||
results[result.name] = result;
|
||||
// Check if any associations are filtered
|
||||
if (filter) {
|
||||
for (const association of result.associations) {
|
||||
if (hiddenTags.has(association)) {
|
||||
continue outer;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Nothing was filtered, so add
|
||||
results.append(result);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Find the top k results by image count which match the given string prefix.
|
||||
*/
|
||||
topK(prefix: string, k: number): Result[] {
|
||||
const results: Record<string, Result> = {};
|
||||
matchPrefix(prefix: string): UniqueHeap<Result> {
|
||||
const results = new UniqueHeap<Result>(compareResult, 'name');
|
||||
|
||||
if (prefix === '') {
|
||||
return [];
|
||||
return results;
|
||||
}
|
||||
|
||||
const hiddenTags = new Set(window.booru.hiddenTagList);
|
||||
|
||||
// Find normally, in full name-sorted order
|
||||
const prefixMatch = (name: string) => strcmp(name.slice(0, prefix.length), prefix);
|
||||
this.scanResults(this.getResultAt.bind(this), prefixMatch, results);
|
||||
this.scanResults(this.getResultAt.bind(this), prefixMatch, results, hiddenTags);
|
||||
|
||||
// Find in secondary order
|
||||
const namespaceMatch = (name: string) => strcmp(nameInNamespace(name).slice(0, prefix.length), prefix);
|
||||
this.scanResults(this.getSecondaryResultAt.bind(this), namespaceMatch, results);
|
||||
this.scanResults(this.getSecondaryResultAt.bind(this), namespaceMatch, results, hiddenTags);
|
||||
|
||||
// Sort results by image count
|
||||
const sorted = Object.values(results).sort((a, b) => b.imageCount - a.imageCount);
|
||||
|
||||
return sorted.slice(0, k);
|
||||
return results;
|
||||
}
|
||||
}
|
||||
|
|
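Note: the change above splits the old topK(prefix, k) call into a two-step API. A minimal TypeScript sketch of the intended call pattern follows; suggestTags is a hypothetical helper (not part of the diff), the import paths assume the assets/js/utils layout shown in this changeset, and the example tag names come from the test fixtures earlier in this diff.

import { fetchLocalAutocomplete } from './suggestions';

// Hypothetical helper: matchPrefix() gathers every tag whose name (or
// namespaced name) starts with the prefix into a UniqueHeap, and topK()
// then drains the k entries with the highest imageCount.
async function suggestTags(prefix: string, k: number): Promise<string[]> {
  const completer = await fetchLocalAutocomplete();

  return completer
    .matchPrefix(prefix.toLowerCase())
    .topK(k)
    .map(result => result.name);
}

// suggestTags('fo', 5) would resolve to names such as
// ['forest', 'fog', 'force field'], ordered by descending image count.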
177
assets/js/utils/suggestions.ts
Normal file
@ -0,0 +1,177 @@
|
|||
import { makeEl } from './dom.ts';
|
||||
import { mouseMoveThenOver } from './events.ts';
|
||||
import { handleError } from './requests.ts';
|
||||
import { LocalAutocompleter } from './local-autocompleter.ts';
|
||||
|
||||
export interface TermSuggestion {
|
||||
label: string;
|
||||
value: string;
|
||||
}
|
||||
|
||||
const selectedSuggestionClassName = 'autocomplete__item--selected';
|
||||
|
||||
export class SuggestionsPopup {
|
||||
private readonly container: HTMLElement;
|
||||
private readonly listElement: HTMLUListElement;
|
||||
private selectedElement: HTMLElement | null = null;
|
||||
|
||||
constructor() {
|
||||
this.container = makeEl('div', {
|
||||
className: 'autocomplete',
|
||||
});
|
||||
|
||||
this.listElement = makeEl('ul', {
|
||||
className: 'autocomplete__list',
|
||||
});
|
||||
|
||||
this.container.appendChild(this.listElement);
|
||||
}
|
||||
|
||||
get selectedTerm(): string | null {
|
||||
return this.selectedElement?.dataset.value || null;
|
||||
}
|
||||
|
||||
get isActive(): boolean {
|
||||
return this.container.isConnected;
|
||||
}
|
||||
|
||||
hide() {
|
||||
this.clearSelection();
|
||||
this.container.remove();
|
||||
}
|
||||
|
||||
private clearSelection() {
|
||||
if (!this.selectedElement) return;
|
||||
|
||||
this.selectedElement.classList.remove(selectedSuggestionClassName);
|
||||
this.selectedElement = null;
|
||||
}
|
||||
|
||||
private updateSelection(targetItem: HTMLElement) {
|
||||
this.clearSelection();
|
||||
|
||||
this.selectedElement = targetItem;
|
||||
this.selectedElement.classList.add(selectedSuggestionClassName);
|
||||
}
|
||||
|
||||
renderSuggestions(suggestions: TermSuggestion[]): SuggestionsPopup {
|
||||
this.clearSelection();
|
||||
|
||||
this.listElement.innerHTML = '';
|
||||
|
||||
for (const suggestedTerm of suggestions) {
|
||||
const listItem = makeEl('li', {
|
||||
className: 'autocomplete__item',
|
||||
innerText: suggestedTerm.label,
|
||||
});
|
||||
|
||||
listItem.dataset.value = suggestedTerm.value;
|
||||
|
||||
this.watchItem(listItem, suggestedTerm);
|
||||
this.listElement.appendChild(listItem);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
private watchItem(listItem: HTMLElement, suggestion: TermSuggestion) {
|
||||
mouseMoveThenOver(listItem, () => this.updateSelection(listItem));
|
||||
|
||||
listItem.addEventListener('mouseout', () => this.clearSelection());
|
||||
|
||||
listItem.addEventListener('click', () => {
|
||||
if (!listItem.dataset.value) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.container.dispatchEvent(new CustomEvent('item_selected', { detail: suggestion }));
|
||||
});
|
||||
}
|
||||
|
||||
private changeSelection(direction: number) {
|
||||
let nextTargetElement: Element | null;
|
||||
|
||||
if (!this.selectedElement) {
|
||||
nextTargetElement = direction > 0 ? this.listElement.firstElementChild : this.listElement.lastElementChild;
|
||||
} else {
|
||||
nextTargetElement =
|
||||
direction > 0 ? this.selectedElement.nextElementSibling : this.selectedElement.previousElementSibling;
|
||||
}
|
||||
|
||||
if (!(nextTargetElement instanceof HTMLElement)) {
|
||||
this.clearSelection();
|
||||
return;
|
||||
}
|
||||
|
||||
this.updateSelection(nextTargetElement);
|
||||
}
|
||||
|
||||
selectNext() {
|
||||
this.changeSelection(1);
|
||||
}
|
||||
|
||||
selectPrevious() {
|
||||
this.changeSelection(-1);
|
||||
}
|
||||
|
||||
showForField(targetElement: HTMLElement) {
|
||||
this.container.style.position = 'absolute';
|
||||
this.container.style.left = `${targetElement.offsetLeft}px`;
|
||||
|
||||
let topPosition = targetElement.offsetTop + targetElement.offsetHeight;
|
||||
|
||||
if (targetElement.parentElement) {
|
||||
topPosition -= targetElement.parentElement.scrollTop;
|
||||
}
|
||||
|
||||
this.container.style.top = `${topPosition}px`;
|
||||
|
||||
document.body.appendChild(this.container);
|
||||
}
|
||||
|
||||
onItemSelected(callback: (event: CustomEvent<TermSuggestion>) => void) {
|
||||
this.container.addEventListener('item_selected', callback as EventListener);
|
||||
}
|
||||
}
|
||||
|
||||
const cachedSuggestions = new Map<string, Promise<TermSuggestion[]>>();
|
||||
|
||||
export async function fetchSuggestions(endpoint: string, targetTerm: string): Promise<TermSuggestion[]> {
|
||||
const normalizedTerm = targetTerm.trim().toLowerCase();
|
||||
|
||||
if (cachedSuggestions.has(normalizedTerm)) {
|
||||
return cachedSuggestions.get(normalizedTerm)!;
|
||||
}
|
||||
|
||||
const promisedSuggestions: Promise<TermSuggestion[]> = fetch(`${endpoint}${targetTerm}`)
|
||||
.then(handleError)
|
||||
.then(response => response.json())
|
||||
.catch(() => {
|
||||
// Deleting the promised result from cache to allow retrying
|
||||
cachedSuggestions.delete(normalizedTerm);
|
||||
|
||||
// And resolve failed promise with empty array
|
||||
return [];
|
||||
});
|
||||
|
||||
cachedSuggestions.set(normalizedTerm, promisedSuggestions);
|
||||
|
||||
return promisedSuggestions;
|
||||
}
|
||||
|
||||
export function purgeSuggestionsCache() {
|
||||
cachedSuggestions.clear();
|
||||
}
|
||||
|
||||
export async function fetchLocalAutocomplete(): Promise<LocalAutocompleter> {
|
||||
const now = new Date();
|
||||
const cacheKey = `${now.getUTCFullYear()}-${now.getUTCMonth()}-${now.getUTCDate()}`;
|
||||
|
||||
return await fetch(`/autocomplete/compiled?vsn=2&key=${cacheKey}`, {
|
||||
credentials: 'omit',
|
||||
cache: 'force-cache',
|
||||
})
|
||||
.then(handleError)
|
||||
.then(resp => resp.arrayBuffer())
|
||||
.then(buf => new LocalAutocompleter(buf));
|
||||
}
|
|
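Note: both lookup paths in suggestions.ts are memoized — fetchSuggestions caches one result per trimmed, lower-cased term, and fetchLocalAutocomplete keys the compiled binary on the current UTC date with cache: 'force-cache'. A short usage sketch under those assumptions; the endpoint URL is illustrative, not the site's real route.

import { fetchSuggestions, fetchLocalAutocomplete, purgeSuggestionsCache } from './suggestions';

async function demo() {
  // Both calls resolve to the same array instance: the term is trimmed and
  // lower-cased before it is used as the cache key.
  const a = fetchSuggestions('/tags/autocomplete?term=', 'Art ');
  const b = fetchSuggestions('/tags/autocomplete?term=', 'art');
  console.log((await a) === (await b)); // true

  // Clearing the cache forces the next lookup to hit the network again.
  purgeSuggestionsCache();

  // The compiled index is keyed by the current UTC date, so repeat page
  // loads within a day are served from the HTTP cache.
  const completer = await fetchLocalAutocomplete();
  console.log(completer.matchPrefix('art').topK(3));
}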
@ -57,10 +57,10 @@ export function imageHitsComplex(img: HTMLElement, matchComplex: AstMatcher) {
|
|||
}
|
||||
|
||||
export function displayTags(tags: TagData[]): string {
|
||||
const mainTag = tags[0],
|
||||
otherTags = tags.slice(1);
|
||||
let list = escapeHtml(mainTag.name),
|
||||
extras;
|
||||
const mainTag = tags[0];
|
||||
const otherTags = tags.slice(1);
|
||||
let list = escapeHtml(mainTag.name);
|
||||
let extras;
|
||||
|
||||
if (otherTags.length > 0) {
|
||||
extras = otherTags.map(tag => escapeHtml(tag.name)).join(', ');
|
||||
|
|
96
assets/js/utils/unique-heap.ts
Normal file
@ -0,0 +1,96 @@
|
|||
export type Compare<T> = (a: T, b: T) => boolean;
|
||||
|
||||
export class UniqueHeap<T extends object> {
|
||||
private keys: Set<unknown>;
|
||||
private values: T[];
|
||||
private keyName: keyof T;
|
||||
private compare: Compare<T>;
|
||||
|
||||
constructor(compare: Compare<T>, keyName: keyof T) {
|
||||
this.keys = new Set();
|
||||
this.values = [];
|
||||
this.keyName = keyName;
|
||||
this.compare = compare;
|
||||
}
|
||||
|
||||
append(value: T) {
|
||||
const key = value[this.keyName];
|
||||
|
||||
if (!this.keys.has(key)) {
|
||||
this.keys.add(key);
|
||||
this.values.push(value);
|
||||
}
|
||||
}
|
||||
|
||||
topK(k: number): T[] {
|
||||
// Create the output array.
|
||||
const output: T[] = [];
|
||||
|
||||
for (const result of this.results()) {
|
||||
if (output.length >= k) {
|
||||
break;
|
||||
}
|
||||
|
||||
output.push(result);
|
||||
}
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
*results(): Generator<T, void, void> {
|
||||
const { values } = this;
|
||||
const length = values.length;
|
||||
|
||||
// Build the heap.
|
||||
for (let i = (length >> 1) - 1; i >= 0; i--) {
|
||||
this.heapify(length, i);
|
||||
}
|
||||
|
||||
// Begin extracting values.
|
||||
for (let i = 0; i < length; i++) {
|
||||
// Top value is the largest.
|
||||
yield values[0];
|
||||
|
||||
// Swap with the element at the end.
|
||||
const lastIndex = length - i - 1;
|
||||
values[0] = values[lastIndex];
|
||||
|
||||
// Restore top value being the largest.
|
||||
this.heapify(lastIndex, 0);
|
||||
}
|
||||
}
|
||||
|
||||
private heapify(length: number, initialIndex: number) {
|
||||
const { compare, values } = this;
|
||||
let i = initialIndex;
|
||||
|
||||
while (true) {
|
||||
const left = 2 * i + 1;
|
||||
const right = 2 * i + 2;
|
||||
let largest = i;
|
||||
|
||||
if (left < length && compare(values[largest], values[left])) {
|
||||
// Left child is in-bounds and larger than parent. Swap with left.
|
||||
largest = left;
|
||||
}
|
||||
|
||||
if (right < length && compare(values[largest], values[right])) {
|
||||
// Right child is in-bounds and larger than parent or left. Swap with right.
|
||||
largest = right;
|
||||
}
|
||||
|
||||
if (largest === i) {
|
||||
// Largest value was already the parent. Done.
|
||||
return;
|
||||
}
|
||||
|
||||
// Swap.
|
||||
const temp = values[i];
|
||||
values[i] = values[largest];
|
||||
values[largest] = temp;
|
||||
|
||||
// Repair the subtree previously containing the largest element.
|
||||
i = largest;
|
||||
}
|
||||
}
|
||||
}
|
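Note: UniqueHeap deduplicates by the configured key on insert and only heapifies when results are drained. A small self-contained sketch of how it is meant to be used; the Tag shape here is made up for illustration and is not part of the diff.

import { UniqueHeap } from './unique-heap';

interface Tag {
  name: string;
  imageCount: number;
}

// "Less than" comparison: the heap yields the *largest* elements first,
// so ordering by imageCount returns the most-used tags at the top.
const byImageCount = (a: Tag, b: Tag) => a.imageCount < b.imageCount;

const heap = new UniqueHeap<Tag>(byImageCount, 'name');

heap.append({ name: 'forest', imageCount: 3 });
heap.append({ name: 'fog', imageCount: 1 });
heap.append({ name: 'forest', imageCount: 99 }); // ignored: key already present

// Yields [{ name: 'forest', imageCount: 3 }, { name: 'fog', imageCount: 1 }]
console.log(heap.topK(2));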
1641
assets/package-lock.json
generated
File diff suppressed because it is too large
@ -20,25 +20,26 @@
"postcss-mixins": "^10.0.1",
"postcss-simple-vars": "^7.0.1",
"typescript": "^5.4",
"vite": "^5.2"
"vite": "^5.4"
},
"devDependencies": {
"@testing-library/dom": "^10.1.0",
"@testing-library/jest-dom": "^6.4.6",
"@types/chai-dom": "^1.11.3",
"@vitest/coverage-v8": "^1.6.0",
"@vitest/coverage-v8": "^2.1.0",
"chai": "^5",
"eslint": "^9.4.0",
"eslint-plugin-prettier": "^5.1.3",
"eslint": "^9.11.0",
"eslint-config-prettier": "^9.1.0",
"eslint-plugin-prettier": "^5.2.1",
"eslint-plugin-vitest": "^0.5.4",
"jest-environment-jsdom": "^29.7.0",
"jsdom": "^24.1.0",
"prettier": "^3.3.2",
"stylelint": "^16.6.1",
"prettier": "^3.3.3",
"stylelint": "^16.9.0",
"stylelint-config-standard": "^36.0.0",
"stylelint-prettier": "^5.0.0",
"typescript-eslint": "8.0.0-alpha.39",
"vitest": "^1.6.0",
"vitest-fetch-mock": "^0.2.2"
"typescript-eslint": "8.8.0",
"vitest": "^2.1.0",
"vitest-fetch-mock": "^0.3.0"
}
}
@ -8,7 +8,7 @@ export function fixEventListeners(t: EventTarget) {
|
|||
eventListeners = {};
|
||||
const oldAddEventListener = t.addEventListener;
|
||||
|
||||
t.addEventListener = (type: string, listener: any, options: any): void => {
|
||||
t.addEventListener = function (type: string, listener: any, options: any): void {
|
||||
eventListeners[type] = eventListeners[type] || [];
|
||||
eventListeners[type].push(listener);
|
||||
return oldAddEventListener(type, listener, options);
|
||||
|
|
|
@ -31,7 +31,6 @@ Object.assign(globalThis, { URL, Blob });
|
|||
|
||||
// Prevents an error when calling `form.submit()` directly in
|
||||
// the code that is being tested
|
||||
// eslint-disable-next-line prettier/prettier
|
||||
HTMLFormElement.prototype.submit = function() {
|
||||
HTMLFormElement.prototype.submit = function () {
|
||||
fireEvent.submit(this);
|
||||
};
|
||||
|
|
|
@ -14,7 +14,9 @@ export default defineConfig(({ command, mode }: ConfigEnv): UserConfig => {
|
|||
fs.readdirSync(path.resolve(__dirname, 'css/themes/')).forEach(name => {
|
||||
const m = name.match(/([-a-z]+).css/);
|
||||
|
||||
if (m) targets.set(`css/${m[1]}`, `./css/themes/${m[1]}.css`);
|
||||
if (m) return targets.set(`css/${m[1]}`, `./css/themes/${m[1]}.css`);
|
||||
|
||||
return null;
|
||||
});
|
||||
|
||||
fs.readdirSync(path.resolve(__dirname, 'css/options/')).forEach(name => {
|
||||
|
@ -66,13 +68,13 @@ export default defineConfig(({ command, mode }: ConfigEnv): UserConfig => {
|
|||
test: {
|
||||
globals: true,
|
||||
environment: 'jsdom',
|
||||
exclude: ['node_modules/', '.*\\.test\\.ts$', '.*\\.d\\.ts$', '.*\\.spec\\.ts$'],
|
||||
// TODO Jest --randomize CLI flag equivalent, consider enabling in the future
|
||||
// sequence: { shuffle: true },
|
||||
setupFiles: './test/vitest-setup.ts',
|
||||
coverage: {
|
||||
reporter: ['text', 'html'],
|
||||
include: ['js/**/*.{js,ts}'],
|
||||
exclude: ['node_modules/', '.*\\.test\\.ts$', '.*\\.d\\.ts$'],
|
||||
thresholds: {
|
||||
statements: 0,
|
||||
branches: 0,
|
||||
|
|
|
@ -59,7 +59,7 @@ services:
|
|||
- '5173:5173'
|
||||
|
||||
postgres:
|
||||
image: postgres:16.3-alpine
|
||||
image: postgres:16.4-alpine
|
||||
environment:
|
||||
- POSTGRES_PASSWORD=postgres
|
||||
volumes:
|
||||
|
@ -68,7 +68,7 @@ services:
|
|||
driver: "none"
|
||||
|
||||
opensearch:
|
||||
image: opensearchproject/opensearch:2.15.0
|
||||
image: opensearchproject/opensearch:2.16.0
|
||||
volumes:
|
||||
- opensearch_data:/usr/share/opensearch/data
|
||||
- ./docker/opensearch/opensearch.yml:/usr/share/opensearch/config/opensearch.yml
|
||||
|
@ -80,12 +80,12 @@ services:
|
|||
hard: 65536
|
||||
|
||||
valkey:
|
||||
image: valkey/valkey:7.2.5-alpine
|
||||
image: valkey/valkey:8.0-alpine
|
||||
logging:
|
||||
driver: "none"
|
||||
|
||||
files:
|
||||
image: andrewgaul/s3proxy:sha-4175022
|
||||
image: andrewgaul/s3proxy:sha-4976e17
|
||||
environment:
|
||||
- JCLOUDS_FILESYSTEM_BASEDIR=/srv/philomena/priv/s3
|
||||
volumes:
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
FROM elixir:1.17-alpine
|
||||
FROM elixir:1.17.2-alpine
|
||||
|
||||
ADD https://api.github.com/repos/philomena-dev/FFmpeg/git/refs/heads/release/6.1 /tmp/ffmpeg_version.json
|
||||
RUN (echo "https://github.com/philomena-dev/prebuilt-ffmpeg/raw/master"; cat /etc/apk/repositories) > /tmp/repositories \
|
||||
|
|
|
@ -76,7 +76,7 @@ end
|
|||
|
||||
local function get_hashed_canonical_request(timestamp, host, uri)
|
||||
local digest = get_sha256_digest(ngx.var.request_body)
|
||||
local canonical_request = ngx.var.request_method .. '\n'
|
||||
local canonical_request = 'GET' .. '\n'
|
||||
.. uri .. '\n'
|
||||
.. '\n'
|
||||
.. 'host:' .. host .. '\n'
|
||||
|
|
|
@ -34,7 +34,7 @@ init_by_lua_block {
|
|||
function sign_aws_request()
|
||||
-- The API token used should not allow writing, but
|
||||
-- sanitize this anyway to stop an upstream error
|
||||
if ngx.req.get_method() ~= 'GET' then
|
||||
if ngx.req.get_method() ~= 'GET' and ngx.req.get_method() ~= 'HEAD' then
|
||||
ngx.status = ngx.HTTP_UNAUTHORIZED
|
||||
ngx.say('Unauthorized')
|
||||
return ngx.exit(ngx.HTTP_UNAUTHORIZED)
|
||||
|
|
|
@ -42,6 +42,7 @@ metadata: image_search_json
|
|||
'processed', processed,
|
||||
'score', score,
|
||||
'size', image_size,
|
||||
'orig_size', image_orig_size,
|
||||
'sha512_hash', image_sha512_hash,
|
||||
'thumbnails_generated', thumbnails_generated,
|
||||
'updated_at', updated_at,
|
||||
|
|
|
@ -21,8 +21,8 @@ metadata: post_search_json
|
|||
'body', p.body,
|
||||
'subject', t.title,
|
||||
'ip', p.ip,
|
||||
'user_agent', p.user_agent,
|
||||
'referrer', p.referrer,
|
||||
'user_agent', '',
|
||||
'referrer', '',
|
||||
'fingerprint', p.fingerprint,
|
||||
'topic_position', p.topic_position,
|
||||
'forum', f.short_name,
|
||||
|
|
|
@ -121,7 +121,7 @@ defmodule Philomena.Adverts do
|
|||
"""
|
||||
def create_advert(attrs \\ %{}) do
|
||||
%Advert{}
|
||||
|> Advert.save_changeset(attrs)
|
||||
|> Advert.changeset(attrs)
|
||||
|> Uploader.analyze_upload(attrs)
|
||||
|> Repo.insert()
|
||||
|> case do
|
||||
|
@ -150,7 +150,7 @@ defmodule Philomena.Adverts do
|
|||
"""
|
||||
def update_advert(%Advert{} = advert, attrs) do
|
||||
advert
|
||||
|> Advert.save_changeset(attrs)
|
||||
|> Advert.changeset(attrs)
|
||||
|> Repo.update()
|
||||
end
|
||||
|
||||
|
|
|
@ -2,8 +2,6 @@ defmodule Philomena.Adverts.Advert do
|
|||
use Ecto.Schema
|
||||
import Ecto.Changeset
|
||||
|
||||
alias Philomena.Schema.Time
|
||||
|
||||
schema "adverts" do
|
||||
field :image, :string
|
||||
field :link, :string
|
||||
|
@ -11,8 +9,8 @@ defmodule Philomena.Adverts.Advert do
|
|||
field :clicks, :integer, default: 0
|
||||
field :impressions, :integer, default: 0
|
||||
field :live, :boolean, default: false
|
||||
field :start_date, :utc_datetime
|
||||
field :finish_date, :utc_datetime
|
||||
field :start_date, PhilomenaQuery.Ecto.RelativeDate
|
||||
field :finish_date, PhilomenaQuery.Ecto.RelativeDate
|
||||
field :restrictions, :string
|
||||
field :notes, :string
|
||||
|
||||
|
@ -24,29 +22,18 @@ defmodule Philomena.Adverts.Advert do
|
|||
field :uploaded_image, :string, virtual: true
|
||||
field :removed_image, :string, virtual: true
|
||||
|
||||
field :start_time, :string, virtual: true
|
||||
field :finish_time, :string, virtual: true
|
||||
|
||||
timestamps(inserted_at: :created_at, type: :utc_datetime)
|
||||
end
|
||||
|
||||
@doc false
|
||||
def changeset(advert, attrs) do
|
||||
advert
|
||||
|> cast(attrs, [])
|
||||
|> Time.propagate_time(:start_date, :start_time)
|
||||
|> Time.propagate_time(:finish_date, :finish_time)
|
||||
end
|
||||
|
||||
def save_changeset(advert, attrs) do
|
||||
advert
|
||||
|> cast(attrs, [:title, :link, :start_time, :finish_time, :live, :restrictions, :notes])
|
||||
|> Time.assign_time(:start_time, :start_date)
|
||||
|> Time.assign_time(:finish_time, :finish_date)
|
||||
|> cast(attrs, [:title, :link, :start_date, :finish_date, :live, :restrictions, :notes])
|
||||
|> validate_required([:title, :link, :start_date, :finish_date])
|
||||
|> validate_inclusion(:restrictions, ["none", "nsfw", "sfw"])
|
||||
end
|
||||
|
||||
@doc false
|
||||
def image_changeset(advert, attrs) do
|
||||
advert
|
||||
|> cast(attrs, [
|
||||
|
|
|
@ -4,7 +4,7 @@ defmodule Philomena.Adverts.Recorder do
|
|||
import Ecto.Query
|
||||
|
||||
def run(%{impressions: impressions, clicks: clicks}) do
|
||||
now = DateTime.utc_now() |> DateTime.truncate(:second)
|
||||
now = DateTime.utc_now(:second)
|
||||
|
||||
# Create insert statements for Ecto
|
||||
impressions = Enum.map(impressions, &impressions_insert_all(&1, now))
|
||||
|
|
|
@ -93,7 +93,7 @@ defmodule Philomena.ArtistLinks do
|
|||
|
||||
Multi.new()
|
||||
|> Multi.update(:artist_link, artist_link_changeset)
|
||||
|> Multi.run(:add_award, fn _repo, _changes -> BadgeAwarder.award_badge(artist_link) end)
|
||||
|> Multi.run(:add_award, BadgeAwarder.award_callback(artist_link, verifying_user))
|
||||
|> Repo.transaction()
|
||||
|> case do
|
||||
{:ok, %{artist_link: artist_link}} ->
|
||||
|
|
|
@ -15,8 +15,6 @@ defmodule Philomena.ArtistLinks.ArtistLink do
|
|||
|
||||
field :aasm_state, :string, default: "unverified"
|
||||
field :uri, :string
|
||||
field :hostname, :string
|
||||
field :path, :string
|
||||
field :verification_code, :string
|
||||
field :public, :boolean, default: true
|
||||
field :next_check_at, :utc_datetime
|
||||
|
@ -37,7 +35,6 @@ defmodule Philomena.ArtistLinks.ArtistLink do
|
|||
|> cast(attrs, [:uri, :public])
|
||||
|> put_change(:tag_id, nil)
|
||||
|> validate_required([:user, :uri, :public])
|
||||
|> parse_uri()
|
||||
end
|
||||
|
||||
def edit_changeset(artist_link, attrs, tag) do
|
||||
|
@ -45,7 +42,6 @@ defmodule Philomena.ArtistLinks.ArtistLink do
|
|||
|> cast(attrs, [:uri, :public])
|
||||
|> put_change(:tag_id, tag.id)
|
||||
|> validate_required([:user, :uri, :public])
|
||||
|> parse_uri()
|
||||
end
|
||||
|
||||
def creation_changeset(artist_link, attrs, user, tag) do
|
||||
|
@ -57,7 +53,6 @@ defmodule Philomena.ArtistLinks.ArtistLink do
|
|||
|> validate_required([:tag], message: "must exist")
|
||||
|> validate_format(:uri, ~r|\Ahttps?://|)
|
||||
|> validate_category()
|
||||
|> parse_uri()
|
||||
|> put_verification_code()
|
||||
|> put_next_check_at()
|
||||
|> unique_constraint([:uri, :tag_id, :user_id],
|
||||
|
@ -90,22 +85,13 @@ defmodule Philomena.ArtistLinks.ArtistLink do
|
|||
end
|
||||
|
||||
def contact_changeset(artist_link, user) do
|
||||
now = DateTime.utc_now() |> DateTime.truncate(:second)
|
||||
|
||||
change(artist_link)
|
||||
artist_link
|
||||
|> change()
|
||||
|> put_change(:contacted_by_user_id, user.id)
|
||||
|> put_change(:contacted_at, now)
|
||||
|> put_change(:contacted_at, DateTime.utc_now(:second))
|
||||
|> put_change(:aasm_state, "contacted")
|
||||
end
|
||||
|
||||
defp parse_uri(changeset) do
|
||||
string_uri = get_field(changeset, :uri) |> to_string()
|
||||
uri = URI.parse(string_uri)
|
||||
|
||||
changeset
|
||||
|> change(hostname: uri.host, path: uri.path)
|
||||
end
|
||||
|
||||
defp put_verification_code(changeset) do
|
||||
code = :crypto.strong_rand_bytes(5) |> Base.encode16()
|
||||
change(changeset, verification_code: "#{gettext("PHILOMENA-LINKVALIDATION")}-#{code}")
|
||||
|
@ -113,9 +99,9 @@ defmodule Philomena.ArtistLinks.ArtistLink do
|
|||
|
||||
defp put_next_check_at(changeset) do
|
||||
time =
|
||||
DateTime.utc_now()
|
||||
:second
|
||||
|> DateTime.utc_now()
|
||||
|> DateTime.add(60 * 2, :second)
|
||||
|> DateTime.truncate(:second)
|
||||
|
||||
change(changeset, next_check_at: time)
|
||||
end
|
||||
|
|
|
@ -16,13 +16,22 @@ defmodule Philomena.ArtistLinks.BadgeAwarder do
|
|||
Returns `{:ok, award}`, `{:ok, nil}`, or `{:error, changeset}`. The return value is
|
||||
suitable for use as the return value to an `Ecto.Multi.run/3` callback.
|
||||
"""
|
||||
def award_badge(artist_link) do
|
||||
def award_badge(artist_link, verifying_user) do
|
||||
with badge when not is_nil(badge) <- Badges.get_badge_by_title(@badge_title),
|
||||
award when is_nil(award) <- Badges.get_badge_award_for(badge, artist_link.user) do
|
||||
Badges.create_badge_award(artist_link.user, artist_link.user, %{badge_id: badge.id})
|
||||
Badges.create_badge_award(verifying_user, artist_link.user, %{badge_id: badge.id})
|
||||
else
|
||||
_ ->
|
||||
{:ok, nil}
|
||||
end
|
||||
end
|
||||
|
||||
@doc """
|
||||
Get a callback for issuing a badge award from within an `m:Ecto.Multi`.
|
||||
"""
|
||||
def award_callback(artist_link, verifying_user) do
|
||||
fn _repo, _changes ->
|
||||
award_badge(artist_link, verifying_user)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -26,9 +26,7 @@ defmodule Philomena.Badges.Award do
|
|||
end
|
||||
|
||||
defp put_awarded_on(%{data: %{awarded_on: nil}} = changeset) do
|
||||
now = DateTime.utc_now() |> DateTime.truncate(:second)
|
||||
|
||||
put_change(changeset, :awarded_on, now)
|
||||
put_change(changeset, :awarded_on, DateTime.utc_now(:second))
|
||||
end
|
||||
|
||||
defp put_awarded_on(changeset), do: changeset
|
||||
|
|
|
@ -56,7 +56,7 @@ defmodule Philomena.Bans do
|
|||
"""
|
||||
def create_fingerprint(creator, attrs \\ %{}) do
|
||||
%Fingerprint{banning_user_id: creator.id}
|
||||
|> Fingerprint.save_changeset(attrs)
|
||||
|> Fingerprint.changeset(attrs)
|
||||
|> Repo.insert()
|
||||
end
|
||||
|
||||
|
@ -74,7 +74,7 @@ defmodule Philomena.Bans do
|
|||
"""
|
||||
def update_fingerprint(%Fingerprint{} = fingerprint, attrs) do
|
||||
fingerprint
|
||||
|> Fingerprint.save_changeset(attrs)
|
||||
|> Fingerprint.changeset(attrs)
|
||||
|> Repo.update()
|
||||
end
|
||||
|
||||
|
@ -150,7 +150,7 @@ defmodule Philomena.Bans do
|
|||
"""
|
||||
def create_subnet(creator, attrs \\ %{}) do
|
||||
%Subnet{banning_user_id: creator.id}
|
||||
|> Subnet.save_changeset(attrs)
|
||||
|> Subnet.changeset(attrs)
|
||||
|> Repo.insert()
|
||||
end
|
||||
|
||||
|
@ -168,7 +168,7 @@ defmodule Philomena.Bans do
|
|||
"""
|
||||
def update_subnet(%Subnet{} = subnet, attrs) do
|
||||
subnet
|
||||
|> Subnet.save_changeset(attrs)
|
||||
|> Subnet.changeset(attrs)
|
||||
|> Repo.update()
|
||||
end
|
||||
|
||||
|
@ -245,7 +245,7 @@ defmodule Philomena.Bans do
|
|||
def create_user(creator, attrs \\ %{}) do
|
||||
changeset =
|
||||
%User{banning_user_id: creator.id}
|
||||
|> User.save_changeset(attrs)
|
||||
|> User.changeset(attrs)
|
||||
|
||||
Multi.new()
|
||||
|> Multi.insert(:user_ban, changeset)
|
||||
|
@ -276,7 +276,7 @@ defmodule Philomena.Bans do
|
|||
"""
|
||||
def update_user(%User{} = user, attrs) do
|
||||
user
|
||||
|> User.save_changeset(attrs)
|
||||
|> User.changeset(attrs)
|
||||
|> Repo.update()
|
||||
end
|
||||
|
||||
|
|
|
@ -1,10 +1,9 @@
|
|||
defmodule Philomena.Bans.Fingerprint do
|
||||
use Ecto.Schema
|
||||
import Ecto.Changeset
|
||||
import Philomena.Bans.IdGenerator
|
||||
|
||||
alias Philomena.Users.User
|
||||
alias Philomena.Schema.Time
|
||||
alias Philomena.Schema.BanId
|
||||
|
||||
schema "fingerprint_bans" do
|
||||
belongs_to :banning_user, User
|
||||
|
@ -12,27 +11,18 @@ defmodule Philomena.Bans.Fingerprint do
|
|||
field :reason, :string
|
||||
field :note, :string
|
||||
field :enabled, :boolean, default: true
|
||||
field :valid_until, :utc_datetime
|
||||
field :valid_until, PhilomenaQuery.Ecto.RelativeDate
|
||||
field :fingerprint, :string
|
||||
field :generated_ban_id, :string
|
||||
|
||||
field :until, :string, virtual: true
|
||||
|
||||
timestamps(inserted_at: :created_at, type: :utc_datetime)
|
||||
end
|
||||
|
||||
@doc false
|
||||
def changeset(fingerprint_ban, attrs) do
|
||||
fingerprint_ban
|
||||
|> cast(attrs, [])
|
||||
|> Time.propagate_time(:valid_until, :until)
|
||||
end
|
||||
|
||||
def save_changeset(fingerprint_ban, attrs) do
|
||||
fingerprint_ban
|
||||
|> cast(attrs, [:reason, :note, :enabled, :fingerprint, :until])
|
||||
|> Time.assign_time(:until, :valid_until)
|
||||
|> BanId.put_ban_id("F")
|
||||
|> cast(attrs, [:reason, :note, :enabled, :fingerprint, :valid_until])
|
||||
|> put_ban_id("F")
|
||||
|> validate_required([:reason, :enabled, :fingerprint, :valid_until])
|
||||
|> check_constraint(:valid_until, name: :fingerprint_ban_duration_must_be_valid)
|
||||
end
|
||||
|
|
|
@ -1,4 +1,6 @@
|
|||
defmodule Philomena.Schema.BanId do
|
||||
defmodule Philomena.Bans.IdGenerator do
|
||||
@moduledoc false
|
||||
|
||||
import Ecto.Changeset
|
||||
|
||||
def put_ban_id(%{data: %{generated_ban_id: nil}} = changeset, prefix) do
|
|
@ -1,10 +1,9 @@
|
|||
defmodule Philomena.Bans.Subnet do
|
||||
use Ecto.Schema
|
||||
import Ecto.Changeset
|
||||
import Philomena.Bans.IdGenerator
|
||||
|
||||
alias Philomena.Users.User
|
||||
alias Philomena.Schema.Time
|
||||
alias Philomena.Schema.BanId
|
||||
|
||||
schema "subnet_bans" do
|
||||
belongs_to :banning_user, User
|
||||
|
@ -12,27 +11,18 @@ defmodule Philomena.Bans.Subnet do
|
|||
field :reason, :string
|
||||
field :note, :string
|
||||
field :enabled, :boolean, default: true
|
||||
field :valid_until, :utc_datetime
|
||||
field :valid_until, PhilomenaQuery.Ecto.RelativeDate
|
||||
field :specification, EctoNetwork.INET
|
||||
field :generated_ban_id, :string
|
||||
|
||||
field :until, :string, virtual: true
|
||||
|
||||
timestamps(inserted_at: :created_at, type: :utc_datetime)
|
||||
end
|
||||
|
||||
@doc false
|
||||
def changeset(subnet_ban, attrs) do
|
||||
subnet_ban
|
||||
|> cast(attrs, [])
|
||||
|> Time.propagate_time(:valid_until, :until)
|
||||
end
|
||||
|
||||
def save_changeset(subnet_ban, attrs) do
|
||||
subnet_ban
|
||||
|> cast(attrs, [:reason, :note, :enabled, :specification, :until])
|
||||
|> Time.assign_time(:until, :valid_until)
|
||||
|> BanId.put_ban_id("S")
|
||||
|> cast(attrs, [:reason, :note, :enabled, :specification, :valid_until])
|
||||
|> put_ban_id("S")
|
||||
|> validate_required([:reason, :enabled, :specification, :valid_until])
|
||||
|> check_constraint(:valid_until, name: :subnet_ban_duration_must_be_valid)
|
||||
|> mask_specification()
|
||||
|
|
|
@ -1,11 +1,9 @@
|
|||
defmodule Philomena.Bans.User do
|
||||
use Ecto.Schema
|
||||
import Ecto.Changeset
|
||||
import Philomena.Bans.IdGenerator
|
||||
|
||||
alias Philomena.Users.User
|
||||
alias Philomena.Repo
|
||||
alias Philomena.Schema.Time
|
||||
alias Philomena.Schema.BanId
|
||||
|
||||
schema "user_bans" do
|
||||
belongs_to :user, User
|
||||
|
@ -14,48 +12,19 @@ defmodule Philomena.Bans.User do
|
|||
field :reason, :string
|
||||
field :note, :string
|
||||
field :enabled, :boolean, default: true
|
||||
field :valid_until, :utc_datetime
|
||||
field :valid_until, PhilomenaQuery.Ecto.RelativeDate
|
||||
field :generated_ban_id, :string
|
||||
field :override_ip_ban, :boolean, default: false
|
||||
|
||||
field :username, :string, virtual: true
|
||||
field :until, :string, virtual: true
|
||||
|
||||
timestamps(inserted_at: :created_at, type: :utc_datetime)
|
||||
end
|
||||
|
||||
@doc false
|
||||
def changeset(user_ban, attrs) do
|
||||
user_ban
|
||||
|> cast(attrs, [])
|
||||
|> Time.propagate_time(:valid_until, :until)
|
||||
|> populate_username()
|
||||
end
|
||||
|
||||
def save_changeset(user_ban, attrs) do
|
||||
user_ban
|
||||
|> cast(attrs, [:reason, :note, :enabled, :override_ip_ban, :username, :until])
|
||||
|> Time.assign_time(:until, :valid_until)
|
||||
|> populate_user_id()
|
||||
|> BanId.put_ban_id("U")
|
||||
|> cast(attrs, [:reason, :note, :enabled, :override_ip_ban, :user_id, :valid_until])
|
||||
|> put_ban_id("U")
|
||||
|> validate_required([:reason, :enabled, :user_id, :valid_until])
|
||||
|> check_constraint(:valid_until, name: :user_ban_duration_must_be_valid)
|
||||
end
|
||||
|
||||
defp populate_username(changeset) do
|
||||
case maybe_get_by(:id, get_field(changeset, :user_id)) do
|
||||
nil -> changeset
|
||||
user -> put_change(changeset, :username, user.name)
|
||||
end
|
||||
end
|
||||
|
||||
defp populate_user_id(changeset) do
|
||||
case maybe_get_by(:name, get_field(changeset, :username)) do
|
||||
nil -> changeset
|
||||
%{id: id} -> put_change(changeset, :user_id, id)
|
||||
end
|
||||
end
|
||||
|
||||
defp maybe_get_by(_field, nil), do: nil
|
||||
defp maybe_get_by(field, value), do: Repo.get_by(User, [{field, value}])
|
||||
end
|
||||
|
|
|
@ -9,6 +9,11 @@ defmodule Philomena.Channels do
|
|||
alias Philomena.Channels.AutomaticUpdater
|
||||
alias Philomena.Channels.Channel
|
||||
alias Philomena.Notifications
|
||||
alias Philomena.Tags
|
||||
|
||||
use Philomena.Subscriptions,
|
||||
on_delete: :clear_channel_notification,
|
||||
id_name: :channel_id
|
||||
|
||||
@doc """
|
||||
Updates all the tracked channels for which an update scheme is known.
|
||||
|
@ -47,6 +52,7 @@ defmodule Philomena.Channels do
|
|||
"""
|
||||
def create_channel(attrs \\ %{}) do
|
||||
%Channel{}
|
||||
|> update_artist_tag(attrs)
|
||||
|> Channel.changeset(attrs)
|
||||
|> Repo.insert()
|
||||
end
|
||||
|
@ -65,10 +71,29 @@ defmodule Philomena.Channels do
|
|||
"""
|
||||
def update_channel(%Channel{} = channel, attrs) do
|
||||
channel
|
||||
|> update_artist_tag(attrs)
|
||||
|> Channel.changeset(attrs)
|
||||
|> Repo.update()
|
||||
end
|
||||
|
||||
@doc """
|
||||
Adds the artist tag from the `"artist_tag"` tag name attribute.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> update_artist_tag(%Channel{}, %{"artist_tag" => "artist:nighty"})
|
||||
%Ecto.Changeset{}
|
||||
|
||||
"""
|
||||
def update_artist_tag(%Channel{} = channel, attrs) do
|
||||
tag =
|
||||
attrs
|
||||
|> Map.get("artist_tag", "")
|
||||
|> Tags.get_tag_by_name()
|
||||
|
||||
Channel.artist_tag_changeset(channel, tag)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Updates a channel's state when it goes live.
|
||||
|
||||
|
@ -116,68 +141,17 @@ defmodule Philomena.Channels do
|
|||
Channel.changeset(channel, %{})
|
||||
end
|
||||
|
||||
alias Philomena.Channels.Subscription
|
||||
|
||||
@doc """
|
||||
Creates a subscription.
|
||||
Removes all channel notifications for a given channel and user.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> create_subscription(%{field: value})
|
||||
{:ok, %Subscription{}}
|
||||
|
||||
iex> create_subscription(%{field: bad_value})
|
||||
{:error, %Ecto.Changeset{}}
|
||||
iex> clear_channel_notification(channel, user)
|
||||
:ok
|
||||
|
||||
"""
|
||||
def create_subscription(_channel, nil), do: {:ok, nil}
|
||||
|
||||
def create_subscription(channel, user) do
|
||||
%Subscription{channel_id: channel.id, user_id: user.id}
|
||||
|> Subscription.changeset(%{})
|
||||
|> Repo.insert(on_conflict: :nothing)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Deletes a Subscription.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> delete_subscription(subscription)
|
||||
{:ok, %Subscription{}}
|
||||
|
||||
iex> delete_subscription(subscription)
|
||||
{:error, %Ecto.Changeset{}}
|
||||
|
||||
"""
|
||||
def delete_subscription(channel, user) do
|
||||
clear_notification(channel, user)
|
||||
|
||||
%Subscription{channel_id: channel.id, user_id: user.id}
|
||||
|> Repo.delete()
|
||||
end
|
||||
|
||||
def subscribed?(_channel, nil), do: false
|
||||
|
||||
def subscribed?(channel, user) do
|
||||
Subscription
|
||||
|> where(channel_id: ^channel.id, user_id: ^user.id)
|
||||
|> Repo.exists?()
|
||||
end
|
||||
|
||||
def subscriptions(_channels, nil), do: %{}
|
||||
|
||||
def subscriptions(channels, user) do
|
||||
channel_ids = Enum.map(channels, & &1.id)
|
||||
|
||||
Subscription
|
||||
|> where([s], s.channel_id in ^channel_ids and s.user_id == ^user.id)
|
||||
|> Repo.all()
|
||||
|> Map.new(&{&1.channel_id, true})
|
||||
end
|
||||
|
||||
def clear_notification(channel, user) do
|
||||
Notifications.delete_unread_notification("Channel", channel.id, user)
|
||||
Notifications.delete_unread_notification("LivestreamChannel", channel.id, user)
|
||||
def clear_channel_notification(%Channel{} = channel, user) do
|
||||
Notifications.clear_channel_live_notification(channel, user)
|
||||
:ok
|
||||
end
|
||||
end
|
||||
|
|
|
@ -3,7 +3,6 @@ defmodule Philomena.Channels.Channel do
|
|||
import Ecto.Changeset
|
||||
|
||||
alias Philomena.Tags.Tag
|
||||
alias Philomena.Repo
|
||||
|
||||
schema "channels" do
|
||||
belongs_to :associated_artist_tag, Tag
|
||||
|
@ -13,22 +12,12 @@ defmodule Philomena.Channels.Channel do
|
|||
|
||||
field :short_name, :string
|
||||
field :title, :string, default: ""
|
||||
field :tags, :string
|
||||
field :viewers, :integer, default: 0
|
||||
field :nsfw, :boolean, default: false
|
||||
field :is_live, :boolean, default: false
|
||||
field :last_fetched_at, :utc_datetime
|
||||
field :next_check_at, :utc_datetime
|
||||
field :last_live_at, :utc_datetime
|
||||
|
||||
field :viewer_minutes_today, :integer, default: 0
|
||||
field :viewer_minutes_thisweek, :integer, default: 0
|
||||
field :viewer_minutes_thismonth, :integer, default: 0
|
||||
field :total_viewer_minutes, :integer, default: 0
|
||||
|
||||
field :banner_image, :string
|
||||
field :channel_image, :string
|
||||
field :remote_stream_id, :integer
|
||||
field :thumbnail_url, :string, default: ""
|
||||
|
||||
timestamps(inserted_at: :created_at, type: :utc_datetime)
|
||||
|
@ -36,19 +25,13 @@ defmodule Philomena.Channels.Channel do
|
|||
|
||||
@doc false
|
||||
def changeset(channel, attrs) do
|
||||
tag_id =
|
||||
case Repo.get_by(Tag, name: attrs["artist_tag"] || "") do
|
||||
%{id: id} -> id
|
||||
_ -> nil
|
||||
end
|
||||
|
||||
channel
|
||||
|> cast(attrs, [:type, :short_name])
|
||||
|> validate_required([:type, :short_name])
|
||||
|> validate_inclusion(:type, ["PicartoChannel", "PiczelChannel"])
|
||||
|> put_change(:associated_artist_tag_id, tag_id)
|
||||
end
|
||||
|
||||
@doc false
|
||||
def update_changeset(channel, attrs) do
|
||||
cast(channel, attrs, [
|
||||
:title,
|
||||
|
@ -60,4 +43,11 @@ defmodule Philomena.Channels.Channel do
|
|||
:last_live_at
|
||||
])
|
||||
end
|
||||
|
||||
@doc false
|
||||
def artist_tag_changeset(channel, tag) do
|
||||
tag_id = Map.get(tag || %{}, :id)
|
||||
|
||||
change(channel, associated_artist_tag_id: tag_id)
|
||||
end
|
||||
end
|
||||
|
|
|
@ -8,7 +8,6 @@ defmodule Philomena.Comments do
|
|||
alias Philomena.Repo
|
||||
|
||||
alias PhilomenaQuery.Search
|
||||
alias Philomena.Reports.Report
|
||||
alias Philomena.UserStatistics
|
||||
alias Philomena.Comments.Comment
|
||||
alias Philomena.Comments.SearchIndex, as: CommentIndex
|
||||
|
@ -16,10 +15,8 @@ defmodule Philomena.Comments do
|
|||
alias Philomena.Images.Image
|
||||
alias Philomena.Images
|
||||
alias Philomena.Notifications
|
||||
alias Philomena.NotificationWorker
|
||||
alias Philomena.Versions
|
||||
alias Philomena.Reports
|
||||
alias Philomena.Users.User
|
||||
|
||||
@doc """
|
||||
Gets a single comment.
|
||||
|
@ -58,52 +55,20 @@ defmodule Philomena.Comments do
|
|||
Image
|
||||
|> where(id: ^image.id)
|
||||
|
||||
image_lock_query =
|
||||
lock(image_query, "FOR UPDATE")
|
||||
|
||||
Multi.new()
|
||||
|> Multi.one(:image, image_lock_query)
|
||||
|> Multi.insert(:comment, comment)
|
||||
|> Multi.update_all(:image, image_query, inc: [comments_count: 1])
|
||||
|> maybe_create_subscription_on_reply(image, attribution[:user])
|
||||
|> Multi.update_all(:update_image, image_query, inc: [comments_count: 1])
|
||||
|> Multi.run(:notification, ¬ify_comment/2)
|
||||
|> Images.maybe_subscribe_on(:image, attribution[:user], :watch_on_reply)
|
||||
|> Repo.transaction()
|
||||
end
|
||||
|
||||
defp maybe_create_subscription_on_reply(multi, image, %User{watch_on_reply: true} = user) do
|
||||
multi
|
||||
|> Multi.run(:subscribe, fn _repo, _changes ->
|
||||
Images.create_subscription(image, user)
|
||||
end)
|
||||
end
|
||||
|
||||
defp maybe_create_subscription_on_reply(multi, _image, _user) do
|
||||
multi
|
||||
end
|
||||
|
||||
def notify_comment(comment) do
|
||||
Exq.enqueue(Exq, "notifications", NotificationWorker, ["Comments", comment.id])
|
||||
end
|
||||
|
||||
def perform_notify(comment_id) do
|
||||
comment = get_comment!(comment_id)
|
||||
|
||||
image =
|
||||
comment
|
||||
|> Repo.preload(:image)
|
||||
|> Map.fetch!(:image)
|
||||
|
||||
subscriptions =
|
||||
image
|
||||
|> Repo.preload(:subscriptions)
|
||||
|> Map.fetch!(:subscriptions)
|
||||
|
||||
Notifications.notify(
|
||||
comment,
|
||||
subscriptions,
|
||||
%{
|
||||
actor_id: image.id,
|
||||
actor_type: "Image",
|
||||
actor_child_id: comment.id,
|
||||
actor_child_type: "Comment",
|
||||
action: "commented on"
|
||||
}
|
||||
)
|
||||
defp notify_comment(_repo, %{image: image, comment: comment}) do
|
||||
Notifications.create_image_comment_notification(comment.user, image, comment)
|
||||
end
|
||||
|
||||
@doc """
|
||||
|
@ -119,7 +84,7 @@ defmodule Philomena.Comments do
|
|||
|
||||
"""
|
||||
def update_comment(%Comment{} = comment, editor, attrs) do
|
||||
now = DateTime.utc_now() |> DateTime.truncate(:second)
|
||||
now = DateTime.utc_now(:second)
|
||||
current_body = comment.body
|
||||
current_reason = comment.edit_reason
|
||||
|
||||
|
@ -153,17 +118,12 @@ defmodule Philomena.Comments do
|
|||
end
|
||||
|
||||
def hide_comment(%Comment{} = comment, attrs, user) do
|
||||
reports =
|
||||
Report
|
||||
|> where(reportable_type: "Comment", reportable_id: ^comment.id)
|
||||
|> select([r], r.id)
|
||||
|> update(set: [open: false, state: "closed", admin_id: ^user.id])
|
||||
|
||||
report_query = Reports.close_report_query({"Comment", comment.id}, user)
|
||||
comment = Comment.hide_changeset(comment, attrs, user)
|
||||
|
||||
Multi.new()
|
||||
|> Multi.update(:comment, comment)
|
||||
|> Multi.update_all(:reports, reports, [])
|
||||
|> Multi.update_all(:reports, report_query, [])
|
||||
|> Repo.transaction()
|
||||
|> case do
|
||||
{:ok, %{comment: comment, reports: {_count, reports}}} ->
|
||||
|
@ -199,21 +159,15 @@ defmodule Philomena.Comments do
|
|||
end
|
||||
|
||||
def approve_comment(%Comment{} = comment, user) do
|
||||
reports =
|
||||
Report
|
||||
|> where(reportable_type: "Comment", reportable_id: ^comment.id)
|
||||
|> select([r], r.id)
|
||||
|> update(set: [open: false, state: "closed", admin_id: ^user.id])
|
||||
|
||||
report_query = Reports.close_report_query({"Comment", comment.id}, user)
|
||||
comment = Comment.approve_changeset(comment)
|
||||
|
||||
Multi.new()
|
||||
|> Multi.update(:comment, comment)
|
||||
|> Multi.update_all(:reports, reports, [])
|
||||
|> Multi.update_all(:reports, report_query, [])
|
||||
|> Repo.transaction()
|
||||
|> case do
|
||||
{:ok, %{comment: comment, reports: {_count, reports}}} ->
|
||||
notify_comment(comment)
|
||||
UserStatistics.inc_stat(comment.user, :comments_posted)
|
||||
Reports.reindex_reports(reports)
|
||||
reindex_comment(comment)
|
||||
|
@ -229,8 +183,7 @@ defmodule Philomena.Comments do
|
|||
|
||||
def report_non_approved(comment) do
|
||||
Reports.create_system_report(
|
||||
comment.id,
|
||||
"Comment",
|
||||
{"Comment", comment.id},
|
||||
"Approval",
|
||||
"Comment contains externally-embedded images and has been flagged for review."
|
||||
)
|
||||
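This hunk moves comment notification delivery out of the Exq NotificationWorker and into the creation transaction itself: a Multi.run(:notification, &notify_comment/2) step runs after :image and :comment, and notify_comment/2 receives the repo plus the accumulated changes map. A minimal sketch of that callback shape, using the step names from the hunk and assuming Notifications.create_image_comment_notification/3 returns the {:ok, _} / {:error, _} tuple that Multi.run requires:

    Multi.new()
    |> Multi.one(:image, image_lock_query)
    |> Multi.insert(:comment, comment)
    |> Multi.run(:notification, fn _repo, %{image: image, comment: comment} ->
      # an {:error, _} return here rolls the whole insert back
      Notifications.create_image_comment_notification(comment.user, image, comment)
    end)
    |> Repo.transaction()

The DateTime.utc_now() |> DateTime.truncate(:second) pipelines replaced by DateTime.utc_now(:second) throughout these files presumably rely on the single-argument time-unit form available in newer Elixir releases.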
@ -14,15 +14,12 @@ defmodule Philomena.Comments.Comment do
|
|||
field :body, :string
|
||||
field :ip, EctoNetwork.INET
|
||||
field :fingerprint, :string
|
||||
field :user_agent, :string, default: ""
|
||||
field :referrer, :string, default: ""
|
||||
field :anonymous, :boolean, default: false
|
||||
field :hidden_from_users, :boolean, default: false
|
||||
field :edit_reason, :string
|
||||
field :edited_at, :utc_datetime
|
||||
field :deletion_reason, :string, default: ""
|
||||
field :destroyed_content, :boolean, default: false
|
||||
field :name_at_post_time, :string
|
||||
field :approved, :boolean
|
||||
|
||||
timestamps(inserted_at: :created_at, type: :utc_datetime)
|
||||
|
@ -35,7 +32,6 @@ defmodule Philomena.Comments.Comment do
|
|||
|> validate_required([:body])
|
||||
|> validate_length(:body, min: 1, max: 300_000, count: :bytes)
|
||||
|> change(attribution)
|
||||
|> put_name_at_post_time(attribution[:user])
|
||||
|> Approval.maybe_put_approval(attribution[:user])
|
||||
|> Approval.maybe_strip_images(attribution[:user])
|
||||
end
|
||||
|
@ -74,7 +70,4 @@ defmodule Philomena.Comments.Comment do
|
|||
change(comment)
|
||||
|> put_change(:approved, true)
|
||||
end
|
||||
|
||||
defp put_name_at_post_time(changeset, nil), do: changeset
|
||||
defp put_name_at_post_time(changeset, user), do: change(changeset, name_at_post_time: user.name)
|
||||
end
|
||||
|
|
|
@ -92,8 +92,8 @@ defmodule Philomena.Comments.Query do
|
|||
|> Parser.parse(query_string, context)
|
||||
end
|
||||
|
||||
def compile(user, query_string) do
|
||||
query_string = query_string || ""
|
||||
def compile(query_string, opts \\ []) do
|
||||
user = Keyword.get(opts, :user)
|
||||
|
||||
case user do
|
||||
nil ->
|
||||
|
|
|
@ -6,76 +6,112 @@ defmodule Philomena.Conversations do
|
|||
import Ecto.Query, warn: false
|
||||
alias Ecto.Multi
|
||||
alias Philomena.Repo
|
||||
alias Philomena.Reports
|
||||
alias Philomena.Reports.Report
|
||||
alias Philomena.Conversations.Conversation
|
||||
alias Philomena.Conversations.Message
|
||||
alias Philomena.Reports
|
||||
alias Philomena.Users
|
||||
|
||||
@doc """
|
||||
Gets a single conversation.
|
||||
Returns the number of unread conversations for the given user.
|
||||
|
||||
Raises `Ecto.NoResultsError` if the Conversation does not exist.
|
||||
Conversations hidden by the given user are not counted.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> get_conversation!(123)
|
||||
%Conversation{}
|
||||
iex> count_unread_conversations(user1)
|
||||
0
|
||||
|
||||
iex> get_conversation!(456)
|
||||
** (Ecto.NoResultsError)
|
||||
iex> count_unread_conversations(user2)
|
||||
7
|
||||
|
||||
"""
|
||||
def get_conversation!(id), do: Repo.get!(Conversation, id)
|
||||
def count_unread_conversations(user) do
|
||||
Conversation
|
||||
|> where(
|
||||
[c],
|
||||
((c.to_id == ^user.id and c.to_read == false) or
|
||||
(c.from_id == ^user.id and c.from_read == false)) and
|
||||
not ((c.to_id == ^user.id and c.to_hidden == true) or
|
||||
(c.from_id == ^user.id and c.from_hidden == true))
|
||||
)
|
||||
|> Repo.aggregate(:count)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Returns a `m:Scrivener.Page` of conversations between the partner and the user.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> list_conversations_with("123", %User{}, page_size: 10)
|
||||
%Scrivener.Page{}
|
||||
|
||||
"""
|
||||
def list_conversations_with(partner_id, user, pagination) do
|
||||
query =
|
||||
from c in Conversation,
|
||||
where:
|
||||
(c.from_id == ^partner_id and c.to_id == ^user.id) or
|
||||
(c.to_id == ^partner_id and c.from_id == ^user.id)
|
||||
|
||||
list_conversations(query, user, pagination)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Returns a `m:Scrivener.Page` of conversations sent by or received from the user.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> list_conversations_with("123", %User{}, page_size: 10)
|
||||
%Scrivener.Page{}
|
||||
|
||||
"""
|
||||
def list_conversations(queryable \\ Conversation, user, pagination) do
|
||||
query =
|
||||
from c in queryable,
|
||||
as: :conversations,
|
||||
where:
|
||||
(c.from_id == ^user.id and not c.from_hidden) or
|
||||
(c.to_id == ^user.id and not c.to_hidden),
|
||||
inner_lateral_join:
|
||||
cnt in subquery(
|
||||
from m in Message,
|
||||
where: m.conversation_id == parent_as(:conversations).id,
|
||||
select: %{count: count()}
|
||||
),
|
||||
on: true,
|
||||
order_by: [desc: :last_message_at],
|
||||
preload: [:to, :from],
|
||||
select: %{c | message_count: cnt.count}
|
||||
|
||||
Repo.paginate(query, pagination)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Creates a conversation.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> create_conversation(%{field: value})
|
||||
iex> create_conversation(from, to, %{field: value})
|
||||
{:ok, %Conversation{}}
|
||||
|
||||
iex> create_conversation(%{field: bad_value})
|
||||
iex> create_conversation(from, to, %{field: bad_value})
|
||||
{:error, %Ecto.Changeset{}}
|
||||
|
||||
"""
|
||||
def create_conversation(from, attrs \\ %{}) do
|
||||
to = Users.get_user_by_name(attrs["recipient"])
|
||||
|
||||
%Conversation{}
|
||||
|> Conversation.creation_changeset(from, attrs)
|
||||
|> Conversation.creation_changeset(from, to, attrs)
|
||||
|> Repo.insert()
|
||||
end
|
||||
|> case do
|
||||
{:ok, conversation} ->
|
||||
report_non_approved_message(hd(conversation.messages))
|
||||
{:ok, conversation}
|
||||
|
||||
@doc """
|
||||
Updates a conversation.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> update_conversation(conversation, %{field: new_value})
|
||||
{:ok, %Conversation{}}
|
||||
|
||||
iex> update_conversation(conversation, %{field: bad_value})
|
||||
{:error, %Ecto.Changeset{}}
|
||||
|
||||
"""
|
||||
def update_conversation(%Conversation{} = conversation, attrs) do
|
||||
conversation
|
||||
|> Conversation.changeset(attrs)
|
||||
|> Repo.update()
|
||||
end
|
||||
|
||||
@doc """
|
||||
Deletes a Conversation.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> delete_conversation(conversation)
|
||||
{:ok, %Conversation{}}
|
||||
|
||||
iex> delete_conversation(conversation)
|
||||
{:error, %Ecto.Changeset{}}
|
||||
|
||||
"""
|
||||
def delete_conversation(%Conversation{} = conversation) do
|
||||
Repo.delete(conversation)
|
||||
error ->
|
||||
error
|
||||
end
|
||||
end
|
||||
|
||||
@doc """
|
||||
|
@ -91,201 +127,221 @@ defmodule Philomena.Conversations do
|
|||
Conversation.changeset(conversation, %{})
|
||||
end
|
||||
|
||||
def count_unread_conversations(user) do
|
||||
Conversation
|
||||
|> where(
|
||||
[c],
|
||||
((c.to_id == ^user.id and c.to_read == false) or
|
||||
(c.from_id == ^user.id and c.from_read == false)) and
|
||||
not ((c.to_id == ^user.id and c.to_hidden == true) or
|
||||
(c.from_id == ^user.id and c.from_hidden == true))
|
||||
)
|
||||
|> Repo.aggregate(:count, :id)
|
||||
end
|
||||
|
||||
def mark_conversation_read(conversation, user, read \\ true)
|
||||
|
||||
def mark_conversation_read(
|
||||
%Conversation{to_id: user_id, from_id: user_id} = conversation,
|
||||
%{id: user_id},
|
||||
read
|
||||
) do
|
||||
conversation
|
||||
|> Conversation.read_changeset(%{to_read: read, from_read: read})
|
||||
|> Repo.update()
|
||||
end
|
||||
|
||||
def mark_conversation_read(%Conversation{to_id: user_id} = conversation, %{id: user_id}, read) do
|
||||
conversation
|
||||
|> Conversation.read_changeset(%{to_read: read})
|
||||
|> Repo.update()
|
||||
end
|
||||
|
||||
def mark_conversation_read(%Conversation{from_id: user_id} = conversation, %{id: user_id}, read) do
|
||||
conversation
|
||||
|> Conversation.read_changeset(%{from_read: read})
|
||||
|> Repo.update()
|
||||
end
|
||||
|
||||
def mark_conversation_read(_conversation, _user, _read), do: {:ok, nil}
|
||||
|
||||
def mark_conversation_hidden(conversation, user, hidden \\ true)
|
||||
|
||||
def mark_conversation_hidden(
|
||||
%Conversation{to_id: user_id} = conversation,
|
||||
%{id: user_id},
|
||||
hidden
|
||||
) do
|
||||
conversation
|
||||
|> Conversation.hidden_changeset(%{to_hidden: hidden})
|
||||
|> Repo.update()
|
||||
end
|
||||
|
||||
def mark_conversation_hidden(
|
||||
%Conversation{from_id: user_id} = conversation,
|
||||
%{id: user_id},
|
||||
hidden
|
||||
) do
|
||||
conversation
|
||||
|> Conversation.hidden_changeset(%{from_hidden: hidden})
|
||||
|> Repo.update()
|
||||
end
|
||||
|
||||
def mark_conversation_hidden(_conversation, _user, _read), do: {:ok, nil}
|
||||
|
||||
alias Philomena.Conversations.Message
|
||||
|
||||
@doc """
|
||||
Gets a single message.
|
||||
|
||||
Raises `Ecto.NoResultsError` if the Message does not exist.
|
||||
Marks a conversation as read or unread from the perspective of the given user.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> get_message!(123)
|
||||
%Message{}
|
||||
iex> mark_conversation_read(conversation, user, true)
|
||||
{:ok, %Conversation{}}
|
||||
|
||||
iex> get_message!(456)
|
||||
** (Ecto.NoResultsError)
|
||||
iex> mark_conversation_read(conversation, user, false)
|
||||
{:ok, %Conversation{}}
|
||||
|
||||
iex> mark_conversation_read(conversation, %User{}, true)
|
||||
{:error, %Ecto.Changeset{}}
|
||||
|
||||
"""
|
||||
def get_message!(id), do: Repo.get!(Message, id)
|
||||
def mark_conversation_read(%Conversation{} = conversation, user, read \\ true) do
|
||||
changes =
|
||||
%{}
|
||||
|> put_conditional(:to_read, read, conversation.to_id == user.id)
|
||||
|> put_conditional(:from_read, read, conversation.from_id == user.id)
|
||||
|
||||
conversation
|
||||
|> Conversation.read_changeset(changes)
|
||||
|> Repo.update()
|
||||
end
|
||||
|
||||
@doc """
|
||||
Creates a message.
|
||||
Marks a conversation as hidden or visible from the perspective of the given user.
|
||||
|
||||
Hidden conversations are not shown in the list of conversations for the user, and
|
||||
are not counted when retrieving the number of unread conversations.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> create_message(%{field: value})
|
||||
iex> mark_conversation_hidden(conversation, user, true)
|
||||
{:ok, %Conversation{}}
|
||||
|
||||
iex> mark_conversation_hidden(conversation, user, false)
|
||||
{:ok, %Conversation{}}
|
||||
|
||||
iex> mark_conversation_hidden(conversation, %User{}, true)
|
||||
{:error, %Ecto.Changeset{}}
|
||||
|
||||
"""
|
||||
def mark_conversation_hidden(%Conversation{} = conversation, user, hidden \\ true) do
|
||||
changes =
|
||||
%{}
|
||||
|> put_conditional(:to_hidden, hidden, conversation.to_id == user.id)
|
||||
|> put_conditional(:from_hidden, hidden, conversation.from_id == user.id)
|
||||
|
||||
conversation
|
||||
|> Conversation.hidden_changeset(changes)
|
||||
|> Repo.update()
|
||||
end
|
||||
|
||||
defp put_conditional(map, key, value, condition) do
|
||||
if condition do
|
||||
Map.put(map, key, value)
|
||||
else
|
||||
map
|
||||
end
|
||||
end
|
||||
|
||||
@doc """
|
||||
Returns the number of messages in the given conversation.
|
||||
|
||||
## Example
|
||||
|
||||
iex> count_messages(%Conversation{})
|
||||
3
|
||||
|
||||
"""
|
||||
def count_messages(conversation) do
|
||||
Message
|
||||
|> where(conversation_id: ^conversation.id)
|
||||
|> Repo.aggregate(:count)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Returns a `m:Scrivener.Page` of 2-tuples of messages and rendered output
|
||||
within a conversation.
|
||||
|
||||
Messages are ordered by user message preference (`messages_newest_first`).
|
||||
|
||||
When coerced to a list and rendered as Markdown, the result may look like:
|
||||
|
||||
[
|
||||
{%Message{body: "hello *world*"}, "hello <strong>world</strong>"}
|
||||
]
|
||||
|
||||
## Example
|
||||
|
||||
iex> list_messages(%Conversation{}, %User{}, & &1.body, page_size: 10)
|
||||
%Scrivener.Page{}
|
||||
|
||||
"""
|
||||
def list_messages(conversation, user, collection_renderer, pagination) do
|
||||
direction =
|
||||
if user.messages_newest_first do
|
||||
:desc
|
||||
else
|
||||
:asc
|
||||
end
|
||||
|
||||
query =
|
||||
from m in Message,
|
||||
where: m.conversation_id == ^conversation.id,
|
||||
order_by: [{^direction, :created_at}],
|
||||
preload: :from
|
||||
|
||||
messages = Repo.paginate(query, pagination)
|
||||
rendered = collection_renderer.(messages)
|
||||
|
||||
put_in(messages.entries, Enum.zip(messages.entries, rendered))
|
||||
end
|
||||
|
||||
@doc """
|
||||
Creates a message within a conversation.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> create_message(%Conversation{}, %User{}, %{field: value})
|
||||
{:ok, %Message{}}
|
||||
|
||||
iex> create_message(%{field: bad_value})
|
||||
iex> create_message(%Conversation{}, %User{}, %{field: bad_value})
|
||||
{:error, %Ecto.Changeset{}}
|
||||
|
||||
"""
|
||||
def create_message(conversation, user, attrs \\ %{}) do
|
||||
message =
|
||||
Ecto.build_assoc(conversation, :messages)
|
||||
message_changeset =
|
||||
conversation
|
||||
|> Ecto.build_assoc(:messages)
|
||||
|> Message.creation_changeset(attrs, user)
|
||||
|
||||
show_as_read =
|
||||
case message do
|
||||
%{changes: %{approved: true}} -> false
|
||||
_ -> true
|
||||
end
|
||||
|
||||
conversation_query =
|
||||
Conversation
|
||||
|> where(id: ^conversation.id)
|
||||
|
||||
now = DateTime.utc_now()
|
||||
conversation_changeset =
|
||||
Conversation.new_message_changeset(conversation)
|
||||
|
||||
Multi.new()
|
||||
|> Multi.insert(:message, message)
|
||||
|> Multi.update_all(:conversation, conversation_query,
|
||||
set: [from_read: show_as_read, to_read: show_as_read, last_message_at: now]
|
||||
)
|
||||
|> Repo.transaction()
|
||||
end
|
||||
|
||||
def approve_conversation_message(message, user) do
|
||||
reports_query =
|
||||
Report
|
||||
|> where(reportable_type: "Conversation", reportable_id: ^message.conversation_id)
|
||||
|> select([r], r.id)
|
||||
|> update(set: [open: false, state: "closed", admin_id: ^user.id])
|
||||
|
||||
message_query =
|
||||
message
|
||||
|> Message.approve_changeset()
|
||||
|
||||
conversation_query =
|
||||
Conversation
|
||||
|> where(id: ^message.conversation_id)
|
||||
|
||||
Multi.new()
|
||||
|> Multi.update(:message, message_query)
|
||||
|> Multi.update_all(:conversation, conversation_query, set: [to_read: false])
|
||||
|> Multi.update_all(:reports, reports_query, [])
|
||||
|> Multi.insert(:message, message_changeset)
|
||||
|> Multi.update(:conversation, conversation_changeset)
|
||||
|> Repo.transaction()
|
||||
|> case do
|
||||
{:ok, %{reports: {_count, reports}} = result} ->
|
||||
Reports.reindex_reports(reports)
|
||||
{:ok, %{message: message}} ->
|
||||
report_non_approved_message(message)
|
||||
{:ok, message}
|
||||
|
||||
{:ok, result}
|
||||
|
||||
error ->
|
||||
error
|
||||
_error ->
|
||||
{:error, message_changeset}
|
||||
end
|
||||
end
|
||||
|
||||
def report_non_approved(id) do
|
||||
Reports.create_system_report(
|
||||
id,
|
||||
"Conversation",
|
||||
"Approval",
|
||||
"PM contains externally-embedded images and has been flagged for review."
|
||||
)
|
||||
end
|
||||
|
||||
def set_as_read(conversation) do
|
||||
conversation
|
||||
|> Conversation.to_read_changeset()
|
||||
|> Repo.update()
|
||||
end
|
||||
|
||||
@doc """
|
||||
Updates a message.
|
||||
Approves a previously-posted message which was not approved at post time.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> update_message(message, %{field: new_value})
|
||||
iex> approve_message(%Message{}, %User{})
|
||||
{:ok, %Message{}}
|
||||
|
||||
iex> update_message(message, %{field: bad_value})
|
||||
iex> approve_message(%Message{}, %User{})
|
||||
{:error, %Ecto.Changeset{}}
|
||||
|
||||
"""
|
||||
def update_message(%Message{} = message, attrs) do
|
||||
message
|
||||
|> Message.changeset(attrs)
|
||||
|> Repo.update()
|
||||
def approve_message(message, approving_user) do
|
||||
message_changeset = Message.approve_changeset(message)
|
||||
|
||||
conversation_update_query =
|
||||
from c in Conversation,
|
||||
where: c.id == ^message.conversation_id,
|
||||
update: [set: [from_read: false, to_read: false]]
|
||||
|
||||
reports_query =
|
||||
Reports.close_report_query({"Conversation", message.conversation_id}, approving_user)
|
||||
|
||||
Multi.new()
|
||||
|> Multi.update(:message, message_changeset)
|
||||
|> Multi.update_all(:conversation, conversation_update_query, [])
|
||||
|> Multi.update_all(:reports, reports_query, [])
|
||||
|> Repo.transaction()
|
||||
|> case do
|
||||
{:ok, %{reports: {_count, reports}, message: message}} ->
|
||||
Reports.reindex_reports(reports)
|
||||
|
||||
message
|
||||
|
||||
_error ->
|
||||
{:error, message_changeset}
|
||||
end
|
||||
end
|
||||
|
||||
@doc """
|
||||
Deletes a Message.
|
||||
Generates a system report for an unapproved message.
|
||||
|
||||
This is called by `create_conversation/2` and `create_message/3`, so it normally does not
|
||||
need to be called explicitly.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> delete_message(message)
|
||||
{:ok, %Message{}}
|
||||
iex> report_non_approved_message(%Message{approved: false})
|
||||
{:ok, %Report{}}
|
||||
|
||||
iex> delete_message(message)
|
||||
{:error, %Ecto.Changeset{}}
|
||||
iex> report_non_approved_message(%Message{approved: true})
|
||||
{:ok, nil}
|
||||
|
||||
"""
|
||||
def delete_message(%Message{} = message) do
|
||||
Repo.delete(message)
|
||||
def report_non_approved_message(message) do
|
||||
if message.approved do
|
||||
{:ok, nil}
|
||||
else
|
||||
Reports.create_system_report(
|
||||
{"Conversation", message.conversation_id},
|
||||
"Approval",
|
||||
"PM contains externally-embedded images and has been flagged for review."
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
@doc """
|
||||
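The four pattern-matched clauses of mark_conversation_read/3 and mark_conversation_hidden/3 collapse into one function each: put_conditional/4 only adds a key when its condition holds, so a user who is both sender and recipient of a self-conversation gets both flags set in a single update, while everyone else updates only the side they own. A short illustration of the helper's behaviour with the values from this hunk:

    # user is only the recipient, so only :to_read ends up in the change map
    %{}
    |> put_conditional(:to_read, true, conversation.to_id == user.id)
    |> put_conditional(:from_read, true, conversation.from_id == user.id)
    #=> %{to_read: true}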
@ -4,7 +4,6 @@ defmodule Philomena.Conversations.Conversation do
|
|||
|
||||
alias Philomena.Users.User
|
||||
alias Philomena.Conversations.Message
|
||||
alias Philomena.Repo
|
||||
|
||||
@derive {Phoenix.Param, key: :slug}
|
||||
|
||||
|
@ -20,6 +19,8 @@ defmodule Philomena.Conversations.Conversation do
|
|||
field :from_hidden, :boolean, default: false
|
||||
field :slug, :string
|
||||
field :last_message_at, :utc_datetime
|
||||
|
||||
field :message_count, :integer, virtual: true
|
||||
field :recipient, :string, virtual: true
|
||||
|
||||
timestamps(inserted_at: :created_at, type: :utc_datetime)
|
||||
|
@ -32,51 +33,39 @@ defmodule Philomena.Conversations.Conversation do
|
|||
|> validate_required([])
|
||||
end
|
||||
|
||||
@doc false
|
||||
def read_changeset(conversation, attrs) do
|
||||
conversation
|
||||
|> cast(attrs, [:from_read, :to_read])
|
||||
end
|
||||
|
||||
def to_read_changeset(conversation) do
|
||||
change(conversation)
|
||||
|> put_change(:to_read, true)
|
||||
end
|
||||
|
||||
def hidden_changeset(conversation, attrs) do
|
||||
conversation
|
||||
|> cast(attrs, [:from_hidden, :to_hidden])
|
||||
cast(conversation, attrs, [:from_read, :to_read])
|
||||
end
|
||||
|
||||
@doc false
|
||||
def creation_changeset(conversation, from, attrs) do
|
||||
conversation
|
||||
|> cast(attrs, [:title, :recipient])
|
||||
|> validate_required([:title, :recipient])
|
||||
|> validate_length(:title, max: 300, count: :bytes)
|
||||
|> put_assoc(:from, from)
|
||||
|> put_recipient()
|
||||
|> set_slug()
|
||||
|> set_last_message()
|
||||
|> cast_assoc(:messages, with: &Message.creation_changeset(&1, &2, from))
|
||||
|> validate_length(:messages, is: 1)
|
||||
def hidden_changeset(conversation, attrs) do
|
||||
cast(conversation, attrs, [:from_hidden, :to_hidden])
|
||||
end
|
||||
|
||||
defp set_slug(changeset) do
|
||||
changeset
|
||||
|> change(slug: Ecto.UUID.generate())
|
||||
@doc false
|
||||
def creation_changeset(conversation, from, to, attrs) do
|
||||
conversation
|
||||
|> cast(attrs, [:title])
|
||||
|> put_assoc(:from, from)
|
||||
|> put_assoc(:to, to)
|
||||
|> put_change(:slug, Ecto.UUID.generate())
|
||||
|> cast_assoc(:messages, with: &Message.creation_changeset(&1, &2, from))
|
||||
|> set_last_message()
|
||||
|> validate_length(:messages, is: 1)
|
||||
|> validate_length(:title, max: 300, count: :bytes)
|
||||
|> validate_required([:title, :from, :to])
|
||||
end
|
||||
|
||||
@doc false
|
||||
def new_message_changeset(conversation) do
|
||||
conversation
|
||||
|> change(from_read: false)
|
||||
|> change(to_read: false)
|
||||
|> set_last_message()
|
||||
end
|
||||
|
||||
defp set_last_message(changeset) do
|
||||
changeset
|
||||
|> change(last_message_at: DateTime.utc_now() |> DateTime.truncate(:second))
|
||||
end
|
||||
|
||||
defp put_recipient(changeset) do
|
||||
recipient = changeset |> get_field(:recipient)
|
||||
user = Repo.get_by(User, name: recipient)
|
||||
|
||||
changeset
|
||||
|> put_change(:to, user)
|
||||
|> validate_required(:to)
|
||||
change(changeset, last_message_at: DateTime.utc_now(:second))
|
||||
end
|
||||
end
|
||||
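Recipient resolution also leaves the schema module: put_recipient/1 and its Repo.get_by/2 call are deleted, creation_changeset/4 takes the recipient struct directly, and set_last_message/1 switches to DateTime.utc_now(:second). A sketch of the new call shape, mirroring create_conversation/2 in the context hunk above (the attrs key comes from that function):

    to = Users.get_user_by_name(attrs["recipient"])

    %Conversation{}
    |> Conversation.creation_changeset(from, to, attrs)
    |> Repo.insert()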
@ -33,6 +33,7 @@ defmodule Philomena.Conversations.Message do
|
|||
|> Approval.maybe_put_approval(user)
|
||||
end
|
||||
|
||||
@doc false
|
||||
def approve_changeset(message) do
|
||||
change(message, approved: true)
|
||||
end
|
||||
|
|
|
@ -3,11 +3,15 @@ defmodule Philomena.DuplicateReports do
|
|||
The DuplicateReports context.
|
||||
"""
|
||||
|
||||
import Philomena.DuplicateReports.Power
|
||||
import Ecto.Query, warn: false
|
||||
|
||||
alias Ecto.Multi
|
||||
alias Philomena.Repo
|
||||
|
||||
alias Philomena.DuplicateReports.DuplicateReport
|
||||
alias Philomena.DuplicateReports.SearchQuery
|
||||
alias Philomena.DuplicateReports.Uploader
|
||||
alias Philomena.ImageIntensities.ImageIntensity
|
||||
alias Philomena.Images.Image
|
||||
alias Philomena.Images
|
||||
|
@ -15,7 +19,8 @@ defmodule Philomena.DuplicateReports do
|
|||
def generate_reports(source) do
|
||||
source = Repo.preload(source, :intensity)
|
||||
|
||||
duplicates_of(source.intensity, source.image_aspect_ratio, 0.2, 0.05)
|
||||
{source.intensity, source.image_aspect_ratio}
|
||||
|> find_duplicates(dist: 0.2)
|
||||
|> where([i, _it], i.id != ^source.id)
|
||||
|> Repo.all()
|
||||
|> Enum.map(fn target ->
|
||||
|
@ -25,7 +30,11 @@ defmodule Philomena.DuplicateReports do
|
|||
end)
|
||||
end
|
||||
|
||||
def duplicates_of(intensities, aspect_ratio, dist \\ 0.25, aspect_dist \\ 0.05) do
|
||||
def find_duplicates({intensities, aspect_ratio}, opts \\ []) do
|
||||
aspect_dist = Keyword.get(opts, :aspect_dist, 0.05)
|
||||
limit = Keyword.get(opts, :limit, 10)
|
||||
dist = Keyword.get(opts, :dist, 0.25)
|
||||
|
||||
# for each color channel
|
||||
dist = dist * 3
|
||||
|
||||
|
@ -39,7 +48,72 @@ defmodule Philomena.DuplicateReports do
|
|||
where:
|
||||
i.image_aspect_ratio >= ^(aspect_ratio - aspect_dist) and
|
||||
i.image_aspect_ratio <= ^(aspect_ratio + aspect_dist),
|
||||
limit: 10
|
||||
order_by: [
|
||||
asc:
|
||||
power(it.nw - ^intensities.nw, 2) +
|
||||
power(it.ne - ^intensities.ne, 2) +
|
||||
power(it.sw - ^intensities.sw, 2) +
|
||||
power(it.se - ^intensities.se, 2) +
|
||||
power(i.image_aspect_ratio - ^aspect_ratio, 2)
|
||||
],
|
||||
limit: ^limit
|
||||
end
|
||||
|
||||
@doc """
|
||||
Executes the reverse image search query from parameters.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> execute_search_query(%{"image" => ..., "distance" => "0.25"})
|
||||
{:ok, [%Image{...}, ....]}
|
||||
|
||||
iex> execute_search_query(%{"image" => ..., "distance" => "asdf"})
|
||||
{:error, %Ecto.Changeset{}}
|
||||
|
||||
"""
|
||||
def execute_search_query(attrs \\ %{}) do
|
||||
%SearchQuery{}
|
||||
|> SearchQuery.changeset(attrs)
|
||||
|> Uploader.analyze_upload(attrs)
|
||||
|> Ecto.Changeset.apply_action(:create)
|
||||
|> case do
|
||||
{:ok, search_query} ->
|
||||
intensities = generate_intensities(search_query)
|
||||
aspect = search_query.image_aspect_ratio
|
||||
limit = search_query.limit
|
||||
dist = search_query.distance
|
||||
|
||||
images =
|
||||
{intensities, aspect}
|
||||
|> find_duplicates(dist: dist, aspect_dist: dist, limit: limit)
|
||||
|> preload([:user, :intensity, [:sources, tags: :aliases]])
|
||||
|> Repo.paginate(page_size: 50)
|
||||
|
||||
{:ok, images}
|
||||
|
||||
error ->
|
||||
error
|
||||
end
|
||||
end
|
||||
|
||||
defp generate_intensities(search_query) do
|
||||
analysis = SearchQuery.to_analysis(search_query)
|
||||
file = search_query.uploaded_image
|
||||
|
||||
PhilomenaMedia.Processors.intensities(analysis, file)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Returns an `%Ecto.Changeset{}` for tracking search query changes.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> change_search_query(search_query)
|
||||
%Ecto.Changeset{source: %SearchQuery{}}
|
||||
|
||||
"""
|
||||
def change_search_query(%SearchQuery{} = search_query) do
|
||||
SearchQuery.changeset(search_query)
|
||||
end
|
||||
|
||||
@doc """
|
||||
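duplicates_of/4 becomes find_duplicates/2: the intensity/aspect pair travels as a tuple and the thresholds move into a keyword list (:dist, :aspect_dist, :limit), with the intensity distance tripled because it is compared per colour channel. A minimal call sketch, assuming a source image with a preloaded intensity record as in generate_reports/1 above:

    {source.intensity, source.image_aspect_ratio}
    |> find_duplicates(dist: 0.2, aspect_dist: 0.05, limit: 10)
    |> where([i, _it], i.id != ^source.id)
    |> Repo.all()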
9
lib/philomena/duplicate_reports/power.ex
Normal file
@ -0,0 +1,9 @@
defmodule Philomena.DuplicateReports.Power do
  @moduledoc false

  defmacro power(left, right) do
    quote do
      fragment("power(?, ?)", unquote(left), unquote(right))
    end
  end
end
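The power/2 macro is only a compile-time wrapper around an Ecto fragment, letting Postgres' power() appear in query expressions such as the order_by in find_duplicates/2 above. A standalone usage sketch (the query itself is hypothetical; it assumes Ecto.Query is imported alongside the macro):

    import Ecto.Query
    import Philomena.DuplicateReports.Power

    alias Philomena.ImageIntensities.ImageIntensity

    nw = 0.5

    from it in ImageIntensity,
      order_by: [asc: power(it.nw - ^nw, 2)],
      limit: 10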
69
lib/philomena/duplicate_reports/search_query.ex
Normal file
@ -0,0 +1,69 @@
defmodule Philomena.DuplicateReports.SearchQuery do
  use Ecto.Schema
  import Ecto.Changeset

  embedded_schema do
    field :distance, :float, default: 0.25
    field :limit, :integer, default: 10

    field :image_width, :integer
    field :image_height, :integer
    field :image_format, :string
    field :image_duration, :float
    field :image_mime_type, :string
    field :image_is_animated, :boolean
    field :image_aspect_ratio, :float
    field :uploaded_image, :string, virtual: true
  end

  @doc false
  def changeset(search_query, attrs \\ %{}) do
    search_query
    |> cast(attrs, [:distance, :limit])
    |> validate_number(:distance, greater_than_or_equal_to: 0, less_than_or_equal_to: 1)
    |> validate_number(:limit, greater_than_or_equal_to: 1, less_than_or_equal_to: 50)
  end

  @doc false
  def image_changeset(search_query, attrs \\ %{}) do
    search_query
    |> cast(attrs, [
      :image_width,
      :image_height,
      :image_format,
      :image_duration,
      :image_mime_type,
      :image_is_animated,
      :image_aspect_ratio,
      :uploaded_image
    ])
    |> validate_required([
      :image_width,
      :image_height,
      :image_format,
      :image_duration,
      :image_mime_type,
      :image_is_animated,
      :image_aspect_ratio,
      :uploaded_image
    ])
    |> validate_number(:image_width, greater_than: 0)
    |> validate_number(:image_height, greater_than: 0)
    |> validate_inclusion(
      :image_mime_type,
      ~W(image/gif image/jpeg image/png image/svg+xml video/webm),
      message: "(#{attrs["image_mime_type"]}) is invalid"
    )
  end

  @doc false
  def to_analysis(search_query) do
    %PhilomenaMedia.Analyzers.Result{
      animated?: search_query.image_is_animated,
      dimensions: {search_query.image_width, search_query.image_height},
      duration: search_query.image_duration,
      extension: search_query.image_format,
      mime_type: search_query.image_mime_type
    }
  end
end
17
lib/philomena/duplicate_reports/uploader.ex
Normal file
@ -0,0 +1,17 @@
defmodule Philomena.DuplicateReports.Uploader do
  @moduledoc """
  Upload and processing callback logic for SearchQuery images.
  """

  alias Philomena.DuplicateReports.SearchQuery
  alias PhilomenaMedia.Uploader

  def analyze_upload(search_query, params) do
    Uploader.analyze_upload(
      search_query,
      "image",
      params["image"],
      &SearchQuery.image_changeset/2
    )
  end
end
|
|
@ -1,9 +1,10 @@
|
|||
defmodule Philomena.Filters.Filter do
|
||||
use Ecto.Schema
|
||||
import Ecto.Changeset
|
||||
import PhilomenaQuery.Ecto.QueryValidator
|
||||
|
||||
alias Philomena.Schema.TagList
|
||||
alias Philomena.Schema.Search
|
||||
alias Philomena.Images.Query
|
||||
alias Philomena.Users.User
|
||||
alias Philomena.Repo
|
||||
|
||||
|
@ -48,8 +49,8 @@ defmodule Philomena.Filters.Filter do
|
|||
|> validate_required([:name])
|
||||
|> validate_my_downvotes(:spoilered_complex_str)
|
||||
|> validate_my_downvotes(:hidden_complex_str)
|
||||
|> Search.validate_search(:spoilered_complex_str, user)
|
||||
|> Search.validate_search(:hidden_complex_str, user)
|
||||
|> validate_query(:spoilered_complex_str, &Query.compile(&1, user: user))
|
||||
|> validate_query(:hidden_complex_str, &Query.compile(&1, user: user))
|
||||
|> unsafe_validate_unique([:user_id, :name], Repo)
|
||||
end
|
||||
|
||||
|
|
|
@ -33,8 +33,8 @@ defmodule Philomena.Filters.Query do
|
|||
|> Parser.parse(query_string, context)
|
||||
end
|
||||
|
||||
def compile(user, query_string) do
|
||||
query_string = query_string || ""
|
||||
def compile(query_string, opts \\ []) do
|
||||
user = Keyword.get(opts, :user)
|
||||
|
||||
case user do
|
||||
nil ->
|
||||
|
|
|
@ -7,8 +7,9 @@ defmodule Philomena.Forums do
|
|||
alias Philomena.Repo
|
||||
|
||||
alias Philomena.Forums.Forum
|
||||
alias Philomena.Forums.Subscription
|
||||
alias Philomena.Notifications
|
||||
|
||||
use Philomena.Subscriptions,
|
||||
id_name: :forum_id
|
||||
|
||||
@doc """
|
||||
Returns the list of forums.
|
||||
|
@ -103,45 +104,4 @@ defmodule Philomena.Forums do
|
|||
def change_forum(%Forum{} = forum) do
|
||||
Forum.changeset(forum, %{})
|
||||
end
|
||||
|
||||
def subscribed?(_forum, nil), do: false
|
||||
|
||||
def subscribed?(forum, user) do
|
||||
Subscription
|
||||
|> where(forum_id: ^forum.id, user_id: ^user.id)
|
||||
|> Repo.exists?()
|
||||
end
|
||||
|
||||
def create_subscription(_forum, nil), do: {:ok, nil}
|
||||
|
||||
def create_subscription(forum, user) do
|
||||
%Subscription{forum_id: forum.id, user_id: user.id}
|
||||
|> Subscription.changeset(%{})
|
||||
|> Repo.insert(on_conflict: :nothing)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Deletes a Subscription.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> delete_subscription(subscription)
|
||||
{:ok, %Subscription{}}
|
||||
|
||||
iex> delete_subscription(subscription)
|
||||
{:error, %Ecto.Changeset{}}
|
||||
|
||||
"""
|
||||
def delete_subscription(forum, user) do
|
||||
clear_notification(forum, user)
|
||||
|
||||
%Subscription{forum_id: forum.id, user_id: user.id}
|
||||
|> Repo.delete()
|
||||
end
|
||||
|
||||
def clear_notification(_forum, nil), do: nil
|
||||
|
||||
def clear_notification(forum, user) do
|
||||
Notifications.delete_unread_notification("Forum", forum.id, user)
|
||||
end
|
||||
end
|
||||
|
|
|
@ -14,10 +14,12 @@ defmodule Philomena.Galleries do
|
|||
alias Philomena.IndexWorker
|
||||
alias Philomena.GalleryReorderWorker
|
||||
alias Philomena.Notifications
|
||||
alias Philomena.NotificationWorker
|
||||
alias Philomena.Notifications.{Notification, UnreadNotification}
|
||||
alias Philomena.Images
|
||||
|
||||
use Philomena.Subscriptions,
|
||||
on_delete: :clear_gallery_notification,
|
||||
id_name: :gallery_id
|
||||
|
||||
@doc """
|
||||
Gets a single gallery.
|
||||
|
||||
|
@ -91,21 +93,8 @@ defmodule Philomena.Galleries do
|
|||
|> select([i], i.image_id)
|
||||
|> Repo.all()
|
||||
|
||||
unread_notifications =
|
||||
UnreadNotification
|
||||
|> join(:inner, [un], _ in assoc(un, :notification))
|
||||
|> where([_, n], n.actor_type == "Gallery")
|
||||
|> where([_, n], n.actor_id == ^gallery.id)
|
||||
|
||||
notifications =
|
||||
Notification
|
||||
|> where(actor_type: "Gallery")
|
||||
|> where(actor_id: ^gallery.id)
|
||||
|
||||
Multi.new()
|
||||
|> Multi.delete(:gallery, gallery)
|
||||
|> Multi.delete_all(:unread_notifications, unread_notifications)
|
||||
|> Multi.delete_all(:notifications, notifications)
|
||||
|> Repo.transaction()
|
||||
|> case do
|
||||
{:ok, %{gallery: gallery}} ->
|
||||
|
@ -173,7 +162,7 @@ defmodule Philomena.Galleries do
|
|||
|
||||
def add_image_to_gallery(gallery, image) do
|
||||
Multi.new()
|
||||
|> Multi.run(:lock, fn repo, %{} ->
|
||||
|> Multi.run(:gallery, fn repo, %{} ->
|
||||
gallery =
|
||||
Gallery
|
||||
|> where(id: ^gallery.id)
|
||||
|
@ -189,7 +178,7 @@ defmodule Philomena.Galleries do
|
|||
|> Interaction.changeset(%{"image_id" => image.id, "position" => position})
|
||||
|> repo.insert()
|
||||
end)
|
||||
|> Multi.run(:gallery, fn repo, %{} ->
|
||||
|> Multi.run(:image_count, fn repo, %{} ->
|
||||
now = DateTime.utc_now()
|
||||
|
||||
{count, nil} =
|
||||
|
@ -199,11 +188,11 @@ defmodule Philomena.Galleries do
|
|||
|
||||
{:ok, count}
|
||||
end)
|
||||
|> Multi.run(:notification, ¬ify_gallery/2)
|
||||
|> Repo.transaction()
|
||||
|> case do
|
||||
{:ok, result} ->
|
||||
Images.reindex_image(image)
|
||||
notify_gallery(gallery, image)
|
||||
reindex_gallery(gallery)
|
||||
|
||||
{:ok, result}
|
||||
|
@ -215,7 +204,7 @@ defmodule Philomena.Galleries do
|
|||
|
||||
def remove_image_from_gallery(gallery, image) do
|
||||
Multi.new()
|
||||
|> Multi.run(:lock, fn repo, %{} ->
|
||||
|> Multi.run(:gallery, fn repo, %{} ->
|
||||
gallery =
|
||||
Gallery
|
||||
|> where(id: ^gallery.id)
|
||||
|
@ -232,7 +221,7 @@ defmodule Philomena.Galleries do
|
|||
|
||||
{:ok, count}
|
||||
end)
|
||||
|> Multi.run(:gallery, fn repo, %{interaction: interaction_count} ->
|
||||
|> Multi.run(:image_count, fn repo, %{interaction: interaction_count} ->
|
||||
now = DateTime.utc_now()
|
||||
|
||||
{count, nil} =
|
||||
|
@ -255,37 +244,16 @@ defmodule Philomena.Galleries do
|
|||
end
|
||||
end
|
||||
|
||||
defp notify_gallery(_repo, %{gallery: gallery}) do
|
||||
Notifications.create_gallery_image_notification(gallery)
|
||||
end
|
||||
|
||||
defp last_position(gallery_id) do
|
||||
Interaction
|
||||
|> where(gallery_id: ^gallery_id)
|
||||
|> Repo.aggregate(:max, :position)
|
||||
end
|
||||
|
||||
def notify_gallery(gallery, image) do
|
||||
Exq.enqueue(Exq, "notifications", NotificationWorker, ["Galleries", [gallery.id, image.id]])
|
||||
end
|
||||
|
||||
def perform_notify([gallery_id, image_id]) do
|
||||
gallery = get_gallery!(gallery_id)
|
||||
|
||||
subscriptions =
|
||||
gallery
|
||||
|> Repo.preload(:subscriptions)
|
||||
|> Map.fetch!(:subscriptions)
|
||||
|
||||
Notifications.notify(
|
||||
gallery,
|
||||
subscriptions,
|
||||
%{
|
||||
actor_id: gallery.id,
|
||||
actor_type: "Gallery",
|
||||
actor_child_id: image_id,
|
||||
actor_child_type: "Image",
|
||||
action: "added images to"
|
||||
}
|
||||
)
|
||||
end
|
||||
|
||||
def reorder_gallery(gallery, image_ids) do
|
||||
Exq.enqueue(Exq, "indexing", GalleryReorderWorker, [gallery.id, image_ids])
|
||||
end
|
||||
|
@ -357,54 +325,17 @@ defmodule Philomena.Galleries do
|
|||
defp position_order(%{order_position_asc: true}), do: [asc: :position]
|
||||
defp position_order(_gallery), do: [desc: :position]
|
||||
|
||||
alias Philomena.Galleries.Subscription
|
||||
|
||||
def subscribed?(_gallery, nil), do: false
|
||||
|
||||
def subscribed?(gallery, user) do
|
||||
Subscription
|
||||
|> where(gallery_id: ^gallery.id, user_id: ^user.id)
|
||||
|> Repo.exists?()
|
||||
end
|
||||
|
||||
@doc """
|
||||
Creates a subscription.
|
||||
Removes all gallery notifications for a given gallery and user.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> create_subscription(%{field: value})
|
||||
{:ok, %Subscription{}}
|
||||
|
||||
iex> create_subscription(%{field: bad_value})
|
||||
{:error, %Ecto.Changeset{}}
|
||||
iex> clear_gallery_notification(gallery, user)
|
||||
:ok
|
||||
|
||||
"""
|
||||
def create_subscription(gallery, user) do
|
||||
%Subscription{gallery_id: gallery.id, user_id: user.id}
|
||||
|> Subscription.changeset(%{})
|
||||
|> Repo.insert(on_conflict: :nothing)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Deletes a Subscription.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> delete_subscription(subscription)
|
||||
{:ok, %Subscription{}}
|
||||
|
||||
iex> delete_subscription(subscription)
|
||||
{:error, %Ecto.Changeset{}}
|
||||
|
||||
"""
|
||||
def delete_subscription(gallery, user) do
|
||||
%Subscription{gallery_id: gallery.id, user_id: user.id}
|
||||
|> Repo.delete()
|
||||
end
|
||||
|
||||
def clear_notification(_gallery, nil), do: nil
|
||||
|
||||
def clear_notification(gallery, user) do
|
||||
Notifications.delete_unread_notification("Gallery", gallery.id, user)
|
||||
def clear_gallery_notification(%Gallery{} = gallery, user) do
|
||||
Notifications.clear_gallery_image_notification(gallery, user)
|
||||
:ok
|
||||
end
|
||||
end
|
||||
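Forums, Galleries and Images now pull their subscription plumbing from use Philomena.Subscriptions instead of hand-writing subscribed?/2, create_subscription/2 and delete_subscription/2; the options name the foreign key (id_name) and an optional notification-clearing callback (on_delete). Judging by the call sites in this diff, the macro also injects maybe_subscribe_on/4, which is how the watch-on-upload and watch-on-reply auto-subscriptions now attach to a Multi:

    # shape taken from the Images hunk further down; the injected helper is an inference
    Multi.new()
    |> Multi.insert(:image, image)
    |> maybe_subscribe_on(:image, attribution[:user], :watch_on_upload)
    |> Repo.transaction()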
@ -15,8 +15,6 @@ defmodule Philomena.Galleries.Query do
|
|||
end
|
||||
|
||||
def compile(query_string) do
|
||||
query_string = query_string || ""
|
||||
|
||||
fields()
|
||||
|> Parser.new()
|
||||
|> Parser.parse(query_string)
|
||||
|
|
|
@ -22,8 +22,9 @@ defmodule Philomena.Images do
|
|||
alias Philomena.IndexWorker
|
||||
alias Philomena.ImageFeatures.ImageFeature
|
||||
alias Philomena.SourceChanges.SourceChange
|
||||
alias Philomena.Notifications.Notification
|
||||
alias Philomena.NotificationWorker
|
||||
alias Philomena.Notifications.ImageCommentNotification
|
||||
alias Philomena.Notifications.ImageMergeNotification
|
||||
alias Philomena.TagChanges.Limits
|
||||
alias Philomena.TagChanges.TagChange
|
||||
alias Philomena.Tags
|
||||
alias Philomena.UserStatistics
|
||||
|
@ -31,12 +32,15 @@ defmodule Philomena.Images do
|
|||
alias Philomena.Notifications
|
||||
alias Philomena.Interactions
|
||||
alias Philomena.Reports
|
||||
alias Philomena.Reports.Report
|
||||
alias Philomena.Comments
|
||||
alias Philomena.Galleries.Gallery
|
||||
alias Philomena.Galleries.Interaction
|
||||
alias Philomena.Users.User
|
||||
|
||||
use Philomena.Subscriptions,
|
||||
on_delete: :clear_image_notification,
|
||||
id_name: :image_id
|
||||
|
||||
@doc """
|
||||
Gets a single image.
|
||||
|
||||
|
@ -90,11 +94,6 @@ defmodule Philomena.Images do
|
|||
|
||||
Multi.new()
|
||||
|> Multi.insert(:image, image)
|
||||
|> Multi.run(:name_caches, fn repo, %{image: image} ->
|
||||
image
|
||||
|> Image.cache_changeset()
|
||||
|> repo.update()
|
||||
end)
|
||||
|> Multi.run(:added_tag_count, fn repo, %{image: image} ->
|
||||
tag_ids = image.added_tags |> Enum.map(& &1.id)
|
||||
tags = Tag |> where([t], t.id in ^tag_ids)
|
||||
|
@ -103,7 +102,7 @@ defmodule Philomena.Images do
|
|||
|
||||
{:ok, count}
|
||||
end)
|
||||
|> maybe_create_subscription_on_upload(attribution[:user])
|
||||
|> maybe_subscribe_on(:image, attribution[:user], :watch_on_upload)
|
||||
|> Repo.transaction()
|
||||
|> case do
|
||||
{:ok, %{image: image}} = result ->
|
||||
|
@ -157,17 +156,6 @@ defmodule Philomena.Images do
|
|||
Logger.error("Aborting upload of #{image.id} after #{retry_count} retries")
|
||||
end
|
||||
|
||||
defp maybe_create_subscription_on_upload(multi, %User{watch_on_upload: true} = user) do
|
||||
multi
|
||||
|> Multi.run(:subscribe, fn _repo, %{image: image} ->
|
||||
create_subscription(image, user)
|
||||
end)
|
||||
end
|
||||
|
||||
defp maybe_create_subscription_on_upload(multi, _user) do
|
||||
multi
|
||||
end
|
||||
|
||||
def approve_image(image) do
|
||||
image
|
||||
|> Repo.preload(:user)
|
||||
|
@ -201,8 +189,7 @@ defmodule Philomena.Images do
|
|||
|
||||
defp maybe_suggest_user_verification(%User{id: id, uploads_count: 5, verified: false}) do
|
||||
Reports.create_system_report(
|
||||
id,
|
||||
"User",
|
||||
{"User", id},
|
||||
"Verification",
|
||||
"User has uploaded enough approved images to be considered for verification."
|
||||
)
|
||||
|
@ -376,7 +363,7 @@ defmodule Philomena.Images do
|
|||
end
|
||||
|
||||
defp source_change_attributes(attribution, image, source, added, user) do
|
||||
now = DateTime.utc_now() |> DateTime.truncate(:second)
|
||||
now = DateTime.utc_now(:second)
|
||||
|
||||
user_id =
|
||||
case user do
|
||||
|
@ -392,8 +379,6 @@ defmodule Philomena.Images do
|
|||
updated_at: now,
|
||||
ip: attribution[:ip],
|
||||
fingerprint: attribution[:fingerprint],
|
||||
user_agent: attribution[:user_agent],
|
||||
referrer: attribution[:referrer],
|
||||
added: added
|
||||
}
|
||||
end
|
||||
|
@ -426,6 +411,9 @@ defmodule Philomena.Images do
|
|||
error
|
||||
end
|
||||
end)
|
||||
|> Multi.run(:check_limits, fn _repo, %{image: {image, _added, _removed}} ->
|
||||
check_tag_change_limits_before_commit(image, attribution)
|
||||
end)
|
||||
|> Multi.run(:added_tag_changes, fn repo, %{image: {image, added_tags, _removed}} ->
|
||||
tag_changes =
|
||||
added_tags
|
||||
|
@ -469,10 +457,47 @@ defmodule Philomena.Images do
|
|||
{:ok, count}
|
||||
end)
|
||||
|> Repo.transaction()
|
||||
|> case do
|
||||
{:ok, %{image: {image, _added, _removed}}} = res ->
|
||||
update_tag_change_limits_after_commit(image, attribution)
|
||||
|
||||
res
|
||||
|
||||
err ->
|
||||
err
|
||||
end
|
||||
end
|
||||
|
||||
defp check_tag_change_limits_before_commit(image, attribution) do
|
||||
tag_changed_count = length(image.added_tags) + length(image.removed_tags)
|
||||
rating_changed = image.ratings_changed
|
||||
user = attribution[:user]
|
||||
ip = attribution[:ip]
|
||||
|
||||
cond do
|
||||
Limits.limited_for_tag_count?(user, ip, tag_changed_count) ->
|
||||
{:error, :limit_exceeded}
|
||||
|
||||
rating_changed and Limits.limited_for_rating_count?(user, ip) ->
|
||||
{:error, :limit_exceeded}
|
||||
|
||||
true ->
|
||||
{:ok, 0}
|
||||
end
|
||||
end
|
||||
|
||||
def update_tag_change_limits_after_commit(image, attribution) do
|
||||
rating_changed_count = if(image.ratings_changed, do: 1, else: 0)
|
||||
tag_changed_count = length(image.added_tags) + length(image.removed_tags)
|
||||
user = attribution[:user]
|
||||
ip = attribution[:ip]
|
||||
|
||||
Limits.update_tag_count_after_update(user, ip, tag_changed_count)
|
||||
Limits.update_rating_count_after_update(user, ip, rating_changed_count)
|
||||
end
|
||||
|
||||
defp tag_change_attributes(attribution, image, tag, added, user) do
|
||||
now = DateTime.utc_now() |> DateTime.truncate(:second)
|
||||
now = DateTime.utc_now(:second)
|
||||
|
||||
user_id =
|
||||
case user do
|
||||
|
@ -489,8 +514,6 @@ defmodule Philomena.Images do
|
|||
tag_name_cache: tag.name,
|
||||
ip: attribution[:ip],
|
||||
fingerprint: attribution[:fingerprint],
|
||||
user_agent: attribution[:user_agent],
|
||||
referrer: attribution[:referrer],
|
||||
added: added
|
||||
}
|
||||
end
|
||||
|
@ -569,13 +592,13 @@ defmodule Philomena.Images do
|
|||
|> Multi.run(:migrate_interactions, fn _, %{} ->
|
||||
{:ok, Interactions.migrate_interactions(image, duplicate_of_image)}
|
||||
end)
|
||||
|> Multi.run(:notification, ¬ify_merge(&1, &2, image, duplicate_of_image))
|
||||
|> Repo.transaction()
|
||||
|> process_after_hide()
|
||||
|> case do
|
||||
{:ok, result} ->
|
||||
reindex_image(duplicate_of_image)
|
||||
Comments.reindex_comments(duplicate_of_image)
|
||||
notify_merge(image, duplicate_of_image)
|
||||
|
||||
{:ok, result}
|
||||
|
||||
|
@ -585,11 +608,7 @@ defmodule Philomena.Images do
|
|||
end
|
||||
|
||||
defp hide_image_multi(changeset, image, user, multi) do
|
||||
reports =
|
||||
Report
|
||||
|> where(reportable_type: "Image", reportable_id: ^image.id)
|
||||
|> select([r], r.id)
|
||||
|> update(set: [open: false, state: "closed", admin_id: ^user.id])
|
||||
report_query = Reports.close_report_query({"Image", image.id}, user)
|
||||
|
||||
galleries =
|
||||
Gallery
|
||||
|
@ -600,7 +619,7 @@ defmodule Philomena.Images do
|
|||
|
||||
multi
|
||||
|> Multi.update(:image, changeset)
|
||||
|> Multi.update_all(:reports, reports, [])
|
||||
|> Multi.update_all(:reports, report_query, [])
|
||||
|> Multi.update_all(:galleries, galleries, [])
|
||||
|> Multi.delete_all(:gallery_interactions, gallery_interactions, [])
|
||||
|> Multi.run(:tags, fn repo, %{image: image} ->
|
||||
|
@ -715,7 +734,7 @@ defmodule Philomena.Images do
|
|||
|> where([t], t.image_id in ^image_ids and t.tag_id in ^removed_tags)
|
||||
|> select([t], [t.image_id, t.tag_id])
|
||||
|
||||
now = DateTime.utc_now() |> DateTime.truncate(:second)
|
||||
now = DateTime.utc_now(:second)
|
||||
tag_change_attributes = Map.merge(tag_change_attributes, %{created_at: now, updated_at: now})
|
||||
tag_attributes = %{name: "", slug: "", created_at: now, updated_at: now}
|
||||
|
||||
|
@ -868,53 +887,6 @@ defmodule Philomena.Images do
|
|||
|
||||
alias Philomena.Images.Subscription
|
||||
|
||||
def subscribed?(_image, nil), do: false
|
||||
|
||||
def subscribed?(image, user) do
|
||||
Subscription
|
||||
|> where(image_id: ^image.id, user_id: ^user.id)
|
||||
|> Repo.exists?()
|
||||
end
|
||||
|
||||
@doc """
|
||||
Creates a subscription.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> create_subscription(%{field: value})
|
||||
{:ok, %Subscription{}}
|
||||
|
||||
iex> create_subscription(%{field: bad_value})
|
||||
{:error, %Ecto.Changeset{}}
|
||||
|
||||
"""
|
||||
def create_subscription(_image, nil), do: {:ok, nil}
|
||||
|
||||
def create_subscription(image, user) do
|
||||
%Subscription{image_id: image.id, user_id: user.id}
|
||||
|> Subscription.changeset(%{})
|
||||
|> Repo.insert(on_conflict: :nothing)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Deletes a subscription.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> delete_subscription(subscription)
|
||||
{:ok, %Subscription{}}
|
||||
|
||||
iex> delete_subscription(subscription)
|
||||
{:error, %Ecto.Changeset{}}
|
||||
|
||||
"""
|
||||
def delete_subscription(image, user) do
|
||||
clear_notification(image, user)
|
||||
|
||||
%Subscription{image_id: image.id, user_id: user.id}
|
||||
|> Repo.delete()
|
||||
end
|
||||
|
||||
def migrate_subscriptions(source, target) do
|
||||
subscriptions =
|
||||
Subscription
|
||||
|
@ -924,12 +896,40 @@ defmodule Philomena.Images do
|
|||
|
||||
Repo.insert_all(Subscription, subscriptions, on_conflict: :nothing)
|
||||
|
||||
{count, nil} =
|
||||
Notification
|
||||
|> where(actor_type: "Image", actor_id: ^source.id)
|
||||
|> Repo.delete_all()
|
||||
comment_notifications =
|
||||
from cn in ImageCommentNotification,
|
||||
where: cn.image_id == ^source.id,
|
||||
select: %{
|
||||
user_id: cn.user_id,
|
||||
image_id: ^target.id,
|
||||
comment_id: cn.comment_id,
|
||||
read: cn.read,
|
||||
created_at: cn.created_at,
|
||||
updated_at: cn.updated_at
|
||||
}
|
||||
|
||||
{:ok, count}
|
||||
merge_notifications =
|
||||
from mn in ImageMergeNotification,
|
||||
where: mn.target_id == ^source.id,
|
||||
select: %{
|
||||
user_id: mn.user_id,
|
||||
target_id: ^target.id,
|
||||
source_id: mn.source_id,
|
||||
read: mn.read,
|
||||
created_at: mn.created_at,
|
||||
updated_at: mn.updated_at
|
||||
}
|
||||
|
||||
{comment_notification_count, nil} =
|
||||
Repo.insert_all(ImageCommentNotification, comment_notifications, on_conflict: :nothing)
|
||||
|
||||
{merge_notification_count, nil} =
|
||||
Repo.insert_all(ImageMergeNotification, merge_notifications, on_conflict: :nothing)
|
||||
|
||||
Repo.delete_all(exclude(comment_notifications, :select))
|
||||
Repo.delete_all(exclude(merge_notifications, :select))
|
||||
|
||||
{:ok, {comment_notification_count, merge_notification_count}}
|
||||
end
|
||||
|
||||
def migrate_sources(source, target) do
|
||||
|
@ -944,34 +944,22 @@ defmodule Philomena.Images do
|
|||
|> Repo.update()
|
||||
end
|
||||
|
||||
def notify_merge(source, target) do
|
||||
Exq.enqueue(Exq, "notifications", NotificationWorker, ["Images", [source.id, target.id]])
|
||||
defp notify_merge(_repo, _changes, source, target) do
|
||||
Notifications.create_image_merge_notification(target, source)
|
||||
end
|
||||
|
||||
def perform_notify([source_id, target_id]) do
|
||||
target = get_image!(target_id)
|
||||
@doc """
|
||||
Removes all image notifications for a given image and user.
|
||||
|
||||
subscriptions =
|
||||
target
|
||||
|> Repo.preload(:subscriptions)
|
||||
|> Map.fetch!(:subscriptions)
|
||||
## Examples
|
||||
|
||||
Notifications.notify(
|
||||
nil,
|
||||
subscriptions,
|
||||
%{
|
||||
actor_id: target.id,
|
||||
actor_type: "Image",
|
||||
actor_child_id: nil,
|
||||
actor_child_type: nil,
|
||||
action: "merged ##{source_id} into"
|
||||
}
|
||||
)
|
||||
end
|
||||
iex> clear_image_notification(image, user)
|
||||
:ok
|
||||
|
||||
def clear_notification(_image, nil), do: nil
|
||||
|
||||
def clear_notification(image, user) do
|
||||
Notifications.delete_unread_notification("Image", image.id, user)
|
||||
"""
|
||||
def clear_image_notification(%Image{} = image, user) do
|
||||
Notifications.clear_image_comment_notification(image, user)
|
||||
Notifications.clear_image_merge_notification(image, user)
|
||||
:ok
|
||||
end
|
||||
end
|
||||
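migrate_subscriptions/2 now also carries comment and merge notifications over to the merge target: each query selects maps shaped like the destination rows (with the image or target id swapped for the target's), the maps are bulk-inserted with Repo.insert_all/3 ignoring conflicts, and the same queries minus their select are reused for deletion via Ecto.Query.exclude/2. A compact sketch of the copy-then-delete idiom, with a hypothetical notification schema standing in for ImageCommentNotification and ImageMergeNotification:

    rows =
      from n in SomeNotification,
        where: n.image_id == ^source.id,
        select: %{user_id: n.user_id, image_id: ^target.id, read: n.read}

    Repo.insert_all(SomeNotification, rows, on_conflict: :nothing)
    Repo.delete_all(exclude(rows, :select))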
@ -2,7 +2,6 @@ defmodule Philomena.Images.Image do
|
|||
use Ecto.Schema
|
||||
|
||||
import Ecto.Changeset
|
||||
import Ecto.Query
|
||||
|
||||
alias Philomena.ImageIntensities.ImageIntensity
|
||||
alias Philomena.ImageVotes.ImageVote
|
||||
|
@ -51,6 +50,7 @@ defmodule Philomena.Images.Image do
|
|||
field :image_width, :integer
|
||||
field :image_height, :integer
|
||||
field :image_size, :integer
|
||||
field :image_orig_size, :integer
|
||||
field :image_format, :string
|
||||
field :image_mime_type, :string
|
||||
field :image_aspect_ratio, :float
|
||||
|
@ -58,14 +58,11 @@ defmodule Philomena.Images.Image do
|
|||
field :image_is_animated, :boolean, source: :is_animated
|
||||
field :ip, EctoNetwork.INET
|
||||
field :fingerprint, :string
|
||||
field :user_agent, :string, default: ""
|
||||
field :referrer, :string, default: ""
|
||||
field :anonymous, :boolean, default: false
|
||||
field :score, :integer, default: 0
|
||||
field :faves_count, :integer, default: 0
|
||||
field :upvotes_count, :integer, default: 0
|
||||
field :downvotes_count, :integer, default: 0
|
||||
field :votes_count, :integer, default: 0
|
||||
field :source_url, :string
|
||||
field :description, :string, default: ""
|
||||
field :image_sha512_hash, :string
|
||||
|
@ -87,15 +84,11 @@ defmodule Philomena.Images.Image do
|
|||
field :hides_count, :integer, default: 0
|
||||
field :approved, :boolean
|
||||
|
||||
# todo: can probably remove these now
|
||||
field :tag_list_cache, :string
|
||||
field :tag_list_plus_alias_cache, :string
|
||||
field :file_name_cache, :string
|
||||
|
||||
field :removed_tags, {:array, :any}, default: [], virtual: true
|
||||
field :added_tags, {:array, :any}, default: [], virtual: true
|
||||
field :removed_sources, {:array, :any}, default: [], virtual: true
|
||||
field :added_sources, {:array, :any}, default: [], virtual: true
|
||||
field :ratings_changed, :boolean, default: false, virtual: true
|
||||
|
||||
field :uploaded_image, :string, virtual: true
|
||||
field :removed_image, :string, virtual: true
|
||||
|
@ -120,11 +113,9 @@ defmodule Philomena.Images.Image do
|
|||
end
|
||||
|
||||
def creation_changeset(image, attrs, attribution) do
|
||||
now = DateTime.utc_now() |> DateTime.truncate(:second)
|
||||
|
||||
image
|
||||
|> cast(attrs, [:anonymous, :source_url, :description])
|
||||
|> change(first_seen_at: now)
|
||||
|> change(first_seen_at: DateTime.utc_now(:second))
|
||||
|> change(attribution)
|
||||
|> validate_length(:description, max: 50_000, count: :bytes)
|
||||
|> validate_format(:source_url, ~r/\Ahttps?:\/\//)
|
||||
|
@ -138,6 +129,7 @@ defmodule Philomena.Images.Image do
|
|||
:image_width,
|
||||
:image_height,
|
||||
:image_size,
|
||||
:image_orig_size,
|
||||
:image_format,
|
||||
:image_mime_type,
|
||||
:image_aspect_ratio,
|
||||
|
@ -153,6 +145,7 @@ defmodule Philomena.Images.Image do
|
|||
:image_width,
|
||||
:image_height,
|
||||
:image_size,
|
||||
:image_orig_size,
|
||||
:image_format,
|
||||
:image_mime_type,
|
||||
:image_aspect_ratio,
|
||||
|
@ -226,7 +219,6 @@ defmodule Philomena.Images.Image do
|
|||
|> cast(attrs, [])
|
||||
|> TagDiffer.diff_input(old_tags, new_tags, excluded_tags)
|
||||
|> TagValidator.validate_tags()
|
||||
|> cache_changeset()
|
||||
end
|
||||
|
||||
def locked_tags_changeset(image, attrs, locked_tags) do
|
||||
|
@ -340,54 +332,7 @@ defmodule Philomena.Images.Image do
|
|||
def approve_changeset(image) do
|
||||
change(image)
|
||||
|> put_change(:approved, true)
|
||||
|> put_change(:first_seen_at, DateTime.truncate(DateTime.utc_now(), :second))
|
||||
end
|
||||
|
||||
def cache_changeset(image) do
|
||||
changeset = change(image)
|
||||
image = apply_changes(changeset)
|
||||
|
||||
{tag_list_cache, tag_list_plus_alias_cache, file_name_cache} =
|
||||
create_caches(image.id, image.tags)
|
||||
|
||||
changeset
|
||||
|> put_change(:tag_list_cache, tag_list_cache)
|
||||
|> put_change(:tag_list_plus_alias_cache, tag_list_plus_alias_cache)
|
||||
|> put_change(:file_name_cache, file_name_cache)
|
||||
end
|
||||
|
||||
defp create_caches(image_id, tags) do
|
||||
tags = Tag.display_order(tags)
|
||||
|
||||
tag_list_cache =
|
||||
tags
|
||||
|> Enum.map_join(", ", & &1.name)
|
||||
|
||||
tag_ids = tags |> Enum.map(& &1.id)
|
||||
|
||||
aliases =
|
||||
Tag
|
||||
|> where([t], t.aliased_tag_id in ^tag_ids)
|
||||
|> Repo.all()
|
||||
|
||||
tag_list_plus_alias_cache =
|
||||
(tags ++ aliases)
|
||||
|> Tag.display_order()
|
||||
|> Enum.map_join(", ", & &1.name)
|
||||
|
||||
# Truncate filename to 150 characters, making room for the path + filename on Windows
|
||||
# https://stackoverflow.com/questions/265769/maximum-filename-length-in-ntfs-windows-xp-and-windows-vista
|
||||
file_name_slug_fragment =
|
||||
tags
|
||||
|> Enum.map_join("_", & &1.slug)
|
||||
|> String.to_charlist()
|
||||
|> Enum.filter(&(&1 in ?a..?z or &1 in ~c"0123456789_-"))
|
||||
|> List.to_string()
|
||||
|> String.slice(0..150)
|
||||
|
||||
file_name_cache = "#{image_id}__#{file_name_slug_fragment}"
|
||||
|
||||
{tag_list_cache, tag_list_plus_alias_cache, file_name_cache}
|
||||
|> put_change(:first_seen_at, DateTime.utc_now(:second))
|
||||
end
|
||||
|
||||
defp create_key do
|
||||
|
|
|
@@ -84,7 +84,7 @@ defmodule Philomena.Images.Query do
  defp anonymous_fields do
    [
      int_fields:
        ~W(id width height score upvotes downvotes faves uploader_id faved_by_id pixels size comment_count source_count tag_count) ++
        ~W(id width height score upvotes downvotes faves uploader_id faved_by_id pixels size orig_size comment_count source_count tag_count) ++
          tag_count_fields(),
      float_fields: ~W(aspect_ratio wilson_score duration),
      date_fields: ~W(created_at updated_at first_seen_at),

@@ -144,8 +144,9 @@ defmodule Philomena.Images.Query do
    |> Parser.parse(query_string, context)
  end

  def compile(user, query_string, watch \\ false) do
    query_string = query_string || ""
  def compile(query_string, opts \\ []) do
    user = Keyword.get(opts, :user)
    watch = Keyword.get(opts, :watch, false)

    case user do
      nil ->
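Note: `compile/2` now takes the query string first and moves the user and watch flag into an options list. A minimal sketch of the call sites after this change (query strings and `current_user` are illustrative, and the `{:ok, query}` shape assumes the parser's usual return value):

    # Anonymous search, no user context:
    {:ok, query} = Philomena.Images.Query.compile("safe, score.gte:100")

    # Search evaluated on behalf of a signed-in user, in watch-list mode:
    {:ok, query} =
      Philomena.Images.Query.compile("my:watched, -hidden", user: current_user, watch: true)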
@@ -54,6 +54,7 @@ defmodule Philomena.Images.SearchIndex do
      processed: %{type: "boolean"},
      score: %{type: "integer"},
      size: %{type: "integer"},
      orig_size: %{type: "integer"},
      sha512_hash: %{type: "keyword"},
      source_url: %{type: "keyword"},
      source_count: %{type: "integer"},

@@ -117,6 +118,7 @@ defmodule Philomena.Images.SearchIndex do
      height: image.image_height,
      pixels: image.image_width * image.image_height,
      size: image.image_size,
      orig_size: image.image_orig_size,
      animated: image.image_is_animated,
      duration: if(image.image_is_animated, do: image.image_duration, else: 0),
      tag_count: length(image.tags),
@@ -13,7 +13,9 @@ defmodule Philomena.Images.Source do
  @doc false
  def changeset(source, attrs) do
    source
    |> cast(attrs, [])
    |> validate_required([])
    |> cast(attrs, [:source])
    |> validate_required([:source])
    |> validate_format(:source, ~r/\Ahttps?:\/\//)
    |> validate_length(:source, max: 255)
  end
end
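The `Source` changeset above goes from a no-op to actually casting and validating `:source`. A small sketch of the resulting behaviour (input values are illustrative):

    alias Philomena.Images.Source

    Source.changeset(%Source{}, %{"source" => "https://example.com/art/123"}).valid?
    #=> expected true: scheme matches ~r/\Ahttps?:\/\// and the length is within bounds

    Source.changeset(%Source{}, %{"source" => "ftp://example.com/art/123"}).valid?
    #=> expected false: the format validation rejects non-http(s) schemes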
@@ -1,6 +1,5 @@
defmodule Philomena.Images.SourceDiffer do
  import Ecto.Changeset
  alias Philomena.Images.Source

  def diff_input(changeset, old_sources, new_sources) do
    old_set = MapSet.new(flatten_input(old_sources))

@@ -13,12 +12,11 @@ defmodule Philomena.Images.SourceDiffer do
    {sources, actually_added, actually_removed} =
      apply_changes(source_set, added_sources, removed_sources)

    image_id = fetch_field!(changeset, :id)

    changeset
    |> cast(source_params(sources), [])
    |> put_change(:added_sources, actually_added)
    |> put_change(:removed_sources, actually_removed)
    |> put_assoc(:sources, source_structs(image_id, sources))
    |> cast_assoc(:sources)
  end

  defp apply_changes(source_set, added_set, removed_set) do

@@ -44,8 +42,8 @@ defmodule Philomena.Images.SourceDiffer do
    {sources, actually_added, actually_removed}
  end

  defp source_structs(image_id, sources) do
    Enum.map(sources, &%Source{image_id: image_id, source: &1})
  defp source_params(sources) do
    %{sources: Enum.map(sources, &%{source: &1})}
  end

  defp flatten_input(input) when is_map(input) do
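The differ now feeds `cast_assoc(:sources)` plain parameter maps instead of prebuilt `%Source{}` structs, which is what lets the `Source.changeset/2` validations above run on every entry. Roughly:

    source_params(["https://a.example/1", "https://b.example/2"])
    #=> %{sources: [%{source: "https://a.example/1"}, %{source: "https://b.example/2"}]}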
@@ -5,7 +5,20 @@ defmodule Philomena.Images.TagValidator do
  def validate_tags(changeset) do
    tags = changeset |> get_field(:tags)

    validate_tag_input(changeset, tags)
    changeset
    |> validate_tag_input(tags)
    |> set_rating_changed()
  end

  defp set_rating_changed(changeset) do
    added_tags = changeset |> get_field(:added_tags) |> extract_names()
    removed_tags = changeset |> get_field(:removed_tags) |> extract_names()
    ratings = all_ratings()

    added_ratings = MapSet.intersection(ratings, added_tags) |> MapSet.size()
    removed_ratings = MapSet.intersection(ratings, removed_tags) |> MapSet.size()

    put_change(changeset, :ratings_changed, added_ratings + removed_ratings > 0)
  end

  defp validate_tag_input(changeset, tags) do

@@ -108,6 +121,13 @@ defmodule Philomena.Images.TagValidator do
    |> MapSet.new()
  end

  defp all_ratings do
    safe_rating()
    |> MapSet.union(sexual_ratings())
    |> MapSet.union(horror_ratings())
    |> MapSet.union(gross_rating())
  end

  defp safe_rating, do: MapSet.new(["safe"])
  defp sexual_ratings, do: MapSet.new(["suggestive", "questionable", "explicit"])
  defp horror_ratings, do: MapSet.new(["semi-grimdark", "grimdark"])
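`set_rating_changed/1` boils down to intersecting the rating tag names with the added and removed tag names. A standalone sketch of that check (the tag names are illustrative, and "grotesque" stands in for the `gross_rating/0` helper whose body is outside this hunk):

    ratings =
      MapSet.new(["safe", "suggestive", "questionable", "explicit", "semi-grimdark", "grimdark", "grotesque"])

    added_tags = MapSet.new(["explicit", "cloud"])
    removed_tags = MapSet.new(["safe"])

    added_ratings = ratings |> MapSet.intersection(added_tags) |> MapSet.size()
    removed_ratings = ratings |> MapSet.intersection(removed_tags) |> MapSet.size()

    added_ratings + removed_ratings > 0
    #=> true, so :ratings_changed is set on the changeset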
@@ -76,7 +76,7 @@ defmodule Philomena.Images.Thumbnailer do
  def generate_thumbnails(image_id) do
    image = Repo.get!(Image, image_id)
    file = download_image_file(image)
    {:ok, analysis} = Analyzers.analyze(file)
    {:ok, analysis} = Analyzers.analyze_path(file)

    file =
      apply_edit_script(image, file, Processors.process(analysis, file, generated_sizes(image)))

@@ -127,7 +127,7 @@ defmodule Philomena.Images.Thumbnailer do
  end

  defp recompute_meta(image, file, changeset_fn) do
    {:ok, %{dimensions: {width, height}}} = Analyzers.analyze(file)
    {:ok, %{dimensions: {width, height}}} = Analyzers.analyze_path(file)

    image
    |> changeset_fn.(%{
@@ -72,7 +72,7 @@ defmodule Philomena.Interactions do
  end

  def migrate_interactions(source, target) do
    now = DateTime.utc_now() |> DateTime.truncate(:second)
    now = DateTime.utc_now(:second)
    source = Repo.preload(source, [:hiders, :favers, :upvoters, :downvoters])

    new_hides = Enum.map(source.hiders, &%{image_id: target.id, user_id: &1.id, created_at: now})
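This commit repeatedly replaces `DateTime.utc_now() |> DateTime.truncate(:second)` with `DateTime.utc_now(:second)`. Both yield a second-precision timestamp; the single-argument form needs a recent Elixir release (1.15+, to the best of my knowledge):

    now = DateTime.utc_now(:second)
    now.microsecond
    #=> {0, 0} (already truncated, so the separate DateTime.truncate/2 call is no longer needed)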
@@ -7,18 +7,82 @@ defmodule Philomena.ModNotes do
  alias Philomena.Repo

  alias Philomena.ModNotes.ModNote
  alias Philomena.Polymorphic

  @doc """
  Returns the list of mod_notes.
  Returns a list of 2-tuples of mod notes and rendered output for the notable type and id.

  See `list_mod_notes/3` for more information about collection rendering.

  ## Examples

      iex> list_mod_notes()
      [%ModNote{}, ...]
      iex> list_all_mod_notes_by_type_and_id("User", "1", & &1.body)
      [
        {%ModNote{body: "hello *world*"}, "hello *world*"}
      ]

  """
  def list_mod_notes do
    Repo.all(ModNote)
  def list_all_mod_notes_by_type_and_id(notable_type, notable_id, collection_renderer) do
    ModNote
    |> where(notable_type: ^notable_type, notable_id: ^notable_id)
    |> preload(:moderator)
    |> order_by(desc: :id)
    |> Repo.all()
    |> preload_and_render(collection_renderer)
  end

  @doc """
  Returns a `m:Scrivener.Page` of 2-tuples of mod notes and rendered output
  for the query string and current pagination.

  All mod notes containing the substring `query_string` are matched and returned
  case-insensitively.

  See `list_mod_notes/3` for more information.

  ## Examples

      iex> list_mod_notes_by_query_string("quack", & &1.body, page_size: 15)
      %Scrivener.Page{}

  """
  def list_mod_notes_by_query_string(query_string, collection_renderer, pagination) do
    ModNote
    |> where([m], ilike(m.body, ^"%#{query_string}%"))
    |> list_mod_notes(collection_renderer, pagination)
  end

  @doc """
  Returns a `m:Scrivener.Page` of 2-tuples of mod notes and rendered output
  for the current pagination.

  When coerced to a list and rendered as Markdown, the result may look like:

      [
        {%ModNote{body: "hello *world*"}, "hello <em>world</em>"}
      ]

  ## Examples

      iex> list_mod_notes(& &1.body, page_size: 15)
      %Scrivener.Page{}

  """
  def list_mod_notes(queryable \\ ModNote, collection_renderer, pagination) do
    mod_notes =
      queryable
      |> preload(:moderator)
      |> order_by(desc: :id)
      |> Repo.paginate(pagination)

    put_in(mod_notes.entries, preload_and_render(mod_notes, collection_renderer))
  end

  defp preload_and_render(mod_notes, collection_renderer) do
    bodies = collection_renderer.(mod_notes)
    preloaded = Polymorphic.load_polymorphic(mod_notes, notable: [notable_id: :notable_type])

    Enum.zip(preloaded, bodies)
  end

  @doc """
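For context on `collection_renderer`: it receives the list of mod notes and must return the rendered bodies in the same order, which `preload_and_render/2` then zips back onto the structs. A hedged sketch of a caller (`render_markdown/1` is a stand-in for whatever renderer the app actually uses):

    renderer = fn mod_notes -> Enum.map(mod_notes, &render_markdown(&1.body)) end

    page = Philomena.ModNotes.list_mod_notes_by_query_string("spam", renderer, page_size: 15)

    Enum.each(page.entries, fn {mod_note, rendered_body} ->
      # mod_note.notable is preloaded polymorphically; rendered_body came from the renderer
      IO.puts("#{mod_note.notable_type} #{mod_note.notable_id}: #{rendered_body}")
    end)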
@@ -9,40 +9,24 @@ defmodule Philomena.ModerationLogs do
  alias Philomena.ModerationLogs.ModerationLog

  @doc """
  Returns the list of moderation_logs.
  Returns a paginated list of moderation logs as a `m:Scrivener.Page`.

  ## Examples

      iex> list_moderation_logs()
      iex> list_moderation_logs(page_size: 15)
      [%ModerationLog{}, ...]

  """
  def list_moderation_logs(conn) do
  def list_moderation_logs(pagination) do
    ModerationLog
    |> where([ml], ml.created_at > ago(2, "week"))
    |> where([ml], ml.created_at >= ago(2, "week"))
    |> preload(:user)
    |> order_by(desc: :created_at)
    |> Repo.paginate(conn.assigns.scrivener)
    |> Repo.paginate(pagination)
  end

  @doc """
  Gets a single moderation_log.

  Raises `Ecto.NoResultsError` if the Moderation log does not exist.

  ## Examples

      iex> get_moderation_log!(123)
      %ModerationLog{}

      iex> get_moderation_log!(456)
      ** (Ecto.NoResultsError)

  """
  def get_moderation_log!(id), do: Repo.get!(ModerationLog, id)

  @doc """
  Creates a moderation_log.
  Creates a moderation log.

  ## Examples

@@ -60,21 +44,14 @@ defmodule Philomena.ModerationLogs do
  end

  @doc """
  Deletes a moderation_log.
  Removes moderation logs created more than 2 weeks ago.

  ## Examples

      iex> delete_moderation_log(moderation_log)
      {:ok, %ModerationLog{}}

      iex> delete_moderation_log(moderation_log)
      {:error, %Ecto.Changeset{}}
      iex> cleanup!()
      {31, nil}

  """
  def delete_moderation_log(%ModerationLog{} = moderation_log) do
    Repo.delete(moderation_log)
  end

  def cleanup! do
    ModerationLog
    |> where([ml], ml.created_at < ago(2, "week"))
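With this change, `list_moderation_logs/1` takes pagination options directly instead of a `conn`, and the per-record `delete_moderation_log/1` is replaced by a bulk `cleanup!/0`. A short usage sketch (option values are illustrative):

    # In the controller, pass the pagination straight through:
    page = Philomena.ModerationLogs.list_moderation_logs(page: 1, page_size: 25)

    # In a scheduled job, prune everything older than the two-week window:
    {deleted, nil} = Philomena.ModerationLogs.cleanup!()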
@ -6,214 +6,291 @@ defmodule Philomena.Notifications do
|
|||
import Ecto.Query, warn: false
|
||||
alias Philomena.Repo
|
||||
|
||||
alias Philomena.Notifications.Notification
|
||||
alias Philomena.Channels.Subscription, as: ChannelSubscription
|
||||
alias Philomena.Forums.Subscription, as: ForumSubscription
|
||||
alias Philomena.Galleries.Subscription, as: GallerySubscription
|
||||
alias Philomena.Images.Subscription, as: ImageSubscription
|
||||
alias Philomena.Topics.Subscription, as: TopicSubscription
|
||||
|
||||
alias Philomena.Notifications.ChannelLiveNotification
|
||||
alias Philomena.Notifications.ForumPostNotification
|
||||
alias Philomena.Notifications.ForumTopicNotification
|
||||
alias Philomena.Notifications.GalleryImageNotification
|
||||
alias Philomena.Notifications.ImageCommentNotification
|
||||
alias Philomena.Notifications.ImageMergeNotification
|
||||
|
||||
alias Philomena.Notifications.Category
|
||||
alias Philomena.Notifications.Creator
|
||||
|
||||
@doc """
|
||||
Returns the list of notifications.
|
||||
Return the count of all currently unread notifications for the user in all categories.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> list_notifications()
|
||||
[%Notification{}, ...]
|
||||
iex> total_unread_notification_count(user)
|
||||
15
|
||||
|
||||
"""
|
||||
def list_notifications do
|
||||
Repo.all(Notification)
|
||||
def total_unread_notification_count(user) do
|
||||
Category.total_unread_notification_count(user)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Gets a single notification.
|
||||
|
||||
Raises `Ecto.NoResultsError` if the Notification does not exist.
|
||||
Gather up and return the top N notifications for the user, for each category of
|
||||
unread notification currently existing.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> get_notification!(123)
|
||||
%Notification{}
|
||||
|
||||
iex> get_notification!(456)
|
||||
** (Ecto.NoResultsError)
|
||||
iex> unread_notifications_for_user(user, page_size: 10)
|
||||
[
|
||||
channel_live: [],
|
||||
forum_post: [%ForumPostNotification{...}, ...],
|
||||
forum_topic: [%ForumTopicNotification{...}, ...],
|
||||
gallery_image: [],
|
||||
image_comment: [%ImageCommentNotification{...}, ...],
|
||||
image_merge: []
|
||||
]
|
||||
|
||||
"""
|
||||
def get_notification!(id), do: Repo.get!(Notification, id)
|
||||
|
||||
@doc """
|
||||
Creates a notification.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> create_notification(%{field: value})
|
||||
{:ok, %Notification{}}
|
||||
|
||||
iex> create_notification(%{field: bad_value})
|
||||
{:error, %Ecto.Changeset{}}
|
||||
|
||||
"""
|
||||
def create_notification(attrs \\ %{}) do
|
||||
%Notification{}
|
||||
|> Notification.changeset(attrs)
|
||||
|> Repo.insert()
|
||||
def unread_notifications_for_user(user, pagination) do
|
||||
Category.unread_notifications_for_user(user, pagination)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Updates a notification.
|
||||
Returns paginated unread notifications for the user, given the category.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> update_notification(notification, %{field: new_value})
|
||||
{:ok, %Notification{}}
|
||||
|
||||
iex> update_notification(notification, %{field: bad_value})
|
||||
{:error, %Ecto.Changeset{}}
|
||||
iex> unread_notifications_for_user_and_category(user, :image_comment)
|
||||
[%ImageCommentNotification{...}]
|
||||
|
||||
"""
|
||||
def update_notification(%Notification{} = notification, attrs) do
|
||||
notification
|
||||
|> Notification.changeset(attrs)
|
||||
|> Repo.insert_or_update()
|
||||
def unread_notifications_for_user_and_category(user, category, pagination) do
|
||||
Category.unread_notifications_for_user_and_category(user, category, pagination)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Deletes a Notification.
|
||||
Creates a channel live notification, returning the number of affected users.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> delete_notification(notification)
|
||||
{:ok, %Notification{}}
|
||||
|
||||
iex> delete_notification(notification)
|
||||
{:error, %Ecto.Changeset{}}
|
||||
iex> create_channel_live_notification(channel)
|
||||
{:ok, 2}
|
||||
|
||||
"""
|
||||
def delete_notification(%Notification{} = notification) do
|
||||
Repo.delete(notification)
|
||||
def create_channel_live_notification(channel) do
|
||||
Creator.broadcast_notification(
|
||||
from: {ChannelSubscription, channel_id: channel.id},
|
||||
into: ChannelLiveNotification,
|
||||
select: [channel_id: channel.id],
|
||||
unique_key: :channel_id
|
||||
)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Returns an `%Ecto.Changeset{}` for tracking notification changes.
|
||||
Creates a forum post notification, returning the number of affected users.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> change_notification(notification)
|
||||
%Ecto.Changeset{source: %Notification{}}
|
||||
iex> create_forum_post_notification(user, topic, post)
|
||||
{:ok, 2}
|
||||
|
||||
"""
|
||||
def change_notification(%Notification{} = notification) do
|
||||
Notification.changeset(notification, %{})
|
||||
end
|
||||
|
||||
alias Philomena.Notifications.UnreadNotification
|
||||
|
||||
def count_unread_notifications(user) do
|
||||
UnreadNotification
|
||||
|> where(user_id: ^user.id)
|
||||
|> Repo.aggregate(:count, :notification_id)
|
||||
def create_forum_post_notification(user, topic, post) do
|
||||
Creator.broadcast_notification(
|
||||
notification_author: user,
|
||||
from: {TopicSubscription, topic_id: topic.id},
|
||||
into: ForumPostNotification,
|
||||
select: [topic_id: topic.id, post_id: post.id],
|
||||
unique_key: :topic_id
|
||||
)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Creates a unread_notification.
|
||||
Creates a forum topic notification, returning the number of affected users.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> create_unread_notification(%{field: value})
|
||||
{:ok, %UnreadNotification{}}
|
||||
|
||||
iex> create_unread_notification(%{field: bad_value})
|
||||
{:error, %Ecto.Changeset{}}
|
||||
iex> create_forum_topic_notification(user, topic)
|
||||
{:ok, 2}
|
||||
|
||||
"""
|
||||
def create_unread_notification(attrs \\ %{}) do
|
||||
%UnreadNotification{}
|
||||
|> UnreadNotification.changeset(attrs)
|
||||
|> Repo.insert()
|
||||
def create_forum_topic_notification(user, topic) do
|
||||
Creator.broadcast_notification(
|
||||
notification_author: user,
|
||||
from: {ForumSubscription, forum_id: topic.forum_id},
|
||||
into: ForumTopicNotification,
|
||||
select: [topic_id: topic.id],
|
||||
unique_key: :topic_id
|
||||
)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Updates a unread_notification.
|
||||
Creates a gallery image notification, returning the number of affected users.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> update_unread_notification(unread_notification, %{field: new_value})
|
||||
{:ok, %UnreadNotification{}}
|
||||
|
||||
iex> update_unread_notification(unread_notification, %{field: bad_value})
|
||||
{:error, %Ecto.Changeset{}}
|
||||
iex> create_gallery_image_notification(gallery)
|
||||
{:ok, 2}
|
||||
|
||||
"""
|
||||
def update_unread_notification(%UnreadNotification{} = unread_notification, attrs) do
|
||||
unread_notification
|
||||
|> UnreadNotification.changeset(attrs)
|
||||
|> Repo.update()
|
||||
def create_gallery_image_notification(gallery) do
|
||||
Creator.broadcast_notification(
|
||||
from: {GallerySubscription, gallery_id: gallery.id},
|
||||
into: GalleryImageNotification,
|
||||
select: [gallery_id: gallery.id],
|
||||
unique_key: :gallery_id
|
||||
)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Deletes a UnreadNotification.
|
||||
Creates an image comment notification, returning the number of affected users.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> delete_unread_notification(unread_notification)
|
||||
{:ok, %UnreadNotification{}}
|
||||
|
||||
iex> delete_unread_notification(unread_notification)
|
||||
{:error, %Ecto.Changeset{}}
|
||||
iex> create_image_comment_notification(user, image, comment)
|
||||
{:ok, 2}
|
||||
|
||||
"""
|
||||
def delete_unread_notification(actor_type, actor_id, user) do
|
||||
notification =
|
||||
Notification
|
||||
|> where(actor_type: ^actor_type, actor_id: ^actor_id)
|
||||
|> Repo.one()
|
||||
def create_image_comment_notification(user, image, comment) do
|
||||
Creator.broadcast_notification(
|
||||
notification_author: user,
|
||||
from: {ImageSubscription, image_id: image.id},
|
||||
into: ImageCommentNotification,
|
||||
select: [image_id: image.id, comment_id: comment.id],
|
||||
unique_key: :image_id
|
||||
)
|
||||
end
|
||||
|
||||
if notification do
|
||||
UnreadNotification
|
||||
|> where(notification_id: ^notification.id, user_id: ^user.id)
|
||||
|> Repo.delete_all()
|
||||
@doc """
|
||||
Creates an image merge notification, returning the number of affected users.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> create_image_merge_notification(target, source)
|
||||
{:ok, 2}
|
||||
|
||||
"""
|
||||
def create_image_merge_notification(target, source) do
|
||||
Creator.broadcast_notification(
|
||||
from: {ImageSubscription, image_id: target.id},
|
||||
into: ImageMergeNotification,
|
||||
select: [target_id: target.id, source_id: source.id],
|
||||
unique_key: :target_id
|
||||
)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Removes the channel live notification for a given channel and user, returning
|
||||
the number of affected users.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> clear_channel_live_notification(channel, user)
|
||||
{:ok, 2}
|
||||
|
||||
"""
|
||||
def clear_channel_live_notification(channel, user) do
|
||||
ChannelLiveNotification
|
||||
|> where(channel_id: ^channel.id)
|
||||
|> delete_all_for_user(user)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Removes the forum post notification for a given topic and user, returning
|
||||
the number of affected notifications.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> clear_forum_post_notification(topic, user)
|
||||
{:ok, 2}
|
||||
|
||||
"""
|
||||
def clear_forum_post_notification(topic, user) do
|
||||
ForumPostNotification
|
||||
|> where(topic_id: ^topic.id)
|
||||
|> delete_all_for_user(user)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Removes the forum topic notification for a given topic and user, returning
|
||||
the number of affected notifications.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> clear_forum_topic_notification(topic, user)
|
||||
{:ok, 2}
|
||||
|
||||
"""
|
||||
def clear_forum_topic_notification(topic, user) do
|
||||
ForumTopicNotification
|
||||
|> where(topic_id: ^topic.id)
|
||||
|> delete_all_for_user(user)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Removes the gallery image notification for a given gallery and user, returning
|
||||
the number of affected notifications.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> clear_gallery_image_notification(topic, user)
|
||||
{:ok, 2}
|
||||
|
||||
"""
|
||||
def clear_gallery_image_notification(gallery, user) do
|
||||
GalleryImageNotification
|
||||
|> where(gallery_id: ^gallery.id)
|
||||
|> delete_all_for_user(user)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Removes the image comment notification for a given image and user, returning
|
||||
the number of affected notifications.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> clear_gallery_image_notification(topic, user)
|
||||
{:ok, 2}
|
||||
|
||||
"""
|
||||
def clear_image_comment_notification(image, user) do
|
||||
ImageCommentNotification
|
||||
|> where(image_id: ^image.id)
|
||||
|> delete_all_for_user(user)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Removes the image merge notification for a given image and user, returning
|
||||
the number of affected notifications.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> clear_image_merge_notification(topic, user)
|
||||
{:ok, 2}
|
||||
|
||||
"""
|
||||
def clear_image_merge_notification(image, user) do
|
||||
ImageMergeNotification
|
||||
|> where(target_id: ^image.id)
|
||||
|> delete_all_for_user(user)
|
||||
end
|
||||
|
||||
#
|
||||
# Clear all unread notifications using the given query.
|
||||
#
|
||||
# Returns `{:ok, count}`, where `count` is the number of affected rows.
|
||||
#
|
||||
defp delete_all_for_user(query, user) do
|
||||
if user do
|
||||
{count, nil} =
|
||||
query
|
||||
|> where(user_id: ^user.id)
|
||||
|> Repo.delete_all()
|
||||
|
||||
{:ok, count}
|
||||
else
|
||||
{:ok, 0}
|
||||
end
|
||||
end
|
||||
|
||||
@doc """
|
||||
Returns an `%Ecto.Changeset{}` for tracking unread_notification changes.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> change_unread_notification(unread_notification)
|
||||
%Ecto.Changeset{source: %UnreadNotification{}}
|
||||
|
||||
"""
|
||||
def change_unread_notification(%UnreadNotification{} = unread_notification) do
|
||||
UnreadNotification.changeset(unread_notification, %{})
|
||||
end
|
||||
|
||||
def notify(_actor_child, [], _params), do: nil
|
||||
|
||||
def notify(actor_child, subscriptions, params) do
|
||||
# Don't push to the user that created the notification
|
||||
subscriptions =
|
||||
case actor_child do
|
||||
%{user_id: id} ->
|
||||
subscriptions
|
||||
|> Enum.reject(&(&1.user_id == id))
|
||||
|
||||
_ ->
|
||||
subscriptions
|
||||
end
|
||||
|
||||
Repo.transaction(fn ->
|
||||
notification =
|
||||
Notification
|
||||
|> Repo.get_by(actor_id: params.actor_id, actor_type: params.actor_type)
|
||||
|
||||
{:ok, notification} =
|
||||
(notification || %Notification{})
|
||||
|> update_notification(params)
|
||||
|
||||
# Insert the notification to any watchers who do not have it
|
||||
unreads =
|
||||
subscriptions
|
||||
|> Enum.map(&%{user_id: &1.user_id, notification_id: notification.id})
|
||||
|
||||
UnreadNotification
|
||||
|> Repo.insert_all(unreads, on_conflict: :nothing)
|
||||
end)
|
||||
end
|
||||
end
|
||||
|
|
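Taken together, the rewritten context swaps the generic Notification/UnreadNotification pair for one `create_*`/`clear_*` function per category. A hedged sketch of how calling code changes shape (the variables are illustrative):

    alias Philomena.Notifications

    # When a comment is posted, fan the notification out to image subscribers
    # (skipping the comment's author), getting back the number of rows written:
    {:ok, _count} = Notifications.create_image_comment_notification(user, image, comment)

    # When that user later views the image, clear just their unread entry:
    {:ok, _count} = Notifications.clear_image_comment_notification(image, user)

    # Layout code can show a single unread badge across all categories:
    unread = Notifications.total_unread_notification_count(user)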
166
lib/philomena/notifications/category.ex
Normal file
166
lib/philomena/notifications/category.ex
Normal file
|
@ -0,0 +1,166 @@
|
|||
defmodule Philomena.Notifications.Category do
|
||||
@moduledoc """
|
||||
Notification category querying.
|
||||
"""
|
||||
|
||||
import Ecto.Query, warn: false
|
||||
alias Philomena.Repo
|
||||
|
||||
alias Philomena.Notifications.ChannelLiveNotification
|
||||
alias Philomena.Notifications.ForumPostNotification
|
||||
alias Philomena.Notifications.ForumTopicNotification
|
||||
alias Philomena.Notifications.GalleryImageNotification
|
||||
alias Philomena.Notifications.ImageCommentNotification
|
||||
alias Philomena.Notifications.ImageMergeNotification
|
||||
|
||||
@type t ::
|
||||
:channel_live
|
||||
| :forum_post
|
||||
| :forum_topic
|
||||
| :gallery_image
|
||||
| :image_comment
|
||||
| :image_merge
|
||||
|
||||
@doc """
|
||||
Return a list of all supported categories.
|
||||
"""
|
||||
def categories do
|
||||
[
|
||||
:channel_live,
|
||||
:forum_post,
|
||||
:forum_topic,
|
||||
:gallery_image,
|
||||
:image_comment,
|
||||
:image_merge
|
||||
]
|
||||
end
|
||||
|
||||
@doc """
|
||||
Return the count of all currently unread notifications for the user in all categories.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> total_unread_notification_count(user)
|
||||
15
|
||||
|
||||
"""
|
||||
def total_unread_notification_count(user) do
|
||||
categories()
|
||||
|> Enum.map(fn category ->
|
||||
category
|
||||
|> query_for_category_and_user(user)
|
||||
|> exclude(:preload)
|
||||
|> select([_], %{one: 1})
|
||||
end)
|
||||
|> union_all_queries()
|
||||
|> Repo.aggregate(:count)
|
||||
end
|
||||
|
||||
defp union_all_queries([query | rest]) do
|
||||
Enum.reduce(rest, query, fn q, acc -> union_all(acc, ^q) end)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Gather up and return the top N notifications for the user, for each category of
|
||||
unread notification currently existing.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> unread_notifications_for_user(user, page_size: 10)
|
||||
[
|
||||
channel_live: [],
|
||||
forum_post: [%ForumPostNotification{...}, ...],
|
||||
forum_topic: [%ForumTopicNotification{...}, ...],
|
||||
gallery_image: [],
|
||||
image_comment: [%ImageCommentNotification{...}, ...],
|
||||
image_merge: []
|
||||
]
|
||||
|
||||
"""
|
||||
def unread_notifications_for_user(user, pagination) do
|
||||
Enum.map(categories(), fn category ->
|
||||
results =
|
||||
category
|
||||
|> query_for_category_and_user(user)
|
||||
|> order_by(desc: :updated_at)
|
||||
|> Repo.paginate(pagination)
|
||||
|
||||
{category, results}
|
||||
end)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Returns paginated unread notifications for the user, given the category.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> unread_notifications_for_user_and_category(user, :image_comment)
|
||||
[%ImageCommentNotification{...}]
|
||||
|
||||
"""
|
||||
def unread_notifications_for_user_and_category(user, category, pagination) do
|
||||
category
|
||||
|> query_for_category_and_user(user)
|
||||
|> order_by(desc: :updated_at)
|
||||
|> Repo.paginate(pagination)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Determine the category of a notification.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> notification_category(%ImageCommentNotification{})
|
||||
:image_comment
|
||||
|
||||
"""
|
||||
def notification_category(n) do
|
||||
case n.__struct__ do
|
||||
ChannelLiveNotification -> :channel_live
|
||||
GalleryImageNotification -> :gallery_image
|
||||
ImageCommentNotification -> :image_comment
|
||||
ImageMergeNotification -> :image_merge
|
||||
ForumPostNotification -> :forum_post
|
||||
ForumTopicNotification -> :forum_topic
|
||||
end
|
||||
end
|
||||
|
||||
@doc """
|
||||
Returns an `m:Ecto.Query` that finds unread notifications for the given category,
|
||||
for the given user, with preloads applied.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> query_for_category_and_user(:channel_live, user)
|
||||
#Ecto.Query<from c0 in ChannelLiveNotification, where: c0.user_id == ^1, preload: [:channel]>
|
||||
|
||||
"""
|
||||
def query_for_category_and_user(category, user) do
|
||||
query =
|
||||
case category do
|
||||
:channel_live ->
|
||||
from(n in ChannelLiveNotification, preload: :channel)
|
||||
|
||||
:gallery_image ->
|
||||
from(n in GalleryImageNotification, preload: [gallery: :creator])
|
||||
|
||||
:image_comment ->
|
||||
from(n in ImageCommentNotification,
|
||||
preload: [image: [:sources, tags: :aliases], comment: :user]
|
||||
)
|
||||
|
||||
:image_merge ->
|
||||
from(n in ImageMergeNotification,
|
||||
preload: [:source, target: [:sources, tags: :aliases]]
|
||||
)
|
||||
|
||||
:forum_topic ->
|
||||
from(n in ForumTopicNotification, preload: [topic: [:forum, :user]])
|
||||
|
||||
:forum_post ->
|
||||
from(n in ForumPostNotification, preload: [topic: :forum, post: :user])
|
||||
end
|
||||
|
||||
where(query, user_id: ^user.id)
|
||||
end
|
||||
end
|
17
lib/philomena/notifications/channel_live_notification.ex
Normal file
17
lib/philomena/notifications/channel_live_notification.ex
Normal file
|
@ -0,0 +1,17 @@
|
|||
defmodule Philomena.Notifications.ChannelLiveNotification do
|
||||
use Ecto.Schema
|
||||
|
||||
alias Philomena.Users.User
|
||||
alias Philomena.Channels.Channel
|
||||
|
||||
@primary_key false
|
||||
|
||||
schema "channel_live_notifications" do
|
||||
belongs_to :user, User, primary_key: true
|
||||
belongs_to :channel, Channel, primary_key: true
|
||||
|
||||
field :read, :boolean, default: false
|
||||
|
||||
timestamps(inserted_at: :created_at, type: :utc_datetime)
|
||||
end
|
||||
end
|
lib/philomena/notifications/creator.ex (new file, 92 lines)

@@ -0,0 +1,92 @@
defmodule Philomena.Notifications.Creator do
  @moduledoc """
  Internal notifications creation logic.
  """

  import Ecto.Query, warn: false
  alias Philomena.Repo

  @doc """
  Propagate notifications for a notification table type.

  Returns `{:ok, count}`, where `count` is the number of affected rows.

  ## Examples

      iex> broadcast_notification(
      ...>   from: {GallerySubscription, gallery_id: gallery.id},
      ...>   into: GalleryImageNotification,
      ...>   select: [gallery_id: gallery.id],
      ...>   unique_key: :gallery_id
      ...> )
      {:ok, 2}

      iex> broadcast_notification(
      ...>   notification_author: user,
      ...>   from: {ImageSubscription, image_id: image.id},
      ...>   into: ImageCommentNotification,
      ...>   select: [image_id: image.id, comment_id: comment.id],
      ...>   unique_key: :image_id
      ...> )
      {:ok, 2}

  """
  def broadcast_notification(opts) do
    opts = Keyword.validate!(opts, [:notification_author, :from, :into, :select, :unique_key])

    notification_author = Keyword.get(opts, :notification_author, nil)
    {subscription_schema, filters} = Keyword.fetch!(opts, :from)
    notification_schema = Keyword.fetch!(opts, :into)
    select_keywords = Keyword.fetch!(opts, :select)
    unique_key = Keyword.fetch!(opts, :unique_key)

    subscription_schema
    |> subscription_query(notification_author)
    |> where(^filters)
    |> convert_to_notification(select_keywords)
    |> insert_notifications(notification_schema, unique_key)
  end

  defp convert_to_notification(subscription, extra) do
    now = dynamic([_], type(^DateTime.utc_now(:second), :utc_datetime))

    base = %{
      user_id: dynamic([s], s.user_id),
      created_at: now,
      updated_at: now,
      read: false
    }

    extra =
      Map.new(extra, fn {field, value} ->
        {field, dynamic([_], type(^value, :integer))}
      end)

    from(subscription, select: ^Map.merge(base, extra))
  end

  defp subscription_query(subscription, notification_author) do
    case notification_author do
      %{id: user_id} ->
        # Avoid sending notifications to the user which performed the action.
        from s in subscription,
          where: s.user_id != ^user_id

      _ ->
        # When not created by a user, send notifications to all subscribers.
        subscription
    end
  end

  defp insert_notifications(query, notification, unique_key) do
    {count, nil} =
      Repo.insert_all(
        notification,
        query,
        on_conflict: {:replace_all_except, [:created_at]},
        conflict_target: [unique_key, :user_id]
      )

    {:ok, count}
  end
end
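Because `broadcast_notification/1` is generic over the subscription source and the notification target, adding a new category is mostly a matter of passing the right schemas and conflict key. A purely illustrative sketch; the `Playlist*` modules below do not exist in this diff:

    def create_playlist_update_notification(user, playlist) do
      Creator.broadcast_notification(
        # Skip the user who made the change:
        notification_author: user,
        # Read recipients from this subscription table...
        from: {PlaylistSubscription, playlist_id: playlist.id},
        # ...and upsert one row per recipient into this notification table:
        into: PlaylistUpdateNotification,
        select: [playlist_id: playlist.id],
        unique_key: :playlist_id
      )
    end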
19
lib/philomena/notifications/forum_post_notification.ex
Normal file
19
lib/philomena/notifications/forum_post_notification.ex
Normal file
|
@ -0,0 +1,19 @@
|
|||
defmodule Philomena.Notifications.ForumPostNotification do
|
||||
use Ecto.Schema
|
||||
|
||||
alias Philomena.Users.User
|
||||
alias Philomena.Topics.Topic
|
||||
alias Philomena.Posts.Post
|
||||
|
||||
@primary_key false
|
||||
|
||||
schema "forum_post_notifications" do
|
||||
belongs_to :user, User, primary_key: true
|
||||
belongs_to :topic, Topic, primary_key: true
|
||||
belongs_to :post, Post
|
||||
|
||||
field :read, :boolean, default: false
|
||||
|
||||
timestamps(inserted_at: :created_at, type: :utc_datetime)
|
||||
end
|
||||
end
|
17
lib/philomena/notifications/forum_topic_notification.ex
Normal file
17
lib/philomena/notifications/forum_topic_notification.ex
Normal file
|
@ -0,0 +1,17 @@
|
|||
defmodule Philomena.Notifications.ForumTopicNotification do
|
||||
use Ecto.Schema
|
||||
|
||||
alias Philomena.Users.User
|
||||
alias Philomena.Topics.Topic
|
||||
|
||||
@primary_key false
|
||||
|
||||
schema "forum_topic_notifications" do
|
||||
belongs_to :user, User, primary_key: true
|
||||
belongs_to :topic, Topic, primary_key: true
|
||||
|
||||
field :read, :boolean, default: false
|
||||
|
||||
timestamps(inserted_at: :created_at, type: :utc_datetime)
|
||||
end
|
||||
end
|
17
lib/philomena/notifications/gallery_image_notification.ex
Normal file
17
lib/philomena/notifications/gallery_image_notification.ex
Normal file
|
@ -0,0 +1,17 @@
|
|||
defmodule Philomena.Notifications.GalleryImageNotification do
|
||||
use Ecto.Schema
|
||||
|
||||
alias Philomena.Users.User
|
||||
alias Philomena.Galleries.Gallery
|
||||
|
||||
@primary_key false
|
||||
|
||||
schema "gallery_image_notifications" do
|
||||
belongs_to :user, User, primary_key: true
|
||||
belongs_to :gallery, Gallery, primary_key: true
|
||||
|
||||
field :read, :boolean, default: false
|
||||
|
||||
timestamps(inserted_at: :created_at, type: :utc_datetime)
|
||||
end
|
||||
end
|
19
lib/philomena/notifications/image_comment_notification.ex
Normal file
19
lib/philomena/notifications/image_comment_notification.ex
Normal file
|
@ -0,0 +1,19 @@
|
|||
defmodule Philomena.Notifications.ImageCommentNotification do
|
||||
use Ecto.Schema
|
||||
|
||||
alias Philomena.Users.User
|
||||
alias Philomena.Images.Image
|
||||
alias Philomena.Comments.Comment
|
||||
|
||||
@primary_key false
|
||||
|
||||
schema "image_comment_notifications" do
|
||||
belongs_to :user, User, primary_key: true
|
||||
belongs_to :image, Image, primary_key: true
|
||||
belongs_to :comment, Comment
|
||||
|
||||
field :read, :boolean, default: false
|
||||
|
||||
timestamps(inserted_at: :created_at, type: :utc_datetime)
|
||||
end
|
||||
end
|
18
lib/philomena/notifications/image_merge_notification.ex
Normal file
18
lib/philomena/notifications/image_merge_notification.ex
Normal file
|
@ -0,0 +1,18 @@
|
|||
defmodule Philomena.Notifications.ImageMergeNotification do
|
||||
use Ecto.Schema
|
||||
|
||||
alias Philomena.Users.User
|
||||
alias Philomena.Images.Image
|
||||
|
||||
@primary_key false
|
||||
|
||||
schema "image_merge_notifications" do
|
||||
belongs_to :user, User, primary_key: true
|
||||
belongs_to :target, Image, primary_key: true
|
||||
belongs_to :source, Image
|
||||
|
||||
field :read, :boolean, default: false
|
||||
|
||||
timestamps(inserted_at: :created_at, type: :utc_datetime)
|
||||
end
|
||||
end
|
|
@ -1,26 +0,0 @@
|
|||
defmodule Philomena.Notifications.Notification do
|
||||
use Ecto.Schema
|
||||
import Ecto.Changeset
|
||||
|
||||
schema "notifications" do
|
||||
field :action, :string
|
||||
|
||||
# fixme: rails polymorphic relation
|
||||
field :actor_id, :integer
|
||||
field :actor_type, :string
|
||||
field :actor_child_id, :integer
|
||||
field :actor_child_type, :string
|
||||
|
||||
field :actor, :any, virtual: true
|
||||
field :actor_child, :any, virtual: true
|
||||
|
||||
timestamps(inserted_at: :created_at, type: :utc_datetime)
|
||||
end
|
||||
|
||||
@doc false
|
||||
def changeset(notification, attrs) do
|
||||
notification
|
||||
|> cast(attrs, [:actor_id, :actor_type, :actor_child_id, :actor_child_type, :action])
|
||||
|> validate_required([:actor_id, :actor_type, :action])
|
||||
end
|
||||
end
|
|
@ -1,21 +0,0 @@
|
|||
defmodule Philomena.Notifications.UnreadNotification do
|
||||
use Ecto.Schema
|
||||
import Ecto.Changeset
|
||||
|
||||
alias Philomena.Users.User
|
||||
alias Philomena.Notifications.Notification
|
||||
|
||||
@primary_key false
|
||||
|
||||
schema "unread_notifications" do
|
||||
belongs_to :user, User, primary_key: true
|
||||
belongs_to :notification, Notification, primary_key: true
|
||||
end
|
||||
|
||||
@doc false
|
||||
def changeset(unread_notification, attrs) do
|
||||
unread_notification
|
||||
|> cast(attrs, [])
|
||||
|> validate_required([])
|
||||
end
|
||||
end
|
|
@@ -41,7 +41,7 @@ defmodule Philomena.PollVotes do

  """
  def create_poll_votes(user, poll, attrs) do
    now = DateTime.utc_now() |> DateTime.truncate(:second)
    now = DateTime.utc_now(:second)
    poll_votes = filter_options(user, poll, now, attrs)

    Multi.new()

@@ -51,7 +51,7 @@ defmodule Philomena.Polls do
  """
  def create_poll(attrs \\ %{}) do
    %Poll{}
    |> Poll.update_changeset(attrs)
    |> Poll.changeset(attrs)
    |> Repo.insert()
  end

@@ -69,7 +69,7 @@ defmodule Philomena.Polls do
  """
  def update_poll(%Poll{} = poll, attrs) do
    poll
    |> Poll.update_changeset(attrs)
    |> Poll.changeset(attrs)
    |> Repo.update()
  end

@@ -3,22 +3,16 @@ defmodule Philomena.Polls.Poll do
  import Ecto.Changeset

  alias Philomena.Topics.Topic
  alias Philomena.Users.User
  alias Philomena.PollOptions.PollOption
  alias Philomena.Schema.Time

  schema "polls" do
    belongs_to :topic, Topic
    belongs_to :deleted_by, User
    has_many :options, PollOption

    field :title, :string
    field :vote_method, :string
    field :active_until, :utc_datetime
    field :active_until, PhilomenaQuery.Ecto.RelativeDate
    field :total_votes, :integer, default: 0
    field :hidden_from_users, :boolean, default: false
    field :deletion_reason, :string, default: ""
    field :until, :string, virtual: true

    timestamps(inserted_at: :created_at, type: :utc_datetime)
  end

@@ -26,16 +20,7 @@ defmodule Philomena.Polls.Poll do
  @doc false
  def changeset(poll, attrs) do
    poll
    |> cast(attrs, [])
    |> validate_required([])
    |> Time.propagate_time(:active_until, :until)
  end

  @doc false
  def update_changeset(poll, attrs) do
    poll
    |> cast(attrs, [:title, :until, :vote_method])
    |> Time.assign_time(:until, :active_until)
    |> cast(attrs, [:title, :active_until, :vote_method])
    |> validate_required([:title, :active_until, :vote_method])
    |> validate_length(:title, max: 140, count: :bytes)
    |> validate_inclusion(:vote_method, ["single", "multiple"])
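With `:active_until` now backed by `PhilomenaQuery.Ecto.RelativeDate`, the changeset casts the user-facing value directly instead of round-tripping through the virtual `:until` field. A hedged sketch; the exact input formats the custom type accepts are defined elsewhere in the codebase and are assumed here:

    alias Philomena.Polls.Poll

    changeset =
      Poll.changeset(%Poll{}, %{
        "title" => "Best season?",
        "vote_method" => "single",
        # Cast by PhilomenaQuery.Ecto.RelativeDate; a relative phrase like this
        # is assumed to be accepted alongside absolute timestamps.
        "active_until" => "7 days from now"
      })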
@ -16,11 +16,8 @@ defmodule Philomena.Posts do
|
|||
alias Philomena.IndexWorker
|
||||
alias Philomena.Forums.Forum
|
||||
alias Philomena.Notifications
|
||||
alias Philomena.NotificationWorker
|
||||
alias Philomena.Versions
|
||||
alias Philomena.Reports
|
||||
alias Philomena.Reports.Report
|
||||
alias Philomena.Users.User
|
||||
|
||||
@doc """
|
||||
Gets a single post.
|
||||
|
@ -51,7 +48,7 @@ defmodule Philomena.Posts do
|
|||
|
||||
"""
|
||||
def create_post(topic, attributes, params \\ %{}) do
|
||||
now = DateTime.utc_now()
|
||||
now = DateTime.utc_now(:second)
|
||||
|
||||
topic_query =
|
||||
Topic
|
||||
|
@ -66,7 +63,7 @@ defmodule Philomena.Posts do
|
|||
|> where(id: ^topic.forum_id)
|
||||
|
||||
Multi.new()
|
||||
|> Multi.all(:topic_lock, topic_lock_query)
|
||||
|> Multi.one(:topic, topic_lock_query)
|
||||
|> Multi.run(:post, fn repo, _ ->
|
||||
last_position =
|
||||
Post
|
||||
|
@ -95,7 +92,8 @@ defmodule Philomena.Posts do
|
|||
|
||||
{:ok, count}
|
||||
end)
|
||||
|> maybe_create_subscription_on_reply(topic, attributes[:user])
|
||||
|> Multi.run(:notification, ¬ify_post/2)
|
||||
|> Topics.maybe_subscribe_on(:topic, attributes[:user], :watch_on_reply)
|
||||
|> Repo.transaction()
|
||||
|> case do
|
||||
{:ok, %{post: post}} = result ->
|
||||
|
@ -108,58 +106,20 @@ defmodule Philomena.Posts do
|
|||
end
|
||||
end
|
||||
|
||||
defp maybe_create_subscription_on_reply(multi, topic, %User{watch_on_reply: true} = user) do
|
||||
multi
|
||||
|> Multi.run(:subscribe, fn _repo, _changes ->
|
||||
Topics.create_subscription(topic, user)
|
||||
end)
|
||||
end
|
||||
|
||||
defp maybe_create_subscription_on_reply(multi, _topic, _user) do
|
||||
multi
|
||||
end
|
||||
|
||||
def notify_post(post) do
|
||||
Exq.enqueue(Exq, "notifications", NotificationWorker, ["Posts", post.id])
|
||||
defp notify_post(_repo, %{post: post, topic: topic}) do
|
||||
Notifications.create_forum_post_notification(post.user, topic, post)
|
||||
end
|
||||
|
||||
def report_non_approved(%Post{approved: true}), do: false
|
||||
|
||||
def report_non_approved(post) do
|
||||
Reports.create_system_report(
|
||||
post.id,
|
||||
"Post",
|
||||
{"Post", post.id},
|
||||
"Approval",
|
||||
"Post contains externally-embedded images and has been flagged for review."
|
||||
)
|
||||
end
|
||||
|
||||
def perform_notify(post_id) do
|
||||
post = get_post!(post_id)
|
||||
|
||||
topic =
|
||||
post
|
||||
|> Repo.preload(:topic)
|
||||
|> Map.fetch!(:topic)
|
||||
|
||||
subscriptions =
|
||||
topic
|
||||
|> Repo.preload(:subscriptions)
|
||||
|> Map.fetch!(:subscriptions)
|
||||
|
||||
Notifications.notify(
|
||||
post,
|
||||
subscriptions,
|
||||
%{
|
||||
actor_id: topic.id,
|
||||
actor_type: "Topic",
|
||||
actor_child_id: post.id,
|
||||
actor_child_type: "Post",
|
||||
action: "posted a new reply in"
|
||||
}
|
||||
)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Updates a post.
|
||||
|
||||
|
@ -173,7 +133,7 @@ defmodule Philomena.Posts do
|
|||
|
||||
"""
|
||||
def update_post(%Post{} = post, editor, attrs) do
|
||||
now = DateTime.utc_now() |> DateTime.truncate(:second)
|
||||
now = DateTime.utc_now(:second)
|
||||
current_body = post.body
|
||||
current_reason = post.edit_reason
|
||||
|
||||
|
@ -216,11 +176,7 @@ defmodule Philomena.Posts do
|
|||
end
|
||||
|
||||
def hide_post(%Post{} = post, attrs, user) do
|
||||
reports =
|
||||
Report
|
||||
|> where(reportable_type: "Post", reportable_id: ^post.id)
|
||||
|> select([r], r.id)
|
||||
|> update(set: [open: false, state: "closed", admin_id: ^user.id])
|
||||
report_query = Reports.close_report_query({"Post", post.id}, user)
|
||||
|
||||
topics =
|
||||
Topic
|
||||
|
@ -236,7 +192,7 @@ defmodule Philomena.Posts do
|
|||
|
||||
Multi.new()
|
||||
|> Multi.update(:post, post)
|
||||
|> Multi.update_all(:reports, reports, [])
|
||||
|> Multi.update_all(:reports, report_query, [])
|
||||
|> Multi.update_all(:topics, topics, [])
|
||||
|> Multi.update_all(:forums, forums, [])
|
||||
|> Repo.transaction()
|
||||
|
@ -267,21 +223,15 @@ defmodule Philomena.Posts do
|
|||
end
|
||||
|
||||
def approve_post(%Post{} = post, user) do
|
||||
reports =
|
||||
Report
|
||||
|> where(reportable_type: "Post", reportable_id: ^post.id)
|
||||
|> select([r], r.id)
|
||||
|> update(set: [open: false, state: "closed", admin_id: ^user.id])
|
||||
|
||||
report_query = Reports.close_report_query({"Post", post.id}, user)
|
||||
post = Post.approve_changeset(post)
|
||||
|
||||
Multi.new()
|
||||
|> Multi.update(:post, post)
|
||||
|> Multi.update_all(:reports, reports, [])
|
||||
|> Multi.update_all(:reports, report_query, [])
|
||||
|> Repo.transaction()
|
||||
|> case do
|
||||
{:ok, %{post: post, reports: {_count, reports}}} ->
|
||||
notify_post(post)
|
||||
UserStatistics.inc_stat(post.user, :forum_posts)
|
||||
Reports.reindex_reports(reports)
|
||||
reindex_post(post)
|
||||
|
|
|
@ -15,15 +15,12 @@ defmodule Philomena.Posts.Post do
|
|||
field :edit_reason, :string
|
||||
field :ip, EctoNetwork.INET
|
||||
field :fingerprint, :string
|
||||
field :user_agent, :string, default: ""
|
||||
field :referrer, :string, default: ""
|
||||
field :topic_position, :integer
|
||||
field :hidden_from_users, :boolean, default: false
|
||||
field :anonymous, :boolean, default: false
|
||||
field :edited_at, :utc_datetime
|
||||
field :deletion_reason, :string, default: ""
|
||||
field :destroyed_content, :boolean, default: false
|
||||
field :name_at_post_time, :string
|
||||
field :approved, :boolean, default: false
|
||||
|
||||
timestamps(inserted_at: :created_at, type: :utc_datetime)
|
||||
|
@ -47,7 +44,6 @@ defmodule Philomena.Posts.Post do
|
|||
|> validate_required([:body])
|
||||
|> validate_length(:body, min: 1, max: 300_000, count: :bytes)
|
||||
|> change(attribution)
|
||||
|> put_name_at_post_time(attribution[:user])
|
||||
|> Approval.maybe_put_approval(attribution[:user])
|
||||
|> Approval.maybe_strip_images(attribution[:user])
|
||||
end
|
||||
|
@ -61,7 +57,6 @@ defmodule Philomena.Posts.Post do
|
|||
|> validate_length(:body, min: 1, max: 300_000, count: :bytes)
|
||||
|> change(attribution)
|
||||
|> change(topic_position: 0)
|
||||
|> put_name_at_post_time(attribution[:user])
|
||||
|> Approval.maybe_put_approval(attribution[:user])
|
||||
|> Approval.maybe_strip_images(attribution[:user])
|
||||
end
|
||||
|
@ -90,7 +85,4 @@ defmodule Philomena.Posts.Post do
|
|||
change(post)
|
||||
|> put_change(:approved, true)
|
||||
end
|
||||
|
||||
defp put_name_at_post_time(changeset, nil), do: changeset
|
||||
defp put_name_at_post_time(changeset, user), do: change(changeset, name_at_post_time: user.name)
|
||||
end
|
||||
|
|
|
@@ -90,8 +90,8 @@ defmodule Philomena.Posts.Query do
    |> Parser.parse(query_string, context)
  end

  def compile(user, query_string) do
    query_string = query_string || ""
  def compile(query_string, opts \\ []) do
    user = Keyword.get(opts, :user)

    case user do
      nil ->

@@ -52,8 +52,8 @@ defmodule Philomena.Posts.SearchIndex do
      author: if(!!post.user and !post.anonymous, do: String.downcase(post.user.name)),
      subject: post.topic.title,
      ip: post.ip |> to_string(),
      user_agent: post.user_agent,
      referrer: post.referrer,
      user_agent: "",
      referrer: "",
      fingerprint: post.fingerprint,
      topic_position: post.topic_position,
      forum: post.topic.forum.short_name,
@ -12,6 +12,31 @@ defmodule Philomena.Reports do
|
|||
alias Philomena.IndexWorker
|
||||
alias Philomena.Polymorphic
|
||||
|
||||
@doc """
|
||||
Returns the current number of open reports.
|
||||
|
||||
If the user is allowed to view reports, returns the current count.
|
||||
If the user is not allowed to view reports, returns `nil`.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> count_reports(%User{})
|
||||
nil
|
||||
|
||||
iex> count_reports(%User{role: "admin"})
|
||||
4
|
||||
|
||||
"""
|
||||
def count_open_reports(user) do
|
||||
if Canada.Can.can?(user, :index, Report) do
|
||||
Report
|
||||
|> where(open: true)
|
||||
|> Repo.aggregate(:count)
|
||||
else
|
||||
nil
|
||||
end
|
||||
end
|
||||
|
||||
@doc """
|
||||
Returns the list of reports.
|
||||
|
||||
|
@ -53,14 +78,59 @@ defmodule Philomena.Reports do
|
|||
{:error, %Ecto.Changeset{}}
|
||||
|
||||
"""
|
||||
def create_report(reportable_id, reportable_type, attribution, attrs \\ %{}) do
|
||||
%Report{reportable_id: reportable_id, reportable_type: reportable_type}
|
||||
def create_report({reportable_type, reportable_id} = _type_and_id, attribution, attrs \\ %{}) do
|
||||
%Report{reportable_type: reportable_type, reportable_id: reportable_id}
|
||||
|> Report.creation_changeset(attrs, attribution)
|
||||
|> Repo.insert()
|
||||
|> reindex_after_update()
|
||||
end
|
||||
|
||||
def create_system_report(reportable_id, reportable_type, category, reason) do
|
||||
@doc """
|
||||
Returns an `m:Ecto.Query` which updates all reports for the given `reportable_type`
|
||||
and `reportable_id` to close them.
|
||||
|
||||
Because this is only a query due to the limitations of `m:Ecto.Multi`, this must be
|
||||
coupled with an associated call to `reindex_reports/1` to operate correctly, e.g.:
|
||||
|
||||
report_query = Reports.close_system_report_query({"Image", image.id}, user)
|
||||
|
||||
Multi.new()
|
||||
|> Multi.update_all(:reports, report_query, [])
|
||||
|> Repo.transaction()
|
||||
|> case do
|
||||
{:ok, %{reports: {_count, reports}} = result} ->
|
||||
Reports.reindex_reports(reports)
|
||||
|
||||
{:ok, result}
|
||||
|
||||
error ->
|
||||
error
|
||||
end
|
||||
|
||||
## Examples
|
||||
|
||||
iex> close_system_report_query("Image", 1, %User{})
|
||||
#Ecto.Query<...>
|
||||
|
||||
"""
|
||||
def close_report_query({reportable_type, reportable_id} = _type_and_id, closing_user) do
|
||||
from r in Report,
|
||||
where: r.reportable_type == ^reportable_type and r.reportable_id == ^reportable_id,
|
||||
select: r.id,
|
||||
update: [set: [open: false, state: "closed", admin_id: ^closing_user.id]]
|
||||
end
|
||||
|
||||
@doc """
|
||||
Automatically create a report with the given category and reason on the given
|
||||
`reportable_id` and `reportable_type`.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> create_system_report({"Comment", 1}, "Other", "Custom report reason")
|
||||
{:ok, %Report{}}
|
||||
|
||||
"""
|
||||
def create_system_report({reportable_type, reportable_id} = _type_and_id, category, reason) do
|
||||
attrs = %{
|
||||
reason: reason,
|
||||
category: category
|
||||
|
@ -69,12 +139,10 @@ defmodule Philomena.Reports do
|
|||
attributes = %{
|
||||
system: true,
|
||||
ip: %Postgrex.INET{address: {127, 0, 0, 1}, netmask: 32},
|
||||
fingerprint: "ffff",
|
||||
user_agent:
|
||||
"Mozilla/5.0 (X11; Philomena; Linux x86_64; rv:86.0) Gecko/20100101 Firefox/86.0"
|
||||
fingerprint: "ffff"
|
||||
}
|
||||
|
||||
%Report{reportable_id: reportable_id, reportable_type: reportable_type}
|
||||
%Report{reportable_type: reportable_type, reportable_id: reportable_id}
|
||||
|> Report.creation_changeset(attrs, attributes)
|
||||
|> Repo.insert()
|
||||
|> reindex_after_update()
|
||||
|
@ -128,6 +196,15 @@ defmodule Philomena.Reports do
|
|||
Report.changeset(report, %{})
|
||||
end
|
||||
|
||||
@doc """
|
||||
Marks the report as claimed by the given user.
|
||||
|
||||
## Example
|
||||
|
||||
iex> claim_report(%Report{}, %User{})
|
||||
{:ok, %Report{}}
|
||||
|
||||
"""
|
||||
def claim_report(%Report{} = report, user) do
|
||||
report
|
||||
|> Report.claim_changeset(user)
|
||||
|
@ -135,6 +212,15 @@ defmodule Philomena.Reports do
|
|||
|> reindex_after_update()
|
||||
end
|
||||
|
||||
@doc """
|
||||
Marks the report as unclaimed.
|
||||
|
||||
## Example
|
||||
|
||||
iex> unclaim_report(%Report{})
|
||||
{:ok, %Report{}}
|
||||
|
||||
"""
|
||||
def unclaim_report(%Report{} = report) do
|
||||
report
|
||||
|> Report.unclaim_changeset()
|
||||
|
@ -142,6 +228,15 @@ defmodule Philomena.Reports do
|
|||
|> reindex_after_update()
|
||||
end
|
||||
|
||||
@doc """
|
||||
Marks the report as closed by the given user.
|
||||
|
||||
## Example
|
||||
|
||||
iex> close_report(%Report{}, %User{})
|
||||
{:ok, %Report{}}
|
||||
|
||||
"""
|
||||
def close_report(%Report{} = report, user) do
|
||||
report
|
||||
|> Report.close_changeset(user)
|
||||
|
@ -149,6 +244,15 @@ defmodule Philomena.Reports do
|
|||
|> reindex_after_update()
|
||||
end
|
||||
|
||||
@doc """
|
||||
Reindex all reports where the user or admin has `old_name`.
|
||||
|
||||
## Example
|
||||
|
||||
iex> user_name_reindex("Administrator", "Administrator2")
|
||||
{:ok, %Req.Response{}}
|
||||
|
||||
"""
|
||||
def user_name_reindex(old_name, new_name) do
|
||||
data = ReportIndex.user_name_update_by_query(old_name, new_name)
|
||||
|
||||
|
@ -165,18 +269,25 @@ defmodule Philomena.Reports do
|
|||
result
|
||||
end
|
||||
|
||||
@doc """
|
||||
Callback for post-transaction update.
|
||||
|
||||
See `close_report_query/2` for more information and example.
|
||||
"""
|
||||
def reindex_reports(report_ids) do
|
||||
Exq.enqueue(Exq, "indexing", IndexWorker, ["Reports", "id", report_ids])
|
||||
|
||||
report_ids
|
||||
end
|
||||
|
||||
@doc false
|
||||
def reindex_report(%Report{} = report) do
|
||||
Exq.enqueue(Exq, "indexing", IndexWorker, ["Reports", "id", [report.id]])
|
||||
|
||||
report
|
||||
end
|
||||
|
||||
@doc false
|
||||
def perform_reindex(column, condition) do
|
||||
Report
|
||||
|> where([r], field(r, ^column) in ^condition)
|
||||
|
@ -185,14 +296,4 @@ defmodule Philomena.Reports do
|
|||
|> Polymorphic.load_polymorphic(reportable: [reportable_id: :reportable_type])
|
||||
|> Enum.map(&Search.index_document(&1, Report))
|
||||
end
|
||||
|
||||
def count_reports(user) do
|
||||
if Canada.Can.can?(user, :index, Report) do
|
||||
Report
|
||||
|> where(open: true)
|
||||
|> Repo.aggregate(:count, :id)
|
||||
else
|
||||
nil
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -17,6 +17,6 @@ defmodule Philomena.Reports.Query do
|
|||
def compile(query_string) do
|
||||
fields()
|
||||
|> Parser.new()
|
||||
|> Parser.parse(query_string || "", %{})
|
||||
|> Parser.parse(query_string, %{})
|
||||
end
|
||||
end
|
||||
|
|
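Note that `compile/1` no longer substitutes `""` for a missing query string, so callers are now responsible for passing a string, e.g.:

    Philomena.Reports.Query.compile(params["q"] || "")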