mirror of
https://github.com/philomena-dev/philomena.git
synced 2024-11-27 05:37:59 +01:00
Merge remote-tracking branch 'origin/master' into redesign

Commit 128f63639f: 64 changed files with 1309 additions and 692 deletions
.credo.exs (new file, 11 lines)

@@ -0,0 +1,11 @@
+%{
+  configs: %{
+    name: "default",
+    checks: %{
+      disabled: [
+        {Credo.Check.Refactor.CondStatements, false},
+        {Credo.Check.Refactor.NegatedConditionsWithElse, false}
+      ]
+    }
+  }
+}

@@ -3,32 +3,34 @@
  */
 
 import store from './utils/store';
-import { $, $$ } from './utils/dom';
-import { assertNotNull } from './utils/assert';
+import { $, $$, hideEl, showEl } from './utils/dom';
+import { assertNotNull, assertType } from './utils/assert';
 import '../types/ujs';
 
 let touchMoved = false;
 
 function formResult({target, detail}: FetchcompleteEvent) {
-  const elements: {[key: string]: string} = {
+  const elements: Record<string, string> = {
     '#description-form': '.image-description',
     '#uploader-form': '.image-uploader'
   };
 
-  function showResult(resultEl: HTMLElement, formEl: HTMLFormElement, response: string) {
+  function showResult(formEl: HTMLFormElement, resultEl: HTMLElement, response: string) {
     resultEl.innerHTML = response;
-    resultEl.classList.remove('hidden');
-    formEl.classList.add('hidden');
-    const inputEl = $<HTMLInputElement>('input[type="submit"]', formEl);
-    const buttonEl = $<HTMLButtonElement>('button', formEl);
+    hideEl(formEl);
+    showEl(resultEl);
 
-    if (inputEl) inputEl.disabled = false;
-    if (buttonEl) buttonEl.disabled = false;
+    $$<HTMLInputElement | HTMLButtonElement>('input[type="submit"],button', formEl).forEach(button => {
+      button.disabled = false;
+    });
   }
 
-  for (const element in elements) {
-    if (target.matches(element)) {
-      detail.text().then(text => showResult(assertNotNull($<HTMLElement>(elements[element])), target as HTMLFormElement, text));
+  for (const [ formSelector, resultSelector ] of Object.entries(elements)) {
+    if (target.matches(formSelector)) {
+      const form = assertType(target, HTMLFormElement);
+      const result = assertNotNull($<HTMLElement>(resultSelector));
+
+      detail.text().then(text => showResult(form, result, text));
     }
   }
 }

@@ -79,11 +81,11 @@ export function setupEvents() {
   const extrameta = $<HTMLElement>('#extrameta');
 
   if (extrameta && store.get('hide_uploader')) {
-    extrameta.classList.add('hidden');
+    hideEl(extrameta);
   }
 
   if (store.get('hide_score')) {
-    $$<HTMLElement>('.upvotes,.score,.downvotes').forEach(s => s.classList.add('hidden'));
+    $$<HTMLElement>('.upvotes,.score,.downvotes').forEach(s => hideEl(s));
   }
 
   document.addEventListener('fetchcomplete', formResult);

@@ -29,7 +29,8 @@ function getNewNotifications() {
   }
 
   fetchJson('GET', '/notifications/unread')
-    .then(handleError).then(response => response.json())
+    .then(handleError)
+    .then(response => response.json())
     .then(({ notifications }) => {
       updateNotificationTicker(notifications);
       storeNotificationCount(notifications);

@@ -38,9 +39,9 @@ function getNewNotifications() {
     });
 }
 
-function updateNotificationTicker(notificationCount: unknown) {
+function updateNotificationTicker(notificationCount: string | null) {
   const ticker = assertNotNull($<HTMLSpanElement>('.js-notification-ticker'));
-  const parsedNotificationCount = Number(notificationCount as string);
+  const parsedNotificationCount = Number(notificationCount);
 
   ticker.dataset.notificationCount = parsedNotificationCount.toString();
   ticker.textContent = parsedNotificationCount.toString();

@@ -58,11 +59,8 @@ export function setupNotifications() {
   setTimeout(getNewNotifications, NOTIFICATION_INTERVAL);
 
   // Update the current number of notifications based on the latest page load
-  const ticker = $<HTMLSpanElement>('.js-notification-ticker');
-
-  if (ticker) {
-    storeNotificationCount(assertNotUndefined(ticker.dataset.notificationCount));
-  }
+  const ticker = assertNotNull($<HTMLSpanElement>('.js-notification-ticker'));
+  storeNotificationCount(assertNotUndefined(ticker.dataset.notificationCount));
 
   // Update ticker when the stored value changes - this will occur in all open tabs
   store.watch('notificationCount', updateNotificationTicker);

@@ -44,19 +44,19 @@ function isOK(event: KeyboardEvent): boolean {
 }
 
 const keyCodes: ShortcutKeyMap = {
-  KeyJ() { click('.js-prev'); },             // J - go to previous image
-  KeyI() { click('.js-up'); },               // I - go to index page
-  KeyK() { click('.js-next'); },             // K - go to next image
-  KeyR() { click('.js-rand'); },             // R - go to random image
-  KeyS() { click('.js-source-link'); },      // S - go to image source
-  KeyL() { click('.js-tag-sauce-toggle'); }, // L - edit tags
-  KeyO() { openFullView(); },                // O - open original
-  KeyV() { openFullViewNewTab(); },          // V - open original in a new tab
-  KeyF() { // F - favourite image
+  'j'() { click('.js-prev'); },             // J - go to previous image
+  'i'() { click('.js-up'); },               // I - go to index page
+  'k'() { click('.js-next'); },             // K - go to next image
+  'r'() { click('.js-rand'); },             // R - go to random image
+  's'() { click('.js-source-link'); },      // S - go to image source
+  'l'() { click('.js-tag-sauce-toggle'); }, // L - edit tags
+  'o'() { openFullView(); },                // O - open original
+  'v'() { openFullViewNewTab(); },          // V - open original in a new tab
+  'f'() { // F - favourite image
     click(getHover() ? `a.interaction--fave[data-image-id="${getHover()}"]`
                      : '.block__header a.interaction--fave');
   },
-  KeyU() { // U - upvote image
+  'u'() { // U - upvote image
     click(getHover() ? `a.interaction--upvote[data-image-id="${getHover()}"]`
                      : '.block__header a.interaction--upvote');
   },

@@ -64,8 +64,8 @@ const keyCodes: ShortcutKeyMap = {
 
 export function listenForKeys() {
   document.addEventListener('keydown', (event: KeyboardEvent) => {
-    if (isOK(event) && keyCodes[event.code]) {
-      keyCodes[event.code]();
+    if (isOK(event) && keyCodes[event.key]) {
+      keyCodes[event.key]();
       event.preventDefault();
     }
   });

@@ -38,9 +38,9 @@ export default {
   },
 
   // Watch changes to a specified key - returns value on change
-  watch(key: string, callback: (value: unknown) => void) {
+  watch<Value = unknown>(key: string, callback: (value: Value | null) => void) {
     const handler = (event: StorageEvent) => {
-      if (event.key === key) callback(this.get(key));
+      if (event.key === key) callback(this.get<Value>(key));
     };
     window.addEventListener('storage', handler);
     return () => window.removeEventListener('storage', handler);

@@ -16,9 +16,6 @@ config :logger,
 config :philomena,
   ecto_repos: [Philomena.Repo]
 
-config :elastix,
-  json_codec: Jason
-
 config :exq,
   max_retries: 5,
   scheduler_enable: true,

@@ -37,6 +34,9 @@ config :philomena, PhilomenaWeb.Endpoint,
   render_errors: [view: PhilomenaWeb.ErrorView, accepts: ~w(html json)],
   pubsub_server: Philomena.PubSub
 
+# Configure only SMTP for mailing, not HTTP
+config :swoosh, :api_client, false
+
 # Markdown
 config :philomena, Philomena.Native,
   crate: "philomena",

@@ -87,10 +87,6 @@ config :philomena, :s3_secondary_options,
 
 config :philomena, :s3_secondary_bucket, System.get_env("ALT_S3_BUCKET")
 
-# Don't bail on OpenSearch's self-signed certificate
-config :elastix,
-  httpoison_options: [ssl: [verify: :verify_none]]
-
 config :ex_aws, http_client: PhilomenaMedia.Req
 
 config :ex_aws, :retries,

@@ -68,7 +68,7 @@ services:
       driver: "none"
 
   opensearch:
-    image: opensearchproject/opensearch:2.14.0
+    image: opensearchproject/opensearch:2.15.0
     volumes:
       - opensearch_data:/usr/share/opensearch/data
      - ./docker/opensearch/opensearch.yml:/usr/share/opensearch/config/opensearch.yml

@@ -4,7 +4,7 @@ ADD https://api.github.com/repos/philomena-dev/FFmpeg/git/refs/heads/release/6.1
 RUN (echo "https://github.com/philomena-dev/prebuilt-ffmpeg/raw/master"; cat /etc/apk/repositories) > /tmp/repositories \
     && cp /tmp/repositories /etc/apk/repositories \
     && apk update --allow-untrusted \
-    && apk add inotify-tools build-base git ffmpeg ffmpeg-dev npm nodejs file-dev libpng-dev gifsicle optipng libjpeg-turbo-utils librsvg rsvg-convert imagemagick postgresql16-client wget rust cargo --allow-untrusted \
+    && apk add inotify-tools build-base git ffmpeg ffmpeg-dev npm nodejs file-dev libjpeg-turbo-dev libpng-dev gifsicle optipng libjpeg-turbo-utils librsvg rsvg-convert imagemagick postgresql16-client wget rust cargo --allow-untrusted \
     && mix local.hex --force \
     && mix local.rebar --force

@@ -1,15 +1,16 @@
 DATABASE ?= philomena
+OPENSEARCH_URL ?= http://localhost:9200/
 ELASTICDUMP ?= elasticdump
 .ONESHELL:
 
 all: import_es
 
 import_es: dump_jsonl
-	$(ELASTICDUMP) --input=comments.jsonl --output=http://localhost:9200/ --output-index=comments --limit 10000 --retryAttempts=5 --type=data --transform="doc._source = Object.assign({},doc); doc._id = doc.id"
+	$(ELASTICDUMP) --input=comments.jsonl --output=$OPENSEARCH_URL --output-index=comments --limit 10000 --retryAttempts=5 --type=data --transform="doc._source = Object.assign({},doc); doc._id = doc.id"
 
 dump_jsonl: metadata authors tags
-	psql $(DATABASE) -v ON_ERROR_STOP=1 <<< 'copy (select temp_comments.jsonb_object_agg(object) from temp_comments.comment_search_json group by comment_id) to stdout;' > comments.jsonl
-	psql $(DATABASE) -v ON_ERROR_STOP=1 <<< 'drop schema temp_comments cascade;'
+	psql $(DATABASE) -v ON_ERROR_STOP=1 -c 'copy (select temp_comments.jsonb_object_agg(object) from temp_comments.comment_search_json group by comment_id) to stdout;' > comments.jsonl
+	psql $(DATABASE) -v ON_ERROR_STOP=1 -c 'drop schema temp_comments cascade;'
 	sed -i comments.jsonl -e 's/\\\\/\\/g'
 
 metadata: comment_search_json

@@ -1,19 +1,16 @@
 DATABASE ?= philomena
-ELASTICSEARCH_URL ?= http://localhost:9200/
+OPENSEARCH_URL ?= http://localhost:9200/
 ELASTICDUMP ?= elasticdump
-# uncomment if getting "redirection unexpected" error on dump_jsonl
-#SHELL=/bin/bash
-
 .ONESHELL:
 
 all: import_es
 
 import_es: dump_jsonl
-	$(ELASTICDUMP) --input=filters.jsonl --output=$(ELASTICSEARCH_URL) --output-index=filters --limit 10000 --retryAttempts=5 --type=data --transform="doc._source = Object.assign({},doc); doc._id = doc.id"
+	$(ELASTICDUMP) --input=filters.jsonl --output=$OPENSEARCH_URL --output-index=filters --limit 10000 --retryAttempts=5 --type=data --transform="doc._source = Object.assign({},doc); doc._id = doc.id"
 
 dump_jsonl: metadata creators
-	psql $(DATABASE) -v ON_ERROR_STOP=1 <<< 'copy (select temp_filters.jsonb_object_agg(object) from temp_filters.filter_search_json group by filter_id) to stdout;' > filters.jsonl
-	psql $(DATABASE) -v ON_ERROR_STOP=1 <<< 'drop schema temp_filters cascade;'
+	psql $(DATABASE) -v ON_ERROR_STOP=1 -c 'copy (select temp_filters.jsonb_object_agg(object) from temp_filters.filter_search_json group by filter_id) to stdout;' > filters.jsonl
+	psql $(DATABASE) -v ON_ERROR_STOP=1 -c 'drop schema temp_filters cascade;'
 	sed -i filters.jsonl -e 's/\\\\/\\/g'
 
 metadata: filter_search_json

@@ -1,15 +1,16 @@
 DATABASE ?= philomena
+OPENSEARCH_URL ?= http://localhost:9200/
 ELASTICDUMP ?= elasticdump
 .ONESHELL:
 
 all: import_es
 
 import_es: dump_jsonl
-	$(ELASTICDUMP) --input=galleries.jsonl --output=http://localhost:9200/ --output-index=galleries --limit 10000 --retryAttempts=5 --type=data --transform="doc._source = Object.assign({},doc); doc._id = doc.id"
+	$(ELASTICDUMP) --input=galleries.jsonl --output=$OPENSEARCH_URL --output-index=galleries --limit 10000 --retryAttempts=5 --type=data --transform="doc._source = Object.assign({},doc); doc._id = doc.id"
 
 dump_jsonl: metadata subscribers images
-	psql $(DATABASE) -v ON_ERROR_STOP=1 <<< 'copy (select temp_galleries.jsonb_object_agg(object) from temp_galleries.gallery_search_json group by gallery_id) to stdout;' > galleries.jsonl
-	psql $(DATABASE) -v ON_ERROR_STOP=1 <<< 'drop schema temp_galleries cascade;'
+	psql $(DATABASE) -v ON_ERROR_STOP=1 -c 'copy (select temp_galleries.jsonb_object_agg(object) from temp_galleries.gallery_search_json group by gallery_id) to stdout;' > galleries.jsonl
+	psql $(DATABASE) -v ON_ERROR_STOP=1 -c 'drop schema temp_galleries cascade;'
 	sed -i galleries.jsonl -e 's/\\\\/\\/g'
 
 metadata: gallery_search_json

@@ -1,15 +1,16 @@
 DATABASE ?= philomena
+OPENSEARCH_URL ?= http://localhost:9200/
 ELASTICDUMP ?= elasticdump
 .ONESHELL:
 
 all: import_es
 
 import_es: dump_jsonl
-	$(ELASTICDUMP) --input=images.jsonl --output=http://localhost:9200/ --output-index=images --limit 10000 --retryAttempts=5 --type=data --transform="doc._source = Object.assign({},doc); doc._id = doc.id"
+	$(ELASTICDUMP) --input=images.jsonl --output=$OPENSEARCH_URL --output-index=images --limit 10000 --retryAttempts=5 --type=data --transform="doc._source = Object.assign({},doc); doc._id = doc.id"
 
 dump_jsonl: metadata true_uploaders uploaders deleters galleries tags sources hides upvotes downvotes faves tag_names
-	psql $(DATABASE) -v ON_ERROR_STOP=1 <<< 'copy (select temp_images.jsonb_object_agg(object) from temp_images.image_search_json group by image_id) to stdout;' > images.jsonl
-	psql $(DATABASE) -v ON_ERROR_STOP=1 <<< 'drop schema temp_images cascade;'
+	psql $(DATABASE) -v ON_ERROR_STOP=1 -c 'copy (select temp_images.jsonb_object_agg(object) from temp_images.image_search_json group by image_id) to stdout;' > images.jsonl
+	psql $(DATABASE) -v ON_ERROR_STOP=1 -c 'drop schema temp_images cascade;'
 	sed -i images.jsonl -e 's/\\\\/\\/g'
 
 metadata: image_search_json

@@ -84,7 +85,7 @@ tags: image_search_json
 	    'body_type_tag_count', count(case when t.category = 'body-type' then t.category else null end),
 	    'content_fanmade_tag_count', count(case when t.category = 'content-fanmade' then t.category else null end),
 	    'content_official_tag_count', count(case when t.category = 'content-official' then t.category else null end),
-	    'spoiler_tag_count', count(case when t.category = 'spoiler' then t.category else null end),
+	    'spoiler_tag_count', count(case when t.category = 'spoiler' then t.category else null end)
 	  ) from image_taggings it inner join tags t on t.id = it.tag_id group by image_id;
 	SQL

@@ -1,15 +1,16 @@
 DATABASE ?= philomena
+OPENSEARCH_URL ?= http://localhost:9200/
 ELASTICDUMP ?= elasticdump
 .ONESHELL:
 
 all: import_es
 
 import_es: dump_jsonl
-	$(ELASTICDUMP) --input=posts.jsonl --output=http://localhost:9200/ --output-index=posts --limit 10000 --retryAttempts=5 --type=data --transform="doc._source = Object.assign({},doc); doc._id = doc.id"
+	$(ELASTICDUMP) --input=posts.jsonl --output=$OPENSEARCH_URL --output-index=posts --limit 10000 --retryAttempts=5 --type=data --transform="doc._source = Object.assign({},doc); doc._id = doc.id"
 
 dump_jsonl: metadata authors
-	psql $(DATABASE) -v ON_ERROR_STOP=1 <<< 'copy (select temp_posts.jsonb_object_agg(object) from temp_posts.post_search_json group by post_id) to stdout;' > posts.jsonl
-	psql $(DATABASE) -v ON_ERROR_STOP=1 <<< 'drop schema temp_posts cascade;'
+	psql $(DATABASE) -v ON_ERROR_STOP=1 -c 'copy (select temp_posts.jsonb_object_agg(object) from temp_posts.post_search_json group by post_id) to stdout;' > posts.jsonl
+	psql $(DATABASE) -v ON_ERROR_STOP=1 -c 'drop schema temp_posts cascade;'
 	sed -i posts.jsonl -e 's/\\\\/\\/g'
 
 metadata: post_search_json

@@ -1,15 +1,16 @@
 DATABASE ?= philomena
+OPENSEARCH_URL ?= http://localhost:9200/
 ELASTICDUMP ?= elasticdump
 .ONESHELL:
 
 all: import_es
 
 import_es: dump_jsonl
-	$(ELASTICDUMP) --input=reports.jsonl --output=http://localhost:9200/ --output-index=reports --limit 10000 --retryAttempts=5 --type=data --transform="doc._source = Object.assign({},doc); doc._id = doc.id"
+	$(ELASTICDUMP) --input=reports.jsonl --output=$OPENSEARCH_URL --output-index=reports --limit 10000 --retryAttempts=5 --type=data --transform="doc._source = Object.assign({},doc); doc._id = doc.id"
 
 dump_jsonl: metadata image_ids comment_image_ids
-	psql $(DATABASE) -v ON_ERROR_STOP=1 <<< 'copy (select temp_reports.jsonb_object_agg(object) from temp_reports.report_search_json group by report_id) to stdout;' > reports.jsonl
-	psql $(DATABASE) -v ON_ERROR_STOP=1 <<< 'drop schema temp_reports cascade;'
+	psql $(DATABASE) -v ON_ERROR_STOP=1 -c 'copy (select temp_reports.jsonb_object_agg(object) from temp_reports.report_search_json group by report_id) to stdout;' > reports.jsonl
+	psql $(DATABASE) -v ON_ERROR_STOP=1 -c 'drop schema temp_reports cascade;'
 	sed -i reports.jsonl -e 's/\\\\/\\/g'
 
 metadata: report_search_json

@@ -1,15 +1,16 @@
 DATABASE ?= philomena
+OPENSEARCH_URL ?= http://localhost:9200/
 ELASTICDUMP ?= elasticdump
 .ONESHELL:
 
 all: import_es
 
 import_es: dump_jsonl
-	$(ELASTICDUMP) --input=tags.jsonl --output=http://localhost:9200/ --output-index=tags --limit 10000 --retryAttempts=5 --type=data --transform="doc._source = Object.assign({},doc); doc._id = doc.id"
+	$(ELASTICDUMP) --input=tags.jsonl --output=$OPENSEARCH_URL --output-index=tags --limit 10000 --retryAttempts=5 --type=data --transform="doc._source = Object.assign({},doc); doc._id = doc.id"
 
 dump_jsonl: metadata aliases implied_tags implied_by_tags
-	psql $(DATABASE) -v ON_ERROR_STOP=1 <<< 'copy (select temp_tags.jsonb_object_agg(object) from temp_tags.tag_search_json group by tag_id) to stdout;' > tags.jsonl
-	psql $(DATABASE) -v ON_ERROR_STOP=1 <<< 'drop schema temp_tags cascade;'
+	psql $(DATABASE) -v ON_ERROR_STOP=1 -c 'copy (select temp_tags.jsonb_object_agg(object) from temp_tags.tag_search_json group by tag_id) to stdout;' > tags.jsonl
+	psql $(DATABASE) -v ON_ERROR_STOP=1 -c 'drop schema temp_tags cascade;'
 	sed -i tags.jsonl -e 's/\\\\/\\/g'
 
 metadata: tag_search_json

@@ -7,53 +7,88 @@ defmodule Philomena.Adverts do
   alias Philomena.Repo
 
   alias Philomena.Adverts.Advert
+  alias Philomena.Adverts.Restrictions
+  alias Philomena.Adverts.Server
   alias Philomena.Adverts.Uploader
 
+  @doc """
+  Gets an advert that is currently live.
+
+  Returns the advert, or nil if nothing was live.
+
+      iex> random_live()
+      nil
+
+      iex> random_live()
+      %Advert{}
+
+  """
   def random_live do
+    random_live_for_tags([])
   end
 
+  @doc """
+  Gets an advert that is currently live, matching any tagging restrictions
+  for the given image.
+
+  Returns the advert, or nil if nothing was live.
+
+  ## Examples
+
+      iex> random_live(%Image{})
+      nil
+
+      iex> random_live(%Image{})
+      %Advert{}
+
+  """
+  def random_live(image) do
+    image
+    |> Repo.preload(:tags)
+    |> Map.get(:tags)
+    |> Enum.map(& &1.name)
+    |> random_live_for_tags()
+  end
 
+  defp random_live_for_tags(tags) do
     now = DateTime.utc_now()
+    restrictions = Restrictions.tags(tags)
 
-    Advert
-    |> where(live: true, restrictions: "none")
-    |> where([a], a.start_date < ^now and a.finish_date > ^now)
-    |> order_by(asc: fragment("random()"))
-    |> limit(1)
-    |> Repo.one()
+    query =
+      from a in Advert,
+        where: a.live == true,
+        where: a.restrictions in ^restrictions,
+        where: a.start_date < ^now and a.finish_date > ^now,
+        order_by: [asc: fragment("random()")],
+        limit: 1
+
+    Repo.one(query)
   end
 
-  def random_live_for(image) do
-    image = Repo.preload(image, :tags)
-    now = DateTime.utc_now()
+  @doc """
+  Asynchronously records a new impression.
 
-    Advert
-    |> where(live: true)
-    |> where([a], a.restrictions in ^restrictions(image))
-    |> where([a], a.start_date < ^now and a.finish_date > ^now)
-    |> order_by(asc: fragment("random()"))
-    |> limit(1)
-    |> Repo.one()
+  ## Example
+
+      iex> record_impression(%Advert{})
+      :ok
+
+  """
+  def record_impression(%Advert{id: id}) do
+    Server.record_impression(id)
   end
 
-  defp sfw?(image) do
-    image_tags = MapSet.new(image.tags |> Enum.map(& &1.name))
-    sfw_tags = MapSet.new(["safe", "suggestive"])
-    intersect = MapSet.intersection(image_tags, sfw_tags)
+  @doc """
+  Asynchronously records a new click.
 
-    MapSet.size(intersect) > 0
-  end
+  ## Example
 
-  defp nsfw?(image) do
-    image_tags = MapSet.new(image.tags |> Enum.map(& &1.name))
-    nsfw_tags = MapSet.new(["questionable", "explicit"])
-    intersect = MapSet.intersection(image_tags, nsfw_tags)
+      iex> record_click(%Advert{})
+      :ok
 
-    MapSet.size(intersect) > 0
-  end
 
-  defp restrictions(image) do
-    restrictions = ["none"]
-    restrictions = if nsfw?(image), do: ["nsfw" | restrictions], else: restrictions
-    restrictions = if sfw?(image), do: ["sfw" | restrictions], else: restrictions
-    restrictions
+  """
+  def record_click(%Advert{id: id}) do
+    Server.record_click(id)
   end
 
   @doc """

@@ -102,7 +137,7 @@ defmodule Philomena.Adverts do
   end
 
   @doc """
-  Updates an advert.
+  Updates an Advert without updating its image.
 
   ## Examples
 

@@ -119,6 +154,18 @@ defmodule Philomena.Adverts do
     |> Repo.update()
   end
 
+  @doc """
+  Updates the image for an Advert.
+
+  ## Examples
+
+      iex> update_advert_image(advert, %{image: new_value})
+      {:ok, %Advert{}}
+
+      iex> update_advert_image(advert, %{image: bad_value})
+      {:error, %Ecto.Changeset{}}
+
+  """
+  def update_advert_image(%Advert{} = advert, attrs) do
+    advert
+    |> Advert.changeset(attrs)

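The public surface that replaces the old query helpers is small; a hedged sketch of the call flow the new docs imply (variable names assumed, not part of the commit):

    # Pick a live advert for an image page and record that it was shown.
    # `image` is assumed to be a loaded %Philomena.Images.Image{}.
    case Philomena.Adverts.random_live(image) do
      nil ->
        :no_advert

      advert ->
        # Batched asynchronously by Philomena.Adverts.Server; returns :ok.
        Philomena.Adverts.record_impression(advert)
        advert
    end
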
@@ -1,33 +1,9 @@
-defmodule PhilomenaWeb.AdvertUpdater do
+defmodule Philomena.Adverts.Recorder do
   alias Philomena.Adverts.Advert
   alias Philomena.Repo
   import Ecto.Query
 
-  def child_spec([]) do
-    %{
-      id: PhilomenaWeb.AdvertUpdater,
-      start: {PhilomenaWeb.AdvertUpdater, :start_link, [[]]}
-    }
-  end
-
-  def start_link([]) do
-    {:ok, spawn_link(&init/0)}
-  end
-
-  def cast(type, advert_id) when type in [:impression, :click] do
-    pid = Process.whereis(:advert_updater)
-    if pid, do: send(pid, {type, advert_id})
-  end
-
-  defp init do
-    Process.register(self(), :advert_updater)
-    run()
-  end
-
-  defp run do
-    # Read impression counts from mailbox
-    {impressions, clicks} = receive_all()
-
+  def run(%{impressions: impressions, clicks: clicks}) do
     now = DateTime.utc_now() |> DateTime.truncate(:second)
 
     # Create insert statements for Ecto

@@ -41,24 +17,7 @@ defmodule PhilomenaWeb.AdvertUpdater do
     Repo.insert_all(Advert, impressions, on_conflict: impressions_update, conflict_target: [:id])
     Repo.insert_all(Advert, clicks, on_conflict: clicks_update, conflict_target: [:id])
 
-    :timer.sleep(:timer.seconds(10))
-
-    run()
-  end
-
-  defp receive_all(impressions \\ %{}, clicks \\ %{}) do
-    receive do
-      {:impression, advert_id} ->
-        impressions = Map.update(impressions, advert_id, 1, &(&1 + 1))
-        receive_all(impressions, clicks)
-
-      {:click, advert_id} ->
-        clicks = Map.update(clicks, advert_id, 1, &(&1 + 1))
-        receive_all(impressions, clicks)
-    after
-      0 ->
-        {impressions, clicks}
-    end
+    :ok
   end
 
   defp impressions_insert_all({advert_id, impressions}, now) do

lib/philomena/adverts/restrictions.ex (new file, 47 lines)

@@ -0,0 +1,47 @@
+defmodule Philomena.Adverts.Restrictions do
+  @moduledoc """
+  Advert restriction application.
+  """
+
+  @type restriction :: String.t()
+  @type restriction_list :: [restriction()]
+  @type tag_list :: [String.t()]
+
+  @nsfw_tags MapSet.new(["questionable", "explicit"])
+  @sfw_tags MapSet.new(["safe", "suggestive"])
+
+  @doc """
+  Calculates the restrictions available to a given tag list.
+
+  Returns a list containing `"none"`, and neither or one of `"sfw"`, `"nsfw"`.
+
+  ## Examples
+
+      iex> tags([])
+      ["none"]
+
+      iex> tags(["safe"])
+      ["sfw", "none"]
+
+      iex> tags(["explicit"])
+      ["nsfw", "none"]
+
+  """
+  @spec tags(tag_list()) :: restriction_list()
+  def tags(tags) do
+    tags = MapSet.new(tags)
+
+    ["none"]
+    |> apply_if(tags, @nsfw_tags, "nsfw")
+    |> apply_if(tags, @sfw_tags, "sfw")
+  end
+
+  @spec apply_if(restriction_list(), MapSet.t(), MapSet.t(), restriction()) :: restriction_list()
+  defp apply_if(restrictions, tags, test, new_restriction) do
+    if MapSet.disjoint?(tags, test) do
+      restrictions
+    else
+      [new_restriction | restrictions]
+    end
+  end
+end

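A sketch of the lookup path this module now provides to the Adverts context (tag names assumed): an image tagged "safe" widens the advert query to sfw-targeted slots.

    restrictions = Philomena.Adverts.Restrictions.tags(["safe", "pony"])
    #=> ["sfw", "none"]

    # random_live_for_tags/1 in Philomena.Adverts then filters with:
    #   where: a.restrictions in ^restrictions
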
lib/philomena/adverts/server.ex (new file, 94 lines)

@@ -0,0 +1,94 @@
+defmodule Philomena.Adverts.Server do
+  @moduledoc """
+  Advert impression and click aggregator.
+
+  Updating the impression count for adverts and clicks on every pageload is unnecessary
+  and slows down requests. This module collects the adverts and clicks and submits a batch
+  of updates to the database every 10 seconds asynchronously, reducing the amount of
+  work to be done.
+  """
+
+  use GenServer
+  alias Philomena.Adverts.Recorder
+
+  @type advert_id :: integer()
+
+  @doc """
+  Starts the GenServer.
+
+  See `GenServer.start_link/2` for more information.
+  """
+  def start_link(_) do
+    GenServer.start_link(__MODULE__, [], name: __MODULE__)
+  end
+
+  @doc """
+  Asynchronously records a new impression.
+
+  ## Example
+
+      iex> record_impression(advert.id)
+      :ok
+
+  """
+  @spec record_impression(advert_id()) :: :ok
+  def record_impression(advert_id) do
+    GenServer.cast(__MODULE__, {:impressions, advert_id})
+  end
+
+  @doc """
+  Asynchronously records a new click.
+
+  ## Example
+
+      iex> record_click(advert.id)
+      :ok
+
+  """
+  @spec record_click(advert_id()) :: :ok
+  def record_click(advert_id) do
+    GenServer.cast(__MODULE__, {:clicks, advert_id})
+  end
+
+  # Used to force the GenServer to immediately sleep when no
+  # messages are available.
+  @timeout 0
+  @sleep :timer.seconds(10)
+
+  @impl true
+  @doc false
+  def init(_) do
+    {:ok, initial_state(), @timeout}
+  end
+
+  @impl true
+  @doc false
+  def handle_cast({type, advert_id}, state) do
+    # Update the counter described by the message
+    state = update_in(state[type], &increment_counter(&1, advert_id))
+
+    # Return to GenServer event loop
+    {:noreply, state, @timeout}
+  end
+
+  @impl true
+  @doc false
+  def handle_info(:timeout, state) do
+    # Process all updates from state now
+    Recorder.run(state)
+
+    # Sleep for the specified delay
+    :timer.sleep(@sleep)
+
+    # Return to GenServer event loop
+    {:noreply, initial_state(), @timeout}
+  end
+
+  defp increment_counter(map, advert_id) do
+    Map.update(map, advert_id, 1, &(&1 + 1))
+  end
+
+  defp initial_state do
+    %{impressions: %{}, clicks: %{}}
+  end
+end

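The zero `@timeout` is the load-bearing trick: a GenServer timeout only fires once the mailbox is empty, so casts coalesce for free. A minimal self-contained sketch of the same pattern, with hypothetical names and the flush function injected:

    defmodule BatcherSketch do
      use GenServer

      def start_link(flush_fun), do: GenServer.start_link(__MODULE__, flush_fun)
      def record(pid, key), do: GenServer.cast(pid, {:record, key})

      @impl true
      def init(flush_fun), do: {:ok, {flush_fun, %{}}, 0}

      @impl true
      def handle_cast({:record, key}, {flush_fun, counts}) do
        # Zero timeout: :timeout is delivered as soon as the mailbox drains.
        {:noreply, {flush_fun, Map.update(counts, key, 1, &(&1 + 1))}, 0}
      end

      @impl true
      def handle_info(:timeout, {flush_fun, counts}) do
        if map_size(counts) > 0, do: flush_fun.(counts)

        # Blocks further processing for 10 seconds, like the module above.
        Process.sleep(:timer.seconds(10))
        {:noreply, {flush_fun, %{}}, 0}
      end
    end
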
|
@ -28,8 +28,10 @@ defmodule Philomena.Application do
|
|||
node_name: valid_node_name(node())
|
||||
]},
|
||||
|
||||
# Advert update batching
|
||||
Philomena.Adverts.Server,
|
||||
|
||||
# Start the endpoint when the application starts
|
||||
PhilomenaWeb.AdvertUpdater,
|
||||
PhilomenaWeb.UserFingerprintUpdater,
|
||||
PhilomenaWeb.UserIpUpdater,
|
||||
PhilomenaWeb.Endpoint
|
||||
|
|
|
@ -9,39 +9,19 @@ defmodule Philomena.ArtistLinks do
|
|||
|
||||
alias Philomena.ArtistLinks.ArtistLink
|
||||
alias Philomena.ArtistLinks.AutomaticVerifier
|
||||
alias Philomena.Badges.Badge
|
||||
alias Philomena.Badges.Award
|
||||
alias Philomena.Tags.Tag
|
||||
alias Philomena.ArtistLinks.BadgeAwarder
|
||||
alias Philomena.Tags
|
||||
|
||||
@doc """
|
||||
Check links pending verification to see if the user placed
|
||||
the appropriate code on the page.
|
||||
Updates all artist links pending verification, by transitioning to link verified state
|
||||
or resetting next update time.
|
||||
"""
|
||||
def automatic_verify! do
|
||||
now = DateTime.utc_now() |> DateTime.truncate(:second)
|
||||
|
||||
# Automatically retry in an hour if we don't manage to
|
||||
# successfully verify any given link
|
||||
recheck_time = DateTime.add(now, 3600, :second)
|
||||
|
||||
recheck_query =
|
||||
from ul in ArtistLink,
|
||||
where: ul.aasm_state == "unverified",
|
||||
where: ul.next_check_at < ^now
|
||||
|
||||
recheck_query
|
||||
|> Repo.all()
|
||||
|> Enum.map(fn link ->
|
||||
ArtistLink.automatic_verify_changeset(
|
||||
link,
|
||||
AutomaticVerifier.check_link(link, recheck_time)
|
||||
)
|
||||
end)
|
||||
|> Enum.map(&Repo.update!/1)
|
||||
Enum.each(AutomaticVerifier.generate_updates(), &Repo.update!/1)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Gets a single artist_link.
|
||||
Gets a single artist link.
|
||||
|
||||
Raises `Ecto.NoResultsError` if the Artist link does not exist.
|
||||
|
||||
|
@ -57,7 +37,7 @@ defmodule Philomena.ArtistLinks do
|
|||
def get_artist_link!(id), do: Repo.get!(ArtistLink, id)
|
||||
|
||||
@doc """
|
||||
Creates a artist_link.
|
||||
Creates an artist link.
|
||||
|
||||
## Examples
|
||||
|
||||
|
@ -69,7 +49,7 @@ defmodule Philomena.ArtistLinks do
|
|||
|
||||
"""
|
||||
def create_artist_link(user, attrs \\ %{}) do
|
||||
tag = fetch_tag(attrs["tag_name"])
|
||||
tag = Tags.get_tag_or_alias_by_name(attrs["tag_name"])
|
||||
|
||||
%ArtistLink{}
|
||||
|> ArtistLink.creation_changeset(attrs, user, tag)
|
||||
|
@ -77,7 +57,7 @@ defmodule Philomena.ArtistLinks do
|
|||
end
|
||||
|
||||
@doc """
|
||||
Updates a artist_link.
|
||||
Updates an artist link.
|
||||
|
||||
## Examples
|
||||
|
||||
|
@ -89,47 +69,71 @@ defmodule Philomena.ArtistLinks do
|
|||
|
||||
"""
|
||||
def update_artist_link(%ArtistLink{} = artist_link, attrs) do
|
||||
tag = fetch_tag(attrs["tag_name"])
|
||||
tag = Tags.get_tag_or_alias_by_name(attrs["tag_name"])
|
||||
|
||||
artist_link
|
||||
|> ArtistLink.edit_changeset(attrs, tag)
|
||||
|> Repo.update()
|
||||
end
|
||||
|
||||
def verify_artist_link(%ArtistLink{} = artist_link, user) do
|
||||
artist_link_changeset =
|
||||
artist_link
|
||||
|> ArtistLink.verify_changeset(user)
|
||||
@doc """
|
||||
Transitions an artist link to the verified state.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> verify_artist_link(artist_link, verifying_user)
|
||||
{:ok, %ArtistLink{}}
|
||||
|
||||
iex> verify_artist_link(artist_link, verifying_user)
|
||||
:error
|
||||
|
||||
"""
|
||||
def verify_artist_link(%ArtistLink{} = artist_link, verifying_user) do
|
||||
artist_link_changeset = ArtistLink.verify_changeset(artist_link, verifying_user)
|
||||
|
||||
Multi.new()
|
||||
|> Multi.update(:artist_link, artist_link_changeset)
|
||||
|> Multi.run(:add_award, fn repo, _changes ->
|
||||
now = DateTime.utc_now() |> DateTime.truncate(:second)
|
||||
|
||||
with badge when not is_nil(badge) <- repo.get_by(limit(Badge, 1), title: "Artist"),
|
||||
nil <- repo.get_by(limit(Award, 1), badge_id: badge.id, user_id: artist_link.user_id) do
|
||||
%Award{
|
||||
badge_id: badge.id,
|
||||
user_id: artist_link.user_id,
|
||||
awarded_by_id: user.id,
|
||||
awarded_on: now
|
||||
}
|
||||
|> Award.changeset(%{})
|
||||
|> repo.insert()
|
||||
else
|
||||
_ ->
|
||||
{:ok, nil}
|
||||
end
|
||||
end)
|
||||
|> Multi.run(:add_award, fn _repo, _changes -> BadgeAwarder.award_badge(artist_link) end)
|
||||
|> Repo.transaction()
|
||||
|> case do
|
||||
{:ok, %{artist_link: artist_link}} ->
|
||||
{:ok, artist_link}
|
||||
|
||||
{:error, _operation, _value, _changes} ->
|
||||
:error
|
||||
end
|
||||
end
|
||||
|
||||
@doc """
|
||||
Transitions an artist link to the rejected state.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> reject_artist_link(artist_link)
|
||||
{:ok, %ArtistLink{}}
|
||||
|
||||
iex> reject_artist_link(artist_link)
|
||||
{:error, %Ecto.Changeset{}}
|
||||
|
||||
"""
|
||||
def reject_artist_link(%ArtistLink{} = artist_link) do
|
||||
artist_link
|
||||
|> ArtistLink.reject_changeset()
|
||||
|> Repo.update()
|
||||
end
|
||||
|
||||
@doc """
|
||||
Transitions an artist link to the contacted state.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> contact_artist_link(artist_link)
|
||||
{:ok, %ArtistLink{}}
|
||||
|
||||
iex> contact_artist_link(artist_link)
|
||||
{:error, %Ecto.Changeset{}}
|
||||
|
||||
"""
|
||||
def contact_artist_link(%ArtistLink{} = artist_link, user) do
|
||||
artist_link
|
||||
|> ArtistLink.contact_changeset(user)
|
||||
|
@ -137,7 +141,7 @@ defmodule Philomena.ArtistLinks do
|
|||
end
|
||||
|
||||
@doc """
|
||||
Deletes a ArtistLink.
|
||||
Deletes an artist link.
|
||||
|
||||
## Examples
|
||||
|
||||
|
@ -153,7 +157,7 @@ defmodule Philomena.ArtistLinks do
|
|||
end
|
||||
|
||||
@doc """
|
||||
Returns an `%Ecto.Changeset{}` for tracking artist_link changes.
|
||||
Returns an `%Ecto.Changeset{}` for tracking artist link changes.
|
||||
|
||||
## Examples
|
||||
|
||||
|
@ -165,24 +169,26 @@ defmodule Philomena.ArtistLinks do
|
|||
ArtistLink.changeset(artist_link, %{})
|
||||
end
|
||||
|
||||
@doc """
|
||||
Counts the number of artist links which are pending moderation action, or
|
||||
nil if the user is not permitted to moderate artist links.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> count_artist_links(normal_user)
|
||||
nil
|
||||
|
||||
iex> count_artist_links(admin_user)
|
||||
0
|
||||
|
||||
"""
|
||||
def count_artist_links(user) do
|
||||
if Canada.Can.can?(user, :index, %ArtistLink{}) do
|
||||
ArtistLink
|
||||
|> where([ul], ul.aasm_state in ^["unverified", "link_verified"])
|
||||
|> Repo.aggregate(:count, :id)
|
||||
|> Repo.aggregate(:count)
|
||||
else
|
||||
nil
|
||||
end
|
||||
end
|
||||
|
||||
defp fetch_tag(name) do
|
||||
Tag
|
||||
|> preload(:aliased_tag)
|
||||
|> where(name: ^name)
|
||||
|> Repo.one()
|
||||
|> case do
|
||||
nil -> nil
|
||||
tag -> tag.aliased_tag || tag
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@@ -1,5 +1,47 @@
 defmodule Philomena.ArtistLinks.AutomaticVerifier do
-  def check_link(artist_link, recheck_time) do
+  @moduledoc """
+  Artist link automatic verification.
+
+  Artist links contain a random code which is generated when the link is created. If the user
+  places the code on their linked page and this verifier finds it, this expedites the process
+  of verifying a link for the moderator, as they can simply use the presence of the code in a
+  field controlled by the artist to ascertain the validity of the artist link.
+  """
+
+  alias Philomena.ArtistLinks.ArtistLink
+  alias Philomena.Repo
+  import Ecto.Query
+
+  @doc """
+  Check links pending verification to see if the user placed the appropriate code on the page.
+
+  Polls each artist link in unverified state and generates a changeset to either set it to
+  link verified, if the code was found on the page, or reset the next check time, if the code
+  was not found.
+
+  Returns a list of changesets with updated links.
+  """
+  def generate_updates do
+    # Automatically retry in an hour if we don't manage to
+    # successfully verify any given link
+    now = DateTime.utc_now(:second)
+    recheck_time = DateTime.add(now, 3600, :second)
+
+    Enum.map(links_to_check(now), fn link ->
+      ArtistLink.automatic_verify_changeset(link, check_link(link, recheck_time))
+    end)
+  end
+
+  defp links_to_check(now) do
+    recheck_query =
+      from ul in ArtistLink,
+        where: ul.aasm_state == "unverified",
+        where: ul.next_check_at < ^now
+
+    Repo.all(recheck_query)
+  end
+
+  defp check_link(artist_link, recheck_time) do
     artist_link.uri
     |> PhilomenaProxy.Http.get()
     |> contains_verification_code?(artist_link.verification_code)

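The hunk cuts off before `contains_verification_code?/2` is shown; a minimal sketch of such a predicate, assuming `PhilomenaProxy.Http.get/1` returns an `{:ok, response}` tuple whose response carries a binary `body` (the actual return shape is not visible here):

    defp contains_verification_code?({:ok, %{body: body}}, code) when is_binary(body) do
      String.contains?(body, code)
    end

    defp contains_verification_code?(_response, _code), do: false
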
lib/philomena/artist_links/badge_awarder.ex (new file, 28 lines)

@@ -0,0 +1,28 @@
+defmodule Philomena.ArtistLinks.BadgeAwarder do
+  @moduledoc """
+  Handles awarding a badge to the user of an associated artist link.
+  """
+
+  alias Philomena.Badges
+
+  @badge_title "Artist"
+
+  @doc """
+  Awards a badge to an artist with a verified link.
+
+  If the badge with the title `"Artist"` does not exist, no award will be created.
+  If the user already has an award with that badge title, no award will be created.
+
+  Returns `{:ok, award}`, `{:ok, nil}`, or `{:error, changeset}`. The return value is
+  suitable for use as the return value to an `Ecto.Multi.run/3` callback.
+  """
+  def award_badge(artist_link) do
+    with badge when not is_nil(badge) <- Badges.get_badge_by_title(@badge_title),
+         award when is_nil(award) <- Badges.get_badge_award_for(badge, artist_link.user) do
+      Badges.create_badge_award(artist_link.user, artist_link.user, %{badge_id: badge.id})
+    else
+      _ ->
+        {:ok, nil}
+    end
+  end
+end

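The `with` chain reaches `{:ok, nil}` by two distinct routes, so a caller can only distinguish three result shapes; a sketch (labels assumed):

    case BadgeAwarder.award_badge(artist_link) do
      # Badge missing, or the user already holds it - nothing inserted.
      {:ok, nil} -> :skipped

      # Fresh award row created for the artist.
      {:ok, award} -> {:created, award}

      # Insert failed validation; rolls back an enclosing Ecto.Multi.
      {:error, changeset} -> {:failed, changeset}
    end
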
@@ -1,19 +1,32 @@
 defmodule Philomena.Autocomplete do
+  @moduledoc """
+  Pregenerated autocomplete files.
+
+  These are used to eliminate the latency of looking up search results on the server.
+  A script can parse the binary and generate results directly as the user types, without
+  incurring any roundtrip penalty.
+  """
+
   import Ecto.Query, warn: false
   alias Philomena.Repo
 
-  alias Philomena.Tags.Tag
-  alias Philomena.Images.Tagging
   alias Philomena.Autocomplete.Autocomplete
+  alias Philomena.Autocomplete.Generator
 
-  @type tags_list() :: [{String.t(), number(), number(), String.t() | nil}]
-  @type assoc_map() :: %{String.t() => [number()]}
+  @doc """
+  Gets the current local autocompletion binary.
 
-  @spec get_autocomplete() :: Autocomplete.t() | nil
+  Returns nil if the binary is not currently generated.
+
+  ## Examples
+
+      iex> get_autocomplete()
+      nil
+
+      iex> get_autocomplete()
+      %Autocomplete{}
+
+  """
   def get_autocomplete do
     Autocomplete
     |> order_by(desc: :created_at)

@@ -21,103 +34,11 @@ defmodule Philomena.Autocomplete do
     |> Repo.one()
   end
 
+  @doc """
+  Creates a new local autocompletion binary, replacing any which currently exist.
+  """
   def generate_autocomplete! do
-    tags = get_tags()
-    associations = get_associations(tags)
-
-    # Tags are already sorted, so just add them to the file directly
-    #
-    # struct tag {
-    #   uint8_t key_length;
-    #   uint8_t key[];
-    #   uint8_t association_length;
-    #   uint32_t associations[];
-    # };
-    #
-
-    {ac_file, name_locations} =
-      Enum.reduce(tags, {<<>>, %{}}, fn {name, _, _, _}, {file, name_locations} ->
-        pos = byte_size(file)
-        assn = Map.get(associations, name, [])
-        assn_bin = for id <- assn, into: <<>>, do: <<id::32-little>>
-
-        {
-          <<file::binary, byte_size(name)::8, name::binary, length(assn)::8, assn_bin::binary>>,
-          Map.put(name_locations, name, pos)
-        }
-      end)
-
-    # Link reference list; self-referential, so must be preprocessed to deal with aliases
-    #
-    # struct tag_reference {
-    #   uint32_t tag_location;
-    #   uint8_t is_aliased : 1;
-    #   union {
-    #     uint32_t num_uses : 31;
-    #     uint32_t alias_index : 31;
-    #   };
-    # };
-    #
-
-    ac_file = int32_align(ac_file)
-    reference_start = byte_size(ac_file)
-
-    reference_indexes =
-      tags
-      |> Enum.with_index()
-      |> Enum.map(fn {{name, _, _, _}, index} -> {name, index} end)
-      |> Map.new()
-
-    references =
-      Enum.reduce(tags, <<>>, fn {name, images_count, _, alias_target}, references ->
-        pos = Map.fetch!(name_locations, name)
-
-        if not is_nil(alias_target) do
-          target = Map.fetch!(reference_indexes, alias_target)
-
-          <<references::binary, pos::32-little, -(target + 1)::32-little>>
-        else
-          <<references::binary, pos::32-little, images_count::32-little>>
-        end
-      end)
-
-    # Reorder tags by name in their namespace to provide a secondary ordering
-    #
-    # struct secondary_reference {
-    #   uint32_t primary_location;
-    # };
-    #
-
-    secondary_references =
-      tags
-      |> Enum.map(&{name_in_namespace(elem(&1, 0)), elem(&1, 0)})
-      |> Enum.sort()
-      |> Enum.reduce(<<>>, fn {_k, v}, secondary_references ->
-        target = Map.fetch!(reference_indexes, v)
-
-        <<secondary_references::binary, target::32-little>>
-      end)
-
-    # Finally add the reference start and number of tags in the footer
-    #
-    # struct autocomplete_file {
-    #   struct tag tags[];
-    #   struct tag_reference primary_references[];
-    #   struct secondary_reference secondary_references[];
-    #   uint32_t format_version;
-    #   uint32_t reference_start;
-    #   uint32_t num_tags;
-    # };
-    #
-
-    ac_file = <<
-      ac_file::binary,
-      references::binary,
-      secondary_references::binary,
-      2::32-little,
-      reference_start::32-little,
-      length(tags)::32-little
-    >>
+    ac_file = Generator.generate()
 
     # Insert the autocomplete binary
     new_ac =

@@ -130,93 +51,4 @@ defmodule Philomena.Autocomplete do
     |> where([ac], ac.created_at < ^new_ac.created_at)
     |> Repo.delete_all()
   end
 
-  #
-  # Get the names of tags and their number of uses as a map.
-  # Sort is done in the application to avoid collation.
-  #
-  @spec get_tags() :: tags_list()
-  defp get_tags do
-    top_tags =
-      Tag
-      |> select([t], {t.name, t.images_count, t.id, nil})
-      |> where([t], t.images_count > 0)
-      |> order_by(desc: :images_count)
-      |> limit(50_000)
-      |> Repo.all()
-
-    aliases_of_top_tags =
-      Tag
-      |> where([t], t.aliased_tag_id in ^Enum.map(top_tags, fn {_, _, id, _} -> id end))
-      |> join(:inner, [t], _ in assoc(t, :aliased_tag))
-      |> select([t, a], {t.name, 0, 0, a.name})
-      |> Repo.all()
-
-    (aliases_of_top_tags ++ top_tags)
-    |> Enum.filter(fn {name, _, _, _} -> byte_size(name) < 255 end)
-    |> Enum.sort()
-  end
-
-  #
-  # Get up to eight associated tag ids for each returned tag.
-  #
-  @spec get_associations(tags_list()) :: assoc_map()
-  defp get_associations(tags) do
-    tags
-    |> Enum.filter(fn {_, _, _, aliased} -> is_nil(aliased) end)
-    |> Enum.map(fn {name, images_count, id, _} ->
-      # Randomly sample 100 images with this tag
-      image_sample =
-        Tagging
-        |> where(tag_id: ^id)
-        |> select([it], it.image_id)
-        |> order_by(asc: fragment("random()"))
-        |> limit(100)
-
-      # Select the tags from those images which have more uses than
-      # the current one being considered, and overlap more than 50%
-      assoc_ids =
-        Tagging
-        |> join(:inner, [it], _ in assoc(it, :tag))
-        |> where([_, t], t.images_count > ^images_count)
-        |> where([it, _], it.image_id in subquery(image_sample))
-        |> group_by([_, t], t.id)
-        |> order_by(desc: fragment("count(*)"))
-        |> having([_, t], fragment("(100 * count(*)::float / LEAST(?, 100)) > 50", ^images_count))
-        |> select([_, t], t.id)
-        |> limit(8)
-        |> Repo.all(timeout: 120_000)
-
-      {name, assoc_ids}
-    end)
-    |> Map.new()
-  end
-
-  #
-  # Right-pad a binary to be a multiple of 4 bytes.
-  #
-  @spec int32_align(binary()) :: binary()
-  defp int32_align(bin) do
-    pad_bits = 8 * (4 - rem(byte_size(bin), 4))
-
-    <<bin::binary, 0::size(pad_bits)>>
-  end
-
-  #
-  # Remove the artist:, oc: etc. prefix from a tag name,
-  # if one is present.
-  #
-  @spec name_in_namespace(String.t()) :: String.t()
-  defp name_in_namespace(s) do
-    case String.split(s, ":", parts: 2, trim: true) do
-      [_namespace, name] ->
-        name
-
-      [name] ->
-        name
-
-      _unknown ->
-        s
-    end
-  end
 end

lib/philomena/autocomplete/generator.ex (new file, 147 lines)

@@ -0,0 +1,147 @@
+defmodule Philomena.Autocomplete.Generator do
+  @moduledoc """
+  Compiled autocomplete binary for frontend usage.
+
+  See assets/js/utils/local-autocompleter.ts for how this should be used.
+  The file follows the following binary format:
+
+      struct tag {
+        uint8_t key_length;
+        uint8_t key[];
+        uint8_t association_length;
+        uint32_t associations[];
+      };
+
+      struct tag_reference {
+        uint32_t tag_location;
+        union {
+          int32_t raw;
+          uint32_t num_uses;    ///< when positive
+          uint32_t alias_index; ///< when negative, -alias_index - 1
+        };
+      };
+
+      struct secondary_reference {
+        uint32_t primary_location;
+      };
+
+      struct autocomplete_file {
+        struct tag tags[];
+        struct tag_reference primary_references[];
+        struct secondary_reference secondary_references[];
+        uint32_t format_version;
+        uint32_t reference_start;
+        uint32_t num_tags;
+      };
+
+  """
+
+  alias Philomena.Tags.LocalAutocomplete
+
+  @format_version 2
+  @top_tags 50_000
+  @max_associations 8
+
+  @doc """
+  Create the compiled autocomplete binary.
+
+  See module documentation for the format. This is not expected to be larger
+  than a few megabytes on average.
+  """
+  @spec generate() :: binary()
+  def generate do
+    {tags, associations} = tags_and_associations()
+
+    # Tags are already sorted, so just add them to the file directly
+    {tag_block, name_locations} =
+      Enum.reduce(tags, {<<>>, %{}}, fn %{name: name}, {data, name_locations} ->
+        pos = byte_size(data)
+        assn = Map.get(associations, name, [])
+        assn_bin = for id <- assn, into: <<>>, do: <<id::32-little>>
+
+        {
+          <<data::binary, byte_size(name)::8, name::binary, length(assn)::8, assn_bin::binary>>,
+          Map.put(name_locations, name, pos)
+        }
+      end)
+
+    # Link reference list; self-referential, so must be preprocessed to deal with aliases
+    tag_block = int32_align(tag_block)
+    reference_start = byte_size(tag_block)
+
+    reference_indexes =
+      tags
+      |> Enum.with_index()
+      |> Enum.map(fn {entry, index} -> {entry.name, index} end)
+      |> Map.new()
+
+    references =
+      Enum.reduce(tags, <<>>, fn entry, references ->
+        pos = Map.fetch!(name_locations, entry.name)
+
+        if not is_nil(entry.alias_name) do
+          target = Map.fetch!(reference_indexes, entry.alias_name)
+
+          <<references::binary, pos::32-little, -(target + 1)::32-little>>
+        else
+          <<references::binary, pos::32-little, entry.images_count::32-little>>
+        end
+      end)
+
+    # Reorder tags by name in their namespace to provide a secondary ordering
+    secondary_references =
+      tags
+      |> Enum.map(&{name_in_namespace(&1.name), &1.name})
+      |> Enum.sort()
+      |> Enum.reduce(<<>>, fn {_k, v}, secondary_references ->
+        target = Map.fetch!(reference_indexes, v)
+
+        <<secondary_references::binary, target::32-little>>
+      end)
+
+    # Finally add the reference start and number of tags in the footer
+    <<
+      tag_block::binary,
+      references::binary,
+      secondary_references::binary,
+      @format_version::32-little,
+      reference_start::32-little,
+      length(tags)::32-little
+    >>
+  end
+
+  defp tags_and_associations do
+    # Names longer than 255 bytes do not fit and will break parsing.
+    # Sort is done in the application to avoid collation.
+    tags =
+      LocalAutocomplete.get_tags(@top_tags)
+      |> Enum.filter(&(byte_size(&1.name) < 255))
+      |> Enum.sort_by(& &1.name)
+
+    associations =
+      LocalAutocomplete.get_associations(tags, @max_associations)
+
+    {tags, associations}
+  end
+
+  defp int32_align(bin) do
+    # Right-pad a binary to be a multiple of 4 bytes.
+    pad_bits = 8 * (4 - rem(byte_size(bin), 4))
+
+    <<bin::binary, 0::size(pad_bits)>>
+  end
+
+  defp name_in_namespace(s) do
+    # Remove the artist:, oc: etc. prefix from a tag name, if one is present.
+    case String.split(s, ":", parts: 2, trim: true) do
+      [_namespace, name] ->
+        name
+
+      [name] ->
+        name
+
+      _unknown ->
+        s
+    end
+  end
+end

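A sketch of reading the footer back out of a generated binary, following the `autocomplete_file` layout above (function name hypothetical): the last 12 bytes hold the three little-endian u32 footer fields.

    def parse_footer(ac_file) when byte_size(ac_file) >= 12 do
      data_size = byte_size(ac_file) - 12

      <<_data::binary-size(data_size), format_version::32-little,
        reference_start::32-little, num_tags::32-little>> = ac_file

      %{format_version: format_version, reference_start: reference_start, num_tags: num_tags}
    end
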
@@ -38,6 +38,22 @@ defmodule Philomena.Badges do
   """
   def get_badge!(id), do: Repo.get!(Badge, id)
 
+  @doc """
+  Gets a single badge by its title.
+
+  Returns nil if the Badge does not exist.
+
+  ## Examples
+
+      iex> get_badge_by_title("Artist")
+      %Badge{}
+
+      iex> get_badge_by_title("Nonexistent")
+      nil
+
+  """
+  def get_badge_by_title(title), do: Repo.get_by(Badge, title: title)
+
   @doc """
   Creates a badge.
 

@@ -68,7 +84,7 @@ defmodule Philomena.Badges do
   end
 
   @doc """
-  Updates a badge.
+  Updates a badge without updating its image.
 
   ## Examples
 

@@ -85,6 +101,18 @@ defmodule Philomena.Badges do
     |> Repo.update()
   end
 
+  @doc """
+  Updates the image for a badge.
+
+  ## Examples
+
+      iex> update_badge_image(badge, %{image: new_value})
+      {:ok, %Badge{}}
+
+      iex> update_badge_image(badge, %{image: bad_value})
+      {:error, %Ecto.Changeset{}}
+
+  """
+  def update_badge_image(%Badge{} = badge, attrs) do
+    badge
+    |> Badge.changeset(attrs)

@@ -162,6 +190,24 @@ defmodule Philomena.Badges do
   """
   def get_badge_award!(id), do: Repo.get!(Award, id)
 
+  @doc """
+  Gets the badge_award with the given badge type belonging to the user.
+
+  Returns nil if the badge award does not exist.
+
+  ## Examples
+
+      iex> get_badge_award_for(badge, user)
+      %Award{}
+
+      iex> get_badge_award_for(badge, user)
+      nil
+
+  """
+  def get_badge_award_for(badge, user) do
+    Repo.get_by(Award, badge_id: badge.id, user_id: user.id)
+  end
+
   @doc """
   Creates a badge_award.
 

|
@@ -4,13 +4,17 @@ defmodule Philomena.Bans do
   """

   import Ecto.Query, warn: false
+  alias Ecto.Multi
   alias Philomena.Repo

+  alias Philomena.UserIps
+  alias Philomena.Bans.Finder
   alias Philomena.Bans.Fingerprint
+  alias Philomena.Bans.SubnetCreator
   alias Philomena.Bans.Subnet
   alias Philomena.Bans.User

   @doc """
-  Returns the list of fingerprint_bans.
+  Returns the list of fingerprint bans.

   ## Examples
@@ -23,9 +27,9 @@ defmodule Philomena.Bans do
   end

   @doc """
-  Gets a single fingerprint.
+  Gets a single fingerprint ban.

-  Raises `Ecto.NoResultsError` if the Fingerprint does not exist.
+  Raises `Ecto.NoResultsError` if the fingerprint ban does not exist.

   ## Examples
@@ -39,7 +43,7 @@ defmodule Philomena.Bans do
   def get_fingerprint!(id), do: Repo.get!(Fingerprint, id)

   @doc """
-  Creates a fingerprint.
+  Creates a fingerprint ban.

   ## Examples
@@ -57,7 +61,7 @@ defmodule Philomena.Bans do
   end

   @doc """
-  Updates a fingerprint.
+  Updates a fingerprint ban.

   ## Examples
@@ -75,7 +79,7 @@ defmodule Philomena.Bans do
   end

   @doc """
-  Deletes a Fingerprint.
+  Deletes a fingerprint ban.

   ## Examples
@@ -91,7 +95,7 @@ defmodule Philomena.Bans do
   end

   @doc """
-  Returns an `%Ecto.Changeset{}` for tracking fingerprint changes.
+  Returns an `%Ecto.Changeset{}` for tracking fingerprint ban changes.

   ## Examples
@@ -103,10 +107,8 @@ defmodule Philomena.Bans do
     Fingerprint.changeset(fingerprint, %{})
   end

-  alias Philomena.Bans.Subnet
-
   @doc """
-  Returns the list of subnet_bans.
+  Returns the list of subnet bans.

   ## Examples
@@ -119,9 +121,9 @@ defmodule Philomena.Bans do
   end

   @doc """
-  Gets a single subnet.
+  Gets a single subnet ban.

-  Raises `Ecto.NoResultsError` if the Subnet does not exist.
+  Raises `Ecto.NoResultsError` if the subnet ban does not exist.

   ## Examples
@@ -135,7 +137,7 @@ defmodule Philomena.Bans do
   def get_subnet!(id), do: Repo.get!(Subnet, id)

   @doc """
-  Creates a subnet.
+  Creates a subnet ban.

   ## Examples
@@ -153,7 +155,7 @@ defmodule Philomena.Bans do
   end

   @doc """
-  Updates a subnet.
+  Updates a subnet ban.

   ## Examples
@@ -171,7 +173,7 @@ defmodule Philomena.Bans do
   end

   @doc """
-  Deletes a Subnet.
+  Deletes a subnet ban.

   ## Examples
@@ -187,7 +189,7 @@ defmodule Philomena.Bans do
   end

   @doc """
-  Returns an `%Ecto.Changeset{}` for tracking subnet changes.
+  Returns an `%Ecto.Changeset{}` for tracking subnet ban changes.

   ## Examples
@@ -199,10 +201,8 @@ defmodule Philomena.Bans do
     Subnet.changeset(subnet, %{})
   end

-  alias Philomena.Bans.User
-
   @doc """
-  Returns the list of user_bans.
+  Returns the list of user bans.

   ## Examples
@@ -215,9 +215,9 @@ defmodule Philomena.Bans do
   end

   @doc """
-  Gets a single user.
+  Gets a single user ban.

-  Raises `Ecto.NoResultsError` if the User does not exist.
+  Raises `Ecto.NoResultsError` if the user ban does not exist.

   ## Examples
@@ -231,7 +231,7 @@ defmodule Philomena.Bans do
   def get_user!(id), do: Repo.get!(User, id)

   @doc """
-  Creates a user.
+  Creates a user ban.

   ## Examples
@@ -243,31 +243,27 @@ defmodule Philomena.Bans do

   """
   def create_user(creator, attrs \\ %{}) do
-    %User{banning_user_id: creator.id}
-    |> User.save_changeset(attrs)
-    |> Repo.insert()
+    changeset =
+      %User{banning_user_id: creator.id}
+      |> User.save_changeset(attrs)
+
+    Multi.new()
+    |> Multi.insert(:user_ban, changeset)
+    |> Multi.run(:subnet_ban, fn _repo, %{user_ban: %{user_id: user_id}} ->
+      SubnetCreator.create_for_user(creator, user_id, attrs)
+    end)
+    |> Repo.transaction()
     |> case do
-      {:ok, user_ban} ->
-        ip = UserIps.get_ip_for_user(user_ban.user_id)
-
-        if ip do
-          # Automatically create associated IP ban.
-          ip = UserIps.masked_ip(ip)
-
-          %Subnet{banning_user_id: creator.id, specification: ip}
-          |> Subnet.save_changeset(attrs)
-          |> Repo.insert()
-        end
-
+      {:ok, %{user_ban: user_ban}} ->
         {:ok, user_ban}

-      error ->
-        error
+      {:error, :user_ban, changeset, _changes} ->
+        {:error, changeset}
     end
   end

   @doc """
-  Updates a user.
+  Updates a user ban.

   ## Examples
@@ -285,7 +281,7 @@ defmodule Philomena.Bans do
   end

   @doc """
-  Deletes a User.
+  Deletes a user ban.

   ## Examples
@@ -301,7 +297,7 @@ defmodule Philomena.Bans do
   end

   @doc """
-  Returns an `%Ecto.Changeset{}` for tracking user changes.
+  Returns an `%Ecto.Changeset{}` for tracking user ban changes.

   ## Examples
@@ -314,88 +310,9 @@ defmodule Philomena.Bans do
   end

   @doc """
-  Returns the first ban, if any, that matches the specified request
-  attributes.
+  Returns the first ban, if any, that matches the specified request attributes.
   """
-  def exists_for?(user, ip, fingerprint) do
-    now = DateTime.utc_now()
-
-    queries =
-      subnet_query(ip, now) ++
-        fingerprint_query(fingerprint, now) ++
-        user_query(user, now)
-
-    bans =
-      queries
-      |> union_all_queries()
-      |> Repo.all()
-
-    # Don't return a ban if the user is currently signed in.
-    case is_nil(user) do
-      true -> Enum.at(bans, 0)
-      false -> user_ban(bans)
-    end
-  end
-
-  defp fingerprint_query(nil, _now), do: []
-
-  defp fingerprint_query(fingerprint, now) do
-    [
-      Fingerprint
-      |> select([f], %{
-        reason: f.reason,
-        valid_until: f.valid_until,
-        generated_ban_id: f.generated_ban_id,
-        type: ^"FingerprintBan"
-      })
-      |> where([f], f.enabled and f.valid_until > ^now)
-      |> where([f], f.fingerprint == ^fingerprint)
-    ]
-  end
-
-  defp subnet_query(nil, _now), do: []
-
-  defp subnet_query(ip, now) do
-    {:ok, inet} = EctoNetwork.INET.cast(ip)
-
-    [
-      Subnet
-      |> select([s], %{
-        reason: s.reason,
-        valid_until: s.valid_until,
-        generated_ban_id: s.generated_ban_id,
-        type: ^"SubnetBan"
-      })
-      |> where([s], s.enabled and s.valid_until > ^now)
-      |> where(fragment("specification >>= ?", ^inet))
-    ]
-  end
-
-  defp user_query(nil, _now), do: []
-
-  defp user_query(user, now) do
-    [
-      User
-      |> select([u], %{
-        reason: u.reason,
-        valid_until: u.valid_until,
-        generated_ban_id: u.generated_ban_id,
-        type: ^"UserBan"
-      })
-      |> where([u], u.enabled and u.valid_until > ^now)
-      |> where([u], u.user_id == ^user.id)
-    ]
-  end
-
-  defp union_all_queries([query]),
-    do: query
-
-  defp union_all_queries([query | rest]),
-    do: query |> union_all(^union_all_queries(rest))
-
-  defp user_ban(bans) do
-    bans
-    |> Enum.filter(&(&1.type == "UserBan"))
-    |> Enum.at(0)
+  def find(user, ip, fingerprint) do
+    Finder.find(user, ip, fingerprint)
   end
 end
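Note: a minimal sketch of how a caller sees the refactored `create_user/2`; the `creator` and the attrs map are invented for illustration:

```elixir
case Philomena.Bans.create_user(creator, %{
       "reason" => "Ban evasion",
       "valid_until" => "2024-12-31T00:00:00Z"
     }) do
  {:ok, user_ban} ->
    # The transaction unwrapped to the inserted ban; a companion subnet ban
    # (if the user had a known IP) was created in the same transaction.
    IO.inspect(user_ban.reason)

  {:error, changeset} ->
    # Insert failures unwrap to a bare changeset, so controller code can
    # re-render forms as before (see UserBanController below).
    IO.inspect(changeset.errors)
end
```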
86 lib/philomena/bans/finder.ex Normal file
@@ -0,0 +1,86 @@
defmodule Philomena.Bans.Finder do
  @moduledoc """
  Helper to find bans associated with a set of request attributes.
  """

  import Ecto.Query, warn: false
  alias Philomena.Repo

  alias Philomena.Bans.Fingerprint
  alias Philomena.Bans.Subnet
  alias Philomena.Bans.User

  @fingerprint "Fingerprint"
  @subnet "Subnet"
  @user "User"

  @doc """
  Returns the first ban, if any, that matches the specified request attributes.
  """
  def find(user, ip, fingerprint) do
    bans =
      generate_valid_queries([
        {ip, &subnet_query/2},
        {fingerprint, &fingerprint_query/2},
        {user, &user_query/2}
      ])
      |> union_all_queries()
      |> Repo.all()

    # Don't return a fingerprint or subnet ban if the user is currently signed in.
    case is_nil(user) do
      true -> Enum.at(bans, 0)
      false -> user_ban(bans)
    end
  end

  defp query_base(schema, name, now) do
    from b in schema,
      where: b.enabled and b.valid_until > ^now,
      select: %{
        reason: b.reason,
        valid_until: b.valid_until,
        generated_ban_id: b.generated_ban_id,
        type: type(^name, :string)
      }
  end

  defp fingerprint_query(fingerprint, now) do
    Fingerprint
    |> query_base(@fingerprint, now)
    |> where([f], f.fingerprint == ^fingerprint)
  end

  defp subnet_query(ip, now) do
    {:ok, inet} = EctoNetwork.INET.cast(ip)

    Subnet
    |> query_base(@subnet, now)
    |> where(fragment("specification >>= ?", ^inet))
  end

  defp user_query(user, now) do
    User
    |> query_base(@user, now)
    |> where([u], u.user_id == ^user.id)
  end

  defp generate_valid_queries(sources) do
    now = DateTime.utc_now()

    Enum.flat_map(sources, fn
      {nil, _cb} -> []
      {source, cb} -> [cb.(source, now)]
    end)
  end

  defp union_all_queries([query | rest]) do
    Enum.reduce(rest, query, fn q, acc -> union_all(acc, ^q) end)
  end

  defp user_ban(bans) do
    bans
    |> Enum.filter(&(&1.type == @user))
    |> Enum.at(0)
  end
end
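Note: the call site sketch below paraphrases how `CurrentBanPlug` (further down in this diff) consumes `find/3`; the `fingerprint` value is assumed to come from the request:

```elixir
alias Philomena.Bans

# nil attributes are simply skipped by generate_valid_queries/1.
ban = Bans.find(conn.assigns.current_user, conn.remote_ip, fingerprint)

if ban do
  # `type` is "User", "Subnet" or "Fingerprint" per the module attributes above.
  IO.puts("banned until #{ban.valid_until}: #{ban.reason}")
end
```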
27 lib/philomena/bans/subnet_creator.ex Normal file
@@ -0,0 +1,27 @@
defmodule Philomena.Bans.SubnetCreator do
  @moduledoc """
  Handles automatic creation of subnet bans for an input user ban.

  This prevents trivial ban evasion with the creation of a new account from the same address.
  The user must work around or wait out the subnet ban first.
  """

  alias Philomena.UserIps
  alias Philomena.Bans

  @doc """
  Creates a subnet ban for the given user's last known IP address.

  Returns `{:ok, ban}`, `{:ok, nil}`, or `{:error, changeset}`. The return value is
  suitable for use as the return value to an `Ecto.Multi.run/3` callback.
  """
  def create_for_user(creator, user_id, attrs) do
    ip = UserIps.get_ip_for_user(user_id)

    if ip do
      Bans.create_subnet(creator, Map.put(attrs, "specification", UserIps.masked_ip(ip)))
    else
      {:ok, nil}
    end
  end
end
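Note: the `{:ok, nil}` branch matters because of the `Ecto.Multi.run/3` contract the doc mentions; the callback must return `{:ok, value}` or `{:error, reason}`. A minimal sketch mirroring `create_user/2` above (step names illustrative):

```elixir
# {:ok, nil} satisfies Multi.run when the user has no known IP to ban.
Ecto.Multi.new()
|> Ecto.Multi.insert(:user_ban, changeset)
|> Ecto.Multi.run(:subnet_ban, fn _repo, %{user_ban: user_ban} ->
  Philomena.Bans.SubnetCreator.create_for_user(creator, user_ban.user_id, attrs)
end)
|> Philomena.Repo.transaction()
```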
@@ -6,49 +6,15 @@ defmodule Philomena.Channels do
   import Ecto.Query, warn: false
   alias Philomena.Repo

+  alias Philomena.Channels.AutomaticUpdater
   alias Philomena.Channels.Channel
-  alias Philomena.Channels.PicartoChannel
-  alias Philomena.Channels.PiczelChannel
   alias Philomena.Notifications

   @doc """
-  Updates all the tracked channels for which an update
-  scheme is known.
+  Updates all the tracked channels for which an update scheme is known.
   """
   def update_tracked_channels! do
-    now = DateTime.utc_now() |> DateTime.truncate(:second)
-
-    picarto_channels = PicartoChannel.live_channels(now)
-    live_picarto_channels = Map.keys(picarto_channels)
-
-    piczel_channels = PiczelChannel.live_channels(now)
-    live_piczel_channels = Map.keys(piczel_channels)
-
-    # Update all channels which are offline to reflect offline status
-    offline_query =
-      from c in Channel,
-        where: c.type == "PicartoChannel" and c.short_name not in ^live_picarto_channels,
-        or_where: c.type == "PiczelChannel" and c.short_name not in ^live_piczel_channels
-
-    Repo.update_all(offline_query, set: [is_live: false, updated_at: now])
-
-    # Update all channels which are online to reflect online status using
-    # changeset functions
-    online_query =
-      from c in Channel,
-        where: c.type == "PicartoChannel" and c.short_name in ^live_picarto_channels,
-        or_where: c.type == "PiczelChannel" and c.short_name in ^live_picarto_channels
-
-    online_query
-    |> Repo.all()
-    |> Enum.map(fn
-      %{type: "PicartoChannel", short_name: name} = channel ->
-        Channel.update_changeset(channel, Map.get(picarto_channels, name, []))
-
-      %{type: "PiczelChannel", short_name: name} = channel ->
-        Channel.update_changeset(channel, Map.get(piczel_channels, name, []))
-    end)
-    |> Enum.map(&Repo.update!/1)
+    AutomaticUpdater.update_tracked_channels!()
   end

   @doc """
@@ -103,6 +69,24 @@ defmodule Philomena.Channels do
     |> Repo.update()
   end

+  @doc """
+  Updates a channel's state when it goes live.
+
+  ## Examples
+
+      iex> update_channel_state(channel, %{field: new_value})
+      {:ok, %Channel{}}
+
+      iex> update_channel_state(channel, %{field: bad_value})
+      {:error, %Ecto.Changeset{}}
+
+  """
+  def update_channel_state(%Channel{} = channel, attrs) do
+    channel
+    |> Channel.update_changeset(attrs)
+    |> Repo.update()
+  end
+
   @doc """
   Deletes a Channel.
64 lib/philomena/channels/automatic_updater.ex Normal file
@@ -0,0 +1,64 @@
defmodule Philomena.Channels.AutomaticUpdater do
  @moduledoc """
  Automatic update routine for streams.

  Calls APIs for each stream provider to remove channels which are no longer online,
  and to restore channels which are currently online.
  """

  import Ecto.Query, warn: false
  alias Philomena.Repo

  alias Philomena.Channels
  alias Philomena.Channels.Channel
  alias Philomena.Channels.PicartoChannel
  alias Philomena.Channels.PiczelChannel

  @doc """
  Updates all the tracked channels for which an update scheme is known.
  """
  def update_tracked_channels! do
    now = DateTime.utc_now(:second)
    Enum.each(providers(), &update_provider(&1, now))
  end

  defp providers do
    [
      {"PicartoChannel", PicartoChannel.live_channels()},
      {"PiczelChannel", PiczelChannel.live_channels()}
    ]
  end

  defp update_provider({provider_name, live_channels}, now) do
    channel_names = Map.keys(live_channels)

    provider_name
    |> update_offline_query(channel_names, now)
    |> Repo.update_all([])

    provider_name
    |> online_query(channel_names)
    |> Repo.all()
    |> Enum.each(&update_online_channel(&1, live_channels, now))
  end

  defp update_offline_query(provider_name, channel_names, now) do
    from c in Channel,
      where: c.type == ^provider_name and c.short_name not in ^channel_names,
      update: [set: [is_live: false, updated_at: ^now]]
  end

  defp online_query(provider_name, channel_names) do
    from c in Channel,
      where: c.type == ^provider_name and c.short_name in ^channel_names
  end

  defp update_online_channel(channel, live_channels, now) do
    attrs =
      live_channels
      |> Map.get(channel.short_name, %{})
      |> Map.merge(%{last_live_at: now, last_fetched_at: now})

    Channels.update_channel_state(channel, attrs)
  end
end
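Note: a small sketch of the attrs merge performed by `update_online_channel/3`; the API fields are invented:

```elixir
now = DateTime.utc_now(:second)

attrs =
  %{title: "painting stream", is_live: true, nsfw: false}
  |> Map.merge(%{last_live_at: now, last_fetched_at: now})

# attrs now carries the provider's API fields plus fresh timestamps,
# ready to be handed to Channels.update_channel_state/2.
```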
@@ -1,30 +1,28 @@
 defmodule Philomena.Channels.PicartoChannel do
   @api_online "https://api.picarto.tv/api/v1/online?adult=true&gaming=true"

-  @spec live_channels(DateTime.t()) :: map()
-  def live_channels(now) do
+  @spec live_channels() :: map()
+  def live_channels do
     @api_online
     |> PhilomenaProxy.Http.get()
     |> case do
       {:ok, %{body: body, status: 200}} ->
         body
         |> Jason.decode!()
-        |> Map.new(&{&1["name"], fetch(&1, now)})
+        |> Map.new(&{&1["name"], fetch(&1)})

       _error ->
         %{}
     end
   end

-  defp fetch(api, now) do
+  defp fetch(api) do
     %{
       title: api["title"],
       is_live: true,
       nsfw: api["adult"],
       viewers: api["viewers"],
       thumbnail_url: api["thumbnails"]["web"],
-      last_fetched_at: now,
-      last_live_at: now,
       description: nil
     }
   end
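Note: with the timestamps gone, `live_channels/0` now returns only API-derived fields, keyed by channel name; an invented example of the shape:

```elixir
%{
  "example_channel" => %{
    title: "example stream",
    is_live: true,
    nsfw: false,
    viewers: 42,
    thumbnail_url: "https://example.invalid/thumb.jpg",
    description: nil
  }
}
```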
@@ -1,30 +1,28 @@
 defmodule Philomena.Channels.PiczelChannel do
   @api_online "https://api.piczel.tv/api/streams"

-  @spec live_channels(DateTime.t()) :: map()
-  def live_channels(now) do
+  @spec live_channels() :: map()
+  def live_channels do
     @api_online
     |> PhilomenaProxy.Http.get()
     |> case do
       {:ok, %{body: body, status: 200}} ->
         body
         |> Jason.decode!()
-        |> Map.new(&{&1["slug"], fetch(&1, now)})
+        |> Map.new(&{&1["slug"], fetch(&1)})

       _error ->
         %{}
     end
   end

-  defp fetch(api, now) do
+  defp fetch(api) do
     %{
       title: api["title"],
       is_live: api["live"],
       nsfw: api["adult"],
       viewers: api["viewers"],
-      thumbnail_url: api["user"]["avatar"]["avatar"]["url"],
-      last_fetched_at: now,
-      last_live_at: now
+      thumbnail_url: api["user"]["avatar"]["avatar"]["url"]
     }
   end
 end
@@ -1,5 +1,5 @@
 defmodule Philomena.Comments.SearchIndex do
-  @behaviour PhilomenaQuery.SearchIndex
+  @behaviour PhilomenaQuery.Search.Index

   @impl true
   def index_name do
@@ -1,5 +1,5 @@
 defmodule Philomena.Filters.SearchIndex do
-  @behaviour PhilomenaQuery.SearchIndex
+  @behaviour PhilomenaQuery.Search.Index

   @impl true
   def index_name do
@@ -333,7 +333,7 @@ defmodule Philomena.Galleries do
     end)

     changes
-    |> Enum.map(fn change ->
+    |> Enum.each(fn change ->
       id = Keyword.fetch!(change, :id)
       change = Keyword.delete(change, :id)
@@ -1,5 +1,5 @@
 defmodule Philomena.Galleries.SearchIndex do
-  @behaviour PhilomenaQuery.SearchIndex
+  @behaviour PhilomenaQuery.Search.Index

   @impl true
   def index_name do
@@ -1,5 +1,5 @@
 defmodule Philomena.Images.SearchIndex do
-  @behaviour PhilomenaQuery.SearchIndex
+  @behaviour PhilomenaQuery.Search.Index

   @impl true
   def index_name do
@@ -1,5 +1,5 @@
 defmodule Philomena.Posts.SearchIndex do
-  @behaviour PhilomenaQuery.SearchIndex
+  @behaviour PhilomenaQuery.Search.Index

   @impl true
   def index_name do
@@ -1,5 +1,5 @@
 defmodule Philomena.Reports.SearchIndex do
-  @behaviour PhilomenaQuery.SearchIndex
+  @behaviour PhilomenaQuery.Search.Index

   @impl true
   def index_name do
@@ -81,6 +81,31 @@ defmodule Philomena.Tags do
   """
   def get_tag!(id), do: Repo.get!(Tag, id)

+  @doc """
+  Gets a single tag by its name, or the tag it is aliased to, if it is aliased.
+
+  Returns nil if the tag does not exist.
+
+  ## Examples
+
+      iex> get_tag_or_alias_by_name("safe")
+      %Tag{}
+
+      iex> get_tag_or_alias_by_name("nonexistent")
+      nil
+
+  """
+  def get_tag_or_alias_by_name(name) do
+    Tag
+    |> where(name: ^name)
+    |> preload(:aliased_tag)
+    |> Repo.one()
+    |> case do
+      nil -> nil
+      tag -> tag.aliased_tag || tag
+    end
+  end
+
   @doc """
   Creates a tag.
101 lib/philomena/tags/local_autocomplete.ex Normal file
@@ -0,0 +1,101 @@
defmodule Philomena.Tags.LocalAutocomplete do
  alias Philomena.Images.Tagging
  alias Philomena.Tags.Tag
  alias Philomena.Repo
  import Ecto.Query

  defmodule Entry do
    @moduledoc """
    An individual entry record for autocomplete generation.
    """

    @type t :: %__MODULE__{
            name: String.t(),
            images_count: integer(),
            id: integer(),
            alias_name: String.t() | nil
          }

    defstruct name: "",
              images_count: 0,
              id: 0,
              alias_name: nil
  end

  @type entry_list() :: [Entry.t()]

  @type tag_id :: integer()
  @type assoc_map() :: %{optional(String.t()) => [tag_id()]}

  @doc """
  Get a flat list of entry records for all of the top `amount` tags, and all of their
  aliases.
  """
  @spec get_tags(integer()) :: entry_list()
  def get_tags(amount) do
    tags = top_tags(amount)
    aliases = aliases_of_tags(tags)
    aliases ++ tags
  end

  @doc """
  Get a map of tag names to their most associated tag ids.

  For every tag entry, its associated tags satisfy the following properties:
  - is not the same as the entry's tag id
  - of a sample of 100 images, appear simultaneously more than 50% of the time
  """
  @spec get_associations(entry_list(), integer()) :: assoc_map()
  def get_associations(tags, amount) do
    tags
    |> Enum.filter(&is_nil(&1.alias_name))
    |> Map.new(&{&1.name, associated_tag_ids(&1, amount)})
  end

  defp top_tags(amount) do
    query =
      from t in Tag,
        where: t.images_count > 0,
        select: %Entry{name: t.name, images_count: t.images_count, id: t.id},
        order_by: [desc: :images_count],
        limit: ^amount

    Repo.all(query)
  end

  defp aliases_of_tags(tags) do
    ids = Enum.map(tags, & &1.id)

    query =
      from t in Tag,
        where: t.aliased_tag_id in ^ids,
        inner_join: a in assoc(t, :aliased_tag),
        select: %Entry{name: t.name, images_count: 0, id: 0, alias_name: a.name}

    Repo.all(query)
  end

  defp associated_tag_ids(entry, amount) do
    image_sample_query =
      from it in Tagging,
        where: it.tag_id == ^entry.id,
        select: it.image_id,
        order_by: [asc: fragment("random()")],
        limit: 100

    # Select the tags from those images which have more uses than
    # the current one being considered, and overlap more than 50%
    assoc_query =
      from it in Tagging,
        inner_join: t in assoc(it, :tag),
        where: t.images_count > ^entry.images_count,
        where: it.image_id in subquery(image_sample_query),
        group_by: t.id,
        order_by: [desc: fragment("count(*)")],
        having: fragment("(100 * count(*)::float / LEAST(?, 100)) > 50", ^entry.images_count),
        select: t.id,
        limit: ^amount

    Repo.all(assoc_query, timeout: 120_000)
  end
end
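Note: a sketch of how a caller might combine the two public functions into an autocomplete payload; the limits and output shape are invented:

```elixir
alias Philomena.Tags.LocalAutocomplete

entries = LocalAutocomplete.get_tags(1000)
associations = LocalAutocomplete.get_associations(entries, 10)

# Alias entries carry alias_name and no association entry of their own.
payload =
  Enum.map(entries, fn entry ->
    %{
      name: entry.name,
      aliased_to: entry.alias_name,
      associated_tag_ids: Map.get(associations, entry.name, [])
    }
  end)
```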
@@ -1,5 +1,5 @@
 defmodule Philomena.Tags.SearchIndex do
-  @behaviour PhilomenaQuery.SearchIndex
+  @behaviour PhilomenaQuery.Search.Index

   @impl true
   def index_name do
@@ -540,7 +540,6 @@ defmodule Philomena.Users.User do
     "data:image/png;base64," <> png
   end

-  @spec totp_secret(%Philomena.Users.User{}) :: binary()
   def totp_secret(user) do
     Philomena.Users.Encryptor.decrypt_model(
       user.encrypted_otp_secret,
@@ -17,9 +17,7 @@ defmodule PhilomenaMedia.GifPreview do
   Generate a GIF preview of the given video input with evenly-spaced sample points.

   The input should have pre-computed duration `duration`. The `dimensions`
-  are a `{target_width, target_height}` tuple of the largest dimensions desired,
-  and the image will be resized to fit inside the box of those dimensions,
-  preserving aspect ratio.
+  are a `{target_width, target_height}` tuple.

   Depending on the input file, this may take a long time to process.
@@ -81,8 +79,7 @@ defmodule PhilomenaMedia.GifPreview do
     "#{concat_input_pads} concat=n=#{num_images}, settb=1/#{target_framerate}, setpts=N [concat]"

   scale_filter =
-    "[concat] scale=width=#{target_width}:height=#{target_height}:" <>
-      "force_original_aspect_ratio=decrease [scale]"
+    "[concat] scale=width=#{target_width}:height=#{target_height},setsar=1 [scale]"

   split_filter = "[scale] split [s0][s1]"
@@ -8,6 +8,9 @@ defmodule PhilomenaMedia.Processors.Jpeg do

   @behaviour Processor

+  @exit_success 0
+  @exit_warning 2
+
   @spec versions(Processors.version_list()) :: [Processors.version_filename()]
   def versions(sizes) do
     Enum.map(sizes, fn {name, _} -> "#{name}.jpg" end)
@@ -68,7 +71,7 @@ defmodule PhilomenaMedia.Processors.Jpeg do

       _ ->
         # Transmux only: Strip EXIF without touching orientation
-        {_output, 0} = System.cmd("jpegtran", ["-copy", "none", "-outfile", stripped, file])
+        validate_return(System.cmd("jpegtran", ["-copy", "none", "-outfile", stripped, file]))
     end

     stripped
@@ -77,7 +80,7 @@ defmodule PhilomenaMedia.Processors.Jpeg do
   defp optimize(file) do
     optimized = Briefly.create!(extname: ".jpg")

-    {_output, 0} = System.cmd("jpegtran", ["-optimize", "-outfile", optimized, file])
+    validate_return(System.cmd("jpegtran", ["-optimize", "-outfile", optimized, file]))

     optimized
   end
@@ -108,4 +111,8 @@ defmodule PhilomenaMedia.Processors.Jpeg do
   defp srgb_profile do
     Path.join(File.cwd!(), "priv/icc/sRGB.icc")
   end
+
+  defp validate_return({_output, ret}) when ret in [@exit_success, @exit_warning] do
+    :ok
+  end
 end
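Note: `jpegtran` (libjpeg) conventionally exits with 0 on success and 2 (EXIT_WARNING) on recoverable warnings, which is presumably why both codes are accepted; any other code fails the single function clause and crashes, much as the old `{_output, 0} =` match did:

```elixir
# Illustration only; validate_return/1 is private in the module above.
validate_return({"", 0})  # => :ok
validate_return({"", 2})  # => :ok  (warning tolerated)
validate_return({"", 1})  # ** (FunctionClauseError)
```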
@@ -87,7 +87,7 @@ defmodule PhilomenaMedia.Processors.Webm do

     cond do
       thumb_name in [:thumb, :thumb_small, :thumb_tiny] ->
-        gif = scale_gif(file, duration, target_dimensions)
+        gif = scale_gif(file, duration, dimensions, target_dimensions)

         [
           {:copy, webm, "#{thumb_name}.webm"},
@@ -104,10 +104,9 @@ defmodule PhilomenaMedia.Processors.Webm do
   end

   defp scale_videos(file, dimensions, target_dimensions) do
-    {width, height} = box_dimensions(dimensions, target_dimensions)
+    filter = scale_filter(dimensions, target_dimensions)
     webm = Briefly.create!(extname: ".webm")
     mp4 = Briefly.create!(extname: ".mp4")
-    scale_filter = "scale=w=#{width}:h=#{height}"

     {_output, 0} =
       System.cmd("ffmpeg", [
@@ -131,7 +130,7 @@ defmodule PhilomenaMedia.Processors.Webm do
         "-crf",
         "31",
         "-vf",
-        scale_filter,
+        filter,
         "-threads",
         "4",
         "-max_muxing_queue_size",
@@ -152,7 +151,7 @@ defmodule PhilomenaMedia.Processors.Webm do
         "-b:v",
         "5M",
         "-vf",
-        scale_filter,
+        filter,
         "-threads",
         "4",
         "-max_muxing_queue_size",
@@ -164,9 +163,8 @@ defmodule PhilomenaMedia.Processors.Webm do
   end

   defp scale_mp4_only(file, dimensions, target_dimensions) do
-    {width, height} = box_dimensions(dimensions, target_dimensions)
+    filter = scale_filter(dimensions, target_dimensions)
     mp4 = Briefly.create!(extname: ".mp4")
-    scale_filter = "scale=w=#{width}:h=#{height}"

     {_output, 0} =
       System.cmd("ffmpeg", [
@@ -188,7 +186,7 @@ defmodule PhilomenaMedia.Processors.Webm do
         "-b:v",
         "5M",
         "-vf",
-        scale_filter,
+        filter,
         "-threads",
         "4",
         "-max_muxing_queue_size",
@@ -199,17 +197,23 @@ defmodule PhilomenaMedia.Processors.Webm do
     mp4
   end

-  defp scale_gif(file, duration, dimensions) do
+  defp scale_gif(file, duration, dimensions, target_dimensions) do
+    {width, height} = box_dimensions(dimensions, target_dimensions)
     gif = Briefly.create!(extname: ".gif")

-    GifPreview.preview(file, gif, duration, dimensions)
+    GifPreview.preview(file, gif, duration, {width, height})

     gif
   end

+  defp scale_filter(dimensions, target_dimensions) do
+    {width, height} = box_dimensions(dimensions, target_dimensions)
+    "scale=w=#{width}:h=#{height},setsar=1"
+  end
+
   # x264 requires image dimensions to be a multiple of 2
   # -2 = ~1
-  def box_dimensions({width, height}, {target_width, target_height}) do
+  defp box_dimensions({width, height}, {target_width, target_height}) do
     ratio = min(target_width / width, target_height / height)
     new_width = min(max(trunc(width * ratio) &&& -2, 2), target_width)
     new_height = min(max(trunc(height * ratio) &&& -2, 2), target_height)
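Note: the `# -2 = ~1` comment is shorthand for two's complement: `-2` is all ones except the lowest bit, so `x &&& -2` clears bit 0 and rounds a dimension down to the nearest even number, as x264 requires:

```elixir
import Bitwise

# -2 in two's complement is ...11111110, so &&& -2 clears the low bit.
765 &&& -2  # => 764
764 &&& -2  # => 764
```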
@@ -212,9 +212,7 @@ defmodule PhilomenaQuery.Parse.Parser do
   end

   defp debug_tokens(tokens) do
-    tokens
-    |> Enum.map(fn {_k, v} -> v end)
-    |> Enum.join("")
+    Enum.map_join(tokens, fn {_k, v} -> v end)
   end

   #
@@ -26,7 +26,6 @@ defmodule PhilomenaQuery.Parse.String do
     str
     |> String.replace("\r", "")
     |> String.split("\n", trim: true)
-    |> Enum.map(fn s -> "(#{s})" end)
-    |> Enum.join(" || ")
+    |> Enum.map_join(" || ", &"(#{&1})")
   end
 end
@@ -10,10 +10,10 @@ defmodule PhilomenaQuery.Search do
   """

   alias PhilomenaQuery.Batch
+  alias PhilomenaQuery.Search.Api
   alias Philomena.Repo
   require Logger
   import Ecto.Query
-  import Elastix.HTTP

   # todo: fetch through compile_env?
   @policy Philomena.SearchPolicy
@@ -85,11 +85,7 @@ defmodule PhilomenaQuery.Search do
   def create_index!(module) do
     index = @policy.index_for(module)

-    Elastix.Index.create(
-      @policy.opensearch_url(),
-      index.index_name(),
-      index.mapping()
-    )
+    Api.create_index(@policy.opensearch_url(), index.index_name(), index.mapping())
   end

   @doc ~S"""
@@ -109,7 +105,7 @@ defmodule PhilomenaQuery.Search do
   def delete_index!(module) do
     index = @policy.index_for(module)

-    Elastix.Index.delete(@policy.opensearch_url(), index.index_name())
+    Api.delete_index(@policy.opensearch_url(), index.index_name())
   end

   @doc ~S"""
@@ -132,9 +128,7 @@ defmodule PhilomenaQuery.Search do
     index_name = index.index_name()
     mapping = index.mapping().mappings.properties

-    Elastix.Mapping.put(@policy.opensearch_url(), index_name, "_doc", %{properties: mapping},
-      include_type_name: true
-    )
+    Api.update_index_mapping(@policy.opensearch_url(), index_name, %{properties: mapping})
   end

   @doc ~S"""
@@ -157,13 +151,7 @@ defmodule PhilomenaQuery.Search do
     index = @policy.index_for(module)
     data = index.as_json(doc)

-    Elastix.Document.index(
-      @policy.opensearch_url(),
-      index.index_name(),
-      "_doc",
-      data.id,
-      data
-    )
+    Api.index_document(@policy.opensearch_url(), index.index_name(), data, data.id)
   end

   @doc ~S"""
@@ -186,12 +174,7 @@ defmodule PhilomenaQuery.Search do
   def delete_document(id, module) do
     index = @policy.index_for(module)

-    Elastix.Document.delete(
-      @policy.opensearch_url(),
-      index.index_name(),
-      "_doc",
-      id
-    )
+    Api.delete_document(@policy.opensearch_url(), index.index_name(), id)
   end

   @doc """
@@ -231,12 +214,7 @@ defmodule PhilomenaQuery.Search do
       ]
     end)

-    Elastix.Bulk.post(
-      @policy.opensearch_url(),
-      lines,
-      index: index.index_name(),
-      httpoison_options: [timeout: 30_000]
-    )
+    Api.bulk(@policy.opensearch_url(), lines)
   end)
 end
@@ -272,11 +250,6 @@ defmodule PhilomenaQuery.Search do
   def update_by_query(module, query_body, set_replacements, replacements) do
     index = @policy.index_for(module)

-    url =
-      @policy.opensearch_url()
-      |> prepare_url([index.index_name(), "_update_by_query"])
-      |> append_query_string(%{conflicts: "proceed", wait_for_completion: "false"})
-
     # "Painless" scripting language
     script = """
     // Replace values in "sets" (arrays in the source document)
@@ -320,7 +293,7 @@ defmodule PhilomenaQuery.Search do
     """

     body =
-      Jason.encode!(%{
+      %{
        script: %{
          source: script,
          params: %{
@@ -329,9 +302,9 @@ defmodule PhilomenaQuery.Search do
        }
      },
      query: query_body
-    })
+    }

-    {:ok, %{status_code: 200}} = Elastix.HTTP.post(url, body)
+    Api.update_by_query(@policy.opensearch_url(), index.index_name(), body)
   end

   @doc ~S"""
@@ -360,13 +333,8 @@ defmodule PhilomenaQuery.Search do
   def search(module, query_body) do
     index = @policy.index_for(module)

-    {:ok, %{body: results, status_code: 200}} =
-      Elastix.Search.search(
-        @policy.opensearch_url(),
-        index.index_name(),
-        [],
-        query_body
-      )
+    {:ok, %{body: results, status: 200}} =
+      Api.search(@policy.opensearch_url(), index.index_name(), query_body)

     results
   end
@@ -401,13 +369,8 @@ defmodule PhilomenaQuery.Search do
       ]
     end)

-    {:ok, %{body: results, status_code: 200}} =
-      Elastix.Search.search(
-        @policy.opensearch_url(),
-        "_all",
-        [],
-        msearch_body
-      )
+    {:ok, %{body: results, status: 200}} =
+      Api.msearch(@policy.opensearch_url(), msearch_body)

     results["responses"]
   end
141 lib/philomena_query/search/api.ex Normal file
@@ -0,0 +1,141 @@
defmodule PhilomenaQuery.Search.Api do
  @moduledoc """
  Interaction with OpenSearch API by endpoint name.

  See https://opensearch.org/docs/latest/api-reference for a complete reference.
  """

  alias PhilomenaQuery.Search.Client

  @type server_url :: String.t()
  @type index_name :: String.t()

  @type properties :: map()
  @type mapping :: map()
  @type document :: map()
  @type document_id :: integer()

  @doc """
  Create the index named `name` with the given `mapping`.

  https://opensearch.org/docs/latest/api-reference/index-apis/create-index/
  """
  @spec create_index(server_url(), index_name(), mapping()) :: Client.result()
  def create_index(url, name, mapping) do
    url
    |> prepare_url([name])
    |> Client.put(mapping)
  end

  @doc """
  Delete the index named `name`.

  https://opensearch.org/docs/latest/api-reference/index-apis/delete-index/
  """
  @spec delete_index(server_url(), index_name()) :: Client.result()
  def delete_index(url, name) do
    url
    |> prepare_url([name])
    |> Client.delete()
  end

  @doc """
  Update the index named `name` with the given `properties`.

  https://opensearch.org/docs/latest/api-reference/index-apis/put-mapping/
  """
  @spec update_index_mapping(server_url(), index_name(), properties()) :: Client.result()
  def update_index_mapping(url, name, properties) do
    url
    |> prepare_url([name, "_mapping"])
    |> Client.put(properties)
  end

  @doc """
  Index `document` in the index named `name` with integer id `id`.

  https://opensearch.org/docs/latest/api-reference/document-apis/index-document/
  """
  @spec index_document(server_url(), index_name(), document(), document_id()) :: Client.result()
  def index_document(url, name, document, id) do
    url
    |> prepare_url([name, "_doc", Integer.to_string(id)])
    |> Client.put(document)
  end

  @doc """
  Remove document in the index named `name` with integer id `id`.

  https://opensearch.org/docs/latest/api-reference/document-apis/delete-document/
  """
  @spec delete_document(server_url(), index_name(), document_id()) :: Client.result()
  def delete_document(url, name, id) do
    url
    |> prepare_url([name, "_doc", Integer.to_string(id)])
    |> Client.delete()
  end

  @doc """
  Bulk operation.

  https://opensearch.org/docs/latest/api-reference/document-apis/bulk/
  """
  @spec bulk(server_url(), list()) :: Client.result()
  def bulk(url, lines) do
    url
    |> prepare_url(["_bulk"])
    |> Client.post(lines)
  end

  @doc """
  Asynchronous scripted updates.

  Sets `conflicts` to `proceed` and `wait_for_completion` to `false`.

  https://opensearch.org/docs/latest/api-reference/document-apis/update-by-query/
  """
  @spec update_by_query(server_url(), index_name(), map()) :: Client.result()
  def update_by_query(url, name, body) do
    url
    |> prepare_url([name, "_update_by_query"])
    |> append_query_string(%{conflicts: "proceed", wait_for_completion: "false"})
    |> Client.post(body)
  end

  @doc """
  Search for documents in index named `name` with `query`.

  https://opensearch.org/docs/latest/api-reference/search/
  """
  @spec search(server_url(), index_name(), map()) :: Client.result()
  def search(url, name, body) do
    url
    |> prepare_url([name, "_search"])
    |> Client.get(body)
  end

  @doc """
  Search for documents in all indices with specified `lines`.

  https://opensearch.org/docs/latest/api-reference/multi-search/
  """
  @spec msearch(server_url(), list()) :: Client.result()
  def msearch(url, lines) do
    url
    |> prepare_url(["_msearch"])
    |> Client.get(lines)
  end

  @spec prepare_url(String.t(), [String.t()]) :: String.t()
  defp prepare_url(url, parts) when is_list(parts) do
    # Combine path generated by the parts with the main URL
    url
    |> URI.merge(Path.join(parts))
    |> to_string()
  end

  @spec append_query_string(String.t(), map()) :: String.t()
  defp append_query_string(url, params) do
    url <> "?" <> URI.encode_query(params)
  end
end
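Note: a minimal sketch of calling the new endpoint wrappers directly; the URL and index name are invented, and in the codebase these calls are reached through `PhilomenaQuery.Search`:

```elixir
alias PhilomenaQuery.Search.Api

url = "http://localhost:9200"

{:ok, %{status: 200}} = Api.create_index(url, "posts", %{mappings: %{}})
{:ok, %{status: 200}} = Api.index_document(url, "posts", %{id: 1, body: "hi"}, 1)
{:ok, %{body: results, status: 200}} = Api.search(url, "posts", %{query: %{match_all: %{}}})
```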
62 lib/philomena_query/search/client.ex Normal file
@@ -0,0 +1,62 @@
defmodule PhilomenaQuery.Search.Client do
  @moduledoc """
  HTTP-level interaction with OpenSearch JSON API.

  Allows two styles of parameters for bodies:
  - map: the map is directly encoded as a JSON object
  - list: each element of the list is encoded as a JSON object and interspersed with newlines.
    This is used by bulk APIs.
  """

  @receive_timeout 30_000

  @type list_or_map :: list() | map()
  @type result :: {:ok, Req.Response.t()} | {:error, Exception.t()}

  @doc """
  HTTP GET
  """
  @spec get(String.t(), list_or_map()) :: result()
  def get(url, body) do
    Req.get(url, encode_options(body))
  end

  @doc """
  HTTP POST
  """
  @spec post(String.t(), list_or_map()) :: result()
  def post(url, body) do
    Req.post(url, encode_options(body))
  end

  @doc """
  HTTP PUT
  """
  @spec put(String.t(), list_or_map()) :: result()
  def put(url, body) do
    Req.put(url, encode_options(body))
  end

  @doc """
  HTTP DELETE
  """
  @spec delete(String.t()) :: result()
  def delete(url) do
    Req.delete(url, encode_options())
  end

  defp encode_body(body) when is_map(body),
    do: Jason.encode!(body)

  defp encode_body(body) when is_list(body),
    do: [Enum.map_intersperse(body, "\n", &Jason.encode!(&1)), "\n"]

  defp encode_options,
    do: [headers: request_headers(), receive_timeout: @receive_timeout]

  defp encode_options(body),
    do: Keyword.merge(encode_options(), body: encode_body(body))

  defp request_headers,
    do: [content_type: "application/json"]
end
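Note: the list clause of `encode_body/1` produces the newline-delimited JSON that `_bulk` and `_msearch` expect; a quick illustration with invented payloads:

```elixir
lines = [%{delete: %{_id: 1}}, %{delete: %{_id: 2}}]

IO.iodata_to_binary([Enum.map_intersperse(lines, "\n", &Jason.encode!/1), "\n"])
# => "{\"delete\":{\"_id\":1}}\n{\"delete\":{\"_id\":2}}\n"
```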
@@ -1,4 +1,4 @@
-defmodule PhilomenaQuery.SearchIndex do
+defmodule PhilomenaQuery.Search.Index do
   @moduledoc """
   Behaviour module for schemas with search indexing.
   """
@@ -13,12 +13,12 @@ defmodule PhilomenaWeb.Admin.ArtistLink.VerificationController do
     preload: [:user]

   def create(conn, _params) do
-    {:ok, result} =
+    {:ok, artist_link} =
       ArtistLinks.verify_artist_link(conn.assigns.artist_link, conn.assigns.current_user)

     conn
     |> put_flash(:info, "Artist link successfully verified.")
-    |> moderation_log(details: &log_details/2, data: result.artist_link)
+    |> moderation_log(details: &log_details/2, data: artist_link)
     |> redirect(to: ~p"/admin/artist_links")
   end
@@ -53,9 +53,6 @@ defmodule PhilomenaWeb.Admin.UserBanController do
       |> moderation_log(details: &log_details/2, data: user_ban)
       |> redirect(to: ~p"/admin/user_bans")

-      {:error, :user_ban, changeset, _changes} ->
-        render(conn, "new.html", changeset: changeset)
-
       {:error, changeset} ->
         render(conn, "new.html", changeset: changeset)
     end
@@ -1,15 +1,15 @@
 defmodule PhilomenaWeb.AdvertController do
   use PhilomenaWeb, :controller

-  alias PhilomenaWeb.AdvertUpdater
   alias Philomena.Adverts.Advert
+  alias Philomena.Adverts

   plug :load_resource, model: Advert

   def show(conn, _params) do
     advert = conn.assigns.advert

-    AdvertUpdater.cast(:click, advert.id)
+    Adverts.record_click(advert)

     redirect(conn, external: advert.link)
   end
@@ -1,5 +1,4 @@
 defmodule PhilomenaWeb.AdvertPlug do
-  alias PhilomenaWeb.AdvertUpdater
   alias Philomena.Adverts
   alias Plug.Conn

@@ -19,7 +18,7 @@ defmodule PhilomenaWeb.AdvertPlug do
     do: Conn.assign(conn, :advert, record_impression(Adverts.random_live()))

   defp maybe_assign_ad(conn, image, true),
-    do: Conn.assign(conn, :advert, record_impression(Adverts.random_live_for(image)))
+    do: Conn.assign(conn, :advert, record_impression(Adverts.random_live(image)))

   defp maybe_assign_ad(conn, _image, _false),
     do: Conn.assign(conn, :advert, nil)
@@ -33,7 +32,7 @@ defmodule PhilomenaWeb.AdvertPlug do
   defp record_impression(nil), do: nil

   defp record_impression(advert) do
-    AdvertUpdater.cast(:impression, advert.id)
+    Adverts.record_impression(advert)

     advert
   end
@@ -22,7 +22,7 @@ defmodule PhilomenaWeb.ApiRequireAuthorizationPlug do

     conn
     |> maybe_unauthorized(user)
-    |> maybe_forbidden(Bans.exists_for?(user, conn.remote_ip, "NOTAPI"))
+    |> maybe_forbidden(Bans.find(user, conn.remote_ip, "NOTAPI"))
   end

   defp maybe_unauthorized(conn, nil) do
@@ -37,10 +37,7 @@ defmodule PhilomenaWeb.ContentSecurityPolicyPlug do
       {:media_src, ["'self'", "blob:", "data:", cdn_uri, camo_uri]}
     ]

-    csp_value =
-      csp_config
-      |> Enum.map(&cspify_element/1)
-      |> Enum.join("; ")
+    csp_value = Enum.map_join(csp_config, "; ", &cspify_element/1)

     csp_relaxed? do
       if conn.status == 500 do
@@ -20,7 +20,7 @@ defmodule PhilomenaWeb.CurrentBanPlug do
     user = conn.assigns.current_user
     ip = conn.remote_ip

-    ban = Bans.exists_for?(user, ip, fingerprint)
+    ban = Bans.find(user, ip, fingerprint)

     Conn.assign(conn, :current_ban, ban)
   end
@@ -129,9 +129,7 @@ defmodule PhilomenaWeb.AppView do
   def escape_nl2br(text) do
     text
     |> String.split("\n")
-    |> Enum.map(&html_escape/1)
-    |> Enum.map(&safe_to_string/1)
-    |> Enum.join("<br/>")
+    |> Enum.map_intersperse("<br />", &safe_to_string(html_escape(&1)))
     |> raw()
   end
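Note: besides dropping two passes over the list, `Enum.map_intersperse/3` keeps the result as iodata, which `raw/1` accepts directly; the old chain built an intermediate binary. A quick illustration:

```elixir
Enum.map_intersperse(["a", "b"], "<br />", &String.upcase/1)
# => ["A", "<br />", "B"]  (iodata; no intermediate binary built)

["a", "b"] |> Enum.map(&String.upcase/1) |> Enum.join("<br />")
# => "A<br />B"
```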
1 mix.exs
@@ -54,7 +54,6 @@ defmodule Philomena.MixProject do
       {:bcrypt_elixir, "~> 3.0"},
       {:pot, "~> 1.0"},
       {:secure_compare, "~> 0.1"},
-      {:elastix, "~> 0.10"},
       {:nimble_parsec, "~> 1.2"},
       {:scrivener_ecto, "~> 2.7"},
       {:pbkdf2, ">= 0.0.0",
3 mix.lock
@@ -19,7 +19,6 @@
   "ecto": {:hex, :ecto, "3.11.2", "e1d26be989db350a633667c5cda9c3d115ae779b66da567c68c80cfb26a8c9ee", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "3c38bca2c6f8d8023f2145326cc8a80100c3ffe4dcbd9842ff867f7fc6156c65"},
   "ecto_network": {:hex, :ecto_network, "1.5.0", "a930c910975e7a91237b858ebf0f4ad7b2aae32fa846275aa203cb858459ec73", [:mix], [{:ecto_sql, ">= 3.0.0", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:phoenix_html, ">= 0.0.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:postgrex, ">= 0.14.0", [hex: :postgrex, repo: "hexpm", optional: false]}], "hexpm", "4d614434ae3e6d373a2f693d56aafaa3f3349714668ffd6d24e760caf578aa2f"},
   "ecto_sql": {:hex, :ecto_sql, "3.11.2", "c7cc7f812af571e50b80294dc2e535821b3b795ce8008d07aa5f336591a185a8", [:mix], [{:db_connection, "~> 2.4.1 or ~> 2.5", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.11.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.6.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.16 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "73c07f995ac17dbf89d3cfaaf688fcefabcd18b7b004ac63b0dc4ef39499ed6b"},
-  "elastix": {:hex, :elastix, "0.10.0", "7567da885677ba9deffc20063db5f3ca8cd10f23cff1ab3ed9c52b7063b7e340", [:mix], [{:httpoison, "~> 1.4", [hex: :httpoison, repo: "hexpm", optional: false]}, {:poison, "~> 3.0 or ~> 4.0", [hex: :poison, repo: "hexpm", optional: true]}, {:retry, "~> 0.8", [hex: :retry, repo: "hexpm", optional: false]}], "hexpm", "5fb342ce068b20f7845f5dd198c2dc80d967deafaa940a6e51b846db82696d1d"},
   "elixir_make": {:hex, :elixir_make, "0.8.4", "4960a03ce79081dee8fe119d80ad372c4e7badb84c493cc75983f9d3bc8bde0f", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:certifi, "~> 2.0", [hex: :certifi, repo: "hexpm", optional: true]}], "hexpm", "6e7f1d619b5f61dfabd0a20aa268e575572b542ac31723293a4c1a567d5ef040"},
   "elixir_uuid": {:hex, :elixir_uuid, "1.2.1", "dce506597acb7e6b0daeaff52ff6a9043f5919a4c3315abb4143f0b00378c097", [:mix], [], "hexpm", "f7eba2ea6c3555cea09706492716b0d87397b88946e6380898c2889d68585752"},
   "erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"},
@@ -31,9 +30,7 @@
   "file_system": {:hex, :file_system, "1.0.0", "b689cc7dcee665f774de94b5a832e578bd7963c8e637ef940cd44327db7de2cd", [:mix], [], "hexpm", "6752092d66aec5a10e662aefeed8ddb9531d79db0bc145bb8c40325ca1d8536d"},
   "finch": {:hex, :finch, "0.18.0", "944ac7d34d0bd2ac8998f79f7a811b21d87d911e77a786bc5810adb75632ada4", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: false]}, {:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.3", [hex: :mint, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.4 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 0.2.6 or ~> 1.0", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "69f5045b042e531e53edc2574f15e25e735b522c37e2ddb766e15b979e03aa65"},
   "gettext": {:hex, :gettext, "0.24.0", "6f4d90ac5f3111673cbefc4ebee96fe5f37a114861ab8c7b7d5b30a1108ce6d8", [:mix], [{:expo, "~> 0.5.1", [hex: :expo, repo: "hexpm", optional: false]}], "hexpm", "bdf75cdfcbe9e4622dd18e034b227d77dd17f0f133853a1c73b97b3d6c770e8b"},
-  "hackney": {:hex, :hackney, "1.20.1", "8d97aec62ddddd757d128bfd1df6c5861093419f8f7a4223823537bad5d064e2", [:rebar3], [{:certifi, "~> 2.12.0", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "~> 6.1.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "~> 1.0.0", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~> 1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:parse_trans, "3.4.1", [hex: :parse_trans, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "~> 1.1.0", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}, {:unicode_util_compat, "~> 0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "fe9094e5f1a2a2c0a7d10918fee36bfec0ec2a979994cff8cfe8058cd9af38e3"},
   "hpax": {:hex, :hpax, "0.2.0", "5a58219adcb75977b2edce5eb22051de9362f08236220c9e859a47111c194ff5", [:mix], [], "hexpm", "bea06558cdae85bed075e6c036993d43cd54d447f76d8190a8db0dc5893fa2f1"},
-  "httpoison": {:hex, :httpoison, "1.8.2", "9eb9c63ae289296a544842ef816a85d881d4a31f518a0fec089aaa744beae290", [:mix], [{:hackney, "~> 1.17", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "2bb350d26972e30c96e2ca74a1aaf8293d61d0742ff17f01e0279fef11599921"},
   "idna": {:hex, :idna, "6.1.1", "8a63070e9f7d0c62eb9d9fcb360a7de382448200fbbd1b106cc96d3d8099df8d", [:rebar3], [{:unicode_util_compat, "~> 0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "92376eb7894412ed19ac475e4a86f7b413c1b9fbb5bd16dccd57934157944cea"},
   "inet_cidr": {:hex, :inet_cidr, "1.0.8", "d26bb7bdbdf21ae401ead2092bf2bb4bf57fe44a62f5eaa5025280720ace8a40", [:mix], [], "hexpm", "d5b26da66603bb56c933c65214c72152f0de9a6ea53618b56d63302a68f6a90e"},
   "jason": {:hex, :jason, "1.4.1", "af1504e35f629ddcdd6addb3513c3853991f694921b1b9368b0bd32beb9f1b63", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "fbb01ecdfd565b56261302f7e1fcc27c4fb8f32d56eab74db621fc154604a7a1"},