defmodule PhilomenaWeb.ImageNavigator do
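  @moduledoc """
  Finds the image immediately before or after the current image under the
  requested sort order, using a single-result Elasticsearch query scoped to
  images past the current image's sort values.
  """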

  alias PhilomenaWeb.ImageSorter
  alias Philomena.Images.{Image, ElasticsearchIndex}
  alias Philomena.Elasticsearch
  alias Philomena.Repo

  import Ecto.Query

  # We get consecutive images by finding all images greater than or less than
  # the current image, and grabbing the FIRST one
  @range_comparison_for_order %{
    "asc" => :gt,
    "desc" => :lt
  }

  # If we didn't reverse for prev, it would be the LAST image, which would
  # make Elasticsearch choke on deep pagination
  @order_for_dir %{
    next: %{"asc" => "asc", "desc" => "desc"},
    prev: %{"asc" => "desc", "desc" => "asc"}
  }
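
  # For example, paging backwards (rel = :prev) through an "asc" sort flips the
  # query order to "desc", and @range_comparison_for_order["desc"] then selects
  # :lt, so the single result we fetch is the image just before the current one.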

  # Inclusive counterparts of :gt/:lt, used for the "for_next" filters that are
  # chained onto later sort fields in filterify/2
  @range_map %{
    gt: :gte,
    lt: :lte
  }
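
  @doc """
  Returns the image adjacent to `image` in the direction `rel` (`:next` or
  `:prev`) under the sort given by `params`, or `image` itself when no
  adjacent image matches. `compiled_query` and `compiled_filter` are the
  caller's already-compiled Elasticsearch query and filter.
  """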
  def find_consecutive(image, rel, params, compiled_query, compiled_filter) do
    # Build the Elasticsearch document for the current image so we can read its
    # sort field values
    image_index =
      Image
      |> where(id: ^image.id)
      |> preload([:gallery_interactions, tags: :aliases])
      |> Repo.one()
      |> Map.merge(empty_fields())
      |> ElasticsearchIndex.as_json()

    sort_data = ImageSorter.parse_sort(params)

    # Derive a sort and a range filter from each sort field
    {sorts, filters} =
      sort_data.sorts
      |> Enum.map(&extract_filters(&1, image_index, rel))
      |> Enum.unzip()

    sorts = sortify(sorts, image_index)
    filters = filterify(filters, image_index)

    # Fetch at most one image past the current one; fall back to the current
    # image if nothing matches
    Elasticsearch.search_records(
      Image,
      %{
        query: %{
          bool: %{
            must: List.flatten([compiled_query, sort_data.queries, filters]),
            must_not: [
              compiled_filter,
              %{term: %{hidden_from_users: true}}
            ]
          }
        },
        sort: List.flatten(sorts)
      },
      %{page_size: 1},
      Image
    )
    |> Enum.to_list()
    |> case do
      [] -> image
      [next_image] -> next_image
    end
  end

  defp extract_filters(%{"galleries.position" => term} = sort, image, rel) do
    # Extract gallery ID and current position
    gid = term.nested.filter.term["galleries.id"]
    pos = Enum.find(image[:galleries], &(&1.id == gid)).position

    # Sort in the other direction if we are going backwards
    sd = term.order
    order = @order_for_dir[rel][to_string(sd)]
    term = %{term | order: order}
    sort = %{sort | "galleries.position" => term}

    filter = gallery_range_filter(@range_comparison_for_order[order], pos)

    {[sort], [filter]}
  end
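
  # Judging from the accesses in the clause above, ImageSorter emits gallery
  # sorts shaped roughly like:
  #
  #   %{"galleries.position" => %{order: "asc",
  #                               nested: %{filter: %{term: %{"galleries.id" => gid}}}}}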

  defp extract_filters(sort, image, rel) do
    [{sf, sd}] = Enum.to_list(sort)
    order = @order_for_dir[rel][sd]
    sort = %{sort | sf => order}

    # _random and _score have no per-image value to range-filter on, so emit
    # only the sort for those fields
    cond do
      sf in ["_random", "_score"] ->
        {[sort], []}

      true ->
        field = String.to_existing_atom(sf)
        filter = range_filter(sf, @range_comparison_for_order[order], image[field])

        {[sort], [filter]}
    end
  end
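
  # For the generic clause above: paging backwards (rel = :prev) over a
  # %{"created_at" => "desc"} sort flips the order to "asc" and filters on
  # created_at :gt the current image's value.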

  defp sortify(sorts, _image) do
    List.flatten(sorts)
  end

  # Combines the per-field filters into a keyset-style bool query: match when
  # the first sort field is strictly past the current image's value, or when
  # every earlier field is at-or-past its value (the inclusive "for_next" form)
  # and a later field is strictly past. The current image itself is excluded.
  defp filterify(filters, image) do
    filters = List.flatten(filters)

    filters =
      filters
      |> Enum.with_index()
      |> Enum.map(fn
        {filter, 0} ->
          filter.this

        {filter, i} ->
          filters_so_far =
            filters
            |> Enum.take(i)
            |> Enum.map(& &1.for_next)

          %{
            bool: %{
              must: [filter.this | filters_so_far]
            }
          }
      end)

    %{
      bool: %{
        should: filters,
        must_not: %{term: %{id: image.id}}
      }
    }
  end
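
  # With two sort fields [f1, f2], the shape produced above is roughly:
  #
  #   %{bool: %{should: [f1.this, %{bool: %{must: [f2.this, f1.for_next]}}],
  #             must_not: %{term: %{id: image.id}}}}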

  defp range_filter(sf, dir, val) do
    %{
      this: %{range: %{sf => %{dir => parse_val(val)}}},
      for_next: %{range: %{sf => %{@range_map[dir] => parse_val(val)}}}
    }
  end
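
  # e.g. range_filter("created_at", :gt, ~N[2020-01-01 00:00:00]) gives
  #   %{this: %{range: %{"created_at" => %{gt: "2020-01-01T00:00:00"}}},
  #     for_next: %{range: %{"created_at" => %{gte: "2020-01-01T00:00:00"}}}}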

  defp gallery_range_filter(dir, val) do
    %{
      this: %{
        nested: %{
          path: :galleries,
          query: %{range: %{"galleries.position" => %{dir => val}}}
        }
      },
      for_next: %{
        nested: %{
          path: :galleries,
          query: %{range: %{"galleries.position" => %{@range_map[dir] => val}}}
        }
      }
    }
  end

  # Association placeholders merged in before ElasticsearchIndex.as_json/1 so
  # the document can be built without preloading these associations
  defp empty_fields do
    %{
      user: nil,
      deleter: nil,
      upvoters: [],
      downvoters: [],
      favers: [],
      hiders: []
    }
  end

  # Elasticsearch range filters need datetimes as ISO 8601 strings
  defp parse_val(%NaiveDateTime{} = value), do: NaiveDateTime.to_iso8601(value)
  defp parse_val(value), do: value
end