2020-05-08 04:10:54 +02:00
|
|
|
defmodule PhilomenaWeb.ImageNavigator do
|
|
|
|
alias PhilomenaWeb.ImageSorter
|
2019-12-24 22:14:42 +01:00
|
|
|
alias Philomena.Images.{Image, ElasticsearchIndex}
|
|
|
|
alias Philomena.Elasticsearch
|
2019-11-30 03:33:15 +01:00
|
|
|
alias Philomena.Repo
|
|
|
|
import Ecto.Query
|
|
|
|
|
|
|
|
  # We get consecutive images by finding all images greater than or less than
  # the current image, and grabbing the FIRST one
  @range_comparison_for_order %{
    "asc" => :gt,
    "desc" => :lt
  }

  # If we didn't reverse for prev, it would be the LAST image, which would
  # make Elasticsearch choke on deep pagination
  @order_for_dir %{
    next: %{"asc" => "asc", "desc" => "desc"},
    prev: %{"asc" => "desc", "desc" => "asc"}
  }

  # Maps each strict range comparison to its inclusive counterpart; used by
  # range_filter/3 and gallery_range_filter/2 to build the tie-permitting
  # form of a filter for secondary sort keys.
  @range_map %{
    gt: :gte,
    lt: :lte
  }
|
|
|
|
|
2020-05-08 05:58:21 +02:00
|
|
|
  # Finds the image adjacent to `image` in the direction `rel` (:next or
  # :prev) under the sort described by `params`, by re-running the compiled
  # search with range filters anchored at the current image and a page size
  # of 1. Returns the neighbouring image, or `image` itself when the search
  # comes back empty.
  def find_consecutive(conn, image, rel, params, compiled_query, compiled_filter) do
    # Re-index the current image locally so its sort-field values are
    # available when building the range filters below. Associations that
    # are not preloaded are stubbed out via empty_fields/0 so
    # ElasticsearchIndex.as_json/1 does not fail on them.
    image_index =
      Image
      |> where(id: ^image.id)
      |> preload([:gallery_interactions, tags: :aliases])
      |> Repo.one()
      |> Map.merge(empty_fields())
      |> ElasticsearchIndex.as_json()

    # parse_sort/2 returns both the sort list and a possibly-updated query;
    # note that compiled_query is intentionally rebound here.
    %{query: compiled_query, sorts: sort} = ImageSorter.parse_sort(params, compiled_query)

    # Each sort entry yields a (possibly direction-flipped) sort plus an
    # optional range filter anchored at the current image's value.
    {sorts, filters} =
      sort
      |> Enum.map(&extract_filters(&1, image_index, rel))
      |> Enum.unzip()

    sorts = sortify(sorts, image_index)
    filters = filterify(filters, image_index)

    Elasticsearch.search_records(
      Image,
      %{
        query: %{
          bool: %{
            must: List.flatten([compiled_query, filters]),
            must_not: [
              compiled_filter,
              %{term: %{hidden_from_users: true}},
              hidden_filter(conn.assigns.current_user, conn.params["hidden"])
            ]
          }
        },
        sort: List.flatten(sorts)
      },
      %{page_size: 1},
      Image
    )
    |> Enum.to_list()
    |> case do
      # No neighbour in this direction: stay on the current image.
      [] -> image
      [next_image] -> next_image
    end
  end
|
|
|
|
|
|
|
|
defp extract_filters(%{"galleries.position" => term} = sort, image, rel) do
|
|
|
|
# Extract gallery ID and current position
|
2019-12-27 00:00:44 +01:00
|
|
|
gid = term.nested.filter.term["galleries.id"]
|
2020-01-11 05:20:19 +01:00
|
|
|
pos = Enum.find(image[:galleries], &(&1.id == gid)).position
|
2019-11-30 03:33:15 +01:00
|
|
|
|
|
|
|
# Sort in the other direction if we are going backwards
|
2019-12-26 23:43:32 +01:00
|
|
|
sd = term.order
|
|
|
|
order = @order_for_dir[rel][to_string(sd)]
|
|
|
|
term = %{term | order: order}
|
2019-11-30 03:33:15 +01:00
|
|
|
sort = %{sort | "galleries.position" => term}
|
|
|
|
|
|
|
|
filter = gallery_range_filter(@range_comparison_for_order[order], pos)
|
|
|
|
|
|
|
|
{[sort], [filter]}
|
|
|
|
end
|
|
|
|
|
|
|
|
defp extract_filters(sort, image, rel) do
|
|
|
|
[{sf, sd}] = Enum.to_list(sort)
|
|
|
|
order = @order_for_dir[rel][sd]
|
|
|
|
sort = %{sort | sf => order}
|
|
|
|
|
|
|
|
field = String.to_existing_atom(sf)
|
|
|
|
filter = range_filter(sf, @range_comparison_for_order[order], image[field])
|
|
|
|
|
2020-08-08 02:23:36 +02:00
|
|
|
case sf do
|
|
|
|
"_score" ->
|
2019-11-30 03:33:15 +01:00
|
|
|
{[sort], []}
|
|
|
|
|
2020-08-08 02:23:36 +02:00
|
|
|
_ ->
|
2019-11-30 03:33:15 +01:00
|
|
|
{[sort], [filter]}
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
defp sortify(sorts, _image) do
|
|
|
|
List.flatten(sorts)
|
|
|
|
end
|
|
|
|
|
|
|
|
defp filterify(filters, image) do
|
|
|
|
filters = List.flatten(filters)
|
|
|
|
|
|
|
|
filters =
|
|
|
|
filters
|
|
|
|
|> Enum.with_index()
|
|
|
|
|> Enum.map(fn
|
2020-01-11 05:20:19 +01:00
|
|
|
{filter, 0} ->
|
|
|
|
filter.this
|
|
|
|
|
2019-11-30 03:33:15 +01:00
|
|
|
{filter, i} ->
|
2020-01-11 05:20:19 +01:00
|
|
|
filters_so_far =
|
2019-11-30 03:33:15 +01:00
|
|
|
filters
|
|
|
|
|> Enum.take(i)
|
|
|
|
|> Enum.map(& &1.for_next)
|
|
|
|
|
|
|
|
%{
|
|
|
|
bool: %{
|
|
|
|
must: [filter.this | filters_so_far]
|
|
|
|
}
|
|
|
|
}
|
|
|
|
end)
|
|
|
|
|
|
|
|
%{
|
|
|
|
bool: %{
|
|
|
|
should: filters,
|
|
|
|
must_not: %{term: %{id: image.id}}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
end
|
|
|
|
|
2020-05-08 05:58:21 +02:00
|
|
|
defp hidden_filter(%{id: id}, param) when param != "1", do: %{term: %{hidden_by_user_ids: id}}
|
|
|
|
defp hidden_filter(_user, _param), do: %{match_none: %{}}
|
|
|
|
|
2019-11-30 03:33:15 +01:00
|
|
|
defp range_filter(sf, dir, val) do
|
|
|
|
%{
|
|
|
|
this: %{range: %{sf => %{dir => parse_val(val)}}},
|
|
|
|
next: %{range: %{sf => %{@range_map[dir] => parse_val(val)}}}
|
|
|
|
}
|
|
|
|
end
|
|
|
|
|
|
|
|
defp gallery_range_filter(dir, val) do
|
|
|
|
%{
|
|
|
|
this: %{
|
|
|
|
nested: %{
|
2020-01-11 05:20:19 +01:00
|
|
|
path: :galleries,
|
2019-11-30 03:33:15 +01:00
|
|
|
query: %{range: %{"galleries.position" => %{dir => val}}}
|
|
|
|
}
|
|
|
|
},
|
|
|
|
next: %{
|
|
|
|
nested: %{
|
2020-01-11 05:20:19 +01:00
|
|
|
path: :galleries,
|
2019-11-30 03:33:15 +01:00
|
|
|
query: %{range: %{"galleries.position" => %{@range_map[dir] => val}}}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
end
|
|
|
|
|
|
|
|
defp empty_fields do
|
|
|
|
%{
|
|
|
|
user: nil,
|
|
|
|
deleter: nil,
|
|
|
|
upvoters: [],
|
|
|
|
downvoters: [],
|
|
|
|
favers: [],
|
2019-12-26 23:43:32 +01:00
|
|
|
hiders: []
|
2019-11-30 03:33:15 +01:00
|
|
|
}
|
|
|
|
end
|
|
|
|
|
|
|
|
defp parse_val(%NaiveDateTime{} = value), do: NaiveDateTime.to_iso8601(value)
|
|
|
|
defp parse_val(value), do: value
|
2019-12-24 22:14:42 +01:00
|
|
|
end
|