Merge pull request #266 from philomena-dev/query-namespace

Split out query features to PhilomenaQuery namespace
Commit 0d6acafc96, authored by liamwhite on 2024-06-03 21:17:00 -04:00 and committed by GitHub (GPG key ID: B5690EEEBB952194).
82 changed files with 1215 additions and 554 deletions


@@ -22,7 +22,7 @@ Once the application has started, navigate to http://localhost:8080 and login wi
If you are running Docker on Windows and the application crashes immediately upon startup, please ensure that `autocrlf` is set to `false` in your Git config, and then re-clone the repository. Additionally, it is recommended that you allocate at least 4GB of RAM to your Docker VM.
-If you run into an Elasticsearch bootstrap error, you may need to increase your `max_map_count` on the host as follows:
+If you run into an OpenSearch bootstrap error, you may need to increase your `max_map_count` on the host as follows:
```
sudo sysctl -w vm.max_map_count=262144
```


@@ -5,9 +5,9 @@ export MIX_ENV=test
# Always install mix dependencies
(cd /srv/philomena && mix deps.get)
-# Sleep to allow Elasticsearch to finish initializing
+# Sleep to allow OpenSearch to finish initializing
# if it's not done doing whatever it does yet
-echo -n "Waiting for Elasticsearch"
+echo -n "Waiting for OpenSearch"
until wget -qO - opensearch:9200; do
echo -n "."


@@ -1,7 +1,7 @@
defmodule Mix.Tasks.ReindexAll do
use Mix.Task
-alias Philomena.Elasticsearch
+alias PhilomenaQuery.Search
alias Philomena.{
Comments.Comment,
@@ -27,7 +27,7 @@ defmodule Mix.Tasks.ReindexAll do
{Filters, Filter}
]
-@shortdoc "Destroys and recreates all Elasticsearch indices."
+@shortdoc "Destroys and recreates all OpenSearch indices."
@requirements ["app.start"]
@impl Mix.Task
def run(args) do
@@ -38,23 +38,23 @@ defmodule Mix.Tasks.ReindexAll do
@indices
|> Enum.map(fn {context, schema} ->
Task.async(fn ->
-Elasticsearch.delete_index!(schema)
-Elasticsearch.create_index!(schema)
-Elasticsearch.reindex(preload(schema, ^context.indexing_preloads()), schema)
+Search.delete_index!(schema)
+Search.create_index!(schema)
+Search.reindex(preload(schema, ^context.indexing_preloads()), schema)
end)
end)
|> Task.await_many(:infinity)
# Reports are a bit special
-Elasticsearch.delete_index!(Report)
-Elasticsearch.create_index!(Report)
+Search.delete_index!(Report)
+Search.create_index!(Report)
Report
|> preload([:user, :admin])
|> Repo.all()
|> Polymorphic.load_polymorphic(reportable: [reportable_id: :reportable_type])
-|> Enum.map(&Elasticsearch.index_document(&1, Report))
+|> Enum.map(&Search.index_document(&1, Report))
end
end

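The renamed module is used exactly like the old `Philomena.Elasticsearch` facade. A minimal sketch of rebuilding a single index under the new namespace, assuming (as the task above does) that the `Philomena.Images` context exposes `indexing_preloads/0`; the same pattern applies to any of the schemas listed in `@indices`:

```
# iex -S mix
import Ecto.Query
alias PhilomenaQuery.Search
alias Philomena.Images
alias Philomena.Images.Image

# Recreate the index, then stream all images back into it in batches.
Search.delete_index!(Image)
Search.create_index!(Image)

Image
|> preload(^Images.indexing_preloads())
|> Search.reindex(Image)
```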

@@ -11,7 +11,7 @@ defmodule Mix.Tasks.UploadToS3 do
alias Philomena.Images.Thumbnailer
alias Philomena.Objects
-alias Philomena.Batch
+alias PhilomenaQuery.Batch
import Ecto.Query
@shortdoc "Dumps existing image files to S3 storage backend"


@@ -1,56 +0,0 @@
defmodule Philomena.Batch do
alias Philomena.Repo
import Ecto.Query
@doc """
Load records from the given queryable in batches, to avoid locking.
Valid options:
* :batch_size
* :id_field
"""
def record_batches(queryable, opts \\ [], callback) do
query_batches(queryable, opts, &callback.(Repo.all(&1)))
end
@doc """
Load queries from the given queryable in batches, to avoid locking.
Valid options:
* :batch_size
* :id_field
"""
def query_batches(queryable, opts \\ [], callback) do
ids = load_ids(queryable, -1, opts)
query_batches(queryable, opts, callback, ids)
end
defp query_batches(_queryable, _opts, _callback, []), do: []
defp query_batches(queryable, opts, callback, ids) do
id_field = Keyword.get(opts, :id_field, :id)
queryable
|> where([m], field(m, ^id_field) in ^ids)
|> callback.()
ids = load_ids(queryable, Enum.max(ids), opts)
query_batches(queryable, opts, callback, ids)
end
defp load_ids(queryable, max_id, opts) do
id_field = Keyword.get(opts, :id_field, :id)
batch_size = Keyword.get(opts, :batch_size, 1000)
queryable
|> exclude(:preload)
|> exclude(:order_by)
|> order_by(asc: ^id_field)
|> where([m], field(m, ^id_field) > ^max_id)
|> select([m], field(m, ^id_field))
|> limit(^batch_size)
|> Repo.all()
end
end


@@ -7,11 +7,11 @@ defmodule Philomena.Comments do
alias Ecto.Multi
alias Philomena.Repo
-alias Philomena.Elasticsearch
+alias PhilomenaQuery.Search
alias Philomena.Reports.Report
alias Philomena.UserStatistics
alias Philomena.Comments.Comment
-alias Philomena.Comments.ElasticsearchIndex, as: CommentIndex
+alias Philomena.Comments.SearchIndex, as: CommentIndex
alias Philomena.IndexWorker
alias Philomena.Images.Image
alias Philomena.Images
@@ -265,7 +265,7 @@ defmodule Philomena.Comments do
def user_name_reindex(old_name, new_name) do
data = CommentIndex.user_name_update_by_query(old_name, new_name)
-Elasticsearch.update_by_query(Comment, data.query, data.set_replacements, data.replacements)
+Search.update_by_query(Comment, data.query, data.set_replacements, data.replacements)
end
def reindex_comment(%Comment{} = comment) do
@@ -288,6 +288,6 @@ defmodule Philomena.Comments do
Comment
|> preload(^indexing_preloads())
|> where([c], field(c, ^column) in ^condition)
-|> Elasticsearch.reindex(Comment)
+|> Search.reindex(Comment)
end
end

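The username-rename path is the same in every context: the index module builds an update-by-query payload and `Search.update_by_query/4` ships it to the search engine. A sketch of the shape `user_name_update_by_query/2` is expected to return, inferred from how `data.query`, `data.set_replacements`, and `data.replacements` are consumed above; the field names below are illustrative only:

```
# Hypothetical payload built by CommentIndex.user_name_update_by_query(old_name, new_name)
old_name = "old_username"
new_name = "new_username"

%{
  # selects the documents to touch
  query: %{term: %{author: old_name}},
  # values rewritten inside array ("set") fields
  set_replacements: [],
  # values rewritten in standalone fields
  replacements: [%{path: ["author"], old: old_name, new: new_name}]
}
```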

@@ -1,5 +1,5 @@
defmodule Philomena.Comments.Query do
-alias Philomena.Search.Parser
+alias PhilomenaQuery.Parse.Parser
defp user_id_transform(_ctx, data) do
case Integer.parse(data) do


@@ -1,5 +1,5 @@
-defmodule Philomena.Comments.ElasticsearchIndex do
-@behaviour Philomena.ElasticsearchIndex
+defmodule Philomena.Comments.SearchIndex do
+@behaviour PhilomenaQuery.SearchIndex
@impl true
def index_name do

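The index modules are renamed mechanically: each `*.ElasticsearchIndex` becomes `*.SearchIndex` and declares `@behaviour PhilomenaQuery.SearchIndex`. A rough sketch of such an implementation, limited to the callbacks exercised elsewhere in this diff (`index_name/0`, `mapping/0`, `as_json/1`); the index name and field list are illustrative, not copied from the real comment index:

```
defmodule Philomena.Comments.SearchIndex do
  @behaviour PhilomenaQuery.SearchIndex

  @impl true
  def index_name, do: "comments"

  @impl true
  def mapping do
    # Returned map is passed to create_index!/1; update_mapping!/1 reads mappings.properties.
    %{mappings: %{properties: %{id: %{type: "integer"}, body: %{type: "text"}}}}
  end

  @impl true
  def as_json(comment) do
    # Document submitted by index_document/2 and reindex/3; it must carry an id.
    %{id: comment.id, body: comment.body}
  end
end
```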

@@ -1,294 +0,0 @@
defmodule Philomena.Elasticsearch do
alias Philomena.Batch
alias Philomena.Repo
require Logger
import Ecto.Query
import Elastix.HTTP
alias Philomena.Comments.Comment
alias Philomena.Galleries.Gallery
alias Philomena.Images.Image
alias Philomena.Posts.Post
alias Philomena.Reports.Report
alias Philomena.Tags.Tag
alias Philomena.Filters.Filter
alias Philomena.Comments.ElasticsearchIndex, as: CommentIndex
alias Philomena.Galleries.ElasticsearchIndex, as: GalleryIndex
alias Philomena.Images.ElasticsearchIndex, as: ImageIndex
alias Philomena.Posts.ElasticsearchIndex, as: PostIndex
alias Philomena.Reports.ElasticsearchIndex, as: ReportIndex
alias Philomena.Tags.ElasticsearchIndex, as: TagIndex
alias Philomena.Filters.ElasticsearchIndex, as: FilterIndex
defp index_for(Comment), do: CommentIndex
defp index_for(Gallery), do: GalleryIndex
defp index_for(Image), do: ImageIndex
defp index_for(Post), do: PostIndex
defp index_for(Report), do: ReportIndex
defp index_for(Tag), do: TagIndex
defp index_for(Filter), do: FilterIndex
defp elastic_url do
Application.get_env(:philomena, :opensearch_url)
end
def create_index!(module) do
index = index_for(module)
Elastix.Index.create(
elastic_url(),
index.index_name(),
index.mapping()
)
end
def delete_index!(module) do
index = index_for(module)
Elastix.Index.delete(elastic_url(), index.index_name())
end
def update_mapping!(module) do
index = index_for(module)
index_name = index.index_name()
mapping = index.mapping().mappings.properties
Elastix.Mapping.put(elastic_url(), index_name, "_doc", %{properties: mapping},
include_type_name: true
)
end
def index_document(doc, module) do
index = index_for(module)
data = index.as_json(doc)
Elastix.Document.index(
elastic_url(),
index.index_name(),
"_doc",
data.id,
data
)
end
def delete_document(id, module) do
index = index_for(module)
Elastix.Document.delete(
elastic_url(),
index.index_name(),
"_doc",
id
)
end
def reindex(queryable, module, opts \\ []) do
index = index_for(module)
Batch.record_batches(queryable, opts, fn records ->
lines =
Enum.flat_map(records, fn record ->
doc = index.as_json(record)
[
%{index: %{_index: index.index_name(), _id: doc.id}},
doc
]
end)
Elastix.Bulk.post(
elastic_url(),
lines,
index: index.index_name(),
httpoison_options: [timeout: 30_000]
)
end)
end
def update_by_query(module, query_body, set_replacements, replacements) do
index = index_for(module)
url =
elastic_url()
|> prepare_url([index.index_name(), "_update_by_query"])
|> append_query_string(%{conflicts: "proceed", wait_for_completion: "false"})
# Elasticsearch "Painless" scripting language
script = """
// Replace values in "sets" (arrays in the source document)
for (int i = 0; i < params.set_replacements.length; ++i) {
def replacement = params.set_replacements[i];
def path = replacement.path;
def old_value = replacement.old;
def new_value = replacement.new;
def reference = ctx._source;
for (int j = 0; j < path.length; ++j) {
reference = reference[path[j]];
}
for (int j = 0; j < reference.length; ++j) {
if (reference[j].equals(old_value)) {
reference[j] = new_value;
}
}
}
// Replace values in standalone fields
for (int i = 0; i < params.replacements.length; ++i) {
def replacement = params.replacements[i];
def path = replacement.path;
def old_value = replacement.old;
def new_value = replacement.new;
def reference = ctx._source;
// A little bit more complicated: go up to the last one before it
// so that the value can actually be replaced
for (int j = 0; j < path.length - 1; ++j) {
reference = reference[path[j]];
}
if (reference[path[path.length - 1]] != null && reference[path[path.length - 1]].equals(old_value)) {
reference[path[path.length - 1]] = new_value;
}
}
"""
body =
Jason.encode!(%{
script: %{
source: script,
params: %{
set_replacements: set_replacements,
replacements: replacements
}
},
query: query_body
})
{:ok, %{status_code: 200}} = Elastix.HTTP.post(url, body)
end
def search(module, query_body) do
index = index_for(module)
{:ok, %{body: results, status_code: 200}} =
Elastix.Search.search(
elastic_url(),
index.index_name(),
[],
query_body
)
results
end
def msearch(definitions) do
msearch_body =
Enum.flat_map(definitions, fn def ->
[
%{index: index_for(def.module).index_name()},
def.body
]
end)
{:ok, %{body: results, status_code: 200}} =
Elastix.Search.search(
elastic_url(),
"_all",
[],
msearch_body
)
results["responses"]
end
def search_definition(module, elastic_query, pagination_params \\ %{}) do
page_number = pagination_params[:page_number] || 1
page_size = pagination_params[:page_size] || 25
elastic_query =
Map.merge(elastic_query, %{
from: (page_number - 1) * page_size,
size: page_size,
_source: false,
track_total_hits: true
})
%{
module: module,
body: elastic_query,
page_number: page_number,
page_size: page_size
}
end
defp process_results(results, definition) do
time = results["took"]
count = results["hits"]["total"]["value"]
entries = Enum.map(results["hits"]["hits"], &{String.to_integer(&1["_id"]), &1})
Logger.debug("[Elasticsearch] Query took #{time}ms")
Logger.debug("[Elasticsearch] #{Jason.encode!(definition.body)}")
%Scrivener.Page{
entries: entries,
page_number: definition.page_number,
page_size: definition.page_size,
total_entries: count,
total_pages: div(count + definition.page_size - 1, definition.page_size)
}
end
def search_results(definition) do
process_results(search(definition.module, definition.body), definition)
end
def msearch_results(definitions) do
Enum.map(Enum.zip(msearch(definitions), definitions), fn {result, definition} ->
process_results(result, definition)
end)
end
defp load_records_from_results(results, ecto_queries) do
Enum.map(Enum.zip(results, ecto_queries), fn {page, ecto_query} ->
{ids, hits} = Enum.unzip(page.entries)
records =
ecto_query
|> where([m], m.id in ^ids)
|> Repo.all()
|> Enum.sort_by(&Enum.find_index(ids, fn el -> el == &1.id end))
%{page | entries: Enum.zip(records, hits)}
end)
end
def search_records_with_hits(definition, ecto_query) do
[page] = load_records_from_results([search_results(definition)], [ecto_query])
page
end
def msearch_records_with_hits(definitions, ecto_queries) do
load_records_from_results(msearch_results(definitions), ecto_queries)
end
def search_records(definition, ecto_query) do
page = search_records_with_hits(definition, ecto_query)
{records, _hits} = Enum.unzip(page.entries)
%{page | entries: records}
end
def msearch_records(definitions, ecto_queries) do
Enum.map(load_records_from_results(msearch_results(definitions), ecto_queries), fn page ->
{records, _hits} = Enum.unzip(page.entries)
%{page | entries: records}
end)
end
end


@@ -7,8 +7,8 @@ defmodule Philomena.Filters do
alias Philomena.Repo
alias Philomena.Filters.Filter
-alias Philomena.Elasticsearch
-alias Philomena.Filters.ElasticsearchIndex, as: FilterIndex
+alias PhilomenaQuery.Search
+alias Philomena.Filters.SearchIndex, as: FilterIndex
alias Philomena.IndexWorker
@doc """
@@ -223,7 +223,7 @@ defmodule Philomena.Filters do
def user_name_reindex(old_name, new_name) do
data = FilterIndex.user_name_update_by_query(old_name, new_name)
-Elasticsearch.update_by_query(Filter, data.query, data.set_replacements, data.replacements)
+Search.update_by_query(Filter, data.query, data.set_replacements, data.replacements)
end
def reindex_filter(%Filter{} = filter) do
@@ -233,7 +233,7 @@ defmodule Philomena.Filters do
end
def unindex_filter(%Filter{} = filter) do
-Elasticsearch.delete_document(filter.id, Filter)
+Search.delete_document(filter.id, Filter)
filter
end
@@ -246,6 +246,6 @@ defmodule Philomena.Filters do
Filter
|> preload(^indexing_preloads())
|> where([f], field(f, ^column) in ^condition)
-|> Elasticsearch.reindex(Filter)
+|> Search.reindex(Filter)
end
end


@@ -1,5 +1,5 @@
defmodule Philomena.Filters.Query do
-alias Philomena.Search.Parser
+alias PhilomenaQuery.Parse.Parser
defp user_my_transform(%{user: %{id: id}}, "filters"),
do: {:ok, %{term: %{user_id: id}}}


@@ -1,5 +1,5 @@
-defmodule Philomena.Filters.ElasticsearchIndex do
-@behaviour Philomena.ElasticsearchIndex
+defmodule Philomena.Filters.SearchIndex do
+@behaviour PhilomenaQuery.SearchIndex
@impl true
def index_name do


@@ -7,10 +7,10 @@ defmodule Philomena.Galleries do
alias Ecto.Multi
alias Philomena.Repo
-alias Philomena.Elasticsearch
+alias PhilomenaQuery.Search
alias Philomena.Galleries.Gallery
alias Philomena.Galleries.Interaction
-alias Philomena.Galleries.ElasticsearchIndex, as: GalleryIndex
+alias Philomena.Galleries.SearchIndex, as: GalleryIndex
alias Philomena.IndexWorker
alias Philomena.GalleryReorderWorker
alias Philomena.Notifications
@@ -135,7 +135,7 @@ defmodule Philomena.Galleries do
def user_name_reindex(old_name, new_name) do
data = GalleryIndex.user_name_update_by_query(old_name, new_name)
-Elasticsearch.update_by_query(Gallery, data.query, data.set_replacements, data.replacements)
+Search.update_by_query(Gallery, data.query, data.set_replacements, data.replacements)
end
defp reindex_after_update({:ok, gallery}) do
@@ -155,7 +155,7 @@ defmodule Philomena.Galleries do
end
def unindex_gallery(%Gallery{} = gallery) do
-Elasticsearch.delete_document(gallery.id, Gallery)
+Search.delete_document(gallery.id, Gallery)
gallery
end
@@ -168,7 +168,7 @@ defmodule Philomena.Galleries do
Gallery
|> preload(^indexing_preloads())
|> where([g], field(g, ^column) in ^condition)
-|> Elasticsearch.reindex(Gallery)
+|> Search.reindex(Gallery)
end
def add_image_to_gallery(gallery, image) do


@@ -1,5 +1,5 @@
defmodule Philomena.Galleries.Query do
-alias Philomena.Search.Parser
+alias PhilomenaQuery.Parse.Parser
defp fields do
[


@@ -1,5 +1,5 @@
-defmodule Philomena.Galleries.ElasticsearchIndex do
-@behaviour Philomena.ElasticsearchIndex
+defmodule Philomena.Galleries.SearchIndex do
+@behaviour PhilomenaQuery.SearchIndex
@impl true
def index_name do


@@ -9,7 +9,7 @@ defmodule Philomena.Images do
alias Ecto.Multi
alias Philomena.Repo
-alias Philomena.Elasticsearch
+alias PhilomenaQuery.Search
alias Philomena.ThumbnailWorker
alias Philomena.ImagePurgeWorker
alias Philomena.DuplicateReports.DuplicateReport
@@ -18,7 +18,7 @@ defmodule Philomena.Images do
alias Philomena.Images.Tagging
alias Philomena.Images.Thumbnailer
alias Philomena.Images.Source
-alias Philomena.Images.ElasticsearchIndex, as: ImageIndex
+alias Philomena.Images.SearchIndex, as: ImageIndex
alias Philomena.IndexWorker
alias Philomena.ImageFeatures.ImageFeature
alias Philomena.SourceChanges.SourceChange
@@ -812,7 +812,7 @@ defmodule Philomena.Images do
def user_name_reindex(old_name, new_name) do
data = ImageIndex.user_name_update_by_query(old_name, new_name)
-Elasticsearch.update_by_query(Image, data.query, data.set_replacements, data.replacements)
+Search.update_by_query(Image, data.query, data.set_replacements, data.replacements)
end
def reindex_image(%Image{} = image) do
@@ -845,7 +845,7 @@ defmodule Philomena.Images do
Image
|> preload(^indexing_preloads())
|> where([i], field(i, ^column) in ^condition)
-|> Elasticsearch.reindex(Image)
+|> Search.reindex(Image)
end
def purge_files(image, hidden_key) do


@@ -1,5 +1,5 @@
defmodule Philomena.Images.Query do
-alias Philomena.Search.Parser
+alias PhilomenaQuery.Parse.Parser
alias Philomena.Repo
defp gallery_id_transform(_ctx, value) do
@@ -60,7 +60,7 @@ defmodule Philomena.Images.Query do
do: {:error, "Unknown `my' value."}
defp invalid_filter_guard(ctx, search_string) do
-case parse(user_fields(), ctx, Philomena.Search.String.normalize(search_string)) do
+case parse(user_fields(), ctx, PhilomenaQuery.Parse.String.normalize(search_string)) do
{:ok, query} -> query
_error -> %{match_all: %{}}
end


@@ -1,5 +1,5 @@
-defmodule Philomena.Images.ElasticsearchIndex do
-@behaviour Philomena.ElasticsearchIndex
+defmodule Philomena.Images.SearchIndex do
+@behaviour PhilomenaQuery.SearchIndex
@impl true
def index_name do


@@ -7,12 +7,12 @@ defmodule Philomena.Posts do
alias Ecto.Multi
alias Philomena.Repo
-alias Philomena.Elasticsearch
+alias PhilomenaQuery.Search
alias Philomena.Topics.Topic
alias Philomena.Topics
alias Philomena.UserStatistics
alias Philomena.Posts.Post
-alias Philomena.Posts.ElasticsearchIndex, as: PostIndex
+alias Philomena.Posts.SearchIndex, as: PostIndex
alias Philomena.IndexWorker
alias Philomena.Forums.Forum
alias Philomena.Notifications
@@ -309,7 +309,7 @@ defmodule Philomena.Posts do
def user_name_reindex(old_name, new_name) do
data = PostIndex.user_name_update_by_query(old_name, new_name)
-Elasticsearch.update_by_query(Post, data.query, data.set_replacements, data.replacements)
+Search.update_by_query(Post, data.query, data.set_replacements, data.replacements)
end
defp reindex_after_update({:ok, post}) do
@@ -336,6 +336,6 @@ defmodule Philomena.Posts do
Post
|> preload(^indexing_preloads())
|> where([p], field(p, ^column) in ^condition)
-|> Elasticsearch.reindex(Post)
+|> Search.reindex(Post)
end
end


@@ -1,5 +1,5 @@
defmodule Philomena.Posts.Query do
-alias Philomena.Search.Parser
+alias PhilomenaQuery.Parse.Parser
defp user_id_transform(_ctx, data) do
case Integer.parse(data) do


@@ -1,5 +1,5 @@
-defmodule Philomena.Posts.ElasticsearchIndex do
-@behaviour Philomena.ElasticsearchIndex
+defmodule Philomena.Posts.SearchIndex do
+@behaviour PhilomenaQuery.SearchIndex
@impl true
def index_name do


@@ -6,9 +6,9 @@ defmodule Philomena.Reports do
import Ecto.Query, warn: false
alias Philomena.Repo
-alias Philomena.Elasticsearch
+alias PhilomenaQuery.Search
alias Philomena.Reports.Report
-alias Philomena.Reports.ElasticsearchIndex, as: ReportIndex
+alias Philomena.Reports.SearchIndex, as: ReportIndex
alias Philomena.IndexWorker
alias Philomena.Polymorphic
@@ -152,7 +152,7 @@ defmodule Philomena.Reports do
def user_name_reindex(old_name, new_name) do
data = ReportIndex.user_name_update_by_query(old_name, new_name)
-Elasticsearch.update_by_query(Report, data.query, data.set_replacements, data.replacements)
+Search.update_by_query(Report, data.query, data.set_replacements, data.replacements)
end
defp reindex_after_update({:ok, report}) do
@@ -183,7 +183,7 @@ defmodule Philomena.Reports do
|> preload([:user, :admin])
|> Repo.all()
|> Polymorphic.load_polymorphic(reportable: [reportable_id: :reportable_type])
-|> Enum.map(&Elasticsearch.index_document(&1, Report))
+|> Enum.map(&Search.index_document(&1, Report))
end
def count_reports(user) do


@@ -1,5 +1,5 @@
defmodule Philomena.Reports.Query do
-alias Philomena.Search.Parser
+alias PhilomenaQuery.Parse.Parser
defp fields do
[


@@ -1,5 +1,5 @@
-defmodule Philomena.Reports.ElasticsearchIndex do
-@behaviour Philomena.ElasticsearchIndex
+defmodule Philomena.Reports.SearchIndex do
+@behaviour PhilomenaQuery.SearchIndex
@impl true
def index_name do


@@ -1,6 +1,6 @@
defmodule Philomena.Schema.Search do
alias Philomena.Images.Query
-alias Philomena.Search.String
+alias PhilomenaQuery.Parse.String
import Ecto.Changeset
def validate_search(changeset, field, user, watched \\ false) do


@@ -1,5 +1,5 @@
defmodule Philomena.Schema.Time do
-alias Philomena.RelativeDate
+alias PhilomenaQuery.RelativeDate
import Ecto.Changeset
def assign_time(changeset, field, target_field) do


@@ -1,13 +0,0 @@
defmodule Philomena.Search.String do
def normalize(nil) do
""
end
def normalize(str) do
str
|> String.replace("\r", "")
|> String.split("\n", trim: true)
|> Enum.map(fn s -> "(#{s})" end)
|> Enum.join(" || ")
end
end


@@ -0,0 +1,55 @@
defmodule Philomena.SearchPolicy do
alias Philomena.Comments.Comment
alias Philomena.Galleries.Gallery
alias Philomena.Images.Image
alias Philomena.Posts.Post
alias Philomena.Reports.Report
alias Philomena.Tags.Tag
alias Philomena.Filters.Filter
alias Philomena.Comments.SearchIndex, as: CommentIndex
alias Philomena.Galleries.SearchIndex, as: GalleryIndex
alias Philomena.Images.SearchIndex, as: ImageIndex
alias Philomena.Posts.SearchIndex, as: PostIndex
alias Philomena.Reports.SearchIndex, as: ReportIndex
alias Philomena.Tags.SearchIndex, as: TagIndex
alias Philomena.Filters.SearchIndex, as: FilterIndex
@type schema_module :: Comment | Gallery | Image | Post | Report | Tag | Filter
@doc """
For a given schema module (e.g. `m:Philomena.Images.Image`), return the associated module
which implements the `SearchIndex` behaviour (e.g. `m:Philomena.Images.SearchIndex`).
## Example
iex> SearchPolicy.index_for(Gallery)
Philomena.Galleries.SearchIndex
iex> SearchPolicy.index_for(:foo)
** (FunctionClauseError) no function clause matching in Philomena.SearchPolicy.index_for/1
"""
@spec index_for(schema_module()) :: module()
def index_for(Comment), do: CommentIndex
def index_for(Gallery), do: GalleryIndex
def index_for(Image), do: ImageIndex
def index_for(Post), do: PostIndex
def index_for(Report), do: ReportIndex
def index_for(Tag), do: TagIndex
def index_for(Filter), do: FilterIndex
@doc """
Return the path used to interact with the search engine.
## Example
iex> SearchPolicy.opensearch_url()
"http://localhost:9200"
"""
@spec opensearch_url :: String.t()
def opensearch_url do
Application.get_env(:philomena, :opensearch_url)
end
end

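`SearchPolicy` is now the one place that knows which schema maps to which index module and where the search engine lives. Registering a hypothetical new schema would amount to one more alias and one more `index_for/1` clause, sketched below with invented `Artists` modules:

```
# Hypothetical addition inside Philomena.SearchPolicy
alias Philomena.Artists.Artist
alias Philomena.Artists.SearchIndex, as: ArtistIndex

# Extend the schema_module type union accordingly, then add the clause:
# @type schema_module :: Comment | Gallery | Image | Post | Report | Tag | Filter | Artist
def index_for(Artist), do: ArtistIndex
```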

@@ -6,7 +6,7 @@ defmodule Philomena.Tags do
import Ecto.Query, warn: false
alias Philomena.Repo
-alias Philomena.Elasticsearch
+alias PhilomenaQuery.Search
alias Philomena.IndexWorker
alias Philomena.TagAliasWorker
alias Philomena.TagUnaliasWorker
@@ -194,12 +194,12 @@ defmodule Philomena.Tags do
{:ok, tag} = Repo.delete(tag)
-Elasticsearch.delete_document(tag.id, Tag)
+Search.delete_document(tag.id, Tag)
Image
|> where([i], i.id in ^image_ids)
|> preload(^Images.indexing_preloads())
-|> Elasticsearch.reindex(Image)
+|> Search.reindex(Image)
end
def alias_tag(%Tag{} = tag, attrs) do
@@ -301,13 +301,13 @@ defmodule Philomena.Tags do
|> join(:inner, [i], _ in assoc(i, :tags))
|> where([_i, t], t.id == ^tag.id)
|> preload(^Images.indexing_preloads())
-|> Elasticsearch.reindex(Image)
+|> Search.reindex(Image)
Filter
|> where([f], fragment("? @> ARRAY[?]::integer[]", f.hidden_tag_ids, ^tag.id))
|> or_where([f], fragment("? @> ARRAY[?]::integer[]", f.spoilered_tag_ids, ^tag.id))
|> preload(^Filters.indexing_preloads())
-|> Elasticsearch.reindex(Filter)
+|> Search.reindex(Filter)
end
def unalias_tag(%Tag{} = tag) do
@@ -416,7 +416,7 @@ defmodule Philomena.Tags do
Tag
|> preload(^indexing_preloads())
|> where([t], field(t, ^column) in ^condition)
-|> Elasticsearch.reindex(Tag)
+|> Search.reindex(Tag)
end
alias Philomena.Tags.Implication


@@ -1,5 +1,5 @@
defmodule Philomena.Tags.Query do
-alias Philomena.Search.Parser
+alias PhilomenaQuery.Parse.Parser
defp fields do
[


@@ -1,5 +1,5 @@
-defmodule Philomena.Tags.ElasticsearchIndex do
-@behaviour Philomena.ElasticsearchIndex
+defmodule Philomena.Tags.SearchIndex do
+@behaviour PhilomenaQuery.SearchIndex
@impl true
def index_name do


@@ -1,6 +1,6 @@
defmodule Philomena.UserDownvoteWipe do
-alias Philomena.Batch
-alias Philomena.Elasticsearch
+alias PhilomenaQuery.Batch
+alias PhilomenaQuery.Search
alias Philomena.Users
alias Philomena.Users.User
alias Philomena.Images.Image
@@ -63,7 +63,7 @@ defmodule Philomena.UserDownvoteWipe do
Image
|> where([i], i.id in ^image_ids)
|> preload(^Images.indexing_preloads())
-|> Elasticsearch.reindex(Image)
+|> Search.reindex(Image)
# allow time for indexing to catch up
:timer.sleep(:timer.seconds(10))


@@ -15,7 +15,7 @@ defmodule Philomena.IndexWorker do
# Image
# |> preload(^indexing_preloads())
# |> where([i], field(i, ^column) in ^condition)
-# |> Elasticsearch.reindex(Image)
+# |> Search.reindex(Image)
# end
#
def perform(module, column, condition) do


@@ -1,7 +1,7 @@
defmodule Philomena.TagChangeRevertWorker do
alias Philomena.TagChanges.TagChange
alias Philomena.TagChanges
-alias Philomena.Batch
+alias PhilomenaQuery.Batch
alias Philomena.Repo
import Ecto.Query


@@ -0,0 +1,111 @@
defmodule PhilomenaQuery.Batch do
@moduledoc """
Locking-reduced database batch operations.
These operations are non-transactional by their very nature. This prevents inadvertent
downtimes due to blocking, but can result in consistency errors in the database,
and so should be used sparingly.
They are best suited for when large numbers of rows can be expected to be processed,
as doing so may otherwise result in Ecto timing out the query.
"""
alias Philomena.Repo
import Ecto.Query
@type queryable :: any()
@type batch_size :: {:batch_size, integer()}
@type id_field :: {:id_field, atom()}
@type batch_options :: [batch_size() | id_field()]
@type record_batch_callback :: ([struct()] -> any())
@type query_batch_callback :: ([Ecto.Query.t()] -> any())
@doc """
Execute a callback with lists of schema structures on a queryable,
using batches to avoid locking.
Valid options:
* `batch_size` (integer) - the number of records to load per batch
* `id_field` (atom) - the name of the field containing the ID
## Example
queryable = from i in Image, where: i.image_width >= 1920
cb = fn images ->
Enum.each(images, &IO.inspect(&1.id))
end
PhilomenaQuery.Batch.record_batches(queryable, cb)
"""
@spec record_batches(queryable(), batch_options(), record_batch_callback()) :: []
def record_batches(queryable, opts \\ [], callback) do
query_batches(queryable, opts, &callback.(Repo.all(&1)))
end
@doc """
Execute a callback with bulk queries on a queryable, using batches to avoid locking.
Valid options:
* `batch_size` (integer) - the number of records to load per batch
* `id_field` (atom) - the name of the field containing the ID
> #### Info {: .info}
>
> If you are looking to receive schema structures (e.g., you are querying for `Image`s,
> and you want to receive `Image` objects), then use `record_batches/3` instead.
An `m:Ecto.Query` which selects all IDs in the current batch is passed into the callback
during each invocation.
## Example
queryable = from ui in ImageVote, where: ui.user_id == 1234
opts = [id_field: :image_id]
cb = fn bulk_query ->
Repo.delete_all(bulk_query)
end
PhilomenaQuery.Batch.query_batches(queryable, opts, cb)
"""
@spec query_batches(queryable(), batch_options(), query_batch_callback()) :: []
def query_batches(queryable, opts \\ [], callback) do
ids = load_ids(queryable, -1, opts)
query_batches(queryable, opts, callback, ids)
end
defp query_batches(_queryable, _opts, _callback, []), do: []
defp query_batches(queryable, opts, callback, ids) do
id_field = Keyword.get(opts, :id_field, :id)
queryable
|> where([m], field(m, ^id_field) in ^ids)
|> callback.()
ids = load_ids(queryable, Enum.max(ids), opts)
query_batches(queryable, opts, callback, ids)
end
defp load_ids(queryable, max_id, opts) do
id_field = Keyword.get(opts, :id_field, :id)
batch_size = Keyword.get(opts, :batch_size, 1000)
queryable
|> exclude(:preload)
|> exclude(:order_by)
|> order_by(asc: ^id_field)
|> where([m], field(m, ^id_field) > ^max_id)
|> select([m], field(m, ^id_field))
|> limit(^batch_size)
|> Repo.all()
end
end


@@ -1,4 +1,6 @@
-defmodule Philomena.Search.BoolParser do
+defmodule PhilomenaQuery.Parse.BoolParser do
+@moduledoc false
import NimbleParsec
space =


@@ -1,4 +1,6 @@
-defmodule Philomena.Search.DateParser do
+defmodule PhilomenaQuery.Parse.DateParser do
+@moduledoc false
import NimbleParsec
@dialyzer [:no_match, :no_unused]
@@ -100,9 +102,9 @@ defmodule Philomena.Search.DateParser do
end
defp relative_datetime(_rest, [count, scale], context, _line, _offset) do
-millenium_seconds = 31_536_000_000
-case count * scale <= millenium_seconds do
+millennium_seconds = 31_536_000_000
+case count * scale <= millennium_seconds do
true ->
now = DateTime.utc_now()
@@ -113,7 +115,7 @@ defmodule Philomena.Search.DateParser do
_false ->
{:error,
-"invalid date format in input; requested time #{count * scale} seconds is over a millenium ago"}
+"invalid date format in input; requested time #{count * scale} seconds is over a millennium ago"}
end
end


@@ -1,6 +1,25 @@
-defmodule Philomena.Search.Evaluator do
+defmodule PhilomenaQuery.Parse.Evaluator do
@moduledoc """
Tools to evaluate whether a search query matches a document.
"""
# TODO: rethink the necessity of this module.
-# Can we do this in elasticsearch instead?
+# Can we do this in the search engine instead?
@doc """
Check whether a hit is matched by a query.
- `doc` - a document definition. This could be returned by the index's `as_json/1` function.
- `query` - a search query
## Example
iex> Evaluator.hits?(def, %{term: %{tags: "safe"}})
true
"""
@spec hits?(map(), map()) :: boolean()
def hits?(doc, query)
def hits?(doc, %{bool: bool_query}) do
must(doc, bool_query[:must]) and
@@ -101,7 +120,7 @@ defmodule Philomena.Search.Evaluator do
defp atomify(atom) when is_atom(atom), do: atom
defp atomify(string) when is_binary(string), do: String.to_existing_atom(string)
-def levenshtein(s1, s2) do
+defp levenshtein(s1, s2) do
{dist, _lookup} = levenshtein_lookup(s1, s2, %{}, 0)
dist


@@ -1,8 +1,10 @@
-defmodule Philomena.Search.FloatParser do
+defmodule PhilomenaQuery.Parse.FloatParser do
+@moduledoc false
import NimbleParsec
-defp to_number(input), do: Philomena.Search.Helpers.to_number(input)
-defp range(input), do: Philomena.Search.Helpers.range(input)
+defp to_number(input), do: PhilomenaQuery.Parse.Helpers.to_number(input)
+defp range(input), do: PhilomenaQuery.Parse.Helpers.range(input)
space =
choice([string(" "), string("\t"), string("\n"), string("\r"), string("\v"), string("\f")])


@@ -1,4 +1,6 @@
-defmodule Philomena.Search.Helpers do
+defmodule PhilomenaQuery.Parse.Helpers do
+@moduledoc false
# Apparently, it's too hard for the standard library to to parse a number
# as a float if it doesn't contain a decimal point. WTF
def to_number(term) do


@@ -1,8 +1,10 @@
-defmodule Philomena.Search.IntParser do
+defmodule PhilomenaQuery.Parse.IntParser do
+@moduledoc false
import NimbleParsec
-defp to_int(input), do: Philomena.Search.Helpers.to_int(input)
-defp range(input), do: Philomena.Search.Helpers.range(input)
+defp to_int(input), do: PhilomenaQuery.Parse.Helpers.to_int(input)
+defp range(input), do: PhilomenaQuery.Parse.Helpers.range(input)
space =
choice([string(" "), string("\t"), string("\n"), string("\r"), string("\v"), string("\f")])


@@ -1,4 +1,6 @@
-defmodule Philomena.Search.IpParser do
+defmodule PhilomenaQuery.Parse.IpParser do
+@moduledoc false
import NimbleParsec
ipv4_octet =


@@ -1,7 +1,9 @@
-defmodule Philomena.Search.Lexer do
+defmodule PhilomenaQuery.Parse.Lexer do
+@moduledoc false
import NimbleParsec
-defp to_number(input), do: Philomena.Search.Helpers.to_number(input)
+defp to_number(input), do: PhilomenaQuery.Parse.Helpers.to_number(input)
space =
choice([string(" "), string("\t"), string("\n"), string("\r"), string("\v"), string("\f")])


@@ -1,8 +1,10 @@
-defmodule Philomena.Search.LiteralParser do
+defmodule PhilomenaQuery.Parse.LiteralParser do
+@moduledoc false
import NimbleParsec
@dialyzer [:no_match, :no_unused]
-defp to_number(input), do: Philomena.Search.Helpers.to_number(input)
+defp to_number(input), do: PhilomenaQuery.Parse.Helpers.to_number(input)
float =
ascii_string([?0..?9], min: 1)


@@ -1,5 +1,7 @@
-defmodule Philomena.Search.NgramParser do
-alias Philomena.Search.LiteralParser
+defmodule PhilomenaQuery.Parse.NgramParser do
+@moduledoc false
+alias PhilomenaQuery.Parse.LiteralParser
# Dummy stub. Used for convenient parser implementation.
def parse(input), do: LiteralParser.parse(input)


@@ -1,5 +1,34 @@
-defmodule Philomena.Search.Parser do
-alias Philomena.Search.{
+defmodule PhilomenaQuery.Parse.Parser do
+@moduledoc """
A search language for safely evaluating user-input queries.
The query language supports the following features:
- Disjunction (OR/||)
- Conjunction (AND/&&/,)
- Negation (NOT/-/!)
- Expression boosting
- Parenthetical grouping
Several types of terms are supported:
- Booleans
- Dates (absolute and relative, time points and ranges)
- Floats
- Integers
- IP Addresses
- Literal text
- Stemmed text
Specific terms can support the following features:
- Range queries (.lte/.lt/.gte/.gt)
- Fuzzing (~0.5)
- Wildcarding (*?)
- CIDR masks (/27)
The rich search expression grammar is arguably a defining feature of Philomena, and its
feature set makes it stand out in comparison to traditional boorus.
"""
alias PhilomenaQuery.Parse.{
BoolParser,
DateParser,
FloatParser,
@@ -12,6 +41,31 @@ defmodule Philomena.Search.Parser do
TermRangeParser
}
@type context :: any()
@type query :: map()
@type default_field_type :: :term | :ngram
@type transform_result :: {:ok, query()} | {:error, String.t()}
@type transform :: (context, String.t() -> transform_result())
@type t :: %__MODULE__{
default_field: {String.t(), default_field_type()},
bool_fields: [String.t()],
date_fields: [String.t()],
float_fields: [String.t()],
int_fields: [String.t()],
ip_fields: [String.t()],
literal_fields: [String.t()],
ngram_fields: [String.t()],
custom_fields: [String.t()],
transforms: %{String.t() => transform()},
aliases: %{String.t() => String.t()},
no_downcase_fields: [String.t()],
__fields__: map(),
__data__: context()
}
defstruct [
:default_field,
bool_fields: [],
@@ -31,6 +85,37 @@ defmodule Philomena.Search.Parser do
@max_clause_count 512
@doc """
Creates a `Parser` suitable for safely parsing user-input queries.
Fields refer to attributes of the indexed document which will be searchable with
`m:PhilomenaQuery.Search`.
Available options:
- `bool_fields` - a list of field names parsed as booleans
- `float_fields` - a list of field names parsed as floats
- `int_fields` - a list of field names parsed as integers
- `ip_fields` - a list of field names parsed as IP CIDR masks
- `literal_fields` - wildcardable fields which are searched as the exact value
- `ngram_fields` - wildcardable fields which are searched as stemmed values
- `custom_fields` - fields which do not exist on the document and are created by a callback
- `transforms` - a map of custom field names to transform functions
- `aliases` - a map of field names to the names they should have in the search engine
- `no_downcase_fields` - a list of field names which do not have string downcasing applied
## Example
options = [
bool_fields: ["hidden"],
custom_fields: ["example"],
transforms: %{"example" => fn _ctx, term -> %{term: %{term => "example"}} end},
aliases: %{"hidden" => "hidden_from_users"}
]
Parser.parser(options)
"""
@spec parser(keyword()) :: t()
def parser(options) do
parser = struct(Parser, options)
@@ -47,6 +132,34 @@ defmodule Philomena.Search.Parser do
%{parser | __fields__: Map.new(fields)}
end
@doc """
Parse the query into a definition suitable for the search engine.
The parser argument should have been created with a previous call to `parser/1`. When the
`context` argument is passed, it becomes the first argument to any transform functions defined
in the `transform` option.
## Example
iex> Parser.parse(parser, "safe")
{:ok, %{term: %{"namespaced_tags.name" => "safe"}}}
iex> Parser.parse(nil, "safe OR solo")
{:ok,
%{
bool: %{
should: [
%{term: %{"namespaced_tags.name" => "safe"}},
%{term: %{"namespaced_tags.name" => "solo"}}
]
}
}}
iex> Parser.parse(parser, ")")
{:error, "Imbalanced parentheses."}
"""
@spec parse(t(), String.t(), context()) :: {:ok, query()} | {:error, String.t()}
def parse(parser, input, context \\ nil)
# Empty search should emit a match_none.

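The per-schema `Query` modules touched in this diff only change their alias; they still build a parser from field lists and feed normalized user input to it. A condensed sketch of that usage with an invented `Widgets` schema and illustrative field names:

```
defmodule Philomena.Widgets.Query do
  alias PhilomenaQuery.Parse.Parser

  defp fields do
    [
      int_fields: ["id"],
      literal_fields: ["title"],
      date_fields: ["created_at"],
      default_field: {"title", :term}
    ]
  end

  # Returns {:ok, query_map} or {:error, message} suitable for the search engine.
  def parse(query_string) do
    fields()
    |> Parser.parser()
    |> Parser.parse(PhilomenaQuery.Parse.String.normalize(query_string))
  end
end
```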

@@ -0,0 +1,32 @@
defmodule PhilomenaQuery.Parse.String do
@moduledoc """
Search string normalization utilities.
"""
@doc """
Convert a multiline or empty search string into a single search string.
## Examples
iex> Search.String.normalize(nil)
""
iex> Search.String.normalize("foo\nbar")
"(foo) || (bar)"
"""
@spec normalize(String.t() | nil) :: String.t()
def normalize(str)
def normalize(nil) do
""
end
def normalize(str) do
str
|> String.replace("\r", "")
|> String.split("\n", trim: true)
|> Enum.map(fn s -> "(#{s})" end)
|> Enum.join(" || ")
end
end


@@ -1,6 +1,8 @@
-defmodule Philomena.Search.TermRangeParser do
-alias Philomena.Search.LiteralParser
-alias Philomena.Search.NgramParser
+defmodule PhilomenaQuery.Parse.TermRangeParser do
+@moduledoc false
+alias PhilomenaQuery.Parse.LiteralParser
+alias PhilomenaQuery.Parse.NgramParser
# Unfortunately, we can't use NimbleParsec here. It requires
# the compiler, and we're not in a macro environment.


@@ -1,4 +1,8 @@
-defmodule Philomena.RelativeDate do
+defmodule PhilomenaQuery.RelativeDate do
+@moduledoc """
+Relative date parsing, for strings like "a week ago" or "5 years from now".
+"""
import NimbleParsec
number_words =
@@ -72,6 +76,13 @@ defmodule Philomena.RelativeDate do
defparsecp(:relative_date, relative_date)
@doc """
Parse an absolute date in valid ISO 8601 format, or an English-language relative date.
See `parse_absolute/1` and `parse_relative/1` for examples of what may be accepted
by this function.
"""
@spec parse(String.t()) :: {:ok, DateTime.t()} | {:error, any()}
def parse(input) do
input =
input
@@ -87,6 +98,22 @@ defmodule Philomena.RelativeDate do
end
end
@doc """
Parse an absolute date, given in a valid ISO 8601 format.
## Example
iex> PhilomenaQuery.RelativeDate.parse_absolute("2024-01-01T00:00:00Z")
{:ok, ~U[2024-01-01 00:00:00Z]}
iex> PhilomenaQuery.RelativeDate.parse_absolute("2024-01-01T00:00:00-01:00")
{:ok, ~U[2024-01-01 01:00:00Z]}
iex> PhilomenaQuery.RelativeDate.parse_absolute("2024")
{:error, "Parse error"}
"""
@spec parse_absolute(String.t()) :: {:ok, DateTime.t()} | {:error, any()}
def parse_absolute(input) do
case DateTime.from_iso8601(input) do
{:ok, datetime, _offset} ->
@@ -97,6 +124,25 @@ defmodule Philomena.RelativeDate do
end
end
@doc """
Parse an English-language relative date. Accepts "moon" to mean 1000 years from now.
## Example
iex> PhilomenaQuery.RelativeDate.parse_relative("a year ago")
{:ok, ~U[2023-01-01 00:00:00Z]}
iex> PhilomenaQuery.RelativeDate.parse_relative("three days from now")
{:ok, ~U[2024-01-04 00:00:00Z]}
iex> PhilomenaQuery.RelativeDate.parse_relative("moon")
{:ok, ~U[3024-01-01 00:00:00Z]}
iex> PhilomenaQuery.RelativeDate.parse_relative("2024")
{:error, "Parse error"}
"""
@spec parse_relative(String.t()) :: {:ok, DateTime.t()} | {:error, any()}
def parse_relative(input) do
case relative_date(input) do
{:ok, [moon: _moon], _1, _2, _3, _4} ->


@@ -0,0 +1,629 @@
defmodule PhilomenaQuery.Search do
@moduledoc """
Low-level search engine interaction.
This module generates and delivers search bodies to the OpenSearch backend.
Note that before an index can be used to index or query documents, a call to
`create_index!/1` must be made. When setting up an application, or dealing with data loss
in the search engine, you must call `create_index!/1` before running an indexing task.
"""
alias PhilomenaQuery.Batch
alias Philomena.Repo
require Logger
import Ecto.Query
import Elastix.HTTP
# todo: fetch through compile_env?
@policy Philomena.SearchPolicy
@type schema_module :: @policy.schema_module()
@type queryable :: any()
@type query_body :: map()
@type replacement :: %{
path: [String.t()],
old: term(),
new: term()
}
@type search_definition :: %{
module: schema_module(),
body: query_body(),
page_number: integer(),
page_size: integer()
}
@type pagination_params :: %{
optional(:page_number) => integer(),
optional(:page_size) => integer()
}
@doc ~S"""
Create the index with the module's index name and mapping.
`PUT /#{index_name}`
You **must** use this function before indexing documents in order for the mapping to be created
correctly. If you index documents without a mapping created, the search engine will create a
mapping which does not contain the correct types for mapping fields, which will require
destroying and recreating the index.
## Example
iex> Search.create_index!(Image)
"""
@spec create_index!(schema_module()) :: any()
def create_index!(module) do
index = @policy.index_for(module)
Elastix.Index.create(
@policy.opensearch_url(),
index.index_name(),
index.mapping()
)
end
@doc ~S"""
Delete the index with the module's index name.
`DELETE /#{index_name}`
This undoes the effect of `create_index!/1` and removes the index permanently, deleting
all indexed documents within.
## Example
iex> Search.delete_index!(Image)
"""
@spec delete_index!(schema_module()) :: any()
def delete_index!(module) do
index = @policy.index_for(module)
Elastix.Index.delete(@policy.opensearch_url(), index.index_name())
end
@doc ~S"""
Update the schema mapping for the module's index name.
`PUT /#{index_name}/_mapping`
This is used to add new fields to an existing search mapping. This cannot be used to
remove fields; removing fields requires recreating the index.
## Example
iex> Search.update_mapping!(Image)
"""
@spec update_mapping!(schema_module()) :: any()
def update_mapping!(module) do
index = @policy.index_for(module)
index_name = index.index_name()
mapping = index.mapping().mappings.properties
Elastix.Mapping.put(@policy.opensearch_url(), index_name, "_doc", %{properties: mapping},
include_type_name: true
)
end
@doc ~S"""
Add a single document to the index named by the module.
`PUT /#{index_name}/_doc/#{id}`
This allows the search engine to query the document.
Note that indexing is near real-time and requires an index refresh before the document will
become visible. Unless changed in the mapping, this happens after 5 seconds have elapsed.
## Example
iex> Search.index_document(%Image{...}, Image)
"""
@spec index_document(struct(), schema_module()) :: any()
def index_document(doc, module) do
index = @policy.index_for(module)
data = index.as_json(doc)
Elastix.Document.index(
@policy.opensearch_url(),
index.index_name(),
"_doc",
data.id,
data
)
end
@doc ~S"""
Remove a single document from the index named by the module.
`DELETE /#{index_name}/_doc/#{id}`
This undoes the effect of `index_document/2`; it instructs the search engine to discard
the document and no longer return it in queries.
Note that indexing is near real-time and requires an index refresh before the document will
be removed. Unless changed in the mapping, this happens after 5 seconds have elapsed.
## Example
iex> Search.delete_document(image.id, Image)
"""
@spec delete_document(term(), schema_module()) :: any()
def delete_document(id, module) do
index = @policy.index_for(module)
Elastix.Document.delete(
@policy.opensearch_url(),
index.index_name(),
"_doc",
id
)
end
@doc """
Efficiently index a batch of documents in the index named by the module.
This function is substantially more efficient than running `index_document/2` for
each instance of a schema struct and can index with hundreds of times the throughput.
The queryable should be a schema type with its indexing preloads included in
the query. The options are forwarded to `PhilomenaQuery.Batch.record_batches/3`.
Note that indexing is near real-time and requires an index refresh before documents will
become visible. Unless changed in the mapping, this happens after 5 seconds have elapsed.
## Example
query =
from i in Image,
where: i.id < 100_000,
preload: ^Images.indexing_preloads()
Search.reindex(query, Image, batch_size: 5000)
"""
@spec reindex(queryable(), schema_module(), Batch.batch_options()) :: []
def reindex(queryable, module, opts \\ []) do
index = @policy.index_for(module)
Batch.record_batches(queryable, opts, fn records ->
lines =
Enum.flat_map(records, fn record ->
doc = index.as_json(record)
[
%{index: %{_index: index.index_name(), _id: doc.id}},
doc
]
end)
Elastix.Bulk.post(
@policy.opensearch_url(),
lines,
index: index.index_name(),
httpoison_options: [timeout: 30_000]
)
end)
end
@doc ~S"""
Asynchronously update all documents in the given index matching a query.
`POST /#{index_name}/_update_by_query`
This is used to replace values in documents on the fly without requiring a more-expensive
reindex operation from the database.
`set_replacements` are used to rename values in fields which are conceptually sets (arrays).
`replacements` are used to rename values in fields which are standalone terms.
Both `replacements` and `set_replacements` may be specified. Specifying neither will waste
the search engine's time evaluating the query and indexing the documents, so be sure to
specify at least one.
This function does not wait for completion of the update.
## Examples
query_body = %{term: %{"namespaced_tags.name" => old_name}}
replacement = %{path: ["namespaced_tags", "name"], old: old_name, new: new_name}
Search.update_by_query(Image, query_body, [], [replacement])
query_body = %{term: %{author: old_name}}
set_replacement = %{path: ["author"], old: old_name, new: new_name}
Search.update_by_query(Post, query_body, [set_replacement], [])
"""
@spec update_by_query(schema_module(), query_body(), [replacement()], [replacement()]) :: any()
def update_by_query(module, query_body, set_replacements, replacements) do
index = @policy.index_for(module)
url =
@policy.opensearch_url()
|> prepare_url([index.index_name(), "_update_by_query"])
|> append_query_string(%{conflicts: "proceed", wait_for_completion: "false"})
# "Painless" scripting language
script = """
// Replace values in "sets" (arrays in the source document)
for (int i = 0; i < params.set_replacements.length; ++i) {
def replacement = params.set_replacements[i];
def path = replacement.path;
def old_value = replacement.old;
def new_value = replacement.new;
def reference = ctx._source;
for (int j = 0; j < path.length; ++j) {
reference = reference[path[j]];
}
for (int j = 0; j < reference.length; ++j) {
if (reference[j].equals(old_value)) {
reference[j] = new_value;
}
}
}
// Replace values in standalone fields
for (int i = 0; i < params.replacements.length; ++i) {
def replacement = params.replacements[i];
def path = replacement.path;
def old_value = replacement.old;
def new_value = replacement.new;
def reference = ctx._source;
// A little bit more complicated: go up to the last one before it
// so that the value can actually be replaced
for (int j = 0; j < path.length - 1; ++j) {
reference = reference[path[j]];
}
if (reference[path[path.length - 1]] != null && reference[path[path.length - 1]].equals(old_value)) {
reference[path[path.length - 1]] = new_value;
}
}
"""
body =
Jason.encode!(%{
script: %{
source: script,
params: %{
set_replacements: set_replacements,
replacements: replacements
}
},
query: query_body
})
{:ok, %{status_code: 200}} = Elastix.HTTP.post(url, body)
end
@doc ~S"""
Search the index named by the module.
`GET /#{index_name}/_search`
Given a query body, this returns the raw query results.
## Example
iex> Search.search(Image, %{query: %{match_all: %{}}})
%{
"_shards" => %{"failed" => 0, "skipped" => 0, "successful" => 5, "total" => 5},
"hits" => %{
"hits" => [%{"_id" => "1", "_index" => "images", "_score" => 1.0, ...}, ...]
"max_score" => 1.0,
"total" => %{"relation" => "eq", "value" => 6}
},
"timed_out" => false,
"took" => 1
}
"""
@spec search(schema_module(), query_body()) :: map()
def search(module, query_body) do
index = @policy.index_for(module)
{:ok, %{body: results, status_code: 200}} =
Elastix.Search.search(
@policy.opensearch_url(),
index.index_name(),
[],
query_body
)
results
end
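  # As a sketch (with a hypothetical tag value), any raw query body can be passed, not just match_all:
  #
  #     Search.search(Image, %{query: %{term: %{"namespaced_tags.name" => "safe"}}, size: 1})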
@doc ~S"""
  Given a list of maps of `module` and `body`, searches each module's index with the respective body.
  `GET /_all/_search`
  This is more efficient than performing a separate `search/2` for each index.
  Like `search/2`, this returns the raw query results.
## Example
iex> Search.msearch([
...> %{module: Image, body: %{query: %{match_all: %{}}}},
...> %{module: Post, body: %{query: %{match_all: %{}}}}
...> ])
[
%{"_shards" => ..., "hits" => ..., "timed_out" => false, "took" => 1},
%{"_shards" => ..., "hits" => ..., "timed_out" => false, "took" => 2}
]
"""
@spec msearch([search_definition()]) :: [map()]
def msearch(definitions) do
msearch_body =
Enum.flat_map(definitions, fn def ->
[
%{index: @policy.index_for(def.module).index_name()},
def.body
]
end)
{:ok, %{body: results, status_code: 200}} =
Elastix.Search.search(
@policy.opensearch_url(),
"_all",
[],
msearch_body
)
results["responses"]
end
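  # A sketch of the multi-search payload built above: each definition contributes an
  # index-selection header followed by its body, e.g. (using the `search_definition/3` defaults):
  #   %{index: "images"}
  #   %{query: %{match_all: %{}}, from: 0, size: 25, _source: false, track_total_hits: true}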
@doc """
Transforms an index module, query body, and pagination parameters into a query suitable
for submission to the search engine.
Any of the following functions may be used for submission:
- `search_results/1`
- `msearch_results/1`
- `search_records/2`
- `msearch_records/2`
- `search_records_with_hits/2`
- `msearch_records_with_hits/2`
## Example
iex> Search.search_definition(Image, %{query: %{match_all: %{}}}, %{page_number: 3, page_size: 50})
%{
module: Image,
body: %{
size: 50,
query: %{match_all: %{}},
from: 100,
_source: false,
track_total_hits: true
},
page_size: 50,
page_number: 3
}
"""
@spec search_definition(schema_module(), query_body(), pagination_params()) ::
search_definition()
def search_definition(module, search_query, pagination_params \\ %{}) do
page_number = pagination_params[:page_number] || 1
page_size = pagination_params[:page_size] || 25
search_query =
Map.merge(search_query, %{
from: (page_number - 1) * page_size,
size: page_size,
_source: false,
track_total_hits: true
})
%{
module: module,
body: search_query,
page_number: page_number,
page_size: page_size
}
end
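  # Converts a raw search response into a `Scrivener.Page` whose entries are `{id, hit}`
  # tuples. `total_pages` is the hit count divided by the page size, rounded up.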
defp process_results(results, definition) do
time = results["took"]
count = results["hits"]["total"]["value"]
entries = Enum.map(results["hits"]["hits"], &{String.to_integer(&1["_id"]), &1})
Logger.debug("[Search] Query took #{time}ms")
Logger.debug("[Search] #{Jason.encode!(definition.body)}")
%Scrivener.Page{
entries: entries,
page_number: definition.page_number,
page_size: definition.page_size,
total_entries: count,
total_pages: div(count + definition.page_size - 1, definition.page_size)
}
end
@doc """
Given a search definition generated by `search_definition/3`, submit the query and return
a `m:Scrivener.Page` of results.
The `entries` in the page are a list of tuples of record IDs paired with the hit that generated
them.
## Example
iex> Search.search_results(definition)
%Scrivener.Page{
entries: [{1, %{"_id" => "1", ...}}, ...],
page_number: 1,
page_size: 25,
total_entries: 6,
total_pages: 1
}
"""
@spec search_results(search_definition()) :: Scrivener.Page.t()
def search_results(definition) do
process_results(search(definition.module, definition.body), definition)
end
@doc """
  Given a list of search definitions, each generated by `search_definition/3`, submit the
  queries and return a corresponding list of `m:Scrivener.Page` results, one per query.
The `entries` in the page are a list of tuples of record IDs paired with the hit that generated
them.
## Example
iex> Search.msearch_results([definition])
[
%Scrivener.Page{
entries: [{1, %{"_id" => "1", ...}}, ...],
page_number: 1,
page_size: 25,
total_entries: 6,
total_pages: 1
}
]
"""
@spec msearch_results([search_definition()]) :: [Scrivener.Page.t()]
def msearch_results(definitions) do
Enum.map(Enum.zip(msearch(definitions), definitions), fn {result, definition} ->
process_results(result, definition)
end)
end
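  # Loads the records referenced by each page's hit IDs from the database, reorders them to
  # match the search engine's ranking, and re-pairs each record with its originating hit.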
defp load_records_from_results(results, ecto_queries) do
Enum.map(Enum.zip(results, ecto_queries), fn {page, ecto_query} ->
{ids, hits} = Enum.unzip(page.entries)
records =
ecto_query
|> where([m], m.id in ^ids)
|> Repo.all()
|> Enum.sort_by(&Enum.find_index(ids, fn el -> el == &1.id end))
%{page | entries: Enum.zip(records, hits)}
end)
end
@doc """
Given a search definition generated by `search_definition/3`, submit the query and return a
`m:Scrivener.Page` of results.
The `entries` in the page are a list of tuples of schema structs paired with the hit that
generated them.
## Example
iex> Search.search_records_with_hits(definition, preload(Image, :tags))
%Scrivener.Page{
entries: [{%Image{id: 1, ...}, %{"_id" => "1", ...}}, ...],
page_number: 1,
page_size: 25,
total_entries: 6,
total_pages: 1
}
"""
@spec search_records_with_hits(search_definition(), queryable()) :: Scrivener.Page.t()
def search_records_with_hits(definition, ecto_query) do
[page] = load_records_from_results([search_results(definition)], [ecto_query])
page
end
@doc """
  Given a list of search definitions, each generated by `search_definition/3`, submit the
  queries and return a corresponding list of `m:Scrivener.Page` results, one per query.
The `entries` in the page are a list of tuples of schema structs paired with the hit that
generated them.
## Example
iex> Search.msearch_records_with_hits([definition], [preload(Image, :tags)])
[
%Scrivener.Page{
entries: [{%Image{id: 1, ...}, %{"_id" => "1", ...}}, ...],
page_number: 1,
page_size: 25,
total_entries: 6,
total_pages: 1
}
]
"""
@spec msearch_records_with_hits([search_definition()], [queryable()]) :: [Scrivener.Page.t()]
def msearch_records_with_hits(definitions, ecto_queries) do
load_records_from_results(msearch_results(definitions), ecto_queries)
end
@doc """
Given a search definition generated by `search_definition/3`, submit the query and return a
`m:Scrivener.Page` of results.
The `entries` in the page are a list of schema structs.
## Example
iex> Search.search_records(definition, preload(Image, :tags))
%Scrivener.Page{
entries: [%Image{id: 1, ...}, ...],
page_number: 1,
page_size: 25,
total_entries: 6,
total_pages: 1
}
"""
@spec search_records(search_definition(), queryable()) :: Scrivener.Page.t()
def search_records(definition, ecto_query) do
page = search_records_with_hits(definition, ecto_query)
{records, _hits} = Enum.unzip(page.entries)
%{page | entries: records}
end
@doc """
  Given a list of search definitions, each generated by `search_definition/3`, submit the
  queries and return a corresponding list of `m:Scrivener.Page` results, one per query.
The `entries` in the page are a list of schema structs.
## Example
iex> Search.msearch_records([definition], [preload(Image, :tags)])
[
%Scrivener.Page{
entries: [%Image{id: 1, ...}, ...],
page_number: 1,
page_size: 25,
total_entries: 6,
total_pages: 1
}
]
"""
@spec msearch_records([search_definition()], [queryable()]) :: [Scrivener.Page.t()]
def msearch_records(definitions, ecto_queries) do
Enum.map(load_records_from_results(msearch_results(definitions), ecto_queries), fn page ->
{records, _hits} = Enum.unzip(page.entries)
%{page | entries: records}
end)
end
end

View file

@ -1,4 +1,4 @@
defmodule Philomena.ElasticsearchIndex do defmodule PhilomenaQuery.SearchIndex do
# Returns the index name for the index. # Returns the index name for the index.
# This is usually a collection name like "images". # This is usually a collection name like "images".
@callback index_name() :: String.t() @callback index_name() :: String.t()
@ -6,7 +6,6 @@ defmodule Philomena.ElasticsearchIndex do
# Returns the mapping and settings for the index. # Returns the mapping and settings for the index.
@callback mapping() :: map() @callback mapping() :: map()
# Returns the JSON representation of the given struct # Returns the JSON representation of the given struct for indexing in OpenSearch.
# for indexing in Elasticsearch.
@callback as_json(struct()) :: map() @callback as_json(struct()) :: map()
end end

View file

@ -2,7 +2,7 @@ defmodule PhilomenaWeb.ActivityController do
use PhilomenaWeb, :controller use PhilomenaWeb, :controller
alias PhilomenaWeb.ImageLoader alias PhilomenaWeb.ImageLoader
alias Philomena.Elasticsearch alias PhilomenaQuery.Search
alias Philomena.{ alias Philomena.{
Images.Image, Images.Image,
@ -36,7 +36,7 @@ defmodule PhilomenaWeb.ActivityController do
) )
comments = comments =
Elasticsearch.search_definition( Search.search_definition(
Comment, Comment,
%{ %{
query: %{ query: %{
@ -144,7 +144,7 @@ defmodule PhilomenaWeb.ActivityController do
defp multi_search(images, top_scoring, comments, nil) do defp multi_search(images, top_scoring, comments, nil) do
responses = responses =
Elasticsearch.msearch_records( Search.msearch_records(
[images, top_scoring, comments], [images, top_scoring, comments],
[ [
preload(Image, [:sources, tags: :aliases]), preload(Image, [:sources, tags: :aliases]),
@ -157,7 +157,7 @@ defmodule PhilomenaWeb.ActivityController do
end end
defp multi_search(images, top_scoring, comments, watched) do defp multi_search(images, top_scoring, comments, watched) do
Elasticsearch.msearch_records( Search.msearch_records(
[images, top_scoring, comments, watched], [images, top_scoring, comments, watched],
[ [
preload(Image, [:sources, tags: :aliases]), preload(Image, [:sources, tags: :aliases]),

View file

@ -1,7 +1,7 @@
defmodule PhilomenaWeb.Admin.ReportController do defmodule PhilomenaWeb.Admin.ReportController do
use PhilomenaWeb, :controller use PhilomenaWeb, :controller
alias Philomena.Elasticsearch alias PhilomenaQuery.Search
alias PhilomenaWeb.MarkdownRenderer alias PhilomenaWeb.MarkdownRenderer
alias Philomena.Reports.Report alias Philomena.Reports.Report
alias Philomena.Reports.Query alias Philomena.Reports.Query
@ -94,14 +94,14 @@ defmodule PhilomenaWeb.Admin.ReportController do
defp load_reports(conn, query) do defp load_reports(conn, query) do
reports = reports =
Report Report
|> Elasticsearch.search_definition( |> Search.search_definition(
%{ %{
query: query, query: query,
sort: sorts() sort: sorts()
}, },
conn.assigns.pagination conn.assigns.pagination
) )
|> Elasticsearch.search_records(preload(Report, [:admin, user: :linked_tags])) |> Search.search_records(preload(Report, [:admin, user: :linked_tags]))
entries = Polymorphic.load_polymorphic(reports, reportable: [reportable_id: :reportable_type]) entries = Polymorphic.load_polymorphic(reports, reportable: [reportable_id: :reportable_type])

View file

@ -1,7 +1,7 @@
defmodule PhilomenaWeb.Api.Json.Search.CommentController do defmodule PhilomenaWeb.Api.Json.Search.CommentController do
use PhilomenaWeb, :controller use PhilomenaWeb, :controller
alias Philomena.Elasticsearch alias PhilomenaQuery.Search
alias Philomena.Comments.Comment alias Philomena.Comments.Comment
alias Philomena.Comments.Query alias Philomena.Comments.Query
import Ecto.Query import Ecto.Query
@ -14,7 +14,7 @@ defmodule PhilomenaWeb.Api.Json.Search.CommentController do
{:ok, query} -> {:ok, query} ->
comments = comments =
Comment Comment
|> Elasticsearch.search_definition( |> Search.search_definition(
%{ %{
query: %{ query: %{
bool: %{ bool: %{
@ -31,7 +31,7 @@ defmodule PhilomenaWeb.Api.Json.Search.CommentController do
}, },
conn.assigns.pagination conn.assigns.pagination
) )
|> Elasticsearch.search_records(preload(Comment, [:image, :user])) |> Search.search_records(preload(Comment, [:image, :user]))
conn conn
|> put_view(PhilomenaWeb.Api.Json.CommentView) |> put_view(PhilomenaWeb.Api.Json.CommentView)

View file

@ -1,7 +1,7 @@
defmodule PhilomenaWeb.Api.Json.Search.FilterController do defmodule PhilomenaWeb.Api.Json.Search.FilterController do
use PhilomenaWeb, :controller use PhilomenaWeb, :controller
alias Philomena.Elasticsearch alias PhilomenaQuery.Search
alias Philomena.Filters.Filter alias Philomena.Filters.Filter
alias Philomena.Filters.Query alias Philomena.Filters.Query
import Ecto.Query import Ecto.Query
@ -13,7 +13,7 @@ defmodule PhilomenaWeb.Api.Json.Search.FilterController do
{:ok, query} -> {:ok, query} ->
filters = filters =
Filter Filter
|> Elasticsearch.search_definition( |> Search.search_definition(
%{ %{
query: %{ query: %{
bool: %{ bool: %{
@ -36,7 +36,7 @@ defmodule PhilomenaWeb.Api.Json.Search.FilterController do
}, },
conn.assigns.pagination conn.assigns.pagination
) )
|> Elasticsearch.search_records(preload(Filter, [:user])) |> Search.search_records(preload(Filter, [:user]))
conn conn
|> put_view(PhilomenaWeb.Api.Json.FilterView) |> put_view(PhilomenaWeb.Api.Json.FilterView)

View file

@ -1,7 +1,7 @@
defmodule PhilomenaWeb.Api.Json.Search.GalleryController do defmodule PhilomenaWeb.Api.Json.Search.GalleryController do
use PhilomenaWeb, :controller use PhilomenaWeb, :controller
alias Philomena.Elasticsearch alias PhilomenaQuery.Search
alias Philomena.Galleries.Gallery alias Philomena.Galleries.Gallery
alias Philomena.Galleries.Query alias Philomena.Galleries.Query
import Ecto.Query import Ecto.Query
@ -11,14 +11,14 @@ defmodule PhilomenaWeb.Api.Json.Search.GalleryController do
{:ok, query} -> {:ok, query} ->
galleries = galleries =
Gallery Gallery
|> Elasticsearch.search_definition( |> Search.search_definition(
%{ %{
query: query, query: query,
sort: %{created_at: :desc} sort: %{created_at: :desc}
}, },
conn.assigns.pagination conn.assigns.pagination
) )
|> Elasticsearch.search_records(preload(Gallery, [:creator])) |> Search.search_records(preload(Gallery, [:creator]))
conn conn
|> put_view(PhilomenaWeb.Api.Json.GalleryView) |> put_view(PhilomenaWeb.Api.Json.GalleryView)

View file

@ -2,7 +2,7 @@ defmodule PhilomenaWeb.Api.Json.Search.ImageController do
use PhilomenaWeb, :controller use PhilomenaWeb, :controller
alias PhilomenaWeb.ImageLoader alias PhilomenaWeb.ImageLoader
alias Philomena.Elasticsearch alias PhilomenaQuery.Search
alias Philomena.Interactions alias Philomena.Interactions
alias Philomena.Images.Image alias Philomena.Images.Image
import Ecto.Query import Ecto.Query
@ -13,7 +13,7 @@ defmodule PhilomenaWeb.Api.Json.Search.ImageController do
case ImageLoader.search_string(conn, params["q"]) do case ImageLoader.search_string(conn, params["q"]) do
{:ok, {images, _tags}} -> {:ok, {images, _tags}} ->
images = Elasticsearch.search_records(images, queryable) images = Search.search_records(images, queryable)
interactions = Interactions.user_interactions(images, user) interactions = Interactions.user_interactions(images, user)
conn conn

View file

@ -1,7 +1,7 @@
defmodule PhilomenaWeb.Api.Json.Search.PostController do defmodule PhilomenaWeb.Api.Json.Search.PostController do
use PhilomenaWeb, :controller use PhilomenaWeb, :controller
alias Philomena.Elasticsearch alias PhilomenaQuery.Search
alias Philomena.Posts.Post alias Philomena.Posts.Post
alias Philomena.Posts.Query alias Philomena.Posts.Query
import Ecto.Query import Ecto.Query
@ -13,7 +13,7 @@ defmodule PhilomenaWeb.Api.Json.Search.PostController do
{:ok, query} -> {:ok, query} ->
posts = posts =
Post Post
|> Elasticsearch.search_definition( |> Search.search_definition(
%{ %{
query: %{ query: %{
bool: %{ bool: %{
@ -28,7 +28,7 @@ defmodule PhilomenaWeb.Api.Json.Search.PostController do
}, },
conn.assigns.pagination conn.assigns.pagination
) )
|> Elasticsearch.search_records(preload(Post, [:user, :topic])) |> Search.search_records(preload(Post, [:user, :topic]))
conn conn
|> put_view(PhilomenaWeb.Api.Json.Forum.Topic.PostView) |> put_view(PhilomenaWeb.Api.Json.Forum.Topic.PostView)

View file

@ -1,7 +1,7 @@
defmodule PhilomenaWeb.Api.Json.Search.TagController do defmodule PhilomenaWeb.Api.Json.Search.TagController do
use PhilomenaWeb, :controller use PhilomenaWeb, :controller
alias Philomena.Elasticsearch alias PhilomenaQuery.Search
alias Philomena.Tags.Tag alias Philomena.Tags.Tag
alias Philomena.Tags.Query alias Philomena.Tags.Query
import Ecto.Query import Ecto.Query
@ -11,11 +11,11 @@ defmodule PhilomenaWeb.Api.Json.Search.TagController do
{:ok, query} -> {:ok, query} ->
tags = tags =
Tag Tag
|> Elasticsearch.search_definition( |> Search.search_definition(
%{query: query, sort: %{images: :desc}}, %{query: query, sort: %{images: :desc}},
conn.assigns.pagination conn.assigns.pagination
) )
|> Elasticsearch.search_records( |> Search.search_records(
preload(Tag, [:aliased_tag, :aliases, :implied_tags, :implied_by_tags, :dnp_entries]) preload(Tag, [:aliased_tag, :aliases, :implied_tags, :implied_by_tags, :dnp_entries])
) )

View file

@ -3,13 +3,13 @@ defmodule PhilomenaWeb.Api.Rss.WatchedController do
alias PhilomenaWeb.ImageLoader alias PhilomenaWeb.ImageLoader
alias Philomena.Images.Image alias Philomena.Images.Image
alias Philomena.Elasticsearch alias PhilomenaQuery.Search
import Ecto.Query import Ecto.Query
def index(conn, _params) do def index(conn, _params) do
{:ok, {images, _tags}} = ImageLoader.search_string(conn, "my:watched") {:ok, {images, _tags}} = ImageLoader.search_string(conn, "my:watched")
images = Elasticsearch.search_records(images, preload(Image, [:sources, tags: :aliases])) images = Search.search_records(images, preload(Image, [:sources, tags: :aliases]))
# NB: this is RSS, but using the RSS format causes Phoenix not to # NB: this is RSS, but using the RSS format causes Phoenix not to
# escape HTML # escape HTML

View file

@ -1,7 +1,7 @@
defmodule PhilomenaWeb.Autocomplete.TagController do defmodule PhilomenaWeb.Autocomplete.TagController do
use PhilomenaWeb, :controller use PhilomenaWeb, :controller
alias Philomena.Elasticsearch alias PhilomenaQuery.Search
alias Philomena.Tags.Tag alias Philomena.Tags.Tag
import Ecto.Query import Ecto.Query
@ -13,7 +13,7 @@ defmodule PhilomenaWeb.Autocomplete.TagController do
term -> term ->
Tag Tag
|> Elasticsearch.search_definition( |> Search.search_definition(
%{ %{
query: %{ query: %{
bool: %{ bool: %{
@ -27,7 +27,7 @@ defmodule PhilomenaWeb.Autocomplete.TagController do
}, },
%{page_size: 10} %{page_size: 10}
) )
|> Elasticsearch.search_records(preload(Tag, :aliased_tag)) |> Search.search_records(preload(Tag, :aliased_tag))
|> Enum.map(&(&1.aliased_tag || &1)) |> Enum.map(&(&1.aliased_tag || &1))
|> Enum.uniq_by(& &1.id) |> Enum.uniq_by(& &1.id)
|> Enum.filter(&(&1.images_count > 0)) |> Enum.filter(&(&1.images_count > 0))

View file

@ -2,7 +2,7 @@ defmodule PhilomenaWeb.CommentController do
use PhilomenaWeb, :controller use PhilomenaWeb, :controller
alias PhilomenaWeb.MarkdownRenderer alias PhilomenaWeb.MarkdownRenderer
alias Philomena.Elasticsearch alias PhilomenaQuery.Search
alias Philomena.{Comments.Query, Comments.Comment} alias Philomena.{Comments.Query, Comments.Comment}
import Ecto.Query import Ecto.Query
@ -21,7 +21,7 @@ defmodule PhilomenaWeb.CommentController do
defp render_index({:ok, query}, conn, user) do defp render_index({:ok, query}, conn, user) do
comments = comments =
Comment Comment
|> Elasticsearch.search_definition( |> Search.search_definition(
%{ %{
query: %{ query: %{
bool: %{ bool: %{
@ -35,7 +35,7 @@ defmodule PhilomenaWeb.CommentController do
}, },
conn.assigns.pagination conn.assigns.pagination
) )
|> Elasticsearch.search_records( |> Search.search_records(
preload(Comment, [:deleted_by, image: [:sources, tags: :aliases], user: [awards: :badge]]) preload(Comment, [:deleted_by, image: [:sources, tags: :aliases], user: [awards: :badge]])
) )

View file

@ -2,7 +2,7 @@ defmodule PhilomenaWeb.FilterController do
use PhilomenaWeb, :controller use PhilomenaWeb, :controller
alias Philomena.{Filters, Filters.Filter, Filters.Query, Tags.Tag} alias Philomena.{Filters, Filters.Filter, Filters.Query, Tags.Tag}
alias Philomena.Elasticsearch alias PhilomenaQuery.Search
alias Philomena.Schema.TagList alias Philomena.Schema.TagList
alias Philomena.Repo alias Philomena.Repo
import Ecto.Query import Ecto.Query
@ -47,7 +47,7 @@ defmodule PhilomenaWeb.FilterController do
defp render_index({:ok, query}, conn, user) do defp render_index({:ok, query}, conn, user) do
filters = filters =
Filter Filter
|> Elasticsearch.search_definition( |> Search.search_definition(
%{ %{
query: %{ query: %{
bool: %{ bool: %{
@ -61,7 +61,7 @@ defmodule PhilomenaWeb.FilterController do
}, },
conn.assigns.pagination conn.assigns.pagination
) )
|> Elasticsearch.search_records(preload(Filter, [:user])) |> Search.search_records(preload(Filter, [:user]))
render(conn, "index.html", title: "Filters", filters: filters) render(conn, "index.html", title: "Filters", filters: filters)
end end

View file

@ -3,7 +3,7 @@ defmodule PhilomenaWeb.GalleryController do
alias PhilomenaWeb.ImageLoader alias PhilomenaWeb.ImageLoader
alias PhilomenaWeb.NotificationCountPlug alias PhilomenaWeb.NotificationCountPlug
alias Philomena.Elasticsearch alias PhilomenaQuery.Search
alias Philomena.Interactions alias Philomena.Interactions
alias Philomena.Galleries.Gallery alias Philomena.Galleries.Gallery
alias Philomena.Galleries alias Philomena.Galleries
@ -21,7 +21,7 @@ defmodule PhilomenaWeb.GalleryController do
def index(conn, params) do def index(conn, params) do
galleries = galleries =
Gallery Gallery
|> Elasticsearch.search_definition( |> Search.search_definition(
%{ %{
query: %{ query: %{
bool: %{ bool: %{
@ -32,7 +32,7 @@ defmodule PhilomenaWeb.GalleryController do
}, },
conn.assigns.pagination conn.assigns.pagination
) )
|> Elasticsearch.search_records( |> Search.search_records(
preload(Gallery, [:creator, thumbnail: [:sources, tags: :aliases]]) preload(Gallery, [:creator, thumbnail: [:sources, tags: :aliases]])
) )
@ -62,7 +62,7 @@ defmodule PhilomenaWeb.GalleryController do
{gallery_prev, gallery_next} = prev_next_page_images(conn, query) {gallery_prev, gallery_next} = prev_next_page_images(conn, query)
[images, gallery_prev, gallery_next] = [images, gallery_prev, gallery_next] =
Elasticsearch.msearch_records_with_hits( Search.msearch_records_with_hits(
[images, gallery_prev, gallery_next], [images, gallery_prev, gallery_next],
[ [
preload(Image, [:sources, tags: :aliases]), preload(Image, [:sources, tags: :aliases]),
@ -154,7 +154,7 @@ defmodule PhilomenaWeb.GalleryController do
limit = conn.assigns.image_pagination.page_size limit = conn.assigns.image_pagination.page_size
offset = (conn.assigns.image_pagination.page_number - 1) * limit offset = (conn.assigns.image_pagination.page_number - 1) * limit
# Inconsistency: Elasticsearch doesn't allow requesting offsets which are less than 0, # Inconsistency: OpenSearch doesn't allow requesting offsets which are less than 0,
# but it does allow requesting offsets which are beyond the total number of results. # but it does allow requesting offsets which are beyond the total number of results.
prev_image = gallery_image(offset - 1, conn, query) prev_image = gallery_image(offset - 1, conn, query)
@ -164,7 +164,7 @@ defmodule PhilomenaWeb.GalleryController do
end end
defp gallery_image(offset, _conn, _query) when offset < 0 do defp gallery_image(offset, _conn, _query) when offset < 0 do
Elasticsearch.search_definition(Image, %{query: %{match_none: %{}}}) Search.search_definition(Image, %{query: %{match_none: %{}}})
end end
defp gallery_image(offset, conn, query) do defp gallery_image(offset, conn, query) do

View file

@ -4,7 +4,7 @@ defmodule PhilomenaWeb.Image.NavigateController do
alias PhilomenaWeb.ImageLoader alias PhilomenaWeb.ImageLoader
alias PhilomenaWeb.ImageNavigator alias PhilomenaWeb.ImageNavigator
alias PhilomenaWeb.ImageScope alias PhilomenaWeb.ImageScope
alias Philomena.Elasticsearch alias PhilomenaQuery.Search
alias Philomena.Images.Image alias Philomena.Images.Image
alias Philomena.Images.Query alias Philomena.Images.Query
@ -37,7 +37,7 @@ defmodule PhilomenaWeb.Image.NavigateController do
body = %{range: %{id: %{gt: conn.assigns.image.id}}} body = %{range: %{id: %{gt: conn.assigns.image.id}}}
{images, _tags} = ImageLoader.query(conn, body, pagination: pagination) {images, _tags} = ImageLoader.query(conn, body, pagination: pagination)
images = Elasticsearch.search_records(images, Image) images = Search.search_records(images, Image)
page_num = page_for_offset(pagination.page_size, images.total_entries) page_num = page_for_offset(pagination.page_size, images.total_entries)

View file

@ -4,7 +4,7 @@ defmodule PhilomenaWeb.Image.RandomController do
alias PhilomenaWeb.ImageSorter alias PhilomenaWeb.ImageSorter
alias PhilomenaWeb.ImageScope alias PhilomenaWeb.ImageScope
alias PhilomenaWeb.ImageLoader alias PhilomenaWeb.ImageLoader
alias Philomena.Elasticsearch alias PhilomenaQuery.Search
alias Philomena.Images.Image alias Philomena.Images.Image
def index(conn, params) do def index(conn, params) do
@ -32,7 +32,7 @@ defmodule PhilomenaWeb.Image.RandomController do
defp unwrap_random_result({:ok, {definition, _tags}}) do defp unwrap_random_result({:ok, {definition, _tags}}) do
definition definition
|> Elasticsearch.search_records(Image) |> Search.search_records(Image)
|> Enum.to_list() |> Enum.to_list()
|> unwrap() |> unwrap()
end end

View file

@ -4,7 +4,7 @@ defmodule PhilomenaWeb.Image.RelatedController do
alias PhilomenaWeb.ImageLoader alias PhilomenaWeb.ImageLoader
alias Philomena.Interactions alias Philomena.Interactions
alias Philomena.Images.Image alias Philomena.Images.Image
alias Philomena.Elasticsearch alias PhilomenaQuery.Search
import Ecto.Query import Ecto.Query
plug PhilomenaWeb.CanaryMapPlug, index: :show plug PhilomenaWeb.CanaryMapPlug, index: :show
@ -60,7 +60,7 @@ defmodule PhilomenaWeb.Image.RelatedController do
pagination: %{conn.assigns.image_pagination | page_number: 1} pagination: %{conn.assigns.image_pagination | page_number: 1}
) )
images = Elasticsearch.search_records(images, preload(Image, [:sources, tags: :aliases])) images = Search.search_records(images, preload(Image, [:sources, tags: :aliases]))
interactions = Interactions.user_interactions(images, user) interactions = Interactions.user_interactions(images, user)

View file

@ -14,7 +14,7 @@ defmodule PhilomenaWeb.ImageController do
Galleries.Gallery Galleries.Gallery
} }
alias Philomena.Elasticsearch alias PhilomenaQuery.Search
alias Philomena.Interactions alias Philomena.Interactions
alias Philomena.Comments alias Philomena.Comments
alias Philomena.Repo alias Philomena.Repo
@ -40,7 +40,7 @@ defmodule PhilomenaWeb.ImageController do
{:ok, {images, _tags}} = {:ok, {images, _tags}} =
ImageLoader.search_string(conn, "created_at.lte:3 minutes ago, -thumbnails_generated:false") ImageLoader.search_string(conn, "created_at.lte:3 minutes ago, -thumbnails_generated:false")
images = Elasticsearch.search_records(images, preload(Image, [:sources, tags: :aliases])) images = Search.search_records(images, preload(Image, [:sources, tags: :aliases]))
interactions = Interactions.user_interactions(images, conn.assigns.current_user) interactions = Interactions.user_interactions(images, conn.assigns.current_user)

View file

@ -2,7 +2,7 @@ defmodule PhilomenaWeb.PostController do
use PhilomenaWeb, :controller use PhilomenaWeb, :controller
alias PhilomenaWeb.MarkdownRenderer alias PhilomenaWeb.MarkdownRenderer
alias Philomena.Elasticsearch alias PhilomenaQuery.Search
alias Philomena.{Posts.Query, Posts.Post} alias Philomena.{Posts.Query, Posts.Post}
import Ecto.Query import Ecto.Query
@ -21,7 +21,7 @@ defmodule PhilomenaWeb.PostController do
defp render_index({:ok, query}, conn, user) do defp render_index({:ok, query}, conn, user) do
posts = posts =
Post Post
|> Elasticsearch.search_definition( |> Search.search_definition(
%{ %{
query: %{ query: %{
bool: %{ bool: %{
@ -32,7 +32,7 @@ defmodule PhilomenaWeb.PostController do
}, },
conn.assigns.pagination conn.assigns.pagination
) )
|> Elasticsearch.search_records( |> Search.search_records(
preload(Post, [:deleted_by, topic: :forum, user: [awards: :badge]]) preload(Post, [:deleted_by, topic: :forum, user: [awards: :badge]])
) )

View file

@ -2,7 +2,7 @@ defmodule PhilomenaWeb.ProfileController do
use PhilomenaWeb, :controller use PhilomenaWeb, :controller
alias PhilomenaWeb.ImageLoader alias PhilomenaWeb.ImageLoader
alias Philomena.Elasticsearch alias PhilomenaQuery.Search
alias PhilomenaWeb.MarkdownRenderer alias PhilomenaWeb.MarkdownRenderer
alias Philomena.UserStatistics.UserStatistic alias Philomena.UserStatistics.UserStatistic
alias Philomena.Users.User alias Philomena.Users.User
@ -79,7 +79,7 @@ defmodule PhilomenaWeb.ProfileController do
recent_artwork = recent_artwork(conn, tags) recent_artwork = recent_artwork(conn, tags)
recent_comments = recent_comments =
Elasticsearch.search_definition( Search.search_definition(
Comment, Comment,
%{ %{
query: %{ query: %{
@ -100,7 +100,7 @@ defmodule PhilomenaWeb.ProfileController do
) )
recent_posts = recent_posts =
Elasticsearch.search_definition( Search.search_definition(
Post, Post,
%{ %{
query: %{ query: %{
@ -119,7 +119,7 @@ defmodule PhilomenaWeb.ProfileController do
) )
[recent_uploads, recent_faves, recent_artwork, recent_comments, recent_posts] = [recent_uploads, recent_faves, recent_artwork, recent_comments, recent_posts] =
Elasticsearch.msearch_records( Search.msearch_records(
[recent_uploads, recent_faves, recent_artwork, recent_comments, recent_posts], [recent_uploads, recent_faves, recent_artwork, recent_comments, recent_posts],
[ [
preload(Image, [:sources, tags: :aliases]), preload(Image, [:sources, tags: :aliases]),
@ -228,7 +228,7 @@ defmodule PhilomenaWeb.ProfileController do
defp tags(links), do: Enum.map(links, & &1.tag) |> Enum.reject(&is_nil/1) defp tags(links), do: Enum.map(links, & &1.tag) |> Enum.reject(&is_nil/1)
defp recent_artwork(_conn, []) do defp recent_artwork(_conn, []) do
Elasticsearch.search_definition(Image, %{query: %{match_none: %{}}}) Search.search_definition(Image, %{query: %{match_none: %{}}})
end end
defp recent_artwork(conn, tags) do defp recent_artwork(conn, tags) do

View file

@ -3,7 +3,7 @@ defmodule PhilomenaWeb.SearchController do
alias PhilomenaWeb.ImageLoader alias PhilomenaWeb.ImageLoader
alias Philomena.Images.Image alias Philomena.Images.Image
alias Philomena.Elasticsearch alias PhilomenaQuery.Search
alias Philomena.Interactions alias Philomena.Interactions
import Ecto.Query import Ecto.Query
@ -41,8 +41,8 @@ defmodule PhilomenaWeb.SearchController do
end end
end end
defp search_function(true), do: &Elasticsearch.search_records_with_hits/2 defp search_function(true), do: &Search.search_records_with_hits/2
defp search_function(_custom), do: &Elasticsearch.search_records/2 defp search_function(_custom), do: &Search.search_records/2
defp custom_ordering?(%{params: %{"sf" => sf}}) when sf != "id", do: true defp custom_ordering?(%{params: %{"sf" => sf}}) when sf != "id", do: true
defp custom_ordering?(_conn), do: false defp custom_ordering?(_conn), do: false

View file

@ -2,7 +2,7 @@ defmodule PhilomenaWeb.TagController do
use PhilomenaWeb, :controller use PhilomenaWeb, :controller
alias PhilomenaWeb.ImageLoader alias PhilomenaWeb.ImageLoader
alias Philomena.Elasticsearch alias PhilomenaQuery.Search
alias Philomena.{Tags, Tags.Tag} alias Philomena.{Tags, Tags.Tag}
alias Philomena.{Images, Images.Image} alias Philomena.{Images, Images.Image}
alias PhilomenaWeb.MarkdownRenderer alias PhilomenaWeb.MarkdownRenderer
@ -34,7 +34,7 @@ defmodule PhilomenaWeb.TagController do
with {:ok, query} <- Tags.Query.compile(query_string) do with {:ok, query} <- Tags.Query.compile(query_string) do
tags = tags =
Tag Tag
|> Elasticsearch.search_definition( |> Search.search_definition(
%{ %{
query: query, query: query,
size: 250, size: 250,
@ -42,7 +42,7 @@ defmodule PhilomenaWeb.TagController do
}, },
%{conn.assigns.pagination | page_size: 250} %{conn.assigns.pagination | page_size: 250}
) )
|> Elasticsearch.search_records(Tag) |> Search.search_records(Tag)
render(conn, "index.html", title: "Tags", tags: tags) render(conn, "index.html", title: "Tags", tags: tags)
else else
@ -57,7 +57,7 @@ defmodule PhilomenaWeb.TagController do
{images, _tags} = ImageLoader.query(conn, %{term: %{"namespaced_tags.name" => tag.name}}) {images, _tags} = ImageLoader.query(conn, %{term: %{"namespaced_tags.name" => tag.name}})
images = Elasticsearch.search_records(images, preload(Image, [:sources, tags: :aliases])) images = Search.search_records(images, preload(Image, [:sources, tags: :aliases]))
interactions = Interactions.user_interactions(images, user) interactions = Interactions.user_interactions(images, user)

View file

@ -1,6 +1,6 @@
defmodule PhilomenaWeb.ImageLoader do defmodule PhilomenaWeb.ImageLoader do
alias PhilomenaWeb.ImageSorter alias PhilomenaWeb.ImageSorter
alias Philomena.Elasticsearch alias PhilomenaQuery.Search
alias Philomena.Images.{Image, Query} alias Philomena.Images.{Image, Query}
alias PhilomenaWeb.MarkdownRenderer alias PhilomenaWeb.MarkdownRenderer
alias Philomena.Tags.Tag alias Philomena.Tags.Tag
@ -36,7 +36,7 @@ defmodule PhilomenaWeb.ImageLoader do
%{query: query, sorts: sort} = sorts.(body) %{query: query, sorts: sort} = sorts.(body)
definition = definition =
Elasticsearch.search_definition( Search.search_definition(
Image, Image,
%{ %{
query: %{ query: %{

View file

@ -1,7 +1,7 @@
defmodule PhilomenaWeb.ImageNavigator do defmodule PhilomenaWeb.ImageNavigator do
alias PhilomenaWeb.ImageSorter alias PhilomenaWeb.ImageSorter
alias Philomena.Images.Image alias Philomena.Images.Image
alias Philomena.Elasticsearch alias PhilomenaQuery.Search
@order_for_dir %{ @order_for_dir %{
"next" => %{"asc" => "asc", "desc" => "desc"}, "next" => %{"asc" => "asc", "desc" => "desc"},
@ -54,8 +54,8 @@ defmodule PhilomenaWeb.ImageNavigator do
defp maybe_search_after(module, body, options, queryable, true) do defp maybe_search_after(module, body, options, queryable, true) do
module module
|> Elasticsearch.search_definition(body, options) |> Search.search_definition(body, options)
|> Elasticsearch.search_records_with_hits(queryable) |> Search.search_records_with_hits(queryable)
end end
defp maybe_search_after(_module, _body, _options, _queryable, _false) do defp maybe_search_after(_module, _body, _options, _queryable, _false) do

View file

@ -6,8 +6,8 @@ defmodule PhilomenaWeb.FilterForcedUsersPlug do
import Phoenix.Controller import Phoenix.Controller
import Plug.Conn import Plug.Conn
alias Philomena.Search.String, as: SearchString alias PhilomenaQuery.Parse.String, as: SearchString
alias Philomena.Search.Evaluator alias PhilomenaQuery.Parse.Evaluator
alias Philomena.Images.Query alias Philomena.Images.Query
alias PhilomenaWeb.ImageView alias PhilomenaWeb.ImageView

View file

@ -1,6 +1,6 @@
defmodule PhilomenaWeb.ImageFilterPlug do defmodule PhilomenaWeb.ImageFilterPlug do
import Plug.Conn import Plug.Conn
import Philomena.Search.String import PhilomenaQuery.Parse.String
alias Philomena.Images.Query alias Philomena.Images.Query

View file

@ -1,6 +1,6 @@
defmodule PhilomenaWeb.StatsUpdater do defmodule PhilomenaWeb.StatsUpdater do
alias Philomena.Config alias Philomena.Config
alias Philomena.Elasticsearch alias PhilomenaQuery.Search
alias Philomena.Images.Image alias Philomena.Images.Image
alias Philomena.Comments.Comment alias Philomena.Comments.Comment
alias Philomena.Topics.Topic alias Philomena.Topics.Topic
@ -68,8 +68,8 @@ defmodule PhilomenaWeb.StatsUpdater do
data = Config.get(:aggregation) data = Config.get(:aggregation)
{ {
Elasticsearch.search(Image, data["images"]), Search.search(Image, data["images"]),
Elasticsearch.search(Comment, data["comments"]) Search.search(Comment, data["comments"])
} }
end end

View file

@ -71,7 +71,7 @@ defmodule PhilomenaWeb.Api.Json.ImageView do
tag_ids: Enum.map(image.tags, & &1.id), tag_ids: Enum.map(image.tags, & &1.id),
uploader: if(!!image.user and !image.anonymous, do: image.user.name), uploader: if(!!image.user and !image.anonymous, do: image.user.name),
uploader_id: if(!!image.user and !image.anonymous, do: image.user.id), uploader_id: if(!!image.user and !image.anonymous, do: image.user.id),
wilson_score: Philomena.Images.ElasticsearchIndex.wilson_score(image), wilson_score: Philomena.Images.SearchIndex.wilson_score(image),
intensities: intensities(image), intensities: intensities(image),
score: image.score, score: image.score,
upvotes: image.upvotes_count, upvotes: image.upvotes_count,

View file

@ -291,7 +291,7 @@ defmodule PhilomenaWeb.ImageView do
} }
} }
Philomena.Search.Evaluator.hits?(doc, query) PhilomenaQuery.Parse.Evaluator.hits?(doc, query)
end end
def image_source_icon(nil), do: "fa fa-link" def image_source_icon(nil), do: "fa fa-link"

View file

@ -43,9 +43,9 @@ defmodule PhilomenaWeb.LayoutView do
data = [ data = [
filter_id: filter.id, filter_id: filter.id,
hidden_tag_list: Jason.encode!(filter.hidden_tag_ids), hidden_tag_list: Jason.encode!(filter.hidden_tag_ids),
hidden_filter: Philomena.Search.String.normalize(filter.hidden_complex_str || ""), hidden_filter: PhilomenaQuery.Parse.String.normalize(filter.hidden_complex_str || ""),
spoilered_tag_list: Jason.encode!(filter.spoilered_tag_ids), spoilered_tag_list: Jason.encode!(filter.spoilered_tag_ids),
spoilered_filter: Philomena.Search.String.normalize(filter.spoilered_complex_str || ""), spoilered_filter: PhilomenaQuery.Parse.String.normalize(filter.spoilered_complex_str || ""),
user_id: if(user, do: user.id, else: nil), user_id: if(user, do: user.id, else: nil),
user_name: if(user, do: user.name, else: nil), user_name: if(user, do: user.name, else: nil),
user_slug: if(user, do: user.slug, else: nil), user_slug: if(user, do: user.slug, else: nil),

View file

@ -3,7 +3,7 @@ defmodule PhilomenaWeb.TagView do
# this is bad practice, don't copy this. # this is bad practice, don't copy this.
alias Philomena.Config alias Philomena.Config
alias Philomena.Elasticsearch alias PhilomenaQuery.Search
alias Philomena.Tags.Tag alias Philomena.Tags.Tag
alias Philomena.Repo alias Philomena.Repo
alias PhilomenaWeb.ImageScope alias PhilomenaWeb.ImageScope
@ -143,7 +143,7 @@ defmodule PhilomenaWeb.TagView do
defp implied_by_multitag(tag_names, ignore_tag_names) do defp implied_by_multitag(tag_names, ignore_tag_names) do
Tag Tag
|> Elasticsearch.search_definition( |> Search.search_definition(
%{ %{
query: %{ query: %{
bool: %{ bool: %{
@ -155,7 +155,7 @@ defmodule PhilomenaWeb.TagView do
}, },
%{page_size: 40} %{page_size: 40}
) )
|> Elasticsearch.search_records(preload(Tag, :implied_tags)) |> Search.search_records(preload(Tag, :implied_tags))
end end
defp manages_links?(conn), defp manages_links?(conn),

View file

@ -26,17 +26,17 @@ alias Philomena.{
StaticPages.StaticPage StaticPages.StaticPage
} }
alias Philomena.Elasticsearch alias PhilomenaQuery.Search
alias Philomena.Users alias Philomena.Users
alias Philomena.Tags alias Philomena.Tags
alias Philomena.Filters alias Philomena.Filters
import Ecto.Query import Ecto.Query
IO.puts("---- Creating Elasticsearch indices") IO.puts("---- Creating search indices")
for model <- [Image, Comment, Gallery, Tag, Post, Report, Filter] do for model <- [Image, Comment, Gallery, Tag, Post, Report, Filter] do
Elasticsearch.delete_index!(model) Search.delete_index!(model)
Elasticsearch.create_index!(model) Search.create_index!(model)
end end
resources = resources =
@ -112,6 +112,6 @@ for page_def <- resources["pages"] do
end end
IO.puts("---- Indexing content") IO.puts("---- Indexing content")
Elasticsearch.reindex(Tag |> preload(^Tags.indexing_preloads()), Tag) Search.reindex(Tag |> preload(^Tags.indexing_preloads()), Tag)
IO.puts("---- Done.") IO.puts("---- Done.")