Mirror of https://github.com/philomena-dev/philomena.git, synced 2024-11-27 05:37:59 +01:00
Fix all but one dialyzer warning
parent 1e6cef6092
commit 539eb223ff
16 changed files with 87 additions and 101 deletions
@@ -18,9 +18,10 @@ defmodule Mix.Tasks.ReindexAll do
   import Ecto.Query
 
   @shortdoc "Destroys and recreates all Elasticsearch indices."
-  def run(_) do
-    if Mix.env() == :prod and not Enum.member?(System.argv(), "--i-know-what-im-doing") do
-      raise "do not run this task in production"
+  @impl Mix.Task
+  def run(args) do
+    if Mix.env() == :prod and not Enum.member?(args, "--i-know-what-im-doing") do
+      raise "do not run this task unless you know what you're doing"
     end
 
     {:ok, _apps} = Application.ensure_all_started(:philomena)
@@ -2,12 +2,6 @@ defmodule Philomena.Images.TagValidator do
   alias Philomena.Servers.Config
   import Ecto.Changeset
 
-  @safe_rating MapSet.new(["safe"])
-  @sexual_ratings MapSet.new(["suggestive", "questionable", "explicit"])
-  @horror_ratings MapSet.new(["semi-grimdark", "grimdark"])
-  @gross_rating MapSet.new(["grotesque"])
-  @empty MapSet.new()
-
   def validate_tags(changeset) do
     tags = changeset |> get_field(:tags)
 
@@ -27,11 +21,11 @@ defmodule Philomena.Images.TagValidator do
     |> validate_horror_exclusion(rating_set)
   end
 
-  defp ratings(%MapSet{} = tag_set) do
-    safe = MapSet.intersection(tag_set, @safe_rating)
-    sexual = MapSet.intersection(tag_set, @sexual_ratings)
-    horror = MapSet.intersection(tag_set, @horror_ratings)
-    gross = MapSet.intersection(tag_set, @gross_rating)
+  defp ratings(tag_set) do
+    safe = MapSet.intersection(tag_set, safe_rating())
+    sexual = MapSet.intersection(tag_set, sexual_ratings())
+    horror = MapSet.intersection(tag_set, horror_ratings())
+    gross = MapSet.intersection(tag_set, gross_rating())
 
     %{
       safe: safe,
@@ -44,8 +38,7 @@ defmodule Philomena.Images.TagValidator do
   defp validate_number_of_tags(changeset, tag_set, num) do
     cond do
       MapSet.size(tag_set) < num ->
-        changeset
-        |> add_error(:tag_input, "must contain at least #{num} tags")
+        add_error(changeset, :tag_input, "must contain at least #{num} tags")
 
       true ->
         changeset
@@ -69,27 +62,30 @@ defmodule Philomena.Images.TagValidator do
     end
   end
 
-  defp validate_has_rating(changeset, %{safe: s, sexual: x, horror: h, gross: g})
-       when s == @empty and x == @empty and h == @empty and g == @empty do
-    changeset
-    |> add_error(:tag_input, "must contain at least one rating tag")
-  end
+  defp validate_has_rating(changeset, %{safe: s, sexual: x, horror: h, gross: g}) do
+    cond do
+      MapSet.size(s) > 0 or MapSet.size(x) > 0 or MapSet.size(h) > 0 or MapSet.size(g) > 0 ->
+        changeset
 
-  defp validate_has_rating(changeset, _ratings), do: changeset
+      true ->
+        add_error(changeset, :tag_input, "must contain at least one rating tag")
+    end
+  end
 
-  defp validate_safe(changeset, %{safe: s, sexual: x, horror: h, gross: g})
-       when s != @empty and (x != @empty or h != @empty or g != @empty) do
-    changeset
-    |> add_error(:tag_input, "may not contain any other rating if safe")
-  end
+  defp validate_safe(changeset, %{safe: s, sexual: x, horror: h, gross: g}) do
+    cond do
+      MapSet.size(s) > 1 and (MapSet.size(x) > 0 or MapSet.size(h) > 0 or MapSet.size(g) > 0) ->
+        add_error(changeset, :tag_input, "may not contain any other rating if safe")
 
-  defp validate_safe(changeset, _ratings), do: changeset
+      true ->
+        changeset
+    end
+  end
 
   defp validate_sexual_exclusion(changeset, %{sexual: x}) do
     cond do
       MapSet.size(x) > 1 ->
-        changeset
-        |> add_error(:tag_input, "may contain at most one sexual rating")
+        add_error(changeset, :tag_input, "may contain at most one sexual rating")
 
       true ->
         changeset
@@ -99,8 +95,7 @@ defmodule Philomena.Images.TagValidator do
   defp validate_horror_exclusion(changeset, %{horror: h}) do
     cond do
       MapSet.size(h) > 1 ->
-        changeset
-        |> add_error(:tag_input, "may contain at most one grim rating")
+        add_error(changeset, :tag_input, "may contain at most one grim rating")
 
       true ->
         changeset
@@ -112,4 +107,9 @@ defmodule Philomena.Images.TagValidator do
     |> Enum.map(& &1.name)
     |> MapSet.new()
   end
+
+  defp safe_rating, do: MapSet.new(["safe"])
+  defp sexual_ratings, do: MapSet.new(["suggestive", "questionable", "explicit"])
+  defp horror_ratings, do: MapSet.new(["semi-grimdark", "grimdark"])
+  defp gross_rating, do: MapSet.new(["grotesque"])
 end
@@ -1,5 +1,6 @@
 defmodule Philomena.Search.DateParser do
   import NimbleParsec
+  @dialyzer [:no_match, :no_unused]
 
   defp build_datetime(naive, tz_off, tz_hour, tz_minute) do
     tz_hour =
@@ -1,5 +1,6 @@
 defmodule Philomena.Search.LiteralParser do
   import NimbleParsec
+  @dialyzer [:no_match, :no_unused]
 
   defp to_number(input), do: Philomena.Search.Helpers.to_number(input)
 
@@ -19,7 +19,7 @@ defmodule Philomena.Tags do
   alias Philomena.DnpEntries.DnpEntry
   alias Philomena.Channels.Channel
 
-  @spec get_or_create_tags(String.t()) :: List.t()
+  @spec get_or_create_tags(String.t()) :: list()
   def get_or_create_tags(tag_list) do
     tag_names = Tag.parse_tag_list(tag_list)
 
@@ -394,10 +394,6 @@ defmodule Philomena.Textile.Parser do
             {:text, escape(open)},
             tree2
           ], r3_tokens}
-
-        _ ->
-          {:ok, [{:text, escape(start)}, {:text, escape(flatten(tree))}, {:text, escape(open)}],
-           r_tokens}
       end
 
     _ ->
@@ -436,16 +436,13 @@ defmodule Philomena.Users.User do
 
     cond do
       totp_valid?(user, token) ->
-        changeset
-        |> change(%{consumed_timestep: String.to_integer(token)})
+        change(changeset, consumed_timestep: String.to_integer(token))
 
       backup_code_valid?(user, token) ->
-        changeset
-        |> change(%{otp_backup_codes: remove_backup_code(user, token)})
+        change(changeset, otp_backup_codes: remove_backup_code(user, token))
 
       true ->
-        changeset
-        |> add_error(:twofactor_token, "Invalid token")
+        add_error(changeset, :twofactor_token, "Invalid token")
     end
   end
 
@@ -500,6 +497,7 @@ defmodule Philomena.Users.User do
     "data:image/png;base64," <> png
   end
 
+  @spec totp_secret(%Philomena.Users.User{}) :: binary()
   def totp_secret(user) do
     Philomena.Users.Encryptor.decrypt_model(
       user.encrypted_otp_secret,
@@ -7,7 +7,7 @@ defmodule PhilomenaWeb.Api.Json.ImageController do
   alias Philomena.Repo
   import Ecto.Query
 
-  plug :set_scraper_cache
+  plug PhilomenaWeb.ScraperCachePlug
   plug PhilomenaWeb.ApiRequireAuthorizationPlug when action in [:create]
   plug PhilomenaWeb.UserAttributionPlug when action in [:create]
@@ -55,13 +55,4 @@ defmodule PhilomenaWeb.Api.Json.ImageController do
       |> render("error.json", changeset: changeset)
     end
   end
-
-  defp set_scraper_cache(conn, _opts) do
-    params =
-      conn.params
-      |> Map.put_new("image", %{})
-      |> Map.put("scraper_cache", conn.params["url"])
-
-    %{conn | params: params}
-  end
 end
@@ -4,7 +4,7 @@ defmodule PhilomenaWeb.Api.Json.Search.ReverseController do
   alias PhilomenaWeb.ImageReverse
   alias Philomena.Interactions
 
-  plug :set_scraper_cache
+  plug PhilomenaWeb.ScraperCachePlug
   plug PhilomenaWeb.ScraperPlug, params_key: "image", params_name: "image"
 
   def create(conn, %{"image" => image_params}) do
@@ -21,23 +21,4 @@ defmodule PhilomenaWeb.Api.Json.Search.ReverseController do
     |> put_view(PhilomenaWeb.Api.Json.ImageView)
     |> render("index.json", images: images, total: length(images), interactions: interactions)
   end
-
-  defp set_scraper_cache(conn, _opts) do
-    params =
-      conn.params
-      |> Map.put("image", %{})
-      |> Map.put("distance", normalize_dist(conn.params))
-      |> Map.put("scraper_cache", conn.params["url"])
-
-    %{conn | params: params}
-  end
-
-  defp normalize_dist(%{"distance" => distance}) do
-    ("0" <> distance)
-    |> Float.parse()
-    |> elem(0)
-    |> Float.to_string()
-  end
-
-  defp normalize_dist(_dist), do: "0.25"
 end
@@ -3,7 +3,7 @@ defmodule PhilomenaWeb.Search.ReverseController do
 
   alias PhilomenaWeb.ImageReverse
 
-  plug :set_scraper_cache
+  plug PhilomenaWeb.ScraperCachePlug
   plug PhilomenaWeb.ScraperPlug, params_key: "image", params_name: "image"
 
   def index(conn, params) do
@@ -19,23 +19,4 @@ defmodule PhilomenaWeb.Search.ReverseController do
   def create(conn, _params) do
     render(conn, "index.html", title: "Reverse Search", images: nil)
   end
-
-  defp set_scraper_cache(conn, _opts) do
-    params =
-      conn.params
-      |> Map.put_new("image", %{})
-      |> Map.put_new("scraper_cache", conn.params["url"])
-      |> Map.put("distance", normalize_dist(conn.params))
-
-    %{conn | params: params}
-  end
-
-  defp normalize_dist(%{"distance" => distance}) do
-    ("0" <> distance)
-    |> Float.parse()
-    |> elem(0)
-    |> Float.to_string()
-  end
-
-  defp normalize_dist(_dist), do: "0.25"
 end
@@ -92,11 +92,11 @@ defmodule PhilomenaWeb.ImageNavigator do
     field = String.to_existing_atom(sf)
     filter = range_filter(sf, @range_comparison_for_order[order], image[field])
 
-    cond do
-      sf in [:_random, :_score] ->
+    case sf do
+      "_score" ->
         {[sort], []}
 
-      true ->
+      _ ->
         {[sort], [filter]}
     end
   end
@@ -13,6 +13,8 @@ defmodule PhilomenaWeb.AdvertPlug do
     maybe_assign_ad(conn, image, show_ads?)
   end
 
+  defp maybe_assign_ad(conn, image, show_ads?)
+
   defp maybe_assign_ad(conn, nil, true),
     do: Conn.assign(conn, :advert, record_impression(Adverts.random_live()))
 
@@ -22,8 +24,8 @@ defmodule PhilomenaWeb.AdvertPlug do
   defp maybe_assign_ad(conn, _image, _false),
     do: conn
 
-  defp show_ads?(%{hide_advertisements: false}),
-    do: true
+  defp show_ads?(%{hide_advertisements: hide}),
+    do: !hide
 
   defp show_ads?(_user),
     do: true
lib/philomena_web/plugs/scraper_cache_plug.ex (new file, 28 lines)
@@ -0,0 +1,28 @@
+defmodule PhilomenaWeb.ScraperCachePlug do
+  @spec init(any()) :: any()
+  def init(opts) do
+    opts
+  end
+
+  @spec call(Plug.Conn.t(), any()) :: Plug.Conn.t()
+  def call(conn, _opts) do
+    params =
+      conn.params
+      |> Map.put_new("image", %{})
+      |> Map.put_new("scraper_cache", conn.params["url"])
+      |> Map.put("distance", normalize_dist(conn.params))
+
+    %Plug.Conn{conn | params: params}
+  end
+
+  defp normalize_dist(%{"distance" => distance}) do
+    ("0" <> distance)
+    |> Float.parse()
+    |> elem(0)
+    |> Float.to_string()
+  end
+
+  defp normalize_dist(_dist) do
+    "0.25"
+  end
+end
@@ -1,5 +1,7 @@
 defmodule PhilomenaWeb.ScraperPlug do
-  def init(opts), do: opts
+  def init(opts) do
+    opts
+  end
 
   def call(conn, opts) do
     params_name = Keyword.get(opts, :params_name, "image")
@@ -10,7 +12,8 @@ defmodule PhilomenaWeb.ScraperPlug do
         conn
 
       %{"scraper_cache" => url} when not is_nil(url) ->
-        Philomena.Http.get!(url)
+        url
+        |> Philomena.Http.get!()
         |> maybe_fixup_params(opts, conn)
 
       _ ->
@@ -36,7 +39,7 @@ defmodule PhilomenaWeb.ScraperPlug do
 
     updated_params = Map.put(conn.params, params_name, updated_form)
 
-    %{conn | params: updated_params}
+    %Plug.Conn{conn | params: updated_params}
   end
 
   defp maybe_fixup_params(_response, _opts, conn), do: conn
@@ -5,6 +5,8 @@ defmodule PhilomenaWeb.AdvertView do
     advert_url_root() <> "/" <> image
   end
 
+  def advert_image_url(_), do: nil
+
   defp advert_url_root do
     Application.get_env(:philomena, :advert_url_root)
   end
mix.exs (3 lines changed)
@@ -10,7 +10,8 @@ defmodule Philomena.MixProject do
       compilers: [:phoenix, :gettext] ++ Mix.compilers(),
       start_permanent: Mix.env() == :prod,
       aliases: aliases(),
-      deps: deps()
+      deps: deps(),
+      dialyzer: [plt_add_apps: [:mix]]
     ]
   end
 
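The `dialyzer: [plt_add_apps: [:mix]]` option added to mix.exs above is read by the dialyxir Mix task rather than by Elixir itself; putting :mix in the PLT lets dialyzer resolve the Mix.Task behaviour that Mix.Tasks.ReindexAll now declares with @impl. A minimal sketch of the surrounding configuration (the dialyxir dependency, its version, and the other project keys shown here are assumptions, not part of this commit):

# mix.exs sketch; only the :dialyzer key is taken from this commit
def project do
  [
    app: :philomena,
    deps: deps(),
    dialyzer: [plt_add_apps: [:mix]]
  ]
end

defp deps do
  [
    # dev-only dependency providing the `mix dialyzer` task (assumed)
    {:dialyxir, "~> 1.0", only: [:dev], runtime: false}
  ]
end

With that in place, running `mix dialyzer` rebuilds the PLT and reports the remaining warnings.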