Fix all but one dialyzer warning

This commit is contained in:
byte[] 2020-08-07 20:23:36 -04:00
parent 1e6cef6092
commit 539eb223ff
16 changed files with 87 additions and 101 deletions

View file

@ -18,9 +18,10 @@ defmodule Mix.Tasks.ReindexAll do
import Ecto.Query import Ecto.Query
@shortdoc "Destroys and recreates all Elasticsearch indices." @shortdoc "Destroys and recreates all Elasticsearch indices."
def run(_) do @impl Mix.Task
if Mix.env() == :prod and not Enum.member?(System.argv(), "--i-know-what-im-doing") do def run(args) do
raise "do not run this task in production" if Mix.env() == :prod and not Enum.member?(args, "--i-know-what-im-doing") do
raise "do not run this task unless you know what you're doing"
end end
{:ok, _apps} = Application.ensure_all_started(:philomena) {:ok, _apps} = Application.ensure_all_started(:philomena)

View file

@ -2,12 +2,6 @@ defmodule Philomena.Images.TagValidator do
alias Philomena.Servers.Config alias Philomena.Servers.Config
import Ecto.Changeset import Ecto.Changeset
@safe_rating MapSet.new(["safe"])
@sexual_ratings MapSet.new(["suggestive", "questionable", "explicit"])
@horror_ratings MapSet.new(["semi-grimdark", "grimdark"])
@gross_rating MapSet.new(["grotesque"])
@empty MapSet.new()
def validate_tags(changeset) do def validate_tags(changeset) do
tags = changeset |> get_field(:tags) tags = changeset |> get_field(:tags)
@ -27,11 +21,11 @@ defmodule Philomena.Images.TagValidator do
|> validate_horror_exclusion(rating_set) |> validate_horror_exclusion(rating_set)
end end
defp ratings(%MapSet{} = tag_set) do defp ratings(tag_set) do
safe = MapSet.intersection(tag_set, @safe_rating) safe = MapSet.intersection(tag_set, safe_rating())
sexual = MapSet.intersection(tag_set, @sexual_ratings) sexual = MapSet.intersection(tag_set, sexual_ratings())
horror = MapSet.intersection(tag_set, @horror_ratings) horror = MapSet.intersection(tag_set, horror_ratings())
gross = MapSet.intersection(tag_set, @gross_rating) gross = MapSet.intersection(tag_set, gross_rating())
%{ %{
safe: safe, safe: safe,
@ -44,8 +38,7 @@ defmodule Philomena.Images.TagValidator do
defp validate_number_of_tags(changeset, tag_set, num) do defp validate_number_of_tags(changeset, tag_set, num) do
cond do cond do
MapSet.size(tag_set) < num -> MapSet.size(tag_set) < num ->
changeset add_error(changeset, :tag_input, "must contain at least #{num} tags")
|> add_error(:tag_input, "must contain at least #{num} tags")
true -> true ->
changeset changeset
@ -69,27 +62,30 @@ defmodule Philomena.Images.TagValidator do
end end
end end
defp validate_has_rating(changeset, %{safe: s, sexual: x, horror: h, gross: g}) defp validate_has_rating(changeset, %{safe: s, sexual: x, horror: h, gross: g}) do
when s == @empty and x == @empty and h == @empty and g == @empty do cond do
changeset MapSet.size(s) > 0 or MapSet.size(x) > 0 or MapSet.size(h) > 0 or MapSet.size(g) > 0 ->
|> add_error(:tag_input, "must contain at least one rating tag") changeset
true ->
add_error(changeset, :tag_input, "must contain at least one rating tag")
end
end end
defp validate_has_rating(changeset, _ratings), do: changeset defp validate_safe(changeset, %{safe: s, sexual: x, horror: h, gross: g}) do
cond do
MapSet.size(s) > 1 and (MapSet.size(x) > 0 or MapSet.size(h) > 0 or MapSet.size(g) > 0) ->
add_error(changeset, :tag_input, "may not contain any other rating if safe")
defp validate_safe(changeset, %{safe: s, sexual: x, horror: h, gross: g}) true ->
when s != @empty and (x != @empty or h != @empty or g != @empty) do changeset
changeset end
|> add_error(:tag_input, "may not contain any other rating if safe")
end end
defp validate_safe(changeset, _ratings), do: changeset
defp validate_sexual_exclusion(changeset, %{sexual: x}) do defp validate_sexual_exclusion(changeset, %{sexual: x}) do
cond do cond do
MapSet.size(x) > 1 -> MapSet.size(x) > 1 ->
changeset add_error(changeset, :tag_input, "may contain at most one sexual rating")
|> add_error(:tag_input, "may contain at most one sexual rating")
true -> true ->
changeset changeset
@ -99,8 +95,7 @@ defmodule Philomena.Images.TagValidator do
defp validate_horror_exclusion(changeset, %{horror: h}) do defp validate_horror_exclusion(changeset, %{horror: h}) do
cond do cond do
MapSet.size(h) > 1 -> MapSet.size(h) > 1 ->
changeset add_error(changeset, :tag_input, "may contain at most one grim rating")
|> add_error(:tag_input, "may contain at most one grim rating")
true -> true ->
changeset changeset
@ -112,4 +107,9 @@ defmodule Philomena.Images.TagValidator do
|> Enum.map(& &1.name) |> Enum.map(& &1.name)
|> MapSet.new() |> MapSet.new()
end end
defp safe_rating, do: MapSet.new(["safe"])
defp sexual_ratings, do: MapSet.new(["suggestive", "questionable", "explicit"])
defp horror_ratings, do: MapSet.new(["semi-grimdark", "grimdark"])
defp gross_rating, do: MapSet.new(["grotesque"])
end end

View file

@ -1,5 +1,6 @@
defmodule Philomena.Search.DateParser do defmodule Philomena.Search.DateParser do
import NimbleParsec import NimbleParsec
@dialyzer [:no_match, :no_unused]
defp build_datetime(naive, tz_off, tz_hour, tz_minute) do defp build_datetime(naive, tz_off, tz_hour, tz_minute) do
tz_hour = tz_hour =

View file

@ -1,5 +1,6 @@
defmodule Philomena.Search.LiteralParser do defmodule Philomena.Search.LiteralParser do
import NimbleParsec import NimbleParsec
@dialyzer [:no_match, :no_unused]
defp to_number(input), do: Philomena.Search.Helpers.to_number(input) defp to_number(input), do: Philomena.Search.Helpers.to_number(input)

View file

@ -19,7 +19,7 @@ defmodule Philomena.Tags do
alias Philomena.DnpEntries.DnpEntry alias Philomena.DnpEntries.DnpEntry
alias Philomena.Channels.Channel alias Philomena.Channels.Channel
@spec get_or_create_tags(String.t()) :: List.t() @spec get_or_create_tags(String.t()) :: list()
def get_or_create_tags(tag_list) do def get_or_create_tags(tag_list) do
tag_names = Tag.parse_tag_list(tag_list) tag_names = Tag.parse_tag_list(tag_list)

View file

@ -394,10 +394,6 @@ defmodule Philomena.Textile.Parser do
{:text, escape(open)}, {:text, escape(open)},
tree2 tree2
], r3_tokens} ], r3_tokens}
_ ->
{:ok, [{:text, escape(start)}, {:text, escape(flatten(tree))}, {:text, escape(open)}],
r_tokens}
end end
_ -> _ ->

View file

@ -436,16 +436,13 @@ defmodule Philomena.Users.User do
cond do cond do
totp_valid?(user, token) -> totp_valid?(user, token) ->
changeset change(changeset, consumed_timestep: String.to_integer(token))
|> change(%{consumed_timestep: String.to_integer(token)})
backup_code_valid?(user, token) -> backup_code_valid?(user, token) ->
changeset change(changeset, otp_backup_codes: remove_backup_code(user, token))
|> change(%{otp_backup_codes: remove_backup_code(user, token)})
true -> true ->
changeset add_error(changeset, :twofactor_token, "Invalid token")
|> add_error(:twofactor_token, "Invalid token")
end end
end end
@ -500,6 +497,7 @@ defmodule Philomena.Users.User do
"data:image/png;base64," <> png "data:image/png;base64," <> png
end end
@spec totp_secret(%Philomena.Users.User{}) :: binary()
def totp_secret(user) do def totp_secret(user) do
Philomena.Users.Encryptor.decrypt_model( Philomena.Users.Encryptor.decrypt_model(
user.encrypted_otp_secret, user.encrypted_otp_secret,

View file

@ -7,7 +7,7 @@ defmodule PhilomenaWeb.Api.Json.ImageController do
alias Philomena.Repo alias Philomena.Repo
import Ecto.Query import Ecto.Query
plug :set_scraper_cache plug PhilomenaWeb.ScraperCachePlug
plug PhilomenaWeb.ApiRequireAuthorizationPlug when action in [:create] plug PhilomenaWeb.ApiRequireAuthorizationPlug when action in [:create]
plug PhilomenaWeb.UserAttributionPlug when action in [:create] plug PhilomenaWeb.UserAttributionPlug when action in [:create]
@ -55,13 +55,4 @@ defmodule PhilomenaWeb.Api.Json.ImageController do
|> render("error.json", changeset: changeset) |> render("error.json", changeset: changeset)
end end
end end
defp set_scraper_cache(conn, _opts) do
params =
conn.params
|> Map.put_new("image", %{})
|> Map.put("scraper_cache", conn.params["url"])
%{conn | params: params}
end
end end

View file

@ -4,7 +4,7 @@ defmodule PhilomenaWeb.Api.Json.Search.ReverseController do
alias PhilomenaWeb.ImageReverse alias PhilomenaWeb.ImageReverse
alias Philomena.Interactions alias Philomena.Interactions
plug :set_scraper_cache plug PhilomenaWeb.ScraperCachePlug
plug PhilomenaWeb.ScraperPlug, params_key: "image", params_name: "image" plug PhilomenaWeb.ScraperPlug, params_key: "image", params_name: "image"
def create(conn, %{"image" => image_params}) do def create(conn, %{"image" => image_params}) do
@ -21,23 +21,4 @@ defmodule PhilomenaWeb.Api.Json.Search.ReverseController do
|> put_view(PhilomenaWeb.Api.Json.ImageView) |> put_view(PhilomenaWeb.Api.Json.ImageView)
|> render("index.json", images: images, total: length(images), interactions: interactions) |> render("index.json", images: images, total: length(images), interactions: interactions)
end end
defp set_scraper_cache(conn, _opts) do
params =
conn.params
|> Map.put("image", %{})
|> Map.put("distance", normalize_dist(conn.params))
|> Map.put("scraper_cache", conn.params["url"])
%{conn | params: params}
end
defp normalize_dist(%{"distance" => distance}) do
("0" <> distance)
|> Float.parse()
|> elem(0)
|> Float.to_string()
end
defp normalize_dist(_dist), do: "0.25"
end end

View file

@ -3,7 +3,7 @@ defmodule PhilomenaWeb.Search.ReverseController do
alias PhilomenaWeb.ImageReverse alias PhilomenaWeb.ImageReverse
plug :set_scraper_cache plug PhilomenaWeb.ScraperCachePlug
plug PhilomenaWeb.ScraperPlug, params_key: "image", params_name: "image" plug PhilomenaWeb.ScraperPlug, params_key: "image", params_name: "image"
def index(conn, params) do def index(conn, params) do
@ -19,23 +19,4 @@ defmodule PhilomenaWeb.Search.ReverseController do
def create(conn, _params) do def create(conn, _params) do
render(conn, "index.html", title: "Reverse Search", images: nil) render(conn, "index.html", title: "Reverse Search", images: nil)
end end
defp set_scraper_cache(conn, _opts) do
params =
conn.params
|> Map.put_new("image", %{})
|> Map.put_new("scraper_cache", conn.params["url"])
|> Map.put("distance", normalize_dist(conn.params))
%{conn | params: params}
end
defp normalize_dist(%{"distance" => distance}) do
("0" <> distance)
|> Float.parse()
|> elem(0)
|> Float.to_string()
end
defp normalize_dist(_dist), do: "0.25"
end end

View file

@ -92,11 +92,11 @@ defmodule PhilomenaWeb.ImageNavigator do
field = String.to_existing_atom(sf) field = String.to_existing_atom(sf)
filter = range_filter(sf, @range_comparison_for_order[order], image[field]) filter = range_filter(sf, @range_comparison_for_order[order], image[field])
cond do case sf do
sf in [:_random, :_score] -> "_score" ->
{[sort], []} {[sort], []}
true -> _ ->
{[sort], [filter]} {[sort], [filter]}
end end
end end

View file

@ -13,6 +13,8 @@ defmodule PhilomenaWeb.AdvertPlug do
maybe_assign_ad(conn, image, show_ads?) maybe_assign_ad(conn, image, show_ads?)
end end
defp maybe_assign_ad(conn, image, show_ads?)
defp maybe_assign_ad(conn, nil, true), defp maybe_assign_ad(conn, nil, true),
do: Conn.assign(conn, :advert, record_impression(Adverts.random_live())) do: Conn.assign(conn, :advert, record_impression(Adverts.random_live()))
@ -22,8 +24,8 @@ defmodule PhilomenaWeb.AdvertPlug do
defp maybe_assign_ad(conn, _image, _false), defp maybe_assign_ad(conn, _image, _false),
do: conn do: conn
defp show_ads?(%{hide_advertisements: false}), defp show_ads?(%{hide_advertisements: hide}),
do: true do: !hide
defp show_ads?(_user), defp show_ads?(_user),
do: true do: true

View file

@ -0,0 +1,28 @@
defmodule PhilomenaWeb.ScraperCachePlug do
  @moduledoc """
  Normalizes incoming reverse-search/upload params before the scraper runs.

  Ensures an `"image"` params map exists, seeds `"scraper_cache"` from the
  request `"url"` (only when not already present), and coerces `"distance"`
  to a canonical float string, defaulting to `"0.25"`.
  """

  @spec init(any()) :: any()
  def init(opts), do: opts

  @spec call(Plug.Conn.t(), any()) :: Plug.Conn.t()
  def call(conn, _opts) do
    incoming = conn.params

    # Build up the normalized params step by step; put_new leaves any
    # caller-supplied "image"/"scraper_cache" values untouched.
    with_image = Map.put_new(incoming, "image", %{})
    with_cache = Map.put_new(with_image, "scraper_cache", incoming["url"])
    normalized = Map.put(with_cache, "distance", normalize_dist(incoming))

    %Plug.Conn{conn | params: normalized}
  end

  # Parse the user-supplied distance into a float string. The "0" prefix
  # makes bare-fraction input like ".25" parseable and guarantees
  # Float.parse/1 always succeeds on binary input.
  defp normalize_dist(%{"distance" => dist}) do
    {value, _rest} = Float.parse("0" <> dist)
    Float.to_string(value)
  end

  # No distance supplied: fall back to the default similarity threshold.
  defp normalize_dist(_params), do: "0.25"
end

View file

@ -1,5 +1,7 @@
defmodule PhilomenaWeb.ScraperPlug do defmodule PhilomenaWeb.ScraperPlug do
def init(opts), do: opts def init(opts) do
opts
end
def call(conn, opts) do def call(conn, opts) do
params_name = Keyword.get(opts, :params_name, "image") params_name = Keyword.get(opts, :params_name, "image")
@ -10,7 +12,8 @@ defmodule PhilomenaWeb.ScraperPlug do
conn conn
%{"scraper_cache" => url} when not is_nil(url) -> %{"scraper_cache" => url} when not is_nil(url) ->
Philomena.Http.get!(url) url
|> Philomena.Http.get!()
|> maybe_fixup_params(opts, conn) |> maybe_fixup_params(opts, conn)
_ -> _ ->
@ -36,7 +39,7 @@ defmodule PhilomenaWeb.ScraperPlug do
updated_params = Map.put(conn.params, params_name, updated_form) updated_params = Map.put(conn.params, params_name, updated_form)
%{conn | params: updated_params} %Plug.Conn{conn | params: updated_params}
end end
defp maybe_fixup_params(_response, _opts, conn), do: conn defp maybe_fixup_params(_response, _opts, conn), do: conn

View file

@ -5,6 +5,8 @@ defmodule PhilomenaWeb.AdvertView do
advert_url_root() <> "/" <> image advert_url_root() <> "/" <> image
end end
def advert_image_url(_), do: nil
defp advert_url_root do defp advert_url_root do
Application.get_env(:philomena, :advert_url_root) Application.get_env(:philomena, :advert_url_root)
end end

View file

@ -10,7 +10,8 @@ defmodule Philomena.MixProject do
compilers: [:phoenix, :gettext] ++ Mix.compilers(), compilers: [:phoenix, :gettext] ++ Mix.compilers(),
start_permanent: Mix.env() == :prod, start_permanent: Mix.env() == :prod,
aliases: aliases(), aliases: aliases(),
deps: deps() deps: deps(),
dialyzer: [plt_add_apps: [:mix]]
] ]
end end