From 39ce0ae4fde27774be13c604939bf65a9e5975c2 Mon Sep 17 00:00:00 2001
From: "byte[]"
Date: Wed, 9 Sep 2020 23:12:54 -0400
Subject: [PATCH] require http clients to handle errors

---
 lib/philomena/http.ex                            |  8 ++++----
 lib/philomena/scrapers/deviantart.ex             | 12 ++++++------
 lib/philomena/scrapers/raw.ex                    |  4 ++--
 lib/philomena/scrapers/tumblr.ex                 |  6 +++---
 lib/philomena/scrapers/twitter.ex                | 11 +++++++----
 lib/philomena/servers/picarto_channel_updater.ex |  4 ++--
 lib/philomena/servers/piczel_channel_updater.ex  |  4 ++--
 lib/philomena/servers/user_link_updater.ex       |  4 ++--
 .../plugs/compromised_password_check_plug.ex     |  4 ++--
 lib/philomena_web/plugs/scraper_plug.ex          |  4 ++--
 priv/repo/seeds_development.exs                  |  2 +-
 11 files changed, 33 insertions(+), 30 deletions(-)

diff --git a/lib/philomena/http.ex b/lib/philomena/http.ex
index 77a57c41..65c55699 100644
--- a/lib/philomena/http.ex
+++ b/lib/philomena/http.ex
@@ -1,10 +1,10 @@
 defmodule Philomena.Http do
-  def get!(url, headers \\ [], options \\ []) do
-    Tesla.get!(client(headers), url, opts: [adapter: adapter_opts(options)])
+  def get(url, headers \\ [], options \\ []) do
+    Tesla.get(client(headers), url, opts: [adapter: adapter_opts(options)])
   end

-  def head!(url, headers \\ [], options \\ []) do
-    Tesla.head!(client(headers), url, opts: [adapter: adapter_opts(options)])
+  def head(url, headers \\ [], options \\ []) do
+    Tesla.head(client(headers), url, opts: [adapter: adapter_opts(options)])
   end

   defp adapter_opts(opts) do
diff --git a/lib/philomena/scrapers/deviantart.ex b/lib/philomena/scrapers/deviantart.ex
index f83d132f..55ffacf2 100644
--- a/lib/philomena/scrapers/deviantart.ex
+++ b/lib/philomena/scrapers/deviantart.ex
@@ -30,7 +30,7 @@ defmodule Philomena.Scrapers.Deviantart do
     |> try_old_hires!()
   end

-  defp extract_data!(%Tesla.Env{body: body, status: 200}) do
+  defp extract_data!({:ok, %Tesla.Env{body: body, status: 200}}) do
     [image] = Regex.run(@image_regex, body, capture: :all_but_first)
     [source] = Regex.run(@source_regex, body, capture: :all_but_first)
     [artist] = Regex.run(@artist_regex, source, capture: :all_but_first)
@@ -51,7 +51,7 @@ defmodule Philomena.Scrapers.Deviantart do
     with [domain, object_uuid, object_name] <-
            Regex.run(@cdnint_regex, image.url, capture: :all_but_first),
          built_url <- "#{domain}/intermediary/f/#{object_uuid}/#{object_name}",
-         %Tesla.Env{status: 200} <- Philomena.Http.head!(built_url) do
+         {:ok, %Tesla.Env{status: 200}} <- Philomena.Http.head(built_url) do
       # This is the high resolution URL.
       %{
         data
@@ -110,8 +110,8 @@ defmodule Philomena.Scrapers.Deviantart do

     built_url = "http://orig01.deviantart.net/x_by_x-d#{base36}.png"

-    case Philomena.Http.get!(built_url) do
-      %Tesla.Env{status: 301, headers: headers} ->
+    case Philomena.Http.get(built_url) do
+      {:ok, %Tesla.Env{status: 301, headers: headers}} ->
         # Location header provides URL of high res image.
         {_location, link} =
           Enum.find(headers, fn {header, _val} -> header == "location" end)
@@ -135,8 +135,8 @@ defmodule Philomena.Scrapers.Deviantart do
   defp follow_redirect(_url, 0), do: nil

   defp follow_redirect(url, max_times) do
-    case Philomena.Http.get!(url) do
-      %Tesla.Env{headers: headers, status: code} when code in [301, 302] ->
+    case Philomena.Http.get(url) do
+      {:ok, %Tesla.Env{headers: headers, status: code}} when code in [301, 302] ->
         location = Enum.find_value(headers, &location_header/1)
         follow_redirect(location, max_times - 1)

diff --git a/lib/philomena/scrapers/raw.ex b/lib/philomena/scrapers/raw.ex
index 01e4506e..0085f54c 100644
--- a/lib/philomena/scrapers/raw.ex
+++ b/lib/philomena/scrapers/raw.ex
@@ -3,9 +3,9 @@ defmodule Philomena.Scrapers.Raw do

   @spec can_handle?(URI.t(), String.t()) :: true | false
   def can_handle?(_uri, url) do
-    Philomena.Http.head!(url)
+    Philomena.Http.head(url)
     |> case do
-      %Tesla.Env{status: 200, headers: headers} ->
+      {:ok, %Tesla.Env{status: 200, headers: headers}} ->
         headers
         |> Enum.any?(fn {k, v} ->
           String.downcase(k) == "content-type" and String.downcase(v) in @mime_types
diff --git a/lib/philomena/scrapers/tumblr.ex b/lib/philomena/scrapers/tumblr.ex
index df91d4ee..8ce08999 100644
--- a/lib/philomena/scrapers/tumblr.ex
+++ b/lib/philomena/scrapers/tumblr.ex
@@ -26,12 +26,12 @@ defmodule Philomena.Scrapers.Tumblr do
         tumblr_api_key()
       }"

-    Philomena.Http.get!(api_url)
+    Philomena.Http.get(api_url)
     |> json!()
     |> process_response!()
   end

-  defp json!(%Tesla.Env{body: body, status: 200}),
+  defp json!({:ok, %Tesla.Env{body: body, status: 200}}),
     do: Jason.decode!(body)

   defp process_response!(%{"response" => %{"posts" => [post | _rest]}}),
diff --git a/lib/philomena/scrapers/twitter.ex b/lib/philomena/scrapers/twitter.ex
index 7750d710..07ecafa7 100644
--- a/lib/philomena/scrapers/twitter.ex
+++ b/lib/philomena/scrapers/twitter.ex
@@ -46,10 +46,13 @@ defmodule Philomena.Scrapers.Twitter do
     url = "https://twitter.com/#{user}/status/#{status_id}"

     {gt, bearer} =
-      Philomena.Http.get!(page_url)
+      Philomena.Http.get(page_url)
       |> extract_guest_token_and_bearer()

-    Philomena.Http.get!(api_url, [{"Authorization", "Bearer #{bearer}"}, {"x-guest-token", gt}])
+    {:ok, api_resp} =
+      Philomena.Http.get(api_url, [{"Authorization", "Bearer #{bearer}"}, {"x-guest-token", gt}])
+
+    api_resp
     |> Map.get(:body)
     |> Jason.decode!()
     |> Map.get("globalObjects")
@@ -59,11 +62,11 @@ defmodule Philomena.Scrapers.Twitter do
     |> Map.put("url", url)
   end

-  defp extract_guest_token_and_bearer(%Tesla.Env{body: page}) do
+  defp extract_guest_token_and_bearer({:ok, %Tesla.Env{body: page}}) do
     [gt] = Regex.run(@gt_regex, page, capture: :all_but_first)
     [script] = Regex.run(@script_regex, page, capture: :all_but_first)

-    %{body: body} = Philomena.Http.get!(script)
+    {:ok, %{body: body}} = Philomena.Http.get(script)

     [bearer] = Regex.run(@bearer_regex, body, capture: :all_but_first)

diff --git a/lib/philomena/servers/picarto_channel_updater.ex b/lib/philomena/servers/picarto_channel_updater.ex
index a6edf84a..b136058b 100644
--- a/lib/philomena/servers/picarto_channel_updater.ex
+++ b/lib/philomena/servers/picarto_channel_updater.ex
@@ -22,13 +22,13 @@ defmodule Philomena.Servers.PicartoChannelUpdater do
     now = DateTime.utc_now() |> DateTime.truncate(:second)

     @api_online
-    |> Philomena.Http.get!()
+    |> Philomena.Http.get()
     |> handle_response(now)

     run()
   end

-  defp handle_response(%Tesla.Env{body: body, status: 200}, now) do
+  defp handle_response({:ok, %Tesla.Env{body: body, status: 200}}, now) do
     resp =
       body
       |> Jason.decode!()
diff --git a/lib/philomena/servers/piczel_channel_updater.ex b/lib/philomena/servers/piczel_channel_updater.ex
index 78a57624..bd703f77 100644
--- a/lib/philomena/servers/piczel_channel_updater.ex
+++ b/lib/philomena/servers/piczel_channel_updater.ex
@@ -22,13 +22,13 @@ defmodule Philomena.Servers.PiczelChannelUpdater do
     now = DateTime.utc_now() |> DateTime.truncate(:second)

     @api_online
-    |> Philomena.Http.get!()
+    |> Philomena.Http.get()
     |> handle_response(now)

     run()
   end

-  defp handle_response(%Tesla.Env{body: body, status: 200}, now) do
+  defp handle_response({:ok, %Tesla.Env{body: body, status: 200}}, now) do
     resp =
       body
       |> Jason.decode!()
diff --git a/lib/philomena/servers/user_link_updater.ex b/lib/philomena/servers/user_link_updater.ex
index d2ab12f1..db0e2c3b 100644
--- a/lib/philomena/servers/user_link_updater.ex
+++ b/lib/philomena/servers/user_link_updater.ex
@@ -59,11 +59,11 @@ defmodule Philomena.Servers.UserLinkUpdater do

     user_link
     |> Map.get(:uri)
-    |> Philomena.Http.get!()
+    |> Philomena.Http.get()
     |> handle_response(user_link)
   end

-  defp handle_response(%Tesla.Env{body: body, status: 200}, user_link) do
+  defp handle_response({:ok, %Tesla.Env{body: body, status: 200}}, user_link) do
     case :binary.match(body, user_link.verification_code) do
       :nomatch ->
         nil
diff --git a/lib/philomena_web/plugs/compromised_password_check_plug.ex b/lib/philomena_web/plugs/compromised_password_check_plug.ex
index d14e3a83..eaeecd2a 100644
--- a/lib/philomena_web/plugs/compromised_password_check_plug.ex
+++ b/lib/philomena_web/plugs/compromised_password_check_plug.ex
@@ -35,8 +35,8 @@ defmodule PhilomenaWeb.CompromisedPasswordCheckPlug do
       :crypto.hash(:sha, password)
       |> Base.encode16()

-    case Philomena.Http.get!(make_api_url(prefix)) do
-      %Tesla.Env{body: body, status: 200} -> String.contains?(body, rest)
+    case Philomena.Http.get(make_api_url(prefix)) do
+      {:ok, %Tesla.Env{body: body, status: 200}} -> String.contains?(body, rest)
       _ -> false
     end
   end
diff --git a/lib/philomena_web/plugs/scraper_plug.ex b/lib/philomena_web/plugs/scraper_plug.ex
index 41108857..7ec60ea4 100644
--- a/lib/philomena_web/plugs/scraper_plug.ex
+++ b/lib/philomena_web/plugs/scraper_plug.ex
@@ -13,7 +13,7 @@ defmodule PhilomenaWeb.ScraperPlug do

       %{"scraper_cache" => url} when not is_nil(url) ->
         url
-        |> Philomena.Http.get!()
+        |> Philomena.Http.get()
         |> maybe_fixup_params(opts, conn)

       _ ->
@@ -21,7 +21,7 @@
     end
   end

-  defp maybe_fixup_params(%Tesla.Env{body: body, status: 200}, opts, conn) do
+  defp maybe_fixup_params({:ok, %Tesla.Env{body: body, status: 200}}, opts, conn) do
     params_name = Keyword.get(opts, :params_name, "image")
     params_key = Keyword.get(opts, :params_key, "image")
     file = Briefly.create!()
diff --git a/priv/repo/seeds_development.exs b/priv/repo/seeds_development.exs
index 848f09d8..d5c26e37 100644
--- a/priv/repo/seeds_development.exs
+++ b/priv/repo/seeds_development.exs
@@ -52,7 +52,7 @@ for image_def <- resources["remote_images"] do
   now = DateTime.utc_now() |> DateTime.to_unix(:microsecond)

   IO.puts "Fetching #{image_def["url"]} ..."
-  %{body: body} = Philomena.Http.get!(image_def["url"])
+  {:ok, %{body: body}} = Philomena.Http.get(image_def["url"])
   File.write!(file, body)
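
Usage note: because Philomena.Http.get/3 and head/3 now delegate to Tesla.get/3 and
Tesla.head/3 rather than the raising variants, callers receive a tagged tuple and are
expected to match on it, as the updated call sites above do with {:ok, %Tesla.Env{status: 200}}.
A minimal sketch of what a caller can look like follows; it assumes Tesla returns
{:error, reason} on transport failures, and the module and function names are illustrative,
not part of this patch:

    # Illustrative caller (hypothetical module/function names). Philomena.Http.get/1
    # wraps Tesla.get/3, so it is assumed to yield {:ok, %Tesla.Env{}} for any HTTP
    # response and {:error, reason} when the request itself fails (DNS, timeout, etc.).
    defmodule Philomena.HttpExample do
      def fetch_body(url) do
        case Philomena.Http.get(url) do
          {:ok, %Tesla.Env{status: 200, body: body}} ->
            # The server answered 200; hand the body back to the caller.
            {:ok, body}

          {:ok, %Tesla.Env{status: status}} ->
            # The server answered, but not with 200; surface the status.
            {:error, {:bad_status, status}}

          {:error, reason} ->
            # Transport-level failure that get!/1 would previously have raised.
            {:error, reason}
        end
      end
    end

The updated call sites in this patch take the same shape: they match only the success case
they care about and fall through to their existing error handling (for example the `_ -> false`
clause in the compromised-password plug) for everything else.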