Fix scraper error with invalid hostname

This commit is contained in:
Liam 2024-06-09 12:40:44 -04:00
parent 363e27f063
commit 1eed44aa95

View file

@@ -56,16 +56,17 @@ defmodule PhilomenaProxy.Scrapers do
   def scrape!(url) do
     uri = URI.parse(url)

-    @scrapers
-    |> Enum.find(& &1.can_handle?(uri, url))
-    |> wrap()
-    |> Enum.map(& &1.scrape(uri, url))
-    |> unwrap()
-  end
+    cond do
+      is_nil(uri.host) ->
+        # Scraping without a hostname doesn't make sense because the proxy cannot fetch it, and
+        # some scrapers may test properties of the hostname.
+        nil

-  defp wrap(nil), do: []
-  defp wrap(res), do: [res]
+      true ->
+        # Find the first scraper which can handle the URL and process, or return nil
+        Enum.find_value(@scrapers, nil, fn scraper ->
+          scraper.can_handle?(uri, url) && scraper.scrape(uri, url)
+        end)
+    end
+  end

-  defp unwrap([result]), do: result
-  defp unwrap(_result), do: nil
 end