# philomena/lib/mix/tasks/upload_to_s3.ex
defmodule Mix.Tasks.UploadToS3 do
  @moduledoc """
  Copies locally stored media files into the S3 storage backend.

  Usage:

      mix upload_to_s3 [--concurrency N] [--adverts] [--avatars] [--badges] [--tags] [--images] TIME

  `TIME` is a required RFC3339 timestamp (e.g. `1970-01-01T00:00:00Z`); only
  records with `updated_at >= TIME` are processed, which makes incremental
  re-runs possible. Each category switch is independent and opt-in.

  Source directories default to the legacy `priv/static/system/...` paths and
  can be overridden with the `OLD_*_FILE_ROOT` environment variables;
  destination prefixes come from the `:philomena` application environment.
  """

  use Mix.Task

  alias Philomena.{
    Adverts.Advert,
    Badges.Badge,
    Images.Image,
    Tags.Tag,
    Users.User
  }

  alias Philomena.Images.Thumbnailer
  alias PhilomenaMedia.Objects
  alias PhilomenaQuery.Batch

  import Ecto.Query

  @shortdoc "Dumps existing image files to S3 storage backend"
  @requirements ["app.start"]

  @impl Mix.Task
  def run(args) do
    {args, rest} =
      OptionParser.parse_head!(args,
        strict: [
          concurrency: :integer,
          adverts: :boolean,
          avatars: :boolean,
          badges: :boolean,
          tags: :boolean,
          images: :boolean
        ]
      )

    # Used as the DB batch size; each batch is uploaded with one async task
    # per record, so it also bounds the amount of in-flight work per batch.
    concurrency = Keyword.get(args, :concurrency, 4)

    time =
      with [time] <- rest,
           {:ok, time, _} <- DateTime.from_iso8601(time) do
        time
      else
        _ -> raise ArgumentError, "Must provide a RFC3339 start time, like 1970-01-01T00:00:00Z"
      end

    if args[:adverts] do
      file_root = System.get_env("OLD_ADVERT_FILE_ROOT", "priv/static/system/images/adverts")
      new_file_root = Application.fetch_env!(:philomena, :advert_file_root)

      IO.puts("\nAdverts:")

      upload_typical(
        where(Advert, [a], not is_nil(a.image) and a.updated_at >= ^time),
        concurrency,
        file_root,
        new_file_root,
        :image
      )
    end

    if args[:avatars] do
      file_root = System.get_env("OLD_AVATAR_FILE_ROOT", "priv/static/system/images/avatars")
      new_file_root = Application.fetch_env!(:philomena, :avatar_file_root)

      IO.puts("\nAvatars:")

      upload_typical(
        where(User, [u], not is_nil(u.avatar) and u.updated_at >= ^time),
        concurrency,
        file_root,
        new_file_root,
        :avatar
      )
    end

    if args[:badges] do
      file_root = System.get_env("OLD_BADGE_FILE_ROOT", "priv/static/system/images")
      new_file_root = Application.fetch_env!(:philomena, :badge_file_root)

      IO.puts("\nBadges:")

      upload_typical(
        where(Badge, [b], not is_nil(b.image) and b.updated_at >= ^time),
        concurrency,
        file_root,
        new_file_root,
        :image
      )
    end

    if args[:tags] do
      file_root = System.get_env("OLD_TAG_FILE_ROOT", "priv/static/system/images")
      new_file_root = Application.fetch_env!(:philomena, :tag_file_root)

      IO.puts("\nTags:")

      upload_typical(
        where(Tag, [t], not is_nil(t.image) and t.updated_at >= ^time),
        concurrency,
        file_root,
        new_file_root,
        :image
      )
    end

    if args[:images] do
      file_root =
        Path.join(System.get_env("OLD_IMAGE_FILE_ROOT", "priv/static/system/images"), "thumbs")

      new_file_root = Application.fetch_env!(:philomena, :image_file_root)

      # Temporarily set file root to empty path so we can get the proper prefix
      Application.put_env(:philomena, :image_file_root, "")

      IO.puts("\nImages:")

      upload_images(
        where(Image, [i], not is_nil(i.image) and i.updated_at >= ^time),
        concurrency,
        file_root,
        new_file_root
      )
    end
  end

  # Uploads the single file named by `field_name` for every record in
  # `queryable`, `batch_size` records at a time, with one upload task per
  # record. Prints the id/timestamp of each batch's first record as progress.
  defp upload_typical(queryable, batch_size, file_root, new_file_root, field_name) do
    Batch.record_batches(queryable, [batch_size: batch_size], fn models ->
      models
      |> Task.async_stream(&upload_typical_model(&1, file_root, new_file_root, field_name),
        timeout: :infinity
      )
      |> Stream.run()

      IO.write("\r#{hd(models).id} (#{DateTime.to_iso8601(hd(models).updated_at)})")
    end)
  end

  # Uploads one record's file, keyed by `field_name` (e.g. `:image`, `:avatar`).
  # Records whose file is missing on disk are silently skipped.
  defp upload_typical_model(model, file_root, new_file_root, field_name) do
    field = Map.fetch!(model, field_name)
    path = Path.join(file_root, field)

    if File.regular?(path) do
      put_file(path, Path.join(new_file_root, field))
    end
  end

  # Like `upload_typical/5`, but each image record has multiple generated
  # thumbnail versions that all need uploading.
  defp upload_images(queryable, batch_size, file_root, new_file_root) do
    Batch.record_batches(queryable, [batch_size: batch_size], fn models ->
      models
      |> Task.async_stream(&upload_image_model(&1, file_root, new_file_root), timeout: :infinity)
      |> Stream.run()

      IO.write("\r#{hd(models).id} (#{DateTime.to_iso8601(hd(models).updated_at)})")
    end)
  end

  # Uploads every thumbnail version of `model`, skipping versions whose
  # source file does not exist on disk.
  defp upload_image_model(model, file_root, new_file_root) do
    path_prefix = Thumbnailer.image_thumb_prefix(model)

    Thumbnailer.all_versions(model)
    |> Enum.map(fn version ->
      path = Path.join([file_root, path_prefix, version])
      new_path = Path.join([new_file_root, path_prefix, version])

      if File.regular?(path) do
        put_file(path, new_path)
      end
    end)
  end

  # Single choke point for the object-store write; note the argument order
  # (destination key first, then local source path) required by Objects.put/2.
  defp put_file(path, uploaded_path) do
    Objects.put(uploaded_path, path)
  end
end